From 1f037bf426759eddb32de09eb91e51e5a99d99e0 Mon Sep 17 00:00:00 2001
From: Henry
Date: Mon, 5 Feb 2024 15:21:23 +0100
Subject: [PATCH] :fire::art: delete old code, try to raise error

- parsing file via include did not pick up conf.py configuration
---
 docs/docutils.conf |  4 ++++
 vaep/tf_board.py   | 57 ----------------------------------------------
 2 files changed, 4 insertions(+), 57 deletions(-)
 create mode 100644 docs/docutils.conf
 delete mode 100644 vaep/tf_board.py

diff --git a/docs/docutils.conf b/docs/docutils.conf
new file mode 100644
index 000000000..74349b17b
--- /dev/null
+++ b/docs/docutils.conf
@@ -0,0 +1,4 @@
+[general]
+nb_execution_mode: auto
+nb_execution_raise_on_error: true
+nb_merge_streams: true
\ No newline at end of file
diff --git a/vaep/tf_board.py b/vaep/tf_board.py
deleted file mode 100644
index bf8959e09..000000000
--- a/vaep/tf_board.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from pathlib import Path
-from datetime import datetime
-from torch.utils.tensorboard import SummaryWriter
-
-
-class TensorboardModelNamer():
-    """PyTorch SummaryWriter helper class for experiments.
-
-    Creates new SummaryWriter for an experiment
-    """
-
-    def __init__(self, prefix_folder, root_dir=Path('runs')):
-        """[summary]
-
-        Parameters
-        ----------
-        prefix_folder : str
-            Experiment folder-name. All new setups will be written to new summary files.
-        root_dir : Path, optional
-            Root directory to store experiments, by default Path('runs')
-        """
-        self.prefix_folder = prefix_folder
-        self.root_logdir = Path(root_dir)
-        self.folder = (self.root_logdir /
-                       f'{self.prefix_folder}_{format(datetime.now(), "%y%m%d_%H%M")}')
-
-    def get_model_name(self, hidden_layers: int,
-                       neurons: list,
-                       scaler: str,
-                       ):
-        name = 'model_'
-        name += f'hl{hidden_layers:02d}'
-
-        if isinstance(neurons, str):
-            neurons = neurons.split()
-        elif not type(neurons) in [list, tuple]:
-            raise TypeError(
-                "Provide expected format for neurons: [12, 13, 14], '12 13 14' or '12_13_14'")
-
-        for x in neurons:
-            name += f'_{x}'
-
-        if isinstance(scaler, str):
-            name += f'_{scaler}'
-        else:
-            name += f'_{scaler!r}'
-        return name
-
-    def get_writer(self, hidden_layers: int,
-                   neurons: list,
-                   scaler: str,
-                   ):
-        """Return a new SummaryWriter instance for one setup in an experiment."""
-        model_name = self.get_model_name(hidden_layers=hidden_layers,
-                                         neurons=neurons,
-                                         scaler=scaler)
-        return SummaryWriter(log_dir=self.folder / model_name)
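
Note on the new docs/docutils.conf: the three keys mirror myst-nb execution options that are normally set in Sphinx's conf.py. Per the commit message, files pulled in via the include directive are parsed by docutils directly, so they read docutils.conf rather than conf.py. A minimal sketch of the equivalent conf.py settings (assuming the myst-nb Sphinx extension; the option names below are the standard myst-nb ones, not copied from this repository's actual conf.py):

    # docs/conf.py -- sketch, assuming myst-nb is enabled as a Sphinx extension
    extensions = ["myst_nb"]

    # "auto": execute only notebooks that are missing at least one output
    nb_execution_mode = "auto"

    # fail the Sphinx build when a notebook cell raises an exception
    nb_execution_raise_on_error = True

    # merge consecutive stdout/stderr outputs into a single stream
    nb_merge_streams = True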