def on_stage_start(self, state: State): """@TODO: Docs. Contribution is welcome.""" assert state.logdir is not None extra_mode = "_base" log_dir = os.path.join(state.logdir, f"{extra_mode}_log") self.loggers[extra_mode] = SummaryWriter(log_dir)
def on_loader_start(self, state):
    """Prepare a tensorboard writer for the current loader."""
    if state.logdir is None:
        return

    lm = state.loader_name
    if lm not in self.loggers:
        log_dir = os.path.join(state.logdir, f"{lm}_log")
        self.loggers[lm] = SummaryWriter(log_dir)
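# A minimal, self-contained sketch (not from the source) of the per-loader
# writer convention used above: one `{loader_name}_log` directory per loader
# under the run's logdir. It assumes tensorboardX provides `SummaryWriter`
# (the `torch.utils.tensorboard` writer has the same interface); the logdir,
# loader names and metric values below are hypothetical.
import os
from tensorboardX import SummaryWriter

logdir = "./logs/example_run"
loggers = {}
for loader_name in ("train", "valid"):
    log_dir = os.path.join(logdir, f"{loader_name}_log")
    loggers[loader_name] = SummaryWriter(log_dir)

# each loader gets its own curve in TensorBoard under the same tag
loggers["train"].add_scalar("loss", 0.42, global_step=1)
loggers["valid"].add_scalar("loss", 0.57, global_step=1)

for writer in loggers.values():
    writer.close()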
def _prepare_logger(self, logdir, mode):
    """Create a per-sampler ``SummaryWriter`` under ``logdir``,
    or disable logging when ``logdir`` is None."""
    if logdir is not None:
        timestamp = utils.get_utcnow_time()
        logpath = f"{logdir}/sampler.{mode}.{self._sampler_id}.{timestamp}"
        os.makedirs(logpath, exist_ok=True)
        self.logdir = logpath
        self.logger = SummaryWriter(logpath)
    else:
        self.logdir = None
        self.logger = None
def dump_environment(
    experiment_config: Dict,
    logdir: str,
    configs_path: List[str] = None,
) -> None:
    """
    Saves the experiment config, environment variables
    and package lists into ``logdir``.

    Args:
        experiment_config (dict): experiment config
        logdir (str): path to logdir
        configs_path: path(s) to config file(s) to copy alongside
    """
    configs_path = configs_path or []
    configs_path = [
        Path(path) for path in configs_path if isinstance(path, str)
    ]
    config_dir = Path(logdir) / "configs"
    config_dir.mkdir(exist_ok=True, parents=True)

    environment = get_environment_vars()

    safitty.save(experiment_config, config_dir / "_config.json")
    safitty.save(environment, config_dir / "_environment.json")

    pip_pkg = list_pip_packages()
    (config_dir / "pip-packages.txt").write_text(pip_pkg)
    conda_pkg = list_conda_packages()
    if conda_pkg:
        (config_dir / "conda-packages.txt").write_text(conda_pkg)

    for path in configs_path:
        name: str = path.name
        outpath = config_dir / name
        shutil.copyfile(path, outpath)

    # mirror everything into tensorboard as text summaries; newlines are
    # doubled so the markdown rendering in TensorBoard keeps the line breaks
    config_str = json.dumps(experiment_config, indent=2, ensure_ascii=False)
    config_str = config_str.replace("\n", "\n\n")
    environment_str = json.dumps(environment, indent=2, ensure_ascii=False)
    environment_str = environment_str.replace("\n", "\n\n")
    pip_pkg = pip_pkg.replace("\n", "\n\n")
    conda_pkg = conda_pkg.replace("\n", "\n\n")

    with SummaryWriter(config_dir) as writer:
        writer.add_text("_config", config_str, 0)
        writer.add_text("_environment", environment_str, 0)
        writer.add_text("pip-packages", pip_pkg, 0)
        if conda_pkg:
            writer.add_text("conda-packages", conda_pkg, 0)
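# Sketch (not from the source): a hypothetical call to ``dump_environment``
# and the artifacts it produces; the config, logdir and yml path are
# illustrative only.
experiment_config = {"model_params": {"model": "SimpleNet"}, "stages": {}}
dump_environment(
    experiment_config=experiment_config,
    logdir="./logs/example_run",
    configs_path=["./configs/train.yml"],
)
# Expected layout, based on the function above:
#   ./logs/example_run/configs/_config.json
#   ./logs/example_run/configs/_environment.json
#   ./logs/example_run/configs/pip-packages.txt
#   ./logs/example_run/configs/conda-packages.txt   (only if conda is present)
#   ./logs/example_run/configs/train.yml            (copied config)
# plus a tensorboard event file holding the same data as text summaries.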
def main(args, _=None):
    """Run ``catalyst-data project-embeddings`` script."""
    df = pd.read_csv(args.in_csv)
    os.makedirs(args.out_dir, exist_ok=True)

    if args.meta_cols is not None:
        meta_header = args.meta_cols.split(",")
    else:
        raise ValueError("meta-cols must not be None")

    features = np.load(args.in_npy, mmap_mode="r")

    if args.num_rows is not None:
        df = df.sample(n=args.num_rows)
        # keep the features aligned with the sampled metadata rows;
        # assumes the csv rows and the npy rows share the same order
        features = features[df.index.values]

    if args.img_col is not None:
        image_names = [
            path.join(args.img_rootpath, name)
            for name in df[args.img_col].values
        ]
        img_data = np.stack(
            [load_image(name, args.img_size) for name in image_names],
            axis=0,
        )
        img_data = (
            img_data.transpose((0, 3, 1, 2)) / 255.0
        ).astype(np.float32)
        img_data = torch.from_numpy(img_data)
    else:
        img_data = None

    summary_writer = SummaryWriter(args.out_dir)
    summary_writer.add_embedding(
        features,
        metadata=df[meta_header].astype(str).values,
        label_img=img_data,
        metadata_header=meta_header,
    )
    summary_writer.close()

    print(
        f"Done. Run `tensorboard --logdir={args.out_dir}` "
        f"to view in Tensorboard"
    )
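# Sketch (not from the source): calling ``main`` directly with a Namespace
# that mirrors the CLI arguments read above. The field names are taken from
# the ``args.*`` accesses in the function; the paths and values themselves
# are hypothetical.
from argparse import Namespace

args = Namespace(
    in_csv="./metadata.csv",        # csv with one row per embedding
    in_npy="./embeddings.npy",      # float array, rows aligned with the csv
    out_dir="./projector_logs",
    meta_cols="label,split",        # comma-separated metadata columns
    num_rows=1000,                  # subsample for a lighter projector
    img_col=None,                   # set to an image-path column for thumbnails
    img_rootpath=None,
    img_size=None,
)
main(args)
# afterwards: tensorboard --logdir=./projector_logs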
def on_loader_start(self, state: State):
    """Prepare a tensorboard writer for the current loader."""
    if state.loader_name not in self.loggers:
        log_dir = os.path.join(state.logdir, f"{state.loader_name}_log")
        self.loggers[state.loader_name] = SummaryWriter(log_dir)
def on_stage_start(self, state: State):
    """Prepare the ``_base`` tensorboard writer for the stage."""
    assert state.logdir is not None

    extra_mode = "_base"
    log_dir = os.path.join(state.logdir, f"{extra_mode}_log")
    self.loggers[extra_mode] = SummaryWriter(log_dir)
def _prepare_logger(self, logdir):
    """Create a timestamped trainer ``SummaryWriter`` under ``logdir``."""
    timestamp = utils.get_utcnow_time()
    logpath = f"{logdir}/trainer.{timestamp}"
    os.makedirs(logpath, exist_ok=True)
    self.logger = SummaryWriter(logpath)
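# Sketch (not from the source): the timestamped ``trainer.<utc-time>`` path
# above gives every trainer instance its own run directory, so repeated runs
# under the same parent logdir appear as separate runs in TensorBoard.
# The timestamp helper and logdir below are stand-ins for
# ``utils.get_utcnow_time()`` and a real run directory.
import os
from datetime import datetime

from tensorboardX import SummaryWriter  # assumption: tensorboardX writer

timestamp = datetime.utcnow().strftime("%y%m%d-%H%M%S")
logpath = f"./logs/trainer.{timestamp}"
os.makedirs(logpath, exist_ok=True)

logger = SummaryWriter(logpath)
logger.add_scalar("trainer/updates", 1.0, global_step=0)
logger.close()
# view all runs together with: tensorboard --logdir=./logs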