def _draw_masks(
        self,
        writer: SummaryWriter,
        global_step: int,
        image_over_predicted_mask: np.ndarray,
        image_over_gt_mask: Optional[np.ndarray] = None,
    ) -> None:
        """
        Log mask overlays to tensorboard as images.

        Args:
            writer: tensorboard summary writer
            global_step: global step to log the images under
            image_over_predicted_mask: image blended with the predicted mask
            image_over_gt_mask: image blended with the ground-truth mask;
                skipped when ``None``
        """
        # ground truth first (when available), then the prediction
        overlays = [
            (f"{self.log_name} Ground Truth", image_over_gt_mask),
            (f"{self.log_name} Prediction", image_over_predicted_mask),
        ]
        for tag, overlay in overlays:
            if overlay is None:
                continue
            writer.add_image(
                tag,
                overlay,
                global_step=global_step,
                dataformats="HWC",
            )
# Example 2
 def __call__(self, writer: SummaryWriter, tag, sample, idx):
     """Compute a Grad-CAM attribution for ``sample`` and log it as an image.

     Args:
         writer: tensorboard summary writer
         tag: base tag; the image is logged under ``{tag}_gradcam``
         sample: input image, tensor or array-like; channels-last inputs
             (trailing dim of 3) are permuted to channels-first
         idx: global step for the logged image
     """
     if not isinstance(sample, torch.Tensor):
         sample = torch.tensor(sample)
     if sample.shape[-1] == 3:  # channels-last (H, W, C) -> (C, H, W)
         sample = sample.permute(2, 0, 1)
     batch = self.forward_pass_preprocess(sample).unsqueeze(0)
     device = next(self.model.parameters()).device
     batch = batch.to(device)
     logits = self.model(batch)
     # argmax over the flattened (1, num_classes) logits == predicted class
     attribution = self.grad_cam.attribute(batch, logits.argmax())
     heatmap = attribution.squeeze().detach().cpu().numpy()
     heatmap = heatmap - heatmap.min()
     # BUG FIX: guard against a constant attribution map, which previously
     # caused a division by zero (max is 0 after subtracting the min)
     peak = heatmap.max()
     if peak > 0:
         heatmap /= peak
     heatmap = (heatmap * 255).astype(np.uint8)
     # NOTE(review): heatmap may be 2-D (H, W) after squeeze — confirm the
     # writer's default dataformats handles that for this model's output
     writer.add_image(f"{tag}_gradcam", heatmap, global_step=idx)
# Example 3
    def on_stage_start(self, runner: IRunner):
        """Register the ``_base`` tensorboard writer at stage start.

        Args:
            runner: current runner; ``runner.logdir`` must already be set
        """
        assert runner.logdir is not None

        base_key = "_base"
        self.loggers[base_key] = SummaryWriter(
            os.path.join(runner.logdir, f"{base_key}_log")
        )
# Example 4
def dump_environment(
    experiment_config: Any,
    logdir: str,
    configs_path: List[str] = None,  # NOTE(review): should be Optional[List[str]]
) -> None:
    """
    Saves config, environment variables and package list in JSON into logdir.

    Args:
        experiment_config: experiment config
        logdir: path to logdir
        configs_path: path(s) to config files to copy alongside; ``str`` and
            ``pathlib.Path`` entries are accepted, anything else is skipped
    """
    configs_path = configs_path or []
    # FIX: previously Path entries were silently dropped by the str-only
    # isinstance filter; accept both str and Path
    configs_path = [
        Path(path) for path in configs_path if isinstance(path, (str, Path))
    ]
    config_dir = Path(logdir) / "configs"
    config_dir.mkdir(exist_ok=True, parents=True)

    if IS_HYDRA_AVAILABLE and isinstance(experiment_config, DictConfig):
        # keep a resolved YAML copy of the hydra config, then work with a
        # plain container below so json.dumps can serialize it
        with open(config_dir / "config.yaml", "w") as f:
            f.write(OmegaConf.to_yaml(experiment_config, resolve=True))
        experiment_config = OmegaConf.to_container(
            experiment_config, resolve=True
        )

    environment = get_environment_vars()

    save_config(experiment_config, config_dir / "_config.json")
    save_config(environment, config_dir / "_environment.json")

    pip_pkg = list_pip_packages()
    (config_dir / "pip-packages.txt").write_text(pip_pkg)
    conda_pkg = list_conda_packages()
    if conda_pkg:
        (config_dir / "conda-packages.txt").write_text(conda_pkg)

    for path in configs_path:
        shutil.copyfile(path, config_dir / path.name)

    # double the newlines so tensorboard's markdown renderer keeps line breaks
    config_str = json.dumps(experiment_config, indent=2, ensure_ascii=False)
    config_str = config_str.replace("\n", "\n\n")

    environment_str = json.dumps(environment, indent=2, ensure_ascii=False)
    environment_str = environment_str.replace("\n", "\n\n")

    pip_pkg = pip_pkg.replace("\n", "\n\n")
    conda_pkg = conda_pkg.replace("\n", "\n\n")
    with SummaryWriter(config_dir) as writer:
        writer.add_text("_config", config_str, 0)
        writer.add_text("_environment", environment_str, 0)
        writer.add_text("pip-packages", pip_pkg, 0)
        if conda_pkg:
            writer.add_text("conda-packages", conda_pkg, 0)
# Example 5
    def on_stage_start(self, runner: "IRunner") -> None:
        """Stage start hook. Check ``logdir`` correctness.

        Creates the ``_base`` tensorboard writer under
        ``{runner.logdir}/_base_log``.

        Args:
            runner: current runner
        """
        assert runner.logdir is not None

        mode = "_base"
        writer_dir = os.path.join(runner.logdir, f"{mode}_log")
        self.loggers[mode] = SummaryWriter(writer_dir)
# Example 6
    def initialize(self, state: State):
        """
        Initializes the tensorboard loggers.

        Creates one writer per loader, lazily, under
        ``{self.logdir}/{loader_name}_log``.

        :param state: The state with which the callback is called.
        """
        # nothing to do without a logdir or when a writer already exists
        if self.logdir is None:
            return
        if state.loader_name in self.loggers:
            return
        log_path = str(self.logdir / f"{state.loader_name}_log")
        self.loggers[state.loader_name] = SummaryWriter(log_path)
    def on_loader_start(self, runner: "IRunner"):
        """Loader start hook.

        Lazily creates an image writer for the current loader and resets
        the per-loader step counter.

        Args:
            runner: current runner
        """
        loader_key = runner.loader_key
        if loader_key not in self.loggers:
            images_dir = os.path.join(
                runner.logdir, f"{loader_key}_log/images/"
            )
            self.loggers[loader_key] = SummaryWriter(images_dir)
        self.step = 0
# Example 8
 def on_loader_start(self, runner: IRunner):
     """Set up the per-loader writer and interpretation buffers.

     Does nothing when this loader should not be interpreted.
     """
     if not self._should_interpret_loader(runner):
         return
     name = runner.loader_name
     if name not in self.loggers:
         self.loggers[name] = SummaryWriter(str(runner.logdir / f"{name}_log"))
     if name not in self.interpretations:
         # per-loader accumulators for sample losses and their indices
         self.interpretations[name] = {"loss": [], "indices": []}
# Example 9
def main(args, _=None):
    """Run ``catalyst-data project-embeddings`` script.

    Loads features + metadata, optionally subsamples rows and attaches
    images, and writes a tensorboard embedding projector checkpoint to
    ``args.out_dir``.
    """
    import re  # local import: no top-of-file import block in this snippet

    df = pd.read_csv(args.in_csv)
    os.makedirs(args.out_dir, exist_ok=True)

    if args.meta_cols is not None:
        meta_header = args.meta_cols.split(",")
    else:
        raise ValueError("meta-cols must not be None")

    features = np.load(args.in_npy, mmap_mode="r")
    assert len(df) == len(features)

    if args.num_rows is not None:
        # NOTE(review): np.random.choice samples WITH replacement by default,
        # so duplicate rows are possible — pass replace=False if undesired
        indices = np.random.choice(len(df), args.num_rows)
        features = features[indices, :]
        df = df.iloc[indices]

    if args.img_col is not None:
        img_data = _load_image_data(
            rootpath=args.img_rootpath, paths=df[args.img_col].values
        )
    else:
        img_data = None

    summary_writer = SummaryWriter(args.out_dir)
    # BUG FIX: the original used str.replace(r"\s", ...), which replaces the
    # literal two-character string "\s" — a regex was clearly intended.
    # Collapse newlines and whitespace runs to single spaces so the
    # projector's TSV metadata stays well-formed.
    metadata = [
        [re.sub(r"\s+", " ", str(text)).strip() for text in texts]
        for texts in df[meta_header].values.tolist()
    ]
    assert len(metadata) == len(features)
    summary_writer.add_embedding(
        features,
        metadata=metadata,
        label_img=img_data,
        metadata_header=meta_header,
    )
    summary_writer.close()

    print(
        f"Done. Run `tensorboard --logdir={args.out_dir}` "
        + "to view in Tensorboard"
    )
# Example 10
def main(args, _=None):
    """Run ``catalyst-data project-embeddings`` script.

    Loads features + metadata, optionally subsamples rows and attaches
    images, and writes a tensorboard embedding projector checkpoint to
    ``args.out_dir``.
    """
    df = pd.read_csv(args.in_csv)
    os.makedirs(args.out_dir, exist_ok=True)

    if args.meta_cols is not None:
        meta_header = args.meta_cols.split(",")
    else:
        raise ValueError("meta-cols must not be None")

    features = np.load(args.in_npy, mmap_mode="r")

    if args.num_rows is not None:
        df = df.sample(n=args.num_rows)
        # BUG FIX: keep features aligned with the sampled rows — previously
        # the full feature matrix was passed to add_embedding alongside a
        # subsampled metadata frame, producing mismatched lengths.
        # Assumes df came straight from read_csv (default RangeIndex), so the
        # retained index values double as positional rows — TODO confirm.
        features = features[df.index.to_numpy(), :]

    if args.img_col is not None:
        image_names = [
            path.join(args.img_rootpath, name)
            for name in df[args.img_col].values
        ]
        img_data = np.stack(
            [load_image(name, args.img_size) for name in image_names], axis=0
        )
        # HWC uint8 -> NCHW float in [0, 1]
        img_data = (
            img_data.transpose((0, 3, 1, 2)) / 255.0  # noqa: WPS432
        ).astype(np.float32)
        img_data = torch.from_numpy(img_data)
    else:
        img_data = None

    summary_writer = SummaryWriter(args.out_dir)
    summary_writer.add_embedding(
        features,
        metadata=df[meta_header].astype(str).values,
        label_img=img_data,
        metadata_header=meta_header,
    )
    summary_writer.close()

    print(f"Done. Run `tensorboard --logdir={args.out_dir}` " +
          "to view in Tensorboard")
# Example 11
def img(writer: SummaryWriter, sample: Tuple, prefix: str, task_name: str, key: str):
    """Publish the input image stored under ``key`` in ``sample`` to tensorboard."""
    inputs, _targets = sample  # targets are not logged here
    writer.add_image(f'{prefix}_{task_name}_images', inputs[key])
# Example 12
 def on_loader_start(self, runner: "IRunner"):
     """Prepare tensorboard writers for the current stage."""
     key = runner.loader_key
     if key in self.loggers:
         return
     self.loggers[key] = SummaryWriter(
         os.path.join(runner.logdir, f"{key}_log")
     )
# Example 13
def text_publisher(writer: SummaryWriter, tag: str, sample: Any, idx: int):
    """Log a text ``sample`` to tensorboard under ``{tag}_text`` at step ``idx``."""
    full_tag = f'{tag}_text'
    writer.add_text(full_tag, sample, global_step=idx)
# Example 14
def img_publisher(writer: SummaryWriter, tag: str, sample: Any, idx: int):
    """Log an image ``sample`` to tensorboard under ``{tag}_images`` at step ``idx``."""
    full_tag = f'{tag}_images'
    writer.add_image(full_tag, sample, global_step=idx)
# Example 15
def text(writer: SummaryWriter, sample: Tuple, prefix: str, task_name: str, key: str):
    """Publish the text stored under ``key`` in ``sample`` to tensorboard."""
    inputs, _targets = sample  # targets are not logged here
    writer.add_text(f'{prefix}_{task_name}_text', inputs[key])
# Example 16
 def initialize(self, state):
     """Lazily create a tensorboard writer for ``state.loader_name``."""
     if self.logdir is None:
         return
     if state.loader_name in self.loggers:
         return
     self.loggers[state.loader_name] = SummaryWriter(
         str(self.logdir / f"{state.loader_name}_log")
     )
# Example 17
 def on_loader_start(self, state: State):
     """Prepare tensorboard writers for the current stage."""
     loader = state.loader_name
     if loader not in self.loggers:
         self.loggers[loader] = SummaryWriter(
             os.path.join(state.logdir, f"{loader}_log")
         )