Example #1
0
def add_animated_gif(
    writer: SummaryWriter,
    tag: str,
    image_tensor: Union[np.ndarray, torch.Tensor],
    max_out: int = 3,
    frame_dim: int = -3,
    scale_factor: float = 1.0,
    global_step: Optional[int] = None,
) -> None:
    """Write an image tensor in `CHWD` format to TensorBoard as an animated GIF.

    Args:
        writer: TensorBoard ``SummaryWriter`` to log to.
        tag: data identifier.
        image_tensor: image to add, expected in `CHWD` layout.
        max_out: maximum number of image channels to animate through.
        frame_dim: dimension used as the GIF frames for `CHWD` input;
            defaults to ``-3`` (the first spatial dim).
        scale_factor: multiplier applied to pixel values. For data in
            [0, 1], use 255 to scale it to a displayable range.
        global_step: global step value to record.
    """
    summaries = make_animated_gif_summary(
        tag=tag,
        image=image_tensor,
        writer=writer,
        max_out=max_out,
        frame_dim=frame_dim,
        scale_factor=scale_factor,
    )
    file_writer = writer._get_file_writer()
    # one GIF summary is produced per channel; record each of them
    for channel_summary in summaries:
        file_writer.add_summary(channel_summary, global_step)
Example #2
0
def add_animated_gif_no_channels(
    writer: SummaryWriter,
    tag: str,
    image_tensor: Union[np.ndarray, torch.Tensor],
    max_out: int,
    scale_factor: float,
    global_step: Optional[int] = None,
) -> None:
    """Creates an animated gif out of an image tensor in 'HWD' format that does not have
    a channel dimension and writes it with SummaryWriter. This is similar to the "add_animated_gif"
    after inserting a channel dimension of 1.

    Args:
        writer: Tensorboard SummaryWriter to write to
        tag: Data identifier
        image_tensor: tensor for the image to add, expected to be in `HWD` format (no channel dim)
        max_out: maximum number of slices to animate through
        scale_factor: amount to multiply values by. If the image data is between 0 and 1,
                              using 255 for this value will scale it to displayable range
        global_step: Global step value to record
    """
    writer._get_file_writer().add_summary(
        make_animated_gif_summary(tag,
                                  image_tensor,
                                  max_out=max_out,
                                  animation_axes=[1],
                                  image_axes=[1, 2],
                                  scale_factor=scale_factor),
        global_step,
    )
Example #3
0
def add_animated_gif(
    writer: SummaryWriter,
    tag: str,
    # np.ndarray is the type; np.array (a function) is not valid inside Union
    image_tensor: Union[np.ndarray, Tensor],
    max_out: int,
    scale_factor: float,
    global_step: Optional[int] = None,
) -> None:
    """Creates an animated gif out of an image tensor in 'CHWD' format and writes it with SummaryWriter.

    Args:
        writer (SummaryWriter): Tensorboard SummaryWriter to write to
        tag (str): Data identifier
        image_tensor (np.ndarray or Tensor): tensor for the image to add, expected to be in CHWD format
        max_out (int): maximum number of slices to animate through
        scale_factor (float): amount to multiply values by. If the image data is between 0 and 1, using 255 for this value will
            scale it to displayable range
        global_step (int): Global step value to record
    """
    writer._get_file_writer().add_summary(
        make_animated_gif_summary(
            tag, image_tensor, max_out=max_out, animation_axes=[1], image_axes=[2, 3], scale_factor=scale_factor
        ),
        global_step,
    )
def async_log(tensorboard_dir, log):
    """Worker loop that drains the global ``log_queue`` into TensorBoard/logger.

    Queue entries are either the string ``"finish"`` (drain remaining items,
    then stop), ``None`` (stop immediately), a ``("tensorboard", function,
    args)`` tuple forwarded to the SummaryWriter, or a ``("log", message)``
    tuple forwarded to ``log.info``.

    Args:
        tensorboard_dir: directory the ``SummaryWriter`` writes event files to.
        log: logger for plain-text entries; if ``None``, the worker exits.
    """
    global log_queue
    if log_queue is None or log is None:
        return
    finished = False
    tensorboard_writer = SummaryWriter(tensorboard_dir)

    try:
        while True:
            if finished and log_queue.empty():
                return

            temp = log_queue.get()
            if temp is None:
                # hard-stop sentinel: exit without draining the queue
                return
            if temp == "finish":
                finished = True
                continue

            if temp[0] == "tensorboard":
                function, args = temp[1:]
                getattr(tensorboard_writer, function)(*args)
                tensorboard_writer._get_file_writer().flush()
            elif temp[0] == "log":
                log.info(temp[1])
    finally:
        # fix: close on every exit path — the original leaked the writer
        # (events left unflushed) when the None sentinel arrived
        tensorboard_writer.close()
Example #5
0
class TensorboardWriter:
    """Thin convenience wrapper around a TensorBoard ``SummaryWriter``.

    Opens a writer over an existing output directory and exposes small
    ``save_*`` helpers that forward to the corresponding ``add_*`` calls.
    """

    def __init__(self, outdir):
        # fix: validate with a real exception — ``assert`` is stripped under
        # ``python -O`` and would silently let a bad path through
        if not os.path.isdir(outdir):
            raise NotADirectoryError(f"not a directory: {outdir!r}")
        self.outdir = outdir
        self.writer = SummaryWriter(self.outdir, flush_secs=10)

    def save_scalar(self, tag, scalar_value, global_step=None):
        """Log a single scalar value."""
        self.writer.add_scalar(tag, scalar_value, global_step)

    def save_scalars(self, main_tag, scalars_dict, global_step=None):
        """Log several related scalars grouped under one main tag."""
        self.writer.add_scalars(main_tag, scalars_dict, global_step)

    def save_image(self, tag, image, global_step=None, dataformats='CHW'):
        """Log an image; ``dataformats`` describes the axis layout of ``image``."""
        self.writer.add_image(tag=tag,
                              img_tensor=image,
                              global_step=global_step,
                              dataformats=dataformats)

    def save_figure(self, tag, figure, global_step=None, close=False):
        """Log a matplotlib figure; ``close=True`` closes it after rendering."""
        self.writer.add_figure(tag=tag,
                               figure=figure,
                               global_step=global_step,
                               close=close)

    def save_graph(self, model, inputs_to_model=None):
        """
        Saves graph to the tensorboard. Ideally call once.
        :param model: The torch.nn.Module object
        :param inputs_to_model: tensor or a list of tensor(batch) will also be showed.
        :return: None
        """
        try:
            self.writer.add_graph(model, inputs_to_model)
        except Exception:
            # add_graph is known to fail on some models (tracing issues);
            # log the traceback and keep going rather than aborting training
            logger.exception(
                'Check this for fix: https://github.com/lanpa/tensorboardX/issues/389#issuecomment-475879228'
            )

    def save_embedding(self,
                       mat,
                       metadata=None,
                       label_img=None,
                       global_step=None,
                       tag='default',
                       metadata_header=None):
        """Log an embedding matrix with optional labels / thumbnail images."""
        self.writer.add_embedding(mat, metadata, label_img, global_step, tag,
                                  metadata_header)

    def flush(self):
        """
        If you need to flush all data immediately.
        """
        self.writer._get_file_writer().flush()

    def close(self):
        """
        To be called in the end
        """
        self.writer.close()
Example #6
0
class NestedFolderTensorboardLogger(LightningLoggerBase):
    """Lightning logger that writes to three nested TensorBoard runs.

    One root writer plus one each for the generator and discriminator,
    laid out as ``<save_dir>/<name>/{,generator,discriminator}``.
    """

    def __init__(self, save_dir, name, **kwargs):
        super().__init__()
        self.save_dir = save_dir
        self._name = name
        self.experiment_root = None
        self.experiment_discriminator = None
        self.experiment_generator = None
        self.kwargs = kwargs
        self.setup_experiments()

    def setup_experiments(self):
        """Create the run folders and one SummaryWriter per folder."""
        base = os.path.join(self.save_dir, self.name)
        gen_dir = os.path.join(self.save_dir, self.name, "generator")
        disc_dir = os.path.join(self.save_dir, self.name, "discriminator")
        for folder in (base, gen_dir, disc_dir):
            os.makedirs(folder, exist_ok=True)
        self.experiment_root = SummaryWriter(log_dir=base, **self.kwargs)
        self.experiment_discriminator = SummaryWriter(log_dir=disc_dir,
                                                      **self.kwargs)
        self.experiment_generator = SummaryWriter(log_dir=gen_dir,
                                                  **self.kwargs)

    @rank_zero_only
    def log_hyperparams(self, params):
        """Record hyperparameters from a Namespace-like object."""
        params = vars(params)
        self.experiment_root.add_hparams(hparam_dict=dict(params),
                                         metric_dict={})

    @rank_zero_only
    def log_metrics(self, metrics, step_num):
        """No-op: metrics go through the dedicated loss methods instead."""
        return

    @rank_zero_only
    def log_image(self, step, visualization):
        """Write an HWC image under the 'visualization' tag."""
        self.experiment_root.add_image('visualization',
                                       visualization,
                                       step,
                                       dataformats='HWC')

    @rank_zero_only
    def log_generator_losses(self, step, gan_loss, reconstruction_loss):
        """Record the generator's component losses side by side."""
        losses = {'gan_loss': gan_loss,
                  'reconstruction_loss': reconstruction_loss}
        self.experiment_generator.add_scalars('G/losses', losses, step)

    @rank_zero_only
    def log_discriminator_losses(self, step, real_loss, fake_loss):
        """Record the discriminator's real/fake losses side by side."""
        losses = {'real': real_loss, 'fake': fake_loss}
        self.experiment_discriminator.add_scalars('D/losses', losses, step)

    @rank_zero_only
    def log_total_generator_loss(self, step, loss):
        """Record the generator's combined loss."""
        self.experiment_generator.add_scalar('total', loss, step)

    @rank_zero_only
    def log_total_discriminator_loss(self, step, loss):
        """Record the discriminator's combined loss."""
        self.experiment_discriminator.add_scalar('total', loss, step)

    @rank_zero_only
    def save(self):
        """Flush every underlying event-file writer."""
        for experiment in (self.experiment_generator,
                           self.experiment_discriminator,
                           self.experiment_root):
            experiment._get_file_writer().flush()

    @rank_zero_only
    def finalize(self, status):
        """Flush on shutdown; the writers themselves stay open."""
        self.save()

    @property
    def version(self):
        """The run name doubles as the version string."""
        return self._name

    @property
    def name(self):
        """Run name supplied at construction."""
        return self._name
Example #7
0
class NestedTensorboardLogger(LightningLoggerBase):
    """Lightning logger writing to nested train/val TensorBoard runs.

    Layout: ``<save_dir>/<name>`` holds the root run, with ``train`` and
    ``val`` sub-runs receiving the respective losses.
    """

    @property
    def experiment(self):
        """Root SummaryWriter (the writer Lightning interacts with)."""
        return self.experiment_root

    @property
    def save_dir(self):
        """Base output directory supplied at construction."""
        return self._save_dir

    def __init__(self, save_dir, name, **kwargs):
        super().__init__()
        self._save_dir = save_dir
        self._name = name
        self.experiment_root = None
        self.experiment_train = None
        self.experiment_val = None
        self.kwargs = kwargs
        self.setup_experiments()

    def setup_experiments(self):
        """Create root/train/val folders and a SummaryWriter for each."""
        root_dir = os.path.join(self.save_dir, self.name)
        os.makedirs(root_dir, exist_ok=True)
        train_dir = os.path.join(self.save_dir, self.name, "train")
        val_dir = os.path.join(self.save_dir, self.name, "val")
        os.makedirs(train_dir, exist_ok=True)
        os.makedirs(val_dir, exist_ok=True)
        self.experiment_root = SummaryWriter(log_dir=root_dir, **self.kwargs)
        self.experiment_val = SummaryWriter(log_dir=val_dir, **self.kwargs)
        self.experiment_train = SummaryWriter(log_dir=train_dir, **self.kwargs)

    def log_hyperparams(self, params):
        """Record hyperparameters, normalising values add_hparams rejects.

        ``None`` values become the string ``"None"``; lists become
        LongTensors. Works on a shallow copy so the caller's mapping is
        not mutated (the original rewrote entries in place).
        """
        params = dict(params)  # fix: do not mutate the caller's dict
        for key, value in params.items():
            if value is None:
                params[key] = "None"
            elif isinstance(value, list):  # fix: isinstance over type() ==
                params[key] = torch.LongTensor(value)
        self.experiment_root.add_hparams(hparam_dict=params, metric_dict={})

    def log_train_loss(self, global_step, error):
        """Record the training loss in the train sub-run."""
        self.experiment_train.add_scalar('loss', error, global_step)

    def log_val_loss(self, global_step, loss):
        """Record the validation loss in the val sub-run."""
        self.experiment_val.add_scalar('loss', loss, global_step)

    def log_metrics(self, metrics, step):
        """No-op: metrics go through the dedicated loss methods instead."""
        return

    def save(self):
        """Flush every underlying event-file writer."""
        self.experiment_train._get_file_writer().flush()
        self.experiment_val._get_file_writer().flush()
        self.experiment_root._get_file_writer().flush()

    def finalize(self, status):
        """Flush on shutdown; the writers themselves stay open."""
        self.save()

    @property
    def version(self):
        """The run name doubles as the version string."""
        return self._name

    @property
    def name(self):
        """Run name supplied at construction."""
        return self._name