Example #1
0
class TensorboardXWriter(EventWriter):
    """Write all scalars, images and histograms in the event storage to a tensorboard file."""

    def __init__(self, log_dir: str, window_size: int = 20, **kwargs):
        """
        Args:
            log_dir (str): directory to save the output events.
            window_size (int): scalars are median-smoothed over this many iterations.
            kwargs: forwarded to `torch.utils.tensorboard.SummaryWriter(...)`.
        """
        self._window_size = window_size
        from torch.utils.tensorboard import SummaryWriter

        self._writer = SummaryWriter(log_dir, **kwargs)

    def write(self):
        storage = get_event_storage()
        smoothed = storage.latest_with_smoothing_hint(self._window_size)
        for tag, scalar in smoothed.items():
            self._writer.add_scalar(tag, scalar, storage.iter)

        # Flush queued images, then clear them so they are not written twice.
        if len(storage._vis_data) > 0:
            for tag, image, step in storage._vis_data:
                self._writer.add_image(tag, image, step)
            storage.clear_images()

        # Same pattern for raw histogram payloads.
        if len(storage._histograms) > 0:
            for hist_kwargs in storage._histograms:
                self._writer.add_histogram_raw(**hist_kwargs)
            storage.clear_histograms()

    def close(self):
        # _writer does not exist if the tensorboard import failed in __init__.
        if hasattr(self, "_writer"):
            self._writer.close()
Example #2
0
class TensorboardXWriter(EventWriter):
    """
    Write all scalars to a tensorboard file.
    """

    def __init__(self, log_dir: str, window_size: int = 20, **kwargs):
        """
        Args:
            log_dir (str): the directory to save the output events
            window_size (int): the scalars will be median-smoothed by this window size

            kwargs: other arguments passed to `torch.utils.tensorboard.SummaryWriter(...)`
        """
        self._window_size = window_size
        from torch.utils.tensorboard import SummaryWriter

        self._writer = SummaryWriter(log_dir, **kwargs)
        # Iteration of the most recently written scalar; lets repeated
        # write() calls skip data points that were already flushed.
        self._last_write = -1

    def write(self):
        storage = get_event_storage()
        new_last_write = self._last_write
        # Each entry maps a scalar name to (value, iteration).  The local was
        # renamed from `iter` to `iteration` to avoid shadowing the builtin.
        for k, (v, iteration) in storage.latest_with_smoothing_hint(
                self._window_size).items():
            if iteration > self._last_write:
                self._writer.add_scalar(k, v, iteration)
                new_last_write = max(new_last_write, iteration)
        self._last_write = new_last_write

        # storage.put_{image,histogram} is only meant to be used by
        # tensorboard writer. So we access its internal fields directly from here.
        if len(storage._vis_data) >= 1:
            for img_name, img, step_num in storage._vis_data:
                self._writer.add_image(img_name, img, step_num)
            # Storage stores all image data and rely on this writer to clear them.
            # As a result it assumes only one writer will use its image data.
            # An alternative design is to let storage store limited recent
            # data (e.g. only the most recent image) that all writers can access.
            # In that case a writer may not see all image data if its period is long.
            storage.clear_images()

        if len(storage._histograms) >= 1:
            for params in storage._histograms:
                self._writer.add_histogram_raw(**params)
            storage.clear_histograms()

    def close(self):
        if hasattr(self,
                   "_writer"):  # doesn't exist when the code fails at import
            self._writer.close()
Example #3
0
class TensorboardLoggerHook(LogBufferWriter):
    """Write all scalars to a tensorboard file."""

    def __init__(self, log_dir: str, window_size: int = 20, **kwargs):
        """
        Args:
            log_dir (str): the directory used to save the output events
            window_size (int): the scalars will be median-smoothed by this window size

            kwargs: other arguments will be passed to `torch.utils.tensorboard.SummaryWriter(...)`
        """
        self._window_size = window_size

        from torch.utils.tensorboard import SummaryWriter
        # Event files land in a 'runs' subdirectory of log_dir.
        self._writer = SummaryWriter(log_dir + '/runs/', **kwargs)

    def process_buffer_data(self):
        """Flush scalars, then images, then histograms from the log buffer."""
        self._add_scalar()
        self._add_image()
        self._add_histogram()

    def _add_scalar(self):
        # Write every median-smoothed scalar at the buffer's current iteration.
        step = self.log_buffer.iter
        smoothed = self.log_buffer.latest_with_smoothing_hint(self._window_size)
        for tag, scalar in smoothed.items():
            self._writer.add_scalar(tag, scalar, step)

    def _add_image(self):
        # NOTE(review): the length check is on `vis_data` but iteration goes
        # through `vis_data.images` — assumed consistent; verify against the
        # log buffer implementation.
        if len(self.log_buffer.vis_data) > 0:
            for tag, image, step in self.log_buffer.vis_data.images:
                self._writer.add_image(tag, image, step)
            # The buffer keeps image data until a writer consumes it.
            self.log_buffer.clear_images()

    def _add_histogram(self):
        if len(self.log_buffer.histograms) > 0:
            for raw_params in self.log_buffer.histograms.histograms:
                self._writer.add_histogram_raw(**raw_params)
            self.log_buffer.clear_histograms()

    def close(self):
        # _writer is missing when the tensorboard import failed in __init__.
        if hasattr(self, '_writer'):
            self._writer.close()
Example #4
0
class TensorboardXWriter(HookBase):
    """Hook that periodically flushes the trainer's event storage to tensorboard."""

    def __init__(self, cfg):
        self._period = cfg.TEST.WRITER_PERIOD
        self._writer = SummaryWriter(cfg.OUTPUT_DIR)
        # Iteration of the newest scalar already flushed to tensorboard.
        self._last_write = -1

    def after_step(self):
        # Flush once every `_period` steps (counting steps from 1).
        if (self.trainer.iter + 1) % self._period == 0:
            self.write()

    def write(self):
        storage = self.trainer.storage
        newest = self._last_write
        # Each entry maps a scalar name to (value, iteration); only write
        # points newer than the previous flush.
        for tag, (value, iteration) in storage.latest_with_smoothing(
                self._period).items():
            if iteration <= self._last_write:
                continue
            newest = max(newest, iteration)
            # Group related scalars under tensorboard sections by tag prefix.
            if 'loss' in tag:
                tag = f'Loss/{tag}'
            elif ('time' in tag) or ('second' in tag):
                tag = f'Time/{tag}'
            self._writer.add_scalar(tag, value, iteration)
        self._last_write = newest

        if len(storage._vis_data) > 0:
            for tag, image, step in storage._vis_data:
                self._writer.add_image(tag, image, step)
            storage.clear_images()

        if len(storage._histograms) > 0:
            for hist_kwargs in storage._histograms:
                self._writer.add_histogram_raw(**hist_kwargs)
            storage.clear_histograms()

    def after_train(self):
        # _writer doesn't exist when the code fails at import.
        if hasattr(self, "_writer"):
            self._writer.close()
Example #5
0
class SummaryWriter:
    """Wrapper around torch's SummaryWriter that adds a shared default global
    step, an on/off switch (`active`), and module-level aliases for all of its
    add_*/set_* methods (registered on instantiation).
    """

    def __init__(self, logdir, flush_secs=120):
        """
        Args:
            logdir: directory where the event files are written.
            flush_secs: how often (in seconds) pending events are flushed to disk.
        """
        self.writer = TensorboardSummaryWriter(
            log_dir=logdir,
            purge_step=None,
            max_queue=10,
            flush_secs=flush_secs,
            filename_suffix='')

        # Step used by every add_* call when no explicit global_step is given.
        self.global_step = None
        # When False, every add_* call below becomes a no-op.
        self.active = True

        # ------------------------------------------------------------------------
        # register add_* and set_* functions in summary module on instantiation
        # ------------------------------------------------------------------------
        # NOTE: this mutates the module namespace, so the most recently created
        # instance owns the module-level aliases.
        this_module = sys.modules[__name__]
        list_of_names = dir(SummaryWriter)
        for name in list_of_names:

            # add functions (without the 'add' prefix)
            if name.startswith('add_'):
                setattr(this_module, name[4:], getattr(self, name))

            #  set functions
            if name.startswith('set_'):
                setattr(this_module, name, getattr(self, name))

    def set_global_step(self, value):
        """Set the default step used when add_* calls omit global_step."""
        self.global_step = value

    def set_active(self, value):
        """Enable (True) or disable (False) all add_* calls."""
        self.active = value

    # Every add_* method below follows the same pattern: skip when inactive,
    # fall back to the shared global step, then delegate to the wrapped writer.

    def add_audio(self, tag, snd_tensor, global_step=None, sample_rate=44100, walltime=None):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_audio(
                tag, snd_tensor, global_step=global_step, sample_rate=sample_rate, walltime=walltime)

    def add_custom_scalars(self, layout):
        if self.active:
            self.writer.add_custom_scalars(layout)

    def add_custom_scalars_marginchart(self, tags, category='default', title='untitled'):
        if self.active:
            self.writer.add_custom_scalars_marginchart(tags, category=category, title=title)

    def add_custom_scalars_multilinechart(self, tags, category='default', title='untitled'):
        if self.active:
            self.writer.add_custom_scalars_multilinechart(tags, category=category, title=title)

    def add_embedding(self, mat, metadata=None, label_img=None, global_step=None,
                      tag='default', metadata_header=None):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_embedding(
                mat, metadata=metadata, label_img=label_img, global_step=global_step,
                tag=tag, metadata_header=metadata_header)

    def add_figure(self, tag, figure, global_step=None, close=True, walltime=None):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_figure(
                tag, figure, global_step=global_step, close=close, walltime=walltime)

    def add_graph(self, model, input_to_model=None, verbose=False):
        if self.active:
            self.writer.add_graph(model, input_to_model=input_to_model, verbose=verbose)

    def add_histogram(self, tag, values, global_step=None, bins='tensorflow', walltime=None, max_bins=None):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_histogram(
                tag, values, global_step=global_step, bins=bins,
                walltime=walltime, max_bins=max_bins)

    # Parameter names (min, max, num, sum) mirror the underlying tensorboard
    # API and deliberately shadow builtins so keyword calls pass through.
    def add_histogram_raw(self, tag, min, max, num, sum, sum_squares,
                          bucket_limits, bucket_counts, global_step=None,
                          walltime=None):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_histogram_raw(
                tag, min=min, max=max, num=num, sum=sum, sum_squares=sum_squares,
                bucket_limits=bucket_limits, bucket_counts=bucket_counts,
                global_step=global_step, walltime=walltime)

    def add_image(self, tag, img_tensor, global_step=None, walltime=None, dataformats='CHW'):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_image(
                tag, img_tensor, global_step=global_step, walltime=walltime, dataformats=dataformats)

    def add_image_with_boxes(self, tag, img_tensor, box_tensor, global_step=None,
                             walltime=None, rescale=1, dataformats='CHW'):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_image_with_boxes(
                tag, img_tensor, box_tensor,
                global_step=global_step, walltime=walltime,
                rescale=rescale, dataformats=dataformats)

    def add_images(self, tag, img_tensor, global_step=None, walltime=None, dataformats='NCHW'):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_images(
                tag, img_tensor, global_step=global_step, walltime=walltime, dataformats=dataformats)

    def add_mesh(self, tag, vertices, colors=None, faces=None, config_dict=None, global_step=None, walltime=None):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_mesh(
                tag, vertices, colors=colors, faces=faces, config_dict=config_dict,
                global_step=global_step, walltime=walltime)

    def add_onnx_graph(self, graph):
        if self.active:
            self.writer.add_onnx_graph(graph)

    def add_pr_curve(self, tag, labels, predictions, global_step=None,
                     num_thresholds=127, weights=None, walltime=None):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_pr_curve(
                tag, labels, predictions, global_step=global_step,
                num_thresholds=num_thresholds, weights=weights, walltime=walltime)

    def add_pr_curve_raw(self, tag, true_positive_counts,
                         false_positive_counts,
                         true_negative_counts,
                         false_negative_counts,
                         precision,
                         recall,
                         global_step=None,
                         num_thresholds=127,
                         weights=None,
                         walltime=None):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_pr_curve_raw(
                tag, true_positive_counts,
                false_positive_counts,
                true_negative_counts,
                false_negative_counts,
                precision,
                recall,
                global_step=global_step,
                num_thresholds=num_thresholds,
                weights=weights,
                walltime=walltime)

    def add_scalar(self, tag, scalar_value, global_step=None, walltime=None):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_scalar(
                tag, scalar_value, global_step=global_step, walltime=walltime)

    def add_scalars(self, main_tag, tag_scalar_dict, global_step=None, walltime=None):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_scalars(
                main_tag, tag_scalar_dict, global_step=global_step, walltime=walltime)

    def add_text(self, tag, text_string, global_step=None, walltime=None):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_text(
                tag, text_string, global_step=global_step, walltime=walltime)

    def add_video(self, tag, vid_tensor, global_step=None, fps=4, walltime=None):
        if self.active:
            global_step = self.global_step if global_step is None else global_step
            self.writer.add_video(
                tag, vid_tensor, global_step=global_step, fps=fps, walltime=walltime)

    def close(self):
        """Flush and close the underlying event file writer."""
        self.writer.close()

    def __enter__(self):
        # Delegates context management to (and returns) the wrapped writer.
        return self.writer.__enter__()

    def __exit__(self, exc_type, exc_val, exc_tb):
        return self.writer.__exit__(exc_type, exc_val, exc_tb)
Example #6
0
	def eval(self, model: Model, writer: SummaryWriter, step):
		"""Run one evaluation pass over this dataset and write summaries.

		Args:
			model: model to evaluate; its train/eval mode is restored on exit.
			writer: tensorboard writer used for scalar and histogram output.
			step: global step attached to all written summaries.
		"""
		# Remember the current mode so it can be restored after evaluation.
		training = model.training
		model.eval()
		scalar_summaries = defaultdict(list)
		list_summaries = defaultdict(list)
		data_loader = DataLoader(
			self,
			batch_size=self.config['eval_batch_size'],
			num_workers=self.config['num_workers'],
			collate_fn=self.collate_fn,
			drop_last=True,
		)

		print('')
		for eval_step, data in enumerate(data_loader):
			x, y = data[0], data[1]
			x, y = x.to(self.config['device']), y.to(self.config['device'])
			with torch.no_grad():
				y_hat = model(x)
			loss, scalar_summary, list_summary = model.compute_loss(x, y, y_hat, step)
			# Progress line rewritten in place via '\r'.
			print('\r[Evaluating, Step {:7}, Loss {:5}]'.format(
				eval_step, '%.3f' %loss), end=''
			)

			# Scalars are averaged over batches; list summaries are concatenated.
			for (k, v) in scalar_summary.items():
				scalar_summaries[k].append(v)

			for (k, v) in list_summary.items():
				list_summaries[k] += v

		# write summaries
		for (k, v) in scalar_summaries.items():
			v = np.array(v).mean().item()
			writer.add_scalar(k, v, step)

		for (k, v) in list_summaries.items():
			v = np.array(v)

			if k[:4] == 'mIoU':
				# NOTE(review): assumes each list entry holds num_classes**2
				# flattened confusion-matrix counts — verify against
				# model.compute_loss.  Batches are summed into one
				# (num_classes x num_classes) matrix.
				num_classes = self.config['y_c']
				confusion_matrix = v.reshape(-1, num_classes ** 2)
				confusion_matrix = confusion_matrix.sum(axis=0) \
					.reshape(num_classes, num_classes)
				mious = []
				for i in range(num_classes):
					true_positive = confusion_matrix[i, i].item()
					false_positive = (confusion_matrix[i, :].sum() - true_positive).item()
					false_negative = (confusion_matrix[:, i].sum() - true_positive).item()
					denom = true_positive + false_positive + false_negative
					# Classes absent from both prediction and target get IoU 0.
					mious.append(0 if denom == 0 else float(true_positive) / denom)
					if hasattr(self, 'class_id2label'):
						writer.add_scalar(k + self.class_id2label[i], mious[-1], step)
				# NOTE(review): tag becomes k + 'mIoU/overall'; since k already
				# starts with 'mIoU' the prefix is doubled — confirm intended.
				writer.add_scalar(k + 'mIoU/overall', sum(mious) / len(mious), step)
			else:
				# Histogram of values over 11 equal-width buckets in [0, 1.1).
				bins = np.linspace(0., 1.1, num=12)
				counts, limits = np.histogram(v, bins=bins)
				sum_sq = v.dot(v)

				writer.add_histogram_raw(
					tag=k,
					min=v.min(), max=v.max(),
					num=len(v), sum=v.sum(),
					sum_squares=sum_sq,
					bucket_limits=limits[1:].tolist(),
					bucket_counts=counts.tolist(),
					global_step=step
				)

		# Restore whichever mode the model was in before evaluation.
		model.train(training)
Example #7
0
class MyTensorboardXWriter(EventWriter):
    """Write all scalars to a tensorboard file."""

    def __init__(self,
                 log_dir: str,
                 window_size: int = 20,
                 backend: str = "pytorch",
                 **kwargs):
        """
        Args:
            log_dir (str): The directory to save the output events
            window_size (int): the scalars will be median-smoothed by this window size
            backend (str): "pytorch" or "tensorboardX" (case-insensitive);
                selects which SummaryWriter implementation to use.
            kwargs: other arguments passed to `torch.utils.tensorboard.SummaryWriter(...)`

        Raises:
            ValueError: if `backend` is not a supported value.
        """
        self._window_size = window_size
        backend_lower = backend.lower()

        # Validate with an explicit exception: the previous `assert` was
        # stripped under `python -O` and made the ValueError branch below
        # unreachable when assertions were enabled.
        if backend_lower == "pytorch":
            from torch.utils.tensorboard import SummaryWriter
        elif backend_lower == "tensorboardx":
            from tensorboardX import SummaryWriter
        else:
            raise ValueError(
                "Unknown TensorboardXWriter backend: {}, available backends are: pytorch or tensorboardX"
                .format(backend))

        self.backend = backend
        self._writer = SummaryWriter(log_dir, **kwargs)

    def write(self):
        storage = get_event_storage()
        # Write the median over `window_size` of every scalar history;
        # smoothing is applied here instead of being left to the tensorboard UI.
        for k, v in storage.histories().items():
            self._writer.add_scalar(k, v.median(self._window_size),
                                    storage.iter)

        # storage.put_{image,histogram} is only meant to be used by
        # tensorboard writer. So we access its internal fields directly from here.
        if len(storage._vis_data) >= 1:
            for img_name, img, step_num in storage._vis_data:
                # default format CHW (C=1 or 3), rgb,
                # can be either float[0,1] or uint8[0,255]
                self._writer.add_image(img_name, img, step_num)
            # Storage stores all image data and rely on this writer to clear them.
            # As a result it assumes only one writer will use its image data.
            # An alternative design is to let storage store limited recent
            # data (e.g. only the most recent image) that all writers can access.
            # In that case a writer may not see all image data if its period is long.
            storage.clear_images()

        if len(storage._histograms) >= 1:
            for params in storage._histograms:
                self._writer.add_histogram_raw(**params)
            storage.clear_histograms()

    def close(self):
        if hasattr(self,
                   "_writer"):  # doesn't exist when the code fails at import
            self._writer.close()