def add_histogram(self, tag, values, global_step=None, bins='tensorflow', walltime=None):
    """Add histogram to summary.

    Args:
        tag (string): Data identifier
        values (torch.Tensor, numpy.array, or string/blobname): Values to build histogram
        global_step (int): Global step value to record
        bins (string): One of {'tensorflow', 'auto', 'fd', ...}. This determines
            how the bins are made. You can find other options in:
            https://docs.scipy.org/doc/numpy/reference/generated/numpy.histogram.html
        walltime (float): Optional override default walltime (time.time())
            seconds after epoch of event

    Examples::

        from torch.utils.tensorboard import SummaryWriter
        import numpy as np
        writer = SummaryWriter()
        for i in range(10):
            x = np.random.random(1000)
            writer.add_histogram('distribution centers', x + i, i)
        writer.close()

    Expected result:

    .. image:: _static/img/tensorboard/add_histogram.png
       :scale: 50 %
    """
    # NOTE(review): the `bins` argument is accepted but never used below —
    # the summary is always built with `buckets=self.default_bins`.
    # Confirm whether `bins` was meant to be forwarded to summary.histogram.
    self._get_file_writer().add_summary(
        summary.histogram(tag, values, step=global_step, buckets=self.default_bins),
        global_step=global_step,
        walltime=walltime)
def param_summary(model, writer, step):
    """Write one histogram summary per parameter tensor in *model*.

    Args:
        model: Module exposing ``state_dict()``.
        writer: Summary writer providing ``add_summary``.
        step: Global step recorded with every summary.
    """
    for name, tensor in model.state_dict().items():
        flat = tensor.cpu().numpy().flatten()
        writer.add_summary(summary.histogram(name, flat), global_step=step)
def test_log_histogram_summary():
    """Smoke-test: write ten normal-distribution histograms to an event file."""
    writer = FileWriter('./experiment/histogram')
    for step in range(10):
        # Mean shifts each step; 10000 samples make a smoother-looking histogram.
        samples = np.random.normal(step * 0.1, 1.0, 10000)
        writer.add_summary(summary.histogram('discrete_normal', samples), step + 1)
    writer.flush()
    writer.close()
def log_gradient(self, network_name, gradient):
    """Record *gradient* as a histogram summary tagged with *network_name*.

    Requires ``self.summary_writer`` to have been set up beforehand.
    """
    assert self.summary_writer
    tag = '{0}'.format(network_name)
    self.summary_writer.add_summary(summary.histogram(tag, gradient))
def test_histogram_summary():
    """A histogram summary holds exactly one value carrying the given tag."""
    samples = np.random.normal(0.1, 1.0, 10)
    hist = summary.histogram('discrete_normal', samples)
    assert len(hist.value) == 1
    assert hist.value[0].tag == 'discrete_normal'
def get_grad(g):
    """Log gradient *g* as a TensorBoard histogram; return its scaled L2 norm.

    Relies on ``args``, ``summary_writer``, ``summary``, ``mx`` and ``np``
    from the enclosing scope.
    """
    flat = g.asnumpy().flatten()
    summary_writer.add_summary(summary.histogram(args.name, flat))
    # L2 norm scaled by the square root of the element count.
    return mx.nd.norm(g) / np.sqrt(g.size)
def add_histo_summary(summary_writer, name, value, step):
    """Flatten a torch tensor and write it as a histogram summary.

    Args:
        summary_writer: Writer providing ``add_summary``.
        name: Tag for the histogram.
        value: Torch tensor to flatten and log.
        step: Global step recorded with the summary.
    """
    flat = value.view(-1).data.cpu().numpy()
    summ = summary.histogram(name=name, values=flat)
    summary_writer.add_summary(summary=summ, global_step=step)
def log_histogram(name, values, step=None):
    """Log *values* as a histogram summary under tag *name*.

    Args:
        name: Tag for the histogram.
        values: Values to build the histogram from.
        step: Optional global step recorded with the summary.

    Raises:
        ValueError: If the module-level TensorFlow logger is not initialized.
    """
    if _tf_logger is None:
        # BUG FIX: the original `return ValueError` handed the exception
        # *class* back to the caller, so the error condition was never
        # actually signaled. Raise it instead.
        raise ValueError('TensorFlow logger is not initialized')
    _tf_logger.add_summary(histogram(name, values), global_step=step)