Example #1
from mxboard import SummaryWriter  # assumed: mxboard, whose SummaryWriter takes logdir= and add_text(tag=, text=)


def test_add_text():
    # Generates an event file under _LOGDIR and a JSON file at
    # _LOGDIR/plugins/tensorboard_text/tensors.json for the TensorBoard text plugin.
    sw = SummaryWriter(logdir=_LOGDIR)
    sw.add_text(tag='test_add_text', text='Hello MXNet!')
    sw.close()
    check_and_remove_logdir_for_text()
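The test relies on a module-level _LOGDIR constant and a check_and_remove_logdir_for_text() helper from the surrounding test file. A minimal sketch of what they might look like (the path and the assertion are assumptions, not the original test code):

import os
import shutil

_LOGDIR = './test_logdir'  # assumed location; the real test defines its own path

def check_and_remove_logdir_for_text():
    # Assumed helper: verify the text plugin wrote its JSON file, then clean up the log directory.
    tensors_json = os.path.join(_LOGDIR, 'plugins', 'tensorboard_text', 'tensors.json')
    assert os.path.exists(tensors_json)
    shutil.rmtree(_LOGDIR)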
Example #2
import logging
import os

import psutil
from mxboard import SummaryWriter  # assumed: mxboard's API matches the logdir=/value= keywords used below


class LogTB(object):

    def __init__(self, args):
        print('--- Initializing Tensorboard')
        # Event files go under <ckpt_dir>/log/train; the full config is logged once as text.
        self.tb = SummaryWriter(logdir=os.path.join(
            args.ckpt_dir, 'log', 'train'))
        self.tb.add_text(tag='config', text=str(args), global_step=0)

    def log(self,
            student,
            context_str,
            mlm_loss,
            mlm_acc,
            teacher_ce,
            teacher_mse,
            throughput,
            lr,
            duration,
            latency,
            n_total_iter):
        logging.info(f"{context_str}loggging to Tensorboard at {n_total_iter}")
        context_str = context_str.strip()
        self.tb.add_scalar(tag=f"{context_str}/losses/mlm_loss", value=mlm_loss, global_step=n_total_iter)
        self.tb.add_scalar(tag=f"{context_str}/losses/mlm_acc", value=mlm_acc, global_step=n_total_iter)
        self.tb.add_scalar(tag=f"{context_str}/losses/teacher_ce", value=teacher_ce, global_step=n_total_iter)
        self.tb.add_scalar(tag=f"{context_str}/losses/teacher_mse", value=teacher_mse, global_step=n_total_iter)

        self.tb.add_scalar(tag=f"{context_str}/latency/throughput", value=throughput, global_step=n_total_iter)
        self.tb.add_scalar(tag=f"{context_str}/latency/duration", value=duration, global_step=n_total_iter)
        self.tb.add_scalar(tag=f"{context_str}/latency/latency", value=latency, global_step=n_total_iter)

        self.tb.add_scalar(tag=f"{context_str}/learning_rate/lr", value=lr, global_step=n_total_iter)
        self.tb.add_scalar(tag=f"{context_str}/global/memory_usage", value=psutil.virtual_memory()._asdict()['used'] / 1_000_000, global_step=n_total_iter)