import torch
from torch.utils.tensorboard import SummaryWriter


def write_weight_statistics(writer: SummaryWriter, module: torch.nn.Module, epoch: int):
    # Log per-parameter statistics for the model: L2 norm, mean absolute
    # value, and a histogram of the raw values, keyed by the parameter's name.
    for weight_name, w in module.state_dict().items():
        if not w.is_floating_point():
            # Skip integer buffers (e.g. BatchNorm's num_batches_tracked),
            # which do not support norm().
            continue
        norm = w.norm().item()
        writer.add_scalar(f'Norm/{weight_name}', norm, epoch)
        avg = w.abs().mean().item()
        writer.add_scalar(f'avg/{weight_name}', avg, epoch)
        writer.add_histogram(f'hist/{weight_name}', w, epoch)
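
# A minimal usage sketch (assumption: the toy model and log directory below
# are illustrative, not part of this module).
def _demo_write_weight_statistics():
    writer = SummaryWriter(log_dir='runs/weight_stats_demo')
    model = torch.nn.Sequential(torch.nn.Linear(4, 8), torch.nn.ReLU(), torch.nn.Linear(8, 2))
    for epoch in range(3):
        write_weight_statistics(writer, model, epoch)
    writer.close()
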
def log_dict_with_writer(y_true: torch.Tensor, y_pred: torch.Tensor,
                         summary_writer: SummaryWriter,
                         thr=None, ood_label=0, global_step=None):
    """
    Log metrics to TensorBoard with a summary writer.

    :param torch.Tensor y_true: true labels of the objects, shape=(N,)
    :param torch.Tensor y_pred: logits, i.e. predictions of the model BEFORE
        the softmax function, shape=(N, n_classes + 1). Note that the OOD
        label is not one of the labels we train on.
    :param SummaryWriter summary_writer: a writer for logging metrics to TensorBoard
    :param float thr: maximum-probability threshold below which an object is
        considered OOD
    :param ood_label: label which corresponds to an OOD object
    :param global_step: step value to record with the logged metrics
    :return: None
    """
    # get_metrics_dict is expected to return {"hist": {...}, "scalar": {...}}.
    metrics_dict = get_metrics_dict(y_true, y_pred, thr, ood_label)
    for name, hist in metrics_dict["hist"].items():
        summary_writer.add_histogram(name, hist, global_step)
    for name, scalar in metrics_dict["scalar"].items():
        summary_writer.add_scalar(name, scalar, global_step)
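
# A minimal usage sketch (assumption: get_metrics_dict is defined elsewhere in
# this repo; the random tensors below are illustrative).
def _demo_log_dict_with_writer():
    writer = SummaryWriter(log_dir='runs/metrics_demo')
    n, n_classes = 32, 5
    y_true = torch.randint(0, n_classes + 1, (n,))  # label 0 marks OOD objects
    y_pred = torch.randn(n, n_classes + 1)          # raw logits, pre-softmax
    log_dict_with_writer(y_true, y_pred, writer, thr=0.5, ood_label=0, global_step=0)
    writer.close()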