def log_run(split: str, epoch: int, writer: tf.summary.SummaryWriter,
            label_names: Sequence[str], metrics: MutableMapping[str, float],
            heaps: Mapping[str, Mapping[int, List[HeapItem]]],
            cm: np.ndarray) -> None:
    """Logs the outputs (metrics, confusion matrix, tp/fp/fn images) from a
    single epoch run to Tensorboard.

    Args:
        metrics: dict, keys already prefixed with {split}/
    """
    per_class_recall = recall_from_confusion_matrix(cm, label_names)
    metrics.update(prefix_all_keys(per_class_recall, f'{split}/label_recall/'))

    # log metrics
    for metric, value in metrics.items():
        tf.summary.scalar(metric, value, step=epoch)

    # log confusion matrix
    cm_fig = plot_utils.plot_confusion_matrix(cm,
                                              classes=label_names,
                                              normalize=True)
    cm_fig_img = tf.convert_to_tensor(fig_to_img(cm_fig)[np.newaxis, ...])
    tf.summary.image(f'confusion_matrix/{split}', cm_fig_img, step=epoch)

    # log tp/fp/fn images
    for heap_type, heap_dict in heaps.items():
        log_images_with_confidence(heap_dict,
                                   label_names,
                                   epoch=epoch,
                                   tag=f'{split}/{heap_type}')
    writer.flush()
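
fig_to_img is called here (and again below) but is not included in this snippet. A minimal sketch of what it plausibly does, assuming a matplotlib Figure rendered on an Agg-based canvas (the real helper in the source module may differ):

# Hypothetical stand-in for the fig_to_img helper used above, not the original
# implementation: rasterize a matplotlib Figure to an RGB array of shape [H, W, 3].
import matplotlib.pyplot as plt
import numpy as np

def fig_to_img(fig: plt.Figure) -> np.ndarray:
    fig.canvas.draw()
    img = np.asarray(fig.canvas.buffer_rgba())[..., :3]  # drop the alpha channel
    plt.close(fig)  # release the figure once it has been rasterized
    return img.copy()
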
def log_images_with_confidence(heap_dict: Mapping[int, List[HeapItem]],
                               label_names: Sequence[str], epoch: int,
                               tag: str) -> None:
    """
    Args:
        heap_dict: dict, maps label_id to list of HeapItem, where each HeapItem
            data is a list [img, target, top3_conf, top3_preds, img_file],
            and img is a tf.Tensor of shape [H, W, 3]
        label_names: list of str, label names in order of label id
        epoch: int
        tag: str
    """
    for label_id, heap in heap_dict.items():
        label_name = label_names[label_id]

        sorted_heap = sorted(heap, reverse=True)  # sort largest to smallest
        imgs_list = [item.data for item in sorted_heap]
        fig, img_files = imgs_with_confidences(imgs_list, label_names)

        # tf.summary.image requires input of shape [N, H, W, C]
        fig_img = tf.convert_to_tensor(fig_to_img(fig)[np.newaxis, ...])
        tf.summary.image(f'{label_name}/{tag}', fig_img, step=epoch)
        tf.summary.text(f'{label_name}/{tag}_files',
                        '\n\n'.join(img_files),
                        step=epoch)
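
Below is a hypothetical invocation of the TensorFlow variant of log_run above; the writer path, label names, metric values, and confusion matrix are placeholders. Since tf.summary.scalar/image write to the current default writer, the call is wrapped in writer.as_default():

# Illustrative usage only; all values are made up.
import numpy as np
import tensorflow as tf

label_names = ['cat', 'dog', 'empty']
writer = tf.summary.create_file_writer('logs/valid')
metrics = {'valid/loss': 0.42, 'valid/acc': 0.87}    # keys already prefixed with the split
cm = np.array([[50, 3, 2],                           # rows = true label, cols = prediction
               [4, 60, 1],
               [0, 2, 78]])

with writer.as_default():
    log_run('valid', epoch=5, writer=writer, label_names=label_names,
            metrics=metrics, heaps={}, cm=cm)        # empty heaps: no tp/fp/fn images logged
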
Example 3
def log_run(split: str, epoch: int, writer: tensorboard.SummaryWriter,
            label_names: Sequence[str], metrics: MutableMapping[str, float],
            heaps: Optional[Mapping[str, Mapping[int, list[HeapItem]]]],
            cm: np.ndarray) -> None:
    """Logs the outputs (metrics, confusion matrix, tp/fp/fn images) from a
    single epoch run to Tensorboard.

    Args:
        metrics: dict, keys already prefixed with {split}/
    """
    per_label_recall = recall_from_confusion_matrix(cm, label_names)
    metrics.update(prefix_all_keys(per_label_recall, f'{split}/label_recall/'))

    # log metrics
    for metric, value in metrics.items():
        writer.add_scalar(metric, value, epoch)

    # log confusion matrix
    cm_fig = plot_utils.plot_confusion_matrix(cm, classes=label_names,
                                              normalize=True)
    cm_fig_img = fig_to_img(cm_fig)
    writer.add_image(tag=f'confusion_matrix/{split}', img_tensor=cm_fig_img,
                     global_step=epoch, dataformats='HWC')

    # log tp/fp/fn images
    if heaps is not None:
        for heap_type, heap_dict in heaps.items():
            log_images_with_confidence(writer, heap_dict, label_names,
                                       epoch=epoch, tag=f'{split}/{heap_type}')
    writer.flush()
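
recall_from_confusion_matrix and prefix_all_keys are called by both variants but are not part of this snippet. A plausible minimal implementation, assuming rows of cm are true labels and columns are predictions:

# Hypothetical helpers, sketched from how they are used above; the originals may differ.
from typing import Mapping, Sequence

import numpy as np

def recall_from_confusion_matrix(cm: np.ndarray,
                                 label_names: Sequence[str]) -> dict[str, float]:
    """Per-label recall: correct predictions for a label / number of true examples of it."""
    row_sums = cm.sum(axis=1)
    recalls = np.divide(np.diag(cm), row_sums,
                        out=np.zeros(len(label_names)), where=row_sums > 0)
    return dict(zip(label_names, recalls.tolist()))

def prefix_all_keys(d: Mapping[str, float], prefix: str) -> dict[str, float]:
    """Returns a copy of d with prefix prepended to every key."""
    return {prefix + k: v for k, v in d.items()}
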
Example 4
def log_images_with_confidence(writer: tensorboard.SummaryWriter,
                               heap_dict: Mapping[int, list[HeapItem]],
                               label_names: Sequence[str], epoch: int,
                               tag: str) -> None:
    """
    Note: performs image normalization in-place

    Args:
        writer: tensorboard.SummaryWriter
        heap_dict: dict, maps label_id to list of HeapItem, where each HeapItem
            data is a tuple (img, target, top3_conf, top3_preds, img_file)
        label_names: list of str, label names in order of label id
        epoch: int
        tag: str
    """
    # for every image: undo normalization, clamp to [0, 1], CHW -> HWC
    # - cannot be in-place, because the HeapItem might be in multiple heaps
    unnormalize = tv.transforms.Normalize(mean=-MEANS / STDS, std=1.0 / STDS)
    for label_id, heap in heap_dict.items():
        label_name = label_names[label_id]

        imgs_list = []
        for item in sorted(heap, reverse=True):  # sort largest to smallest
            img = item.data[0].float()  # clamp() only supports fp32 on CPU
            img = unnormalize(img).clamp_(0, 1).permute(1, 2, 0)
            imgs_list.append((img, *item.data[1:]))

        fig, img_files = imgs_with_confidences(imgs_list, label_names)

        # writer.add_figure() has issues => using add_image() instead
        # writer.add_figure(f'{label_name}/{tag}', fig, global_step=epoch)
        writer.add_image(f'{label_name}/{tag}',
                         fig_to_img(fig),
                         global_step=epoch,
                         dataformats='HWC')
        writer.add_text(f'{label_name}/{tag}_files',
                        '\n\n'.join(img_files),
                        global_step=epoch)
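
The unnormalize transform above works because Normalize computes (x - mean) / std per channel, so applying Normalize a second time with mean=-MEANS / STDS and std=1.0 / STDS yields x * std + mean and recovers the original pixel values. A small self-contained check, using the ImageNet statistics purely for illustration (the snippet's own MEANS/STDS may differ):

# Sanity check of the inverse-normalization trick; MEANS/STDS here are assumptions.
import torch
import torchvision as tv

MEANS = torch.tensor([0.485, 0.456, 0.406])
STDS = torch.tensor([0.229, 0.224, 0.225])

normalize = tv.transforms.Normalize(mean=MEANS.tolist(), std=STDS.tolist())
unnormalize = tv.transforms.Normalize(mean=(-MEANS / STDS).tolist(),
                                      std=(1.0 / STDS).tolist())

x = torch.rand(3, 8, 8)  # CHW image with values in [0, 1]
assert torch.allclose(unnormalize(normalize(x)), x, atol=1e-5)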