Example no. 1
0
def insights(x: CaptumInterpretation, inp_data, debug: bool = True):
    """Open a Captum Insights `AttributionVisualizer` for `inp_data`.

    Args:
        x: `CaptumInterpretation` wrapping the trained model and its `dls`.
        inp_data: items to visualize; wrapped in a labeled test dataloader.
        debug: forwarded to `AttributionVisualizer.render`.
    """
    def _baseline_func(o):
        # All-zeros baseline with the same shape/dtype as the input batch.
        return o * 0

    def _get_vocab(vocab):
        # The visualizer UI needs string class labels; stringify bool vocabs.
        return list(map(str, vocab)) if isinstance(vocab[0], bool) else vocab

    dl = x.dls.test_dl(L(inp_data), with_labels=True, bs=4)
    # isinstance (not `type(...) ==`) so Normalize subclasses are found too;
    # fall back to `noop` when the pipeline has no normalization step.
    normalize_func = next(
        (func for func in dl.after_batch if isinstance(func, Normalize)), noop)

    # captum v0.3 expects normalization stats without the batch dimension.
    if hasattr(normalize_func, 'mean'):
        if normalize_func.mean.ndim == 4: normalize_func.mean.squeeze_(0)
    if hasattr(normalize_func, 'std'):
        if normalize_func.std.ndim == 4: normalize_func.std.squeeze_(0)

    visualizer = AttributionVisualizer(
        models=[x.model],
        score_func=lambda o: torch.nn.functional.softmax(o, 1),
        classes=_get_vocab(dl.vocab),
        features=[
            ImageFeature(
                "Image",
                baseline_transforms=[_baseline_func],
                input_transforms=[normalize_func],
            )
        ],
        dataset=x._formatted_data_iter(dl, normalize_func))
    visualizer.render(debug=debug)
Example no. 2
0
    def visualize(self, inp_data, debug: bool = True):
        """Open a Captum Insights `AttributionVisualizer` for `inp_data`.

        Args:
            inp_data: items to visualize; wrapped in a labeled test dataloader.
            debug: forwarded to `AttributionVisualizer.render`.
        """
        def _baseline_func(o):
            # All-zeros baseline with the same shape/dtype as the input batch.
            return o * 0

        def _get_vocab(vocab):
            # The visualizer UI needs string labels; stringify bool vocabs.
            return list(map(str, vocab)) if isinstance(vocab[0], bool) else vocab

        dl = self.dls.test_dl(L(inp_data), with_labels=True, bs=4)
        # isinstance (not `type(...) ==`) so Normalize subclasses are found
        # too; fall back to `noop` when the pipeline has no normalization.
        normalize_func = next(
            (func for func in dl.after_batch if isinstance(func, Normalize)),
            noop)

        visualizer = AttributionVisualizer(
            models=[self.model],
            score_func=lambda o: torch.nn.functional.softmax(o, 1),
            classes=_get_vocab(dl.vocab),
            features=[
                ImageFeature(
                    "Image",
                    baseline_transforms=[_baseline_func],
                    input_transforms=[normalize_func],
                )
            ],
            dataset=self._formatted_data_iter(dl, normalize_func)
        )
        visualizer.render(debug=debug)
Example no. 3
0
                                           download=True,
                                           transform=transforms.ToTensor())
    dataloader = iter(
        torch.utils.data.DataLoader(dataset,
                                    batch_size=4,
                                    shuffle=False,
                                    num_workers=2))
    while True:
        images, labels = next(dataloader)
        yield Batch(inputs=images, labels=labels)


if __name__ == "__main__":
    # Per-channel normalization: maps [0, 1] pixel values to [-1, 1].
    norm_transform = transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    net = get_pretrained_model()

    # Single image feature; `baseline_func` is defined elsewhere in the file.
    photo_feature = ImageFeature(
        "Photo",
        baseline_transforms=[baseline_func],
        input_transforms=[norm_transform],
    )

    viz = AttributionVisualizer(
        models=[net],
        score_func=lambda logits: torch.nn.functional.softmax(logits, 1),
        classes=get_classes(),
        features=[photo_feature],
        dataset=formatted_data_iter(),
    )
    viz.render()
Example no. 4
0
                                           download=True,
                                           transform=transforms.ToTensor())
    dataloader = iter(
        torch.utils.data.DataLoader(dataset,
                                    batch_size=4,
                                    shuffle=False,
                                    num_workers=2))
    while True:
        images, labels = next(dataloader)
        yield Batch(inputs=images, labels=labels)


if __name__ == "__main__":
    # Per-channel normalization: maps [0, 1] pixel values to [-1, 1].
    norm_transform = transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    net = get_pretrained_model()

    # Single image feature; `baseline_func` is defined elsewhere in the file.
    photo_feature = ImageFeature(
        "Photo",
        baseline_transforms=[baseline_func],
        input_transforms=[norm_transform],
    )

    viz = AttributionVisualizer(
        models=[net],
        score_func=lambda logits: torch.nn.functional.softmax(logits, 1),
        classes=get_classes(),
        features=[photo_feature],
        dataset=formatted_data_iter(),
    )
    viz.render(debug=True)