Example 1
def main():
    # Map [0, 1] image tensors into [-1, 1] before they reach the model.
    normalize = transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    model = get_pretrained_model()
    visualizer = AttributionVisualizer(
        models=[model],
        # Convert raw logits into per-class probabilities for display.
        score_func=lambda o: torch.nn.functional.softmax(o, 1),
        classes=get_classes(),
        features=[
            ImageFeature(
                "Photo",
                baseline_transforms=[baseline_func],
                input_transforms=[normalize],
            )
        ],
        dataset=formatted_data_iter(),
    )

    visualizer.serve(debug=True)
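The snippet above relies on several names it does not define. The block below is a minimal sketch of what they might look like, not the original code: the import paths target recent Captum releases, and the CIFAR-10 dataset, class list, and untrained stand-in model are placeholder assumptions.

import torch
import torchvision
import torchvision.transforms as transforms

from captum.insights import AttributionVisualizer, Batch
from captum.insights.attr_vis.features import ImageFeature


def baseline_func(input):
    # Zero-valued baseline image with the same shape as the input tensor.
    return input * 0


def get_classes():
    # Placeholder label names (CIFAR-10); replace with the classes of your own model.
    return ["plane", "car", "bird", "cat", "deer",
            "dog", "frog", "horse", "ship", "truck"]


def get_pretrained_model():
    # Hypothetical stand-in: an untrained 10-class classifier. The original example
    # presumably loads its own trained weights here instead.
    model = torchvision.models.resnet18(num_classes=10)
    model.eval()
    return model


def formatted_data_iter():
    # Endless generator of Batch objects; the visualizer pulls from it on demand.
    # CIFAR-10 is an assumption: the snippets above do not show which dataset is used.
    dataset = torchvision.datasets.CIFAR10(root="data/test", train=False,
                                           download=True,
                                           transform=transforms.ToTensor())
    dataloader = iter(torch.utils.data.DataLoader(dataset, batch_size=4,
                                                  shuffle=False, num_workers=2))
    while True:
        images, labels = next(dataloader)
        yield Batch(inputs=images, labels=labels)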
Example 2
                                           download=True,
                                           transform=transforms.ToTensor())
    dataloader = iter(
        torch.utils.data.DataLoader(dataset,
                                    batch_size=4,
                                    shuffle=False,
                                    num_workers=2))
    while True:
        images, labels = next(dataloader)
        yield Batch(inputs=images, labels=labels)


if __name__ == "__main__":
    normalize = transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    model = get_pretrained_model()
    visualizer = AttributionVisualizer(
        models=[model],
        score_func=lambda o: torch.nn.functional.softmax(o, 1),
        classes=get_classes(),
        features=[
            ImageFeature(
                "Photo",
                baseline_transforms=[baseline_func],
                input_transforms=[normalize],
            )
        ],
        dataset=formatted_data_iter(),
    )

    visualizer.serve()
Example 3
                                           download=True,
                                           transform=transforms.ToTensor())
    dataloader = iter(
        torch.utils.data.DataLoader(dataset,
                                    batch_size=4,
                                    shuffle=False,
                                    num_workers=2))
    while True:
        images, labels = next(dataloader)
        yield Batch(inputs=images, labels=labels)


if __name__ == "__main__":
    normalize = transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    model = get_pretrained_model()
    visualizer = AttributionVisualizer(
        models=[model],
        score_func=lambda o: torch.nn.functional.softmax(o, 1),
        classes=get_classes(),
        features=[
            ImageFeature(
                "Photo",
                baseline_transforms=[baseline_func],
                input_transforms=[normalize],
            )
        ],
        dataset=formatted_data_iter(),
    )

    visualizer.serve(debug=True)
Example 4
dataset_sizes = {x: len(image_datasets[x]) for x in stages}
class_names = image_datasets[stages[0]].classes

# Set up the device to run the computations
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print('Device:', device)

# Load the best model from file
model_ = torch.load(model_file, map_location=device)
_ = model_.to(device).eval()

visualizer = AttributionVisualizer(
    models=[model_],
    score_func=lambda o: torch.nn.functional.softmax(o, 1),
    classes=class_names,
    features=[
        ImageFeature("Photo",
                     baseline_transforms=[baseline_func],
                     input_transforms=[
                         transforms.Resize(256),
                         transforms.CenterCrop(224),
                         transforms.ToTensor(),
                         transforms.Normalize([0.485, 0.456, 0.406],
                                              [0.229, 0.224, 0.225])
                     ])
    ],
    dataset=formatted_data_iter(dataloaders['test']),
)

visualizer.serve(port=8600)
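Example 4 builds its Batch stream from an existing DataLoader (dataloaders['test']), so its formatted_data_iter must accept a loader as an argument. That helper is not shown in the snippet; the sketch below is an assumption that follows the same Batch-yielding pattern as Examples 2 and 3.

from captum.insights import Batch


def formatted_data_iter(dataloader):
    # Hypothetical variant: wrap a finite DataLoader in the endless Batch
    # generator the AttributionVisualizer consumes, restarting the loader
    # whenever it is exhausted.
    it = iter(dataloader)
    while True:
        try:
            images, labels = next(it)
        except StopIteration:
            it = iter(dataloader)
            images, labels = next(it)
        yield Batch(inputs=images, labels=labels)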