Example #1
# Let's automate this procedure for different networks.
# We need to reload the data for the inception network.
# You may need to change the 'datasetdir' parameter.

import os
import matplotlib.pyplot as plt
# DataManager and fetch_gradcam are assumed to be exposed by pynet.datasets.
from pynet.datasets import DataManager, fetch_gradcam
from pynet.models.cam import get_cam_network
from pynet.cam import GradCam

data = fetch_gradcam(datasetdir="/tmp/gradcam")
manager1 = DataManager(input_path=data.input_path,
                       metadata_path=data.metadata_path,
                       number_of_folds=2,
                       batch_size=1,
                       test_size=1)
loaders1 = manager1.get_dataloader(test=True)
# Reload the data in the variant prepared for the inception network
# (inception_v3 expects a different input size, 299x299).
data = fetch_gradcam(datasetdir="/tmp/gradcam", inception=True)
manager2 = DataManager(input_path=data.input_path,
                       metadata_path=data.metadata_path,
                       number_of_folds=2,
                       batch_size=1,
                       test_size=1)
loaders2 = manager2.get_dataloader(test=True)

for loaders, model_name in ((loaders1, "vgg19"), (loaders1, "densenet201"),
                            (loaders1, "resnet18")):
    # To also run inception_v3, add (loaders2, "inception_v3") to the tuple
    # above.

    heatmaps = []
    print("-" * 10)
    print(model_name)
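# --------------------------------------------------------------------------
# Illustrative sketch of the mechanism that GradCam wraps: plain PyTorch on a
# torchvision VGG19, NOT pynet's API. The layer split (features / avgpool /
# classifier) and the commented usage line are assumptions for illustration.
import torch
import torch.nn.functional as F
from torchvision import models


def gradcam_heatmap(cnn, image, class_idx=None):
    """ Grad-CAM for a VGG-style model and one image of shape 1 x C x H x W. """
    feats = cnn.features(image)    # last convolutional feature map
    feats.retain_grad()            # keep its gradient after the backward pass
    logits = cnn.classifier(torch.flatten(cnn.avgpool(feats), 1))
    if class_idx is None:
        class_idx = int(logits.argmax(dim=1))
    cnn.zero_grad()
    logits[0, class_idx].backward()
    # Channel weights = spatially averaged gradients; weighted sum + ReLU.
    weights = feats.grad.mean(dim=(2, 3), keepdim=True)
    cam = F.relu((weights * feats).sum(dim=1, keepdim=True))
    cam = F.interpolate(cam, size=image.shape[2:], mode="bilinear",
                        align_corners=False)
    return (cam / (cam.max() + 1e-8)).squeeze().detach()


# Usage sketch (assumes a preprocessed 1 x 3 x 224 x 224 tensor `image`):
# vgg = models.vgg19(pretrained=True).eval()
# plt.imshow(gradcam_heatmap(vgg, image), cmap="jet")
# --------------------------------------------------------------------------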
Example #2
# Optimizer for the encoder network.
e_optimizer = torch.optim.Adam(encoder.parameters(), lr=0.0002)
# Adversarial targets: ones for real samples, zeros for generated samples.
# torch.autograd.Variable has been a no-op since PyTorch 0.4, so plain
# tensors are used here.
real_y = torch.ones((batch_size, channels)).to(device, non_blocking=True)
fake_y = torch.zeros((batch_size, channels)).to(device, non_blocking=True)
# Live monitoring board served at http://localhost:8097.
board = Board(port=8097, host="http://localhost", env="vae")
# Directory where model checkpoints can be stored.
outdir = "/tmp/vae-gan/checkpoint"
if not os.path.isdir(outdir):
    os.makedirs(outdir)
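
# Hypothetical helper (not part of the original example): intermediate
# weights could be written to `outdir` with plain torch.save, e.g. by calling
# save_checkpoint(generator, "generator", iteration) inside the loop below.
def save_checkpoint(model, name, step):
    """ Write one model's state dict to the checkpoint directory. """
    torch.save(model.state_dict(),
               os.path.join(outdir, "{0}_{1}.pth".format(name, step)))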

# Number of generator/encoder, discriminator, and code-discriminator updates
# performed per outer iteration, and the total number of iterations.
g_iter = 1
d_iter = 1
cd_iter = 1
total_iter = 200000
train_loader = manager.get_dataloader(train=True,
                                      validation=False,
                                      fold_index=0).train
loader = infinite_train_generartor(train_loader)
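# `infinite_train_generartor` is defined earlier in the full script; helpers
# of this kind usually just cycle over the DataLoader forever so that the
# iteration-based loop below never exhausts it. A minimal sketch of that
# pattern (an assumption, not the author's exact implementation):
#
#     def infinite_train_generartor(dataloader):
#         while True:
#             for dataitem in dataloader:
#                 yield dataitem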

for iteration in range(total_iter):

    # Train Encoder - Generator: freeze both discriminators so that only the
    # encoder and the generator receive gradients during this phase.
    for model, with_grad in [(discriminator, False),
                             (code_discriminator, False), (encoder, True),
                             (generator, True)]:
        for param in model.parameters():
            param.requires_grad = with_grad

    for iters in range(g_iter):
        # Clear gradients accumulated from the previous update.
        generator.zero_grad()
        encoder.zero_grad()