Example #1
# Assumes net (the autoencoder), criterion (the reconstruction loss), lr, epochs,
# device, train_loader and test_loader are defined earlier in the script.
import torch
from torch import optim
from visdom import Visdom

optimizer = optim.Adam(net.parameters(), lr=lr)
scheduler = optim.lr_scheduler.StepLR(optimizer, 5)  # decay the learning rate every 5 epochs
net.to(device)
criterion.to(device)
train_loss = []
viz = Visdom()
for epoch in range(epochs):
    train_loss.clear()
    net.train()
    for step, (x, _) in enumerate(train_loader):
        x = x.to(device)
        x_hat = net(x)

        # reconstruction loss between the network output and its input
        loss = criterion(x_hat, x)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        train_loss.append(loss.item())

        if step % 50 == 0:
            print('epoch:{} batch:{} loss:{:.6f}'.format(epoch, step, loss.item()))

    scheduler.step()

    # visualize one test batch and its reconstruction after every epoch
    net.eval()
    with torch.no_grad():
        x, _ = next(iter(test_loader))
        x = x.to(device)
        x_hat = net(x)
    viz.images(x.cpu(), nrow=6, win='x', opts=dict(title='x'))
    viz.images(x_hat.cpu(), nrow=6, win='x_hat', opts=dict(title='x_hat'))

    # checkpoint the whole model every 10 epochs
    if (epoch + 1) % 10 == 0:
        torch.save(net, 'encoder_decoder/ae_{}.pkl'.format(epoch))
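The snippet above relies on net, criterion, the data loaders and a few hyperparameters being created earlier in the script. A minimal sketch of such a setup, using an assumed fully-connected autoencoder and MNIST (neither is shown in the original example), could look like this:

import torch
import torch.nn as nn
from torchvision import datasets, transforms

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
lr, epochs, batch_size = 1e-3, 50, 32  # hypothetical hyperparameters

# toy fully-connected autoencoder for 28x28 images (an assumption,
# the original example does not show the architecture)
class Autoencoder(nn.Module):
    def __init__(self):
        super().__init__()
        self.encoder = nn.Sequential(nn.Flatten(), nn.Linear(784, 64), nn.ReLU())
        self.decoder = nn.Sequential(nn.Linear(64, 784), nn.Sigmoid())

    def forward(self, x):
        z = self.encoder(x)
        return self.decoder(z).view(-1, 1, 28, 28)

net = Autoencoder()
criterion = nn.MSELoss()

transform = transforms.ToTensor()
train_set = datasets.MNIST('data', train=True, download=True, transform=transform)
test_set = datasets.MNIST('data', train=False, download=True, transform=transform)
train_loader = torch.utils.data.DataLoader(train_set, batch_size=batch_size, shuffle=True)
test_loader = torch.utils.data.DataLoader(test_set, batch_size=batch_size, shuffle=True)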
Example #2
# Assumes AE, K, batch_size, val_data_folder, saved_model_name and
# save_folder_name are defined elsewhere.
import os

import torch
import torch.nn as nn
import torchvision
from torchvision import transforms
from tqdm import tqdm

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

test_transform = transforms.Compose([
    transforms.ToTensor(),
])

testset = torchvision.datasets.ImageFolder(val_data_folder,
                                           transform=test_transform)
test_loader = torch.utils.data.DataLoader(testset,
                                          batch_size=batch_size,
                                          shuffle=False,
                                          num_workers=20)

# restore the trained autoencoder; the checkpoint was saved from a
# DataParallel model, so wrap before loading the state dict
model = AE(K=K).to(device)
model = nn.DataParallel(model, device_ids=[0])
model.load_state_dict(
    torch.load(saved_model_name, map_location={'cuda:1': 'cuda:0'}))
model.eval()

# make sure the per-output sub-folders exist before saving into them
for sub in ("out", "lab", "hash"):
    os.makedirs(os.path.join(save_folder_name, sub), exist_ok=True)

with tqdm(total=len(test_loader), desc="Batches") as pbar:
    with torch.no_grad():
        for i, (img, labels) in enumerate(test_loader):
            img = img.to(device)
            encoded, out, hashed = model(img)
            # dump the reconstructions, labels and hash codes one file per batch
            torch.save(out, save_folder_name + "/out/out_{}.pt".format(i))
            torch.save(labels, save_folder_name + "/lab/lab_{}.pt".format(i))
            torch.save(hashed, save_folder_name + "/hash/hash_{}.pt".format(i))
            pbar.update(1)
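Because the tensors are written one file per batch, a later step usually has to reassemble them. A minimal sketch, assuming the same save_folder_name layout as above (the load_batches helper is hypothetical, not part of the original example):

import os
import torch

def load_batches(folder, prefix):
    # the files were written as <prefix>_0.pt, <prefix>_1.pt, ... above,
    # so read them back in batch order and stack them into one tensor
    parts = []
    i = 0
    while os.path.exists(os.path.join(folder, "{}_{}.pt".format(prefix, i))):
        parts.append(torch.load(os.path.join(folder, "{}_{}.pt".format(prefix, i)),
                                map_location="cpu"))
        i += 1
    return torch.cat(parts, dim=0)

all_hashes = load_batches(os.path.join(save_folder_name, "hash"), "hash")
all_labels = load_batches(os.path.join(save_folder_name, "lab"), "lab")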