Example #1
    # Adversarially Learned Inference (ALI): a generator, an encoder, and a
    # discriminator that scores (image, latent-code) pairs
    gan_model = ALI(generator, encoder, discriminator, optimizer_G,
                    optimizer_D, code_size, validity_loss_f)

    # subset of MNIST used by the visualisation callbacks
    mnist_subset = get_mnist_subset_data_loader(train=True, fraction=100)

    # callbacks: sample generated images, reconstruct real images,
    # and project latent codes to 2D with PCA
    sampler = SampleImage(10, code_size)
    reconstruction = Reconstruction(mnist_subset)
    visualize_latent = VisualizeLatent(mnist_subset, method='pca')

    if train:

        # 'model' restores the saved weights only (all=False); 'checkpoint'
        # restores the full training state (all=True)
        if resume == 'model':
            gan_model.load_checkpoint(checkpoint_path, all=False)
        elif resume == 'checkpoint':
            gan_model.load_checkpoint(checkpoint_path, all=True)

        # label smoothing for the discriminator: real ('valid') targets at 0.95,
        # fake targets at 0, which discourages over-confident predictions
        trainer = Trainer(trick_dict={
            'label_smooth': {
                'valid_range': 0.95,
                'fake_range': 0
            }
        })
        data_loader = get_mnist_data_loader(train=True, batch_size=128)

        trainer.train(num_epoch, data_loader, gan_model, checkpoint_path, 5,
                      [sampler, reconstruction, visualize_latent])

    else:
        # Use a notebook to perform downstream tasks and compare the results
        # with ACGAN; MNIST is trivial for this, though.
        pass
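A hypothetical sketch of the downstream task hinted at in the else branch above: freeze the trained ALI encoder, use it as a fixed feature extractor, and fit a linear probe on its codes. The encoder(images) call and the shape of its output are assumptions about this codebase rather than confirmed API, and scikit-learn is pulled in only for the probe.

    # Sketch only: linear probe on ALI latent codes (encoder API is assumed).
    import numpy as np
    import torch
    from sklearn.linear_model import LogisticRegression

    gan_model.load_checkpoint(checkpoint_path, all=False)

    def extract_codes(loader):
        codes, targets = [], []
        with torch.no_grad():
            for images, labels in loader:
                z = encoder(images)  # assumed: maps a batch of images to latent codes
                codes.append(z.reshape(z.size(0), -1).cpu().numpy())
                targets.append(labels.numpy())
        return np.concatenate(codes), np.concatenate(targets)

    X_train, y_train = extract_codes(get_mnist_data_loader(train=True))
    X_test, y_test = extract_codes(get_mnist_data_loader(train=False))

    probe = LogisticRegression(max_iter=1000).fit(X_train, y_train)
    print('Linear-probe accuracy on ALI codes: {:.4f}'.format(probe.score(X_test, y_test)))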
Example #2
    # callbacks: sample generated images and reconstruct one fixed batch
    sampler = SampleImage(10, 10)
    data_loader = get_mnist_subset_data_loader(train=True,
                                               transform=transform,
                                               fraction=100)
    reconstruct = Reconstruction(next(iter(data_loader))[0])

    # log losses and images to TensorBoard under runs/vae_mnist
    summary_writer = SummaryWriter('runs/vae_mnist')

    if train:
        # visualise the latent space with a PCA projection during training
        visualize_data_loader = get_mnist_subset_data_loader(
            train=True, transform=transform, fraction=1000)
        visualize_callback = VisualizeLatent(visualize_data_loader,
                                             method='pca')
        resume = args['resume']
        num_epoch = int(args['epoch'])

        if resume == 'model':
            model.load_checkpoint(checkpoint_path, all=False)
        elif resume == 'checkpoint':
            model.load_checkpoint(checkpoint_path, all=True)

        train_data_loader = get_mnist_data_loader(train=True,
                                                  transform=transform)

        # train the VAE, saving a checkpoint every 10 epochs and running the
        # sampling, reconstruction, and latent-visualisation callbacks
        model.train(num_epoch,
                    train_data_loader,
                    checkpoint_path,
                    epoch_per_save=10,
                    callbacks=[sampler, reconstruct, visualize_callback],
                    summary_writer=summary_writer)
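Since the callbacks report through the SummaryWriter created above, the resulting curves and image grids can be inspected during or after training with the standard TensorBoard CLI, for example: tensorboard --logdir runs/vae_mnist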
Example #3
    if train:
        resume = args['resume']
        num_epoch = int(args['epoch'])
        sampler = SampleImage(10, 10, code_size)
        if resume == 'model':
            gan_model.load_checkpoint(checkpoint_path, all=False)
        elif resume == 'checkpoint':
            gan_model.load_checkpoint(checkpoint_path, all=True)

        trainer = Trainer(trick_dict={
            'label_smooth': {
                'valid_range': 0.95,
                'fake_range': 0
            }
        })
        data_loader = get_mnist_data_loader(train=True)

        trainer.train(num_epoch, data_loader, gan_model, 1, checkpoint_path, 5,
                      [sampler])

    else:
        # evaluate the discriminator's classification accuracy on the test set
        gan_model.load_checkpoint(checkpoint_path, all=False)
        data_loader = get_mnist_data_loader(train=False)
        acc = compute_accuracy(data_loader, gan_model)
        print('The test accuracy of the discriminator is {:.4f}'.format(acc[0]))
        # draw random class labels for generating class-conditioned samples
        import numpy as np
        from torch.utils.data import TensorDataset
        from torch.utils.data.dataloader import DataLoader
        labels = torch.from_numpy(np.random.randint(0, 10, (10000, )))
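The example stops after drawing the labels. A minimal continuation under stated assumptions: wrap the labels in a DataLoader and generate images batch by batch. The gan_model.generator attribute and its (noise, one-hot label) signature are assumptions, not this codebase's confirmed API.

        # Hypothetical continuation (sketch): batch the random labels and
        # generate class-conditioned samples with the loaded generator.
        label_loader = DataLoader(TensorDataset(labels), batch_size=128)
        samples = []
        with torch.no_grad():
            for (batch_labels,) in label_loader:
                noise = torch.randn(batch_labels.size(0), code_size)
                one_hot = torch.nn.functional.one_hot(batch_labels, num_classes=10).float()
                samples.append(gan_model.generator(noise, one_hot))  # assumed signature
        samples = torch.cat(samples)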
Example #4
    parser.add_argument('--train', action='store_true')
    parser.add_argument('--resume', choices=['model', 'checkpoint'])
    # --epoch is only required when --train was passed on the command line
    parser.add_argument('--epoch', required='--train' in sys.argv)
    args = vars(parser.parse_args())
    pprint.pprint(args)

    # baseline for comparison: train the discriminator architecture as a plain
    # supervised classifier on a small labelled MNIST subset
    model = Discriminator(weight_init=weights_init_normal)
    class_loss_f = nn.CrossEntropyLoss()
    lr = 1e-5
    fraction = 100
    optimizer = torch.optim.Adam(model.parameters(), lr)

    classifier = Classifier(model, optimizer, class_loss_f)

    checkpoint_path = './checkpoint/sgan_mnist_compare.ckpt'
    test_loader = get_mnist_data_loader(train=False)

    if args['train']:
        epoch = int(args['epoch'])
        train_loader = get_mnist_subset_data_loader(train=True, fraction=fraction)
        if args['resume'] == 'model':
            classifier.load_checkpoint(checkpoint_path, all=False)
        elif args['resume'] == 'checkpoint':
            classifier.load_checkpoint(checkpoint_path, all=True)
        else:
            pass
        classifier.train(epoch=epoch, train_data_loader=train_loader, val_data_loader=test_loader,
                         checkpoint_path=checkpoint_path)
        classifier.save_checkpoint(checkpoint_path)
    else:
        # load the trained weights for evaluation
        classifier.load_checkpoint(checkpoint_path, all=False)
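The evaluation branch ends right after loading the weights. A sketch of how it might continue, reusing compute_accuracy from the example above; whether that helper accepts a Classifier the same way it accepts the GAN model is an assumption.

        # Hypothetical continuation: report test accuracy of the supervised
        # baseline so it can be compared against the SGAN discriminator.
        acc = compute_accuracy(test_loader, classifier)  # assumed to accept a Classifier
        print('Supervised-baseline test accuracy: {:.4f}'.format(acc[0]))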