Example #1
    # Report the best (lowest) FID logged during training and the epoch that produced it
    print('Min FID:', np.min(history['FID-c']))
    print('Epoch with min FID:', np.argmin(history['FID-c']))

    if not args.no_plots:
        plot_learningcurves(history, 'gen_loss')
        plot_learningcurves(history, 'disc_loss')
        plot_learningcurves(history, 'gen_loss_minibatch')
        plot_learningcurves(history, 'disc_loss_minibatch')
        plot_learningcurves(history, 'FID-c')

    # Generate a few test samples from the restored generator
    test_model(model=model,
               n_tests=args.n_tests,
               cuda_mode=args.cuda,
               SNGAN=True)
    save_samples(prefix='CIFAR10_SNGAN',
                 generator=model,
                 cp_name=args.cp_path.split('/')[-1].split('.')[0],
                 cuda_mode=args.cuda,
                 im_size=32,
                 SNGAN=True)

    if args.inception:
        print(
            inception_score(model,
                            batch_size=4,
                            N=args.n_inception,
                            cuda=args.cuda,
                            resize=True,
                            splits=10,
                            SNGAN=True))
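
These fragments pick up after argument parsing and checkpoint loading (the fuller examples below show that part). A minimal sketch of the preamble they assume: the flag names (--cp-path, --n-tests, --n-inception, --no-plots, --inception) come from the fragments themselves, but the defaults, help strings, and the --no-cuda pattern used to derive args.cuda are assumptions.

    import argparse
    import numpy as np
    import torch

    parser = argparse.ArgumentParser(description='Testing GANs under max hyper volume training')
    parser.add_argument('--cp-path', type=str, default=None, metavar='Path', help='Checkpoint/model path')
    parser.add_argument('--n-tests', type=int, default=4, help='Number of test samples to generate')
    parser.add_argument('--n-inception', type=int, default=1024, help='Number of samples for the Inception Score')
    parser.add_argument('--no-plots', action='store_true', help='Skip learning-curve plots')
    parser.add_argument('--inception', action='store_true', help='Compute the Inception Score')
    parser.add_argument('--no-cuda', action='store_true', help='Disable GPU')
    args = parser.parse_args()
    args.cuda = not args.no_cuda and torch.cuda.is_available()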
Example #2
    model = Generator(128, [1024, 512, 256, 128, 64, 32], 3)

    # map_location keeps all tensors on CPU even if the checkpoint was saved from GPU
    ckpt = torch.load(args.cp_path, map_location=lambda storage, loc: storage)
    model.load_state_dict(ckpt['model_state'])

    if args.cuda:
        model = model.cuda()

    print('Cuda Mode is: {}'.format(args.cuda))

    history = ckpt['history']

    if not args.no_plots:
        plot_learningcurves(history, 'gen_loss')
        plot_learningcurves(history, 'disc_loss')
        plot_learningcurves(history, 'gen_loss_minibatch')
        plot_learningcurves(history, 'disc_loss_minibatch')

    test_model(model=model,
               n_tests=args.n_tests,
               cuda_mode=args.cuda,
               SNGAN=True)
    save_samples(prefix='cats',
                 generator=model,
                 cp_name=args.cp_path.split('/')[-1].split('.')[0],
                 cuda_mode=args.cuda,
                 enhance=False,
                 im_size=256,
                 fig_size=(4, 8),
                 SNGAN=True)
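
plot_learningcurves is a repo helper whose implementation is not shown here; from the call sites it takes the history dict and a key naming one logged series. A hypothetical minimal stand-in, assuming history[key] is a plain list of scalars:

    import matplotlib.pyplot as plt

    def plot_learningcurves(history, key):
        # Hypothetical stand-in: plot one logged series by name.
        # The x-axis is the logged step (epoch or minibatch, depending on the key).
        plt.figure()
        plt.plot(history[key])
        plt.xlabel('Step')
        plt.ylabel(key)
        plt.title(key)
        plt.show()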
Example #3
    if args.cuda:
        model = model.cuda()

    print('Cuda Mode is: {}'.format(args.cuda))

    history = ckpt['history']

    print('Min FID:', np.min(history['FID-c']))
    print('Epoch with min FID:', np.argmin(history['FID-c']))

    if not args.no_plots:
        plot_learningcurves(history, 'gen_loss')
        plot_learningcurves(history, 'disc_loss')
        plot_learningcurves(history, 'gen_loss_minibatch')
        plot_learningcurves(history, 'disc_loss_minibatch')
        plot_learningcurves(history, 'FID-c')

    test_model(model=model, n_tests=args.n_tests, cuda_mode=args.cuda)
    save_samples(prefix='CIFAR10_DCGAN',
                 generator=model,
                 cp_name=args.cp_path.split('/')[-1].split('.')[0],
                 cuda_mode=args.cuda)

    if args.inception:
        print(
            inception_score(model,
                            N=args.n_inception,
                            cuda=args.cuda,
                            resize=True,
                            splits=10))
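
The min-FID report at the top of this example works because history['FID-c'] holds one FID value per epoch, so np.argmin returns the 0-based index of the best epoch. A toy illustration with made-up values:

    import numpy as np

    history = {'FID-c': [48.2, 35.7, 29.1, 31.4]}  # made-up per-epoch FID values
    print('Min FID:', np.min(history['FID-c']))                # 29.1
    print('Epoch with min FID:', np.argmin(history['FID-c']))  # 2 (0-based)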
Example #4
    if args.cp_path is None:
        raise ValueError(
            'There is no checkpoint/model path. Use arg --cp-path to indicate the path!'
        )

    model = Generator(100, [1024, 512, 256, 128], 3)

    ckpt = torch.load(args.cp_path, map_location=lambda storage, loc: storage)
    model.load_state_dict(ckpt['model_state'])

    if args.cuda:
        model = model.cuda()

    print('Cuda Mode is: {}'.format(args.cuda))

    history = ckpt['history']

    if not args.no_plots:
        plot_learningcurves(history, 'gen_loss')
        plot_learningcurves(history, 'disc_loss')
        plot_learningcurves(history, 'gen_loss_minibatch')
        plot_learningcurves(history, 'disc_loss_minibatch')

    test_model(model=model, n_tests=args.n_tests, cuda_mode=args.cuda)
    save_samples(prefix='CELEBA',
                 generator=model,
                 cp_name=args.cp_path.split('/')[-1].split('.')[0],
                 cuda_mode=args.cuda,
                 fig_size=(8, 8))
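
test_model is another repo helper not shown here. A hypothetical sketch of what a generator smoke test like this might do, assuming a 100-dimensional latent (matching Generator(100, ...)) and a tanh output in [-1, 1]; a ConvTranspose-based generator may instead expect latents shaped (n_tests, 100, 1, 1):

    import torch

    def test_model(model, n_tests, cuda_mode):
        # Hypothetical stand-in: sample n_tests latent vectors and run the generator
        model.eval()
        z = torch.randn(n_tests, 100)  # latent size assumed from Generator(100, ...)
        if cuda_mode:
            z = z.cuda()
        with torch.no_grad():
            samples = model(z)  # expected shape: (n_tests, 3, H, W)
        print('Sample stats: min={:.3f}, max={:.3f}'.format(samples.min().item(),
                                                            samples.max().item()))
        return samples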
Example #5
    if args.cp_path is None:
        raise ValueError(
            'There is no checkpoint/model path. Use arg --cp-path to indicate the path!'
        )

    model = Generator(100, [2048, 1024, 512, 256, 128], 3)

    ckpt = torch.load(args.cp_path, map_location=lambda storage, loc: storage)
    model.load_state_dict(ckpt['model_state'])

    if args.cuda:
        model = model.cuda()

    print('Cuda Mode is: {}'.format(args.cuda))

    history = ckpt['history']

    if not args.no_plots:
        plot_learningcurves(history, 'gen_loss')
        plot_learningcurves(history, 'disc_loss')
        plot_learningcurves(history, 'gen_loss_minibatch')
        plot_learningcurves(history, 'disc_loss_minibatch')

    test_model(model=model, n_tests=args.n_tests, cuda_mode=args.cuda)
    save_samples(prefix='CELEBA',
                 generator=model,
                 cp_name=args.cp_path.split('/')[-1].split('.')[0],
                 cuda_mode=args.cuda,
                 enhance=False,
                 im_size=128,
                 fig_size=(4, 8))
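
The cp_name expression args.cp_path.split('/')[-1].split('.')[0] strips the directory and extension from the checkpoint path. os.path does the same thing portably (and survives Windows separators); note the two differ when the filename contains more than one dot, since splitext removes only the last extension:

    import os

    cp_name = os.path.splitext(os.path.basename(args.cp_path))[0]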
Example #6
	if args.cp_path is None:
		raise ValueError('There is no checkpoint/model path. Use arg --cp-path to indicate the path!')

	model = Generator(100, [1024, 512, 256, 128], 3)

	ckpt = torch.load(args.cp_path, map_location=lambda storage, loc: storage)
	model.load_state_dict(ckpt['model_state'])

	if args.cuda:
		model = model.cuda()

	print('Cuda Mode is: {}'.format(args.cuda))

	history = ckpt['history']

	print('Min FID:', np.min(history['FID-c']))
	print('Epoch with min FID:', np.argmin(history['FID-c']))

	if not args.no_plots:
		plot_learningcurves(history, 'gen_loss')
		plot_learningcurves(history, 'disc_loss')
		plot_learningcurves(history, 'gen_loss_minibatch')
		plot_learningcurves(history, 'disc_loss_minibatch')
		plot_learningcurves(history, 'FID-c')
		#plot_learningcurves(history, 'steepest_dir_norm')

	test_model(model=model, n_tests=args.n_tests, cuda_mode=args.cuda)
	save_samples(prefix='CIFAR10_LSGAN_proj',
		generator=model,
		cp_name=args.cp_path.split('/')[-1].split('.')[0],
		cuda_mode=args.cuda,
		fig_size=(10, 10))

	if args.inception:
		print(inception_score(model, N=args.n_inception, cuda=args.cuda, resize=True, splits=10))
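
inception_score here is the repo's helper; its internals aren't shown. Assuming it follows the standard recipe (Salimans et al., 2016), the core computation is the exponential of the mean KL divergence between each image's class posterior p(y|x) and the marginal p(y), averaged over splits=10 splits. A hypothetical sketch of that core, starting from precomputed Inception-v3 softmax outputs:

    import numpy as np

    def inception_score_from_probs(probs, splits=10):
        # probs: (N, 1000) Inception-v3 softmax outputs for N generated images (all > 0)
        scores = []
        for part in np.array_split(probs, splits):
            p_y = part.mean(axis=0, keepdims=True)                  # marginal p(y) over the split
            kl = (part * (np.log(part) - np.log(p_y))).sum(axis=1)  # KL(p(y|x) || p(y)) per image
            scores.append(np.exp(kl.mean()))
        return float(np.mean(scores)), float(np.std(scores))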
Example #7
    # Testing settings
    parser = argparse.ArgumentParser(
        description='Testing GANs under max hyper volume training')
    parser.add_argument('--models-path',
                        type=str,
                        default=None,
                        metavar='Path',
                        help='Checkpoint/model path')
    args = parser.parse_args()

    if args.models_path is None:
        raise ValueError(
            'There is no checkpoint/model path. Use arg --models-path to indicate the path!'
        )

    model = Generator(100, [1024, 512, 256, 128], 3)

    # Collect every generator checkpoint; os.path.join avoids requiring a trailing
    # separator on --models-path
    files_list = glob.glob(os.path.join(args.models_path, 'G_*.pt'))
    files_list.sort()

    for file_ in files_list:

        ckpt = torch.load(file_, map_location=lambda storage, loc: storage)
        model.load_state_dict(ckpt['model_state'])

        save_samples(prefix='CELEBA_hGAN_VaryingD',
                     generator=model,
                     cp_name=file_.split('/')[-1].split('.')[0],
                     cuda_mode=False,
                     fig_size=(2, 14))
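
One caveat with files_list.sort() above: plain string sorting is lexicographic, so a checkpoint named G_10.pt sorts before G_2.pt. If the filenames embed an epoch number after 'G_' (an assumption; only the G_*.pt glob is shown), a numeric key restores chronological order:

    import re

    # Sort by the integer embedded in 'G_<epoch>.pt' instead of by raw string
    files_list.sort(key=lambda f: int(re.search(r'G_(\d+)\.pt$', f).group(1)))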