def build_gan():
    """Assemble a DCGAN generator/discriminator pair together with its MNIST and noise batch sources."""
    noise_dim = 64
    gen = dcgan_gen(noise_dim=noise_dim)
    disc = dcgan_disc()
    gan = GAN(gen, disc)
    batch_gen = MNISTBatchGenerator()
    noise_gen = NoiseGenerator([(noise_dim,)])
    return (gan, batch_gen, noise_gen)
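# The setup below initializes the location generator at 1.3 times the
# coordinate-wise median of the contaminated sample (see the note in that
# block). As a reference, here is a minimal sketch of what a coord_median
# helper could look like, assuming it takes an (n, p) tensor and returns the
# per-coordinate median; the repository's own implementation may differ.
import torch

def coord_median(data: torch.Tensor) -> torch.Tensor:
    # Median of each coordinate (column) taken across the n samples.
    return torch.median(data, dim=0).values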
theta = torch.zeros(args.p).to(device)
# Generate an eps-contaminated sample with true parameter theta.
data, theta = generate_contaminated_data(
    args.eps,
    args.train_size,
    theta=theta,
    type_cont=args.contamination,
    coord_median_as_origin=False,
)
data = data.to(device)
theta = theta.to(device)
data_loader = torch.utils.data.DataLoader(
    TensorDataset(data),
    batch_size=args.real_batch_size,
    shuffle=True,
    num_workers=0,
)
noise_generator = NoiseGenerator().to(device)
'''
We recommend not using the coordinate-wise median as the initialization.
The global minimum of the Wasserstein GAN objective has mean squared error
very close to that of the coordinate-wise median, so we prefer to start
training from somewhere else in order to see the progress of training.
'''
generator = Generator(
    p=args.p,
    initializer=1.3 * coord_median(data_loader.dataset.tensors[0]),  # 0.5 * torch.ones(args.p),
).to(device)
sinkhorn = SinkhornIteration(
    lam=args.lam,
    max_iter=args.sinkhorn_max_iter,
    device=device,
    const=args.const,