# Standard-library / third-party imports used by these helpers; the project
# modules (ReWrite, G_D_Module, TrainFunction, data_read) and the globals
# opt, cuda, img_shape, source_files are defined elsewhere in the repo.
import os
import random

import numpy as np
import torch
import matplotlib.pyplot as plt
from torch.utils.data import DataLoader


def ex_cgan():
    # Wrap the source sequences into a Dataset and a shuffled DataLoader.
    datasets = ReWrite.load_data_in_seq(source_files)
    datasets = ReWrite.MyDataSet(datasets)
    data_loader = DataLoader(
        datasets,
        batch_size=opt.batch_size,
        shuffle=True,
    )

    # Build the conditional GAN (opt.latent_dim should be 200 here).
    generator = G_D_Module.GeneratorCGAN(opt.latent_dim, opt.n_classes, img_shape)
    discriminator = G_D_Module.DiscriminatorCGAN(opt.n_classes, img_shape)

    # Train; the keyword is spelled 'fist_train' to match TrainFunction.train_cgan.
    TrainFunction.train_cgan(generator, discriminator, data_loader,
                             opt.n_epochs, opt.lr, opt.b1, opt.b2,
                             opt.latent_dim, opt.n_classes, cuda,
                             fist_train=False)
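
# Illustrative sketch only, NOT the repo's TrainFunction.train_cgan: one CGAN
# update of the kind that call is expected to run, with BCE adversarial losses
# and the class labels fed to both networks. The optimizers opt_g / opt_d, the
# device argument, and a discriminator that ends in a Sigmoid (output in (0, 1),
# shape (batch, 1)) are assumptions for this sketch.
def _cgan_train_step_sketch(generator, discriminator, imgs, labels,
                            latent_dim, opt_g, opt_d, device='cpu'):
    adversarial_loss = torch.nn.BCELoss()
    batch = imgs.size(0)
    valid = torch.ones(batch, 1, device=device)   # targets for "real"
    fake = torch.zeros(batch, 1, device=device)   # targets for "fake"

    # Generator step: try to make (generated image, label) pairs look real.
    opt_g.zero_grad()
    z = torch.randn(batch, latent_dim, device=device)
    gen_imgs = generator(z, labels)
    g_loss = adversarial_loss(discriminator(gen_imgs, labels), valid)
    g_loss.backward()
    opt_g.step()

    # Discriminator step: separate real pairs from generated pairs.
    opt_d.zero_grad()
    real_loss = adversarial_loss(discriminator(imgs, labels), valid)
    fake_loss = adversarial_loss(discriminator(gen_imgs.detach(), labels), fake)
    d_loss = (real_loss + fake_loss) / 2
    d_loss.backward()
    opt_d.step()
    return g_loss.item(), d_loss.item()
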
def show_cgan_data():
    # Must match the latent dimension the saved generator was trained with.
    latent_dim = 20

    # Read every sample file under coedatas/; each file holds one class.
    data_list = os.listdir('coedatas')
    data = []
    for path in data_list:
        data.append(data_read('coedatas/' + path))

    FloatTensor = torch.FloatTensor
    LongTensor = torch.LongTensor

    # Rebuild the generator and load the trained weights.
    generator = G_D_Module.GeneratorCGAN(latent_dim, 5, (1, 32, 32))
    generator.load_state_dict(torch.load('GANParameters/CGAN/generator.pt'))

    # Generate a len(data) x len(data) grid; labels cycle 0..len(data)-1 per row.
    noise = FloatTensor(np.random.normal(0, 1, (len(data) ** 2, latent_dim)))
    single_list = list(range(len(data)))
    label = LongTensor(single_list * len(data))
    gen_imgs = generator(noise, label)

    # Real samples: pick a random image of each class for every row of the grid.
    imgs = np.empty([len(data) ** 2, 1, 32, 32], dtype=float)
    for i in range(len(data)):
        for j in range(len(data)):
            index = random.randint(0, len(data[j]) - 1)
            imgs[i * len(data) + j][0] = data[j][index]

    # Plot and save the grid of real samples.
    for i in range(imgs.shape[0]):
        plt.subplot(len(data_list), len(data_list), i + 1)
        plt.axis('off')
        plt.contourf(imgs[i][0])
    plt.savefig('caches/real.jpg', bbox_inches='tight')
    plt.close()

    # Plot and save the grid of generated samples.
    for i in range(gen_imgs.shape[0]):
        plt.subplot(len(data), len(data), i + 1)
        plt.axis('off')
        plt.contourf(gen_imgs[i][0].detach().numpy())
    plt.savefig('caches/gen.jpg', bbox_inches='tight')
    plt.close()
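
# Minimal usage sketch (assumption: this file is run as the training script and
# opt, cuda, img_shape, source_files are already configured above): train the
# CGAN, then render the real-vs-generated comparison grids under caches/.
if __name__ == '__main__':
    ex_cgan()
    show_cgan_data()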