# --- Vanilla GAN training: loss/optimizer setup, then alternating D steps. ---
if cuda:
    generator.cuda()
    discriminator.cuda()

criterion = nn.BCELoss()
d_optimizer = args.optimizer(discriminator.parameters(), lr=args.d_lr)
# BUG FIX: the generator optimizer previously reused args.d_lr (copy-paste);
# the generator should train with its own learning rate.
# NOTE(review): assumes args.g_lr is declared alongside args.d_lr in the
# argument parser -- confirm before merging.
g_optimizer = args.optimizer(generator.parameters(), lr=args.g_lr)

for train_iter in range(args.iterations):
    for d_index in range(args.d_steps):
        # 1. Train D on real+fake
        discriminator.zero_grad()

        # 1A: Train D on real
        real_samples = sample_2d(lut_2d, bs)
        d_real_data = Variable(torch.Tensor(real_samples))
        if cuda:
            d_real_data = d_real_data.cuda()
        d_real_decision = discriminator(d_real_data)
        labels = Variable(torch.ones(bs))
        if cuda:
            labels = labels.cuda()
        d_real_loss = criterion(d_real_decision, labels)  # ones = true

        # 1B: Train D on fake
        latent_samples = torch.randn(bs, z_dim)
        d_gen_input = Variable(latent_samples)
        if cuda:
            d_gen_input = d_gen_input.cuda()
        # detach so gradients from the D loss never reach G's parameters
        d_fake_data = generator(d_gen_input).detach()
# --- Conditional GAN: build one-hot condition labels once, then train D. ---
# c_indices partitions the batch: rows c_indices[i]:c_indices[i+1] belong to
# class i, so the one-hot matrix can be built a block-row at a time.
y = numpy.zeros((bs, c_dim))
for i in range(c_dim):
    y[c_indices[i]:c_indices[i + 1], i] = 1  # conditional labels, one-hot encoding
y = Variable(torch.Tensor(y))
if cuda:
    y = y.cuda()

for train_iter in range(args.iterations):
    for d_index in range(args.d_steps):
        # 1. Train D on real+fake
        discriminator.zero_grad()

        # 1A: Train D on real samples with conditions
        real_samples = numpy.zeros((bs, DIMENSION))
        for i in range(c_dim):
            # each class block is drawn from that class's own 2-D lookup table
            real_samples[c_indices[i]:c_indices[i + 1], :] = sample_2d(
                luts_2d[i], c_indices[i + 1] - c_indices[i])
        # first c dimensions is the condition inputs, the last 2 dimensions are samples
        real_samples = Variable(torch.Tensor(real_samples))
        if cuda:
            real_samples = real_samples.cuda()
        # y and real_samples already live on the GPU when cuda is set, so the
        # concatenation is too -- the redundant extra .cuda() call was removed.
        d_real_data = torch.cat([y, real_samples], 1)
        d_real_decision = discriminator(d_real_data)
        labels = Variable(torch.ones(bs))
        if cuda:
            labels = labels.cuda()
        d_real_loss = criterion(d_real_decision, labels)  # ones = true

        # 1B: Train D on fake
# One-hot condition matrix: block-row i of y carries class label i.
for cls in range(c_dim):
    y[c_indices[cls]:c_indices[cls + 1], cls] = 1  # conditional labels, one-hot encoding
y = Variable(torch.Tensor(y))
if cuda:
    y = y.cuda()

for train_iter in range(args.iterations):
    for d_index in range(args.d_steps):
        # 1. Train D on real+fake
        discriminator.zero_grad()

        # 1A: Train D on real samples with conditions
        real_samples = numpy.zeros((bs, DIMENSION))
        for cls in range(c_dim):
            lo, hi = c_indices[cls], c_indices[cls + 1]
            real_samples[lo:hi, :] = sample_2d(luts_2d[cls], hi - lo)
        # first c dimensions is the condition inputs, the last 2 dimensions are samples
        real_samples = Variable(torch.Tensor(real_samples))
        if cuda:
            real_samples = real_samples.cuda()
        d_real_data = torch.cat([y, real_samples], 1)
        if cuda:
            d_real_data = d_real_data.cuda()
        d_real_decision = discriminator(d_real_data)
        labels = Variable(torch.ones(bs))
        if cuda:
            labels = labels.cuda()
        d_real_loss = criterion(d_real_decision, labels)  # ones = true

        # 1B: Train D on fake
model = BEGAN(data, hidden_num=hidden_num, z_dim=z_dim) train_op = BaseSolver(model, init_learning_rate=init_learning_rate) d_fetches = [train_op.d_solver, model.balance, model.k_update] else: raise NotImplementedError('model_type is wrong.') config = tf.ConfigProto() config.gpu_options.allow_growth = True sess = tf.Session(config=config) saver = tf.train.Saver(max_to_keep=20) init = tf.global_variables_initializer() sess.run(init) #saver.restore(sess, "./trial/trial-100000") for iter in range(max_iter): for k in range(K): x_batch = sample_2d(lut_2d, N) - 0.5 sess.run(d_fetches, feed_dict={data:x_batch}) # x_batch, _ = train_data.next_batch(N) sess.run(train_op.g_solver, feed_dict={data:x_batch}) if iter % verbose_interval == 0: d_loss, g_loss, lr = sess.run([model.d_loss, model.g_loss, train_op.learning_rate], feed_dict={data:x_batch}) print('iter=%d, lr=%f, d_loss=%f, g_loss=%f') % (iter, lr, d_loss, g_loss) if model_type == 'BEGAN': messure, kt = sess.run([model.messure, model.kt], feed_dict={data:x_batch}) print('messure=%f, k=%f') % (messure, kt) if iter % show_interval == 0: real_samples = sample_2d(lut_2d, 2000) gen_samples = sess.run(model.g_data, feed_dict={data:real_samples}) + 0.5 visualizer.draw(real_samples, gen_samples)