# --- Auto-encoder training script ---
# (Reconstructed formatting: the original source line was whitespace-mangled.)

load_path = None  # set to a checkpoint directory to resume from saved weights
output_path = './result/auto_encoder'
# NOTE(review): DBHelper is project-local; presumably (data_root, some size/batch arg)
# — confirm the meaning of the second argument against its definition.
dataset = DBHelper('./downloads/bing2/preprocessed_256', 2)

# if load_path is not None:
#     generator.load_weights('./%s/model_g' % load_path)
#     discriminator.load_weights('./%s/model_d' % load_path)

for epoch in range(EPOCH_NUM):
    ae_loss = 0
    iter_num = 0
    data_batch = None  # holds the last trained batch for the preview below
    for x_batch in dataset.train_ds:
        data_batch = dataset.get_data(x_batch)
        ae_loss += trainer.train(data_batch)
        iter_num += 1
        break  # NOTE(review): trains on exactly one batch per epoch — confirm intentional

    # FIX: guard against an empty dataset. The original code raised
    # ZeroDivisionError (ae_loss / iter_num) and NameError (x_batch /
    # data_batch never bound) when train_ds yielded no batches.
    if iter_num == 0:
        print('epoch-%02d: dataset yielded no batches' % epoch)
        continue

    # Preview: reconstruct the trained batch and map values from the
    # model's [-1, 1] range to [0, 1] for display.
    recon_data = trainer.test(data_batch)
    recon_data = (recon_data + 1.) / 2.
    shown_x_batch = (x_batch + 1.) / 2.

    print('epoch-%02d: auto_encoder loss=%.3f' % (epoch, ae_loss / iter_num))

    # Side-by-side grid: column 0 = input image, column 1 = reconstruction.
    fig, ax = plt.subplots(BATCH_SIZE, 2, figsize=(6, 6))
    for i in range(BATCH_SIZE):
        ax[i, 0].imshow(shown_x_batch[i])
        ax[i, 1].imshow(recon_data[i])
# --- Cycle-GAN style training script (general <-> celeb domains) ---
# (Reconstructed formatting: the original source line was whitespace-mangled.)

# if load_path is not None:
#     generator.load_weights('./%s/model_g' % load_path)
#     discriminator.load_weights('./%s/model_d' % load_path)

for epoch in range(EPOCH_NUM):
    gen_f_loss = 0
    gen_g_loss = 0
    dis_f_loss = 0
    dis_g_loss = 0
    iter_num = 0

    # FIX: iterate both datasets in lockstep with zip() instead of the
    # original `list(dataset.train_ds)[iter_num]`, which materialized the
    # entire celeb dataset on every iteration (O(n^2) time, O(n) memory
    # per step). Identical pairing for the batches actually consumed.
    for general_batch, celeb_batch in zip(dataset_general.train_ds, dataset.train_ds):
        general_data_batch = dataset_general.get_data(general_batch, augment=True)
        celeb_data_batch = dataset.get_data(celeb_batch, augment=True)
        cur_gen_f_loss, cur_gen_g_loss, cur_dis_f_loss, cur_dis_g_loss = trainer.train(
            general_data_batch, celeb_data_batch)
        gen_f_loss += cur_gen_f_loss
        gen_g_loss += cur_gen_g_loss
        dis_f_loss += cur_dis_f_loss
        dis_g_loss += cur_dis_g_loss
        iter_num += 1
        break  # NOTE(review): trains on exactly one batch pair per epoch — confirm intentional

    for test_batch in dataset_general.train_ds:
        # FIX: test_batch comes from dataset_general, so preprocess it with
        # dataset_general.get_data. The original called dataset.get_data
        # (the celeb helper) on a general-domain batch, inconsistent with
        # the training loop above.
        test_data_batch = dataset_general.get_data(test_batch)
        # Translate general -> celeb, then cycle the result back.
        recon_data = generator_general.test(test_data_batch)
        recon_data = recon_data.numpy()
        cycled_data = generator_celeb.test(recon_data)