Example #1
    def train(epoch):
        # Relative update rates for the transporter/discriminator/generator turns.
        if data_loader.__module__ in ('datasetLoaders.RandomManifoldDataLoader', 'datasetLoaders.ToyDataLoader'):
            critic_rate, generator_rate = 1, 4
        else:
            critic_rate, generator_rate = 2, 2

        trans_steps, disc_steps, gen_steps = 0, 0, 0
        turn = 'gen'
        in_between_vis = 3
        report_count = 0
        data_loader.train()
        train_gen_loss_accum, train_dis_loss_accum, train_likelihood_accum, train_kl_accum, batch_size_accum = 0, 0, 0, 0, 0
        start = time.time()
        for batch_idx, curr_batch_size, batch in data_loader: 


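            # Round-robin update schedule: the transporter step runs on every
            # turn, the discriminator step on 'disc' and 'gen' turns, and the
            # generator step only on 'gen' turns.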
            if turn in ('trans', 'disc', 'gen'):
                # trans_train_step_np, generator_cost_np, discriminator_cost_np, transporter_cost_np, mean_transport_cost_np = \
                # sess.run([train_transport_step_tf, train_outs_dict['generator_cost'], train_outs_dict['discriminator_cost'], train_outs_dict['transporter_cost'], train_outs_dict['mean_transport_cost']], 
                # feed_dict = input_dict_func(batch, np.asarray([epoch, float(epoch>2)])))

                trans_train_step_np, transporter_cost_np, mean_transport_cost_np = \
                sess.run([train_transport_step_tf, train_outs_dict['transporter_cost'], train_outs_dict['mean_transport_cost']], 
                feed_dict = input_dict_func(batch, np.asarray([epoch, float(epoch>2)])))
                trans_steps += 1
                
            if turn in ('disc', 'gen'):
                disc_train_step_np, transporter_cost_np, mean_transport_cost_np = \
                sess.run([train_discriminator_step_tf, train_outs_dict['transporter_cost'], train_outs_dict['mean_transport_cost']],
                feed_dict = input_dict_func(batch, np.asarray([epoch, float(epoch>2)])))
                disc_steps += 1

            if turn == 'gen':
                gen_train_step_np, generator_cost_np, discriminator_cost_np, transporter_cost_np, mean_transport_cost_np, convex_mask_np = \
                sess.run([train_generator_step_tf, train_outs_dict['generator_cost'], train_outs_dict['discriminator_cost'], 
                train_outs_dict['transporter_cost'], train_outs_dict['mean_transport_cost'], train_outs_dict['convex_mask']],
                feed_dict = input_dict_func(batch, np.asarray([epoch, float(epoch>2)])))
                gen_steps += 1
           
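            # Advance the turn: 'trans' hands over to 'disc' after
            # critic_rate*generator_rate transporter steps, 'disc' to 'gen'
            # after generator_rate discriminator steps, and 'gen' back to
            # 'trans' after every generator step.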
            if turn == 'trans':
                # if discriminator_cost_np/np.abs(generator_cost_np) < 0.5: turn = 'disc'
                if trans_steps % (critic_rate*generator_rate) == 0: turn = 'disc'
            elif turn == 'disc':
                if disc_steps % generator_rate == 0: turn = 'gen'
            elif turn == 'gen':
                turn = 'trans'

            max_discriminator_weight = sess.run(max_abs_discriminator_vars)
            train_gen_loss_accum += curr_batch_size*generator_cost_np
            train_dis_loss_accum += curr_batch_size*discriminator_cost_np
            batch_size_accum += curr_batch_size

            if batch_idx % global_args.log_interval == 0:
                report_count += 1

                end = time.time()
                print('Train: Epoch {} [{:7d} ()]\tGenerator Cost: {:.3f}\tDiscriminator Cost: {:.3f}\tTime: {:.3f}, variational cost {:.3f}, transport cost {:.3f}, t {:2d} d {:2d} g {:2d}, t/d {:.1f}, t/g {:.1f}, mask {:.3f}'.format(
                      epoch, batch_idx * curr_batch_size, generator_cost_np, discriminator_cost_np, (end - start), transporter_cost_np, mean_transport_cost_np, trans_steps, disc_steps, gen_steps, trans_steps/disc_steps, trans_steps/gen_steps, np.mean(convex_mask_np)))

                with open(global_args.exp_dir+"training_traces.txt", "a") as text_file:
                    text_file.write(str(generator_cost_np) + ', ' + str(discriminator_cost_np) + '\n')
                start = time.time()

                if in_between_vis > 0 and report_count % in_between_vis != 0:
                    distributions.visualizeProductDistribution3(sess, input_dict_func(batch, np.asarray([epoch, float(epoch>2)])), batch, inference_obs_dist, transport_dist, rec_dist, generative_dict['obs_dist'], 
                    save_dir=global_args.exp_dir+'Visualization/Train/Random/', postfix='train_'+str(epoch))
                    batch['observed']['data']['image'] = fixed_batch_data
                    distributions.visualizeProductDistribution3(sess, input_dict_func(batch, np.asarray([epoch, float(epoch>2)])), batch, inference_obs_dist, transport_dist, rec_dist, generative_dict['obs_dist'], 
                    save_dir=global_args.exp_dir+'Visualization/Train/Fixed/', postfix='train_fixed_'+str(epoch))

            # prev_mean_transport_cost_np = mean_transport_cost_np
            # prev_transporter_cost_np = transporter_cost_np

        summary_str = sess.run(merged_summaries, feed_dict = input_dict_func(batch, np.asarray([epoch, float(epoch>2)])))
        summary_writer.add_summary(summary_str, (tf.train.global_step(sess, global_step)))
        
        checkpoint_time = 1
        if data_loader.__module__ in ('datasetLoaders.RandomManifoldDataLoader', 'datasetLoaders.ToyDataLoader'):
            checkpoint_time = 20

        if epoch % checkpoint_time == 0:
            print('====> Average Train: Epoch: {}\tGenerator Cost: {:.3f}\tDiscriminator Cost: {:.3f}'.format(
                  epoch, train_gen_loss_accum/batch_size_accum, train_dis_loss_accum/batch_size_accum))

            # helper.draw_bar_plot(effective_z_cost_np[:,0,0], thres = [np.mean(effective_z_cost_np), np.max(effective_z_cost_np)], save_dir=global_args.exp_dir+'Visualization/inversion_cost/', postfix='inversion_cost'+str(epoch))
            # helper.draw_bar_plot(disc_cost_gen_np[:,0,0], thres = [0, 0], save_dir=global_args.exp_dir+'Visualization/disc_cost/', postfix='disc_cost'+str(epoch))
            
            if data_loader.__module__ in ('datasetLoaders.RandomManifoldDataLoader', 'datasetLoaders.ToyDataLoader'):
                
                helper.visualize_datasets(sess, input_dict_func(batch), data_loader.dataset, generative_dict['obs_sample_out'],
                                          generative_dict['latent_sample_out'], train_outs_dict['transport_sample'], train_outs_dict['input_sample'],
                                          save_dir=global_args.exp_dir+'Visualization/', postfix=str(epoch)) 

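                # Evaluate the critic on a dense 2D grid over the data domain
                # so the discriminator function can be rendered as a heatmap.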
                xmin, xmax, ymin, ymax, X_dense, Y_dense = -3.5, 3.5, -3.5, 3.5, 250, 250
                xlist = np.linspace(xmin, xmax, X_dense)
                ylist = np.linspace(ymin, ymax, Y_dense)
                X, Y = np.meshgrid(xlist, ylist)
                XY = np.concatenate([X.reshape(-1,1), Y.reshape(-1,1)], axis=1)

                batch['observed']['data']['flat'] = XY[:, np.newaxis, :]
                disc_cost_real_np = sess.run(train_outs_dict['critic_real'], feed_dict = input_dict_func(batch, np.asarray([epoch, float(epoch>2)])))

                f = np.reshape(disc_cost_real_np[:,0,0], [Y_dense, X_dense])
                helper.plot_ffs(X, Y, f, save_dir=global_args.exp_dir+'Visualization/discriminator_function/', postfix='discriminator_function'+str(epoch))
                
            else:
                # helper.draw_bar_plot(convex_mask_np, y_min_max = [0,1], save_dir=global_args.exp_dir+'Visualization/convex_mask/', postfix='convex_mask'+str(epoch))
                distributions.visualizeProductDistribution3(sess, input_dict_func(batch, np.asarray([epoch, float(epoch>2)])), batch, inference_obs_dist, transport_dist, rec_dist, generative_dict['obs_dist'], 
                save_dir=global_args.exp_dir+'Visualization/Train/Random/', postfix='train_'+str(epoch))
                batch['observed']['data']['image'] = fixed_batch_data
                distributions.visualizeProductDistribution3(sess, input_dict_func(batch, np.asarray([epoch, float(epoch>2)])), batch, inference_obs_dist, transport_dist, rec_dist, generative_dict['obs_dist'], 
                save_dir=global_args.exp_dir+'Visualization/Train/Fixed/', postfix='train_fixed_'+str(epoch))

            checkpoint_path1 = global_args.exp_dir+'checkpoint/'
            checkpoint_path2 = global_args.exp_dir+'checkpoint2/'
            print('====> Saving checkpoint. Epoch: ', epoch)
            start_tmp = time.time()
            # helper.save_checkpoint(saver, sess, global_step, checkpoint_path1)  # saving disabled in this variant
            end_tmp = time.time()
            print('Checkpoint path: '+checkpoint_path1+'   ====> It took: ', end_tmp - start_tmp)
            if epoch % 60 == 0:
                print('====> Saving checkpoint backup. Epoch: ', epoch)
                start_tmp = time.time()
                # helper.save_checkpoint(saver, sess, global_step, checkpoint_path2)  # saving disabled in this variant
                end_tmp = time.time()
                print('Checkpoint path: '+checkpoint_path2+'   ====> It took: ', end_tmp - start_tmp)
Example #2
    def train(epoch):
        data_loader.train()
        train_gen_loss_accum, train_dis_loss_accum, train_likelihood_accum, train_kl_accum, batch_size_accum = 0, 0, 0, 0, 0
        start = time.time()
        for batch_idx, curr_batch_size, batch in data_loader: 

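            # Take one discriminator step per batch, but a generator step only
            # every 5th batch (a 5:1 critic-to-generator update ratio).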
            disc_train_step_np = sess.run([train_discriminator_step_tf], feed_dict = input_dict_func(batch, np.asarray([0,])))
            if batch_idx % 5 != 0: continue
            gen_train_step_np, generator_cost_np, discriminator_cost_np = \
                sess.run([train_generator_step_tf, train_outs_dict['generator_cost'], train_outs_dict['discriminator_cost']],
                          feed_dict = input_dict_func(batch, np.asarray([0,])))

            max_discriminator_weight = sess.run(max_abs_discriminator_vars)
            train_gen_loss_accum += curr_batch_size*generator_cost_np
            train_dis_loss_accum += curr_batch_size*discriminator_cost_np
            batch_size_accum += curr_batch_size

            if batch_idx % global_args.log_interval == 0:
                end = time.time()
                print('Train: Epoch {} [{:7d} ()]\tGenerator Cost: {:.6f}\tDiscriminator Cost: {:.6f}\tTime: {:.3f}, Max disc weight {:.6f}'.format(
                      epoch, batch_idx * curr_batch_size, generator_cost_np, discriminator_cost_np, (end - start), max_discriminator_weight))

                with open(global_args.exp_dir+"training_traces.txt", "a") as text_file:
                    text_file.write(str(generator_cost_np) + ', ' + str(discriminator_cost_np) + '\n')
                start = time.time()
    
        summary_str = sess.run(merged_summaries, feed_dict = input_dict_func(batch, np.asarray([0,])))
        summary_writer.add_summary(summary_str, (tf.train.global_step(sess, global_step)))
        
        checkpoint_time = 1
        if data_loader.__module__ in ('datasetLoaders.RandomManifoldDataLoader', 'datasetLoaders.ToyDataLoader'):
            checkpoint_time = 20

        if epoch % checkpoint_time == 0:
            print('====> Average Train: Epoch: {}\tGenerator Cost: {:.6f}\tDiscriminator Cost: {:.6f}'.format(
                  epoch, train_gen_loss_accum/batch_size_accum, train_dis_loss_accum/batch_size_accum))

            # helper.draw_bar_plot(rate_similarity_gen_np[:,0,0], y_min_max = [0,1], save_dir=global_args.exp_dir+'Visualization/inversion_weight/', postfix='inversion_weight'+str(epoch))
            # helper.draw_bar_plot(effective_z_cost_np[:,0,0], thres = [np.mean(effective_z_cost_np), np.max(effective_z_cost_np)], save_dir=global_args.exp_dir+'Visualization/inversion_cost/', postfix='inversion_cost'+str(epoch))
            # helper.draw_bar_plot(disc_cost_gen_np[:,0,0], thres = [0, 0], save_dir=global_args.exp_dir+'Visualization/disc_cost/', postfix='disc_cost'+str(epoch))
            
            if data_loader.__module__ in ('datasetLoaders.RandomManifoldDataLoader', 'datasetLoaders.ToyDataLoader'):
                helper.visualize_datasets(sess, input_dict_func(batch), data_loader.dataset, generative_dict['obs_sample_out'], generative_dict['latent_sample_out'],
                    save_dir=global_args.exp_dir+'Visualization/', postfix=str(epoch)) 
                
                xmin, xmax, ymin, ymax, X_dense, Y_dense = -3.5, 3.5, -3.5, 3.5, 250, 250
                xlist = np.linspace(xmin, xmax, X_dense)
                ylist = np.linspace(ymin, ymax, Y_dense)
                X, Y = np.meshgrid(xlist, ylist)
                XY = np.concatenate([X.reshape(-1,1), Y.reshape(-1,1)], axis=1)

                batch['observed']['data']['flat'] = XY[:, np.newaxis, :]
                disc_cost_real_np = sess.run(train_outs_dict['critic_real'], feed_dict = input_dict_func(batch, np.asarray([0,])))

                f = np.reshape(disc_cost_real_np[:,0,0], [Y_dense, X_dense])
                helper.plot_ffs(X, Y, f, save_dir=global_args.exp_dir+'Visualization/discriminator_function/', postfix='discriminator_function'+str(epoch))
            else:
                distributions.visualizeProductDistribution(sess, input_dict_func(batch), batch, inference_obs_dist, generative_dict['obs_dist'], 
                save_dir=global_args.exp_dir+'Visualization/Train/', postfix='train_'+str(epoch))

            checkpoint_path1 = global_args.exp_dir+'checkpoint/'
            checkpoint_path2 = global_args.exp_dir+'checkpoint2/'
            print('====> Saving checkpoint. Epoch: ', epoch)
            start_tmp = time.time()
            helper.save_checkpoint(saver, sess, global_step, checkpoint_path1)
            end_tmp = time.time()
            print('Checkpoint path: '+checkpoint_path1+'   ====> It took: ', end_tmp - start_tmp)
            if epoch % 60 == 0:
                print('====> Saving checkpoint backup. Epoch: ', epoch)
                start_tmp = time.time()
                helper.save_checkpoint(saver, sess, global_step, checkpoint_path2)
                end_tmp = time.time()
                print('Checkpoint path: '+checkpoint_path2+'   ====> It took: ', end_tmp - start_tmp)
Example #3
    def train(epoch):
        data_loader.train()
        train_loss, curr_batch_size_accum = 0, 0
        start = time.time()
        for batch_idx, curr_batch_size, batch in data_loader:
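            # Fetch the VAE-style training outputs: total loss, per-sample
            # ELBO, likelihood, KL term, and the current annealed temperature.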
            train_step, batch_loss, train_elbo_per_sample, train_likelihood, train_kl, curr_temp =\
                sess.run([train_step_tf, *train_out_list], feed_dict = input_dict_func(batch))
            train_loss += curr_batch_size * batch_loss
            curr_batch_size_accum += curr_batch_size

            if batch_idx % global_args.log_interval == 0:
                end = time.time()
                print(
                    'Train Epoch: {} [{:7d} ()]\tLoss: {:.6f}\tLikelihood: {:.6f}\tKL: {:.6f}\tTime: {:.3f}, Temperature: {:.3f}'
                    .format(epoch, batch_idx * curr_batch_size, batch_loss,
                            train_likelihood, train_kl, (end - start),
                            curr_temp))
                with open(global_args.exp_dir + "training_traces.txt",
                          "a") as text_file:
                    trace_string = str(batch_loss) + ', ' + str(
                        train_likelihood) + ', ' + str(train_kl) + ', ' + str(
                            curr_temp) + '\n'
                    text_file.write(trace_string)
                start = time.time()

        summary_str = sess.run(merged_summaries,
                               feed_dict=input_dict_func(batch))
        summary_writer.add_summary(summary_str,
                                   (tf.train.global_step(sess, global_step)))

        if epoch % 10 == 0:
            print('====> Epoch: {} Average loss: {:.4f}'.format(
                epoch, train_loss / curr_batch_size_accum))

            distributions.visualizeProductDistribution(
                sess,
                input_dict_func(batch),
                batch,
                obs_dist,
                sample_obs_dist,
                save_dir=global_args.exp_dir + 'Visualization/',
                postfix='train')
            if data_loader.__module__ in ('datasetLoaders.RandomManifoldDataLoader', 'datasetLoaders.ToyDataLoader'):
                helper.visualize_datasets(sess,
                                          input_dict_func(batch),
                                          data_loader.dataset,
                                          obs_sample_out_tf,
                                          latent_sample_out_tf,
                                          save_dir=global_args.exp_dir +
                                          'Visualization/',
                                          postfix=str(epoch))

            checkpoint_path1 = global_args.exp_dir + 'checkpoint/'
            checkpoint_path2 = global_args.exp_dir + 'checkpoint2/'
            print('====> Saving checkpoint. Epoch: ', epoch)
            start_tmp = time.time()
            helper.save_checkpoint(saver, sess, global_step, checkpoint_path1)
            end_tmp = time.time()
            print(
                'Checkpoint path: ' + checkpoint_path1 + '   ====> It took: ',
                end_tmp - start_tmp)
            if epoch % 60 == 0:
                print('====> Saving checkpoint backup. Epoch: ', epoch)
                start_tmp = time.time()
                helper.save_checkpoint(saver, sess, global_step,
                                       checkpoint_path2)
                end_tmp = time.time()
                print(
                    'Checkpoint path: ' + checkpoint_path2 +
                    '   ====> It took: ', end_tmp - start_tmp)
Example #4
    def train(epoch):
        data_loader.train()
        train_gen_loss_accum, train_dis_loss_accum, train_likelihood_accum, train_kl_accum, batch_size_accum = 0, 0, 0, 0, 0
        start = time.time()
        for batch_idx, curr_batch_size, batch in data_loader:

            disc_train_step_np = sess.run([train_discriminator_step_tf],
                                          feed_dict=input_dict_func(batch, np.asarray([0,])))
            if batch_idx % 5 != 0: continue
            # gen_train_step_np, generator_cost_np, discriminator_cost_np, mean_geo_constraint_1_np, mean_geo_constraint_2_np, mean_geo_constraint_3_np = \
            #     sess.run([train_geo_generator_step_tf, train_outs_dict['geo_generator_cost'], train_outs_dict['discriminator_cost'],
            #               train_outs_dict['mean_geo_constraint_1'], train_outs_dict['mean_geo_constraint_2'], train_outs_dict['mean_geo_constraint_3']],
            #               feed_dict = input_dict_func(batch, np.asarray([0,])))

            # gen_train_step_np, generator_cost_np, discriminator_cost_np, mean_geo_constraint_1_np, mean_geo_constraint_2_np, jacobian_alpha_constraint_np = \
            #     sess.run([train_geo_generator_step_tf, train_outs_dict['geo_generator_cost'], train_outs_dict['discriminator_cost'],
            #               train_outs_dict['mean_geo_constraint_1'], train_outs_dict['mean_geo_constraint_2'], train_outs_dict['jacobian_alpha_constraint']],
            #               feed_dict = input_dict_func(batch, np.asarray([0,])))

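            # Only mean_geo_constraint_2 is fetched below; the remaining
            # constraint metrics are zeroed so the log line still has values.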
            mean_geo_constraint_1_np, jacobian_alpha_constraint_np = 0, 0
            # mean_geo_constraint_1_np = 0
            gen_train_step_np, generator_cost_np, discriminator_cost_np, mean_geo_constraint_2_np = \
                sess.run([train_geo_generator_step_tf, train_outs_dict['geo_generator_cost'], train_outs_dict['discriminator_cost'],
                          train_outs_dict['mean_geo_constraint_2']], #, train_outs_dict['jacobian_alpha_constraint']
                          feed_dict = input_dict_func(batch, np.asarray([0,])))

            # gm_train_step_np = sess.run([train_generator_step_tf], feed_dict = input_dict_func(batch, np.asarray([0,])))

            # max_discriminator_weight = sess.run(max_abs_discriminator_vars)
            train_gen_loss_accum += curr_batch_size * generator_cost_np
            train_dis_loss_accum += curr_batch_size * discriminator_cost_np
            batch_size_accum += curr_batch_size

            if batch_idx % global_args.log_interval == 0:
                end = time.time()
                # print('Train: Epoch {} [{:7d} ()]\tGenerator Cost: {:.6f}\tDiscriminator Cost: {:.6f}\tTime: {:.3f}, geo_const_1 {:.6f}, geo_const_2 {:.6f}, geo_const_3 {:.6f}'.format(
                #       epoch, batch_idx * curr_batch_size, generator_cost_np, discriminator_cost_np, (end - start), mean_geo_constraint_1_np, mean_geo_constraint_2_np, mean_geo_constraint_3_np))

                print(
                    'Train: Epoch {} [{:7d} ()]\tGenerator Cost: {:.6f}\tDiscriminator Cost: {:.6f}\tTime: {:.3f}, geo_const_1 {:.6f}, geo_const_2 {:.6f}, alpha_const {:.6f}'
                    .format(epoch, batch_idx * curr_batch_size,
                            generator_cost_np, discriminator_cost_np,
                            (end - start), mean_geo_constraint_1_np,
                            mean_geo_constraint_2_np,
                            jacobian_alpha_constraint_np))

                with open(global_args.exp_dir + "training_traces.txt",
                          "a") as text_file:
                    text_file.write(
                        str(generator_cost_np) + ', ' +
                        str(discriminator_cost_np) + '\n')
                start = time.time()

        # summary_str = sess.run(merged_summaries, feed_dict = input_dict_func(batch, np.asarray([0,])))
        # summary_writer.add_summary(summary_str, (tf.train.global_step(sess, global_step)))

        checkpoint_time = 1
        if data_loader.__module__ in ('datasetLoaders.RandomManifoldDataLoader', 'datasetLoaders.ToyDataLoader'):
            checkpoint_time = 20

        if epoch % checkpoint_time == 0:
            print(
                '====> Average Train: Epoch: {}\tGenerator Cost: {:.6f}\tDiscriminator Cost: {:.6f}'
                .format(epoch, train_gen_loss_accum / batch_size_accum,
                        train_dis_loss_accum / batch_size_accum))

            if data_loader.__module__ in ('datasetLoaders.RandomManifoldDataLoader', 'datasetLoaders.ToyDataLoader'):
                helper.visualize_datasets(sess,
                                          input_dict_func(batch),
                                          data_loader.dataset,
                                          generative_dict['obs_sample_out'],
                                          generative_dict['latent_sample_out'],
                                          save_dir=global_args.exp_dir +
                                          'Visualization/',
                                          postfix=str(epoch))

                helper.visualize_datasets2(
                    sess,
                    input_dict_func,
                    data_loader,
                    data_loader.dataset,
                    generative_dict['interpolated_sample'],
                    save_dir=global_args.exp_dir + 'Visualization/',
                    postfix=str(epoch))

                helper.visualize_datasets3(
                    sess,
                    input_dict_func,
                    data_loader,
                    data_loader.dataset,
                    generative_dict['doubly_interpolated_sample'],
                    save_dir=global_args.exp_dir + 'Visualization/',
                    postfix=str(epoch))

                helper.visualize_datasets4(
                    sess,
                    input_dict_func,
                    data_loader,
                    generative_dict['interpolated_sample_begin'],
                    generative_dict['interpolated_sample_end'],
                    generative_dict['interpolated_sample'],
                    save_dir=global_args.exp_dir + 'Visualization/',
                    postfix=str(epoch))

                xmin, xmax, ymin, ymax, X_dense, Y_dense = -3.5, 3.5, -3.5, 3.5, 250, 250
                xlist = np.linspace(xmin, xmax, X_dense)
                ylist = np.linspace(ymin, ymax, Y_dense)
                X, Y = np.meshgrid(xlist, ylist)
                XY = np.concatenate(
                    [X.reshape(-1, 1), Y.reshape(-1, 1)], axis=1)

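                # The grid is padded with a duplicate of its first half,
                # presumably so the flattened batch matches the input size the
                # graph expects (the plain version is kept commented below).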
                batch['observed']['data']['flat'] = np.concatenate(
                    [XY[:int(XY.shape[0] / 2), np.newaxis, :], XY[:, np.newaxis, :]], axis=0)
                # batch['observed']['data']['flat'] = XY[:, np.newaxis, :]
                disc_cost_real_np = sess.run(train_outs_dict['critic_real'],
                                             feed_dict=input_dict_func(batch, np.asarray([0,])))

                f = np.reshape(disc_cost_real_np[:, 0, 0], [Y_dense, X_dense])
                helper.plot_ffs(X,
                                Y,
                                f,
                                save_dir=global_args.exp_dir +
                                'Visualization/discriminator_function/',
                                postfix='discriminator_function' + str(epoch))
            else:
                # distributions.visualizeProductDistribution(sess, input_dict_func(batch), batch, model.obs_dist, generative_dict['obs_dist'],
                #                                            save_dir=global_args.exp_dir+'Visualization/Train/', postfix='train_'+str(epoch))
                helper.visualizeTransitions(sess,
                                            input_dict_func(batch),
                                            generative_dict,
                                            save_dir=global_args.exp_dir +
                                            'Visualization/Train/',
                                            postfix='train_' + str(epoch))

            checkpoint_path1 = global_args.exp_dir + 'checkpoint/'
            checkpoint_path2 = global_args.exp_dir + 'checkpoint2/'
            print('====> Saving checkpoint. Epoch: ', epoch)
            start_tmp = time.time()
            helper.save_checkpoint(saver, sess, global_step, checkpoint_path1)
            end_tmp = time.time()
            print(
                'Checkpoint path: ' + checkpoint_path1 + '   ====> It took: ',
                end_tmp - start_tmp)
            if epoch % 60 == 0:
                print('====> Saving checkpoint backup. Epoch: ', epoch)
                start_tmp = time.time()
                helper.save_checkpoint(saver, sess, global_step,
                                       checkpoint_path2)
                end_tmp = time.time()
                print(
                    'Checkpoint path: ' + checkpoint_path2 +
                    '   ====> It took: ', end_tmp - start_tmp)
Example #5
    def train(epoch):
        global curr_meanp, curr_stdp, p_real

        trans_steps, disc_steps, gen_steps = 0, 0, 0
        turn = 'gen'
        in_between_vis = 5
        report_count = 0
        data_loader.train()
        train_gen_loss_accum, train_dis_loss_accum, train_likelihood_accum, train_kl_accum, batch_size_accum = 0, 0, 0, 0, 0
        start = time.time()

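        # Re-read tunable hyperparameters from disk each epoch so values such
        # as b_identity can be adjusted while training is running.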
        hyperparam_dict = {'b_identity': 0.}
        helper.update_dict_from_file(hyperparam_dict, './hyperparam_file.py')

        for batch_idx, curr_batch_size, batch in data_loader:
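            # scheduler() decides which of the transporter, discriminator, and
            # generator updates run on this batch.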
            gen_bool, disc_bool, trans_bool = scheduler(epoch, batch_idx)
            hyper_param = np.asarray(
                [epoch, hyperparam_dict['b_identity'], curr_meanp, p_real])
            curr_feed_dict = input_dict_func(batch, hyper_param)

            if trans_bool:
                trans_train_step_np, transporter_cost_np, mean_transport_cost_np = \
                sess.run([train_transport_step_tf, train_outs_dict['transporter_cost'], train_outs_dict['mean_transport_cost']],
                feed_dict = curr_feed_dict)
                trans_steps = trans_steps + 1
                # p_real = np.exp(-mean_transport_cost_np)

            if disc_bool:
                disc_train_step_np, transporter_cost_np, mean_transport_cost_np = \
                sess.run([train_discriminator_step_tf, train_outs_dict['transporter_cost'], train_outs_dict['mean_transport_cost']],
                feed_dict = curr_feed_dict)
                disc_steps = disc_steps + 1

            if gen_bool:
                gen_train_step_np, generator_cost_np, discriminator_cost_np, transporter_cost_np, mean_transport_cost_np, convex_mask_np, critic_real_np, \
                critic_gen_np, expected_log_pdf_prior_np, expected_log_pdf_agg_post_np, aaa = \
                sess.run([train_generator_step_tf, train_outs_dict['generator_cost'], train_outs_dict['discriminator_cost'],
                train_outs_dict['transporter_cost'], train_outs_dict['mean_transport_cost'], train_outs_dict['convex_mask'], train_outs_dict['critic_real'],
                train_outs_dict['critic_gen'], train_outs_dict['expected_log_pdf_prior'], train_outs_dict['expected_log_pdf_agg_post'], model.AAA],
                feed_dict = curr_feed_dict)
                gen_steps = gen_steps + 1

            max_discriminator_weight = sess.run(max_abs_discriminator_vars)
            train_gen_loss_accum += curr_batch_size * generator_cost_np
            train_dis_loss_accum += curr_batch_size * discriminator_cost_np
            batch_size_accum += curr_batch_size

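            # Exponential moving averages (decay 0.9) of the critic's mean and
            # std on generated samples; curr_meanp is fed back in via hyper_param.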
            curr_meanp_delta, curr_stdp_delta = np.mean(critic_gen_np), np.std(critic_gen_np)

            curr_meanp = 0.9 * curr_meanp + 0.1 * curr_meanp_delta
            curr_stdp = 0.9 * curr_stdp + 0.1 * curr_stdp_delta

            if batch_idx % global_args.log_interval == 0:
                report_count = report_count + 1

                end = time.time()
                # print('Train: Epoch {} [{:7d} ()]\tGenerator Cost: {:.3f}\tDiscriminator Cost: {:.3f}\tTime: {:.3f}, variational cost {:.3f}, transport cost {:.3f}, t {:2d} d {:2d} g {:2d}, t/d {:.1f}, t/g {:.1f}, mask {:.3f}, m {:.2f}, std {:.2f}'.format(
                #       epoch, batch_idx * curr_batch_size, generator_cost_np, discriminator_cost_np, (end - start), transporter_cost_np, mean_transport_cost_np, trans_steps, disc_steps, gen_steps, trans_steps/disc_steps, trans_steps/gen_steps, np.mean(convex_mask_np[:,0]),  curr_meanp, curr_stdp))
                print(
                    'Train: Epoch {} [{:7d} ()]\tGenerator Cost: {:.3f}\tDiscriminator Cost: {:.3f}\tTime: {:.3f}, variational cost {:.3f}, transport cost {:.3f}, t {:2d} d {:2d} g {:2d}, t/d {:.1f}, t/g {:.1f}, pri {:.1f}, agg {:.1f}, mask {:.2f}, p_real {:.2f}'
                    .format(epoch, batch_idx * curr_batch_size,
                            generator_cost_np, discriminator_cost_np,
                            (end - start), transporter_cost_np,
                            mean_transport_cost_np, trans_steps, disc_steps,
                            gen_steps, trans_steps / disc_steps,
                            trans_steps / gen_steps, expected_log_pdf_prior_np,
                            expected_log_pdf_agg_post_np,
                            np.mean(convex_mask_np[:, 0]), p_real))

                with open(global_args.exp_dir + "training_traces.txt",
                          "a") as text_file:
                    text_file.write(
                        str(generator_cost_np) + ', ' +
                        str(discriminator_cost_np) + '\n')
                start = time.time()

                if data_loader.__module__ == 'datasetLoaders.CelebA1QueueLoader' and in_between_vis > 0 and report_count % in_between_vis != 0:
                    distributions.visualizeProductDistribution3(
                        sess,
                        curr_feed_dict,
                        batch,
                        inference_obs_dist,
                        transport_dist,
                        rec_dist,
                        generative_dict['obs_dist'],
                        save_dir=global_args.exp_dir +
                        'Visualization/Train/Random/',
                        postfix='train_' + str(epoch))
                    batch['observed']['data']['image'] = fixed_batch_data
                    curr_feed_dict = input_dict_func(batch, hyper_param)
                    distributions.visualizeProductDistribution3(
                        sess,
                        curr_feed_dict,
                        batch,
                        inference_obs_dist,
                        transport_dist,
                        rec_dist,
                        generative_dict['obs_dist'],
                        save_dir=global_args.exp_dir +
                        'Visualization/Train/Fixed/',
                        postfix='train_fixed_' + str(epoch))

        summary_str = sess.run(merged_summaries, feed_dict=curr_feed_dict)
        summary_writer.add_summary(summary_str,
                                   (tf.train.global_step(sess, global_step)))

        checkpoint_time = 1
        if data_loader.__module__ in ('datasetLoaders.RandomManifoldDataLoader', 'datasetLoaders.ToyDataLoader'):
            checkpoint_time = 20

        if epoch % checkpoint_time == 0:
            print(
                '====> Average Train: Epoch: {}\tGenerator Cost: {:.3f}\tDiscriminator Cost: {:.3f}'
                .format(epoch, train_gen_loss_accum / batch_size_accum,
                        train_dis_loss_accum / batch_size_accum))

            # helper.draw_bar_plot(effective_z_cost_np[:,0,0], thres = [np.mean(effective_z_cost_np), np.max(effective_z_cost_np)], save_dir=global_args.exp_dir+'Visualization/inversion_cost/', postfix='inversion_cost'+str(epoch))
            # helper.draw_bar_plot(disc_cost_gen_np[:,0,0], thres = [0, 0], save_dir=global_args.exp_dir+'Visualization/disc_cost/', postfix='disc_cost'+str(epoch))

            if data_loader.__module__ in ('datasetLoaders.RandomManifoldDataLoader', 'datasetLoaders.ToyDataLoader'):

                helper.visualize_datasets(sess,
                                          input_dict_func(batch),
                                          data_loader.dataset,
                                          generative_dict['obs_sample_out'],
                                          generative_dict['latent_sample_out'],
                                          train_outs_dict['transport_sample'],
                                          train_outs_dict['input_sample'],
                                          save_dir=global_args.exp_dir +
                                          'Visualization/',
                                          postfix=str(epoch))

                xmin, xmax, ymin, ymax, X_dense, Y_dense = -2.5, 2.5, -2.5, 2.5, 250, 250
                xlist = np.linspace(xmin, xmax, X_dense)
                ylist = np.linspace(ymin, ymax, Y_dense)
                X, Y = np.meshgrid(xlist, ylist)
                XY = np.concatenate(
                    [X.reshape(-1, 1), Y.reshape(-1, 1)], axis=1)

                batch['observed']['data']['flat'] = XY[:, np.newaxis, :]
                disc_cost_real_np = sess.run(train_outs_dict['critic_real'],
                                             feed_dict=input_dict_func(
                                                 batch, hyper_param))

                batch['observed']['data']['flat'] = data_loader.dataset[:, np.newaxis, :]
                disc_cost_real_real_np = sess.run(
                    train_outs_dict['critic_real'],
                    feed_dict=input_dict_func(batch, hyper_param))

                # disc_mean = disc_cost_real_real_np.max()
                # disc_max = disc_cost_real_real_np.max()
                # disc_min = disc_cost_real_real_np.min()

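                # Clip the heatmap to mean +/- 2 std of the critic's values on
                # the real data so outliers do not dominate the color scale.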
                disc_mean = disc_cost_real_real_np.mean()
                disc_std = disc_cost_real_real_np.std()
                disc_max = disc_mean + 2 * disc_std
                disc_min = disc_mean - 2 * disc_std

                np.clip(disc_cost_real_np,
                        disc_min,
                        disc_max,
                        out=disc_cost_real_np)
                f = np.reshape(disc_cost_real_np[:, 0, 0], [Y_dense, X_dense])
                helper.plot_ffs(X,
                                Y,
                                f,
                                save_dir=global_args.exp_dir +
                                'Visualization/discriminator_function/',
                                postfix='discriminator_function' + str(epoch))

            else:
                # helper.draw_bar_plot(convex_mask_np, y_min_max = [0,1], save_dir=global_args.exp_dir+'Visualization/convex_mask/', postfix='convex_mask'+str(epoch))
                distributions.visualizeProductDistribution3(
                    sess,
                    curr_feed_dict,
                    batch,
                    inference_obs_dist,
                    transport_dist,
                    rec_dist,
                    generative_dict['obs_dist'],
                    save_dir=global_args.exp_dir +
                    'Visualization/Train/Random/',
                    postfix='train_' + str(epoch))
                batch['observed']['data']['image'] = fixed_batch_data
                curr_feed_dict = input_dict_func(batch, hyper_param)
                distributions.visualizeProductDistribution3(
                    sess,
                    curr_feed_dict,
                    batch,
                    inference_obs_dist,
                    transport_dist,
                    rec_dist,
                    generative_dict['obs_dist'],
                    save_dir=global_args.exp_dir +
                    'Visualization/Train/Fixed/',
                    postfix='train_fixed_' + str(epoch))

            checkpoint_path1 = global_args.exp_dir + 'checkpoint/'
            checkpoint_path2 = global_args.exp_dir + 'checkpoint2/'
            print('====> Saving checkpoint. Epoch: ', epoch)
            start_tmp = time.time()
            # helper.save_checkpoint(saver, sess, global_step, checkpoint_path1)  # saving disabled in this variant
            end_tmp = time.time()
            print(
                'Checkpoint path: ' + checkpoint_path1 + '   ====> It took: ',
                end_tmp - start_tmp)
            if epoch % 60 == 0:
                print('====> Saving checkpoint backup. Epoch: ', epoch)
                start_tmp = time.time()
                # helper.save_checkpoint(saver, sess, global_step, checkpoint_path2)  # saving disabled in this variant
                end_tmp = time.time()
                print(
                    'Checkpoint path: ' + checkpoint_path2 +
                    '   ====> It took: ', end_tmp - start_tmp)
Example #6
    def train(epoch):
        global k_t_np, average_real, average_gen, average_set, constant
        data_loader.train()
        train_gen_loss_accum, train_dis_loss_accum, train_likelihood_accum, train_kl_accum, batch_size_accum = 0, 0, 0, 0, 0
        start = time.time()
        for batch_idx, curr_batch_size, batch in data_loader:

            # # # Single loop
            # disc_train_step, gen_train_step, batch_gen_loss, batch_dis_loss =\
            #     sess.run([train_discriminator_step_tf, train_generator_step_tf, *train_out_list[:2]], feed_dict = input_dict_func(batch))

            # disc_train_step = sess.run(train_discriminator_step_tf, feed_dict = input_dict_func(batch))
            # gen_train_step, batch_gen_loss, batch_dis_loss = sess.run([train_generator_step_tf, *train_out_list[:2]], feed_dict = input_dict_func(batch))

            disc_train_step, train_dis_gen_mean_np, train_dis_real_mean_np = \
                sess.run([train_discriminator_step_tf, train_dis_gen_mean, train_dis_real_mean],
                         feed_dict = {additional_inputs_tf: np.asarray([k_t_np, average_real, average_gen, constant]),
                                      **input_dict_func(batch)})
            # inverter_train_step = sess.run(train_inverter_step_tf, feed_dict = {additional_inputs_tf: np.asarray([k_t_np, average_real, average_gen, constant]), **input_dict_func(batch)})

            if not average_set:
                # Initialize each running average from its own batch mean.
                average_real = train_dis_real_mean_np
                average_gen = train_dis_gen_mean_np
                constant = 1 if average_real < average_gen else 0
                average_set = True

            gen_train_step, batch_gen_loss, batch_dis_loss, train_dis_gen_mean_np, train_dis_real_mean_np, z_cost_list_np, disc_cost_list_np, rate_similarity_gen_np = \
                sess.run([train_generator_step_tf, *train_out_list[:2], train_dis_gen_mean, train_dis_real_mean, z_cost_list, disc_cost_list, rate_similarity_gen],
                         feed_dict = {additional_inputs_tf: np.asarray([k_t_np, average_real, average_gen, constant]), **input_dict_func(batch)})

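            # Running averages of the critic's mean output on real and
            # generated batches; `constant` flags which side is currently ahead.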
            average_real = (1 - average_lr) * average_real + average_lr * train_dis_real_mean_np
            average_gen = (1 - average_lr) * average_gen + average_lr * train_dis_gen_mean_np
            constant = 1 if (average_real - average_gen) < 0 else 0

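            # BEGAN-style proportional control of k_t: nudge it toward balancing
            # gamma * real score against generated score, then clip to [0, 1].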
            # k_t_np = k_t_np+lambd*(gamma*average_real-average_gen)
            k_t_np = k_t_np + lambd * (gamma * train_dis_real_mean_np -
                                       train_dis_gen_mean_np)
            k_t_np = max(min(1., k_t_np), 0.)

            max_discriminator_weight = sess.run(max_abs_discriminator_vars)
            train_gen_loss_accum += curr_batch_size * batch_gen_loss
            train_dis_loss_accum += curr_batch_size * batch_dis_loss
            batch_size_accum += curr_batch_size

            if batch_idx % global_args.log_interval == 0:
                end = time.time()
                print(
                    'Train: Epoch {} [{:7d} ()]\tGenerator Cost: {:.6f}\tDiscriminator Cost: {:.6f}\tTime: {:.3f}, Max disc weight {:.6f}'
                    .format(epoch, batch_idx * curr_batch_size, batch_gen_loss,
                            batch_dis_loss, (end - start),
                            max_discriminator_weight))

                with open(global_args.exp_dir + "training_traces.txt",
                          "a") as text_file:
                    text_file.write(
                        str(batch_gen_loss) + ', ' + str(batch_dis_loss) +
                        '\n')
                start = time.time()

        # summary_str = sess.run(merged_summaries, feed_dict=input_dict_func(batch))
        summary_str = sess.run(
            merged_summaries,
            feed_dict={
                additional_inputs_tf:
                np.asarray([k_t_np, average_real, average_gen, constant]),
                **input_dict_func(batch)
            })
        summary_writer.add_summary(summary_str,
                                   (tf.train.global_step(sess, global_step)))

        if data_loader.__module__ in ('datasetLoaders.RandomManifoldDataLoader', 'datasetLoaders.ToyDataLoader'):
            checkpoint_time = 20
        else:
            checkpoint_time = 1

        if epoch % checkpoint_time == 0:
            print(
                '====> Average Train: Epoch: {}\tGenerator Cost: {:.6f}\tDiscriminator Cost: {:.6f}'
                .format(epoch, train_gen_loss_accum / batch_size_accum,
                        train_dis_loss_accum / batch_size_accum))

            helper.draw_bar_plot(rate_similarity_gen_np[:, 0, 0],
                                 y_min_max=[0, 1],
                                 save_dir=global_args.exp_dir +
                                 'Visualization/inversion_weight/',
                                 postfix='inversion_weight' + str(epoch))
            helper.draw_bar_plot(
                z_cost_list_np[:, 0, 0],
                thres=[np.mean(z_cost_list_np),
                       np.max(z_cost_list_np)],
                save_dir=global_args.exp_dir + 'Visualization/inversion_cost/',
                postfix='inversion_cost' + str(epoch))
            helper.draw_bar_plot(disc_cost_list_np[:, 0, 0],
                                 thres=[average_real, average_gen],
                                 save_dir=global_args.exp_dir +
                                 'Visualization/disc_cost/',
                                 postfix='disc_cost' + str(epoch))

            if data_loader.__module__ in ('datasetLoaders.RandomManifoldDataLoader', 'datasetLoaders.ToyDataLoader'):
                helper.visualize_datasets(sess,
                                          input_dict_func(batch),
                                          data_loader.dataset,
                                          obs_sample_out_tf,
                                          latent_sample_out_tf,
                                          save_dir=global_args.exp_dir +
                                          'Visualization/',
                                          postfix=str(epoch))

                xmin, xmax = -3.5, 3.5
                ymin, ymax = -3.5, 3.5
                X_dense = 250
                Y_dense = 250
                xlist = np.linspace(xmin, xmax, X_dense)
                ylist = np.linspace(ymin, ymax, Y_dense)
                X, Y = np.meshgrid(xlist, ylist)
                XY = np.concatenate(
                    [X.reshape(-1, 1), Y.reshape(-1, 1)], axis=1)

                batch['observed']['data']['flat'] = XY[:, np.newaxis, :]
                disc_cost_from_input_np = sess.run(
                    disc_cost_from_input,
                    feed_dict={additional_inputs_tf: np.asarray([k_t_np, average_real, average_gen, constant]),
                               **input_dict_func(batch)})
                disc_cost_from_input_np = disc_cost_from_input_np[:, 0, 0]

                f = np.reshape(disc_cost_from_input_np, [Y_dense, X_dense])
                helper.plot_ffs(X,
                                Y,
                                f,
                                save_dir=global_args.exp_dir +
                                'Visualization/discriminator_function/',
                                postfix='discriminator_function' + str(epoch))
            else:
                distributions.visualizeProductDistribution(
                    sess,
                    input_dict_func(batch),
                    batch,
                    obs_dist,
                    sample_obs_dist,
                    save_dir=global_args.exp_dir + 'Visualization/Train/',
                    postfix='train_' + str(epoch))

            checkpoint_path1 = global_args.exp_dir + 'checkpoint/'
            checkpoint_path2 = global_args.exp_dir + 'checkpoint2/'
            print('====> Saving checkpoint. Epoch: ', epoch)
            start_tmp = time.time()
            # helper.save_checkpoint(saver, sess, global_step, checkpoint_path1)  # saving disabled in this variant
            end_tmp = time.time()
            print(
                'Checkpoint path: ' + checkpoint_path1 + '   ====> It took: ',
                end_tmp - start_tmp)