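# NOTE: utils.ItemPool is used below but not defined in this section. The
# following is a minimal sketch of the CycleGAN-style history buffer of
# generated images it presumably implements. The pool size of 50 and the 0.5
# replacement probability are assumptions for illustration, not values taken
# from this code.
import copy
import numpy as np

class ItemPool(object):
    """Buffer of previously generated samples, used to feed the discriminators
    a mix of current and historical fakes."""

    def __init__(self, max_num=50):
        self.max_num = max_num
        self.items = []

    def __call__(self, in_items):
        if self.max_num <= 0:
            return in_items
        return_items = []
        for in_item in in_items:
            if len(self.items) < self.max_num:
                # pool not yet full: store the new item and return it unchanged
                self.items.append(in_item)
                return_items.append(in_item)
            elif np.random.rand() > 0.5:
                # with probability 0.5, return a stored item and keep the new one
                idx = np.random.randint(0, self.max_num)
                old_item = copy.copy(self.items[idx])
                self.items[idx] = in_item
                return_items.append(old_item)
            else:
                # otherwise return the new item without touching the pool
                return_items.append(in_item)
        return return_items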
b_test_img_paths = glob('./Datasets/' + dataset + '/bounding_box_train-Market/*.jpg')
a_test_pool = data.ImageData(sess, a_test_img_paths, batch_size, load_size=load_size, crop_size=crop_size)
b_test_pool = data.ImageData(sess, b_test_img_paths, batch_size, load_size=load_size, crop_size=crop_size)

# history pools of generated images for the discriminators
a2b_pool = utils.ItemPool()
b2a_pool = utils.ItemPool()

'''summary'''
summary_writer = tf.summary.FileWriter('./summaries/' + dataset + '_spgan', sess.graph)

'''saver'''
ckpt_dir = './checkpoints/' + dataset + '_spgan'
utils.mkdir(ckpt_dir + '/')
saver = tf.train.Saver(max_to_keep=30)
ckpt_path = utils.load_checkpoint(ckpt_dir, sess, saver)
if ckpt_path is None:
    sess.run(tf.global_variables_initializer())
else:
    print('Copy variables from %s' % ckpt_path)

'''train'''
try:
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)

    batch_epoch = min(len(a_data_pool), len(b_data_pool)) // batch_size
    max_it = epoch * batch_epoch
    now = time.strftime("%c")
    print('================ Beginning training time (%s) ================\n' % now)

    for it in range(sess.run(it_cnt), max_it):
dataset = args.dataset
crop_size = args.crop_size

""" run """
with tf.Session() as sess:
    a_real = tf.placeholder(tf.float32, shape=[None, crop_size, crop_size, 3])
    b_real = tf.placeholder(tf.float32, shape=[None, crop_size, crop_size, 3])

    a2b = models.generator(a_real, 'a2b')
    b2a = models.generator(b_real, 'b2a')
    b2a2b = models.generator(b2a, 'a2b', reuse=True)
    a2b2a = models.generator(a2b, 'b2a', reuse=True)

    #--restore--#
    saver = tf.train.Saver()
    ckpt_path = utils.load_checkpoint('./checkpoints/' + dataset + '_spgan', sess, saver)
    if ckpt_path is None:
        raise Exception('No checkpoint!')
    else:
        saver.restore(sess, ckpt_path)
        print('Copy variables from %s' % ckpt_path)

    #--test--#
    b_list = glob('./Datasets/' + dataset + '/bounding_box_train-Market/*.jpg')
    a_list = glob('./Datasets/' + dataset + '/bounding_box_train-Duke/*.jpg')

    b_save_dir = './test_predictions/' + dataset + '_spgan' + '/bounding_box_train_market2duke/'
    a_save_dir = './test_predictions/' + dataset + '_spgan' + '/bounding_box_train_duke2market/'
    utils.mkdir([a_save_dir, b_save_dir])
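    # A minimal sketch of the translation loop that would follow this setup
    # (not part of the original code). The imageio/skimage I/O, the [-1, 1]
    # normalization, and the translate() helper are assumptions for
    # illustration; imports are placed here for self-containment but would
    # normally sit at the top of the file.
    import os
    import numpy as np
    import imageio
    from skimage.transform import resize

    def translate(img_paths, input_ph, output_tensor, save_dir):
        for path in img_paths:
            img = imageio.imread(path).astype(np.float32)
            img = resize(img, (crop_size, crop_size), preserve_range=True)
            img = img / 127.5 - 1.0                              # scale to [-1, 1]
            out = sess.run(output_tensor, feed_dict={input_ph: img[None, ...]})
            out = ((out[0] + 1.0) * 127.5).astype(np.uint8)      # back to [0, 255]
            imageio.imwrite(os.path.join(save_dir, os.path.basename(path)), out)

    translate(a_list, a_real, a2b, a_save_dir)  # Duke -> Market style
    translate(b_list, b_real, b2a, b_save_dir)  # Market -> Duke style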