def load_data(args):
    """Build the train/test image pools for each training stage.

    Relies on the module-level names ``sess``, ``data`` and ``glob``.

    Args:
        args: parsed command-line namespace; reads ``dataset``,
            ``stage1``/``stage2``/``stage3``, ``triplet``,
            ``batch_size``, ``load_size`` and ``crop_size``.

    Returns:
        Tuple ``(a_data_pool, b_data_pool, c_data_pool,
        a_test_pool, b_test_pool, c_test_pool)``.  The ``a`` pools are
        ``None`` when ``args.triplet`` is falsy.
    """
    img_ext = '/*.png'

    def _pattern(split, stage):
        # Glob pattern such as ./datasets/<dataset>/train<stage>/*.png
        return './datasets/' + args.dataset + '/' + split + stage + img_ext

    def _pool(pattern):
        # Log the directory pattern being used (same output as before),
        # then build an ImageData pool over the files it matches.
        print(pattern)
        return data.ImageData(sess, glob(pattern), args.batch_size,
                              load_size=args.load_size,
                              crop_size=args.crop_size)

    print("Using input directories:")

    # Stage-1 ("a") pools exist only in triplet mode.
    if args.triplet:
        a_data_pool = _pool(_pattern('train', args.stage1))
        a_test_pool = _pool(_pattern('test', args.stage1))
    else:
        a_data_pool = a_test_pool = None

    # Stage-2 ("b") and stage-3 ("c") pools are always built.
    b_data_pool = _pool(_pattern('train', args.stage2))
    c_data_pool = _pool(_pattern('train', args.stage3))
    b_test_pool = _pool(_pattern('test', args.stage2))
    c_test_pool = _pool(_pattern('test', args.stage3))

    return (a_data_pool, b_data_pool, c_data_pool,
            a_test_pool, b_test_pool, c_test_pool)
""" train """ ''' init ''' # session config = tf.ConfigProto(allow_soft_placement=True) config.gpu_options.allow_growth = True sess = tf.Session(config=config) # counter it_cnt, update_cnt = utils.counter() ''' data ''' a_img_paths = glob('./datasets/' + dataset + '/trainA/*.png') b_img_paths = glob('./datasets/' + dataset + '/trainB/*.png') a_data_pool = data.ImageData(sess, a_img_paths, batch_size, load_size=load_size, crop_size=crop_size, num_threads=num_threads, buffer_size=buffer_size) b_data_pool = data.ImageData(sess, b_img_paths, batch_size, load_size=load_size, crop_size=crop_size, num_threads=num_threads, buffer_size=buffer_size) a_test_img_paths = glob('./datasets/' + dataset + '/testA/*.png') b_test_img_paths = glob('./datasets/' + dataset + '/testB/*.png') a_test_pool = data.ImageData(sess, a_test_img_paths,
var_list=g_var) """ train """ ''' init ''' # session config = tf.ConfigProto(allow_soft_placement=True) config.gpu_options.allow_growth = True sess = tf.Session(config=config) # counter it_cnt, update_cnt = ops.counter() '''data''' a_img_paths = glob('./datasets/' + dataset + '/trainA/*.jpg') b_img_paths = glob('./datasets/' + dataset + '/trainB/*.jpg') a_data_pool = data.ImageData(sess, a_img_paths, batch_size, load_size=load_size, crop_size=crop_size) b_data_pool = data.ImageData(sess, b_img_paths, batch_size, load_size=load_size, crop_size=crop_size) a_test_img_paths = glob('./datasets/' + dataset + '/testA/*.jpg') b_test_img_paths = glob('./datasets/' + dataset + '/testB/*.jpg') a_test_pool = data.ImageData(sess, a_test_img_paths, batch_size, load_size=load_size, crop_size=crop_size)
# counter it_cnt, update_cnt = utils.counter() ''' data ''' a_img_paths = glob('./datasets/' + dataset + '/trainA/*') b_img_paths = glob('./datasets/' + dataset + '/trainB/*') ab_pair_data_pool = data.ImageDataPair(sess, a_img_paths, batch_size, load_size=load_size, crop_size=crop_size) a_data_pool = data.ImageData(sess, a_img_paths, batch_size, load_size=load_size, crop_size=crop_size, channels=3) b_data_pool = data.ImageData(sess, b_img_paths, batch_size, load_size=load_size, crop_size=crop_size, channels=1) a_test_img_paths = glob('./datasets/' + dataset + '/testA/*') ab_pair_test_pool = data.ImageDataPair(sess, a_test_img_paths, batch_size=len(a_test_img_paths), load_size=load_size,
# Session management config = tf.ConfigProto(allow_soft_placement=True) config.gpu_options.allow_growth = True sess = tf.Session(config=config) it_cnt, update_cnt = ops.counter() if do_train: summary_writer = tf.summary.FileWriter('./summaries/' + args.dataset+ '/train-'+training_run_id(), sess.graph) # Data loading if not singleTestOnly: a_data_pool, b_data_pool, c_data_pool, a_test_pool, b_test_pool, c_test_pool = load_data(args) else: single_test_input_pool = data.ImageData(sess, glob(args.singletestdir+'/*.png'), 1, load_size=args.load_size, crop_size=args.crop_size, shuffle = False, random_flip = True) #Fix the random flip problem, see data.py, then make the flip False. b2c_pool = utils.ItemPool() c2b_pool = utils.ItemPool() a2b_pool = utils.ItemPool() b2a_pool = utils.ItemPool() # Checkpoint management. saver = tf.train.Saver(max_to_keep=5) # If the triplet mode is enabled, we try to load the existing checkpoint for that first. # Otherwise, we try to load the regular checkpoint only. subnet_maybe = ('/'+args.subnet) if len(args.subnet) > 0 else '' subnet_ext_maybe = (subnet_maybe + ('-transitive2')) if args.transform_twice else subnet_maybe
# Adam optimizers (beta1=0.5, the usual GAN setting) for the two
# discriminators and the generator; each minimizes its own loss over
# its own variable list.
d_a_train = tf.train.AdamOptimizer(lr, beta1=0.5).minimize(d_loss_a, var_list=d_a_var)
d_b_train = tf.train.AdamOptimizer(lr, beta1=0.5).minimize(d_loss_b, var_list=d_b_var)
g_train = tf.train.AdamOptimizer(lr, beta1=0.5).minimize(g_loss, var_list=g_var)

''' Train '''
# NOTE(review): no ConfigProto here (unlike the other init sections in
# this file), so default GPU memory behavior applies -- confirm intended.
sess = tf.Session()
cnt, update_cnt = util.counter()

# Training data: unpaired A/B JPEG sets on disk.
trainA_path = glob('./datasets/' + dataset + '/trainA/*.jpg')
trainB_path = glob('./datasets/' + dataset + '/trainB/*.jpg')
trainA_pool = data.ImageData(sess, trainA_path, batch_size, load_size=load_size, crop_size=crop_size)
trainB_pool = data.ImageData(sess, trainB_path, batch_size, load_size=load_size, crop_size=crop_size)

# Test data, built with the same pipeline parameters as training.
testA_path = glob('./datasets/' + dataset + '/testA/*.jpg')
testB_path = glob('./datasets/' + dataset + '/testB/*.jpg')
testA_pool = data.ImageData(sess, testA_path, batch_size, load_size=load_size, crop_size=crop_size)