def main(_):
    save_model_path = os.path.join(FLAGS.output_dir, FLAGS.save_model_path)
    save_sample_path = os.path.join(FLAGS.output_dir, FLAGS.save_sample_path)
    save_log_path = os.path.join(FLAGS.output_dir, FLAGS.save_log_path)

    # create directories if they do not exist
    if not os.path.exists(save_model_path):
        os.mkdir(save_model_path)
    if not os.path.exists(save_log_path):
        os.mkdir(save_log_path)
    if not os.path.exists(save_sample_path) and not FLAGS.sample_to_log:
        os.mkdir(save_sample_path)

    model = DeepHeatmapsModel(
        mode='TRAIN', train_iter=FLAGS.train_iter, batch_size=FLAGS.batch_size,
        learning_rate=FLAGS.learning_rate, l_weight_primary=FLAGS.l_weight_primary,
        l_weight_fusion=FLAGS.l_weight_fusion, l_weight_upsample=FLAGS.l_weight_upsample,
        reg=FLAGS.reg, adam_optimizer=FLAGS.adam_optimizer, momentum=FLAGS.momentum,
        step=FLAGS.step, gamma=FLAGS.gamma, weight_initializer=FLAGS.weight_initializer,
        weight_initializer_std=FLAGS.weight_initializer_std,
        bias_initializer=FLAGS.bias_initializer, image_size=FLAGS.image_size,
        c_dim=FLAGS.c_dim, num_landmarks=FLAGS.num_landmarks, sigma=FLAGS.sigma,
        scale=FLAGS.scale, margin=FLAGS.margin, bb_type=FLAGS.bb_type,
        win_mult=FLAGS.win_mult, augment_basic=FLAGS.augment_basic,
        augment_texture=FLAGS.augment_texture, p_texture=FLAGS.p_texture,
        augment_geom=FLAGS.augment_geom, p_geom=FLAGS.p_geom,
        output_dir=FLAGS.output_dir, save_model_path=save_model_path,
        save_sample_path=save_sample_path, save_log_path=save_log_path,
        pre_train_path=FLAGS.pre_train_path, load_pretrain=FLAGS.load_pretrain,
        load_primary_only=FLAGS.load_primary_only, img_path=FLAGS.img_path,
        valid_data=FLAGS.valid_data, valid_size=FLAGS.valid_size,
        log_valid_every=FLAGS.log_valid_every, train_crop_dir=FLAGS.train_crop_dir,
        img_dir_ns=FLAGS.img_dir_ns, print_every=FLAGS.print_every,
        save_every=FLAGS.save_every, sample_every=FLAGS.sample_every,
        sample_grid=FLAGS.sample_grid, sample_to_log=FLAGS.sample_to_log,
        debug_data_size=FLAGS.debug_data_size, debug=FLAGS.debug,
        use_epoch_data=FLAGS.use_epoch_data, epoch_data_dir=FLAGS.epoch_data_dir)

    model.train()
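# A minimal sketch of the TF1 flag boilerplate main(_) assumes: one flag per
# constructor argument, parsed by tf.app.run(). The defaults below are
# placeholders, not the project's actual values.

import os
import tensorflow as tf
from deep_heatmaps_model_fusion_net import DeepHeatmapsModel

FLAGS = tf.app.flags.FLAGS

tf.app.flags.DEFINE_string('output_dir', 'output', 'base directory for models, samples and logs')
tf.app.flags.DEFINE_string('save_model_path', 'model', 'sub-directory for model checkpoints')
tf.app.flags.DEFINE_integer('train_iter', 100000, 'number of training iterations')
tf.app.flags.DEFINE_float('learning_rate', 1e-4, 'initial learning rate')
tf.app.flags.DEFINE_boolean('sample_to_log', True, 'write sample grids to the TensorBoard log')
# ... remaining flags follow the same pattern ...

if __name__ == '__main__':
    tf.app.run()  # parses the flags, then calls main(_)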
bb_type = 'init'

img_list = load_menpo_image_list(
    img_dir=data_dir, test_data=test_data, train_crop_dir=data_dir,
    img_dir_ns=data_dir, bb_type=bb_type, bb_dictionary=bb_dictionary,
    mode='TEST', return_transform=map_landmarks_to_original_image)

# load model
heatmap_model = DeepHeatmapsModel(
    mode='TEST', img_path=data_dir, test_model_path=model_path,
    test_data=test_data, menpo_verbose=False)

# *************** predict landmarks ***************
print("\npredicting landmarks for: " + os.path.join(data_dir, test_data))
print("\nsaving landmarks to: " + out_dir)

for i, img in enumerate(img_list):
    if i == 0:
        reuse = None  # build the graph variables on the first image
    else:
        reuse = True  # share them on subsequent images

    preds = heatmap_model.get_landmark_predictions(
        img_list=[img], pdm_models_dir=pdm_path, reuse=reuse)
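# The reuse flag toggled after the first image follows the standard TF1
# variable-sharing convention; a minimal standalone sketch of the pattern
# (the scope and variable names here are illustrative only):

import tensorflow as tf

def linear(x, reuse=None):
    # variables are created when reuse is None and shared when reuse is True
    with tf.variable_scope('linear', reuse=reuse):
        w = tf.get_variable('w', shape=[4, 4])
        return tf.matmul(x, w)

x = tf.placeholder(tf.float32, [None, 4])
y_first = linear(x)               # first call creates the variables
y_shared = linear(x, reuse=True)  # later calls reuse them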
def evaluate_heatmap_fusion_network(model_path, img_path, test_data, batch_size=10, image_size=256,
                                    margin=0.25, bb_type='gt', c_dim=3, scale=1, num_landmarks=68,
                                    debug=False, debug_data_size=20):
    t = time()

    from deep_heatmaps_model_fusion_net import DeepHeatmapsModel
    import logging
    logging.getLogger('tensorflow').disabled = True

    # load test image menpo list
    test_menpo_img_list = load_menpo_test_list(
        img_path, test_data=test_data, image_size=image_size, margin=margin, bb_type=bb_type)

    if debug:
        test_menpo_img_list = test_menpo_img_list[:debug_data_size]
        print('\n*** FUSION NETWORK: calculating normalized mean error on: ' + test_data +
              ' set (%d images - debug mode) ***' % debug_data_size)
    else:
        print('\n*** FUSION NETWORK: calculating normalized mean error on: ' + test_data +
              ' set (%d images) ***' % len(test_menpo_img_list))

    # create heatmap model
    tf.reset_default_graph()
    model = DeepHeatmapsModel(
        mode='TEST', batch_size=batch_size, image_size=image_size, c_dim=c_dim,
        num_landmarks=num_landmarks, img_path=img_path, test_model_path=model_path,
        test_data=test_data, menpo_verbose=False)

    # add placeholders
    model.add_placeholders()
    # build model
    model.build_model()
    # create loss ops
    model.create_loss_ops()

    num_batches = int(1. * len(test_menpo_img_list) / batch_size)
    if num_batches == 0:
        batch_size = len(test_menpo_img_list)
        num_batches = 1

    reminder = len(test_menpo_img_list) - num_batches * batch_size
    num_batches_reminder = num_batches + 1 * (reminder > 0)
    img_inds = np.arange(len(test_menpo_img_list))

    with tf.Session() as session:
        # load trained parameters
        saver = tf.train.Saver()
        saver.restore(session, model_path)

        print('\nnum batches: ' + str(num_batches_reminder))

        err = []
        for j in range(num_batches):
            print('batch %d / %d ...' % (j + 1, num_batches_reminder))
            batch_inds = img_inds[j * batch_size:(j + 1) * batch_size]

            batch_images, _, batch_landmarks_gt = load_images_landmarks(
                test_menpo_img_list, batch_inds=batch_inds, image_size=image_size,
                c_dim=c_dim, num_landmarks=num_landmarks, scale=scale)

            batch_maps_pred = session.run(model.pred_hm_f, {model.images: batch_images})

            batch_pred_landmarks = batch_heat_maps_to_landmarks(
                batch_maps_pred, batch_size=batch_size, image_size=image_size,
                num_landmarks=num_landmarks)

            batch_err = session.run(
                model.nme_per_image,
                {model.lms: batch_landmarks_gt, model.pred_lms: batch_pred_landmarks})
            err = np.hstack((err, batch_err))

        if reminder > 0:
            print('batch %d / %d ...' % (j + 2, num_batches_reminder))
            reminder_inds = img_inds[-reminder:]

            batch_images, _, batch_landmarks_gt = load_images_landmarks(
                test_menpo_img_list, batch_inds=reminder_inds, image_size=image_size,
                c_dim=c_dim, num_landmarks=num_landmarks, scale=scale)

            batch_maps_pred = session.run(model.pred_hm_f, {model.images: batch_images})

            batch_pred_landmarks = batch_heat_maps_to_landmarks(
                batch_maps_pred, batch_size=reminder, image_size=image_size,
                num_landmarks=num_landmarks)

            batch_err = session.run(
                model.nme_per_image,
                {model.lms: batch_landmarks_gt, model.pred_lms: batch_pred_landmarks})
            err = np.hstack((err, batch_err))

    print('\ndone!')
    print('run time: ' + str(time() - t))

    return err
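# A usage sketch: err holds one normalized mean error per test image, so summary
# statistics follow directly. The checkpoint and image paths are hypothetical,
# and the 0.08 failure threshold is a common convention, not fixed by this code.

import numpy as np

err = evaluate_heatmap_fusion_network(
    model_path='model/deep_heatmaps-100000',  # hypothetical checkpoint prefix
    img_path='data', test_data='full', batch_size=10)

print('mean NME:   %.4f' % np.mean(err))
print('median NME: %.4f' % np.median(err))
print('failure rate (NME > 0.08): %.2f%%' % (100. * np.mean(err > 0.08)))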
def main(_):
    for i, param in enumerate(params):
        test_dir = os.path.join(FLAGS.output_dir, str(param))
        if not os.path.exists(test_dir):
            os.mkdir(test_dir)

        print('\n##### RUNNING TESTS FUSION (%d/%d) #####' % (i + 1, len(params)))
        print('##### current directory: ' + test_dir)

        save_model_path = os.path.join(test_dir, 'model')
        save_sample_path = os.path.join(test_dir, 'sample')
        save_log_path = os.path.join(test_dir, 'logs')

        # create directories if they do not exist
        if not os.path.exists(save_model_path):
            os.mkdir(save_model_path)
        if not os.path.exists(save_log_path):
            os.mkdir(save_log_path)
        if not os.path.exists(save_sample_path) and not FLAGS.sample_to_log:
            os.mkdir(save_sample_path)

        tf.reset_default_graph()  # reset graph between runs

        model = DeepHeatmapsModel(
            mode='TRAIN', train_iter=FLAGS.train_iter, batch_size=FLAGS.batch_size,
            learning_rate=param, l_weight_primary=FLAGS.l_weight_primary,
            l_weight_fusion=FLAGS.l_weight_fusion, l_weight_upsample=FLAGS.l_weight_upsample,
            reg=FLAGS.reg, adam_optimizer=FLAGS.adam_optimizer, momentum=FLAGS.momentum,
            step=FLAGS.step, gamma=FLAGS.gamma, weight_initializer=FLAGS.weight_initializer,
            weight_initializer_std=FLAGS.weight_initializer_std,
            bias_initializer=FLAGS.bias_initializer, image_size=FLAGS.image_size,
            c_dim=FLAGS.c_dim, num_landmarks=FLAGS.num_landmarks, sigma=FLAGS.sigma,
            scale=FLAGS.scale, margin=FLAGS.margin, bb_type=FLAGS.bb_type,
            approx_maps=FLAGS.approx_maps, win_mult=FLAGS.win_mult,
            augment_basic=FLAGS.augment_basic, basic_start=FLAGS.basic_start,
            augment_texture=FLAGS.augment_texture, p_texture=FLAGS.p_texture,
            augment_geom=FLAGS.augment_geom, p_geom=FLAGS.p_geom,
            artistic_step=FLAGS.artistic_step, artistic_start=FLAGS.artistic_start,
            output_dir=FLAGS.output_dir, save_model_path=save_model_path,
            save_sample_path=save_sample_path, save_log_path=save_log_path,
            test_model_path=FLAGS.test_model_path,
            pre_train_path=os.path.join(save_model_path, FLAGS.pre_train_model_name),
            load_pretrain=FLAGS.load_pretrain, load_primary_only=FLAGS.load_primary_only,
            img_path=FLAGS.img_path, test_data=FLAGS.test_data,
            valid_data=FLAGS.valid_data, valid_size=FLAGS.valid_size,
            log_valid_every=FLAGS.log_valid_every, train_crop_dir=FLAGS.train_crop_dir,
            img_dir_ns=FLAGS.img_dir_ns, print_every=FLAGS.print_every,
            save_every=FLAGS.save_every, sample_every=FLAGS.sample_every,
            sample_grid=FLAGS.sample_grid, sample_to_log=FLAGS.sample_to_log,
            debug_data_size=FLAGS.debug_data_size, debug=FLAGS.debug,
            use_epoch_data=FLAGS.use_epoch_data, epoch_data_dir=FLAGS.epoch_data_dir)

        model.train()
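# A minimal sketch of the sweep list this script presumes: learning_rate=param
# and str(param) as the sub-directory name suggest a module-level list of
# learning rates. The values below are placeholders.

params = [1e-3, 5e-4, 1e-4, 5e-5]  # hypothetical sweep values - one run per entry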
# directory containing test sets
data_dir = '/Users/arik/Dropbox/a_mac_thesis/artistic_faces/artistic_face_dataset/'
test_sets = ['all_AF']  # test sets to evaluate

# data_dir = '/Users/arik/Desktop/Thesis_mac/semi_art_sets/semi_art_sets_wiki_train_2/'
# test_sets = [
#     'challenging_set_aug_geom_texture',
#     'common_set_aug_geom_texture',
#     'test_set_aug_geom_texture',
#     'full_set_aug_geom_texture'
# ]

# load heatmap model
heatmap_model = DeepHeatmapsModel(
    mode='TEST', img_path=conv_dir, test_model_path=model_path,
    menpo_verbose=False, scale=1)

bb_dir = os.path.join(conv_dir, 'Bounding_Boxes')

# predict landmarks for input test sets
for i, test_data in enumerate(test_sets):
    if i == 0:
        reuse = None
    else:
        reuse = True

    out_temp = os.path.join(out_dir, test_data)
    if not os.path.exists(out_temp):
        os.mkdir(out_temp)