def main(argv=None):
    """Entry point for training.

    Builds the list of training TFRecord files for the dataset selected by
    ``FLAGS.dataset_name``, prepares the log/checkpoint directories, and
    launches training.

    Args:
        argv: Unused; accepted for compatibility with app runners that pass
            the remaining command-line arguments.
    """
    del argv  # unused

    # Dataset families handled by the UBC branch below.
    ubc_variants = ('UBC_easy', 'forced_UBC_easy', 'UBC_MHAD',
                    'UBC_interpolated', 'UBC_medium', 'UBC_hard')

    if FLAGS.dataset_name == 'MHAD':
        log_directory, ckpt_directory = config.get_directories_training(
            FLAGS.dataset_name)
        csv_filename = get_csv_filename(FLAGS.dataset_name)
        csv_dir = './csv'
        if FLAGS.random_split_mode:
            # 75/25 random split; only the training shard is used here.
            tfr_list, __, __ = csv_processor.get_random_percent_split(
                csv_filename, csv_dir, ratio=0.75)
        else:
            # Leave-one-subject-out split: keep per-subject subdirectories
            # for both logs and checkpoints.
            log_directory = os.path.join(
                log_directory, str(FLAGS.test_subject))
            ckpt_directory = os.path.join(
                ckpt_directory, str(FLAGS.test_subject)) + '/'
            tfr_list, __, __, __ = csv_processor.get_subject_wise_split(
                csv_filename, csv_dir, FLAGS.test_subject)
            # Parenthesized form works as a statement in Python 2 and as a
            # call in Python 3.
            print('Currently processing for subject number %d'
                  % FLAGS.test_subject)
        # NOTE(review): assumes ckpt_directory ends with '/' (the subject
        # branch appends it explicitly; config presumably does the same).
        train_ckpt = '%smodel.ckpt' % ckpt_directory
        utils.restore_logs(log_directory)
        train(tfr_list, train_ckpt, log_directory, ckpt_directory)
    elif FLAGS.dataset_name in ubc_variants:
        log_directory, ckpt_directory = config.get_directories_training(
            FLAGS.dataset_name)
        csv_filename = get_csv_filename(FLAGS.dataset_name)
        # UBC datasets ship a precomputed train list; no split needed.
        tfr_list = csv_processor.get_train_list(csv_filename)
        train_ckpt = '%smodel.ckpt' % ckpt_directory
        utils.restore_logs(log_directory)
        train(tfr_list, train_ckpt, log_directory, ckpt_directory)
def main(argv=None):
    """Reset the evaluation log directory, then run the test loop.

    Args:
        argv: Unused; accepted for compatibility with app runners.
    """
    del argv  # not consumed by this entry point
    utils.restore_logs(FLAGS.test_logs)
    test()
def main(argv=None):
    """Reset the training log directory and run training on the first GPU.

    Args:
        argv: Unused; accepted for compatibility with app runners.
    """
    del argv  # not consumed by this entry point
    utils.restore_logs(FLAGS.train_logs)
    # Pin all ops created by train() to GPU 0.
    with tf.device('/gpu:0'):
        train()
def main(argv=None):
    """Reset the training log directory and start training.

    Args:
        argv: Unused; accepted for compatibility with app runners.
    """
    del argv  # not consumed by this entry point
    utils.restore_logs(FLAGS.train_logs)
    train()