def main(data_dir, lib_dir, model_name, batch_size=10):
    ckpt_dir = os.path.join(os.path.dirname(__file__), 'models', model_name)
    clip, fc_filters, tconv_Fnums, tconv_dims, tconv_filters, n_filter, n_branch, \
        reg_scale = network_helper.get_parameters(ckpt_dir)

    print('defining input data')
    features, pred_init_op = import_data(data_dir=data_dir, batch_size=batch_size)

    print('making network')
    # make network
    ntwk = network_maker.CnnNetwork(features, [], utils.my_model_fn_tens, batch_size,
                                    clip=clip, fc_filters=fc_filters, tconv_Fnums=tconv_Fnums,
                                    tconv_dims=tconv_dims, n_filter=n_filter, n_branch=n_branch,
                                    reg_scale=reg_scale, tconv_filters=tconv_filters,
                                    make_folder=False)

    print('defining save file')
    save_file = os.path.join('.', lib_dir)

    # evaluate the model for each geometry in the grid file
    print('executing the model ...')
    pred_file = ntwk.predictBin3(pred_init_op, ckpt_dir=ckpt_dir, model_name=model_name,
                                 save_file=save_file)
    return pred_file
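# Hedged usage sketch (not from the original repo): one way this prediction entry point might
# be driven for a single trained checkpoint. 'dataIn', 'predictions' and 'example_model' are
# hypothetical placeholders for the geometry-grid directory, the output directory and the
# model name under ./models.
if __name__ == '__main__':
    pred_file = main(data_dir='dataIn', lib_dir='predictions',
                     model_name='example_model', batch_size=10)
    print('predictions written to {}'.format(pred_file))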
def main(flags):
    # initialize data reader
    # the output size depends on which type of layer the network ends with (tconv or fc)
    if len(flags.tconv_dims) == 0:
        output_size = flags.fc_filters[-1]
    else:
        output_size = flags.tconv_dims[-1]

    features, labels, train_init_op, valid_init_op = data_reader.read_data(
        input_size=flags.input_size,
        output_size=output_size - 2 * flags.clip,
        x_range=flags.x_range,
        y_range=flags.y_range,
        cross_val=flags.cross_val,
        val_fold=flags.val_fold,
        batch_size=flags.batch_size,
        shuffle_size=flags.shuffle_size)

    # make network
    ntwk = network_maker.CnnNetwork(features, labels, utils.my_model_fn_tens, flags.batch_size,
                                    clip=flags.clip, fc_filters=flags.fc_filters,
                                    tconv_Fnums=flags.tconv_Fnums, tconv_dims=flags.tconv_dims,
                                    tconv_filters=flags.tconv_filters, n_filter=flags.n_filter,
                                    n_branch=flags.n_branch, reg_scale=flags.reg_scale,
                                    learn_rate=flags.learn_rate, decay_step=flags.decay_step,
                                    decay_rate=flags.decay_rate)

    # define hooks for monitoring training
    train_hook = network_helper.TrainValueHook(flags.verb_step, ntwk.loss,
                                               ckpt_dir=ntwk.ckpt_dir, write_summary=True)
    lr_hook = network_helper.TrainValueHook(flags.verb_step, ntwk.learn_rate,
                                            ckpt_dir=ntwk.ckpt_dir, write_summary=True,
                                            value_name='learning_rate')
    valid_hook = network_helper.ValidationHook(flags.eval_step, valid_init_op, ntwk.labels,
                                               ntwk.logits, ntwk.loss, ntwk.preconv, ntwk.preTconv,
                                               ckpt_dir=ntwk.ckpt_dir, write_summary=True)

    # train the network
    ntwk.train(train_init_op, flags.train_step, [train_hook, valid_hook, lr_hook],
               write_summary=True)
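# Hedged sketch (an assumption, not the project's actual flag definitions): main(flags) above
# only needs an object exposing the attributes it reads, so a SimpleNamespace such as the one
# below is enough to drive a run. Every value here is a hypothetical placeholder chosen for
# illustration; the real settings live in the repo's flag/argparse module.
from types import SimpleNamespace

example_flags = SimpleNamespace(
    # data reader settings
    input_size=8, x_range=list(range(0, 8)), y_range=list(range(8, 308)),
    cross_val=5, val_fold=0, batch_size=100, shuffle_size=100,
    # network architecture forwarded to CnnNetwork
    clip=0, fc_filters=(128, 128, 300), tconv_Fnums=(4, 4), tconv_dims=(300, 300),
    tconv_filters=(4, 1), n_filter=8, n_branch=2, reg_scale=5e-4,
    # optimization and logging schedule
    learn_rate=1e-3, decay_step=10000, decay_rate=0.96,
    verb_step=50, eval_step=500, train_step=20000,
)
# main(example_flags)  # would launch a single training run with these placeholder settings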
def main(flags):
    # initialize data reader
    if len(flags.tconv_dims) == 0:
        output_size = flags.fc_filters[-1]
    else:
        output_size = flags.tconv_dims[-1]

    reader = data_reader.DataReader(input_size=flags.input_size,
                                    output_size=output_size,
                                    x_range=flags.x_range,
                                    y_range=flags.y_range,
                                    cross_val=flags.cross_val,
                                    val_fold=flags.val_fold,
                                    batch_size=flags.batch_size,
                                    shuffle_size=flags.shuffle_size)
    features, labels, train_init_op, valid_init_op = reader.get_data_holder_and_init_op(
        (flags.train_file, flags.valid_file))

    # make network
    ntwk = network_maker.CnnNetwork(features, labels, utils.my_model_fn, flags.batch_size,
                                    fc_filters=flags.fc_filters, tconv_dims=flags.tconv_dims,
                                    tconv_filters=flags.tconv_filters,
                                    learn_rate=flags.learn_rate, decay_step=flags.decay_step,
                                    decay_rate=flags.decay_rate)

    # define hooks for monitoring training
    train_hook = network_helper.TrainValueHook(flags.verb_step, ntwk.loss,
                                               ckpt_dir=ntwk.ckpt_dir, write_summary=True)
    lr_hook = network_helper.TrainValueHook(flags.verb_step, ntwk.learn_rate,
                                            ckpt_dir=ntwk.ckpt_dir, write_summary=True,
                                            value_name='learning_rate')
    valid_hook = network_helper.ValidationHook(flags.eval_step, valid_init_op, ntwk.labels,
                                               ntwk.logits, ntwk.loss,
                                               ckpt_dir=ntwk.ckpt_dir, write_summary=True)

    # train the network
    ntwk.train(train_init_op, flags.train_step, [train_hook, valid_hook, lr_hook],
               write_summary=True)
def main(flags):
    ckpt_dir = os.path.join(os.path.dirname(__file__), 'models', flags.model_name)
    clip, fc_filters, tconv_Fnums, tconv_dims, tconv_filters, n_filter, n_branch, \
        reg_scale = network_helper.get_parameters(ckpt_dir)
    print(ckpt_dir)

    # initialize data reader
    if len(tconv_dims) == 0:
        output_size = fc_filters[-1]
    else:
        output_size = tconv_dims[-1]

    features, labels, train_init_op, valid_init_op = data_reader.read_data(
        input_size=flags.input_size,
        output_size=output_size - 2 * clip,
        x_range=flags.x_range,
        y_range=flags.y_range,
        cross_val=flags.cross_val,
        val_fold=flags.val_fold,
        batch_size=flags.batch_size,
        shuffle_size=flags.shuffle_size)

    # make network
    ntwk = network_maker.CnnNetwork(features, labels, utils.my_model_fn_tens, flags.batch_size,
                                    clip, fc_filters=fc_filters, tconv_Fnums=tconv_Fnums,
                                    tconv_dims=tconv_dims, n_filter=n_filter, n_branch=n_branch,
                                    reg_scale=reg_scale, tconv_filters=tconv_filters,
                                    learn_rate=flags.learn_rate, decay_step=flags.decay_step,
                                    decay_rate=flags.decay_rate, make_folder=False)

    # re-run the evaluation if its results do not exist or the user forces a re-run
    save_file = os.path.join(os.path.dirname(__file__), 'data',
                             'test_pred_{}.csv'.format(flags.model_name))
    if FORCE_RUN or (not os.path.exists(save_file)):
        print('Evaluating the model ...')
        pred_file, truth_file = ntwk.evaluate(valid_init_op, ckpt_dir=ckpt_dir,
                                              model_name=flags.model_name, write_summary=True)
    else:
        pred_file = save_file
        truth_file = os.path.join(os.path.dirname(__file__), 'data', 'test_truth.csv')

    # compare predictions against the ground truth and plot the per-example error distribution
    mae, mse = compare_truth_pred(pred_file, truth_file)

    plt.figure(figsize=(12, 6))
    plt.hist(mse, bins=100)
    plt.xlabel('Mean Squared Error')
    plt.ylabel('Count')
    plt.suptitle('FC + TCONV (Avg MSE={:.4e})'.format(np.mean(mse)))
    plt.savefig(os.path.join(os.path.dirname(__file__), 'data',
                             'fc_tconv_single_channel_result_cmp_{}.png'.format(flags.model_name)))
    plt.show()
    print('FC + TCONV (Avg MSE={:.4e})'.format(np.mean(mse)))
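# Hedged sketch (an assumption): compare_truth_pred is called above but not defined in this
# file. A minimal implementation consistent with that call site would load the two CSVs and
# return per-spectrum error arrays; the name compare_truth_pred_sketch marks it as illustrative
# rather than the repo's actual helper.
import numpy as np

def compare_truth_pred_sketch(pred_file, truth_file):
    """Return per-example MAE and MSE between a prediction CSV and a truth CSV."""
    pred = np.loadtxt(pred_file, delimiter=',')
    truth = np.loadtxt(truth_file, delimiter=',')
    mae = np.mean(np.abs(pred - truth), axis=1)      # one value per predicted spectrum
    mse = np.mean(np.square(pred - truth), axis=1)   # feeds the histogram plotted above
    return mae, mse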