def run_finetuned20(config):
    """Fine-tune for exactly 20 epochs, evaluating and reporting after each one.

    Parameters:
        config: run configuration, passed through to the train/eval graphs
                and to the result reporter.
    """
    _set_config_files(config)
    model = None
    # Bounded for-loop instead of `while True` + break: the original
    # post-increment check (`if epochs_count > 20: break`) ran 21 epochs,
    # one more than the function name promises.
    for epochs_count in range(1, 21):
        # do_train threads the model through iterations so each epoch
        # continues from the previous one's weights.
        train_loss, model_config, model = train_graph.do_train(config, model)
        eval_loss, eval_accuracy = eval_graph.do_eval(model_config, config)
        pylog.info(epochs_count)
        show.show_result_detail(eval_loss, eval_accuracy, train_loss, config)
def run_directly(config):
    """Save the model directly (no fine-tuning), then evaluate and report."""
    _set_config_files(config)
    loss_train, cfg_model = train_graph.direct_save(config)
    loss_eval, acc_eval = eval_graph.do_eval(cfg_model, config)
    show.show_result_detail(loss_eval, acc_eval, loss_train, config)
def run_finetuned(config):
    """Run a single fine-tuning pass, then evaluate and report the result.

    NOTE(review): a later definition of ``run_finetuned`` in this module
    shadows this one at import time — confirm which version is intended
    to survive.
    """
    _set_config_files(config)
    # Start from scratch: do_train receives None and is expected to build
    # the initial model itself.
    current_model = None
    train_loss, model_config, current_model = train_graph.do_train(
        config, current_model
    )
    eval_loss, eval_accuracy = eval_graph.do_eval(model_config, config)
    show.show_result_detail(eval_loss, eval_accuracy, train_loss, config)
def run_finetuned(config):
    """Run a single fine-tuning pass, then evaluate and report the result.

    Fixed to match the ``train_graph.do_train(config, model)`` contract used
    by the sibling runners in this file: the previous version called
    ``do_train(config)`` with one argument and unpacked only two return
    values (the siblings unpack three), and it skipped the
    ``_set_config_files(config)`` setup that every other runner performs.

    Parameters:
        config: run configuration, passed through to the train/eval graphs
                and to the result reporter.
    """
    _set_config_files(config)
    model = None
    train_loss, model_config, model = train_graph.do_train(config, model)
    eval_loss, eval_accuracy = eval_graph.do_eval(model_config, config)
    show.show_result_detail(eval_loss, eval_accuracy, train_loss, config)