Example #1
def run_finetuned20(config):
    # train_graph, eval_graph, show, pylog and _set_config_files are
    # project-local helpers assumed to be imported elsewhere in the module.
    _set_config_files(config)
    model = None
    epochs_count = 0
    # Fine-tune and evaluate repeatedly, reporting the result after each epoch.
    while True:
        train_loss, model_config, model = train_graph.do_train(config, model)
        epochs_count += 1
        eval_loss, eval_accuracy = eval_graph.do_eval(model_config, config)
        pylog.info(epochs_count)
        show.show_result_detail(eval_loss, eval_accuracy, train_loss, config)
        if epochs_count > 20:
            break
Example #2
def run_directly(config):
    _set_config_files(config)

    # Save the graph directly without fine-tuning, then evaluate and report.
    train_loss, model_config = train_graph.direct_save(config)
    eval_loss, eval_accuracy = eval_graph.do_eval(model_config, config)
    show.show_result_detail(eval_loss, eval_accuracy, train_loss, config)
Example #3
def run_finetuned(config):
    _set_config_files(config)
    model = None
    # Fine-tune for a single run, then evaluate and report the result.
    train_loss, model_config, model = train_graph.do_train(config, model)
    eval_loss, eval_accuracy = eval_graph.do_eval(model_config, config)
    show.show_result_detail(eval_loss, eval_accuracy, train_loss, config)
Example #4
def run_finetuned(config):
    # Variant of run_finetuned where do_train takes only the config and
    # returns no model handle; train, evaluate, then report the result.
    train_loss, model_config = train_graph.do_train(config)
    eval_loss, eval_accuracy = eval_graph.do_eval(model_config, config)
    show.show_result_detail(eval_loss, eval_accuracy, train_loss, config)
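A minimal usage sketch of how these runners might be invoked from an entry point; the main function and the config values below are illustrative assumptions, not part of the examples above.

def main():
    # Hypothetical config; in the original projects this is likely an args or
    # flags object, so the dict keys here are assumptions for illustration.
    config = {
        "data_dir": "./data",
        "output_dir": "./models",
        "learning_rate": 2e-5,
    }
    # run_directly evaluates the saved graph without fine-tuning,
    # run_finetuned fine-tunes once, and run_finetuned20 repeats the
    # train/evaluate cycle for about 20 epochs.
    run_finetuned(config)

if __name__ == "__main__":
    main()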