# ====== trainign ====== # print('Start training ...') task = training.MainLoop(batch_size=128, seed=1234, shuffle_level=2, allow_rollback=True) task.set_checkpoint(MODEL_PATH, model) task.set_callbacks([ training.NaNDetector(), training.EarlyStopGeneralizationLoss('valid', ce, threshold=5, patience=3) ]) task.set_train_task(func=f_train, data=(X_train, y_train), epoch=NB_EPOCH, name='train') task.set_valid_task(func=f_test, data=(X_valid, y_valid), freq=training.Timer(percentage=0.6), name='valid') task.set_eval_task(func=f_test, data=(X_test, y_test), name='eval') task.run() # =========================================================================== # Exsternal validation # Load model from file and perform validation # =========================================================================== script = r""" import os os.environ['ODIN'] = 'float32,gpu,seed=87654321' import pickle import numpy as np
# NOTE(review): this chunk begins mid-expression — the enclosing
# `task.set_callbacks([ training.LambdaCallback(fn=...,` call opens before
# this view. The fragment below completes that callback list.
                          task_name='train', signal=training.TaskSignal.EpochEnd),
    # Print the task state at the end of every validation epoch.
    training.LambdaCallback(fn=lambda t: print(str(t)),
                            task_name='valid', signal=training.TaskSignal.EpochEnd),
    # Summarize train/valid loss per epoch and save the plot as a PDF.
    training.EpochSummary(task_name=('train', 'valid'), output_name=loss,
                          print_plot=True,
                          save_path=os.path.join(model_save_path, 'summary.pdf'))
])
# NOTE(review): the valid and test tasks both use the test split here —
# looks intentional for this example script, but verify against the caller.
task.set_train_task(f_train, (ds['X_train'], ds['y_train']), epoch=N_EPOCH,
                    name='train')
task.set_valid_task(f_test, (ds['X_test'], ds['y_test']),
                    freq=training.Timer(percentage=1.), name='valid')
task.set_eval_task(f_test, (ds['X_test'], ds['y_test']), name='test')
task.run()
# ===========================================================================
# Evaluate
# ===========================================================================
# Batched evaluation over the test split: accumulate per-batch accuracy and
# confusion matrices, then report mean accuracy.
# NOTE(review): `f_test` is assumed to return (cross_entropy, accuracy,
# confusion_matrix) per batch — TODO confirm where f_test is compiled.
ACC = []
CM = []
# seed=None disables shuffling so X/y batches stay aligned — presumably;
# verify against odin.fuel's set_batch semantics.
for X, y in zip(
        F.as_data(ds['X_test']).set_batch(128, seed=None),
        F.as_data(ds['y_test']).set_batch(128, seed=None)):
    ce, acc, cm = f_test(X, y)
    ACC.append(acc)
    CM.append(cm)
# Mean of per-batch accuracies (unweighted — a short final batch counts the
# same as a full one).
print("Accuracy:", np.mean(ACC))