                'batch_size': BATCH_SIZE,
                'epochs': EPOCHS,
                'verbose': 1,
                'callbacks': [es],
                'use_multiprocessing': True}

# ==================================================================================
# Training and prediction
training2(img_list, pctls, model_func, feat_list_new, uncertainty,
          data_path, batch, DROPOUT_RATE, HOLDOUT, **model_params)

prediction(img_list, pctls, feat_list_new, data_path, batch, remove_perm=True)

viz_params = {'img_list': img_list,
              'pctls': pctls,
              'data_path': data_path,
              'uncertainty': uncertainty,
              'batch': batch,
              'feat_list_new': feat_list_new}

viz = VizFuncs(viz_params)
viz.metric_plots()
viz.time_plot()
# viz.metric_plots_multi()
viz.false_map()
# viz.time_size()
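# ----------------------------------------------------------------------------------
# Illustrative sketch only (not part of the pipeline above, never called): how a keyword
# dict like model_params is typically unpacked into keras.Model.fit() inside a training
# routine such as training2(). The EarlyStopping settings, the toy classifier, and the
# random X/y arrays below are assumptions for demonstration, not this project's model_func.
import numpy as np
import tensorflow as tf


def _demo_fit_with_model_params():
    es_demo = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=5,
                                               restore_best_weights=True)
    demo_params = {'batch_size': 8192,
                   'epochs': 100,
                   'verbose': 1,
                   'callbacks': [es_demo]}
    # 'use_multiprocessing': True (set above) only applies to generator/Sequence input
    # and was removed from fit() in Keras 3, so it is omitted from this sketch.

    # Toy stand-in for model_func: a small dense binary classifier with dropout
    model = tf.keras.Sequential([
        tf.keras.Input(shape=(10,)),
        tf.keras.layers.Dense(32, activation='relu'),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(1, activation='sigmoid')])
    model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

    X = np.random.rand(1000, 10).astype('float32')
    y = (np.random.rand(1000) > 0.5).astype('float32')
    # The dict is unpacked directly into fit(); this is what **model_params does above
    model.fit(X, y, validation_split=0.2, **demo_params)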
# Accumulate classification metrics for the current test set
accuracy.append(accuracy_score(y_test, preds))
precision.append(precision_score(y_test, preds))
recall.append(recall_score(y_test, preds))
f1.append(f1_score(y_test, preds))

del preds, p_hat, aleatoric, epistemic, X_test, y_test, model, data_test, data_vector_test, data_ind_test

# Save metrics and testing times to CSV
metrics = pd.DataFrame(np.column_stack([pctls, accuracy, precision, recall, f1]),
                       columns=['cloud_cover', 'accuracy', 'precision', 'recall', 'f1'])
metrics.to_csv(metrics_path / 'metrics.csv', index=False)

times = [float(i) for i in times]
times_df = pd.DataFrame(np.column_stack([pctls, times]),
                        columns=['cloud_cover', 'testing_time'])
times_df.to_csv(metrics_path / 'testing_times.csv', index=False)

# ======================================================================================================================
# Train the generalized BNN model, then generate Monte Carlo predictions and visualizations for the test images
training_BNN_gen_model(img_list_train, feat_list_new, model_func, data_path, batch, dropout_rate, **model_params)
prediction_BNN_gen_model(img_list_test, pctls, feat_list_new, data_path, batch, MC_passes, **model_params)

viz = VizFuncs(viz_params)
viz.metric_plots()
viz.metric_plots_multi()
viz.time_plot()
viz.false_map(probs=False, save=False)
viz.false_map_borders()
viz.fpfn_map()
viz.uncertainty_map_NN()
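# ----------------------------------------------------------------------------------------------------------------------
# Illustrative sketch only (not the repository's prediction_BNN_gen_model, never called):
# the usual MC-dropout decomposition that yields the p_hat / aleatoric / epistemic arrays
# deleted in the metrics block above and mapped by viz.uncertainty_map_NN(). Assumes a
# Keras binary classifier whose Dropout layers are kept active at inference (training=True)
# and a float32 feature matrix X_test; the function name and signature are hypothetical.
import numpy as np


def _mc_dropout_uncertainty(model, X_test, MC_passes=50):
    """Run MC_passes stochastic forward passes and split predictive variance into
    aleatoric and epistemic components (Kwon et al. 2018 style decomposition)."""
    # p_hat: (MC_passes, n_samples) array of sigmoid probabilities, dropout left on
    p_hat = np.stack([model(X_test, training=True).numpy().squeeze(axis=-1)
                      for _ in range(MC_passes)])
    p_mean = p_hat.mean(axis=0)                              # predictive probability
    aleatoric = np.mean(p_hat * (1 - p_hat), axis=0)         # data (observation) noise
    epistemic = np.mean(p_hat ** 2, axis=0) - p_mean ** 2    # model uncertainty
    preds = (p_mean >= 0.5).astype(int)                      # hard labels for the metrics above
    return preds, p_hat, aleatoric, epistemic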