# Evaluate the trained model on the held-out test split, record metrics into
# `experiment`, and persist via record_result(); on failure, dump diagnostics.
# NOTE(review): the try/except nesting below is reconstructed from a
# whitespace-mangled source — confirm it matches the enclosing loop's intent.
try:
    with torch.no_grad():  # inference only — no autograd bookkeeping
        model.eval()
        y_hat = model.predict(x_test)
        y_hat = np.argmax(y_hat, axis=1)  # class scores -> predicted labels
        # Hoist the flattened prediction array: it was being rebuilt per metric.
        y_pred = np.asarray(y_hat).flatten()
        # auc = sklearn.metrics.roc_auc_score(y_test, y_pred, multi_class='ovo')
        acc = sklearn.metrics.accuracy_score(y_test, y_pred)
        f1 = sklearn.metrics.f1_score(y_test, y_pred, average='macro')
        experiment["model"] = model.name
        # AUC deliberately disabled (see commented line above); keep the key so
        # downstream result tables stay schema-stable.
        experiment["auc"] = 0
        experiment["acc"] = acc
        experiment["f1"] = f1
        experiment["num_genes"] = len(x_train.columns)
        experiment["time_elapsed"] = str(time.time() - start_time)
        results = record_result(results, experiment, filename)
        print(experiment)
except Exception as e:
    print("------------------------------------")
    print("Exception, x shape: ", x_train.shape)
    # print(str( gene in list(neighbors.nodes)))
    print(x_train.shape, y_train.shape, adj.shape)
    print(e)
    # Use the public traceback module directly (the original reached through
    # logging's private internal import via `logging.traceback`).
    logging.error(traceback.format_exc())
    print("------------------------------------")

# cleanup — drop the cached best model and free GPU memory before next run
model.best_model = None
del model
torch.cuda.empty_cache()
# Fit one hyper-parameter configuration, score it on the test split, keep deep
# copies of the best-AUC and best-ACC models seen so far, then release GPU
# memory. On failure the traceback is stored in the experiment record.
# NOTE(review): nesting relative to the unseen enclosing loop is reconstructed
# from a whitespace-mangled source — confirm the final two prints sit after it.
try:
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")  # silence noisy fit-time warnings
        model.fit(X_train, y_train, adj)
    model.eval()
    with torch.no_grad():
        y_hat = model.predict(X_test)
    # Hoist the argmax: it was computed twice (once per metric).
    y_pred = np.argmax(y_hat, axis=1)
    # NOTE(review): roc_auc_score on hard argmax labels, not class scores —
    # this under-reports AUC; presumably y_hat probabilities were intended.
    # Left as-is to keep recorded metrics comparable with earlier runs.
    auc = sklearn.metrics.roc_auc_score(y_test, y_pred)
    acc = sklearn.metrics.accuracy_score(y_test, y_pred)
    print("auc:", auc, " acc: ", acc)
    experiment["auc"] = auc
    experiment["acc"] = acc
    results.append(experiment)
    if auc > best_auc:
        best_auc = auc  # floats are immutable — deepcopy was a no-op
        best_auc_model = copy.deepcopy(model)
    if acc > best_acc:
        best_acc = acc
        best_acc_model = copy.deepcopy(model)
    model.best_model = None
    # cleanup — free this candidate before the next configuration
    del model
    torch.cuda.empty_cache()
except Exception:
    tb = traceback.format_exc()
    experiment['error'] = tb  # keep the failure attached to its config
    print(tb)
    print(fixed_params)

# summary of the whole sweep
print(*results, sep="\n")
print("best AUC: ", best_auc, " best ACC: ", best_acc)