def CrossValidate_LR(X, y, config, output_layer_size):
    """Grid-search learning rate and optimizer for logistic regression
    using 5-fold stratified shuffle-split cross-validation.

    Per-fold metrics are appended as a tab-separated table to
    'LR_ModelSelection.txt' (header: lr, Optimizer, CV, Accuracy,
    Precision, Recall, F1).

    Args:
        X: feature matrix, row-indexable, shape (n_samples, n_features).
        y: label vector aligned with X.
        config: hyperparameter dict; mutated in place with loss, seed,
            layer sizes, and each (lr, opt) combination tried.
        output_layer_size: number of output units/classes.
    """
    kf = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42)

    config["loss"] = "ce"
    config["seed"] = 1234
    config.update({
        "input_layer_size": X.shape[1],
        "output_layer_size": output_layer_size
    })

    # Hyperparameter grid to sweep.
    lr = [1, 0.5, 0.1, 0.05, 0.01, 0.005, 0.001]
    opt = ["adam", "nag", "momentum", "gd"]

    # Context manager guarantees the results file is closed even if a
    # training fold raises (original used bare open()/close()).
    with open('LR_ModelSelection.txt', 'w') as f:
        f.write('lr\tOptimizer\tCV\tAccuracy\tPrecision\tRecall\tF1\n')
        for l in lr:
            for o in opt:
                config["lr"] = l
                config["opt"] = o
                # Fold index is 1-based in BOTH the progress message and
                # the log line (original printed 0-based but logged
                # 1-based). The optimizer is now included in the message.
                for i, (train_index, val_index) in enumerate(
                        kf.split(X, y), start=1):
                    print('lr {}, opt {}, CV={}...............\n'
                          .format(l, o, i))
                    X_train, X_val = X[train_index], X[val_index]
                    y_train, y_val = y[train_index], y[val_index]

                    # Configure the network with the current
                    # hyperparameters for logistic regression.
                    LR = LogisticRegression(config)

                    # Train on the fold's train split, evaluate on its
                    # validation split.
                    Accuracy, Precision, Recall, F1 = LR.Train_LR(
                        X_train, y_train, X_val, y_val)
                    f.write('{}\t{}\t{}\t{}\t{}\t{}\t{}\n'.format(
                        l, o, i, Accuracy, Precision, Recall, F1))
def Train_LR(X, y, config, output_layer_size):
    """Train a logistic-regression model with fixed hyperparameters and
    print its validation performance.

    Uses an 80/20 stratified train/validation split (random_state=42),
    cross-entropy loss, lr=0.5, and the Adam optimizer.

    Args:
        X: feature matrix, shape (n_samples, n_features).
        y: label vector aligned with X.
        config: hyperparameter dict; mutated in place.
        output_layer_size: number of output units/classes.

    Returns:
        Tuple (Accuracy, Precision, Recall, F1) measured on the
        validation split. (The original discarded these after printing;
        returning them is backward-compatible and lets callers reuse
        the metrics.)
    """
    # Fixed hyperparameters chosen from model selection.
    config["loss"] = "ce"
    config["seed"] = 1234
    config["lr"] = 0.5
    config["opt"] = "adam"
    config.update({
        "input_layer_size": X.shape[1],
        "output_layer_size": output_layer_size
    })
    np.random.seed(1234)

    # Splitting data into train and val (stratified to preserve class
    # proportions).
    X_train, X_val, y_train, y_val = train_test_split(
        X, y, test_size=0.2, stratify=y, random_state=42)

    # Configure the network with the hyperparameters for logistic
    # regression.
    LR = LogisticRegression(config)

    # Train, then evaluate on the validation split.
    Accuracy, Precision, Recall, F1 = LR.Train_LR(X_train, y_train,
                                                  X_val, y_val)

    print("\n\n\n\n", "=" * 60, sep="")
    print("\n\t\tPerformance on Validation data\n")
    print("=" * 60)
    print(
        '\n\t\tAccuracy={}\n\n\t\tPrecision={}\n\n\t\tRecall={}\n\n\t\tF1={}\n\n'
        .format(Accuracy, Precision, Recall, F1))
    print("-" * 60)
    print("\n\n\n")

    return Accuracy, Precision, Recall, F1