def benchmark(X, z, parameters, NeuralNetwork=None, mode=None,
              randomSearch=None, writingPermissions=None, N=None):
    """Benchmarks Neural Networks using all permutations of the given
    parameters and finds the optimal values.

    Functions for either classification or regression; will write all
    iterations to file if given writing permissions.

    Args:
        X (numpy.ndarray): Input values
        z (numpy.ndarray): True values for inputs
        parameters (dict): Dictionary of all parameters
        NeuralNetwork (NN, optional): Neural Network class to be benchmarked.
            Defaults to the neural network class in the main script, named NN.
        mode (string, optional): Determines whether the network is benchmarked
            for "classification" or "regression". Defaults to "classification".
        randomSearch (bool, optional): If True, samples N random parameter
            combinations instead of exhausting the full grid.
            Defaults to False.
        writingPermissions (bool, optional): Determines whether the script has
            permission to write to file. Defaults to False.
        N (int, optional): Number of random search iterations.
            Defaults to 10000.

    Returns:
        (float, list, string): A tuple of the best score, a list of the
            parameters used, and a printable string containing those
            parameters.
    """
    writingPermissions = defaultingFunc(None, writingPermissions, False)
    mode = defaultingFunc(None, mode, "classification")
    NeuralNetwork = defaultingFunc(None, NeuralNetwork, NN)
    randomSearch = defaultingFunc(None, randomSearch, False)
    N = defaultingFunc(None, N, 10000)

    # Benchmark on a random subset containing the requested fraction of the data.
    dataSelection = np.random.choice(range(X.shape[0]),
                                     int(parameters["datafraction"] * X.shape[0]),
                                     replace=False)
    X = X[dataSelection]
    z = z[dataSelection]

    tasksToDo = 1
    tasksDone = 0
    minimum = -np.inf  # best score seen so far, despite the name
    params = "n/a"
    paramSTROUT = "n/a"

    if writingPermissions:
        outfile = open(
            "parameterValues/NeuralNetworkParameters_" + mode + ".txt", "w")

    print("Starting benchmarking with parameters:")
    for parameter in parameters:
        try:
            # Only list-valued entries contribute to the grid size; scalars
            # like "#repetitions" and "datafraction" have no len().
            tasksToDo *= len(parameters[parameter])
        except TypeError:
            pass
        print(parameter, ":", parameters[parameter])
        if writingPermissions:
            outfile.write(
                str(parameter) + " : " + str(parameters[parameter]) + "\n")

    if randomSearch:
        for i in range(N):
            score = 0
            hiddenLN = np.random.choice(parameters["hiddenLN"])
            hiddenNN = np.random.choice(parameters["hiddenNN"])
            epochN = np.random.choice(parameters["epochN"])
            minibatchSize = np.random.choice(parameters["minibatchSize"])
            # np.random.choice cannot sample a list of pairs directly,
            # so draw an index instead.
            eta = parameters["eta"][np.random.choice(
                range(len(parameters["eta"])))]
            lmbd = np.random.choice(parameters["lmbd"])
            alpha = np.random.choice(parameters["alpha"])
            activationFunction = np.random.choice(
                parameters["activationFunction"])
            outputFunction = np.random.choice(parameters["outputFunction"])

            for _ in range(parameters["#repetitions"]):
                network = NeuralNetwork(hiddenNN=hiddenNN, hiddenLN=hiddenLN)
                network.giveInput(X, z)
                network.giveParameters(epochN=epochN,
                                       minibatchSize=minibatchSize,
                                       eta=etaDefinerDefiner(eta[0], eta[1]),
                                       lmbd=lmbd,
                                       alpha=alpha,
                                       activationFunction=activationFunction,
                                       outputFunction=outputFunction)
                network.getBiasesWeights()
                network.train()

                if mode.lower() == "classification":
                    score += network.score / parameters["#repetitions"]
                elif mode.lower() == "regression":
                    score += network.R2 / parameters["#repetitions"]

            paramSTR = "epochN:{}, minibatchSize:{}, eta:{}, lmbd:{}, alpha:{}, activFunct:{}, outFunc:{}, layers:{}, nodes:{}"\
                .format(epochN, minibatchSize, eta, lmbd, alpha,
                        activationFunction.__name__, outputFunction.__name__,
                        hiddenLN, hiddenNN)

            if score > minimum:
                minimum = score
                paramSTROUT = paramSTR
                params = [
                    hiddenLN, hiddenNN, epochN, minibatchSize, eta, lmbd,
                    alpha, activationFunction, outputFunction
                ]

            print(i, "/", N, "| score: {:.3f}".format(score),
                  "| params:", paramSTR)

        if writingPermissions:
            outfile.close()  # close the log before the early return
        return minimum, params, paramSTROUT

    # Exhaustive grid search over every parameter combination.
    for hiddenLN in parameters["hiddenLN"]:
        for hiddenNN in parameters["hiddenNN"]:
            for epochN in parameters["epochN"]:
                for minibatchSize in parameters["minibatchSize"]:
                    for eta in parameters["eta"]:
                        for lmbd in parameters["lmbd"]:
                            for alpha in parameters["alpha"]:
                                for activationFunction in parameters["activationFunction"]:
                                    for outputFunction in parameters["outputFunction"]:
                                        score = 0
                                        tasksDone += 1

                                        for _ in range(parameters["#repetitions"]):
                                            network = NeuralNetwork(
                                                hiddenNN=hiddenNN,
                                                hiddenLN=hiddenLN)
                                            network.giveInput(X, z)
                                            network.giveParameters(
                                                epochN=epochN,
                                                minibatchSize=minibatchSize,
                                                eta=etaDefinerDefiner(eta[0], eta[1]),
                                                lmbd=lmbd,
                                                alpha=alpha,
                                                activationFunction=activationFunction,
                                                outputFunction=outputFunction)
                                            network.getBiasesWeights()
                                            network.train()

                                            if mode.lower() == "classification":
                                                score += network.score / parameters["#repetitions"]
                                            elif mode.lower() == "regression":
                                                score += network.R2 / parameters["#repetitions"]

                                        paramSTR = "epochN:{}, minibatchSize:{}, eta:{}, lmbd:{}, alpha:{}, activFunct:{}, outFunc:{}, layers:{}, nodes:{}"\
                                            .format(epochN, minibatchSize, eta,
                                                    lmbd, alpha,
                                                    activationFunction.__name__,
                                                    outputFunction.__name__,
                                                    hiddenLN, hiddenNN)

                                        if score > minimum:
                                            minimum = score
                                            paramSTROUT = paramSTR
                                            params = [
                                                hiddenLN, hiddenNN, epochN,
                                                minibatchSize, eta, lmbd, alpha,
                                                activationFunction,
                                                outputFunction
                                            ]

                                        if writingPermissions:
                                            outfile.write(
                                                str(score) + " | " + paramSTR + "\n")

                                        print("Task done:", tasksDone, "/", tasksToDo,
                                              "| score: {:.3f}".format(score),
                                              "| params:", paramSTR)

    if writingPermissions:
        outfile.write("Optimal: " + str(minimum) + " | " + paramSTROUT)
        outfile.close()

    return minimum, params, paramSTROUT
def NeuralNetwork(X, z, test=False):
    """Wrapper for a neural network. Trains a neural network using X and z.

    Args:
        X (np.ndarray): Input data the network is to be trained on.
        z (np.ndarray): Response data the network is to be trained against.
        test (bool, optional): If True, will search a hard-coded parameter
            space for optimal parameters instead of training a network.
            Defaults to False.

    Returns:
        (float, list): (score reached, [testing set prediction, testing set])
    """
    if not test:
        hiddenLayers = 2
        hiddenNeurons = 64
        epochN = 500
        minibatchSize = 32
        eta = (None, 1e-03)
        lmbd = 1e-06
        alpha = 1e-00
        activationFunction = sigmoid
        outputFunction = softMax

        Xtr, Xte, ztr, zte = train_test_split(X, z)

        network = NN(hiddenNN=hiddenNeurons, hiddenLN=hiddenLayers)
        network.giveInput(Xtr, ztr)
        network.giveParameters(epochN=epochN,
                               minibatchSize=minibatchSize,
                               eta=etaDefinerDefiner(eta[0], eta[1]),
                               lmbd=lmbd,
                               alpha=alpha,
                               activationFunction=activationFunction,
                               outputFunction=outputFunction)
        network.train(splitData=False)
        network.predict(Xte, zte)

        return network.score, [network.predictedLabel, zte]

    else:
        # Benchmarking parameter space; searched exhaustively below
        # (set randomSearch=True to sample N combinations instead).
        parameters = {
            "hiddenLN": [0, 1, 2, 4],
            "hiddenNN": [16, 32, 64, 128, 256],
            "epochN": [500],
            "minibatchSize": [32, 64],
            "eta": [[j, i**k] for i in np.logspace(0, 6, 7)
                    for j, k in [(1, 1), (None, -1)]],
            "lmbd": np.logspace(-1, -6, 3),
            "alpha": np.logspace(-0, -1, 1),
            "activationFunction": [sigmoid, ReLU_leaky, ReLU],
            "outputFunction": [softMax],
            "#repetitions": 5,
            "datafraction": 1
        }

        optimalScore, optimalParams, optimalParamSTR = benchmark(
            X, z, parameters, NN,
            mode="classification",
            randomSearch=False,
            writingPermissions=False,
            N=int(1e3))

        print("Optimal Neural Network parameters:", optimalScore,
              optimalParamSTR, sep="\n", end="\n\n")
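
# Added usage sketch, not part of the original script: drives the wrapper
# above with the same dataHandler pipeline the classification block below
# uses. Call it manually; nothing here runs on import.
def exampleNeuralNetworkUsage():
    handler = dataHandler()
    handler.makeImageLabel()
    # test=False trains one network and returns its score and predictions
    score, (predictedLabels, zte) = NeuralNetwork(handler.X, handler.z)
    print("Wrapper score:", score)
    # test=True would instead benchmark the hard-coded parameter space:
    # NeuralNetwork(handler.X, handler.z, test=True)
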
minibatchSize = 32
eta = (1, 1000000)
lmbd = 1e-01
alpha = 1e-00
activationFunction = ReLU_leaky
outputFunction = softMax

handler = dataHandler()
handler.makeImageLabel()
X, z = handler.X, handler.z

network = NN(hiddenNN=hiddenNeurons, hiddenLN=hiddenLayers)
network.giveInput(X, z)
network.giveParameters(epochN=epochN,
                       minibatchSize=minibatchSize,
                       eta=etaDefinerDefiner(eta[0], eta[1]),
                       lmbd=lmbd,
                       alpha=alpha,
                       activationFunction=activationFunction,
                       outputFunction=outputFunction)
network.train()

paramSTR = "epochN:{}, minibatchSize:{}, eta:{}, lmbd:{}, alpha:{}, activFunct:{}, outFunc:{}"\
    .format(epochN, minibatchSize, eta, lmbd, alpha,
            activationFunction.__name__, outputFunction.__name__)

print("Manually chosen parameters; classification:\n Layers: {}, Neurons: {} | score (1 is good): {} | params: {}\n"
      .format(hiddenLayers, hiddenNeurons, network.score, paramSTR))

handler.printPredictions(network)

# REGRESSION