Пример #1
0
def NeuralNetwork(X, z, test=False):
    """Wrapper for a neural network. Trains a neural network using X and z.

    Args:
        X (np.ndarray): Input data the network is to be trained on.
        z (np.ndarray): Response data the network is to be trained against.
        test (bool, optional): If true, will search a hard-coded parameter-
                               space for optimal parameters instead of
                               training a network. Defaults to False.

    Returns:
        (float, list): When ``test`` is False:
                       (score reached, [testing set prediction, testing set]).
                       When ``test`` is True:
                       (best benchmark score, [best parameters, their string
                       representation]).
    """
    if not test:
        # Hand-tuned hyperparameters for a single training run.
        hiddenLayers = 2
        hiddenNeurons = 64
        epochN = 500
        minibatchSize = 32
        eta = (None, 1e-03)
        lmbd = 1e-06
        alpha = 1e-00
        activationFunction = sigmoid
        outputFunction = softMax

        # Hold out a test set so the reported score is out-of-sample.
        Xtr, Xte, ztr, zte = train_test_split(X, z)

        network = NN(hiddenNN=hiddenNeurons, hiddenLN=hiddenLayers)
        network.giveInput(Xtr, ztr)
        network.giveParameters(epochN=epochN,
                               minibatchSize=minibatchSize,
                               eta=etaDefinerDefiner(eta[0], eta[1]),
                               lmbd=lmbd,
                               alpha=alpha,
                               activationFunction=activationFunction,
                               outputFunction=outputFunction)
        # Data were already split above, so the network must not re-split.
        network.train(splitData=False)

        network.predict(Xte, zte)

        return network.score, [network.predictedLabel, zte]

    else:
        # Benchmarking parameter space.
        # NOTE(review): the original comment said "random search", but
        # benchmarkNN is invoked with randomSearch=False (grid search) —
        # confirm which mode is intended.
        parameters = {
            "hiddenLN": [0, 1, 2, 4],
            "hiddenNN": [16, 32, 64, 128, 256],
            "epochN": [500],
            "minibatchSize": [32, 64],
            # Pairs of (eta-mode flag, rate); see etaDefinerDefiner usage above.
            "eta": [[j, i**k] for i in np.logspace(0, 6, 7)
                    for j, k in [(1, 1), (None, -1)]],
            "lmbd":
            np.logspace(-1, -6, 3),
            "alpha":
            np.logspace(-0, -1, 1),
            "activationFunction": [sigmoid, ReLU_leaky, ReLU],
            "outputFunction": [softMax],
            "#repetitions":
            5,
            "datafraction":
            1
        }

        optimalScore, optimalParams, optimalParamSTR = benchmarkNN(
            X,
            z,
            parameters,
            NN,
            mode="classification",
            randomSearch=False,
            writingPermissions=False,
            N=int(1e3))
        print("Optimal Neural Network parameters:",
              optimalScore,
              optimalParamSTR,
              sep="\n",
              end="\n\n")
        # Bug fix: this branch previously returned None despite the docstring
        # promising a (score, list) pair — surface the benchmark results.
        return optimalScore, [optimalParams, optimalParamSTR]
Пример #2
0
import numpy as np
from NeuralNetwork import NN

# Demo/driver script: trains the project NN on a tiny hand-written dataset,
# then predicts and cross-validates on a second small set.
# NOTE(review): the NN constructor arguments are positional and their meaning
# is not visible from here (presumably layer sizes, iterations, learning
# rate, regularisation, ...) — confirm against NeuralNetwork.NN's signature.
network = NN(3, 3, 3, 100000, .00001, .0001, 0, 0)

# Training data: 6 samples with 3 features each, and their class labels.
# NOTE(review): np.matrix is deprecated in NumPy; whether NN relies on
# matrix semantics (e.g. `*` as matmul) cannot be told from here — verify
# before migrating to np.array.
X = np.matrix('1,2,3;4,3,6;1,2,5;0,1,2;1,4,5;1,0,1')
y = np.matrix('0;2;1;1;1;2')
# Held-out cross-validation data: 3 samples with known labels.
X_cv = np.matrix('1,0,0;1,2,4;10,7,4')
y_cv = np.matrix('0;1;1')

print(network.fit(X, y))
print(network.predict(X_cv))
network.cross_validate(X_cv, y_cv)