Example 1
def test_neuralNetwork_adam():
    # NeuralNetwork is the project's own class under test; it is assumed to be
    # imported at module level in the original test file.
    import numpy as np
    import pytest
    from sklearn.neural_network._stochastic_optimizers import AdamOptimizer

    np.random.seed(2019)
    X = np.random.normal(size=(1, 500))
    target = 3.9285985 * X

    nn = NeuralNetwork(inputs=1,
                       neurons=3,
                       outputs=1,
                       activations='sigmoid',
                       silent=True)
    nn.addLayer()
    nn.addLayer()
    nn.addOutputLayer(activations='identity')
    learning_rate = 0.001

    # One forward/backward pass, then a single Adam update with the
    # network's own optimizer.
    yhat = nn.forward_pass(X)
    nn.backpropagation(yhat.T, target.T)
    nn.learning_rate = learning_rate
    nn.initializeAdam()
    nn.adam()

    # Feed the same parameters and gradients to scikit-learn's reference Adam
    # optimizer and compare the resulting updates element-wise.
    skl_adam = AdamOptimizer(params=nn.param, learning_rate_init=learning_rate)
    upd = skl_adam._get_updates(nn.grad)

    for update_nn, update_skl in zip(nn.change, upd):
        assert update_nn == pytest.approx(update_skl)
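
Both implementations are expected to produce the standard Adam update (Kingma & Ba, 2015). For reference, below is a minimal, self-contained sketch of one Adam step for a single parameter array; it is illustrative only and not the project's or scikit-learn's exact code (scikit-learn, for instance, folds the bias correction into an effective learning rate):

import numpy as np

def adam_step(param, grad, m, v, t, lr=0.001, beta1=0.9, beta2=0.999, eps=1e-8):
    """One illustrative Adam update for a single parameter array."""
    m = beta1 * m + (1 - beta1) * grad         # first-moment (mean) estimate
    v = beta2 * v + (1 - beta2) * grad ** 2    # second-moment estimate
    m_hat = m / (1 - beta1 ** t)               # bias correction; t starts at 1
    v_hat = v / (1 - beta2 ** t)
    param = param - lr * m_hat / (np.sqrt(v_hat) + eps)
    return param, m, v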
Example 2
def main():

    visualiseTrainingExamples()

    nn = NeuralNetwork(config.numLayers, config.numClasses,
                       config.weightInitialisation, config.activationFn,
                       config.weightDecay)
    # Input dimension = height * width of the (square) training images.
    nn.initialiseParams(len(x_train[0]) * len(x_train[0]), config.numNeurons)

    # Sanity-check the forward pass on one random training example.
    sample = np.random.randint(3 * len(x_train) // 4)
    nn.forwardPropagate(x_train[sample])
    # Train with the optimizer selected in the run configuration.
    if config.optimizer == "sgd":
        nn.stochasticGradDesc(x_train, y_train, config.maxIterations,
                              config.learningRate, config.batchSize)
    elif config.optimizer == "momentum":
        nn.momentumGradDesc(x_train, y_train, config.maxIterations,
                            config.learningRate, config.batchSize,
                            config.gamma)
    elif config.optimizer == "nesterov":
        nn.nesterovAcceleratedGradDesc(x_train, y_train, config.maxIterations,
                                       config.learningRate, config.batchSize,
                                       config.gamma)
    elif config.optimizer == "rmsprop":
        nn.rmsprop(x_train, y_train, config.maxIterations, config.learningRate,
                   config.batchSize, config.eps, config.beta1)
    elif config.optimizer == "adam":
        nn.adam(x_train, y_train, config.maxIterations, config.learningRate,
                config.batchSize, config.eps, config.beta1, config.beta2)
    elif config.optimizer == "nadam":
        nn.nadam(x_train, y_train, config.maxIterations, config.learningRate,
                 config.batchSize, config.eps, config.beta1, config.beta2)
    else:
        print("No such optimizer available.")

    # Evaluate on the test set: record the predicted class and the
    # probability the network assigns to it.
    predictions = []
    predProbs = []
    for i in range(len(x_test)):
        nn.forwardPropagate(x_test[i])
        predictions.append(nn.predictedClass)
        predProbs.append(nn.output[nn.predictedClass])

    test_acc = accuracy(y_test, predictions)
    test_entropy = crossEntropyLoss(y_test, predProbs)
    test_mse = MSEloss(y_test, predictions)

    # Repeat the evaluation on the validation set.
    predictions = []
    predProbs = []
    for i in range(len(x_valid)):
        nn.forwardPropagate(x_valid[i])
        predictions.append(nn.predictedClass)
        predProbs.append(nn.output[nn.predictedClass])

    valid_acc = accuracy(y_valid, predictions)
    valid_entropy = crossEntropyLoss(y_valid, predProbs)
    valid_mse = MSEloss(y_valid, predictions)

    print(
        f"Test Set:\nAccuracy = {test_acc}\nLoss = {test_entropy}\nMSE = {test_mse}"
    )
    print(
        f"Validation Set:\nAccuracy = {valid_acc}\nLoss = {valid_entropy}\nMSE = {valid_mse}"
    )

    # Log summary metrics to wandb.
    metrics = {
        'test_acc': test_acc,
        # 'test_entropy': test_entropy,
        "test_mse": test_mse,
        'valid_acc': valid_acc,
        # 'valid_entropy': valid_entropy,
        "valid_mse": valid_mse
    }
    wandb.log(metrics)
    run.finish()
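
The function relies on module-level globals (config, run, x_train, y_train, x_valid, y_valid, x_test, y_test, and the accuracy/loss helpers) that are defined elsewhere in the module. As an assumption-labelled sketch, run and config could be created with the wandb API roughly as follows; the project name and hyperparameter values are placeholders, only the attribute names mirror the ones used above:

import wandb

# Placeholder setup: project name and values are illustrative only.
run = wandb.init(project='nn-from-scratch',
                 config={'numLayers': 3, 'numClasses': 10, 'numNeurons': 64,
                         'weightInitialisation': 'xavier', 'activationFn': 'sigmoid',
                         'weightDecay': 0.0, 'optimizer': 'adam',
                         'maxIterations': 10, 'learningRate': 1e-3, 'batchSize': 32,
                         'eps': 1e-8, 'beta1': 0.9, 'beta2': 0.999, 'gamma': 0.9})
config = wandb.config   # attribute-style access, e.g. config.optimizer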