Example 1
def test_neuralNetwork_adam():
    import numpy as np
    import pytest
    from sklearn.neural_network._stochastic_optimizers import AdamOptimizer
    # NeuralNetwork is the project's own class under test and is assumed to
    # be importable in this test module.

    # Reproducible synthetic data: a 1-D linear regression target.
    np.random.seed(2019)
    X = np.random.normal(size=(1, 500))
    target = 3.9285985 * X

    # Small network with one input, one output, two hidden layers
    # (sigmoid activations, three neurons each) and an identity output layer.
    nn = NeuralNetwork(inputs=1,
                       neurons=3,
                       outputs=1,
                       activations='sigmoid',
                       silent=True)
    nn.addLayer()
    nn.addLayer()
    nn.addOutputLayer(activations='identity')
    learning_rate = 0.001

    # One forward/backward pass to populate the gradients, then a single
    # Adam step with the project's own implementation.
    yhat = nn.forward_pass(X)
    nn.backpropagation(yhat.T, target.T)
    nn.learning_rate = learning_rate
    nn.initializeAdam()
    nn.adam()

    # Compute the same update with scikit-learn's AdamOptimizer from the
    # identical parameters and gradients.
    skl_adam = AdamOptimizer(params=nn.param, learning_rate_init=learning_rate)
    upd = skl_adam._get_updates(nn.grad)

    # The two implementations should agree element-wise on every update.
    for update_nn, update_skl in zip(nn.change, upd):
        assert update_nn == pytest.approx(update_skl)
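
For reference, this is roughly the computation the test expects both implementations to perform for a single step: the bias-corrected Adam update of Kingma and Ba. The sketch below is illustrative; the function name and signature are not part of either library, and scikit-learn's AdamOptimizer folds the bias corrections into an effective learning rate instead, which agrees with this form up to where epsilon enters.

import numpy as np

def adam_step(grad, m, v, t, lr=0.001, beta1=0.9, beta2=0.999, eps=1e-8):
    """Return one Adam update plus the new first/second moment estimates."""
    m = beta1 * m + (1 - beta1) * grad        # first moment (mean of gradients)
    v = beta2 * v + (1 - beta2) * grad ** 2   # second moment (uncentered variance)
    m_hat = m / (1 - beta1 ** t)              # bias correction for m
    v_hat = v / (1 - beta2 ** t)              # bias correction for v
    update = -lr * m_hat / (np.sqrt(v_hat) + eps)
    return update, m, v

The hyperparameter defaults here (beta1=0.9, beta2=0.999, eps=1e-8) match both the Adam paper and scikit-learn's AdamOptimizer, which is why a single step from each implementation can be compared with pytest.approx as above.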