Example #1
def test_check_with_numerical_gradient(self):
    f = lambda x: x**2
    x = np.array([1.3, 1.4])
    grad = 2. * x  # analytic gradient of x**2
    numgrad = numerical_gradient.calc(f, x)
    # f is elementwise, so only the diagonal of the Jacobian is non-zero
    numgrad = np.diagonal(numgrad)
    numerical_gradient.assert_are_similar(grad, numgrad)
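None of these snippets include numerical_gradient.calc itself. Judging from how its result is used (a matrix that gets passed to np.diagonal or summed along an axis), it returns the full Jacobian J[i, j] ~ df_i/dx_j. A minimal central-difference sketch of such a function, offered as an assumption rather than the library's actual code (the name numerical_jacobian and the eps parameter are illustrative):

import numpy as np

def numerical_jacobian(f, x, eps=1e-6):
    # central differences: J[i, j] ~= df_i / dx_j
    x = np.asarray(x, dtype=float)
    out_size = np.atleast_1d(f(x)).size
    jac = np.zeros((out_size, x.size))
    for j in range(x.size):
        step = np.zeros_like(x)
        step[j] = eps
        jac[:, j] = (np.atleast_1d(f(x + step))
                     - np.atleast_1d(f(x - step))) / (2 * eps)
    return jac

For the elementwise f above this Jacobian is diagonal, which is why the test extracts np.diagonal(numgrad) before comparing.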
Example #2
def test_OneNeuronGradient(self):
    layer = Linear(2, 1)
    x = np.random.rand(2)
    y = layer.forward(x)
    deriv_grad = layer.backward(np.ones(1))
    numgrad = numerical_gradient.calc(layer.forward, x)
    # a single output neuron: compare against the only row of the Jacobian
    numerical_gradient.assert_are_similar(deriv_grad, numgrad[0])
Example #3
def test_TwoNeuronsGradient(self):
    layer = Linear(3, 2)
    x = np.random.rand(3)
    y = layer.forward(x)
    deriv_grad = layer.backward(np.ones(2))
    numgrad = numerical_gradient.calc(layer.forward, x)
    # backward(ones) yields the column sums of the Jacobian (see below)
    numgrad = np.sum(numgrad, axis=0)
    numerical_gradient.assert_are_similar(deriv_grad, numgrad)
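Why collapse the Jacobian before comparing? With an upstream gradient of all ones, backward computes the vector-Jacobian product 1^T J, which is exactly the column sums of the Jacobian that numerical_gradient.calc approximates. A standalone NumPy check of that identity (no library code involved):

import numpy as np

J = np.arange(6.).reshape(2, 3)  # Jacobian of a 2-output, 3-input layer
upstream = np.ones(2)            # the np.ones(2) passed to backward()
assert np.allclose(upstream @ J, J.sum(axis=0))  # VJP == column sums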
Example #4
def test_SoftmaxLayerGradientCheck(self):
    x = np.random.rand(3)
    layer = Softmax()
    layer.forward(x)
    grad = layer.backward(np.array([1.]))
    numgrad = numerical_gradient.calc(layer.forward, x)
    # collapse the softmax Jacobian so it is comparable with backward()'s output
    numgrad = np.sum(numgrad, axis=1)
    numerical_gradient.assert_are_similar(grad, numgrad)
Example #5
def test_numerical_grad(self):
    layer = Relu()
    x = np.random.rand(5)
    layer.forward(x)
    grad = layer.backward(np.array([1.]))
    num_grad = numerical_gradient.calc(layer.forward, x)
    # ReLU is elementwise, so only the Jacobian diagonal is non-zero
    num_grad = num_grad.diagonal()
    numerical_gradient.assert_are_similar(grad, num_grad)
Example #6
def test_LinearLayerNumericalGradientCheck(self):
    x = np.random.rand(3)

    model = Seq()
    model.add(Linear(3, 2, initialize='ones'))

    num_grad = numerical_gradient.calc(model.forward, x)
    deriv_grad = model.backward(np.array([1, 1]))
    num_grad = np.sum(num_grad, axis=0)

    numerical_gradient.assert_are_similar(deriv_grad, num_grad)
Example #7
def test_ClaudioMaxNLLNumericalGradient(self):
    nll = ClaudioMaxNLL()
    y = np.random.rand(5)
    t = 1
    nll.calc_loss(y, t)
    grad = nll.calc_gradient(y, t)

    # close over the fixed target so calc() sees a function of x alone
    def loss_with_target(x):
        return nll.calc_loss(x, t)

    num_grad = numerical_gradient.calc(loss_with_target, y)
    num_grad = np.sum(num_grad, axis=0)
    numerical_gradient.assert_are_similar(grad, num_grad)
Example #8
def test_backward(self):
    layer = Sigmoid()
    x = np.random.rand(2)
    y = layer.forward(x)
    deriv_grad = layer.backward(np.ones(1))

    numerical_grad_matrix = numerical_gradient.calc(layer.forward, x)

    # the numerical gradient here is a matrix of zeros with dJ/dx_i
    # only on the diagonal, since sigmoid acts elementwise
    num_grad = np.diagonal(numerical_grad_matrix)

    numerical_gradient.assert_are_similar(deriv_grad, num_grad)
Example #9
def test_numerical_gradient(self):
    x = np.random.rand(5)
    target_class = make_one_hot_target(classes_n=5, target_class=1)

    loss = CrossEntropyLoss()
    y = loss.calc_loss(x, target_class)
    grad = loss.calc_gradient(y, target_class)

    def forward(i):
        return loss.calc_loss(i, target_class)

    num_grad = numerical_gradient.calc(forward, x)

    num_grad = np.sum(num_grad, axis=0)
    print(num_grad)
    numerical_gradient.assert_are_similar(grad, num_grad)
Example #10
def test_TwoLinearSigmoidLayers(self):
    x = np.random.rand(5)

    real_model = Seq([
        Linear(5, 3, initialize='ones'),
        Sigmoid(),
        Linear(3, 5, initialize='ones'),
        Sigmoid()
    ])
    y = real_model.forward(x)
    real_grad = real_model.backward(np.ones(5))

    # a second, identically initialized model is used for the numerical check
    num_model = Seq([
        Linear(5, 3, initialize='ones'),
        Sigmoid(),
        Linear(3, 5, initialize='ones'),
        Sigmoid()
    ])
    num_grad = numerical_gradient.calc(num_model.forward, x)

    num_grad = np.sum(num_grad, axis=1)
    numerical_gradient.assert_are_similar(real_grad, num_grad)
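assert_are_similar is not shown in any example either. A common gradient-check comparison, offered here as a plausible stand-in rather than the library's actual implementation (the tol threshold is an assumption), is the symmetric relative error:

import numpy as np

def assert_are_similar(grad, num_grad, tol=1e-5):
    # hypothetical stand-in: symmetric relative error between the two gradients
    diff = np.linalg.norm(grad - num_grad)
    scale = np.linalg.norm(grad) + np.linalg.norm(num_grad)
    assert scale == 0. or diff / scale < tol

This form keeps the check scale-invariant and avoids division by zero when both gradients vanish.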