Example #1
    def test_backward(self):
        # y = (x**2)**2 + (x**2)**2 = 2 * x**4, so dy/dx = 8 * x**3
        x = Variable(np.array(2.0))
        a = square(x)
        y = add(square(a), square(a))
        y.backward()

        self.assertEqual(y.data, 32.0)   # 2 * 2**4 = 32
        self.assertEqual(x.grad, 64.0)   # 8 * 2**3 = 64
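These tests exercise a small define-by-run autodiff core: Variable holds data and grad, square and add record a graph as they run, and backward() walks that graph in reverse. The snippet below is a minimal sketch of one such core under those assumptions, not necessarily the implementation under test; the generation bookkeeping is what lets the repeated use of square(a) (a diamond-shaped graph) accumulate to x.grad == 64.0 instead of double-counting.

import numpy as np

class Variable:
    def __init__(self, data):
        self.data = data
        self.grad = None
        self.creator = None      # the Function that produced this Variable
        self.generation = 0      # depth in the graph, used to order backward()

    def set_creator(self, func):
        self.creator = func
        self.generation = func.generation + 1

    def backward(self):
        if self.grad is None:
            self.grad = np.ones_like(self.data)   # seed dy/dy = 1

        funcs, seen = [], set()

        def add_func(f):
            # process deeper functions first so a gradient is fully
            # accumulated before it is propagated further back
            if f not in seen:
                seen.add(f)
                funcs.append(f)
                funcs.sort(key=lambda g: g.generation)

        add_func(self.creator)
        while funcs:
            f = funcs.pop()
            gys = [out.grad for out in f.outputs]
            gxs = f.backward(*gys)
            if not isinstance(gxs, tuple):
                gxs = (gxs,)
            for x, gx in zip(f.inputs, gxs):
                x.grad = gx if x.grad is None else x.grad + gx  # accumulate
                if x.creator is not None:
                    add_func(x.creator)

class Function:
    def __call__(self, *inputs):
        xs = [x.data for x in inputs]
        ys = self.forward(*xs)
        if not isinstance(ys, tuple):
            ys = (ys,)
        outputs = [Variable(np.asarray(y)) for y in ys]
        self.generation = max(x.generation for x in inputs)
        for out in outputs:
            out.set_creator(self)
        self.inputs = inputs
        self.outputs = outputs
        return outputs[0] if len(outputs) == 1 else outputs

class Square(Function):
    def forward(self, x):
        return x ** 2

    def backward(self, gy):
        x = self.inputs[0].data
        return 2 * x * gy

class Add(Function):
    def forward(self, x0, x1):
        return x0 + x1

    def backward(self, gy):
        return gy, gy

def square(x):
    return Square()(x)

def add(x0, x1):
    return Add()(x0, x1)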
Example #2
    def test_config(self):
        # Training mode: backprop is enabled by default
        self.assertTrue(Config.enable_backprop)
        x = Variable(np.ones((100, 100, 100)))
        y = square(square(square(x)))
        y.backward()

        # Inference mode: no_grad() temporarily disables backprop
        with no_grad():
            x = Variable(np.array(2.0))
            y = square(x)
            self.assertFalse(Config.enable_backprop)

        # The flag is restored once the with-block exits
        self.assertTrue(Config.enable_backprop)
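This test toggles a global Config.enable_backprop flag through a no_grad() context manager. One common way to build that pair is a class attribute plus contextlib.contextmanager, sketched below (the helper name using_config is an assumption). In such a setup, Function.__call__ would check the flag and skip recording inputs and creators when it is False, which is why the test can afford a (100, 100, 100) graph in training mode while the no_grad() block stays cheap.

import contextlib

class Config:
    enable_backprop = True      # global switch read when building the graph

@contextlib.contextmanager
def using_config(name, value):
    # Temporarily override Config.<name>, restoring the old value on exit
    old_value = getattr(Config, name)
    setattr(Config, name, value)
    try:
        yield
    finally:
        setattr(Config, name, old_value)

def no_grad():
    return using_config('enable_backprop', False)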
Example #3
    def test_gradient_check(self):
        # Compare the backprop gradient with a numerical approximation
        x = Variable(np.random.rand(1))
        y = square(x)
        y.backward()
        num_grad = numerical_diff(square, x)
        flag = np.allclose(x.grad, num_grad)
        self.assertTrue(flag)
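The gradient check compares the backprop result against a numerical derivative, with np.allclose absorbing the small approximation error. A typical numerical_diff is a central difference; the sketch below reuses the Variable class from the sketch after Example #1, and eps=1e-4 is an assumed default.

def numerical_diff(f, x, eps=1e-4):
    # Central difference: (f(x + eps) - f(x - eps)) / (2 * eps)
    x0 = Variable(x.data - eps)
    x1 = Variable(x.data + eps)
    y0 = f(x0)
    y1 = f(x1)
    return (y1.data - y0.data) / (2 * eps)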
Example #4
    def test_forward(self):
        # square(2.0) should produce 4.0
        x = Variable(np.array(2.0))
        y = square(x)
        expected = np.array(4.0)
        self.assertEqual(y.data, expected)
Example #5
    def test_backward(self):
        # d(x**2)/dx = 2x, so the gradient at x = 3.0 is 6.0
        x = Variable(np.array(3.0))
        y = square(x)
        y.backward()
        expected = np.array(6.0)
        self.assertEqual(x.grad, expected)
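Examples #1 through #5 are methods, so they need an enclosing unittest.TestCase to run. A minimal harness is sketched below, assuming the Variable/square definitions from the sketch after Example #1 are in scope; the class name SquareTest is an assumption, and the remaining methods from the examples above would be added the same way. Running the module directly or via python -m unittest executes the tests.

import unittest
import numpy as np

class SquareTest(unittest.TestCase):
    # Examples #4 and #5 placed inside a TestCase
    def test_forward(self):
        x = Variable(np.array(2.0))
        y = square(x)
        self.assertEqual(y.data, np.array(4.0))

    def test_backward(self):
        x = Variable(np.array(3.0))
        y = square(x)
        y.backward()
        self.assertEqual(x.grad, np.array(6.0))

if __name__ == '__main__':
    unittest.main()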
Example #6
def test_wrapper():
    # For y = exp(x**2), dy/dx = 2x * exp(x**2)
    x = Variable(np.array([1, 2]))
    y = exp(square(x))
    y.backward()
    return x.grad
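Example #6 is a plain function rather than a TestCase method, and it calls exp, which none of the other snippets define. Under the Function pattern sketched after Example #1, exp could look like the following; this is a sketch, not necessarily the implementation these tests import.

class Exp(Function):
    def forward(self, x):
        return np.exp(x)

    def backward(self, gy):
        x = self.inputs[0].data
        return np.exp(x) * gy      # d/dx exp(x) = exp(x)

def exp(x):
    return Exp()(x)

With that in place, test_wrapper() simply returns 2 * x * exp(x ** 2) evaluated at [1, 2] instead of asserting anything, so it reads more like a standalone smoke test than a unit test.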