Example 1
def test_complex_graph_backward():
    x = Variable(np.array(2.0))
    a = square(x)
    y = add(square(a), square(a))
    y.backward()

    assert y.data == 32.0
    assert x.grad == 64.0
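
For reference, this graph computes y = (x^2)^2 + (x^2)^2 = 2x^4, so at x = 2 the forward value is 2 * 16 = 32 and the gradient is dy/dx = 8x^3 = 64, which is exactly what the two asserts check.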
Example 2
def test_square_backward():
    x = Variable(np.array(2.0))
    y = Variable(np.array(3.0))

    z = add(square(x), square(y))
    z.backward()

    print(z.data)
    print(x.grad)
    print(y.grad)
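
The example above prints its results instead of asserting them; with these inputs z = x^2 + y^2 = 13, dz/dx = 2x = 4 and dz/dy = 2y = 6. A quick framework-independent check of the same numbers in plain Python (no autograd involved; z_of is just a local helper for this note):

def z_of(x, y):
    return x ** 2 + y ** 2

eps = 1e-4
x, y = 2.0, 3.0
print(z_of(x, y))                                          # 13.0
print((z_of(x + eps, y) - z_of(x - eps, y)) / (2 * eps))   # ~4.0  (central difference)
print((z_of(x, y + eps) - z_of(x, y - eps)) / (2 * eps))   # ~6.0  (central difference)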
# These methods take `self` and use unittest assertions, so they belong on a
# unittest.TestCase subclass; the class name below is an assumption, and
# unittest, numpy as np, Variable, square and numerical_diff are assumed in scope.
class SquareTest(unittest.TestCase):
    def test_gradient_check(self):
        # The backpropagated gradient should match a numerical estimate.
        x = Variable(np.random.rand(1))
        y = square(x)
        y.backward()
        num_grad = numerical_diff(square, x)
        flg = np.allclose(x.grad, num_grad)
        self.assertTrue(flg)

    def test_backward(self):
        # d(x^2)/dx = 2x, so the gradient at x = 3 is 6.
        x = Variable(np.array(3.0))
        y = square(x)
        y.backward()
        expected = np.array(6.0)
        self.assertEqual(x.grad, expected)

    def test_forward(self):
        # square(2) should evaluate to 4 in the forward pass.
        x = Variable(np.array(2.0))
        y = square(x)
        expected = np.array(4.0)
        self.assertEqual(y.data, expected)
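
test_gradient_check relies on a numerical_diff helper that is not shown in these excerpts. A minimal central-difference sketch of such a helper, assuming it takes a callable, a Variable, and an optional step size (the signature and the eps default are assumptions):

def numerical_diff(f, x, eps=1e-4):
    # Central difference: (f(x + eps) - f(x - eps)) / (2 * eps).
    x0 = Variable(x.data - eps)
    x1 = Variable(x.data + eps)
    y0 = f(x0)
    y1 = f(x1)
    return (y1.data - y0.data) / (2 * eps)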
Example 6
def test_step_18():
    # Backprop is disabled inside this block and backward() is never called,
    # so y.grad remains at its default of None.
    with using_config('enable_backprop', False):
        x = Variable(np.array(2.0))
        y = square(x)

    assert y.grad is None
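
using_config appears here as a context manager that temporarily flips a global enable_backprop flag so that no computational graph needs to be recorded during the forward pass. A minimal sketch of how such a helper is commonly written, assuming a Config class that holds the flag (the Config name and this structure are assumptions, not taken from the test):

import contextlib

class Config:
    enable_backprop = True

@contextlib.contextmanager
def using_config(name, value):
    # Temporarily override Config.<name>, restoring the old value on exit.
    old_value = getattr(Config, name)
    setattr(Config, name, value)
    try:
        yield
    finally:
        setattr(Config, name, old_value)

Functions can then consult Config.enable_backprop to decide whether to record their inputs and outputs for backpropagation.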