Example #1
0
 def test_generation(self):
     """Backprop through a diamond graph: y = a^2 + a^2 where a = x^2."""
     var = Variable(np.array(2.0))
     branch = F.square(var)
     out = add(F.square(branch), F.square(branch))
     out.backward()
     # x=2 -> a=4 -> y = 16 + 16 = 32; dy/dx = 2 * (2a) * (2x) = 64
     self.assertEqual(out.data, np.array(32.0))
     self.assertEqual(var.grad.data, np.array(64.0))
Example #2
0
 def test_backprop_with_numercal_diff(self):
     """Gradient check: backprop result agrees with numerical differentiation.

     NOTE(review): "numercal" is a typo for "numerical", but both the method
     name and the helper name are kept as-is — the helper is defined elsewhere
     under that spelling.
     """
     inp = Variable(np.random.rand(1))
     out = square(inp)
     out.backward()
     expected = numercal_diff(square, inp)
     self.assertTrue(np.allclose(inp.grad.data, expected.data))
Example #3
0
    def test_forward(self):
        """Forward/backward through add(); also checks gradient accumulation
        when the same Variable is used multiple times in one expression.
        """
        x = Variable(np.array(2.0))
        y = Variable(np.array(3.0))

        z = add(F.square(x), F.square(y))
        z.backward()
        # 2**2 + 3**2 = 13
        self.assertEqual(z.data, np.array(13.0))
        self.assertIsInstance(x.grad, Variable)

        a = Variable(np.array(2.0))
        b = add(a, a)
        b.backward()
        # a appears twice, so its gradient accumulates to 1 + 1 = 2.
        self.assertEqual(a.grad.data, np.array(2.0))

        # Reset the accumulated gradient before reusing the variable.
        a.cleargrad()
        b = add(add(a, a), a)
        b.backward()
        # a appears three times, so its gradient accumulates to 3.
        self.assertEqual(a.grad.data, np.array(3.0))
    def test__dot_var(self):
        """get_dot_graph output contains creator->output and input->creator edges."""
        lhs = Variable(np.array(1.0))
        rhs = Variable(np.array(1.0))
        total = lhs + rhs
        result = square(total)

        dot_text = get_dot_graph(result)
        # Edge from the creator function node to its output variable node.
        self.assertIn(f"{id(result.creator)} -> {id(result)}", dot_text)
        # Edge from the input variable node to the creator function node.
        self.assertIn(f"{id(total)} -> {id(result.creator)}", dot_text)
Example #5
0
 def test_backward(self):
     """d/dx x^2 at x=3 equals 6, whether grad is a Variable or a raw array."""
     var = Variable(np.array(3.0))
     result = square(var)
     result.backward()
     expected = np.array(6.0)
     grad = var.grad
     # Newer DeZero versions store grads as Variables; older ones as ndarrays.
     actual = grad.data if isinstance(grad, Variable) else grad
     self.assertEqual(actual, expected)
Example #6
0
 def test_gradient_check(self):
     """Backprop gradient agrees with numerical differentiation on random input."""
     var = Variable(np.random.rand(1))
     out = square(var)
     out.backward()
     num_grad = numerical_diff(square, var)
     grad = var.grad
     # Unwrap Variable-valued grads (newer DeZero) before comparing.
     grad_value = grad.data if isinstance(grad, Variable) else grad
     self.assertTrue(np.allclose(grad_value, num_grad))
Example #7
0
# When run as a script, make the parent directory importable so the local
# dezero package is found before the imports below.
if '__file__' in globals():
    import os, sys

    sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

import numpy as np

from dezero import Variable
from dezero.functions import square, exp

# Compose square(exp(square(x))) and backprop to populate x.grad.
x = Variable(np.array(0.5))
y = square(exp(square(x)))
y.backward()
print(x.grad)
Example #8
0
 def test_two_branch_diff(self):
     """Gradient accumulates correctly when one node feeds two branches."""
     var = Variable(np.array(2.0))
     shared = square(var)
     out = add(square(shared), square(shared))
     out.backward()
     # dy/dx = 2 * (2a) * (2x) with a = x^2 -> 64 at x=2
     self.assertEqual(var.grad.data, np.array(64.0))
Example #9
0
 def test_backward(self):
     """Backward of y = x^2 at x=3 yields gradient 6."""
     var = Variable(np.array(3.0))
     out = square(var)
     out.backward()
     self.assertEqual(var.grad.data, np.array(6.0))
Example #10
0
 def test_foreard(self):
     """Forward of square: 2^2 == 4.

     NOTE(review): the name has a typo ("foreard" for "forward"); kept as-is
     to avoid changing the test-discovery name.
     """
     var = Variable(np.array(2.0))
     result = square(var)
     self.assertEqual(result.data, np.array(4.0))
Example #11
0
 def test_using_config(self):
     """Inside no_grad(), no graph is recorded, so x.grad stays None."""
     with no_grad():
         x = Variable(np.array(2.0))
         y = F.square(x)
         # assertIsNone is the unittest idiom for None checks (identity, not
         # equality), and gives a clearer failure message than assertEqual.
         self.assertIsNone(x.grad)
Example #12
0
 def func(x):
     """Return square(square(x)) + square(square(x)), i.e. two independent
     x^4 branches summed."""
     first = square(square(x))
     second = square(square(x))
     return first + second