Example #1
def test_sub():
    x = Variable(np.array(2.0))
    y1: Variable = 2.0 - x
    y2: Variable = x - 1.0

    assert y1.data == 0
    assert y2.data == 1
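
A minimal sketch of the operator overloading the asserts above exercise: the Var class below is a hypothetical, forward-only stand-in for dezero's Variable, showing how both 2.0 - x (via __rsub__) and x - 1.0 (via __sub__) can return a Variable-like object.

import numpy as np

class Var:
    # Hypothetical, forward-only stand-in for dezero.Variable.
    def __init__(self, data):
        self.data = np.asarray(data)

    def __sub__(self, other):       # x - 1.0
        other = other.data if isinstance(other, Var) else np.asarray(other)
        return Var(self.data - other)

    def __rsub__(self, other):      # 2.0 - x (scalar on the left-hand side)
        return Var(np.asarray(other) - self.data)

x = Var(np.array(2.0))
assert (2.0 - x).data == 0.0
assert (x - 1.0).data == 1.0
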
Example #2
    def test_matyas(self):
        def matyas(x, y):
            z = 0.26 * (x**2 + y**2) - 0.48 * x * y
            return z

        x = Variable(np.array(1.0))
        y = Variable(np.array(1.0))
        z = matyas(x, y)
        z.backward()
        expected = np.array(0.040000000000000036)
        if isinstance(x.grad, Variable):
            self.assertEqual(x.grad.data, expected)
            self.assertEqual(y.grad.data, expected)
        else:
            self.assertEqual(x.grad, expected)
            self.assertEqual(y.grad, expected)
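
As an added cross-check of the expected value, the closed-form partials of the Matyas function are dz/dx = 0.52*x - 0.48*y and dz/dy = 0.52*y - 0.48*x, both equal to 0.04 at (1, 1):

import numpy as np

# Analytic gradient of matyas(x, y) = 0.26*(x**2 + y**2) - 0.48*x*y at (1, 1).
x, y = 1.0, 1.0
assert np.isclose(0.52 * x - 0.48 * y, 0.04)
assert np.isclose(0.52 * y - 0.48 * x, 0.04)
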
Example #3
    def test_sphere(self):
        def sphere(x, y):
            z = x**2 + y**2
            return z

        x = Variable(np.array(1.0))
        y = Variable(np.array(1.0))
        z = sphere(x, y)
        z.backward()
        expected = np.array(2.0)
        if isinstance(x.grad, Variable):
            self.assertEqual(x.grad.data, expected)
            self.assertEqual(y.grad.data, expected)
        else:
            self.assertEqual(x.grad, expected)
            self.assertEqual(y.grad, expected)
Example #4
def check_backward(func, x_data, y_grad=None, eps=0.001,
                   atol=1e-5, rtol=1e-4, verbose=True):
    x_data = _as_tuple(x_data)
    x_data = tuple([x.astype(np.float64) for x in x_data])
    if y_grad is not None:
        y_grad = y_grad.astype(np.float64)

    def f(inputs):
        inputs = _as_tuple(inputs)
        inputs = [as_variable(x) for x in inputs]
        y = func(*inputs)
        return y.data

    num_grads = numerical_grad(f, x_data, y_grad, eps)
    inputs = [as_variable(x) for x in x_data]
    y = func(*inputs)
    if y_grad is not None:
        y.grad = Variable(y_grad)
    y.backward()
    bp_grads = [x.grad.data for x in inputs]

    results = []
    for num_grad, bp_grad in zip(num_grads, bp_grads):
        assert bp_grad.shape == num_grad.shape
        res = np.allclose(num_grad, bp_grad, atol=atol, rtol=rtol)
        results.append(res)
        if not res and verbose:
            diff = abs(num_grad - bp_grad)
            print('-------------------------')
            print('diff', diff)
            print('diff mean', np.array(diff).mean())
            # print('num_grad:', num_grad.shape, num_grad)
            # print('bp_grad:', bp_grad.shape, bp_grad)

    return all(results)
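
check_backward compares backprop gradients against numerical_grad. A self-contained central-difference sketch of that idea follows; numerical_grad_sketch is an illustrative helper written for this note, not dezero's actual implementation.

import numpy as np

def numerical_grad_sketch(f, x, eps=1e-3):
    # Central difference: df/dx_i ~ (f(x + eps*e_i) - f(x - eps*e_i)) / (2*eps),
    # applied one element at a time and summed over the output.
    x = x.astype(np.float64)
    grad = np.zeros_like(x)
    for idx in np.ndindex(*x.shape):
        orig = x[idx]
        x[idx] = orig + eps
        y1 = np.sum(f(x))
        x[idx] = orig - eps
        y2 = np.sum(f(x))
        grad[idx] = (y1 - y2) / (2 * eps)
        x[idx] = orig
    return grad

# The gradient of sum(x**2) is 2*x.
x = np.array([1.0, 2.0, 3.0])
assert np.allclose(numerical_grad_sketch(lambda v: v ** 2, x), 2 * x)
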
Example #5
    def test_forward4(self):
        shape = (10, 20, 30)
        axis = (0, 1)
        x = Variable(np.random.rand(*shape))
        y = F.max(x, axis=axis, keepdims=True)
        expected = np.max(x.data, axis=axis, keepdims=True)
        self.assertTrue(array_allclose(y.data, expected))
Example #6
def testSquare():
    data = np.array([1, 2])
    x = Variable(data)
    f = Square()
    y = f(x)
    print(type(y))
    print(y.data)
Example #7
def accuracy(y, t):
    y, t = as_variable(y), as_variable(t)

    pred = y.data.argmax(axis=1).reshape(t.shape)
    result = (pred == t.data)
    acc = result.mean()
    return Variable(as_array(acc))
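
What accuracy() computes, traced on a tiny made-up batch with plain NumPy (Variable wrapping left out; shapes follow the function above):

import numpy as np

y = np.array([[0.1, 0.9], [0.8, 0.2], [0.3, 0.7]])  # scores, shape (N, C)
t = np.array([1, 0, 0])                              # true labels, shape (N,)
pred = y.argmax(axis=1).reshape(t.shape)             # predicted class per row
assert np.isclose((pred == t).mean(), 2 / 3)         # two of three rows match
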
Example #8
def testExp():
    data = np.array([1, 2])
    x = Variable(data)
    f = Exp()
    y = f(x)
    print(type(y))
    print(y.data)
Example #9
    def test_forward3(self):
        np.random.seed(0)
        x = np.random.rand(10, 10, 10).astype('f')
        y2 = CF.softmax(x, axis=1)
        y = F.softmax(Variable(x))
        res = np.allclose(y.data, y2.data)
        self.assertTrue(res)
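
For reference, a plain-NumPy softmax over axis=1 (the axis used above), stabilized by subtracting the per-axis maximum; softmax_ref is an illustrative helper, not the implementation of either F.softmax or CF.softmax.

import numpy as np

def softmax_ref(x, axis=1):
    # Subtract the max along `axis` for numerical stability, then normalize.
    e = np.exp(x - x.max(axis=axis, keepdims=True))
    return e / e.sum(axis=axis, keepdims=True)

x = np.random.rand(10, 10, 10).astype('f')
assert np.allclose(softmax_ref(x).sum(axis=1), 1.0, atol=1e-6)
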
Example #10
def test_taylor_sin():
    x = Variable(np.array(np.pi / 4))
    y = taylor_sin(x)
    y.backward()

    assert y.data == pytest.approx(0.707106)
    assert x.grad.data == pytest.approx(0.707103)
Example #11
    def test_forward2(self):
        shape = (10, 20, 30)
        axis = 1
        x = Variable(np.random.rand(*shape))
        y = F.max(x, axis=axis)
        expected = np.max(x.data, axis=axis)
        self.assertTrue(array_allclose(y.data, expected))
Example #12
    def test_forward1(self):
        x0 = np.array([1, 2, 3])
        x1 = Variable(np.array([1, 2, 3]))
        y = x0 + x1
        res = y.data
        expected = np.array([2, 4, 6])
        self.assertTrue(array_equal(res, expected))
Example #13
    def test_double_backprop(self):
        def y_(x):
            y = x**2
            return y

        def z_(gx, y):
            z = gx**3 + y
            return z

        x = Variable(np.array(2.0))
        y = y_(x)
        y.backward(create_graph=True)

        gx = x.grad
        x.cleargrad()
        z = z_(gx, y)
        z.backward()
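
A closed-form check of what the double backprop above propagates: with y = x**2 and gx = dy/dx = 2*x kept as a graph, z = gx**3 + y = 8*x**3 + x**2, so dz/dx = 24*x**2 + 2*x, which is 100 at x = 2.

import numpy as np

x = 2.0
assert np.isclose(24 * x ** 2 + 2 * x, 100.0)
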
Example #14
    def test_backward(self):
        x = Variable(np.array(2.0))
        a = square(x)
        y = add(square(a), square(a))
        y.backward()

        self.assertEqual(y.data, 32.0)
        self.assertEqual(x.grad, 64.0)
Example #15
def testconcat():
    data = np.array([1, 2])
    x = Variable(data)
    a = Square()
    b = Exp()
    y = a(b(x))
    print(type(y))
    print(y.data)
Example #16
    def test_sin(self):
        x = Variable(np.linspace(-7, 7, 200))
        y = F.sin(x)
        y.backward(create_graph=True)

        logs = [y.data]

        for i in range(3):
            logs.append(x.grad.data)
            gx = x.grad
            x.cleargrad()
            gx.backward(create_graph=True)

        labels = ["y=sin(x)", "y'", "y''", "y'''"]
        for i, v in enumerate(logs):
            plt.plot(x.data, v, label=labels[i])
        plt.legend(loc='lower right')
        plt.savefig('test.png')
Example #17
def testautobackward():
    data = np.array([1, 2])
    x = Variable(data)
    A = Square()
    B = Exp()
    a = A(x)
    y = B(a)
    y.backward()
    return x.grad
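
A finite-difference cross-check of the gradient returned above: the graph is y = exp(x**2), so by the chain rule dy/dx = 2*x*exp(x**2).

import numpy as np

data = np.array([1.0, 2.0])
eps = 1e-6
numeric = (np.exp((data + eps) ** 2) - np.exp((data - eps) ** 2)) / (2 * eps)
assert np.allclose(numeric, 2 * data * np.exp(data ** 2), rtol=1e-4)
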
Example #18
    def test_backward(self):
        x = Variable(np.array(3.0))
        y = square(x)
        y.backward()
        expected = np.array(6.0)
        if isinstance(x.grad, Variable):
            self.assertEqual(x.grad.data, expected)
        else:
            self.assertEqual(x.grad, expected)
Example #19
    def test_reshape(self):
        x = Variable(np.array([[1, 2, 3], [4, 5, 6]]))
        y = F.reshape(x, (6, ))
        y.backward(retain_grad=True)

        self.assertEqual(y.shape, (6, ))
        self.assertTrue(np.allclose(y.data, np.array([1, 2, 3, 4, 5, 6])))
        self.assertTrue(
            np.allclose(x.grad.data, np.array([[1, 1, 1], [1, 1, 1]])))
Example #20
    def test_tanh(self):
        x = Variable(np.array(1.0))
        y = F.tanh(x)
        x.name = 'x'
        y.name = 'y'
        y.backward(create_graph=True)

        iters = 0

        for i in range(iters):
            gx = x.grad
            x.cleargrad()
            gx.backward(create_graph=True)

        gx = x.grad
        gx.name = 'gx' + str(iters + 1)
        txt = get_dot_graph(gx)
        with open('test.dot', 'w') as f:
            f.write(txt)
Example #21
    def test_gradient_check(self):
        x = Variable(np.random.rand(1))
        y = square(x)
        y.backward()
        num_grad = numerical_diff(square, x)
        if isinstance(x.grad, Variable):
            flg = np.allclose(x.grad.data, num_grad)
        else:
            flg = np.allclose(x.grad, num_grad)
        self.assertTrue(flg)
Example #22
    def test_goldstein(self):
        def goldstein(x, y):
            z = (1 + (x + y + 1) ** 2 * (19 - 14*x + 3*x**2 - 14*y + 6*x*y + 3*y**2)) * \
                (30 + (2*x - 3*y)**2 * (18 - 32*x + 12*x**2 + 48*y - 36*x*y + 27*y**2))
            return z

        x = Variable(np.array(1.0))
        y = Variable(np.array(1.0))
        z = goldstein(x, y)
        z.backward()

        expected0 = np.array(-5376.0)
        expected1 = np.array(8064.0)
        if isinstance(x.grad, Variable):
            self.assertEqual(x.grad.data, expected0)
            self.assertEqual(y.grad.data, expected1)
        else:
            self.assertEqual(x.grad, expected0)
            self.assertEqual(y.grad, expected1)
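
A finite-difference cross-check of the Goldstein-Price gradients at (1, 1), matching the -5376 / 8064 constants asserted above:

import numpy as np

def gp(x, y):
    return (1 + (x + y + 1) ** 2 * (19 - 14*x + 3*x**2 - 14*y + 6*x*y + 3*y**2)) * \
           (30 + (2*x - 3*y) ** 2 * (18 - 32*x + 12*x**2 + 48*y - 36*x*y + 27*y**2))

eps = 1e-6
gx = (gp(1 + eps, 1.0) - gp(1 - eps, 1.0)) / (2 * eps)
gy = (gp(1.0, 1 + eps) - gp(1.0, 1 - eps)) / (2 * eps)
assert np.isclose(gx, -5376.0, rtol=1e-4)
assert np.isclose(gy, 8064.0, rtol=1e-4)
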
Example #23
def testconnection():
    data = np.array([1, 2])
    x = Variable(data)
    A = Square()
    B = Exp()
    a = A(x)
    y = B(a)
    assert y.creator == B
    assert y.creator.input == a
    assert y.creator.input.creator == A
    assert y.creator.input.creator.input == x
Example #24
    def test_newton_gd(self):
        def f(x):
            y = x**4 - 2 * x**2
            return y

        def gx2(x):
            return 12 * x**2 - 4

        x = Variable(np.array(2.0))
        iters = 10

        for i in range(iters):
            y = f(x)
            x.cleargrad()
            y.backward()

            if isinstance(x.grad, Variable):
                x.data -= x.grad.data / gx2(x.data)
            else:
                x.data -= x.grad / gx2(x.data)
Example #25
def test_2nd_order_derivative():
    f = lambda t: t**4 - 2 * t**2

    x = Variable(np.array(2))
    y = f(x)
    y.backward(create_graph=True)
    assert x.grad.data == 24

    gx = x.grad
    gx.backward()
    assert x.grad.data == 68
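
Why the two asserts above hold: f'(x) = 4*x**3 - 4*x = 24 and f''(x) = 12*x**2 - 4 = 44 at x = 2, and because x.grad is not cleared before gx.backward(), the second pass accumulates onto the first, giving 24 + 44 = 68.

x = 2.0
assert 4 * x ** 3 - 4 * x == 24     # first derivative at x = 2
assert 12 * x ** 2 - 4 == 44        # second derivative at x = 2
assert 24 + 44 == 68                # grads accumulate without cleargrad()
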
Example #26
    def test_taylor(self):
        def my_sin(x, threshold=0.00001):
            y = 0
            for i in range(100000):
                c = (-1)**i / math.factorial(2 * i + 1)
                t = c * x**(2 * i + 1)
                y = y + t
                if abs(t.data) < threshold:
                    break
            return y

        x = Variable(np.array(np.pi / 4))
        y = my_sin(x)
        y.backward()
        if isinstance(x.grad, Variable):
            flg = np.allclose(y.data, x.grad.data)
        else:
            flg = np.allclose(y.data, x.grad)
        self.assertTrue(flg)
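
The comparison of y.data with x.grad above works because the input is pi/4, where the value and the derivative of sine coincide: sin(pi/4) = cos(pi/4) = sqrt(2)/2.

import numpy as np

assert np.isclose(np.sin(np.pi / 4), np.cos(np.pi / 4))
assert np.isclose(np.sin(np.pi / 4), np.sqrt(2) / 2)
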
Example #27
    def test_sum(self):
        x = Variable(np.array([[1, 2, 3], [4, 5, 6]]))
        y = x.sum()
        y.backward()

        self.assertEqual(y.data, 21)
        self.assertTrue(
            np.allclose(x.grad.data, np.array([[1, 1, 1], [1, 1, 1]])))
        self.assertTrue(np.allclose(x.sum(axis=0).data, np.array([5, 7, 9])))
        self.assertTrue(np.allclose(x.sum(axis=1).data, np.array([6, 15])))
        self.assertTrue(
            np.allclose(
                x.sum(axis=0, keepdims=True).data, np.array([[5, 7, 9]])))
        self.assertTrue(
            np.allclose(
                x.sum(axis=1, keepdims=True).data, np.array([[6], [15]])))
Example #28
    def test_newton(self):
        def f(x):
            y = x**4 - 2 * x**2
            return y

        x = Variable(np.array(2.0))
        iters = 10

        for i in range(iters):
            y = f(x)
            x.cleargrad()
            y.backward(create_graph=True)

            gx = x.grad
            x.cleargrad()
            gx.backward()
            gx2 = x.grad

            x.data -= gx.data / gx2.data
Example #29
def test_newtons_method():
    f = lambda t: t**4 - 2 * t**2

    x = Variable(np.array(2.0))
    iters = 10

    for i in range(iters):
        LOGGER.debug('{} {}'.format(i, x))

        y = f(x)
        x.clear_grad()
        y.backward(create_graph=True)

        gx = x.grad
        x.clear_grad()
        gx.backward()
        gx2 = x.grad

        x.data -= gx.data / gx2.data

    assert x.data == 1
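
The same Newton update, x <- x - f'(x) / f''(x) for f(x) = x**4 - 2*x**2, traced in plain Python from the same starting point; it settles on the minimum at x = 1 well within the 10 iterations, which is what the final assert relies on.

x = 2.0
for _ in range(10):
    g1 = 4 * x ** 3 - 4 * x     # f'(x)
    g2 = 12 * x ** 2 - 4        # f''(x)
    x -= g1 / g2                # Newton step
assert x == 1.0
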
Example #30
# coding: utf-8
if '__file__' in globals():
    import os, sys
    sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

import numpy as np
from dezero import Variable


def f(x):
    y = x ** 4 - 2 * x ** 2
    return y


x = Variable(np.array(2.0))
iters = 10

for i in range(iters):
    print(i, x)

    y = f(x)
    x.cleargrad()
    y.backward(create_graph=True)

    gx = x.grad
    x.cleargrad()
    gx.backward()
    gx2 = x.grad

    x.data -= gx.data / gx2.data