Example 1
 def test_backward_with_broadcast(self):
     x0 = Variable(np.array([10, 11, 12]))
     x1 = Variable(np.array([5]))
     y = sub(x0, x1)
     y.backward()
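     # x1 was broadcast over x0's shape, so its gradient is summed back: 3 * (-1) = -3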
     assert_equal(x0.grad.data, np.array([1, 1, 1]))
     assert_equal(x1.grad.data, np.array([-3]))
Example 2
 def test_backward_sets_grad_on_inputs(self):
     x0 = Variable(np.array(2.0))
     x1 = Variable(np.array(3.0))
     y = add(x0, x1)
     y.backward()
     self.assertEqual(x0.grad.data, np.array(1.0))
     self.assertEqual(x1.grad.data, np.array(1.0))
Example 3
    def test_basic_operation(self):

        # Variable and Variable
        a = Variable(np.array(3.0))
        b = Variable(np.array(2.0))
        self.assertEqual((a * b).data, np.array(6.0))
        self.assertEqual((a + b).data, np.array(5.0))
        self.assertEqual((a - b).data, np.array(1.0))
        self.assertEqual((a / b).data, np.array(3.0 / 2.0))

        # Variable and Scalar
        b = 2.0
        self.assertEqual((a * b).data, np.array(6.0))
        self.assertEqual((a + b).data, np.array(5.0))
        self.assertEqual((a - b).data, np.array(1.0))
        self.assertEqual((a / b).data, np.array(3.0 / 2.0))

        # Scalar and Variable
        self.assertEqual((b * a).data, np.array(6.0))
        self.assertEqual((b + a).data, np.array(5.0))
        self.assertEqual((b - a).data, np.array(-1.0))
        self.assertEqual((b / a).data, np.array(2.0 / 3.0))

        # negative
        self.assertEqual((-a).data, np.array(-3.0))

        # pow
        self.assertEqual((a**2).data, np.array(9.0))
Example 4
 def test_matyas(self):
     x = Variable(np.array(1.0))
     y = Variable(np.array(1.0))
     z = 0.26*(x**2+y**2)-0.48*x*y
     z.backward()
     self.assertAlmostEqual(x.grad.data, 0.04, delta=1e-6)
     self.assertAlmostEqual(y.grad.data, 0.04, delta=1e-6)
Example 5
 def test_backward(self):
     a = Variable(np.array(3.0))
     b = Variable(np.array(2.0))
     y = sub(a, b)
     y.backward()
     self.assertEqual(a.grad.data, np.array(1.0))
     self.assertEqual(b.grad.data, np.array(-1.0))
Example 6
 def test_backward_when_broadcasted(self):
     x0 = Variable(np.array([10, 11, 12]))
     x1 = Variable(np.array([5]))
     y = x0 + x1
     y.backward()
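     # x1's gradient is the sum over the broadcast axis: 1 + 1 + 1 = 3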
     assert_equal(x0.grad.data, np.ones((3,)))
     assert_equal(x1.grad.data, np.array([3]))
Example 7
 def test_backward_with_broadcast(self):
     x0 = Variable(np.array([10, 11, 12]))
     x1 = Variable(np.array([2]))
     y = div(x0, x1)
     y.backward()
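     # d(x0/x1)/dx1 = -x0/x1**2, summed over the broadcast axis: -(10 + 11 + 12)/4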
     assert_equal(x0.grad.data, np.array([0.5, 0.5, 0.5]))
     assert_equal(x1.grad.data, np.array([-10/4 - 11/4 - 12/4]))
Example 8
 def test_clear_grad(self):
     x = Variable(np.array(2.0))
     y = 2 * x
     y.backward()
     x.clear_grad()
     y = 3 * x
     y.backward()
     self.assertEqual(x.grad.data, np.array(3.0))
Example 9
 def test_backward_WO_bias(self):
     x = Variable(np.array([[1, 2, 3], [6, 7, 8]]))
     W = Variable(np.arange(1, 7).reshape(3, 2))
     y = linear(x, W)
     y.backward()
     assert_equal(x.grad.data, np.ones((2, 2)) @ W.data.T)
     assert_equal(W.grad.data, x.data.T @ np.ones((2, 2)))
     self.assertIsNone(y.creator.inputs[2].grad)
Example 10
 def test_backward(self):
     x0 = Variable(np.array(1.0))
     x1 = Variable(np.array(1.0))
     t = add(x0, x1)
     y = add(x0, t)
     y.backward()
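     # x0 feeds both add calls, so its gradients accumulate to 1 + 1 = 2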
     self.assertEqual(x0.grad.data, np.array(2.0))
     self.assertEqual(x1.grad.data, np.array(1.0))
Example 11
 def test_backward(self):
     x = Variable(np.arange(2 * 3 * 3).reshape(1, 2, 3, 3))
     kernel = Variable(np.array([[[[1, 2], [2, 1]], [[0, 1], [0, 1]]]]))
     y = C.conv2d_simple(x, kernel, pad=1)
     y.backward()
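     # with pad=1 every input element is covered by all four kernel taps,
     # so its gradient is the per-channel kernel sum (6 and 2)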
     assert_equal(x.grad.shape, x.shape)
     assert_equal(x.grad.data, [[[[6, 6, 6], [6, 6, 6], [6, 6, 6]],
                                 [[2, 2, 2], [2, 2, 2], [2, 2, 2]]]])
Example 12
 def test_goldstein(self):
     x = Variable(np.array(1.0))
     y = Variable(np.array(1.0))
     z = (1+(x+y+1)**2*(19-14*x+3*x**2-14*y+6*x*y+3*y**2)) * \
         (30+(2*x-3*y)**2*(18-32*x+12*x**2+48*y-36*x*y+27*y**2))
     z.backward()
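     # analytic gradients of the Goldstein-Price function at (1, 1)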
     self.assertEqual(x.grad.data, -5376)
     self.assertEqual(y.grad.data, 8064)
Example 13
 def test_backward_W_bias(self):
     x = Variable(np.array([[1, 2, 3], [6, 7, 8]]))
     W = Variable(np.arange(1, 7).reshape(3, 2))
     b = Variable(np.array(5))
     y = linear(x, W, b)
     y.backward()
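     # the scalar bias broadcasts over all four outputs, so its gradient sums to 4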
     assert_equal(x.grad.data, np.ones((2, 2)) @ W.data.T)
     assert_equal(W.grad.data, x.data.T @ np.ones((2, 2)))
     assert_equal(b.grad.data, 4)
Example 14
    def test_div(self):
        a = Variable(np.array(4.0))
        b = Variable(np.array(2.0))
        c = a / b
        self.assertEqual(c.data, np.array(2.0))

        c.backward()
        self.assertEqual(a.grad.data, 1 / 2.0)
        self.assertEqual(b.grad.data, -4.0 / 2.0**2)
Example 15
 def test_two_order_diff(self):
     x = Variable(np.array(2.0))
     y = x**4 - 2*x**2
     y.backward(create_graph=True)
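     # create_graph=True records the backward pass so gx can be differentiated again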
     gx = x.grad
     x.clear_grad()
     self.assertEqual(gx.data, np.array(24.0))
     gx.backward()
     self.assertEqual(x.grad.data, np.array(44.0))
Example 16
    def test_softmax_cross_entropy(self):
        x = Variable(np.array([[1.0, 1.0, 1.0], [3.0, 2.0, 1.0]]))
        t = Variable(np.array([1, 0]))

        y = softmax_cross_entropy(x, t)
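        # expected loss: mean negative log-probability of the target classes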
        p_1 = np.log(np.exp(1.0) / (3 * np.exp(1.0)))
        p_2 = np.log(np.exp(3.0) / (np.exp(3.0) + np.exp(2.0) + np.exp(1.0)))
        expected_ans = -(p_1 + p_2) / 2
        assert_array_equal(y.data, np.array(expected_ans))
Example 17
    def test_mul(self):
        a = Variable(np.array(3.0))
        b = Variable(np.array(2.0))

        y = 2.0 * a
        z = a * 2.0
        self.assertEqual(y.data, np.array(6.0))
        self.assertEqual(z.data, np.array(6.0))
Example 18
    def test__dot_var(self):
        a = Variable(np.array(1.0))
        b = Variable(np.array(1.0))
        c = a + b
        d = square(c)

        actual = get_dot_graph(d)
        self.assertIn(f"{id(d.creator)} -> {id(d)}", actual)
        self.assertIn(f"{id(c)} -> {id(d.creator)}", actual)
Example 19
    def test_gradient_add_with_differentsize_array(self):

        x = Variable(np.array([1.0, 2.0, 3.0]))
        y = Variable(np.array([2.0]))
        z = x + y
        z.backward()

        assert_array_equal(x.grad.data, np.array([1, 1, 1]))
        assert_array_equal(y.grad.data, np.array([3.0]))
Example 20
 def test_retain_grad_only_the_first_inputs(self):
     x0 = Variable(np.array(2.0))
     x1 = Variable(np.array(3.0))
     t = add(x0, x1)
     y = add(x0, t)
     y.backward()
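     # without retain_grad=True only leaf variables keep their gradients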
     self.assertIsNone(y.grad)
     self.assertIsNone(t.grad)
     self.assertEqual(x0.grad.data, 2.0)
     self.assertEqual(x1.grad.data, 1.0)
Example 21
    def test_matmul(self):

        x = Variable(np.array([[1, 1], [1, 1]]))
        W = Variable(np.array([[2], [2]]))
        y = matmul(x, W)
        y.backward()

        assert_array_equal(y.data, np.array([[4], [4]]))
        assert_array_equal(x.grad.data, np.array([[2, 2], [2, 2]]))
        assert_array_equal(W.grad.data, np.array([[2], [2]]))
Example 22
    def test_rosen(self):
        def rosenbrock(x0, x1):
            y = 100 * (x1 - x0**2)**2 + (x0 - 1)**2
            return y

        x0 = Variable(np.array(0.0))
        x1 = Variable(np.array(2.0))

        y = rosenbrock(x0, x1)
        y.backward()

        # analytic gradients at (x0, x1) = (0, 2):
        # dy/dx0 = -400*x0*(x1 - x0**2) + 2*(x0 - 1) = -2
        # dy/dx1 = 200*(x1 - x0**2) = 400
        self.assertEqual(x0.grad.data, -2.0)
        self.assertEqual(x1.grad.data, 400.0)
Example 23
    def test_forward(self):
        x = Variable(np.arange(2 * 3 * 3).reshape(1, 2, 3, 3))
        kernel = Variable(np.array([[[[1, 2], [2, 1]], [[0, 1], [0, 1]]]]))
        y = C.conv2d_simple(x, kernel, pad=1)
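        # a and b are the contributions of input channels 0 and 1, respectively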
        a = np.array([[[[0, 1, 4, 4], [3, 12, 18, 12], [12, 30, 36, 21],
                        [12, 20, 23, 8]]]])
        b = np.array([[[[9, 10, 11, 0], [21, 23, 25, 0], [27, 29, 31, 0],
                        [15, 16, 17, 0]]]])

        assert_equal(y.shape, (1, 1, 4, 4))
        assert_equal(y.data, a + b)
Example 24
    def test_gradient_matyas(self):
        def matyas(x, y):
            z = 0.26 * (x**2 + y**2) - 0.48 * x * y
            return z

        x = Variable(np.array(1.0))
        y = Variable(np.array(1.0))
        z = matyas(x, y)
        z.backward()

        self.assertAlmostEqual(x.grad.data, 0.040)
        self.assertAlmostEqual(y.grad.data, 0.040)
Example 25
    def test_higher_derivative(self):

        x = Variable(np.array(2.0))
        y = x**4 - 2 * x**2
        y.backward(create_graph=True)  # keep the backward graph for the second differentiation
        self.assertEqual(x.grad.data, 24.0)

        gx = x.grad
        x.clear_grad()
        gx.backward()
        self.assertEqual(x.grad.data, 44.0)
Example 26
    def test_shape(self):
        x = Variable(np.random.randn(1, 2, 3))
        y = x.reshape((6, ))
        y.backward(retain_grad=True)
        self.assertEqual(x.shape, x.grad.shape)
        self.assertEqual(y.shape, (6, ))

        a = Variable(np.array([[1, 2, 3], [4, 5, 6]]))
        b = F.transpose(a)
        c = a.T
        b.backward()
        self.assertEqual(a.grad.shape, a.shape)
        self.assertEqual(b.shape, c.shape)
Example 27
    def test_get_item(self):
        x = Variable(np.array([[1, 1, 1], [2, 2, 2]]))
        y = x[1]
        assert_array_equal(y.data, np.array([2, 2, 2]))
        y.backward()
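        # only the indexed row of x receives gradient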
        expected_grad = np.array([[0, 0, 0], [1, 1, 1]])
        assert_array_equal(x.grad.data, expected_grad)

        x = Variable(np.array([[1, 1, 1], [2, 2, 2]]))
        y = x[np.arange(2), [0, 1]]
        assert_array_equal(y.data, np.array([1, 2]))
        y.backward()
        expected_grad = np.array([[1, 0, 0], [0, 1, 0]])
        assert_array_equal(x.grad.data, expected_grad)
Example 28
    def test_gradient_with_constant(self):
        x = Variable(np.array(4.0))
        y = 2.0 * x
        y.backward()
        self.assertEqual(x.grad.data, 2.0)

        x = Variable(np.array(4.0))
        y = x / 2.0
        y.backward()
        self.assertEqual(x.grad.data, 0.5)

        x = Variable(np.array(2.0))
        y = 2.0 / x
        y.backward()
        self.assertEqual(x.grad.data, -0.5)
Example 29
def accuracy(y, t):
    y, t = as_variable(y), as_variable(t)

    pred = y.data.argmax(axis=1).reshape(t.shape)
    result = (pred == t.data)
    acc = result.mean()
    return Variable(as_array(acc))
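
A minimal usage sketch for accuracy() above, assuming it and numpy are already imported; the sample arrays are hypothetical, and raw ndarrays are accepted because the helper converts them with as_variable internally:

    y = np.array([[0.1, 0.8, 0.1],   # argmax -> class 1
                  [0.7, 0.2, 0.1]])  # argmax -> class 0
    t = np.array([1, 0])             # both predictions match the targets
    print(accuracy(y, t).data)       # 1.0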
Example 30
 def test_backward(self):
     x = Variable(np.array([[1, 2, 3], [4, 5, 6]]))
     y = reshape(x, (6, ))
     y.backward(retain_grad=True)
     self.assertEqual(y.grad.shape, (6, ))
     self.assertEqual(x.grad.shape, x.shape)
     self.assertTrue((x.grad.data == np.ones((2, 3))).all())