Example #1
    def test_backward3(self):
        n, c, h, w = 1, 5, 20, 15
        o, k, s, p = 3, (5, 3), 1, 3
        x = np.random.randn(n, c, h, w)
        W = np.random.randn(o, c, k[0], k[1])
        b = np.random.randn(o)
        f = lambda W: F.conv2d_simple(x, W, b, s, p)
        self.assertTrue(gradient_check(f, W))
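Every example below relies on gradient_check, which verifies a backward implementation by comparing the gradient produced by backpropagation against a numerical estimate. The sketch below shows only the numerical side, assuming f takes and returns plain NumPy arrays; the name numerical_grad_sketch is illustrative, and the real gradient_check (presumably dezero.utils.gradient_check) additionally runs backward() on a Variable and compares the two gradients with an allclose-style tolerance check.

import numpy as np

def numerical_grad_sketch(f, x, eps=1e-4):
    # Central-difference estimate of d(sum(f(x)))/dx.
    # x is assumed to be a float array; integer inputs would truncate x + eps.
    grad = np.zeros_like(x)
    for idx in np.ndindex(*x.shape):
        tmp = x[idx]
        x[idx] = tmp + eps
        y1 = f(x)                              # f evaluated at x + eps
        x[idx] = tmp - eps
        y2 = f(x)                              # f evaluated at x - eps
        grad[idx] = (y1 - y2).sum() / (2 * eps)
        x[idx] = tmp                           # restore the original entry
    return grad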
Example #2
    def test_backward3(self):
        x_data = np.random.randn(10, 10)

        def f(x):
            np.random.seed(0)
            return F.dropout(x, 0.0)

        self.assertTrue(gradient_check(f, x_data))
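The seed is fixed inside f so that the two evaluations made by the numerical gradient (at x + eps and x - eps) draw the same dropout mask; with a ratio of 0.0 the mask passes everything and dropout reduces to the identity. For reference, a minimal sketch of inverted dropout as it is commonly implemented (the name dropout_sketch is illustrative, and F.dropout typically bypasses the mask entirely outside training mode):

import numpy as np

def dropout_sketch(x, dropout_ratio=0.5):
    # Zero out activations at random, then rescale the survivors so the
    # expected value of the output matches the input (inverted dropout).
    mask = np.random.rand(*x.shape) > dropout_ratio
    return x * mask / (1.0 - dropout_ratio)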
Example #3
    def test_backward3(self):
        n, c_i, c_o = 10, 1, 3
        h_i, w_i = 5, 10
        h_k, w_k = 10, 10
        h_p, w_p = 5, 5
        s_y, s_x = 5, 5
        x = np.random.uniform(0, 1, (n, c_i, h_i, w_i))
        W = np.random.uniform(0, 1, (c_i, c_o, h_k, w_k))
        b = np.random.uniform(0, 1, c_o)
        f = lambda b: F.deconv2d(x, W, b, stride=(s_y, s_x), pad=(h_p, w_p))
        self.assertTrue(gradient_check(f, b))
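Under the usual transposed-convolution output-size rule (no output padding), the spatial size is stride * (in - 1) + kernel - 2 * pad. The helper name deconv_outsize_sketch below is illustrative:

def deconv_outsize_sketch(in_size, k, s, p):
    # Standard output-size rule for a transposed convolution
    # without output padding: s * (in - 1) + k - 2 * p
    return s * (in_size - 1) + k - 2 * p

# For the parameters above: h_out = deconv_outsize_sketch(5, 10, 5, 5) = 20
# and w_out = deconv_outsize_sketch(10, 10, 5, 5) = 45, so the deconv2d
# output has shape (10, 3, 20, 45).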
Example #4
    def test_backward2(self):
        n, c, h, w = 1, 1, 3, 3
        x = np.random.rand(1, 1, 3, 3, 1, 1)
        f = lambda x: F.col2im(x, (n, c, h, w), 3, 3, 0, to_matrix=False)
        self.assertTrue(gradient_check(f, x))
Example #5
    def test_backward1(self):
        x = np.array([[1, 2, 3], [4, 5, 6]])
        self.assertTrue(gradient_check(F.transpose, x))
Example #6
    def test_backward3(self):
        np.random.seed(0)
        x_data = np.random.rand(10, 10, 10)
        self.assertTrue(gradient_check(F.sigmoid, x_data))
Example #7
    def test_backward4(self):
        x_data = np.random.rand(10, 20, 20)
        f = lambda x: F.sum(x, axis=None)
        self.assertTrue(gradient_check(f, x_data))
Example #8
    def test_backward3(self):
        N, C = 8, 3
        x, gamma, beta, mean, var = get_params(N, C, dtype=np.float64)
        f = lambda beta: F.batch_nrom(x, gamma, beta, mean, var)
        self.assertTrue(gradient_check(f, beta))
Example #9
    def test_backward3(self):
        x_data = np.random.rand(10, 20, 20) * 100
        f = lambda x: F.sum_to(x, (10, ))
        self.assertTrue(gradient_check(f, x_data))
Example #10
    def test_backward1(self):
        x = np.array([[-1, 0, 1, 2], [2, 0, 1, -1]])
        f = lambda x: F.log_softmax(x)
        self.assertTrue(gradient_check(f, x))
Example #11
    def test_backward1(self):
        x_data = np.array([[0, 1, 2], [0, 2, 4]])
        f = lambda x: F.softmax(x, axis=1)
        self.assertTrue(gradient_check(f, x_data))
Example #12
    def test_backward3(self):
        x = np.random.randn(3, 3)
        y = np.random.randn(3, 1)
        f = lambda x: x / y
        self.assertTrue(gradient_check(f, x))
Example #13
    def test_backward1(self):
        x = np.array([[-1, 0, 1, 2], [2, 0, 1, -1]], np.float32)
        t = np.array([3, 0]).astype(np.int32)
        f = lambda x: F.softmax_cross_entropy_simple(x, Variable(t))
        self.assertTrue(gradient_check(f, x))
Example #14
    def test_backward3(self):
        x = np.random.randn(3, 3)
        y = np.random.randn(3, 1)
        self.assertTrue(gradient_check(F.add, x, y))
Example #15
    def test_backward2(self):
        x = Variable(np.random.randn(3, 3))
        y = np.random.randn(3, 1)
        f = lambda x: x + y
        self.assertTrue(gradient_check(f, x))
Example #16
    def test_backward2(self):
        x = np.random.randn(100, 200)
        W = np.random.randn(200, 300)
        b = None
        f = lambda x: F.linear(x, W, b)
        self.assertTrue(gradient_check(f, x))
Example #17
    def test_backward1(self):
        x = np.random.randn(3, 2)
        W = np.random.randn(2, 3)
        b = np.random.randn(3)
        f = lambda x: F.linear(x, W, b)
        self.assertTrue(gradient_check(f, x))
Example #18
    def test_backward4(self):
        x_data = np.random.rand(10)
        f = lambda x: F.sum_to(x, (10, )) + 1
        self.assertTrue(gradient_check(f, x_data))
Example #19
    def test_backward3(self):
        N, CLS_NUM = 100, 10
        x = np.random.randn(N, CLS_NUM)
        t = np.random.randint(0, CLS_NUM, (N, ))
        f = lambda x: F.softmax_cross_entropy_simple(x, t)
        self.assertTrue(gradient_check(f, x))
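A NumPy-only sketch of the loss that softmax_cross_entropy_simple computes, i.e. the mean negative log-probability of the correct class under a softmax over axis 1 (the name softmax_cross_entropy_sketch is illustrative):

import numpy as np

def softmax_cross_entropy_sketch(x, t):
    # Shift for numerical stability, take the log-softmax along axis 1,
    # then average the negative log-probability of the target class.
    x = x - x.max(axis=1, keepdims=True)
    log_p = x - np.log(np.exp(x).sum(axis=1, keepdims=True))
    return -log_p[np.arange(len(t)), t].mean()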
Example #20
    def test_backward3(self):
        x = np.random.randn(10, 5)
        self.assertTrue(gradient_check(F.transpose, x))
Example #21
    def test_backward2(self):
        x = np.random.randn(10, 10)
        f = lambda x: F.log_softmax(x)
        self.assertTrue(gradient_check(f, x))
Example #22
    def test_backward4(self):
        x = np.array([1, 2])
        self.assertTrue(gradient_check(F.transpose, x))
Example #23
    def test_backward3(self):
        np.random.seed(0)
        x_data = np.random.rand(10, 10, 10)
        f = lambda x: F.softmax(x, axis=1)
        self.assertTrue(gradient_check(f, x_data))
Example #24
    def test_backward1(self):
        x_data = np.array([[1, 2, 3], [4, 5, 6]])
        slices = 1
        f = lambda x: F.get_item(x, slices)
        self.assertTrue(gradient_check(f, x_data))
Example #25
    def test_backward6(self):
        params = 10, 20, 5, 5
        x, gamma, beta, mean, var = get_params(*params, dtype=np.float64)
        f = lambda beta: F.batch_nrom(x, gamma, beta, mean, var)
        self.assertTrue(gradient_check(f, beta))
Example #26
    def test_backward2(self):
        x_data = np.arange(12).reshape(4, 3)
        slices = slice(1, 3)
        f = lambda x: F.get_item(x, slices)
        self.assertTrue(gradient_check(f, x_data))
Example #27
    def test_backward1(self):
        x_data = np.array([[0, 1, 2], [0, 2, 4]])
        self.assertTrue(gradient_check(F.sigmoid, x_data))
Example #28
    def test_backward1(self):
        n, c, h, w = 1, 5, 16, 16
        ksize, stride, pad = 2, 2, 0
        x = np.random.randn(n, c, h, w).astype('f') * 1000
        f = lambda x: F.pooling(x, ksize, stride, pad)
        self.assertTrue(gradient_check(f, x))
Example #29
    def test_backward2(self):
        x0 = np.random.rand(100)
        x1 = np.random.rand(100)
        f = lambda x0: F.mean_squared_error(x0, x1)
        self.assertTrue(gradient_check(f, x0))
Example #30
    def test_backward2(self):
        n, c, h, w = 1, 1, 3, 3
        x = np.arange(n * c * h * w).reshape((n, c, h, w))
        f = lambda x: F.im2col(x, 3, 3, 0, to_matrix=False)
        self.assertTrue(gradient_check(f, x))
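With the usual convolution output-size rule, (in + 2*pad - kernel) // stride + 1, a 3x3 input with a 3x3 kernel, stride 3 and no padding yields a 1x1 output, so im2col with to_matrix=False produces an array of shape (n, c, kh, kw, oh, ow) = (1, 1, 3, 3, 1, 1), which is exactly the shape fed to F.col2im in Example #4. A small illustrative helper (the name conv_outsize_sketch is not part of DeZero):

def conv_outsize_sketch(in_size, k, s, p):
    # Standard convolution output-size rule.
    return (in_size + 2 * p - k) // s + 1

# conv_outsize_sketch(3, 3, 3, 0) == 1 for both spatial dimensions.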