Example #1
    def test_forward1(self):
        layer = ConvLayer(1, 1, 3)

        x = fake_data((1, 1, 3, 3))
        layer.W = fake_data((1, 1, 3, 3))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)

        should_be = np.array([[[[58., 100., 70.], [132., 204., 132.],
                                [70., 100., 58.]]]])

        self.assertTrue(np.allclose(y, should_be))
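These tests rely on a fake_data helper that is not shown in the listing. The expected values above (for example the centre entry 204 in test_forward1) are consistent with a deterministic tensor of consecutive values, so a minimal sketch might look as follows; the exact dtype and implementation are assumptions:

import numpy as np

def fake_data(shape):
    # Assumed behaviour: deterministic tensor 0, 1, 2, ... reshaped to `shape`.
    return np.arange(np.prod(shape), dtype=np.float64).reshape(shape)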
Example #2
    def test_forward4(self):
        h = 5
        layer = ConvLayer(2, 5, h)

        x = fake_data((2, 2, 8, 8))
        layer.W = fake_data((5, 2, h, h))
        layer.b = fake_data(layer.b.shape)
        #print "b = ", layer.b

        y = layer.forward(x)

        from test4_result import t4_should_be
        #print "y = ", y
        #print "should_be = ", t4_should_be
        self.assertTrue(np.allclose(y, t4_should_be))
Example #3
    def test_forward2(self):
        layer = ConvLayer(2, 1, 3)

        x = fake_data((1, 2, 4, 4))
        layer.W = fake_data((1, 2, 3, 3))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)

        should_be = np.array([[[[1196., 1796., 1916., 1264.],
                                [1881., 2793., 2946., 1923.],
                                [2313., 3405., 3558., 2307.],
                                [1424., 2072., 2156., 1380.]]]])

        self.assertTrue(np.allclose(y, should_be))
Example #4
    def test_forward1(self):
        layer = FlattenLayer()

        x = fake_data((1, 2, 3, 3))
        y = layer.forward(x)

        self.assertTrue(y.shape == (1, 18))
Example #5
    def test_backward3(self):
        self.x = fake_data((2, 3, 5, 5))
        nm_x_grad = numerical_gradient(self.layer, self.x, self.x)

        self.layer.forward(self.x)
        y = np.ones((2, 3, 2, 2))
        x_grad = self.layer.backward(y)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
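The backward tests compare analytic gradients against a numerical_gradient helper that is also not shown. From the call sites it takes the layer, the forward input, the array to differentiate with respect to (the input itself, layer.W or layer.b), and an optional upstream gradient. A central-difference sketch under those assumptions (the step size and the all-ones default are guesses):

import numpy as np

def numerical_gradient(layer, x, target, y_grad=None, eps=1e-5):
    # Finite-difference gradient of sum(layer.forward(x) * y_grad) with
    # respect to `target`; y_grad defaults to an all-ones upstream gradient.
    grad = np.zeros_like(target)
    it = np.nditer(target, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        old = target[idx]
        target[idx] = old + eps
        y_plus = layer.forward(x)
        target[idx] = old - eps
        y_minus = layer.forward(x)
        target[idx] = old
        diff = y_plus - y_minus
        if y_grad is not None:
            diff = diff * y_grad
        grad[idx] = diff.sum() / (2 * eps)
        it.iternext()
    return grad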
Example #6
    def test_backward4(self):
        h = 5
        layer = ConvLayer(2, 5, h)

        x = fake_data((2, 2, 8, 8))
        layer.W = fake_data((5, 2, h, h))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        nm_x_grad = numerical_gradient(layer, x, x)
        nm_w_grad = numerical_gradient(layer, x, layer.W)
        nm_b_grad = numerical_gradient(layer, x, layer.b)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
Example #7
    def test_backward3_5(self):
        layer = ConvLayer(5, 3, 3)

        x = fake_data((2, 5, 3, 3))
        layer.W = fake_data(layer.W.shape)
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        # do numerical gradients
        nm_x_grad = numerical_gradient(layer, x, x)
        nm_w_grad = numerical_gradient(layer, x, layer.W)
        nm_b_grad = numerical_gradient(layer, x, layer.b)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
Example #8
    def test_forward3(self):
        layer = ConvLayer(2, 2, 3)

        x = fake_data((1, 2, 4, 4))
        layer.W = fake_data((2, 2, 3, 3))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)

        should_be = np.array([[[[1196., 1796., 1916., 1264.],
                                [1881., 2793., 2946., 1923.],
                                [2313., 3405., 3558., 2307.],
                                [1424., 2072., 2156., 1380.]],
                               [[2709., 4173., 4509., 3065.],
                                [4582., 7006., 7483., 5056.],
                                [5878., 8914., 9391., 6304.],
                                [4089., 6177., 6477., 4333.]]]])

        self.assertTrue(np.allclose(y, should_be))
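The ConvLayer class itself is not part of the listing. The hand-computed outputs in the forward tests (58 in the corner and 204 in the centre of test_forward1, 2709 in the second output channel above) match a stride-1 cross-correlation with zero "same" padding and one scalar bias per output channel. A forward-only sketch under those assumptions; the constructor signature comes from the tests, the internal parameter layout is assumed:

import numpy as np

class ConvLayer:
    # Assumed layout: W has shape (n_out, n_in, h, h), b has shape (n_out,).
    def __init__(self, n_in, n_out, h):
        self.W = np.zeros((n_out, n_in, h, h))
        self.b = np.zeros(n_out)

    def forward(self, x):
        n, c, H, Wd = x.shape
        n_out, _, h, _ = self.W.shape
        p = (h - 1) // 2  # zero padding that keeps the spatial size unchanged
        xp = np.pad(x, ((0, 0), (0, 0), (p, p), (p, p)))
        y = np.zeros((n, n_out, H, Wd))
        for i in range(H):
            for j in range(Wd):
                patch = xp[:, :, i:i + h, j:j + h]  # (n, c, h, h)
                y[:, :, i, j] = np.tensordot(
                    patch, self.W, axes=([1, 2, 3], [1, 2, 3])) + self.b
        return y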
Example #9
    def test_backward5(self):
        h = 5
        layer = ConvLayer(2, 5, h)

        x = fake_data((2, 2, 8, 8))
        layer.W = fake_data((5, 2, h, h))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        y_grad = fake_data(y.shape)
        x_grad = layer.backward(y_grad)

        nm_x_grad = numerical_gradient(layer, x, x, y_grad)
        nm_w_grad = numerical_gradient(layer, x, layer.W, y_grad)
        nm_b_grad = numerical_gradient(layer, x, layer.b, y_grad)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
Example #10
    def test_backward1(self):
        layer = FlattenLayer()

        x = fake_data((1, 2, 3, 3))
        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        # do numerical gradients
        nm_x_grad = numerical_gradient(layer, x, x)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
Example #11
    def test_backward2(self):
        layer = ConvLayer(2, 1, 3)

        x = fake_data((1, 2, 4, 4))
        layer.W = fake_data((1, 2, 3, 3))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        # do numerical gradients
        nm_x_grad = numerical_gradient(layer, x, x)
        nm_w_grad = numerical_gradient(layer, x, layer.W)
        nm_b_grad = numerical_gradient(layer, x, layer.b)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        print "Pass x_grad"
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        print "Pass w_grad"
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
        print "Pass b_grad"
Example #12
    def test_backward(self):
        out = self.layer.forward(self.x)
        y = fake_data((1, 3, 2, 2))
        x_grad = self.layer.backward(y)

        should_be = np.array([[[[0., 0., 0., 0.], [0., 0., 0., 1.],
                                [0., 0., 0., 0.], [0., 2., 0., 3.]],
                               [[0., 0., 0., 0.], [0., 4., 0., 5.],
                                [0., 0., 0., 0.], [0., 6., 0., 7.]],
                               [[0., 0., 0., 0.], [0., 8., 0., 9.],
                                [0., 0., 0., 0.], [0., 10., 0., 11.]]]])

        self.assertTrue(np.allclose(should_be, x_grad))
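MaxPoolLayer is likewise not shown. The shapes in these tests ((2, 3, 5, 5) pooled to (2, 3, 2, 2), (1, 3, 4, 4) to (1, 3, 2, 2)) and the scattered gradient above match 2x2 max pooling with stride 2, with the upstream gradient routed back to each window's maximum; a sketch under those assumptions:

import numpy as np

class MaxPoolLayer:
    # Assumed behaviour: 2x2 windows, stride 2, trailing rows/columns dropped.
    def forward(self, x):
        self.x = x
        n, c, H, W = x.shape
        out = np.zeros((n, c, H // 2, W // 2))
        for i in range(H // 2):
            for j in range(W // 2):
                out[:, :, i, j] = x[:, :, 2*i:2*i+2, 2*j:2*j+2].max(axis=(2, 3))
        self.out = out
        return out

    def backward(self, y_grad):
        # Route each upstream value to the argmax position of its window
        # (ties would receive the gradient more than once).
        x_grad = np.zeros_like(self.x)
        n, c, Ho, Wo = y_grad.shape
        for i in range(Ho):
            for j in range(Wo):
                window = self.x[:, :, 2*i:2*i+2, 2*j:2*j+2]
                mask = window == self.out[:, :, i, j][:, :, None, None]
                x_grad[:, :, 2*i:2*i+2, 2*j:2*j+2] += (
                    mask * y_grad[:, :, i, j][:, :, None, None])
        return x_grad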
Example #13
    def test_backward1(self):
        layer = ConvLayer(1, 1, 3)

        x = fake_data((1, 1, 8, 8))
        layer.W = fake_data((1, 1, 3, 3))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        # do numerical gradients
        nm_x_grad = numerical_gradient(layer, x, x)
        nm_w_grad = numerical_gradient(layer, x, layer.W)
        nm_b_grad = numerical_gradient(layer, x, layer.b)

        # note that this does not check the gradients of the padded elements

        self.assertTrue(np.allclose(nm_x_grad, x_grad))

        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))

        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
Example #14
    def setUp(self):
        self.layer = MaxPoolLayer()
        self.x = fake_data((1, 3, 4, 4))
Example #15
    def test_bigger(self):
        self.x = fake_data((2, 4, 8, 8))
        out = self.layer.forward(self.x)

        from max_pool_big import mp_result
        self.assertTrue(np.allclose(mp_result, out))