Example #1
    def test_backward3_5(self):
        layer = ConvLayer(5, 3, 3)

        x = fake_data((2, 5, 3, 3))
        layer.W = fake_data(layer.W.shape)
        layer.b = fake_data(layer.b.shape)
        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        # do numerical gradients
        nm_x_grad = numerical_gradient(layer, x, x)
        nm_w_grad = numerical_gradient(layer, x, layer.W)
        nm_b_grad = numerical_gradient(layer, x, layer.b)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
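
The helper numerical_gradient is not defined in these snippets. From its call sites it takes the layer, the forward input, the array to differentiate with respect to, and (in Example #4) an optional upstream gradient, and it returns an array of the same shape as that third argument. A minimal central-difference sketch consistent with those call sites; the in-place perturbation and the eps value are assumptions, not taken from the original code:

import numpy as np

def numerical_gradient(layer, x, wrt, y_grad=None, eps=1e-6):
    """Central-difference estimate of d sum(forward(x) * y_grad) / d wrt.

    `wrt` is either the input `x` itself or one of the layer's parameter
    arrays (layer.W, layer.b); it is perturbed in place and restored.
    When `y_grad` is None the upstream gradient is taken to be all ones,
    matching the np.ones_like(y) passed to the analytic backward call.
    """
    grad = np.zeros_like(wrt)
    it = np.nditer(wrt, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        saved = wrt[idx]

        wrt[idx] = saved + eps
        y_plus = layer.forward(x)
        wrt[idx] = saved - eps
        y_minus = layer.forward(x)
        wrt[idx] = saved                      # restore the original value

        diff = y_plus - y_minus
        if y_grad is not None:
            diff = diff * y_grad
        grad[idx] = diff.sum() / (2.0 * eps)
        it.iternext()
    return grad
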
Example #2
    def test_backward4(self):
        h = 5
        layer = ConvLayer(2, 5, h)

        x = fake_data((2, 2, 8, 8))
        layer.W = fake_data((5, 2, h, h))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        nm_x_grad = numerical_gradient(layer, x, x)
        nm_w_grad = numerical_gradient(layer, x, layer.W)
        nm_b_grad = numerical_gradient(layer, x, layer.b)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
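
fake_data is also not shown. The tests only require it to produce a deterministic array of the requested shape, so any reproducible filler will do; the exact values in this sketch are an assumption:

import numpy as np

def fake_data(shape):
    """Deterministic, non-trivial test tensor of the given shape."""
    n = int(np.prod(shape))
    # centred, scaled arange: reproducible, non-constant, mixed signs
    return ((np.arange(n, dtype=np.float64) - n / 2.0) * 0.1).reshape(shape)
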
Example #3
    def test_backward1(self):
        layer = ConvLayer(1, 1, 3)

        x = fake_data((1, 1, 8, 8))
        layer.W = fake_data((1, 1, 3, 3))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        # do numerical gradients
        nm_x_grad = numerical_gradient(layer, x, x)
        nm_w_grad = numerical_gradient(layer, x, layer.W)
        nm_b_grad = numerical_gradient(layer, x, layer.b)

        # note that this does not check the gradients of the padded elements

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
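
Example #3's note about padded elements, together with the W shape (out_channels, in_channels, kh, kw) visible in Examples #2 and #4, suggests a stride-1, zero-padded convolution whose constructor takes (in_channels, out_channels, kernel_size). The sketch below reproduces only the interface the tests touch (W, b, W_grad, b_grad, forward, backward); the 'same' padding, the bias shape (out_channels,) and the naive loop implementation are assumptions:

import numpy as np

class ConvLayer:
    """Naive stride-1 NCHW convolution with zero 'same' padding (assumed)."""

    def __init__(self, in_channels, out_channels, kernel_size):
        self.W = np.zeros((out_channels, in_channels, kernel_size, kernel_size))
        self.b = np.zeros(out_channels)

    def forward(self, x):
        n, _, hin, win = x.shape
        kout, _, kh, kw = self.W.shape
        p = kh // 2                                  # assumes an odd kernel size
        self.x = x                                   # cached for backward
        xp = np.pad(x, ((0, 0), (0, 0), (p, p), (p, p)))
        y = np.zeros((n, kout, hin, win))
        for i in range(hin):
            for j in range(win):
                patch = xp[:, :, i:i + kh, j:j + kw]            # (n, in, kh, kw)
                y[:, :, i, j] = np.tensordot(patch, self.W,
                                             axes=([1, 2, 3], [1, 2, 3])) + self.b
        return y

    def backward(self, y_grad):
        n, _, hin, win = self.x.shape
        kout, _, kh, kw = self.W.shape
        p = kh // 2
        xp = np.pad(self.x, ((0, 0), (0, 0), (p, p), (p, p)))
        xp_grad = np.zeros_like(xp)
        self.W_grad = np.zeros_like(self.W)
        self.b_grad = y_grad.sum(axis=(0, 2, 3))
        for i in range(hin):
            for j in range(win):
                g = y_grad[:, :, i, j]                          # (n, out)
                patch = xp[:, :, i:i + kh, j:j + kw]
                self.W_grad += np.tensordot(g, patch, axes=([0], [0]))
                xp_grad[:, :, i:i + kh, j:j + kw] += np.tensordot(g, self.W,
                                                                  axes=([1], [0]))
        # gradients of the zero-padded border are dropped, which matches the
        # note in Example #3 about not checking the padded elements
        return xp_grad[:, :, p:p + hin, p:p + win]
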
Example #4
    def test_backward5(self):
        h = 5
        layer = ConvLayer(2, 5, h)

        x = fake_data((2, 2, 8, 8))
        layer.W = fake_data((5, 2, h, h))
        layer.b = fake_data(layer.b.shape)
        y = layer.forward(x)
        y_grad = fake_data(y.shape)
        x_grad = layer.backward(y_grad)

        nm_x_grad = numerical_gradient(layer, x, x, y_grad)
        nm_w_grad = numerical_gradient(layer, x, layer.W, y_grad)
        nm_b_grad = numerical_gradient(layer, x, layer.b, y_grad)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
Example #5
    def test_backward2(self):
        layer = ConvLayer(2, 1, 3)

        x = fake_data((1, 2, 4, 4))
        layer.W = fake_data((1, 2, 3, 3))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        # do numerical gradients
        nm_x_grad = numerical_gradient(layer, x, x)
        nm_w_grad = numerical_gradient(layer, x, layer.W)
        nm_b_grad = numerical_gradient(layer, x, layer.b)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
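
Finally, the methods above are fragments of a larger unittest.TestCase. A hypothetical harness (the class name, the smoke test and the __main__ block are all assumptions) that would let them run against the sketched helpers, e.g. by pasting the test_backward* methods into the class body:

import unittest
import numpy as np

# assumes ConvLayer, fake_data and numerical_gradient from the sketches
# above are defined in (or imported into) this module

class ConvLayerBackwardTest(unittest.TestCase):
    # paste the test_backward* methods from the examples above into this class

    def test_backward_smoke(self):
        # minimal check following the same pattern as the examples
        layer = ConvLayer(1, 1, 3)
        x = fake_data((1, 1, 4, 4))
        layer.W = fake_data(layer.W.shape)
        layer.b = fake_data(layer.b.shape)
        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))
        self.assertTrue(np.allclose(numerical_gradient(layer, x, x), x_grad))


if __name__ == "__main__":
    unittest.main()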