Example 1
    def numerical_gradient(self, x, t):
        loss_W = lambda W: self.loss(x, t)  # W is unused: loss() reads self.params, which is perturbed in place
        grads = {}
        grads['W1'] = numerical_gradient(loss_W, self.params['W1'])
        grads['b1'] = numerical_gradient(loss_W, self.params['b1'])
        grads['W2'] = numerical_gradient(loss_W, self.params['W2'])
        grads['b2'] = numerical_gradient(loss_W, self.params['b2'])
        return grads
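Example 1 appears to be the two-layer network from "Deep Learning from Scratch"; the calls inside the method resolve to a module-level numerical_gradient(f, x) function of the same name, which is not shown. A minimal central-difference sketch of that helper, consistent with how it is called above (an assumption, not necessarily the book's exact code):

import numpy as np

def numerical_gradient(f, x, h=1e-4):
    # df/dx_i ~= (f(x + h*e_i) - f(x - h*e_i)) / (2h), perturbing
    # x in place one element at a time and restoring it afterwards
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        tmp = x[idx]
        x[idx] = tmp + h
        fxh1 = f(x)
        x[idx] = tmp - h
        fxh2 = f(x)
        grad[idx] = (fxh1 - fxh2) / (2 * h)
        x[idx] = tmp  # restore
        it.iternext()
    return grad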
Example 2
    def test_backward2(self):
        nm_x_grad = numerical_gradient(self.layer, self.x, self.x)

        self.layer.forward(self.x)
        y = np.ones((1, 3, 2, 2))
        x_grad = self.layer.backward(y)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
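The unit tests in this and the following examples call a numerical_gradient helper with a layer object rather than a plain function. A plausible sketch matching those call sites (the optional y_grad weighting appears in test_backward5 further down; all names and defaults here are assumptions):

import numpy as np

def numerical_gradient(layer, x, param, y_grad=None, eps=1e-6):
    # Central-difference gradient of sum(layer.forward(x) * y_grad)
    # with respect to `param`, which may be x itself or a layer
    # parameter such as layer.W; it is perturbed in place.
    grad = np.zeros_like(param)
    it = np.nditer(param, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        old = param[idx]
        param[idx] = old + eps
        y_pos = layer.forward(x)
        param[idx] = old - eps
        y_neg = layer.forward(x)
        param[idx] = old  # restore
        diff = y_pos - y_neg
        if y_grad is not None:
            diff = diff * y_grad
        grad[idx] = diff.sum() / (2 * eps)
        it.iternext()
    return grad

With y_grad omitted this is equivalent to weighting by ones, which matches the backward(np.ones_like(y)) calls in the tests.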
Example 3
    def test_backward3(self):
        self.x = fake_data((2, 3, 5, 5))
        nm_x_grad = numerical_gradient(self.layer, self.x, self.x)

        self.layer.forward(self.x)
        y = np.ones((2, 3, 2, 2))
        x_grad = self.layer.backward(y)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
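fake_data is another helper these tests assume but do not define. Any deterministic, non-constant filler works for gradient checking; a hypothetical stand-in:

import numpy as np

def fake_data(shape):
    # Hypothetical: reproducible values for gradient checks; the
    # real helper may use a different fill pattern or scaling.
    return np.arange(np.prod(shape), dtype='float64').reshape(shape)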
Example 4
    def test_backward4(self):
        h = 5
        layer = ConvLayer(2, 5, h)

        x = fake_data((2, 2, 8, 8))
        layer.W = fake_data((5, 2, h, h))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        nm_x_grad = numerical_gradient(layer, x, x)
        nm_w_grad = numerical_gradient(layer, x, layer.W)
        nm_b_grad = numerical_gradient(layer, x, layer.b)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
Example 5
    def test_backward3_5(self):
        layer = ConvLayer(5, 3, 3)

        x = fake_data((2, 5, 3, 3))
        layer.W = fake_data(layer.W.shape)
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        # do numerical gradients
        nm_x_grad = numerical_gradient(layer, x, x)
        nm_w_grad = numerical_gradient(layer, x, layer.W)
        nm_b_grad = numerical_gradient(layer, x, layer.b)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
Example 6
    def test_backward5(self):
        h = 5
        layer = ConvLayer(2, 5, h)

        x = fake_data((2, 2, 8, 8))
        layer.W = fake_data((5, 2, h, h))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        y_grad = fake_data(y.shape)
        x_grad = layer.backward(y_grad)

        nm_x_grad = numerical_gradient(layer, x, x, y_grad)
        nm_w_grad = numerical_gradient(layer, x, layer.W, y_grad)
        nm_b_grad = numerical_gradient(layer, x, layer.b, y_grad)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
Example 7
    def test_backward4(self):
        x = np.array([[[[0, 0, 0, 0], [0, 0, 1, 1], [2, 0, 0, 3],
                        [2, 0, 3, 0]]]]).astype('float64')

        nm_x_grad = numerical_gradient(self.layer, x, x)

        out = self.layer.forward(x)
        y = np.ones((1, 1, 2, 2))
        x_grad = self.layer.backward(y)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
Example 8
    def test_backward1(self):
        layer = FlattenLayer()

        x = fake_data((1, 2, 3, 3))
        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        # do numerical gradients
        nm_x_grad = numerical_gradient(layer, x, x)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
Example 9
    def test_backward2(self):
        layer = ConvLayer(2, 1, 3)

        x = fake_data((1, 2, 4, 4))
        layer.W = fake_data((1, 2, 3, 3))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        # do numerical gradients
        nm_x_grad = numerical_gradient(layer, x, x)
        nm_w_grad = numerical_gradient(layer, x, layer.W)
        nm_b_grad = numerical_gradient(layer, x, layer.b)

        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        print("Pass x_grad")
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        print("Pass w_grad")
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
        print("Pass b_grad")
Example 10
    def test_backward1(self):
        layer = ConvLayer(1, 1, 3)

        x = fake_data((1, 1, 8, 8))
        layer.W = fake_data((1, 1, 3, 3))
        layer.b = fake_data(layer.b.shape)

        y = layer.forward(x)
        x_grad = layer.backward(np.ones_like(y))

        # do numerical gradients
        nm_x_grad = numerical_gradient(layer, x, x)
        nm_w_grad = numerical_gradient(layer, x, layer.W)
        nm_b_grad = numerical_gradient(layer, x, layer.b)

        # note that this does not check the gradients of the padded elements
        self.assertTrue(np.allclose(nm_x_grad, x_grad))
        self.assertTrue(np.allclose(nm_w_grad, layer.W_grad))
        self.assertTrue(np.allclose(nm_b_grad, layer.b_grad))
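The ConvLayer class under test is not shown, but the shapes above imply a constructor ConvLayer(in_channels, out_channels, kernel_size) with attributes W, b, W_grad and b_grad, and the note about padded elements suggests zero "same" padding. A naive stride-1 sketch consistent with those tests (an assumption, not the original implementation; the bias shape in particular is a guess):

import numpy as np

class ConvLayer:
    def __init__(self, in_channels, out_channels, kernel_size):
        k = kernel_size
        self.W = np.random.randn(out_channels, in_channels, k, k) * 0.01
        self.b = np.zeros(out_channels)  # shape assumed
        self.W_grad = np.zeros_like(self.W)
        self.b_grad = np.zeros_like(self.b)

    def forward(self, x):
        n, _, hgt, wid = x.shape
        f, _, k, _ = self.W.shape
        p = k // 2
        self.x_padded = np.pad(x, ((0, 0), (0, 0), (p, p), (p, p)))
        y = np.empty((n, f, hgt, wid))
        for i in range(hgt):
            for j in range(wid):
                window = self.x_padded[:, :, i:i + k, j:j + k]  # (n, c, k, k)
                # contract over (c, k, k); result has shape (n, f)
                y[:, :, i, j] = np.tensordot(
                    window, self.W, axes=([1, 2, 3], [1, 2, 3])) + self.b
        return y

    def backward(self, y_grad):
        f, _, k, _ = self.W.shape
        p = k // 2
        hgt, wid = y_grad.shape[2], y_grad.shape[3]
        self.W_grad = np.zeros_like(self.W)
        self.b_grad = y_grad.sum(axis=(0, 2, 3))
        x_padded_grad = np.zeros_like(self.x_padded)
        for i in range(hgt):
            for j in range(wid):
                g = y_grad[:, :, i, j]  # (n, f)
                window = self.x_padded[:, :, i:i + k, j:j + k]
                self.W_grad += np.tensordot(g, window, axes=([0], [0]))
                x_padded_grad[:, :, i:i + k, j:j + k] += np.tensordot(
                    g, self.W, axes=([1], [0]))
        # drop the gradient of the zero padding, cf. the note in
        # test_backward1 above
        return x_padded_grad[:, :, p:p + hgt, p:p + wid]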
Example 11
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm

b = np.zeros((1, 1, 1, 1))
params = {'stride': 2, 'pad': 2}
res = conv_forward(im, filter, b, params)  # `im` and `filter` are defined earlier in the original script
res = res.reshape(res.shape[0], res.shape[1])
plt.subplot(1, 2, 2)
plt.imshow(res.astype('uint8'), cmap=cm.Greys_r)
plt.show()

# test conv_backward
x = np.random.randn(5, 5, 3, 4)
w = np.random.randn(3, 3, 3, 2)
b = np.random.randn(1, 1, 1, 2)
dout = np.random.randn(5, 5, 2, 4)
conv_param = {'stride': 1, 'pad': 1}

dx_num = numerical_gradient(lambda x: conv_forward(x, w, b, conv_param), x, dout)
dw_num = numerical_gradient(lambda w: conv_forward(x, w, b, conv_param), w, dout)
db_num = numerical_gradient(lambda b: conv_forward(x, w, b, conv_param), b, dout)

out = conv_forward(x, w, b, conv_param)
dx, dw, db = conv_backward(x, w, b, conv_param, dout)

# Your errors should be around 1e-9
print('Testing conv_backward function')
print('dx error: ', rel_error(dx, dx_num))
print('dw error: ', rel_error(dw, dw_num))
print('db error: ', rel_error(db, db_num))
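This last snippet follows the cs231n conventions: numerical_gradient here takes a function and an upstream gradient dout rather than a layer, and rel_error is the usual maximum relative error. Plausible definitions (assumptions; the originals may differ slightly):

import numpy as np

def numerical_gradient(f, x, dout, h=1e-5):
    # Central-difference gradient of sum(f(x) * dout) w.r.t. x.
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        old = x[idx]
        x[idx] = old + h
        pos = f(x)
        x[idx] = old - h
        neg = f(x)
        x[idx] = old  # restore
        grad[idx] = np.sum((pos - neg) * dout) / (2 * h)
        it.iternext()
    return grad

def rel_error(x, y):
    # maximum elementwise relative error between two arrays
    return np.max(np.abs(x - y) / np.maximum(1e-8, np.abs(x) + np.abs(y)))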

x_shape = (2, 3, 4, 4)
x = np.linspace(-0.3, 0.4, num=np.prod(x_shape)).reshape(x_shape)
pool_param = {'HF': 2, 'WF': 2, 'stride': 2}