Example #1
0
class TestRelu(unittest.TestCase):
    """Unit tests for the Relu layer's forward and backward passes."""

    def setUp(self):
        # Fresh activation object per test so no forward-pass state leaks
        # between test methods.
        self.relu = Relu()

    def test_forward(self):
        inputs = np.array([[1.0, -0.5], [-2.0, 3.0]])
        # Negative entries are clamped to zero, positives pass through.
        expected = np.array([[1., 0.], [0., 3.]])
        assert_array_equal(expected, self.relu.forward(inputs))

    def test_backward(self):
        inputs = np.array([[1.0, -0.5], [-2.0, 3.0]])
        # Feed the forward output back in as the upstream gradient; only
        # positions that were positive on the forward pass propagate.
        upstream = self.relu.forward(inputs)
        expected = np.array([[1., 0.], [0., 3.]])
        assert_array_equal(expected, self.relu.backward(upstream))
Example #2
0
    # Training loop: one pass over `images` in minibatches, running the
    # forward/backward chain and applying SGD updates to the parameterized
    # layers (conv1, conv2, fc) after every batch.
    # Floor division (//) replaces int(a / b): identical for these
    # non-negative ints, but avoids float-precision truncation quirks.
    for i in range(images.shape[0] // batch_size):
        # -- forward pass --------------------------------------------------
        # NOTE(review): reshape assumes 28x28 single-channel (MNIST-style)
        # inputs — confirm against the data loader.
        img = images[i * batch_size:(i + 1) * batch_size].reshape(
            [batch_size, 28, 28, 1])
        label = labels[i * batch_size:(i + 1) * batch_size]
        conv1_out = conv1.forward(img)
        relu1_out = relu1.forward(conv1_out)
        pool1_out = pool1.forward(relu1_out)
        conv2_out = conv2.forward(pool1_out)
        relu2_out = relu2.forward(conv2_out)
        pool2_out = pool2.forward(relu2_out)
        fc_out = fc.forward(pool2_out)

        # sf.backward() below takes no arguments, so cal_loss presumably
        # caches the state it needs — verify against the softmax-loss class.
        print("loss: %10.3f" % sf.cal_loss(fc_out, np.array(label)))

        # -- backward pass (layers in reverse order) -----------------------
        sf.backward()
        fc_back = fc.backward(sf.eta)
        pool2_back = pool2.backward(fc_back)
        relu2_back = relu2.backward(pool2_back)
        conv2_back = conv2.backward(relu2_back)
        pool1_back = pool1.backward(conv2_back)
        relu1_back = relu1.backward(pool1_back)
        conv1_back = conv1.backward(relu1_back)

        # -- parameter update ----------------------------------------------
        # `i % 1 == 0` is always true; kept (behavior unchanged) as a
        # placeholder update interval — raise the modulus to step the
        # optimizer only every N batches.
        if i % 1 == 0:
            fc.gradient(alpha=learning_rate, weight_decay=0.0004)
            conv2.gradient(alpha=learning_rate, weight_decay=0.0004)
            conv1.gradient(alpha=learning_rate, weight_decay=0.0004)