Example 1
    def test_one_layer_softmax_relu(self):
        model = SoftmaxRegression()
        expected_loss = 2.3029
        # Reference gradient precomputed and stored as a fixture.
        expected_grad = np.load(
            'tests/softmax_grad_check/softmax_relu_grad.npy')
        loss, _ = model.forward(self.test_batch, self.test_label, mode='train')
        w_grad = model.gradients['W1']
        self.assertAlmostEqual(expected_loss, loss, places=5)
        diff = np.sum(np.abs(expected_grad - w_grad))
        self.assertAlmostEqual(diff, 0)
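The expected loss of 2.3029 is close to ln(10) ≈ 2.3026, the loss an untrained 10-class classifier produces on near-uniform predictions. For context, here is a minimal sketch of the forward/backward pass this test exercises; the X @ W1 → ReLU → softmax layout and the (784, 10) weight shape are inferred from the test names and fixtures, not confirmed by the assignment:

import numpy as np

def forward_sketch(model, X, y):
    # Hypothetical one-layer pass: scores, ReLU, softmax over 10 classes.
    z = X.dot(model.weights['W1'])            # (N, 10)
    a = model.ReLU(z)
    p = model.softmax(a)
    loss = model.cross_entropy_loss(p, y)

    # Softmax + cross-entropy backprop gives (p - one_hot(y)) / N,
    # gated by the ReLU derivative before reaching W1.
    N = X.shape[0]
    dz = p.copy()
    dz[np.arange(N), y] -= 1.0
    dz = dz / N * model.ReLU_dev(z)
    model.gradients['W1'] = X.T.dot(dz)       # (784, 10)
    return loss, model.compute_accuracy(p, y)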
Example 2
    def test_sgd(self):
        optimizer = SGD(learning_rate=1e-3, reg=1e-3)
        model = SoftmaxRegression()
        np.random.seed(256)
        fake_gradients = np.random.randn(784, 10)
        model.gradients['W1'] = fake_gradients
        optimizer.update(model)
        expected_weights = np.load('tests/sgd/sgd_updated_weights.npy')
        diff = np.abs(expected_weights - model.weights['W1'])
        diff = np.sum(diff)

        self.assertAlmostEqual(diff, 0)
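The expected weights come from a stored fixture, so the update rule itself is not visible in the test; here is a minimal sketch of a vanilla SGD.update consistent with it, with attribute names assumed from the constructor call:

class SGD:
    def __init__(self, learning_rate=1e-4, reg=1e-3):
        self.learning_rate = learning_rate
        self.reg = reg

    def update(self, model):
        # Vanilla gradient step: w <- w - lr * dw for every parameter.
        for name in model.weights:
            model.weights[name] -= self.learning_rate * model.gradients[name]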
Example 3
class TestActivation(unittest.TestCase):
    """ The class containing all test cases for this assignment"""
    def setUp(self):
        """Define the functions to be tested here."""
        self.model = SoftmaxRegression()

    def test_sigmoid(self):
        x = np.array([[-1.48839468, -0.31530738], [-0.28271176, -1.00780433],
                      [0.66435418, 1.2537461], [-1.64829182, 0.90223236]])
        y = np.array([[0.1841628, 0.4218198], [0.42978908, 0.26740977],
                      [0.66023782, 0.77794766], [0.16133995, 0.71140804]])
        outs = self.model.sigmoid(x)
        diff = np.sum(np.abs(outs - y))
        self.assertAlmostEqual(diff, 0, places=7)

    def test_sigmoid_dev(self):
        x = np.array([[-1.48839468, -0.31530738], [-0.28271176, -1.00780433],
                      [0.66435418, 1.2537461], [-1.64829182, 0.90223236]])
        y = np.array([[0.15024686, 0.24388786], [0.24507043, 0.19590178],
                      [0.22432384, 0.1727451], [0.13530937, 0.20530664]])

        outs = self.model.sigmoid_dev(x)
        diff = np.sum(np.abs(outs - y))
        self.assertAlmostEqual(diff, 0, places=7)

    def test_relu(self):
        x = np.array([[-1.48839468, -0.31530738], [-0.28271176, -1.00780433],
                      [0.66435418, 1.2537461], [-1.64829182, 0.90223236]])
        y = np.array([[0.0, 0.0], [0.0, 0.0], [0.66435418, 1.2537461],
                      [0.0, 0.90223236]])
        out = self.model.ReLU(x)
        diff = np.sum(np.abs(y - out))
        self.assertAlmostEqual(diff, 0, places=7)

    def test_relu_dev(self):
        x = np.array([[-1.48839468, -0.31530738], [-0.28271176, -1.00780433],
                      [0.66435418, 1.2537461], [-1.64829182, 0.90223236]])
        y = np.array([[0.0, 0.0], [0.0, 0.0], [1., 1.], [0.0, 1.]])
        out = self.model.ReLU_dev(x)
        diff = np.sum(np.abs(y - out))
        self.assertAlmostEqual(diff, 0, places=7)

    def test_softmax(self):
        x = np.array([[-1.48839468, -0.31530738], [-0.28271176, -1.00780433],
                      [0.66435418, 1.2537461], [-1.64829182, 0.90223236]])
        y = np.array([[0.23629739, 0.76370261], [0.67372745, 0.32627255],
                      [0.35677439, 0.64322561], [0.07239128, 0.92760872]])

        out = self.model.softmax(x)

        diff = np.sum(np.abs(y - out))
        self.assertAlmostEqual(diff, 0, places=7)
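All four activation tests compare elementwise against hard-coded references, so numerically stable implementations along these lines reproduce the expected values (spot-checked against the vectors above); this is a sketch, not the assignment's reference code:

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def sigmoid_dev(x):
    # Derivative expressed through sigmoid itself: s * (1 - s).
    s = sigmoid(x)
    return s * (1.0 - s)

def ReLU(x):
    return np.maximum(0.0, x)

def ReLU_dev(x):
    # Subgradient convention: derivative taken as 0 at x == 0.
    return (x > 0).astype(x.dtype)

def softmax(x):
    # Subtract the row max for numerical stability; each row sums to 1.
    e = np.exp(x - x.max(axis=1, keepdims=True))
    return e / e.sum(axis=1, keepdims=True)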
Example 4
class TestActivation(unittest.TestCase):
    """ The class containing all test cases for this assignment"""
    def setUp(self):
        """Define the functions to be tested here."""
        self.model = SoftmaxRegression()

    def test_ce_loss(self):
        x = np.array([[0.2, 0.5, 0.3], [0.5, 0.1, 0.4], [0.3, 0.3, 0.4]])
        y = np.array([1, 2, 0])
        expected_loss = 0.937803
        loss = self.model.cross_entropy_loss(x, y)
        self.assertAlmostEqual(loss, expected_loss, places=5)

    def test_accuracy(self):
        x = np.array([[0.2, 0.5, 0.3], [0.5, 0.1, 0.4], [0.3, 0.3, 0.4]])
        y = np.array([1, 2, 0])
        expected_acc = 0.3333
        acc = self.model.compute_accuracy(x, y)
        self.assertAlmostEqual(acc, expected_acc, places=4)
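The rows of x are already probabilities, so cross_entropy_loss plausibly averages the negative log-probability of the true class: -(ln 0.5 + ln 0.4 + ln 0.3) / 3 ≈ 0.937803, matching the expected loss. Likewise compute_accuracy matches the argmax of each row against the label (1 of 3 rows correct, hence 0.3333). A sketch under those assumptions:

import numpy as np

def cross_entropy_loss(probs, labels):
    # Mean negative log-likelihood of the true class in each row.
    n = labels.shape[0]
    return -np.mean(np.log(probs[np.arange(n), labels]))

def compute_accuracy(probs, labels):
    # Fraction of rows whose argmax matches the label.
    return np.mean(np.argmax(probs, axis=1) == labels)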
Example 5
    def test_regularization(self):
        optimizer = SGD(learning_rate=1e-4, reg=100)
        model = SoftmaxRegression()
        w_grad = model.gradients['W1'].copy()
        optimizer.apply_regularization(model)
        w_grad_reg = model.gradients['W1']
        reg_diff = w_grad_reg - w_grad
        expected_diff = model.weights['W1'] * optimizer.reg

        diff = np.mean(np.abs(reg_diff - expected_diff))
        self.assertAlmostEqual(diff, 0, places=7)
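The assertion pins down the contract: apply_regularization must add exactly reg * W1 to the stored gradient, which is the gradient of an L2 penalty of the form (reg / 2) * ||W||^2. A sketch matching that contract, written as a free function for self-containedness (in the real code it would be a method on the SGD sketch under Example 2):

def apply_regularization(optimizer, model):
    # d/dW of (reg / 2) * ||W||^2 is reg * W, added to each gradient in place.
    for name, w in model.weights.items():
        model.gradients[name] += optimizer.reg * w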
Example 6
    def test_one_layer_train(self):
        model = SoftmaxRegression()
        optimizer = SGD(learning_rate=0.1, reg=1e-3)
        train_data, train_label, _, _ = load_mnist_trainval()
        test_data, test_label = load_mnist_test()

        batched_train_data, batched_train_label = generate_batched_data(
            train_data, train_label, batch_size=128, shuffle=True)
        _, train_acc = train(1,
                             batched_train_data,
                             batched_train_label,
                             model,
                             optimizer,
                             debug=False)

        batched_test_data, batched_test_label = generate_batched_data(
            test_data, test_label, batch_size=128)
        _, test_acc = evaluate(batched_test_data,
                               batched_test_label,
                               model,
                               debug=False)

        self.assertGreater(train_acc, 0.3)
        self.assertGreater(test_acc, 0.3)
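generate_batched_data is only invoked here, not shown; a plausible sketch, assuming it returns parallel lists of data and label batches (the last batch possibly smaller) with an optional shuffle:

import numpy as np

def generate_batched_data(data, labels, batch_size=128, shuffle=False):
    data, labels = np.asarray(data), np.asarray(labels)
    if shuffle:
        # Permute data and labels with the same random index order.
        idx = np.random.permutation(len(data))
        data, labels = data[idx], labels[idx]
    batched_data = [data[i:i + batch_size]
                    for i in range(0, len(data), batch_size)]
    batched_labels = [labels[i:i + batch_size]
                      for i in range(0, len(labels), batch_size)]
    return batched_data, batched_labels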
Example 7
    def setUp(self):
        """Define the functions to be tested here."""
        self.model = SoftmaxRegression()