Example 1
    def test_neural_tree_network_append(self):
        net = NeuralTreeNet(3, empty=False)
        # must raise: the node expects two inputs, only one index is given
        self.assertRaise(lambda: net.append(
            NeuralTreeNode(2, activation='identity'), inputs=[3]))
        net.append(NeuralTreeNode(1, activation='identity'), inputs=[3])
        self.assertEqual(net.size_, 5)
        last_node = net.nodes[-1]
        X = numpy.random.randn(2, 3)
        got = net.predict(X)
        exp = X.sum(axis=1) * last_node.input_weights[0] + last_node.bias
        # array comparison needs the array-aware helper, not assertEqual
        self.assertEqualArray(exp.reshape((-1, 1)), got[:, -1:])
        rep = repr(net)
        self.assertEqual(rep, 'NeuralTreeNet(3)')
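Why the expected value holds: with empty=False the initial node has identity activation, zero bias and unit weights (Example 5 asserts exactly that), so its output is the row sum of X; the appended node then applies one weight and a bias on top. A minimal numpy sketch of the same computation, with hypothetical stand-ins for the appended node's parameters:

    import numpy
    X = numpy.random.randn(2, 3)
    w, b = 0.3, -0.1                  # hypothetical stand-ins for last_node
    hidden = X.sum(axis=1)            # identity node, unit weights, zero bias
    exp = hidden * w + b              # the last column of net.predict(X)
    print(exp.shape)                  # (2,)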
Example 2
    def test_neural_tree_node(self):
        self.assertRaise(lambda: NeuralTreeNode([0, 1], 0.5, 'identity2'))
        neu = NeuralTreeNode([0, 1], 0.5, 'identity')
        res = neu.predict(numpy.array([4, 5]))
        self.assertEqual(res, 5.5)
        st = repr(neu)
        self.assertEqual("NeuralTreeNode(weights=array([0., 1.]), "
                         "bias=0.5, activation='identity')", st)
        st = io.BytesIO()
        pickle.dump(neu, st)
        st = io.BytesIO(st.getvalue())
        neu2 = pickle.load(st)
        self.assertTrue(neu == neu2)
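The asserted prediction is just the affine map behind the identity activation, checkable in plain numpy:

    import numpy
    w, b = numpy.array([0., 1.]), 0.5
    x = numpy.array([4., 5.])
    print(w @ x + b)                  # 5.5, the value asserted above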
Example 3
    def test_neural_tree_network_copy(self):
        net = NeuralTreeNet(3, empty=False)
        net.append(NeuralTreeNode(1, activation='identity'),
                   inputs=[3])
        net2 = net.copy()
        X = numpy.random.randn(2, 3)
        self.assertEqualArray(net.predict(X), net2.predict(X))
Example 4
    def test_shape_dim2(self):
        X = numpy.random.randn(10, 3)
        w = numpy.array([[10, 20, 3], [-10, -20, 0.5]])
        for act in ['sigmoid', 'sigmoid4', 'expit', 'identity',
                    'relu', 'leakyrelu']:
            with self.subTest(act=act):
                neu = NeuralTreeNode(w, bias=[-4, 0.5], activation=act)
                pred = neu.predict(X)
                self.assertEqual(pred.shape, (X.shape[0], 2))
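The (10, 2) shape is ordinary matrix algebra: a weight matrix with two rows produces two output units per sample before any activation is applied. A plain numpy sketch of the pre-activation step:

    import numpy
    X = numpy.random.randn(10, 3)
    w = numpy.array([[10, 20, 3], [-10, -20, 0.5]])
    b = numpy.array([-4, 0.5])
    z = X @ w.T + b                   # shape (10, 2), one column per unit
    print(z.shape)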
Example 5
    def test_neural_tree_network_training_weights(self):
        net = NeuralTreeNet(3, empty=False)
        net.append(NeuralTreeNode(1, activation='identity'), inputs=[3])
        w = net.training_weights
        self.assertEqual(w.shape, (6, ))
        self.assertEqual(w[0], 0)
        self.assertEqualArray(w[1:4], [1, 1, 1])
        delta = numpy.arange(6) - 0.5
        net.update_training_weights(delta)
        w2 = net.training_weights
        self.assertEqualArray(w2, w + delta)
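The assertions pin down the layout of the flat parameter vector: each node contributes its bias followed by its weights, so the first node yields [0, 1, 1, 1] and the appended one-input node adds two more entries, six in total. A sketch of that packing, with hypothetical values for the second node:

    import numpy
    bias2, w21 = 0.2, -0.7            # hypothetical appended-node parameters
    w = numpy.concatenate([[0., 1., 1., 1.], [bias2, w21]])
    print(w.shape)                    # (6,), matching the assertion above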
Example 6
    def test_neural_net_gradient_regression_2(self):
        X = numpy.abs(numpy.random.randn(10, 2))
        w1 = numpy.array([-0.5, 0.8, -0.6])
        noise = numpy.random.randn(X.shape[0]) / 10
        noise[0] = 0
        noise[1] = 0.07
        X[1, 0] = 0.7
        X[1, 1] = -0.5
        y = w1[0] + X[:, 0] * w1[1] + X[:, 1] * w1[2] + noise

        for act in [
                'relu', 'sigmoid', 'identity', 'leakyrelu', 'sigmoid4', 'expit'
        ]:
            with self.subTest(act=act):
                neu = NeuralTreeNode(w1[1:], bias=w1[0], activation=act)
                loss1 = neu.loss(X, y)
                pred1 = neu.predict(X)
                if act == 'relu':
                    self.assertEqualArray(pred1[1:2], numpy.array([0.36]))
                    pred11 = neu.predict(X)
                    self.assertEqualArray(pred11[1:2], numpy.array([0.36]))

                net = NeuralTreeNet(X.shape[1], empty=True)
                net.append(neu, numpy.arange(0, 2))
                ide = NeuralTreeNode(numpy.array([1], dtype=X.dtype),
                                     bias=numpy.array([0], dtype=X.dtype),
                                     activation='identity')
                net.append(ide, numpy.arange(2, 3))
                pred2 = net.predict(X)
                loss2 = net.loss(X, y)

                self.assertEqualArray(pred1, pred2[:, -1])
                self.assertEqualArray(pred2[:, -2], pred2[:, -1])
                self.assertEqualArray(pred2[:, 2], pred2[:, 3])
                self.assertEqualArray(loss1, loss2)

                for p in range(0, 5):
                    grad1 = neu.gradient(X[p], y[p])
                    grad2 = net.gradient(X[p], y[p])
                    self.assertEqualArray(grad1, grad2[:3])
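The hard-coded 0.36 in the relu branch can be verified by hand: the second sample is forced to [0.7, -0.5], so the pre-activation is -0.5 + 0.7*0.8 + (-0.5)*(-0.6) = 0.36, which relu leaves unchanged:

    import numpy
    w1 = numpy.array([-0.5, 0.8, -0.6])
    x1 = numpy.array([0.7, -0.5])
    z = w1[0] + x1 @ w1[1:]           # -0.5 + 0.56 + 0.30 = 0.36
    print(max(z, 0.0))                # relu of a positive value is the value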
Example 7
    def test_optim_regression(self):
        state = numpy.random.RandomState(seed=0)  # pylint: disable=E1101
        X = numpy.abs(state.randn(10, 2))
        w0 = state.randn(3)
        w1 = numpy.array([-0.5, 0.8, -0.6])
        noise = state.randn(X.shape[0]) / 10
        noise[0] = 0
        noise[1] = 0.07
        X[1, 0] = 0.7
        X[1, 1] = -0.5
        y = w1[0] + X[:, 0] * w1[1] + X[:, 1] * w1[2] + noise

        for act in [
                'identity', 'relu', 'leakyrelu', 'sigmoid', 'sigmoid4', 'expit'
        ]:
            with self.subTest(act=act):
                neu = NeuralTreeNode(w1[1:], bias=w1[0], activation=act)
                loss = neu.loss(X, y).sum() / X.shape[0]
                if act == 'identity':
                    self.assertGreater(loss, 0)
                    self.assertLess(loss, 0.1)
                grad = neu.gradient(X[0], y[0])
                if act == 'identity':
                    self.assertEqualArray(grad, numpy.zeros(grad.shape))
                grad = neu.gradient(X[1], y[1])
                ming, maxg = grad[:2].min(), grad[:2].max()
                if ming == maxg:
                    raise AssertionError(
                        "Gradient is wrong\nloss={}\ngrad={}".format(
                            loss, grad))
                self.assertEqual(grad.shape, w0.shape)

                neu.fit(X, y, verbose=False)
                c1 = neu.training_weights
                neu = NeuralTreeNode(w0[1:], bias=w0[0], activation=act)
                neu.fit(X, y, verbose=False, lr_schedule='constant')
                c2 = neu.training_weights
                diff = numpy.abs(c2 - c1)
                if act == 'identity':
                    self.assertLess(diff.max(), 0.16)
Example 8
    def test_gradients(self):
        X = numpy.array([0.1, 0.2, -0.3])
        w = numpy.array([10, 20, 3])
        g = numpy.array([-0.7], dtype=numpy.float64)
        for act in [
                'sigmoid', 'sigmoid4', 'expit', 'identity', 'relu', 'leakyrelu'
        ]:
            with self.subTest(act=act):
                neu = NeuralTreeNode(w, bias=-4, activation=act)
                pred = neu.predict(X)
                self.assertEqual(pred.shape, tuple())
                grad = neu.gradient_backward(g, X)
                self.assertEqual(grad.shape, (4, ))
                grad = neu.gradient_backward(g, X, inputs=True)
                self.assertEqual(grad.shape, (3, ))
                ww = neu.training_weights
                neu.update_training_weights(-ww)
                w0 = neu.training_weights
                self.assertEqualArray(w0, numpy.zeros(w0.shape))

        X = numpy.array([0.1, 0.2, -0.3])
        w = numpy.array([[10, 20, 3], [-10, -20, 3]])
        b = numpy.array([-3, 4], dtype=numpy.float64)
        g = numpy.array([-0.7, 0.2], dtype=numpy.float64)
        for act in ['softmax', 'softmax4']:
            with self.subTest(act=act):
                neu = NeuralTreeNode(w, bias=b, activation=act)
                pred = neu.predict(X)
                self.assertAlmostEqual(numpy.sum(pred), 1.)
                self.assertEqual(pred.shape, (2, ))
                grad = neu.gradient_backward(g, X)
                self.assertEqual(grad.shape, (2, 4))
                grad = neu.gradient_backward(g, X, inputs=True)
                self.assertEqual(grad.shape, (3, ))
                ww = neu.training_weights
                neu.update_training_weights(-ww)
                w0 = neu.training_weights
                self.assertEqualArray(w0, numpy.zeros(w0.shape))
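A generic way to cross-check the analytic gradients exercised above is central finite differences. A self-contained sketch; loss_fn is a hypothetical placeholder for a scalar loss viewed as a function of the flat parameter vector:

    import numpy

    def numeric_gradient(loss_fn, w, eps=1e-6):
        # central finite differences of loss_fn around w
        g = numpy.zeros_like(w)
        for i in range(w.size):
            d = numpy.zeros_like(w)
            d[i] = eps
            g[i] = (loss_fn(w + d) - loss_fn(w - d)) / (2 * eps)
        return g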
Example 9
    def test_optim_clas(self):
        X = numpy.abs(numpy.random.randn(10, 2))
        w1 = numpy.array([[0.1, 0.8, -0.6], [-0.1, 0.4, -0.3]])
        w0 = numpy.random.randn(*w1.shape)
        noise = numpy.random.randn(*X.shape) / 10
        noise[0] = 0
        noise[1] = 0.07
        y0 = (X[:, :1] @ w1[:, 1:2].T +
              X[:, 1:] @ w1[:, 2:3].T + w1[:, 0].T + noise)
        y = numpy.exp(y0)
        y /= numpy.sum(y, axis=1, keepdims=1)
        y[:-1, 0] = (y[:-1, 0] >= 0.5).astype(numpy.float64)
        y[:-1, 1] = (y[:-1, 1] >= 0.5).astype(numpy.float64)
        y /= numpy.sum(y, axis=1, keepdims=1)

        for act in ['softmax', 'softmax4']:
            with self.subTest(act=act):
                neu2 = NeuralTreeNode(2, activation=act)
                neu = NeuralTreeNode(w1[:, 1:], bias=w1[:, 0], activation=act)
                self.assertEqual(neu2.training_weights.shape,
                                 neu.training_weights.shape)
                self.assertEqual(neu2.input_weights.shape,
                                 neu.input_weights.shape)
                loss = neu.loss(X, y).sum() / X.shape[0]
                self.assertNotEmpty(loss)
                self.assertFalse(numpy.isinf(loss))
                self.assertFalse(numpy.isnan(loss))
                grad = neu.gradient(X[0], y[0])
                self.assertEqual(grad.ravel().shape, w1.ravel().shape)

                neu.fit(X, y, verbose=False)
                c1 = neu.training_weights
                neu = NeuralTreeNode(w0[:, 1:], bias=w0[:, 0], activation=act)
                neu.fit(X, y, verbose=False, lr_schedule='constant')
                c2 = neu.training_weights
                self.assertEqual(c1.shape, c2.shape)
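The targets here are genuine probability vectors: a softmax of a noisy linear score, with every row but the last thresholded to {0, 1} and renormalized. The softmax step alone guarantees that rows sum to one, which is also what Example 8 asserts for the predictions:

    import numpy
    z = numpy.array([[0.3, -0.2], [1.5, 0.1]])
    y = numpy.exp(z)
    y /= numpy.sum(y, axis=1, keepdims=True)
    print(y.sum(axis=1))              # [1., 1.] by construction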
Example 10
    def test_neural_net_gradient_regression(self):
        X = numpy.abs(numpy.random.randn(10, 2))
        w1 = numpy.array([-0.5, 0.8, -0.6])
        noise = numpy.random.randn(X.shape[0]) / 10
        noise[0] = 0
        noise[1] = 0.07
        X[1, 0] = 0.7
        X[1, 1] = -0.5
        y = w1[0] + X[:, 0] * w1[1] + X[:, 1] * w1[2] + noise

        for act in ['identity', 'relu', 'leakyrelu',
                    'sigmoid', 'sigmoid4', 'expit']:
            with self.subTest(act=act):
                neu = NeuralTreeNode(w1[1:], bias=w1[0], activation=act)
                loss1 = neu.loss(X, y)
                grad1 = neu.gradient(X[0], y[0])

                net = NeuralTreeNet(X.shape[1], empty=True)
                net.append(neu, numpy.arange(0, 2))
                loss2 = net.loss(X, y)
                grad2 = net.gradient(X[0], y[0])
                self.assertEqualArray(loss1, loss2)
                self.assertEqualArray(grad1, grad2)
Example 11
    def test_shape_dim2(self):
        X = numpy.random.randn(10, 3)
        w = numpy.array([[10, 20, 3], [-10, -20, 0.5]])
        first = None
        for act in ['sigmoid', 'sigmoid4', 'expit', 'identity',
                    'relu', 'leakyrelu']:
            with self.subTest(act=act):
                neu = NeuralTreeNode(w, bias=[-4, 0.5], activation=act)
                pred = neu.predict(X)
                self.assertEqual(pred.shape, (X.shape[0], 2))
                text = str(neu)
                self.assertIn('NeuralTreeNode(', text)
                if first is None:
                    first = neu
                else:
                    self.assertFalse(neu == first)
                self.assertEqual(neu.ndim, 3)
                loss = neu.loss(X[0], 0.)
                self.assertEqual(loss.shape, (2, ))
                loss = neu.loss(
                    X, numpy.zeros((X.shape[0], 1), dtype=numpy.float64))
                self.assertEqual(loss.shape, (10, 2))
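The final loss shapes are consistent with an elementwise, per-output loss (an assumption based only on the asserted shapes): one sample against a scalar target gives one value per output unit, and a (10, 1) target broadcasts against the (10, 2) predictions:

    import numpy
    pred = numpy.random.randn(10, 2)  # stand-in for neu.predict(X)
    target = numpy.zeros((10, 1))
    loss = (pred - target) ** 2       # a squared loss is assumed here
    print(loss.shape)                 # (10, 2), matching the final assertion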