Example no. 1
 def test_neural_tree_network(self):
     "A default (non-empty) network sums its inputs into the last column."
     network = NeuralTreeNet(3, empty=False)
     inputs = numpy.random.randn(2, 3)
     predicted = network.predict(inputs)
     expected = inputs.sum(axis=1).reshape((-1, 1))
     self.assertEqual(expected, predicted[:, -1:])
     self.assertEqual(repr(network), 'NeuralTreeNet(3)')
     # clearing must leave an empty network
     network.clear()
     self.assertEqual(len(network), 0)
Example no. 2
    def test_convert(self):
        """Tree-to-network conversion: three classes must raise,
        binary classification must reproduce the tree probabilities."""
        X = numpy.arange(8).astype(numpy.float64).reshape((-1, 2))
        y = ((X[:, 0] + X[:, 1] * 2) > 10).astype(numpy.int64)

        # introducing a third class makes the conversion impossible
        y_three = y.copy()
        y_three[0] = 2
        clf = DecisionTreeClassifier(max_depth=2)
        clf.fit(X, y_three)
        self.assertRaise(
            lambda: NeuralTreeNet.create_from_tree(clf), RuntimeError)

        # binary case: the converted network approximates predict_proba
        clf = DecisionTreeClassifier(max_depth=2)
        clf.fit(X, y)
        net = NeuralTreeNet.create_from_tree(clf, 10)
        self.assertNotEmpty(net)
        expected = clf.predict_proba(X)
        predicted = net.predict(X)
        self.assertEqual(expected.shape[0], predicted.shape[0])
        self.assertEqualArray(expected, predicted[:, -2:])
Example no. 3
    def test_neural_net_gradient(self):
        """Fitting with verbose=True must print the loss and keep stderr clean."""
        X = numpy.arange(8).astype(numpy.float64).reshape((-1, 2))
        y = ((X[:, 0] + X[:, 1] * 2) > 10).astype(numpy.int64)
        soft = label_class_to_softmax_output(y)

        clf = DecisionTreeClassifier(max_depth=2)
        clf.fit(X, y)
        net = NeuralTreeNet.create_from_tree(clf, 10)
        _, stdout, stderr = self.capture(
            lambda: net.fit(X, soft, verbose=True))
        self.assertIn("loss:", stdout)
        self.assertEmpty(stderr)
Example no. 4
 def test_neural_tree_network_copy(self):
     "A copied network must predict exactly like the original."
     original = NeuralTreeNet(3, empty=False)
     original.append(
         NeuralTreeNode(1, activation='identity'), inputs=[3])
     clone = original.copy()
     data = numpy.random.randn(2, 3)
     self.assertEqualArray(original.predict(data), clone.predict(data))
Example no. 5
    def test_convert_compact(self):
        """Compact conversion: an unknown arch or a third class must raise,
        the binary case must match the tree and export to dot."""
        X = numpy.arange(8).astype(numpy.float64).reshape((-1, 2))
        y = ((X[:, 0] + X[:, 1] * 2) > 10).astype(numpy.int64)
        y_three = y.copy()
        y_three[0] = 2

        clf = DecisionTreeClassifier(max_depth=2)
        clf.fit(X, y_three)
        # unknown architecture name
        self.assertRaise(
            lambda: NeuralTreeNet.create_from_tree(clf, arch="k"), ValueError)
        # three classes are not supported by the compact architecture
        self.assertRaise(
            lambda: NeuralTreeNet.create_from_tree(clf, arch="compact"),
            RuntimeError)

        clf = DecisionTreeClassifier(max_depth=2)
        clf.fit(X, y)
        net = NeuralTreeNet.create_from_tree(clf, 10, arch='compact')
        self.assertNotEmpty(net)
        expected = clf.predict_proba(X)
        predicted = net.predict(X)
        self.assertEqual(expected.shape[0], predicted.shape[0])
        # the small shift avoids strict comparisons around zero
        self.assertEqualArray(expected + 1e-8, predicted[:, -2:] + 1e-8)
        dot = net.to_dot()
        self.assertIn("s3a4:f4 -> s5a6:f6", dot)
Example no. 6
    def test_neural_net_gradient_regression_2(self):
        """A lone neuron and a two-node network wrapping it must agree on
        predictions, losses and gradients for every activation."""
        X = numpy.abs(numpy.random.randn(10, 2))
        coef = numpy.array([-0.5, 0.8, -0.6])
        noise = numpy.random.randn(X.shape[0]) / 10
        noise[0] = 0
        noise[1] = 0.07
        # pin the second observation so the relu check below is deterministic
        X[1, 0] = 0.7
        X[1, 1] = -0.5
        y = coef[0] + X[:, 0] * coef[1] + X[:, 1] * coef[2] + noise

        activations = ['relu', 'sigmoid', 'identity',
                       'leakyrelu', 'sigmoid4', 'expit']
        for act in activations:
            with self.subTest(act=act):
                neuron = NeuralTreeNode(coef[1:], bias=coef[0], activation=act)
                loss_neuron = neuron.loss(X, y)
                pred_neuron = neuron.predict(X)
                if act == 'relu':
                    self.assertEqualArray(
                        pred_neuron[1:2], numpy.array([0.36]))
                    # prediction must be stable when called twice
                    again = neuron.predict(X)
                    self.assertEqualArray(again[1:2], numpy.array([0.36]))

                # same neuron followed by an identity node
                network = NeuralTreeNet(X.shape[1], empty=True)
                network.append(neuron, numpy.arange(0, 2))
                identity_node = NeuralTreeNode(
                    numpy.array([1], dtype=X.dtype),
                    bias=numpy.array([0], dtype=X.dtype),
                    activation='identity')
                network.append(identity_node, numpy.arange(2, 3))
                pred_network = network.predict(X)
                loss_network = network.loss(X, y)

                self.assertEqualArray(pred_neuron, pred_network[:, -1])
                self.assertEqualArray(pred_network[:, -2], pred_network[:, -1])
                self.assertEqualArray(pred_network[:, 2], pred_network[:, 3])
                self.assertEqualArray(loss_neuron, loss_network)

                # gradients must coincide on the neuron's own weights
                for i in range(5):
                    self.assertEqualArray(
                        neuron.gradient(X[i], y[i]),
                        network.gradient(X[i], y[i])[:3])
Example no. 7
    def test_training_weights(self):
        """update_training_weights must act on the flat 11-weight vector
        and change the network prediction.

        Fix: removed the unused locals ``y2 = y.copy(); y2[0] = 2`` — they
        were copied from the conversion tests but never used here.
        """
        X = numpy.arange(8).astype(numpy.float64).reshape((-1, 2))
        y = ((X[:, 0] + X[:, 1] * 2) > 10).astype(numpy.int64)

        tree = DecisionTreeClassifier(max_depth=2)
        tree.fit(X, y)
        root = NeuralTreeNet.create_from_tree(tree, 10)
        v1 = root.predict(X[:1])
        w = root.training_weights
        self.assertEqual(w.shape, (11, ))
        # a non-zero shift must modify the prediction
        delta = numpy.arange(11) + 0.5
        root.update_training_weights(delta)
        v2 = root.predict(X[:1])
        self.assertNotEqualArray(v1, v2)
Example no. 8
 def test_neural_tree_network_training_weights(self):
     "Weight updates are applied additively to the flat weight vector."
     network = NeuralTreeNet(3, empty=False)
     network.append(NeuralTreeNode(1, activation='identity'), inputs=[3])
     before = network.training_weights
     self.assertEqual(before.shape, (6, ))
     self.assertEqual(before[0], 0)
     self.assertEqualArray(before[1:4], [1, 1, 1])
     shift = numpy.arange(6) - 0.5
     network.update_training_weights(shift)
     after = network.training_weights
     self.assertEqualArray(after, before + shift)
Example no. 9
    def test_neural_net_gradient_fit(self):
        """Training must print progress, keep stderr empty and not blow up
        the loss."""
        X = numpy.arange(16).astype(numpy.float64).reshape((-1, 2))
        y = ((X[:, 0] + X[:, 1] * 2) > 15).astype(numpy.int64)
        soft = label_class_to_softmax_output(y)

        clf = DecisionTreeClassifier(max_depth=2)
        clf.fit(X, y)
        net = NeuralTreeNet.create_from_tree(clf, 10)
        loss_before = net.loss(X, soft).sum()
        # initial loss is small and non-negative (up to rounding)
        self.assertGreater(loss_before, -1e-5)
        self.assertLess(loss_before, 1.)
        _, stdout, stderr = self.capture(
            lambda: net.fit(X, soft, verbose=True, max_iter=20))
        self.assertEmpty(stderr)
        self.assertNotEmpty(stdout)
        loss_after = net.loss(X, soft).sum()
        self.assertLess(loss_after, loss_before + 1)
Example no. 10
 def test_neural_tree_network_append(self):
     "Appending a mismatched node fails; a 1-input node extends the output."
     network = NeuralTreeNet(3, empty=False)
     # a two-input node cannot be wired to a single column
     self.assertRaise(lambda: network.append(
         NeuralTreeNode(2, activation='identity'), inputs=[3]))
     network.append(NeuralTreeNode(1, activation='identity'), inputs=[3])
     self.assertEqual(network.size_, 5)
     appended = network.nodes[-1]
     data = numpy.random.randn(2, 3)
     predicted = network.predict(data)
     expected = data.sum(axis=1) * appended.input_weights[0] + appended.bias
     self.assertEqual(expected.reshape((-1, 1)), predicted[:, -1:])
     self.assertEqual(repr(network), 'NeuralTreeNet(3)')
Example no. 11
    def test_convert_compact_fit(self):
        """A compact network built from a tree must reproduce the tree
        probabilities and remain trainable.

        Fix: removed the unused locals ``y2 = y.copy(); y2[0] = 2`` — they
        were copied from test_convert_compact but never used here.
        """
        X = numpy.arange(8).astype(numpy.float64).reshape((-1, 2))
        y = ((X[:, 0] + X[:, 1] * 2) > 10).astype(numpy.int64)

        tree = DecisionTreeClassifier(max_depth=2)
        tree.fit(X, y)
        root = NeuralTreeNet.create_from_tree(tree, 10, arch='compact')
        self.assertNotEmpty(root)
        exp = tree.predict_proba(X)
        got = root.predict(X)
        self.assertEqual(exp.shape[0], got.shape[0])
        # the small shift avoids strict comparisons around zero
        self.assertEqualArray(exp + 1e-8, got[:, -2:] + 1e-8)
        ny = label_class_to_softmax_output(y)
        loss1 = root.loss(X, ny).sum()
        _, out, err = self.capture(
            lambda: root.fit(X, ny, verbose=True, max_iter=20))
        self.assertEmpty(err)
        self.assertNotEmpty(out)
        loss2 = root.loss(X, ny).sum()
        self.assertLess(loss2, loss1 + 1)
Example no. 12
    def test_dot(self):
        """Both the tree and the converted network must export to dot, and
        their positive-class scores must correlate on iris."""
        data = load_iris()
        X, y = data.data, data.target % 2

        tree = DecisionTreeClassifier(max_depth=3, random_state=11)
        tree.fit(X, y)
        net = NeuralTreeNet.create_from_tree(tree)
        self.assertIn("digraph", export_graphviz(tree))

        self.assertIn("digraph", net.to_dot())
        # exporting with a concrete observation must also work
        sample = X[:1].copy()
        sample[0, 3] = 1.
        self.assertIn("digraph", net.to_dot(X=sample.ravel()))
        tree_score = tree.predict_proba(X)[:, -1]
        net_score = net.predict(X)[:, -1]
        stacked = numpy.empty(
            (tree_score.shape[0], 2), dtype=tree_score.dtype)
        stacked[:, 0] = tree_score
        stacked[:, 1] = net_score
        corr = numpy.corrcoef(stacked.T)
        self.assertGreater(corr[0, 1], 0.5)
Example no. 13
    def test_neural_net_gradient_regression(self):
        """A lone neuron and a one-node network must share loss and gradient
        for every activation."""
        X = numpy.abs(numpy.random.randn(10, 2))
        coef = numpy.array([-0.5, 0.8, -0.6])
        noise = numpy.random.randn(X.shape[0]) / 10
        noise[0] = 0
        noise[1] = 0.07
        X[1, 0] = 0.7
        X[1, 1] = -0.5
        y = coef[0] + X[:, 0] * coef[1] + X[:, 1] * coef[2] + noise

        for act in ['identity', 'relu', 'leakyrelu',
                    'sigmoid', 'sigmoid4', 'expit']:
            with self.subTest(act=act):
                neuron = NeuralTreeNode(coef[1:], bias=coef[0], activation=act)
                loss_neuron = neuron.loss(X, y)
                grad_neuron = neuron.gradient(X[0], y[0])

                network = NeuralTreeNet(X.shape[1], empty=True)
                network.append(neuron, numpy.arange(0, 2))
                self.assertEqualArray(loss_neuron, network.loss(X, y))
                self.assertEqualArray(
                    grad_neuron, network.gradient(X[0], y[0]))