Example #1
    def test_label_class_to_softmax_output(self):
        y_label = numpy.array([0, 1, 0, 0])
        # a 2D label array is rejected
        self.assertRaise(lambda: label_class_to_softmax_output(y_label.reshape((-1, 1))),
                         ValueError)
        # a 1D binary label vector becomes an (n, 2) softmax-like target
        soft_y = label_class_to_softmax_output(y_label)
        self.assertEqual(soft_y.shape, (4, 2))
        self.assertEqual(soft_y[:, 1], y_label)
        self.assertEqual(soft_y[:, 0], 1 - y_label)
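For reference, a minimal standalone sketch of the same conversion outside the test harness. The import path mlinsights.mlmodel.neural_tree is an assumption; the snippets above do not show where label_class_to_softmax_output comes from.

    import numpy
    # assumed import path, not shown in the examples above
    from mlinsights.mlmodel.neural_tree import label_class_to_softmax_output

    y = numpy.array([0, 1, 0, 0])
    soft_y = label_class_to_softmax_output(y)
    # per the test above: soft_y has shape (4, 2),
    # column 1 matches y and column 0 matches 1 - y
    print(soft_y)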
Example #2
    def test_neural_net_gradient(self):
        # small binary classification problem
        X = numpy.arange(8).astype(numpy.float64).reshape((-1, 2))
        y = ((X[:, 0] + X[:, 1] * 2) > 10).astype(numpy.int64)
        ny = label_class_to_softmax_output(y)

        # convert a shallow decision tree into a neural network and train it
        tree = DecisionTreeClassifier(max_depth=2)
        tree.fit(X, y)
        root = NeuralTreeNet.create_from_tree(tree, 10)
        _, out, err = self.capture(lambda: root.fit(X, ny, verbose=True))
        self.assertIn("loss:", out)
        self.assertEmpty(err)
Example #3
    def test_neural_net_gradient_fit(self):
        X = numpy.arange(16).astype(numpy.float64).reshape((-1, 2))
        y = ((X[:, 0] + X[:, 1] * 2) > 15).astype(numpy.int64)
        ny = label_class_to_softmax_output(y)

        tree = DecisionTreeClassifier(max_depth=2)
        tree.fit(X, y)
        root = NeuralTreeNet.create_from_tree(tree, 10)
        # the converted network should already have a small loss
        loss1 = root.loss(X, ny).sum()
        self.assertGreater(loss1, -1e-5)
        self.assertLess(loss1, 1.)
        _, out, err = self.capture(
            lambda: root.fit(X, ny, verbose=True, max_iter=20))
        self.assertEmpty(err)
        self.assertNotEmpty(out)
        # training should not make the loss significantly worse
        loss2 = root.loss(X, ny).sum()
        self.assertLess(loss2, loss1 + 1)
Example #4
    def test_convert_compact_fit(self):
        X = numpy.arange(8).astype(numpy.float64).reshape((-1, 2))
        y = ((X[:, 0] + X[:, 1] * 2) > 10).astype(numpy.int64)
        y2 = y.copy()
        y2[0] = 2  # not used below

        tree = DecisionTreeClassifier(max_depth=2)
        tree.fit(X, y)
        # build the network with the 'compact' architecture
        root = NeuralTreeNet.create_from_tree(tree, 10, arch='compact')
        self.assertNotEmpty(root)
        # the last two output columns should reproduce the tree probabilities
        exp = tree.predict_proba(X)
        got = root.predict(X)
        self.assertEqual(exp.shape[0], got.shape[0])
        self.assertEqualArray(exp + 1e-8, got[:, -2:] + 1e-8)
        ny = label_class_to_softmax_output(y)
        loss1 = root.loss(X, ny).sum()
        _, out, err = self.capture(
            lambda: root.fit(X, ny, verbose=True, max_iter=20))
        self.assertEmpty(err)
        self.assertNotEmpty(out)
        # training should not make the loss significantly worse
        loss2 = root.loss(X, ny).sum()
        self.assertLess(loss2, loss1 + 1)
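The methods above are not self-contained: they rely on imports and on helper assertions (assertRaise, assertEmpty, assertNotEmpty, assertEqualArray, capture) provided by a custom test base class. A minimal sketch of the scaffolding they appear to assume, using pyquickhelper's ExtTestCase and mlinsights' neural_tree module; both import paths and the class name TestNeuralTree are assumptions, not shown in the examples.

    import unittest
    import numpy
    from sklearn.tree import DecisionTreeClassifier
    # assumed import paths for the helpers used in the examples above
    from pyquickhelper.pycode import ExtTestCase
    from mlinsights.mlmodel.neural_tree import (
        NeuralTreeNet, label_class_to_softmax_output)


    class TestNeuralTree(ExtTestCase):
        # paste any of the test methods above into this class

        def test_smoke(self):
            # quick sanity check that the tree-to-network conversion runs
            X = numpy.arange(8).astype(numpy.float64).reshape((-1, 2))
            y = ((X[:, 0] + X[:, 1] * 2) > 10).astype(numpy.int64)
            tree = DecisionTreeClassifier(max_depth=2).fit(X, y)
            root = NeuralTreeNet.create_from_tree(tree, 10)
            self.assertEqual(root.predict(X).shape[0], X.shape[0])


    if __name__ == "__main__":
        unittest.main()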