def test_bayes(self):
    """Naive Bayes on a discretized copy of one feature column.

    With clean labels the classifier should reproduce the training data
    perfectly; after corrupting the last 20 labels the accuracy must drop
    below 1 but stay reasonably high.
    """
    # np.random.random_integers is deprecated (removed in NumPy >= 1.24);
    # randint's upper bound is exclusive, so 4 keeps values in {1, 2, 3}.
    x = np.random.randint(1, 4, (100, 5))
    col = np.random.randint(5)
    # The class is an exact copy of one feature column, so a perfect fit
    # is achievable.
    y = x[:, col].copy().reshape(100, 1)
    t = Table(x, y)
    t = discretization.DiscretizeTable(t, method=EqualWidth(n=3))
    res = testing.TestOnTrainingData(t, [naive_bayes.NaiveBayesLearner()])
    np.testing.assert_almost_equal(scoring.CA(res), [1])
    # Mirror the last 20 labels around 2 (1 <-> 3) to inject noise.
    t.Y[-20:] = 4 - t.Y[-20:]
    res = testing.TestOnTrainingData(t, [naive_bayes.NaiveBayesLearner()])
    self.assertGreaterEqual(scoring.CA(res)[0], 0.75)
    self.assertLess(scoring.CA(res)[0], 1)
def test_NaiveBayes(self):
    """Cross-validated accuracy on a titanic subsample lies in (0.7, 0.9)."""
    full_table = Orange.data.Table('titanic')
    learner = nb.NaiveBayesLearner()
    # Every 20th row keeps the 10-fold cross-validation fast.
    results = testing.CrossValidation(full_table[::20], [learner], k=10)
    accuracy = scoring.CA(results)
    self.assertGreater(accuracy, 0.7)
    self.assertLess(accuracy, 0.9)
def test_predict_numpy(self):
    """The classifier accepts a raw numpy feature matrix as input."""
    data = Orange.data.Table('titanic')
    model = nb.NaiveBayesLearner()(data)
    features = data.X[::20]
    # Plain value prediction, then the combined value/probability mode.
    model(features)
    values, probabilities = model(features, model.ValueProbs)
def test_predict_table(self):
    """The classifier accepts a whole Table as prediction input."""
    data = Orange.data.Table('titanic')
    model = nb.NaiveBayesLearner()(data)
    subset = data[::20]
    # Plain value prediction, then the combined value/probability mode.
    model(subset)
    values, probabilities = model(subset, model.ValueProbs)
def test_predict_single_instance(self):
    """The classifier accepts single data instances as prediction input."""
    data = Orange.data.Table('titanic')
    model = nb.NaiveBayesLearner()(data)
    for instance in data[::20]:
        # Plain value prediction, then the combined value/probability mode.
        model(instance)
        value, probability = model(instance, model.ValueProbs)
def test_predict_single_instance(self):
    """Row-by-row prediction over the full table runs without error."""
    titanic = data.Table("titanic")
    classifier = nb.NaiveBayesLearner()(titanic)
    # Collect one prediction per row; the values themselves are unchecked.
    pred = [classifier(row) for row in titanic]
def test_NaiveBayes(self):
    """Naive Bayes on a discretized SQL-backed iris table is near-perfect.

    Checks both a single-instance prediction and whole-table accuracy,
    which should be high but not exactly 1.
    """
    class_var = DiscreteVariable(
        "iris",
        values=[
            "Iris-setosa", "Iris-virginica", "Iris-versicolor"
        ],
    )
    table = SqlTable(
        connection_params(),
        "iris",
        type_hints=Domain([], class_var),
    )
    table = preprocess.Discretize(table)
    clf = nb.NaiveBayesLearner()(table)

    # Single instance prediction
    self.assertEqual(clf(table[0]), table[0].get_class())

    # Table prediction
    predicted = clf(table)
    expected = array([ins.get_class() for ins in table])
    hits = predicted == expected
    accuracy = hits.sum() / len(hits)
    self.assertGreater(accuracy, 0.95)
    self.assertLess(accuracy, 1.0)
def main():
    """Launch the OWTestLearners widget on iris with two learners.

    Intended for manual/visual inspection; returns the Qt event-loop
    exit code.
    """
    from Orange.classification import \
        logistic_regression as lr, naive_bayes as nb
    app = QtGui.QApplication([])
    iris = Orange.data.Table("iris")
    widget = OWTestLearners()
    widget.show()
    # Use the same table for both training and testing signals.
    widget.set_train_data(iris)
    widget.set_test_data(iris)
    widget.set_learner(lr.LogisticRegressionLearner(), 1)
    widget.set_learner(nb.NaiveBayesLearner(), 2)
    widget.handleNewSignals()
    return app.exec_()
def test_NaiveBayes(self):
    """Naive Bayes on a discretized SQL iris table: high but imperfect CA.

    Verifies a single-instance prediction against the true class, then
    checks whole-table accuracy lands strictly between 0.95 and 1.
    """
    class_var = DiscreteVariable(
        "iris",
        values=['Iris-setosa', 'Iris-virginica', 'Iris-versicolor'],
    )
    table = SqlTable(dict(host='localhost', database='test'), 'iris',
                     type_hints=Domain([], class_var))
    table = preprocess.Discretize(table)
    clf = nb.NaiveBayesLearner()(table)

    # Single instance prediction
    self.assertEqual(clf(table[0]), table[0].get_class())

    # Table prediction
    predicted = clf(table)
    expected = array([ins.get_class() for ins in table])
    correct = predicted == expected
    accuracy = correct.sum() / len(correct)
    self.assertGreater(accuracy, 0.95)
    self.assertLess(accuracy, 1.)
def test_NaiveBayes(self):
    """Naive Bayes on a discretized SQL iris table: high but imperfect CA.

    Uses the connection and table name held on the test fixture
    (self.conn, self.iris).
    """
    iris_v = ['Iris-setosa', 'Iris-virginica', 'Iris-versicolor']
    domain_hint = Domain([], DiscreteVariable("iris", values=iris_v))
    raw = SqlTable(self.conn, self.iris, type_hints=domain_hint)
    table = preprocess.Discretize()(raw)
    clf = nb.NaiveBayesLearner()(table)

    # Single instance prediction
    self.assertEqual(clf(table[0]), table[0].get_class())

    # Table prediction
    predicted = clf(table)
    expected = array([ins.get_class() for ins in table])
    correct = predicted == expected
    accuracy = correct.sum() / len(correct)
    self.assertGreater(accuracy, 0.95)
    self.assertLess(accuracy, 1.)