def test_SoftmaxRegressionPreprocessors(self):
    """Default preprocessors (normalization) should outperform no
    preprocessing once two iris features are rescaled to tiny values.

    ca[0] is the accuracy without preprocessors, ca[1] with defaults.
    """
    table = Table('iris')
    # Shrink two features by 1000x so an unnormalized learner struggles.
    table.X[:, 2] = table.X[:, 2] * 0.001
    table.X[:, 3] = table.X[:, 3] * 0.001
    learners = [SoftmaxRegressionLearner(preprocessors=[]),
                SoftmaxRegressionLearner()]
    results = CrossValidation(table, learners, k=10)
    ca = CA(results)
    # assertLess reports both compared values on failure, unlike the
    # original assertTrue(ca[0] < ca[1]).
    self.assertLess(ca[0], ca[1])
def test_SoftmaxRegressionPreprocessors(self):
    """With two features rescaled to tiny magnitudes, the learner with
    default preprocessing must beat the one with preprocessing disabled."""
    table = self.iris.copy()
    # Scale columns 2 and 3 down by 1000x to break unnormalized training.
    for col in (2, 3):
        table.X[:, col] = table.X[:, col] * 0.001
    bare = SoftmaxRegressionLearner(preprocessors=[])
    default = SoftmaxRegressionLearner()
    results = CrossValidation(table, [bare, default], k=10)
    scores = CA(results)
    self.assertLess(scores[0], scores[1])
def test_SoftmaxRegressionPreprocessors(self):
    """Default preprocessing should win over none after two features are
    shrunk by 1000x (tables must be unlocked before in-place edits)."""
    table = self.iris.copy()
    with table.unlocked():
        for col in (2, 3):
            table.X[:, col] = table.X[:, col] * 0.001
    bare = SoftmaxRegressionLearner(preprocessors=[])
    default = SoftmaxRegressionLearner()
    results = CrossValidation(k=10)(table, [bare, default])
    scores = CA(results)
    self.assertLess(scores[0], scores[1])
def test_SoftmaxRegressionPreprocessors(self):
    """Prepending a huge-scale random feature should hurt the learner
    without preprocessors relative to the default (normalizing) learner.

    ca[0] is the accuracy without preprocessors, ca[1] with defaults.
    """
    np.random.seed(42)  # make the junk column deterministic
    table = Table('iris')
    new_attrs = (ContinuousVariable('c0'), ) + table.domain.attributes
    new_domain = Domain(new_attrs,
                        table.domain.class_vars,
                        table.domain.metas)
    # Prepend a random feature ~6 orders of magnitude larger than the
    # original attributes; the Table participates in np.hstack as an
    # array — presumably exposing X (+ class column); verify if changed.
    new_table = np.hstack(
        (1000000 * np.random.random((table.X.shape[0], 1)), table))
    # from_numpy is a classmethod; call it on the class, not an instance.
    table = Table.from_numpy(new_domain, new_table)
    learners = [SoftmaxRegressionLearner(preprocessors=[]),
                SoftmaxRegressionLearner()]
    results = CrossValidation(table, learners, k=3)
    ca = CA(results)
    # assertLess gives an informative message, unlike assertTrue(a < b).
    self.assertLess(ca[0], ca[1])
def test_SoftmaxRegression(self):
    """Sanity check: 3-fold CV accuracy on iris is high but not perfect."""
    cv = CrossValidation(k=3)
    accuracy = CA(cv(self.iris, [SoftmaxRegressionLearner()]))
    self.assertGreater(accuracy, 0.9)
    self.assertLess(accuracy, 1.0)
def test_reprs(self):
    """repr() of every learner must round-trip through eval():
    evaluating the repr yields an object with an identical repr."""
    learners = [
        LogisticRegressionLearner(tol=0.0002),
        MajorityLearner(),
        NaiveBayesLearner(),
        RandomForestLearner(bootstrap=False, n_jobs=3),
        SimpleTreeLearner(seed=1, bootstrap=True),
        SoftmaxRegressionLearner(),
        SVMLearner(shrinking=False),
        LinearSVMLearner(tol=0.022, dual=False),
        NuSVMLearner(tol=0.003, cache_size=190),
        OneClassSVMLearner(degree=2),
        TreeLearner(max_depth=3, min_samples_split=1),
        KNNLearner(n_neighbors=4),
        EllipticEnvelopeLearner(store_precision=False),
        SimpleRandomForestLearner(n_estimators=20),
    ]
    for learner in learners:
        representation = repr(learner)
        # eval() is acceptable here: the input is our own repr() output.
        rebuilt = eval(representation)
        self.assertEqual(repr(rebuilt), representation)
def test_SoftmaxRegression(self):
    """3-fold CV accuracy on iris should be high but not perfect."""
    table = Table('iris')
    learner = SoftmaxRegressionLearner()
    results = CrossValidation(table, [learner], k=3)
    ca = CA(results)
    # Split the chained assertTrue(0.9 < ca < 1.0) into two asserts so a
    # failure reports the actual score and which bound was violated.
    self.assertGreater(ca, 0.9)
    self.assertLess(ca, 1.0)
def test_predict_numpy(self):
    """A fitted model must accept a bare numpy feature matrix."""
    data = Table('iris')
    model = SoftmaxRegressionLearner()(data)
    model(data.X)  # plain value prediction must not raise
    values, probabilities = model(data.X, model.ValueProbs)
def test_probability(self):
    """Predicted class probabilities must sum to one for every row."""
    data = Table('iris')
    model = SoftmaxRegressionLearner()(data)
    probs = model(data, ret=Model.Probs)
    row_sums = probs.sum(axis=1)
    self.assertTrue(all(abs(row_sums - 1) < 1e-6))
def test_predict_numpy(self):
    """A model fitted on self.iris must accept its raw X matrix."""
    model = SoftmaxRegressionLearner()(self.iris)
    model(self.iris.X)  # value-only prediction must not raise
    values, probabilities = model(self.iris.X, model.ValueProbs)
def test_predict_table(self):
    """A fitted model must accept a whole Table for prediction."""
    model = SoftmaxRegressionLearner()(self.iris)
    model(self.iris)  # value-only prediction must not raise
    values, probabilities = model(self.iris, model.ValueProbs)
def test_probability(self):
    """Predicted class probabilities must sum to one for every row."""
    learn = SoftmaxRegressionLearner()
    clf = learn(self.iris)
    p = clf(self.iris, ret=Model.Probs)
    # BUG FIX: the original asserted ``abs(...).all() < 1e-6`` — .all()
    # collapses the deviations to a single bool before the comparison,
    # so the tolerance check never tested the actual values.  Compare
    # the maximum deviation to the tolerance instead.
    self.assertLess(abs(p.sum(axis=1) - 1).max(), 1e-6)