Example #1
    def apply(self):
        learner = KNNLearner(n_neighbors=self.n_neighbors,
                             metric=self.metrics[self.metric_index],
                             preprocessors=self.preprocessors)
        learner.name = self.learner_name
        classifier = None
        if self.data is not None:
            classifier = learner(self.data)
            classifier.name = self.learner_name

        self.send("Learner", learner)
        self.send("Classifier", classifier)
Example #2
    def apply(self):
        learner = KNNLearner(
            n_neighbors=self.n_neighbors,
            metric=self.metrics[self.metric_index],
            preprocessors=self.preprocessors
        )
        learner.name = self.learner_name
        classifier = None
        if self.data is not None:
            classifier = learner(self.data)
            classifier.name = self.learner_name

        self.send("Learner", learner)
        self.send("Classifier", classifier)
Example #3
 def test_error_message_cleared_when_valid_learner_on_input(self):
     # Disconnecting an invalid learner should use the default one and hide
     # the error
     self.send_signal("Learner", KNNLearner())
     self.send_signal("Learner", None)
     self.assertFalse(self.widget.Error.no_weight_support.is_shown(),
                      'Error message was not hidden on input disconnect')
     # Connecting a valid learner should also reset the error message
     self.send_signal("Learner", KNNLearner())
     self.send_signal("Learner", RandomForestLearner())
     self.assertFalse(
         self.widget.Error.no_weight_support.is_shown(),
         'Error message was not hidden when a valid learner appeared on '
         'input')
Example #4
 def test_predict_single_instance(self):
     data = Table('iris')
     learn = KNNLearner()
     clf = learn(data)
     for ins in data[::20]:
         clf(ins)
         val, prob = clf(ins, clf.ValueProbs)
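The second argument in `clf(ins, clf.ValueProbs)` selects the model's return mode: by default a model returns only predicted values, while `ValueProbs` returns the predicted value together with the per-class probabilities. A minimal self-contained sketch of the same call:

from Orange.data import Table
from Orange.classification import KNNLearner

data = Table("iris")
clf = KNNLearner()(data)
val, prob = clf(data[0], clf.ValueProbs)  # value and probabilities at once
print(val)   # predicted class value for the first instance
print(prob)  # one probability per class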
Example #5
 def test_KNN(self):
     table = Table('iris')
     learn = KNNLearner()
     results = CrossValidation(table, [learn], k=10)
     ca = CA(results)
     self.assertGreater(ca, 0.8)
     self.assertLess(ca, 0.99)
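For reference, `CrossValidation` returns a `Results` object and `CA` yields one classification accuracy per learner, so several learners can be compared in a single run; a minimal sketch using the same call signature as the test above:

from Orange.data import Table
from Orange.classification import KNNLearner, NaiveBayesLearner
from Orange.evaluation import CrossValidation, CA

table = Table("iris")
results = CrossValidation(table, [KNNLearner(), NaiveBayesLearner()], k=10)
print(CA(results))  # one accuracy score per learner, in input order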
Example #6
 def test_input_learner_disconnect(self):
     """Check base learner after disconnecting learner on the input"""
     self.send_signal("Learner", KNNLearner())
     self.assertIsInstance(self.widget.base_estimator, KNNLearner)
     self.send_signal("Learner", None)
     self.assertEqual(self.widget.base_estimator,
                      self.widget.DEFAULT_BASE_ESTIMATOR)
Example #7
    def apply(self):
        learner = KNNLearner(
            n_neighbors=self.n_neighbors,
            metric=self.metrics[self.metric_index],
            preprocessors=self.preprocessors
        )
        learner.name = self.learner_name
        classifier = None

        if self.data is not None:
            self.error(0)
            if not learner.check_learner_adequacy(self.data.domain):
                self.error(0, learner.learner_adequacy_err_msg)
            else:
                classifier = learner(self.data)
                classifier.name = self.learner_name

        self.send("Learner", learner)
        self.send("Classifier", classifier)
Example #8
 def test_random(self):
     nrows, ncols = 1000, 5
     x = np.random.randint(-20, 51, (nrows, ncols))
     y = np.random.randint(-2, 3, (nrows, 1))
     x1, x2 = np.split(x, 2)
     y1, y2 = np.split(y, 2)
     attr = (ContinuousVariable('Feature 1'),
             ContinuousVariable('Feature 2'),
             ContinuousVariable('Feature 3'),
             ContinuousVariable('Feature 4'),
             ContinuousVariable('Feature 5'))
     class_vars = (DiscreteVariable('Target 1'),)
     domain = Domain(attr, class_vars)
     t = Table(domain, x1, y1)
     learn = KNNLearner()
     clf = learn(t)
     z = clf(x2)
     correct = (z == y2.flatten())
     ca = sum(correct)/len(correct)
     self.assertTrue(0.1 < ca < 0.3)
Example #9
 def test_random(self):
     nrows, ncols = 1000, 5
     x = np.random.randint(-20, 51, (nrows, ncols))
     y = np.random.randint(0, 9, (nrows, 1))
     x1, x2 = np.split(x, 2)
     y1, y2 = np.split(y, 2)
     attr = (ContinuousVariable('Feature 1'),
             ContinuousVariable('Feature 2'),
             ContinuousVariable('Feature 3'),
             ContinuousVariable('Feature 4'),
             ContinuousVariable('Feature 5'))
     class_vars = (DiscreteVariable('Target 1', values=list("abcdefghij")),)
     domain = Domain(attr, class_vars)
     t = Table(domain, x1, y1)
     lrn = KNNLearner()
     clf = lrn(t)
     z = clf(x2)
     correct = (z == y2.flatten())
     ca = sum(correct) / len(correct)
     self.assertGreater(ca, 0.1)
     self.assertLess(ca, 0.3)
Example #10
    def test_reprs(self):
        lr = LogisticRegressionLearner(tol=0.0002)
        m = MajorityLearner()
        nb = NaiveBayesLearner()
        rf = RandomForestLearner(bootstrap=False, n_jobs=3)
        st = SimpleTreeLearner(seed=1, bootstrap=True)
        sm = SoftmaxRegressionLearner()
        svm = SVMLearner(shrinking=False)
        lsvm = LinearSVMLearner(tol=0.022, dual=False)
        nsvm = NuSVMLearner(tol=0.003, cache_size=190)
        osvm = OneClassSVMLearner(degree=2)
        tl = TreeLearner(max_depth=3, min_samples_split=1)
        knn = KNNLearner(n_neighbors=4)
        el = EllipticEnvelopeLearner(store_precision=False)
        srf = SimpleRandomForestLearner(n_estimators=20)

        learners = [lr, m, nb, rf, st, sm, svm,
                    lsvm, nsvm, osvm, tl, knn, el, srf]

        for l in learners:
            repr_str = repr(l)
            new_l = eval(repr_str)
            self.assertEqual(repr(new_l), repr_str)
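`test_reprs` relies on each learner's `repr` being an evaluable constructor call, so `eval(repr(learner))` rebuilds an equivalent learner. A minimal sketch of the same round-trip for a single learner:

from Orange.classification import KNNLearner

knn = KNNLearner(n_neighbors=4)
rebuilt = eval(repr(knn))          # repr doubles as a constructor expression
assert repr(rebuilt) == repr(knn)  # the round-trip must be loss-free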
Example #11
 def test_random(self):
     nrows, ncols = 1000, 5
     x = np.random.randint(-20, 51, (nrows, ncols))
     y = np.random.randint(-2, 3, (nrows, 1))
     x1, x2 = np.split(x, 2)
     y1, y2 = np.split(y, 2)
     attr = (
         ContinuousVariable("Feature 1"),
         ContinuousVariable("Feature 2"),
         ContinuousVariable("Feature 3"),
         ContinuousVariable("Feature 4"),
         ContinuousVariable("Feature 5"),
     )
     class_vars = (DiscreteVariable("Target 1"),)
     domain = Domain(attr, class_vars)
     t = Table(domain, x1, y1)
     lrn = KNNLearner()
     clf = lrn(t)
     z = clf(x2)
     correct = z == y2.flatten()
     ca = sum(correct) / len(correct)
     self.assertGreater(ca, 0.1)
     self.assertLess(ca, 0.3)
Example #12
 def test_KNN(self):
     results = CrossValidation(self.iris, [KNNLearner()], k=3)
     ca = CA(results)
     self.assertGreater(ca, 0.8)
     self.assertLess(ca, 0.99)
Example #13
from Orange.data import Domain, Table
from Orange.classification import LogisticRegressionLearner
from Orange.classification import NaiveBayesLearner
from Orange.classification import TreeLearner
from Orange.classification import RandomForestLearner
from Orange.classification import KNNLearner
from Orange.classification import SVMLearner

### create models ###

models = [
    LogisticRegressionLearner(),
    NaiveBayesLearner(),
    TreeLearner(),
    RandomForestLearner(),
    KNNLearner(),
    SVMLearner(),
]

### read train data ###

train = Table.from_file('train.csv')
# move `sex` from X to Y (from attributes/features to class_var/target)
domain = Domain(train.domain.attributes[1:], train.domain.attributes[0])
train = train.transform(domain)

print('\n=== train.X ===')
print(train.X)
print('\n=== train.Y ===')
print(train.Y)
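The script stops after preparing the training table; a natural continuation is to fit each learner in `models` and predict on a hold-out set. A minimal sketch, assuming a `test.csv` with the same columns as `train.csv` (the file name is hypothetical):

### train and predict ###

test = Table.from_file('test.csv').transform(domain)  # hypothetical hold-out file
for learner in models:
    clf = learner(train)  # training a learner on a Table yields a model
    print(type(learner).__name__, clf(test))  # predicted class indices per row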
Example #14
 def test_input_learner_that_does_not_support_sample_weights(self):
     self.send_signal("Learner", KNNLearner())
     self.assertNotIsInstance(self.widget.base_estimator, KNNLearner)
     self.assertIsNone(self.widget.base_estimator)
     self.assertTrue(self.widget.Error.no_weight_support.is_shown())
Example #15
 def test_predict_single_instance(self):
     lrn = KNNLearner()
     clf = lrn(self.iris)
     for ins in self.iris[::20]:
         clf(ins)
         val, prob = clf(ins, clf.ValueProbs)
Example #16
 def test_KNN_mahalanobis(self):
     learners = [KNNLearner(metric="mahalanobis")]
     results = CrossValidation(self.iris, learners, k=3)
     ca = CA(results)
     self.assertGreater(ca, 0.8)
Example #17
 def setUpClass(cls):
     cls.iris = Table('iris')
     cls.learn = KNNLearner()