Code Example #1
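Compares a default-metric KNN regressor against a Mahalanobis-metric one on the housing data using 3-fold cross-validation, asserting that the Mahalanobis variant achieves the lower MSE.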
 def test_KNN_regression(self):
     learners = [KNNRegressionLearner(),
                 KNNRegressionLearner(metric="mahalanobis")]
     cv = CrossValidation(k=3)
     results = cv(self.housing, learners)
     mse = MSE(results)
     self.assertLess(mse[1], mse[0])
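The snippets in this listing are test methods excerpted without their imports. A minimal sketch of what Code Example #1 presumably relies on, assuming Orange3's public module layout and that self.housing is the bundled housing dataset:

    # Assumed module paths, based on Orange3's public API.
    from Orange.data import Table
    from Orange.evaluation import CrossValidation, MSE
    from Orange.regression import KNNRegressionLearner

    # In the test class, self.housing would be loaded roughly like this:
    housing = Table("housing")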
Code Example #2
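Apparently from a conformal-prediction add-on: the housing data is normalized to the [0, 1] span, and inductive conformal regressors are built with plain absolute-error nonconformity scores (linear regression and KNN) as well as normalized absolute-error scores (Euclidean distance over 4 neighbors, with and without exponential scaling, and with a random forest). Results are accumulated over repeated cross-sampling, summary statistics are printed, and each regressor's empirical accuracy is required to be at least 1 - eps - 0.03.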
    def test_abs_error_normalized(self):
        tab = Table('housing')
        normalizer = Normalize(zero_based=True,
                               norm_type=Normalize.NormalizeBySpan)
        tab = normalizer(tab)

        icr = InductiveRegressor(AbsError(LinearRegressionLearner()))
        icr_knn = InductiveRegressor(AbsError(KNNRegressionLearner(4)))
        icr_norm = InductiveRegressor(
            AbsErrorNormalized(KNNRegressionLearner(4), Euclidean, 4,
                               exp=False))
        icr_norm_exp = InductiveRegressor(
            AbsErrorNormalized(KNNRegressionLearner(4), Euclidean, 4,
                               exp=True))
        icr_norm_rf = InductiveRegressor(
            AbsErrorNormalized(KNNRegressionLearner(4), Euclidean, 4,
                               rf=RandomForestRegressor()))

        r, r_knn, r_norm, r_norm_exp, r_norm_rf = (
            ResultsRegr(), ResultsRegr(), ResultsRegr(), ResultsRegr(),
            ResultsRegr())
        eps = 0.05
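        # 10 repetitions of 10-fold cross-sampling; within each fold, 100
        # training rows are held out to calibrate the inductive regressors.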
        for rep in range(10):
            for train, test in CrossSampler(tab, 10):
                train, calibrate = next(
                    RandomSampler(train, len(train) - 100, 100))
                r.concatenate(run_train_test(icr, eps, train, test, calibrate))
                r_knn.concatenate(
                    run_train_test(icr_knn, eps, train, test, calibrate))
                r_norm.concatenate(
                    run_train_test(icr_norm, eps, train, test, calibrate))
                r_norm_exp.concatenate(
                    run_train_test(icr_norm_exp, eps, train, test, calibrate))
                r_norm_rf.concatenate(
                    run_train_test(icr_norm_rf, eps, train, test, calibrate))

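        # Print summary statistics for each regressor, then require an
        # empirical accuracy of at least 1 - eps - 0.03.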
        print(r.median_range(), r.interdecile_mean(), 1 - r.accuracy())
        print(r_knn.median_range(), r_knn.interdecile_mean(),
              1 - r_knn.accuracy())
        print(r_norm.median_range(), r_norm.interdecile_mean(),
              1 - r_norm.accuracy())
        print(r_norm_exp.median_range(), r_norm_exp.interdecile_mean(),
              1 - r_norm_exp.accuracy())
        print(r_norm_rf.median_range(), r_norm_rf.interdecile_mean(),
              1 - r_norm_rf.accuracy())
        self.assertGreater(r.accuracy(), 1 - eps - 0.03)
        self.assertGreater(r_knn.accuracy(), 1 - eps - 0.03)
        self.assertGreater(r_norm.accuracy(), 1 - eps - 0.03)
        self.assertGreater(r_norm_exp.accuracy(), 1 - eps - 0.03)
        self.assertGreater(r_norm_rf.accuracy(), 1 - eps - 0.03)
        """
Code Example #3
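Checks that the widget's no_weight_support error, triggered by a KNNRegressionLearner input, is cleared both when the learner is disconnected and when it is replaced by a learner that supports weights (RandomForestRegressionLearner).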
 def test_error_message_cleared_when_valid_learner_on_input(self):
     # Disconnecting an invalid learner should use the default one and hide
     # the error
     self.send_signal("Learner", KNNRegressionLearner())
     self.send_signal('Learner', None)
     self.assertFalse(self.widget.Error.no_weight_support.is_shown(),
                      'Error message was not hidden on input disconnect')
     # Connecting a valid learner should also reset the error message
     self.send_signal("Learner", KNNRegressionLearner())
     self.send_signal('Learner', RandomForestRegressionLearner())
     self.assertFalse(
         self.widget.Error.no_weight_support.is_shown(),
         'Error message was not hidden when a valid learner appeared on '
         'input')
Code Example #4
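Builds a single-feature table whose first three target values are NaN and verifies that 1- and 3-neighbor KNN regressors still predict 1.0, i.e., that missing target values do not break training or prediction.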
 def test_nan(self):
     lrn1 = KNNRegressionLearner(n_neighbors=1)
     lrn3 = KNNRegressionLearner(n_neighbors=3)
     X = np.arange(1, 7)[:, None]
     Y = np.array([np.nan, np.nan, np.nan, 1, 1, 1])
     attr = (ContinuousVariable("Feat 1"), )
     class_var = (ContinuousVariable("Class"), )
     domain = Domain(attr, class_var)
     data = Table(domain, X, Y)
     clf = lrn1(data)
     predictions = clf(data)
     self.assertEqual(predictions[0], 1.0)
     clf = lrn3(data)
     predictions = clf(data)
     self.assertEqual(predictions[3], 1.0)
Code Example #5
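Verifies that disconnecting the input learner restores the widget's DEFAULT_BASE_ESTIMATOR as the base estimator.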
 def test_input_learner_disconnect(self):
     """Check base learner after disconnecting learner on the input"""
     self.send_signal("Learner", KNNRegressionLearner())
     self.assertIsInstance(self.widget.base_estimator, KNNRegressionLearner)
     self.send_signal("Learner", None)
     self.assertEqual(self.widget.base_estimator,
                      self.widget.DEFAULT_BASE_ESTIMATOR)
Code Example #6
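Verifies that a learner without sample-weight support (KNNRegressionLearner) is rejected: the base estimator remains unset and the no_weight_support error is shown.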
 def test_input_learner_that_does_not_support_sample_weights(self):
     self.send_signal("Learner", KNNRegressionLearner())
     self.assertNotIsInstance(self.widget.base_estimator,
                              KNNRegressionLearner)
     self.assertIsNone(self.widget.base_estimator)
     self.assertTrue(self.widget.Error.no_weight_support.is_shown())