Example #1
 def test_default_configuration_iris_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(AdaboostClassifier)
         self.assertAlmostEqual(
             0.22452300738472031,
             sklearn.metrics.log_loss(targets, predictions))
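Every example on this page calls a shared test utility, _test_classifier_predict_proba, whose definition is not shown. Below is a minimal, hypothetical sketch of what such a helper presumably does, with a plain scikit-learn classifier standing in for the auto-sklearn component wrapper; the helper name, split size, and parameters are assumptions, not the real utility.

import sklearn.datasets
import sklearn.metrics
from sklearn.ensemble import AdaBoostClassifier  # stand-in for a wrapped component
from sklearn.model_selection import train_test_split


def _test_classifier_predict_proba_sketch(classifier, dataset="iris", random_state=1):
    # Fit the classifier with its default settings on a small reference dataset
    # and return (class probabilities on the test split, true test targets).
    loader = {"iris": sklearn.datasets.load_iris,
              "digits": sklearn.datasets.load_digits}[dataset]
    X, y = loader(return_X_y=True)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=50, random_state=random_state)
    model = classifier(random_state=random_state).fit(X_train, y_train)
    return model.predict_proba(X_test), y_test


predictions, targets = _test_classifier_predict_proba_sketch(AdaBoostClassifier)
print(sklearn.metrics.log_loss(targets, predictions))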
Example #2
 def test_default_configuration_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(ExtraTreesClassifier)
         self.assertAlmostEqual(
             0.1086791056721286,
             sklearn.metrics.log_loss(targets, predictions))
Example #3
 def test_default_configuration_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(ExtraTreesClassifier)
         self.assertAlmostEqual(0.1086791056721286,
                                sklearn.metrics.log_loss(
                                    targets, predictions))
Example #4
 def test_default_configuration_predict_proba(self):
     for i in range(2):
         predictions, targets = \
             _test_classifier_predict_proba(DeepNetIterative)
         self.assertAlmostEqual(
             0.76018262995220975,
             sklearn.metrics.log_loss(targets, predictions))
Example #5
 def test_default_configuration_predict_proba(self):
     for i in range(2):
         predictions, targets = \
             _test_classifier_predict_proba(KNearestNeighborsClassifier)
         self.assertAlmostEqual(
             1.381551055796429,
             sklearn.metrics.log_loss(targets, predictions))
Example #6
 def test_default_configuration_multilabel_predict_proba(self):
     for i in range(10):
         predictions, targets = _test_classifier_predict_proba(
             DecisionTree, make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(
             0.83333333333333337,
             sklearn.metrics.average_precision_score(targets, predictions))
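The make_multilabel=True variants assert that predict_proba returns an (n_samples, 3) score matrix and grade it with average precision. A hedged sketch of that path, assuming the helper binarizes the iris labels into a three-column indicator matrix; the column stacking below is an illustration, not auto-sklearn's actual conversion.

import numpy as np
import sklearn.datasets
import sklearn.metrics
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import label_binarize

X, y = sklearn.datasets.load_iris(return_X_y=True)
Y = label_binarize(y, classes=[0, 1, 2])  # (150, 3) indicator matrix
X_train, X_test, Y_train, Y_test = train_test_split(
    X, Y, test_size=50, random_state=1)

clf = RandomForestClassifier(random_state=1).fit(X_train, Y_train)
# For multilabel targets predict_proba returns one (n, 2) array per label;
# keeping each label's positive-class column yields an (n, 3) score matrix.
scores = np.column_stack([p[:, 1] for p in clf.predict_proba(X_test)])

print(scores.shape)  # (50, 3)
print(sklearn.metrics.average_precision_score(Y_test, scores))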
Example #7
    def test_default_configuration_predict_proba_individual(self):
        # Leave this additional test here
        for i in range(2):
            predictions, targets = _test_classifier_predict_proba(
                LibSVM_SVC,
                sparse=True,
                dataset='digits',
                train_size_maximum=500)
            self.assertAlmostEqual(
                5.4706296711768925,
                sklearn.metrics.log_loss(targets, predictions))

        for i in range(2):
            predictions, targets = _test_classifier_predict_proba(
                LibSVM_SVC, sparse=True, dataset='iris')
            self.assertAlmostEqual(
                0.84336416900751887,
                sklearn.metrics.log_loss(targets, predictions))

        # 2 class
        for i in range(2):
            X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris')
            remove_training_data = Y_train == 2
            remove_test_data = Y_test == 2
            X_train = X_train[~remove_training_data]
            Y_train = Y_train[~remove_training_data]
            X_test = X_test[~remove_test_data]
            Y_test = Y_test[~remove_test_data]
            ss = sklearn.preprocessing.StandardScaler()
            X_train = ss.fit_transform(X_train)
            configuration_space = LibSVM_SVC.get_hyperparameter_search_space()
            default = configuration_space.get_default_configuration()

            cls = LibSVM_SVC(random_state=1,
                             **{
                                 hp_name: default[hp_name]
                                 for hp_name in default
                                 if default[hp_name] is not None
                             })

            cls = cls.fit(X_train, Y_train)
            prediction = cls.predict_proba(X_test)
            self.assertAlmostEqual(sklearn.metrics.log_loss(
                Y_test, prediction),
                                   0.6932,
                                   places=4)
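The two-class branch above instantiates the component straight from its default configuration. The dict comprehension simply turns that configuration (a mapping from hyperparameter names to values) into constructor keyword arguments while dropping inactive hyperparameters that are None. A small illustration of the pattern with a plain dict and scikit-learn's SVC standing in for the LibSVM_SVC wrapper; the hyperparameter values are made up for the sketch.

from sklearn.svm import SVC  # stand-in for the LibSVM_SVC wrapper

# Pretend this mapping came from configuration_space.get_default_configuration();
# inactive hyperparameters appear as None and must not reach the constructor.
default = {"C": 1.0, "kernel": "rbf", "gamma": 0.1, "degree": 3, "coef0": None}

cls = SVC(random_state=1, probability=True,
          **{hp_name: default[hp_name]
             for hp_name in default
             if default[hp_name] is not None})
print(cls.get_params()["kernel"])  # 'rbf'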
Example #8
 def test_default_configuration_multilabel_predict_proba(self):
     for i in range(10):
         predictions, targets = _test_classifier_predict_proba(
             DecisionTree, make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(0.83333333333333337,
                                sklearn.metrics.average_precision_score(
                                    targets, predictions))
Example #9
 def test_default_configuration_predict_proba_multilabel(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(LDA,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(0.96639166748245653,
                                sklearn.metrics.average_precision_score(
                                    targets, predictions))
Example #10
 def test_default_configuration_multilabel_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(classifier=GaussianNB,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(0.98533237262174234,
                                sklearn.metrics.average_precision_score(
                                    targets, predictions))
Example #11
 def test_default_configuration_predict_proba_multilabel(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(KNearestNeighborsClassifier,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(0.97060428849902536,
                                sklearn.metrics.average_precision_score(
                                    targets, predictions))
Example #12
 def test_default_configuration_multilabel_predict_proba(self):
     for i in range(2):
         predictions, targets = \
             _test_classifier_predict_proba(classifier=BernoulliNB,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(
             0.66666666666666663,
             sklearn.metrics.average_precision_score(targets, predictions))
Example #13
 def test_default_configuration_predict_proba_multilabel(self):
     for i in range(2):
         predictions, targets = \
             _test_classifier_predict_proba(DeepNetIterative,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(
             0.76835649552496521,
             sklearn.metrics.average_precision_score(targets, predictions))
Example #14
 def test_default_configuration_multilabel_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(classifier=MultinomialNB,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(
             0.76548981051208942,
             sklearn.metrics.average_precision_score(targets, predictions))
Example #15
 def test_default_configuration_predict_proba_multilabel(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(ExtraTreesClassifier,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(0.99401797442008899,
                                sklearn.metrics.average_precision_score(
                                    targets, predictions))
Example #16
 def test_default_configuration_predict_proba_multilabel(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(RandomForest,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(0.99252721833266977,
                                sklearn.metrics.average_precision_score(
                                    targets, predictions))
Example #17
 def test_default_configuration_multilabel_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(classifier=MultinomialNB,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(0.76548981051208942,
                                sklearn.metrics.average_precision_score(
                                    targets, predictions))
Example #18
 def test_default_configuration_multilabel_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(classifier=AdaboostClassifier,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(0.9722131915406923,
                                sklearn.metrics.average_precision_score(
                                    targets, predictions))
Example #19
 def test_default_configuration_multilabel_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(classifier=PassiveAggressive,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(0.99703892466326138,
                                sklearn.metrics.average_precision_score(
                                    targets, predictions))
Example #20
 def test_default_configuration_multilabel_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(classifier=GaussianNB,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(
             0.98533237262174234,
             sklearn.metrics.average_precision_score(targets, predictions))
Example #21
 def test_default_iris_predict_proba(self):
     for i in range(2):
         predictions, targets = \
             _test_classifier_predict_proba(dataset="iris",
                                            classifier=self.module)
         self.assertAlmostEqual(self.res["default_iris_proba"],
                                sklearn.metrics.log_loss(targets, predictions),
                                places=self.res.get(
                                        "default_iris_proba_places", 7))
Example #22
 def test_default_iris_predict_proba(self):
     for i in range(2):
         predictions, targets = \
             _test_classifier_predict_proba(dataset="iris",
                                            classifier=self.module)
         self.assertAlmostEqual(
             self.res["default_iris_proba"],
             sklearn.metrics.log_loss(targets, predictions),
             places=self.res.get("default_iris_proba_places", 7))
Example #23
 def test_default_configuration_predict_proba_multilabel(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(ExtraTreesClassifier,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(
             0.99401797442008899,
             sklearn.metrics.average_precision_score(targets, predictions))
Example #24
 def test_default_configuration_multilabel_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(classifier=GradientBoostingClassifier,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(0.92926139448174994,
                                sklearn.metrics.average_precision_score(
                                    targets, predictions))
Example #25
 def test_default_configuration_predict_proba_multilabel(self):
     for i in range(2):
         predictions, targets = \
             _test_classifier_predict_proba(KNearestNeighborsClassifier,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(
             0.97060428849902536,
             sklearn.metrics.average_precision_score(targets, predictions))
Example #26
 def test_default_configuration_predict_proba_multilabel(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(RandomForest,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(
             0.9943139211500065,
             sklearn.metrics.average_precision_score(targets, predictions))
Example #27
 def test_default_configuration_multilabel_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(classifier=AdaboostClassifier,
                                            make_multilabel=True)
         self.assertEqual(predictions.shape, ((50, 3)))
         self.assertAlmostEqual(
             0.9722131915406923,
             sklearn.metrics.average_precision_score(targets, predictions))
Example #28
    def test_default_configuration_predict_proba(self):
        for i in range(10):
            predictions, targets = _test_classifier_predict_proba(
                LibSVM_SVC,
                sparse=True,
                dataset='digits',
                train_size_maximum=500)
            self.assertAlmostEqual(
                4.6680593525563063,
                sklearn.metrics.log_loss(targets, predictions))

        for i in range(10):
            predictions, targets = _test_classifier_predict_proba(
                LibSVM_SVC, sparse=True, dataset='iris')
            self.assertAlmostEqual(
                0.8649665185853217,
                sklearn.metrics.log_loss(targets, predictions))

        # 2 class
        for i in range(10):
            X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris')
            remove_training_data = Y_train == 2
            remove_test_data = Y_test == 2
            X_train = X_train[~remove_training_data]
            Y_train = Y_train[~remove_training_data]
            X_test = X_test[~remove_test_data]
            Y_test = Y_test[~remove_test_data]
            ss = sklearn.preprocessing.StandardScaler()
            X_train = ss.fit_transform(X_train)
            configuration_space = LibSVM_SVC.get_hyperparameter_search_space()
            default = configuration_space.get_default_configuration()

            cls = LibSVM_SVC(random_state=1,
                             **{
                                 hp_name: default[hp_name]
                                 for hp_name in default
                                 if default[hp_name] is not None
                             })

            cls = cls.fit(X_train, Y_train)
            prediction = cls.predict_proba(X_test)
            self.assertAlmostEqual(
                sklearn.metrics.log_loss(Y_test, prediction),
                0.69323680119641773)
Example #29
    def test_default_configuration_predict_proba_individual(self):
        # Leave this additional test here
        for i in range(2):
            predictions, targets = _test_classifier_predict_proba(
                LibSVM_SVC, sparse=True, dataset='digits',
                train_size_maximum=500)
            self.assertAlmostEqual(5.4706296711768925,
                                   sklearn.metrics.log_loss(targets,
                                                            predictions))

        for i in range(2):
            predictions, targets = _test_classifier_predict_proba(
                LibSVM_SVC, sparse=True, dataset='iris')
            self.assertAlmostEqual(0.84336416900751887,
                                   sklearn.metrics.log_loss(targets,
                                                            predictions))

        # 2 class
        for i in range(2):
            X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris')
            remove_training_data = Y_train == 2
            remove_test_data = Y_test == 2
            X_train = X_train[~remove_training_data]
            Y_train = Y_train[~remove_training_data]
            X_test = X_test[~remove_test_data]
            Y_test = Y_test[~remove_test_data]
            ss = sklearn.preprocessing.StandardScaler()
            X_train = ss.fit_transform(X_train)
            configuration_space = LibSVM_SVC.get_hyperparameter_search_space()
            default = configuration_space.get_default_configuration()

            cls = LibSVM_SVC(random_state=1, **{hp_name: default[hp_name]
                                                for hp_name in default
                                                if default[hp_name] is not None})

            cls = cls.fit(X_train, Y_train)
            prediction = cls.predict_proba(X_test)
            self.assertAlmostEqual(sklearn.metrics.log_loss(Y_test, prediction),
                                   0.6932, places=4)
Example #30
    def test_default_configuration_predict_proba(self):
        for i in range(10):
            predictions, targets = _test_classifier_predict_proba(
                LibSVM_SVC, sparse=True, dataset='digits',
                train_size_maximum=500)
            self.assertAlmostEqual(4.6680593525563063,
                                   sklearn.metrics.log_loss(targets,
                                                            predictions))

        for i in range(10):
            predictions, targets = _test_classifier_predict_proba(
                LibSVM_SVC, sparse=True, dataset='iris')
            self.assertAlmostEqual(0.8649665185853217,
                                   sklearn.metrics.log_loss(targets,
                                                            predictions))

        # 2 class
        for i in range(10):
            X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris')
            remove_training_data = Y_train == 2
            remove_test_data = Y_test == 2
            X_train = X_train[~remove_training_data]
            Y_train = Y_train[~remove_training_data]
            X_test = X_test[~remove_test_data]
            Y_test = Y_test[~remove_test_data]
            ss = sklearn.preprocessing.StandardScaler()
            X_train = ss.fit_transform(X_train)
            configuration_space = LibSVM_SVC.get_hyperparameter_search_space()
            default = configuration_space.get_default_configuration()

            cls = LibSVM_SVC(random_state=1, **{hp_name: default[hp_name]
                                                for hp_name in default
                                                if default[hp_name] is not None})

            cls = cls.fit(X_train, Y_train)
            prediction = cls.predict_proba(X_test)
            self.assertAlmostEqual(sklearn.metrics.log_loss(Y_test, prediction),
                                   0.69323680119641773)
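Several examples pass sparse=True, dataset='digits', train_size_maximum=500 to exercise predict_proba on sparse input with a capped training set. A rough, self-contained sketch of that setup; the split, the cap handling, and the SVC stand-in are assumptions rather than the real helper.

import scipy.sparse
import sklearn.datasets
import sklearn.metrics
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC  # stand-in for the LibSVM_SVC wrapper

X, y = sklearn.datasets.load_digits(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1)
X_train, y_train = X_train[:500], y_train[:500]  # train_size_maximum=500
X_train = scipy.sparse.csr_matrix(X_train)       # sparse=True
X_test = scipy.sparse.csr_matrix(X_test)

clf = SVC(probability=True, random_state=1).fit(X_train, y_train)
print(sklearn.metrics.log_loss(y_test, clf.predict_proba(X_test)))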
Example #31
    def test_default_digits_multilabel_predict_proba(self):
        if not self.module.get_properties()["handles_multilabel"]:
            return

        for i in range(2):
            predictions, targets = \
                _test_classifier_predict_proba(classifier=self.module,
                                               make_multilabel=True)
            self.assertEqual(predictions.shape, ((50, 3)))
            self.assertAlmostEqual(self.res["default_digits_multilabel_proba"],
                                   sklearn.metrics.roc_auc_score(
                                       targets, predictions, average='macro'),
                                   places=self.res.get(
                                           "default_digits_multilabel_proba_places", 7))
Example #32
    def test_default_iris_predict_proba(self):

        if self.__class__ == BaseClassificationComponentTest:
            return

        for _ in range(2):
            predictions, targets = _test_classifier_predict_proba(
                dataset="iris", classifier=self.module
            )
            self.assertAlmostEqual(
                self.res["default_iris_proba"],
                sklearn.metrics.log_loss(targets, predictions),
                places=self.res.get("default_iris_proba_places", 7)
            )
Example #33
    def test_default_digits_multilabel_predict_proba(self):
        if not self.module.get_properties()["handles_multilabel"]:
            return

        for i in range(2):
            predictions, targets = \
                _test_classifier_predict_proba(classifier=self.module,
                                               make_multilabel=True)
            self.assertEqual(predictions.shape, ((50, 3)))
            self.assertAlmostEqual(self.res["default_digits_multilabel_proba"],
                                   sklearn.metrics.roc_auc_score(
                                       targets, predictions, average='macro'),
                                   places=self.res.get(
                                           "default_digits_multilabel_proba_places", 7))
Example #34
 def test_default_configuration_predict_proba(self):
     for i in range(10):
         predictions, targets = _test_classifier_predict_proba(DecisionTree)
         self.assertAlmostEqual(
             0.51333963481747835,
             sklearn.metrics.log_loss(targets, predictions))
Example #35
 def test_default_configuration_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(KNearestNeighborsClassifier)
         self.assertAlmostEqual(1.381551055796429,
                                sklearn.metrics.log_loss(targets, predictions))
Example #36
 def test_default_configuration_predict_proba(self):
     for i in range(10):
         predictions, targets = _test_classifier_predict_proba(
             DecisionTree, dataset='iris')
         self.assertAlmostEqual(0.28069887755912964,
                                sklearn.metrics.log_loss(targets, predictions))
Example #37
 def test_default_configuration_iris_predict_proba(self):
     for i in range(10):
         predictions, targets = \
             _test_classifier_predict_proba(AdaboostClassifier)
         self.assertAlmostEqual(0.22452300738472031,
                                sklearn.metrics.log_loss(targets, predictions))
Example #38
 def test_default_configuration_predict_proba(self):
     for i in range(10):
         predictions, targets = _test_classifier_predict_proba(DecisionTree)
         self.assertAlmostEqual(0.28069887755912964,
                                sklearn.metrics.log_loss(targets, predictions))