Code Example #1
    def test_best_config_metric(self):
        my_pipe_optimizer = Hyperpipe.Optimization(
            'grid_search', {}, [], 'balanced_accuracy', None)
        self.assertTrue(my_pipe_optimizer.maximize_metric)
        my_pipe_optimizer = Hyperpipe.Optimization(
            'grid_search', {}, [], 'mean_squared_error', None)
        self.assertFalse(my_pipe_optimizer.maximize_metric)
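The two assertions above indicate that Optimization derives the optimization direction from best_config_metric: a score metric such as 'balanced_accuracy' sets maximize_metric to True, while an error metric such as 'mean_squared_error' sets it to False.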
Code Example #2
File: inner_fold_tests.py  Project: nkourkou/photon
    def setUp(self):
        super(InnerFoldTests, self).setUp()
        self.pipe = PhotonPipeline([
            ("StandardScaler", PipelineElement("StandardScaler")),
            ("PCA", PipelineElement("PCA")),
            ("RidgeClassifier", PipelineElement("RidgeClassifier")),
        ])
        self.config = {
            "PCA__n_components": 5,
            "RidgeClassifier__solver": "svd",
            "RidgeClassifier__random_state": 42,
        }
        self.outer_fold_id = "TestID"
        self.inner_cv = KFold(n_splits=4)
        # return_X_y must be passed as a keyword in current scikit-learn versions
        self.X, self.y = load_breast_cancer(return_X_y=True)
        # positional args, presumably: inner_cv, outer_cv, eval_final_performance,
        # test_size, calculate_metrics_per_fold, calculate_metrics_across_folds
        # (order inferred from the keyword call in Code Example #7)
        self.cross_validation = Hyperpipe.CrossValidation(
            self.inner_cv, None, True, 0.2, True, False)
        self.cross_validation.inner_folds = {
            self.outer_fold_id: {
                i: FoldInfo(i, i + 1, train, test)
                for i, (train, test) in enumerate(
                    self.inner_cv.split(self.X, self.y))
            }
        }
        self.optimization = Hyperpipe.Optimization(
            "grid_search", {}, ["accuracy", "recall", "specificity"],
            "accuracy", None)
Code Example #3
    def setUp(self):
        super(InnerFoldTests, self).setUp()
        self.pipe = PhotonPipeline([
            ('StandardScaler', PipelineElement('StandardScaler')),
            ('PCA', PipelineElement('PCA')),
            ('RidgeClassifier', PipelineElement('RidgeClassifier'))
        ])
        self.config = {
            'PCA__n_components': 5,
            'RidgeClassifier__solver': 'svd',
            'RidgeClassifier__random_state': 42
        }
        self.outer_fold_id = 'TestID'
        self.inner_cv = KFold(n_splits=4)
        self.X, self.y = load_breast_cancer(return_X_y=True)
        # same setup as Code Example #2, but with two extra trailing positional
        # args, presumably learning_curves=False and learning_curves_cut=None
        # (cf. the keyword call in Code Example #7)
        self.cross_validation = Hyperpipe.CrossValidation(
            self.inner_cv, None, True, 0.2, True, False, False, None)
        self.cross_validation.inner_folds = {
            self.outer_fold_id: {
                i: FoldInfo(i, i + 1, train, test)
                for i, (train, test) in enumerate(
                    self.inner_cv.split(self.X, self.y))
            }
        }
        self.optimization = Hyperpipe.Optimization(
            'grid_search', {}, ['accuracy', 'recall', 'specificity'],
            'accuracy', None)
Code Example #4
File: hyperpipe_tests.py  Project: nkourkou/photon
    def test_get_optimum_config_outer_folds(self):
        my_pipe_optimizer = Hyperpipe.Optimization(
            "grid_search", {}, [], "balanced_accuracy", None
        )

        outer_fold_list = list()
        for i in range(10):
            outer_fold = MDBOuterFold()
            outer_fold.best_config = MDBConfig()
            outer_fold.best_config.best_config_score = MDBInnerFold()
            outer_fold.best_config.best_config_score.validation = MDBScoreInformation()
            # again fold 5 wins
            if i == 5:
                outer_fold.best_config.best_config_score.validation.metrics = {
                    "balanced_accuracy": 0.99
                }
            else:
                outer_fold.best_config.best_config_score.validation.metrics = {
                    "balanced_accuracy": 0.5
                }
            outer_fold_list.append(outer_fold)

        best_config_outer_folds = my_pipe_optimizer.get_optimum_config_outer_folds(
            outer_fold_list
        )
        self.assertEqual(
            best_config_outer_folds.best_config_score.validation.metrics[
                "balanced_accuracy"
            ],
            0.99,
        )
        self.assertIs(best_config_outer_folds, outer_fold_list[5].best_config)
Code Example #5
File: hyperpipe_tests.py  Project: nkourkou/photon
    def test_get_optimum_config(self):
        my_pipe_optimizer = Hyperpipe.Optimization(
            "grid_search", {}, [], "balanced_accuracy", None
        )
        list_of_tested_configs = list()
        metric_default = MDBFoldMetric(
            metric_name="balanced_accuracy", operation=FoldOperations.MEAN, value=0.5
        )
        metric_best = MDBFoldMetric(
            metric_name="balanced_accuracy", operation=FoldOperations.MEAN, value=0.99
        )
        # we add loser configs, one good config, and one good config that failed
        # and check if the good non-failing config is chosen
        for i in range(10):
            config = MDBConfig()
            # number 5 is the winner
            if i == 5 or i == 8:
                config.metrics_test = [metric_best]
            else:
                config.metrics_test = [metric_default]
            if i == 8:
                config.config_failed = True
            list_of_tested_configs.append(config)

        winner_config = my_pipe_optimizer.get_optimum_config(list_of_tested_configs)
        self.assertIs(winner_config, list_of_tested_configs[5])
        self.assertEqual(winner_config.metrics_test[0].value, 0.99)
Code Example #6
    def test_optmizer_input_str(self):
        with self.assertRaises(ValueError):
            my_pipe_optimizer = Hyperpipe.Optimization('unknown_optimizer', {},
                                                       [], 'accuracy', None)

        for name, opt_class in Hyperpipe.Optimization.OPTIMIZER_DICTIONARY.items():

            def get_optimizer(name):
                my_pipe_optimizer = Hyperpipe.Optimization(
                    name, {}, [], 'accuracy', None)
                return my_pipe_optimizer.get_optimizer()

            if name == 'smac':
                with self.assertRaises(ModuleNotFoundError):
                    get_optimizer(name)
            else:
                self.assertIsInstance(get_optimizer(name), opt_class)
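The loop above iterates Hyperpipe.Optimization.OPTIMIZER_DICTIONARY, which maps optimizer name strings such as 'grid_search' to optimizer classes, while an unknown name raises ValueError. The 'smac' entry is expected to raise ModuleNotFoundError, which suggests SMAC is an optional dependency not installed in the test environment.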
Code Example #7
File: test_outer_folds.py  Project: mkueh/photonai
    def setUp(self):

        super(OuterFoldTests, self).setUp()
        self.fold_nr_inner_cv = 5
        self.inner_cv = ShuffleSplit(n_splits=self.fold_nr_inner_cv,
                                     random_state=42)
        self.outer_cv = ShuffleSplit(n_splits=1,
                                     test_size=0.2,
                                     random_state=42)
        self.cv_info = Hyperpipe.CrossValidation(
            inner_cv=self.inner_cv,
            outer_cv=self.outer_cv,
            eval_final_performance=True,
            test_size=0.2,
            calculate_metrics_per_fold=True,
            calculate_metrics_across_folds=False,
            learning_curves=False,
            learning_curves_cut=None)

        # load_boston was removed in scikit-learn 1.2; on newer versions swap in
        # another regression dataset (e.g. load_diabetes)
        self.X, self.y = load_boston(return_X_y=True)
        self.outer_fold_id = "TestFoldOuter1"
        self.cv_info.outer_folds = {
            self.outer_fold_id: FoldInfo(0, 1, train, test)
            for train, test in self.outer_cv.split(self.X, self.y)
        }

        self.config_num = 2
        self.optimization_info = Hyperpipe.Optimization(
            metrics=['mean_absolute_error', 'mean_squared_error'],
            best_config_metric='mean_absolute_error',
            optimizer_input='grid_search',
            optimizer_params={},
            performance_constraints=None)
        self.elements = [
            PipelineElement('StandardScaler'),
            PipelineElement('PCA', {'n_components': [4, 7]}),
            PipelineElement('DecisionTreeRegressor', random_state=42)
        ]
        self.pipe = PhotonPipeline([(p.name, p) for p in self.elements])
Code Example #8
File: hyperpipe_tests.py  Project: nkourkou/photon
def get_optimizer(name):
    my_pipe_optimizer = Hyperpipe.Optimization(
        name, {}, [], "accuracy", None)
    return my_pipe_optimizer.get_optimizer()
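
Taken together, the calls above imply the positional signature Hyperpipe.Optimization(optimizer_input, optimizer_params, metrics, best_config_metric, performance_constraints); Code Example #7 spells out the same parameter names as keywords. The following minimal sketch is based on that inference and on the attributes exercised in these tests (maximize_metric, get_optimizer); the import path is an assumption and may differ between photonai versions.

from photonai.base import Hyperpipe  # assumed import path

# Build the optimization settings with explicit keywords (cf. Code Example #7).
optimization = Hyperpipe.Optimization(
    optimizer_input='grid_search',      # name looked up in OPTIMIZER_DICTIONARY
    optimizer_params={},                # forwarded to the optimizer class
    metrics=['accuracy', 'recall', 'specificity'],
    best_config_metric='accuracy',      # metric that decides the winning config
    performance_constraints=None)

# 'accuracy' is a score metric, so the direction should be maximize
# (cf. Code Example #1).
print(optimization.maximize_metric)

# Instantiate the configured optimizer (cf. Code Examples #6 and #8).
grid_search_optimizer = optimization.get_optimizer()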