    def get_hyperparameter_search_space(cls, dataset_properties,
                                        default=None,
                                        include=None,
                                        exclude=None):
        cs = ConfigurationSpace()

        # Compile a list of legal preprocessors for this problem
        available_preprocessors = cls.get_available_components(
            data_prop=dataset_properties,
            include=include, exclude=exclude)

        if len(available_preprocessors) == 0:
            raise ValueError(
                "No preprocessors found, please add no_preprocessing")

        if default is None:
            defaults = ['no_preprocessing', 'select_percentile', 'pca',
                        'truncatedSVD']
            for default_ in defaults:
                if default_ in available_preprocessors:
                    default = default_
                    break

        preprocessor = CategoricalHyperparameter('__choice__',
                                                 list(
                                                     available_preprocessors.keys()),
                                                 default=default)
        cs.add_hyperparameter(preprocessor)
        for name in available_preprocessors:
            preprocessor_configuration_space = available_preprocessors[name]. \
                get_hyperparameter_search_space(dataset_properties)
            cs = add_component_deepcopy(cs, name,
                                        preprocessor_configuration_space)

        return cs
Example #2
 def add_params(cs: ConfigurationSpace):
     '''
         adds parameters to ConfigurationSpace 
     '''
     switch = CategoricalHyperparameter(
         "StandardScaler", choices=[True, False], default=True)
     cs.add_hyperparameter(switch)
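
A minimal, self-contained sketch of how such an add_params helper is typically called; it assumes a current ConfigSpace release, where the keyword is default_value rather than the older default used above.

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import CategoricalHyperparameter

def add_params(cs: ConfigurationSpace):
    # Boolean switch controlling whether a StandardScaler step is enabled.
    switch = CategoricalHyperparameter(
        "StandardScaler", choices=[True, False], default_value=True)
    cs.add_hyperparameter(switch)

cs = ConfigurationSpace()
add_params(cs)
print(cs.sample_configuration())  # e.g. StandardScaler -> True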
Example #3
    def test_build_new_GreaterThanIntCondition(self):
        expected = "a real [0.0, 1.0] [0.5]\n" \
                   "b integer [0, 10] [5]\n\n" \
                   "b | a > 0.5"
        cs = ConfigurationSpace()
        a = UniformFloatHyperparameter("a", 0, 1, 0.5)
        b = UniformIntegerHyperparameter("b", 0, 10, 5)
        cs.add_hyperparameter(a)
        cs.add_hyperparameter(b)
        cond = GreaterThanCondition(b, a, 0.5)
        cs.add_condition(cond)

        value = pcs_new.write(cs)
        self.assertEqual(expected, value)

        expected = "a integer [0, 10] [5]\n" \
                   "b integer [0, 10] [5]\n\n" \
                   "b | a > 5"
        cs = ConfigurationSpace()
        a = UniformIntegerHyperparameter("a", 0, 10, 5)
        b = UniformIntegerHyperparameter("b", 0, 10, 5)
        cs.add_hyperparameter(a)
        cs.add_hyperparameter(b)
        cond = GreaterThanCondition(b, a, 5)
        cs.add_condition(cond)

        value = pcs_new.write(cs)
        self.assertEqual(expected, value)
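
ConfigSpace.read_and_write.pcs_new also provides a read() counterpart, so the written string can be parsed back into a ConfigurationSpace. A small round-trip sketch; splitting the string into lines is assumed to be equivalent to the fh.readlines() usage shown in the load_configspace example further down this page.

from ConfigSpace import ConfigurationSpace
from ConfigSpace.conditions import GreaterThanCondition
from ConfigSpace.hyperparameters import (UniformFloatHyperparameter,
                                         UniformIntegerHyperparameter)
from ConfigSpace.read_and_write import pcs_new

cs = ConfigurationSpace()
a = UniformFloatHyperparameter("a", 0, 1, 0.5)
b = UniformIntegerHyperparameter("b", 0, 10, 5)
cs.add_hyperparameters([a, b])
cs.add_condition(GreaterThanCondition(b, a, 0.5))

serialized = pcs_new.write(cs)
restored = pcs_new.read(serialized.split("\n"))
print(restored.get_conditions())  # [b | a > 0.5]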
Example #4
 def test_write_float(self):
     expected = "float_a '--float_a ' r (16.000000, 1024.000000)\n"
     cs = ConfigurationSpace()
     cs.add_hyperparameter(
         UniformFloatHyperparameter("float_a", 16, 1024))
     value = irace.write(cs)
     self.assertEqual(expected, value)
Example #5
 def test_write_new_q_int(self):
     expected = "Q16_int_a integer [16, 1024] [520]"
     cs = ConfigurationSpace()
     cs.add_hyperparameter(
         UniformIntegerHyperparameter("int_a", 16, 1024, q=16))
     value = pcs_new.write(cs)
     self.assertEqual(expected, value)
Example #6
 def get_hyperparameter_search_space(dataset_properties=None):
     # TODO add replace by zero!
     strategy = CategoricalHyperparameter(
         "strategy", ["none", "weighting"], default_value="none")
     cs = ConfigurationSpace()
     cs.add_hyperparameter(strategy)
     return cs
Example #7
 def get_hyperparameter_search_space(dataset_properties=None):
     N = UniformIntegerHyperparameter("N", 5, 20, default=10)
     precond = UniformFloatHyperparameter("precond", 0, 0.5, default=0.1)
     cs = ConfigurationSpace()
     cs.add_hyperparameter(N)
     cs.add_hyperparameter(precond)
     return cs
Example #8
 def test_write_new_log10(self):
     expected = "a real [10.0, 1000.0] [100.0]log"
     cs = ConfigurationSpace()
     cs.add_hyperparameter(
         UniformFloatHyperparameter("a", 10, 1000, log=True))
     value = pcs_new.write(cs)
     self.assertEqual(expected, value)
    def test_add_forbidden(self):
        m = numpy.ones([2, 3])
        preprocessors_list = ['pa', 'pb']
        classifier_list = ['ca', 'cb', 'cc']
        cs = ConfigurationSpace()
        preprocessor = CategoricalHyperparameter(name='preprocessor',
                                                 choices=preprocessors_list)
        classifier = CategoricalHyperparameter(name='classifier',
                                               choices=classifier_list)
        cs.add_hyperparameter(preprocessor)
        cs.add_hyperparameter(classifier)
        new_cs = autosklearn.pipeline.create_searchspace_util.add_forbidden(
            conf_space=cs, node_0_list=preprocessors_list,
            node_1_list=classifier_list, matches=m,
            node_0_name='preprocessor', node_1_name="classifier")
        self.assertEqual(len(new_cs.forbidden_clauses), 0)
        self.assertIsInstance(new_cs, ConfigurationSpace)

        m[1, 1] = 0
        new_cs = autosklearn.pipeline.create_searchspace_util.add_forbidden(
            conf_space=cs, node_0_list=preprocessors_list,
            node_1_list=classifier_list, matches=m,
            node_0_name='preprocessor', node_1_name="classifier")
        self.assertEqual(len(new_cs.forbidden_clauses), 1)
        self.assertEqual(new_cs.forbidden_clauses[0].components[0].value, 'cb')
        self.assertEqual(new_cs.forbidden_clauses[0].components[1].value, 'pb')
        self.assertIsInstance(new_cs, ConfigurationSpace)
    def get_hyperparameter_search_space(cls, dataset_properties=None,
                                        default=None,
                                        include=None,
                                        exclude=None):
        cs = ConfigurationSpace()

        # Compile a list of legal preprocessors for this problem
        available_preprocessors = cls.get_available_components(
            data_prop=dataset_properties,
            include=include, exclude=exclude)

        if len(available_preprocessors) == 0:
            raise ValueError(
                "No rescaling algorithm found.")

        if default is None:
            defaults = ['min/max', 'standardize', 'none', 'normalize']
            for default_ in defaults:
                if default_ in available_preprocessors:
                    default = default_
                    break

        preprocessor = CategoricalHyperparameter('__choice__',
                                                 list(
                                                     available_preprocessors.keys()),
                                                 default=default)
        cs.add_hyperparameter(preprocessor)
        for name in available_preprocessors:
            preprocessor_configuration_space = available_preprocessors[name]. \
                get_hyperparameter_search_space(dataset_properties)
            cs = add_component_deepcopy(cs, name,
                                        preprocessor_configuration_space)

        return cs
Example #11
 def get_hyperparameter_search_space(dataset_properties=None):
     # TODO add replace by zero!
     strategy = CategoricalHyperparameter(
         "strategy", ["mean", "median", "most_frequent"], default_value="mean")
     cs = ConfigurationSpace()
     cs.add_hyperparameter(strategy)
     return cs
Example #12
 def test_write_new_q_float(self):
     expected = "Q16_float_a real [16.0, 1024.0] [520.0]"
     cs = ConfigurationSpace()
     cs.add_hyperparameter(
         UniformFloatHyperparameter("float_a", 16, 1024, q=16))
     value = pcs_new.write(cs)
     self.assertEqual(expected, value)
Example #13
 def test_write_categorical(self):
     expected = "cat_a '--cat_a ' c {a,b,c}\n"
     cs = ConfigurationSpace()
     cs.add_hyperparameter(
         CategoricalHyperparameter("cat_a", ["a", "b", "c"]))
     value = irace.write(cs)
     self.assertEqual(expected, value)
Example #14
 def test_write_ordinal(self):
     expected = "ord_a '--ord_a ' o {a,b,3}\n"
     cs = ConfigurationSpace()
     cs.add_hyperparameter(
         OrdinalHyperparameter("ord_a", ["a", "b", 3]))
     value = irace.write(cs)
     self.assertEqual(expected, value)
Example #15
 def test_write_log_int(self):
     expected = "int_log '--int_log ' i (2, 4)\n"
     int_log = UniformIntegerHyperparameter("int_log", 10, 100, log=True)
     cs = ConfigurationSpace()
     cs.add_hyperparameter(int_log)
     value = irace.write(cs)
     self.assertEqual(expected, value)
Example #16
 def test_write_log_float(self):
     import numpy as np
     expected = "float_log '--float_log ' r (2.000000, 5.000000)\n"
     float_log = UniformFloatHyperparameter("float_log", np.exp(2), np.exp(5), log=True)
     cs = ConfigurationSpace()
     cs.add_hyperparameter(float_log)
     value = irace.write(cs)
     self.assertEqual(expected, value)
Example #17
File: pca.py  Project: Ayaro/auto-sklearn
 def get_hyperparameter_search_space(dataset_properties=None):
     keep_variance = UniformFloatHyperparameter(
         "keep_variance", 0.5, 0.9999, default=0.9999)
     whiten = CategoricalHyperparameter(
         "whiten", ["False", "True"], default="False")
     cs = ConfigurationSpace()
     cs.add_hyperparameter(keep_variance)
     cs.add_hyperparameter(whiten)
     return cs
 def get_hyperparameter_search_space(dataset_properties=None):
     cs = ConfigurationSpace()
     alpha = cs.add_hyperparameter(UniformFloatHyperparameter(
         "alpha", 10 ** -5, 10., log=True, default=1.))
     fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter(
         "fit_intercept", "True"))
     tol = cs.add_hyperparameter(UniformFloatHyperparameter(
         "tol", 1e-5, 1e-1, default=1e-4, log=True))
     return cs
Example #19
 def get_hyperparameter_search_space(dataset_properties=None):
     cs = ConfigurationSpace()
     use_minimum_fraction = cs.add_hyperparameter(CategoricalHyperparameter(
         "use_minimum_fraction", ["True", "False"], default="True"))
     minimum_fraction = cs.add_hyperparameter(UniformFloatHyperparameter(
         "minimum_fraction", lower=.0001, upper=0.5, default=0.01, log=True))
     cs.add_condition(EqualsCondition(minimum_fraction,
                                      use_minimum_fraction, 'True'))
     return cs
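
The EqualsCondition above makes minimum_fraction active only while use_minimum_fraction == 'True'. A short sketch of that behaviour, assuming a recent ConfigSpace release (default_value keyword, mapping-style Configuration access):

from ConfigSpace import ConfigurationSpace
from ConfigSpace.conditions import EqualsCondition
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                         UniformFloatHyperparameter)

cs = ConfigurationSpace()
use_minimum_fraction = CategoricalHyperparameter(
    "use_minimum_fraction", ["True", "False"], default_value="True")
minimum_fraction = UniformFloatHyperparameter(
    "minimum_fraction", lower=0.0001, upper=0.5, default_value=0.01, log=True)
cs.add_hyperparameters([use_minimum_fraction, minimum_fraction])
cs.add_condition(EqualsCondition(minimum_fraction, use_minimum_fraction, "True"))

for config in cs.sample_configuration(5):
    # minimum_fraction is inactive (None) whenever the parent is "False"
    print(config["use_minimum_fraction"], config.get("minimum_fraction"))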
Example #20
 def get_hyperparameter_search_space(dataset_properties=None):
     gamma = UniformFloatHyperparameter(
         "gamma", 0.3, 2., default=1.0)
     n_components = UniformIntegerHyperparameter(
         "n_components", 50, 10000, default=100, log=True)
     cs = ConfigurationSpace()
     cs.add_hyperparameter(gamma)
     cs.add_hyperparameter(n_components)
     return cs
Example #21
    def get_hyperparameter_search_space(dataset_properties=None):
        nugget = UniformFloatHyperparameter(name="nugget", lower=0.0001, upper=10, default=0.1, log=True)
        thetaL = UniformFloatHyperparameter(name="thetaL", lower=1e-6, upper=1e-3, default=1e-4, log=True)
        thetaU = UniformFloatHyperparameter(name="thetaU", lower=0.2, upper=10, default=1.0, log=True)

        cs = ConfigurationSpace()
        cs.add_hyperparameter(nugget)
        cs.add_hyperparameter(thetaL)
        cs.add_hyperparameter(thetaU)
        return cs
Example #22
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        n_components = cs.add_hyperparameter(UniformIntegerHyperparameter("n_components", 10, 2000, default=100))
        algorithm = cs.add_hyperparameter(CategoricalHyperparameter("algorithm", ["parallel", "deflation"], "parallel"))
        whiten = cs.add_hyperparameter(CategoricalHyperparameter("whiten", ["False", "True"], "False"))
        fun = cs.add_hyperparameter(CategoricalHyperparameter("fun", ["logcosh", "exp", "cube"], "logcosh"))

        cs.add_condition(EqualsCondition(n_components, whiten, "True"))

        return cs
Example #23
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        n_neighbors = cs.add_hyperparameter(UniformIntegerHyperparameter(
            name="n_neighbors", lower=1, upper=100, log=True, default=1))
        weights = cs.add_hyperparameter(CategoricalHyperparameter(
            name="weights", choices=["uniform", "distance"], default="uniform"))
        p = cs.add_hyperparameter(CategoricalHyperparameter(
            name="p", choices=[1, 2], default=2))

        return cs
    def get_hyperparameter_search_space(dataset_properties=None):
        percentile = UniformFloatHyperparameter(
            "percentile", lower=1, upper=99, default=50)

        score_func = UnParametrizedHyperparameter(
            name="score_func", value="f_regression")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(percentile)
        cs.add_hyperparameter(score_func)
        return cs
    def add_params(cs: ConfigurationSpace):
        '''
            adds parameters to ConfigurationSpace 
        '''

        try:
            selector = cs.get_hyperparameter("selector")
            selector.choices.append("PairwiseClassifier")
        except KeyError:
            selector = CategoricalHyperparameter(
                "selector", choices=["PairwiseClassifier"], default="PairwiseClassifier")
            cs.add_hyperparameter(selector)
Example #26
File: lda.py  Project: Ayaro/auto-sklearn
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        shrinkage = cs.add_hyperparameter(CategoricalHyperparameter(
            "shrinkage", ["None", "auto", "manual"], default="None"))
        shrinkage_factor = cs.add_hyperparameter(UniformFloatHyperparameter(
            "shrinkage_factor", 0., 1., 0.5))
        n_components = cs.add_hyperparameter(UniformIntegerHyperparameter(
            'n_components', 1, 250, default=10))
        tol = cs.add_hyperparameter(UniformFloatHyperparameter(
            "tol", 1e-5, 1e-1, default=1e-4, log=True))

        cs.add_condition(EqualsCondition(shrinkage_factor, shrinkage, "manual"))
        return cs
Example #27
    def test_write_in_condition(self):
        expected = "ls '--ls ' c {sa,ca,ny}\ntemp '--temp ' r (0.500000, 1.000000)|  ls  %in%  c(sa,ca)\n"

        temp = UniformFloatHyperparameter("temp", 0.5, 1)
        ls = CategoricalHyperparameter("ls", ["sa", "ca", "ny"], "sa")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(temp)
        cs.add_hyperparameter(ls)
        c1 = InCondition(temp, ls, ['sa','ca'])
        cs.add_condition(c1)
        value = irace.write(cs)
        self.assertEqual(expected, value)
Example #28
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        # base_estimator = Constant(name="base_estimator", value="None")
        n_estimators = cs.add_hyperparameter(UniformIntegerHyperparameter(
            name="n_estimators", lower=50, upper=500, default=50, log=False))
        learning_rate = cs.add_hyperparameter(UniformFloatHyperparameter(
            name="learning_rate", lower=0.01, upper=2, default=0.1, log=True))
        algorithm = cs.add_hyperparameter(CategoricalHyperparameter(
            name="algorithm", choices=["SAMME.R", "SAMME"], default="SAMME.R"))
        max_depth = cs.add_hyperparameter(UniformIntegerHyperparameter(
            name="max_depth", lower=1, upper=10, default=1, log=False))
        return cs
Example #29
 def test_build_new_forbidden(self):
     expected = "a categorical {a, b, c} [a]\nb categorical {a, b, c} [c]\n\n" \
                "{a=a, b=a}\n{a=a, b=b}\n{a=b, b=a}\n{a=b, b=b}"
     cs = ConfigurationSpace()
     a = CategoricalHyperparameter("a", ["a", "b", "c"], "a")
     b = CategoricalHyperparameter("b", ["a", "b", "c"], "c")
     cs.add_hyperparameter(a)
     cs.add_hyperparameter(b)
     fb = ForbiddenAndConjunction(ForbiddenInClause(a, ["a", "b"]),
                                  ForbiddenInClause(b, ["a", "b"]))
     cs.add_forbidden_clause(fb)
     value = pcs_new.write(cs)
     self.assertIn(expected, value)
Example #30
    def test_write_equals_condition_numerical(self):
        expected = "temp '--temp ' i (1, 2)\nls '--ls ' c {sa,ca,ny}|  temp==2\n"

        temp = UniformIntegerHyperparameter("temp", 1, 2)
        ls = CategoricalHyperparameter("ls", ["sa", "ca", "ny"], "sa")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(temp)
        cs.add_hyperparameter(ls)
        c1 = EqualsCondition(ls, temp, 2)
        cs.add_condition(c1)
        value = irace.write(cs)
        self.assertEqual(expected, value)
Example #31
    def get_hyperparameter_search_space(
        self,
        dataset_properties: Optional[DATASET_PROPERTIES_TYPE] = None,
        default: Optional[str] = None,
        include: Optional[Dict[str, str]] = None,
        exclude: Optional[Dict[str, str]] = None,
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        if dataset_properties is None:
            dataset_properties = {}

        # Compile a list of legal preprocessors for this problem
        available_preprocessors = self.get_available_components(
            dataset_properties=dataset_properties,
            include=include,
            exclude=exclude)

        if len(available_preprocessors) == 0:
            raise ValueError(
                "No minority coalescers found, please add any one minority coalescer"
                "component.")

        if default is None:
            defaults = ['minority_coalescer', 'no_coalescense']
            for default_ in defaults:
                if default_ in available_preprocessors:
                    default = default_
                    break

        preprocessor = CategoricalHyperparameter(
            '__choice__',
            list(available_preprocessors.keys()),
            default_value=default)
        cs.add_hyperparameter(preprocessor)
        for name in available_preprocessors:
            preprocessor_configuration_space = available_preprocessors[name]. \
                get_hyperparameter_search_space(dataset_properties)
            parent_hyperparameter = {'parent': preprocessor, 'value': name}
            cs.add_configuration_space(
                name,
                preprocessor_configuration_space,
                parent_hyperparameter=parent_hyperparameter)

        self.configuration_space = cs
        self.dataset_properties = dataset_properties
        return cs
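
The add_configuration_space call is what nests each component's sub-space under the __choice__ hyperparameter: the sub-space parameters get prefixed with the component name and become conditional on that choice. A minimal sketch with hypothetical component names:

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                         UniformFloatHyperparameter)

cs = ConfigurationSpace()
choice = CategoricalHyperparameter("__choice__", ["pca", "nothing"],
                                   default_value="pca")
cs.add_hyperparameter(choice)

pca_space = ConfigurationSpace()
pca_space.add_hyperparameter(
    UniformFloatHyperparameter("keep_variance", 0.5, 0.9999))

cs.add_configuration_space(
    "pca", pca_space,
    parent_hyperparameter={"parent": choice, "value": "pca"})
print(cs.get_hyperparameter_names())  # e.g. ['__choice__', 'pca:keep_variance']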
Example #32
def read(jason_string):
    """
    Creates a configuration space definition from a json string.

    Example
    -------

    >>> from ConfigSpace.read_and_write import json
    >>> with open('configspace.json', 'r') as f:
    ...     jason_string = f.read()
    ...     config = json.read(jason_string)

    Parameters
    ----------
    jason_string : str
        A json string representing a configuration space definition

    Returns
    -------
    :class:`~ConfigSpace.configuration_space.ConfigurationSpace`
        The restored ConfigurationSpace object
    """
    jason = json.loads(jason_string)
    if 'name' in jason:
        configuration_space = ConfigurationSpace(name=jason['name'])
    else:
        configuration_space = ConfigurationSpace()

    for hyperparameter in jason['hyperparameters']:
        configuration_space.add_hyperparameter(
            _construct_hyperparameter(hyperparameter, ))

    for condition in jason['conditions']:
        configuration_space.add_condition(
            _construct_condition(
                condition,
                configuration_space,
            ))

    for forbidden in jason['forbiddens']:
        configuration_space.add_forbidden_clause(
            _construct_forbidden(
                forbidden,
                configuration_space,
            ))

    return configuration_space
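
A short round-trip sketch pairing this read() with the matching write() from the same module:

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter
from ConfigSpace.read_and_write import json as cs_json

cs = ConfigurationSpace(name="demo")
cs.add_hyperparameter(UniformFloatHyperparameter("alpha", 1e-3, 1.0, log=True))

jason_string = cs_json.write(cs)       # serialize the space to a JSON string
restored = cs_json.read(jason_string)  # the function shown above
print(restored.get_hyperparameter("alpha"))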
Example #33
    def get_hyperparameter_search_space(dataset_properties=None):
        percentile = UniformFloatHyperparameter(
            name="percentile", lower=1, upper=99, default=50)

        score_func = CategoricalHyperparameter(
            name="score_func", choices=["chi2", "f_classif"], default="chi2")
        if dataset_properties is not None:
            # Chi2 can handle sparse data, so we respect this
            if 'is_sparse' in dataset_properties and dataset_properties['is_sparse']:
                score_func = Constant(
                    name="score_func", value="chi2")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(percentile)
        cs.add_hyperparameter(score_func)

        return cs
Example #34
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        n_neighbors = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="n_neighbors",
                                         lower=1,
                                         upper=100,
                                         log=True,
                                         default=1))
        weights = cs.add_hyperparameter(
            CategoricalHyperparameter(name="weights",
                                      choices=["uniform", "distance"],
                                      default="uniform"))
        p = cs.add_hyperparameter(
            CategoricalHyperparameter(name="p", choices=[1, 2], default=2))

        return cs
Example #35
File: scaler.py  Project: Dee-Why/hp-tuner
 def get_hyperparameter_search_space(dataset_properties=None,
                                     optimizer='smac'):
     if optimizer == 'smac':
         cs = ConfigurationSpace()
         scaler = CategoricalHyperparameter(
             'scaler', ['min_max', 'max_abs', 'standard', 'robust'],
             default_value='min_max')
         cs.add_hyperparameter(scaler)
         return cs
     elif optimizer == 'tpe':
         from hyperopt import hp
         space = {
             'scaler':
             hp.choice('scaler_scaler',
                       ['min_max', 'max_abs', 'standard', 'robust'])
         }
         return space
Example #36
 def get_hyperparameter_search_space(dataset_properties=None):
     cs = ConfigurationSpace()
     alpha = cs.add_hyperparameter(
         UniformFloatHyperparameter("alpha",
                                    10**-5,
                                    10.,
                                    log=True,
                                    default=1.))
     fit_intercept = cs.add_hyperparameter(
         UnParametrizedHyperparameter("fit_intercept", "True"))
     tol = cs.add_hyperparameter(
         UniformFloatHyperparameter("tol",
                                    1e-5,
                                    1e-1,
                                    default=1e-4,
                                    log=True))
     return cs
Example #37
    def get_hyperparameter_search_space(self,
                                        dataset_properties=None,
                                        default=None,
                                        include=None,
                                        exclude=None):
        cs = ConfigurationSpace()

        if dataset_properties is None:
            dataset_properties = {}

        # Compile a list of legal preprocessors for this problem
        available_preprocessors = self.get_available_components(
            dataset_properties=dataset_properties,
            include=include,
            exclude=exclude)

        if len(available_preprocessors) == 0:
            raise ValueError(
                "No preprocessors found, please add NoPreprocessing")

        if default is None:
            defaults = [
                'no_preprocessing', 'select_percentile', 'pca', 'truncatedSVD'
            ]
            for default_ in defaults:
                if default_ in available_preprocessors:
                    default = default_
                    break

        preprocessor = CategoricalHyperparameter(
            '__choice__',
            list(available_preprocessors.keys()),
            default_value=default)
        cs.add_hyperparameter(preprocessor)
        for name in available_preprocessors:
            preprocessor_configuration_space = available_preprocessors[name]. \
                get_hyperparameter_search_space(dataset_properties)
            parent_hyperparameter = {'parent': preprocessor, 'value': name}
            cs.add_configuration_space(
                name,
                preprocessor_configuration_space,
                parent_hyperparameter=parent_hyperparameter)

        self.configuration_space_ = cs
        self.dataset_properties_ = dataset_properties
        return cs
Example #38
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        C = cs.add_hyperparameter(UniformFloatHyperparameter(
            "C", 0.03125, 32768, log=True, default=1.0))
        loss = cs.add_hyperparameter(CategoricalHyperparameter(
            "loss", ["epsilon_insensitive", "squared_epsilon_insensitive"],
            default="squared_epsilon_insensitive"))
        # Random Guess
        epsilon = cs.add_hyperparameter(UniformFloatHyperparameter(
            name="epsilon", lower=0.001, upper=1, default=0.1, log=True))
        dual = cs.add_hyperparameter(Constant("dual", "False"))
        # These are set ad-hoc
        tol = cs.add_hyperparameter(UniformFloatHyperparameter(
            "tol", 1e-5, 1e-1, default=1e-4, log=True))
        fit_intercept = cs.add_hyperparameter(Constant("fit_intercept", "True"))
        intercept_scaling = cs.add_hyperparameter(Constant(
            "intercept_scaling", 1))

        dual_and_loss = ForbiddenAndConjunction(
            ForbiddenEqualsClause(dual, "False"),
            ForbiddenEqualsClause(loss, "epsilon_insensitive")
        )
        cs.add_forbidden_clause(dual_and_loss)

        return cs
Example #39
    def get_hyperparameter_search_space(dataset_properties=None):
        C = UniformFloatHyperparameter("C",
                                       0.03125,
                                       32768,
                                       log=True,
                                       default=1.0)
        # No linear kernel here, because we have liblinear
        kernel = CategoricalHyperparameter(name="kernel",
                                           choices=["rbf", "poly", "sigmoid"],
                                           default="rbf")
        degree = UniformIntegerHyperparameter("degree", 1, 5, default=3)
        gamma = UniformFloatHyperparameter("gamma",
                                           3.0517578125e-05,
                                           8,
                                           log=True,
                                           default=0.1)
        # TODO this is totally ad-hoc
        coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0)
        # probability is no hyperparameter, but an argument to the SVM algo
        shrinking = CategoricalHyperparameter("shrinking", ["True", "False"],
                                              default="True")
        tol = UniformFloatHyperparameter("tol",
                                         1e-5,
                                         1e-1,
                                         default=1e-4,
                                         log=True)
        # cache size is not a hyperparameter, but an argument to the program!
        #max_iter = UnParametrizedHyperparameter("max_iter", -1)

        cs = ConfigurationSpace()
        cs.add_hyperparameter(C)
        cs.add_hyperparameter(kernel)
        cs.add_hyperparameter(degree)
        cs.add_hyperparameter(gamma)
        cs.add_hyperparameter(coef0)
        cs.add_hyperparameter(shrinking)
        cs.add_hyperparameter(tol)
        #cs.add_hyperparameter(max_iter)

        degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
        coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
        cs.add_condition(degree_depends_on_poly)
        cs.add_condition(coef0_condition)

        return cs
Example #40
def add_hyperparameter(cs: ConfigurationSpace,
                       hyperparameter: HyperparameterSearchSpace,
                       hyperparameter_type: Type[Hyperparameter]) -> None:
    """
    Adds the given hyperparameter to the given configuration space

    Args:
        cs (ConfigurationSpace):
            Configuration space where the hyperparameter must be added
        hyperparameter (HyperparameterSearchSpace):
            search space of the hyperparameter
        hyperparameter_type (Hyperparameter):
            type of the hyperparameter

    Returns:
        None
    """
    cs.add_hyperparameter(get_hyperparameter(hyperparameter, hyperparameter_type))
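
A minimal usage sketch for this helper; the import path autoPyTorch.utils.common is an assumption about where HyperparameterSearchSpace, get_hyperparameter and add_hyperparameter live.

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter
# assumed location of the helpers used throughout these examples
from autoPyTorch.utils.common import HyperparameterSearchSpace, add_hyperparameter

cs = ConfigurationSpace()
dropout = HyperparameterSearchSpace(hyperparameter="dropout",
                                    value_range=(0.0, 0.8),
                                    default_value=0.5)
add_hyperparameter(cs, dropout, UniformFloatHyperparameter)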
Example #41
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        n_components = cs.add_hyperparameter(
            UniformIntegerHyperparameter("n_components", 10, 2000,
                                         default=100))
        algorithm = cs.add_hyperparameter(
            CategoricalHyperparameter('algorithm', ['parallel', 'deflation'],
                                      'parallel'))
        whiten = cs.add_hyperparameter(
            CategoricalHyperparameter('whiten', ['False', 'True'], 'False'))
        fun = cs.add_hyperparameter(
            CategoricalHyperparameter('fun', ['logcosh', 'exp', 'cube'],
                                      'logcosh'))

        cs.add_condition(EqualsCondition(n_components, whiten, "True"))

        return cs
    def test_write_AndConjunction_condition(self):
        expected = "lp '--lp ' c {mi,bo}\nls '--ls ' c {sa,ca,ny}\ntemp '--temp ' r (0.500000, 1.000000)|  ls  %in%  c(sa,ca)  &&  lp  %in%  c(bo)\n"

        temp = UniformFloatHyperparameter("temp", 0.5, 1)
        ls = CategoricalHyperparameter("ls", ["sa", "ca", "ny"], "sa")
        lp = CategoricalHyperparameter("lp", ["mi", "bo"], "bo")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(temp)
        cs.add_hyperparameter(lp)
        cs.add_hyperparameter(ls)

        c1 = InCondition(temp, ls, ['sa','ca'])
        c2 = InCondition(temp, lp, ['bo'])
        c3 = AndConjunction(c1, c2)
        cs.add_condition(c3)
        value = irace.write(cs)
        self.assertEqual(expected, value)
Example #43
 def add_params(cs: ConfigurationSpace):
     '''
         adds parameters to ConfigurationSpace 
     '''
     pca_switch = CategoricalHyperparameter("pca",
                                            choices=[True, False],
                                            default_value=False)
     n_components = UniformIntegerHyperparameter("pca_n_components",
                                                 lower=1,
                                                 upper=20,
                                                 default_value=7,
                                                 log=True)
     cs.add_hyperparameter(pca_switch)
     cs.add_hyperparameter(n_components)
     cond = InCondition(child=n_components,
                        parent=pca_switch,
                        values=[True])
     cs.add_condition(cond)
Example #44
    def get_hyperparameter_search_space(self,
                                        dataset_properties=None,
                                        default=None,
                                        include=None,
                                        exclude=None):
        cs = ConfigurationSpace()

        if dataset_properties is None:
            dataset_properties = {}

        # Compile a list of legal preprocessors for this problem
        available_preprocessors = self.get_available_components(
            dataset_properties=dataset_properties,
            include=include,
            exclude=exclude)

        if len(available_preprocessors) == 0:
            raise ValueError(
                "No rescalers found, please add any rescaling component.")

        if default is None:
            defaults = ['standardize', 'none', 'maxabs', 'minmax', 'normalize']
            for default_ in defaults:
                if default_ in available_preprocessors:
                    default = default_
                    break

        preprocessor = CategoricalHyperparameter(
            '__choice__',
            list(available_preprocessors.keys()),
            default_value=default)
        cs.add_hyperparameter(preprocessor)
        for name in available_preprocessors:
            preprocessor_configuration_space = available_preprocessors[name]. \
                get_hyperparameter_search_space(dataset_properties)
            parent_hyperparameter = {'parent': preprocessor, 'value': name}
            cs.add_configuration_space(
                name,
                preprocessor_configuration_space,
                parent_hyperparameter=parent_hyperparameter)

        self.configuration_space = cs
        self.dataset_properties = dataset_properties
        return cs
Example #45
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        num_layers: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_layers", value_range=(1, 4), default_value=2),
        units_layer: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="units_layer",
            value_range=(64, 512),
            default_value=128),
        activation: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="activation",
            value_range=tuple(_activations.keys()),
            default_value=list(_activations.keys())[0]),
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        min_num_layers: int = num_layers.value_range[0]  # type: ignore
        max_num_layers: int = num_layers.value_range[-1]  # type: ignore
        num_layers_is_constant = (min_num_layers == max_num_layers)

        num_layers_hp = get_hyperparameter(num_layers,
                                           UniformIntegerHyperparameter)
        activation_hp = get_hyperparameter(activation,
                                           CategoricalHyperparameter)
        cs.add_hyperparameter(num_layers_hp)

        if not num_layers_is_constant:
            cs.add_hyperparameter(activation_hp)
            cs.add_condition(
                CS.GreaterThanCondition(activation_hp, num_layers_hp, 1))
        elif max_num_layers > 1:
            # only add activation if we have more than 1 layer
            cs.add_hyperparameter(activation_hp)

        for i in range(1, max_num_layers + 1):
            num_units_search_space = HyperparameterSearchSpace(
                hyperparameter=f"units_layer_{i}",
                value_range=units_layer.value_range,
                default_value=units_layer.default_value,
                log=units_layer.log,
            )
            num_units_hp = get_hyperparameter(num_units_search_space,
                                              UniformIntegerHyperparameter)
            cs.add_hyperparameter(num_units_hp)

            if i >= min_num_layers and not num_layers_is_constant:
                # In the case of a constant, the max and min number of layers are the same.
                # So no condition is needed. If it is not a constant but a hyperparameter,
                # then a condition has to be made so that it accounts for the value of the
                # hyperparameter.
                cs.add_condition(
                    CS.GreaterThanCondition(num_units_hp, num_layers_hp, i))

        return cs
Example #46
    def load_configspace(self, folder):
        """Will try to load the configspace. If it's a pcs-file, backup_cs will be a list containing all possible
        combinations of interpretation for Categoricals. If this issue will be fixed, we can drop this procedure."""
        cs_fn_json = os.path.join(folder, 'configspace.json')
        cs_fn_pcs = os.path.join(folder, 'configspace.pcs')
        if os.path.exists(cs_fn_json):
            with open(cs_fn_json, 'r') as fh:
                cs = pcs_json.read(fh.read())
                backup_cs = []
            self.logger.debug("Detected and loaded \"%s\". No backup-cs necessary", cs_fn_json)
        elif os.path.exists(cs_fn_pcs):
            with open(cs_fn_pcs, 'r') as fh:
                cs = pcs_new.read(fh.readlines())
            # Create alternative interpretations
            categoricals = [hp for hp in cs.get_hyperparameters() if isinstance(hp, CategoricalHyperparameter)]
            non_categoricals = [hp for hp in cs.get_hyperparameters() if not isinstance(hp, CategoricalHyperparameter)]

            def _get_interpretations(choices):
                result = []
                if set(choices) == {"True", "False"}:
                    result.append([True, False])
                if all([c.isdigit() for c in choices]):
                    result.append([int(c) for c in choices])
                result.append(choices)
                return result

            choices_per_cat = [_get_interpretations(hp.choices) for hp in categoricals]
            combinations = itertools.product(*choices_per_cat)
            self.logger.debug(combinations)
            backup_cs = []
            for combi in combinations:
                bcs = ConfigurationSpace()
                for hp in non_categoricals:
                    bcs.add_hyperparameter(hp)
                for name, choices in zip([hp.name for hp in categoricals], combi):
                    bcs.add_hyperparameter(CategoricalHyperparameter(name, choices))
                bcs.add_conditions(cs.get_conditions())
                backup_cs.append(bcs)

            self.logger.debug("Sampled %d interpretations of \"%s\"", len(backup_cs), cs_fn_pcs)
            self.logger.debug(choices_per_cat)
        else:
            raise ValueError("Missing pcs-file at '%s.[pcs|json]'!" % os.path.join(folder, 'configspace'))
        return cs, backup_cs
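
The ambiguity that backup_cs works around can be reproduced directly: choices written to the pcs text format are plain strings, so boolean categoricals are expected to come back as the strings "True"/"False". A small sketch under that assumption:

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import CategoricalHyperparameter
from ConfigSpace.read_and_write import pcs_new

cs = ConfigurationSpace()
cs.add_hyperparameter(CategoricalHyperparameter("flag", [True, False]))

restored = pcs_new.read(pcs_new.write(cs).split("\n"))
# expected to print ('True', 'False') rather than the original booleans
print(restored.get_hyperparameter("flag").choices)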
Example #47
    def get_hyperparameter_search_space(
            self,
            dataset_properties: Optional[Dict] = None,
            default: str = None,
            include: Optional[Dict] = None,
            exclude: Optional[Dict] = None) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        if dataset_properties is None:
            dataset_properties = {}

        # Compile a list of legal preprocessors for this problem
        available_preprocessors = self.get_available_components(
            dataset_properties=dataset_properties,
            include=include,
            exclude=exclude)

        if len(available_preprocessors) == 0:
            raise ValueError(
                "No preprocessors found, please add NoPreprocessing")

        if default is None:
            defaults = ["feature_type"]
            for default_ in defaults:
                if default_ in available_preprocessors:
                    default = default_
                    break

        preprocessor = CategoricalHyperparameter(
            '__choice__',
            list(available_preprocessors.keys()),
            default_value=default)
        cs.add_hyperparameter(preprocessor)
        for name in available_preprocessors:
            preprocessor_configuration_space = available_preprocessors[name](
                dataset_properties=dataset_properties). \
                get_hyperparameter_search_space(dataset_properties)
            parent_hyperparameter = {'parent': preprocessor, 'value': name}
            cs.add_configuration_space(
                name,
                preprocessor_configuration_space,
                parent_hyperparameter=parent_hyperparameter)
        return cs
Example #48
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        shrinkage = cs.add_hyperparameter(
            CategoricalHyperparameter("shrinkage", ["None", "auto", "manual"],
                                      default="None"))
        shrinkage_factor = cs.add_hyperparameter(
            UniformFloatHyperparameter("shrinkage_factor", 0., 1., 0.5))
        n_components = cs.add_hyperparameter(
            UniformIntegerHyperparameter('n_components', 1, 250, default=10))
        tol = cs.add_hyperparameter(
            UniformFloatHyperparameter("tol",
                                       1e-5,
                                       1e-1,
                                       default=1e-4,
                                       log=True))

        cs.add_condition(EqualsCondition(shrinkage_factor, shrinkage,
                                         "manual"))
        return cs
    def test_write_OrConjunction_condition(self):
        import numpy as np
        expected = "lp '--lp ' c {mi,bo}\ntemp '--temp ' r (2.000000, 5.000000)\nls '--ls ' c {sa,ca,ny}|  temp==3.0  ||  lp  %in%  c(bo)\n"

        temp = UniformFloatHyperparameter("temp", np.exp(2), np.exp(5), log=True)
        ls = CategoricalHyperparameter("ls", ["sa", "ca", "ny"], "sa")
        lp = CategoricalHyperparameter("lp", ["mi", "bo"], "bo")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(temp)
        cs.add_hyperparameter(lp)
        cs.add_hyperparameter(ls)

        c1 = EqualsCondition(ls, temp, np.exp(3))
        c2 = InCondition(ls, lp, ['bo'])
        c3 = OrConjunction(c1, c2)
        cs.add_condition(c3)
        value = irace.write(cs)
        self.assertEqual(expected, value)
Example #50
    def get_hyperparameter_search_space(
            dataset_properties: Optional[Dict[str, str]] = None,
            min_num_blocks: int = 1,
            max_num_blocks: int = 10,
            min_num_filters: int = 4,
            max_num_filters: int = 64,
            min_kernel_size: int = 4,
            max_kernel_size: int = 64,
            min_dropout: float = 0.0,
            max_dropout: float = 0.5) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        num_blocks_hp = UniformIntegerHyperparameter("num_blocks",
                                                     lower=min_num_blocks,
                                                     upper=max_num_blocks)
        cs.add_hyperparameter(num_blocks_hp)

        kernel_size_hp = UniformIntegerHyperparameter("kernel_size",
                                                      lower=min_kernel_size,
                                                      upper=max_kernel_size)
        cs.add_hyperparameter(kernel_size_hp)

        use_dropout_hp = CategoricalHyperparameter("use_dropout",
                                                   choices=[True, False])
        cs.add_hyperparameter(use_dropout_hp)

        dropout_hp = UniformFloatHyperparameter("dropout",
                                                lower=min_dropout,
                                                upper=max_dropout)
        cs.add_hyperparameter(dropout_hp)
        cs.add_condition(CS.EqualsCondition(dropout_hp, use_dropout_hp, True))

        for i in range(0, max_num_blocks):
            num_filters_hp = UniformIntegerHyperparameter(
                f"num_filters_{i}",
                lower=min_num_filters,
                upper=max_num_filters)
            cs.add_hyperparameter(num_filters_hp)
            if i >= min_num_blocks:
                cs.add_condition(
                    CS.GreaterThanCondition(num_filters_hp, num_blocks_hp, i))

        return cs
Example #51
    def get_hyperparameter_search_space(cls,
                                        dataset_properties,
                                        default=None,
                                        include=None,
                                        exclude=None):
        if include is not None and exclude is not None:
            raise ValueError(
                "The argument include and exclude cannot be used together.")

        cs = ConfigurationSpace()

        # Compile a list of all estimator objects for this problem
        available_estimators = cls.get_available_components(
            data_prop=dataset_properties, include=include, exclude=exclude)

        if len(available_estimators) == 0:
            raise ValueError("No regressors found")

        if default is None:
            defaults = ['random_forest', 'support_vector_regression'] + \
                list(available_estimators.keys())
            for default_ in defaults:
                if default_ in available_estimators:
                    if include is not None and default_ not in include:
                        continue
                    if exclude is not None and default_ in exclude:
                        continue
                    default = default_
                    break

        estimator = CategoricalHyperparameter('__choice__',
                                              list(
                                                  available_estimators.keys()),
                                              default=default)
        cs.add_hyperparameter(estimator)
        for estimator_name in available_estimators.keys():
            estimator_configuration_space = available_estimators[
                estimator_name]. \
                get_hyperparameter_search_space(dataset_properties)
            cs = add_component_deepcopy(cs, estimator_name,
                                        estimator_configuration_space)

        return cs
Example #52
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        # the smoothing parameter is a non-negative float
        # I will limit it to 100 and put it on a logarithmic scale. (SF)
        # Please adjust that, if you know a proper range, this is just a guess.
        alpha = UniformFloatHyperparameter(name="alpha",
                                           lower=1e-2,
                                           upper=100,
                                           default=1,
                                           log=True)

        fit_prior = CategoricalHyperparameter(name="fit_prior",
                                              choices=["True", "False"],
                                              default="True")

        cs.add_hyperparameter(alpha)
        cs.add_hyperparameter(fit_prior)

        return cs
Example #53
    def add_params(cs: ConfigurationSpace):
        '''
            adds parameters to ConfigurationSpace 
        '''

        try:
            selector = cs.get_hyperparameter("selector")
            selector.choices.append("PairwiseClassifier")
        except KeyError:
            selector = CategoricalHyperparameter(
                "selector",
                choices=["PairwiseClassifier"],
                default="PairwiseClassifier")
            cs.add_hyperparameter(selector)

        classifier = cs.get_hyperparameter("classifier")
        cond = InCondition(child=classifier,
                           parent=selector,
                           values=["PairwiseClassifier"])
        cs.add_condition(cond)
Example #54
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        n_clusters = cs.add_hyperparameter(
            UniformIntegerHyperparameter("n_clusters", 2, 400, 25))
        affinity = cs.add_hyperparameter(
            CategoricalHyperparameter("affinity",
                                      ["euclidean", "manhattan", "cosine"],
                                      "euclidean"))
        linkage = cs.add_hyperparameter(
            CategoricalHyperparameter("linkage",
                                      ["ward", "complete", "average"], "ward"))
        pooling_func = cs.add_hyperparameter(
            CategoricalHyperparameter("pooling_func",
                                      ["mean", "median", "max"]))

        affinity_and_linkage = ForbiddenAndConjunction(
            ForbiddenInClause(affinity, ["manhattan", "cosine"]),
            ForbiddenEqualsClause(linkage, "ward"))
        cs.add_forbidden_clause(affinity_and_linkage)
        return cs
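
Forbidden clauses like the one above make the sampler reject the disallowed combination altogether. A brief self-contained sketch of that effect:

from ConfigSpace import ConfigurationSpace
from ConfigSpace.forbidden import (ForbiddenAndConjunction,
                                   ForbiddenEqualsClause, ForbiddenInClause)
from ConfigSpace.hyperparameters import CategoricalHyperparameter

cs = ConfigurationSpace()
affinity = CategoricalHyperparameter("affinity",
                                     ["euclidean", "manhattan", "cosine"])
linkage = CategoricalHyperparameter("linkage", ["ward", "complete", "average"])
cs.add_hyperparameters([affinity, linkage])
cs.add_forbidden_clause(ForbiddenAndConjunction(
    ForbiddenInClause(affinity, ["manhattan", "cosine"]),
    ForbiddenEqualsClause(linkage, "ward")))

# sampling should never return a forbidden combination
for config in cs.sample_configuration(100):
    assert not (config["affinity"] in ("manhattan", "cosine")
                and config["linkage"] == "ward")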
Example #55
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        num_blocks: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_blocks", value_range=(1, 10), default_value=5),
        num_filters: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_filters",
            value_range=(4, 64),
            default_value=32),
        kernel_size: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="kernel_size",
            value_range=(4, 64),
            default_value=32),
        use_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_dropout",
            value_range=(True, False),
            default_value=False),
        dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="dropout", value_range=(0, 0.5), default_value=0.1),
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        min_num_blocks, max_num_blocks = num_blocks.value_range
        num_blocks_hp = get_hyperparameter(num_blocks,
                                           UniformIntegerHyperparameter)
        cs.add_hyperparameter(num_blocks_hp)

        add_hyperparameter(cs, kernel_size, UniformIntegerHyperparameter)

        use_dropout_hp = get_hyperparameter(use_dropout,
                                            CategoricalHyperparameter)
        cs.add_hyperparameter(use_dropout_hp)

        dropout_hp = get_hyperparameter(dropout, UniformFloatHyperparameter)
        cs.add_hyperparameter(dropout_hp)
        cs.add_condition(CS.EqualsCondition(dropout_hp, use_dropout_hp, True))

        for i in range(0, int(max_num_blocks)):
            num_filter_search_space = HyperparameterSearchSpace(
                f"num_filters_{i}",
                value_range=num_filters.value_range,
                default_value=num_filters.default_value,
                log=num_filters.log)
            num_filters_hp = get_hyperparameter(num_filter_search_space,
                                                UniformIntegerHyperparameter)
            cs.add_hyperparameter(num_filters_hp)
            if i >= int(min_num_blocks):
                cs.add_condition(
                    CS.GreaterThanCondition(num_filters_hp, num_blocks_hp, i))

        return cs
Example #56
def make_config_space(dataset_shape: tuple) -> ConfigurationSpace:
    r"""Build a ConfigurationSpace object encompassing all different autoencoder types and parameters.

    The resulting ConfigurationSpace object depends on the given dataset shape.
    Adapting it to different datasets requires manual adjustments (see :func:`build_autoencoder_from_existing`).

    :param dataset_shape: Shape of the dataset this ConfigurationSpace should be valid for.
    :return: A ConfigurationSpace object tied to the given dataset size.

    .. note::

       Changes to this function invalidate previous optimization results.
    """
    input_dim = dataset_shape[-1]

    # Build Configuration Space which defines all parameters and their ranges
    cs = ConfigurationSpace()

    ae_type = CategoricalHyperparameter('ae_type', ['deep', 'deep_ksparse'],
                                        default_value='deep_ksparse')
    act_type = CategoricalHyperparameter('act_type',
                                         ['relu', 'sigmoid', 'tanh'],
                                         default_value='relu')
    epochs = UniformIntegerHyperparameter('epochs', 1, 50, default_value=10)
    cs.add_hyperparameters([ae_type, act_type, epochs])

    num_layers = UniformIntegerHyperparameter('num_hidden_layers',
                                              1,
                                              5,
                                              default_value=3)
    latent_dim = UniformIntegerHyperparameter('latent_dim',
                                              2,
                                              input_dim // 2,
                                              default_value=2)
    cs.add_hyperparameters([num_layers, latent_dim])

    ksparse_k = UniformIntegerHyperparameter('k', 50, 200, default_value=200)
    cs.add_hyperparameter(ksparse_k)
    cs.add_condition(
        InCondition(child=ksparse_k, parent=ae_type, values=['deep_ksparse']))
    return cs
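
A short usage sketch; the dataset shape below is hypothetical, and only its last entry (the number of features) is used by make_config_space.

cs = make_config_space(dataset_shape=(1000, 32))   # 32 input features
config = cs.sample_configuration()
# 'k' is only active when ae_type == 'deep_ksparse', hence .get()
print(config["ae_type"], config["latent_dim"], config.get("k"))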
Example #57
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        criterion = cs.add_hyperparameter(
            CategoricalHyperparameter("criterion", ["gini", "entropy"],
                                      default="gini"))
        #splitter = cs.add_hyperparameter(Constant("splitter", "best"))
        #max_features = cs.add_hyperparameter(Constant('max_features', 1.0))
        max_depth = cs.add_hyperparameter(
            UniformFloatHyperparameter('max_depth', 0., 2., default=0.5))
        min_samples_split = cs.add_hyperparameter(
            UniformIntegerHyperparameter("min_samples_split", 2, 20,
                                         default=2))
        min_samples_leaf = cs.add_hyperparameter(
            UniformIntegerHyperparameter("min_samples_leaf", 1, 20, default=1))
        #min_weight_fraction_leaf = cs.add_hyperparameter(
        #    Constant("min_weight_fraction_leaf", 0.0))
        #max_leaf_nodes = cs.add_hyperparameter(
        #    UnParametrizedHyperparameter("max_leaf_nodes", "None"))

        return cs
Example #58
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        penalty = cs.add_hyperparameter(Constant("penalty", "l1"))
        loss = cs.add_hyperparameter(
            CategoricalHyperparameter("loss", ["hinge", "squared_hinge"],
                                      default="squared_hinge"))
        #dual = cs.add_hyperparameter(Constant("dual", "False"))
        # This is set ad-hoc
        tol = cs.add_hyperparameter(
            UniformFloatHyperparameter("tol",
                                       1e-5,
                                       1e-1,
                                       default=1e-4,
                                       log=True))
        C = cs.add_hyperparameter(
            UniformFloatHyperparameter("C",
                                       0.03125,
                                       32768,
                                       log=True,
                                       default=1.0))
        multi_class = cs.add_hyperparameter(Constant("multi_class", "ovr"))
        # These are set ad-hoc
        fit_intercept = cs.add_hyperparameter(Constant("fit_intercept",
                                                       "True"))
        #intercept_scaling = cs.add_hyperparameter(Constant(
        #    "intercept_scaling", 1))

        penalty_and_loss = ForbiddenAndConjunction(
            ForbiddenEqualsClause(penalty, "l1"),
            ForbiddenEqualsClause(loss, "hinge"))
        cs.add_forbidden_clause(penalty_and_loss)
        return cs
    def get_hyperparameter_search_space(dataset_properties=None):
        alpha = UniformFloatHyperparameter(name="alpha",
                                           lower=0.01,
                                           upper=0.5,
                                           default_value=0.1)

        if dataset_properties is not None and dataset_properties.get('sparse'):
            choices = ['mutual_info_regression', 'f_regression']
        else:
            choices = ['f_regression']

        score_func = CategoricalHyperparameter(name="score_func",
                                               choices=choices,
                                               default_value="f_regression")

        mode = CategoricalHyperparameter('mode', ['fpr', 'fdr', 'fwe'], 'fpr')

        cs = ConfigurationSpace()
        cs.add_hyperparameter(alpha)
        cs.add_hyperparameter(score_func)
        cs.add_hyperparameter(mode)

        # Mutual info consistently crashes if percentile is not the mode
        if 'mutual_info_regression' in choices:
            cond = NotEqualsCondition(mode, score_func,
                                      'mutual_info_regression')
            cs.add_condition(cond)

        return cs
Example #60
    def get_hyperparameter_search_space(dataset_properties=None):
        alpha = UniformFloatHyperparameter(name="alpha",
                                           lower=0.01,
                                           upper=0.5,
                                           default_value=0.1)

        if dataset_properties is not None and dataset_properties.get('sparse'):
            choices = ['chi2', 'mutual_info_classif']
        else:
            choices = ['chi2', 'f_classif', 'mutual_info_classif']

        score_func = CategoricalHyperparameter(name="score_func",
                                               choices=choices,
                                               default_value="chi2")

        mode = CategoricalHyperparameter('mode', ['fpr', 'fdr', 'fwe'], 'fpr')

        cs = ConfigurationSpace()
        cs.add_hyperparameter(alpha)
        cs.add_hyperparameter(score_func)
        cs.add_hyperparameter(mode)

        # mutual_info_classif constantly crashes if mode is not percentile
        # as a WA, fix the mode for this score
        cond = NotEqualsCondition(mode, score_func, 'mutual_info_classif')
        cs.add_condition(cond)

        return cs