def get_hyperparameter_search_space(dataset_properties=None):
        alpha = UniformFloatHyperparameter(name="alpha",
                                           lower=0.01,
                                           upper=0.5,
                                           default_value=0.1)

        if dataset_properties is not None and dataset_properties.get('sparse'):
            choices = ['mutual_info_regression', 'f_regression']
        else:
            choices = ['f_regression']

        score_func = CategoricalHyperparameter(name="score_func",
                                               choices=choices,
                                               default_value="f_regression")

        mode = CategoricalHyperparameter('mode', ['fpr', 'fdr', 'fwe'], 'fpr')

        cs = ConfigurationSpace()
        cs.add_hyperparameter(alpha)
        cs.add_hyperparameter(score_func)
        cs.add_hyperparameter(mode)

        # mutual_info_regression consistently crashes when mode is not 'percentile';
        # as a workaround, only expose 'mode' for the other score functions
        if 'mutual_info_regression' in choices:
            cond = NotEqualsCondition(mode, score_func,
                                      'mutual_info_regression')
            cs.add_condition(cond)

        return cs
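The NotEqualsCondition keeps 'mode' out of any configuration that selects mutual_info_regression. Below is a minimal, self-contained sketch of that behaviour, simplified from the example above (the import paths assume a recent ConfigSpace release):

from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import CategoricalHyperparameter
from ConfigSpace.conditions import NotEqualsCondition

cs = ConfigurationSpace(seed=1)
score_func = CategoricalHyperparameter(
    "score_func", ["mutual_info_regression", "f_regression"],
    default_value="f_regression")
mode = CategoricalHyperparameter("mode", ["fpr", "fdr", "fwe"], default_value="fpr")
cs.add_hyperparameters([score_func, mode])
# 'mode' is active only while score_func is not 'mutual_info_regression'
cs.add_condition(NotEqualsCondition(mode, score_func, "mutual_info_regression"))

for config in cs.sample_configuration(size=20):
    values = config.get_dictionary()
    # whenever mutual_info_regression is drawn, 'mode' is absent from the sample
    assert ("mode" in values) == (values["score_func"] != "mutual_info_regression")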
Example #2
    def test_build_new_GreaterThanIntCondition(self):
        expected = "a real [0.0, 1.0] [0.5]\n" \
                   "b integer [0, 10] [5]\n\n" \
                   "b | a > 0.5"
        cs = ConfigurationSpace()
        a = UniformFloatHyperparameter("a", 0, 1, 0.5)
        b = UniformIntegerHyperparameter("b", 0, 10, 5)
        cs.add_hyperparameter(a)
        cs.add_hyperparameter(b)
        cond = GreaterThanCondition(b, a, 0.5)
        cs.add_condition(cond)

        value = pcs_new.write(cs)
        self.assertEqual(expected, value)

        expected = "a integer [0, 10] [5]\n" \
                   "b integer [0, 10] [5]\n\n" \
                   "b | a > 5"
        cs = ConfigurationSpace()
        a = UniformIntegerHyperparameter("a", 0, 10, 5)
        b = UniformIntegerHyperparameter("b", 0, 10, 5)
        cs.add_hyperparameter(a)
        cs.add_hyperparameter(b)
        cond = GreaterThanCondition(b, a, 5)
        cs.add_condition(cond)

        value = pcs_new.write(cs)
        self.assertEqual(expected, value)
Example #3
    def get_hyperparameter_search_space(dataset_properties=None):
        C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True,
                                       default_value=1.0)
        # No linear kernel here, because we have liblinear
        kernel = CategoricalHyperparameter(name="kernel",
                                           choices=["rbf", "poly", "sigmoid"],
                                           default_value="rbf")
        degree = UniformIntegerHyperparameter("degree", 2, 5, default_value=3)
        gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8,
                                           log=True, default_value=0.1)
        # TODO this is totally ad-hoc
        coef0 = UniformFloatHyperparameter("coef0", -1, 1, default_value=0)
        # probability is not a hyperparameter, but an argument to the SVM algorithm
        shrinking = CategoricalHyperparameter("shrinking", ["True", "False"],
                                              default_value="True")
        tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default_value=1e-3,
                                         log=True)
        # cache size is not a hyperparameter, but an argument to the program!
        max_iter = UnParametrizedHyperparameter("max_iter", -1)

        cs = ConfigurationSpace()
        cs.add_hyperparameters([C, kernel, degree, gamma, coef0, shrinking,
                                tol, max_iter])

        degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
        coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
        cs.add_condition(degree_depends_on_poly)
        cs.add_condition(coef0_condition)

        return cs
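A hedged usage sketch for the space built above, assuming the function is importable or otherwise in scope (it is usually defined as a static method of a pipeline component): sampled configurations only carry degree and coef0 when the drawn kernel activates them.

cs = get_hyperparameter_search_space()
for config in cs.sample_configuration(size=20):
    values = config.get_dictionary()
    # degree is only present for the poly kernel,
    # coef0 only for poly and sigmoid
    assert ("degree" in values) == (values["kernel"] == "poly")
    assert ("coef0" in values) == (values["kernel"] in ("poly", "sigmoid"))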
Example #4
    def get_hyperparameter_search_space(dataset_properties=None):
        n_components = UniformIntegerHyperparameter("n_components",
                                                    10,
                                                    2000,
                                                    default=100)
        kernel = CategoricalHyperparameter(
            'kernel', ['poly', 'rbf', 'sigmoid', 'cosine'], 'rbf')
        degree = UniformIntegerHyperparameter('degree', 2, 5, 3)
        gamma = UniformFloatHyperparameter("gamma",
                                           3.0517578125e-05,
                                           8,
                                           log=True,
                                           default=1.0)
        coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0)
        cs = ConfigurationSpace()
        cs.add_hyperparameter(n_components)
        cs.add_hyperparameter(kernel)
        cs.add_hyperparameter(degree)
        cs.add_hyperparameter(gamma)
        cs.add_hyperparameter(coef0)

        degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
        coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
        gamma_condition = InCondition(gamma, kernel, ["poly", "rbf"])
        cs.add_condition(degree_depends_on_poly)
        cs.add_condition(coef0_condition)
        cs.add_condition(gamma_condition)
        return cs
Example #5
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        loss = cs.add_hyperparameter(CategoricalHyperparameter(
            "loss", ["ls", "lad", "huber", "quantile"], default="ls"))
        learning_rate = cs.add_hyperparameter(UniformFloatHyperparameter(
            name="learning_rate", lower=0.01, upper=1, default=0.1, log=True))
        n_estimators = cs.add_hyperparameter(UniformIntegerHyperparameter(
            name="n_estimators", lower=50, upper=500, default=100))
        max_depth = cs.add_hyperparameter(UniformIntegerHyperparameter(
            name="max_depth", lower=1, upper=10, default=3))
        min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter(
            name="min_samples_split", lower=2, upper=20, default=2, log=False))
        min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter(
            name="min_samples_leaf", lower=1, upper=20, default=1, log=False))
        min_weight_fraction_leaf = cs.add_hyperparameter(
            UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.))
        subsample = cs.add_hyperparameter(UniformFloatHyperparameter(
            name="subsample", lower=0.01, upper=1.0, default=1.0, log=False))
        max_features = cs.add_hyperparameter(UniformFloatHyperparameter(
            "max_features", 0.5, 5, default=1))
        max_leaf_nodes = cs.add_hyperparameter(UnParametrizedHyperparameter(
            name="max_leaf_nodes", value="None"))
        alpha = cs.add_hyperparameter(UniformFloatHyperparameter(
            "alpha", lower=0.75, upper=0.99, default=0.9))

        cs.add_condition(InCondition(alpha, loss, ['huber', 'quantile']))
        return cs
Example #7
    def get_hyperparameter_search_space(dataset_properties=None):
        alpha = UniformFloatHyperparameter(name="alpha",
                                           lower=0.01,
                                           upper=0.5,
                                           default_value=0.1)

        if dataset_properties is not None and dataset_properties.get('sparse'):
            choices = ['chi2', 'mutual_info_classif']
        else:
            choices = ['chi2', 'f_classif', 'mutual_info_classif']

        score_func = CategoricalHyperparameter(name="score_func",
                                               choices=choices,
                                               default_value="chi2")

        mode = CategoricalHyperparameter('mode', ['fpr', 'fdr', 'fwe'], 'fpr')

        cs = ConfigurationSpace()
        cs.add_hyperparameter(alpha)
        cs.add_hyperparameter(score_func)
        cs.add_hyperparameter(mode)

        # mutual_info_classif consistently crashes when mode is not 'percentile';
        # as a workaround, only expose 'mode' for the other score functions
        cond = NotEqualsCondition(mode, score_func, 'mutual_info_classif')
        cs.add_condition(cond)

        return cs
Example #8
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        use_augmenter: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_augmenter",
            value_range=(True, False),
            default_value=True,
        ),
        sigma_min: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="sigma_min",
            value_range=(0, 3),
            default_value=0,
        ),
        sigma_offset: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="sigma_offset",
            value_range=(0.0, 3.0),
            default_value=0.5,
        ),
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()
        use_augmenter = get_hyperparameter(use_augmenter,
                                           CategoricalHyperparameter)
        sigma_min = get_hyperparameter(sigma_min, UniformFloatHyperparameter)
        sigma_offset = get_hyperparameter(sigma_offset,
                                          UniformFloatHyperparameter)
        cs.add_hyperparameters([use_augmenter, sigma_min, sigma_offset])
        # sigma_min and sigma_offset are only active when the augmenter is enabled
        cs.add_condition(CS.EqualsCondition(sigma_min, use_augmenter, True))
        cs.add_condition(CS.EqualsCondition(sigma_offset, use_augmenter, True))

        return cs
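The two EqualsConditions make sigma_min and sigma_offset active only when use_augmenter is True. A simplified sketch of the same gating with plain ConfigSpace objects; replacing the HyperparameterSearchSpace/get_hyperparameter helpers with direct constructor calls is an assumption about what they resolve to:

from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                         UniformFloatHyperparameter)
from ConfigSpace.conditions import EqualsCondition

cs = ConfigurationSpace(seed=3)
use_augmenter = CategoricalHyperparameter("use_augmenter", [True, False],
                                          default_value=True)
sigma_min = UniformFloatHyperparameter("sigma_min", 0.0, 3.0, default_value=0.0)
sigma_offset = UniformFloatHyperparameter("sigma_offset", 0.0, 3.0, default_value=0.5)
cs.add_hyperparameters([use_augmenter, sigma_min, sigma_offset])
# both sigma parameters are active only when the augmenter is enabled
cs.add_condition(EqualsCondition(sigma_min, use_augmenter, True))
cs.add_condition(EqualsCondition(sigma_offset, use_augmenter, True))

default = cs.get_default_configuration().get_dictionary()
assert default["use_augmenter"] and "sigma_min" in default and "sigma_offset" in default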
Example #10
    def get_hyperparameter_search_space(dataset_properties: Optional[Dict[str, str]] = None,
                                        min_num_layers: int = 1,
                                        max_num_layers: int = 4,
                                        min_num_filters: int = 16,
                                        max_num_filters: int = 256) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        num_layers_hp = UniformIntegerHyperparameter("num_layers",
                                                     lower=min_num_layers,
                                                     upper=max_num_layers)

        pooling_method_hp = CategoricalHyperparameter("pooling_method",
                                                      choices=["average", "max"])

        activation_hp = CategoricalHyperparameter('activation',
                                                  choices=list(_activations.keys()))

        cs.add_hyperparameters([num_layers_hp, pooling_method_hp, activation_hp])
        cs.add_condition(CS.GreaterThanCondition(activation_hp, num_layers_hp, 1))

        for i in range(1, max_num_layers):
            num_filters_hp = UniformIntegerHyperparameter(f"layer_{i}_filters",
                                                          lower=min_num_filters,
                                                          upper=max_num_filters)
            cs.add_hyperparameter(num_filters_hp)
            if i >= min_num_layers:
                cs.add_condition(CS.GreaterThanCondition(num_filters_hp, num_layers_hp, i))

        return cs
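The loop above pairs each per-layer hyperparameter with a GreaterThanCondition on num_layers. A hedged, standalone sketch of the same pattern with plain ConfigSpace objects and illustrative bounds:

from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformIntegerHyperparameter
from ConfigSpace.conditions import GreaterThanCondition

cs = ConfigurationSpace(seed=7)
num_layers = UniformIntegerHyperparameter("num_layers", 1, 4)
cs.add_hyperparameter(num_layers)

for i in range(1, 4):
    filters = UniformIntegerHyperparameter(f"layer_{i}_filters", 16, 256)
    cs.add_hyperparameter(filters)
    # layer_{i}_filters only matters when more than i layers are used
    cs.add_condition(GreaterThanCondition(filters, num_layers, i))

config = cs.sample_configuration().get_dictionary()
# exactly the filter hyperparameters with index below num_layers are active
assert all((f"layer_{i}_filters" in config) == (config["num_layers"] > i)
           for i in range(1, 4))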
Example #11
    def get_hyperparameter_search_space(**kwargs):
        cs = ConfigurationSpace()
        n_components = UniformIntegerHyperparameter('n_components_factor',
                                                    1,
                                                    250,
                                                    default_value=10)
        max_iter = UniformIntegerHyperparameter("max_iter",
                                                10,
                                                2000,
                                                default_value=1000)
        tol = UniformFloatHyperparameter("tol",
                                         1e-5,
                                         1e-1,
                                         default_value=1e-2,
                                         log=True)
        svd_method = CategoricalHyperparameter("svd_method",
                                               ["lapack", "randomized"],
                                               default_value="randomized")
        iterated_power = UniformIntegerHyperparameter("iterated_power",
                                                      1,
                                                      10,
                                                      default_value=3)
        cs.add_hyperparameters(
            [n_components, max_iter, tol, svd_method, iterated_power])

        iterated_power_condition = InCondition(iterated_power, svd_method,
                                               ["randomized"])
        cs.add_condition(iterated_power_condition)

        return cs
Example #12
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        loss = CategoricalHyperparameter(
            "loss", ["ls", "lad", "huber", "quantile"], default_value="ls")
        learning_rate = UniformFloatHyperparameter(
            name="learning_rate", lower=0.01, upper=1, default_value=0.1, log=True)
        n_estimators = UniformIntegerHyperparameter(
            "n_estimators", 50, 500, default_value=100)
        max_depth = UniformIntegerHyperparameter(
            name="max_depth", lower=1, upper=10, default_value=3)
        min_samples_split = UniformIntegerHyperparameter(
            name="min_samples_split", lower=2, upper=20, default_value=2, log=False)
        min_samples_leaf = UniformIntegerHyperparameter(
            name="min_samples_leaf", lower=1, upper=20, default_value=1, log=False)
        min_weight_fraction_leaf = UnParametrizedHyperparameter(
            "min_weight_fraction_leaf", 0.)
        subsample = UniformFloatHyperparameter(
            name="subsample", lower=0.01, upper=1.0, default_value=1.0, log=False)
        max_features = UniformFloatHyperparameter(
            "max_features", 0.1, 1.0, default_value=1)
        max_leaf_nodes = UnParametrizedHyperparameter(
            name="max_leaf_nodes", value="None")
        min_impurity_decrease = UnParametrizedHyperparameter(
            name='min_impurity_decrease', value=0.0)
        alpha = UniformFloatHyperparameter(
            "alpha", lower=0.75, upper=0.99, default_value=0.9)

        cs.add_hyperparameters([loss, learning_rate, n_estimators, max_depth,
                                min_samples_split, min_samples_leaf,
                                min_weight_fraction_leaf, subsample, max_features,
                                max_leaf_nodes, min_impurity_decrease, alpha])

        cs.add_condition(InCondition(alpha, loss, ['huber', 'quantile']))
        return cs
Example #13
    def add_params(cs: ConfigurationSpace, cutoff: int):
        '''
            adds parameters to ConfigurationSpace

            Arguments
            ---------
            cs: ConfigurationSpace
                configuration space to add new parameters and conditions
            cutoff: int
                maximal possible time for aspeed
        '''

        pre_solving = CategoricalHyperparameter("presolving",
                                                choices=[True, False],
                                                default_value=False)
        cs.add_hyperparameter(pre_solving)
        pre_cutoff = UniformIntegerHyperparameter("pre:cutoff",
                                                  lower=1,
                                                  upper=cutoff,
                                                  default_value=math.ceil(
                                                      cutoff * 0.1),
                                                  log=True)
        cs.add_hyperparameter(pre_cutoff)
        cond = InCondition(child=pre_cutoff, parent=pre_solving, values=[True])
        cs.add_condition(cond)
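A hedged usage sketch for add_params, assuming the helper above is in scope (it is typically a static method) together with math and the ConfigSpace classes it uses: with the default presolving=False, the conditional pre:cutoff parameter stays inactive.

cs = ConfigurationSpace()
add_params(cs, cutoff=300)
default = cs.get_default_configuration().get_dictionary()
# presolving defaults to False, so pre:cutoff is not part of the default config
assert not default["presolving"] and "pre:cutoff" not in default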
Example #14
def get_hyperparameter_search_space(seed):

    imputation = CategoricalHyperparameter('imputation__strategy', ['mean', 'median', 'most_frequent'])

    C = UniformFloatHyperparameter("classifier__C", 0.03125, 32768, log=True, default_value=1.0)
    # No linear kernel here, because we have liblinear
    kernel = CategoricalHyperparameter(name="classifier__kernel", choices=["rbf", "poly", "sigmoid"], default_value="rbf")
    degree = UniformIntegerHyperparameter("classifier__degree", 2, 5, default_value=3)
    gamma = UniformFloatHyperparameter("classifier__gamma", 3.0517578125e-05, 8, log=True, default_value=0.1)
    # TODO this is totally ad-hoc
    coef0 = UniformFloatHyperparameter("classifier__coef0", -1, 1, default_value=0)
    # probability is not a hyperparameter, but an argument to the SVM algorithm
    shrinking = CategoricalHyperparameter("classifier__shrinking", [True, False], default_value=True)
    tol = UniformFloatHyperparameter("classifier__tol", 1e-5, 1e-1, default_value=1e-3, log=True)
    # cache size is not a hyperparameter, but an argument to the program!
    max_iter = UnParametrizedHyperparameter("classifier__max_iter", -1)

    cs = ConfigurationSpace('sklearn.svm.SVC', seed)
    cs.add_hyperparameters([imputation, C, kernel, degree, gamma, coef0, shrinking, tol, max_iter])

    degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
    coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
    cs.add_condition(degree_depends_on_poly)
    cs.add_condition(coef0_condition)

    return cs
Example #15
def read(jason_string):
    jason = json.loads(jason_string)
    if 'name' in jason:
        configuration_space = ConfigurationSpace(name=jason['name'])
    else:
        configuration_space = ConfigurationSpace()

    for hyperparameter in jason['hyperparameters']:
        configuration_space.add_hyperparameter(
            _construct_hyperparameter(hyperparameter, ))

    for condition in jason['conditions']:
        configuration_space.add_condition(
            _construct_condition(
                condition,
                configuration_space,
            ))

    for forbidden in jason['forbiddens']:
        configuration_space.add_forbidden_clause(
            _construct_forbidden(
                forbidden,
                configuration_space,
            ))

    return configuration_space
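For comparison, ConfigSpace ships an equivalent serializer as ConfigSpace.read_and_write.json (distinct from the standard-library json used inside the read function above). A hedged round-trip sketch showing that conditions survive write/read:

from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                         UniformFloatHyperparameter)
from ConfigSpace.conditions import EqualsCondition
from ConfigSpace.read_and_write import json as config_json

cs = ConfigurationSpace()
kernel = CategoricalHyperparameter("kernel", ["rbf", "poly"], default_value="rbf")
coef0 = UniformFloatHyperparameter("coef0", -1.0, 1.0, default_value=0.0)
cs.add_hyperparameters([kernel, coef0])
cs.add_condition(EqualsCondition(coef0, kernel, "poly"))

restored = config_json.read(config_json.write(cs))
assert restored == cs  # hyperparameters, conditions and defaults all round-trip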
Example #16
 def get_hyperparameter_search_space(dataset_properties=None,
                                     optimizer='smac'):
     if optimizer == 'smac':
         cs = ConfigurationSpace()
         n_components = UniformIntegerHyperparameter("n_components",
                                                     10,
                                                     2000,
                                                     default_value=100)
         algorithm = CategoricalHyperparameter('algorithm',
                                               ['parallel', 'deflation'],
                                               'parallel')
         whiten = CategoricalHyperparameter('whiten', ['False', 'True'],
                                            'False')
         fun = CategoricalHyperparameter('fun', ['logcosh', 'exp', 'cube'],
                                         'logcosh')
         cs.add_hyperparameters([n_components, algorithm, whiten, fun])
         cs.add_condition(EqualsCondition(n_components, whiten, "True"))
         return cs
     elif optimizer == 'tpe':
         from hyperopt import hp
         space = {
             'n_components': hp.randint('ica_n_components', 1990) + 10,
             'algorithm': hp.choice('ica_algorithm',
                                    ['parallel', 'deflation']),
             'whiten': 'False',
             'fun': hp.choice('ica_fun', ['logcosh', 'exp', 'cube'])
         }
         return space
Example #18
File: json.py (project: LvdKnaap/BinPacking)
def read(jason_string):
    """
    Create a configuration space definition from a json string.

    Example
    -------

    .. testsetup:: json_test

        from ConfigSpace import ConfigurationSpace
        import ConfigSpace.hyperparameters as CSH
        from ConfigSpace.read_and_write import json
        cs = ConfigurationSpace()
        cs.add_hyperparameter(CSH.CategoricalHyperparameter('a', choices=[1, 2, 3]))
        with open('configspace.json', 'w') as f:
             f.write(json.write(cs))

    .. doctest:: json_test

        >>> from ConfigSpace.read_and_write import json
        >>> with open('configspace.json', 'r') as f:
        ...     jason_string = f.read()
        ...     config = json.read(jason_string)

    Parameters
    ----------
    jason_string : str
        A json string representing a configuration space definition

    Returns
    -------
    :class:`~ConfigSpace.configuration_space.ConfigurationSpace`
        The deserialized ConfigurationSpace object
    """
    jason = json.loads(jason_string)
    if 'name' in jason:
        configuration_space = ConfigurationSpace(name=jason['name'])
    else:
        configuration_space = ConfigurationSpace()

    for hyperparameter in jason['hyperparameters']:
        configuration_space.add_hyperparameter(
            _construct_hyperparameter(hyperparameter, ))

    for condition in jason['conditions']:
        configuration_space.add_condition(
            _construct_condition(
                condition,
                configuration_space,
            ))

    for forbidden in jason['forbiddens']:
        configuration_space.add_forbidden_clause(
            _construct_forbidden(
                forbidden,
                configuration_space,
            ))

    return configuration_space
Example #19
 def get_hyperparameter_search_space(dataset_properties=None):
     cs = ConfigurationSpace()
     use_minimum_fraction = cs.add_hyperparameter(CategoricalHyperparameter(
         "use_minimum_fraction", ["True", "False"], default="True"))
     minimum_fraction = cs.add_hyperparameter(UniformFloatHyperparameter(
         "minimum_fraction", lower=.0001, upper=0.5, default=0.01, log=True))
     cs.add_condition(EqualsCondition(minimum_fraction,
                                      use_minimum_fraction, 'True'))
     return cs
Example #20
 def get_hyperparameter_search_space(dataset_properties=None):
     cs = ConfigurationSpace()
     use_minimum_fraction = CategoricalHyperparameter(
         "use_minimum_fraction", ["True", "False"], default_value="True")
     minimum_fraction = UniformFloatHyperparameter(
         "minimum_fraction", lower=.0001, upper=0.5, default_value=0.01, log=True)
     cs.add_hyperparameters([use_minimum_fraction, minimum_fraction])
     cs.add_condition(EqualsCondition(minimum_fraction,
                                      use_minimum_fraction, 'True'))
     return cs
Example #21
    def add_params(cs: ConfigurationSpace):
        '''
            adds a condition to the given ConfigurationSpace that activates
            the regressor only when the PairwiseRegressor selector is chosen
        '''

        selector = cs.get_hyperparameter("selector")
        regressor = cs.get_hyperparameter("regressor")
        if "PairwiseRegressor" in selector.choices:
            cond = InCondition(child=regressor, parent=selector, values=["PairwiseRegressor"])
            cs.add_condition(cond)
Example #22
def get_libsvm_svc_default_search_space():
    classif_prefix = "classifier:libsvm_svc:"

    model_type = CategoricalHyperparameter('classifier:__choice__',
                                           ['libsvm_svc'])
    imputation = CategoricalHyperparameter('imputation:strategy',
                                           ['mean', 'median', 'most_frequent'])

    C = UniformFloatHyperparameter(classif_prefix + "C",
                                   0.03125,
                                   32768,
                                   log=True,
                                   default_value=1.0)
    # No linear kernel here, because we have liblinear
    kernel = CategoricalHyperparameter(name=classif_prefix + "kernel",
                                       choices=["rbf", "poly", "sigmoid"],
                                       default_value="rbf")
    degree = UniformIntegerHyperparameter(classif_prefix + "degree",
                                          2,
                                          5,
                                          default_value=3)
    gamma = UniformFloatHyperparameter(classif_prefix + "gamma",
                                       3.0517578125e-05,
                                       8,
                                       log=True,
                                       default_value=0.1)
    # TODO this is totally ad-hoc
    coef0 = UniformFloatHyperparameter(classif_prefix + "coef0",
                                       -1,
                                       1,
                                       default_value=0)
    # probability is not a hyperparameter, but an argument to the SVM algorithm
    shrinking = CategoricalHyperparameter(classif_prefix + "shrinking",
                                          ["True", "False"],
                                          default_value="True")
    tol = UniformFloatHyperparameter(classif_prefix + "tol",
                                     1e-5,
                                     1e-1,
                                     default_value=1e-3,
                                     log=True)
    # cache size is not a hyperparameter, but an argument to the program!
    max_iter = UnParametrizedHyperparameter(classif_prefix + "max_iter", -1)

    cs = ConfigurationSpace()
    cs.add_hyperparameters([
        model_type, imputation, C, kernel, degree, gamma, coef0, shrinking,
        tol, max_iter
    ])

    degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
    coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
    cs.add_condition(degree_depends_on_poly)
    cs.add_condition(coef0_condition)

    return cs
Example #23
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        n_components = cs.add_hyperparameter(UniformIntegerHyperparameter("n_components", 10, 2000, default=100))
        algorithm = cs.add_hyperparameter(CategoricalHyperparameter("algorithm", ["parallel", "deflation"], "parallel"))
        whiten = cs.add_hyperparameter(CategoricalHyperparameter("whiten", ["False", "True"], "False"))
        fun = cs.add_hyperparameter(CategoricalHyperparameter("fun", ["logcosh", "exp", "cube"], "logcosh"))

        cs.add_condition(EqualsCondition(n_components, whiten, "True"))

        return cs
Example #24
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        shrinkage = CategoricalHyperparameter(
            "shrinkage", ["None", "auto", "manual"], default_value="None")
        shrinkage_factor = UniformFloatHyperparameter(
            "shrinkage_factor", 0., 1., 0.5)
        tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default_value=1e-4, log=True)
        cs.add_hyperparameters([shrinkage, shrinkage_factor, tol])

        cs.add_condition(EqualsCondition(shrinkage_factor, shrinkage, "manual"))
        return cs
Example #25
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        loss = cs.add_hyperparameter(
            CategoricalHyperparameter("loss",
                                      ["ls", "lad", "huber", "quantile"],
                                      default="ls"))
        learning_rate = cs.add_hyperparameter(
            UniformFloatHyperparameter(name="learning_rate",
                                       lower=0.01,
                                       upper=1,
                                       default=0.1,
                                       log=True))
        n_estimators = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="n_estimators",
                                         lower=50,
                                         upper=500,
                                         default=100))
        max_depth = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="max_depth",
                                         lower=1,
                                         upper=10,
                                         default=3))
        min_samples_split = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="min_samples_split",
                                         lower=2,
                                         upper=20,
                                         default=2,
                                         log=False))
        min_samples_leaf = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="min_samples_leaf",
                                         lower=1,
                                         upper=20,
                                         default=1,
                                         log=False))
        min_weight_fraction_leaf = cs.add_hyperparameter(
            UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.))
        subsample = cs.add_hyperparameter(
            UniformFloatHyperparameter(name="subsample",
                                       lower=0.01,
                                       upper=1.0,
                                       default=1.0,
                                       log=False))
        max_features = cs.add_hyperparameter(
            UniformFloatHyperparameter("max_features", 0.5, 5, default=1))
        max_leaf_nodes = cs.add_hyperparameter(
            UnParametrizedHyperparameter(name="max_leaf_nodes", value="None"))
        alpha = cs.add_hyperparameter(
            UniformFloatHyperparameter("alpha",
                                       lower=0.75,
                                       upper=0.99,
                                       default=0.9))

        cs.add_condition(InCondition(alpha, loss, ['huber', 'quantile']))
        return cs
Example #26
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        num_layers: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_layers", value_range=(1, 4), default_value=2),
        units_layer: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="units_layer",
            value_range=(64, 512),
            default_value=128),
        activation: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="activation",
            value_range=tuple(_activations.keys()),
            default_value=list(_activations.keys())[0]),
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        min_num_layers: int = num_layers.value_range[0]  # type: ignore
        max_num_layers: int = num_layers.value_range[-1]  # type: ignore
        num_layers_is_constant = (min_num_layers == max_num_layers)

        num_layers_hp = get_hyperparameter(num_layers,
                                           UniformIntegerHyperparameter)
        activation_hp = get_hyperparameter(activation,
                                           CategoricalHyperparameter)
        cs.add_hyperparameter(num_layers_hp)

        if not num_layers_is_constant:
            cs.add_hyperparameter(activation_hp)
            cs.add_condition(
                CS.GreaterThanCondition(activation_hp, num_layers_hp, 1))
        elif max_num_layers > 1:
            # only add activation if we have more than 1 layer
            cs.add_hyperparameter(activation_hp)

        for i in range(1, max_num_layers + 1):
            num_units_search_space = HyperparameterSearchSpace(
                hyperparameter=f"units_layer_{i}",
                value_range=units_layer.value_range,
                default_value=units_layer.default_value,
                log=units_layer.log,
            )
            num_units_hp = get_hyperparameter(num_units_search_space,
                                              UniformIntegerHyperparameter)
            cs.add_hyperparameter(num_units_hp)

            if i >= min_num_layers and not num_layers_is_constant:
                # In the case of a constant, the max and min number of layers are the same.
                # So no condition is needed. If it is not a constant but a hyperparameter,
                # then a condition has to be made so that it accounts for the value of the
                # hyperparameter.
                cs.add_condition(
                    CS.GreaterThanCondition(num_units_hp, num_layers_hp, i))

        return cs
Example #27
    def get_hyperparameter_search_space(**kwargs):
        cs = ConfigurationSpace()
        penalty = CategoricalHyperparameter("penalty", ["l1", "l2", "elasticnet", "none"], default_value='l2')
        solver = CategoricalHyperparameter("solver", ["newton-cg", "lbfgs", "liblinear", "sag", "saga"],
                                           default_value="lbfgs")
        dual = CategoricalHyperparameter("dual", choices=[True, False], default_value=False)
        tol = UniformFloatHyperparameter("tol", lower=1e-7, upper=100., default_value=1.0e-4, log=True)
        C = UniformFloatHyperparameter("C", lower=1e-7, upper=100., default_value=1.0, log=True)
        fit_intercept = CategoricalHyperparameter("fit_intercept", choices=[True, False], default_value=True)
        intercept_scaling = UniformFloatHyperparameter("intercept_scaling", lower=0.0001, upper=2.0, default_value=1.0,
                                                       log=True)
        max_iter = UniformIntegerHyperparameter("max_iter", lower=50, upper=10000, default_value=100)
        multi_class = CategoricalHyperparameter("multi_class", ["ovr", "multinomial", "auto"], default_value="auto")
        l1_ratio = UniformFloatHyperparameter("l1_ratio", lower=0., upper=1., default_value=0.1)

        l1_ratio_condition = InCondition(l1_ratio, penalty, ["elasticnet"])
        dual_condition = AndConjunction(InCondition(dual, penalty, ["l2"]), InCondition(dual, solver, ["liblinear"]))
        cs.add_hyperparameters([penalty, solver, dual, tol, C, fit_intercept, intercept_scaling, max_iter, multi_class,
                                l1_ratio])

        penaltyAndLbfgs = ForbiddenAndConjunction(
            ForbiddenEqualsClause(solver, "lbfgs"),
            ForbiddenInClause(penalty, ["l1", "elasticnet"])
        )
        penaltyAndNewton = ForbiddenAndConjunction(
            ForbiddenEqualsClause(solver, "newton-cg"),
            ForbiddenInClause(penalty, ["l1", "elasticnet"])
        )
        penaltyAndSag = ForbiddenAndConjunction(
            ForbiddenEqualsClause(solver, "sag"),
            ForbiddenInClause(penalty, ["l1", "elasticnet"])
        )
        elasticnetAndNonSagaSolvers = ForbiddenAndConjunction(
            ForbiddenInClause(penalty, ["elasticnet"]),
            ForbiddenInClause(solver, ["newton-cg", "lbfgs", "sag"])
        )
        liblinearAndUnsupportedPenalty = ForbiddenAndConjunction(
            ForbiddenInClause(penalty, ["elasticnet", "none"]),
            ForbiddenInClause(solver, ["liblinear"])
        )
        liblinearAndMultinomial = ForbiddenAndConjunction(
            ForbiddenInClause(multi_class, ["multinomial"]),
            ForbiddenInClause(solver, ["liblinear"])
        )

        cs.add_forbidden_clause(penaltyAndLbfgs)
        cs.add_forbidden_clause(penaltyAndNewton)
        cs.add_forbidden_clause(penaltyAndSag)
        cs.add_forbidden_clause(liblinearAndUnsupportedPenalty)
        cs.add_forbidden_clause(elasticnetAndNonSagaSolvers)
        cs.add_forbidden_clause(liblinearAndMultinomial)
        cs.add_condition(l1_ratio_condition)
        cs.add_condition(dual_condition)
        return cs
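The forbidden clauses above exclude solver/penalty combinations that scikit-learn's LogisticRegression rejects. A reduced, hedged sketch of the mechanism, keeping only the lbfgs case and sampling to show that the forbidden pair never appears:

from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import CategoricalHyperparameter
from ConfigSpace.forbidden import (ForbiddenAndConjunction,
                                   ForbiddenEqualsClause, ForbiddenInClause)

cs = ConfigurationSpace(seed=5)
penalty = CategoricalHyperparameter("penalty", ["l1", "l2", "elasticnet"],
                                    default_value="l2")
solver = CategoricalHyperparameter("solver", ["lbfgs", "liblinear", "saga"],
                                   default_value="lbfgs")
cs.add_hyperparameters([penalty, solver])
# lbfgs supports neither the l1 nor the elasticnet penalty
cs.add_forbidden_clause(ForbiddenAndConjunction(
    ForbiddenEqualsClause(solver, "lbfgs"),
    ForbiddenInClause(penalty, ["l1", "elasticnet"])))

for config in cs.sample_configuration(size=50):
    values = config.get_dictionary()
    assert not (values["solver"] == "lbfgs"
                and values["penalty"] in ("l1", "elasticnet"))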
Example #28
    def get_cs():
        cs = ConfigurationSpace()
        shrinkage = CategoricalHyperparameter(
            "shrinkage", ["None", "auto", "manual"], default_value="None")
        shrinkage_factor = UniformFloatHyperparameter(
            "shrinkage_factor", 0., 1., 0.5)
        n_components = UniformIntegerHyperparameter('n_components', 1, 250, default_value=10)
        tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default_value=1e-4, log=True)
        cs.add_hyperparameters([shrinkage, shrinkage_factor, n_components, tol])

        cs.add_condition(EqualsCondition(shrinkage_factor, shrinkage, "manual"))
        return cs
Example #29
    def add_params(cs: ConfigurationSpace):
        '''
            adds a condition to the given ConfigurationSpace that activates
            the classifier only when the MultiClassifier selector is chosen
        '''

        selector = cs.get_hyperparameter("selector")
        classifier = cs.get_hyperparameter("classifier")
        if "MultiClassifier" in selector.choices:
            cond = InCondition(child=classifier,
                               parent=selector,
                               values=["MultiClassifier"])
            cs.add_condition(cond)
Example #30
File: lda.py (project: mfeurer/pc_smac)
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        shrinkage = cs.add_hyperparameter(CategoricalHyperparameter(
            "shrinkage", ["None", "auto", "manual"], default="None"))
        shrinkage_factor = cs.add_hyperparameter(UniformFloatHyperparameter(
            "shrinkage_factor", 0., 1., 0.5))
        n_components = cs.add_hyperparameter(UniformIntegerHyperparameter(
            'n_components', 1, 250, default=10))
        tol = cs.add_hyperparameter(UniformFloatHyperparameter(
            "tol", 1e-5, 1e-1, default=1e-4, log=True))

        cs.add_condition(EqualsCondition(shrinkage_factor, shrinkage, "manual"))
        return cs
Example #31
    def test_write_equals_condition_categorical(self):
        expected = "ls '--ls ' c {sa,ca,ny}\ntemp '--temp ' r (0.500000, 1.000000)|  ls==sa\n"

        temp = UniformFloatHyperparameter("temp", 0.5, 1)
        ls = CategoricalHyperparameter("ls", ["sa", "ca", "ny"], "sa")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(temp)
        cs.add_hyperparameter(ls)
        c1 = EqualsCondition(temp, ls, 'sa')
        cs.add_condition(c1)
        value = irace.write(cs)
        self.assertEqual(expected, value)
Example #32
    def test_write_equals_condition_numerical(self):
        expected = "temp '--temp ' i (1, 2)\nls '--ls ' c {sa,ca,ny}|  temp==2\n"

        temp = UniformIntegerHyperparameter("temp", 1, 2)
        ls = CategoricalHyperparameter("ls", ["sa", "ca", "ny"], "sa")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(temp)
        cs.add_hyperparameter(ls)
        c1 = EqualsCondition(ls, temp, 2)
        cs.add_condition(c1)
        value = irace.write(cs)
        self.assertEqual(expected, value)
Example #33
    def test_write_in_condition(self):
        expected = "ls '--ls ' c {sa,ca,ny}\ntemp '--temp ' r (0.500000, 1.000000)|  ls  %in%  c(sa,ca)\n"

        temp = UniformFloatHyperparameter("temp", 0.5, 1)
        ls = CategoricalHyperparameter("ls", ["sa", "ca", "ny"], "sa")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(temp)
        cs.add_hyperparameter(ls)
        c1 = InCondition(temp, ls, ['sa','ca'])
        cs.add_condition(c1)
        value = irace.write(cs)
        self.assertEqual(expected, value)
Example #35
File: lda.py (project: Ayaro/auto-sklearn)
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        shrinkage = cs.add_hyperparameter(CategoricalHyperparameter(
            "shrinkage", ["None", "auto", "manual"], default="None"))
        shrinkage_factor = cs.add_hyperparameter(UniformFloatHyperparameter(
            "shrinkage_factor", 0., 1., 0.5))
        n_components = cs.add_hyperparameter(UniformIntegerHyperparameter(
            'n_components', 1, 250, default=10))
        tol = cs.add_hyperparameter(UniformFloatHyperparameter(
            "tol", 1e-5, 1e-1, default=1e-4, log=True))

        cs.add_condition(EqualsCondition(shrinkage_factor, shrinkage, "manual"))
        return cs
Example #36
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        num_blocks: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_blocks", value_range=(1, 10), default_value=5),
        num_filters: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_filters",
            value_range=(4, 64),
            default_value=32),
        kernel_size: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="kernel_size",
            value_range=(4, 64),
            default_value=32),
        use_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_dropout",
            value_range=(True, False),
            default_value=False),
        dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="dropout", value_range=(0, 0.5), default_value=0.1),
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        min_num_blocks, max_num_blocks = num_blocks.value_range
        num_blocks_hp = get_hyperparameter(num_blocks,
                                           UniformIntegerHyperparameter)
        cs.add_hyperparameter(num_blocks_hp)

        add_hyperparameter(cs, kernel_size, UniformIntegerHyperparameter)

        use_dropout_hp = get_hyperparameter(use_dropout,
                                            CategoricalHyperparameter)
        cs.add_hyperparameter(use_dropout_hp)

        dropout_hp = get_hyperparameter(dropout, UniformFloatHyperparameter)
        cs.add_hyperparameter(dropout_hp)
        cs.add_condition(CS.EqualsCondition(dropout_hp, use_dropout_hp, True))

        for i in range(0, int(max_num_blocks)):
            num_filter_search_space = HyperparameterSearchSpace(
                f"num_filters_{i}",
                value_range=num_filters.value_range,
                default_value=num_filters.default_value,
                log=num_filters.log)
            num_filters_hp = get_hyperparameter(num_filter_search_space,
                                                UniformIntegerHyperparameter)
            cs.add_hyperparameter(num_filters_hp)
            if i >= int(min_num_blocks):
                cs.add_condition(
                    CS.GreaterThanCondition(num_filters_hp, num_blocks_hp, i))

        return cs
Example #37
 def __activate(self, value: Dict, store: Dict, cs: ConfigurationSpace):
     assert isinstance(value, dict)
     for k, v in value.items():
         assert isinstance(v, dict)
         reversed_dict = self.reverse_dict(v)
         reversed_dict = self.pop_covered_item(reversed_dict, len(v))
         for sk, sv in reversed_dict.items():
             cond = self.__condition(
                 {
                     "_child": sk,
                     "_values": sv,
                     "_parent": k
                 }, store)
             cs.add_condition(cond)
Example #38
    def test_read_new_configuration_space_complex_conditionals(self):
        classi = OrdinalHyperparameter("classi", ["random_forest", "extra_trees", "k_nearest_neighbors", "something"])
        knn_weights = CategoricalHyperparameter("knn_weights", ["uniform", "distance"])
        weather = OrdinalHyperparameter("weather", ["sunny", "rainy", "cloudy", "snowing"])
        temperature = CategoricalHyperparameter("temperature", ["high", "low"])
        rain = CategoricalHyperparameter("rain", ["yes", "no"])
        gloves = OrdinalHyperparameter("gloves", ["none", "yarn", "leather", "gortex"])
        heur1 = CategoricalHyperparameter("heur1", ["off", "on"])
        heur2 = CategoricalHyperparameter("heur2", ["off", "on"])
        heur_order = CategoricalHyperparameter("heur_order", ["heur1then2", "heur2then1"])
        gloves_condition = OrConjunction(EqualsCondition(gloves, rain, "yes"),
                                         EqualsCondition(gloves, temperature, "low"))
        heur_condition = AndConjunction(EqualsCondition(heur_order, heur1, "on"),
                                        EqualsCondition(heur_order, heur2, "on"))
        and_conjunction = AndConjunction(NotEqualsCondition(knn_weights, classi, "extra_trees"),
                                         EqualsCondition(knn_weights, classi, "random_forest"))
        Cl_condition = OrConjunction(EqualsCondition(knn_weights, classi, "k_nearest_neighbors"),
                                     and_conjunction,
                                     EqualsCondition(knn_weights, classi, "something"))

        and1 = AndConjunction(EqualsCondition(temperature, weather, "rainy"),
                              EqualsCondition(temperature, weather, "cloudy"))
        and2 = AndConjunction(EqualsCondition(temperature, weather, "sunny"),
                              NotEqualsCondition(temperature, weather, "snowing"))
        another_condition = OrConjunction(and1, and2)

        complex_conditional_space = ConfigurationSpace()
        complex_conditional_space.add_hyperparameter(classi)
        complex_conditional_space.add_hyperparameter(knn_weights)
        complex_conditional_space.add_hyperparameter(weather)
        complex_conditional_space.add_hyperparameter(temperature)
        complex_conditional_space.add_hyperparameter(rain)
        complex_conditional_space.add_hyperparameter(gloves)
        complex_conditional_space.add_hyperparameter(heur1)
        complex_conditional_space.add_hyperparameter(heur2)
        complex_conditional_space.add_hyperparameter(heur_order)

        complex_conditional_space.add_condition(gloves_condition)
        complex_conditional_space.add_condition(heur_condition)
        complex_conditional_space.add_condition(Cl_condition)
        complex_conditional_space.add_condition(another_condition)

        complex_cs = list()
        complex_cs.append("classi ordinal {random_forest,extra_trees,k_nearest_neighbors, something} [random_forest]")
        complex_cs.append("knn_weights categorical {uniform, distance} [uniform]")
        complex_cs.append("weather ordinal {sunny, rainy, cloudy, snowing} [sunny]")
        complex_cs.append("temperature categorical {high, low} [high]")
        complex_cs.append("rain categorical { yes, no } [yes]")
        complex_cs.append("gloves ordinal { none, yarn, leather, gortex } [none]")
        complex_cs.append("heur1 categorical { off, on } [off]")
        complex_cs.append("heur2 categorical { off, on } [off]")
        complex_cs.append("heur_order categorical { heur1then2, heur2then1 } [heur1then2]")
        complex_cs.append("gloves | rain == yes || temperature == low")
        complex_cs.append("heur_order | heur1 == on && heur2 == on")
        complex_cs.append("knn_weights | classi == k_nearest_neighbors || "
                          "classi != extra_trees && classi == random_forest || classi == something")
        complex_cs.append("temperature | weather == rainy && weather == cloudy || "
                          "weather == sunny && weather != snowing")
        cs_new = pcs_new.read(complex_cs)
        self.assertEqual(cs_new, complex_conditional_space)
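The pcs_new module also writes spaces with conjunctions back out, so read and write can be composed. A hedged round-trip sketch with a small OrConjunction, mirroring the gloves condition from the test above:

from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import CategoricalHyperparameter
from ConfigSpace.conditions import EqualsCondition, OrConjunction
from ConfigSpace.read_and_write import pcs_new

cs = ConfigurationSpace()
rain = CategoricalHyperparameter("rain", ["yes", "no"], default_value="yes")
temperature = CategoricalHyperparameter("temperature", ["high", "low"],
                                        default_value="high")
gloves = CategoricalHyperparameter("gloves", ["none", "leather"],
                                   default_value="none")
cs.add_hyperparameters([rain, temperature, gloves])
# gloves are relevant if it rains or if it is cold
cs.add_condition(OrConjunction(EqualsCondition(gloves, rain, "yes"),
                               EqualsCondition(gloves, temperature, "low")))

restored = pcs_new.read(pcs_new.write(cs).split("\n"))
assert restored == cs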
Example #39
    def get_hyperparameter_search_space(dataset_properties=None):
        # Copied from libsvm_c
        C = UniformFloatHyperparameter(
            name="C", lower=0.03125, upper=32768, log=True, default=1.0)

        kernel = CategoricalHyperparameter(
            name="kernel", choices=['linear', 'poly', 'rbf', 'sigmoid'],
            default="rbf")
        degree = UniformIntegerHyperparameter(
            name="degree", lower=1, upper=5, default=3)

        # gamma default kept at 0.1, matching the classification setting
        gamma = UniformFloatHyperparameter(
            name="gamma", lower=3.0517578125e-05, upper=8, log=True, default=0.1)

        # TODO this is totally ad-hoc
        coef0 = UniformFloatHyperparameter(
            name="coef0", lower=-1, upper=1, default=0)
        # probability is not a hyperparameter, but an argument to the SVM algorithm
        shrinking = CategoricalHyperparameter(
            name="shrinking", choices=["True", "False"], default="True")
        tol = UniformFloatHyperparameter(
            name="tol", lower=1e-5, upper=1e-1, default=1e-3, log=True)
        max_iter = UnParametrizedHyperparameter("max_iter", -1)

        # Random Guess
        epsilon = UniformFloatHyperparameter(name="epsilon", lower=0.001,
                                             upper=1, default=0.1, log=True)
        cs = ConfigurationSpace()
        cs.add_hyperparameter(C)
        cs.add_hyperparameter(kernel)
        cs.add_hyperparameter(degree)
        cs.add_hyperparameter(gamma)
        cs.add_hyperparameter(coef0)
        cs.add_hyperparameter(shrinking)
        cs.add_hyperparameter(tol)
        cs.add_hyperparameter(max_iter)
        cs.add_hyperparameter(epsilon)

        degree_depends_on_kernel = InCondition(child=degree, parent=kernel,
                                               values=('poly', 'rbf', 'sigmoid'))
        gamma_depends_on_kernel = InCondition(child=gamma, parent=kernel,
                                              values=('poly', 'rbf'))
        coef0_depends_on_kernel = InCondition(child=coef0, parent=kernel,
                                              values=('poly', 'sigmoid'))
        cs.add_condition(degree_depends_on_kernel)
        cs.add_condition(gamma_depends_on_kernel)
        cs.add_condition(coef0_depends_on_kernel)
        return cs
Example #41
    def get_hyperparameter_search_space(dataset_properties=None, optimizer='smac'):
        if optimizer == 'smac':
            C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True,
                                           default_value=1.0)
            # No linear kernel here, because we have liblinear
            kernel = CategoricalHyperparameter(name="kernel",
                                               choices=["rbf", "poly", "sigmoid"],
                                               default_value="rbf")
            degree = UniformIntegerHyperparameter("degree", 2, 5, default_value=3)
            gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8,
                                               log=True, default_value=0.1)
            coef0 = UniformFloatHyperparameter("coef0", -1, 1, default_value=0)
            shrinking = CategoricalHyperparameter("shrinking", ["True", "False"],
                                                  default_value="True")
            tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default_value=1e-3,
                                             log=True)
            # cache size is not a hyperparameter, but an argument to the program!
            max_iter = UnParametrizedHyperparameter("max_iter", 2000)

            cs = ConfigurationSpace()
            cs.add_hyperparameters([C, kernel, degree, gamma, coef0, shrinking,
                                    tol, max_iter])

            degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
            coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
            cs.add_condition(degree_depends_on_poly)
            cs.add_condition(coef0_condition)

            return cs
        elif optimizer == 'tpe':
            coef0 = hp.uniform("libsvm_coef0", -1, 1)
            space = {'C': hp.loguniform('libsvm_C', np.log(0.03125), np.log(32768)),
                     'gamma': hp.loguniform('libsvm_gamma', np.log(3.0517578125e-5), np.log(8)),
                     'shrinking': hp.choice('libsvm_shrinking', ["True", "False"]),
                     'tol': hp.loguniform('libsvm_tol', np.log(1e-5), np.log(1e-1)),
                     'max_iter': hp.choice('libsvm_max_iter', [2000]),
                     'kernel': hp.choice('libsvm_kernel',
                                         [("poly", {'degree': hp.randint('libsvm_degree', 4) + 2, 'coef0': coef0}),
                                          ("rbf", {}),
                                          ("sigmoid", {'coef0': coef0})])}

            init_trial = {'C': 1,
                          'gamma': 0.1,
                          'shrinking': "True",
                          'tol': 1e-3,
                          'max_iter': 2000,
                          'kernel': ("rbf", {})}

            return space
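
The nested hp.choice above ties degree and coef0 to the kernels that actually use them. As a quick sanity check one can draw raw samples from the returned dictionary; the sketch below is illustrative only and assumes hyperopt and numpy are importable and that the function above is reachable as a plain function (in the original file it is a method).

from hyperopt.pyll.stochastic import sample

# Draw a few raw samples from the 'tpe' search space defined above.
space = get_hyperparameter_search_space(optimizer='tpe')
for _ in range(3):
    cfg = sample(space)
    # 'kernel' comes back as a (name, sub-params) tuple, e.g. ('poly', {'degree': 4, 'coef0': ...})
    kernel_name, kernel_params = cfg['kernel']
    print(kernel_name, kernel_params, cfg['C'], cfg['gamma'])
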
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        n_components = cs.add_hyperparameter(UniformIntegerHyperparameter(
            "n_components", 10, 2000, default=100))
        algorithm = cs.add_hyperparameter(CategoricalHyperparameter('algorithm',
            ['parallel', 'deflation'], 'parallel'))
        whiten = cs.add_hyperparameter(CategoricalHyperparameter('whiten',
            ['False', 'True'], 'False'))
        fun = cs.add_hyperparameter(CategoricalHyperparameter(
            'fun', ['logcosh', 'exp', 'cube'], 'logcosh'))

        cs.add_condition(EqualsCondition(n_components, whiten, "True"))

        return cs
Example #43
File: sgd.py Project: Ayaro/auto-sklearn
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        loss = cs.add_hyperparameter(CategoricalHyperparameter("loss",
            ["squared_loss", "huber", "epsilon_insensitive", "squared_epsilon_insensitive"],
            default="squared_loss"))
        penalty = cs.add_hyperparameter(CategoricalHyperparameter(
            "penalty", ["l1", "l2", "elasticnet"], default="l2"))
        alpha = cs.add_hyperparameter(UniformFloatHyperparameter(
            "alpha", 10e-7, 1e-1, log=True, default=0.01))
        l1_ratio = cs.add_hyperparameter(UniformFloatHyperparameter(
            "l1_ratio", 1e-9, 1., log=True, default=0.15))
        fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter(
            "fit_intercept", "True"))
        n_iter = cs.add_hyperparameter(UniformIntegerHyperparameter(
            "n_iter", 5, 1000, log=True, default=20))
        epsilon = cs.add_hyperparameter(UniformFloatHyperparameter(
            "epsilon", 1e-5, 1e-1, default=1e-4, log=True))
        learning_rate = cs.add_hyperparameter(CategoricalHyperparameter(
            "learning_rate", ["optimal", "invscaling", "constant"],
            default="optimal"))
        eta0 = cs.add_hyperparameter(UniformFloatHyperparameter(
            "eta0", 10 ** -7, 0.1, default=0.01))
        power_t = cs.add_hyperparameter(UniformFloatHyperparameter(
            "power_t", 1e-5, 1, default=0.5))
        average = cs.add_hyperparameter(CategoricalHyperparameter(
            "average", ["False", "True"], default="False"))

        # TODO add passive/aggressive here, although not properly documented?
        elasticnet = EqualsCondition(l1_ratio, penalty, "elasticnet")
        epsilon_condition = InCondition(epsilon, loss,
            ["huber", "epsilon_insensitive", "squared_epsilon_insensitive"])
        # eta0 seems to be always active according to the source code; when
        # learning_rate is set to optimal, eta0 is the starting value:
        # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/linear_model/sgd_fast.pyx
        # eta0_and_inv = EqualsCondition(eta0, learning_rate, "invscaling")
        # eta0_and_constant = EqualsCondition(eta0, learning_rate, "constant")
        # eta0_condition = OrConjunction(eta0_and_inv, eta0_and_constant)
        power_t_condition = EqualsCondition(power_t, learning_rate,
                                            "invscaling")

        cs.add_condition(elasticnet)
        cs.add_condition(epsilon_condition)
        cs.add_condition(power_t_condition)

        return cs
Example #44
    def test_write_AndConjunction_condition(self):
        expected = "lp '--lp ' c {mi,bo}\nls '--ls ' c {sa,ca,ny}\ntemp '--temp ' r (0.500000, 1.000000)|  ls  %in%  c(sa,ca)  &&  lp  %in%  c(bo)\n"

        temp = UniformFloatHyperparameter("temp", 0.5, 1)
        ls = CategoricalHyperparameter("ls", ["sa", "ca", "ny"], "sa")
        lp = CategoricalHyperparameter("lp", ["mi", "bo"], "bo")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(temp)
        cs.add_hyperparameter(lp)
        cs.add_hyperparameter(ls)

        c1 = InCondition(temp, ls, ['sa','ca'])
        c2 = InCondition(temp, lp, ['bo'])
        c3 = AndConjunction(c1, c2)
        cs.add_condition(c3)
        value = irace.write(cs)
        self.assertEqual(expected, value)
Example #45
File: json.py Project: automl/ConfigSpace
def read(jason_string):
    """
    Creates a configuration space definition from a json string.

    Example
    -------

    >>> from ConfigSpace.read_and_write import json
    >>> with open('configspace.json', 'r') as f:
    >>>     jason_string = f.read()
    >>>     config = json.read(jason_string)

    Parameters
    ----------
    jason_string : str
        A json string representing a configuration space definition

    Returns
    -------
    :class:`~ConfigSpace.configuration_space.ConfigurationSpace`
        The restored ConfigurationSpace object
    """
    jason = json.loads(jason_string)
    if 'name' in jason:
        configuration_space = ConfigurationSpace(name=jason['name'])
    else:
        configuration_space = ConfigurationSpace()

    for hyperparameter in jason['hyperparameters']:
        configuration_space.add_hyperparameter(_construct_hyperparameter(
            hyperparameter,
        ))

    for condition in jason['conditions']:
        configuration_space.add_condition(_construct_condition(
            condition, configuration_space,
        ))

    for forbidden in jason['forbiddens']:
        configuration_space.add_forbidden_clause(_construct_forbidden(
            forbidden, configuration_space,
        ))

    return configuration_space
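
A write/read round trip is the quickest way to see the reader above in action. The sketch below is a minimal example assuming the companion write() function in ConfigSpace.read_and_write.json; the alias cs_json is only there to avoid shadowing the standard-library json module.

from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter
from ConfigSpace.read_and_write import json as cs_json

cs = ConfigurationSpace()
cs.add_hyperparameter(UniformFloatHyperparameter("alpha", 0.01, 0.5))

jason_string = cs_json.write(cs)       # serialise the space to a json string
restored = cs_json.read(jason_string)  # restore it with the reader above
print(restored == cs)                  # a faithful round trip should print True
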
Example #46
    def test_write_OrConjunction_condition(self):
        import numpy as np
        expected = "lp '--lp ' c {mi,bo}\ntemp '--temp ' r (2.000000, 5.000000)\nls '--ls ' c {sa,ca,ny}|  temp==3.0  ||  lp  %in%  c(bo)\n"

        temp = UniformFloatHyperparameter("temp", np.exp(2), np.exp(5), log=True)
        ls = CategoricalHyperparameter("ls", ["sa", "ca", "ny"], "sa")
        lp = CategoricalHyperparameter("lp", ["mi", "bo"], "bo")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(temp)
        cs.add_hyperparameter(lp)
        cs.add_hyperparameter(ls)

        c1 = EqualsCondition(ls, temp, np.exp(3))
        c2 = InCondition(ls, lp, ['bo'])
        c3 = OrConjunction(c1, c2)
        cs.add_condition(c3)
        value = irace.write(cs)
        self.assertEqual(expected, value)
Example #47
    def add_params(cs: ConfigurationSpace, cutoff: int):
        '''
            Adds parameters and conditions to the given ConfigurationSpace.

            Arguments
            ---------
            cs: ConfigurationSpace
                configuration space to which the new parameters and conditions are added
            cutoff: int
                maximal possible time for aspeed
        '''

        pre_solving = CategoricalHyperparameter(
            "presolving", choices=[True, False], default=True)
        cs.add_hyperparameter(pre_solving)
        pre_cutoff = UniformIntegerHyperparameter(
            "pre:cutoff", lower=0, upper=cutoff, default=math.ceil(cutoff * 0.1))
        cs.add_hyperparameter(pre_cutoff)
        cond = InCondition(child=pre_cutoff, parent=pre_solving, values=[True])
        cs.add_condition(cond)
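
For illustration, the helper can be driven as below; a minimal sketch assuming the helper and its ConfigSpace imports (and math) are in scope, using the same older ConfigSpace API (default= keyword) as the snippet itself.

from ConfigSpace.configuration_space import ConfigurationSpace

cs = ConfigurationSpace()
add_params(cs, cutoff=300)          # adds 'presolving' and the conditional 'pre:cutoff'
config = cs.sample_configuration()  # 'pre:cutoff' is only active when presolving is True
print(config)
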
Example #48
    def get_hyperparameter_search_space(dataset_properties=None):
        n_components = UniformIntegerHyperparameter(
            "n_components", 10, 2000, default=100)
        kernel = CategoricalHyperparameter('kernel',
            ['poly', 'rbf', 'sigmoid', 'cosine'], 'rbf')
        degree = UniformIntegerHyperparameter('degree', 2, 5, 3)
        gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8,
                                           log=True, default=1.0)
        coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0)
        cs = ConfigurationSpace()
        cs.add_hyperparameter(n_components)
        cs.add_hyperparameter(kernel)
        cs.add_hyperparameter(degree)
        cs.add_hyperparameter(gamma)
        cs.add_hyperparameter(coef0)

        degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
        coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
        gamma_condition = InCondition(gamma, kernel, ["poly", "rbf"])
        cs.add_condition(degree_depends_on_poly)
        cs.add_condition(coef0_condition)
        cs.add_condition(gamma_condition)
        return cs
    def get_hyperparameter_search_space(dataset_properties=None):
        if dataset_properties is not None and \
                (dataset_properties.get("is_sparse") is True or
                 dataset_properties.get("signed") is False):
            allow_chi2 = False
        else:
            allow_chi2 = True

        possible_kernels = ['poly', 'rbf', 'sigmoid', 'cosine']
        if allow_chi2:
            possible_kernels.append("chi2")
        kernel = CategoricalHyperparameter('kernel', possible_kernels, 'rbf')
        degree = UniformIntegerHyperparameter('degree', 2, 5, 3)
        gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8,
                                           log=True, default=0.1)
        coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0)
        n_components = UniformIntegerHyperparameter(
            "n_components", 50, 10000, default=100, log=True)

        cs = ConfigurationSpace()
        cs.add_hyperparameter(kernel)
        cs.add_hyperparameter(degree)
        cs.add_hyperparameter(gamma)
        cs.add_hyperparameter(coef0)
        cs.add_hyperparameter(n_components)

        degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
        coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])

        gamma_kernels = ["poly", "rbf", "sigmoid"]
        if allow_chi2:
            gamma_kernels.append("chi2")
        gamma_condition = InCondition(gamma, kernel, gamma_kernels)
        cs.add_condition(degree_depends_on_poly)
        cs.add_condition(coef0_condition)
        cs.add_condition(gamma_condition)
        return cs
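
The branch at the top only offers the chi2 kernel when the data is dense and non-negative. A minimal sketch of that gating, assuming the function above is callable as a plain function:

# chi2 is offered for dense, non-negative data ...
cs_dense = get_hyperparameter_search_space(dataset_properties=None)
print(cs_dense.get_hyperparameter("kernel").choices)   # expected to include 'chi2'

# ... but not when the data is sparse (or signed).
cs_sparse = get_hyperparameter_search_space(dataset_properties={"is_sparse": True})
print(cs_sparse.get_hyperparameter("kernel").choices)  # expected to exclude 'chi2'
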
Example #50
    def get_hyperparameter_search_space(cls, dataset_properties,
                                        default=None,
                                        include=None,
                                        exclude=None):
        if include is not None and exclude is not None:
            raise ValueError("The argument include and exclude cannot be used together.")

        cs = ConfigurationSpace()

        # Compile a list of all estimator objects for this problem
        available_estimators = cls.get_available_components(
            data_prop=dataset_properties,
            include=include,
            exclude=exclude)

        if len(available_estimators) == 0:
            raise ValueError("No regressors found")

        if default is None:
            defaults = ['random_forest', 'support_vector_regression'] + \
                list(available_estimators.keys())
            for default_ in defaults:
                if default_ in available_estimators:
                    if include is not None and default_ not in include:
                        continue
                    if exclude is not None and default_ in exclude:
                        continue
                    default = default_
                    break

        estimator = CategoricalHyperparameter('__choice__',
                                              list(available_estimators.keys()),
                                              default=default)
        cs.add_hyperparameter(estimator)
        for estimator_name in available_estimators.keys():

            # We have to retrieve the configuration space every time because
            # we change the objects it returns. If we reused it, we could not
            # retrieve the conditions further down
            # TODO implement copy for hyperparameters and forbidden and
            # conditions!

            estimator_configuration_space = available_estimators[
                estimator_name]. \
                get_hyperparameter_search_space(dataset_properties)
            for parameter in estimator_configuration_space.get_hyperparameters():
                new_parameter = copy.deepcopy(parameter)
                new_parameter.name = "%s:%s" % (
                    estimator_name, new_parameter.name)
                cs.add_hyperparameter(new_parameter)
                # We must only add a condition if the hyperparameter is not
                # conditional on something else
                if len(estimator_configuration_space.
                        get_parents_of(parameter)) == 0:
                    condition = EqualsCondition(new_parameter, estimator,
                                                estimator_name)
                    cs.add_condition(condition)

            for condition in available_estimators[estimator_name]. \
                    get_hyperparameter_search_space(
                    dataset_properties).get_conditions():
                dlcs = condition.get_descendant_literal_conditions()
                for dlc in dlcs:
                    if not dlc.child.name.startswith(estimator_name):
                        dlc.child.name = "%s:%s" % (
                            estimator_name, dlc.child.name)
                    if not dlc.parent.name.startswith(estimator_name):
                        dlc.parent.name = "%s:%s" % (
                            estimator_name, dlc.parent.name)
                cs.add_condition(condition)

            for forbidden_clause in available_estimators[estimator_name]. \
                    get_hyperparameter_search_space(
                    dataset_properties).forbidden_clauses:
                dlcs = forbidden_clause.get_descendant_literal_clauses()
                for dlc in dlcs:
                    if not dlc.hyperparameter.name.startswith(estimator_name):
                        dlc.hyperparameter.name = "%s:%s" % (estimator_name,
                                                             dlc.hyperparameter.name)
                cs.add_forbidden_clause(forbidden_clause)

        return cs
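
The loop above namespaces every estimator hyperparameter as '<estimator>:<name>' and attaches an EqualsCondition on '__choice__' so that only the selected regressor's parameters are active. A self-contained sketch of that pattern with two made-up components (the names 'ridge' and 'svr' here are purely illustrative):

from ConfigSpace.conditions import EqualsCondition
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                         UniformFloatHyperparameter)

cs = ConfigurationSpace()
choice = CategoricalHyperparameter('__choice__', ['ridge', 'svr'])
cs.add_hyperparameter(choice)

# Namespaced per-component hyperparameters.
ridge_alpha = UniformFloatHyperparameter('ridge:alpha', 1e-5, 10, log=True)
svr_c = UniformFloatHyperparameter('svr:C', 0.03125, 32768, log=True)
cs.add_hyperparameter(ridge_alpha)
cs.add_hyperparameter(svr_c)

# Each parameter is only active when its component is selected.
cs.add_condition(EqualsCondition(ridge_alpha, choice, 'ridge'))
cs.add_condition(EqualsCondition(svr_c, choice, 'svr'))

print(cs.sample_configuration())
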
Example #51
neurons = UniformIntegerHyperparameter("neurons", 16, 1024)
neurons_condition = EqualsCondition(neurons, classifier, "nn")
lr = UniformFloatHyperparameter("lr", 0.0001, 1.0)
lr_condition = EqualsCondition(lr, classifier, "nn")
preprocessing = CategoricalHyperparameter("preprocessing", ["None", "pca"])
conditional_space = ConfigurationSpace()
conditional_space.add_hyperparameter(preprocessing)
conditional_space.add_hyperparameter(classifier)
conditional_space.add_hyperparameter(kernel)
conditional_space.add_hyperparameter(C)
conditional_space.add_hyperparameter(neurons)
conditional_space.add_hyperparameter(lr)
conditional_space.add_hyperparameter(degree)
conditional_space.add_hyperparameter(gamma)

conditional_space.add_condition(C_condition)
conditional_space.add_condition(kernel_condition)
conditional_space.add_condition(lr_condition)
conditional_space.add_condition(neurons_condition)
conditional_space.add_condition(degree_condition)
conditional_space.add_condition(gamma_condition)

float_a = UniformFloatHyperparameter("float_a", -1.23, 6.45)
e_float_a = UniformFloatHyperparameter("e_float_a", .5E-2, 4.5e+06)
int_a = UniformIntegerHyperparameter("int_a", -1, 6)
log_a = UniformFloatHyperparameter("log_a", 4e-1, 6.45, log=True)
int_log_a = UniformIntegerHyperparameter("int_log_a", 1, 6, log=True)
cat_a = CategoricalHyperparameter("cat_a", ["a", "b", "c", "d"])
crazy = CategoricalHyperparameter("@.:;/\?!$%&_-<>*+1234567890", ["const"])
easy_space = ConfigurationSpace()
easy_space.add_hyperparameter(float_a)
Example #52
    def add_params(cs: ConfigurationSpace):
        '''
            Adds parameters and conditions to the given ConfigurationSpace.
        '''
        try:
            classifier = cs.get_hyperparameter("classifier")
            classifier.choices.append("RandomForest")
        except KeyError:
            classifier = CategoricalHyperparameter(
                "classifier", choices=["RandomForest"], default="RandomForest")
            cs.add_hyperparameter(classifier)

        n_estimators = UniformIntegerHyperparameter(
            name="rf:n_estimators", lower=10, upper=100, default=10, log=True)
        cs.add_hyperparameter(n_estimators)
        criterion = CategoricalHyperparameter(
            name="rf:criterion", choices=["gini", "entropy"], default="gini")
        cs.add_hyperparameter(criterion)
        max_features = CategoricalHyperparameter(
            name="rf:max_features", choices=["sqrt", "log2", None], default="sqrt")
        cs.add_hyperparameter(max_features)
        max_depth = UniformIntegerHyperparameter(
            name="rf:max_depth", lower=10, upper=2**31, default=2**31, log=True)
        cs.add_hyperparameter(max_depth)
        min_samples_split = UniformIntegerHyperparameter(
            name="rf:min_samples_split", lower=2, upper=100, default=2, log=True)
        cs.add_hyperparameter(min_samples_split)
        min_samples_leaf = UniformIntegerHyperparameter(
            name="rf:min_samples_leaf", lower=2, upper=100, default=10, log=True)
        cs.add_hyperparameter(min_samples_leaf)
        bootstrap = CategoricalHyperparameter(
            name="rf:bootstrap", choices=[True, False], default=True)
        cs.add_hyperparameter(bootstrap)

        cond = InCondition(
            child=n_estimators, parent=classifier, values=["RandomForest"])
        cs.add_condition(cond)
        cond = InCondition(
            child=criterion, parent=classifier, values=["RandomForest"])
        cs.add_condition(cond)
        cond = InCondition(
            child=max_features, parent=classifier, values=["RandomForest"])
        cs.add_condition(cond)
        cond = InCondition(
            child=max_depth, parent=classifier, values=["RandomForest"])
        cs.add_condition(cond)
        cond = InCondition(
            child=min_samples_split, parent=classifier, values=["RandomForest"])
        cs.add_condition(cond)
        cond = InCondition(
            child=min_samples_leaf, parent=classifier, values=["RandomForest"])
        cs.add_condition(cond)
        cond = InCondition(
            child=bootstrap, parent=classifier, values=["RandomForest"])
        cs.add_condition(cond)
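
A short usage sketch for the helper above, again assuming its imports are in scope and the same older ConfigSpace API (default= keyword) that the snippet uses:

from ConfigSpace.configuration_space import ConfigurationSpace

cs = ConfigurationSpace()
add_params(cs)                      # adds 'classifier' plus the conditional rf:* parameters
config = cs.sample_configuration()  # rf:* parameters are active only for 'RandomForest'
print(config)
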
Example #53
    def get_hyperparameter_search_space(cls, dataset_properties=None,
                                        default=None,
                                        include=None,
                                        exclude=None):
        cs = ConfigurationSpace()

        # Compile a list of legal preprocessors for this problem
        available_preprocessors = cls.get_available_components(
            data_prop=dataset_properties,
            include=include, exclude=exclude)

        if len(available_preprocessors) == 0:
            raise ValueError(
                "No rescaling algorithm found.")

        if default is None:
            defaults = ['min/max', 'standardize', 'none', 'normalize']
            for default_ in defaults:
                if default_ in available_preprocessors:
                    default = default_
                    break

        preprocessor = CategoricalHyperparameter('__choice__',
                                                 list(
                                                     available_preprocessors.keys()),
                                                 default=default)
        cs.add_hyperparameter(preprocessor)
        for name in available_preprocessors:
            preprocessor_configuration_space = available_preprocessors[name]. \
                get_hyperparameter_search_space(dataset_properties)
            for parameter in preprocessor_configuration_space.get_hyperparameters():
                new_parameter = copy.deepcopy(parameter)
                new_parameter.name = "%s:%s" % (name, new_parameter.name)
                cs.add_hyperparameter(new_parameter)
                # We must only add a condition if the hyperparameter is not
                # conditional on something else
                if len(preprocessor_configuration_space.
                        get_parents_of(parameter)) == 0:
                    condition = EqualsCondition(new_parameter, preprocessor,
                                                name)
                    cs.add_condition(condition)

            for condition in available_preprocessors[name]. \
                    get_hyperparameter_search_space(
                    dataset_properties).get_conditions():
                if not isinstance(condition, AbstractConjunction):
                    dlcs = [condition]
                else:
                    dlcs = condition.get_descendant_literal_conditions()
                for dlc in dlcs:
                    if not dlc.child.name.startswith(name):
                        dlc.child.name = "%s:%s" % (name, dlc.child.name)
                    if not dlc.parent.name.startswith(name):
                        dlc.parent.name = "%s:%s" % (name, dlc.parent.name)
                cs.add_condition(condition)

            for forbidden_clause in available_preprocessors[name]. \
                    get_hyperparameter_search_space(
                    dataset_properties).forbidden_clauses:
                dlcs = forbidden_clause.get_descendant_literal_clauses()
                for dlc in dlcs:
                    if not dlc.hyperparameter.name.startswith(name):
                        dlc.hyperparameter.name = "%s:%s" % (name,
                                                             dlc.hyperparameter.name)
                cs.add_forbidden_clause(forbidden_clause)

        return cs
Example #54
File: pcs.py Project: automl/ConfigSpace
def read(pcs_string, debug=False):
    """
    Reads in a :py:class:`~ConfigSpace.configuration_space.ConfigurationSpace`
     definition from a pcs file.

    Example
    -------

    >>> from ConfigSpace.read_and_write import pcs
    >>> with open('configspace.pcs', 'r') as fh:
    >>>     restored_conf = pcs.read(fh)

    Parameters
    ----------
    pcs_string : str
        ConfigSpace definition in pcs format
    debug : bool
        Provides debug information. Defaults to False.

    Returns
    -------
    :py:class:`~ConfigSpace.configuration_space.ConfigurationSpace`
        The restored ConfigurationSpace object

    """
    configuration_space = ConfigurationSpace()
    conditions = []
    forbidden = []

    # some statistics
    ct = 0
    cont_ct = 0
    cat_ct = 0
    line_ct = 0

    for line in pcs_string:
        line_ct += 1

        if "#" in line:
            # It contains a comment
            pos = line.find("#")
            line = line[:pos]

        # Remove quotes and whitespaces at beginning and end
        line = line.replace('"', "").replace("'", "")
        line = line.strip()

        if "|" in line:
            # It's a condition
            try:
                c = pp_condition.parseString(line)
                conditions.append(c)
            except pyparsing.ParseException:
                raise NotImplementedError("Could not parse condition: %s" % line)
            continue
        if "}" not in line and "]" not in line:
            continue
        if line.startswith("{") and line.endswith("}"):
            forbidden.append(line)
            continue
        if len(line.strip()) == 0:
            continue

        ct += 1
        param = None

        create = {"int": UniformIntegerHyperparameter,
                  "float": UniformFloatHyperparameter,
                  "categorical": CategoricalHyperparameter}

        try:
            param_list = pp_cont_param.parseString(line)
            il = param_list[9:]
            if len(il) > 0:
                il = il[0]
            param_list = param_list[:9]
            name = param_list[0]
            lower = float(param_list[2])
            upper = float(param_list[4])
            paramtype = "int" if "i" in il else "float"
            log = True if "l" in il else False
            default_value = float(param_list[7])
            param = create[paramtype](name=name, lower=lower, upper=upper,
                                      q=None, log=log, default_value=default_value)
            cont_ct += 1
        except pyparsing.ParseException:
            pass

        try:
            param_list = pp_cat_param.parseString(line)
            name = param_list[0]
            choices = [c for c in param_list[2:-4:2]]
            default_value = param_list[-2]
            param = create["categorical"](name=name, choices=choices,
                                          default_value=default_value)
            cat_ct += 1
        except pyparsing.ParseException:
            pass

        if param is None:
            raise NotImplementedError("Could not parse: %s" % line)

        configuration_space.add_hyperparameter(param)

    for clause in forbidden:
        # TODO test this properly!
        # TODO Add a try/catch here!
        # noinspection PyUnusedLocal
        param_list = pp_forbidden_clause.parseString(clause)
        tmp_list = []
        clause_list = []
        for value in param_list[1:]:
            if len(tmp_list) < 3:
                tmp_list.append(value)
            else:
                # So far, only equals is supported by SMAC
                if tmp_list[1] == '=':
                    # TODO maybe add a check if the hyperparameter is
                    # actually in the configuration space
                    clause_list.append(ForbiddenEqualsClause(
                        configuration_space.get_hyperparameter(tmp_list[0]),
                        tmp_list[2]))
                else:
                    raise NotImplementedError()
                tmp_list = []
        configuration_space.add_forbidden_clause(ForbiddenAndConjunction(
            *clause_list))

    # Now handle conditions
    # If there are two conditions for one child, these two conditions are an
    # AND-conjunction of conditions, thus we have to connect them
    conditions_per_child = OrderedDict()
    for condition in conditions:
        child_name = condition[0]
        if child_name not in conditions_per_child:
            conditions_per_child[child_name] = list()
        conditions_per_child[child_name].append(condition)

    for child_name in conditions_per_child:
        condition_objects = []
        for condition in conditions_per_child[child_name]:
            child = configuration_space.get_hyperparameter(child_name)
            parent_name = condition[2]
            parent = configuration_space.get_hyperparameter(parent_name)
            restrictions = condition[5:-1:2]

            # TODO: cast the type of the restriction!
            if len(restrictions) == 1:
                condition = EqualsCondition(child, parent, restrictions[0])
            else:
                condition = InCondition(child, parent, values=restrictions)
            condition_objects.append(condition)

        # Now we have all condition objects for this child, so we can build a
        #  giant AND-conjunction of them (if number of conditions >= 2)!

        if len(condition_objects) > 1:
            and_conjunction = AndConjunction(*condition_objects)
            configuration_space.add_condition(and_conjunction)
        else:
            configuration_space.add_condition(condition_objects[0])

    return configuration_space
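
As with the json reader, a write/read round trip exercises this parser end to end. A minimal sketch, assuming the companion write() in the same module; note that read() iterates over lines, so the serialised string is split first.

from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                         UniformFloatHyperparameter)
from ConfigSpace.read_and_write import pcs

cs = ConfigurationSpace()
cs.add_hyperparameter(UniformFloatHyperparameter("lr", 1e-4, 1e-1, log=True))
cs.add_hyperparameter(CategoricalHyperparameter("kernel", ["rbf", "poly"]))

serialised = pcs.write(cs)                   # old-style pcs text
restored = pcs.read(serialised.split("\n"))  # the reader above expects an iterable of lines
print(restored)
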
    def get_hyperparameter_search_space(dataset_properties=None):
        # GPUTRACK: Based on http://svail.github.io/rnn_perf/
        # We make batch size and number of units multiples of 64

        # Hacky way to condition layers params based on the number of layers
        # GPUTRACK: Reduced number of layers
        # 'c'=1, 'd'=2, 'e'=3, 'f'=4 + output_layer
        # layer_choices = [chr(i) for i in xrange(ord('c'), ord('e'))]

        layer_choices = ['c', 'd', 'e']

        batch_size = UniformIntegerHyperparameter("batch_size",
                                                  32, 4096,
                                                  log=True,
                                                  default=32)

        number_updates = UniformIntegerHyperparameter("number_updates",
                                                      200, 3500,
                                                      log=True,
                                                      default=200)

        num_layers = CategoricalHyperparameter("num_layers",
                                               choices=layer_choices,
                                               default='c')

        num_units_layer_1 = UniformIntegerHyperparameter("num_units_layer_1",
                                                         64, 4096,
                                                         log=True,
                                                         default=256)

        num_units_layer_2 = UniformIntegerHyperparameter("num_units_layer_2",
                                                         64, 4096,
                                                         log=True,
                                                         default=128)

        num_units_layer_3 = UniformIntegerHyperparameter("num_units_layer_3",
                                                         64, 4096,
                                                         log=True,
                                                         default=128)

        dropout_layer_1 = UniformFloatHyperparameter("dropout_layer_1",
                                                     0.0, 0.99,
                                                     default=0.5)

        dropout_layer_2 = UniformFloatHyperparameter("dropout_layer_2",
                                                     0.0, 0.99,
                                                     default=0.5)

        dropout_layer_3 = UniformFloatHyperparameter("dropout_layer_3",
                                                     0.0, 0.99,
                                                     default=0.5)

        dropout_output = UniformFloatHyperparameter("dropout_output",
                                                    0.0, 0.99,
                                                    default=0.5)

        lr = CategoricalHyperparameter("learning_rate",
                                       choices=[1e-1, 1e-2, 1e-3, 1e-4],
                                       default=1e-2)

        l2 = UniformFloatHyperparameter("lambda2", 1e-6, 1e-2, log=True,
                                        default=1e-3)

        std_layer_1 = UniformFloatHyperparameter("std_layer_1", 0.001, 0.1,
                                                 log=True,
                                                 default=0.005)

        std_layer_2 = UniformFloatHyperparameter("std_layer_2", 0.001, 0.1,
                                                 log=True,
                                                 default=0.005)

        std_layer_3 = UniformFloatHyperparameter("std_layer_3", 0.001, 0.1,
                                                 log=True,
                                                 default=0.005)

        # Using Tobias' adam
        solver = Constant(name="solver", value="smorm3s")
        non_linearities = CategoricalHyperparameter(name='activation',
                                                    choices=['relu', 'tanh'],
                                                    default='relu')

        cs = ConfigurationSpace()
        # cs.add_hyperparameter(number_epochs)
        cs.add_hyperparameter(number_updates)
        cs.add_hyperparameter(batch_size)
        cs.add_hyperparameter(num_layers)
        cs.add_hyperparameter(num_units_layer_1)
        cs.add_hyperparameter(num_units_layer_2)
        cs.add_hyperparameter(num_units_layer_3)
        cs.add_hyperparameter(dropout_layer_1)
        cs.add_hyperparameter(dropout_layer_2)
        cs.add_hyperparameter(dropout_layer_3)
        cs.add_hyperparameter(dropout_output)
        cs.add_hyperparameter(std_layer_1)
        cs.add_hyperparameter(std_layer_2)
        cs.add_hyperparameter(std_layer_3)
        cs.add_hyperparameter(lr)
        cs.add_hyperparameter(l2)
        cs.add_hyperparameter(solver)
        cs.add_hyperparameter(non_linearities)

        layer_2_condition = InCondition(num_units_layer_2, num_layers,
                                        ['d', 'e'])
        layer_3_condition = InCondition(num_units_layer_3, num_layers,
                                        ['e'])
        cs.add_condition(layer_2_condition)
        cs.add_condition(layer_3_condition)

        # Condition dropout parameter on layer choice
        dropout_2_condition = InCondition(dropout_layer_2, num_layers,
                                          ['d', 'e'])
        dropout_3_condition = InCondition(dropout_layer_3, num_layers,
                                          ['e'])
        cs.add_condition(dropout_2_condition)
        cs.add_condition(dropout_3_condition)

        # Condition std parameter on layer choice
        std_2_condition = InCondition(std_layer_2, num_layers, ['d', 'e'])
        std_3_condition = InCondition(std_layer_3, num_layers, ['e'])
        cs.add_condition(std_2_condition)
        cs.add_condition(std_3_condition)

        return cs
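
Because of the InConditions above, the layer-2 and layer-3 parameters only become active for the deeper num_layers choices ('d' and 'e'). A quick check, assuming the function is callable as a plain function and the same older ConfigSpace API (default= keyword):

cs = get_hyperparameter_search_space()
for config in cs.sample_configuration(10):
    active = config.get_dictionary()  # inactive hyperparameters are omitted
    print(active["num_layers"],
          "layer 2:", "num_units_layer_2" in active,
          "layer 3:", "num_units_layer_3" in active)
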
Example #56
def read(pcs_string, debug=False):
    """
    Reads in a :py:class:`~ConfigSpace.configuration_space.ConfigurationSpace`
    definition from a pcs file.

    Example
    -------

    >>> from ConfigSpace.read_and_write import pcs_new
    >>> with open('configspace.pcs', 'r') as fh:
    >>>     restored_conf = pcs_new.read(fh)

    Parameters
    ----------
    pcs_string : str
        ConfigSpace definition in pcs format
    debug : bool
        Provides debug information. Defaults to False.

    Returns
    -------
    :py:class:`~ConfigSpace.configuration_space.ConfigurationSpace`
        The restored ConfigurationSpace object

    """
    configuration_space = ConfigurationSpace()
    conditions = []
    forbidden = []

    # some statistics
    ct = 0
    cont_ct = 0
    cat_ct = 0
    ord_ct = 0
    line_ct = 0

    for line in pcs_string:
        line_ct += 1

        if "#" in line:
            # It contains a comment
            pos = line.find("#")
            line = line[:pos]

        # Remove quotes and whitespaces at beginning and end
        line = line.replace('"', "").replace("'", "")
        line = line.strip()
        if "|" in line:
            # It's a condition
            try:
                c = pp_condition.parseString(line)
                conditions.append(c)
            except pyparsing.ParseException:
                raise NotImplementedError("Could not parse condition: %s" % line)
            continue
        if "}" not in line and "]" not in line:
            continue
        if line.startswith("{") and line.endswith("}"):
            forbidden.append(line)
            continue
        if len(line.strip()) == 0:
            continue

        ct += 1
        param = None

        create = {"int": UniformIntegerHyperparameter,
                  "float": UniformFloatHyperparameter,
                  "categorical": CategoricalHyperparameter,
                  "ordinal": OrdinalHyperparameter
                  }

        try:
            param_list = pp_cont_param.parseString(line)
            name = param_list[0]
            if param_list[1] == 'integer':
                paramtype = 'int'
            elif param_list[1] == 'real':
                paramtype = 'float'
            else:
                paramtype = None

            if paramtype in ['int', 'float']:
                log = param_list[10:]
                param_list = param_list[:10]
                if len(log) > 0:
                    log = log[0]
                lower = float(param_list[3])
                upper = float(param_list[5])
                log_on = True if "log" in log else False
                default_value = float(param_list[8])
                param = create[paramtype](name=name, lower=lower, upper=upper,
                                          q=None, log=log_on, default_value=default_value)
                cont_ct += 1

        except pyparsing.ParseException:
            pass

        try:
            if "categorical" in line:
                param_list = pp_cat_param.parseString(line)
                name = param_list[0]
                choices = [choice for choice in param_list[3:-4:2]]
                default_value = param_list[-2]
                param = create["categorical"](name=name, choices=choices, default_value=default_value)
                cat_ct += 1

            elif "ordinal" in line:
                param_list = pp_ord_param.parseString(line)
                name = param_list[0]
                sequence = [seq for seq in param_list[3:-4:2]]
                default_value = param_list[-2]
                param = create["ordinal"](name=name, sequence=sequence, default_value=default_value)
                ord_ct += 1

        except pyparsing.ParseException:
            pass

        if param is None:
            raise NotImplementedError("Could not parse: %s" % line)

        configuration_space.add_hyperparameter(param)

    for clause in forbidden:
        param_list = pp_forbidden_clause.parseString(clause)
        tmp_list = []
        clause_list = []
        for value in param_list[1:]:
            if len(tmp_list) < 3:
                tmp_list.append(value)
            else:
                # So far, only equals is supported by SMAC
                if tmp_list[1] == '=':
                    # TODO maybe add a check if the hyperparameter is
                    # actually in the configuration space
                    clause_list.append(ForbiddenEqualsClause(
                        configuration_space.get_hyperparameter(tmp_list[0]),
                        tmp_list[2]))
                else:
                    raise NotImplementedError()
                tmp_list = []
        configuration_space.add_forbidden_clause(ForbiddenAndConjunction(
            *clause_list))

    conditions_per_child = OrderedDict()
    for condition in conditions:
        child_name = condition[0]
        if child_name not in conditions_per_child:
            conditions_per_child[child_name] = list()
        conditions_per_child[child_name].append(condition)

    for child_name in conditions_per_child:
        for condition in conditions_per_child[child_name]:
            condition = condition[2:]
            condition = ' '.join(condition)
            if '||' in str(condition):
                ors = []
                # 1st case we have a mixture of || and &&
                if '&&' in str(condition):
                    ors_combis = []
                    for cond_parts in str(condition).split('||'):
                        condition = str(cond_parts).split('&&')
                        # if length is 1 it must be or
                        if len(condition) == 1:
                            element_list = condition[0].split()
                            ors_combis.append(condition_specification(child_name, element_list, configuration_space))
                        else:
                            # now taking care of ands
                            ands = []
                            for and_part in condition:
                                element_list = [element for element in and_part.split()]
                                ands.append(condition_specification(child_name, element_list, configuration_space))
                            ors_combis.append(AndConjunction(*ands))
                    mixed_conjunction = OrConjunction(*ors_combis)
                    configuration_space.add_condition(mixed_conjunction)
                else:
                    # 2nd case: we only have ors
                    for cond_parts in str(condition).split('||'):
                        element_list = [element for element in cond_parts.split()]
                        ors.append(condition_specification(child_name, element_list, configuration_space))
                    or_conjunction = OrConjunction(*ors)
                    configuration_space.add_condition(or_conjunction)
            else:
                # 3rd case: we only have ands
                if '&&' in str(condition):
                    ands = []
                    for cond_parts in str(condition).split('&&'):
                        element_list = [element for element in cond_parts.split()]
                        ands.append(condition_specification(child_name, element_list, configuration_space))
                    and_conjunction = AndConjunction(*ands)
                    configuration_space.add_condition(and_conjunction)
                else:
                    # 4th case: we have a normal condition
                    element_list = [element for element in condition.split()]
                    normal_condition = condition_specification(child_name, element_list, configuration_space)
                    configuration_space.add_condition(normal_condition)

    return configuration_space
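
The tail of this reader is what turns '&&' and '||' in a condition line into AndConjunction and OrConjunction objects. A minimal round-trip sketch that exercises the '&&' branch, assuming the companion write() in the same module:

from ConfigSpace.conditions import (AndConjunction, EqualsCondition,
                                    GreaterThanCondition)
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                         UniformFloatHyperparameter,
                                         UniformIntegerHyperparameter)
from ConfigSpace.read_and_write import pcs_new

cs = ConfigurationSpace()
a = UniformFloatHyperparameter("a", 0.0, 1.0)
b = CategoricalHyperparameter("b", ["x", "y"])
c = UniformIntegerHyperparameter("c", 0, 10)
cs.add_hyperparameter(a)
cs.add_hyperparameter(b)
cs.add_hyperparameter(c)

# 'c' is only active when a > 0.5 AND b == 'x'; serialised, this becomes an '&&' condition line.
cs.add_condition(AndConjunction(GreaterThanCondition(c, a, 0.5),
                                EqualsCondition(c, b, "x")))

serialised = pcs_new.write(cs)
restored = pcs_new.read(serialised.split("\n"))
print(restored.get_conditions())  # expected: a single AndConjunction on 'c'
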