def test_categorical_is_legal(self):
        f1 = CategoricalHyperparameter("param", ["a", "b"])
        self.assertTrue(f1.is_legal("a"))
        self.assertTrue(f1.is_legal(u"a"))
        self.assertFalse(f1.is_legal("c"))
        self.assertFalse(f1.is_legal(3))

        # Test is legal vector
        self.assertTrue(f1.is_legal_vector(1.0))
        self.assertTrue(f1.is_legal_vector(0.0))
        self.assertTrue(f1.is_legal_vector(0))
        self.assertFalse(f1.is_legal_vector(0.3))
        self.assertFalse(f1.is_legal_vector(-0.1))
        self.assertFalse(f1.is_legal_vector("Hahaha"))
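A brief aside on what is_legal_vector is checking — a minimal sketch, using the private _transform/_inverse_transform helpers that other tests on this page also rely on: internally a categorical value is stored as the index of the chosen entry, so with two choices only the indices 0 and 1 are legal vector values.

from ConfigSpace.hyperparameters import CategoricalHyperparameter

f1 = CategoricalHyperparameter("param", ["a", "b"])
assert f1._inverse_transform("b") == 1   # value -> index into the choice list
assert f1._transform(1) == "b"           # index -> value
assert f1.is_legal_vector(1.0)           # integral indices are legal
assert not f1.is_legal_vector(0.3)       # fractional vectors are not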
Example #2
    def test_not_equals_condition(self):
        hp1 = CategoricalHyperparameter("parent", [0, 1])
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        cond = NotEqualsCondition(hp2, hp1, 0)
        cond_ = NotEqualsCondition(hp2, hp1, 0)
        self.assertEqual(cond, cond_)

        # Test vector value:
        self.assertEqual(cond.vector_value, hp1._inverse_transform(0))
        self.assertEqual(cond.vector_value, cond_.vector_value)

        cond_reverse = NotEqualsCondition(hp1, hp2, 0)
        self.assertNotEqual(cond, cond_reverse)

        self.assertNotEqual(cond, dict())

        self.assertEqual("child | parent != 0", str(cond))
Example #3
    def test_in_condition(self):
        hp1 = CategoricalHyperparameter("parent", list(range(0, 11)))
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        cond = InCondition(hp2, hp1, [0, 1, 2, 3, 4, 5])
        cond_ = InCondition(hp2, hp1, [0, 1, 2, 3, 4, 5])
        self.assertEqual(cond, cond_)

        # Test vector value:
        self.assertEqual(cond.vector_values, [hp1._inverse_transform(i) for i in [0, 1, 2, 3, 4, 5]])
        self.assertEqual(cond.vector_values, cond_.vector_values)

        cond_reverse = InCondition(hp1, hp2, [0, 1, 2, 3, 4, 5])
        self.assertNotEqual(cond, cond_reverse)

        self.assertNotEqual(cond, dict())

        self.assertEqual("child | parent in {0, 1, 2, 3, 4, 5}", str(cond))
Example #4
    def test_equals_condition(self):
        hp1 = CategoricalHyperparameter("parent", [0, 1])
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        cond = EqualsCondition(hp2, hp1, 0)
        cond_ = EqualsCondition(hp2, hp1, 0)

        # Test vector value:
        self.assertEqual(cond.vector_value, hp1._inverse_transform(0))
        self.assertEqual(cond.vector_value, cond_.vector_value)

        # Test invalid conditions:
        self.assertRaises(TypeError, EqualsCondition, hp2, "parent", 0)
        self.assertRaises(TypeError, EqualsCondition, "child", hp1, 0)
        self.assertRaises(ValueError, EqualsCondition, hp1, hp1, 0)

        self.assertEqual(cond, cond_)

        cond_reverse = EqualsCondition(hp1, hp2, 0)
        self.assertNotEqual(cond, cond_reverse)

        self.assertNotEqual(cond, dict())

        self.assertEqual("child | parent == 0", str(cond))
Example #5
logging.basicConfig(level=logging.INFO)

# Build Configuration Space which defines all parameters and their ranges.
# To illustrate different parameter types,
# we use continuous, integer and categorical parameters.
cs = ConfigurationSpace()

# We can add multiple hyperparameters at once:
n_layer = UniformIntegerHyperparameter("n_layer", 1, 5, default_value=1)
n_neurons = UniformIntegerHyperparameter("n_neurons",
                                         8,
                                         1024,
                                         log=True,
                                         default_value=10)
activation = CategoricalHyperparameter("activation",
                                       ['logistic', 'tanh', 'relu'],
                                       default_value='tanh')
batch_size = UniformIntegerHyperparameter('batch_size',
                                          30,
                                          300,
                                          default_value=200)
learning_rate_init = UniformFloatHyperparameter('learning_rate_init',
                                                0.0001,
                                                1.0,
                                                default_value=0.001,
                                                log=True)
cs.add_hyperparameters(
    [n_layer, n_neurons, activation, batch_size, learning_rate_init])

# SMAC scenario object
scenario = Scenario({
Example #6

logger = logging.getLogger("RF-example")
logging.basicConfig(level=logging.INFO)
#logging.basicConfig(level=logging.DEBUG)  # Enable to show debug-output
logger.info("Running random forest example for SMAC. If you experience "
            "difficulties, try to decrease the memory-limit.")

# Build Configuration Space which defines all parameters and their ranges.
# To illustrate different parameter types,
# we use continuous, integer and categorical parameters.
cs = ConfigurationSpace()

# We can add single hyperparameters:
do_bootstrapping = CategoricalHyperparameter("do_bootstrapping",
                                             ["true", "false"],
                                             default_value="true")
cs.add_hyperparameter(do_bootstrapping)

# Or we can add multiple hyperparameters at once:
num_trees = UniformIntegerHyperparameter("num_trees", 10, 50, default_value=10)
max_features = UniformIntegerHyperparameter("max_features",
                                            1,
                                            boston.data.shape[1],
                                            default_value=1)
min_weight_frac_leaf = UniformFloatHyperparameter("min_weight_frac_leaf",
                                                  0.0,
                                                  0.5,
                                                  default_value=0.0)
criterion = CategoricalHyperparameter("criterion", ["mse", "mae"],
                                      default_value="mse")
Example #7
                return 1 - 0.001
            else:
                return 1 - cross_val_score(clf, X_, y, cv=5).mean()
    else:
        return 1 - cross_val_score(clf, X_, y, cv=5).mean()


#logger = logging.getLogger("SVMExample")
logging.basicConfig(level=logging.INFO)  # logging.DEBUG for debug output

# Build Configuration Space which defines all parameters and their ranges
cs = ConfigurationSpace()

# We define the model's hyperparameters and add them to our cs
penalty = CategoricalHyperparameter("penalty",
                                    ["l1", "l2", "elasticnet", "none"],
                                    default_value="l2")

dual = CategoricalHyperparameter("dual", [True, False], default_value=False)

tol = UniformFloatHyperparameter("tol", 0.00001, 0.1, default_value=0.0001)

C = UniformFloatHyperparameter("C", 0.0, 5.0, default_value=1.0)

fit_intercept = CategoricalHyperparameter("fit_intercept", [True, False],
                                          default_value=False)

intercept_scaling = UniformFloatHyperparameter("intercept_scaling",
                                               0.0,
                                               5.0,
                                               default_value=1.0)
Example #8
    def test_read_new_configuration_space_complex_conditionals(self):
        classi = OrdinalHyperparameter("classi", [
            "random_forest", "extra_trees", "k_nearest_neighbors", "something"
        ])
        knn_weights = CategoricalHyperparameter("knn_weights",
                                                ["uniform", "distance"])
        weather = OrdinalHyperparameter(
            "weather", ["sunny", "rainy", "cloudy", "snowing"])
        temperature = CategoricalHyperparameter("temperature", ["high", "low"])
        rain = CategoricalHyperparameter("rain", ["yes", "no"])
        gloves = OrdinalHyperparameter("gloves",
                                       ["none", "yarn", "leather", "gortex"])
        heur1 = CategoricalHyperparameter("heur1", ["off", "on"])
        heur2 = CategoricalHyperparameter("heur2", ["off", "on"])
        heur_order = CategoricalHyperparameter("heur_order",
                                               ["heur1then2", "heur2then1"])
        gloves_condition = OrConjunction(
            EqualsCondition(gloves, rain, "yes"),
            EqualsCondition(gloves, temperature, "low"))
        heur_condition = AndConjunction(
            EqualsCondition(heur_order, heur1, "on"),
            EqualsCondition(heur_order, heur2, "on"))
        and_conjunction = AndConjunction(
            NotEqualsCondition(knn_weights, classi, "extra_trees"),
            EqualsCondition(knn_weights, classi, "random_forest"))
        Cl_condition = OrConjunction(
            EqualsCondition(knn_weights, classi, "k_nearest_neighbors"),
            and_conjunction, EqualsCondition(knn_weights, classi, "something"))

        and1 = AndConjunction(EqualsCondition(temperature, weather, "rainy"),
                              EqualsCondition(temperature, weather, "cloudy"))
        and2 = AndConjunction(
            EqualsCondition(temperature, weather, "sunny"),
            NotEqualsCondition(temperature, weather, "snowing"))
        another_condition = OrConjunction(and1, and2)

        complex_conditional_space = ConfigurationSpace()
        complex_conditional_space.add_hyperparameter(classi)
        complex_conditional_space.add_hyperparameter(knn_weights)
        complex_conditional_space.add_hyperparameter(weather)
        complex_conditional_space.add_hyperparameter(temperature)
        complex_conditional_space.add_hyperparameter(rain)
        complex_conditional_space.add_hyperparameter(gloves)
        complex_conditional_space.add_hyperparameter(heur1)
        complex_conditional_space.add_hyperparameter(heur2)
        complex_conditional_space.add_hyperparameter(heur_order)

        complex_conditional_space.add_condition(gloves_condition)
        complex_conditional_space.add_condition(heur_condition)
        complex_conditional_space.add_condition(Cl_condition)
        complex_conditional_space.add_condition(another_condition)

        complex_cs = list()
        complex_cs.append(
            "classi ordinal {random_forest,extra_trees,k_nearest_neighbors, something} [random_forest]"
        )
        complex_cs.append(
            "knn_weights categorical {uniform, distance} [uniform]")
        complex_cs.append(
            "weather ordinal {sunny, rainy, cloudy, snowing} [sunny]")
        complex_cs.append("temperature categorical {high, low} [high]")
        complex_cs.append("rain categorical { yes, no } [yes]")
        complex_cs.append(
            "gloves ordinal { none, yarn, leather, gortex } [none]")
        complex_cs.append("heur1 categorical { off, on } [off]")
        complex_cs.append("heur2 categorical { off, on } [off]")
        complex_cs.append(
            "heur_order categorical { heur1then2, heur2then1 } [heur1then2]")
        complex_cs.append("gloves | rain == yes || temperature == low")
        complex_cs.append("heur_order | heur1 == on && heur2 == on")
        complex_cs.append(
            "knn_weights | classi == k_nearest_neighbors || classi != extra_trees && classi == random_forest || classi == something"
        )
        complex_cs.append(
            "temperature | weather == rainy && weather == cloudy || weather == sunny && weather != snowing"
        )
        cs_new = pcs_new.read(complex_cs)
        self.assertEqual(cs_new, complex_conditional_space)
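Note the operator precedence the last two condition strings rely on: in the pcs_new grammar, && binds more tightly than ||, so "a || b && c || d" is read as an OrConjunction over a, (b && c), and d, exactly matching the Cl_condition and another_condition objects built above.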
Example #9
    def test_categorical_is_legal(self):
        f1 = CategoricalHyperparameter("param", ["a", "b"])
        self.assertTrue(f1.is_legal("a"))
        self.assertTrue(f1.is_legal(u"a"))
        self.assertFalse(f1.is_legal("c"))
        self.assertFalse(f1.is_legal(3))
Example #10
            break

    return -count


logger = logging.getLogger("Optimizer")
logger.parent.level = 20  # INFO level is 20; use 10 (DEBUG) to show debug output

# build Configuration Space which defines all parameters and their ranges
n_params = 16
use_conditionals = True  # using conditionals should help a lot in this example

cs = ConfigurationSpace()
previous_param = None
for n in range(n_params):
    p = CategoricalHyperparameter("%d" % (n), [0, 1], default_value=0)
    cs.add_hyperparameter(p)

    if n > 0 and use_conditionals:
        cond = InCondition(child=p, parent=previous_param, values=[1])
        cs.add_condition(cond)

    previous_param = p

# SMAC scenario object
scenario = Scenario({
    "run_obj": "quality",  # we optimize quality (alternative runtime)
    "runcount-limit": n_params * 2,  # at most 200 function evaluations
    "cs": cs,  # configuration space
    "deterministic": "true"
})
Example #11
from ConfigSpace.conditions import EqualsCondition, InCondition
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                         UniformFloatHyperparameter,
                                         UniformIntegerHyperparameter,
                                         Constant)

C = UniformFloatHyperparameter("C",
                               0.03125,
                               32768,
                               log=True,
                               default_value=1.0)
# No linear kernel here, because we have liblinear
kernel = CategoricalHyperparameter(name="kernel",
                                   choices=["rbf", "poly", "sigmoid"],
                                   default_value="rbf")
degree = UniformIntegerHyperparameter("degree", 2, 5, default_value=3)
gamma = UniformFloatHyperparameter("gamma",
                                   3.0517578125e-05,
                                   8,
                                   log=True,
                                   default_value=0.1)
# TODO this is totally ad-hoc
coef0 = UniformFloatHyperparameter("coef0", -1, 1, default_value=0)
# probability is not a hyperparameter, but an argument to the SVM algorithm
shrinking = CategoricalHyperparameter("shrinking", ["True", "False"],
                                      default_value="True")
tol = UniformFloatHyperparameter("tol",
                                 1e-5,
                                 1e-1,
                                 default_value=1e-3,
Example #12

#logger = logging.getLogger("SVMExample")
logging.basicConfig(level=logging.INFO)  # logging.DEBUG for debug output

# Build Configuration Space which defines all parameters and their ranges
cs = ConfigurationSpace()

# We define the k-nearest-neighbors hyperparameters and add them to our cs

n_neighbors = UniformIntegerHyperparameter("n_neighbors",
                                           1,
                                           50,
                                           default_value=5)

weights = CategoricalHyperparameter("weights", ["uniform", "distance"],
                                    default_value="uniform")

p = UniformIntegerHyperparameter("p", 1, 5, default_value=2)

cs.add_hyperparameters([n_neighbors, weights, p])

# Scenario object
scenario = Scenario({
    "run_obj": "quality",  # we optimize quality (alternatively runtime)
    "runcount-limit":
    500,  # max. number of function evaluations; for this example set to a low number
    "cs": cs,  # configuration space
    "deterministic": "true"
})

# Example call of the function
Example #13
    def get_hyperparameter_search_space(dataset_properties=None,
                                        optimizer='smac'):
        if optimizer == 'smac':
            cs = ConfigurationSpace()

            n_estimators = UniformFloatHyperparameter("n_estimators",
                                                      50,
                                                      500,
                                                      default_value=200,
                                                      q=20)
            eta = UniformFloatHyperparameter("eta",
                                             0.025,
                                             0.3,
                                             default_value=0.3,
                                             q=0.025)
            min_child_weight = UniformIntegerHyperparameter("min_child_weight",
                                                            1,
                                                            10,
                                                            default_value=1)
            max_depth = UniformIntegerHyperparameter("max_depth",
                                                     2,
                                                     10,
                                                     default_value=6)
            subsample = UniformFloatHyperparameter("subsample",
                                                   0.5,
                                                   1,
                                                   default_value=1,
                                                   q=0.05)
            gamma = UniformFloatHyperparameter("gamma",
                                               0,
                                               1,
                                               default_value=0,
                                               q=0.1)
            colsample_bytree = UniformFloatHyperparameter("colsample_bytree",
                                                          0.5,
                                                          1,
                                                          default_value=1.,
                                                          q=0.05)
            alpha = UniformFloatHyperparameter("alpha",
                                               1e-10,
                                               10,
                                               log=True,
                                               default_value=1e-10)
            lambda_t = UniformFloatHyperparameter("lambda_t",
                                                  1e-10,
                                                  10,
                                                  log=True,
                                                  default_value=1e-10)
            scale_pos_weight = CategoricalHyperparameter(
                "scale_pos_weight", [0.01, 0.1, 1., 10, 100], default_value=1.)

            cs.add_hyperparameters([
                n_estimators, eta, min_child_weight, max_depth, subsample,
                gamma, colsample_bytree, alpha, lambda_t, scale_pos_weight
            ])
            return cs
        elif optimizer == 'tpe':
            space = {
                'n_estimators':
                hp.randint('xgb_n_estimators', 451) + 50,
                'eta':
                hp.loguniform('xgb_eta', np.log(0.025), np.log(0.3)),
                'min_child_weight':
                hp.randint('xgb_min_child_weight', 10) + 1,
                'max_depth':
                hp.randint('xgb_max_depth', 9) + 2,
                'subsample':
                hp.uniform('xgb_subsample', 0.5, 1),
                'gamma':
                hp.uniform('xgb_gamma', 0, 1),
                'colsample_bytree':
                hp.uniform('xgb_colsample_bytree', 0.5, 1),
                'alpha':
                hp.loguniform('xgb_alpha', np.log(1e-10), np.log(10)),
                'lambda_t':
                hp.loguniform('xgb_lambda_t', np.log(1e-10), np.log(10)),
                'scale_pos_weight':
                hp.choice('xgb_scale_pos_weight', [0.01, 0.1, 1, 10, 100])
            }

            init_trial = {
                'n_estimators': 200,
                'eta': 0.3,
                'min_child_weight': 1,
                'max_depth': 6,
                'subsample': 1,
                'gamma': 0,
                'colsample_bytree': 1,
                'alpha': 0,
                'lambda_t': 1,
                'scale_pos_weight': 1
            }

            return space
Example #14
    def get_cs(self, scenario: ASlibScenario, autofolio_config: dict = None):
        '''
            returns the parameter configuration space of AutoFolio
            (based on the automl config space: https://github.com/automl/ConfigSpace)

            Arguments
            ---------
            scenario: aslib_scenario.aslib_scenario.ASlibScenario
                aslib scenario at hand

            autofolio_config: dict, or None
                An optional dictionary of configuration options
        '''

        if autofolio_config is None:
            # guard against the documented default of None
            autofolio_config = {}

        self.cs = ConfigurationSpace()

        # only allow the feature groups specified in the config file
        # by default, though, all of the feature groups are allowed.
        allowed_feature_groups = autofolio_config.get("allowed_feature_groups",
                                                      scenario.feature_steps)

        if len(allowed_feature_groups) == 0:
            msg = "Please ensure at least one feature group is allowed"
            raise ValueError(msg)

        if len(allowed_feature_groups) == 1:
            choices = [
                True
            ]  # if we only have one feature group, it has to be active
        else:
            choices = [True, False]
        default = True

        for fs in allowed_feature_groups:

            fs_param = CategoricalHyperparameter(name="fgroup_%s" % (fs),
                                                 choices=choices,
                                                 default_value=default)
            self.cs.add_hyperparameter(fs_param)

        # preprocessing
        if autofolio_config.get("pca", True):
            PCAWrapper.add_params(self.cs)

        if autofolio_config.get("impute", True):
            ImputerWrapper.add_params(self.cs)

        if autofolio_config.get("scale", True):
            StandardScalerWrapper.add_params(self.cs)

        # Pre-Solving
        if scenario.performance_type[0] == "runtime":
            if autofolio_config.get("presolve", True):
                Aspeed.add_params(cs=self.cs,
                                  cutoff=scenario.algorithm_cutoff_time)

        if autofolio_config.get("classifier"):
            # fix parameter
            cls_choices = [autofolio_config["classifier"]]
            cls_def = autofolio_config["classifier"]
        else:
            cls_choices = ["RandomForest", "XGBoost"]
            cls_def = "RandomForest"
        classifier = CategoricalHyperparameter("classifier",
                                               choices=cls_choices,
                                               default_value=cls_def)

        self.cs.add_hyperparameter(classifier)

        RandomForest.add_params(self.cs)
        XGBoost.add_params(self.cs)

        if autofolio_config.get("regressor"):
            # fix parameter
            reg_choices = [autofolio_config["regressor"]]
            reg_def = autofolio_config["regressor"]
        else:
            reg_choices = ["RandomForestRegressor"]
            reg_def = "RandomForestRegressor"

        regressor = CategoricalHyperparameter("regressor",
                                              choices=reg_choices,
                                              default_value=reg_def)
        self.cs.add_hyperparameter(regressor)
        RandomForestRegressor.add_params(self.cs)

        # selectors
        if autofolio_config.get("selector"):
            # fix parameter
            sel_choices = [autofolio_config["selector"]]
            sel_def = autofolio_config["selector"]
        else:
            sel_choices = ["PairwiseClassifier", "PairwiseRegressor"]
            sel_def = "PairwiseClassifier"

        selector = CategoricalHyperparameter("selector",
                                             choices=sel_choices,
                                             default_value=sel_def)
        self.cs.add_hyperparameter(selector)
        PairwiseClassifier.add_params(self.cs)
        PairwiseRegression.add_params(self.cs)

        self.logger.debug(self.cs)

        return self.cs
Example #15
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter, \
    UniformIntegerHyperparameter, CategoricalHyperparameter, \
    UnParametrizedHyperparameter, Constant
from automl.utl import json_utils

cs = ConfigurationSpace()

# the smoothing parameter is a non-negative float
# I will limit it to 100 and put it on a logarithmic scale. (SF)
# Please adjust it if you know a proper range; this is just a guess.
alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100,
                                   default_value=1, log=True)

fit_prior = CategoricalHyperparameter(name="fit_prior",
                                      choices=["True", "False"],
                                      default_value="True")

cs.add_hyperparameters([alpha, fit_prior])

json_utils.write_cs_to_json_file(cs, "MultinomialNB")
Example #16
                                is_enabled_default=False):
    enable_param = CategoricalHyperparameter(hyperparam.name + '__enable',
                                             [True, False],
                                             default_value=is_enabled_default)
    self.add_hyperparameters([enable_param, hyperparam])
    self.add_condition(EqualsCondition(hyperparam, enable_param, True))
    return hyperparam


ConfigurationSpace.add_optional_hyperparameter = add_optional_hyperparameter

cs = ConfigurationSpace()

arch_prefix = 'corenlp_train_params__arch__'
cs.add_hyperparameter(
    CategoricalHyperparameter(arch_prefix + f'wordTag__{1}', [(0, 1)],
                              default_value=(0, 1)))
cs.add_hyperparameter(
    CategoricalHyperparameter(arch_prefix + 'words', [(-3, 2)], (-3, 2)))

cs.add_hyperparameter(Constant(arch_prefix + 'tag_window_offset', -2))
cs.add_hyperparameter(
    CategoricalHyperparameter(arch_prefix + 'order', [(0, 2)], (0, 2)))
cs.add_hyperparameter(
    CategoricalHyperparameter(arch_prefix + f'prefix__multi',
                              [((1, 0), (2, 0), (3, 0))],
                              default_value=((1, 0), (2, 0), (3, 0))))
cs.add_hyperparameter(
    CategoricalHyperparameter(arch_prefix + f'suffix__multi',
                              [((2, 0), (3, 0), (4, 0), (5, 0))],
                              default_value=((2, 0), (3, 0), (4, 0), (5, 0))))
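A hypothetical usage sketch for the add_optional_hyperparameter helper patched in above (its def line is truncated here; this assumes the signature is (self, hyperparam, is_enabled_default=False)): the helper registers the given hyperparameter together with a boolean '<name>__enable' switch and keeps it active only while the switch is True.

from ConfigSpace.hyperparameters import UniformFloatHyperparameter

lr = UniformFloatHyperparameter('corenlp_train_params__lr', 1e-4, 1e-1, log=True)  # hypothetical parameter
cs.add_optional_hyperparameter(lr, is_enabled_default=False)
# cs now also contains 'corenlp_train_params__lr__enable' with choices [True, False];
# 'corenlp_train_params__lr' is only active in configurations where the switch is True.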
Example #17
    def get_cs_dimensions(api_config: typing.Dict) -> ConfigurationSpace:
        """
        Help routine to setup ConfigurationSpace search space in constructor.

        Take api_config as argument so this can be static.
        Parameters
        ----------
        api_config: Dict
            api dictionary to construct the search space from
        Returns
        -------
        cs: ConfigurationSpace
            ConfigurationSpace that contains the same hyperparameters as api_config
        """
        # TODO 2 options to transform the real and int hyperaparameters in different scales
        #  option 1: similar to example_submission.skopt.optimizer, merge 'logit' into 'log' and 'bilog' into 'linear'
        #  option 2: use the api bayesmark.space.space to warp and unwarp the samples
        cs = ConfigurationSpace()
        param_list = sorted(api_config.keys())

        # hp_list = []
        for param_name in param_list:
            param_config = api_config[param_name]

            param_type = param_config["type"]
            param_space = param_config.get("space", None)
            param_values = param_config.get("values", None)
            param_range = param_config.get("range", None)

            if param_type == "cat":
                assert param_space is None
                assert param_range is None
                hp = CategoricalHyperparameter(name=param_name, choices=param_values)
            elif param_type == "bool":
                assert param_space is None
                assert param_values is None
                assert param_range is None
                hp = CategoricalHyperparameter(name=param_name, choices=[True, False])
            elif param_type == "ordinal":
                # appear in example_submission.skopt.optimizer but not in README
                assert param_space is None
                assert param_range is None
                hp = OrdinalHyperparameter(name=param_name, sequence=param_values)
            elif param_type in ("int", "real"):
                if param_values is not None:
                    # TODO: decide whether we treat these parameters as discrete values
                    #  or step function (example see example_submission.skopt.optimizer, line 71-77)
                    # sort the values to store them in OrdinalHyperparameter
                    param_values_sorted = np.sort(param_values)
                    hp = OrdinalHyperparameter(name=param_name, sequence=param_values_sorted)
                else:
                    log = True if param_space in ("log", "logit") else False
                    if param_type == "int":
                        hp = UniformIntegerHyperparameter(name=param_name, lower=param_range[0], upper=param_range[-1],
                                                          log=log)
                    else:
                        hp = UniformFloatHyperparameter(name=param_name, lower=param_range[0], upper=param_range[-1],
                                                        log=log)
            else:
                assert False, "type %s not handled in API" % param_type
            cs.add_hyperparameter(hp)

        return cs
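A small usage sketch for get_cs_dimensions. The api_config below is a made-up example in the bayesmark style the docstring refers to, and the owning class name is an assumption:

api_config = {
    "max_depth": {"type": "int", "space": "linear", "range": (1, 15)},
    "learning_rate": {"type": "real", "space": "log", "range": (1e-4, 1e-1)},
    "criterion": {"type": "cat", "values": ["gini", "entropy"]},
    "bootstrap": {"type": "bool"},
}
cs = SMACOptimizer.get_cs_dimensions(api_config)  # hypothetical class name
print(cs.get_hyperparameter_names())  # the four parameters above, mapped to ConfigSpace types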
Example #18
    def add_params(cs: ConfigurationSpace):
        '''
            adds parameters to ConfigurationSpace 
        '''

        try:
            regressor = cs.get_hyperparameter("regressor")
            if "RandomForestRegressor" not in regressor.choices:
                return

            n_estimators = UniformIntegerHyperparameter(
                name="rfreg:n_estimators",
                lower=10,
                upper=100,
                default_value=10,
                log=True)
            cs.add_hyperparameter(n_estimators)
            max_features = CategoricalHyperparameter(
                name="rfreg:max_features",
                choices=["sqrt", "log2", "None"],
                default_value="sqrt")
            cs.add_hyperparameter(max_features)
            max_depth = UniformIntegerHyperparameter(name="rfreg:max_depth",
                                                     lower=10,
                                                     upper=2**31,
                                                     default_value=2**31,
                                                     log=True)
            cs.add_hyperparameter(max_depth)
            min_samples_split = UniformIntegerHyperparameter(
                name="rfreg:min_samples_split",
                lower=2,
                upper=100,
                default_value=2,
                log=True)
            cs.add_hyperparameter(min_samples_split)
            min_samples_leaf = UniformIntegerHyperparameter(
                name="rfreg:min_samples_leaf",
                lower=2,
                upper=100,
                default_value=10,
                log=True)
            cs.add_hyperparameter(min_samples_leaf)
            bootstrap = CategoricalHyperparameter(name="rfreg:bootstrap",
                                                  choices=[True, False],
                                                  default_value=True)
            cs.add_hyperparameter(bootstrap)

            cond = InCondition(child=n_estimators,
                               parent=regressor,
                               values=["RandomForestRegressor"])
            cs.add_condition(cond)
            cond = InCondition(child=max_features,
                               parent=regressor,
                               values=["RandomForestRegressor"])
            cs.add_condition(cond)
            cond = InCondition(child=max_depth,
                               parent=regressor,
                               values=["RandomForestRegressor"])
            cs.add_condition(cond)
            cond = InCondition(child=min_samples_split,
                               parent=regressor,
                               values=["RandomForestRegressor"])
            cs.add_condition(cond)
            cond = InCondition(child=min_samples_leaf,
                               parent=regressor,
                               values=["RandomForestRegressor"])
            cs.add_condition(cond)
            cond = InCondition(child=bootstrap,
                               parent=regressor,
                               values=["RandomForestRegressor"])
            cs.add_condition(cond)

        except Exception:
            # nothing to add if "regressor" is missing from cs
            return
Example #19
logging.basicConfig(level=logging.INFO)

# Build Configuration Space which defines all parameters and their ranges.
# To illustrate different parameter types,
# we use continuous, integer and categorical parameters.
cs = ConfigurationSpace()

# We can add multiple hyperparameters at once:
n_layer = UniformIntegerHyperparameter("n_layer", 1, 5, default_value=1)
n_neurons = UniformIntegerHyperparameter("n_neurons",
                                         8,
                                         1024,
                                         log=True,
                                         default_value=10)
activation = CategoricalHyperparameter("activation",
                                       ['logistic', 'tanh', 'relu'],
                                       default_value='tanh')
solver = CategoricalHyperparameter('solver', ['lbfgs', 'sgd', 'adam'],
                                   default_value='adam')
batch_size = UniformIntegerHyperparameter('batch_size',
                                          30,
                                          300,
                                          default_value=200)
learning_rate = CategoricalHyperparameter(
    'learning_rate', ['constant', 'invscaling', 'adaptive'],
    default_value='constant')
learning_rate_init = UniformFloatHyperparameter('learning_rate_init',
                                                0.0001,
                                                1.0,
                                                default_value=0.001,
                                                log=True)
Example #20
def addSearchSpaceGrids(grids: List[SearchSpaceGrid],
                        cs: ConfigurationSpace) -> None:
    parent_disc = CategoricalHyperparameter(disc_str, range(len(grids)))
    cs.add_hyperparameter(parent_disc)
    for (i, g) in enumerate(grids):
        addSearchSpaceGrid(g, i, parent_disc, cs)
Example #21
    def test_categorical_strings(self):
        f1 = CategoricalHyperparameter("param", ["a", "b"])
        f1_ = CategoricalHyperparameter("param", ["a", "b"])
        self.assertEqual(f1, f1_)
        self.assertEqual(
            "param, Type: Categorical, Choices: {a, b}, Default: a", str(f1))
Example #22
def cs_single():
    # Build Configuration Space which defines all parameters and their ranges
    cs = ConfigurationSpace()

    root = CategoricalHyperparameter("root", choices=["l1", "ln"])
    x1 = CategoricalHyperparameter("x1", choices=["l1", "ln"])
    x2 = CategoricalHyperparameter("x2", choices=["l1", "ln"])
    x3 = CategoricalHyperparameter("x3", choices=["l1", "ln"])
    x4 = CategoricalHyperparameter("x4", choices=["l1", "ln"])
    x5 = CategoricalHyperparameter("x5", choices=["l1", "ln"])
    x6 = CategoricalHyperparameter("x6", choices=["l1", "ln"])

    # r1 is the data associated with x1
    r1 = UniformFloatHyperparameter("r1", lower=0.01, upper=0.99, log=False)
    r2 = UniformFloatHyperparameter("r2", lower=0.01, upper=0.99, log=False)
    r3 = UniformFloatHyperparameter("r3", lower=0.01, upper=0.99, log=False)
    r4 = UniformFloatHyperparameter("r4", lower=0.01, upper=0.99, log=False)
    r5 = UniformFloatHyperparameter("r5", lower=0.01, upper=0.99, log=False)
    r6 = UniformFloatHyperparameter("r6", lower=0.01, upper=0.99, log=False)
    r7 = UniformFloatHyperparameter("r7", lower=0.01, upper=0.99, log=False)
    r8 = UniformFloatHyperparameter("r8", lower=0.01, upper=0.99, log=False)
    r9 = UniformFloatHyperparameter("r9", lower=0.01, upper=0.99, log=False)
    r10 = UniformFloatHyperparameter("r10", lower=0.01, upper=0.99, log=False)
    r11 = UniformFloatHyperparameter("r11", lower=0.01, upper=0.99, log=False)
    r12 = UniformFloatHyperparameter("r12", lower=0.01, upper=0.99, log=False)
    r13 = UniformFloatHyperparameter("r13", lower=0.01, upper=0.99, log=False)
    r14 = UniformFloatHyperparameter("r14", lower=0.01, upper=0.99, log=False)

    cs.add_hyperparameters([
        root,
        x1,
        x2,
        x3,
        x4,
        x5,
        x6,
        r1,
        r2,
        r3,
        r4,
        r5,
        r6,
        r7,
        r8,
        r9,
        r10,
        r11,
        r12,
        r13,
        r14,
    ])

    # add condition
    cs.add_condition(InCondition(x1, root, ["l1"]))
    cs.add_condition(InCondition(x2, root, ["ln"]))
    cs.add_condition(InCondition(r1, root, ["l1"]))
    cs.add_condition(InCondition(r2, root, ["ln"]))

    cs.add_condition(InCondition(x3, x1, ["l1"]))
    cs.add_condition(InCondition(x4, x1, ["ln"]))
    cs.add_condition(InCondition(r3, x1, ["l1"]))
    cs.add_condition(InCondition(r4, x1, ["ln"]))

    cs.add_condition(InCondition(x5, x2, ["l1"]))
    cs.add_condition(InCondition(x6, x2, ["ln"]))
    cs.add_condition(InCondition(r5, x2, ["l1"]))
    cs.add_condition(InCondition(r6, x2, ["ln"]))

    cs.add_condition(InCondition(r7, x3, ["l1"]))
    cs.add_condition(InCondition(r8, x3, ["ln"]))

    cs.add_condition(InCondition(r9, x4, ["l1"]))
    cs.add_condition(InCondition(r10, x4, ["ln"]))

    cs.add_condition(InCondition(r11, x5, ["l1"]))
    cs.add_condition(InCondition(r12, x5, ["ln"]))

    cs.add_condition(InCondition(r13, x6, ["l1"]))
    cs.add_condition(InCondition(r14, x6, ["ln"]))

    return cs
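A short usage note (only the standard ConfigSpace API is assumed): thanks to the conditions, sampling from this space instantiates just the active branch of the binary tree.

cs = cs_single()
config = cs.sample_configuration()
# Inactive parameters are simply absent: if root == "l1", x1/r1 are set while
# x2/r2 (and everything conditioned on them) do not appear in the dictionary.
print(config.get_dictionary())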
Example #23
import tempfile
import unittest

from ConfigSpace.configuration_space import ConfigurationSpace
import ConfigSpace.io.pcs as pcs
import ConfigSpace.io.pcs_new as pcs_new
from ConfigSpace.hyperparameters import CategoricalHyperparameter, \
    UniformIntegerHyperparameter, UniformFloatHyperparameter, OrdinalHyperparameter
from ConfigSpace.conditions import EqualsCondition, InCondition, \
    AndConjunction, OrConjunction, NotEqualsCondition, \
    LessThanCondition, GreaterThanCondition
from ConfigSpace.forbidden import ForbiddenEqualsClause, \
    ForbiddenInClause, ForbiddenAndConjunction

# More complex search space
classifier = CategoricalHyperparameter("classifier", ["svm", "nn"])
kernel = CategoricalHyperparameter("kernel", ["rbf", "poly", "sigmoid"])
kernel_condition = EqualsCondition(kernel, classifier, "svm")
C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True)
C_condition = EqualsCondition(C, classifier, "svm")
gamma = UniformFloatHyperparameter("gamma", 0.000030518, 8, log=True)
gamma_condition = EqualsCondition(gamma, kernel, "rbf")
degree = UniformIntegerHyperparameter("degree", 1, 5)
degree_condition = InCondition(degree, kernel, ["poly", "sigmoid"])
neurons = UniformIntegerHyperparameter("neurons", 16, 1024)
neurons_condition = EqualsCondition(neurons, classifier, "nn")
lr = UniformFloatHyperparameter("lr", 0.0001, 1.0)
lr_condition = EqualsCondition(lr, classifier, "nn")
preprocessing = CategoricalHyperparameter("preprocessing", ["None", "pca"])
conditional_space = ConfigurationSpace()
conditional_space.add_hyperparameter(preprocessing)
Example #24
validation_epochs = 200
# space_optimization_evals = 1
batch_size = 50

run_folder = 'run'

run_count_step = 1

restore_prev_run = False
restore_prev_run_folder = 'ctc_network/out/smac/run_1/'
results_dir = 'ctc_network/out/smac/{}/'.format(run_folder)
# -----------------------------------------------------------------------------------------

space = {
    'input_dense_depth':
    CategoricalHyperparameter("input_dense_depth", ["1", "2"],
                              default_value="1"),
    'input_dense_1':
    UniformIntegerHyperparameter("input_dense_1", 50, 250, default_value=100),
    'input_dense_2':
    UniformIntegerHyperparameter("input_dense_2", 50, 250, default_value=100),
    'out_dense_depth':
    CategoricalHyperparameter("out_dense_depth", ["1", "2"],
                              default_value="1"),
    'out_dense_1':
    UniformIntegerHyperparameter("out_dense_1", 50, 250, default_value=100),
    'out_dense_2':
    UniformIntegerHyperparameter("out_dense_2", 50, 250, default_value=100),
    'rnn_depth':
    CategoricalHyperparameter("rnn_depth", ["1", "2"], default_value="1"),
    'fw_1':
    UniformIntegerHyperparameter("fw_1", 10, 250, default_value=100),
Example #25
def ppo_bohb_wrapper(**params):

    # Setup directories where live data is logged
    logdir = params["logdir"]
    ppo_output_dir = os.path.join(logdir, 'ppo_output')
    # if not os.path.isdir(a2c_output_dir):
    #     os.makedirs(a2c_output_dir)
    params["logdir"] = ppo_output_dir

    bohb_output_dir = os.path.join(logdir, 'bohb_output')
    # if not os.path.isdir(bohb_output_dir):
    #     os.makedirs(bohb_output_dir)

    logging.basicConfig(level=logging.INFO)  # logging.DEBUG for debug output
    logger = logging.getLogger()
    logger.propagate = False  # no duplicate logging outputs
    fh = logging.FileHandler(os.path.join(logdir, 'bohb.log'))
    fh.setLevel(logging.INFO)
    fh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s:%(name)s: %(message)s'))
    logger.addHandler(fh)

    # Build configuration space and define all hyperparameters
    cs = ConfigurationSpace()
    lr = UniformFloatHyperparameter("lr", 1e-4, 1e-2, default_value=1e-3)
    units_shared_layer1 = UniformIntegerHyperparameter("units_shared_layer1", 8, 100, default_value=24)
    units_shared_layer2 = UniformIntegerHyperparameter("units_shared_layer2", 8, 100, default_value=24)
    units_policy_layer = UniformIntegerHyperparameter("units_policy_layer", 8, 100, default_value=24)
    vf_coeff = UniformFloatHyperparameter("vf_coeff", 1e-2, 0.5, default_value=0.1)
    ent_coeff = UniformFloatHyperparameter("ent_coeff", 5e-6, 1e-4, default_value=1e-5)
    gamma = UniformFloatHyperparameter("gamma", 0.6, 1., default_value=0.90)
    activ_fcn = CategoricalHyperparameter("activ_fcn", ['relu6', 'elu', 'mixed'], default_value='relu6')
    nsteps = CategoricalHyperparameter("nsteps", [16, 32, 64, 128, 256], default_value=64)
    if params["architecture"] == 'ff':
        nminibatches = CategoricalHyperparameter("nminibatches", [1, 2, 4, 8, 16], default_value=4)
        noptepochs = UniformIntegerHyperparameter("noptepochs", 1, 10, default_value=2)
        cs.add_hyperparameters([units_shared_layer1, units_shared_layer2, units_policy_layer,
                                vf_coeff, ent_coeff, gamma, lr, activ_fcn, nsteps, nminibatches, noptepochs])  # batch_size
    else:
        cs.add_hyperparameters([units_shared_layer1, units_shared_layer2, units_policy_layer,
                                vf_coeff, ent_coeff, gamma, lr, activ_fcn, nsteps])  # batch_size
    # cs.add_hyperparameters([units_shared_layer1, units_shared_layer2, units_policy_layer,
    #                         vf_coeff, ent_coeff, gamma, lr, activ_fcn])  # batch_size

    logger.info('##############################################')
    logger.info('Run Optimization')
    logger.info('##############################################')
    if params["array_id"] == 1:
        # Setup directories where live data is logged
        logdir = params["logdir"]
        # a2c_output_dir = os.path.join(logdir, 'a2c_output')
        if not os.path.isdir(ppo_output_dir):
            os.makedirs(ppo_output_dir)
        # params["logdir"] = a2c_output_dir

        # bohb_output_dir = os.path.join(logdir, 'bohb_output')
        if not os.path.isdir(bohb_output_dir):
            os.makedirs(bohb_output_dir)

        # start nameserver
        NS = hpns.NameServer(run_id=params["instance_id"], nic_name=params["nic_name"],
                             working_directory=bohb_output_dir)
        ns_host, ns_port = NS.start()  # stores information for workers to find in working directory

        # BOHB is usually so cheap that we can afford to run a worker on the master node, too.
        worker = PPOWorker(nameserver=ns_host, nameserver_port=ns_port, run_id=params["instance_id"], **params)
        worker.run(background=True)

        # Create scenario object
        logger.info('##############################################')
        logger.info('Setup BOHB instance')
        logger.info('##############################################')

        logger.info('Output_dir: %s' % bohb_output_dir)
        HB = BOHB(configspace=cs,
                  run_id=params["instance_id"],
                  eta=3,
                  min_budget=params["min_resource"],
                  max_budget=params["max_resource"],
                  host=ns_host,
                  nameserver=ns_host,
                  nameserver_port=ns_port,
                  ping_interval=3600)

        res = HB.run(n_iterations=4,
                     min_n_workers=4)  # BOHB can wait until a minimum number of workers is online before starting

        # pickle result here for later analysis
        with open(os.path.join(bohb_output_dir, 'results.pkl'), 'wb') as f:
            pickle.dump(res, f)

        id2config = res.get_id2config_mapping()
        print('A total of %i unique configurations were sampled.' % len(id2config.keys()))
        print('A total of %i runs were executed.' % len(res.get_all_runs()))
        # incumbent_trajectory = res.get_incumbent_trajectory()
        # import matplotlib.pyplot as plt
        # plt.plot(incumbent_trajectory['times_finished'], incumbent_trajectory['losses'])
        # plt.xlabel('wall clock time [s]')
        # plt.ylabel('incumbent loss')
        # plt.show()

        # shutdown all workers
        HB.shutdown(shutdown_workers=True)

        # shutdown nameserver
        NS.shutdown()

    else:
        host = hpns.nic_name_to_host(params["nic_name"])

        # workers only instantiate the MyWorker, find the nameserver and start serving
        w = PPOWorker(run_id=params["instance_id"], host=host, **params)
        w.load_nameserver_credentials(bohb_output_dir)
        # run worker in the foreground,
        w.run(background=False)
Example #26
        cfg["gamma"] = cfg["gamma_value"] if cfg["gamma"] == "value" else "auto"
        cfg.pop("gamma_value", None)  # Remove "gamma_value"

    clf = svm.SVC(**cfg, random_state=42)
    scores = cross_val_score(clf, iris.data, iris.target, cv=5)
    return 1 - np.mean(scores)  # Minimize!


logging.basicConfig(level=logging.INFO)

# Build Configuration Space which defines all parameters and their ranges
cs = ConfigurationSpace()

# We define a few possible types of SVM-kernels and add them as "kernel" to our cs
kernel = CategoricalHyperparameter("kernel",
                                   ["linear", "rbf", "poly", "sigmoid"],
                                   default_value="poly")
cs.add_hyperparameter(kernel)

# There are some hyperparameters shared by all kernels
C = UniformFloatHyperparameter("C", 0.001, 1000.0, default_value=1.0)
shrinking = CategoricalHyperparameter("shrinking", ["true", "false"],
                                      default_value="true")
cs.add_hyperparameters([C, shrinking])

# Others are kernel-specific, so we can add conditions to limit the searchspace
degree = UniformIntegerHyperparameter(
    "degree", 1, 5, default_value=3)  # Only used by kernel poly
coef0 = UniformFloatHyperparameter("coef0", 0.0, 10.0,
                                   default_value=0.0)  # poly, sigmoid
cs.add_hyperparameters([degree, coef0])
Example #27
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import CategoricalHyperparameter
from automl.utl import json_utils

strategy = CategoricalHyperparameter("strategy",
                                     ["mean", "median", "most_frequent"],
                                     default_value="mean")
cs = ConfigurationSpace()
cs.add_hyperparameter(strategy)

json_utils.write_cs_to_json_file(cs, "Imputer")
Example #28
    def get_hyperparameter_search_space(dataset_properties=None):
        C = UniformFloatHyperparameter(name="C",
                                       lower=0.03125,
                                       upper=32768,
                                       log=True,
                                       default_value=1.0)
        # Random Guess
        epsilon = UniformFloatHyperparameter(name="epsilon",
                                             lower=0.001,
                                             upper=1,
                                             default_value=0.1,
                                             log=True)

        kernel = CategoricalHyperparameter(
            name="kernel",
            choices=['linear', 'poly', 'rbf', 'sigmoid'],
            default_value="rbf")
        degree = UniformIntegerHyperparameter(name="degree",
                                              lower=2,
                                              upper=5,
                                              default_value=3)

        gamma = UniformFloatHyperparameter(name="gamma",
                                           lower=3.0517578125e-05,
                                           upper=8,
                                           log=True,
                                           default_value=0.1)

        # TODO this is totally ad-hoc
        coef0 = UniformFloatHyperparameter(name="coef0",
                                           lower=-1,
                                           upper=1,
                                           default_value=0)
        # probability is not a hyperparameter, but an argument to the SVM algorithm
        shrinking = CategoricalHyperparameter(name="shrinking",
                                              choices=["True", "False"],
                                              default_value="True")
        tol = UniformFloatHyperparameter(name="tol",
                                         lower=1e-5,
                                         upper=1e-1,
                                         default_value=1e-3,
                                         log=True)
        max_iter = UnParametrizedHyperparameter("max_iter", -1)

        cs = ConfigurationSpace()
        cs.add_hyperparameters([
            C, kernel, degree, gamma, coef0, shrinking, tol, max_iter, epsilon
        ])

        degree_depends_on_kernel = InCondition(child=degree,
                                               parent=kernel,
                                               values=('poly', 'rbf',
                                                       'sigmoid'))
        gamma_depends_on_kernel = InCondition(child=gamma,
                                              parent=kernel,
                                              values=('poly', 'rbf'))
        coef0_depends_on_kernel = InCondition(child=coef0,
                                              parent=kernel,
                                              values=('poly', 'sigmoid'))
        cs.add_conditions([
            degree_depends_on_kernel, gamma_depends_on_kernel,
            coef0_depends_on_kernel
        ])

        return cs
Example #29
    cost_value = 1 - np.mean(scores)  # Minimize!

    # Return a dictionary with all of the objectives.
    # Alternatively you can return a list in the same order
    # as `multi_objectives`.
    return {"cost": cost_value, "time": t1 - t0}


if __name__ == "__main__":
    # Build Configuration Space which defines all parameters and their ranges
    cs = ConfigurationSpace()

    # We define a few possible types of SVM-kernels and add them as "kernel" to our cs
    kernel = CategoricalHyperparameter(
        name="kernel",
        choices=["linear", "rbf", "poly", "sigmoid"],
        default_value="poly",
    )
    cs.add_hyperparameter(kernel)

    # There are some hyperparameters shared by all kernels
    C = UniformFloatHyperparameter("C",
                                   0.001,
                                   1000.0,
                                   default_value=1.0,
                                   log=True)
    shrinking = CategoricalHyperparameter("shrinking", [True, False],
                                          default_value=True)
    cs.add_hyperparameters([C, shrinking])

    # Others are kernel-specific, so we can add conditions to limit the searchspace
Example #30
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        loss = CategoricalHyperparameter("loss", ["least_squares"],
                                         default_value="least_squares")
        learning_rate = UniformFloatHyperparameter(name="learning_rate",
                                                   lower=0.01,
                                                   upper=1,
                                                   default_value=0.1,
                                                   log=True)
        max_iter = UniformIntegerHyperparameter("max_iter",
                                                32,
                                                512,
                                                default_value=100)
        min_samples_leaf = UniformIntegerHyperparameter(
            name="min_samples_leaf",
            lower=1,
            upper=200,
            default_value=20,
            log=True)
        max_depth = UnParametrizedHyperparameter(name="max_depth",
                                                 value="None")
        max_leaf_nodes = UniformIntegerHyperparameter(name="max_leaf_nodes",
                                                      lower=3,
                                                      upper=2047,
                                                      default_value=31,
                                                      log=True)
        max_bins = Constant("max_bins", 256)
        l2_regularization = UniformFloatHyperparameter(
            name="l2_regularization",
            lower=1E-10,
            upper=1,
            default_value=1E-10,
            log=True)
        early_stop = CategoricalHyperparameter(
            name="early_stop",
            choices=["off", "train", "valid"],
            default_value="off")
        tol = UnParametrizedHyperparameter(name="tol", value=1e-7)
        scoring = UnParametrizedHyperparameter(name="scoring", value="loss")
        n_iter_no_change = UniformIntegerHyperparameter(
            name="n_iter_no_change", lower=1, upper=20, default_value=10)
        validation_fraction = UniformFloatHyperparameter(
            name="validation_fraction",
            lower=0.01,
            upper=0.4,
            default_value=0.1)

        cs.add_hyperparameters([
            loss, learning_rate, max_iter, min_samples_leaf, max_depth,
            max_leaf_nodes, max_bins, l2_regularization, early_stop, tol,
            scoring, n_iter_no_change, validation_fraction
        ])

        n_iter_no_change_cond = InCondition(n_iter_no_change, early_stop,
                                            ["valid", "train"])
        validation_fraction_cond = EqualsCondition(validation_fraction,
                                                   early_stop, "valid")

        cs.add_conditions([n_iter_no_change_cond, validation_fraction_cond])

        return cs
Example #31
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import CategoricalHyperparameter
from automl.utl import json_utils

cs = ConfigurationSpace()

norm = CategoricalHyperparameter("norm", ["l1", "l2", "max"], "l2")

cs.add_hyperparameter(norm)

json_utils.write_cs_to_json_file(cs, "Normalizer")
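Note that "l2" is passed positionally here; it fills the third parameter of CategoricalHyperparameter, default_value.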
Example #32
    def test_forbidden_equals_clause(self):
        hp1 = CategoricalHyperparameter("parent", [0, 1])
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        hp3 = CategoricalHyperparameter("grandchild", ["hot", "cold"])

        self.assertRaisesRegexp(TypeError, "Argument 'hyperparameter' is not of"
            " type <class 'ConfigSpace.hyperparameters.Hyperparameter'>.",
                                ForbiddenEqualsClause, "HP1", 1)

        self.assertRaisesRegexp(ValueError,
                                "Forbidden clause must be instantiated with a "
                                "legal hyperparameter value for "
                                "'parent, Type: Categorical, Choices: \{0, "
                                "1\}, Default: 0', but got '2'",
                                ForbiddenEqualsClause, hp1, 2)

        forb1 = ForbiddenEqualsClause(hp1, 1)
        forb1_ = ForbiddenEqualsClause(hp1, 1)
        forb1__ = ForbiddenEqualsClause(hp1, 0)
        forb2 = ForbiddenEqualsClause(hp2, 10)
        forb3 = ForbiddenEqualsClause(hp3, "hot")
        forb3_ = ForbiddenEqualsClause(hp3, "hot")

        self.assertEqual(forb3, forb3_)
        # print("\eq0:", 1, 1)
        # self.assertEqual(1, 1)
        # print("\neq1:", forb1, forb1_)
        self.assertEqual(forb1, forb1_)
        # print("\nneq2:", forb1, "forb1")
        self.assertNotEqual(forb1, "forb1")
        # print("\nneq3:", forb1, forb2)
        self.assertNotEqual(forb1, forb2)
        # print("\nneq4:", forb1_, forb1)
        self.assertNotEqual(forb1__, forb1)
        # print("\neq5:", "Forbidden: parent == 1", str(forb1))
        self.assertEqual("Forbidden: parent == 1", str(forb1))

        # print("\nraisereg6:")
        self.assertRaisesRegexp(ValueError,
                                "Is_forbidden must be called with the "
                                "instanstatiated hyperparameter in the "
                                "forbidden clause; you are missing "
                                "'parent'", forb1.is_forbidden,
                                {1: hp2}, True)
        # print("\nneq7:")
        self.assertFalse(forb1.is_forbidden({'child': 1}, strict=False))
        # print("\nneq8:")
        self.assertFalse(forb1.is_forbidden({'parent': 0}, True))
        # print("\nneq9:")
        self.assertTrue(forb1.is_forbidden({'parent': 1}, True))

        # Test forbidden on vector values
        hyperparameter_idx = {
            hp1.name: 0,
            hp2.name: 1
        }
        forb1.set_vector_idx(hyperparameter_idx)
        # print("\nneq10:")
        self.assertFalse(forb1.is_forbidden_vector(np.array([np.NaN, np.NaN]), strict=False))
        # print("\nneq11:")
        self.assertFalse(forb1.is_forbidden_vector(np.array([0., np.NaN]), strict=False))
        # print("\nneq12:")
        self.assertTrue(forb1.is_forbidden_vector(np.array([1., np.NaN]), strict=False))
Example #33
    def test_in_condition(self):
        hp1 = CategoricalHyperparameter("parent", [0, 1, 2, 3, 4])
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        hp3 = UniformIntegerHyperparameter("child2", 0, 10)
        hp4 = CategoricalHyperparameter("grandchild", ["hot", "cold", "warm"])

        self.assertRaisesRegexp(TypeError, "Argument 'hyperparameter' is not of"
                                " type <class 'ConfigSpace.hyperparameters.Hyperparameter'>.",
                                ForbiddenInClause, "HP1", 1)

        self.assertRaisesRegexp(ValueError,
                                "Forbidden clause must be instantiated with a "
                                "legal hyperparameter value for "
                                "'parent, Type: Categorical, Choices: {0, 1, 2, 3, 4}, "
                                "Default: 0', but got '5'",
                                ForbiddenInClause, hp1, [5])

        forb1 = ForbiddenInClause(hp2, [5, 6, 7, 8, 9])
        forb1_ = ForbiddenInClause(hp2, [9, 8, 7, 6, 5])
        forb2 = ForbiddenInClause(hp2, [5, 6, 7, 8])
        forb3 = ForbiddenInClause(hp3, [5, 6, 7, 8, 9])
        forb4 = ForbiddenInClause(hp4, ["hot", "cold"])
        forb4_ = ForbiddenInClause(hp4, ["hot", "cold"])
        forb5 = ForbiddenInClause(hp1, [3, 4])
        forb5_ = ForbiddenInClause(hp1, [3, 4])

        self.assertEqual(forb5, forb5_)
        self.assertEqual(forb4, forb4_)

        # print("\nTest1:")
        self.assertEqual(forb1, forb1_)
        # print("\nTest2:")
        self.assertNotEqual(forb1, forb2)
        # print("\nTest3:")
        self.assertNotEqual(forb1, forb3)
        # print("\nTest4:")
        self.assertEqual("Forbidden: child in {5, 6, 7, 8, 9}", str(forb1))
        # print("\nTest5:")
        self.assertRaisesRegexp(ValueError,
                                "Is_forbidden must be called with the "
                                "instanstatiated hyperparameter in the "
                                "forbidden clause; you are missing "
                                "'child'", forb1.is_forbidden,
                                {'parent': 1}, True)
        # print("\nTest6:")
        self.assertFalse(forb1.is_forbidden({'parent': 1}, strict=False))
        # print("\nTest7:")
        for i in range(0, 5):
            self.assertFalse(forb1.is_forbidden({'child': i}, True))
        # print("\nTest8:")
        for i in range(5, 10):
            self.assertTrue(forb1.is_forbidden({'child': i}, True))

        # Test forbidden on vector values
        hyperparameter_idx = {
            hp1.name: 0,
            hp2.name: 1
        }
        forb1.set_vector_idx(hyperparameter_idx)
        # print("\nTest9:")
        self.assertFalse(forb1.is_forbidden_vector(np.array([np.NaN, np.NaN]), strict=False))
        # print("\nTest10:")
        self.assertFalse(forb1.is_forbidden_vector(np.array([np.NaN, 0]), strict=False))
        correct_vector_value = hp2._inverse_transform(6)
        # print("\nTest11:")
        print(correct_vector_value, np.array([np.NaN, correct_vector_value]))
        self.assertTrue(forb1.is_forbidden_vector(np.array([np.NaN, correct_vector_value]), strict=False))
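(In the vector representation used by is_forbidden_vector, np.NaN marks an inactive hyperparameter, which is why the NaN entries above never trigger the clause.)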
Example #34
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter, \
    UniformIntegerHyperparameter, CategoricalHyperparameter, \
    UnParametrizedHyperparameter, Constant
from ConfigSpace.conditions import EqualsCondition, InCondition
from automl.utl import json_utils

cs = ConfigurationSpace()

loss = CategoricalHyperparameter("loss", [
    "squared_loss", "huber", "epsilon_insensitive",
    "squared_epsilon_insensitive"
],
                                 default_value="squared_loss")
penalty = CategoricalHyperparameter("penalty", ["l1", "l2", "elasticnet"],
                                    default_value="l2")
alpha = UniformFloatHyperparameter("alpha",
                                   1e-7,
                                   1e-1,
                                   log=True,
                                   default_value=0.0001)
l1_ratio = UniformFloatHyperparameter("l1_ratio",
                                      1e-9,
                                      1,
                                      log=True,
                                      default_value=0.15)
fit_intercept = Constant("fit_intercept", "True")
tol = UniformFloatHyperparameter("tol",
                                 1e-5,
                                 1e-1,
                                 log=True,