Example no. 1
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        # base_estimator = Constant(name="base_estimator", value="None")
        n_estimators = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="n_estimators",
                                         lower=50,
                                         upper=500,
                                         default=50,
                                         log=False))
        learning_rate = cs.add_hyperparameter(
            UniformFloatHyperparameter(name="learning_rate",
                                       lower=0.0001,
                                       upper=2,
                                       default=0.1,
                                       log=True))
        algorithm = cs.add_hyperparameter(
            CategoricalHyperparameter(name="algorithm",
                                      choices=["SAMME.R", "SAMME"],
                                      default="SAMME.R"))
        max_depth = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="max_depth",
                                         lower=1,
                                         upper=10,
                                         default=1,
                                         log=False))
        return cs
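A minimal usage sketch for a space like the one above, assuming the function sits in a module with HPOlibConfigSpace-style imports (the import paths below are assumed from that era of the library); sample_configuration and seed are the calls shown in Example no. 28:

    # Assumed imports (HPOlibConfigSpace-era layout; adjust to the actual package)
    from HPOlibConfigSpace.configuration_space import ConfigurationSpace
    from HPOlibConfigSpace.hyperparameters import (
        UniformIntegerHyperparameter, UniformFloatHyperparameter,
        CategoricalHyperparameter)

    cs = get_hyperparameter_search_space()   # build the space defined above
    cs.seed(1)                                # make sampling reproducible (see Example no. 28)
    config = cs.sample_configuration()        # draw one configuration from the space
    print(config)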
Example no. 2
    def get_hyperparameter_search_space(dataset_properties=None):
        criterion = Constant(name="criterion", value="mse")
        # Copied from classification/random_forest.py
        #n_estimators = UniformIntegerHyperparameter(
        #    name="n_estimators", lower=10, upper=100, default=10, log=False)
        n_estimators = Constant("n_estimators", 100)
        max_features = UniformFloatHyperparameter(
            "max_features", 0.5, 5, default=1)
        max_depth = UnParametrizedHyperparameter("max_depth", "None")
        min_samples_split = UniformIntegerHyperparameter(
            name="min_samples_split", lower=2, upper=20, default=2, log=False)
        min_samples_leaf = UniformIntegerHyperparameter(
            name="min_samples_leaf", lower=1, upper=20, default=1, log=False)
        bootstrap = CategoricalHyperparameter(
            name="bootstrap", choices=["True", "False"], default="True")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(n_estimators)
        cs.add_hyperparameter(max_features)
        cs.add_hyperparameter(max_depth)
        cs.add_hyperparameter(min_samples_split)
        cs.add_hyperparameter(min_samples_leaf)
        cs.add_hyperparameter(bootstrap)
        cs.add_hyperparameter(criterion)

        return cs
Example no. 3
    def get_hyperparameter_search_space(dataset_properties=None):
        n_components = UniformIntegerHyperparameter("n_components",
                                                    10,
                                                    2000,
                                                    default=100)
        kernel = CategoricalHyperparameter(
            'kernel', ['poly', 'rbf', 'sigmoid', 'cosine'], 'rbf')
        degree = UniformIntegerHyperparameter('degree', 2, 5, 3)
        gamma = UniformFloatHyperparameter("gamma",
                                           3.0517578125e-05,
                                           8,
                                           log=True,
                                           default=1.0)
        coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0)
        cs = ConfigurationSpace()
        cs.add_hyperparameter(n_components)
        cs.add_hyperparameter(kernel)
        cs.add_hyperparameter(degree)
        cs.add_hyperparameter(gamma)
        cs.add_hyperparameter(coef0)

        degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
        coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
        gamma_condition = InCondition(gamma, kernel, ["poly", "rbf"])
        cs.add_condition(degree_depends_on_poly)
        cs.add_condition(coef0_condition)
        cs.add_condition(gamma_condition)
        return cs
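The conditions above make degree, gamma and coef0 active only for certain kernels. A small sketch of how a configuration for this space could be validated, using the Configuration constructor shown in Example no. 28 (its import path is assumed):

    from HPOlibConfigSpace.configuration_space import Configuration  # assumed path

    cs = get_hyperparameter_search_space()
    # kernel="poly" activates degree, gamma and coef0, so values are supplied for
    # them; constructing the Configuration validates everything against the space.
    Configuration(cs, dict(kernel="poly", degree=3, gamma=1.0, coef0=0.0,
                           n_components=100))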
Example no. 4
 def get_hyperparameter_search_space(dataset_properties=None):
     N = UniformIntegerHyperparameter("N", 50, 2000, default=100)
     maxiter = UniformIntegerHyperparameter("maxiter", 50, 500, default=100)
     cs = ConfigurationSpace()
     cs.add_hyperparameter(N)
     cs.add_hyperparameter(maxiter)
     return cs
Example no. 5
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100))
        criterion = cs.add_hyperparameter(
            CategoricalHyperparameter("criterion", ["gini", "entropy"],
                                      default="gini"))
        max_features = cs.add_hyperparameter(
            UniformFloatHyperparameter("max_features", 0.5, 5, default=1))

        max_depth = cs.add_hyperparameter(
            UnParametrizedHyperparameter(name="max_depth", value="None"))

        min_samples_split = cs.add_hyperparameter(
            UniformIntegerHyperparameter("min_samples_split", 2, 20,
                                         default=2))
        min_samples_leaf = cs.add_hyperparameter(
            UniformIntegerHyperparameter("min_samples_leaf", 1, 20, default=1))
        min_weight_fraction_leaf = cs.add_hyperparameter(
            Constant('min_weight_fraction_leaf', 0.))

        bootstrap = cs.add_hyperparameter(
            CategoricalHyperparameter("bootstrap", ["True", "False"],
                                      default="False"))

        return cs
Example no. 6
 def get_hyperparameter_search_space(dataset_properties=None):
     #n_estimators = UniformIntegerHyperparameter(
     #    "n_estimators", 10, 100, default=10)
     n_estimators = Constant("n_estimators", 100)
     criterion = CategoricalHyperparameter("criterion", ["gini", "entropy"],
                                           default="gini")
     #max_features = UniformFloatHyperparameter(
     #    "max_features", 0.01, 0.5, default=0.2)
     max_features = UniformFloatHyperparameter("max_features",
                                               0.5,
                                               5,
                                               default=1)
     max_depth = UnParametrizedHyperparameter("max_depth", "None")
     min_samples_split = UniformIntegerHyperparameter("min_samples_split",
                                                      2,
                                                      20,
                                                      default=2)
     min_samples_leaf = UniformIntegerHyperparameter("min_samples_leaf",
                                                     1,
                                                     20,
                                                     default=1)
     max_leaf_nodes = UnParametrizedHyperparameter("max_leaf_nodes", "None")
     bootstrap = CategoricalHyperparameter("bootstrap", ["True", "False"],
                                           default="True")
     cs = ConfigurationSpace()
     cs.add_hyperparameter(n_estimators)
     cs.add_hyperparameter(criterion)
     cs.add_hyperparameter(max_features)
     cs.add_hyperparameter(max_depth)
     cs.add_hyperparameter(min_samples_split)
     cs.add_hyperparameter(min_samples_leaf)
     cs.add_hyperparameter(max_leaf_nodes)
     cs.add_hyperparameter(bootstrap)
     return cs
Example no. 7
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        loss = cs.add_hyperparameter(
            CategoricalHyperparameter("loss",
                                      ["ls", "lad", "huber", "quantile"],
                                      default="ls"))
        learning_rate = cs.add_hyperparameter(
            UniformFloatHyperparameter(name="learning_rate",
                                       lower=0.0001,
                                       upper=1,
                                       default=0.1,
                                       log=True))
        n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100))
        max_depth = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="max_depth",
                                         lower=1,
                                         upper=10,
                                         default=3))
        min_samples_split = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="min_samples_split",
                                         lower=2,
                                         upper=20,
                                         default=2,
                                         log=False))
        min_samples_leaf = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="min_samples_leaf",
                                         lower=1,
                                         upper=20,
                                         default=1,
                                         log=False))
        min_weight_fraction_leaf = cs.add_hyperparameter(
            UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.))
        subsample = cs.add_hyperparameter(
            UniformFloatHyperparameter(name="subsample",
                                       lower=0.01,
                                       upper=1.0,
                                       default=1.0,
                                       log=False))
        max_features = cs.add_hyperparameter(
            UniformFloatHyperparameter("max_features", 0.5, 5, default=1))
        max_leaf_nodes = cs.add_hyperparameter(
            UnParametrizedHyperparameter(name="max_leaf_nodes", value="None"))
        alpha = cs.add_hyperparameter(
            UniformFloatHyperparameter("alpha",
                                       lower=0.75,
                                       upper=0.99,
                                       default=0.9))

        cs.add_condition(InCondition(alpha, loss, ['huber', 'quantile']))
        return cs
Example no. 8
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100))
        criterion = cs.add_hyperparameter(Constant("criterion", "mse"))
        max_features = cs.add_hyperparameter(
            UniformFloatHyperparameter("max_features", 0.5, 5, default=1))

        max_depth = cs.add_hyperparameter(
            UnParametrizedHyperparameter(name="max_depth", value="None"))

        min_samples_split = cs.add_hyperparameter(
            UniformIntegerHyperparameter("min_samples_split", 2, 20,
                                         default=2))
        min_samples_leaf = cs.add_hyperparameter(
            UniformIntegerHyperparameter("min_samples_leaf", 1, 20, default=1))

        # Unparametrized, we use min_samples as regularization
        # max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter(
        # name="max_leaf_nodes_or_max_depth", value="max_depth")
        # CategoricalHyperparameter("max_leaf_nodes_or_max_depth",
        # choices=["max_leaf_nodes", "max_depth"], default="max_depth")
        # min_weight_fraction_leaf = UniformFloatHyperparameter(
        #    "min_weight_fraction_leaf", 0.0, 0.1)
        # max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes",
        #                                              value="None")

        bootstrap = cs.add_hyperparameter(
            CategoricalHyperparameter("bootstrap", ["True", "False"],
                                      default="False"))

        # Conditions
        # Not applicable because max_leaf_nodes is not a legal value of the parent
        #cond_max_leaf_nodes_or_max_depth = \
        #    EqualsCondition(child=max_leaf_nodes,
        #                    parent=max_leaf_nodes_or_max_depth,
        #                    value="max_leaf_nodes")
        #cond2_max_leaf_nodes_or_max_depth = \
        #    EqualsCondition(child=use_max_depth,
        #                    parent=max_leaf_nodes_or_max_depth,
        #                    value="max_depth")

        #cond_max_depth = EqualsCondition(child=max_depth, parent=use_max_depth,
        #value="True")
        #cs.add_condition(cond_max_leaf_nodes_or_max_depth)
        #cs.add_condition(cond2_max_leaf_nodes_or_max_depth)
        #cs.add_condition(cond_max_depth)

        return cs
Example no. 9
 def test_add_hyperparameters_with_equal_names(self):
     cs = ConfigurationSpace()
     hp = UniformIntegerHyperparameter("name", 0, 10)
     cs.add_hyperparameter(hp)
     self.assertRaisesRegexp(
         ValueError, "Hyperparameter 'name' is already in the "
         "configuration space.", cs.add_hyperparameter, hp)
Example no. 10
    def get_hyperparameter_search_space(dataset_properties=None):
        loss = CategoricalHyperparameter(
            "loss",
            ["hinge", "log", "modified_huber", "squared_hinge", "perceptron"],
            default="hinge")
        penalty = CategoricalHyperparameter("penalty",
                                            ["l1", "l2", "elasticnet"],
                                            default="l2")
        alpha = UniformFloatHyperparameter("alpha",
                                           10**-7,
                                           10**-1,
                                           log=True,
                                           default=0.0001)
        l1_ratio = UniformFloatHyperparameter("l1_ratio", 0, 1, default=0.15)
        fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True")
        n_iter = UniformIntegerHyperparameter("n_iter", 5, 1000, default=20)
        epsilon = UniformFloatHyperparameter("epsilon",
                                             1e-5,
                                             1e-1,
                                             default=1e-4,
                                             log=True)
        learning_rate = CategoricalHyperparameter(
            "learning_rate", ["optimal", "invscaling", "constant"],
            default="optimal")
        eta0 = UniformFloatHyperparameter("eta0", 10**-7, 0.1, default=0.01)
        power_t = UniformFloatHyperparameter("power_t", 1e-5, 1, default=0.5)
        # This does not allow for other resampling methods!
        class_weight = CategoricalHyperparameter("class_weight",
                                                 ["None", "auto"],
                                                 default="None")
        cs = ConfigurationSpace()
        cs.add_hyperparameter(loss)
        cs.add_hyperparameter(penalty)
        cs.add_hyperparameter(alpha)
        cs.add_hyperparameter(l1_ratio)
        cs.add_hyperparameter(fit_intercept)
        cs.add_hyperparameter(n_iter)
        cs.add_hyperparameter(epsilon)
        cs.add_hyperparameter(learning_rate)
        cs.add_hyperparameter(eta0)
        cs.add_hyperparameter(power_t)
        cs.add_hyperparameter(class_weight)

        # TODO add passive/aggressive here, although not properly documented?
        elasticnet = EqualsCondition(l1_ratio, penalty, "elasticnet")
        epsilon_condition = EqualsCondition(epsilon, loss, "modified_huber")
        # eta0 seems to be always active according to the source code; when
        # learning_rate is set to optimal, eta0 is the starting value:
        # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/linear_model/sgd_fast.pyx
        #eta0_and_inv = EqualsCondition(eta0, learning_rate, "invscaling")
        #eta0_and_constant = EqualsCondition(eta0, learning_rate, "constant")
        #eta0_condition = OrConjunction(eta0_and_inv, eta0_and_constant)
        power_t_condition = EqualsCondition(power_t, learning_rate,
                                            "invscaling")

        cs.add_condition(elasticnet)
        cs.add_condition(epsilon_condition)
        cs.add_condition(power_t_condition)

        return cs
Example no. 11
    def get_hyperparameter_search_space(dataset_properties=None):

        n_neighbors = UniformIntegerHyperparameter(name="n_neighbors",
                                                   lower=1,
                                                   upper=100,
                                                   default=1)
        weights = CategoricalHyperparameter(name="weights",
                                            choices=["uniform", "distance"],
                                            default="uniform")
        metric = UnParametrizedHyperparameter(name="metric", value="minkowski")
        algorithm = Constant(name='algorithm', value="auto")
        p = CategoricalHyperparameter(name="p", choices=[1, 2, 5], default=2)
        leaf_size = Constant(name="leaf_size", value=30)

        # Unparametrized
        # TODO: If we further parametrize 'metric' we need more metric params
        metric = UnParametrizedHyperparameter(name="metric", value="minkowski")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(n_neighbors)
        cs.add_hyperparameter(weights)
        cs.add_hyperparameter(metric)
        cs.add_hyperparameter(algorithm)
        cs.add_hyperparameter(p)
        cs.add_hyperparameter(leaf_size)

        # Conditions
        metric_p = EqualsCondition(parent=metric, child=p, value="minkowski")
        cs.add_condition(metric_p)

        return cs
Example no. 12
    def test_equals_condition(self):
        hp1 = CategoricalHyperparameter("parent", [0, 1])
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        cond = EqualsCondition(hp2, hp1, 0)
        cond_ = EqualsCondition(hp2, hp1, 0)

        # Test invalid conditions:
        self.assertRaisesRegexp(
            ValueError, "Argument 'parent' is not an "
            "instance of HPOlibConfigSpace.hyperparameter."
            "Hyperparameter.", EqualsCondition, hp2, "parent", 0)
        self.assertRaisesRegexp(
            ValueError, "Argument 'child' is not an "
            "instance of HPOlibConfigSpace.hyperparameter."
            "Hyperparameter.", EqualsCondition, "child", hp1, 0)
        self.assertRaisesRegexp(
            ValueError, "The child and parent hyperparameter "
            "must be different hyperparameters.", EqualsCondition, hp1, hp1, 0)

        self.assertEqual(cond, cond_)

        cond_reverse = EqualsCondition(hp1, hp2, 0)
        self.assertNotEqual(cond, cond_reverse)

        self.assertNotEqual(cond, dict())

        self.assertEqual("child | parent == 0", str(cond))
Example no. 13
 def get_hyperparameter_search_space(dataset_properties=None):
     N = UniformIntegerHyperparameter("N", 5, 20, default=10)
     precond = UniformFloatHyperparameter("precond", 0, 0.5, default=0.1)
     cs = ConfigurationSpace()
     cs.add_hyperparameter(N)
     cs.add_hyperparameter(precond)
     return cs
Example no. 14
 def get_hyperparameter_search_space(dataset_properties=None):
     target_dim = UniformIntegerHyperparameter("target_dim",
                                               10,
                                               256,
                                               default=128)
     cs = ConfigurationSpace()
     cs.add_hyperparameter(target_dim)
     return cs
Example no. 15
 def get_hyperparameter_search_space(dataset_properties=None):
     max_epochs = UniformIntegerHyperparameter("max_epochs",
                                               1,
                                               20,
                                               default=2)
     cs = ConfigurationSpace()
     cs.add_hyperparameter(max_epochs)
     return cs
Example no. 16
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        criterion = cs.add_hyperparameter(Constant('criterion', 'mse'))
        splitter = cs.add_hyperparameter(Constant("splitter", "best"))
        max_features = cs.add_hyperparameter(Constant('max_features', 1.0))
        max_depth = cs.add_hyperparameter(UniformFloatHyperparameter(
            'max_depth', 0., 2., default=0.5))
        min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter(
            "min_samples_split", 2, 20, default=2))
        min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter(
            "min_samples_leaf", 1, 20, default=1))
        min_weight_fraction_leaf = cs.add_hyperparameter(
            Constant("min_weight_fraction_leaf", 0.0))
        max_leaf_nodes = cs.add_hyperparameter(
            UnParametrizedHyperparameter("max_leaf_nodes", "None"))

        return cs
Example no. 17
 def get_hyperparameter_search_space(dataset_properties=None):
     gamma = UniformFloatHyperparameter(
         "gamma", 0.3, 2., default=1.0)
     n_components = UniformIntegerHyperparameter(
         "n_components", 50, 10000, default=100, log=True)
     cs = ConfigurationSpace()
     cs.add_hyperparameter(gamma)
     cs.add_hyperparameter(n_components)
     return cs
Example no. 18
 def test_condition_from_cryptominisat(self):
     parent = CategoricalHyperparameter('blkrest', ['0', '1'], default='1')
     child = UniformIntegerHyperparameter('blkrestlen',
                                          2000,
                                          10000,
                                          log=True)
     condition = EqualsCondition(child, parent, '1')
     self.assertFalse(condition.evaluate(dict(blkrest='0')))
     self.assertTrue(condition.evaluate(dict(blkrest='1')))
Example no. 19
 def test_hyperparameters_with_valid_condition(self):
     cs = ConfigurationSpace()
     hp1 = CategoricalHyperparameter("parent", [0, 1])
     cs.add_hyperparameter(hp1)
     hp2 = UniformIntegerHyperparameter("child", 0, 10)
     cs.add_hyperparameter(hp2)
     cond = EqualsCondition(hp2, hp1, 0)
     cs.add_condition(cond)
     self.assertEqual(len(cs._hyperparameters), 2)
Example no. 20
    def get_hyperparameter_search_space(dataset_properties=None):
        if dataset_properties is not None and \
                (dataset_properties.get("sparse") is True or
                 dataset_properties.get("signed") is False):
            allow_chi2 = False
        else:
            allow_chi2 = True

        possible_kernels = ['poly', 'rbf', 'sigmoid', 'cosine']
        if allow_chi2:
            possible_kernels.append("chi2")
        kernel = CategoricalHyperparameter('kernel', possible_kernels, 'rbf')
        degree = UniformIntegerHyperparameter('degree', 2, 5, 3)
        gamma = UniformFloatHyperparameter("gamma",
                                           3.0517578125e-05,
                                           8,
                                           log=True,
                                           default=0.1)
        coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0)
        n_components = UniformIntegerHyperparameter("n_components",
                                                    50,
                                                    10000,
                                                    default=100,
                                                    log=True)

        cs = ConfigurationSpace()
        cs.add_hyperparameter(kernel)
        cs.add_hyperparameter(degree)
        cs.add_hyperparameter(gamma)
        cs.add_hyperparameter(coef0)
        cs.add_hyperparameter(n_components)

        degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
        coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])

        gamma_kernels = ["poly", "rbf", "sigmoid"]
        if allow_chi2:
            gamma_kernels.append("chi2")
        gamma_condition = InCondition(gamma, kernel, gamma_kernels)
        cs.add_condition(degree_depends_on_poly)
        cs.add_condition(coef0_condition)
        cs.add_condition(gamma_condition)
        return cs
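A short sketch of how dataset_properties steers this space: the flags checked at the top of the function decide whether the "chi2" kernel is offered (get_hyperparameter is the accessor shown in Example no. 26):

    # Dense, signed data: allow_chi2 stays True and chi2 appears among the choices.
    cs_dense = get_hyperparameter_search_space(
        dataset_properties={"sparse": False, "signed": True})
    # Sparse data: allow_chi2 becomes False and chi2 is left out.
    cs_sparse = get_hyperparameter_search_space(dataset_properties={"sparse": True})
    print(cs_dense.get_hyperparameter("kernel"))
    print(cs_sparse.get_hyperparameter("kernel"))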
Example no. 21
 def test_get_conditions(self):
     cs = ConfigurationSpace()
     hp1 = CategoricalHyperparameter("parent", [0, 1])
     cs.add_hyperparameter(hp1)
     hp2 = UniformIntegerHyperparameter("child", 0, 10)
     cs.add_hyperparameter(hp2)
     self.assertEqual([], cs.get_conditions())
     cond1 = EqualsCondition(hp2, hp1, 0)
     cs.add_condition(cond1)
     self.assertEqual([cond1], cs.get_conditions())
Example no. 22
    def test_and_conjunction(self):
        hp1 = CategoricalHyperparameter("parent", [0, 1])
        hp2 = UniformIntegerHyperparameter("child", 0, 2)
        hp3 = UniformIntegerHyperparameter("child2", 0, 2)
        hp4 = UniformIntegerHyperparameter("child3", 0, 2)

        forb2 = ForbiddenEqualsClause(hp1, 1)
        forb3 = ForbiddenInClause(hp2, range(2, 3))
        forb4 = ForbiddenInClause(hp3, range(2, 3))
        forb5 = ForbiddenInClause(hp4, range(2, 3))

        and1 = ForbiddenAndConjunction(forb2, forb3)
        and2 = ForbiddenAndConjunction(forb2, forb4)
        and3 = ForbiddenAndConjunction(forb2, forb5)

        total_and = ForbiddenAndConjunction(and1, and2, and3)
        self.assertEqual(
            "((Forbidden: parent == 1 && Forbidden: child in {2}) "
            "&& (Forbidden: parent == 1 && Forbidden: child2 in {2}) "
            "&& (Forbidden: parent == 1 && Forbidden: child3 in "
            "{2}))", str(total_and))

        results = [
            False, False, False, False, False, False, False, False, False,
            False, False, False, False, False, False, False, False, False,
            False, False, False, False, False, False, False, False, False,
            False, False, False, False, False, False, False, False, False,
            False, False, False, False, False, False, False, False, False,
            False, False, False, False, False, False, False, False, True
        ]

        for i, values in enumerate(
                product(range(2), range(3), range(3), range(3))):
            is_forbidden = total_and.is_forbidden({
                "parent": values[0],
                "child": values[1],
                "child2": values[2],
                "child3": values[3]
            })

            self.assertEqual(results[i], is_forbidden)

        self.assertFalse(total_and.is_forbidden([], strict=False))
Example no. 23
    def get_hyperparameter_search_space(dataset_properties=None):

        loss = CategoricalHyperparameter(
            name="loss", choices=["ls", "lad"],
            default='ls')  #, "huber", "quantile"], default='ls')

        learning_rate = UniformFloatHyperparameter(name="learning_rate",
                                                   lower=0.0001,
                                                   upper=1,
                                                   default=0.1,
                                                   log=True)
        subsample = UniformFloatHyperparameter(name="subsample",
                                               lower=0.01,
                                               upper=1.0,
                                               default=1.0,
                                               log=False)

        n_estimators = Constant("n_estimators", 100)

        max_features = UniformFloatHyperparameter("max_features",
                                                  0.5,
                                                  5,
                                                  default=1)
        max_depth = UniformIntegerHyperparameter(name="max_depth",
                                                 lower=1,
                                                 upper=10,
                                                 default=3)
        min_samples_split = UniformIntegerHyperparameter(
            name="min_samples_split", lower=2, upper=20, default=2, log=False)
        min_samples_leaf = UniformIntegerHyperparameter(
            name="min_samples_leaf", lower=1, upper=20, default=1, log=False)

        cs = ConfigurationSpace()
        cs.add_hyperparameter(n_estimators)
        cs.add_hyperparameter(loss)
        cs.add_hyperparameter(learning_rate)
        cs.add_hyperparameter(max_features)
        cs.add_hyperparameter(max_depth)
        cs.add_hyperparameter(min_samples_split)
        cs.add_hyperparameter(min_samples_leaf)
        cs.add_hyperparameter(subsample)
        return cs
Example no. 24
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        n_neighbors = cs.add_hyperparameter(UniformIntegerHyperparameter(
            name="n_neighbors", lower=1, upper=100, log=True, default=1))
        weights = cs.add_hyperparameter(CategoricalHyperparameter(
            name="weights", choices=["uniform", "distance"], default="uniform"))
        p = cs.add_hyperparameter(CategoricalHyperparameter(
            name="p", choices=[1, 2], default=2))

        return cs
Example no. 25
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        loss = cs.add_hyperparameter(Constant("loss", "deviance"))
        learning_rate = cs.add_hyperparameter(
            UniformFloatHyperparameter(name="learning_rate",
                                       lower=0.0001,
                                       upper=1,
                                       default=0.1,
                                       log=True))
        n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100))
        max_depth = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="max_depth",
                                         lower=1,
                                         upper=10,
                                         default=3))
        min_samples_split = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="min_samples_split",
                                         lower=2,
                                         upper=20,
                                         default=2,
                                         log=False))
        min_samples_leaf = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="min_samples_leaf",
                                         lower=1,
                                         upper=20,
                                         default=1,
                                         log=False))
        min_weight_fraction_leaf = cs.add_hyperparameter(
            UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.))
        subsample = cs.add_hyperparameter(
            UniformFloatHyperparameter(name="subsample",
                                       lower=0.01,
                                       upper=1.0,
                                       default=1.0,
                                       log=False))
        max_features = cs.add_hyperparameter(
            UniformFloatHyperparameter("max_features", 0.5, 5, default=1))
        max_leaf_nodes = cs.add_hyperparameter(
            UnParametrizedHyperparameter(name="max_leaf_nodes", value="None"))

        return cs
Example no. 26
    def test_get_hyperparameter(self):
        cs = ConfigurationSpace()
        hp1 = CategoricalHyperparameter("parent", [0, 1])
        cs.add_hyperparameter(hp1)
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        cs.add_hyperparameter(hp2)

        retval = cs.get_hyperparameter("parent")
        self.assertEqual(hp1, retval)
        retval = cs.get_hyperparameter("child")
        self.assertEqual(hp2, retval)
        self.assertRaises(KeyError, cs.get_hyperparameter, "grandfather")
Example no. 27
 def get_hyperparameter_search_space(dataset_properties=None):
     loss = CategoricalHyperparameter("loss", ["hinge", "squared_hinge"],
                                      default="hinge")
     fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True")
     n_iter = UniformIntegerHyperparameter("n_iter", 5, 1000, default=20)
     C = UniformFloatHyperparameter("C", 1e-5, 10, 1, log=True)
     cs = ConfigurationSpace()
     cs.add_hyperparameter(loss)
     cs.add_hyperparameter(fit_intercept)
     cs.add_hyperparameter(n_iter)
     cs.add_hyperparameter(C)
     return cs
Example no. 28
    def test_sample_configuration(self):
        cs = ConfigurationSpace()
        hp1 = CategoricalHyperparameter("parent", [0, 1])
        cs.add_hyperparameter(hp1)
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        cs.add_hyperparameter(hp2)
        cond1 = EqualsCondition(hp2, hp1, 0)
        cs.add_condition(cond1)
        # This automatically checks the configuration!
        Configuration(cs, dict(parent=0, child=5))

        # and now for something more complicated
        cs = ConfigurationSpace(seed=1)
        hp1 = CategoricalHyperparameter("input1", [0, 1])
        cs.add_hyperparameter(hp1)
        hp2 = CategoricalHyperparameter("input2", [0, 1])
        cs.add_hyperparameter(hp2)
        hp3 = CategoricalHyperparameter("input3", [0, 1])
        cs.add_hyperparameter(hp3)
        hp4 = CategoricalHyperparameter("input4", [0, 1])
        cs.add_hyperparameter(hp4)
        hp5 = CategoricalHyperparameter("input5", [0, 1])
        cs.add_hyperparameter(hp5)
        hp6 = Constant("AND", "True")
        cs.add_hyperparameter(hp6)

        cond1 = EqualsCondition(hp6, hp1, 1)
        cond2 = NotEqualsCondition(hp6, hp2, 1)
        cond3 = InCondition(hp6, hp3, [1])
        cond4 = EqualsCondition(hp5, hp3, 1)
        cond5 = EqualsCondition(hp4, hp5, 1)
        cond6 = EqualsCondition(hp6, hp4, 1)
        cond7 = EqualsCondition(hp6, hp5, 1)

        conj1 = AndConjunction(cond1, cond2)
        conj2 = OrConjunction(conj1, cond3)
        conj3 = AndConjunction(conj2, cond6, cond7)
        cs.add_condition(cond4)
        cs.add_condition(cond5)
        cs.add_condition(conj3)

        samples = []
        for i in range(5):
            cs.seed(1)
            samples.append([])
            for j in range(100):
                sample = cs.sample_configuration()
                samples[-1].append(sample)

            if i > 0:
                for j in range(100):
                    self.assertEqual(samples[-1][j], samples[-2][j])
Example no. 29
    def test_in_condition(self):
        hp1 = CategoricalHyperparameter("parent", range(0, 11))
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        cond = InCondition(hp2, hp1, [0, 1, 2, 3, 4, 5])
        cond_ = InCondition(hp2, hp1, [0, 1, 2, 3, 4, 5])
        self.assertEqual(cond, cond_)

        cond_reverse = InCondition(hp1, hp2, [0, 1, 2, 3, 4, 5])
        self.assertNotEqual(cond, cond_reverse)

        self.assertNotEqual(cond, dict())

        self.assertEqual("child | parent in {0, 1, 2, 3, 4, 5}", str(cond))
Example no. 30
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        shrinkage = cs.add_hyperparameter(CategoricalHyperparameter(
            "shrinkage", ["None", "auto", "manual"], default="None"))
        shrinkage_factor = cs.add_hyperparameter(UniformFloatHyperparameter(
            "shrinkage_factor", 0., 1., 0.5))
        n_components = cs.add_hyperparameter(UniformIntegerHyperparameter(
            'n_components', 1, 250, default=10))
        tol = cs.add_hyperparameter(UniformFloatHyperparameter(
            "tol", 1e-5, 1e-1, default=1e-4, log=True))

        cs.add_condition(EqualsCondition(shrinkage_factor, shrinkage, "manual"))
        return cs
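A final sketch: the single condition registered above can be inspected with get_conditions, as in Example no. 21 (the bare call to get_hyperparameter_search_space is again an assumption for illustration):

    cs = get_hyperparameter_search_space()
    for cond in cs.get_conditions():
        print(cond)  # the shrinkage_factor-on-shrinkage condition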