Example #1
    def test_keys(self):
        # A regression test to make sure issue #49 no longer pops up. Iterating
        # over the configuration in the for loop should not raise a KeyError
        # when a child hyperparameter is inactive.
        cs = ConfigurationSpace()
        shrinkage = CategoricalHyperparameter(
            "shrinkage",
            ["None", "auto", "manual"],
            default_value="None",
        )
        shrinkage_factor = UniformFloatHyperparameter(
            "shrinkage_factor",
            0.,
            1.,
            0.5,
        )
        cs.add_hyperparameters([shrinkage, shrinkage_factor])

        cs.add_condition(EqualsCondition(shrinkage_factor, shrinkage,
                                         "manual"))

        for i in range(10):
            config = cs.sample_configuration()
            {
                hp_name: config[hp_name]
                for hp_name in config if config[hp_name] is not None
            }
Example #2
    def test_acts_as_mapping(self):
        """
        Test that ConfigurationSpace can act as a mapping with iteration,
        indexing and items, values, keys.
        """
        cs = ConfigurationSpace()
        names = [f"name{i}" for i in range(5)]
        hyperparameters = [
            UniformIntegerHyperparameter(name, 0, 10) for name in names
        ]
        cs.add_hyperparameters(hyperparameters)

        # Test indexing
        assert cs['name3'] == hyperparameters[3]

        # Test dict methods
        assert list(cs.keys()) == names
        assert list(cs.values()) == hyperparameters
        assert list(cs.items()) == list(zip(names, hyperparameters))
        assert len(cs) == 5

        # Test __iter__
        assert list(iter(cs)) == names

        # Test unpacking
        d = {**cs}
        assert list(d.keys()) == names
        assert list(d.values()) == hyperparameters
        assert list(d.items()) == list(zip(names, hyperparameters))
        assert len(d) == 5
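On ConfigSpace releases that still ship the explicit getters alongside the mapping interface exercised above, the two APIs agree; a brief, version-dependent sketch (hedged: the getters are deprecated or absent on very recent releases):

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformIntegerHyperparameter

cs = ConfigurationSpace()
cs.add_hyperparameters(
    [UniformIntegerHyperparameter(f"name{i}", 0, 10) for i in range(5)])

# The mapping views mirror the older explicit getters (where both exist).
assert list(cs.keys()) == cs.get_hyperparameter_names()
assert list(cs.values()) == cs.get_hyperparameters()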
Example #3
    def test_check_neighbouring_config_diamond_str(self):
        diamond = ConfigurationSpace()
        head = CategoricalHyperparameter('head', ['red', 'green'])
        left = CategoricalHyperparameter('left', ['red', 'green'])
        right = CategoricalHyperparameter('right', ['red', 'green', 'blue', 'yellow'])
        bottom = CategoricalHyperparameter('bottom', ['red', 'green'])
        diamond.add_hyperparameters([head, left, right, bottom])
        diamond.add_condition(EqualsCondition(left, head, 'red'))
        diamond.add_condition(EqualsCondition(right, head, 'red'))
        diamond.add_condition(AndConjunction(EqualsCondition(bottom, left, 'green'),
                                             EqualsCondition(bottom, right, 'green')))

        config = Configuration(diamond, {'bottom': 'red', 'head': 'red', 'left': 'green', 'right': 'green'})
        hp_name = "head"
        index = diamond.get_idx_by_hyperparameter_name(hp_name)
        neighbor_value = 1

        new_array = ConfigSpace.c_util.change_hp_value(
            diamond,
            config.get_array(),
            hp_name,
            neighbor_value,
            index
        )
        expected_array = np.array([1, np.nan, np.nan, np.nan])

        np.testing.assert_almost_equal(new_array, expected_array)
Example #4
 def get_hyperparameter_search_space(dataset_properties=None):
     cs = ConfigurationSpace()
     BaseImageClassificationModel.set_training_space(cs)
     BaseImageClassificationModel.set_optimizer_space(cs)
     res_kernel_size = CategoricalHyperparameter('res_kernel_size', [3, 5],
                                                 default_value=3)
     res_stage2_block = UniformIntegerHyperparameter('res_stage2_block',
                                                     1,
                                                     3,
                                                     default_value=2)
     res_stage3_block = UniformIntegerHyperparameter('res_stage3_block',
                                                     1,
                                                     11,
                                                     default_value=3)
     res_stage4_block = UniformIntegerHyperparameter('res_stage4_block',
                                                     1,
                                                     47,
                                                     default_value=5)
     res_stage5_block = UniformIntegerHyperparameter('res_stage5_block',
                                                     1,
                                                     3,
                                                     default_value=2)
     cs.add_hyperparameters([
         res_kernel_size, res_stage2_block, res_stage3_block,
         res_stage4_block, res_stage5_block
     ])
     return cs
Example #5
    def get_pipeline_config_space(self, algorithm_candidates):
        cs = ConfigurationSpace()
        estimator_choice = CategoricalHyperparameter(
            "estimator",
            algorithm_candidates,
            default_value=algorithm_candidates[0])
        cs.add_hyperparameter(estimator_choice)
        if self.task_type == IMG_CLS:
            aug_space = get_aug_hyperparameter_space()
            cs.add_hyperparameters(aug_space.get_hyperparameters())
            cs.add_conditions(aug_space.get_conditions())

        for estimator_id in algorithm_candidates:
            sub_cs = self.get_model_config_space(estimator_id,
                                                 include_estimator=False,
                                                 include_aug=False)
            parent_hyperparameter = {
                'parent': estimator_choice,
                'value': estimator_id
            }
            cs.add_configuration_space(
                estimator_id,
                sub_cs,
                parent_hyperparameter=parent_hyperparameter)
        return cs
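The pattern above hinges on add_configuration_space() with a parent_hyperparameter: each algorithm's sub-space is prefixed with the estimator id and only becomes active when the "estimator" choice takes that value. A minimal, self-contained sketch of the same idea (the estimator names and sub-space parameters here are illustrative, not taken from the original project):

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                          UniformFloatHyperparameter)

cs = ConfigurationSpace()
estimator = CategoricalHyperparameter("estimator", ["svm", "tree"],
                                      default_value="svm")
cs.add_hyperparameter(estimator)

svm_cs = ConfigurationSpace()
svm_cs.add_hyperparameter(
    UniformFloatHyperparameter("C", 0.03125, 32768, log=True))
tree_cs = ConfigurationSpace()
tree_cs.add_hyperparameter(
    UniformFloatHyperparameter("min_impurity", 0.0, 0.5))

# Sub-space hyperparameters are prefixed (e.g. "svm:C") and become active
# only when the parent hyperparameter takes the given value.
cs.add_configuration_space(
    "svm", svm_cs,
    parent_hyperparameter={"parent": estimator, "value": "svm"})
cs.add_configuration_space(
    "tree", tree_cs,
    parent_hyperparameter={"parent": estimator, "value": "tree"})

print(cs.sample_configuration())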
Example #6
    def test_sobol(self):
        cs = ConfigurationSpace()
        hyperparameters = [
            UniformFloatHyperparameter('x%d' % (i + 1), 0, 1)
            for i in range(21201)
        ]
        cs.add_hyperparameters(hyperparameters)

        sobol_kwargs = dict(
            rng=np.random.RandomState(1),
            traj_logger=unittest.mock.Mock(),
            ta_run_limit=1000,
            configs=None,
            n_configs_x_params=None,
            max_config_fracs=0.25,
            init_budget=1,
        )
        SobolDesign(cs=cs, **sobol_kwargs).select_configurations()

        cs.add_hyperparameter(UniformFloatHyperparameter('x21202', 0, 1))
        with self.assertRaisesRegex(
                Exception,
                "Maximum supported dimensionality is 21201.",
        ):
            SobolDesign(cs=cs, **sobol_kwargs).select_configurations()
Example #7
    def _get_configuration_space(self) -> ConfigurationSpace:
        """Get the configuration space for the random forest.

        Returns
        -------
        ConfigurationSpace
        """
        cfg = ConfigurationSpace()
        cfg.seed(int(self.rs.randint(0, 1000)))

        num_trees = Constant("num_trees", value=N_TREES)
        bootstrap = CategoricalHyperparameter(
            "do_bootstrapping",
            choices=(self.bootstrap, ),
            default_value=self.bootstrap,
        )
        max_feats = CategoricalHyperparameter("max_features",
                                              choices=(3 / 6, 4 / 6, 5 / 6, 1),
                                              default_value=1)
        min_split = UniformIntegerHyperparameter("min_samples_to_split",
                                                 lower=1,
                                                 upper=10,
                                                 default_value=2)
        min_leavs = UniformIntegerHyperparameter("min_samples_in_leaf",
                                                 lower=1,
                                                 upper=10,
                                                 default_value=1)
        cfg.add_hyperparameters(
            [num_trees, bootstrap, max_feats, min_split, min_leavs])
        return cfg
Example #8
    def get_hyperparameter_search_space():
        cs = ConfigurationSpace()

        n_estimators = UniformIntegerHyperparameter(name="n_estimators",
                                                    lower=50,
                                                    upper=500,
                                                    default_value=50,
                                                    log=False)
        learning_rate = UniformFloatHyperparameter(name="learning_rate",
                                                   lower=0.01,
                                                   upper=2,
                                                   default_value=0.1,
                                                   log=True)
        algorithm = CategoricalHyperparameter(name="algorithm",
                                              choices=["SAMME.R", "SAMME"],
                                              default_value="SAMME.R")
        max_depth = UniformIntegerHyperparameter(name="max_depth",
                                                 lower=2,
                                                 upper=8,
                                                 default_value=3,
                                                 log=False)

        cs.add_hyperparameters(
            [n_estimators, learning_rate, algorithm, max_depth])
        return cs
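A minimal usage sketch for the AdaBoost space defined above (assuming get_hyperparameter_search_space and the hyperparameter imports it relies on are in scope): seed the space and draw a few reproducible samples.

cs = get_hyperparameter_search_space()
cs.seed(1)  # make sampling reproducible
for config in cs.sample_configuration(3):
    print(config.get_dictionary())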
Example #9
    def test_check_neighbouring_config_diamond_str(self):
        diamond = ConfigurationSpace()
        head = CategoricalHyperparameter('head', ['red', 'green'])
        left = CategoricalHyperparameter('left', ['red', 'green'])
        right = CategoricalHyperparameter('right',
                                          ['red', 'green', 'blue', 'yellow'])
        bottom = CategoricalHyperparameter('bottom', ['red', 'green'])
        diamond.add_hyperparameters([head, left, right, bottom])
        diamond.add_condition(EqualsCondition(left, head, 'red'))
        diamond.add_condition(EqualsCondition(right, head, 'red'))
        diamond.add_condition(
            AndConjunction(EqualsCondition(bottom, left, 'green'),
                           EqualsCondition(bottom, right, 'green')))

        config = Configuration(diamond, {
            'bottom': 'red',
            'head': 'red',
            'left': 'green',
            'right': 'green'
        })
        hp_name = "head"
        index = diamond.get_idx_by_hyperparameter_name(hp_name)
        neighbor_value = 1

        new_array = ConfigSpace.c_util.change_hp_value(diamond,
                                                       config.get_array(),
                                                       hp_name, neighbor_value,
                                                       index)
        expected_array = np.array([1, np.nan, np.nan, np.nan])

        np.testing.assert_almost_equal(new_array, expected_array)
Example #10
    def test_check_neighbouring_config_diamond(self):
        diamond = ConfigurationSpace()
        head = CategoricalHyperparameter('head', [0, 1])
        left = CategoricalHyperparameter('left', [0, 1])
        right = CategoricalHyperparameter('right', [0, 1, 2, 3])
        bottom = CategoricalHyperparameter('bottom', [0, 1])
        diamond.add_hyperparameters([head, left, right, bottom])
        diamond.add_condition(EqualsCondition(left, head, 0))
        diamond.add_condition(EqualsCondition(right, head, 0))
        diamond.add_condition(
            AndConjunction(EqualsCondition(bottom, left, 1),
                           EqualsCondition(bottom, right, 1)))

        config = Configuration(diamond, {
            'bottom': 0,
            'head': 0,
            'left': 1,
            'right': 1
        })
        hp_name = "head"
        index = diamond.get_idx_by_hyperparameter_name(hp_name)
        neighbor_value = 1

        new_array = change_hp_value(diamond, config.get_array(), hp_name,
                                    neighbor_value, index)
        expected_array = np.array([1, np.nan, np.nan, np.nan])

        np.testing.assert_almost_equal(new_array, expected_array)
Example #11
 def get_configs(self, variables):
     from ConfigSpace import ConfigurationSpace
     from ConfigSpace.hyperparameters import UniformFloatHyperparameter
     from ConfigSpace.hyperparameters import CategoricalHyperparameter
     api_config = {}
     cs = ConfigurationSpace()
     hp_list = []
     for var in variables:
         if isinstance(var, ContinuousVariable):
             api_config[var.name] = {
                 'type': 'real',
                 'space': 'linear',
                 'range': var.domain
             }
             hp_list.append(
                 UniformFloatHyperparameter(name=var.name,
                                            lower=var.domain[0],
                                            upper=var.domain[1],
                                            log=False))
         else:
             api_config[var.name] = {'type': 'cat', 'values': var.domain}
             hp_list.append(
                 CategoricalHyperparameter(name=var.name,
                                           choices=var.domain))
     cs.add_hyperparameters(hp_list)
     return api_config, cs
Example #12
 def test_fix_types(self):
     # Test categorical and ordinal
     for hyperparameter_type in [CategoricalHyperparameter, OrdinalHyperparameter]:
         cs = ConfigurationSpace()
         cs.add_hyperparameters([
             hyperparameter_type('bools', [True, False]),
             hyperparameter_type('ints', [1, 2, 3, 4, 5]),
             hyperparameter_type('floats', [1.5, 2.5, 3.5, 4.5, 5.5]),
             hyperparameter_type('str', ['string', 'ding', 'dong']),
             hyperparameter_type('mixed', [2, True, 1.5, 'string', False, 'False']),
             ])
         c = cs.get_default_configuration().get_dictionary()
         # Check bools
         for b in [False, True]:
             c['bools'] = b
             c_str = {k: str(v) for k, v in c.items()}
             self.assertEqual(fix_types(c_str, cs), c)
         # Check legal mixed values
         for m in [2, True, 1.5, 'string']:
             c['mixed'] = m
             c_str = {k: str(v) for k, v in c.items()}
             self.assertEqual(fix_types(c_str, cs), c)
         # Check error on cornercase that cannot be caught
         for m in [False, 'False']:
             c['mixed'] = m
             c_str = {k: str(v) for k, v in c.items()}
             self.assertRaises(ValueError, fix_types, c_str, cs)
     # Test constant
     for m in [2, 1.5, 'string']:
         cs = ConfigurationSpace()
         cs.add_hyperparameter(Constant('constant', m))
         c = cs.get_default_configuration().get_dictionary()
         c_str = {k: str(v) for k, v in c.items()}
         self.assertEqual(fix_types(c_str, cs), c)
Example #13
    def get_hyperparameter_search_space():
        """
            ['n_estimators', 'learning_rate', 'max_depth', 'colsample_bytree', 'gamma',
                'min_child_weight',  'reg_alpha', 'reg_lambda', 'subsample']
        """
        cs = ConfigurationSpace()
        n_estimators = UniformIntegerHyperparameter("n_estimators",
                                                    100,
                                                    1000,
                                                    q=10,
                                                    default_value=500)
        learning_rate = UniformFloatHyperparameter("learning_rate",
                                                   1e-3,
                                                   0.9,
                                                   log=True,
                                                   default_value=0.1)
        max_depth = UniformIntegerHyperparameter("max_depth", 1, 12)

        colsample_bytree = UniformFloatHyperparameter("colsample_bytree",
                                                      0.1,
                                                      1,
                                                      q=0.1,
                                                      default_value=1)
        gamma = UniformFloatHyperparameter("gamma",
                                           0,
                                           10,
                                           q=0.1,
                                           default_value=0)

        min_child_weight = UniformFloatHyperparameter("min_child_weight",
                                                      0,
                                                      10,
                                                      q=0.1,
                                                      default_value=1)
        reg_alpha = UniformFloatHyperparameter("reg_alpha",
                                               0,
                                               10,
                                               q=0.1,
                                               default_value=0)
        reg_lambda = UniformFloatHyperparameter("reg_lambda",
                                                1,
                                                10,
                                                q=0.1,
                                                default_value=1)
        subsample = UniformFloatHyperparameter("subsample",
                                               0.1,
                                               1,
                                               q=0.1,
                                               default_value=1)

        cs.add_hyperparameters([
            n_estimators, max_depth, learning_rate, min_child_weight,
            subsample, colsample_bytree, gamma, reg_alpha, reg_lambda
        ])
        return cs
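A note on the q argument used throughout the space above: it quantizes sampled values to a grid of step q inside the given bounds. A small sketch, reusing only "subsample" for illustration:

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter

cs = ConfigurationSpace()
cs.add_hyperparameter(
    UniformFloatHyperparameter("subsample", 0.1, 1, q=0.1, default_value=1))

# With q=0.1, samples fall (up to floating point) on 0.1, 0.2, ..., 1.0.
print(sorted({round(c["subsample"], 2) for c in cs.sample_configuration(50)}))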
Example #14
    def get_configspace(self, optimizer='smac'):
        if optimizer == 'smac':
            cs = ConfigurationSpace()

            penalty = CategoricalHyperparameter(
                "penalty", ["l1", "l2"], default_value="l2")
            loss = CategoricalHyperparameter(
                "loss", ["hinge", "squared_hinge"], default_value="squared_hinge")
            dual = CategoricalHyperparameter("dual", ['True', 'False'], default_value='True')
            # This is set ad-hoc
            tol = UniformFloatHyperparameter(
                "tol", 1e-5, 1e-1, default_value=1e-4, log=True)
            C = UniformFloatHyperparameter(
                "C", 0.03125, 32768, log=True, default_value=1.0)
            multi_class = Constant("multi_class", "ovr")
            # These are set ad-hoc
            fit_intercept = Constant("fit_intercept", "True")
            intercept_scaling = Constant("intercept_scaling", 1)
            cs.add_hyperparameters([penalty, loss, dual, tol, C, multi_class,
                                    fit_intercept, intercept_scaling])

            penalty_and_loss = ForbiddenAndConjunction(
                ForbiddenEqualsClause(penalty, "l1"),
                ForbiddenEqualsClause(loss, "hinge")
            )
            constant_penalty_and_loss = ForbiddenAndConjunction(
                ForbiddenEqualsClause(dual, "False"),
                ForbiddenEqualsClause(penalty, "l2"),
                ForbiddenEqualsClause(loss, "hinge")
            )
            penalty_and_dual = ForbiddenAndConjunction(
                ForbiddenEqualsClause(dual, "True"),
                ForbiddenEqualsClause(penalty, "l1")
            )
            cs.add_forbidden_clause(penalty_and_loss)
            cs.add_forbidden_clause(constant_penalty_and_loss)
            cs.add_forbidden_clause(penalty_and_dual)
            return cs
        elif optimizer == 'tpe':
            from hyperopt import hp
            space = {'penalty': hp.choice('liblinear_combination',
                                          [{'penalty': "l1", 'loss': "squared_hinge", 'dual': "False"},
                                           {'penalty': "l2", 'loss': "hinge", 'dual': "True"},
                                           {'penalty': "l2", 'loss': "squared_hinge", 'dual': "True"},
                                           {'penalty': "l2", 'loss': "squared_hinge", 'dual': "False"}]),
                     'loss': None,
                     'dual': None,
                     'tol': hp.loguniform('liblinear_tol', np.log(1e-5), np.log(1e-1)),
                     'C': hp.loguniform('liblinear_C', np.log(0.03125), np.log(32768)),
                     'multi_class': hp.choice('liblinear_multi_class', ["ovr"]),
                     'fit_intercept': hp.choice('liblinear_fit_intercept', ["True"]),
                     'intercept_scaling': hp.choice('liblinear_intercept_scaling', [1])}
            return space
        else:
            raise ValueError('Unknown optimizer %s when getting configspace' % optimizer)
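The forbidden clauses in the 'smac' branch above prune illegal combinations at sampling time: a configuration matching a ForbiddenAndConjunction is never returned. A minimal sketch of that mechanism, reduced to the penalty/loss pair (the ConfigSpace.forbidden import path is the commonly used one; adjust if your version exposes these classes elsewhere):

from ConfigSpace import ConfigurationSpace
from ConfigSpace.forbidden import (ForbiddenAndConjunction,
                                   ForbiddenEqualsClause)
from ConfigSpace.hyperparameters import CategoricalHyperparameter

cs = ConfigurationSpace()
penalty = CategoricalHyperparameter("penalty", ["l1", "l2"], default_value="l2")
loss = CategoricalHyperparameter("loss", ["hinge", "squared_hinge"],
                                 default_value="squared_hinge")
cs.add_hyperparameters([penalty, loss])
cs.add_forbidden_clause(
    ForbiddenAndConjunction(ForbiddenEqualsClause(penalty, "l1"),
                            ForbiddenEqualsClause(loss, "hinge")))

# The forbidden (l1, hinge) pair never appears in sampled configurations.
for config in cs.sample_configuration(20):
    assert not (config["penalty"] == "l1" and config["loss"] == "hinge")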
Example #15
 def get_configspace(self, optimizer='smac'):
     if optimizer == 'smac':
         cs = ConfigurationSpace()
         cs.add_hyperparameters([
             UniformFloatHyperparameter("x%s" % i, self.lb, self.ub)
             for i in range(1, self.dim + 1)
         ])
         return cs
     else:
         raise ValueError('Unknown optimizer %s when getting configspace' %
                          optimizer)
Example #16
 def get_configuration_space(self):
     cs = ConfigurationSpace()
     finite_horizon = CategoricalHyperparameter(name="finite_horizon",
             choices=["true", "false"], default_value="true")
     horizon = UniformIntegerHyperparameter(name="horizon",
             lower=1, upper=1000, default_value=10)
     use_horizon = CSC.InCondition(child=horizon, parent=finite_horizon,
             values=["true"])
     cs.add_hyperparameters([horizon, finite_horizon])
     cs.add_condition(use_horizon)
     return cs
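A standalone sketch of the conditional behaviour above: the same two-parameter space is rebuilt and sampled, and "horizon" only carries a value when its parent "finite_horizon" is "true". The iteration pattern follows Example #1; on very recent ConfigSpace versions inactive keys may instead be absent from the configuration.

from ConfigSpace import ConfigurationSpace
from ConfigSpace.conditions import InCondition
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                          UniformIntegerHyperparameter)

cs = ConfigurationSpace()
finite_horizon = CategoricalHyperparameter("finite_horizon", ["true", "false"],
                                           default_value="true")
horizon = UniformIntegerHyperparameter("horizon", 1, 1000, default_value=10)
cs.add_hyperparameters([finite_horizon, horizon])
cs.add_condition(InCondition(child=horizon, parent=finite_horizon,
                             values=["true"]))

for config in cs.sample_configuration(5):
    active = {name: config[name] for name in config if config[name] is not None}
    print(active)  # "horizon" appears only when finite_horizon == "true"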
Example #17
 def set_training_space(cs: ConfigurationSpace):
     '''
     Set hyperparameters for training
     '''
     batch_size = CategoricalHyperparameter('batch_size', [16, 32],
                                            default_value=32)
     keep_prob = UniformFloatHyperparameter('keep_prob',
                                            0,
                                            0.99,
                                            default_value=0.5)
     cs.add_hyperparameters([batch_size, keep_prob])
Example #18
def main(argv):
    if len(argv) < 2:
        raise ValueError("An input file name must be provided.")
    if FLAGS.processes < 1 or FLAGS.processes > cpu_count():
        raise ValueError("Please provide a valid number of processes.")
    # Read the input file
    if argv[1].endswith(".csv"):
        # If the file has a .csv extension, read it as CSV
        df = pd.read_csv(argv[1])
        X = df[df.columns[:-1]].values
        y = df[df.columns[-1]].values
    else:
        # Otherwise, read it in libsvm format
        with open(argv[1], "r") as fp:
            lines = fp.readlines()
        X, y = libsvm2sparse(lines)
    # Split the dataset into training and validation sets
    X_train, X_valid, y_train, y_valid = train_test_split(X,
                                                          y,
                                                          test_size=0.33,
                                                          random_state=1)

    processes = FLAGS.processes
    # Specify the input/output directories
    dirs = [
        "tmpfile/smac3-output_%s" % (datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d_%H:%M:%S_%f'))
        for _ in range(processes)
    ]
    # Create the pool of worker processes
    pool = []
    for i in range(FLAGS.processes):
        cs = ConfigurationSpace()
        # Hyperparameter search space: alpha in [1e-3, 1]
        alpha = UniformFloatHyperparameter(name="alpha",
                                           lower=1e-3,
                                           upper=1,
                                           default_value=(i + 1) / processes,
                                           log=False)
        cs.add_hyperparameters([alpha])
        # Specify the parallel pSMAC directories and this worker's output directory
        pool.append(
            ParallelSMBO(X_train,
                         y_train,
                         X_valid,
                         y_valid,
                         dirs=dirs,
                         smbo_id=i,
                         cs=cs,
                         our_work=FLAGS.our_work))
    for i in range(processes):
        pool[i].start()
    for i in range(processes):
        pool[i].join()
Example #19
    def set_optimizer_space(cs: ConfigurationSpace):
        '''
        Set hyperparameters for optimizers
        '''
        optimizer = CategoricalHyperparameter('optimizer', ['SGD', 'Adam'],
                                              default_value='Adam')
        sgd_lr = UniformFloatHyperparameter('sgd_lr',
                                            0.00001,
                                            0.1,
                                            default_value=0.005,
                                            log=True)  # log scale
        sgd_decay = UniformFloatHyperparameter('sgd_decay',
                                               0.0001,
                                               0.1,
                                               default_value=0.05,
                                               log=True)  # log scale
        sgd_momentum = UniformFloatHyperparameter('sgd_momentum',
                                                  0.3,
                                                  0.99,
                                                  default_value=0.9)
        adam_lr = UniformFloatHyperparameter('adam_lr',
                                             0.00001,
                                             0.1,
                                             default_value=0.005,
                                             log=True)  # log scale
        adam_decay = UniformFloatHyperparameter('adam_decay',
                                                0.0001,
                                                0.1,
                                                default_value=0.05,
                                                log=True)  # log scale

        sgd_lr_cond = InCondition(child=sgd_lr,
                                  parent=optimizer,
                                  values=['SGD'])
        sgd_decay_cond = InCondition(child=sgd_decay,
                                     parent=optimizer,
                                     values=['SGD'])
        sgd_momentum_cond = InCondition(child=sgd_momentum,
                                        parent=optimizer,
                                        values=['SGD'])
        adam_lr_cond = InCondition(child=adam_lr,
                                   parent=optimizer,
                                   values=['Adam'])
        adam_decay_cond = InCondition(child=adam_decay,
                                      parent=optimizer,
                                      values=['Adam'])

        cs.add_hyperparameters(
            [optimizer, sgd_lr, sgd_decay, sgd_momentum, adam_lr, adam_decay])
        cs.add_conditions([
            sgd_lr_cond, sgd_decay_cond, sgd_momentum_cond, adam_lr_cond,
            adam_decay_cond
        ])
Example #20
 def get_hyperparameter_search_space(dataset_properties=None):
     cs = ConfigurationSpace()
     BaseImageClassificationModel.set_training_space(cs)
     BaseImageClassificationModel.set_optimizer_space(cs)
     vgg_kernel_size = CategoricalHyperparameter('vgg_kernel_size', [3, 5], default_value=3)
     vgg_keep_prob = UniformFloatHyperparameter('vgg_keep_prob', 0, 0.99, default_value=0.5)
     vgg_block2_layer = UniformIntegerHyperparameter('vgg_block2_layer', 2, 3, default_value=2)
     vgg_block3_layer = UniformIntegerHyperparameter('vgg_block3_layer', 2, 5, default_value=3)
     vgg_block4_layer = UniformIntegerHyperparameter('vgg_block4_layer', 2, 5, default_value=3)
     vgg_block5_layer = UniformIntegerHyperparameter('vgg_block5_layer', 2, 5, default_value=3)
     cs.add_hyperparameters(
         [vgg_kernel_size, vgg_keep_prob, vgg_block2_layer, vgg_block3_layer, vgg_block4_layer, vgg_block5_layer])
     return cs
Example #21
 def get_hyperparameter_search_space():
     cs = ConfigurationSpace()
     BaseImageClassificationModel.set_training_space(cs)
     BaseImageClassificationModel.set_optimizer_space(cs)
     inceptionv3_block_a = UniformIntegerHyperparameter(
         'inceptionv3_block_a', 2, 4, default_value=3)
     inceptionv3_block_b = UniformIntegerHyperparameter(
         'inceptionv3_block_b', 3, 5, default_value=4)
     inceptionv3_block_c = UniformIntegerHyperparameter(
         'inceptionv3_block_c', 1, 3, default_value=2)
     cs.add_hyperparameters(
         [inceptionv3_block_a, inceptionv3_block_b, inceptionv3_block_c])
     return cs
Example #22
 def get_configspace(self, optimizer='smac'):
     if optimizer == 'smac':
         cs = ConfigurationSpace()
         cs.add_hyperparameters(
             [UniformFloatHyperparameter("x%s" % i, self.lb, self.ub) for i in range(1, 1 + 2)])
         return cs
     elif optimizer == 'gpflowopt':
         import gpflowopt
         domain = gpflowopt.domain.ContinuousParameter('x1', self.lb, self.ub) + \
                  gpflowopt.domain.ContinuousParameter('x2', self.lb, self.ub)
         return domain
     else:
         raise ValueError('Unknown optimizer %s when getting configspace' % optimizer)
Example #23
 def get_configspace(self, optimizer='smac'):
     if optimizer == 'smac':
         cs = ConfigurationSpace()
         x1 = UniformFloatHyperparameter("x1", -2.25, 2.5)
         x2 = UniformFloatHyperparameter("x2", -2.5, 1.75)
         cs.add_hyperparameters([x1, x2])
         return cs
     elif optimizer == 'gpflowopt':
         import gpflowopt
         domain = gpflowopt.domain.ContinuousParameter('x1', -2.25, 2.5) + \
                  gpflowopt.domain.ContinuousParameter('x2', -2.5, 1.75)
         return domain
     else:
         raise ValueError('Unknown optimizer %s when getting configspace' % optimizer)
Example #24
def test_remove_inactive_parameter():
    configuration_space = ConfigurationSpace(seed=1)
    hp1 = CategoricalHyperparameter("hp1", choices=[0, 1])
    hp2 = CategoricalHyperparameter("hp2", choices=['a'])
    hp3 = UniformIntegerHyperparameter("hp3",
                                       lower=0,
                                       upper=5,
                                       default_value=5)
    configuration_space.add_hyperparameters([hp1, hp2, hp3])

    # If hp1 = 0, then don't allow hp2
    not_condition = NotEqualsCondition(hp2, hp1, 0)
    configuration_space.add_condition(not_condition)

    allowed_cfg = Configuration(configuration_space, {
        'hp1': 1,
        'hp2': 'a',
        'hp3': 5
    })
    not_allowed = {'hp1': 0, 'hp2': 'a', 'hp3': 5}

    with pytest.raises(ValueError):
        Configuration(configuration_space, not_allowed)

    # No inactive hp - case: config is CS.configuration
    transformed = AbstractBenchmark._check_and_cast_configuration(
        allowed_cfg, configuration_space)
    assert transformed.get_dictionary() == {'hp1': 1, 'hp2': 'a', 'hp3': 5}

    # No inactive hp - case: config is dict
    transformed = AbstractBenchmark._check_and_cast_configuration(
        allowed_cfg.get_dictionary(), configuration_space)
    assert transformed.get_dictionary() == {'hp1': 1, 'hp2': 'a', 'hp3': 5}

    # Remove inactive: - case: config is CS.configuration
    not_allowed_cs = Configuration(configuration_space, {
        'hp1': 0,
        'hp2': 'a',
        'hp3': 5
    },
                                   allow_inactive_with_values=True)
    transformed = AbstractBenchmark._check_and_cast_configuration(
        not_allowed_cs, configuration_space)
    assert transformed.get_dictionary() == {'hp1': 0, 'hp3': 5}

    # Remove inactive: - case: config is dict
    transformed = AbstractBenchmark._check_and_cast_configuration(
        not_allowed, configuration_space)
    assert transformed.get_dictionary() == {'hp1': 0, 'hp3': 5}
Example #25
 def get_hyperparameter_search_space(dataset_properties=None):
     # TODO add hyperparameter to gbdt binning
     cs = ConfigurationSpace()
     # Placeholder: the original snippet constructed CategoricalHyperparameter()
     # with no arguments, which raises a TypeError; the name and choices below
     # are illustrative only.
     binning_method = CategoricalHyperparameter('binning_method',
                                                ['uniform', 'quantile'],
                                                default_value='uniform')
     # shrinkage = UniformFloatHyperparameter(
     #     name="shrinkage", lower=0.0, upper=1.0, default_value=0.5
     # )
     # n_components = UniformIntegerHyperparameter(
     #     name="n_components", lower=1, upper=29, default_value=10
     # )
     # tol = UniformFloatHyperparameter(
     #     name="tol", lower=0.0001, upper=1, default_value=0.0001
     # )
     cs.add_hyperparameters([binning_method])
     return cs
Example #26
class SVMSpace(ParamSpace):
    def __init__(self):
        super().__init__()
        self.name = "SVM"
        self.model = LinearSVC
        self.is_deterministic = False
        self.configuration_space = ConfigurationSpace()
        self.configuration_space.add_hyperparameters([
            UniformFloatHyperparameter("tol", 1e-5, 1e-1, default_value=1e-4, log=True),
            UniformFloatHyperparameter("C", 0.03125, 32768, log=True, default_value=1.0)
        ])

    def _initialize_algorithm(self, random_state=None, **config):
        return self.model(penalty="l2", loss="squared_hinge", dual=False, multi_class="ovr", fit_intercept=True,
                          intercept_scaling=1, **config)
Example #27
 def get_hyperparameter_search_space():
     cs = ConfigurationSpace()
     n_estimators = UniformIntegerHyperparameter("n_estimators",
                                                 100,
                                                 1000,
                                                 q=50,
                                                 default_value=500)
     num_leaves = UniformIntegerHyperparameter("num_leaves",
                                               31,
                                               2047,
                                               default_value=128)
     max_depth = UnParametrizedHyperparameter('max_depth', 15)
     learning_rate = UniformFloatHyperparameter("learning_rate",
                                                1e-3,
                                                0.9,
                                                log=True,
                                                default_value=0.1)
     min_child_weight = UniformFloatHyperparameter("min_child_weight",
                                                   0,
                                                   10,
                                                   q=0.1,
                                                   default_value=1)
     subsample = UniformFloatHyperparameter("subsample",
                                            0.1,
                                            1,
                                            q=0.1,
                                            default_value=1)
     colsample_bytree = UniformFloatHyperparameter("colsample_bytree",
                                                   0.1,
                                                   1,
                                                   q=0.1,
                                                   default_value=1)
     reg_alpha = UniformFloatHyperparameter("reg_alpha",
                                            0,
                                            10,
                                            q=0.1,
                                            default_value=0)
     reg_lambda = UniformFloatHyperparameter("reg_lambda",
                                             1,
                                             10,
                                             q=0.1,
                                             default_value=1)
     cs.add_hyperparameters([
         n_estimators, num_leaves, max_depth, learning_rate,
         min_child_weight, subsample, colsample_bytree, reg_alpha,
         reg_lambda
     ])
     return cs
Example #28
    def test_keys(self):
        # A regression test to make sure issue #49 no longer pops up. Iterating
        # over the configuration in the for loop should not raise a KeyError
        # when a child hyperparameter is inactive.
        cs = ConfigurationSpace()
        shrinkage = CategoricalHyperparameter(
            "shrinkage", ["None", "auto", "manual"], default_value="None",
        )
        shrinkage_factor = UniformFloatHyperparameter(
            "shrinkage_factor", 0., 1., 0.5,
        )
        cs.add_hyperparameters([shrinkage, shrinkage_factor])

        cs.add_condition(EqualsCondition(shrinkage_factor, shrinkage, "manual"))

        for i in range(10):
            config = cs.sample_configuration()
            {hp_name: config[hp_name] for hp_name in config if config[hp_name] is not None}
Example #29
class DecisionTreeSpace(ParamSpace):
    def __init__(self):
        super().__init__()
        self.name = "DecisionTree"
        self.model = DecisionTreeClassifier
        self.is_deterministic = False
        self.configuration_space = ConfigurationSpace()
        self.configuration_space.add_hyperparameters([
            CategoricalHyperparameter("criterion", ["gini", "entropy"], default_value="gini"),
            UniformIntegerHyperparameter('max_depth', 1, 20, default_value=20),
            UniformIntegerHyperparameter("min_samples_split", 2, 20, default_value=2),
            UniformIntegerHyperparameter("min_samples_leaf", 1, 20, default_value=1)
        ])

    def _initialize_algorithm(self, random_state=None, **config):
        return self.model(min_weight_fraction_leaf=0, max_features=1.0, max_leaf_nodes=None,
                          min_impurity_decrease=0.0, random_state=random_state,
                          **config)
Example #30
class GradientBoostingSpace(ParamSpace):
    def __init__(self):
        super().__init__()
        self.name = "GBM"
        self.model = LGBMClassifier
        self.is_deterministic = True
        self.configuration_space = ConfigurationSpace()
        self.configuration_space.add_hyperparameters([
            UniformIntegerHyperparameter("num_leaves", 4, 64, default_value=32),
            UniformIntegerHyperparameter("min_child_samples", 1, 100, default_value=20),
            UniformIntegerHyperparameter("max_depth", 3, 12, default_value=12),
            UniformFloatHyperparameter("reg_alpha", 0, 1, default_value=0),
            UniformFloatHyperparameter("reg_lambda", 0, 1, default_value=0),
            CategoricalHyperparameter('boosting_type', choices=["gbdt", "dart", "goss"])
        ])

    def _initialize_algorithm(self, random_state=None, **config):
        return self.model(n_estimators=100, verbose=-1, n_jobs=-1, random_state=random_state, **config)
Example #31
class RandomForestSpace(ParamSpace):
    def __init__(self):
        super().__init__()
        self.name = "RandomForest"
        self.model = LGBMClassifier
        self.is_deterministic = False
        self.configuration_space = ConfigurationSpace()
        self.configuration_space.add_hyperparameters([
            UniformFloatHyperparameter("colsample_bytree", 0.20, 0.80, default_value=0.70),
            UniformFloatHyperparameter("subsample", 0.20, 0.80, default_value=0.66),
            UniformIntegerHyperparameter("num_leaves", 4, 64, default_value=32),
            UniformIntegerHyperparameter("min_child_samples", 1, 100, default_value=20),
            UniformIntegerHyperparameter("max_depth", 4, 12, default_value=12),
        ])

    def _initialize_algorithm(self, random_state=None, **config):
        return self.model(n_estimators=100, subsample_freq=1, boosting_type="rf", verbose=-1, n_jobs=-1,
                          random_state=random_state, **config)
Example #32
    def test_deactivate_inactive_hyperparameters(self):
        diamond = ConfigurationSpace()
        head = CategoricalHyperparameter('head', [0, 1])
        left = CategoricalHyperparameter('left', [0, 1])
        right = CategoricalHyperparameter('right', [0, 1])
        bottom = CategoricalHyperparameter('bottom', [0, 1])
        diamond.add_hyperparameters([head, left, right, bottom])
        diamond.add_condition(EqualsCondition(left, head, 0))
        diamond.add_condition(EqualsCondition(right, head, 0))
        diamond.add_condition(AndConjunction(EqualsCondition(bottom, left, 0),
                                             EqualsCondition(bottom, right, 0)))

        c = deactivate_inactive_hyperparameters({'head': 0, 'left': 0,
                                                 'right': 0, 'bottom': 0},
                                                 diamond)
        diamond._check_configuration_rigorous(c)

        c = deactivate_inactive_hyperparameters({'head': 1, 'left': 0,
                                                 'right': 0, 'bottom': 0},
                                                diamond)
        diamond._check_configuration_rigorous(c)

        c = deactivate_inactive_hyperparameters({'head': 0, 'left': 1,
                                                 'right': 0, 'bottom': 0},
                                                diamond)
        diamond._check_configuration_rigorous(c)

        diamond = ConfigurationSpace()
        head = CategoricalHyperparameter('head', [0, 1])
        left = CategoricalHyperparameter('left', [0, 1])
        right = CategoricalHyperparameter('right', [0, 1])
        bottom = CategoricalHyperparameter('bottom', [0, 1])
        diamond.add_hyperparameters([head, left, right, bottom])
        diamond.add_condition(EqualsCondition(left, head, 0))
        diamond.add_condition(EqualsCondition(right, head, 0))
        diamond.add_condition(OrConjunction(EqualsCondition(bottom, left, 0),
                                            EqualsCondition(bottom, right, 0)))

        c = deactivate_inactive_hyperparameters({'head': 0, 'left': 0,
                                                 'right': 0, 'bottom': 0},
                                                diamond)
        diamond._check_configuration_rigorous(c)

        c = deactivate_inactive_hyperparameters({'head': 1, 'left': 1,
                                                 'right': 0, 'bottom': 0},
                                                diamond)
        diamond._check_configuration_rigorous(c)

        c = deactivate_inactive_hyperparameters({'head': 0, 'left': 1,
                                                 'right': 0, 'bottom': 0},
                                                diamond)
        diamond._check_configuration_rigorous(c)

        plain = ConfigurationSpace()
        a = UniformIntegerHyperparameter('a', 0, 10)
        b = UniformIntegerHyperparameter('b', 0, 10)
        plain.add_hyperparameters([a, b])
        c = deactivate_inactive_hyperparameters({'a': 5, 'b': 6}, plain)
        plain.check_configuration(c)
Example #33
    def test_deactivate_inactive_hyperparameters(self):
        diamond = ConfigurationSpace()
        head = CategoricalHyperparameter('head', [0, 1])
        left = CategoricalHyperparameter('left', [0, 1])
        right = CategoricalHyperparameter('right', [0, 1])
        bottom = CategoricalHyperparameter('bottom', [0, 1])
        diamond.add_hyperparameters([head, left, right, bottom])
        diamond.add_condition(EqualsCondition(left, head, 0))
        diamond.add_condition(EqualsCondition(right, head, 0))
        diamond.add_condition(AndConjunction(EqualsCondition(bottom, left, 0),
                                             EqualsCondition(bottom, right, 0)))

        c = deactivate_inactive_hyperparameters({'head': 0, 'left': 0,
                                                 'right': 0, 'bottom': 0},
                                                diamond)
        diamond._check_configuration_rigorous(c)

        c = deactivate_inactive_hyperparameters({'head': 1, 'left': 0,
                                                 'right': 0, 'bottom': 0},
                                                diamond)
        diamond._check_configuration_rigorous(c)

        c = deactivate_inactive_hyperparameters({'head': 0, 'left': 1,
                                                 'right': 0, 'bottom': 0},
                                                diamond)
        diamond._check_configuration_rigorous(c)

        diamond = ConfigurationSpace()
        head = CategoricalHyperparameter('head', [0, 1])
        left = CategoricalHyperparameter('left', [0, 1])
        right = CategoricalHyperparameter('right', [0, 1])
        bottom = CategoricalHyperparameter('bottom', [0, 1])
        diamond.add_hyperparameters([head, left, right, bottom])
        diamond.add_condition(EqualsCondition(left, head, 0))
        diamond.add_condition(EqualsCondition(right, head, 0))
        diamond.add_condition(OrConjunction(EqualsCondition(bottom, left, 0),
                                            EqualsCondition(bottom, right, 0)))

        c = deactivate_inactive_hyperparameters({'head': 0, 'left': 0,
                                                 'right': 0, 'bottom': 0},
                                                diamond)
        diamond._check_configuration_rigorous(c)

        c = deactivate_inactive_hyperparameters({'head': 1, 'left': 1,
                                                 'right': 0, 'bottom': 0},
                                                diamond)
        diamond._check_configuration_rigorous(c)

        c = deactivate_inactive_hyperparameters({'head': 0, 'left': 1,
                                                 'right': 0, 'bottom': 0},
                                                diamond)
        diamond._check_configuration_rigorous(c)

        plain = ConfigurationSpace()
        a = UniformIntegerHyperparameter('a', 0, 10)
        b = UniformIntegerHyperparameter('b', 0, 10)
        plain.add_hyperparameters([a, b])
        c = deactivate_inactive_hyperparameters({'a': 5, 'b': 6}, plain)
        plain.check_configuration(c)