Example #1
    def test_sobol(self):
        cs = ConfigurationSpace()
        for i in range(40):
            cs.add_hyperparameter(UniformFloatHyperparameter('x%d' % (i + 1), 0, 1))

        sobol_kwargs = dict(
            rng=np.random.RandomState(1),
            traj_logger=unittest.mock.Mock(),
            ta_run_limit=1000,
            configs=None,
            n_configs_x_params=None,
            max_config_fracs=0.25,
            init_budget=1,
        )
        SobolDesign(
            cs=cs,
            **sobol_kwargs
        ).select_configurations()

        cs.add_hyperparameter(UniformFloatHyperparameter('x41', 0, 1))
        with self.assertRaisesRegex(
                Exception,
                "('NoneType' object is not iterable)|(cannot unpack non-iterable NoneType object)",
        ):
            SobolDesign(
                cs=cs,
                **sobol_kwargs
            ).select_configurations()
Example #2
        class Dummy:
            configuration_space = ConfigurationSpace(seed=1)
            hp1 = UniformFloatHyperparameter("hp1",
                                             lower=0.0,
                                             upper=0.5,
                                             default_value=0.5)
            hp2 = UniformFloatHyperparameter("hp2",
                                             lower=1.0,
                                             upper=1.5,
                                             default_value=1.5)
            hp3 = UniformFloatHyperparameter("hp3",
                                             lower=2.0,
                                             upper=2.5,
                                             default_value=2.5)
            configuration_space.add_hyperparameters([hp1, hp2, hp3])

            _check_and_cast_configuration = AbstractBenchmark._check_and_cast_configuration
            _check_and_cast_fidelity = AbstractBenchmark._check_and_cast_fidelity

            fidelity_space = ConfigurationSpace(seed=1)
            fidelity_space.add_hyperparameter(
                UniformFloatHyperparameter('fidelity1',
                                           lower=0.,
                                           upper=1.,
                                           default_value=1.))
Example #3
    def test_sobol(self):
        cs = ConfigurationSpace()
        hyperparameters = [
            UniformFloatHyperparameter('x%d' % (i + 1), 0, 1)
            for i in range(21201)
        ]
        cs.add_hyperparameters(hyperparameters)

        sobol_kwargs = dict(
            rng=np.random.RandomState(1),
            traj_logger=unittest.mock.Mock(),
            ta_run_limit=1000,
            configs=None,
            n_configs_x_params=None,
            max_config_fracs=0.25,
            init_budget=1,
        )
        SobolDesign(cs=cs, **sobol_kwargs).select_configurations()

        cs.add_hyperparameter(UniformFloatHyperparameter('x21202', 0, 1))
        with self.assertRaisesRegex(
                Exception,
                "Maximum supported dimensionality is 21201.",
        ):
            SobolDesign(cs=cs, **sobol_kwargs).select_configurations()
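The 21201 in the expected error message is the dimensionality cap of the underlying Sobol sequence: scipy.stats.qmc.Sobol ships direction numbers for at most 21201 dimensions, so adding the 21202nd hyperparameter must trigger the failure.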
Example #4
    def _get_cfg(self):
        cs = ConfigurationSpace()
        max_depth = UniformIntegerHyperparameter("max_depth",
                                                 3,
                                                 16,
                                                 default_value=3)
        min_child_weight = UniformIntegerHyperparameter("min_child_weight",
                                                        1,
                                                        15,
                                                        default_value=1)
        gamma = UniformFloatHyperparameter("gamma",
                                           0.0,
                                           0.4,
                                           default_value=0.0)
        subsample = UniformFloatHyperparameter("subsample",
                                               0.6,
                                               0.9,
                                               default_value=0.6)
        colsample_bytree = UniformFloatHyperparameter("colsample_bytree",
                                                      0.6,
                                                      0.9,
                                                      default_value=0.6)

        cs.add_hyperparameters(
            [max_depth, min_child_weight, gamma, subsample, colsample_bytree])
        return cs
Example #5
    def test_choose_next(self):
        configspace = ConfigurationSpace()
        configspace.add_hyperparameter(UniformFloatHyperparameter('a', 0, 1))
        configspace.add_hyperparameter(UniformFloatHyperparameter('b', 0, 1))

        dataset_name = 'foo'
        func_eval_time_limit = 15
        total_walltime_limit = 15
        memory_limit = 3000

        auto = AutoMLSMBO(None, dataset_name, None, func_eval_time_limit,
                          total_walltime_limit, memory_limit, None)
        auto.config_space = configspace
        scenario = Scenario({'cs': configspace,
                             'cutoff-time': func_eval_time_limit,
                             'wallclock-limit': total_walltime_limit,
                             'memory-limit': memory_limit,
                             'run-obj': 'quality'})
        smac = SMAC(scenario)

        self.assertRaisesRegex(ValueError, 'Cannot use SMBO algorithm on '
                                           'empty runhistory',
                               auto.choose_next, smac)

        runhistory = smac.solver.runhistory
        runhistory.add(config=Configuration(configspace,
                                            values={'a': 0.1, 'b': 0.2}),
                       cost=0.5, time=0.5, status=StatusType.SUCCESS)

        auto.choose_next(smac)
Example #6
 def __init__(self):
     super().__init__()
     self.name = "SVM"
     self.model = LinearSVC
     self.is_deterministic = False
     self.configuration_space = ConfigurationSpace()
     self.configuration_space.add_hyperparameters([
         UniformFloatHyperparameter("tol", 1e-5, 1e-1, default_value=1e-4, log=True),
         UniformFloatHyperparameter("C", 0.03125, 32768, log=True, default_value=1.0)
     ])
Example #7
    def get_configspace(self, optimizer='smac'):
        if optimizer == 'smac':
            cs = ConfigurationSpace()

            penalty = CategoricalHyperparameter(
                "penalty", ["l1", "l2"], default_value="l2")
            loss = CategoricalHyperparameter(
                "loss", ["hinge", "squared_hinge"], default_value="squared_hinge")
            dual = CategoricalHyperparameter("dual", ['True', 'False'], default_value='True')
            # This is set ad-hoc
            tol = UniformFloatHyperparameter(
                "tol", 1e-5, 1e-1, default_value=1e-4, log=True)
            C = UniformFloatHyperparameter(
                "C", 0.03125, 32768, log=True, default_value=1.0)
            multi_class = Constant("multi_class", "ovr")
            # These are set ad-hoc
            fit_intercept = Constant("fit_intercept", "True")
            intercept_scaling = Constant("intercept_scaling", 1)
            cs.add_hyperparameters([penalty, loss, dual, tol, C, multi_class,
                                    fit_intercept, intercept_scaling])

            penalty_and_loss = ForbiddenAndConjunction(
                ForbiddenEqualsClause(penalty, "l1"),
                ForbiddenEqualsClause(loss, "hinge")
            )
            constant_penalty_and_loss = ForbiddenAndConjunction(
                ForbiddenEqualsClause(dual, "False"),
                ForbiddenEqualsClause(penalty, "l2"),
                ForbiddenEqualsClause(loss, "hinge")
            )
            penalty_and_dual = ForbiddenAndConjunction(
                ForbiddenEqualsClause(dual, "True"),
                ForbiddenEqualsClause(penalty, "l1")
            )
            cs.add_forbidden_clause(penalty_and_loss)
            cs.add_forbidden_clause(constant_penalty_and_loss)
            cs.add_forbidden_clause(penalty_and_dual)
            return cs
        elif optimizer == 'tpe':
            from hyperopt import hp
            space = {'penalty': hp.choice('liblinear_combination',
                                          [{'penalty': "l1", 'loss': "squared_hinge", 'dual': "False"},
                                           {'penalty': "l2", 'loss': "hinge", 'dual': "True"},
                                           {'penalty': "l2", 'loss': "squared_hinge", 'dual': "True"},
                                           {'penalty': "l2", 'loss': "squared_hinge", 'dual': "False"}]),
                     'loss': None,
                     'dual': None,
                     'tol': hp.loguniform('liblinear_tol', np.log(1e-5), np.log(1e-1)),
                     'C': hp.loguniform('liblinear_C', np.log(0.03125), np.log(32768)),
                     'multi_class': hp.choice('liblinear_multi_class', ["ovr"]),
                     'fit_intercept': hp.choice('liblinear_fit_intercept', ["True"]),
                     'intercept_scaling': hp.choice('liblinear_intercept_scaling', [1])}
            return space
        else:
            raise ValueError('Unknown optimizer %s when getting configspace' % optimizer)
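A quick sanity check for the forbidden clauses above (a sketch; `benchmark` is a hypothetical stand-in for whatever object defines get_configspace):

    cs = benchmark.get_configspace('smac')
    for config in cs.sample_configuration(size=50):
        # The ForbiddenAndConjunctions guarantee these combinations never appear.
        assert not (config['penalty'] == 'l1' and config['loss'] == 'hinge')
        assert not (config['dual'] == 'True' and config['penalty'] == 'l1')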
Example #8
 def test_random_neighborhood_float(self):
     hp = UniformFloatHyperparameter('a', 1, 10)
     all_neighbors = self._test_get_one_exchange_neighbourhood(hp)
     all_neighbors = [neighbor['a'] for neighbor in all_neighbors]
     self.assertAlmostEqual(5.44, np.mean(all_neighbors), places=2)
     self.assertAlmostEqual(3.065, np.var(all_neighbors), places=2)
     hp = UniformFloatHyperparameter('a', 1, 10, log=True)
     all_neighbors = self._test_get_one_exchange_neighbourhood(hp)
     all_neighbors = [neighbor['a'] for neighbor in all_neighbors]
     # Default value is 3.16
     self.assertAlmostEqual(3.45, np.mean(all_neighbors), places=2)
     self.assertAlmostEqual(2.67, np.var(all_neighbors), places=2)
Example #9
    def set_optimizer_space(cs: ConfigurationSpace):
        '''
        Set hyperparameters for optimizers
        '''
        optimizer = CategoricalHyperparameter('optimizer', ['SGD', 'Adam'],
                                              default_value='Adam')
        sgd_lr = UniformFloatHyperparameter('sgd_lr',
                                            0.00001,
                                            0.1,
                                            default_value=0.005,
                                            log=True)  # log scale
        sgd_decay = UniformFloatHyperparameter('sgd_decay',
                                               0.0001,
                                               0.1,
                                               default_value=0.05,
                                               log=True)  # log scale
        sgd_momentum = UniformFloatHyperparameter('sgd_momentum',
                                                  0.3,
                                                  0.99,
                                                  default_value=0.9)
        adam_lr = UniformFloatHyperparameter('adam_lr',
                                             0.00001,
                                             0.1,
                                             default_value=0.005,
                                             log=True)  # log scale
        adam_decay = UniformFloatHyperparameter('adam_decay',
                                                0.0001,
                                                0.1,
                                                default_value=0.05,
                                                log=True)  # log scale

        sgd_lr_cond = InCondition(child=sgd_lr,
                                  parent=optimizer,
                                  values=['SGD'])
        sgd_decay_cond = InCondition(child=sgd_decay,
                                     parent=optimizer,
                                     values=['SGD'])
        sgd_momentum_cond = InCondition(child=sgd_momentum,
                                        parent=optimizer,
                                        values=['SGD'])
        adam_lr_cond = InCondition(child=adam_lr,
                                   parent=optimizer,
                                   values=['Adam'])
        adam_decay_cond = InCondition(child=adam_decay,
                                      parent=optimizer,
                                      values=['Adam'])

        cs.add_hyperparameters(
            [optimizer, sgd_lr, sgd_decay, sgd_momentum, adam_lr, adam_decay])
        cs.add_conditions([
            sgd_lr_cond, sgd_decay_cond, sgd_momentum_cond, adam_lr_cond,
            adam_decay_cond
        ])
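With these InConditions in place, the optimizer-specific hyperparameters are only active when 'optimizer' takes the matching value. A minimal check (a sketch; assumes set_optimizer_space is callable as shown, e.g. as a staticmethod):

    cs = ConfigurationSpace(seed=1)
    set_optimizer_space(cs)
    config = cs.sample_configuration()
    if config['optimizer'] == 'SGD':
        # The Adam hyperparameters are inactive and absent from the dictionary.
        assert 'adam_lr' not in config.get_dictionary()
    else:
        assert 'sgd_lr' not in config.get_dictionary()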
Example #10
 def __init__(self):
     super().__init__()
     self.name = "RandomForest"
     self.model = LGBMClassifier
     self.is_deterministic = False
     self.configuration_space = ConfigurationSpace()
     self.configuration_space.add_hyperparameters([
         UniformFloatHyperparameter("colsample_bytree", 0.20, 0.80, default_value=0.70),
         UniformFloatHyperparameter("subsample", 0.20, 0.80, default_value=0.66),
         UniformIntegerHyperparameter("num_leaves", 4, 64, default_value=32),
         UniformIntegerHyperparameter("min_child_samples", 1, 100, default_value=20),
         UniformIntegerHyperparameter("max_depth", 4, 12, default_value=12),
     ])
Example #11
 def get_configspace(self, optimizer='smac'):
     if optimizer == 'smac':
         cs = ConfigurationSpace()
         x1 = UniformFloatHyperparameter("x1", -2.25, 2.5)
         x2 = UniformFloatHyperparameter("x2", -2.5, 1.75)
         cs.add_hyperparameters([x1, x2])
         return cs
     elif optimizer == 'gpflowopt':
         import gpflowopt
         domain = gpflowopt.domain.ContinuousParameter('x1', -2.25, 2.5) + \
                  gpflowopt.domain.ContinuousParameter('x2', -2.5, 1.75)
         return domain
     else:
         raise ValueError('Unknown optimizer %s when getting configspace' % optimizer)
Example #12
 def __init__(self):
     super().__init__()
     self.name = "GBM"
     self.model = LGBMClassifier
     self.is_deterministic = True
     self.configuration_space = ConfigurationSpace()
     self.configuration_space.add_hyperparameters([
         UniformIntegerHyperparameter("num_leaves", 4, 64, default_value=32),
         UniformIntegerHyperparameter("min_child_samples", 1, 100, default_value=20),
         UniformIntegerHyperparameter("max_depth", 3, 12, default_value=12),
         UniformFloatHyperparameter("reg_alpha", 0, 1, default_value=0),
         UniformFloatHyperparameter("reg_lambda", 0, 1, default_value=0),
         CategoricalHyperparameter('boosting_type', choices=["gbdt", "dart", "goss"])
     ])
Example #13
        class Dummy:
            configuration_space = ConfigurationSpace(seed=1)
            flt = UniformFloatHyperparameter("flt", lower=0.0, upper=1.0)
            cat = CategoricalHyperparameter("cat", choices=(1, "a"))
            itg = UniformIntegerHyperparameter("itg", lower=0, upper=10)
            configuration_space.add_hyperparameters([flt, cat, itg])

            fidelity_space = ConfigurationSpace(seed=1)
            f1 = UniformFloatHyperparameter("f_flt", lower=0.0, upper=1.0)
            f2 = CategoricalHyperparameter("f_cat", choices=(1, "a"))
            f3 = UniformIntegerHyperparameter("f_itg", lower=0, upper=10)
            fidelity_space.add_hyperparameters([f1, f2, f3])

            def get_fidelity_space(self):
                return self.fidelity_space
Example #14
 def get_configspace(self, optimizer='smac'):
     if optimizer == 'smac':
         cs = ConfigurationSpace()
         for i in range(self.dim):
             xi = UniformFloatHyperparameter("x%d" % i, 0, 1)
             cs.add_hyperparameter(xi)
         return cs
     elif optimizer == 'tpe':
         from hyperopt import hp
         space = {
             'x%d' % i: hp.uniform('hp_x%d' % i, 0, 1)
             for i in range(self.dim)
         }
         return space
     elif optimizer == 'gpflowopt':
          import gpflowopt
          # Note: unlike the 'smac' and 'tpe' branches above, this branch
          # hardcodes six parameters, so it assumes self.dim == 6.
         domain = (gpflowopt.domain.ContinuousParameter('x0', 0, 1) +
                   gpflowopt.domain.ContinuousParameter('x1', 0, 1) +
                   gpflowopt.domain.ContinuousParameter('x2', 0, 1) +
                   gpflowopt.domain.ContinuousParameter('x3', 0, 1) +
                   gpflowopt.domain.ContinuousParameter('x4', 0, 1) +
                   gpflowopt.domain.ContinuousParameter('x5', 0, 1))
         return domain
     else:
         raise ValueError('Unknown optimizer %s when getting configspace' %
                          optimizer)
Example #15
def get_config_space_from_dict(space_dict: dict):
    cs = ConfigurationSpace()
    params_dict = space_dict['parameters']
    for key in params_dict:
        param_dict = params_dict[key]
        param_type = param_dict['type']
        if param_type in ['float', 'int']:
            bound = param_dict['bound']
            optional_args = dict()
            # 'default', 'log' and 'q' are independent options, so each gets
            # its own check (an elif chain would silently drop the others).
            if 'default' in param_dict:
                optional_args['default_value'] = param_dict['default']
            if 'log' in param_dict:
                optional_args['log'] = parse_bool(param_dict['log'])
            if 'q' in param_dict:
                optional_args['q'] = param_dict['q']

            if param_type == 'float':
                param = UniformFloatHyperparameter(key, bound[0], bound[1], **optional_args)
            else:
                param = UniformIntegerHyperparameter(key, bound[0], bound[1], **optional_args)

        elif param_type == 'cat':
            choices = param_dict['choice']
            optional_args = dict()
            if 'default' in param_dict:
                optional_args['default_value'] = param_dict['default']
            param = CategoricalHyperparameter(key, choices, **optional_args)

        else:
            raise ValueError("Parameter type %s not supported!" % param_type)

        cs.add_hyperparameter(param)
    return cs
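For reference, an input that exercises both branches (a sketch; the schema, including the 'parameters', 'bound' and 'choice' keys, is inferred from the code above):

    space_dict = {
        'parameters': {
            'learning_rate': {'type': 'float', 'bound': [1e-4, 1e-1],
                              'default': 1e-2, 'log': 'True'},
            'n_estimators': {'type': 'int', 'bound': [50, 500]},
            'booster': {'type': 'cat', 'choice': ['gbtree', 'dart'],
                        'default': 'gbtree'},
        }
    }
    cs = get_config_space_from_dict(space_dict)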
Example #16
    def _get_cfg(self):
        n_features = self.X_train.shape[1]

        cs = ConfigurationSpace()
        max_features = UniformIntegerHyperparameter("max_features",
                                                    1,
                                                    n_features,
                                                    default_value=1)
        min_samples_split = UniformIntegerHyperparameter("min_samples_split",
                                                         2,
                                                         50,
                                                         default_value=2)
        min_samples_leaf = UniformIntegerHyperparameter("min_samples_leaf",
                                                        1,
                                                        50,
                                                        default_value=1)
        min_weight_fraction_leaf = UniformFloatHyperparameter(
            "min_weight_fraction_leaf", 0.0, 0.5, default_value=0.0)
        max_leaf_nodes = UniformIntegerHyperparameter("max_leaf_nodes",
                                                      10,
                                                      1000,
                                                      default_value=100)

        cs.add_hyperparameters([
            max_features, min_samples_split, min_samples_leaf,
            min_weight_fraction_leaf, max_leaf_nodes
        ])
        return cs
Example #17
    def test_impute_inactive_hyperparameters(self):
        cs = smac.configspace.ConfigurationSpace()
        a = cs.add_hyperparameter(CategoricalHyperparameter('a', [0, 1]))
        b = cs.add_hyperparameter(CategoricalHyperparameter('b', [0, 1]))
        c = cs.add_hyperparameter(UniformFloatHyperparameter('c', 0, 1))
        cs.add_condition(EqualsCondition(b, a, 1))
        cs.add_condition(EqualsCondition(c, a, 0))
        cs.seed(1)

        configs = cs.sample_configuration(size=100)
        config_array = smac.configspace.convert_configurations_to_array(
            configs)
        for line in config_array:
            if line[0] == 0:
                self.assertTrue(np.isnan(line[1]))
            elif line[0] == 1:
                self.assertTrue(np.isnan(line[2]))

        model = RandomForestWithInstances(
            configspace=cs,
            types=np.zeros((3, ), dtype=np.uint),
            bounds=list(map(lambda x: (0, 1), range(10))),
            seed=1,
        )
        config_array = model._impute_inactive(config_array)
        for line in config_array:
            if line[0] == 0:
                self.assertEqual(line[1], 2)
            elif line[0] == 1:
                self.assertEqual(line[2], -1)
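The assertions pin down the imputation rule: an inactive categorical is replaced by one past its largest index (here 2 for the two-valued 'b'), and an inactive continuous hyperparameter by -1, i.e. a value outside its [0, 1] range, so the model can treat "inactive" as a distinct value.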
Example #18
def quniform(label: str, low: float, high: float, q: float = None, default=None):
    if q is None:
        q = float_gcd(low, high)
    kwargs = {}
    # Compare against None so that a legitimate default of 0 is not dropped.
    if default is not None:
        kwargs.update({'default_value': default})
    return UniformFloatHyperparameter(label, low, high, q=q, **kwargs)
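quniform relies on a float_gcd helper that is not shown here. A minimal sketch of one common implementation (an assumption, not necessarily the original: Euclid's algorithm with a floating-point tolerance):

    def float_gcd(a: float, b: float, rtol: float = 1e-05, atol: float = 1e-08) -> float:
        # Assumed helper: Euclidean algorithm, stopping once the remainder is
        # within tolerance of zero, e.g. float_gcd(0.6, 0.9) ~= 0.3.
        t = min(abs(a), abs(b))
        while abs(b) > rtol * t + atol:
            a, b = b, a % b
        return a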
Example #19
    def setUp(self):
        def get_uniform_param(name: str):
            return UniformFloatHyperparameter(name, 0, 1)

        def get_constant_param(name: str):
            return Constant(name, 0.)

        def get_categorical_param(name: str):
            return CategoricalHyperparameter(name, choices=["a", "b", "c"])

        def get_ordinal_param(name: str):
            return OrdinalHyperparameter(name, [8, 6, 4, 2])

        get_params = [
            get_uniform_param, get_constant_param, get_categorical_param,
            get_ordinal_param
        ]

        self.cs = ConfigurationSpace()
        for j, get_param in enumerate(get_params):
            param_name = f"x{j}"
            self.cs.add_hyperparameter(get_param(param_name))

        param_constrained = CategoricalHyperparameter("constrained",
                                                      choices=["a", "b", "c"])
        self.cs.add_hyperparameter(param_constrained)
        self.cs.add_forbidden_clause(
            ForbiddenEqualsClause(param_constrained, "b"))

        for i in range(5):
            self.cs.add_hyperparameter(
                UniformFloatHyperparameter('x%d' % (i + len(get_params)), 0,
                                           1))
Example #20
    def test_uniformfloat_transform(self):
        """This checks whether a value sampled through the configuration
        space (it does not happend when the variable is sampled alone) stays
        equal when it is serialized via JSON and the deserialized again."""

        cs = ConfigurationSpace()
        a = cs.add_hyperparameter(UniformFloatHyperparameter('a', -5, 10))
        b = cs.add_hyperparameter(
            NormalFloatHyperparameter('b', 1, 2, log=True))
        for i in range(100):
            config = cs.sample_configuration()
            value = OrderedDict(sorted(config.get_dictionary().items()))
            string = json.dumps(value)
            saved_value = json.loads(string)
            saved_value = OrderedDict(sorted(byteify(saved_value).items()))
            self.assertEqual(repr(value), repr(saved_value))

        # Next, test whether the truncation also works when initializing the
        # Configuration with a dictionary
        for i in range(100):
            rs = np.random.RandomState(1)
            value_a = a.sample(rs)
            value_b = b.sample(rs)
            values_dict = {'a': value_a, 'b': value_b}
            config = Configuration(cs, values=values_dict)
            string = json.dumps(config.get_dictionary())
            saved_value = json.loads(string)
            saved_value = byteify(saved_value)
            self.assertEqual(values_dict, saved_value)
Example #21
    def test_keys(self):
        # A regression test to make sure issue #49 no longer pops up. When
        # iterating over the configuration in the for loop, it should not raise
        # a KeyError if a child hyperparameter is inactive.
        cs = ConfigurationSpace()
        shrinkage = CategoricalHyperparameter(
            "shrinkage",
            ["None", "auto", "manual"],
            default_value="None",
        )
        shrinkage_factor = UniformFloatHyperparameter(
            "shrinkage_factor",
            0.,
            1.,
            0.5,
        )
        cs.add_hyperparameters([shrinkage, shrinkage_factor])

        cs.add_condition(EqualsCondition(shrinkage_factor, shrinkage,
                                         "manual"))

        for i in range(10):
            config = cs.sample_configuration()
            {
                hp_name: config[hp_name]
                for hp_name in config if config[hp_name] is not None
            }
Example #22
 def get_hyperparameter_search_space(dataset_properties=None):
     admissible_distortion = UniformFloatHyperparameter("eps",
                                                        0.01,
                                                        1.0,
                                                        default_value=0.5)
     cs = ConfigurationSpace()
     cs.add_hyperparameter(admissible_distortion)
     return cs
Example #23
 def setUp(self):
     self.cs = ConfigurationSpace()
     self.cs.add_hyperparameter(UniformFloatHyperparameter(
         name="x1", lower=1, upper=10, default_value=2)
     )
     self.scenario = Scenario({'cs': self.cs, 'run_obj': 'quality',
                               'output_dir': ''})
     self.ta = ExecuteTAFuncDict(lambda x: x["x1"]**2)
Example #24
    def test_choose_next(self):
        configspace = ConfigurationSpace()
        configspace.add_hyperparameter(UniformFloatHyperparameter('a', 0, 1))
        configspace.add_hyperparameter(UniformFloatHyperparameter('b', 0, 1))

        dataset_name = 'foo'
        func_eval_time_limit = 15
        total_walltime_limit = 15
        memory_limit = 3072

        auto = AutoMLSMBO(config_space=None,
                          dataset_name=dataset_name,
                          backend=None,
                          func_eval_time_limit=func_eval_time_limit,
                          total_walltime_limit=total_walltime_limit,
                          memory_limit=memory_limit,
                          watcher=None,
                          metric=accuracy)
        auto.config_space = configspace
        scenario = Scenario({
            'cs': configspace,
            'cutoff_time': func_eval_time_limit,
            'wallclock_limit': total_walltime_limit,
            'memory_limit': memory_limit,
            'run_obj': 'quality',
        })
        smac = SMAC(scenario)

        self.assertRaisesRegex(
            ValueError, 'Cannot use SMBO algorithm on empty runhistory',
            auto.choose_next, smac)

        config = Configuration(configspace, values={'a': 0.1, 'b': 0.2})
        # TODO make sure the incumbent is always set?
        smac.solver.incumbent = config
        runhistory = smac.solver.runhistory
        runhistory.add(config=config,
                       cost=0.5,
                       time=0.5,
                       status=StatusType.SUCCESS)

        auto.choose_next(smac)
Example #25
 def test_estimate_size(self):
     cs = ConfigurationSpace()
     self.assertEqual(cs.estimate_size(), 0)
     cs.add_hyperparameter(Constant('constant', 0))
     self.assertEqual(cs.estimate_size(), 1)
     cs.add_hyperparameter(UniformIntegerHyperparameter('integer', 0, 5))
     self.assertEqual(cs.estimate_size(), 6)
     cs.add_hyperparameter(CategoricalHyperparameter('cat', [0, 1, 2]))
     self.assertEqual(cs.estimate_size(), 18)
     cs.add_hyperparameter(UniformFloatHyperparameter('float', 0, 1))
     self.assertTrue(np.isinf(cs.estimate_size()))
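estimate_size is the product of the per-hyperparameter value counts: the Constant contributes 1, the integer over [0, 5] contributes 6 (1 × 6 = 6), the three-way categorical brings it to 6 × 3 = 18, and a continuous float makes the space uncountable, hence inf.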
Example #26
 def set_training_space(cs: ConfigurationSpace):
     '''
     Set hyperparameters for training
     '''
     batch_size = CategoricalHyperparameter('batch_size', [16, 32],
                                            default_value=32)
     keep_prob = UniformFloatHyperparameter('keep_prob',
                                            0,
                                            0.99,
                                            default_value=0.5)
     cs.add_hyperparameters([batch_size, keep_prob])
Example #27
 def get_configspace(self, optimizer='smac'):
     if optimizer == 'smac':
         cs = ConfigurationSpace()
         cs.add_hyperparameters([
             UniformFloatHyperparameter("x%s" % i, self.lb, self.ub)
             for i in range(1, self.dim + 1)
         ])
         return cs
     else:
         raise ValueError('Unknown optimizer %s when getting configspace' %
                          optimizer)
Example #28
    def _get_cfg(self):
        n_features = self.X_train.shape[1]

        cs = ConfigurationSpace()
        max_depth = UniformIntegerHyperparameter("max_depth", 5, 16, default_value=5)
        min_samples_split = UniformIntegerHyperparameter("min_samples_split", 200, 1000, default_value=200)
        min_samples_leaf = UniformIntegerHyperparameter("min_samples_leaf", 30, 70, default_value=30)
        max_features = UniformIntegerHyperparameter("max_features", 1, n_features, default_value=1)
        subsample = UniformFloatHyperparameter("subsample", 0.6, 0.9, default_value=0.6)

        cs.add_hyperparameters([max_depth, min_samples_split, min_samples_leaf, max_features, subsample])
        return cs
Example #29
 def get_hyperparameter_search_space(dataset_properties=None):
     cs = ConfigurationSpace()
     BaseImageClassificationModel.set_training_space(cs)
     BaseImageClassificationModel.set_optimizer_space(cs)
     vgg_kernel_size = CategoricalHyperparameter('vgg_kernel_size', [3, 5], default_value=3)
     vgg_keep_prob = UniformFloatHyperparameter('vgg_keep_prob', 0, 0.99, default_value=0.5)
     vgg_block2_layer = UniformIntegerHyperparameter('vgg_block2_layer', 2, 3, default_value=2)
     vgg_block3_layer = UniformIntegerHyperparameter('vgg_block3_layer', 2, 5, default_value=3)
     vgg_block4_layer = UniformIntegerHyperparameter('vgg_block4_layer', 2, 5, default_value=3)
     vgg_block5_layer = UniformIntegerHyperparameter('vgg_block5_layer', 2, 5, default_value=3)
     cs.add_hyperparameters(
         [vgg_kernel_size, vgg_keep_prob, vgg_block2_layer, vgg_block3_layer, vgg_block4_layer, vgg_block5_layer])
     return cs
Example #30
 def get_configspace(self, optimizer='smac'):
     if optimizer == 'smac':
         cs = ConfigurationSpace()
         x1 = UniformFloatHyperparameter("x1", -5, 10)
         x2 = UniformFloatHyperparameter("x2", 0, 15)
         cs.add_hyperparameters([x1, x2])
         return cs
     elif optimizer == 'tpe':
         from hyperopt import hp
         space = {'x1': hp.uniform('hp_x1', -5, 10),
                  'x2': hp.uniform('hp_x2', 0, 15),
                  }
         return space
     elif optimizer == 'gpflowopt':
         import gpflowopt
         domain = (
             gpflowopt.domain.ContinuousParameter('x1', -5, 10) +
             gpflowopt.domain.ContinuousParameter('x2', 0, 15)
         )
         return domain
     else:
         raise ValueError('Unknown optimizer %s when getting configspace' % optimizer)