Example #1
 def __init__(self, dim=2, bounds=None,
              noise_std=0, random_state=None):
     self.dim = dim
     params = {'x%d' % i: (0, 10, 5) for i in range(1, 1+self.dim)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std, optimal_value=0, random_state=random_state)
Example #2
 def get_cs_bc():
     cs_bc = ConfigurationSpace()
     x0 = UniformFloatHyperparameter("x0", scale1[0], scale1[1])
     # x0 = UniformIntegerHyperparameter("x0", scale1[0], scale1[1])  # test int
     x1 = UniformFloatHyperparameter("x1", scale2[0], scale2[1])
     cs_bc.add_hyperparameters([x0, x1])
     return cs_bc
Example #3
def impute_default_values(configuration_space: ConfigurationSpace,
                          configs_array: np.ndarray) -> np.ndarray:
    """Impute inactive hyperparameters in configuration array with their default.

    Necessary to apply an EPM to the data.

    Parameters
    ----------
    configuration_space : ConfigurationSpace
        Configuration space containing the hyperparameters.

    configs_array : np.ndarray
        Array of configurations.

    Returns
    -------
    np.ndarray
        Array with configuration hyperparameters. Inactive values are imputed
        with their default value.
    """
    for hp in configuration_space.get_hyperparameters():
        default = hp.normalized_default_value
        idx = configuration_space.get_idx_by_hyperparameter_name(hp.name)
        nonfinite_mask = ~np.isfinite(configs_array[:, idx])
        configs_array[nonfinite_mask, idx] = default

    return configs_array
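A minimal usage sketch (not part of the original example; the two-hyperparameter space is illustrative): inactive hyperparameters appear as NaN in the vectorized configuration array and are overwritten in place with their normalized defaults.

import numpy as np
from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter

cs = ConfigurationSpace()
cs.add_hyperparameters([UniformFloatHyperparameter('x1', 0.0, 1.0),
                        UniformFloatHyperparameter('x2', 0.0, 1.0)])
arr = np.array([[0.3, np.nan],
                [np.nan, 0.7]])
# NaN entries (inactive hyperparameters) are replaced with the normalized
# default value of the corresponding hyperparameter (0.5 here).
imputed = impute_default_values(cs, arr)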
Example #4
    def get_configspace(self):
        hp_num = self._hp_cnt + self._delta
        if hp_num > self.hp_size:
            hp_num = self.hp_size

        hps = self.config_space.get_hyperparameters()
        cs = ConfigurationSpace()
        for _id in range(hp_num):
            _hp_id = self.importance_list[_id]
            for _hp in hps:
                if _hp.name == _hp_id:
                    cs.add_hyperparameter(_hp)

        history_list = list()
        if len(self.history_dict.keys()) > 0 and self._hp_cnt < self.hp_size:
            for _config in self.history_dict.keys():
                # Impute the default value for new hyperparameter.
                _config_dict = _config.get_dictionary().copy()
                for _idx in range(self._hp_cnt, hp_num):
                    new_hp = self.importance_list[_idx]
                    # print('hp_num=', self._hp_cnt, 'new hp is', new_hp)
                    _config_dict[new_hp] = self.defaults[new_hp]
                history_list.append((_config_dict, self.history_dict[_config]))
        if len(history_list) == 0:
            history_list = [(_config, self.history_dict[_config])
                            for _config in self.history_dict.keys()]
        return cs, history_list
Example #5
 def __init__(self, noise_std=0, random_state=None):
     lb, ub = -4.5, 4.5
     dim = 2
     params = {'x%d' % i: (lb, ub, (lb + ub)/2)
               for i in range(1, dim+1)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std, optimal_value=0, random_state=random_state)
Example #6
 def __init__(self, noise_std=0, random_state=None):
     params = {'x1': (-10, 0, -5), 'x2': (-6.5, 0, -3.25)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std,
                      optimal_value=-106.7645367,
                      optimal_point=[(-3.1302468, -1.5821422)],
                      random_state=random_state)
Example #7
 def __init__(self, noise_std=0, random_state=None):
     params = {'x%d' % i: (-1.25, 1.25, 1) for i in [1, 2]}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std,
                      optimal_value=-0.072,
                      optimal_point=[(0.84852813, -0.84852813), (-0.84852813, 0.84852813)],
                      random_state=random_state)
Example #8
 def __init__(self, noise_std=0, random_state=None):
     params = {'x1': (-5, 10, 0),
               'x2': (0, 15, 0)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std,
                      optimal_value=0.397887,
                      optimal_point=[(-np.pi, 12.275), (np.pi, 2.275), (9.42478, 2.475)],
                      random_state=random_state)
Example #9
    def __init__(self, noise_std=0, random_state=None):
        self.ref_point = [1864.72022, 11.81993945, 0.2903999384]

        params = {'x%d' % i: (1.0, 3.0) for i in range(1, 6)}
        config_space = ConfigurationSpace()
        config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
        super().__init__(config_space, noise_std,
                         num_objs=3,
                         random_state=random_state)
Example #10
 def __init__(self, noise_std=0, random_state=None):
     params = {'x1': (-15.0, -5.0, -10.0),
               'x2': (-3.0, 3.0, 0)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std,
                      optimal_value=0,
                      optimal_point=[(-10.0, 1.0)],
                      random_state=random_state)
Example #11
 def __init__(self, dim: int, num_constraints=0, noise_std=0, random_state=None):
     self.dim = dim
     self.ref_point = [11.0, 11.0]
     params = {'x%d' % i: (0, 1) for i in range(1, dim+1)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std,
                      num_objs=2, num_constraints=num_constraints,
                      random_state=random_state)
Example #12
 def __init__(self, dim=2, constrained=False, noise_std=0, random_state=None):
     self.dim = dim
     self.constrained = constrained
     params = {'x%d' % i: (-5.0, 10.0, 2.5) for i in range(1, 1+self.dim)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std,
                      optimal_value=0,
                      optimal_point=[tuple(1.0 for _ in range(self.dim))],
                      random_state=random_state)
Example #13
    def __init__(self, noise_std=0, random_state=None):
        self.ref_point = [10.0, 10.0]

        params = {'x1': (0.1, 10.0),
                  'x2': (0.0, 5.0)}
        config_space = ConfigurationSpace()
        config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
        super().__init__(config_space, noise_std,
                         num_objs=2, num_constraints=2,
                         random_state=random_state)
Example #14
def get_xgboost_config_space(task_type='cls'):
    if task_type == 'cls':
        cs = ConfigurationSpace()
        n_estimators = UniformIntegerHyperparameter("n_estimators",
                                                    100,
                                                    1000,
                                                    q=50,
                                                    default_value=500)
        max_depth = UniformIntegerHyperparameter("max_depth", 1, 12)
        learning_rate = UniformFloatHyperparameter("learning_rate",
                                                   1e-3,
                                                   0.9,
                                                   log=True,
                                                   default_value=0.1)
        min_child_weight = UniformFloatHyperparameter("min_child_weight",
                                                      0,
                                                      10,
                                                      q=0.1,
                                                      default_value=1)
        subsample = UniformFloatHyperparameter("subsample",
                                               0.1,
                                               1,
                                               q=0.1,
                                               default_value=1)
        colsample_bytree = UniformFloatHyperparameter("colsample_bytree",
                                                      0.1,
                                                      1,
                                                      q=0.1,
                                                      default_value=1)
        gamma = UniformFloatHyperparameter("gamma",
                                           0,
                                           10,
                                           q=0.1,
                                           default_value=0)
        reg_alpha = UniformFloatHyperparameter("reg_alpha",
                                               0,
                                               10,
                                               q=0.1,
                                               default_value=0)
        reg_lambda = UniformFloatHyperparameter("reg_lambda",
                                                1,
                                                10,
                                                q=0.1,
                                                default_value=1)
        cs.add_hyperparameters([
            n_estimators, max_depth, learning_rate, min_child_weight,
            subsample, colsample_bytree, gamma, reg_alpha, reg_lambda
        ])
        return cs
    elif task_type == 'rgs':
        raise NotImplementedError
    else:
        raise ValueError('Unsupported task type: %s.' % (task_type, ))
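A brief usage sketch (assuming the ConfigSpace imports used in the snippet above):

cs = get_xgboost_config_space('cls')
config = cs.sample_configuration()   # draw one random configuration from the space
print(config.get_dictionary())       # dict of hyperparameter name -> value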
Example #15
    def __init__(self, constrained=False, noise_std=0, random_state=None):
        self.ref_point = [18.0, 6.0]
        self.constrained = constrained
        num_constraints = 1 if self.constrained else 0

        params = {'x1': (0, 1, 0.5),
                  'x2': (0, 1, 0.5)}
        config_space = ConfigurationSpace()
        config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
        super().__init__(config_space, noise_std,
                         num_objs=2,
                         num_constraints=num_constraints,
                         random_state=random_state)
Example #16
 def __init__(self, dim, num_objs=2, num_constraints=0, noise_std=0, random_state=None):
     if dim <= num_objs:
         raise ValueError(
             "dim must be > num_objs, but got %s and %s" % (dim, num_objs)
         )
     self.dim = dim
     self.k = self.dim - num_objs + 1
     self.bounds = [(0.0, 1.0) for _ in range(self.dim)]
     self.ref_point = [self._ref_val for _ in range(num_objs)]
     params = {'x%d' % i: (0, 1, i/dim) for i in range(1, dim+1)}
     config_space = ConfigurationSpace()
     config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
     super().__init__(config_space, noise_std, num_objs, num_constraints, random_state=random_state)
Example #17
def string2config_space(space_desc: str):
    line_list = space_desc.split('\n')
    cur_line = 2  # skip the two header lines produced by str(ConfigurationSpace)
    cs = ConfigurationSpace()
    status = 'hp'
    hp_list = list()
    while cur_line != len(line_list) - 1:
        line_content = line_list[cur_line]
        if line_content == '  Conditions:':
            hp_dict = {hp.name: hp for hp in hp_list}
            status = 'cond'
        elif line_content == '  Forbidden Clauses:':
            status = 'forbidden'
        else:
            if status == 'hp':
                hp = string2hyperparameter(line_content)
                hp_list.append(hp)
                cs.add_hyperparameter(hp)
            elif status == 'cond':
                cond = string2condition(line_content, hp_dict)
                cs.add_condition(cond)
            else:
                forbid = string2forbidden(line_content, hp_dict)
                cs.add_forbidden_clause(forbid)
        cur_line += 1
    return cs
Example #18
    def __init__(self, dim=2, bounds=None, constrained=False,
                 noise_std=0, random_state=None):
        self.constrained = constrained
        if bounds is None:
            if constrained:
                lb, ub = -5, 10
            else:
                lb, ub = -10, 15
        else:
            lb, ub = bounds

        params = {'x%d' % i: (lb, ub, (lb + ub)/2)
                  for i in range(1, dim+1)}
        config_space = ConfigurationSpace()
        config_space.add_hyperparameters([UniformFloatHyperparameter(k, *v) for k, v in params.items()])
        super().__init__(config_space, noise_std, optimal_value=0, random_state=random_state)
Example #19
    def get_hyperparameter_search_space(space_size='large'):
        """
            ['n_estimators', 'learning_rate', 'max_depth', 'colsample_bytree', 'gamma',
                'min_child_weight',  'reg_alpha', 'reg_lambda', 'subsample']
        """
        cs = ConfigurationSpace()
        if space_size == 'large':
            n_estimators = UniformIntegerHyperparameter("n_estimators", 100, 1000, q=10, default_value=500)
            learning_rate = UniformFloatHyperparameter("learning_rate", 1e-3, 0.9, log=True, default_value=0.1)
            max_depth = UniformIntegerHyperparameter("max_depth", 1, 12)

            colsample_bytree = UniformFloatHyperparameter("colsample_bytree", 0.1, 1, q=0.1, default_value=1)
            gamma = UniformFloatHyperparameter("gamma", 0, 10, q=0.1, default_value=0)

            min_child_weight = UniformFloatHyperparameter("min_child_weight", 0, 10, q=0.1, default_value=1)
            reg_alpha = UniformFloatHyperparameter("reg_alpha", 0, 10, q=0.1, default_value=0)
            reg_lambda = UniformFloatHyperparameter("reg_lambda", 1, 10, q=0.1, default_value=1)
            subsample = UniformFloatHyperparameter("subsample", 0.1, 1, q=0.1, default_value=1)
        elif space_size == 'medium':
            n_estimators = UniformIntegerHyperparameter("n_estimators", 100, 1000, q=10, default_value=500)
            learning_rate = UniformFloatHyperparameter("learning_rate", 1e-3, 0.9, log=True, default_value=0.1)
            max_depth = UniformIntegerHyperparameter("max_depth", 1, 12)

            colsample_bytree = UniformFloatHyperparameter("colsample_bytree", 0.1, 1, q=0.1, default_value=1)
            gamma = UniformFloatHyperparameter("gamma", 0, 10, q=0.1, default_value=0)
            min_child_weight = UniformFloatHyperparameter("min_child_weight", 0, 10, q=0.1, default_value=1)

            reg_alpha = UnParametrizedHyperparameter("reg_alpha", 0)
            reg_lambda = UnParametrizedHyperparameter("reg_lambda", 1)
            subsample = UnParametrizedHyperparameter("subsample", 1)
        else:
            n_estimators = UniformIntegerHyperparameter("n_estimators", 100, 1000, q=10, default_value=500)
            learning_rate = UniformFloatHyperparameter("learning_rate", 1e-3, 0.9, log=True, default_value=0.1)
            max_depth = UniformIntegerHyperparameter("max_depth", 1, 12)

            colsample_bytree = UnParametrizedHyperparameter("colsample_bytree", 1)
            gamma = UnParametrizedHyperparameter("gamma", 0)
            min_child_weight = UnParametrizedHyperparameter("min_child_weight", 1)

            reg_alpha = UnParametrizedHyperparameter("reg_alpha", 0)
            reg_lambda = UnParametrizedHyperparameter("reg_lambda", 1)
            subsample = UnParametrizedHyperparameter("subsample", 1)

        cs.add_hyperparameters([n_estimators, max_depth, learning_rate, min_child_weight, subsample,
                                colsample_bytree, gamma, reg_alpha, reg_lambda])
        return cs
Example #20
def config_space2string(config_space: ConfigurationSpace):
    pattern = r'[,|{}\'=<>&]'
    for hp in config_space.get_hyperparameters():
        if re.search(pattern, hp.name):
            raise NameError('Invalid character in hyperparameter name!')
        if hasattr(hp, 'choices'):
            for value in hp.choices:
                if re.search(pattern, value):
                    raise NameError('Invalid character in categorical hyperparameter value!')
    return str(config_space)
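A hedged usage sketch (the space below is illustrative): the function validates hyperparameter names and categorical choices against a set of reserved characters and then uses str(config_space) as the serialization format; string2config_space from Example #17 is the assumed counterpart for reading it back.

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import CategoricalHyperparameter, UniformFloatHyperparameter

cs = ConfigurationSpace()
cs.add_hyperparameters([UniformFloatHyperparameter('learning_rate', 1e-3, 0.3, log=True),
                        CategoricalHyperparameter('booster', ['gbtree', 'dart'])])
desc = config_space2string(cs)          # raises NameError if a name/choice contains , | { } ' = < > &
# cs_back = string2config_space(desc)   # round trip via the parser in Example #17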
Example #21
File: utils.py  Project: DMALab/open-box
def sample_configurations(configuration_space: ConfigurationSpace,
                          num: int) -> List[Configuration]:
    result = []
    cnt = 0
    while cnt < num:
        config = configuration_space.sample_configuration(1)
        if config not in result:
            result.append(config)
            cnt += 1
    return result
Example #22
File: utils.py  Project: DMALab/open-box
def expand_configurations(configs: List[Configuration],
                          configuration_space: ConfigurationSpace, num: int):
    num_config = len(configs)
    num_needed = num - num_config
    config_cnt = 0
    while config_cnt < num_needed:
        config = configuration_space.sample_configuration(1)
        if config not in configs:
            configs.append(config)
            config_cnt += 1
    return configs
Example #23
def get_lightgbm_config_space(task_type='cls'):
    if task_type == 'cls':
        cs = ConfigurationSpace()
        n_estimators = UniformIntegerHyperparameter("n_estimators",
                                                    100,
                                                    1000,
                                                    default_value=500,
                                                    q=50)
        num_leaves = UniformIntegerHyperparameter("num_leaves",
                                                  31,
                                                  2047,
                                                  default_value=128)
        max_depth = Constant('max_depth', 15)
        learning_rate = UniformFloatHyperparameter("learning_rate",
                                                   1e-3,
                                                   0.3,
                                                   default_value=0.1,
                                                   log=True)
        min_child_samples = UniformIntegerHyperparameter("min_child_samples",
                                                         5,
                                                         30,
                                                         default_value=20)
        subsample = UniformFloatHyperparameter("subsample",
                                               0.7,
                                               1,
                                               default_value=1,
                                               q=0.1)
        colsample_bytree = UniformFloatHyperparameter("colsample_bytree",
                                                      0.7,
                                                      1,
                                                      default_value=1,
                                                      q=0.1)
        cs.add_hyperparameters([
            n_estimators, num_leaves, max_depth, learning_rate,
            min_child_samples, subsample, colsample_bytree
        ])
        return cs
    elif task_type == 'rgs':
        raise NotImplementedError
    else:
        raise ValueError('Unsupported task type: %s.' % (task_type, ))
Example #24
def sample_configurations(configuration_space: ConfigurationSpace, num: int,
                          excluded_configs: List[Configuration] = None) -> List[Configuration]:
    if excluded_configs is None:
        excluded_configs = []
    result = []
    cnt = 0
    while cnt < num:
        config = configuration_space.sample_configuration(1)
        if config not in result and config not in excluded_configs:
            result.append(config)
            cnt += 1
    return result
Example #25
def expand_configurations(configs: List[Configuration], configuration_space: ConfigurationSpace, num: int,
                          excluded_configs: List[Configuration] = None):
    if excluded_configs is None:
        excluded_configs = []
    num_config = len(configs)
    num_needed = num - num_config
    config_cnt = 0
    while config_cnt < num_needed:
        config = configuration_space.sample_configuration(1)
        if config not in configs and config not in excluded_configs:
            configs.append(config)
            config_cnt += 1
    return configs
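An illustrative sketch of how the two helpers compose (the toy search space is an assumption):

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter

cs = ConfigurationSpace()
cs.add_hyperparameter(UniformFloatHyperparameter('x', 0.0, 1.0))
initial = sample_configurations(cs, num=3)        # 3 distinct configurations
full = expand_configurations(initial, cs, num=5)  # grow the list to 5 without duplicates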
Example #26
 def get_cs_lightgbm():  # todo q and int for compare?
     cs = ConfigurationSpace()
     n_estimators = UniformFloatHyperparameter("n_estimators",
                                               100,
                                               1000,
                                               default_value=500,
                                               q=50)
     num_leaves = UniformIntegerHyperparameter("num_leaves",
                                               31,
                                               2047,
                                               default_value=128)
     # max_depth = Constant('max_depth', 15)
     learning_rate = UniformFloatHyperparameter("learning_rate",
                                                1e-3,
                                                0.3,
                                                default_value=0.1,
                                                log=True)
     min_child_samples = UniformIntegerHyperparameter("min_child_samples",
                                                      5,
                                                      30,
                                                      default_value=20)
     subsample = UniformFloatHyperparameter("subsample",
                                            0.7,
                                            1,
                                            default_value=1,
                                            q=0.1)
     colsample_bytree = UniformFloatHyperparameter("colsample_bytree",
                                                   0.7,
                                                   1,
                                                   default_value=1,
                                                   q=0.1)
     # cs.add_hyperparameters([n_estimators, num_leaves, max_depth, learning_rate, min_child_samples, subsample,
     #                         colsample_bytree])
     cs.add_hyperparameters([
         n_estimators, num_leaves, learning_rate, min_child_samples,
         subsample, colsample_bytree
     ])
     return cs
Example #27
def get_config_space_from_dict(space_dict: dict):
    cs = ConfigurationSpace()
    params_dict = space_dict['parameters']
    for key in params_dict:
        param_dict = params_dict[key]
        param_type = param_dict['type']
        if param_type in ['float', 'int']:
            bound = param_dict['bound']
            optional_args = dict()
            if 'default' in param_dict:
                optional_args['default_value'] = param_dict['default']
            if 'log' in param_dict:
                optional_args['log'] = parse_bool(param_dict['log'])
            if 'q' in param_dict:
                optional_args['q'] = param_dict['q']

            if param_type == 'float':
                param = UniformFloatHyperparameter(key, bound[0], bound[1], **optional_args)
            else:
                param = UniformIntegerHyperparameter(key, bound[0], bound[1], **optional_args)

        elif param_type == 'cat':
            choices = param_dict['choice']
            optional_args = dict()
            if 'default' in param_dict:
                optional_args['default_value'] = param_dict['default']
            param = CategoricalHyperparameter(key, choices, **optional_args)

        elif param_type == 'const':
            value = param_dict['value']
            param = Constant(key, value)

        else:
            raise ValueError("Parameter type %s not supported!" % param_type)

        cs.add_hyperparameter(param)
    return cs
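An illustrative input dict (the parameter names and values are made up; the keys 'parameters', 'type', 'bound', 'choice', 'value' and 'default' are the ones the function reads):

space_dict = {
    'parameters': {
        'learning_rate': {'type': 'float', 'bound': [1e-3, 0.3], 'default': 0.1},
        'max_depth': {'type': 'int', 'bound': [1, 12]},
        'booster': {'type': 'cat', 'choice': ['gbtree', 'dart'], 'default': 'gbtree'},
        'n_jobs': {'type': 'const', 'value': 4},
    }
}
cs = get_config_space_from_dict(space_dict)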
Example #28
def sample_configuration(configuration_space: ConfigurationSpace, excluded_configs: List[Configuration] = None):
    """
    sample one config not in excluded_configs
    """
    if excluded_configs is None:
        excluded_configs = []
    sample_cnt = 0
    max_sample_cnt = 1000
    while True:
        config = configuration_space.sample_configuration()
        sample_cnt += 1
        if config not in excluded_configs:
            break
        if sample_cnt >= max_sample_cnt:
            raise ValueError('Cannot sample non-duplicate configuration after %d iterations.' % max_sample_cnt)
    return config
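A short usage sketch (the search space is illustrative): sample a configuration that differs from the default one.

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter

cs = ConfigurationSpace()
cs.add_hyperparameter(UniformFloatHyperparameter('x', 0.0, 1.0))
config = sample_configuration(cs, excluded_configs=[cs.get_default_configuration()])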
Example #29
 def __init__(self, bounds=None, noise_std=0, random_state=None):
     if bounds is None:
         bounds = [0, 20]
     lb, ub = bounds
     config_space = ConfigurationSpace()
     config_space.add_hyperparameter(UniformFloatHyperparameter('x1', lb, ub, 1))
     config_space.add_hyperparameter(UniformIntegerHyperparameter('x2', lb, ub, 1))
     super().__init__(config_space, noise_std,
                      optimal_value=-31.9998,
                      optimal_point=[(5.333, 4)],
                      random_state=random_state)
Example #30
    def __init__(self,
                 config_space: ConfigurationSpace,
                 size,
                 lower_bounds=None,
                 upper_bounds=None,
                 random_state=None):
        """
        Parameters
        ----------
        config_space : ConfigurationSpace
            ConfigurationSpace to do sampling.

        size : int N
            Number of samples.

        lower_bounds : lower bounds in [0, 1] for continuous dimensions (optional)

        upper_bounds : upper bounds in [0, 1] for continuous dimensions (optional)
        """
        self.config_space = config_space

        self.search_dims = []
        for i, param in enumerate(config_space.get_hyperparameters()):
            if isinstance(param, UniformFloatHyperparameter):
                self.search_dims.append((0.0, 1.0))
            elif isinstance(param, UniformIntegerHyperparameter):
                self.search_dims.append((0.0, 1.0))
            else:
                raise NotImplementedError(
                    'Only Integer and Float are supported in %s.' %
                    self.__class__.__name__)

        self.size = size
        default_lb, default_ub = zip(*self.search_dims)
        self.lower_bounds = np.array(
            default_lb) if lower_bounds is None else np.clip(
                lower_bounds, default_lb, default_ub)
        self.upper_bounds = np.array(
            default_ub) if upper_bounds is None else np.clip(
                upper_bounds, default_lb, default_ub)

        self.rng = check_random_state(random_state)
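The constructor above only records one (0, 1) interval per numerical hyperparameter; the class's actual sampling method is not shown in this snippet. As a hedged illustration only, vectors drawn in the unit hypercube could be mapped back to configurations like this (Configuration's vector argument is part of the ConfigSpace API; the two-dimensional space is an assumption):

import numpy as np
from ConfigSpace import Configuration, ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter

cs = ConfigurationSpace()
cs.add_hyperparameters([UniformFloatHyperparameter('x1', 0.0, 1.0),
                        UniformFloatHyperparameter('x2', 0.0, 1.0)])
rng = np.random.RandomState(1)
X = rng.uniform(size=(4, 2))                          # points in the unit hypercube
configs = [Configuration(cs, vector=x) for x in X]    # unit-cube vector -> Configuration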