def _get_range_integer(self, param, cd, q=1):
    """Build an integer hyperparameter for ``param`` from the config dict ``cd``.

    Parameters
    ----------
    param : str
        Name of the hyperparameter.
    cd : dict
        Configuration dictionary; ``cd[param]`` is expected to hold an
        ``'i_range'`` list and, optionally, ``'default'`` and ``'step'`` keys.
    q : int, optional
        Unused; kept only for backward compatibility with existing callers.

    Returns
    -------
    CSH.Constant, CSH.UniformIntegerHyperparameter, or None
        ``None`` when ``param`` is absent from ``cd``; a ``Constant`` when the
        range collapses to a single value; otherwise a uniform integer
        hyperparameter quantized by ``'step'`` (default 1).
    """
    if not cd.get(param):
        return None
    p = cd[param]
    # A single-element range cannot span an interval -> constant value.
    if len(p['i_range']) < 2:
        return CSH.Constant(param, int(p['i_range'][0]))
    low = int(p['i_range'][0])
    upp = int(p['i_range'][1])
    # Degenerate interval -> constant (default/step would be meaningless).
    if low == upp:
        return CSH.Constant(param, low)
    # BUGFIX: compare against None so an explicit step/default of 0 is not
    # silently discarded by truthiness.
    step = p.get('step')
    q_val = int(step) if step is not None else 1
    default_val = p.get('default')
    if default_val is not None:
        # BUGFIX: the default of an *integer* hyperparameter must be an int;
        # the previous code produced a float via float(default_val).
        default = int(default_val)
    else:
        default = int((upp + low) / 2)
    return CSH.UniformIntegerHyperparameter(
        param, lower=low, upper=upp, default_value=default, q=q_val, log=False)
def get_warmstart_configspace():
    """Return a configuration space with every hyperparameter pinned to a
    fixed warm-start value."""
    ws_cs = CS.ConfigurationSpace()
    # Fixed warm-start settings, one Constant per entry.
    warmstart_values = {
        'n_estimators': 100,
        'max_depth': 40,
        'min_samples_leaf': 30,
        'min_samples_split': 20,
        'max_features': 'auto',
    }
    ws_cs.add_hyperparameters(
        [CSH.Constant(name, value=value)
         for name, value in warmstart_values.items()])
    return ws_cs
def get_hyperparameter(hyper_type, name, value_range, log=False):
    """Instantiate a ConfigSpace hyperparameter of ``hyper_type``.

    A degenerate range (one element, or two equal elements) yields a
    ``Constant`` regardless of ``hyper_type``. ``value_range`` may also be a
    packed tuple ``((lower, upper), log_flag)``.
    """
    # Unpack the ((range), log) convenience form.
    if (isinstance(value_range, tuple) and len(value_range) == 2
            and isinstance(value_range[1], bool)
            and isinstance(value_range[0], (tuple, list))):
        value_range, log = value_range

    n = len(value_range)
    if n == 0:
        raise ValueError(name + ': The range has to contain at least one element')

    def _constant(v):
        # ConfigSpace Constants cannot hold bools; coerce them to int.
        return CSH.Constant(name, int(v) if isinstance(v, bool) else v)

    if n == 1:
        return _constant(value_range[0])
    if n == 2 and value_range[0] == value_range[1]:
        return _constant(value_range[0])

    if hyper_type == CSH.CategoricalHyperparameter:
        return CSH.CategoricalHyperparameter(name, value_range)
    if hyper_type == CSH.UniformFloatHyperparameter:
        assert n == 2, "Float HP range update for %s is specified by the two upper and lower values. %s given." % (name, n)
        return CSH.UniformFloatHyperparameter(
            name, lower=value_range[0], upper=value_range[1], log=log)
    if hyper_type == CSH.UniformIntegerHyperparameter:
        assert n == 2, "Int HP range update for %s is specified by the two upper and lower values. %s given." % (name, n)
        return CSH.UniformIntegerHyperparameter(
            name, lower=value_range[0], upper=value_range[1], log=log)
    raise ValueError('Unknown type: %s for hp %s' % (hyper_type, name))
def get_configspace(hp: dict):
    """Build the search space: entries of ``hp`` become fixed constants,
    plus a set of tunable discriminator/network hyperparameters."""
    config_space = ConfigurationSpace()
    # Fixed hyperparameters (they are not going to be optimised by ABAS).
    for name, value in hp.items():
        config_space.add_hyperparameter(csh.Constant(name, value))
    tunable = [
        # Discriminator HPs (to be optimised)
        csh.UniformIntegerHyperparameter('disc.num_fc_layers', lower=2, upper=7),
        csh.UniformIntegerHyperparameter('disc.hidden_size_log', lower=6, upper=12),
        csh.UniformFloatHyperparameter('disc.dropout', lower=0., upper=1.),
        # Other HPs (to be optimised)
        csh.UniformIntegerHyperparameter('net.bottleneck_size_log', lower=6, upper=10),
        csh.UniformFloatHyperparameter('base.weight_da', lower=0., upper=2.),
    ]
    for hyperparameter in tunable:
        config_space.add_hyperparameter(hyperparameter)
    return config_space
def _get_range_uniform(self, param, cd):
    """Build a float hyperparameter for ``param`` from the config dict ``cd``.

    Parameters
    ----------
    param : str
        Name of the hyperparameter.
    cd : dict
        Configuration dictionary; ``cd[param]`` is expected to hold a
        ``'range'`` list and, optionally, a ``'default'`` key.

    Returns
    -------
    CSH.Constant, CSH.UniformFloatHyperparameter, or None
        ``None`` when ``param`` is absent from ``cd``; a ``Constant`` when the
        range collapses to a single value; otherwise a uniform float
        hyperparameter (log-scaled for ranges spanning >= 3 orders of
        magnitude).
    """
    if not cd.get(param):
        return None
    p = cd[param]
    # A single-element range cannot span an interval -> constant value.
    if len(p['range']) == 1:
        return CSH.Constant(param, float(p['range'][0]))
    low = float(p['range'][0])
    upp = float(p['range'][1])
    # Consistent with _get_range_integer: degenerate interval -> constant
    # (ConfigSpace rejects lower == upper for uniform hyperparameters).
    if low == upp:
        return CSH.Constant(param, low)
    default_val = p.get('default')
    # BUGFIX: compare against None so an explicit default of 0.0 is not
    # discarded by truthiness and silently replaced with the midpoint.
    if default_val is not None:
        default = float(default_val)
    else:
        default = (upp + low) / 2
    # Sample on a log scale when the range covers >= 3 orders of magnitude.
    use_log = (low != 0.0) and (abs(upp / low) >= 1000)
    return CSH.UniformFloatHyperparameter(
        param, lower=low, upper=upp, default_value=default, log=use_log)
def get_config_space():
    """Build the DARTS cell search space as a ConfigSpace object.

    Every edge of the normal and the reduction cell gets a categorical
    operation choice; nodes 3-5 get a categorical choice of their two
    predecessor nodes, and conditions deactivate edges that do not match
    the chosen predecessor pair (keeping the space consistent with the
    original DARTS search space).
    """
    config_space = CS.ConfigurationSpace()

    # One categorical operation per edge, for both cell types (14 edges each).
    for edge_idx in range(14):
        config_space.add_hyperparameter(
            CSH.CategoricalHyperparameter(
                'edge_normal_{}'.format(edge_idx), PRIMITIVES))
        config_space.add_hyperparameter(
            CSH.CategoricalHyperparameter(
                'edge_reduce_{}'.format(edge_idx), PRIMITIVES))

    # Nodes are topologically sorted: 0 and 1 are the cell inputs, 2-5 the
    # intermediate nodes. For nodes 3-5 a categorical HP selects which pair
    # of predecessor node indices feeds the node (node 2 has only one
    # possibility, so its two incoming edges are unconditioned).
    pred_nodes = {
        '3': ['0_1', '0_2', '1_2'],
        '4': ['0_1', '0_2', '0_3', '1_2', '1_3', '2_3'],
        '5': ['0_1', '0_2', '0_3', '0_4', '1_2', '1_3', '1_4',
              '2_3', '2_4', '3_4'],
    }
    for node_idx in range(3, 6):
        for cell in ('normal', 'reduce'):
            config_space.add_hyperparameter(
                CSH.CategoricalHyperparameter(
                    'inputs_node_{}_{}'.format(cell, node_idx),
                    pred_nodes[str(node_idx)]))

    config_space.add_hyperparameter(CSH.Constant('layers', 20))
    config_space.add_hyperparameter(CSH.Constant('init_channels', 36))
    config_space.add_hyperparameter(CSH.Constant('drop_path_prob', 0.1))
    config_space.add_hyperparameter(
        CSH.CategoricalHyperparameter('auxiliary', [False]))

    # (edge index, owning node, predecessor-pair values that activate the
    # edge). One InCondition per entry and per cell type, mirroring the
    # constraints of the original DARTS search space.
    edge_conditions = [
        (2, 3, ['0_1', '0_2']),
        (3, 3, ['0_1', '1_2']),
        (4, 3, ['0_2', '1_2']),
        (5, 4, ['0_1', '0_2', '0_3']),
        (6, 4, ['0_1', '1_2', '1_3']),
        (7, 4, ['0_2', '1_2', '2_3']),
        (8, 4, ['0_3', '1_3', '2_3']),
        (9, 5, ['0_1', '0_2', '0_3', '0_4']),
        (10, 5, ['0_1', '1_2', '1_3', '1_4']),
        (11, 5, ['0_2', '1_2', '2_3', '2_4']),
        (12, 5, ['0_3', '1_3', '2_3', '3_4']),
        (13, 5, ['0_4', '1_4', '2_4', '3_4']),
    ]
    for cell_type in ['normal', 'reduce']:
        for edge_idx, node_idx, values in edge_conditions:
            config_space.add_condition(
                CS.InCondition(
                    child=config_space.get_hyperparameter(
                        'edge_{}_{}'.format(cell_type, edge_idx)),
                    parent=config_space.get_hyperparameter(
                        'inputs_node_{}_{}'.format(cell_type, node_idx)),
                    values=values))
    return config_space
def create_config_space(
    hidden_layers: bool = True,
    scaling: bool = True,
    learning: bool = True,
    loss: bool = True,
    easing: bool = True,
    activation_functions: bool = True,
    dropout: bool = True,
    activity_regularizer: bool = True,
    min_dropout_rate_input: float = 0.0,
    min_dropout_rate_hidden_layers: float = 0.0,
    min_dropout_rate_output: float = 0.0,
    max_dropout_rate_input: float = 0.99,
    max_dropout_rate_hidden_layers: float = 0.99,
    max_dropout_rate_output: float = 0.99,
):
    """Assemble the autoencoder configuration space.

    Each boolean flag toggles one group of hyperparameters; the min/max
    dropout arguments bound the corresponding dropout-rate ranges. The
    space is seeded (1234) for reproducible sampling.
    """
    space = cs.ConfigurationSpace(seed=1234)

    # Always present: fixed input width and the latent bottleneck size.
    space.add_hyperparameters([
        csh.Constant('input_dimension', value=1025),
        csh.UniformIntegerHyperparameter('latent_dimension',
                                         lower=10, upper=1024, log=True),
    ])

    if hidden_layers:
        # Odd layer counts 1, 3, 5, 7, 9.
        space.add_hyperparameters([
            csh.OrdinalHyperparameter('number_of_hidden_layers',
                                      list(range(1, 11, 2))),
        ])

    if scaling:
        space.add_hyperparameters([
            csh.CategoricalHyperparameter('_scaler', [
                'none',
                'min_max',
                'std',
            ]),
        ])

    if learning:
        space.add_hyperparameters([
            csh.UniformIntegerHyperparameter('_batch_size',
                                             lower=16, upper=512),
            csh.UniformFloatHyperparameter('learning_rate',
                                           lower=1e-6, upper=1e-1, log=True),
            csh.UniformFloatHyperparameter('learning_rate_decay',
                                           lower=1e-8, upper=1e-2, log=True),
        ])

    if loss:
        space.add_hyperparameters([
            csh.CategoricalHyperparameter(
                'loss', ['mse', 'mae', 'binary_crossentropy']),
        ])

    if easing:
        space.add_hyperparameters([
            csh.CategoricalHyperparameter(
                'easing', ['ease_linear', 'ease_in_quad', 'ease_out_quad']),
        ])

    if activation_functions:
        space.add_hyperparameters([
            csh.CategoricalHyperparameter(
                'hidden_layer_activations',
                ['relu', 'linear', 'sigmoid', 'tanh']),
            csh.CategoricalHyperparameter(
                'output_layer_activation',
                ['relu', 'linear', 'sigmoid', 'tanh']),
        ])

    if dropout:
        space.add_hyperparameters([
            csh.UniformFloatHyperparameter(
                'dropout_rate_input',
                lower=min_dropout_rate_input,
                upper=max_dropout_rate_input),
            csh.UniformFloatHyperparameter(
                'dropout_rate_hidden_layers',
                lower=min_dropout_rate_hidden_layers,
                upper=max_dropout_rate_hidden_layers),
            csh.UniformFloatHyperparameter(
                'dropout_rate_output',
                lower=min_dropout_rate_output,
                upper=max_dropout_rate_output),
        ])

    if activity_regularizer:
        space.add_hyperparameters([
            csh.CategoricalHyperparameter('activity_regularizer',
                                          ['l1', 'l2']),
            csh.UniformFloatHyperparameter('l1_activity_regularizer_factor',
                                           lower=1e-6, upper=1e-1,
                                           default_value=1e-2, log=True),
            csh.UniformFloatHyperparameter('l2_activity_regularizer_factor',
                                           lower=1e-6, upper=1e-1,
                                           default_value=1e-2, log=True),
        ])
        # Each factor is only active when its regularizer type is selected.
        space.add_condition(
            cs.EqualsCondition(
                space.get_hyperparameter('l1_activity_regularizer_factor'),
                space.get_hyperparameter('activity_regularizer'), 'l1'))
        space.add_condition(
            cs.EqualsCondition(
                space.get_hyperparameter('l2_activity_regularizer_factor'),
                space.get_hyperparameter('activity_regularizer'), 'l2'))

    return space
# 'a2e.evaluation.health_score_cost', # 'a2e.evaluation.min_health_score_cost', ], } config_space = cs.ConfigurationSpace(seed=1234) config_space.add_hyperparameters([ csh.CategoricalHyperparameter('_scaler', [ 'none', 'min_max', 'min_max_per_sample', 'std', 'std_per_sample', ]), csh.Constant('input_dimension', value=1025), csh.CategoricalHyperparameter('number_of_hidden_layers', list(range(1, 10, 2))), csh.UniformFloatHyperparameter('compression_per_layer', lower=0.1, upper=0.9, default_value=0.7), csh.CategoricalHyperparameter('hidden_layer_activations', ['relu', 'linear', 'sigmoid', 'tanh']), csh.CategoricalHyperparameter('output_layer_activation', ['relu', 'linear', 'sigmoid', 'tanh']), # csh.CategoricalHyperparameter('use_dropout', [True, False]), # csh.UniformFloatHyperparameter('dropout_rate_input', lower=0.1, upper=0.9, default_value=0.5), # csh.UniformFloatHyperparameter('dropout_rate_encoder', lower=0.1, upper=0.9, default_value=0.5), # csh.UniformFloatHyperparameter('dropout_rate_decoder', lower=0.1, upper=0.9, default_value=0.5),
def convert_simple_param(name, param):
    """Convert a simple labwatch parameter to a ConfigSpace parameter.

    Parameters
    ----------
    name: str
        The name of the parameter.
    param: dict
        Dictionary describing the parameter.

    Returns
    -------
    ConfigSpace.hyperparameters.Hyperparameter:
        The converted hyperparameter.
    """
    klass = param["_class"]

    if klass == 'Constant':
        return csh.Constant(name, param["value"])

    if klass == 'Categorical':
        # Choices may themselves be Constant parameters (dicts); flatten
        # everything down to basic Python types.
        basic_choices = []
        for choice in param["choices"]:
            if isinstance(choice, dict):
                basic_choices.append(choice["default"])
            elif isinstance(choice, basic_types):
                basic_choices.append(choice)
            else:
                err = "Choice parameter {} is not " \
                      "a base type or Constant!"
                raise ParamValueExcept(err.format(choice))
        return csh.CategoricalHyperparameter(name=name,
                                             choices=basic_choices,
                                             default_value=basic_choices[0])

    if klass == 'UniformFloat':
        return csh.UniformFloatHyperparameter(name=name,
                                              lower=param["lower"],
                                              upper=param["upper"],
                                              default_value=param["default"],
                                              log=param["log_scale"])

    if klass == 'UniformInt':
        return csh.UniformIntegerHyperparameter(name=name,
                                                lower=param["lower"],
                                                upper=param["upper"],
                                                default_value=param["default"],
                                                log=param["log_scale"])

    if klass == 'UniformNumber':
        # Resolve the declared numeric type, then build the matching
        # uniform hyperparameter with identical bounds/default/log scale.
        ptype = str_to_types[param["type"]]
        if ptype == float:
            factory = csh.UniformFloatHyperparameter
        elif ptype == int:
            factory = csh.UniformIntegerHyperparameter
        else:
            raise ValueError("Don't know how to represent UniformNumber with "
                             "type: {} in ConfigSpace".format(param["type"]))
        return factory(name=name,
                       lower=param["lower"],
                       upper=param["upper"],
                       default_value=param["default"],
                       log=param["log_scale"])

    if klass == 'Gaussian':
        return csh.NormalFloatHyperparameter(name=name,
                                             mu=param["mu"],
                                             sigma=param["sigma"],
                                             log=param["log_scale"])

    raise ValueError("Don't know how to represent {} in ConfigSpace "
                     "notation.".format(param))