def _get_config_space(self):
    """Build the ConfigurationSpace for the gradient-boosting model.

    Returns:
        CS.ConfigurationSpace containing all tunable hyperparameters.
    """
    cs = CS.ConfigurationSpace()
    hyperparameters = [
        # learning rate shrinks the contribution of each tree
        CSH.UniformFloatHyperparameter('learning_rate', lower=0.01,
                                       upper=0.2, default_value=0.1,
                                       log=True),
        # the number of boosting stages to perform
        CSH.UniformIntegerHyperparameter('n_estimators', lower=10,
                                         upper=100, default_value=100),
        # fraction of samples used for fitting the individual base learners
        CSH.UniformFloatHyperparameter('subsample', lower=0.5, upper=1.0,
                                       default_value=1.0),
        # function used to measure the quality of a split
        CSH.CategoricalHyperparameter('criterion',
                                      ['friedman_mse', 'mae', 'mse']),
        # minimum number of samples required to split an internal node
        CSH.UniformIntegerHyperparameter('min_samples_split', lower=2,
                                         upper=12, default_value=2),
        # minimum number of samples required to be at a leaf node
        CSH.UniformIntegerHyperparameter('min_samples_leaf', lower=1,
                                         upper=12, default_value=2),
        # maximum depth of the individual regression estimators
        CSH.UniformIntegerHyperparameter('max_depth', lower=3, upper=8,
                                         default_value=3),
        # number of features to consider when looking for the best split
        CSH.CategoricalHyperparameter('max_features', ['log2', 'sqrt']),
    ]
    for hp in hyperparameters:
        cs.add_hyperparameter(hp)
    return cs
def get_configspace():
    """Build the boosting hyperparameter search space.

    :return: ConfigurationsSpace-Object holding n_estimators, lr,
        max_depth and gamma, all sampled on a log scale.
    """
    cs = CS.ConfigurationSpace()
    hyperparameters = [
        CSH.UniformIntegerHyperparameter('n_estimators', lower=1,
                                         upper=1000, default_value=100,
                                         log=True),
        CSH.UniformFloatHyperparameter('lr', lower=0.001, upper=0.9,
                                       default_value=0.1, log=True),
        CSH.UniformIntegerHyperparameter('max_depth', lower=1, upper=1000,
                                         default_value=3, log=True),
        CSH.UniformFloatHyperparameter('gamma', lower=0.1, upper=100,
                                       default_value=1, log=True),
    ]
    # NOTE(review): further candidates (min_child_weight, max_delta_step,
    # subsample, colsample_bytree/bylevel, reg_alpha, reg_lambda, num_pca)
    # were explored previously and are currently disabled.
    cs.add_hyperparameters(hyperparameters)
    return cs
def get_configspace(self):
    """Hyperparameter search space for the TD3 agent.

    Returns:
        CS.ConfigurationSpace covering batch size, gamma, learning rate,
        soft-update rate, policy delay, network architecture and noise
        scales (all names prefixed with 'td3_').
    """
    cs = CS.ConfigurationSpace()
    cs.add_hyperparameters([
        CSH.UniformIntegerHyperparameter(name='td3_batch_size', lower=64, upper=256, log=False, default_value=128),
        CSH.UniformFloatHyperparameter(name='td3_gamma', lower=0.001, upper=0.1, log=True, default_value=0.01),
        CSH.UniformFloatHyperparameter(name='td3_lr', lower=1e-4, upper=5e-3, log=True, default_value=1e-3),
        CSH.UniformFloatHyperparameter(name='td3_tau', lower=0.005, upper=0.05, log=True, default_value=0.01),
        CSH.UniformIntegerHyperparameter(name='td3_policy_delay', lower=1, upper=3, log=False, default_value=2),
        CSH.CategoricalHyperparameter(name='td3_activation_fn', choices=['tanh', 'relu', 'leakyrelu', 'prelu'], default_value='relu'),
        CSH.UniformIntegerHyperparameter(name='td3_hidden_size', lower=48, upper=192, log=True, default_value=128),
        CSH.UniformIntegerHyperparameter(name='td3_hidden_layer', lower=1, upper=2, log=False, default_value=2),
        CSH.UniformFloatHyperparameter(name='td3_action_std', lower=0.05, upper=0.2, log=True, default_value=0.1),
        CSH.UniformFloatHyperparameter(name='td3_policy_std', lower=0.1, upper=0.4, log=True, default_value=0.2),
        CSH.UniformFloatHyperparameter(name='td3_policy_std_clip', lower=0.25, upper=1, log=True, default_value=0.5),
        CSH.UniformFloatHyperparameter(name='td3_early_out_virtual_diff', lower=1e-2, upper=1e-1, log=True, default_value=3e-2),
    ])
    return cs
def get_nas101_configuration_space():
    """NAS-Bench-101 search space: one op choice for each of five nodes
    plus one binary flag per possible edge of the adjacency matrix.

    Returns:
        ConfigSpace.ConfigurationSpace
    """
    nas101_cs = ConfigSpace.ConfigurationSpace()
    # Operation choice for each of the five intermediate nodes.
    for node_idx in range(5):
        nas101_cs.add_hyperparameter(
            ConfigSpace.CategoricalHyperparameter("op_node_%d" % node_idx,
                                                  OPS))
    # One on/off flag per upper-triangular adjacency-matrix entry.
    for edge_idx in range(VERTICES * (VERTICES - 1) // 2):
        nas101_cs.add_hyperparameter(
            ConfigSpace.CategoricalHyperparameter("edge_%d" % edge_idx,
                                                  [0, 1]))
    return nas101_cs
def get_configspace():
    """Search space: learning rate (log-uniform), batch size (log-uniform)
    and convolution filter size.

    See the HpBandSter keras worker example for the general pattern.
    """
    config_space = CS.ConfigurationSpace()
    config_space.add_hyperparameters([
        CS.UniformFloatHyperparameter('learning_rate', lower=10e-4,
                                      upper=1, log=True),
        CS.UniformIntegerHyperparameter('batch_size', lower=16, upper=128,
                                        log=True),
        CS.CategoricalHyperparameter('filter_size', [3, 5]),
    ])
    return config_space
def get_config_space(user_updates=None):
    """Build a fully-connected-network search space with a variable number
    of layers; per-layer units/dropout are conditionally active.

    :param user_updates: optional dict; a 'num_layers' entry overrides the
        default (1, 15) layer-count range.
    :return: CS.ConfigurationSpace
    """
    cs = CS.ConfigurationSpace()
    range_num_layers = (1, 15)
    range_num_units = (10, 1024)
    possible_activations = ('sigmoid', 'tanh', 'relu')
    range_dropout = (0.0, 0.8)
    # Allow the caller to narrow/widen the layer-count range.
    if user_updates is not None and 'num_layers' in user_updates:
        range_num_layers = user_updates['num_layers']
    num_layers = CSH.UniformIntegerHyperparameter(
        'num_layers', lower=range_num_layers[0], upper=range_num_layers[1])
    cs.add_hyperparameter(num_layers)
    # add_hyperparameter returns the hyperparameter, so use_dropout can be
    # referenced in the conditions below.
    use_dropout = cs.add_hyperparameter(
        CS.CategoricalHyperparameter("use_dropout", [True, False],
                                     default_value=True))
    # One (units, dropout) pair per possible layer index.
    for i in range(1, range_num_layers[1] + 1):
        n_units = CSH.UniformIntegerHyperparameter(
            "num_units_%d" % i, lower=range_num_units[0],
            upper=range_num_units[1], log=True)
        cs.add_hyperparameter(n_units)
        dropout = CSH.UniformFloatHyperparameter(
            "dropout_%d" % i, lower=range_dropout[0],
            upper=range_dropout[1])
        cs.add_hyperparameter(dropout)
        # Dropout rate is only meaningful when dropout is enabled at all.
        dropout_condition_1 = CS.EqualsCondition(dropout, use_dropout, True)
        if i > range_num_layers[0]:
            # Layers beyond the minimum are only active when num_layers is
            # large enough to include layer i.
            cs.add_condition(
                CS.GreaterThanCondition(n_units, num_layers, i - 1))
            dropout_condition_2 = CS.GreaterThanCondition(
                dropout, num_layers, i - 1)
            # Dropout for layer i requires both: dropout enabled AND the
            # layer itself active.
            cs.add_condition(
                CS.AndConjunction(dropout_condition_1, dropout_condition_2))
        else:
            # Layers up to the lower bound always exist; only gate on the
            # use_dropout switch.
            cs.add_condition(dropout_condition_1)
    cs.add_hyperparameter(
        CSH.CategoricalHyperparameter('activation', possible_activations))
    return (cs)
def get_configuration_space(self):
    """Search space for the system-identification model: solver method,
    basis-function toggles, and their conditional sub-parameters."""
    cs = CS.ConfigurationSpace()
    method = CSH.CategoricalHyperparameter(
        "method", choices=["lstsq", "lasso", "stable"])
    # Regularization strength; only active when method == 'lasso'.
    lasso_alpha = CSH.UniformFloatHyperparameter(
        "lasso_alpha", lower=1e-10, upper=1e2, default_value=1.0, log=True)
    # Polynomial basis toggle and its degree (degree active only if "true").
    poly_basis = CSH.CategoricalHyperparameter(
        "poly_basis", choices=["true", "false"], default_value="false")
    poly_degree = CSH.UniformIntegerHyperparameter(
        "poly_degree", lower=2, upper=8, default_value=3)
    # Trigonometric basis toggle and its frequency (active only if "true").
    trig_basis = CSH.CategoricalHyperparameter(
        "trig_basis", choices=["true", "false"], default_value="false")
    trig_freq = CSH.UniformIntegerHyperparameter(
        "trig_freq", lower=1, upper=8, default_value=1)
    product_terms = CSH.CategoricalHyperparameter(
        "product_terms", choices=["false"], default_value="false")
    conditions = [
        CSC.InCondition(child=poly_degree, parent=poly_basis,
                        values=["true"]),
        CSC.InCondition(child=trig_freq, parent=trig_basis,
                        values=["true"]),
        CSC.InCondition(child=lasso_alpha, parent=method,
                        values=["lasso"]),
    ]
    cs.add_hyperparameters([
        method, poly_basis, poly_degree, trig_basis, trig_freq,
        product_terms, lasso_alpha
    ])
    cs.add_conditions(conditions)
    return cs
def get_configspace():
    """Search space over 'C' and 'gamma'.

    NOTE(review): the ranges include negative values with log=False, so
    these look like base-10 exponents converted via 10**x by the worker —
    confirm against the caller.

    Fix: default_value must be numeric for float hyperparameters; the
    originals passed the strings '0.7' and '-2'.

    :return: CS.ConfigurationSpace with 'C' and 'gamma'.
    """
    cs = CS.ConfigurationSpace()
    C = CSH.UniformFloatHyperparameter('C', lower=-3, upper=2,
                                       default_value=0.7, log=False)
    gamma = CSH.UniformFloatHyperparameter('gamma', lower=-4, upper=-1,
                                           default_value=-2, log=False)
    cs.add_hyperparameters([C, gamma])
    return cs
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
    """Combined search space over all available preprocessors: a top-level
    categorical selector plus one conditional sub-space per choice.

    :param dataset_info: forwarded to each preprocessor's own search space.
    :param pipeline_config: pipeline configuration overrides.
    :return: ConfigSpace.ConfigurationSpace
    """
    pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
    cs = ConfigSpace.ConfigurationSpace()
    # Only offer preprocessors that are both configured and registered.
    possible_preprocessors = set(pipeline_config["preprocessors"]).intersection(self.preprocessors.keys())
    selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("preprocessor", possible_preprocessors))
    for preprocessor_name, preprocessor_type in self.preprocessors.items():
        if (preprocessor_name not in possible_preprocessors):
            continue
        # Each preprocessor contributes its own sub-space, active only when
        # the selector picks it (via parent_hyperparameter).
        preprocessor_cs = preprocessor_type.get_hyperparameter_search_space(
            dataset_info=dataset_info,
            **self._get_search_space_updates(prefix=preprocessor_name))
        cs.add_configuration_space(
            prefix=preprocessor_name,
            configuration_space=preprocessor_cs,
            delimiter=ConfigWrapper.delimiter,
            parent_hyperparameter={'parent': selector,
                                   'value': preprocessor_name})
    # Validate that all user-supplied search-space updates were consumed.
    self._check_search_space_updates((possible_preprocessors, "*"))
    return cs
def get_configspace(self):
    """Hyperparameter search space for the PPO agent.

    Returns:
        CS.ConfigurationSpace covering update schedule, gamma, learning
        rate, loss coefficients, clipping, network architecture and
        exploration noise (all names prefixed with 'ppo_').
    """
    cs = CS.ConfigurationSpace()
    cs.add_hyperparameters([
        CSH.UniformIntegerHyperparameter(name='ppo_update_episodes', lower=1, upper=100, log=True, default_value=20),
        CSH.UniformIntegerHyperparameter(name='ppo_ppo_epochs', lower=20, upper=200, log=True, default_value=100),
        CSH.UniformFloatHyperparameter(name='ppo_gamma', lower=0.001, upper=0.1, log=True, default_value=0.01),
        CSH.UniformFloatHyperparameter(name='ppo_lr', lower=1e-4, upper=1e-2, log=True, default_value=3e-4),
        CSH.UniformFloatHyperparameter(name='ppo_vf_coef', lower=0.1, upper=2, log=True, default_value=0.5),
        CSH.UniformFloatHyperparameter(name='ppo_ent_coef', lower=0.002, upper=0.05, log=True, default_value=0.01),
        CSH.UniformFloatHyperparameter(name='ppo_eps_clip', lower=0.05, upper=1, log=True, default_value=0.2),
        CSH.CategoricalHyperparameter(name='ppo_activation_fn', choices=['relu', 'tanh', 'leakyrelu', 'prelu'], default_value='relu'),
        CSH.UniformIntegerHyperparameter(name='ppo_hidden_size', lower=48, upper=192, log=True, default_value=128),
        CSH.UniformIntegerHyperparameter(name='ppo_hidden_layer', lower=1, upper=2, log=False, default_value=2),
        CSH.UniformFloatHyperparameter(name='ppo_action_std', lower=0.1, upper=2, log=True, default_value=0.5),
        CSH.UniformFloatHyperparameter(name='ppo_early_out_virtual_diff', lower=1e-2, upper=1e-1, log=True, default_value=3e-2),
    ])
    return cs
def get_config_space(*args, **kwargs):
    """Search space for the StepLR scheduler: step_size and gamma, plus the
    hyperparameters inherited from the scheduler base class."""
    cs = CS.ConfigurationSpace()
    bounds = CSConfig['step_lr']
    step_size = CSH.UniformIntegerHyperparameter(
        'step_size', lower=bounds['step_size'][0],
        upper=bounds['step_size'][1])
    gamma = CSH.UniformFloatHyperparameter(
        'gamma', lower=bounds['gamma'][0], upper=bounds['gamma'][1])
    cs.add_hyperparameter(step_size)
    cs.add_hyperparameter(gamma)
    # Merge in the base scheduler's options without any prefix.
    base_cs = AutoNetLearningRateSchedulerBase.get_config_space(
        *args, **kwargs)
    cs.add_configuration_space(prefix='', delimiter='',
                               configuration_space=base_cs)
    return cs
def test_to_ndarray_name_last_pos():
    # Verifies that HyperparameterRanges_CS.to_ndarray agrees with the
    # reference HyperparameterRanges_Impl encoding when a chosen
    # hyperparameter is forced into the last ndarray position.
    np.random.seed(123456)
    random_state = np.random.RandomState(123456)
    config_space = CS.ConfigurationSpace()
    config_space.add_hyperparameters([
        CSH.UniformFloatHyperparameter('a', lower=0., upper=1.),
        CSH.UniformIntegerHyperparameter('b', lower=2, upper=3),
        CSH.CategoricalHyperparameter('c', choices=('1', '2', '3')),
        CSH.UniformIntegerHyperparameter('d', lower=2, upper=3),
        CSH.CategoricalHyperparameter('e', choices=('1', '2'))
    ])
    # Reference ranges mirroring the ConfigSpace definitions above.
    hp_a = HyperparameterRangeContinuous('a', lower_bound=0.,
                                         upper_bound=1.,
                                         scaling=LinearScaling())
    hp_b = HyperparameterRangeInteger('b', lower_bound=2, upper_bound=3,
                                      scaling=LinearScaling())
    hp_c = HyperparameterRangeCategorical('c', choices=('1', '2', '3'))
    hp_d = HyperparameterRangeInteger('d', lower_bound=2, upper_bound=3,
                                      scaling=LinearScaling())
    hp_e = HyperparameterRangeCategorical('e', choices=('1', '2'))
    for name_last_pos in ['a', 'c', 'd', 'e']:
        hp_ranges_cs = HyperparameterRanges_CS(config_space,
                                               name_last_pos=name_last_pos)
        # Build the reference ordering with the selected name moved last.
        if name_last_pos == 'a':
            lst = [hp_b, hp_c, hp_d, hp_e, hp_a]
        elif name_last_pos == 'c':
            lst = [hp_a, hp_b, hp_d, hp_e, hp_c]
        elif name_last_pos == 'd':
            lst = [hp_a, hp_b, hp_c, hp_e, hp_d]
        else:
            lst = [hp_a, hp_b, hp_c, hp_d, hp_e]
        hp_ranges = HyperparameterRanges_Impl(*lst)
        names = [hp.name for hp in hp_ranges.hp_ranges]
        config_cs = hp_ranges_cs.random_candidate(random_state)
        _config = config_cs.get_dictionary()
        # Reorder the sampled values to match the reference ordering.
        config = (_config[name] for name in names)
        ndarr_cs = hp_ranges_cs.to_ndarray(config_cs)
        ndarr = hp_ranges.to_ndarray(config)
        assert_allclose(ndarr_cs, ndarr, rtol=1e-4)
def _convert_hyper_parameters_to_cs(self):
    # type: () -> CS.ConfigurationSpace
    """Translate this optimizer's hyper-parameter definitions into a
    seeded CS.ConfigurationSpace.

    :raises ValueError: for parameter types not supported by OptimizerBOHB.
    """
    cs = CS.ConfigurationSpace(seed=self._seed)
    for param in self._hyper_parameters:
        # Order matters: check the most specific supported types first.
        if isinstance(param, UniformParameterRange):
            converted = CSH.UniformFloatHyperparameter(
                param.name, lower=param.min_value, upper=param.max_value,
                log=False, q=param.step_size)
        elif isinstance(param, UniformIntegerParameterRange):
            converted = CSH.UniformIntegerHyperparameter(
                param.name, lower=param.min_value, upper=param.max_value,
                log=False, q=param.step_size)
        elif isinstance(param, DiscreteParameterRange):
            converted = CSH.CategoricalHyperparameter(param.name,
                                                      choices=param.values)
        else:
            raise ValueError("HyperParameter type {} not supported yet with OptimizerBOHB".format(type(param)))
        cs.add_hyperparameter(converted)
    return cs
def get_configuration_space(self):
    """Search space: an op choice for each interior node and a parent
    selection for every non-first choice block."""
    cs = ConfigSpace.ConfigurationSpace()
    node_ids = list(self.num_parents_per_node.keys())
    # Interior nodes only — first and last entries are skipped
    # (presumably the input and output nodes; confirm with caller).
    for node in node_ids[1:-1]:
        op_hp = ConfigSpace.CategoricalHyperparameter(
            "choice_block_{}_op".format(node),
            [CONV1X1, CONV3X3, MAXPOOL3X3])
        cs.add_hyperparameter(op_hp)
    # Every block except the first selects its parent combination.
    for choice_block_index, num_parents in list(
            self.num_parents_per_node.items())[1:]:
        parent_hp = ConfigSpace.CategoricalHyperparameter(
            "choice_block_{}_parents".format(choice_block_index),
            parent_combinations(node=choice_block_index,
                                num_parents=num_parents))
        cs.add_hyperparameter(parent_hp)
    return cs
def create_ch():
    """Create the BOHB search space for the network worker.

    Fix: hidden_layer1/hidden_layer2 are layer widths (unit counts) and
    must be integers; they were previously sampled as floats.

    :return: CS.ConfigurationSpace with lr, momentum, drop_out and the
        two hidden-layer sizes.
    """
    import ConfigSpace as CS  # BOHB uses ConfigSpace for their hyperparameter search space
    config_space = CS.ConfigurationSpace()
    config_space.add_hyperparameter(
        CS.UniformFloatHyperparameter("lr", lower=1e-10, upper=1, log=True))
    config_space.add_hyperparameter(
        CS.UniformFloatHyperparameter("momentum", lower=0.1, upper=0.9))
    config_space.add_hyperparameter(
        CS.UniformFloatHyperparameter("drop_out", lower=0.1, upper=0.7))
    # Layer sizes are discrete unit counts -> integer hyperparameters.
    config_space.add_hyperparameter(
        CS.UniformIntegerHyperparameter("hidden_layer1", lower=32, upper=700))
    config_space.add_hyperparameter(
        CS.UniformIntegerHyperparameter("hidden_layer2", lower=32, upper=256))
    return config_space
def get_configspace():
    """Define all the hyperparameters that need to be optimised and store
    them in the configuration space.

    Fixes: initial_lr's default_value was the string '1e-2'; a float
    hyperparameter requires a numeric default. The optimizer choices are
    also materialised into a list (a dict view is not a stable sequence).

    :return: CS.ConfigurationSpace with training and architecture options.
    """
    cs = CS.ConfigurationSpace()
    n_conv_layers = CSH.UniformIntegerHyperparameter('n_conv_layers',
                                                     lower=3, upper=6)
    initial_lr = CSH.UniformFloatHyperparameter('initial_lr', lower=1e-6,
                                                upper=1e-1,
                                                default_value=1e-2,
                                                log=True)
    optimizer = CSH.CategoricalHyperparameter(
        'optimizer', list(settings.opti_dict.keys()))
    batch_size = CSH.UniformIntegerHyperparameter('batch_size', lower=16,
                                                  upper=32,
                                                  default_value=24)
    cs.add_hyperparameters([initial_lr, optimizer, batch_size,
                            n_conv_layers])
    lr_scheduler = CSH.CategoricalHyperparameter('lr_scheduler',
                                                 ['Exponential', 'Cosine'])
    weight_decay = CSH.UniformFloatHyperparameter('weight_decay',
                                                  lower=1e-5, upper=1e-3,
                                                  default_value=3e-4,
                                                  log=True)
    drop_path_prob = CSH.UniformFloatHyperparameter('drop_path_prob',
                                                    lower=0, upper=0.4,
                                                    default_value=0.3,
                                                    log=False)
    grad_clip_value = CSH.UniformIntegerHyperparameter('grad_clip_value',
                                                       lower=4, upper=8,
                                                       default_value=5)
    cs.add_hyperparameters([lr_scheduler, drop_path_prob, weight_decay,
                            grad_clip_value])
    return cs
def reformat_for_tuning(self):
    """Convert self.space from a dict of CSH objects into a proper
    CS.ConfigurationSpace accepted by HpBandSter (stored back on
    self.space).

    :raises Exception: if the search space dict is still empty.
    """
    # Guard clause: nothing to convert yet.
    if len(self.space) == 0:
        raise Exception('Search space has not been modified yet, no tuning can be done.')
    cs = CS.ConfigurationSpace()
    hyperparameters = list(self.space.values())
    cs.add_hyperparameters(hyperparameters)
    self.space = cs
def get_config_space(*args, **kwargs):
    """Search space for the ReduceLROnPlateau scheduler: factor and
    patience, plus the hyperparameters inherited from the scheduler base
    class."""
    cs = CS.ConfigurationSpace()
    bounds = CSConfig['reduce_on_plateau']
    factor = CSH.UniformFloatHyperparameter(
        'factor', lower=bounds['factor'][0], upper=bounds['factor'][1])
    patience = CSH.UniformIntegerHyperparameter(
        'patience', lower=bounds['patience'][0],
        upper=bounds['patience'][1])
    cs.add_hyperparameter(factor)
    cs.add_hyperparameter(patience)
    # Merge in the base scheduler's options without any prefix.
    base_cs = AutoNetLearningRateSchedulerBase.get_config_space(
        *args, **kwargs)
    cs.add_configuration_space(prefix='', delimiter='',
                               configuration_space=base_cs)
    return cs
def get_config_space(*args, **kwargs):
    """Search space for the cosine-annealing scheduler: T_max and T_mult,
    plus the hyperparameters inherited from the scheduler base class."""
    cs = CS.ConfigurationSpace()
    bounds = CSConfig['cosine_annealing_lr']
    t_max = CSH.UniformIntegerHyperparameter(
        'T_max', lower=bounds['T_max'][0], upper=bounds['T_max'][1])
    t_mult = CSH.UniformFloatHyperparameter(
        'T_mult', lower=bounds['T_mult'][0], upper=bounds['T_mult'][1])
    cs.add_hyperparameter(t_max)
    cs.add_hyperparameter(t_mult)
    # Merge in the base scheduler's options without any prefix.
    base_cs = AutoNetLearningRateSchedulerBase.get_config_space(
        *args, **kwargs)
    cs.add_configuration_space(prefix='', delimiter='',
                               configuration_space=base_cs)
    return cs
def get_xsum_space():
    """Decoding search space for XSum: beam size, n-gram blocking, length
    penalty and min/max length limits (seeded for reproducibility)."""
    cs = CS.ConfigurationSpace(seed=1)
    cs.add_hyperparameters([
        CSH.CategoricalHyperparameter('beam', choices=[4, 5]),
        CSH.CategoricalHyperparameter('no_repeat_ngram_size',
                                      choices=[3, 4]),
        CSH.UniformFloatHyperparameter('lenpen', lower=0.5, upper=2.0,
                                       q=0.1),
        CSH.UniformIntegerHyperparameter('min_len', lower=5, upper=40),
        CSH.UniformIntegerHyperparameter('max_len_b', lower=0, upper=50,
                                         q=5),
    ])
    return cs
def get_config_space(seed=None):
    """Configuration space for the cutout augmentation parameters.

    Fix: the original used a list comprehension purely for its side
    effects to add hyperparameters; use a single batched call instead.

    :param seed: optional seed forwarded to the ConfigurationSpace.
    :return: CS.ConfigurationSpace with 'length' and 'n_holes'.
    """
    # XXX: Change lower upper and default
    cs = CS.ConfigurationSpace(seed)
    cs.add_hyperparameters([
        CS.UniformIntegerHyperparameter("length", lower=1, upper=64,
                                        default_value=2),
        CS.UniformIntegerHyperparameter("n_holes", lower=1, upper=32,
                                        default_value=16),
    ])
    return cs
def get_configuration_space(self):
    """Search space for the MLP model: nonlinearity, depth, per-layer
    widths (conditionally active on depth) and learning rate."""
    cs = CS.ConfigurationSpace()
    nonlintype = CSH.CategoricalHyperparameter(
        "nonlintype", choices=["relu", "tanh", "sigmoid", "selu"],
        default_value="relu")
    n_hidden_layers = CSH.CategoricalHyperparameter(
        "n_hidden_layers", choices=["1", "2", "3", "4"],
        default_value="2")
    # One width hyperparameter per possible layer (1..4).
    hidden_sizes = [
        CSH.UniformIntegerHyperparameter("hidden_size_%d" % idx,
                                         lower=16, upper=256,
                                         default_value=128)
        for idx in range(1, 5)
    ]
    # hidden_size_k is only active when at least k layers are configured.
    conditions = [
        CSC.InCondition(child=hidden_sizes[1], parent=n_hidden_layers,
                        values=["2", "3", "4"]),
        CSC.InCondition(child=hidden_sizes[2], parent=n_hidden_layers,
                        values=["3", "4"]),
        CSC.InCondition(child=hidden_sizes[3], parent=n_hidden_layers,
                        values=["4"]),
    ]
    lr = CSH.UniformFloatHyperparameter("lr", lower=1e-5, upper=1,
                                        default_value=1e-3, log=True)
    cs.add_hyperparameters([nonlintype, n_hidden_layers] + hidden_sizes
                           + [lr])
    cs.add_conditions(conditions)
    return cs
def get_hyperparameter_search_space(seed=None):
    """Neural Network search space based on a best effort using the
    scikit-learn implementation. Note that for state of the art
    performance, other packages could be preferred.

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace('ResNet18_classifier', seed)
    learning_rate_init = ConfigSpace.UniformFloatHyperparameter(
        name='learning_rate_init', lower=1e-6, upper=1, log=True,
        default_value=1e-1)
    epochs = ConfigSpace.UniformIntegerHyperparameter(
        name='epochs', lower=1, upper=200, default_value=150)
    batch_size = ConfigSpace.CategoricalHyperparameter(
        name='batch_size', choices=[32, 64, 128, 256, 512],
        default_value=128)
    momentum = ConfigSpace.UniformFloatHyperparameter(
        name='momentum', lower=0, upper=1, default_value=0.9)
    weight_decay = ConfigSpace.UniformFloatHyperparameter(
        name='weight_decay', lower=1e-6, upper=1e-2, log=True,
        default_value=5e-4)
    cs.add_hyperparameters([batch_size, learning_rate_init, epochs,
                            momentum, weight_decay])
    return cs
def get_configspace():
    """Classifier search space: n_estimators, criterion, max_depth and
    min_sample_split.

    :return: ConfigurationsSpace-Object
    """
    cs = CS.ConfigurationSpace()
    n_estimators = CSH.UniformIntegerHyperparameter(
        'n_estimators', lower=1, upper=500, default_value=100, log=True)
    criterion = CSH.CategoricalHyperparameter('criterion',
                                              ['gini', 'entropy'])
    max_depth = CSH.UniformIntegerHyperparameter(
        'max_depth', lower=100, upper=1000, default_value=None, log=True)
    min_sample_split = CSH.UniformIntegerHyperparameter(
        'min_sample_split', lower=2, upper=30, default_value=2, log=True)
    # NOTE(review): min_sample_leaf, max_features, max_leaf_nodes,
    # min_impur_dist and num_pca were explored previously and are
    # currently disabled.
    cs.add_hyperparameters([n_estimators, criterion, max_depth,
                            min_sample_split])
    return cs
def get_configspace():
    """Build the configuration space with the needed hyperparameters:
    optimizer learning rate plus model history length, layer count and
    channel count.

    :return: ConfigurationsSpace-Object
    """
    cs = CS.ConfigurationSpace()
    lr = CSH.UniformFloatHyperparameter(
        'optimizer:lr', lower=0.001, upper=0.1, default_value=0.04,
        log=True)
    history = CSH.UniformIntegerHyperparameter(
        'model:history', lower=1, upper=12, default_value=12)
    n_layers = CSH.UniformIntegerHyperparameter(
        'model:n_layers', lower=2, upper=8, default_value=3)
    n_channels = CSH.OrdinalHyperparameter(
        'model:n_channels', sequence=[2, 4, 8, 16, 32, 64],
        default_value=8)
    # NOTE(review): the ignite_random minibatch hyperparameters were
    # explored previously and are currently disabled.
    cs.add_hyperparameters([lr, history, n_layers, n_channels])
    return cs
def get_configuration_space():
    """Search space: one op choice per node plus one binary flag per
    upper-triangular adjacency-matrix entry."""
    cs = ConfigSpace.ConfigurationSpace()
    ops_choices = ['conv1x1-bn-relu', 'conv3x3-bn-relu', 'maxpool3x3']
    # Operation choice for each of the five nodes.
    for node_idx in range(5):
        cs.add_hyperparameter(
            ConfigSpace.CategoricalHyperparameter("op_node_%d" % node_idx,
                                                  ops_choices))
    # One on/off flag per possible edge.
    for edge_idx in range(VERTICES * (VERTICES - 1) // 2):
        cs.add_hyperparameter(
            ConfigSpace.CategoricalHyperparameter("edge_%d" % edge_idx,
                                                  [0, 1]))
    return cs
def setUp(self):
    """Create a seeded one-dimensional integer configuration space and
    sample train/test design matrices from it."""
    self.configspace = CS.ConfigurationSpace(43)
    self.configspace.add_hyperparameters(
        [CS.UniformIntegerHyperparameter('int1', lower=-2, upper=2)])
    train_confs = [self.configspace.sample_configuration()
                   for _ in range(self.n_train)]
    self.x_train = np.array(
        [conf.get_array() for conf in train_confs]).squeeze()
    test_confs = [self.configspace.sample_configuration()
                  for _ in range(self.n_test)]
    self.x_test = np.array(
        [conf.get_array() for conf in test_confs]).squeeze()
def get_range_creation_config(op_name, dtypes):
    """Sample a random creation config for range-like ops: start/stop from
    the shared range space, a random dtype, plus extra numeric options for
    'linspace'."""
    # Sample start/interval, then turn the interval into an absolute stop.
    config_dict = random_range_cs.sample_configuration().get_dictionary()
    config_dict['stop'] = config_dict['start'] + config_dict.pop('interval')
    # Pick a dtype uniformly from the allowed choices.
    dtype_space = cs.ConfigurationSpace()
    dtype_space.add_hyperparameter(
        csh.CategoricalHyperparameter('dtype', choices=dtypes))
    dtype_config = dtype_space.sample_configuration()
    config_dict['dtype'] = dtype_config.get('dtype')
    # linspace additionally takes the shared numeric options.
    if op_name == 'linspace':
        num_config = random_num_cs.sample_configuration()
        config_dict.update(num_config.get_dictionary())
    return config_dict
def get_warmstart_configspace():
    """Warm-start space: every hyperparameter pinned to a constant so the
    optimizer starts from one known configuration."""
    ws_cs = CS.ConfigurationSpace()
    constants = [
        CSH.Constant('n_estimators', value=100),
        CSH.Constant('max_depth', value=40),
        CSH.Constant('min_samples_leaf', value=30),
        CSH.Constant('min_samples_split', value=20),
        CSH.Constant('max_features', value='auto'),
    ]
    ws_cs.add_hyperparameters(constants)
    return ws_cs
def get_config_space():
    """Search space with weight decay and cutout probability.

    NOTE(review): a learning_rate hyperparameter was explored previously
    and is currently disabled.
    """
    config_space = CS.ConfigurationSpace()
    weight_decay = CSH.UniformFloatHyperparameter(
        'weight_decay', lower=1e-5, upper=1e-2, log=False)
    cutout_prob = CSH.UniformFloatHyperparameter(
        'cutout_prob', lower=0, upper=1, log=False)
    config_space.add_hyperparameter(weight_decay)
    config_space.add_hyperparameter(cutout_prob)
    return config_space