def get_hyperparameter_search_space():
    """Build the search space for a kernel-based feature transform.

    Returns a ConfigurationSpace in which the kernel-specific parameters
    are conditional: `degree` only for 'poly', `coef0` for 'poly'/'sigmoid',
    and `gamma` for 'poly'/'rbf'/'sigmoid'.
    """
    kernel = CSH.CategoricalHyperparameter(
        'kernel', ['poly', 'rbf', 'sigmoid', 'cosine'], 'rbf')
    n_components = CSH.UniformIntegerHyperparameter(
        "n_components", 50, 10000, default_value=100, log=True)
    gamma = CSH.UniformFloatHyperparameter(
        "gamma", 3.0517578125e-05, 8, log=True, default_value=0.1)
    degree = CSH.UniformIntegerHyperparameter('degree', 2, 5, 3)
    coef0 = CSH.UniformFloatHyperparameter("coef0", -1, 1, default_value=0)

    cs = ConfigSpace.ConfigurationSpace()
    cs.add_hyperparameters([kernel, degree, gamma, coef0, n_components])

    # Activate each kernel-specific parameter only for kernels that use it.
    cs.add_conditions([
        CSC.EqualsCondition(degree, kernel, "poly"),
        CSC.InCondition(coef0, kernel, ["poly", "sigmoid"]),
        CSC.InCondition(gamma, kernel, ["poly", "rbf", "sigmoid"]),
    ])
    return cs
def __init__(self, *args, network, width, batch_size, **kwargs):
    """Store worker settings and declare default hyperparameter search ranges.

    Parameters
    ----------
    network : keyword-only; accepted but not used in this body.
        NOTE(review): `network` is neither stored on `self` nor forwarded to
        `super().__init__` (it is captured by the explicit keyword, so it is
        not part of **kwargs) — confirm whether `self.network = network`
        is missing, or whether dropping it is intentional.
    width : stored verbatim on the instance.
    batch_size : stored verbatim on the instance.
    """
    super().__init__(*args, **kwargs)
    self.width = width
    self.batch_size = batch_size
    # setup default hyper-parameter search ranges
    # Log-scale ranges for lr / weight_decay / p_drop; linear for momentum.
    self.lr = CSH.UniformFloatHyperparameter('lr', lower=1e-4, upper=1,
                                             default_value=1e-2, log=True)
    self.momentum = CSH.UniformFloatHyperparameter('momentum', lower=0.0,
                                                   upper=0.99,
                                                   default_value=0.5,
                                                   log=False)
    self.n_layers = CSH.UniformIntegerHyperparameter('n_layers', lower=1,
                                                     upper=40,
                                                     default_value=2)
    self.weight_decay = CSH.UniformFloatHyperparameter('weight_decay',
                                                       lower=1e-6,
                                                       upper=1e-1,
                                                       default_value=5e-4,
                                                       log=True)
    self.p_drop = CSH.UniformFloatHyperparameter('p_drop', lower=0.005,
                                                 upper=0.5,
                                                 default_value=0.2, log=True)
def hyper_catboost_bohb():
    """Return the CatBoost hyperparameter search space used by BOHB."""
    hyperparameters = (
        CSH.UniformIntegerHyperparameter('max_depth', lower=1, upper=16,
                                         log=False),
        CSH.UniformFloatHyperparameter('learning_rate', lower=0.001, upper=1,
                                       log=True),
        CSH.UniformFloatHyperparameter('l2_leaf_reg', lower=1, upper=50,
                                       log=False),
        CSH.UniformFloatHyperparameter('bagging_temperature', lower=1,
                                       upper=50, log=False),
        CSH.UniformFloatHyperparameter('random_strength', lower=1, upper=50,
                                       log=False),
    )
    space = CS.ConfigurationSpace()
    for hp in hyperparameters:
        space.add_hyperparameter(hp)
    return space
def get_configuration_space(self):
    """Build a space with one log-scale gain per observation (Q and F
    weights) and one per control (R weight), skipping observations whose
    goal entry is NaN."""
    cs = CS.ConfigurationSpace()

    def _gain(name):
        # All gains share the same log-uniform range.
        return CSH.UniformFloatHyperparameter(
            name, lower=1e-3, upper=1e4, default_value=1.0, log=True)

    # Q gains first, then F gains — same addition order as before.
    for suffix in ("Q", "F"):
        for idx, obsname in enumerate(self.system.observations):
            if self.goal is not None and np.isnan(self.goal[idx]):
                continue
            cs.add_hyperparameter(_gain("{}_{}".format(obsname, suffix)))
    for ctrlname in self.system.controls:
        cs.add_hyperparameter(_gain("{}_R".format(ctrlname)))
    return cs
def get_configspace():
    """Build the CNN training search space.

    Returns
    -------
    CS.ConfigurationSpace
        Space over learning rate, batch size, number of filters and
        filter size.
    """
    config_space = CS.ConfigurationSpace()
    # Fixed: default_value was the string '1e-1'; pass a float literal so
    # the default does not rely on implicit casting by ConfigSpace.
    learning_rate = CSH.UniformFloatHyperparameter(
        'learning_rate', lower=1e-4, upper=1e-1, default_value=1e-1,
        log=True)
    # Fixed: batch_size is an integer quantity. It was declared as a
    # UniformFloatHyperparameter, so sampling could yield fractional
    # batch sizes (e.g. 37.4), which are invalid for training.
    batch_size = CSH.UniformIntegerHyperparameter(
        'batch_size', lower=16, upper=128, default_value=32, log=True)
    num_filters = CSH.UniformIntegerHyperparameter(
        'num_filters', lower=8, upper=64, default_value=32, log=True)
    filter_size = CSH.CategoricalHyperparameter('filter_size', [3, 5])
    config_space.add_hyperparameters(
        [learning_rate, batch_size, num_filters, filter_size])
    return config_space
def hyper_xgboost_bohb():
    """Return the XGBoost hyperparameter search space used by BOHB."""
    space = CS.ConfigurationSpace()
    for hp in (
        CSH.UniformFloatHyperparameter('eta', lower=1e-5, upper=1, log=True),
        CSH.UniformFloatHyperparameter('subsample', lower=0.1, upper=1,
                                       log=False),
        CSH.UniformIntegerHyperparameter('max_depth', lower=1, upper=100,
                                         log=False),
        CSH.UniformFloatHyperparameter('gamma', lower=0.001, upper=2,
                                       log=False),
        CSH.UniformFloatHyperparameter('min_child_weight', lower=1, upper=70,
                                       log=False),
    ):
        space.add_hyperparameter(hp)
    return space
def get_configspace():
    """MLP search space: learning rate, dropout, depth and per-layer widths."""
    cs = CS.ConfigurationSpace()
    # HYPERPARAMETERS
    lr = CSH.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1,
                                        log=True)
    num_hidden_layers = CSH.UniformIntegerHyperparameter(
        'num_hidden_layers', lower=1, upper=4, log=False)
    width_1stlayer = CSH.UniformIntegerHyperparameter(
        'width_1stlayer', lower=8, upper=512, log=False)
    # Hidden layers 1-4 all share the same width range.
    hidden_widths = [
        CSH.UniformIntegerHyperparameter('width_hidlayer%d' % i, lower=8,
                                         upper=256, log=False)
        for i in range(1, 5)
    ]
    dropout_rate = CSH.UniformFloatHyperparameter(
        'dropout_rate', lower=0.0, upper=0.9, default_value=0.5, log=False)
    cs.add_hyperparameters(
        [lr, dropout_rate, num_hidden_layers, width_1stlayer] + hidden_widths)
    return cs
def get_config_space():
    """GRU model search space: layer count/size, two dense widths, lr and
    momentum."""
    space = CS.ConfigurationSpace()
    # Integer architecture parameters, all sampled on a log scale.
    for name, low, high in (('gru_layers', 1, 3),
                            ('gru_size', 16, 128),
                            ('hddn1', 32, 512),
                            ('hddn2', 32, 512)):
        space.add_hyperparameter(
            CSH.UniformIntegerHyperparameter(name, lower=low, upper=high,
                                             log=True))
    space.add_hyperparameter(
        CSH.UniformFloatHyperparameter('lr', lower=1e-8, upper=1e-6,
                                       log=True))
    space.add_hyperparameter(
        CSH.UniformFloatHyperparameter('momentum', lower=1e-5, upper=0.99))
    return space
def vary_hp(config):
    """Return a deep copy of `config` with freshly sampled Q-learning
    alpha and gamma (defaults taken from the incoming config)."""
    ql_settings = config['agents']['ql']
    cs = CS.ConfigurationSpace()
    cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter(name='alpha', lower=0.1, upper=1,
                                       log=False,
                                       default_value=ql_settings['alpha']))
    cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter(name='gamma', lower=0.1, upper=1,
                                       log=False,
                                       default_value=ql_settings['gamma']))
    sample = cs.sample_configuration()
    print(f"sampled part of config: "
          f"alpha: {sample['alpha']}, "
          f"gamma: {sample['gamma']}, ")
    # Never mutate the caller's config — work on a deep copy.
    modified = deepcopy(config)
    modified['agents']['ql']['alpha'] = sample['alpha']
    modified['agents']['ql']['gamma'] = sample['gamma']
    return modified
def get_configspace(self):
    """Search space: lr, beta, eta (floats) and feature_dim, hidden_size
    (integers), all on a log scale."""
    space = CS.ConfigurationSpace()
    for name, low, high, default in (('lr', 1e-6, 1e-3, 1e-4),
                                     ('beta', 0.001, 1.0, 0.2),
                                     ('eta', 0.001, 1.0, 0.5)):
        space.add_hyperparameter(
            CSH.UniformFloatHyperparameter(name=name, lower=low, upper=high,
                                           log=True, default_value=default))
    for name, low, high, default in (('feature_dim', 16, 256, 64),
                                     ('hidden_size', 16, 256, 128)):
        space.add_hyperparameter(
            CSH.UniformIntegerHyperparameter(name=name, lower=low, upper=high,
                                             log=True, default_value=default))
    return space
def get_configspace():
    """Loss-weighting search space: sl_weight, two exponents, and two
    boolean flags."""
    space = CS.ConfigurationSpace()
    space.add_hyperparameter(
        CSH.UniformFloatHyperparameter(name='sl_weight', lower=1,
                                       upper=100000, log=True,
                                       default_value=100))
    # Both exponent parameters share the same log-uniform range.
    for exp_name in ('sl_exp', 'pl_exp'):
        space.add_hyperparameter(
            CSH.UniformFloatHyperparameter(name=exp_name, lower=0.5, upper=2,
                                           log=True, default_value=1))
    # Boolean switches modelled as categorical hyperparameters.
    for flag_name in ('forward_flow', 'weighted_sl_loss'):
        space.add_hyperparameter(
            CSH.CategoricalHyperparameter(name=flag_name,
                                          choices=[False, True],
                                          default_value=False))
    return space
def hyper_lightgbm_bohb():
    """Return the LightGBM hyperparameter search space used by BOHB."""
    space = CS.ConfigurationSpace()
    for hp in (
        CSH.UniformIntegerHyperparameter('max_depth', lower=1, upper=100,
                                         log=False),
        CSH.UniformIntegerHyperparameter('min_data_in_leaf', lower=1,
                                         upper=100, log=False),
        CSH.UniformIntegerHyperparameter('num_leaves', lower=1, upper=100,
                                         log=False),
        CSH.UniformFloatHyperparameter('learning_rate', lower=1e-5, upper=1,
                                       log=True),
        CSH.UniformFloatHyperparameter('bagging_fraction', lower=0.1,
                                       upper=1, log=False),
    ):
        space.add_hyperparameter(hp)
    return space
def cs_gb():
    """Build two GradientBoosting-classifier configuration spaces.

    Returns
    -------
    (cs1, cs2) : cs1 includes the 'imputation' hyperparameter, cs2 is the
        same space without it.
    """
    cs1 = ConfigSpace.ConfigurationSpace()
    cs2 = ConfigSpace.ConfigurationSpace()

    criterion = csh.CategoricalHyperparameter('criterion',
                                              choices=['0', '1'])
    imputation = csh.CategoricalHyperparameter('imputation',
                                               choices=['0', '1', '2'])
    max_depth = csh.UniformIntegerHyperparameter('max_depth', lower=1,
                                                 upper=10, log=False)
    learning_rate = csh.UniformFloatHyperparameter('learning_rate',
                                                   lower=0.01, upper=1,
                                                   log=True)
    n_estimators = csh.UniformIntegerHyperparameter('n_estimators', lower=50,
                                                    upper=500, log=False)
    max_features = csh.UniformFloatHyperparameter('max_features', lower=0.1,
                                                  upper=0.9, log=False)
    min_samples_leaf = csh.UniformIntegerHyperparameter('min_samples_leaf',
                                                        lower=1, upper=20,
                                                        log=False)
    min_samples_split = csh.UniformIntegerHyperparameter('min_samples_split',
                                                         lower=2, upper=20,
                                                         log=False)

    common = [max_depth, learning_rate, n_estimators, max_features,
              min_samples_leaf, min_samples_split]
    # imputation case
    cs1.add_hyperparameters([criterion, imputation] + common)
    # no imputation case
    cs2.add_hyperparameters([criterion] + common)
    return cs1, cs2
def get_space():
    """Build the hyperparameter search space for the optimization.

    All parameters to tune for the given model are defined here.

    Returns
    -------
    ConfigSpace object containing the search space
    """
    space = CS.ConfigurationSpace()
    timesteps_per_batch = CSH.CategoricalHyperparameter(
        'timesteps_per_batch', [512, 1024, 2048, 4096, 8192])
    vf_stepsize = CSH.UniformFloatHyperparameter(
        'vf_stepsize', lower=2**-5, upper=2**-2, log=True)
    max_kl = CSH.UniformFloatHyperparameter(
        'max_kl', lower=2**-2.5, upper=2**-0.5, log=True)
    # gamma and lam share the same bounds, derived from the effective-horizon
    # transform 1 - 1/(4 * 10**x) for x in [-1, 1.5].
    horizon_low = 1 - (1 / ((10**(-1)) * 4))
    horizon_high = 1 - (1 / ((10**(1.5)) * 4))
    gamma = CSH.UniformFloatHyperparameter('gamma', lower=horizon_low,
                                           upper=horizon_high)
    lam = CSH.UniformFloatHyperparameter('lam', lower=horizon_low,
                                         upper=horizon_high)
    space.add_hyperparameters(
        [timesteps_per_batch, vf_stepsize, max_kl, gamma, lam])
    return space
def get_configspace():
    """Search space over network topology and SGD training parameters.

    Returns
    -------
    CS.ConfigurationSpace
        Space with topology index, lr, momentum, weight decay and
        batch size.
    """
    cs = CS.ConfigurationSpace()
    topology = CSH.CategoricalHyperparameter(
        'topology', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
    # Fixed: default_value was the string '1e-1'; pass a float literal so
    # the default does not rely on implicit casting by ConfigSpace.
    lr = CSH.UniformFloatHyperparameter('lr', lower=1e-4, upper=1,
                                        default_value=1e-1, log=True)
    momentum = CSH.UniformFloatHyperparameter('momentum', lower=0.5, upper=1,
                                              default_value=0.9, log=False)
    weight_decay = CSH.UniformFloatHyperparameter('weight_decay',
                                                  lower=0.00001,
                                                  upper=0.00005,
                                                  default_value=0.00001,
                                                  log=False)
    batch_size = CSH.CategoricalHyperparameter('batch_size',
                                               [1, 2, 4, 8, 16],
                                               default_value=8)
    cs.add_hyperparameters(
        [topology, lr, momentum, weight_decay, batch_size])
    return cs
def get_configspace(self):
    """Q-learning search space: alpha, gamma and the epsilon schedule,
    all log-uniform floats."""
    space = CS.ConfigurationSpace()
    specs = (
        ('ql_alpha', 0.001, 1, 0.1),
        ('ql_gamma', 0.001, 1, 0.1),
        ('ql_eps_init', 0.01, 1, 0.01),
        ('ql_eps_min', 0.01, 1, 0.01),
        ('ql_eps_decay', 0.001, 1, 0.01),
    )
    for name, low, high, default in specs:
        space.add_hyperparameter(
            CSH.UniformFloatHyperparameter(name=name, lower=low, upper=high,
                                           log=True, default_value=default))
    return space
def cs_svm(per_kernel=False):
    """Build two SVM configuration spaces.

    Returns
    -------
    (cs1, cs2) : cs1 includes the 'imputation' hyperparameter, cs2 does
        not. When per_kernel is True the 'kernel' hyperparameter is left
        out of both spaces.
    """
    cs1 = ConfigSpace.ConfigurationSpace()
    cs2 = ConfigSpace.ConfigurationSpace()

    shrinking = csh.CategoricalHyperparameter('shrinking',
                                              choices=['0', '1'])
    imputation = csh.CategoricalHyperparameter('imputation',
                                               choices=['0', '1', '2'])
    kernel = csh.CategoricalHyperparameter('kernel', choices=['0', '1'])
    c_value = csh.UniformFloatHyperparameter('C', lower=2**(-5),
                                             upper=2**15, log=True)
    coef0 = csh.UniformFloatHyperparameter('coef0', lower=-1, upper=1,
                                           log=False)
    gamma = csh.UniformFloatHyperparameter('gamma', lower=2**(-15),
                                           upper=2**3, log=True)
    tol = csh.UniformFloatHyperparameter('tol', lower=10**(-5),
                                         upper=10**(-1), log=True)

    numeric = [c_value, coef0, gamma, tol]
    if per_kernel:
        cs1.add_hyperparameters([shrinking, imputation] + numeric)
        cs2.add_hyperparameters([shrinking] + numeric)
    else:
        cs1.add_hyperparameters([shrinking, imputation, kernel] + numeric)
        cs2.add_hyperparameters([shrinking, kernel] + numeric)
    return cs1, cs2
def get_configspace():
    """Build the configuration space for the CNN worker.

    The space couples a learning rate, an optimizer choice (with a
    conditional SGD momentum), a variable number of conv layers with
    conditional per-layer filter counts, a dropout rate and the fully
    connected layer width.

    :return: ConfigurationSpace-Object
    """
    cs = CS.ConfigurationSpace()
    # Fixed: default_value was the string '1e-2'; pass a float literal so
    # the default does not rely on implicit casting by ConfigSpace.
    lr = CSH.UniformFloatHyperparameter(
        'lr', lower=1e-6, upper=1e-1, default_value=1e-2, log=True)
    # Optimizers as categorical hyperparameters; SGD has an extra
    # 'momentum' parameter handled by a condition below.
    optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])
    sgd_momentum = CSH.UniformFloatHyperparameter(
        'sgd_momentum', lower=0.0, upper=0.99, default_value=0.9, log=False)
    cs.add_hyperparameters([lr, optimizer, sgd_momentum])
    # sgd_momentum is only active when the configuration uses 'SGD'.
    cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
    cs.add_condition(cond)

    num_conv_layers = CSH.UniformIntegerHyperparameter(
        'num_conv_layers', lower=1, upper=3, default_value=2)
    num_filters_1 = CSH.UniformIntegerHyperparameter(
        'num_filters_1', lower=4, upper=64, default_value=16, log=True)
    num_filters_2 = CSH.UniformIntegerHyperparameter(
        'num_filters_2', lower=4, upper=64, default_value=16, log=True)
    num_filters_3 = CSH.UniformIntegerHyperparameter(
        'num_filters_3', lower=4, upper=64, default_value=16, log=True)
    cs.add_hyperparameters(
        [num_conv_layers, num_filters_1, num_filters_2, num_filters_3])
    # Inequality conditions: filter counts for layers 2 and 3 only apply
    # when enough conv layers are configured.
    cond = CS.GreaterThanCondition(num_filters_2, num_conv_layers, 1)
    cs.add_condition(cond)
    cond = CS.GreaterThanCondition(num_filters_3, num_conv_layers, 2)
    cs.add_condition(cond)

    dropout_rate = CSH.UniformFloatHyperparameter(
        'dropout_rate', lower=0.0, upper=0.9, default_value=0.5, log=False)
    num_fc_units = CSH.UniformIntegerHyperparameter(
        'num_fc_units', lower=8, upper=256, default_value=32, log=True)
    cs.add_hyperparameters([dropout_rate, num_fc_units])
    return cs
def get_configspace():
    """Build the fully connected network search space.

    :return: ConfigurationSpace-Object with lr, optimizer (plus a
        conditional sgd_momentum), two dropout rates, two FC layer widths
        and the activation function.
    """
    cs = CS.ConfigurationSpace()
    # num_pca = CSH.UniformIntegerHyperparameter('num_pca', lower=850, upper=930, default_value=900, log=True)
    # cs.add_hyperparameters([num_pca])
    # Fixed: default_value was the string '1e-2'; pass a float literal so
    # the default does not rely on implicit casting by ConfigSpace.
    lr = CSH.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1,
                                        default_value=1e-2, log=True)
    optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])
    sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum', lower=0.0,
                                                  upper=0.99,
                                                  default_value=0.9,
                                                  log=False)
    dropout_rate_1 = CSH.UniformFloatHyperparameter('dropout_rate_1',
                                                    lower=0.0, upper=0.5,
                                                    default_value=0.1,
                                                    log=False)
    dropout_rate_2 = CSH.UniformFloatHyperparameter('dropout_rate_2',
                                                    lower=0.0, upper=0.5,
                                                    default_value=0.1,
                                                    log=False)
    num_fc_units_1 = CSH.UniformIntegerHyperparameter('num_fc_units_1',
                                                      lower=512, upper=2048,
                                                      default_value=1024,
                                                      log=True)
    num_fc_units_2 = CSH.UniformIntegerHyperparameter('num_fc_units_2',
                                                      lower=256, upper=512,
                                                      default_value=256,
                                                      log=True)
    activation = CSH.CategoricalHyperparameter('activation',
                                               ['tanh', 'relu'])
    cs.add_hyperparameters([
        lr, optimizer, sgd_momentum, dropout_rate_1, dropout_rate_2,
        num_fc_units_1, num_fc_units_2, activation
    ])
    # The hyperparameter sgd_momentum is only used if the configuration
    # contains 'SGD' as optimizer.
    cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
    cs.add_condition(cond)
    return cs
def test_config_space_hp(self):
    """HpProblem should accept ConfigSpace hyperparameter objects as-is."""
    import ConfigSpace.hyperparameters as csh
    from deephyper.problem import HpProblem
    hyperparameters = [
        csh.UniformFloatHyperparameter(name=hp_name, lower=0, upper=1)
        for hp_name in ("alpha", "beta")
    ]
    problem = HpProblem()
    problem.add_hyperparameters(hyperparameters)
def get_configspace(seed=None):
    """Search space for a two-conv-layer CNN: per-layer kernel/channels/
    stride, an FC width, dropout, and training hyperparameters."""
    cs = CS.ConfigurationSpace(seed)
    # The two Conv layers use identical hyperparameter ranges.
    for layer in (1, 2):
        kernel = CSH.OrdinalHyperparameter("kernel_%d" % layer,
                                           sequence=[3, 5, 7],
                                           default_value=5)
        channels = CSH.UniformIntegerHyperparameter("channels_%d" % layer,
                                                    lower=3, upper=64,
                                                    default_value=32)
        stride = CSH.UniformIntegerHyperparameter("stride_%d" % layer,
                                                  lower=1, upper=2,
                                                  default_value=1)
        cs.add_hyperparameters([kernel, channels, stride])
    # Fully connected layer width.
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter("hidden", lower=32, upper=256,
                                         log=True, default_value=128))
    # Regularization.
    cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter("dropout", lower=0, upper=0.5,
                                       default_value=0.1))
    # Training hyperparameters.
    cs.add_hyperparameters([
        CSH.OrdinalHyperparameter("batch_size",
                                  sequence=[2, 4, 8, 16, 32, 64],
                                  default_value=4),
        CSH.UniformFloatHyperparameter("lr", lower=1e-6, upper=0.1,
                                       log=True, default_value=1e-3),
    ])
    return cs
def setUp(self) -> None:
    """Build a fixture ConfigurationSpace covering every hyperparameter
    flavor (float/int, with/without q and log, categorical, ordinal) and
    cache per-name type lookups for the tests."""
    self.config_space = CS.ConfigurationSpace()
    # Shared numeric bounds for all float/int hyperparameters below.
    lb, ub = 1, 100
    # Float variants: plain, quantized (q), quantized+log, log.
    self.config_space.add_hyperparameter(
        CSH.UniformFloatHyperparameter('f', lower=lb, upper=ub))
    self.config_space.add_hyperparameter(
        CSH.UniformFloatHyperparameter('fq', lower=lb, upper=ub, q=0.5))
    self.config_space.add_hyperparameter(
        CSH.UniformFloatHyperparameter('fql', lower=lb, upper=ub, q=0.5,
                                       log=True))
    self.config_space.add_hyperparameter(
        CSH.UniformFloatHyperparameter('fl', lower=lb, upper=ub, log=True))
    # Integer variants: plain and log.
    self.config_space.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('i', lower=lb, upper=ub))
    self.config_space.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('il', lower=lb, upper=ub,
                                         log=True))
    self.config_space.add_hyperparameter(
        CSH.CategoricalHyperparameter('c', choices=['x', 'y', 'z']))
    # Ordinals carry their numeric range in `meta` so tests can treat them
    # like bounded numeric parameters.
    self.config_space.add_hyperparameter(
        CSH.OrdinalHyperparameter('o', sequence=list(range(1, 101)),
                                  meta={
                                      'lower': 1,
                                      'upper': 100,
                                      'log': False
                                  }))
    self.config_space.add_hyperparameter(
        CSH.OrdinalHyperparameter('ol', sequence=[1, 10, 100],
                                  meta={
                                      'lower': 1,
                                      'upper': 100,
                                      'log': True
                                  }))
    self.hp_names = self.config_space.get_hyperparameter_names()
    # Class-name comparison (not isinstance) so subclasses do not match;
    # maps each hyperparameter name to whether it is categorical/ordinal.
    self.is_categoricals = {
        hp_name: self.config_space.get_hyperparameter(
            hp_name).__class__.__name__ == 'CategoricalHyperparameter'
        for hp_name in self.hp_names
    }
    self.is_ordinals = {
        hp_name: self.config_space.get_hyperparameter(
            hp_name).__class__.__name__ == 'OrdinalHyperparameter'
        for hp_name in self.hp_names
    }
def test_skoptsearcher():
    """End-to-end sanity test: SKopt-based searchers should beat random
    search on a toy reward landscape within a small budget."""
    logger.debug('Start testing SKoptSearcher')
    random.seed(1)
    # Create configuration space:
    cs = CS.ConfigurationSpace()
    a = CSH.UniformFloatHyperparameter('a', lower=1e-4, upper=1e-1, log=True)  # log-scale float
    b = CSH.UniformFloatHyperparameter('b', lower=-2, upper=0)  # float with uniform prior
    c = CSH.UniformIntegerHyperparameter('c', lower=0, upper=1000)  # integer
    d = CSH.CategoricalHyperparameter('d', choices=['good', 'neutral', 'bad'])  # categorical
    cs.add_hyperparameters([a, b, c, d])
    # Determine reward of optimal config (each value pinned to the bound
    # that maximizes toy_reward):
    optimal_config = cs.sample_configuration()
    optimal_config['a'] = 1e-1
    optimal_config['b'] = 0
    optimal_config['c'] = 1000
    optimal_config['d'] = 'good'
    optimal_reward = toy_reward(optimal_config)  # should ~= 7025.58
    # Compare skopt searchers VS random sampling searcher:
    num_configs_totry = 15
    skopt_searcher = SKoptSearcher(cs)  # skopt searcher with all default arguments
    skopt_config_list = [None] * num_configs_totry
    skopt_reward_list = [0.0] * num_configs_totry  # stores rewards scaled between 0-1
    # Also try skopt searcher which uses various kwargs (random forest
    # surrgoate model, expected improvement acquisition):
    skrf_searcher = SKoptSearcher(cs, base_estimator='RF', acq_func='EI')
    skrf_config_list = [None] * num_configs_totry
    skrf_reward_list = [0.0] * num_configs_totry  # stores rewards scaled between 0-1
    # Benchmark against random searcher:
    rs_searcher = RandomSearcher(cs)
    random_config_list = [None] * num_configs_totry
    random_reward_list = [0.0] * num_configs_totry
    # Run search: each searcher proposes a config, gets the (normalized)
    # reward, and is updated before its next proposal.
    for i in range(num_configs_totry):
        skopt_config = skopt_searcher.get_config()
        skopt_reward = toy_reward(skopt_config) / optimal_reward
        skopt_searcher.update(skopt_config, skopt_reward)
        skopt_config_list[i] = skopt_config
        skopt_reward_list[i] = skopt_reward
        skrf_config = skrf_searcher.get_config()
        skrf_reward = toy_reward(skrf_config) / optimal_reward
        skrf_searcher.update(skrf_config, skrf_reward)
        skrf_config_list[i] = skrf_config
        skrf_reward_list[i] = skrf_reward
        rs_config = rs_searcher.get_config()
        rs_reward = toy_reward(rs_config) / optimal_reward
        rs_searcher.update(rs_config, rs_reward)
        random_config_list[i] = rs_config
        random_reward_list[i] = rs_reward
        # print("Round %d: skopt best reward=%f" % (i,max(skopt_reward_list)))
    # Summarize results:
    logger.debug("best reward from SKopt: %f, best reward from SKopt w/ RF: %f, best reward from Random search: %f"
                 % (max(skopt_reward_list), max(skrf_reward_list), max(random_reward_list)))
    # Ensure skopt outperformed random search:
    assert (max(skopt_reward_list) >= max(random_reward_list)), "SKopt did worse than Random Search"
    # Ensure skopt found reasonably good config within num_configs_totry:
    assert (max(skopt_reward_list) >= 0.6), "SKopt performed poorly"
    logger.debug('Test Finished.')
def get_configspace(self):
    """LightGBM search space: learning rate, tree-shape limits, sampling
    fractions, L1/L2 regularization and a seed."""
    space = CS.ConfigurationSpace()
    learning_rate = CSH.UniformFloatHyperparameter(
        "learning_rate", lower=0.003, upper=0.005, default_value=0.004,
        log=False)
    num_leaves = CSH.UniformIntegerHyperparameter(
        "num_leaves", lower=3, upper=4, default_value=3, log=False)
    min_data_in_leaf = CSH.UniformIntegerHyperparameter(
        "min_data_in_leaf", lower=400, upper=1000, default_value=700,
        log=False)
    feature_fraction = CSH.UniformFloatHyperparameter(
        "feature_fraction", lower=0.1, upper=0.9, default_value=0.45,
        log=False)
    subsample = CSH.UniformFloatHyperparameter(
        "subsample", lower=0.5, upper=1.0, default_value=0.8, log=False)
    # L1/L2 penalties are searched on a log scale down to ~0.
    lambda_l1 = CSH.UniformFloatHyperparameter(
        "lambda_l1", lower=1e-12, upper=10.0, default_value=1.0, log=True)
    lambda_l2 = CSH.UniformFloatHyperparameter(
        "lambda_l2", lower=1e-12, upper=10.0, default_value=1.0, log=True)
    seed = CSH.UniformIntegerHyperparameter(
        "seed", lower=1, upper=10000, default_value=7861)
    space.add_hyperparameters([
        learning_rate,
        num_leaves,
        min_data_in_leaf,
        feature_fraction,
        subsample,
        lambda_l1,
        lambda_l2,
        seed,
    ])
    return space
def test_add_good_dim(self):
    """add_hyperparameter should map plain Python specs onto the matching
    ConfigSpace hyperparameter types."""
    import re
    from deephyper.problem import HpProblem

    pb = HpProblem()
    # (lower, upper) int tuple -> UniformIntegerHyperparameter
    p0 = pb.add_hyperparameter((-10, 10), "p0")
    p0_csh = csh.UniformIntegerHyperparameter(
        name="p0", lower=-10, upper=10, log=False
    )
    assert p0 == p0_csh
    p1 = pb.add_hyperparameter((1, 100, "log-uniform"), "p1")
    p1_csh = csh.UniformIntegerHyperparameter(name="p1", lower=1, upper=100, log=True)
    assert p1 == p1_csh
    # (lower, upper) float tuple -> UniformFloatHyperparameter
    p2 = pb.add_hyperparameter((-10.0, 10.0), "p2")
    p2_csh = csh.UniformFloatHyperparameter(
        name="p2", lower=-10.0, upper=10.0, log=False
    )
    assert p2 == p2_csh
    p3 = pb.add_hyperparameter((1.0, 100.0, "log-uniform"), "p3")
    p3_csh = csh.UniformFloatHyperparameter(
        name="p3", lower=1.0, upper=100.0, log=True
    )
    assert p3 == p3_csh
    # Numeric lists -> OrdinalHyperparameter
    p4 = pb.add_hyperparameter([1, 2, 3, 4], "p4")
    p4_csh = csh.OrdinalHyperparameter(name="p4", sequence=[1, 2, 3, 4])
    assert p4 == p4_csh
    p5 = pb.add_hyperparameter([1.0, 2.0, 3.0, 4.0], "p5")
    p5_csh = csh.OrdinalHyperparameter(name="p5", sequence=[1.0, 2.0, 3.0, 4.0])
    assert p5 == p5_csh
    # String list -> CategoricalHyperparameter
    p6 = pb.add_hyperparameter(["cat0", "cat1"], "p6")
    p6_csh = csh.CategoricalHyperparameter(name="p6", choices=["cat0", "cat1"])
    assert p6 == p6_csh
    # {mu, sigma} dict -> NormalIntegerHyperparameter
    p7 = pb.add_hyperparameter({"mu": 0, "sigma": 1}, "p7")
    p7_csh = csh.NormalIntegerHyperparameter(name="p7", mu=0, sigma=1)
    assert p7 == p7_csh
    # Fixed: comparing version strings lexicographically is wrong in
    # general (e.g. "0.10.0" < "0.4.20" as strings); compare numeric
    # components instead.
    cs_version = tuple(int(part) for part in re.findall(r"\d+", cs.__version__)[:3])
    if cs_version > (0, 4, 20):
        # Bounded normal integers are only supported in newer ConfigSpace.
        p8 = pb.add_hyperparameter(
            {"mu": 0, "sigma": 1, "lower": -5, "upper": 5}, "p8"
        )
        p8_csh = csh.NormalIntegerHyperparameter(
            name="p8", mu=0, sigma=1, lower=-5, upper=5
        )
        assert p8 == p8_csh
    p9 = pb.add_hyperparameter({"mu": 0.0, "sigma": 1.0}, "p9")
    p9_csh = csh.NormalFloatHyperparameter(name="p9", mu=0, sigma=1)
    assert p9 == p9_csh
def test_base_problem():
    """BaseProblem should accept ConfigSpace hyperparameters one at a time."""
    import ConfigSpace.hyperparameters as CSH
    alpha = CSH.UniformFloatHyperparameter(name="alpha", lower=0, upper=1)
    beta = CSH.UniformFloatHyperparameter(name="beta", lower=0, upper=1)
    problem = BaseProblem(42)
    for hp in (alpha, beta):
        problem.add_hyperparameter(hp)
    print(problem)
def handle_update_search_space(self, data):
    """change json format to ConfigSpace format dict<dict> -> configspace

    Translates each entry's "_type" into the corresponding ConfigSpace
    hyperparameter and stores the resulting space on self.search_space.

    NOTE(review): the index layout assumed for "_value" differs per type
    (e.g. 'randint' uses only _value[0] as the upper bound with lower=0;
    'normal'/'qnormal'/'lognormal'/'qlognormal' read mu/sigma from
    indices 1/2 rather than 0/1) — this matches a specific search-space
    JSON format version; confirm against the producer of `data`.

    Parameters
    ----------
    data: JSON object
        search space of this experiment
    """
    search_space = data
    cs = CS.ConfigurationSpace()
    for var in search_space:
        _type = str(search_space[var]["_type"])
        if _type == 'choice':
            cs.add_hyperparameter(CSH.CategoricalHyperparameter(
                var, choices=search_space[var]["_value"]))
        elif _type == 'randint':
            # Only _value[0] is read; lower bound is fixed at 0.
            cs.add_hyperparameter(CSH.UniformIntegerHyperparameter(
                var, lower=0, upper=search_space[var]["_value"][0]))
        elif _type == 'uniform':
            cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
                var, lower=search_space[var]["_value"][0],
                upper=search_space[var]["_value"][1]))
        elif _type == 'quniform':
            # _value[2] is the quantization step q.
            cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
                var, lower=search_space[var]["_value"][0],
                upper=search_space[var]["_value"][1],
                q=search_space[var]["_value"][2]))
        elif _type == 'loguniform':
            cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
                var, lower=search_space[var]["_value"][0],
                upper=search_space[var]["_value"][1],
                log=True))
        elif _type == 'qloguniform':
            cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
                var, lower=search_space[var]["_value"][0],
                upper=search_space[var]["_value"][1],
                q=search_space[var]["_value"][2],
                log=True))
        elif _type == 'normal':
            cs.add_hyperparameter(CSH.NormalFloatHyperparameter(
                var, mu=search_space[var]["_value"][1],
                sigma=search_space[var]["_value"][2]))
        elif _type == 'qnormal':
            cs.add_hyperparameter(CSH.NormalFloatHyperparameter(
                var, mu=search_space[var]["_value"][1],
                sigma=search_space[var]["_value"][2],
                q=search_space[var]["_value"][3]))
        elif _type == 'lognormal':
            cs.add_hyperparameter(CSH.NormalFloatHyperparameter(
                var, mu=search_space[var]["_value"][1],
                sigma=search_space[var]["_value"][2],
                log=True))
        elif _type == 'qlognormal':
            cs.add_hyperparameter(CSH.NormalFloatHyperparameter(
                var, mu=search_space[var]["_value"][1],
                sigma=search_space[var]["_value"][2],
                q=search_space[var]["_value"][3],
                log=True))
        else:
            # Unknown type: fail loudly rather than silently dropping it.
            raise ValueError(
                'unrecognized type in search_space, type is {}'.format(_type))
    self.search_space = cs
def get_warmstart_config(hp_space: dict, warmstart_params: dict):
    """Build a near-degenerate ConfigurationSpace pinned to warmstart values.

    ConfigSpace rejects equal lower/upper bounds, so every numeric
    hyperparameter is given a minimal-width interval that contains its
    warmstart value; categorical parameters get a single choice.

    Parameters
    ----------
    hp_space : indexable collection of skopt dimensions
        (skopt.space.space.Integer / Real / Categorical).
    warmstart_params : dict
        Maps each dimension name to its warmstart value.

    Raises
    ------
    Exception
        If a dimension has an unsupported skopt type.
    """
    ws_cs = CS.ConfigurationSpace()
    ws_params_list = []
    for i in range(len(hp_space)):
        dim = hp_space[i]
        this_param = dim.name
        # Hoisted: the warmstart value is read several times below.
        value = warmstart_params[this_param]
        # Fixed idiom: use isinstance instead of `type(...) ==` so skopt
        # subclasses of these dimension types are also handled.
        if isinstance(dim, skopt.space.space.Integer):
            # ConfigSpace doesn't accept equal lower/upper bounds
            # (integer HPs); widen by 1 on the side that stays in range.
            if value == dim.high:
                lower, upper = value - 1, value
            else:
                lower, upper = value, value + 1
            ws_params_list.append(
                CSH.UniformIntegerHyperparameter(
                    name=this_param, lower=lower, upper=upper))
        elif isinstance(dim, skopt.space.space.Categorical):
            ws_params_list.append(
                CSH.CategoricalHyperparameter(
                    this_param, choices=[value]))
        elif isinstance(dim, skopt.space.space.Real):
            # ConfigSpace doesn't accept equal lower/upper bounds
            # (real HPs); widen by a tiny epsilon.
            if value == dim.high:
                lower, upper = value - 0.0001, value
            else:
                lower, upper = value, value + 0.0001
            ws_params_list.append(
                CSH.UniformFloatHyperparameter(
                    this_param, lower=lower, upper=upper))
        else:
            raise Exception(
                "The warmstart configuration space couldn't be created correctly."
            )
    ws_cs.add_hyperparameters(ws_params_list)
    return ws_cs
def get_configspace():
    """Build the MLP configuration space.

    Covers learning rate, activation, optimizer (with a conditional SGD
    momentum), and the number/width of hidden layers where widths of
    layers 2 and 3 are conditional on the layer count.

    Returns:
        ConfigSpace-object
    """
    cs = CS.ConfigurationSpace()
    # Fixed: default_value was the string '1e-2'; pass a float literal so
    # the default does not rely on implicit casting by ConfigSpace.
    cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
        'lr', lower=1e-6, upper=1e-2, default_value=1e-2, log=True))
    cs.add_hyperparameter(CSH.CategoricalHyperparameter(
        'act_f', ['ReLU', 'Tanh'], default_value='ReLU'))
    # Optimizers as categorical hyperparameters; only SGD has 'momentum'.
    optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])
    cs.add_hyperparameter(optimizer)
    sgd_momentum = CSH.UniformFloatHyperparameter(
        'sgd_momentum', lower=0.0, upper=0.99, default_value=0.9, log=False)
    cs.add_hyperparameter(sgd_momentum)
    # sgd_momentum is only active if the configuration uses 'SGD'.
    cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
    cs.add_condition(cond)

    # Hidden-layer widths for layers 2 and 3 are conditional on
    # num_hidden_layers (inequality conditions below).
    num_hidden_layers = CSH.UniformIntegerHyperparameter(
        'num_hidden_layers', lower=1, upper=3, default_value=1)
    cs.add_hyperparameter(num_hidden_layers)
    hidden_dim_1 = CSH.UniformIntegerHyperparameter(
        'hidden_dim_1', lower=100, upper=1000, log=False)
    cs.add_hyperparameter(hidden_dim_1)
    hidden_dim_2 = CSH.UniformIntegerHyperparameter(
        'hidden_dim_2', lower=100, upper=1000, log=False)
    cs.add_hyperparameter(hidden_dim_2)
    hidden_dim_3 = CSH.UniformIntegerHyperparameter(
        'hidden_dim_3', lower=100, upper=1000, log=False)
    cs.add_hyperparameter(hidden_dim_3)
    # Use inequality conditions
    cond = CS.GreaterThanCondition(hidden_dim_2, num_hidden_layers, 1)
    cs.add_condition(cond)
    cond = CS.GreaterThanCondition(hidden_dim_3, num_hidden_layers, 2)
    cs.add_condition(cond)
    return cs
def get_configspace():
    """
    It builds the configuration space with the needed hyperparameters.
    It is easily possible to implement different types of hyperparameters.
    Beside float-hyperparameters on a log scale, it is also able to handle
    categorical input parameter.

    Currently only lr, sgd_momentum and dropout_rate are tuned; the
    optimizer choice and the architecture parameters below are kept as
    commented-out references.

    :return: ConfigurationsSpace-Object
    """
    cs = CS.ConfigurationSpace()
    # Learning rate hyperparameter
    # NOTE(review): default_value is passed as the string '1e-2' here —
    # confirm the installed ConfigSpace version casts it to float.
    lr = CSH.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1,
                                        default_value='1e-2', log=True)
    # Stochastic gradient descent momentum as parameter.
    sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum', lower=0.0,
                                                  upper=0.99,
                                                  default_value=0.9,
                                                  log=False)
    cs.add_hyperparameters([lr, sgd_momentum])
    # Optimizer hyperparameters.
    #optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])
    #cs.add_hyperparameters([optimizer])
    # Only add the sgd_momentum hyperparameter if the optimizer is stochastic gradient descent. Otherwise, it doesn't make sense.
    #cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
    #cs.add_condition(cond)
    ''' The below is commented out because we're not fiddling with architecture in this optimization.'''
    #num_new_fc_layers = CSH.UniformIntegerHyperparameter('num_new_fc_layers', lower=0, upper=3, default_value=0, log=False)
    #num_els_new_1 = CSH.UniformIntegerHyperparameter('num_els_new_1', lower=128, upper=4096, default_value = 1000, log=True)
    #num_els_new_2 = CSH.UniformIntegerHyperparameter('num_els_new_2', lower=128, upper=4096, default_value = 1000, log=True)
    #num_els_new_3 = CSH.UniformIntegerHyperparameter('num_els_new_3', lower=128, upper=4096, default_value = 1000, log=True)
    #freeze0_old = CSH.UniformIntegerHyperparameter('freeze0_cat', lower = 0, upper = 1, default_value = 1, log=False)
    #freeze1_old = CSH.UniformIntegerHyperparameter('freeze1_cat', lower=0, upper=1, default_value=1, log=False)
    #cs.add_hyperparameters([num_new_fc_layers, num_els_new_1, num_els_new_2, num_els_new_3, freeze0_old, freeze1_old, batchsize])
    # Dropout applied to the classifier head.
    dropout_rate = CSH.UniformFloatHyperparameter('dropout_rate', lower=0.0,
                                                  upper=0.9,
                                                  default_value=0.5,
                                                  log=False)
    cs.add_hyperparameters([dropout_rate])
    return cs