def get_configspace(self) -> CS.ConfigurationSpace:
    cs = CS.ConfigurationSpace()
    lr = CSH.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-0,
                                        default_value=1e-2, log=True)
    # Number of hidden cells in the LSTM
    h_cell = CSH.UniformIntegerHyperparameter('h_cells', lower=2, upper=11,
                                              default_value=3)
    # Number of concatenated LSTMs
    num_lstm = CSH.UniformIntegerHyperparameter('num_lstm', lower=1, upper=8,
                                                default_value=3)
    # Dimension of the hidden fully connected layers
    hidden_dim = CSH.UniformIntegerHyperparameter('hidden_dim', lower=16,
                                                  upper=512, default_value=128)
    cs.add_hyperparameters([h_cell, num_lstm, hidden_dim, lr])
    return cs
def get_configspace(exercise):
    config_space = CS.ConfigurationSpace()
    # See https://github.com/automl/HpBandSter/blob/master/hpbandster/examples/example_5_keras_worker.py for a reference example.
    # <JAB>
    # Define the exercise to run from the exercise sheet and build the
    # configuration space accordingly. These are the hyperparameters that
    # need to be set:
    # config["learning_rate"]
    # config["num_filters"]
    # config["batch_size"]
    # config["filter_size"]
    if exercise == 1:
        # All values are fixed to test that the CNN works properly
        config_space.add_hyperparameters([
            CSH.CategoricalHyperparameter('learning_rate', [0.1]),
            CSH.CategoricalHyperparameter('batch_size', [64]),
            CSH.CategoricalHyperparameter('num_filters', [16]),
            CSH.CategoricalHyperparameter('filter_size', [3]),
        ])
    elif exercise == 2:
        # Only the learning rate varies
        config_space.add_hyperparameters([
            CSH.CategoricalHyperparameter('learning_rate', [0.1, 0.01, 0.001, 0.0001]),
            CSH.CategoricalHyperparameter('batch_size', [64]),
            CSH.CategoricalHyperparameter('num_filters', [16]),
            CSH.CategoricalHyperparameter('filter_size', [3]),
        ])
    elif exercise == 3:
        # Only the filter size varies
        config_space.add_hyperparameters([
            CSH.CategoricalHyperparameter('learning_rate', [0.1]),
            CSH.CategoricalHyperparameter('batch_size', [64]),
            CSH.CategoricalHyperparameter('num_filters', [16]),
            CSH.CategoricalHyperparameter('filter_size', [1, 3, 5, 7]),
        ])
    elif exercise == 4:
        # Full search space
        config_space.add_hyperparameters([
            CSH.UniformFloatHyperparameter('learning_rate', lower=10e-4,
                                           upper=10e-1, default_value=10e-1,
                                           log=True),
            CSH.UniformIntegerHyperparameter('batch_size', lower=16, upper=128,
                                             default_value=64, log=True),
            CSH.UniformIntegerHyperparameter('num_filters', lower=2**3,
                                             upper=2**6, default_value=2**4,
                                             log=True),
            CSH.CategoricalHyperparameter('filter_size', [3, 5]),
        ])
    # </JAB>
    return config_space
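# Usage sketch (not part of the original code): sampling from the exercise-4
# space above to check that the log-scaled ranges behave as intended. Assumes
# ConfigSpace is imported as CS and ConfigSpace.hyperparameters as CSH, as in
# the surrounding functions.
def _demo_exercise_space():
    config_space = get_configspace(exercise=4)
    for config in config_space.sample_configuration(size=3):
        print(config.get_dictionary())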
def hyper_lightgbm_bohb():
    cs = CS.ConfigurationSpace()
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('max_depth', lower=1, upper=100, log=False))
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('min_data_in_leaf', lower=1, upper=100, log=False))
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('num_leaves', lower=1, upper=100, log=False))
    cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter('learning_rate', lower=1e-5, upper=1, log=True))
    cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter('bagging_fraction', lower=0.1, upper=1, log=False))
    return cs
def cs_ab():
    """Define the configuration space for the AdaBoost classifier."""
    cs1 = ConfigSpace.ConfigurationSpace()
    cs2 = ConfigSpace.ConfigurationSpace()
    hp1 = csh.CategoricalHyperparameter('algorithm', choices=['0', '1'])
    hp2 = csh.CategoricalHyperparameter('imputation', choices=['0', '1', '2'])
    hp3 = csh.UniformIntegerHyperparameter('max_depth', lower=1, upper=10, log=False)
    hp4 = csh.UniformFloatHyperparameter('learning_rate', lower=0.01, upper=2, log=True)
    hp5 = csh.UniformIntegerHyperparameter('n_estimators', lower=50, upper=500, log=False)
    # imputation case
    cs1.add_hyperparameters([hp1, hp2, hp3, hp4, hp5])
    # no imputation case
    cs2.add_hyperparameters([hp1, hp3, hp4, hp5])
    return cs1, cs2
def get_hyperparameter_search_space():
    possible_kernels = ['poly', 'rbf', 'sigmoid', 'cosine']
    kernel = CSH.CategoricalHyperparameter('kernel', possible_kernels,
                                           default_value='rbf')
    n_components = CSH.UniformIntegerHyperparameter(
        "n_components", 50, 10000, default_value=100, log=True)
    gamma = CSH.UniformFloatHyperparameter(
        "gamma", 3.0517578125e-05, 8, log=True, default_value=0.1)
    degree = CSH.UniformIntegerHyperparameter('degree', 2, 5, default_value=3)
    coef0 = CSH.UniformFloatHyperparameter("coef0", -1, 1, default_value=0)
    cs = ConfigSpace.ConfigurationSpace()
    cs.add_hyperparameters([kernel, degree, gamma, coef0, n_components])
    degree_depends_on_poly = CSC.EqualsCondition(degree, kernel, "poly")
    coef0_condition = CSC.InCondition(coef0, kernel, ["poly", "sigmoid"])
    gamma_kernels = ["poly", "rbf", "sigmoid"]
    gamma_condition = CSC.InCondition(gamma, kernel, gamma_kernels)
    cs.add_conditions(
        [degree_depends_on_poly, coef0_condition, gamma_condition])
    return cs
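# Usage sketch (not part of the original code): the conditions above make
# 'degree', 'coef0' and 'gamma' inactive for kernels that do not use them, so
# sampled configurations contain only the active hyperparameters.
def _demo_kernel_conditions():
    cs = get_hyperparameter_search_space()
    for config in cs.sample_configuration(size=5):
        d = config.get_dictionary()
        assert 'degree' not in d or d['kernel'] == 'poly'
        print(d)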
def get_hyperparameter_search_space(self, **pipeline_config):
    import ConfigSpace as CS
    import ConfigSpace.hyperparameters as CSH
    cs = CS.ConfigurationSpace()
    augment = cs.add_hyperparameter(
        CSH.CategoricalHyperparameter('augment', [True, False]))
    autoaugment = cs.add_hyperparameter(
        CSH.CategoricalHyperparameter('autoaugment', [True, False]))
    fastautoaugment = cs.add_hyperparameter(
        CSH.CategoricalHyperparameter('fastautoaugment', [True, False]))
    cutout = cs.add_hyperparameter(
        CSH.CategoricalHyperparameter('cutout', [True, False]))
    # Note: the cutout length is registered under the name 'length'
    cutout_length = cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('length', lower=0, upper=20, log=False))
    cutout_holes = cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('cutout_holes', lower=1, upper=3, log=False))
    cs.add_condition(CS.EqualsCondition(cutout_length, cutout, True))
    cs.add_condition(CS.EqualsCondition(cutout_holes, cutout, True))
    cs.add_condition(CS.EqualsCondition(autoaugment, augment, True))
    cs.add_condition(CS.EqualsCondition(fastautoaugment, augment, True))
    return cs
def cs_gb():
    """Define the configuration space for the GradientBoosting classifier."""
    cs1 = ConfigSpace.ConfigurationSpace()
    cs2 = ConfigSpace.ConfigurationSpace()
    hp1 = csh.CategoricalHyperparameter('criterion', choices=['0', '1'])
    hp2 = csh.CategoricalHyperparameter('imputation', choices=['0', '1', '2'])
    hp3 = csh.UniformIntegerHyperparameter('max_depth', lower=1, upper=10, log=False)
    hp4 = csh.UniformFloatHyperparameter('learning_rate', lower=0.01, upper=1, log=True)
    hp5 = csh.UniformIntegerHyperparameter('n_estimators', lower=50, upper=500, log=False)
    hp6 = csh.UniformFloatHyperparameter('max_features', lower=0.1, upper=0.9, log=False)
    hp7 = csh.UniformIntegerHyperparameter('min_samples_leaf', lower=1, upper=20, log=False)
    hp8 = csh.UniformIntegerHyperparameter('min_samples_split', lower=2, upper=20, log=False)
    # imputation case
    cs1.add_hyperparameters([hp1, hp2, hp3, hp4, hp5, hp6, hp7, hp8])
    # no imputation case
    cs2.add_hyperparameters([hp1, hp3, hp4, hp5, hp6, hp7, hp8])
    return cs1, cs2
def get_configspace():
    # Create a ConfigurationSpace object and add the hyperparameter settings to it.
    # See https://github.com/automl/HpBandSter/blob/master/hpbandster/examples/example_5_keras_worker.py for a reference example.
    config_space = CS.ConfigurationSpace()
    # Float
    learn = CSH.UniformFloatHyperparameter('learning_rate', lower=1e-4,
                                           upper=1e-1, default_value=1e-2,
                                           log=True)
    # Integer
    batch_size = CSH.UniformIntegerHyperparameter('batch_size', lower=16,
                                                  upper=128, default_value=64,
                                                  log=True)
    num_filters = CSH.UniformIntegerHyperparameter('num_filters', lower=8,
                                                   upper=64, default_value=16,
                                                   log=True)
    # Categorical
    filter_size = CSH.CategoricalHyperparameter('filter_size', ['3', '4', '5'])
    config_space.add_hyperparameters(
        [learn, batch_size, num_filters, filter_size])
    return config_space
def get_configspace(self):
    cs = CS.ConfigurationSpace()
    cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter(name='lr', lower=1e-6, upper=1e-3,
                                       log=True, default_value=1e-4))
    cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter(name='beta', lower=0.001, upper=1.0,
                                       log=True, default_value=0.2))
    cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter(name='eta', lower=0.001, upper=1.0,
                                       log=True, default_value=0.5))
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter(name='feature_dim', lower=16,
                                         upper=256, log=True, default_value=64))
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter(name='hidden_size', lower=16,
                                         upper=256, log=True, default_value=128))
    return cs
def get_config_space():
    cs = CS.ConfigurationSpace()
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('gru_layers', lower=1, upper=3, log=True))
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('gru_size', lower=16, upper=128, log=True))
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('hddn1', lower=32, upper=512, log=True))
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('hddn2', lower=32, upper=512, log=True))
    cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter('lr', lower=1e-8, upper=1e-6, log=True))
    cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter('momentum', lower=1e-5, upper=0.99))
    return cs
def get_configspace():
    config_space = CS.ConfigurationSpace()
    l0 = CSH.UniformIntegerHyperparameter('l0', lower=10, upper=200, log=True)
    l1 = CSH.UniformIntegerHyperparameter('l1', lower=10, upper=200, log=True)
    l2 = CSH.UniformIntegerHyperparameter('l2', lower=10, upper=200, log=True)
    config_space.add_hyperparameters([l0, l1, l2])
    num_layers = CSH.UniformIntegerHyperparameter('num_layers', lower=1, upper=3)
    config_space.add_hyperparameter(num_layers)
    cond = CS.GreaterThanCondition(l1, num_layers, 1)
    config_space.add_condition(cond)
    cond = CS.GreaterThanCondition(l2, num_layers, 2)
    config_space.add_condition(cond)
    return config_space
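# Usage sketch (not part of the original code): with the GreaterThanConditions
# in the function above, 'l1' is only sampled when num_layers > 1 and 'l2'
# only when num_layers > 2, so a one-layer configuration contains just 'l0'.
def _demo_layer_conditions():
    config_space = get_configspace()
    for config in config_space.sample_configuration(size=5):
        d = config.get_dictionary()
        print(d['num_layers'], sorted(k for k in d if k.startswith('l')))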
def get_configspace():
    """
    Builds the configuration space with the needed hyperparameters.
    Different types of hyperparameters can be used: besides float
    hyperparameters on a log scale, categorical input parameters are also
    supported.
    :return: ConfigurationSpace object
    """
    cs = CS.ConfigurationSpace()
    layers = CSH.UniformIntegerHyperparameter('layers', lower=8, upper=20,
                                              default_value=8)
    cs.add_hyperparameter(layers)
    init_channels = CSH.UniformIntegerHyperparameter('init_channels', lower=4,
                                                     upper=8, default_value=6)
    cs.add_hyperparameter(init_channels)
    batch_size = CSH.CategoricalHyperparameter('batch_size', ['32'])
    cs.add_hyperparameter(batch_size)
    return cs
def get_configspace():
    configspace = CS.ConfigurationSpace()
    lr = CSH.UniformFloatHyperparameter('learning_rate', lower=0.0001,
                                        upper=0.1, default_value=1e-2,
                                        log=True)
    num_filters = CSH.UniformIntegerHyperparameter('num_filters', lower=2**3,
                                                   upper=2**6, default_value=16,
                                                   log=True)
    batch_size = CSH.UniformIntegerHyperparameter('batch_size', lower=16,
                                                  upper=128, default_value=128,
                                                  log=True)
    filtersize = CSH.CategoricalHyperparameter('filtersize', [5, 3])
    configspace.add_hyperparameters(
        [lr, num_filters, batch_size, filtersize])
    return configspace
def load_config_space(path):
    """
    Load a ConfigSpace object from a JSON file.

    As certain hyperparameters are not denoted as optimizable but are
    overridden later, they are manually overridden here too.

    :param path: path to the JSON-serialized configuration space
    :return: ConfigurationSpace object
    """
    with open(os.path.join(path), 'r') as fh:
        json_string = fh.read()
    config_space = config_space_json_r_w.read(json_string)

    # Override the constant hyperparameters num_layers, T_max, init_channels
    # and eta_min with optimizable ones (note: this pops entries from the
    # private _hyperparameters dict).
    config_space._hyperparameters.pop('NetworkSelectorDatasetInfo:darts:layers', None)
    num_layers = CSH.UniformIntegerHyperparameter(
        name='NetworkSelectorDatasetInfo:darts:layers', lower=1, upper=10000)
    config_space._hyperparameters.pop(
        'SimpleLearningrateSchedulerSelector:cosine_annealing:T_max', None)
    t_max = CSH.UniformIntegerHyperparameter(
        name='SimpleLearningrateSchedulerSelector:cosine_annealing:T_max',
        lower=1, upper=10000)
    config_space._hyperparameters.pop('NetworkSelectorDatasetInfo:darts:init_channels', None)
    init_channels = CSH.UniformIntegerHyperparameter(
        name='NetworkSelectorDatasetInfo:darts:init_channels', lower=1, upper=10000)
    config_space._hyperparameters.pop(
        'SimpleLearningrateSchedulerSelector:cosine_annealing:eta_min', None)
    eta_min_cosine = CSH.UniformFloatHyperparameter(
        name='SimpleLearningrateSchedulerSelector:cosine_annealing:eta_min',
        lower=0, upper=10000)
    config_space.add_hyperparameters([num_layers, t_max, init_channels, eta_min_cosine])
    return config_space
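# Usage sketch (not part of the original code): round-tripping a small space
# through the same JSON serializer that load_config_space relies on. Assumes
# ConfigSpace.read_and_write.json is imported as config_space_json_r_w, as in
# load_config_space, plus the CS/CSH aliases used elsewhere in this file.
def _demo_config_space_json_roundtrip():
    cs = CS.ConfigurationSpace()
    cs.add_hyperparameter(CSH.UniformIntegerHyperparameter('x', lower=1, upper=10))
    json_string = config_space_json_r_w.write(cs)
    restored = config_space_json_r_w.read(json_string)
    assert restored.get_hyperparameter('x').upper == 10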
def get_configspace():
    cs = CS.ConfigurationSpace()
    # HYPERPARAMETERS
    n_estimators = CSH.UniformIntegerHyperparameter('n_estimators', lower=10, upper=200)
    max_depth = CSH.UniformIntegerHyperparameter('max_depth', lower=1, upper=80)
    min_samples_leaf = CSH.UniformIntegerHyperparameter('min_samples_leaf', lower=1, upper=30)
    min_samples_split = CSH.UniformIntegerHyperparameter('min_samples_split', lower=2, upper=20)
    max_features = CSH.CategoricalHyperparameter('max_features', choices=['auto', 'sqrt'])
    # class_weight = CSH.CategoricalHyperparameter('class_weight',
    #                                              choices=['balanced', None])
    cs.add_hyperparameters([n_estimators, max_depth, min_samples_leaf,
                            min_samples_split, max_features])
    return cs
def get_configspace():
    config_space = CS.ConfigurationSpace()
    filter_count = [4]  # [4, 8, 16]
    filter_size = [7]   # [3, 5, 7, 9]
    l1_fc = CSH.CategoricalHyperparameter('l1_fc', filter_count)
    l1_fs = CSH.CategoricalHyperparameter('l1_fs', filter_size)
    config_space.add_hyperparameters([l1_fc, l1_fs])
    # l2_fc = CSH.CategoricalHyperparameter('l2_fc', filter_count)
    # l2_fs = CSH.CategoricalHyperparameter('l2_fs', filter_size)
    # config_space.add_hyperparameters([l2_fc, l2_fs])
    l3 = CSH.UniformIntegerHyperparameter('l3', lower=20, upper=500)
    l4 = CSH.UniformIntegerHyperparameter('l4', lower=20, upper=500)
    l5 = CSH.UniformIntegerHyperparameter('l5', lower=10, upper=300)
    config_space.add_hyperparameters([l3, l4, l5])
    latent = CSH.UniformIntegerHyperparameter('latent', lower=20, upper=100)
    # latent = CSH.CategoricalHyperparameter('latent', [33])
    config_space.add_hyperparameters([latent])
    return config_space
def get_configspace():
    # See https://github.com/automl/HpBandSter/blob/master/hpbandster/examples/example_5_keras_worker.py for a reference example.
    cs = CS.ConfigurationSpace()
    lr = CSH.UniformFloatHyperparameter('learning_rate', lower=1e-4,
                                        upper=1e-1, default_value=1e-2,
                                        log=True)
    batch_size = CSH.UniformIntegerHyperparameter('batch_size', lower=16,
                                                  upper=128, default_value=32,
                                                  log=True)
    num_filters = CSH.UniformIntegerHyperparameter('num_filters', lower=8,
                                                   upper=64, default_value=16,
                                                   log=True)
    filter_size = CSH.UniformIntegerHyperparameter('filter_size', lower=3,
                                                   upper=5, default_value=4,
                                                   log=False)
    cs.add_hyperparameters([lr, num_filters, filter_size, batch_size])
    return cs
def get_configspace():
    # Create the config_space object
    config_space = CS.ConfigurationSpace()
    # Create the hyperparameters and add them to config_space
    learning_rate = CSH.UniformFloatHyperparameter('learning_rate', lower=1e-4,
                                                   upper=1e-1, default_value=1e-2,
                                                   log=True)
    batch_size = CSH.UniformIntegerHyperparameter('batch_size', lower=16,
                                                  upper=128, default_value=64,
                                                  log=True)
    num_filters = CSH.UniformIntegerHyperparameter('num_filters', lower=8,
                                                   upper=64, default_value=16,
                                                   log=True)
    filter_size = CSH.CategoricalHyperparameter('filter_size', ['3', '4', '5'])
    config_space.add_hyperparameters(
        [learning_rate, batch_size, num_filters, filter_size])
    return config_space
def cs_dt():
    """Define the configuration space for the Decision Tree classifier."""
    cs1 = ConfigSpace.ConfigurationSpace()
    cs2 = ConfigSpace.ConfigurationSpace()
    hp1 = csh.CategoricalHyperparameter('criterion', choices=['0', '1'])
    hp2 = csh.CategoricalHyperparameter('imputation', choices=['0', '1', '2'])
    hp3 = csh.UniformFloatHyperparameter('max_features', lower=0.1, upper=0.9, log=False)
    hp4 = csh.UniformIntegerHyperparameter('min_samples_leaf', lower=1, upper=20, log=False)
    hp5 = csh.UniformIntegerHyperparameter('min_samples_split', lower=2, upper=20, log=False)
    # imputation case
    cs1.add_hyperparameters([hp1, hp2, hp3, hp4, hp5])
    # no imputation case
    cs2.add_hyperparameters([hp1, hp3, hp4, hp5])
    return cs1, cs2
def get_configspace():
    """
    Builds the configuration space with the needed hyperparameters.
    Different types of hyperparameters can be used: besides float
    hyperparameters on a log scale, categorical input parameters are also
    supported.
    :return: ConfigurationSpace object
    """
    cs = CS.ConfigurationSpace()

    lr = CSH.UniformFloatHyperparameter(
        'lr', lower=1e-6, upper=1e-1, default_value=1e-2, log=True)

    # For demonstration purposes, we add different optimizers as categorical hyperparameters.
    # To show how to use conditional hyperparameters with ConfigSpace, we'll add the optimizers 'Adam' and 'SGD'.
    # SGD has an additional parameter 'momentum'.
    optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])
    sgd_momentum = CSH.UniformFloatHyperparameter(
        'sgd_momentum', lower=0.0, upper=0.99, default_value=0.9, log=False)
    cs.add_hyperparameters([lr, optimizer, sgd_momentum])

    # The hyperparameter sgd_momentum will only be used if the configuration
    # contains 'SGD' as the optimizer.
    cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
    cs.add_condition(cond)

    num_conv_layers = CSH.UniformIntegerHyperparameter(
        'num_conv_layers', lower=1, upper=3, default_value=2)
    num_filters_1 = CSH.UniformIntegerHyperparameter(
        'num_filters_1', lower=4, upper=64, default_value=16, log=True)
    num_filters_2 = CSH.UniformIntegerHyperparameter(
        'num_filters_2', lower=4, upper=64, default_value=16, log=True)
    num_filters_3 = CSH.UniformIntegerHyperparameter(
        'num_filters_3', lower=4, upper=64, default_value=16, log=True)
    cs.add_hyperparameters(
        [num_conv_layers, num_filters_1, num_filters_2, num_filters_3])

    # You can also use inequality conditions:
    cond = CS.GreaterThanCondition(num_filters_2, num_conv_layers, 1)
    cs.add_condition(cond)
    cond = CS.GreaterThanCondition(num_filters_3, num_conv_layers, 2)
    cs.add_condition(cond)

    dropout_rate = CSH.UniformFloatHyperparameter(
        'dropout_rate', lower=0.0, upper=0.9, default_value=0.5, log=False)
    num_fc_units = CSH.UniformIntegerHyperparameter(
        'num_fc_units', lower=8, upper=256, default_value=32, log=True)
    cs.add_hyperparameters([dropout_rate, num_fc_units])

    return cs
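# Usage sketch (not part of the original code): in the space above,
# 'sgd_momentum' is active only for configurations whose optimizer is 'SGD',
# and the filter counts for layers 2 and 3 appear only when num_conv_layers is
# large enough.
def _demo_conditional_space():
    cs = get_configspace()
    for config in cs.sample_configuration(size=5):
        d = config.get_dictionary()
        assert ('sgd_momentum' in d) == (d['optimizer'] == 'SGD')
        assert ('num_filters_2' in d) == (d['num_conv_layers'] > 1)
        print(d)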
def get_configspace():
    """
    :return: ConfigurationSpace object

    This is the main place to create the hyperparameters to tune.
    A hyperparameter is defined as:
        hyperparameter = type_of_parameter(name, lower_range, upper_range,
                                           default_value, log)
        cs.add_hyperparameters([hyperparameter])
    """
    cs = CS.ConfigurationSpace()

    # num_pca = CSH.UniformIntegerHyperparameter('num_pca', lower=850, upper=930, default_value=900, log=True)
    # cs.add_hyperparameters([num_pca])

    lr = CSH.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1,
                                        default_value=1e-2, log=True)
    optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])
    sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum', lower=0.0,
                                                  upper=0.99, default_value=0.9,
                                                  log=False)
    dropout_rate_1 = CSH.UniformFloatHyperparameter('dropout_rate_1', lower=0.0,
                                                    upper=0.5, default_value=0.1,
                                                    log=False)
    dropout_rate_2 = CSH.UniformFloatHyperparameter('dropout_rate_2', lower=0.0,
                                                    upper=0.5, default_value=0.1,
                                                    log=False)
    num_fc_units_1 = CSH.UniformIntegerHyperparameter('num_fc_units_1', lower=512,
                                                      upper=2048, default_value=1024,
                                                      log=True)
    num_fc_units_2 = CSH.UniformIntegerHyperparameter('num_fc_units_2', lower=256,
                                                      upper=512, default_value=256,
                                                      log=True)
    activation = CSH.CategoricalHyperparameter('activation', ['tanh', 'relu'])

    cs.add_hyperparameters([
        lr, optimizer, sgd_momentum, dropout_rate_1, dropout_rate_2,
        num_fc_units_1, num_fc_units_2, activation
    ])

    # The hyperparameter sgd_momentum will only be used if the configuration
    # contains 'SGD' as the optimizer.
    cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
    cs.add_condition(cond)

    return cs
def get_configspace(seed=None):
    cs = CS.ConfigurationSpace(seed)
    # Hyperparameters defining the first conv layer
    kernel1 = CSH.OrdinalHyperparameter("kernel_1", sequence=[3, 5, 7], default_value=5)
    channels1 = CSH.UniformIntegerHyperparameter("channels_1", lower=3, upper=64, default_value=32)
    stride1 = CSH.UniformIntegerHyperparameter("stride_1", lower=1, upper=2, default_value=1)
    cs.add_hyperparameters([kernel1, channels1, stride1])
    # Hyperparameters defining the second conv layer
    kernel2 = CSH.OrdinalHyperparameter("kernel_2", sequence=[3, 5, 7], default_value=5)
    channels2 = CSH.UniformIntegerHyperparameter("channels_2", lower=3, upper=64, default_value=32)
    stride2 = CSH.UniformIntegerHyperparameter("stride_2", lower=1, upper=2, default_value=1)
    cs.add_hyperparameters([kernel2, channels2, stride2])
    # Hyperparameter for the FC layer
    hidden = CSH.UniformIntegerHyperparameter("hidden", lower=32, upper=256, log=True, default_value=128)
    cs.add_hyperparameter(hidden)
    # Regularization hyperparameter
    dropout = CSH.UniformFloatHyperparameter("dropout", lower=0, upper=0.5, default_value=0.1)
    cs.add_hyperparameter(dropout)
    # Training hyperparameters
    batch_size = CSH.OrdinalHyperparameter("batch_size", sequence=[2, 4, 8, 16, 32, 64], default_value=4)
    lr = CSH.UniformFloatHyperparameter("lr", lower=1e-6, upper=0.1, log=True, default_value=1e-3)
    cs.add_hyperparameters([batch_size, lr])
    return cs
def setUp(self) -> None:
    self.config_space = CS.ConfigurationSpace()
    lb, ub = 1, 100
    self.config_space.add_hyperparameter(
        CSH.UniformFloatHyperparameter('f', lower=lb, upper=ub))
    self.config_space.add_hyperparameter(
        CSH.UniformFloatHyperparameter('fq', lower=lb, upper=ub, q=0.5))
    self.config_space.add_hyperparameter(
        CSH.UniformFloatHyperparameter('fql', lower=lb, upper=ub, q=0.5, log=True))
    self.config_space.add_hyperparameter(
        CSH.UniformFloatHyperparameter('fl', lower=lb, upper=ub, log=True))
    self.config_space.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('i', lower=lb, upper=ub))
    self.config_space.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('il', lower=lb, upper=ub, log=True))
    self.config_space.add_hyperparameter(
        CSH.CategoricalHyperparameter('c', choices=['x', 'y', 'z']))
    self.config_space.add_hyperparameter(
        CSH.OrdinalHyperparameter('o', sequence=list(range(1, 101)),
                                  meta={'lower': 1, 'upper': 100, 'log': False}))
    self.config_space.add_hyperparameter(
        CSH.OrdinalHyperparameter('ol', sequence=[1, 10, 100],
                                  meta={'lower': 1, 'upper': 100, 'log': True}))
    self.hp_names = self.config_space.get_hyperparameter_names()
    self.is_categoricals = {
        hp_name: self.config_space.get_hyperparameter(
            hp_name).__class__.__name__ == 'CategoricalHyperparameter'
        for hp_name in self.hp_names
    }
    self.is_ordinals = {
        hp_name: self.config_space.get_hyperparameter(
            hp_name).__class__.__name__ == 'OrdinalHyperparameter'
        for hp_name in self.hp_names
    }
def get_configspace(self):
    cs = CS.ConfigurationSpace()
    learning_rate = CSH.UniformFloatHyperparameter(
        "learning_rate", lower=0.003, upper=0.005, default_value=0.004, log=False)
    num_leaves = CSH.UniformIntegerHyperparameter(
        "num_leaves", lower=3, upper=4, default_value=3, log=False)
    min_data_in_leaf = CSH.UniformIntegerHyperparameter(
        "min_data_in_leaf", lower=400, upper=1000, default_value=700, log=False)
    feature_fraction = CSH.UniformFloatHyperparameter(
        "feature_fraction", lower=0.1, upper=0.9, default_value=0.45, log=False)
    subsample = CSH.UniformFloatHyperparameter(
        "subsample", lower=0.5, upper=1.0, default_value=0.8, log=False)
    l1 = CSH.UniformFloatHyperparameter(
        "lambda_l1", lower=1e-12, upper=10.0, default_value=1.0, log=True)
    l2 = CSH.UniformFloatHyperparameter(
        "lambda_l2", lower=1e-12, upper=10.0, default_value=1.0, log=True)
    seed = CSH.UniformIntegerHyperparameter(
        "seed", lower=1, upper=10000, default_value=7861)
    # feats_flag = [
    #     CSH.UniformIntegerHyperparameter(feat, lower=0, upper=1, default_value=1)
    #     for feat in self.feats
    # ]
    cs.add_hyperparameters([
        learning_rate,
        num_leaves,
        min_data_in_leaf,
        feature_fraction,
        subsample,
        l1,
        l2,
        seed,
    ])
    return cs
def get_configuration_space(system, task, model):
    cs = CS.ConfigurationSpace()
    horizon = CSH.UniformIntegerHyperparameter(name="horizon", lower=10,
                                               upper=100, default_value=10)
    cs.add_hyperparameter(horizon)
    kappa = CSH.UniformFloatHyperparameter(name='kappa', lower=0.1, upper=1.0,
                                           default_value=1.0)
    cs.add_hyperparameter(kappa)
    num_traj = CSH.UniformIntegerHyperparameter(name='num_traj', lower=100,
                                                upper=1000, default_value=200)
    cs.add_hyperparameter(num_traj)
    return cs
def test_add_good_dim(self):
    from deephyper.problem import HpProblem

    pb = HpProblem()

    p0 = pb.add_hyperparameter((-10, 10), "p0")
    p0_csh = csh.UniformIntegerHyperparameter(
        name="p0", lower=-10, upper=10, log=False
    )
    assert p0 == p0_csh

    p1 = pb.add_hyperparameter((1, 100, "log-uniform"), "p1")
    p1_csh = csh.UniformIntegerHyperparameter(name="p1", lower=1, upper=100, log=True)
    assert p1 == p1_csh

    p2 = pb.add_hyperparameter((-10.0, 10.0), "p2")
    p2_csh = csh.UniformFloatHyperparameter(
        name="p2", lower=-10.0, upper=10.0, log=False
    )
    assert p2 == p2_csh

    p3 = pb.add_hyperparameter((1.0, 100.0, "log-uniform"), "p3")
    p3_csh = csh.UniformFloatHyperparameter(
        name="p3", lower=1.0, upper=100.0, log=True
    )
    assert p3 == p3_csh

    p4 = pb.add_hyperparameter([1, 2, 3, 4], "p4")
    p4_csh = csh.OrdinalHyperparameter(name="p4", sequence=[1, 2, 3, 4])
    assert p4 == p4_csh

    p5 = pb.add_hyperparameter([1.0, 2.0, 3.0, 4.0], "p5")
    p5_csh = csh.OrdinalHyperparameter(name="p5", sequence=[1.0, 2.0, 3.0, 4.0])
    assert p5 == p5_csh

    p6 = pb.add_hyperparameter(["cat0", "cat1"], "p6")
    p6_csh = csh.CategoricalHyperparameter(name="p6", choices=["cat0", "cat1"])
    assert p6 == p6_csh

    p7 = pb.add_hyperparameter({"mu": 0, "sigma": 1}, "p7")
    p7_csh = csh.NormalIntegerHyperparameter(name="p7", mu=0, sigma=1)
    assert p7 == p7_csh

    if cs.__version__ > "0.4.20":
        p8 = pb.add_hyperparameter(
            {"mu": 0, "sigma": 1, "lower": -5, "upper": 5}, "p8"
        )
        p8_csh = csh.NormalIntegerHyperparameter(
            name="p8", mu=0, sigma=1, lower=-5, upper=5
        )
        assert p8 == p8_csh

    p9 = pb.add_hyperparameter({"mu": 0.0, "sigma": 1.0}, "p9")
    p9_csh = csh.NormalFloatHyperparameter(name="p9", mu=0, sigma=1)
    assert p9 == p9_csh
def get_configspace():
    """
    Builds the configuration space with the needed hyperparameters.
    Different types of hyperparameters can be used: besides float
    hyperparameters on a log scale, categorical input parameters are also
    supported.
    :return: ConfigurationSpace object
    """
    cs = CS.ConfigurationSpace()
    cs.add_hyperparameters([
        CSH.UniformFloatHyperparameter(
            'optimizer:lr', lower=0.001, upper=0.01, log=True,
        ),
        CSH.UniformIntegerHyperparameter(
            'model:temp_reg_params.history', lower=4, upper=12, default_value=12,
        ),
        CSH.UniformIntegerHyperparameter(
            'model:temp_reg_params.n_layers', lower=2, upper=8, default_value=3,
        ),
        CSH.OrdinalHyperparameter(
            'model:temp_reg_params.n_channels', sequence=[2, 4, 8, 16, 32], default_value=8,
        ),
        CSH.OrdinalHyperparameter(
            'model:temp_reg_params.kernel_size', sequence=[1, 3, 5], default_value=1,
        ),
        CSH.CategoricalHyperparameter(
            'model:temp_reg_params.activation', choices="ReLU ELU LeakyReLU SELU".split(),
        ),
        CSH.CategoricalHyperparameter(
            'model:biases_type.location', choices="LxT L+T".split(),
        ),
        CSH.CategoricalHyperparameter(
            'model:biases_type.weekday', choices=["", "W", "WxT"],
        ),
        CSH.CategoricalHyperparameter(
            'model:biases_type.month', choices=["", "M", "MxT"],
        ),
    ])
    return cs
def vary_hyperparameters(self, config_mod):
    lr = config_mod['agents'][self.agent_name]['lr']
    batch_size = config_mod['agents'][self.agent_name]['batch_size']
    hidden_size = config_mod['agents'][self.agent_name]['hidden_size']
    hidden_layer = config_mod['agents'][self.agent_name]['hidden_layer']

    # Vary each hyperparameter around its current value
    cs = CS.ConfigurationSpace()
    cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter(name='lr', lower=lr / 3, upper=lr * 3,
                                       log=True, default_value=lr))
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter(name='batch_size',
                                         lower=int(batch_size / 3),
                                         upper=int(batch_size * 3),
                                         log=True, default_value=batch_size))
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter(name='hidden_size',
                                         lower=int(hidden_size / 3),
                                         upper=int(hidden_size * 3),
                                         log=True, default_value=hidden_size))
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter(name='hidden_layer',
                                         lower=hidden_layer - 1,
                                         upper=hidden_layer + 1,
                                         log=False, default_value=hidden_layer))

    config = cs.sample_configuration()
    print(f"sampled part of config: "
          f"lr: {config['lr']}, "
          f"batch_size: {config['batch_size']}, "
          f"hidden_size: {config['hidden_size']}, "
          f"hidden_layer: {config['hidden_layer']}")

    config_mod['agents'][self.agent_name]['lr'] = config['lr']
    config_mod['agents'][self.agent_name]['batch_size'] = config['batch_size']
    config_mod['agents'][self.agent_name]['hidden_size'] = config['hidden_size']
    config_mod['agents'][self.agent_name]['hidden_layer'] = config['hidden_layer']
    print("full config: ", config_mod['agents'][self.agent_name])
    return config_mod
def get_configspace():
    # See https://github.com/automl/HpBandSter/blob/master/hpbandster/examples/example_5_keras_worker.py for a reference example.
    cs = CS.ConfigurationSpace()
    lr = CSH.UniformFloatHyperparameter('learning_rate', lower=1e-5,
                                        upper=1e-1, default_value=1e-2,
                                        log=True)
    batch_size = CSH.UniformIntegerHyperparameter('batch_size', lower=16,
                                                  upper=128, default_value=32,
                                                  log=True)
    num_filters = CSH.UniformIntegerHyperparameter('num_filters', lower=32,
                                                   upper=128, default_value=32,
                                                   log=True)
    # filter_size = CSH.UniformIntegerHyperparameter('filter_size', lower=3, upper=5, default_value=4, log=False)
    history_length = CSH.UniformIntegerHyperparameter('history_length', lower=1,
                                                      upper=7, default_value=3,
                                                      log=False)
    num_uniform_sample = CSH.CategoricalHyperparameter(
        'num_uniform_sample', [12000, 16000, 20000])
    cs.add_hyperparameters([lr, num_filters, batch_size, history_length,
                            num_uniform_sample])
    return cs
def get_warmstart_config(hp_space: list, warmstart_params: dict):
    ws_cs = CS.ConfigurationSpace()
    ws_params_list = []
    # Create an hpbandster hyperparameter space with the warmstart hyperparameters
    for i in range(len(hp_space)):
        this_param = hp_space[i].name
        if type(hp_space[i]) == skopt.space.space.Integer:
            # ConfigSpace doesn't accept equal values for the lower and the
            # upper bounds (integer HPs)
            if warmstart_params[this_param] == hp_space[i].high:
                ws_params_list.append(
                    CSH.UniformIntegerHyperparameter(
                        name=this_param,
                        lower=warmstart_params[this_param] - 1,
                        upper=warmstart_params[this_param]))
            else:
                ws_params_list.append(
                    CSH.UniformIntegerHyperparameter(
                        name=this_param,
                        lower=warmstart_params[this_param],
                        upper=warmstart_params[this_param] + 1))
        elif type(hp_space[i]) == skopt.space.space.Categorical:
            ws_params_list.append(
                CSH.CategoricalHyperparameter(
                    this_param, choices=[warmstart_params[this_param]]))
        elif type(hp_space[i]) == skopt.space.space.Real:
            # ConfigSpace doesn't accept equal values for the lower and the
            # upper bounds (real HPs)
            if warmstart_params[this_param] == hp_space[i].high:
                ws_params_list.append(
                    CSH.UniformFloatHyperparameter(
                        this_param,
                        lower=warmstart_params[this_param] - 0.0001,
                        upper=warmstart_params[this_param]))
            else:
                ws_params_list.append(
                    CSH.UniformFloatHyperparameter(
                        this_param,
                        lower=warmstart_params[this_param],
                        upper=warmstart_params[this_param] + 0.0001))
        else:
            raise Exception(
                "The warmstart configuration space couldn't be created correctly.")
    ws_cs.add_hyperparameters(ws_params_list)
    return ws_cs
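# Usage sketch (not part of the original code): building a warmstart space
# from a minimal skopt search space. The parameter names and values here are
# hypothetical; the point is that each warmstart hyperparameter gets a
# near-degenerate range, since ConfigSpace rejects equal lower/upper bounds.
def _demo_warmstart():
    import skopt
    hp_space = [
        skopt.space.space.Integer(1, 10, name='depth'),
        skopt.space.space.Real(0.01, 1.0, name='lr'),
    ]
    warmstart_params = {'depth': 10, 'lr': 0.1}
    ws_cs = get_warmstart_config(hp_space, warmstart_params)
    print(ws_cs.sample_configuration().get_dictionary())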