def get_configspace():
    """Build the configuration space for the example network.

    Returns:
        CS.ConfigurationSpace containing activation, learning rate,
        solver, and the adam-only beta_1/beta_2 hyperparameters.
    """
    cs = CS.ConfigurationSpace()
    cs.add_hyperparameters([
        CSH.CategoricalHyperparameter('activation', ['tanh', 'relu']),
        CS.UniformFloatHyperparameter('learning_rate_init',
                                      lower=1e-6, upper=1e-2, log=True),
    ])

    solver = CSH.CategoricalHyperparameter('solver', ['sgd', 'adam'])
    cs.add_hyperparameter(solver)

    # beta_1 / beta_2 are adam-specific, so each is only active when the
    # solver hyperparameter equals 'adam'.
    for beta_name in ('beta_1', 'beta_2'):
        beta_hp = CS.UniformFloatHyperparameter(beta_name, lower=0, upper=1)
        cs.add_hyperparameter(beta_hp)
        cs.add_condition(CS.EqualsCondition(beta_hp, solver, 'adam'))

    return cs
def get_hyperparameter_search_space(self, **pipeline_config):
    """Build the data-augmentation search space.

    cutout's length/holes children are only active while cutout is
    enabled; autoaugment/fastautoaugment only while augment is enabled.
    """
    import ConfigSpace as CS
    import ConfigSpace.hyperparameters as CSH

    cs = CS.ConfigurationSpace()

    # add_hyperparameter returns the hyperparameter object, which is
    # reused below when wiring up the activation conditions.
    augment = cs.add_hyperparameter(
        CSH.CategoricalHyperparameter('augment', [True, False]))
    autoaugment = cs.add_hyperparameter(
        CSH.CategoricalHyperparameter('autoaugment', [True, False]))
    fastautoaugment = cs.add_hyperparameter(
        CSH.CategoricalHyperparameter('fastautoaugment', [True, False]))
    cutout = cs.add_hyperparameter(
        CSH.CategoricalHyperparameter('cutout', [True, False]))
    cutout_length = cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('length', lower=0, upper=20,
                                         log=False))
    cutout_holes = cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter('cutout_holes', lower=1, upper=3,
                                         log=False))

    for child, parent in ((cutout_length, cutout),
                          (cutout_holes, cutout),
                          (autoaugment, augment),
                          (fastautoaugment, augment)):
        cs.add_condition(CS.EqualsCondition(child, parent, True))

    return cs
def get_hyperparameter_search_space(
    dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None,
    use_augmenter: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="use_augmenter",
        value_range=(True, False),
        default_value=True,
    ),
    sigma_min: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="sigma_min",
        value_range=(0, 3),
        default_value=0,
    ),
    sigma_offset: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="sigma_offset",
        value_range=(0.0, 3.0),
        default_value=0.5,
    ),
) -> ConfigurationSpace:
    """Build the blur-augmenter search space; sigma_* hyperparameters are
    only active while the augmenter itself is switched on."""
    cs = ConfigurationSpace()

    use_augmenter = get_hyperparameter(use_augmenter, CategoricalHyperparameter)
    sigma_min = get_hyperparameter(sigma_min, UniformFloatHyperparameter)
    sigma_offset = get_hyperparameter(sigma_offset, UniformFloatHyperparameter)
    cs.add_hyperparameters([use_augmenter, sigma_min, sigma_offset])

    # Only sample the sigma parameters when use_augmenter is True.
    for sigma_hp in (sigma_min, sigma_offset):
        cs.add_condition(CS.EqualsCondition(sigma_hp, use_augmenter, True))

    return cs
def get_config_space(num_groups=(1, 9),
                     blocks_per_group=(1, 4),
                     max_units=((10, 1024), True),
                     activation=('sigmoid', 'tanh', 'relu'),
                     max_shake_drop_probability=(0, 1),
                     max_dropout=(0, 0.8),
                     resnet_shape=('funnel', 'long_funnel', 'diamond',
                                   'hexagon', 'brick', 'triangle', 'stairs'),
                     dropout_shape=('funnel', 'long_funnel', 'diamond',
                                    'hexagon', 'brick', 'triangle', 'stairs'),
                     use_dropout=(True, False),
                     use_shake_shake=(True, False),
                     use_shake_drop=(True, False)):
    """Assemble the ResNet search space.

    Shake-drop probability and the dropout shape/rate hyperparameters are
    added only when their toggle can be True, and are conditioned on it.
    """
    cs = CS.ConfigurationSpace()

    groups_hp = get_hyperparameter(CS.UniformIntegerHyperparameter,
                                   "num_groups", num_groups)
    cs.add_hyperparameter(groups_hp)
    per_group_hp = get_hyperparameter(CS.UniformIntegerHyperparameter,
                                      "blocks_per_group", blocks_per_group)
    cs.add_hyperparameter(per_group_hp)

    add_hyperparameter(cs, CS.CategoricalHyperparameter, "activation",
                       activation)
    dropout_toggle = add_hyperparameter(cs, CS.CategoricalHyperparameter,
                                        "use_dropout", use_dropout)
    add_hyperparameter(cs, CS.CategoricalHyperparameter, "use_shake_shake",
                       use_shake_shake)
    shake_drop_toggle = add_hyperparameter(cs, CS.CategoricalHyperparameter,
                                           "use_shake_drop", use_shake_drop)

    # Shake-drop probability only exists if shake-drop can be enabled.
    if True in use_shake_drop:
        prob_hp = add_hyperparameter(cs, CS.UniformFloatHyperparameter,
                                     "max_shake_drop_probability",
                                     max_shake_drop_probability)
        cs.add_condition(CS.EqualsCondition(prob_hp, shake_drop_toggle, True))

    add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'resnet_shape',
                       resnet_shape)
    add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, "max_units",
                       max_units)

    # Dropout shape/rate only exist if dropout can be enabled.
    if True in use_dropout:
        shape_hp = add_hyperparameter(cs, CSH.CategoricalHyperparameter,
                                      'dropout_shape', dropout_shape)
        rate_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter,
                                     "max_dropout", max_dropout)
        cs.add_condition(CS.EqualsCondition(shape_hp, dropout_toggle, True))
        cs.add_condition(CS.EqualsCondition(rate_hp, dropout_toggle, True))

    return cs
def get_configspace(self):
    """Build the configuration space from the config dict ``self.cd``.

    Optional hyperparameters are added only when ``_get_*`` returned a
    hyperparameter (they return a falsy value when the key is absent).
    ``embedding_size`` is conditional on a 'random' embedding source.

    Fix: ``embedding_source_c.is_legal('random')`` was previously called
    even when ``embedding_source_c`` was None (the ``if embedding_source_c``
    guard above shows None is possible), raising AttributeError. A None
    guard now routes that case to the unconditional embedding-size branch,
    matching the "add embedding size if no source is specified" comment.

    :return: CS.ConfigurationSpace
    """
    cd = self.cd
    cs = CS.ConfigurationSpace()

    lr_c = self._get_range_uniform('lr', cd)
    latent_distribution_c = self._get_categorical('latent_distribution', cd)
    optimizer_c = self._get_categorical('optimizer', cd)
    n_latent_c = self._get_range_integer('n_latent', cd)
    enc_hidden_dim_c = self._get_range_integer('enc_hidden_dim', cd)
    batch_size_c = self._get_range_integer('batch_size', cd)
    cs.add_hyperparameters([
        batch_size_c, lr_c, latent_distribution_c, optimizer_c, n_latent_c,
        enc_hidden_dim_c
    ])

    ## optional hyperparameters
    target_sparsity_c = self._get_range_uniform('target_sparsity', cd)
    if target_sparsity_c:
        cs.add_hyperparameters([target_sparsity_c])
    coherence_reg_penalty_c = self._get_range_uniform(
        'coherence_regularizer_penalty', cd)
    if coherence_reg_penalty_c:
        cs.add_hyperparameters([coherence_reg_penalty_c])
    embedding_source_c = self._get_categorical('embedding_source', cd)
    if embedding_source_c:
        cs.add_hyperparameters([embedding_source_c])
    fixed_embedding_c = self._get_categorical('fixed_embedding', cd)
    if fixed_embedding_c:
        cs.add_hyperparameters([fixed_embedding_c])

    # BUG FIX: guard against a missing embedding source before is_legal().
    if embedding_source_c is not None and embedding_source_c.is_legal(
            'random'):
        ## if NOT legal, don't add embedding size as a hyperparameter at
        ## all, as it's determined by the embedding
        embedding_size_c = self._get_range_integer('embedding_size', cd)
        cs.add_hyperparameters([embedding_size_c])
        cond_embed = CS.EqualsCondition(embedding_size_c,
                                        embedding_source_c, 'random')
        cs.add_condition(cond_embed)
    else:
        ## add embedding size if no source is specified
        embedding_size_c = self._get_range_integer('embedding_size', cd)
        cs.add_hyperparameters([embedding_size_c])

    if 'vmf' in self.cd['latent_distribution']:
        kappa_c = self._get_range_uniform('kappa', cd)
        cond_kappa = CS.EqualsCondition(kappa_c, latent_distribution_c,
                                        'vmf')
        cs.add_hyperparameters([kappa_c])
        cs.add_condition(cond_kappa)  # only use kappa_c if condition is met
    return cs
def get_configspace():
    """Define all the hyperparameters to be optimised.

    Returns:
        CS.ConfigurationSpace with architecture and training
        hyperparameters; sgd_momentum and nesterov are only active when
        the optimizer is 'sgd'.
    """
    cs = CS.ConfigurationSpace()

    dense_units = CSH.UniformIntegerHyperparameter('dense_units',
                                                   lower=64,
                                                   upper=256,
                                                   default_value=128)
    # BUG FIX: default_value was the string '1e-2'; it must be a float.
    initial_lr = CSH.UniformFloatHyperparameter('initial_lr',
                                                lower=1e-3,
                                                upper=1e-1,
                                                default_value=1e-2,
                                                log=True)
    optimizer = CSH.CategoricalHyperparameter('optimizer',
                                              get('opti_dict').keys())
    sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum',
                                                  lower=0.0,
                                                  upper=0.99,
                                                  default_value=0.9,
                                                  log=False)
    nesterov = CSH.CategoricalHyperparameter('nesterov', ['True', 'False'])
    cs.add_hyperparameters(
        [initial_lr, optimizer, sgd_momentum, nesterov, dense_units])

    lr_scheduler = CSH.CategoricalHyperparameter('lr_scheduler',
                                                 ['Exponential', 'Cosine'])
    weight_decay = CSH.UniformFloatHyperparameter('weight_decay',
                                                  lower=1e-5,
                                                  upper=1e-3,
                                                  default_value=3e-4,
                                                  log=True)
    drop_path_prob = CSH.UniformFloatHyperparameter('drop_path_prob',
                                                    lower=0,
                                                    upper=0.4,
                                                    default_value=0.3,
                                                    log=False)
    grad_clip_value = CSH.UniformIntegerHyperparameter('grad_clip_value',
                                                       lower=4,
                                                       upper=8,
                                                       default_value=5)
    cs.add_hyperparameters(
        [lr_scheduler, drop_path_prob, weight_decay, grad_clip_value])

    # sgd_momentum / nesterov only apply to the SGD optimizer.
    cond = CS.EqualsCondition(sgd_momentum, optimizer, 'sgd')
    cs.add_condition(cond)
    cond2 = CS.EqualsCondition(nesterov, optimizer, 'sgd')
    cs.add_condition(cond2)
    return cs
def get_config_space(num_layers=(1, 15),
                     max_units=((10, 1024), True),
                     activation=('sigmoid', 'tanh', 'relu'),
                     mlp_shape=('funnel', 'long_funnel', 'diamond',
                                'hexagon', 'brick', 'triangle', 'stairs'),
                     max_dropout=(0, 1.0),
                     use_dropout=(True, False)):
    """Assemble the shaped-MLP search space; max_dropout is conditional on
    the use_dropout toggle."""
    cs = CS.ConfigurationSpace()

    shape_hp = get_hyperparameter(CSH.CategoricalHyperparameter,
                                  'mlp_shape', mlp_shape)
    cs.add_hyperparameter(shape_hp)
    layers_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter,
                                   'num_layers', num_layers)
    cs.add_hyperparameter(layers_hp)
    units_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter,
                                  "max_units", max_units)
    cs.add_hyperparameter(units_hp)

    dropout_toggle = add_hyperparameter(cs, CS.CategoricalHyperparameter,
                                        "use_dropout", use_dropout)
    dropout_rate = add_hyperparameter(cs, CSH.UniformFloatHyperparameter,
                                      "max_dropout", max_dropout)
    # max_dropout is only sampled while dropout is enabled.
    cs.add_condition(CS.EqualsCondition(dropout_rate, dropout_toggle, True))

    add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'activation',
                       activation)
    return cs
def get_config_space(growth_rate_range=(12, 40), nr_blocks=(3, 4),
                     layer_range=([1, 12], [6, 24], [12, 64], [12, 64]),
                     num_init_features=(32, 128), **kwargs):
    """Build the DenseNet search space.

    Per-block layer counts are conditional on the sampled number of
    blocks; dropout rate is conditional on the use_dropout toggle.

    Fix (consistency): the body referenced the bare name ``ConfigSpace``,
    which is NOT bound by the local ``import ConfigSpace as CS`` and only
    worked if a module-level import happened to exist; it now uses ``CS``
    consistently, and ``get_hyperparameter`` is imported locally next to
    ``add_hyperparameter`` so the function is self-contained.
    """
    import ConfigSpace as CS
    import ConfigSpace.hyperparameters as CSH
    from autoPyTorch.utils.config_space_hyperparameter import (
        add_hyperparameter, get_hyperparameter)

    cs = CS.ConfigurationSpace()

    growth_rate_hp = get_hyperparameter(CS.UniformIntegerHyperparameter,
                                        'growth_rate', growth_rate_range)
    cs.add_hyperparameter(growth_rate_hp)

    blocks_hp = get_hyperparameter(CS.UniformIntegerHyperparameter,
                                   'blocks', nr_blocks)
    cs.add_hyperparameter(blocks_hp)

    use_dropout = add_hyperparameter(cs, CSH.CategoricalHyperparameter,
                                     'use_dropout', [True, False])
    dropout = add_hyperparameter(cs, CSH.UniformFloatHyperparameter,
                                 'dropout', [0.0, 1.0])
    cs.add_condition(CS.EqualsCondition(dropout, use_dropout, True))

    # nr_blocks may be (min, max) or ((min, max), log-flag).
    if type(nr_blocks[0]) == int:
        min_blocks = nr_blocks[0]
        max_blocks = nr_blocks[1]
    else:
        min_blocks = nr_blocks[0][0]
        max_blocks = nr_blocks[0][1]

    # One layer-count hyperparameter per potential block; blocks beyond
    # the minimum are only active when enough blocks are sampled.
    for i in range(1, max_blocks + 1):
        layer_hp = get_hyperparameter(CS.UniformIntegerHyperparameter,
                                      'layer_in_block_%d' % i,
                                      layer_range[i - 1])
        cs.add_hyperparameter(layer_hp)
        if i > min_blocks:
            cs.add_condition(
                CS.GreaterThanCondition(layer_hp, blocks_hp, i - 1))
    return cs
def get_configspace():
    """
    It builds the configuration space with the needed hyperparameters.
    It is easily possible to implement different types of hyperparameters.
    Beside float-hyperparameters on a log scale, it is also able to handle
    categorical input parameter.
    :return: ConfigurationsSpace-Object
    """
    cs = CS.ConfigurationSpace()

    # BUG FIX: default_value was the string '1e-2'; it must be the float 1e-2.
    lr = CSH.UniformFloatHyperparameter(
        'lr', lower=1e-6, upper=1e-1, default_value=1e-2, log=True)

    # For demonstration purposes, we add different optimizers as categorical
    # hyperparameters: 'Adam' and 'SGD'. SGD has an extra 'momentum' term.
    optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])
    sgd_momentum = CSH.UniformFloatHyperparameter(
        'sgd_momentum', lower=0.0, upper=0.99, default_value=0.9, log=False)
    cs.add_hyperparameters([lr, optimizer, sgd_momentum])

    # The hyperparameter sgd_momentum will be used only if the configuration
    # contains 'SGD' as optimizer.
    cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
    cs.add_condition(cond)

    num_conv_layers = CSH.UniformIntegerHyperparameter(
        'num_conv_layers', lower=1, upper=3, default_value=2)
    num_filters_1 = CSH.UniformIntegerHyperparameter(
        'num_filters_1', lower=4, upper=64, default_value=16, log=True)
    num_filters_2 = CSH.UniformIntegerHyperparameter(
        'num_filters_2', lower=4, upper=64, default_value=16, log=True)
    num_filters_3 = CSH.UniformIntegerHyperparameter(
        'num_filters_3', lower=4, upper=64, default_value=16, log=True)
    cs.add_hyperparameters(
        [num_conv_layers, num_filters_1, num_filters_2, num_filters_3])

    # You can also use inequality conditions: filter counts for layers 2/3
    # are only active when enough conv layers are sampled.
    cond = CS.GreaterThanCondition(num_filters_2, num_conv_layers, 1)
    cs.add_condition(cond)
    cond = CS.GreaterThanCondition(num_filters_3, num_conv_layers, 2)
    cs.add_condition(cond)

    dropout_rate = CSH.UniformFloatHyperparameter(
        'dropout_rate', lower=0.0, upper=0.9, default_value=0.5, log=False)
    num_fc_units = CSH.UniformIntegerHyperparameter(
        'num_fc_units', lower=8, upper=256, default_value=32, log=True)
    cs.add_hyperparameters([dropout_rate, num_fc_units])

    return cs
def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:
    """
    The SVM configuration space based on the search space from
    auto-sklearn:
    https://github.com/automl/auto-sklearn/blob/master/autosklearn/pipeline/components/classification/libsvm_svc.py

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace('sklearn.svm.SVC', seed)

    C = ConfigSpace.UniformFloatHyperparameter(
        name='C', lower=0.03125, upper=32768, log=True, default_value=1.0)
    kernel = ConfigSpace.CategoricalHyperparameter(
        name='kernel', choices=['rbf', 'poly', 'sigmoid'],
        default_value='rbf')
    degree = ConfigSpace.UniformIntegerHyperparameter(
        name='degree', lower=1, upper=5, default_value=3)
    gamma = ConfigSpace.UniformFloatHyperparameter(
        name='gamma', lower=3.0517578125e-05, upper=8, log=True,
        default_value=0.1)
    coef0 = ConfigSpace.UniformFloatHyperparameter(
        name='coef0', lower=-1, upper=1, default_value=0)
    shrinking = ConfigSpace.CategoricalHyperparameter(
        name='shrinking', choices=[True, False], default_value=True)
    tol = ConfigSpace.UniformFloatHyperparameter(
        name='tol', lower=1e-5, upper=1e-1, default_value=1e-3, log=True)
    max_iter = ConfigSpace.UnParametrizedHyperparameter('max_iter', -1)

    hyperparameters = [C, kernel, degree, gamma, coef0, shrinking, tol,
                       max_iter]

    # degree only applies to the polynomial kernel; coef0 applies to the
    # polynomial and sigmoid kernels.
    conditions = [
        ConfigSpace.EqualsCondition(degree, kernel, 'poly'),
        ConfigSpace.InCondition(coef0, kernel, ['poly', 'sigmoid']),
    ]

    return ConfigSpaceWrapper(cs, hyperparameters, conditions)
def get_configspace():
    """Create the hyperparameter configuration space for tuning.

    :return: CS.ConfigurationSpace; sgd_momentum is only active when the
        optimizer is 'SGD'.
    """
    cs = CS.ConfigurationSpace()

    # BUG FIX: default_value was the string '1e-2'; it must be the float 1e-2.
    lr = CSH.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1,
                                        default_value=1e-2, log=True)
    optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])
    sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum',
                                                  lower=0.0, upper=0.99,
                                                  default_value=0.9,
                                                  log=False)
    dropout_rate_1 = CSH.UniformFloatHyperparameter('dropout_rate_1',
                                                    lower=0.0, upper=0.5,
                                                    default_value=0.1,
                                                    log=False)
    dropout_rate_2 = CSH.UniformFloatHyperparameter('dropout_rate_2',
                                                    lower=0.0, upper=0.5,
                                                    default_value=0.1,
                                                    log=False)
    num_fc_units_1 = CSH.UniformIntegerHyperparameter('num_fc_units_1',
                                                      lower=512, upper=2048,
                                                      default_value=1024,
                                                      log=True)
    num_fc_units_2 = CSH.UniformIntegerHyperparameter('num_fc_units_2',
                                                      lower=256, upper=512,
                                                      default_value=256,
                                                      log=True)
    activation = CSH.CategoricalHyperparameter('activation',
                                               ['tanh', 'relu'])

    cs.add_hyperparameters([
        lr, optimizer, sgd_momentum, dropout_rate_1, dropout_rate_2,
        num_fc_units_1, num_fc_units_2, activation
    ])

    # The hyperparameter sgd_momentum will be used only if the
    # configuration contains 'SGD' as optimizer.
    cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
    cs.add_condition(cond)
    return cs
def get_hyperparameter_search_space(
    dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None,
    num_layers: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter='num_layers',
        value_range=(4, 64),
        default_value=16,
    ),
    num_blocks: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter='num_blocks',
        value_range=(3, 4),
        default_value=3,
    ),
    growth_rate: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter='growth_rate',
        value_range=(12, 40),
        default_value=20,
    ),
    activation: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter='activation',
        value_range=tuple(_activations.keys()),
        default_value=list(_activations.keys())[0],
    ),
    use_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter='use_dropout',
        value_range=(True, False),
        default_value=False,
    ),
    dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter='dropout',
        value_range=(0, 0.5),
        default_value=0.2,
    ),
) -> ConfigurationSpace:
    """Build the DenseNet-style search space: one layer-count
    hyperparameter per potential block, active only when the sampled
    num_blocks is large enough; dropout is conditional on its toggle."""
    cs = CS.ConfigurationSpace()

    add_hyperparameter(cs, num_layers, UniformIntegerHyperparameter)
    add_hyperparameter(cs, growth_rate, UniformIntegerHyperparameter)

    min_num_blocks, max_num_blocks = num_blocks.value_range
    blocks_hp = get_hyperparameter(num_blocks, UniformIntegerHyperparameter)
    cs.add_hyperparameter(blocks_hp)

    add_hyperparameter(cs, activation, CategoricalHyperparameter)

    use_dropout = get_hyperparameter(use_dropout, CategoricalHyperparameter)
    dropout = get_hyperparameter(dropout, UniformFloatHyperparameter)
    cs.add_hyperparameters([use_dropout, dropout])
    cs.add_condition(CS.EqualsCondition(dropout, use_dropout, True))

    for block_idx in range(1, int(max_num_blocks) + 1):
        per_block_space = HyperparameterSearchSpace(
            hyperparameter='layer_in_block_%d' % block_idx,
            value_range=num_layers.value_range,
            default_value=num_layers.default_value,
            log=num_layers.log)
        per_block_hp = get_hyperparameter(per_block_space,
                                          UniformIntegerHyperparameter)
        cs.add_hyperparameter(per_block_hp)
        if block_idx > int(min_num_blocks):
            cs.add_condition(
                CS.GreaterThanCondition(per_block_hp, blocks_hp,
                                        block_idx - 1))
    return cs
def setUp(self):
    """Create a fixture space: one categorical parent and one child
    hyperparameter per parent value, each gated by an equals-condition."""
    self.configspace = CS.ConfigurationSpace()

    self.HPs = [
        CS.CategoricalHyperparameter('parent', [1, 2, 3]),
        CS.CategoricalHyperparameter('child1_x1', ['foo', 'bar']),
        CS.UniformFloatHyperparameter('child2_x1', lower=-1, upper=1),
        CS.UniformIntegerHyperparameter('child3_x1', lower=-2, upper=5),
    ]
    self.configspace.add_hyperparameters(self.HPs)

    # child N is active exactly when parent == N.
    self.conditions = [
        CS.EqualsCondition(child, self.HPs[0], parent_value)
        for child, parent_value in zip(self.HPs[1:], (1, 2, 3))
    ]
    for cond in self.conditions:
        self.configspace.add_condition(cond)
def test_sample_types_conditional(self):
    """Conditionally-active integer hyperparameters must be sampled with
    an integer type for whichever branch is active."""
    import ConfigSpace as cs
    import numpy as np

    from deephyper.evaluator import Evaluator
    from deephyper.problem import HpProblem
    from deephyper.search.hps import CBO

    problem = HpProblem()

    # Categorical choice that activates exactly one integer HP.
    choice = problem.add_hyperparameter(
        name="choice",
        value=["choice1", "choice2"],
    )

    # One integer hyperparameter per branch.
    x1_int = problem.add_hyperparameter(name="x1_int", value=(1, 10))
    x2_int = problem.add_hyperparameter(name="x2_int", value=(1, 10))

    problem.add_condition(cs.EqualsCondition(x1_int, choice, "choice1"))
    problem.add_condition(cs.EqualsCondition(x2_int, choice, "choice2"))

    def run(config):
        # The active branch's value must be a numpy-integer-compatible type.
        if config["choice"] == "choice1":
            assert np.issubdtype(type(config["x1_int"]), np.integer)
        else:
            assert np.issubdtype(type(config["x2_int"]), np.integer)
        return 0

    CBO(
        problem,
        Evaluator.create(run, method="serial"),
        random_state=42,
        surrogate_model="DUMMY",
    ).search(10)
def get_config_space(user_updates=None):
    """Build the shaped-MLP search space; dropout shape/rate are
    conditional on the use_dropout toggle."""
    cs = CS.ConfigurationSpace()

    shapes = ('funnel', 'long_funnel', 'diamond', 'hexagon', 'brick',
              'triangle', 'stairs')

    cs.add_hyperparameter(CSH.CategoricalHyperparameter('mlp_shape', shapes))
    cs.add_hyperparameter(CSH.UniformIntegerHyperparameter(
        'num_layers', lower=1, upper=15))
    cs.add_hyperparameter(CSH.UniformIntegerHyperparameter(
        "max_units", lower=10, upper=1024, log=True))

    use_dropout = cs.add_hyperparameter(
        CS.CategoricalHyperparameter("use_dropout", [True, False],
                                     default_value=True))
    dropout_shape = cs.add_hyperparameter(
        CSH.CategoricalHyperparameter('dropout_shape', shapes))
    max_dropout = cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter("max_dropout", lower=0, upper=0.8,
                                       default_value=0.2))

    # Dropout children are only sampled while dropout is enabled.
    cs.add_condition(CS.EqualsCondition(dropout_shape, use_dropout, True))
    cs.add_condition(CS.EqualsCondition(max_dropout, use_dropout, True))

    cs.add_hyperparameter(
        CSH.CategoricalHyperparameter('activation',
                                      ('sigmoid', 'tanh', 'relu')))
    return (cs)
def insert_inter_node_hyperparameter_dependencies(self, config_space, dataset_info=None, **pipeline_config):
    """Make the embedding hyperparameter conditional on the 'none'
    preprocessor, when embeddings are applicable at all."""
    categorical_features = pipeline_config['categorical_features']
    has_categoricals = (categorical_features is not None
                        and any(categorical_features))
    if not has_categoricals or 'none' not in pipeline_config['preprocessors']:
        # no categorical features -> no embedding
        return config_space

    embedding_hyperparameter = config_space.get_hyperparameter(
        EmbeddingSelector.get_name() + ConfigWrapper.delimiter + "embedding")
    preprocessor_hyperparameter = config_space.get_hyperparameter(
        PreprocessorSelector.get_name() + ConfigWrapper.delimiter + "preprocessor")

    # The embedding is only meaningful when the raw ('none') preprocessor
    # is selected.
    config_space.add_condition(
        ConfigSpace.EqualsCondition(embedding_hyperparameter,
                                    preprocessor_hyperparameter, "none"))
    return config_space
def get_configspace():
    """
    Here we define the configuration space for the hyperparameters for the
    model.

    Returns:
        ConfigSpace-object
    """
    cs = CS.ConfigurationSpace()

    # BUG FIX: default_value was the string '1e-2'; it must be the float 1e-2.
    cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
        'lr', lower=1e-6, upper=1e-2, default_value=1e-2, log=True))
    cs.add_hyperparameter(CSH.CategoricalHyperparameter(
        'act_f', ['ReLU', 'Tanh'], default_value='ReLU'))

    # For demonstration purposes, we add different optimizers as categorical
    # hyperparameters: 'Adam' and 'SGD'. SGD has an extra 'momentum' term.
    optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])
    cs.add_hyperparameter(optimizer)
    sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum', lower=0.0,
                                                  upper=0.99,
                                                  default_value=0.9,
                                                  log=False)
    cs.add_hyperparameter(sgd_momentum)

    # The hyperparameter sgd_momentum will be used only if the
    # configuration contains 'SGD' as optimizer.
    cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
    cs.add_condition(cond)

    # The hidden-unit counts for layers 2 and 3 are conditional parameters,
    # gated on the number of hidden layers via inequality conditions.
    num_hidden_layers = CSH.UniformIntegerHyperparameter('num_hidden_layers',
                                                         lower=1, upper=3,
                                                         default_value=1)
    cs.add_hyperparameter(num_hidden_layers)
    hidden_dim_1 = CSH.UniformIntegerHyperparameter('hidden_dim_1',
                                                    lower=100, upper=1000,
                                                    log=False)
    cs.add_hyperparameter(hidden_dim_1)
    hidden_dim_2 = CSH.UniformIntegerHyperparameter('hidden_dim_2',
                                                    lower=100, upper=1000,
                                                    log=False)
    cs.add_hyperparameter(hidden_dim_2)
    hidden_dim_3 = CSH.UniformIntegerHyperparameter('hidden_dim_3',
                                                    lower=100, upper=1000,
                                                    log=False)
    cs.add_hyperparameter(hidden_dim_3)

    # Use inequality conditions
    cond = CS.GreaterThanCondition(hidden_dim_2, num_hidden_layers, 1)
    cs.add_condition(cond)
    cond = CS.GreaterThanCondition(hidden_dim_3, num_hidden_layers, 2)
    cs.add_condition(cond)
    return cs
def get_hyperparameter_search_space(
    dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None,
    use_augmenter: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="use_augmenter",
        value_range=(True, False),
        default_value=True,
    ),
    scale_offset: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="scale_offset",
        value_range=(0, 0.4),
        default_value=0.2,
    ),
    translate_percent_offset: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="translate_percent_offset",
        value_range=(0, 0.4),
        default_value=0.2),
    shear: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="shear",
        value_range=(0, 45),
        default_value=30,
    ),
    rotate: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="rotate",
        value_range=(0, 360),
        default_value=45,
    ),
) -> ConfigurationSpace:
    """Build the affine-augmenter search space; all transform parameters
    are only active while the augmenter is switched on."""
    cs = ConfigurationSpace()

    use_augmenter = get_hyperparameter(use_augmenter, CategoricalHyperparameter)
    scale_offset = get_hyperparameter(scale_offset, UniformFloatHyperparameter)
    translate_percent_offset = get_hyperparameter(translate_percent_offset,
                                                  UniformFloatHyperparameter)
    shear = get_hyperparameter(shear, UniformIntegerHyperparameter)
    rotate = get_hyperparameter(rotate, UniformIntegerHyperparameter)

    cs.add_hyperparameters([use_augmenter, scale_offset,
                            translate_percent_offset])
    cs.add_hyperparameters([shear, rotate])

    # Only sample the transform parameters when use_augmenter is True.
    for transform_hp in (scale_offset, translate_percent_offset, shear,
                         rotate):
        cs.add_condition(CS.EqualsCondition(transform_hp, use_augmenter,
                                            True))
    return cs
def get_configspace():
    """Builds the config space as described in the header docstring."""
    cs = CS.ConfigurationSpace()

    lr = CS.UniformFloatHyperparameter('lr', lower=1e-5, upper=1e-2,
                                       default_value=1e-4, log=True)
    optimizer = CS.CategoricalHyperparameter('optimizer', ['adam', 'sgd'])
    # momentum is SGD-only; epsilon is Adam-only (conditions below).
    momentum = CS.UniformFloatHyperparameter('momentum', lower=0.,
                                             upper=1.00, default_value=0.9)
    epsilon = CS.UniformFloatHyperparameter('epsilon', lower=1e-2, upper=1.,
                                            default_value=0.1)
    bs = CS.UniformIntegerHyperparameter('bs', lower=4, upper=256)
    first_layer = CS.UniformIntegerHyperparameter('first_layer', lower=16,
                                                  upper=64)
    second_layer = CS.UniformIntegerHyperparameter('second_layer', lower=8,
                                                   upper=64)
    leaky1 = CS.CategoricalHyperparameter('leaky1', [True, False])
    leaky2 = CS.CategoricalHyperparameter('leaky2', [True, False])
    leaky3 = CS.CategoricalHyperparameter('leaky3', [True, False])

    cs.add_hyperparameters([lr, optimizer, momentum, epsilon, bs,
                            first_layer, second_layer, leaky1, leaky2,
                            leaky3])

    cs.add_condition(CS.EqualsCondition(momentum, optimizer, 'sgd'))
    cs.add_condition(CS.EqualsCondition(epsilon, optimizer, 'adam'))
    return cs
def get_hyperparameter_search_space(
    dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None,
    num_blocks: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="num_blocks",
        value_range=(1, 10),
        default_value=5),
    num_filters: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="num_filters",
        value_range=(4, 64),
        default_value=32),
    kernel_size: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="kernel_size",
        value_range=(4, 64),
        default_value=32),
    use_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="use_dropout",
        value_range=(True, False),
        default_value=False),
    dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
        hyperparameter="dropout",
        value_range=(0, 0.5),
        default_value=0.1),
) -> ConfigurationSpace:
    """Build the convolutional search space: one num_filters
    hyperparameter per potential block, active only when enough blocks
    are sampled; dropout is conditional on its toggle."""
    cs = ConfigurationSpace()

    min_num_blocks, max_num_blocks = num_blocks.value_range
    num_blocks_hp = get_hyperparameter(num_blocks,
                                       UniformIntegerHyperparameter)
    cs.add_hyperparameter(num_blocks_hp)

    add_hyperparameter(cs, kernel_size, UniformIntegerHyperparameter)

    use_dropout_hp = get_hyperparameter(use_dropout,
                                        CategoricalHyperparameter)
    cs.add_hyperparameter(use_dropout_hp)
    dropout_hp = get_hyperparameter(dropout, UniformFloatHyperparameter)
    cs.add_hyperparameter(dropout_hp)
    cs.add_condition(CS.EqualsCondition(dropout_hp, use_dropout_hp, True))

    for block_idx in range(0, int(max_num_blocks)):
        block_filter_space = HyperparameterSearchSpace(
            f"num_filters_{block_idx}",
            value_range=num_filters.value_range,
            default_value=num_filters.default_value,
            log=num_filters.log)
        block_filters_hp = get_hyperparameter(block_filter_space,
                                              UniformIntegerHyperparameter)
        cs.add_hyperparameter(block_filters_hp)
        if block_idx >= int(min_num_blocks):
            cs.add_condition(
                CS.GreaterThanCondition(block_filters_hp, num_blocks_hp,
                                        block_idx))
    return cs
def get_configspace():
    """Search space over two forest classifiers ('skrf' / 'sporf') with
    classifier-specific max_features ranges."""
    cs = CS.ConfigurationSpace()

    clf = CSH.CategoricalHyperparameter('clf', ['skrf', 'sporf'])
    sporf_fc = CSH.CategoricalHyperparameter(
        'sporf_fc', [1.0, 1.5, 2.0, 2.5, 3.0, 4.0, 8.0])
    cs.add_hyperparameters([clf, sporf_fc])

    mf_sporf = CSH.UniformFloatHyperparameter('max_features_sporf',
                                              lower=0.01, upper=4.0)
    mf_sk = CSH.UniformFloatHyperparameter('max_features_sk',
                                           lower=0.01, upper=0.9)
    cs.add_hyperparameters([mf_sporf, mf_sk])

    cs.add_hyperparameter(
        CS.UniformIntegerHyperparameter('max_depth', log=True,
                                        lower=2, upper=65535))

    # sporf_fc and the wide max_features range (0, 4) are sporf-only;
    # the narrow max_features range (0, 1) is skrf-only.
    cs.add_condition(CS.EqualsCondition(sporf_fc, clf, "sporf"))
    cs.add_condition(CS.EqualsCondition(mf_sporf, clf, "sporf"))
    cs.add_condition(CS.EqualsCondition(mf_sk, clf, "skrf"))
    return (cs)
def get_configspace(self):
    """Assemble the configuration space from pre-built hyperparameters
    stored on the instance; prior-specific parameters are only active for
    the matching prior value."""
    cs = CS.ConfigurationSpace()

    cs.add_hyperparameters([self.lr, self.momentum])
    cs.add_hyperparameters([self.n_layers])
    cs.add_hyperparameters(
        [self.prior, self.gauss_σ2, self.pMOM_σ2, self.pMOM_r])
    cs.add_hyperparameters([self.BMA_prior])

    # Each prior-specific parameter is gated on its prior being selected.
    for child, prior_value in ((self.gauss_σ2, 'gauss'),
                               (self.pMOM_σ2, 'pMOM'),
                               (self.pMOM_r, 'pMOM')):
        cs.add_condition(CS.EqualsCondition(child, self.prior, prior_value))

    return cs
def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str, str]] = None) -> ConfigurationSpace:
    """Build the blur-augmenter search space; the sigma parameters are
    only active while the augmenter is switched on."""
    cs = ConfigurationSpace()

    use_augmenter = CategoricalHyperparameter('use_augmenter',
                                              choices=[True, False],
                                              default_value=True)
    sigma_min = UniformFloatHyperparameter('sigma_min', lower=0, upper=3,
                                           default_value=0)
    sigma_offset = UniformFloatHyperparameter('sigma_offset', lower=0,
                                              upper=3, default_value=0.5)
    cs.add_hyperparameters([use_augmenter, sigma_min, sigma_offset])

    # Only sample the sigma parameters when use_augmenter is True.
    for sigma_hp in (sigma_min, sigma_offset):
        cs.add_condition(CS.EqualsCondition(sigma_hp, use_augmenter, True))
    return cs
def get_configspace():
    """Builds the config space as described in the header docstring."""
    cs = CS.ConfigurationSpace()

    lr = CS.UniformFloatHyperparameter('lr', lower=1e-5, upper=1e-2,
                                       default_value=1e-4, log=True)
    optimizer = CS.CategoricalHyperparameter('optimizer', ['adam', 'sgd'])
    # Quantized (q=5e-2) optimizer-specific parameters; momentum is
    # SGD-only and epsilon is Adam-only (conditions below).
    momentum = CS.UniformFloatHyperparameter('momentum', lower=0.,
                                             upper=1.00, default_value=0.9,
                                             q=5e-2)
    epsilon = CS.UniformFloatHyperparameter('epsilon', lower=1e-2, upper=1.,
                                            default_value=0.1, q=5e-2)
    sync_bn = CS.CategoricalHyperparameter('sync_bn', [True, False])
    weight_ratio = CS.UniformFloatHyperparameter('weight_ratio', lower=1.,
                                                 upper=6., default_value=5.)
    loss_criterion = CS.CategoricalHyperparameter(
        'loss_criterion', ['cross_entropy', 'mce'])

    cs.add_hyperparameters([lr, optimizer, momentum, epsilon, sync_bn,
                            weight_ratio, loss_criterion])

    cs.add_condition(CS.EqualsCondition(momentum, optimizer, 'sgd'))
    cs.add_condition(CS.EqualsCondition(epsilon, optimizer, 'adam'))
    return cs
def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str, str]] = None
) -> ConfigurationSpace:
    """
    Return the configuration space for this (affine-style) augmenter.

    Parameters
    ----------
    dataset_properties : Optional[Dict[str, str]]
        Not used here; kept for interface compatibility.

    Returns
    -------
    ConfigurationSpace
        Space with ``use_augmenter`` plus the conditional
        scale/translate/shear/rotate hyperparameters.
    """
    cs = ConfigurationSpace()
    scale_hp = UniformFloatHyperparameter('scale_offset',
                                          lower=0, upper=0.4,
                                          default_value=0.2)
    translate_hp = UniformFloatHyperparameter('translate_percent_offset',
                                              lower=0, upper=0.4,
                                              default_value=0.2)
    shear_hp = UniformIntegerHyperparameter('shear',
                                            lower=0, upper=45,
                                            default_value=30)
    rotate_hp = UniformIntegerHyperparameter('rotate',
                                             lower=0, upper=360,
                                             default_value=45)
    enable_hp = CategoricalHyperparameter('use_augmenter',
                                          choices=[True, False],
                                          default_value=True)
    cs.add_hyperparameters([scale_hp, translate_hp])
    cs.add_hyperparameters([shear_hp, rotate_hp, enable_hp])
    # All transform hyperparameters only apply while the augmenter is on.
    for dependent_hp in (scale_hp, translate_hp, shear_hp, rotate_hp):
        cs.add_condition(CS.EqualsCondition(dependent_hp, enable_hp, True))
    return cs
def get_config_space(num_layers=((1, 15), False),
                     num_units=((10, 1024), True),
                     activation=('sigmoid', 'tanh', 'relu'),
                     dropout=(0.0, 0.8),
                     use_dropout=(True, False),
                     **kwargs):
    """
    Build the MLP configuration space.

    Numeric arguments appear to be search-space specs of the form
    ``((lower, upper), log_scale)`` and categorical arguments a tuple of
    choices (interpreted by ``get_hyperparameter`` / ``add_hyperparameter``
    — TODO confirm against their definitions).  Per-layer overrides can be
    passed through ``kwargs`` as ``num_units_<i>`` / ``dropout_<i>``.

    Raises
    ------
    ValueError
        If ``kwargs`` contains entries that match no layer index.
    """
    cs = CS.ConfigurationSpace()
    num_layers_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter,
                                       'num_layers', num_layers)
    cs.add_hyperparameter(num_layers_hp)
    use_dropout_hp = add_hyperparameter(cs, CS.CategoricalHyperparameter,
                                        "use_dropout", use_dropout)
    for i in range(1, num_layers[0][1] + 1):
        n_units_hp = get_hyperparameter(
            CSH.UniformIntegerHyperparameter, "num_units_%d" % i,
            kwargs.pop("num_units_%d" % i, num_units))
        cs.add_hyperparameter(n_units_hp)
        if i > num_layers[0][0]:
            # Layer i only exists when the sampled num_layers >= i.
            cs.add_condition(
                CS.GreaterThanCondition(n_units_hp, num_layers_hp, i - 1))
        if True in use_dropout:
            dropout_hp = get_hyperparameter(
                CSH.UniformFloatHyperparameter, "dropout_%d" % i,
                kwargs.pop("dropout_%d" % i, dropout))
            cs.add_hyperparameter(dropout_hp)
            dropout_condition_1 = CS.EqualsCondition(dropout_hp,
                                                     use_dropout_hp, True)
            if i > num_layers[0][0]:
                dropout_condition_2 = CS.GreaterThanCondition(
                    dropout_hp, num_layers_hp, i - 1)
                cs.add_condition(
                    CS.AndConjunction(dropout_condition_1,
                                      dropout_condition_2))
            else:
                cs.add_condition(dropout_condition_1)
    add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'activation',
                       activation)
    # Leftover kwargs mean a typo or an override for a non-existent layer.
    # Raise explicitly instead of `assert`, which is stripped under -O.
    if kwargs:
        raise ValueError(
            "Invalid hyperparameter updates for mlpnet: %s" % str(kwargs))
    return cs
def get_hyperparameter_search_space(dataset_properties: Optional[Dict] = None,
                                    min_num_gropus: int = 1,
                                    max_num_groups: int = 15,
                                    min_num_units: int = 10,
                                    max_num_units: int = 1024,
                                    min_num_groups: Optional[int] = None,
                                    ) -> ConfigurationSpace:
    """
    Build the configuration space for the MLP-shaped network backbone.

    Parameters
    ----------
    dataset_properties : Optional[Dict]
        Not used here; kept for interface compatibility.
    min_num_gropus : int
        Historical (typo'd) spelling of ``min_num_groups``, kept so that
        existing callers do not break.
    max_num_groups, min_num_units, max_num_units : int
        Bounds for the group-count and unit-count hyperparameters.
    min_num_groups : Optional[int]
        Correctly spelled alias; when given, it takes precedence over
        ``min_num_gropus``.

    Returns
    -------
    ConfigurationSpace
    """
    cs = ConfigurationSpace()

    # Prefer the correctly spelled parameter when provided.
    lower_num_groups = (min_num_groups if min_num_groups is not None
                        else min_num_gropus)

    # The number of groups that will compose the resnet. That is,
    # a group can have N Resblock. The M number of this N resblock
    # repetitions is num_groups
    num_groups = UniformIntegerHyperparameter(
        "num_groups", lower=lower_num_groups, upper=max_num_groups,
        default_value=5)

    mlp_shape = CategoricalHyperparameter('mlp_shape', choices=[
        'funnel', 'long_funnel', 'diamond', 'hexagon', 'brick', 'triangle',
        'stairs'
    ])

    activation = CategoricalHyperparameter(
        "activation", choices=list(_activations.keys())
    )

    max_units = UniformIntegerHyperparameter(
        "max_units",
        lower=min_num_units,
        upper=max_num_units,
    )

    output_dim = UniformIntegerHyperparameter(
        "output_dim",
        lower=min_num_units,
        upper=max_num_units
    )

    cs.add_hyperparameters([num_groups, activation, mlp_shape, max_units,
                            output_dim])

    # We can have dropout in the network for better generalization;
    # max_dropout is only active when use_dropout is True.
    use_dropout = CategoricalHyperparameter(
        "use_dropout", choices=[True, False])
    max_dropout = UniformFloatHyperparameter("max_dropout",
                                             lower=0.0, upper=1.0)
    cs.add_hyperparameters([use_dropout, max_dropout])
    cs.add_condition(CS.EqualsCondition(max_dropout, use_dropout, True))

    return cs
def get_config_space(user_updates=None):
    """
    Build the MLP configuration space with per-layer units and dropout.

    Parameters
    ----------
    user_updates : Optional[dict]
        Only a ``'num_layers'`` entry is honoured; when present it
        overrides the default ``(1, 15)`` layer-count range.
    """
    cs = CS.ConfigurationSpace()
    layers_range = (1, 15)
    units_range = (10, 1024)
    activation_choices = ('sigmoid', 'tanh', 'relu')
    dropout_range = (0.0, 0.8)
    if user_updates is not None and 'num_layers' in user_updates:
        layers_range = user_updates['num_layers']
    num_layers_hp = CSH.UniformIntegerHyperparameter(
        'num_layers', lower=layers_range[0], upper=layers_range[1])
    cs.add_hyperparameter(num_layers_hp)
    use_dropout_hp = cs.add_hyperparameter(
        CS.CategoricalHyperparameter("use_dropout", [True, False],
                                     default_value=True))
    for layer in range(1, layers_range[1] + 1):
        units_hp = CSH.UniformIntegerHyperparameter(
            "num_units_%d" % layer, lower=units_range[0],
            upper=units_range[1], log=True)
        cs.add_hyperparameter(units_hp)
        dropout_hp = CSH.UniformFloatHyperparameter(
            "dropout_%d" % layer, lower=dropout_range[0],
            upper=dropout_range[1])
        cs.add_hyperparameter(dropout_hp)
        # Dropout for this layer requires use_dropout == True ...
        enabled_cond = CS.EqualsCondition(dropout_hp, use_dropout_hp, True)
        if layer > layers_range[0]:
            # ... and, beyond the minimum depth, both units and dropout
            # require the network to actually have this many layers.
            cs.add_condition(
                CS.GreaterThanCondition(units_hp, num_layers_hp, layer - 1))
            depth_cond = CS.GreaterThanCondition(dropout_hp, num_layers_hp,
                                                 layer - 1)
            cs.add_condition(CS.AndConjunction(enabled_cond, depth_cond))
        else:
            cs.add_condition(enabled_cond)
    cs.add_hyperparameter(
        CSH.CategoricalHyperparameter('activation', activation_choices))
    return cs
def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str, str]] = None,
        min_num_blocks: int = 1,
        max_num_blocks: int = 10,
        min_num_filters: int = 4,
        max_num_filters: int = 64,
        min_kernel_size: int = 4,
        max_kernel_size: int = 64,
        min_dropout: float = 0.0,
        max_dropout: float = 0.5) -> ConfigurationSpace:
    """
    Build the configuration space for a convolutional network defined by
    a variable number of blocks, each with its own filter count.

    Parameters
    ----------
    dataset_properties : Optional[Dict[str, str]]
        Not used here; kept for interface compatibility.
    min_num_blocks, max_num_blocks : int
        Bounds on the number of blocks.
    min_num_filters, max_num_filters : int
        Bounds on each per-block filter count.
    min_kernel_size, max_kernel_size : int
        Bounds on the kernel size.
    min_dropout, max_dropout : float
        Bounds on the (conditional) dropout rate.

    Returns
    -------
    ConfigurationSpace
    """
    cs = ConfigurationSpace()
    blocks_hp = UniformIntegerHyperparameter("num_blocks",
                                             lower=min_num_blocks,
                                             upper=max_num_blocks)
    cs.add_hyperparameter(blocks_hp)
    kernel_hp = UniformIntegerHyperparameter("kernel_size",
                                             lower=min_kernel_size,
                                             upper=max_kernel_size)
    cs.add_hyperparameter(kernel_hp)
    enable_dropout_hp = CategoricalHyperparameter("use_dropout",
                                                  choices=[True, False])
    cs.add_hyperparameter(enable_dropout_hp)
    dropout_rate_hp = UniformFloatHyperparameter("dropout",
                                                 lower=min_dropout,
                                                 upper=max_dropout)
    cs.add_hyperparameter(dropout_rate_hp)
    # Dropout rate only matters when dropout is enabled.
    cs.add_condition(
        CS.EqualsCondition(dropout_rate_hp, enable_dropout_hp, True))

    # One filter-count hyperparameter per possible block; block i is only
    # active when the sampled num_blocks exceeds i.
    for block_idx in range(0, max_num_blocks):
        filters_hp = UniformIntegerHyperparameter(f"num_filters_{block_idx}",
                                                  lower=min_num_filters,
                                                  upper=max_num_filters)
        cs.add_hyperparameter(filters_hp)
        if block_idx >= min_num_blocks:
            cs.add_condition(
                CS.GreaterThanCondition(filters_hp, blocks_hp, block_idx))
    return cs
def get_hyperparameter_search_space_small(seed):
    """
    Small version of the SVM config space, featuring only the important
    hyperparameters, based on https://arxiv.org/abs/1710.04725

    Parameters
    ----------
    seed: int
        Random seed that will be used to sample random configurations

    Returns
    -------
    cs: ConfigSpace.ConfigurationSpace
        The configuration space object
    """
    cs = ConfigSpace.ConfigurationSpace('sklearn.svm.SVC', seed)
    c_hp = ConfigSpace.UniformFloatHyperparameter(
        name='svc__C', lower=0.03125, upper=32768, log=True,
        default_value=1.0)
    kernel_hp = ConfigSpace.CategoricalHyperparameter(
        name='svc__kernel', choices=['rbf', 'poly', 'sigmoid'],
        default_value='rbf')
    degree_hp = ConfigSpace.UniformIntegerHyperparameter(
        name='svc__degree', lower=1, upper=5, default_value=3)
    gamma_hp = ConfigSpace.UniformFloatHyperparameter(
        name='svc__gamma', lower=3.0517578125e-05, upper=8, log=True,
        default_value=0.1)
    coef0_hp = ConfigSpace.UniformFloatHyperparameter(
        name='svc__coef0', lower=-1, upper=1, default_value=0)
    cs.add_hyperparameters([c_hp, kernel_hp, degree_hp, gamma_hp, coef0_hp])
    # degree is only meaningful for the polynomial kernel; coef0 applies
    # to both the polynomial and sigmoid kernels.
    cs.add_condition(
        ConfigSpace.EqualsCondition(degree_hp, kernel_hp, 'poly'))
    cs.add_condition(
        ConfigSpace.InCondition(coef0_hp, kernel_hp, ['poly', 'sigmoid']))
    return cs