def get_configspace():
    """Return the configuration space for the network to be configured in the example.

    Hyperparameters:
        activation:         categorical, 'tanh' or 'relu'.
        learning_rate_init: float in [1e-6, 1e-2], sampled on a log scale.
        solver:             categorical, 'sgd' or 'adam'.
        beta_1 / beta_2:    floats in [0, 1], only active when solver == 'adam'.
    """
    config_space = CS.ConfigurationSpace()
    # CONSISTENCY FIX: the original mixed CS.* and CSH.* for hyperparameter
    # classes; use the CSH alias uniformly (CS keeps conditions/space classes).
    config_space.add_hyperparameters([
        CSH.CategoricalHyperparameter('activation', ['tanh', 'relu']),
        CSH.UniformFloatHyperparameter('learning_rate_init',
                                       lower=1e-6,
                                       upper=1e-2,
                                       log=True)
    ])

    solver = CSH.CategoricalHyperparameter('solver', ['sgd', 'adam'])
    config_space.add_hyperparameter(solver)

    # beta_1 and beta_2 are Adam-only parameters: add each with an equality
    # condition on the solver choice (removes the duplicated boilerplate).
    for beta_name in ('beta_1', 'beta_2'):
        beta = CSH.UniformFloatHyperparameter(beta_name, lower=0, upper=1)
        config_space.add_hyperparameter(beta)
        config_space.add_condition(CS.EqualsCondition(beta, solver, 'adam'))

    return config_space
# --- Example no. 2 (scraped snippet separator; score: 0) ---
    def get_hyperparameter_search_space(self, **pipeline_config):
        """Build the ConfigSpace search space for batch-loss-computation techniques.

        Registers a categorical selector over the registered techniques and,
        for each technique, nests that technique's own search space under a
        name prefix, activated only when the selector equals its name.

        :param pipeline_config: keyword overrides merged into the pipeline's
            configuration via ``get_pipeline_config``.
        :return: the assembled ``ConfigSpace.ConfigurationSpace``.
        """
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        # Selector: which batch-loss-computation technique to use.
        # sorted() keeps the choice order deterministic across runs.
        hp_batch_loss_computation = cs.add_hyperparameter(
            CSH.CategoricalHyperparameter(
                "batch_loss_computation_technique",
                sorted(self.batch_loss_computation_techniques.keys())))

        # Nest each technique's sub-space; its hyperparameters become active
        # only when the selector takes that technique's name.
        for name, technique in self.batch_loss_computation_techniques.items():
            parent = {
                'parent': hp_batch_loss_computation,
                'value': name
            } if hp_batch_loss_computation is not None else None
            cs.add_configuration_space(
                prefix=name,
                configuration_space=technique.get_hyperparameter_search_space(
                    **pipeline_config),
                delimiter=ConfigWrapper.delimiter,
                parent_hyperparameter=parent)

        # Techniques both requested in the pipeline config and actually registered.
        possible_loss_comps = sorted(
            list(
                set(pipeline_config["batch_loss_computation_techniques"]).
                intersection(self.batch_loss_computation_techniques.keys())))

        # NOTE(review): this branch looks unreachable — the key was just read
        # above (a missing key would already have raised KeyError), and a
        # hyperparameter with this name was added to ``cs`` earlier; confirm
        # the intended behavior upstream.
        if 'batch_loss_computation_techniques' not in pipeline_config.keys():
            cs.add_hyperparameter(
                CSH.CategoricalHyperparameter(
                    "batch_loss_computation_technique", possible_loss_comps))
            self._check_search_space_updates()

        return cs
# --- Example no. 3 (scraped snippet separator; score: 0) ---
def cs_ab():
    """Build the configuration spaces for an AdaBoost classifier.

    :return: tuple ``(with_imputation, without_imputation)`` — the first space
        additionally tunes the imputation strategy.
    """
    with_imputation = ConfigSpace.ConfigurationSpace()
    without_imputation = ConfigSpace.ConfigurationSpace()

    algorithm = csh.CategoricalHyperparameter('algorithm', choices=['0', '1'])
    imputation = csh.CategoricalHyperparameter('imputation',
                                               choices=['0', '1', '2'])

    max_depth = csh.UniformIntegerHyperparameter('max_depth',
                                                 lower=1, upper=10, log=False)
    learning_rate = csh.UniformFloatHyperparameter('learning_rate',
                                                   lower=0.01, upper=2,
                                                   log=True)
    n_estimators = csh.UniformIntegerHyperparameter('n_estimators',
                                                    lower=50, upper=500,
                                                    log=False)

    # space that also tunes the imputation strategy
    with_imputation.add_hyperparameters(
        [algorithm, imputation, max_depth, learning_rate, n_estimators])

    # space without the imputation hyperparameter
    without_imputation.add_hyperparameters(
        [algorithm, max_depth, learning_rate, n_estimators])

    return with_imputation, without_imputation
# --- Example no. 4 (scraped snippet separator; score: 0) ---
def cs_svm(per_kernel=False):
    """Build the configuration spaces for an SVM classifier.

    :param per_kernel: when True, the 'kernel' choice is omitted (one space
        per kernel is assumed); otherwise it is tuned as a categorical.
    :return: tuple ``(with_imputation, without_imputation)``.
    """
    with_imputation = ConfigSpace.ConfigurationSpace()
    without_imputation = ConfigSpace.ConfigurationSpace()

    shrinking = csh.CategoricalHyperparameter('shrinking', choices=['0', '1'])
    imputation = csh.CategoricalHyperparameter('imputation',
                                               choices=['0', '1', '2'])
    kernel = csh.CategoricalHyperparameter('kernel', choices=['0', '1'])

    c_param = csh.UniformFloatHyperparameter('C', lower=2**(-5), upper=2**15,
                                             log=True)
    coef0 = csh.UniformFloatHyperparameter('coef0', lower=-1, upper=1,
                                           log=False)
    gamma = csh.UniformFloatHyperparameter('gamma', lower=2**(-15),
                                           upper=2**3, log=True)
    tol = csh.UniformFloatHyperparameter('tol', lower=10**(-5),
                                         upper=10**(-1), log=True)

    numeric = [c_param, coef0, gamma, tol]
    if per_kernel:
        # one space per kernel: the kernel choice itself is not tuned
        with_imputation.add_hyperparameters([shrinking, imputation] + numeric)
        without_imputation.add_hyperparameters([shrinking] + numeric)
    else:
        with_imputation.add_hyperparameters(
            [shrinking, imputation, kernel] + numeric)
        without_imputation.add_hyperparameters([shrinking, kernel] + numeric)

    return with_imputation, without_imputation
def get_configspace():
    """Configuration space for the score-loss / policy-loss weighting scheme."""
    cs = CS.ConfigurationSpace()

    # (name, lower, upper, default) for the log-uniform float parameters
    float_specs = [
        ('sl_weight', 1, 100000, 100),
        ('sl_exp', 0.5, 2, 1),
        ('pl_exp', 0.5, 2, 1),
    ]
    for name, lo, hi, default in float_specs:
        cs.add_hyperparameter(
            CSH.UniformFloatHyperparameter(name=name,
                                           lower=lo,
                                           upper=hi,
                                           log=True,
                                           default_value=default))

    # boolean switches, both disabled by default
    for flag in ('forward_flow', 'weighted_sl_loss'):
        cs.add_hyperparameter(
            CSH.CategoricalHyperparameter(name=flag,
                                          choices=[False, True],
                                          default_value=False))

    return cs
# --- Example no. 6 (scraped snippet separator; score: 0) ---
    def get_hyperparameter_search_space(self, **pipeline_config):
        """Search space for data augmentation: (fast)autoaugment and cutout.

        Cutout's length/holes and the two autoaugment variants are conditional
        on their respective parent switch being True.
        """
        import ConfigSpace as CS
        import ConfigSpace.hyperparameters as CSH
        cs = CS.ConfigurationSpace()

        def add_switch(name):
            # register a boolean on/off categorical and return it
            return cs.add_hyperparameter(
                CSH.CategoricalHyperparameter(name, [True, False]))

        augment = add_switch('augment')
        autoaugment = add_switch('autoaugment')
        fastautoaugment = add_switch('fastautoaugment')
        cutout = add_switch('cutout')

        cutout_length = cs.add_hyperparameter(
            CSH.UniformIntegerHyperparameter('length',
                                             lower=0,
                                             upper=20,
                                             log=False))
        cutout_holes = cs.add_hyperparameter(
            CSH.UniformIntegerHyperparameter('cutout_holes',
                                             lower=1,
                                             upper=3,
                                             log=False))

        # cutout parameters only when cutout is on; autoaugment variants
        # only when augmentation is on at all
        for child, parent in ((cutout_length, cutout),
                              (cutout_holes, cutout),
                              (autoaugment, augment),
                              (fastautoaugment, augment)):
            cs.add_condition(CS.EqualsCondition(child, parent, True))

        return cs
# --- Example no. 7 (scraped snippet separator; score: 0) ---
def cs_gb():
    """Build the configuration spaces for a GradientBoosting classifier.

    :return: tuple ``(with_imputation, without_imputation)``.
    """
    with_imputation = ConfigSpace.ConfigurationSpace()
    without_imputation = ConfigSpace.ConfigurationSpace()

    criterion = csh.CategoricalHyperparameter('criterion', choices=['0', '1'])
    imputation = csh.CategoricalHyperparameter('imputation',
                                               choices=['0', '1', '2'])

    tree_params = [
        csh.UniformIntegerHyperparameter('max_depth', lower=1, upper=10,
                                         log=False),
        csh.UniformFloatHyperparameter('learning_rate', lower=0.01, upper=1,
                                       log=True),
        csh.UniformIntegerHyperparameter('n_estimators', lower=50, upper=500,
                                         log=False),
        csh.UniformFloatHyperparameter('max_features', lower=0.1, upper=0.9,
                                       log=False),
        csh.UniformIntegerHyperparameter('min_samples_leaf', lower=1,
                                         upper=20, log=False),
        csh.UniformIntegerHyperparameter('min_samples_split', lower=2,
                                         upper=20, log=False),
    ]

    # space that also tunes the imputation strategy
    with_imputation.add_hyperparameters([criterion, imputation] + tree_params)

    # space without the imputation hyperparameter
    without_imputation.add_hyperparameters([criterion] + tree_params)

    return with_imputation, without_imputation
    def get_configspace(self):
        """GTN outer-loop search space: score transform, step size, sampling flags."""
        cs = CS.ConfigurationSpace()

        cs.add_hyperparameter(
            CSH.UniformIntegerHyperparameter(name='gtn_score_transform_type',
                                             lower=0,
                                             upper=7,
                                             log=False,
                                             default_value=7))
        cs.add_hyperparameter(
            CSH.UniformFloatHyperparameter(name='gtn_step_size',
                                           lower=1e-2,
                                           upper=100,
                                           log=True,
                                           default_value=1))

        # boolean switches, both disabled by default
        for flag in ('gtn_mirrored_sampling', 'gtn_nes_step_size'):
            cs.add_hyperparameter(
                CSH.CategoricalHyperparameter(name=flag,
                                              choices=[False, True],
                                              default_value=False))

        return cs
    def get_configspace(self):
        """Combined search space: GTN outer loop, DDQN agent, Acrobot network."""
        cs = CS.ConfigurationSpace()

        # --- GTN outer loop ---
        cs.add_hyperparameter(CSH.UniformIntegerHyperparameter(
            name='gtn_score_transform_type', lower=0, upper=7, log=False,
            default_value=7))
        cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
            name='gtn_step_size', lower=0.1, upper=1, log=True,
            default_value=0.5))
        cs.add_hyperparameter(CSH.CategoricalHyperparameter(
            name='gtn_mirrored_sampling', choices=[False, True],
            default_value=True))
        cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
            name='gtn_noise_std', lower=0.01, upper=1, log=True,
            default_value=0.1))

        # --- DDQN agent ---
        cs.add_hyperparameter(CSH.UniformIntegerHyperparameter(
            name='ddqn_init_episodes', lower=1, upper=20, log=True,
            default_value=10))
        cs.add_hyperparameter(CSH.UniformIntegerHyperparameter(
            name='ddqn_batch_size', lower=64, upper=256, log=False,
            default_value=128))
        cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
            name='ddqn_gamma', lower=0.001, upper=0.1, log=True,
            default_value=0.01))
        cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
            name='ddqn_lr', lower=1e-4, upper=5e-3, log=True,
            default_value=1e-3))
        cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
            name='ddqn_tau', lower=0.005, upper=0.05, log=True,
            default_value=0.01))
        cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
            name='ddqn_eps_init', lower=0.8, upper=1, log=True,
            default_value=0.9))
        cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
            name='ddqn_eps_min', lower=0.005, upper=0.05, log=True,
            default_value=0.05))
        cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
            name='ddqn_eps_decay', lower=0.01, upper=0.2, log=True,
            default_value=0.1))
        cs.add_hyperparameter(CSH.CategoricalHyperparameter(
            name='ddqn_activation_fn',
            choices=['tanh', 'relu', 'leakyrelu', 'prelu'],
            default_value='relu'))
        cs.add_hyperparameter(CSH.UniformIntegerHyperparameter(
            name='ddqn_hidden_size', lower=48, upper=192, log=True,
            default_value=128))
        cs.add_hyperparameter(CSH.UniformIntegerHyperparameter(
            name='ddqn_hidden_layer', lower=1, upper=2, log=False,
            default_value=2))

        # --- Acrobot network ---
        cs.add_hyperparameter(CSH.CategoricalHyperparameter(
            name='acrobot_activation_fn',
            choices=['tanh', 'relu', 'leakyrelu', 'prelu'],
            default_value='leakyrelu'))
        cs.add_hyperparameter(CSH.UniformIntegerHyperparameter(
            name='acrobot_hidden_size', lower=48, upper=192, log=True,
            default_value=128))
        cs.add_hyperparameter(CSH.UniformIntegerHyperparameter(
            name='acrobot_hidden_layer', lower=1, upper=2, log=False,
            default_value=1))

        return cs
# --- Example no. 10 (scraped snippet separator; score: 0) ---
    def get_configspace():
        """Search space: network topology index plus SGD training hyperparameters.

        Returns:
            CS.ConfigurationSpace with topology, lr, momentum, weight_decay
            and batch_size.
        """
        cs = CS.ConfigurationSpace()

        topology = CSH.CategoricalHyperparameter(
            'topology', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
        # BUG FIX: default_value was the string '1e-1'; it must be the float
        # 1e-1 so it is a valid numeric default inside [lower, upper].
        lr = CSH.UniformFloatHyperparameter('lr',
                                            lower=1e-4,
                                            upper=1,
                                            default_value=1e-1,
                                            log=True)
        momentum = CSH.UniformFloatHyperparameter('momentum',
                                                  lower=0.5,
                                                  upper=1,
                                                  default_value=0.9,
                                                  log=False)
        weight_decay = CSH.UniformFloatHyperparameter('weight_decay',
                                                      lower=0.00001,
                                                      upper=0.00005,
                                                      default_value=0.00001,
                                                      log=False)
        batch_size = CSH.CategoricalHyperparameter('batch_size',
                                                   [1, 2, 4, 8, 16],
                                                   default_value=8)

        cs.add_hyperparameters(
            [topology, lr, momentum, weight_decay, batch_size])

        return cs
# --- Example no. 11 (scraped snippet separator; score: 0) ---
def cs_dt():
    """Build the configuration spaces for a Decision Tree classifier.

    :return: tuple ``(with_imputation, without_imputation)``.
    """
    with_imputation = ConfigSpace.ConfigurationSpace()
    without_imputation = ConfigSpace.ConfigurationSpace()

    criterion = csh.CategoricalHyperparameter('criterion', choices=['0', '1'])
    imputation = csh.CategoricalHyperparameter('imputation',
                                               choices=['0', '1', '2'])

    tree_params = [
        csh.UniformFloatHyperparameter('max_features', lower=0.1, upper=0.9,
                                       log=False),
        csh.UniformIntegerHyperparameter('min_samples_leaf', lower=1,
                                         upper=20, log=False),
        csh.UniformIntegerHyperparameter('min_samples_split', lower=2,
                                         upper=20, log=False),
    ]

    # space that also tunes the imputation strategy
    with_imputation.add_hyperparameters([criterion, imputation] + tree_params)

    # space without the imputation hyperparameter
    without_imputation.add_hyperparameters([criterion] + tree_params)

    return with_imputation, without_imputation
    def get_hyperparameter_search_space(self, **pipeline_config):
        """Search space for imbalanced-data handling.

        Adds selectors for the over-sampling method, under-sampling method and
        target-size strategy, and nests each sampling method's own sub-space
        conditioned on its selector.

        :param pipeline_config: keyword overrides merged into the pipeline's
            configuration via ``get_pipeline_config``.
        :return: the configuration space after user search-space updates.
        """
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        # Methods both requested in the pipeline config and actually registered.
        possible_over_sampling_methods = set(pipeline_config["over_sampling_methods"]).intersection(self.over_sampling_methods.keys())
        possible_under_sampling_methods = set(pipeline_config["under_sampling_methods"]).intersection(self.under_sampling_methods.keys())
        possible_target_size_strategies = set(pipeline_config["target_size_strategies"]).intersection(self.target_size_strategies.keys())

        # BUG FIX: the raw sets were passed as categorical choices, making the
        # choice order (and thus the space) nondeterministic across runs;
        # sort them for reproducibility (matches the sibling builders).
        selector_over_sampling = cs.add_hyperparameter(CSH.CategoricalHyperparameter(
            "over_sampling_method", sorted(possible_over_sampling_methods)))
        selector_under_sampling = cs.add_hyperparameter(CSH.CategoricalHyperparameter(
            "under_sampling_method", sorted(possible_under_sampling_methods)))
        cs.add_hyperparameter(CSH.CategoricalHyperparameter(
            "target_size_strategy", sorted(possible_target_size_strategies)))

        # Nest each over-sampling method's sub-space, active only when selected.
        for method_name, method_type in self.over_sampling_methods.items():
            if method_name not in possible_over_sampling_methods:
                continue
            method_cs = method_type.get_hyperparameter_search_space()
            cs.add_configuration_space(prefix=method_name,
                                       configuration_space=method_cs,
                                       delimiter=ConfigWrapper.delimiter,
                                       parent_hyperparameter={'parent': selector_over_sampling, 'value': method_name})

        # Same for the under-sampling methods.
        for method_name, method_type in self.under_sampling_methods.items():
            if method_name not in possible_under_sampling_methods:
                continue
            method_cs = method_type.get_hyperparameter_search_space()
            cs.add_configuration_space(prefix=method_name,
                                       configuration_space=method_cs,
                                       delimiter=ConfigWrapper.delimiter,
                                       parent_hyperparameter={'parent': selector_under_sampling, 'value': method_name})

        # Apply any user-registered search-space updates before returning.
        return self._apply_user_updates(cs)
# --- Example no. 13 (scraped snippet separator; score: 0) ---
    def get_configspace():
        """Search space for the conv/dense autoencoder-style architecture."""
        config_space = CS.ConfigurationSpace()

        # Layer-1 conv settings; wider ranges were tried previously
        # (filter counts [4, 8, 16], filter sizes [3, 5, 7, 9]).
        config_space.add_hyperparameters([
            CSH.CategoricalHyperparameter('l1_fc', [4]),
            CSH.CategoricalHyperparameter('l1_fs', [7]),
        ])

        # A second conv layer (l2_fc / l2_fs) was considered but is disabled.

        # Fully connected layer widths.
        config_space.add_hyperparameters([
            CSH.UniformIntegerHyperparameter('l3', lower=20, upper=500),
            CSH.UniformIntegerHyperparameter('l4', lower=20, upper=500),
            CSH.UniformIntegerHyperparameter('l5', lower=10, upper=300),
        ])

        # Latent dimension (a fixed value of 33 was used at one point).
        config_space.add_hyperparameters([
            CSH.UniformIntegerHyperparameter('latent', lower=20, upper=100),
        ])

        return config_space
# --- Example no. 14 (scraped snippet separator; score: 0) ---
    def get_hyperparameter_search_space(
            self,
            dataset_properties: Optional[Dict[str, Any]] = None,
            default: Optional[str] = None,
            include: Optional[List[str]] = None,
            exclude: Optional[List[str]] = None) -> ConfigurationSpace:
        """Build the encoder-choice configuration space.

        Adds a '__choice__' categorical over the available encoder components
        (restricted by ``include``/``exclude``) and nests each chosen
        component's own search space under it.

        :param dataset_properties: dataset metadata; merged over
            ``self.dataset_properties`` (caller-supplied keys win).
        :param default: preferred default encoder name; when None, the first
            available of OneHotEncoder/OrdinalEncoder/NoEncoder (respecting
            include/exclude) is used.
        :param include: optional whitelist of component names.
        :param exclude: optional blacklist of component names.
        :raises ValueError: if no encoder component is available.
        :return: the assembled space (also stored on
            ``self.configuration_space``).
        """
        cs = ConfigurationSpace()

        if dataset_properties is None:
            dataset_properties = dict()

        # Caller-supplied properties override the instance-level ones.
        dataset_properties = {**self.dataset_properties, **dataset_properties}

        available_preprocessors = self.get_available_components(
            dataset_properties=dataset_properties,
            include=include,
            exclude=exclude)

        if len(available_preprocessors) == 0:
            raise ValueError("no encoders found, please add a encoder")

        # Pick the first preferred default that survives include/exclude.
        if default is None:
            defaults = ['OneHotEncoder', 'OrdinalEncoder', 'NoEncoder']
            for default_ in defaults:
                if default_ in available_preprocessors:
                    if include is not None and default_ not in include:
                        continue
                    if exclude is not None and default_ in exclude:
                        continue
                    default = default_
                    break

        # add only no encoder to choice hyperparameters in case the dataset is only numerical
        if len(dataset_properties['categorical_columns']) == 0:
            default = 'NoEncoder'
            preprocessor = CSH.CategoricalHyperparameter('__choice__',
                                                         ['NoEncoder'],
                                                         default_value=default)
        else:
            preprocessor = CSH.CategoricalHyperparameter(
                '__choice__',
                list(available_preprocessors.keys()),
                default_value=default)

        cs.add_hyperparameter(preprocessor)

        # add only child hyperparameters of early_preprocessor choices
        for name in preprocessor.choices:
            preprocessor_configuration_space = available_preprocessors[name].\
                get_hyperparameter_search_space(dataset_properties)
            parent_hyperparameter = {'parent': preprocessor, 'value': name}
            cs.add_configuration_space(
                name,
                preprocessor_configuration_space,
                parent_hyperparameter=parent_hyperparameter)

        self.configuration_space = cs
        self.dataset_properties = dataset_properties
        return cs
    def get_configspace():
        """:return: ConfigurationsSpace-Object
        Here is the main place to create particular hyperparameters to tune.
        Particular hyperparameter should be defined as:
        hyperparameter = type_of_parameter(name, lower_range, upper_range, default_value, logging)
        add.hyperparameter([hyperparameter])
        """

        cs = CS.ConfigurationSpace()

        # num_pca = CSH.UniformIntegerHyperparameter('num_pca', lower=850, upper=930, default_value=900, log=True)
        # cs.add_hyperparameters([num_pca])

        # BUG FIX: default_value was the string '1e-2'; it must be the float
        # 1e-2 so it is a valid numeric default inside [lower, upper].
        lr = CSH.UniformFloatHyperparameter('lr',
                                            lower=1e-6,
                                            upper=1e-1,
                                            default_value=1e-2,
                                            log=True)
        optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])
        sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum',
                                                      lower=0.0,
                                                      upper=0.99,
                                                      default_value=0.9,
                                                      log=False)
        dropout_rate_1 = CSH.UniformFloatHyperparameter('dropout_rate_1',
                                                        lower=0.0,
                                                        upper=0.5,
                                                        default_value=0.1,
                                                        log=False)
        dropout_rate_2 = CSH.UniformFloatHyperparameter('dropout_rate_2',
                                                        lower=0.0,
                                                        upper=0.5,
                                                        default_value=0.1,
                                                        log=False)
        num_fc_units_1 = CSH.UniformIntegerHyperparameter('num_fc_units_1',
                                                          lower=512,
                                                          upper=2048,
                                                          default_value=1024,
                                                          log=True)
        num_fc_units_2 = CSH.UniformIntegerHyperparameter('num_fc_units_2',
                                                          lower=256,
                                                          upper=512,
                                                          default_value=256,
                                                          log=True)
        activation = CSH.CategoricalHyperparameter('activation',
                                                   ['tanh', 'relu'])

        cs.add_hyperparameters([
            lr, optimizer, sgd_momentum, dropout_rate_1, dropout_rate_2,
            num_fc_units_1, num_fc_units_2, activation
        ])

        # The hyperparameter sgd_momentum will be used,if the configuration
        # contains 'SGD' as optimizer.
        cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
        cs.add_condition(cond)

        return cs
# --- Example no. 16 (scraped snippet separator; score: 0) ---
    def get_hyperparameter_search_space(self,
                                        dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None,
                                        default: Optional[str] = None,
                                        include: Optional[List[str]] = None,
                                        exclude: Optional[List[str]] = None) -> ConfigurationSpace:
        """Build the image-normalizer-choice configuration space.

        Adds a '__choice__' categorical over the available normalizer
        components (restricted by ``include``/``exclude`` and any registered
        search-space updates) and nests each component's own search space
        under it.

        :param dataset_properties: dataset metadata; merged over
            ``self.dataset_properties`` (caller-supplied keys win).
        :param default: preferred default normalizer; when None, the first
            available of ImageNormalizer/NoNormalizer is used.
        :param include: optional whitelist of component names.
        :param exclude: optional blacklist of component names.
        :raises ValueError: if no normalizer is available, or if a registered
            '__choice__' update names unavailable components.
        :return: the assembled space (also stored on
            ``self.configuration_space``).
        """
        cs = ConfigurationSpace()

        if dataset_properties is None:
            dataset_properties = dict()

        # Caller-supplied properties override the instance-level ones.
        dataset_properties = {**self.dataset_properties, **dataset_properties}

        available_preprocessors = self.get_available_components(dataset_properties=dataset_properties,
                                                                include=include,
                                                                exclude=exclude)

        if len(available_preprocessors) == 0:
            raise ValueError("no image normalizers found, please add an image normalizer")

        # Pick the first preferred default that survives include/exclude.
        if default is None:
            defaults = ['ImageNormalizer', 'NoNormalizer']
            for default_ in defaults:
                if default_ in available_preprocessors:
                    if include is not None and default_ not in include:
                        continue
                    if exclude is not None and default_ in exclude:
                        continue
                    default = default_
                    break

        # A user-registered '__choice__' update overrides the choice list and
        # default, but must only name available components.
        updates = self._get_search_space_updates()
        if '__choice__' in updates.keys():
            choice_hyperparameter = updates['__choice__']
            if not set(choice_hyperparameter.value_range).issubset(available_preprocessors):
                raise ValueError("Expected given update for {} to have "
                                 "choices in {} got {}".format(self.__class__.__name__,
                                                               available_preprocessors,
                                                               choice_hyperparameter.value_range))
            preprocessor = CSH.CategoricalHyperparameter('__choice__',
                                                         choice_hyperparameter.value_range,
                                                         default_value=choice_hyperparameter.default_value)
        else:
            preprocessor = CSH.CategoricalHyperparameter('__choice__',
                                                         list(available_preprocessors.keys()),
                                                         default_value=default)
        cs.add_hyperparameter(preprocessor)

        # add only child hyperparameters of preprocessor choices
        for name in preprocessor.choices:
            preprocessor_configuration_space = available_preprocessors[name].\
                get_hyperparameter_search_space(dataset_properties)
            parent_hyperparameter = {'parent': preprocessor, 'value': name}
            cs.add_configuration_space(name, preprocessor_configuration_space,
                                       parent_hyperparameter=parent_hyperparameter)

        self.configuration_space = cs
        self.dataset_properties = dataset_properties
        return cs
# --- Example no. 17 (scraped snippet separator; score: 0) ---
 def get_configspace():
     """Build the configuration space with the needed hyperparameters.

     Covers the optimizer learning rate, the temporal-regression model's
     architecture (history length, depth, channels, kernel size, activation)
     and the bias-type choices.
     :return: ConfigurationsSpace-Object
     """
     cs = CS.ConfigurationSpace()
     hyperparameters = [
         CSH.UniformFloatHyperparameter(
             'optimizer:lr', lower=0.001, upper=0.01, log=True),
         CSH.UniformIntegerHyperparameter(
             'model:temp_reg_params.history', lower=4, upper=12,
             default_value=12),
         CSH.UniformIntegerHyperparameter(
             'model:temp_reg_params.n_layers', lower=2, upper=8,
             default_value=3),
         CSH.OrdinalHyperparameter(
             'model:temp_reg_params.n_channels', sequence=[2, 4, 8, 16, 32],
             default_value=8),
         CSH.OrdinalHyperparameter(
             'model:temp_reg_params.kernel_size', sequence=[1, 3, 5],
             default_value=1),
         CSH.CategoricalHyperparameter(
             'model:temp_reg_params.activation',
             choices=['ReLU', 'ELU', 'LeakyReLU', 'SELU']),
         CSH.CategoricalHyperparameter(
             'model:biases_type.location', choices=['LxT', 'L+T']),
         CSH.CategoricalHyperparameter(
             'model:biases_type.weekday', choices=["", "W", "WxT"]),
         CSH.CategoricalHyperparameter(
             'model:biases_type.month', choices=["", "M", "MxT"]),
     ]
     cs.add_hyperparameters(hyperparameters)
     return cs
# --- Example no. 18 (scraped snippet separator; score: 0) ---
    def get_hyperparameter_search_space(dataset_properties=None):
        """Search space for polynomial feature expansion.

        :param dataset_properties: unused; kept for interface compatibility.
        :return: ConfigSpace.ConfigurationSpace with degree,
            interaction_only and include_bias.
        """
        cs = ConfigSpace.ConfigurationSpace()
        cs.add_hyperparameters([
            CSH.UniformIntegerHyperparameter("degree", lower=2, upper=3),
            CSH.CategoricalHyperparameter("interaction_only", [False, True]),
            CSH.CategoricalHyperparameter("include_bias", [True, False]),
        ])
        return cs
# --- Example no. 19 (scraped snippet separator; score: 0) ---
    def get_configspace():
        """
        Here we define the configuration space for the hyperparameters for the model.
        Returns:
            ConfigSpace-object with learning rate, activation, optimizer (plus
            SGD momentum, conditional on optimizer == 'SGD'), and a number of
            hidden layers with conditionally-active per-layer widths.
        """
        cs = CS.ConfigurationSpace()

        # BUG FIX: default_value was the string '1e-2'; use the float 1e-2 so
        # it is a valid numeric default inside [lower, upper].
        cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
            'lr', lower=1e-6, upper=1e-2, default_value=1e-2, log=True))
        cs.add_hyperparameter(CSH.CategoricalHyperparameter(
            'act_f', ['ReLU', 'Tanh'], default_value='ReLU'))

        # For demonstration purposes, we add different optimizers as categorical hyperparameters.
        # To show how to use conditional hyperparameters with ConfigSpace, we'll add the optimizers 'Adam' and 'SGD'.
        # SGD has a different parameter 'momentum'.
        optimizer = CSH.CategoricalHyperparameter('optimizer',
                                                  ['Adam', 'SGD'])
        cs.add_hyperparameter(optimizer)

        sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum',
                                                      lower=0.0, upper=0.99, default_value=0.9,
                                                      log=False)
        cs.add_hyperparameter(sgd_momentum)

        # The hyperparameter sgd_momentum will be used,
        # if the configuration contains 'SGD' as optimizer.
        cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
        cs.add_condition(cond)

        # The hidden-layer widths for layers 2 and 3 are conditional on the
        # number of hidden layers, realised with inequality conditions below.
        num_hidden_layers = CSH.UniformIntegerHyperparameter(
            'num_hidden_layers', lower=1, upper=3, default_value=1)
        cs.add_hyperparameter(num_hidden_layers)

        hidden_dims = []
        for layer_idx in (1, 2, 3):
            dim = CSH.UniformIntegerHyperparameter(
                'hidden_dim_%d' % layer_idx, lower=100, upper=1000, log=False)
            cs.add_hyperparameter(dim)
            hidden_dims.append(dim)

        # hidden_dim_2 active only when num_hidden_layers > 1;
        # hidden_dim_3 active only when num_hidden_layers > 2.
        cs.add_condition(CS.GreaterThanCondition(hidden_dims[1], num_hidden_layers, 1))
        cs.add_condition(CS.GreaterThanCondition(hidden_dims[2], num_hidden_layers, 2))

        return cs
Ejemplo n.º 20
0
    def get_configspace():
        """
        Build the configuration space with the needed hyperparameters.

        It is easily possible to implement different types of
        hyperparameters: float hyperparameters on a log scale, plain
        uniform float/integer ranges and categorical parameters are all
        handled.

        :return: ConfigurationSpace object holding all tunable parameters
        """
        cs = CS.ConfigurationSpace()

        # NOTE(review): the original wrapped these definitions in a dead
        # ``if True: ... else: ...`` toggle; the ``else`` branch was
        # unreachable and has been removed. Only the active ranges remain.
        lr = CSH.UniformFloatHyperparameter(
            'lr', lower=0.0001, upper=0.01, default_value=0.001, log=True)
        hidden_dim = CSH.CategoricalHyperparameter(
            'hidden_dim', [16, 32, 64, 128, 256])
        activation = CSH.CategoricalHyperparameter(
            'activation', ['tanh', 'relu'])
        epsilon = CSH.UniformFloatHyperparameter(
            'epsilon', lower=0.01, upper=1., default_value=1., log=True)
        decay_rate = CSH.UniformFloatHyperparameter(
            'decay_rate', lower=0.001, upper=.1, default_value=.01, log=True)
        # A single-choice categorical: gamma is effectively fixed at 0.99
        # but still appears in every sampled configuration.
        gamma = CSH.CategoricalHyperparameter('gamma', [0.99])
        action_dim = CSH.UniformIntegerHyperparameter(
            'action_dim', lower=3, upper=30)
        seed = CSH.UniformIntegerHyperparameter(
            'seed', lower=0, upper=4000, default_value=42)
        dropout_rate = CSH.UniformFloatHyperparameter(
            'dropout_rate', lower=0.0, upper=0.8, default_value=0.0)

        cs.add_hyperparameters([lr, hidden_dim, activation, epsilon,
                                gamma, action_dim, seed, dropout_rate,
                                decay_rate])

        return cs
Ejemplo n.º 21
0
    def get_hyperparameter_search_space(dataset_properties=None):
        """Return the configuration space for this preprocessing component.

        ``n_components`` is a conditional hyperparameter: it only becomes
        active while ``whiten`` is sampled as True.
        """
        config_space = ConfigSpace.ConfigurationSpace()

        components = CSH.UniformIntegerHyperparameter(
            "n_components", lower=10, upper=2000)
        algo = CSH.CategoricalHyperparameter(
            'algorithm', ['parallel', 'deflation'])
        whiten_flag = CSH.CategoricalHyperparameter('whiten', [True, False])
        contrast_fun = CSH.CategoricalHyperparameter(
            'fun', ['logcosh', 'exp', 'cube'])
        config_space.add_hyperparameters(
            [components, algo, whiten_flag, contrast_fun])

        # n_components is only meaningful when whitening is enabled.
        active_if_whitened = CSC.EqualsCondition(components, whiten_flag, True)
        config_space.add_condition(active_if_whitened)

        return config_space
Ejemplo n.º 22
0
    def get_configspace():
        """
        Define all the hyperparameters that need to be optimised and store
        them in the returned configuration space.

        ``sgd_momentum`` and ``nesterov`` are conditional hyperparameters:
        they only become active when the sampled optimizer is 'sgd'.

        :return: CS.ConfigurationSpace with all hyperparameters registered
        """
        cs = CS.ConfigurationSpace()
        dense_units = CSH.UniformIntegerHyperparameter('dense_units',
                                                       lower=64,
                                                       upper=256,
                                                       default_value=128)
        # BUG FIX: default_value was the string '1e-2', not a float inside
        # the [1e-3, 1e-1] range; pass the numeric value instead.
        initial_lr = CSH.UniformFloatHyperparameter('initial_lr',
                                                    lower=1e-3,
                                                    upper=1e-1,
                                                    default_value=1e-2,
                                                    log=True)
        # Materialise the dict view into a concrete list of choices.
        # NOTE(review): the 'sgd' conditions below assume 'sgd' is one of
        # these keys — verify against the opti_dict definition.
        optimizer = CSH.CategoricalHyperparameter('optimizer',
                                                  list(get('opti_dict').keys()))
        sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum',
                                                      lower=0.0,
                                                      upper=0.99,
                                                      default_value=0.9,
                                                      log=False)
        nesterov = CSH.CategoricalHyperparameter('nesterov', ['True', 'False'])
        cs.add_hyperparameters(
            [initial_lr, optimizer, sgd_momentum, nesterov, dense_units])

        lr_scheduler = CSH.CategoricalHyperparameter('lr_scheduler',
                                                     ['Exponential', 'Cosine'])
        weight_decay = CSH.UniformFloatHyperparameter('weight_decay',
                                                      lower=1e-5,
                                                      upper=1e-3,
                                                      default_value=3e-4,
                                                      log=True)
        drop_path_prob = CSH.UniformFloatHyperparameter('drop_path_prob',
                                                        lower=0,
                                                        upper=0.4,
                                                        default_value=0.3,
                                                        log=False)
        grad_clip_value = CSH.UniformIntegerHyperparameter('grad_clip_value',
                                                           lower=4,
                                                           upper=8,
                                                           default_value=5)
        cs.add_hyperparameters(
            [lr_scheduler, drop_path_prob, weight_decay, grad_clip_value])

        # Momentum and Nesterov only apply to the 'sgd' optimizer.
        cond = CS.EqualsCondition(sgd_momentum, optimizer, 'sgd')
        cs.add_condition(cond)
        cond2 = CS.EqualsCondition(nesterov, optimizer, 'sgd')
        cs.add_condition(cond2)

        return cs
Ejemplo n.º 23
0
def _create_config_space(dict_hyperparams):
    """Create the hyperparameters hyperspace.

    Args:
        dict_hyperparams (dict): maps hyperparameter name to a spec dict
            with a ``'type'`` key (``'int'``, ``'float'``, ``'bool'`` or
            ``'str'``), a ``'range'`` or ``'values'`` list for bounded or
            categorical types, and an optional ``'default'``.

    Returns:
        ConfigurationSpace: the assembled hyperparameter space.

    Raises:
        TypeError: if ``dict_hyperparams`` is not a dictionary.
    """
    config_space = ConfigurationSpace()

    if not isinstance(dict_hyperparams, dict):
        raise TypeError('Hyperparams must be a dictionary.')

    for name, hyperparam in dict_hyperparams.items():
        hp_type = hyperparam['type']

        if hp_type == 'int':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_min = min(hp_range)
            hp_max = max(hp_range)
            # BUG FIX: `get('default') or hp_min` discarded falsy defaults
            # such as 0; only fall back when no default was given.
            hp_default = hyperparam.get('default')
            if hp_default is None:
                hp_default = hp_min
            config_space.add_hyperparameter(
                hp.UniformIntegerHyperparameter(name,
                                                hp_min,
                                                hp_max,
                                                default_value=hp_default))

        elif hp_type == 'float':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_min = min(hp_range)
            hp_max = max(hp_range)
            # Same falsy-default fix as for ints (0.0 is a valid default).
            hp_default = hyperparam.get('default')
            if hp_default is None:
                hp_default = hp_min
            config_space.add_hyperparameter(
                hp.UniformFloatHyperparameter(name,
                                              hp_min,
                                              hp_max,
                                              default_value=hp_default))

        elif hp_type == 'bool':
            # BUG FIX: the choices are the strings 'true'/'false', so the
            # default must be one of those strings, not a Python bool.
            hp_default = 'true' if hyperparam.get('default') else 'false'
            config_space.add_hyperparameter(
                hp.CategoricalHyperparameter(name, ['true', 'false'],
                                             default_value=hp_default))

        elif hp_type == 'str':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            # None is not hashable-safe as a ConfigSpace choice, so it is
            # mapped to the _NONE sentinel (loop variable renamed so it no
            # longer shadows the `hp` module alias).
            hp_range = [_NONE if choice is None else choice
                        for choice in hp_range]
            # BUG FIX: an explicit ``None`` default was previously swallowed
            # by `or` and replaced with hp_range[0]; map it to _NONE instead,
            # and only fall back to the first choice when no default key
            # exists at all.
            if 'default' in hyperparam:
                hp_default = hyperparam['default']
                hp_default = _NONE if hp_default is None else hp_default
            else:
                hp_default = hp_range[0]

            config_space.add_hyperparameter(
                hp.CategoricalHyperparameter(name,
                                             hp_range,
                                             default_value=hp_default))

    return config_space
Ejemplo n.º 24
0
def get_duc2004_space():
    """Build the DUC2004 decoding hyperparameter space (seeded for
    reproducible sampling)."""
    space = CS.ConfigurationSpace(seed=1)

    beam_width = CSH.CategoricalHyperparameter('beam',
                                               choices=[2, 3, 4, 5, 6])
    ngram_block = CSH.CategoricalHyperparameter('no_repeat_ngram_size',
                                                choices=[2, 3, 4])
    # Length penalty is quantised in steps of 0.1 over [0.1, 2.0].
    length_penalty = CSH.UniformFloatHyperparameter('lenpen',
                                                    lower=0.1,
                                                    upper=2.0,
                                                    q=0.1)
    minimum_length = CSH.UniformIntegerHyperparameter('min_len',
                                                      lower=5, upper=19)
    # Single-choice parameters: fixed values that still appear in every
    # sampled configuration.
    max_b = CSH.CategoricalHyperparameter('max_len_b', choices=[20])
    max_a = CSH.CategoricalHyperparameter('max_len_a', choices=[0])

    space.add_hyperparameters([beam_width, ngram_block, length_penalty,
                               minimum_length, max_b, max_a])
    return space
Ejemplo n.º 25
0
def create_hyperparameter(var_type, name, lower=None, upper=None, log=False, q=None, choices=None):
    """
    Build a ConfigSpace hyperparameter object for the given Python type.

    Parameters
    ----------
    var_type: type
        one of int, float, str or bool
    name: str
        the name of the hyperparameter
    lower: float or int
        lower bound of the variable (numeric types only)
    upper: float or int
        upper bound of the variable (numeric types only)
    log: bool
        whether the variable is sampled on a log scale
    q: float or int
        quantisation step, any positive real number
    choices: list of str or float or int
        the choices of a categorical parameter (str / bool types only)

    Returns
    -------
    ConfigSpace.hyperparameters object
        the information of the hyperparameter

    Raises
    ------
    ValueError
        if var_type is not one of the supported types
    """
    if var_type is int:
        return CSH.UniformIntegerHyperparameter(
            name=name, lower=lower, upper=upper, log=log, q=q)
    if var_type is float:
        return CSH.UniformFloatHyperparameter(
            name=name, lower=lower, upper=upper, log=log, q=q)
    if var_type in (str, bool):
        # Booleans are represented as a categorical over their choices.
        return CSH.CategoricalHyperparameter(name=name, choices=choices)
    raise ValueError("The hp_type must be chosen from [int, float, str, bool]")
Ejemplo n.º 26
0
    def _convert_hyper_parameters_to_cs(self):
        # type: () -> CS.ConfigurationSpace
        """Translate this optimizer's hyper-parameter ranges into a seeded
        CS.ConfigurationSpace.

        Raises ValueError for range types that BOHB does not support.
        """
        config_space = CS.ConfigurationSpace(seed=self._seed)
        for param in self._hyper_parameters:
            if isinstance(param, UniformParameterRange):
                converted = CSH.UniformFloatHyperparameter(
                    param.name,
                    lower=param.min_value,
                    upper=param.max_value,
                    log=False,
                    q=param.step_size)
            elif isinstance(param, UniformIntegerParameterRange):
                converted = CSH.UniformIntegerHyperparameter(
                    param.name,
                    lower=param.min_value,
                    upper=param.max_value,
                    log=False,
                    q=param.step_size)
            elif isinstance(param, DiscreteParameterRange):
                converted = CSH.CategoricalHyperparameter(
                    param.name, choices=param.values)
            else:
                raise ValueError(
                    "HyperParameter type {} not supported yet with OptimizerBOHB"
                    .format(type(param)))
            config_space.add_hyperparameter(converted)

        return config_space
Ejemplo n.º 27
0
    def get_hyperparameter_search_space(self,
                                        dataset_info=None,
                                        **pipeline_config):
        """Build the ConfigurationSpace for the batch-loss-computation step.

        Only techniques that are both requested via the pipeline config and
        registered on this node are offered; each technique's own sub-space
        is attached as a conditional child of the selector hyperparameter.

        Args:
            dataset_info: unused here; kept for interface compatibility.
            **pipeline_config: pipeline configuration overrides.

        Returns:
            ConfigSpace.ConfigurationSpace: the assembled search space.
        """
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_techniques = set(
            pipeline_config['batch_loss_computation_techniques']).intersection(
                self.batch_loss_computation_techniques.keys())
        hp_batch_loss_computation = CSH.CategoricalHyperparameter(
            "batch_loss_computation_technique", sorted(possible_techniques))
        cs.add_hyperparameter(hp_batch_loss_computation)

        # NOTE(review): the original re-fetched `technique` from the dict
        # inside this loop, which was redundant — items() already yields it.
        for name, technique in self.batch_loss_computation_techniques.items():
            if name not in possible_techniques:
                continue

            technique_cs = technique.get_hyperparameter_search_space(
                **self._get_search_space_updates(
                    prefix=("batch_loss_computation_technique", name)))
            # The technique sub-space is only active when its name is the
            # selected value of the parent hyperparameter.
            cs.add_configuration_space(prefix=name,
                                       configuration_space=technique_cs,
                                       delimiter=ConfigWrapper.delimiter,
                                       parent_hyperparameter={
                                           'parent': hp_batch_loss_computation,
                                           'value': name
                                       })

        self._check_search_space_updates((possible_techniques, "*"))
        return cs
    def get_hyperparameter_search_space(
        self,
        dataset_properties: Optional[Dict[str, str]] = None,
        default: Optional[str] = None,
        include: Optional[List[str]] = None,
        exclude: Optional[List[str]] = None,
    ) -> ConfigurationSpace:
        """Returns the configuration space of the current chosen components

        Args:
            dataset_properties (Optional[Dict[str, str]]): Describes the dataset to work on
            default (Optional[str]): Default component to use; when None, the
                first preferred component found among the available ones is
                chosen instead
            include (Optional[List[str]]): what components to include. It is an
                exhaustive list, and will exclusively use this components.
            exclude (Optional[List[str]]): which components to skip

        Returns:
            ConfigurationSpace: the configuration space of the hyper-parameters of the
                 chosen component

        Raises:
            ValueError: if no component is available after applying the
                include/exclude filters
        """
        cs = ConfigurationSpace()

        if dataset_properties is None:
            dataset_properties = {}

        # Compile a list of legal preprocessors for this problem
        available_initializers = self.get_available_components(
            dataset_properties=dataset_properties,
            include=include,
            exclude=exclude)

        if len(available_initializers) == 0:
            raise ValueError("No initializers found")

        # Pick the first preferred component that is actually available.
        # NOTE(review): if none of the preferred names is available,
        # `default` stays None and is passed through to ConfigSpace below —
        # confirm that is the intended fallback behaviour.
        if default is None:
            defaults = [
                'MLPNet',
            ]
            for default_ in defaults:
                if default_ in available_initializers:
                    default = default_
                    break

        # '__choice__' selects which component is active; each component's
        # own sub-space is attached as a conditional child of it.
        initializer = CSH.CategoricalHyperparameter(
            '__choice__',
            list(available_initializers.keys()),
            default_value=default)
        cs.add_hyperparameter(initializer)
        for name in available_initializers:
            initializer_configuration_space = available_initializers[name]. \
                get_hyperparameter_search_space(dataset_properties)
            parent_hyperparameter = {'parent': initializer, 'value': name}
            cs.add_configuration_space(
                name,
                initializer_configuration_space,
                parent_hyperparameter=parent_hyperparameter)

        # Cache the result for later introspection.
        self.configuration_space_ = cs
        self.dataset_properties_ = dataset_properties
        return cs
Ejemplo n.º 29
0
def get_space():
    """
        Defines the search space to sample from for each hyperparameter for
        the hyperparameter optimization. Define all parameters to tune in
        the given model here.

        Returns:
        --------
            ConfigSpace object containing the search space
    """
    search_space = CS.ConfigurationSpace()

    batch_size = CSH.CategoricalHyperparameter(
        'timesteps_per_batch', [512, 1024, 2048, 4096, 8192])
    vf_lr = CSH.UniformFloatHyperparameter('vf_stepsize',
                                           lower=2**-5,
                                           upper=2**-2,
                                           log=True)
    kl_limit = CSH.UniformFloatHyperparameter('max_kl',
                                              lower=2**-2.5,
                                              upper=2**-0.5,
                                              log=True)
    # gamma and lam share the same bounds, expressed through the transform
    # 1 - 1/(4 * 10**k) for k in [-1, 1.5].
    shared_lower = (1 - (1 / ((10**(-1)) * 4)))
    shared_upper = (1 - (1 / ((10**(1.5)) * 4)))
    discount = CSH.UniformFloatHyperparameter('gamma',
                                              lower=shared_lower,
                                              upper=shared_upper)
    gae_lambda = CSH.UniformFloatHyperparameter('lam',
                                                lower=shared_lower,
                                                upper=shared_upper)

    search_space.add_hyperparameters(
        [batch_size, vf_lr, kl_limit, discount, gae_lambda])

    # The space could be serialised to a json file here; this remains
    # disabled, as in the original:
    #with open('configspace.json', 'w') as fh:
    #    fh.write(json.write(space))
    return search_space
Ejemplo n.º 30
0
def convert_hyperparameter_ranges(hp_ranges: HyperparameterRanges_Impl) -> \
        (HyperparameterRanges_CS, List[str]):
    """Convert internal hyperparameter ranges into a ConfigSpace-backed
    ``HyperparameterRanges_CS``, also returning the parameter names in
    their original order.

    Raises AssertionError when a range has an unsupported type.
    """
    names = []
    converted = []
    for rng in hp_ranges.hp_ranges:
        param_name = rng.name
        names.append(param_name)
        if isinstance(rng, HyperparameterRangeContinuous):
            # Log scaling in the source range maps to log sampling in CS.
            converted.append(CSH.UniformFloatHyperparameter(
                name=param_name,
                lower=rng.lower_bound,
                upper=rng.upper_bound,
                log=isinstance(rng.scaling, LogScaling)))
        elif isinstance(rng, HyperparameterRangeInteger):
            converted.append(CSH.UniformIntegerHyperparameter(
                name=param_name,
                lower=rng.lower_bound,
                upper=rng.upper_bound,
                log=isinstance(rng.scaling, LogScaling)))
        elif isinstance(rng, HyperparameterRangeCategorical):
            converted.append(CSH.CategoricalHyperparameter(
                name=param_name, choices=rng.choices))
        else:
            raise AssertionError(
                "Parameter '{}' has unknown type".format(param_name))

    config_space = CS.ConfigurationSpace()
    config_space.add_hyperparameters(converted)

    return HyperparameterRanges_CS(config_space), names