Example 1
 def get_config_space(T_max=(10, 500), eta_min=(1e-8, 1e-8)):
     """Build the scheduler config space: integer 'T_max' and float 'eta_min'."""
     config_space = CS.ConfigurationSpace()
     # Register both tunables on the fresh space.
     add_hyperparameter(config_space, CSH.UniformIntegerHyperparameter, 'T_max', T_max)
     add_hyperparameter(config_space, CSH.UniformFloatHyperparameter, 'eta_min', eta_min)
     return config_space
Example 2
    def get_config_space(num_layers=(1, 15),
                         max_units=((10, 1024), True),
                         activation=('sigmoid', 'tanh', 'relu'),
                         mlp_shape=('funnel', 'long_funnel', 'diamond',
                                    'hexagon', 'brick', 'triangle', 'stairs'),
                         max_dropout=(0, 1.0),
                         use_dropout=(True, False)):
        """Configuration space for a shaped MLP.

        Ranges are (low, high) or ((low, high), log_scale) tuples; choice
        tuples become categorical hyperparameters.

        Fixes:
        * 'max_dropout' and its condition on 'use_dropout' are now only added
          when True is among the 'use_dropout' choices — previously the
          EqualsCondition referenced the value True unconditionally, which
          breaks for e.g. use_dropout=(False,).
        * Hyperparameter classes are referenced consistently through CSH.
        """
        cs = CS.ConfigurationSpace()

        add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'mlp_shape',
                           mlp_shape)
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'num_layers',
                           num_layers)
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, "max_units",
                           max_units)

        use_dropout_hp = add_hyperparameter(cs, CSH.CategoricalHyperparameter,
                                            "use_dropout", use_dropout)

        # 'max_dropout' is only meaningful when dropout can be switched on.
        if True in use_dropout:
            max_dropout_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter,
                                                "max_dropout", max_dropout)
            cs.add_condition(
                CS.EqualsCondition(max_dropout_hp, use_dropout_hp, True))

        add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'activation',
                           activation)
        return cs
Example 3
    def get_config_space(growth_rate_range=(12, 40), nr_blocks=(3, 4), layer_range=([1, 12], [6, 24], [12, 64], [12, 64]), num_init_features=(32, 128), **kwargs):
        """Configuration space for a DenseNet-style network.

        Per-block layer counts ('layer_in_block_<i>') become conditional on
        'blocks' once the block index exceeds the minimum number of blocks.

        Fixes:
        * hyperparameter classes are referenced through the locally imported
          ``CS`` alias — the previous ``ConfigSpace.…`` references relied on a
          module-level import that the local imports suggest may not exist;
        * ``get_hyperparameter`` is imported locally alongside
          ``add_hyperparameter``, since both are used below.

        NOTE(review): 'num_init_features' is accepted but currently unused.
        """
        import ConfigSpace as CS
        import ConfigSpace.hyperparameters as CSH
        from autoPyTorch.utils.config_space_hyperparameter import add_hyperparameter, get_hyperparameter

        cs = CS.ConfigurationSpace()
        growth_rate_hp = get_hyperparameter(CS.UniformIntegerHyperparameter, 'growth_rate', growth_rate_range)
        cs.add_hyperparameter(growth_rate_hp)

        blocks_hp = get_hyperparameter(CS.UniformIntegerHyperparameter, 'blocks', nr_blocks)
        cs.add_hyperparameter(blocks_hp)
        # Dropout strength is only active when dropout is enabled.
        use_dropout = add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'use_dropout', [True, False])
        dropout = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, 'dropout', [0.0, 1.0])
        cs.add_condition(CS.EqualsCondition(dropout, use_dropout, True))

        # nr_blocks is either (min, max) or ((min, max), log_flag).
        if type(nr_blocks[0]) == int:
            min_blocks = nr_blocks[0]
            max_blocks = nr_blocks[1]
        else:
            min_blocks = nr_blocks[0][0]
            max_blocks = nr_blocks[0][1]

        # One layer-count hyperparameter per possible block; blocks beyond the
        # guaranteed minimum are conditional on 'blocks' being sampled large enough.
        for i in range(1, max_blocks + 1):
            layer_hp = get_hyperparameter(CS.UniformIntegerHyperparameter, 'layer_in_block_%d' % i, layer_range[i - 1])
            cs.add_hyperparameter(layer_hp)

            if i > min_blocks:
                cs.add_condition(CS.GreaterThanCondition(layer_hp, blocks_hp, i - 1))

        return cs
Example 4
 def get_config_space(T_max=(1, 20), T_mult=(1.0, 2.0)):
     """Scheduler config space: integer 'T_max' plus float multiplier 'T_mult'."""
     space = CS.ConfigurationSpace()
     add_hyperparameter(space, CSH.UniformIntegerHyperparameter, 'T_max', T_max)
     add_hyperparameter(space, CSH.UniformFloatHyperparameter, 'T_mult', T_mult)
     return space
Example 5
 def get_config_space(factor=(0.05, 0.5), patience=(3, 10)):
     """Scheduler config space: float reduction 'factor' and integer 'patience'."""
     space = CS.ConfigurationSpace()
     add_hyperparameter(space, CSH.UniformFloatHyperparameter, 'factor', factor)
     add_hyperparameter(space, CSH.UniformIntegerHyperparameter, 'patience', patience)
     return space
Example 6
 def get_config_space(learning_rate=((0.0001, 0.1), True),
                      weight_decay=(0.0001, 0.1)):
     """Optimizer config space: both hyperparameters are uniform floats
     ('learning_rate' defaults to a log-scaled range)."""
     space = CS.ConfigurationSpace()
     # Both tunables are continuous, so one loop registers them.
     for hp_name, hp_range in (('learning_rate', learning_rate),
                               ('weight_decay', weight_decay)):
         add_hyperparameter(space, CSH.UniformFloatHyperparameter, hp_name, hp_range)
     return space
 def get_config_space(
     step_size=(1, 10),
     gamma=(0.001, 0.9)
 ):
     """Step-LR config space: integer 'step_size' and float decay 'gamma'."""
     space = CS.ConfigurationSpace()
     add_hyperparameter(space, CSH.UniformIntegerHyperparameter, 'step_size', step_size)
     add_hyperparameter(space, CSH.UniformFloatHyperparameter, 'gamma', gamma)
     return space
Example 8
 def get_hyperparameter_search_space(
     dataset_info=None,
     standardize=(True, False),
     method=("yeo-johnson", "box-cox"),
 ):
     """Search space for the power transformer: 'standardize' always,
     'method' only when the data permits it (or is unknown)."""
     search_space = ConfigSpace.ConfigurationSpace()
     add_hyperparameter(search_space, CSH.CategoricalHyperparameter, "standardize", standardize)
     # 'method' is offered when no dataset info is available, or when the data
     # has no categorical features and its minimum is unknown or positive.
     if dataset_info is None:
         method_allowed = True
     else:
         positive_only = dataset_info.x_min_value is None or dataset_info.x_min_value > 0
         method_allowed = positive_only and not any(dataset_info.categorical_features)
     if method_allowed:
         add_hyperparameter(search_space, CSH.CategoricalHyperparameter, "method", method)
     return search_space
Example 9
    def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        """Search space containing only the integer 'batch_size' hyperparameter.

        A user-registered search-space override for 'batch_size' takes
        precedence over the default ((32, 500), log-scale) range.
        """
        import ConfigSpace
        import ConfigSpace.hyperparameters as CSH

        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        search_space = ConfigSpace.ConfigurationSpace()

        # Prefer an explicit user override, falling back to the default range.
        updates = self._get_search_space_updates()
        batch_size_range = updates.get('batch_size', ((32, 500), True))
        add_hyperparameter(search_space, CSH.UniformIntegerHyperparameter,
                           'batch_size', batch_size_range)
        # Validate that no unknown search-space updates were registered.
        self._check_search_space_updates('batch_size')
        return search_space
Example 10
    def get_config_space(num_layers=((1, 15), False),
                         num_units=((10, 1024), True),
                         activation=('sigmoid', 'tanh', 'relu'),
                         dropout=(0.0, 0.8),
                         use_dropout=(True, False),
                         **kwargs):
        """Configuration space for an MLP with per-layer widths and dropout.

        Ranges are (low, high) or ((low, high), log_scale) tuples. Per-layer
        overrides may be supplied through kwargs as 'num_units_<i>' /
        'dropout_<i>'; any unconsumed kwargs are rejected at the end.
        """
        cs = CS.ConfigurationSpace()

        num_layers_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter,
                                           'num_layers', num_layers)
        cs.add_hyperparameter(num_layers_hp)
        use_dropout_hp = add_hyperparameter(cs, CS.CategoricalHyperparameter,
                                            "use_dropout", use_dropout)

        # One width (and optional dropout) hyperparameter per potential layer,
        # up to the maximum layer count num_layers[0][1].
        for i in range(1, num_layers[0][1] + 1):
            # kwargs may carry a per-layer range override; popping it lets the
            # final "no leftover kwargs" assertion catch typos.
            n_units_hp = get_hyperparameter(
                CSH.UniformIntegerHyperparameter, "num_units_%d" % i,
                kwargs.pop("num_units_%d" % i, num_units))
            cs.add_hyperparameter(n_units_hp)

            # Layers beyond the guaranteed minimum are only active when
            # 'num_layers' is sampled large enough.
            if i > num_layers[0][0]:
                cs.add_condition(
                    CS.GreaterThanCondition(n_units_hp, num_layers_hp, i - 1))

            if True in use_dropout:
                dropout_hp = get_hyperparameter(
                    CSH.UniformFloatHyperparameter, "dropout_%d" % i,
                    kwargs.pop("dropout_%d" % i, dropout))
                cs.add_hyperparameter(dropout_hp)
                dropout_condition_1 = CS.EqualsCondition(
                    dropout_hp, use_dropout_hp, True)

                # Optional layers need dropout active only when dropout is
                # enabled AND the layer itself exists.
                if i > num_layers[0][0]:
                    dropout_condition_2 = CS.GreaterThanCondition(
                        dropout_hp, num_layers_hp, i - 1)
                    cs.add_condition(
                        CS.AndConjunction(dropout_condition_1,
                                          dropout_condition_2))
                else:
                    cs.add_condition(dropout_condition_1)

        add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'activation',
                           activation)
        assert len(
            kwargs
        ) == 0, "Invalid hyperparameter updates for mlpnet: %s" % str(kwargs)
        return (cs)
Example 11
 def get_config_space(
     learning_rate=((1e-4, 0.1), True),
     momentum=((0.1, 0.99), True),
     weight_decay=(1e-5, 0.1),
     alpha=(0.1,0.99)
 ):
     """Optimizer config space: four uniform-float hyperparameters
     ('learning_rate' and 'momentum' default to log-scaled ranges)."""
     space = CS.ConfigurationSpace()
     # All four tunables are continuous, so one loop registers them.
     for hp_name, hp_range in (('learning_rate', learning_rate),
                               ('momentum', momentum),
                               ('weight_decay', weight_decay),
                               ('alpha', alpha)):
         add_hyperparameter(space, CSH.UniformFloatHyperparameter, hp_name, hp_range)
     return space
Example 12
 def get_config_space(
     T_max=(300,1000),
     patience=(2,5),
     T_mult=(1.0,2.0),
     threshold=(0.001, 0.5)
 ):
     """Scheduler config space: integer 'T_max'/'patience', float 'T_mult'/'threshold'."""
     space = CS.ConfigurationSpace()
     # Register each tunable with its hyperparameter class.
     for hp_cls, hp_name, hp_range in (
             (CSH.UniformIntegerHyperparameter, 'T_max', T_max),
             (CSH.UniformIntegerHyperparameter, 'patience', patience),
             (CSH.UniformFloatHyperparameter, 'T_mult', T_mult),
             (CSH.UniformFloatHyperparameter, 'threshold', threshold)):
         add_hyperparameter(space, hp_cls, hp_name, hp_range)
     return space
Example 13
    def get_hyperparameter_search_space(dataset_info=None,
                                        kernel=('poly', 'rbf', 'sigmoid',
                                                'cosine'),
                                        n_components=(10, 2000),
                                        gamma=((3.0517578125e-05, 8), True),
                                        degree=(2, 5),
                                        coef0=(-1, 1)):
        """Search space for a kernel transformer.

        Kernel-specific hyperparameters ('degree', 'coef0', 'gamma') are added
        only when an applicable kernel is among the choices, and are made
        conditional on the sampled 'kernel'.
        """
        cs = ConfigSpace.ConfigurationSpace()
        kernel_hp = add_hyperparameter(cs, CSH.CategoricalHyperparameter,
                                       'kernel', kernel)
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter,
                           "n_components", n_components)

        # 'degree' is only meaningful for the polynomial kernel.
        if "poly" in kernel:
            degree_hp = add_hyperparameter(cs, CSH.UniformIntegerHyperparameter,
                                           'degree', degree)
            cs.add_condition(CSC.EqualsCondition(degree_hp, kernel_hp, "poly"))

        # 'coef0' applies to the poly and sigmoid kernels.
        coef0_kernels = [k for k in ("poly", "sigmoid") if k in kernel]
        if coef0_kernels:
            coef0_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter,
                                          "coef0", coef0)
            cs.add_condition(CSC.InCondition(coef0_hp, kernel_hp, coef0_kernels))

        # 'gamma' applies to every offered kernel except cosine.
        gamma_kernels = [k for k in ("poly", "rbf", "sigmoid") if k in kernel]
        if gamma_kernels:
            gamma_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter,
                                          "gamma", gamma)
            cs.add_condition(CSC.InCondition(gamma_hp, kernel_hp, gamma_kernels))
        return cs
 def get_config_space(
     max_factor=(1.0, 2),
     min_factor=(0.001, 1.0),
     cycle_length=(3, 10)
 ):
     """Cyclic-scheduler config space: float factor bounds, integer cycle length."""
     space = CS.ConfigurationSpace()
     add_hyperparameter(space, CSH.UniformFloatHyperparameter, 'max_factor', max_factor)
     add_hyperparameter(space, CSH.UniformFloatHyperparameter, 'min_factor', min_factor)
     add_hyperparameter(space, CSH.UniformIntegerHyperparameter, 'cycle_length', cycle_length)
     return space
Example 15
    def get_hyperparameter_search_space(
        dataset_info=None,
        degree=(2, 3),
        interaction_only=(True, False),
        include_bias=(True, False)
    ):
        """Search space for polynomial feature generation.

        Fix: 'interaction_only' and 'include_bias' were accepted but ignored —
        the categorical choices were hard-coded — so caller-supplied
        restrictions had no effect. They are now forwarded.
        """
        cs = ConfigSpace.ConfigurationSpace()
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, "degree", degree)
        # Forward the caller-supplied choices so the signature's parameters
        # actually take effect.
        add_hyperparameter(cs, CSH.CategoricalHyperparameter, "interaction_only", interaction_only)
        add_hyperparameter(cs, CSH.CategoricalHyperparameter, "include_bias", include_bias)

        return cs
Example 16
 def get_hyperparameter_search_space(initialize_bias=("Yes", "No", "Zero")):
     """Search space with a single categorical choice, 'initialize_bias'."""
     space = ConfigSpace.ConfigurationSpace()
     add_hyperparameter(space, ConfigSpace.CategoricalHyperparameter,
                        "initialize_bias", initialize_bias)
     return space
Example 17
 def get_hyperparameter_search_space(alpha=(0, 1)):
     """Search space holding one uniform-float hyperparameter, 'alpha'."""
     space = ConfigSpace.ConfigurationSpace()
     float_hp_cls = ConfigSpace.hyperparameters.UniformFloatHyperparameter
     add_hyperparameter(space, float_hp_cls, "alpha", alpha)
     return space
Example 18
    def get_config_space(num_groups=((1, 9), False),
                         blocks_per_group=((1, 4), False),
                         num_units=((10, 1024), True),
                         activation=('sigmoid', 'tanh', 'relu'),
                         max_shake_drop_probability=(0, 1),
                         dropout=(0, 1.0),
                         use_shake_drop=(True, False),
                         use_shake_shake=(True, False),
                         use_dropout=(True, False),
                         **kwargs):
        """Configuration space for a ResNet with per-group widths and dropout.

        Ranges are (low, high) or ((low, high), log_scale) tuples; per-group
        overrides may be supplied through kwargs as 'num_units_<i>' /
        'dropout_<i>', and any leftovers are rejected at the end.
        """
        cs = ConfigSpace.ConfigurationSpace()

        num_groups_hp = get_hyperparameter(
            ConfigSpace.UniformIntegerHyperparameter, "num_groups", num_groups)
        cs.add_hyperparameter(num_groups_hp)
        blocks_per_group_hp = get_hyperparameter(
            ConfigSpace.UniformIntegerHyperparameter, "blocks_per_group",
            blocks_per_group)
        cs.add_hyperparameter(blocks_per_group_hp)
        add_hyperparameter(cs, ConfigSpace.CategoricalHyperparameter,
                           "activation", activation)

        use_dropout_hp = get_hyperparameter(
            ConfigSpace.CategoricalHyperparameter, "use_dropout", use_dropout)
        cs.add_hyperparameter(use_dropout_hp)
        add_hyperparameter(cs, ConfigSpace.CategoricalHyperparameter,
                           "use_shake_shake", use_shake_shake)

        use_shake_drop_hp = add_hyperparameter(
            cs, ConfigSpace.CategoricalHyperparameter, "use_shake_drop",
            use_shake_drop)
        # Shake-drop probability only exists when shake-drop can be enabled.
        if True in use_shake_drop:
            shake_drop_prob_hp = add_hyperparameter(
                cs, ConfigSpace.UniformFloatHyperparameter,
                "max_shake_drop_probability", max_shake_drop_probability)
            cs.add_condition(
                ConfigSpace.EqualsCondition(shake_drop_prob_hp,
                                            use_shake_drop_hp, True))

        # it is the upper bound of the nr of groups, since the configuration will actually be sampled.
        # NOTE(review): the loop starts at 0 (creating 'num_units_0'), unlike
        # the sibling MLP space which starts at 1 — confirm this matches how
        # the network-building code consumes these names.
        for i in range(0, num_groups[0][1] + 1):

            n_units_hp = add_hyperparameter(
                cs, ConfigSpace.UniformIntegerHyperparameter,
                "num_units_%d" % i, kwargs.pop("num_units_%d" % i, num_units))

            # NOTE(review): the guard compares against the constant 1 rather
            # than the minimum num_groups[0][0] (as the MLP variant does) —
            # verify this is intended.
            if i > 1:
                cs.add_condition(
                    ConfigSpace.GreaterThanCondition(n_units_hp, num_groups_hp,
                                                     i - 1))

            if True in use_dropout:
                dropout_hp = add_hyperparameter(
                    cs, ConfigSpace.UniformFloatHyperparameter,
                    "dropout_%d" % i, kwargs.pop("dropout_%d" % i, dropout))
                dropout_condition_1 = ConfigSpace.EqualsCondition(
                    dropout_hp, use_dropout_hp, True)

                # Per-group dropout is active only when dropout is enabled AND
                # (for optional groups) the group itself exists.
                if i > 1:

                    dropout_condition_2 = ConfigSpace.GreaterThanCondition(
                        dropout_hp, num_groups_hp, i - 1)

                    cs.add_condition(
                        ConfigSpace.AndConjunction(dropout_condition_1,
                                                   dropout_condition_2))
                else:
                    cs.add_condition(dropout_condition_1)
        # Reject any unconsumed per-group overrides (typo protection).
        assert len(
            kwargs
        ) == 0, "Invalid hyperparameter updates for resnet: %s" % str(kwargs)
        return cs
Example 19
 def get_hyperparameter_search_space(dataset_info=None,
                                     target_dim=(10, 256)):
     """Search space with the single integer hyperparameter 'target_dim'."""
     space = ConfigSpace.ConfigurationSpace()
     add_hyperparameter(space, CSH.UniformIntegerHyperparameter, "target_dim", target_dim)
     return space
Example 20
    def get_config_space(num_groups=(1, 9),
                         blocks_per_group=(1, 4),
                         max_units=((10, 1024), True),
                         activation=('sigmoid', 'tanh', 'relu'),
                         max_shake_drop_probability=(0, 1),
                         max_dropout=(0, 0.8),
                         resnet_shape=('funnel', 'long_funnel', 'diamond',
                                       'hexagon', 'brick', 'triangle',
                                       'stairs'),
                         dropout_shape=('funnel', 'long_funnel', 'diamond',
                                        'hexagon', 'brick', 'triangle',
                                        'stairs'),
                         use_dropout=(True, False),
                         use_shake_shake=(True, False),
                         use_shake_drop=(True, False)):
        """Configuration space for a shaped ResNet.

        Dropout- and shake-drop-related hyperparameters are only added (and
        made conditional on their boolean switch) when True is among the
        switch's choices.
        """
        space = CS.ConfigurationSpace()

        # Core architecture knobs.
        add_hyperparameter(space, CS.UniformIntegerHyperparameter,
                           "num_groups", num_groups)
        add_hyperparameter(space, CS.UniformIntegerHyperparameter,
                           "blocks_per_group", blocks_per_group)
        add_hyperparameter(space, CS.CategoricalHyperparameter, "activation",
                           activation)
        dropout_flag_hp = add_hyperparameter(space, CS.CategoricalHyperparameter,
                                             "use_dropout", use_dropout)
        add_hyperparameter(space, CS.CategoricalHyperparameter,
                           "use_shake_shake", use_shake_shake)

        shake_flag_hp = add_hyperparameter(space, CS.CategoricalHyperparameter,
                                           "use_shake_drop", use_shake_drop)
        # Shake-drop probability is only relevant when shake-drop can be on.
        if True in use_shake_drop:
            drop_prob_hp = add_hyperparameter(
                space, CS.UniformFloatHyperparameter,
                "max_shake_drop_probability", max_shake_drop_probability)
            space.add_condition(
                CS.EqualsCondition(drop_prob_hp, shake_flag_hp, True))

        add_hyperparameter(space, CSH.CategoricalHyperparameter, 'resnet_shape',
                           resnet_shape)
        add_hyperparameter(space, CSH.UniformIntegerHyperparameter, "max_units",
                           max_units)

        # Dropout shape/strength only matter when dropout can be enabled.
        if True in use_dropout:
            shape_hp = add_hyperparameter(space, CSH.CategoricalHyperparameter,
                                          'dropout_shape', dropout_shape)
            strength_hp = add_hyperparameter(space,
                                             CSH.UniformFloatHyperparameter,
                                             "max_dropout", max_dropout)

            space.add_condition(
                CS.EqualsCondition(shape_hp, dropout_flag_hp, True))
            space.add_condition(
                CS.EqualsCondition(strength_hp, dropout_flag_hp, True))
        return space
Example 21
    def get_config_space(   growth_rate_range=(5, 128), nr_blocks=(1, 5), kernel_range=(2, 7), 
                            layer_range=(5, 50), activations=all_activations.keys(),
                            conv_init=('random', 'kaiming_normal', 'constant_0', 'constant_1', 'constant_05'),
                            batchnorm_weight_init=('random', 'constant_0', 'constant_1', 'constant_05'),
                            batchnorm_bias_init=('random', 'constant_0', 'constant_1', 'constant_05'),
                            linear_bias_init=('random', 'constant_0', 'constant_1', 'constant_05'), **kwargs):
        """Configuration space for a DenseNet-style convolutional network.

        Builds global hyperparameters (growth rate, first conv/pool kernels,
        initializers, first activation, block count) plus, per possible block,
        a layer count, pooling kernel, activation, dropout and conv kernel.
        Blocks beyond the minimum block count are conditional on 'blocks'.
        """
        # NOTE(review): this function imports 'ConfigSpace as CS' locally but
        # then references 'ConfigSpace', 'get_hyperparameter' and
        # 'all_activations', none of which are imported here — they must come
        # from module-level imports outside this snippet; confirm they exist.
        import ConfigSpace as CS
        import ConfigSpace.hyperparameters as CSH
        from autoPyTorch.utils.config_space_hyperparameter import add_hyperparameter

        cs = CS.ConfigurationSpace()
        # Global (block-independent) hyperparameters.
        growth_rate_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'growth_rate', growth_rate_range)
        first_conv_kernel_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'first_conv_kernel', kernel_range)
        first_pool_kernel_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'first_pool_kernel', kernel_range)
        conv_init_hp = get_hyperparameter(ConfigSpace.CategoricalHyperparameter, 'conv_init', conv_init)
        batchnorm_weight_init_hp = get_hyperparameter(ConfigSpace.CategoricalHyperparameter, 'batchnorm_weight_init', batchnorm_weight_init)
        batchnorm_bias_init_hp = get_hyperparameter(ConfigSpace.CategoricalHyperparameter, 'batchnorm_bias_init', batchnorm_bias_init)
        linear_bias_init_hp = get_hyperparameter(ConfigSpace.CategoricalHyperparameter, 'linear_bias_init', linear_bias_init)
        # Restrict requested activations to the known ones, in sorted order.
        first_activation_hp = get_hyperparameter(ConfigSpace.CategoricalHyperparameter, 'first_activation', sorted(set(activations).intersection(all_activations)))
        blocks_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'blocks', nr_blocks)

        cs.add_hyperparameter(growth_rate_hp)
        cs.add_hyperparameter(first_conv_kernel_hp)
        cs.add_hyperparameter(first_pool_kernel_hp)
        cs.add_hyperparameter(conv_init_hp)
        cs.add_hyperparameter(batchnorm_weight_init_hp)
        cs.add_hyperparameter(batchnorm_bias_init_hp)
        cs.add_hyperparameter(linear_bias_init_hp)
        cs.add_hyperparameter(first_activation_hp)
        cs.add_hyperparameter(blocks_hp)
        add_hyperparameter(cs,   CSH.UniformFloatHyperparameter, 'channel_reduction', [0.1, 0.9])
        add_hyperparameter(cs,   CSH.UniformFloatHyperparameter, 'last_image_size', [0, 1])
        add_hyperparameter(cs,    CSH.CategoricalHyperparameter, 'bottleneck', [True, False])
        use_dropout =   add_hyperparameter(cs,    CSH.CategoricalHyperparameter, 'use_dropout', [True, False])

        # nr_blocks is either (min, max) or ((min, max), log_flag).
        if type(nr_blocks[0]) == int:
            min_blocks = nr_blocks[0]
            max_blocks = nr_blocks[1]
        else:
            min_blocks = nr_blocks[0][0]
            max_blocks = nr_blocks[0][1]

        # Per-block hyperparameters; blocks beyond the guaranteed minimum are
        # conditional on 'blocks' being sampled large enough.
        for i in range(1, max_blocks+1):
            layer_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'layer_in_block_%d' % i, layer_range)
            pool_kernel_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'pool_kernel_%d' % i, kernel_range)
            activation_hp = get_hyperparameter(ConfigSpace.CategoricalHyperparameter, 'activation_%d' % i, sorted(set(activations).intersection(all_activations)))
            cs.add_hyperparameter(layer_hp)
            cs.add_hyperparameter(pool_kernel_hp)
            cs.add_hyperparameter(activation_hp)
            dropout =       add_hyperparameter(cs,   CSH.UniformFloatHyperparameter, 'dropout_%d' % i, [0.0, 1.0])
            conv_kernel =   add_hyperparameter(cs,    CSH.CategoricalHyperparameter, 'conv_kernel_%d' % i, [3, 5, 7])

            
            if i > min_blocks:
                # Optional block: everything in it depends on 'blocks' > i-1,
                # and dropout additionally on 'use_dropout' being True.
                cs.add_condition(CS.GreaterThanCondition(layer_hp, blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(conv_kernel, blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(pool_kernel_hp, blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(activation_hp, blocks_hp, i-1))
                cs.add_condition(CS.AndConjunction(CS.EqualsCondition(dropout, use_dropout, True), CS.GreaterThanCondition(dropout, blocks_hp, i-1)))
            else:
                cs.add_condition(CS.EqualsCondition(dropout, use_dropout, True))

        return cs
 def get_config_space(
     gamma=(0.8, 0.9999)
 ):
     """Config space exposing the single float hyperparameter 'gamma'."""
     space = CS.ConfigurationSpace()
     add_hyperparameter(space, CSH.UniformFloatHyperparameter, 'gamma', gamma)
     return space