Example #1
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        use_augmenter: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_augmenter",
            value_range=(True, False),
            default_value=True,
        ),
        sigma_min: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="sigma_min",
            value_range=(0, 3),
            default_value=0,
        ),
        sigma_offset: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="sigma_offset",
            value_range=(0.0, 3.0),
            default_value=0.5,
        ),
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()
        use_augmenter = get_hyperparameter(use_augmenter,
                                           CategoricalHyperparameter)
        sigma_min = get_hyperparameter(sigma_min, UniformFloatHyperparameter)
        sigma_offset = get_hyperparameter(sigma_offset,
                                          UniformFloatHyperparameter)
        cs.add_hyperparameters([use_augmenter, sigma_min, sigma_offset])
        # the augmenter's hyperparameters are only active when use_augmenter is True
        cs.add_condition(CS.EqualsCondition(sigma_min, use_augmenter, True))
        cs.add_condition(CS.EqualsCondition(sigma_offset, use_augmenter, True))

        return cs
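These search-space builders rely on autoPyTorch's get_hyperparameter/add_hyperparameter helpers, which wrap plain ConfigSpace objects. As a point of reference, here is a minimal standalone sketch of the same conditional pattern as Example #1 written directly against ConfigSpace (the helper-free construction and the final print are illustrative assumptions, not library code): sampling shows that sigma_min and sigma_offset simply disappear from a configuration whenever use_augmenter is False.

    # Standalone sketch (assumed, not library code): Example #1's pattern in plain ConfigSpace
    import ConfigSpace as CS
    from ConfigSpace.hyperparameters import (
        CategoricalHyperparameter,
        UniformFloatHyperparameter,
    )

    cs = CS.ConfigurationSpace()
    use_augmenter = CategoricalHyperparameter(
        "use_augmenter", choices=[True, False], default_value=True)
    sigma_min = UniformFloatHyperparameter(
        "sigma_min", lower=0.0, upper=3.0, default_value=0.0)
    sigma_offset = UniformFloatHyperparameter(
        "sigma_offset", lower=0.0, upper=3.0, default_value=0.5)
    cs.add_hyperparameters([use_augmenter, sigma_min, sigma_offset])

    # the sigma hyperparameters are only active while the augmenter is enabled
    cs.add_condition(CS.EqualsCondition(sigma_min, use_augmenter, True))
    cs.add_condition(CS.EqualsCondition(sigma_offset, use_augmenter, True))

    # sampled configurations omit inactive hyperparameters:
    # with use_augmenter=False, neither sigma_* key is present
    print(cs.sample_configuration())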
Example #2
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None,
        num_layers: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter='num_layers',
                                                                          value_range=(4, 64),
                                                                          default_value=16,
                                                                          ),
        num_blocks: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter='num_blocks',
                                                                          value_range=(3, 4),
                                                                          default_value=3,
                                                                          ),
        growth_rate: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter='growth_rate',
                                                                           value_range=(12, 40),
                                                                           default_value=20,
                                                                           ),
        activation: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter='activation',
                                                                          value_range=tuple(_activations.keys()),
                                                                          default_value=list(_activations.keys())[0],
                                                                          ),
        use_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter='use_dropout',
                                                                           value_range=(True, False),
                                                                           default_value=False,
                                                                           ),
        dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter='dropout',
                                                                       value_range=(0, 0.5),
                                                                       default_value=0.2,
                                                                       ),
    ) -> ConfigurationSpace:
        cs = CS.ConfigurationSpace()

        add_hyperparameter(cs, num_layers, UniformIntegerHyperparameter)
        add_hyperparameter(cs, growth_rate, UniformIntegerHyperparameter)

        min_num_blocks, max_num_blocks = num_blocks.value_range
        blocks_hp = get_hyperparameter(num_blocks, UniformIntegerHyperparameter)
        cs.add_hyperparameter(blocks_hp)

        add_hyperparameter(cs, activation, CategoricalHyperparameter)

        use_dropout = get_hyperparameter(use_dropout, CategoricalHyperparameter)

        dropout = get_hyperparameter(dropout, UniformFloatHyperparameter)

        cs.add_hyperparameters([use_dropout, dropout])
        cs.add_condition(CS.EqualsCondition(dropout, use_dropout, True))

        for i in range(1, int(max_num_blocks) + 1):

            layer_search_space = HyperparameterSearchSpace(hyperparameter='layer_in_block_%d' % i,
                                                           value_range=num_layers.value_range,
                                                           default_value=num_layers.default_value,
                                                           log=num_layers.log)
            layer_hp = get_hyperparameter(layer_search_space, UniformIntegerHyperparameter)

            cs.add_hyperparameter(layer_hp)
            if i > int(min_num_blocks):
                cs.add_condition(CS.GreaterThanCondition(layer_hp, blocks_hp, i - 1))

        return cs
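In the per-block loop above, layer_in_block_i should be active exactly when the sampled num_blocks is at least i; because GreaterThanCondition is strict, the threshold is i - 1, and no condition is attached for i <= min_num_blocks since those blocks always exist. A stripped-down, standalone sketch of just this gating pattern (plain ConfigSpace; the values are borrowed from the defaults above for illustration):

    import ConfigSpace as CS
    from ConfigSpace.hyperparameters import UniformIntegerHyperparameter

    cs = CS.ConfigurationSpace()
    min_blocks, max_blocks = 3, 4
    num_blocks = UniformIntegerHyperparameter(
        "num_blocks", min_blocks, max_blocks, default_value=3)
    cs.add_hyperparameter(num_blocks)

    for i in range(1, max_blocks + 1):
        layer_hp = UniformIntegerHyperparameter(
            f"layer_in_block_{i}", 4, 64, default_value=16)
        cs.add_hyperparameter(layer_hp)
        if i > min_blocks:
            # active iff num_blocks > i - 1, i.e. num_blocks >= i
            cs.add_condition(
                CS.GreaterThanCondition(layer_hp, num_blocks, i - 1))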
Example #3
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        num_layers: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_layers", value_range=(1, 4), default_value=2),
        units_layer: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="units_layer",
            value_range=(64, 512),
            default_value=128),
        activation: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="activation",
            value_range=tuple(_activations.keys()),
            default_value=list(_activations.keys())[0]),
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        min_num_layers: int = num_layers.value_range[0]  # type: ignore
        max_num_layers: int = num_layers.value_range[-1]  # type: ignore
        num_layers_is_constant = (min_num_layers == max_num_layers)

        num_layers_hp = get_hyperparameter(num_layers,
                                           UniformIntegerHyperparameter)
        activation_hp = get_hyperparameter(activation,
                                           CategoricalHyperparameter)
        cs.add_hyperparameter(num_layers_hp)

        if not num_layers_is_constant:
            cs.add_hyperparameter(activation_hp)
            cs.add_condition(
                CS.GreaterThanCondition(activation_hp, num_layers_hp, 1))
        elif max_num_layers > 1:
            # only add activation if we have more than 1 layer
            cs.add_hyperparameter(activation_hp)

        for i in range(1, max_num_layers + 1):
            num_units_search_space = HyperparameterSearchSpace(
                hyperparameter=f"units_layer_{i}",
                value_range=units_layer.value_range,
                default_value=units_layer.default_value,
                log=units_layer.log,
            )
            num_units_hp = get_hyperparameter(num_units_search_space,
                                              UniformIntegerHyperparameter)
            cs.add_hyperparameter(num_units_hp)

            if i >= min_num_layers and not num_layers_is_constant:
                # In the case of a constant, the max and min number of layers are the same.
                # So no condition is needed. If it is not a constant but a hyperparameter,
                # then a condition has to be made so that it accounts for the value of the
                # hyperparameter.
                cs.add_condition(
                    CS.GreaterThanCondition(num_units_hp, num_layers_hp, i))

        return cs
Example #4
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        num_blocks: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_blocks", value_range=(1, 10), default_value=5),
        num_filters: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_filters",
            value_range=(4, 64),
            default_value=32),
        kernel_size: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="kernel_size",
            value_range=(4, 64),
            default_value=32),
        use_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_dropout",
            value_range=(True, False),
            default_value=False),
        dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="dropout", value_range=(0, 0.5), default_value=0.1),
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        min_num_blocks, max_num_blocks = num_blocks.value_range
        num_blocks_hp = get_hyperparameter(num_blocks,
                                           UniformIntegerHyperparameter)
        cs.add_hyperparameter(num_blocks_hp)

        add_hyperparameter(cs, kernel_size, UniformIntegerHyperparameter)

        use_dropout_hp = get_hyperparameter(use_dropout,
                                            CategoricalHyperparameter)
        cs.add_hyperparameter(use_dropout_hp)

        dropout_hp = get_hyperparameter(dropout, UniformFloatHyperparameter)
        cs.add_hyperparameter(dropout_hp)
        cs.add_condition(CS.EqualsCondition(dropout_hp, use_dropout_hp, True))

        for i in range(0, int(max_num_blocks)):
            num_filter_search_space = HyperparameterSearchSpace(
                f"num_filters_{i}",
                value_range=num_filters.value_range,
                default_value=num_filters.default_value,
                log=num_filters.log)
            num_filters_hp = get_hyperparameter(num_filter_search_space,
                                                UniformIntegerHyperparameter)
            cs.add_hyperparameter(num_filters_hp)
            if i >= int(min_num_blocks):
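                # num_filters_{i} is indexed from 0 here (unlike Example #2),
                # so it is only active when num_blocks > i, i.e. the network
                # has at least i + 1 blocks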
                cs.add_condition(
                    CS.GreaterThanCondition(num_filters_hp, num_blocks_hp, i))

        return cs
Example #5
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        num_layers: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_layers", value_range=(1, 4), default_value=2),
        num_filters: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_filters",
            value_range=(16, 256),
            default_value=32),
        activation: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="activation",
            value_range=tuple(_activations.keys()),
            default_value=list(_activations.keys())[0]),
        pooling_method: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="pooling_method",
            value_range=("average", "max"),
            default_value="max"),
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        min_num_layers, max_num_layers = num_layers.value_range
        num_layers_hp = get_hyperparameter(num_layers,
                                           UniformIntegerHyperparameter)

        add_hyperparameter(cs, pooling_method, CategoricalHyperparameter)

        activation_hp = get_hyperparameter(activation,
                                           CategoricalHyperparameter)

        cs.add_hyperparameters([num_layers_hp, activation_hp])
        cs.add_condition(
            CS.GreaterThanCondition(activation_hp, num_layers_hp, 1))

        for i in range(1, int(max_num_layers)):
            num_filters_search_space = HyperparameterSearchSpace(
                f"layer_{i}_filters",
                value_range=num_filters.value_range,
                default_value=num_filters.default_value,
                log=num_filters.log)
            num_filters_hp = get_hyperparameter(num_filters_search_space,
                                                UniformIntegerHyperparameter)
            cs.add_hyperparameter(num_filters_hp)
            if i >= int(min_num_layers):
                cs.add_condition(
                    CS.GreaterThanCondition(num_filters_hp, num_layers_hp, i))

        return cs
Example #6
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None,
        use_augmenter: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="use_augmenter",
                                                                             value_range=(True, False),
                                                                             default_value=True,
                                                                             ),
        scale_offset: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="scale_offset",
                                                                            value_range=(0, 0.4),
                                                                            default_value=0.2,
                                                                            ),
        translate_percent_offset: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="translate_percent_offset",
            value_range=(0, 0.4),
            default_value=0.2),
        shear: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="shear",
                                                                     value_range=(0, 45),
                                                                     default_value=30,
                                                                     ),
        rotate: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="rotate",
                                                                      value_range=(0, 360),
                                                                      default_value=45,
                                                                      ),
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        use_augmenter = get_hyperparameter(use_augmenter, CategoricalHyperparameter)
        scale_offset = get_hyperparameter(scale_offset, UniformFloatHyperparameter)
        translate_percent_offset = get_hyperparameter(translate_percent_offset, UniformFloatHyperparameter)
        shear = get_hyperparameter(shear, UniformIntegerHyperparameter)
        rotate = get_hyperparameter(rotate, UniformIntegerHyperparameter)
        cs.add_hyperparameters([use_augmenter, scale_offset, translate_percent_offset])
        cs.add_hyperparameters([shear, rotate])

        # the augmenter's hyperparameters are only active when use_augmenter is True
        cs.add_condition(CS.EqualsCondition(scale_offset, use_augmenter, True))
        cs.add_condition(CS.EqualsCondition(translate_percent_offset, use_augmenter, True))
        cs.add_condition(CS.EqualsCondition(shear, use_augmenter, True))
        cs.add_condition(CS.EqualsCondition(rotate, use_augmenter, True))

        return cs
Example #7
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        use_augmenter: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_augmenter",
            value_range=(True, False),
            default_value=True,
        ),
        p: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="p",
            value_range=(0.2, 1.0),
            default_value=0.5,
        ),
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        use_augmenter = get_hyperparameter(use_augmenter,
                                           CategoricalHyperparameter)
        p = get_hyperparameter(p, UniformFloatHyperparameter)
        cs.add_hyperparameters([use_augmenter, p])
        # the augmenter's hyperparameter p is only active when use_augmenter is True
        cs.add_condition(CS.EqualsCondition(p, use_augmenter, True))
        return cs
Example #8
    def get_hyperparameter_search_space(  # type: ignore[override]
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        resnet_shape: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="resnet_shape",
            value_range=('funnel', 'long_funnel', 'diamond', 'hexagon',
                         'brick', 'triangle', 'stairs'),
            default_value='funnel',
        ),
        output_dim: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="output_dim",
            value_range=(10, 1024),
            default_value=200,
        ),
        num_groups: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_groups",
            value_range=(1, 15),
            default_value=5,
        ),
        use_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_dropout",
            value_range=(True, False),
            default_value=False,
        ),
        max_units: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="max_units",
            value_range=(10, 1024),
            default_value=200),
        activation: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="activation",
            value_range=tuple(_activations.keys()),
            default_value=list(_activations.keys())[0]),
        blocks_per_group: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="blocks_per_group",
            value_range=(1, 4),
            default_value=2),
        max_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="max_dropout",
            value_range=(0, 0.8),
            default_value=0.5),
        use_shake_shake: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_shake_shake",
            value_range=(True, False),
            default_value=True),
        use_shake_drop: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_shake_drop",
            value_range=(True, False),
            default_value=True),
        max_shake_drop_probability: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="max_shake_drop_probability",
            value_range=(0, 1),
            default_value=0.5),
    ) -> ConfigurationSpace:

        cs = ConfigurationSpace()

        # Support for different shapes
        add_hyperparameter(cs, resnet_shape, CategoricalHyperparameter)

        # The number of groups that will compose the resnet: each group
        # contains blocks_per_group residual blocks, and num_groups such
        # groups are stacked
        add_hyperparameter(cs, num_groups, UniformIntegerHyperparameter)
        add_hyperparameter(cs, blocks_per_group, UniformIntegerHyperparameter)

        add_hyperparameter(cs, activation, CategoricalHyperparameter)
        add_hyperparameter(cs, output_dim, UniformIntegerHyperparameter)

        use_shake_shake = get_hyperparameter(use_shake_shake,
                                             CategoricalHyperparameter)
        use_shake_drop = get_hyperparameter(use_shake_drop,
                                            CategoricalHyperparameter)
        shake_drop_prob = get_hyperparameter(max_shake_drop_probability,
                                             UniformFloatHyperparameter)
        cs.add_hyperparameters(
            [use_shake_shake, use_shake_drop, shake_drop_prob])
        cs.add_condition(
            CS.EqualsCondition(shake_drop_prob, use_shake_drop, True))

        add_hyperparameter(cs, max_units, UniformIntegerHyperparameter)

        use_dropout = get_hyperparameter(use_dropout,
                                         CategoricalHyperparameter)
        max_dropout = get_hyperparameter(max_dropout,
                                         UniformFloatHyperparameter)

        cs.add_hyperparameters([use_dropout, max_dropout])
        cs.add_condition(CS.EqualsCondition(max_dropout, use_dropout, True))

        return cs
Example #9
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        n_components: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter='n_components',
            value_range=(0.5, 0.9),
            default_value=0.5,
        ),
        kernel: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter='kernel',
            value_range=('poly', 'rbf', 'sigmoid', 'cosine'),
            default_value='rbf',
        ),
        gamma: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter='gamma',
            value_range=(3.0517578125e-05, 8),
            default_value=0.01,
            log=True),
        degree: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter='degree',
            value_range=(2, 5),
            default_value=3,
            log=True),
        coef0: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter='coef0',
            value_range=(-1, 1),
            default_value=0,
        )
    ) -> ConfigurationSpace:

        cs = ConfigurationSpace()

        if dataset_properties is not None:
            n_features = (len(dataset_properties['numerical_columns'])
                          if isinstance(dataset_properties['numerical_columns'], List)
                          else 0)
            if n_features == 1:
                log = False
            else:
                log = n_components.log
            n_components = HyperparameterSearchSpace(
                hyperparameter='n_components',
                value_range=(
                    floor(float(n_components.value_range[0]) * n_features),
                    ceil(float(n_components.value_range[1]) * n_features),
                ),
                default_value=ceil(float(n_components.default_value) * n_features),
                log=log)
        else:
            n_components = HyperparameterSearchSpace(
                hyperparameter='n_components',
                value_range=(10, 2000),
                default_value=100,
                log=n_components.log)

        add_hyperparameter(cs, n_components, UniformIntegerHyperparameter)
        kernel_hp = get_hyperparameter(kernel, CategoricalHyperparameter)
        gamma = get_hyperparameter(gamma, UniformFloatHyperparameter)
        coef0 = get_hyperparameter(coef0, UniformFloatHyperparameter)
        cs.add_hyperparameters([kernel_hp, gamma, coef0])

        if "poly" in kernel_hp.choices:
            degree = get_hyperparameter(degree, UniformIntegerHyperparameter)
            cs.add_hyperparameters([degree])
            degree_depends_on_poly = EqualsCondition(degree, kernel_hp, "poly")
            cs.add_conditions([degree_depends_on_poly])
        kernels = []
        if "sigmoid" in kernel_hp.choices:
            kernels.append("sigmoid")
        if "poly" in kernel_hp.choices:
            kernels.append("poly")
        coef0_condition = InCondition(coef0, kernel_hp, kernels)
        kernels = []
        if "rbf" in kernel_hp.choices:
            kernels.append("rbf")
        if "poly" in kernel_hp.choices:
            kernels.append("poly")
        gamma_condition = InCondition(gamma, kernel_hp, kernels)
        cs.add_conditions([coef0_condition, gamma_condition])
        return cs
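For the dataset-dependent branch above: the fractional n_components range is rescaled by the number of numerical columns. With, say, 20 numerical columns (an illustrative figure), value_range=(0.5, 0.9) becomes the integer range (floor(0.5 * 20), ceil(0.9 * 20)) = (10, 18) with default ceil(0.5 * 20) = 10; without dataset properties a generic (10, 2000) range is used instead. The kernel-specific parameters are then gated on the sampled kernel: degree is only active for poly (EqualsCondition), coef0 for poly or sigmoid, and gamma for rbf or poly (InCondition).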
Example #10
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        num_groups: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_groups",
            value_range=(1, 15),
            default_value=5,
        ),
        activation: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="activation",
            value_range=tuple(_activations.keys()),
            default_value=list(_activations.keys())[0],
        ),
        use_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_dropout",
            value_range=(True, False),
            default_value=False,
        ),
        num_units: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_units",
            value_range=(10, 1024),
            default_value=200,
        ),
        dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="dropout",
            value_range=(0, 0.8),
            default_value=0.5,
        ),
    ) -> ConfigurationSpace:

        cs = ConfigurationSpace()

        # The number of hidden layers the network will have.
        # Layer blocks are meant to have the same architecture, differing only
        # by the number of units
        min_mlp_layers, max_mlp_layers = num_groups.value_range
        num_groups = get_hyperparameter(num_groups,
                                        UniformIntegerHyperparameter)
        add_hyperparameter(cs, activation, CategoricalHyperparameter)

        # We can have dropout in the network for
        # better generalization
        use_dropout = get_hyperparameter(use_dropout,
                                         CategoricalHyperparameter)
        cs.add_hyperparameters([num_groups, use_dropout])

        for i in range(1, int(max_mlp_layers) + 1):
            n_units_search_space = HyperparameterSearchSpace(
                hyperparameter='num_units_%d' % i,
                value_range=num_units.value_range,
                default_value=num_units.default_value,
                log=num_units.log)
            n_units_hp = get_hyperparameter(n_units_search_space,
                                            UniformIntegerHyperparameter)
            cs.add_hyperparameter(n_units_hp)

            if i > int(min_mlp_layers):
                # The units of layer i should only exist
                # if there are at least i layers
                cs.add_condition(
                    CS.GreaterThanCondition(n_units_hp, num_groups, i - 1))
            dropout_search_space = HyperparameterSearchSpace(
                hyperparameter='dropout_%d' % i,
                value_range=dropout.value_range,
                default_value=dropout.default_value,
                log=dropout.log)
            dropout_hp = get_hyperparameter(dropout_search_space,
                                            UniformFloatHyperparameter)
            cs.add_hyperparameter(dropout_hp)

            dropout_condition_1 = CS.EqualsCondition(dropout_hp, use_dropout,
                                                     True)

            if i > int(min_mlp_layers):
                dropout_condition_2 = CS.GreaterThanCondition(
                    dropout_hp, num_groups, i - 1)
                cs.add_condition(
                    CS.AndConjunction(dropout_condition_1,
                                      dropout_condition_2))
            else:
                cs.add_condition(dropout_condition_1)

        return cs
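Each dropout_i above has two parents: it should be active only when use_dropout is True and when the network actually has at least i layers. ConfigSpace does not allow attaching two separate conditions to the same child hyperparameter, so the two are combined with CS.AndConjunction. A minimal sketch of that combination for a single layer (plain ConfigSpace; the standalone construction and the values taken from the defaults above are illustrative assumptions):

    import ConfigSpace as CS
    from ConfigSpace.hyperparameters import (
        CategoricalHyperparameter,
        UniformFloatHyperparameter,
        UniformIntegerHyperparameter,
    )

    cs = CS.ConfigurationSpace()
    num_groups = UniformIntegerHyperparameter("num_groups", 1, 15, default_value=5)
    use_dropout = CategoricalHyperparameter(
        "use_dropout", choices=[True, False], default_value=False)
    dropout_3 = UniformFloatHyperparameter("dropout_3", 0.0, 0.8, default_value=0.5)
    cs.add_hyperparameters([num_groups, use_dropout, dropout_3])

    # dropout_3 is active only if dropout is enabled AND there are >= 3 layers
    cs.add_condition(CS.AndConjunction(
        CS.EqualsCondition(dropout_3, use_dropout, True),
        CS.GreaterThanCondition(dropout_3, num_groups, 2),
    ))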
Example #11
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        num_groups: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_groups",
            value_range=(1, 15),
            default_value=5,
        ),
        use_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_dropout",
            value_range=(True, False),
            default_value=False,
        ),
        num_units: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_units",
            value_range=(10, 1024),
            default_value=200,
        ),
        activation: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="activation",
            value_range=tuple(_activations.keys()),
            default_value=list(_activations.keys())[0],
        ),
        blocks_per_group: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="blocks_per_group",
            value_range=(1, 4),
            default_value=2,
        ),
        dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="dropout",
            value_range=(0, 0.8),
            default_value=0.5,
        ),
        use_shake_shake: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_shake_shake",
            value_range=(True, False),
            default_value=True,
        ),
        use_shake_drop: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_shake_drop",
            value_range=(True, False),
            default_value=True,
        ),
        max_shake_drop_probability: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="max_shake_drop_probability",
            value_range=(0, 1),
            default_value=0.5),
    ) -> ConfigurationSpace:
        cs = ConfigurationSpace()

        # The number of groups that will compose the resnet: each group
        # contains blocks_per_group residual blocks, and num_groups such
        # groups are stacked
        min_num_groups, max_num_groups = num_groups.value_range
        num_groups = get_hyperparameter(num_groups,
                                        UniformIntegerHyperparameter)

        add_hyperparameter(cs, activation, CategoricalHyperparameter)
        cs.add_hyperparameters([num_groups])

        # We can have dropout in the network for
        # better generalization
        use_dropout = get_hyperparameter(use_dropout,
                                         CategoricalHyperparameter)
        cs.add_hyperparameters([use_dropout])

        use_shake_shake = get_hyperparameter(use_shake_shake,
                                             CategoricalHyperparameter)
        use_shake_drop = get_hyperparameter(use_shake_drop,
                                            CategoricalHyperparameter)
        shake_drop_prob = get_hyperparameter(max_shake_drop_probability,
                                             UniformFloatHyperparameter)
        cs.add_hyperparameters(
            [use_shake_shake, use_shake_drop, shake_drop_prob])
        cs.add_condition(
            CS.EqualsCondition(shake_drop_prob, use_shake_drop, True))

        # It is the upper bound of the nr of groups,
        # since the configuration will actually be sampled.
        for i in range(0, int(max_num_groups) + 1):

            n_units_search_space = HyperparameterSearchSpace(
                hyperparameter='num_units_%d' % i,
                value_range=num_units.value_range,
                default_value=num_units.default_value,
                log=num_units.log)
            n_units_hp = get_hyperparameter(n_units_search_space,
                                            UniformIntegerHyperparameter)

            blocks_per_group_search_space = HyperparameterSearchSpace(
                hyperparameter='blocks_per_group_%d' % i,
                value_range=blocks_per_group.value_range,
                default_value=blocks_per_group.default_value,
                log=blocks_per_group.log)
            blocks_per_group_hp = get_hyperparameter(
                blocks_per_group_search_space, UniformIntegerHyperparameter)
            cs.add_hyperparameters([n_units_hp, blocks_per_group_hp])

            if i > 1:
                cs.add_condition(
                    CS.GreaterThanCondition(n_units_hp, num_groups, i - 1))
                cs.add_condition(
                    CS.GreaterThanCondition(blocks_per_group_hp, num_groups,
                                            i - 1))

            dropout_search_space = HyperparameterSearchSpace(
                hyperparameter='dropout_%d' % i,
                value_range=dropout.value_range,
                default_value=dropout.default_value,
                log=dropout.log)
            dropout_hp = get_hyperparameter(dropout_search_space,
                                            UniformFloatHyperparameter)
            cs.add_hyperparameter(dropout_hp)

            dropout_condition_1 = CS.EqualsCondition(dropout_hp, use_dropout,
                                                     True)

            if i > 1:

                dropout_condition_2 = CS.GreaterThanCondition(
                    dropout_hp, num_groups, i - 1)

                cs.add_condition(
                    CS.AndConjunction(dropout_condition_1,
                                      dropout_condition_2))
            else:
                cs.add_condition(dropout_condition_1)
        return cs
Example #12
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str,
                                          BaseDatasetPropertiesType]] = None,
        num_groups: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="num_groups",
            value_range=(1, 15),
            default_value=5,
        ),
        max_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="max_dropout",
            value_range=(0, 1),
            default_value=0.5,
        ),
        use_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="use_dropout",
            value_range=(True, False),
            default_value=False,
        ),
        max_units: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="max_units",
            value_range=(10, 1024),
            default_value=200,
        ),
        output_dim: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="output_dim",
            value_range=(10, 1024),
            default_value=200,
        ),
        mlp_shape: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="mlp_shape",
            value_range=('funnel', 'long_funnel', 'diamond', 'hexagon',
                         'brick', 'triangle', 'stairs'),
            default_value='funnel',
        ),
        activation: HyperparameterSearchSpace = HyperparameterSearchSpace(
            hyperparameter="activation",
            value_range=tuple(_activations.keys()),
            default_value=list(_activations.keys())[0],
        ),
    ) -> ConfigurationSpace:

        cs = ConfigurationSpace()

        # The number of groups that will compose the MLP; mlp_shape
        # determines how the units are distributed across these groups
        add_hyperparameter(cs, num_groups, UniformIntegerHyperparameter)
        add_hyperparameter(cs, mlp_shape, CategoricalHyperparameter)
        add_hyperparameter(cs, activation, CategoricalHyperparameter)
        add_hyperparameter(cs, max_units, UniformIntegerHyperparameter)
        add_hyperparameter(cs, output_dim, UniformIntegerHyperparameter)

        # We can have dropout in the network for
        # better generalization
        use_dropout = get_hyperparameter(use_dropout,
                                         CategoricalHyperparameter)
        max_dropout = get_hyperparameter(max_dropout,
                                         UniformFloatHyperparameter)

        cs.add_hyperparameters([use_dropout, max_dropout])
        cs.add_condition(CS.EqualsCondition(max_dropout, use_dropout, True))

        return cs