Example #1
 @classmethod
 def get_default_params(cls) -> ParamTable:
     """:return: model default parameters."""
     params = super().get_default_params(with_embedding=True,
                                         with_multi_layer_perceptron=True)
     params['optimizer'] = 'adam'
     params.add(
         Param(name='num_blocks',
               value=1,
               desc="Number of convolution blocks."))
     params.add(
         Param(name='left_filters',
               value=[32],
               desc="The filter size of each convolution "
               "blocks for the left input."))
     params.add(
         Param(name='left_kernel_sizes',
               value=[3],
               desc="The kernel size of each convolution "
               "blocks for the left input."))
     params.add(
         Param(name='right_filters',
               value=[32],
               desc="The filter size of each convolution "
               "blocks for the right input."))
     params.add(
         Param(name='right_kernel_sizes',
               value=[3],
               desc="The kernel size of each convolution "
               "blocks for the right input."))
     params.add(
         Param(name='conv_activation_func',
               value='relu',
               desc="The activation function in the "
               "convolution layer."))
     params.add(
         Param(name='left_pool_sizes',
               value=[2],
               desc="The pooling size of each convolution "
               "blocks for the left input."))
     params.add(
         Param(name='right_pool_sizes',
               value=[2],
               desc="The pooling size of each convolution "
               "blocks for the right input."))
     params.add(
         Param(name='padding',
               value='same',
               hyper_space=hyper_spaces.choice(['same', 'valid', 'causal']),
               desc="The padding mode in the convolution layer. It should "
               "be one of `same`, `valid`, and `causal`."))
     params.add(
         Param('dropout_rate',
               0.0,
               hyper_space=hyper_spaces.quniform(low=0.0, high=0.8, q=0.01),
               desc="The dropout rate."))
     return params
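For context, a hedged usage sketch of consuming these defaults: the per-block list parameters (filters, kernel sizes, pool sizes) must stay in sync with num_blocks, one entry per block. The model class MyConvModel is hypothetical; any model exposing this get_default_params would be driven the same way.

params = MyConvModel.get_default_params()   # MyConvModel is hypothetical
params['num_blocks'] = 2
params['left_filters'] = [32, 64]           # one entry per block
params['left_kernel_sizes'] = [3, 5]
params['right_filters'] = [32, 64]
params['right_kernel_sizes'] = [3, 5]
params['left_pool_sizes'] = [2, 2]
params['right_pool_sizes'] = [2, 2]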
Example #2
 @classmethod
 def get_default_params(cls) -> ParamTable:
     """:return: model default parameters."""
     params = super().get_default_params(with_embedding=True)
     params['optimizer'] = 'adam'
     opt_space = hyper_spaces.choice(['adam', 'rmsprop', 'adagrad'])
     params.get('optimizer').hyper_space = opt_space
     params.add(Param(name='num_blocks', value=1,
                      desc="Number of 2D convolution blocks."))
     params.add(Param(name='kernel_1d_count', value=32,
                      desc="Kernel count of 1D convolution layer."))
     params.add(Param(name='kernel_1d_size', value=3,
                      desc="Kernel size of 1D convolution layer."))
     params.add(Param(name='kernel_2d_count', value=[32],
                      desc="Kernel count of 2D convolution layer in"
                           "each block"))
     params.add(Param(name='kernel_2d_size', value=[[3, 3]],
                      desc="Kernel size of 2D convolution layer in"
                           " each block."))
     params.add(Param(name='activation', value='relu',
                      desc="Activation function."))
     params.add(Param(name='pool_2d_size', value=[[2, 2]],
                      desc="Size of pooling layer in each block."))
     params.add(Param(
         name='padding', value='same',
         hyper_space=hyper_spaces.choice(
             ['same', 'valid']),
         desc="The padding mode in the convolution layer. It should be one"
              "of `same`, `valid`."
     ))
     params.add(Param(
         name='dropout_rate', value=0.0,
         hyper_space=hyper_spaces.quniform(low=0.0, high=0.8,
                                           q=0.01),
         desc="The dropout rate."
     ))
     return params
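Here optimizer keeps a concrete default ('adam') while also carrying a hyper_space, so a tuner can search over it without changing out-of-the-box behavior. A hedged sketch of drawing one sample from that space (MyModel2D is a hypothetical model class; sampling relies on hyperopt's pyll sampler):

import hyperopt.pyll.stochastic

params = MyModel2D.get_default_params()     # MyModel2D is hypothetical
space = params.get('optimizer').hyper_space.convert('optimizer')
print(hyperopt.pyll.stochastic.sample(space))   # e.g. 'rmsprop'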
Example #3
    @classmethod
    def get_default_params(
            cls,
            with_embedding=False,
            with_multi_layer_perceptron=False) -> engine.ParamTable:
        """
        Model default parameters.

        The common usage is to instantiate :class:`matchzoo.engine.ParamTable`
            first, then set the model-specific parameters.

        Examples:
            >>> class MyModel(BaseModel):
            ...     def build(self):
            ...         print(self._params['num_eggs'], 'eggs')
            ...         print('and', self._params['ham_type'])
            ...
            ...     @classmethod
            ...     def get_default_params(cls):
            ...         params = engine.ParamTable()
            ...         params.add(engine.Param('num_eggs', 512))
            ...         params.add(engine.Param('ham_type', 'Parma Ham'))
            ...         return params
            >>> my_model = MyModel()
            >>> my_model.build()
            512 eggs
            and Parma Ham

        Notice that all parameters must be serialisable for the entire model
        to be serialisable. Therefore, it's strongly recommended to use python
        native data types to store parameters.

        :return: model parameters

        """
        params = engine.ParamTable()
        params.add(
            engine.Param(name='name',
                         desc="Not related to the model\'s behavior."))
        params.add(
            engine.Param(name='model_class',
                         value=cls,
                         desc="Model class. Used internally for save/load. "
                         "Changing this may cause unexpected behaviors."))
        params.add(
            engine.Param(
                name='input_shapes',
                desc="Dependent on the model and data. Should be set manually."
            ))
        params.add(
            engine.Param(
                name='task',
                desc="Decides model output shape, loss, and metrics."))
        params.add(
            engine.Param(name='optimizer',
                         hyper_space=hyper_spaces.choice(
                             ['adam', 'adagrad', 'rmsprop'])))
        if with_embedding:
            params.add(
                engine.Param(
                    name='with_embedding',
                    value=True,
                    desc="A flag used help `auto` module. Shouldn't be changed."
                ))
            params.add(
                engine.Param(
                    name='embedding_input_dim',
                    desc=
                    'Usually equals vocab size + 1. Should be set manually.'))
            params.add(
                engine.Param(name='embedding_output_dim',
                             desc='Should be set manually.'))
            params.add(
                engine.Param(name='embedding_trainable',
                             desc='`True` to enable embedding layer training, '
                             '`False` to freeze embedding parameters.'))
        if with_multi_layer_perceptron:
            params.add(
                engine.Param(
                    name='with_multi_layer_perceptron',
                    value=True,
                    desc=
                    "A flag of whether a multiple layer perceptron is used. "
                    "Shouldn't be changed."))
            params.add(
                engine.Param(
                    name='mlp_num_units',
                    desc="Number of units in first `mlp_num_layers` layers."))
            params.add(
                engine.Param(
                    name='mlp_num_layers',
                    desc="Number of layers of the multiple layer percetron."))
            params.add(
                engine.Param(
                    name='mlp_num_fan_out',
                    desc=
                    "Number of units of the layer that connects the multiple "
                    "layer percetron and the output."))
            params.add(
                engine.Param(name='mlp_activation_func',
                             desc='Activation function used in the multiple '
                             'layer perceptron.'))
        return params
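The parameters described as "Should be set manually" ship without values and must be filled before build(). A hedged sketch, assuming a hypothetical BaseModel subclass MyTextModel whose get_default_params calls this method with with_embedding=True, and assuming the table is exposed as a public params attribute (the docstring above accesses it as self._params):

model = MyTextModel()                           # hypothetical subclass
model.params['input_shapes'] = [(32,), (32,)]   # assumed public `params` view
model.params['embedding_input_dim'] = 10001     # usually vocab size + 1
model.params['embedding_output_dim'] = 100
model.params['embedding_trainable'] = True
model.build()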
Example #4
import pytest
import hyperopt

from matchzoo.engine import hyper_spaces


@pytest.fixture(scope='module',
                params=[
                    lambda x: x + 2, lambda x: x - 2, lambda x: x * 2,
                    lambda x: x / 2, lambda x: x // 2, lambda x: x**2,
                    lambda x: 2 + x, lambda x: 2 - x, lambda x: 2 * x,
                    lambda x: 2 / x, lambda x: 2 // x, lambda x: 2**x,
                    lambda x: -x
                ])
def op(request):
    return request.param


@pytest.fixture(scope='module',
                params=[
                    hyper_spaces.choice(options=[0, 1]),
                    hyper_spaces.uniform(low=0, high=10),
                    hyper_spaces.quniform(low=0, high=10, q=2)
                ])
def proxy(request):
    return request.param


def test_init(proxy):
    assert isinstance(proxy.convert('label'), hyperopt.pyll.base.Apply)


def test_op(proxy, op):
    assert isinstance(op(proxy).convert('label'), hyperopt.pyll.base.Apply)
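The tests assert that every proxy, and every arithmetic expression built from one, converts into a hyperopt.pyll.base.Apply node, which is what hyperopt's fmin consumes. A hedged end-to-end sketch (the objective function is hypothetical):

from hyperopt import fmin, tpe

space = {
    'dropout_rate': hyper_spaces.quniform(low=0.0, high=0.8,
                                          q=0.01).convert('dropout_rate'),
    'optimizer': hyper_spaces.choice(['adam', 'adagrad',
                                      'rmsprop']).convert('optimizer'),
}
best = fmin(objective, space,               # `objective` is hypothetical
            algo=tpe.suggest, max_evals=30)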

Example #5
 @classmethod
 def get_default_params(cls):
     """Default parameters."""
     params = super().get_default_params()
     params.get('optimizer').hyper_space = \
         hyper_spaces.choice(['adam', 'adagrad', 'rmsprop'])
     return params