Example #1
0
 def test_serialization_v2(self):
     # Round-trip each v2 activation through serialize()/deserialize() and
     # verify the recovered function carries the canonical short name.
     activation_map = {nn.softmax_v2: 'softmax'}
     for fn_v2_key, expected_name in activation_map.items():
         fn_v2 = activations.get(fn_v2_key)
         serialized = activations.serialize(fn_v2)
         restored = activations.deserialize(serialized)
         assert restored.__name__ == expected_name
Example #2
0
    def from_config(cls, config, custom_objects=None):
        """Creates a RNNModel from its config.

        Args:
          config: A Python dictionary, typically the output of `get_config`.
          custom_objects: Optional dictionary mapping names (strings) to custom
            classes or functions to be considered during deserialization.

        Returns:
          A RNNModel.
        """
        # Pop each serialized sub-object out of `config` so the remaining
        # entries can be forwarded verbatim to the constructor.
        rnn_layer = keras_layers.deserialize(
            config.pop('rnn_layer'), custom_objects=custom_objects)
        sequence_feature_columns = fc.deserialize_feature_columns(
            config.pop('sequence_feature_columns'),
            custom_objects=custom_objects)
        context_feature_columns = config.pop('context_feature_columns', None)
        # Context columns are optional; only deserialize when present.
        if context_feature_columns:
            context_feature_columns = fc.deserialize_feature_columns(
                context_feature_columns, custom_objects=custom_objects)
        activation = activations.deserialize(
            config.pop('activation', None), custom_objects=custom_objects)
        return cls(
            rnn_layer=rnn_layer,
            sequence_feature_columns=sequence_feature_columns,
            context_feature_columns=context_feature_columns,
            activation=activation,
            **config)
    def from_config(cls, config):
        """Rebuilds the layer from `config`, deserializing post_activation."""
        # Work on a copy so the caller's dict is left untouched.
        config = config.copy()
        # use_bias is not an argument of this class, as explained by
        # comment in __init__.
        del config['use_bias']
        config['post_activation'] = activations.deserialize(
            config['post_activation'])
        return cls(**config)
 def test_serialization(self):
   # Every built-in activation must resolve via get(), match the
   # module-level reference, and survive a serialize/deserialize round trip.
   for name in ('softmax', 'relu', 'elu', 'tanh',
                'sigmoid', 'hard_sigmoid', 'linear',
                'softplus', 'softsign', 'selu'):
     ref_fn = getattr(activations, name)
     fn = activations.get(name)
     assert fn == ref_fn
     restored = activations.deserialize(activations.serialize(fn))
     assert restored == ref_fn
Example #5
0
 def from_config(cls, config, custom_objects=None):
     """Rebuilds the combined model from `config`.

     Pops the two serialized sub-models and the activation out of the
     config, then forwards the remaining entries to the constructor.
     """
     linear_model = layer_module.deserialize(
         config.pop('linear_model'), custom_objects)
     dnn_model = layer_module.deserialize(
         config.pop('dnn_model'), custom_objects)
     activation = activations.deserialize(
         config.pop('activation', None), custom_objects=custom_objects)
     return cls(
         linear_model=linear_model,
         dnn_model=dnn_model,
         activation=activation,
         **config)
  def _from_config(cls_initializer, config):
    """All shared from_config logic for fused layers."""
    # Copy so the caller's config dict is not mutated.
    config = config.copy()
    # use_bias is not an argument of this class, as explained by
    # comment in __init__.
    config.pop('use_bias')
    post_activation = config['post_activation']
    # Advanced activations are serialized as full layer configs (they
    # carry a 'class_name' key); plain activations are bare identifiers.
    if 'class_name' in post_activation:
      config['post_activation'] = deserialize_layer(post_activation)
    else:
      config['post_activation'] = activations.deserialize(post_activation)
    return cls_initializer(**config)
Example #7
0
 def from_config(cls, config):
     # The layer is fully described by its serialized activation identifier.
     return activations.deserialize(config['activation'])