Example #1
    def _python_properties_internal(self):
        """Returns dictionary of all python properties."""
        # TODO(kathywu): Add support for metrics serialization.
        # TODO(kathywu): Synchronize with the keras spec (go/keras-json-spec) once
        # the python config serialization has caught up.
        metadata = dict(
            class_name=generic_utils.get_registered_name(type(self.obj)),
            name=self.obj.name,
            trainable=self.obj.trainable,
            expects_training_arg=self.obj._expects_training_arg,  # pylint: disable=protected-access
            dtype=policy.serialize(self.obj._dtype_policy),  # pylint: disable=protected-access
            batch_input_shape=getattr(self.obj, '_batch_input_shape', None),
            stateful=self.obj.stateful,
            must_restore_from_config=self.obj._must_restore_from_config,  # pylint: disable=protected-access
        )

        metadata.update(get_config(self.obj))
        if self.obj.input_spec is not None:
            # Layer's input_spec has already been type-checked in the property setter.
            metadata['input_spec'] = tf.nest.map_structure(
                lambda x: generic_utils.serialize_keras_object(x)
                if x else None, self.obj.input_spec)
        if (self.obj.activity_regularizer is not None
                and hasattr(self.obj.activity_regularizer, 'get_config')):
            metadata[
                'activity_regularizer'] = generic_utils.serialize_keras_object(
                    self.obj.activity_regularizer)
        if self.obj._build_input_shape is not None:  # pylint: disable=protected-access
            metadata['build_input_shape'] = self.obj._build_input_shape  # pylint: disable=protected-access
        return metadata
Example #2
    def _python_properties_internal(self):
        """Returns dictionary of all python properties."""
        # TODO(kathywu): Add support for metrics serialization.
        # TODO(kathywu): Synchronize with the keras spec (go/keras-json-spec)
        # once the python config serialization has caught up.
        metadata = dict(
            name=self.obj.name,
            trainable=self.obj.trainable,
            expects_training_arg=self.obj._expects_training_arg,
            dtype=policy.serialize(self.obj._dtype_policy),
            batch_input_shape=getattr(self.obj, "_batch_input_shape", None),
            stateful=self.obj.stateful,
            must_restore_from_config=self.obj._must_restore_from_config,
            preserve_input_structure_in_config=self.obj._preserve_input_structure_in_config,  # noqa: E501
        )

        metadata.update(get_serialized(self.obj))
        if self.obj.input_spec is not None:
            # Layer's input_spec has already been type-checked in the property
            # setter.
            metadata["input_spec"] = tf.nest.map_structure(
                lambda x: generic_utils.serialize_keras_object(x)
                if x else None,
                self.obj.input_spec,
            )
        if self.obj.activity_regularizer is not None and hasattr(
                self.obj.activity_regularizer, "get_config"):
            metadata[
                "activity_regularizer"] = generic_utils.serialize_keras_object(
                    self.obj.activity_regularizer)
        if self.obj._build_input_shape is not None:
            metadata["build_input_shape"] = self.obj._build_input_shape
        return metadata
Example #3
 def get_config(self):
     linear_config = generic_utils.serialize_keras_object(self.linear_model)
     dnn_config = generic_utils.serialize_keras_object(self.dnn_model)
     config = {
         'linear_model': linear_config,
         'dnn_model': dnn_config,
         'activation': activations.serialize(self.activation),
     }
     base_config = base_layer.Layer.get_config(self)
     return dict(list(base_config.items()) + list(config.items()))
Example #4
 def get_config(self):
     config = {'x_imputation': self.x_imputation,
               'input_decay': serialize_keras_object(self.input_decay),
               'hidden_decay': serialize_keras_object(self.hidden_decay),
               'use_decay_bias': self.use_decay_bias,
               'feed_masking': self.feed_masking,
               'masking_decay': serialize_keras_object(self.masking_decay),
               'decay_initializer': initializers.get(self.decay_initializer),
               'decay_regularizer': regularizers.get(self.decay_regularizer),
               'decay_constraint': constraints.get(self.decay_constraint)}
     base_config = super(GRUD, self).get_config()
     for c in ['implementation', 'reset_after']:
         del base_config[c]
     return dict(list(base_config.items()) + list(config.items()))
Example #5
 def get_config(self):
     # Remember to record all args of the `__init__`
     # which are not covered by `GRUCell`.
     config = {'x_imputation': self.x_imputation,
               'input_decay': serialize_keras_object(self.input_decay),
               'hidden_decay': serialize_keras_object(self.hidden_decay),
               'use_decay_bias': self.use_decay_bias,
               'feed_masking': self.feed_masking,
               'masking_decay': serialize_keras_object(self.masking_decay),
               'decay_initializer': initializers.serialize(self.decay_initializer),
               'decay_regularizer': regularizers.serialize(self.decay_regularizer),
               'decay_constraint': constraints.serialize(self.decay_constraint)
              }
     base_config = super(GRUDCell, self).get_config()
     return dict(list(base_config.items()) + list(config.items()))
Example #6
def serialize(policy):
    if _policy_equivalent_to_dtype(policy):
        # We return either None or the policy name for compatibility with older
        # versions of Keras. If the policy name is returned, it is a dtype string
        # such as 'float32'.
        return None if policy.name == '_infer' else policy.name
    return generic_utils.serialize_keras_object(policy)
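For context, a minimal usage sketch of dtype-policy serialization via the public tf.keras API (not part of the example above; assumes TF 2.4+ where tf.keras.mixed_precision.Policy is available):

import tensorflow as tf

policy = tf.keras.mixed_precision.Policy("mixed_float16")
config = tf.keras.utils.serialize_keras_object(policy)
# The serialize() helper above returns just the dtype string (e.g. 'float32')
# when the policy is equivalent to a single dtype, and falls back to
# serialize_keras_object -- a {'class_name': ..., 'config': ...} dict -- otherwise.
print(config)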
Example #7
 def OnEndOfAlgorithm(self):
     ''' Save the data and the model using the ObjectStore '''
     for symbol, model in self.modelBySymbol.items():
         modelStr = json.dumps(serialize_keras_object(model))
         self.ObjectStore.Save(f'{symbol}_model', modelStr)
         self.Debug(
             f'Model for {symbol} successfully saved in the ObjectStore')
Example #8
 def get_config(self):
     cells = []
     for cell in self.cells:
         cells.append(generic_utils.serialize_keras_object(cell))
     config = {'cells': cells}
     base_config = super(StackedRNNCells, self).get_config()
     return dict(list(base_config.items()) + list(config.items()))
Example #9
 def get_config(self):
     config = super().get_config()
     config.update({
         "model": generic_utils.serialize_keras_object(self.model),
         "rho": self.rho,
     })
     return config
Example #10
def get_serialized(obj):
    with generic_utils.skip_failed_serialization():
        # Store the config dictionary, which may be used when reviving the
        # object.  When loading, the program will attempt to revive the object
        # from config, and if that fails, the object will be revived from the
        # SavedModel.
        return generic_utils.serialize_keras_object(obj)
Example #11
    def serialize_one(activation):
        if isinstance(activation, six.string_types):
            return activation

        if isinstance(activation, keras.engine.Layer):  # Advanced activation
            return serialize_keras_object(activation)

        # The order matters here, since Layers are also callable.
        if callable(activation):  # A function
            return func_dump(activation)

        # Keras serialized config
        if isinstance(activation, dict) \
                and "class_name" in activation \
                and "config" in activation:
            return activation

        # Could be a marshalled function
        if isinstance(activation, (list, tuple)) \
                and len(activation) == 3 \
                and isinstance(activation[0], six.string_types):
            try:
                # TODO: Better way to check if it is a marshalled function!
                func_load(activation)  # Try to unmarshal it

                return activation

            except ValueError:
                pass

        return None
Example #12
def serialize(activation):
    """Returns the string identifier of an activation function.

    Args:
        activation : Function object.

    Returns:
        String denoting the name attribute of the input function

    For example:

    >>> tf.keras.activations.serialize(tf.keras.activations.tanh)
    'tanh'
    >>> tf.keras.activations.serialize(tf.keras.activations.sigmoid)
    'sigmoid'
    >>> tf.keras.activations.serialize('abcd')
    Traceback (most recent call last):
    ...
    ValueError: ('Cannot serialize', 'abcd')

    Raises:
        ValueError: The input function is not a valid one.
    """
    if (hasattr(activation, "__name__")
            and activation.__name__ in _TF_ACTIVATIONS_V2):
        return _TF_ACTIVATIONS_V2[activation.__name__]
    return generic_utils.serialize_keras_object(activation)
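A minimal round-trip sketch with the public API (built-in activations only; a custom callable would additionally need registration or custom_objects):

import tensorflow as tf

name = tf.keras.activations.serialize(tf.keras.activations.tanh)  # 'tanh'
fn = tf.keras.activations.deserialize(name)                       # back to the function
y = fn(tf.constant([-1.0, 0.0, 1.0]))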
Example #13
def test_metrics():
    expect = [0.0, 2.0, 1.0, 2.0]
    y_pred = np.array([0, 0.2, 0.6, 0.4, 1, 0])
    y_true = np.array([1, 0, -1, 1, 0, 0])
    y_true_mask = y_true[y_true != MASK_VALUE]
    y_pred_mask = y_pred[y_true != MASK_VALUE]

    y_true_r = y_true.reshape((-1, 2))
    y_pred_r = y_pred.reshape((-1, 2))
    y_true_mask_r = y_true_mask.reshape((-1, 1))
    y_pred_mask_r = y_pred_mask.reshape((-1, 1))
    res1 = [K.eval(x) for x in cm.contingency_table(y_true, y_pred)]
    res2 = [K.eval(x) for x in cm.contingency_table(y_true_mask, y_pred_mask)]
    res3 = [K.eval(x) for x in cm.contingency_table(y_true_r, y_pred_r)]
    res4 = [
        K.eval(x) for x in cm.contingency_table(y_true_mask_r, y_pred_mask_r)
    ]

    assert sum(res1) == 5
    assert sum(res2) == 5
    assert sum(res3) == 5
    assert sum(res4) == 5
    assert res1 == expect
    assert res2 == expect
    assert res3 == expect
    assert res4 == expect

    assert np.allclose(K.eval(cm.tpr(y_true, y_pred)), cem.tpr(y_true, y_pred))
    assert np.allclose(K.eval(cm.accuracy(y_true, y_pred)),
                       cem.accuracy(y_true, y_pred))

    # test serialization
    s = serialize_keras_object(cm.accuracy)
    a = deserialize_keras_object(s)
    assert a == cm.accuracy
Example #14
def serialize(metric):
    """Serializes metric function or `Metric` instance.

    Args:
      metric: A Keras `Metric` instance or a metric function.

    Returns:
      Metric configuration dictionary.
    """
    return serialize_keras_object(metric)
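A minimal round-trip sketch (assumes the public tf.keras.metrics serialize/deserialize pair):

import tensorflow as tf

metric = tf.keras.metrics.AUC(name="auc")
config = tf.keras.metrics.serialize(metric)      # {'class_name': 'AUC', 'config': {...}}
restored = tf.keras.metrics.deserialize(config)  # fresh instance; metric state is not carried over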
Example #15
def get_config(obj):
    with generic_utils.skip_failed_serialization():
        # Store the config dictionary, which may be used when reviving the object.
        # When loading, the program will attempt to revive the object from config,
        # and if that fails, the object will be revived from the SavedModel.
        config = generic_utils.serialize_keras_object(obj)['config']

    if config is not None:
        return {'config': config}
    return {}
Example #16
  def get_config(self):
    column_configs = [tf.__internal__.feature_column.serialize_feature_column(fc)
                      for fc in self._feature_columns]
    config = {'feature_columns': column_configs}
    config['partitioner'] = generic_utils.serialize_keras_object(
        self._partitioner)

    base_config = super(  # pylint: disable=bad-super-call
        _BaseFeaturesLayer, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))
Example #17
    def get_config(self):
        column_configs = [
            tf.__internal__.feature_column.serialize_feature_column(fc)
            for fc in self._feature_columns
        ]
        config = {"feature_columns": column_configs}
        config["partitioner"] = generic_utils.serialize_keras_object(
            self._partitioner)

        base_config = super().get_config()
        return dict(list(base_config.items()) + list(config.items()))
Example #18
 def get_config(self):
     layer_configs = []
     for layer in super(Sequential, self).layers:
         # `super().layers` includes the InputLayer if available (it is filtered out
         # of `self.layers`). Note that `self._self_tracked_trackables` is managed
         # by the tracking infrastructure and should not be used.
         layer_configs.append(generic_utils.serialize_keras_object(layer))
     config = {'name': self.name, 'layers': copy.deepcopy(layer_configs)}
     if not self._is_graph_network and self._build_input_shape is not None:
         config['build_input_shape'] = self._build_input_shape
     return config
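A minimal sketch of how this config can be used to rebuild an equivalent Sequential model (public tf.keras API; weights are not part of the config):

import tensorflow as tf

model = tf.keras.Sequential()
model.add(tf.keras.Input(shape=(4,)))
model.add(tf.keras.layers.Dense(2, activation="relu"))

config = model.get_config()                      # {'name': ..., 'layers': [...]}
clone = tf.keras.Sequential.from_config(config)  # same architecture, new weights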
Example #19
    def get_config(self):
        # Import here to avoid circular imports.
        from tensorflow.python.feature_column import serialization  # pylint: disable=g-import-not-at-top
        column_configs = serialization.serialize_feature_columns(
            self._feature_columns)
        config = {'feature_columns': column_configs}
        config['partitioner'] = generic_utils.serialize_keras_object(
            self._partitioner)

        base_config = super(  # pylint: disable=bad-super-call
            _BaseFeaturesLayer, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
Example #20
    def get_config(self):
        config = {
            'return_sequences': self.return_sequences,
            'return_state': self.return_state,
            'go_backwards': self.go_backwards,
            'stateful': self.stateful,
            'unroll': self.unroll,
            'time_major': self.time_major
        }
        if self._num_constants:
            config['num_constants'] = self._num_constants
        if self.zero_output_for_mask:
            config['zero_output_for_mask'] = self.zero_output_for_mask

        config['cell'] = generic_utils.serialize_keras_object(self.cell)
        base_config = super(RNN, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
Example #21
    def get_config(self):
        config = {
            "return_sequences": self.return_sequences,
            "return_state": self.return_state,
            "go_backwards": self.go_backwards,
            "stateful": self.stateful,
            "unroll": self.unroll,
            "time_major": self.time_major,
        }
        if self._num_constants:
            config["num_constants"] = self._num_constants
        if self.zero_output_for_mask:
            config["zero_output_for_mask"] = self.zero_output_for_mask

        config["cell"] = generic_utils.serialize_keras_object(self.cell)
        base_config = super().get_config()
        return dict(list(base_config.items()) + list(config.items()))
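A minimal sketch of the corresponding round trip (public tf.keras API; the serialized 'cell' entry is what lets from_config rebuild the wrapped cell):

import tensorflow as tf

rnn = tf.keras.layers.RNN(tf.keras.layers.LSTMCell(8), return_sequences=True)
config = rnn.get_config()                   # contains the serialized 'cell' entry
clone = tf.keras.layers.RNN.from_config(config)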
Example #22
def serialize(learning_rate_schedule):
    """Serializes a `LearningRateSchedule` into a JSON-compatible representation.

    Args:
      learning_rate_schedule: The `LearningRateSchedule` object to serialize.

    Returns:
      A JSON-serializable dict representing the object's config.

    Example:

    >>> lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
    ...   0.1, decay_steps=100000, decay_rate=0.96, staircase=True)
    >>> tf.keras.optimizers.schedules.serialize(lr_schedule)
    {'class_name': 'ExponentialDecay', 'config': {...}}
    """
    return generic_utils.serialize_keras_object(learning_rate_schedule)
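A minimal sketch of the matching deserialize call (public tf.keras API):

import tensorflow as tf

schedule = tf.keras.optimizers.schedules.ExponentialDecay(
    0.1, decay_steps=100000, decay_rate=0.96, staircase=True)
config = tf.keras.optimizers.schedules.serialize(schedule)
restored = tf.keras.optimizers.schedules.deserialize(config)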
Example #23
def test_init_serialization(kernel_initializer, bias_initializer):
    pwm_list = [PWM([[1, 2, 3, 4],
                     [2, 4, 4, 5]]),
                PWM([[1, 2, 1, 4],
                     [2, 10, 4, 5]])]

    # should work out of the box
    # get_custom_objects()['PWMKernelInitializer'] = PWMKernelInitializer
    # get_custom_objects()['PWMBiasInitializer'] = PWMBiasInitializer

    seq_length = 100
    input_shape = (None, seq_length, 4)  # (batch_size, steps, input_dim)
    # input_shape = (seq_length, 4)  # (batch_size, steps, input_dim)

    # output_shape = (None, steps, filters)

    conv_l = kl.Conv1D(filters=15, kernel_size=11,
                       kernel_regularizer=L1L2(l1=1, l2=1),  # Regularization
                       padding="valid",
                       activation="relu",
                       kernel_initializer=kernel_initializer(pwm_list, stddev=0.1),
                       bias_initializer=bias_initializer(pwm_list, kernel_size=11),
                       batch_input_shape=input_shape,
                       )

    # output_shape: (batch_size, new_steps, filters)
    # (new_)steps = length along the sequence; might change due to padding
    model = Sequential()
    model.add(conv_l)
    model.compile(optimizer="adam", loss="mse", metrics=["mse"])
    js = model.to_json()
    js
    # a = model_from_json(js, custom_objects={"Conv1D": kl.Conv1D})
    a = model_from_json(js)
    assert np.all(a.layers[0].kernel_initializer.pwm_list[0].pwm == pwm_list[0].pwm)

    # check just layer serialization:
    conv_l.build(input_shape)
    s = serialize_keras_object(conv_l)

    a = deserialize_keras_object(s, custom_objects={"Conv1D": kl.Conv1D})

    conv_l.get_config()

    # serialization was successful
    assert np.all(a.kernel_initializer.pwm_list[0].pwm == pwm_list[0].pwm)
Example #24
def serialize(optimizer):
    """Serialize the optimizer configuration to JSON compatible python dict.

    The configuration can be used for persistence and to reconstruct the
    `Optimizer` instance.

    >>> tf.keras.optimizers.serialize(tf.keras.optimizers.SGD())
    {'class_name': 'SGD', 'config': {'name': 'SGD', 'learning_rate': 0.01,
                                     'decay': 0.0, 'momentum': 0.0,
                                     'nesterov': False}}

    Args:
      optimizer: An `Optimizer` instance to serialize.

    Returns:
      Python dict which contains the configuration of the input optimizer.
    """
    return serialize_keras_object(optimizer)
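A minimal round-trip sketch (public tf.keras API; only hyperparameters live in the config, not slot variables or other optimizer state):

import tensorflow as tf

opt = tf.keras.optimizers.SGD(learning_rate=0.01, momentum=0.9)
config = tf.keras.optimizers.serialize(opt)       # JSON-compatible dict
restored = tf.keras.optimizers.deserialize(config)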
Example #25
def serialize(layer):
    """Serializes a `Layer` object into a JSON-compatible representation.

    Args:
      layer: The `Layer` object to serialize.

    Returns:
      A JSON-serializable dict representing the object's config.

    Example:

    ```python
    from pprint import pprint
    model = tf.keras.models.Sequential()
    model.add(tf.keras.Input(shape=(16,)))
    model.add(tf.keras.layers.Dense(32, activation='relu'))

    pprint(tf.keras.layers.serialize(model))
    # prints the configuration of the model, as a dict.
    ```
    """
    return generic_utils.serialize_keras_object(layer)
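A minimal round-trip sketch for a single layer (public tf.keras API):

import tensorflow as tf

layer = tf.keras.layers.Dense(32, activation="relu")
config = tf.keras.layers.serialize(layer)    # {'class_name': 'Dense', 'config': {...}}
clone = tf.keras.layers.deserialize(config)  # new, unbuilt Dense layer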
Example #26
def test_MaskLoss():
    l = closs.binary_crossentropy_masked
    y_pred = np.array([0, 0.2, 0.6, 0.4, 1])
    y_true = np.array([1, 0, -1, 1, 0.0])

    y_true_mask = K.cast(y_true[y_true != MASK_VALUE], K.floatx())
    y_pred_mask = K.cast(y_pred[y_true != MASK_VALUE], K.floatx())
    y_true_cast = K.cast(y_true, K.floatx())
    y_pred_cast = K.cast(y_pred, K.floatx())

    res = K.eval(l(y_true, y_pred))

    res_mask = K.eval(kloss.binary_crossentropy(y_true_mask, y_pred_mask))

    assert np.allclose(res, res_mask)

    # test serialization
    s = serialize_keras_object(l)
    a = deserialize_keras_object(s)
    # assert a.loss == l.loss
    # assert a.mask_value == l.mask_value
    res2 = K.eval(a(y_true, y_pred))
    assert np.allclose(res, res2)
Example #27
def test_serialization():

    seq_length = 100
    input_shape = (None, seq_length, 4)  # (batch_size, steps, input_dim)
    # input_shape = (seq_length, 4)  # (batch_size, steps, input_dim)

    # output_shape = (None, steps, filters)

    conv_l = kl.Conv1D(
        filters=15,
        kernel_size=11,
        padding="valid",
        activation="relu",
        batch_input_shape=input_shape,
    )

    # output_shape: (batch_size, new_steps, filters)
    # (new_)steps = length along the sequence; might change due to padding
    model = Sequential()
    model.add(conv_l)
    model.add(cl.GAMSmooth())
    model.compile(optimizer="adam", loss="mse", metrics=["mse"])
    js = model.to_json()
    js
    # a = model_from_json(js, custom_objects={"Conv1D": kl.Conv1D})
    a = model_from_json(js)
    assert np.all(a.layers[1].get_weights()[0] == 0)

    # check just layer serialization:
    conv_l.build(input_shape)
    s = serialize_keras_object(cl.GAMSmooth())

    a = deserialize_keras_object(s, custom_objects={"Conv1D": kl.Conv1D})
    a.get_config()

    # serialization was successful
    assert isinstance(a.get_config(), dict)
Example #28
 def _serialize_fn(obj):
     if callable(obj):
         return generic_utils.serialize_keras_object(obj)
     return obj
Example #29
def serialize(layer):
    return generic_utils.serialize_keras_object(layer)
Example #30
def serialize(loss):
    return serialize_keras_object(loss)
Example #31
def serialize(optimizer):
    return serialize_keras_object(optimizer)