예제 #1
0
def deserialize(config, custom_objects=None):
    """Recover an `Initializer` instance from its serialized config.

    Args:
        config: Serialized initializer configuration.
        custom_objects: Optional dict mapping names to user-defined
            classes/functions consulted during deserialization.

    Returns:
        The corresponding `Initializer` object.
    """
    lookup_kwargs = {
        'module_objects': _ALL_INITIALIZERS,
        'custom_objects': custom_objects,
        'printable_module_name': 'initializer',
    }
    return generic_utils.deserialize_keras_object(config, **lookup_kwargs)
예제 #2
0
 def _parse_function_from_config(cls, config, custom_objects,
                                 func_attr_name, module_attr_name,
                                 func_type_attr_name):
     globs = globals().copy()
     module = config.pop(module_attr_name, None)
     if module in sys.modules:
         globs.update(sys.modules[module].__dict__)
     elif module is not None:
         # Note: we don't know the name of the function if it's a lambda.
         warnings.warn('{} is not loaded, but a Lambda layer uses it. '
                       'It may cause errors.'.format(module),
                       UserWarning,
                       stacklevel=2)
     if custom_objects:
         globs.update(custom_objects)
     function_type = config.pop(func_type_attr_name)
     if function_type == 'function':
         # Simple lookup in custom objects
         function = generic_utils.deserialize_keras_object(
             config[func_attr_name],
             custom_objects=custom_objects,
             printable_module_name='function in Lambda layer')
     elif function_type == 'lambda':
         # Unsafe deserialization from bytecode
         function = generic_utils.func_load(config[func_attr_name],
                                            globs=globs)
     elif function_type == 'raw':
         function = config[func_attr_name]
     else:
         supported_types = ['function', 'lambda', 'raw']
         raise TypeError(
             f'Unsupported value for `function_type` argument. Received: '
             f'function_type={function_type}. Expected one of {supported_types}'
         )
     return function
예제 #3
0
def deserialize(config, custom_objects=None):
    """Instantiates a `LearningRateSchedule` object from a serialized form.

    Args:
        config: The serialized form of the `LearningRateSchedule`, a
            dictionary of the form {'class_name': str, 'config': dict}.
        custom_objects: Optional dictionary mapping class names (or
            function names) of custom (non-Keras) objects to
            classes/functions.

    Returns:
        A `LearningRateSchedule` object.

    Example:

    ```python
    # Configuration for PolynomialDecay
    config = {
      'class_name': 'PolynomialDecay',
      'config': {'cycle': False,
        'decay_steps': 10000,
        'end_learning_rate': 0.01,
        'initial_learning_rate': 0.1,
        'name': None,
        'power': 0.5}}
    lr_schedule = tf.keras.optimizers.schedules.deserialize(config)
    ```
    """
    schedule_kwargs = {
        "module_objects": globals(),
        "custom_objects": custom_objects,
        "printable_module_name": "decay",
    }
    return generic_utils.deserialize_keras_object(config, **schedule_kwargs)
예제 #4
0
def test_metrics():
    """Contingency-table metrics agree across masked/reshaped inputs."""
    expect = [0.0, 2.0, 1.0, 2.0]
    y_pred = np.array([0, 0.2, 0.6, 0.4, 1, 0])
    y_true = np.array([1, 0, -1, 1, 0, 0])
    keep = y_true != MASK_VALUE
    y_true_mask = y_true[keep]
    y_pred_mask = y_pred[keep]

    # Same data presented flat, masked, and reshaped should all produce
    # the identical contingency table.
    cases = [
        (y_true, y_pred),
        (y_true_mask, y_pred_mask),
        (y_true.reshape((-1, 2)), y_pred.reshape((-1, 2))),
        (y_true_mask.reshape((-1, 1)), y_pred_mask.reshape((-1, 1))),
    ]
    tables = [
        [K.eval(entry) for entry in cm.contingency_table(yt, yp)]
        for yt, yp in cases
    ]
    for table in tables:
        assert sum(table) == 5
    for table in tables:
        assert table == expect

    assert np.allclose(K.eval(cm.tpr(y_true, y_pred)), cem.tpr(y_true, y_pred))
    assert np.allclose(K.eval(cm.accuracy(y_true, y_pred)),
                       cem.accuracy(y_true, y_pred))

    # test serialization: round-trip must give back the same function
    s = serialize_keras_object(cm.accuracy)
    a = deserialize_keras_object(s)
    assert a == cm.accuracy
예제 #5
0
def deserialize(config, custom_objects=None):
    """Inverse of the `serialize` function.
    # Arguments
        config: Optimizer configuration dictionary.
        custom_objects: Optional dictionary mapping names (strings) to
            custom objects (classes and functions) to be considered
            during deserialization.
    # Returns
        A Keras Optimizer instance.
    """
    all_classes = {
        'sgd': optimizers.SGD,
        'rmsprop': optimizers.RMSprop,
        'adagrad': optimizers.Adagrad,
        'adadelta': optimizers.Adadelta,
        'adam': optimizers.Adam,
        'adamax': optimizers.Adamax,
        'nadam': optimizers.Nadam,
        'tfoptimizer': optimizers.TFOptimizer,
    }
    # Built-in optimizer names are matched case-insensitively.
    lowered = config['class_name'].lower()
    if lowered in all_classes:
        config['class_name'] = lowered
    return deserialize_keras_object(
        config,
        module_objects=all_classes,
        custom_objects=custom_objects,
        printable_module_name='optimizer')
예제 #6
0
def deserialize(config, custom_objects=None):
    """Return the constraint instance described by `config`."""
    constraint_kwargs = {
        "module_objects": globals(),
        "custom_objects": custom_objects,
        "printable_module_name": "constraint",
    }
    return deserialize_keras_object(config, **constraint_kwargs)
예제 #7
0
def _parse_config_to_function(config, custom_objects, func_attr_name,
                              func_type_attr_name, module_attr_name):
  """Reconstruct the function from the config."""
  globs = globals()
  module = config.pop(module_attr_name, None)
  if module in sys.modules:
    globs.update(sys.modules[module].__dict__)
  elif module is not None:
    # Note: we don't know the name of the function if it's a lambda.
    warnings.warn("{} is not loaded, but a layer uses it. "
                  "It may cause errors.".format(module), UserWarning)
  if custom_objects:
    globs.update(custom_objects)
  function_type = config.pop(func_type_attr_name)
  if function_type == "function":
    # Simple lookup in custom objects
    function = generic_utils.deserialize_keras_object(
        config[func_attr_name],
        custom_objects=custom_objects,
        printable_module_name="function in wrapper")
  elif function_type == "lambda":
    # Unsafe deserialization from bytecode
    function = generic_utils.func_load(
        config[func_attr_name], globs=globs)
  else:
    raise TypeError("Unknown function type:", function_type)
  return function
예제 #8
0
def deserialize(config, custom_objects=None):
    """Inverse of the `serialize` function.
    # Arguments
        config: Optimizer configuration dictionary.
        custom_objects: Optional dictionary mapping
            names (strings) to custom objects
            (classes and functions)
            to be considered during deserialization.
    # Returns
        A Keras Optimizer instance.
    """
    all_classes = {
        'sgd': optimizers.SGD,
        'rmsprop': optimizers.RMSprop,
        'adagrad': optimizers.Adagrad,
        'adadelta': optimizers.Adadelta,
        'adam': optimizers.Adam,
        'adamax': optimizers.Adamax,
        'nadam': optimizers.Nadam,
        'tfoptimizer': optimizers.TFOptimizer,
    }
    # Make deserialization case-insensitive for built-in optimizers.
    name = config['class_name']
    if name.lower() in all_classes:
        config['class_name'] = name.lower()
    optimizer_kwargs = {
        'module_objects': all_classes,
        'custom_objects': custom_objects,
        'printable_module_name': 'optimizer',
    }
    return deserialize_keras_object(config, **optimizer_kwargs)
예제 #9
0
def deserialize(config, custom_objects=None):
    """Return an `Initializer` object from its config.

    The registry of deserializable objects is (re)populated before the
    lookup takes place.
    """
    populate_deserializable_objects()
    lookup_kwargs = {
        'module_objects': LOCAL.ALL_OBJECTS,
        'custom_objects': custom_objects,
        'printable_module_name': 'initializer',
    }
    return generic_utils.deserialize_keras_object(config, **lookup_kwargs)
예제 #10
0
  def from_config(cls, config, custom_objects=None):
    """Rebuild the object from `config`, deserializing nested members."""
    cfg = config.copy()
    columns_by_name = {}
    deserialize_fc = tf.__internal__.feature_column.deserialize_feature_column
    cfg['feature_columns'] = [
        deserialize_fc(col, custom_objects, columns_by_name)
        for col in config['feature_columns']
    ]
    cfg['partitioner'] = generic_utils.deserialize_keras_object(
        config['partitioner'], custom_objects)
    return cls(**cfg)
예제 #11
0
def deserialize(config, custom_objects=None):
    """Return the regularizer described by `config`."""
    if config == 'l1_l2':
        # Special case necessary since the defaults used for "l1_l2"
        # (string) differ from those of the L1L2 class.
        return L1L2(l1=0.01, l2=0.01)
    regularizer_kwargs = {
        'module_objects': globals(),
        'custom_objects': custom_objects,
        'printable_module_name': 'regularizer',
    }
    return deserialize_keras_object(config, **regularizer_kwargs)
예제 #12
0
  def from_config(cls, config, custom_objects=None):
    """Rebuild the object from `config`, deserializing nested members."""
    # Import here to avoid circular imports.
    from tensorflow.python.feature_column import serialization  # pylint: disable=g-import-not-at-top
    cfg = config.copy()
    cfg['feature_columns'] = serialization.deserialize_feature_columns(
        config['feature_columns'], custom_objects=custom_objects)
    cfg['partitioner'] = generic_utils.deserialize_keras_object(
        config['partitioner'], custom_objects)
    return cls(**cfg)
예제 #13
0
File: loss_scale.py  Project: z-a-f/keras-1
def deserialize(config, custom_objects=None):
    """Return a loss scale instance deserialized from `config`."""
    known_loss_scales = {
        'FixedLossScale': tf.mixed_precision.experimental.FixedLossScale,
        'DynamicLossScale': tf.mixed_precision.experimental.DynamicLossScale,
    }
    return generic_utils.deserialize_keras_object(
        config,
        module_objects=known_loss_scales,
        custom_objects=custom_objects,
        printable_module_name='loss scale')
예제 #14
0
def deserialize(config, custom_objects=None):
    """Return a dtype `Policy` deserialized from `config`."""
    # A bare dtype string is shorthand for a policy of that dtype.
    if isinstance(config, str) and _is_convertible_to_dtype(config):
        return Policy(config)
    # A missing config means "infer the dtype".
    if config is None:
        return Policy('_infer')
    return generic_utils.deserialize_keras_object(
        config,
        module_objects={'Policy': Policy, 'PolicyV1': Policy},
        custom_objects=custom_objects,
        printable_module_name='dtype policy')
예제 #15
0
def _decode_helper(
    obj, deserialize=False, module_objects=None, custom_objects=None
):
    """A decoding helper that is TF-object aware.

    Args:
      obj: A decoded dictionary that may represent an object.
      deserialize: Boolean, defaults to False. When True, deserializes any Keras
        objects found in `obj`.
      module_objects: A dictionary of built-in objects to look the name up in.
        Generally, `module_objects` is provided by midlevel library implementers.
      custom_objects: A dictionary of custom objects to look the name up in.
        Generally, `custom_objects` is provided by the end user.

    Returns:
      The decoded object.
    """
    if isinstance(obj, dict) and "class_name" in obj:
        if obj["class_name"] == "TensorShape":
            return tf.TensorShape(obj["items"])
        elif obj["class_name"] == "TypeSpec":
            return type_spec.lookup(
                obj["type_spec"]
            )._deserialize(  # pylint: disable=protected-access
                _decode_helper(obj["serialized"])
            )
        elif obj["class_name"] == "CompositeTensor":
            spec = obj["spec"]
            tensors = []
            for dtype, tensor in obj["tensors"]:
                tensors.append(
                    tf.constant(tensor, dtype=tf.dtypes.as_dtype(dtype))
                )
            return tf.nest.pack_sequence_as(
                _decode_helper(spec), tensors, expand_composites=True
            )
        elif obj["class_name"] == "__tuple__":
            return tuple(_decode_helper(i) for i in obj["items"])
        elif obj["class_name"] == "__ellipsis__":
            return Ellipsis
        elif deserialize and "__passive_serialization__" in obj:
            # __passive_serialization__ is added by the JSON encoder when encoding
            # an object that has a `get_config()` method.
            try:
                return generic_utils.deserialize_keras_object(
                    obj,
                    module_objects=module_objects,
                    custom_objects=custom_objects,
                )
            except ValueError:
                pass
    return obj
예제 #16
0
File: policy.py  Project: ttigong/keras
def deserialize(config, custom_objects=None):
    """Return a dtype policy; plain dtype strings become policies."""
    if config is None:
        return Policy('_infer')
    if isinstance(config, str) and _is_convertible_to_dtype(config):
        return Policy(config)
    # PolicyV1 was an old version of Policy that was removed. Deserializing
    # it turns it into a (non-V1) Policy.
    known_policies = {'Policy': Policy, 'PolicyV1': Policy}
    return generic_utils.deserialize_keras_object(
        config,
        module_objects=known_policies,
        custom_objects=custom_objects,
        printable_module_name='dtype policy')
예제 #17
0
def recursively_deserialize_keras_object(config, module_objects=None):
  """Deserialize Keras object from a nested structure."""
  if isinstance(config, dict):
    if 'class_name' not in config:
      # Plain mapping: recurse into each value.
      return {k: recursively_deserialize_keras_object(v, module_objects)
              for k, v in config.items()}
    return generic_utils.deserialize_keras_object(
        config, module_objects=module_objects)
  if isinstance(config, (tuple, list)):
    return [recursively_deserialize_keras_object(item, module_objects)
            for item in config]
  raise ValueError('Unable to decode config: {}'.format(config))
예제 #18
0
def deserialize(config, custom_objects=None):
  """Deserializes a serialized metric class/function instance.

  Args:
    config: Metric configuration.
    custom_objects: Optional dictionary mapping names (strings) to custom
      objects (classes and functions) to be considered during
      deserialization.

  Returns:
      A Keras `Metric` instance or a metric function.
  """
  metric_kwargs = {
      'module_objects': globals(),
      'custom_objects': custom_objects,
      'printable_module_name': 'metric function',
  }
  return deserialize_keras_object(config, **metric_kwargs)
예제 #19
0
def recursively_deserialize_keras_object(config, module_objects=None):
  """Deserialize Keras object from a nested structure."""
  if isinstance(config, (tuple, list)):
    return [recursively_deserialize_keras_object(entry, module_objects)
            for entry in config]
  if isinstance(config, dict):
    if 'class_name' in config:
      return generic_utils.deserialize_keras_object(
          config, module_objects=module_objects)
    # Plain mapping: recurse into each value.
    return {key: recursively_deserialize_keras_object(config[key],
                                                      module_objects)
            for key in config}
  raise ValueError(
      f'Unable to decode Keras layer config. Config should be a dictionary, '
      f'tuple or list. Received: config={config}')
예제 #20
0
def test_init_serialization(kernel_initializer, bias_initializer):
    """PWM-based initializers must survive model- and layer-level serialization."""
    pwm_list = [PWM([[1, 2, 3, 4],
                     [2, 4, 4, 5]]),
                PWM([[1, 2, 1, 4],
                     [2, 10, 4, 5]])]

    seq_length = 100
    input_shape = (None, seq_length, 4)  # (batch_size, steps, input_dim)

    conv_l = kl.Conv1D(filters=15, kernel_size=11,
                       kernel_regularizer=L1L2(l1=1, l2=1),  # Regularization
                       padding="valid",
                       activation="relu",
                       kernel_initializer=kernel_initializer(pwm_list, stddev=0.1),
                       bias_initializer=bias_initializer(pwm_list, kernel_size=11),
                       batch_input_shape=input_shape,
                       )

    # output_shape: (batch_size, new_steps, filters);
    # new_steps = length along the sequence, may change due to padding.
    model = Sequential()
    model.add(conv_l)
    model.compile(optimizer="adam", loss="mse", metrics=["mse"])
    js = model.to_json()
    a = model_from_json(js)
    assert np.all(a.layers[0].kernel_initializer.pwm_list[0].pwm == pwm_list[0].pwm)

    # check just layer serialization:
    conv_l.build(input_shape)
    s = serialize_keras_object(conv_l)
    a = deserialize_keras_object(s, custom_objects={"Conv1D": kl.Conv1D})

    # get_config must still work on the built layer (raises if broken).
    conv_l.get_config()

    # serialization was successful
    assert np.all(a.kernel_initializer.pwm_list[0].pwm == pwm_list[0].pwm)
예제 #21
0
def deserialize(config, custom_objects=None):
    """Instantiates a layer from a config dictionary.

    Args:
        config: dict of the form {'class_name': str, 'config': dict}
        custom_objects: dict mapping class names (or function names)
            of custom (non-Keras) objects to class/functions

    Returns:
        Layer instance (may be Model, Sequential, Network, Layer...)
    """
    # Fill in the registry of built-in objects before the lookup.
    populate_deserializable_objects()
    layer_kwargs = {
        'module_objects': LOCAL.ALL_OBJECTS,
        'custom_objects': custom_objects,
        'printable_module_name': 'layer',
    }
    return generic_utils.deserialize_keras_object(config, **layer_kwargs)
예제 #22
0
def deserialize(config, custom_objects=None):
    """Inverse of the `serialize` function.

    Args:
        config: Optimizer configuration dictionary.
        custom_objects: Optional dictionary mapping names (strings) to
          custom objects (classes and functions) to be considered during
          deserialization.

    Returns:
        A Keras Optimizer instance.
    """
    # loss_scale_optimizer has a direct dependency on this module, so the
    # import is done lazily here to avoid the cyclic dependency.
    from keras.mixed_precision import loss_scale_optimizer

    all_classes = {
        "adadelta": adadelta_v2.Adadelta,
        "adagrad": adagrad_v2.Adagrad,
        "adam": adam_v2.Adam,
        "adamax": adamax_v2.Adamax,
        "experimentaladadelta": adadelta_experimental.Adadelta,
        "experimentaladagrad": adagrad_experimental.Adagrad,
        "experimentaladam": adam_experimental.Adam,
        "experimentalsgd": sgd_experimental.SGD,
        "nadam": nadam_v2.Nadam,
        "rmsprop": rmsprop_v2.RMSprop,
        "sgd": gradient_descent_v2.SGD,
        "ftrl": ftrl.Ftrl,
        "lossscaleoptimizer": loss_scale_optimizer.LossScaleOptimizer,
        "lossscaleoptimizerv3": loss_scale_optimizer.LossScaleOptimizerV3,
        # LossScaleOptimizerV1 was an old version of LSO that was removed.
        # Deserializing it turns it into a LossScaleOptimizer
        "lossscaleoptimizerv1": loss_scale_optimizer.LossScaleOptimizer,
    }

    # Built-in optimizer names are matched case-insensitively.
    lowered = config["class_name"].lower()
    if lowered in all_classes:
        config["class_name"] = lowered
    return deserialize_keras_object(
        config,
        module_objects=all_classes,
        custom_objects=custom_objects,
        printable_module_name="optimizer",
    )
예제 #23
0
def deserialize(config, custom_objects=None):
    """Instantiates a layer from a config dictionary.

    Args:
        config: dict of the form {'class_name': str, 'config': dict}
        custom_objects: dict mapping class names (or function names) of
          custom (non-Keras) objects to class/functions

    Returns:
        Layer instance (may be Model, Sequential, Network, Layer...)

    Example:

    ```python
    # Configuration of Dense(32, activation='relu')
    config = {
      'class_name': 'Dense',
      'config': {
        'activation': 'relu',
        'activity_regularizer': None,
        'bias_constraint': None,
        'bias_initializer': {'class_name': 'Zeros', 'config': {}},
        'bias_regularizer': None,
        'dtype': 'float32',
        'kernel_constraint': None,
        'kernel_initializer': {'class_name': 'GlorotUniform',
                               'config': {'seed': None}},
        'kernel_regularizer': None,
        'name': 'dense',
        'trainable': True,
        'units': 32,
        'use_bias': True
      }
    }
    dense_layer = tf.keras.layers.deserialize(config)
    ```
    """
    # Fill in the registry of built-in objects before the lookup.
    populate_deserializable_objects()
    lookup = {
        "module_objects": LOCAL.ALL_OBJECTS,
        "custom_objects": custom_objects,
        "printable_module_name": "layer",
    }
    return generic_utils.deserialize_keras_object(config, **lookup)
예제 #24
0
def deserialize(name, custom_objects=None):
    """Returns activation function given a string identifier.

    Args:
      name: The name of the activation function.
      custom_objects: Optional `{function_name: function_obj}`
        dictionary listing user-provided activation functions.

    Returns:
        Corresponding activation function.

    For example:

    >>> tf.keras.activations.deserialize('linear')
     <function linear at 0x1239596a8>
    >>> tf.keras.activations.deserialize('sigmoid')
     <function sigmoid at 0x123959510>
    >>> tf.keras.activations.deserialize('abcd')
    Traceback (most recent call last):
    ...
    ValueError: Unknown activation function:abcd

    Raises:
        ValueError: `Unknown activation function` if the input string does
        not denote any defined Tensorflow activation function.
    """
    table = {}
    this_module = sys.modules[__name__]

    # 'this_module' is placed after 'activation_layers' so a local
    # definition wins if there is a name collision.
    generic_utils.populate_dict_with_module_objects(
        table,
        (activation_layers, this_module),
        obj_filter=callable,
    )

    return generic_utils.deserialize_keras_object(
        name,
        module_objects=table,
        custom_objects=custom_objects,
        printable_module_name="activation function",
    )
예제 #25
0
def deserialize(config, custom_objects=None):
    """Inverse of the `serialize` function.

    Args:
        config: Optimizer configuration dictionary.
        custom_objects: Optional dictionary mapping names (strings) to
          custom objects (classes and functions) to be considered during
          deserialization.

    Returns:
        A Keras Optimizer instance.
    """
    # loss_scale_optimizer has a direct dependency of optimizer, import here
    # rather than top to avoid the cyclic dependency.
    from keras.mixed_precision import loss_scale_optimizer  # pylint: disable=g-import-not-at-top
    all_classes = {
        'adadelta': adadelta_v2.Adadelta,
        'adagrad': adagrad_v2.Adagrad,
        'adam': adam_v2.Adam,
        'adamax': adamax_v2.Adamax,
        'experimentaladadelta': adadelta_experimental.Adadelta,
        'experimentaladagrad': adagrad_experimental.Adagrad,
        'experimentaladam': adam_experimental.Adam,
        'experimentalsgd': sgd_experimental.SGD,
        'nadam': nadam_v2.Nadam,
        'rmsprop': rmsprop_v2.RMSprop,
        'sgd': gradient_descent_v2.SGD,
        'ftrl': ftrl.Ftrl,
        'lossscaleoptimizer': loss_scale_optimizer.LossScaleOptimizer,
        'lossscaleoptimizerv3': loss_scale_optimizer.LossScaleOptimizerV3,
        # LossScaleOptimizerV1 deserializes into LossScaleOptimizer, as
        # LossScaleOptimizerV1 will be removed soon but deserializing it will
        # still be supported.
        'lossscaleoptimizerv1': loss_scale_optimizer.LossScaleOptimizer,
    }

    # Make deserialization case-insensitive for built-in optimizers.
    name = config['class_name']
    if name.lower() in all_classes:
        config['class_name'] = name.lower()
    optimizer_kwargs = {
        'module_objects': all_classes,
        'custom_objects': custom_objects,
        'printable_module_name': 'optimizer',
    }
    return deserialize_keras_object(config, **optimizer_kwargs)
예제 #26
0
def deserialize(name, custom_objects=None):
    """Returns activation function given a string identifier.

    Args:
      name: The name of the activation function.
      custom_objects: Optional `{function_name: function_obj}`
        dictionary listing user-provided activation functions.

    Returns:
        Corresponding activation function.

    For example:

    >>> tf.keras.activations.deserialize('linear')
     <function linear at 0x1239596a8>
    >>> tf.keras.activations.deserialize('sigmoid')
     <function sigmoid at 0x123959510>
    >>> tf.keras.activations.deserialize('abcd')
    Traceback (most recent call last):
    ...
    ValueError: Unknown activation function:abcd

    Raises:
        ValueError: `Unknown activation function` if the input string does
        not denote any defined Tensorflow activation function.
    """
    globs = globals()

    # Copy in advanced-activation names only when they are missing, so a
    # local definition wins on any collision.
    for key, val in advanced_activations.get_globals().items():
        globs.setdefault(key, val)

    return deserialize_keras_object(
        name,
        module_objects=globs,
        custom_objects=custom_objects,
        printable_module_name='activation function')
예제 #27
0
File: json_utils.py  Project: ttigong/keras
def _decode_helper(obj,
                   deserialize=False,
                   module_objects=None,
                   custom_objects=None):
    """A decoding helper that is TF-object aware.

  Args:
    obj: A decoded dictionary that may represent an object.
    deserialize: Boolean, defaults to False. When True, deserializes any Keras
      objects found in `obj`.
    module_objects: A dictionary of built-in objects to look the name up in.
      Generally, `module_objects` is provided by midlevel library implementers.
    custom_objects: A dictionary of custom objects to look the name up in.
      Generally, `custom_objects` is provided by the end user.

  Returns:
    The decoded object.
  """
    if isinstance(obj, dict) and 'class_name' in obj:
        if obj['class_name'] == 'TensorShape':
            return tf.TensorShape(obj['items'])
        elif obj['class_name'] == 'TypeSpec':
            return type_spec.lookup(obj['type_spec'])._deserialize(  # pylint: disable=protected-access
                _decode_helper(obj['serialized']))
        elif obj['class_name'] == '__tuple__':
            return tuple(_decode_helper(i) for i in obj['items'])
        elif obj['class_name'] == '__ellipsis__':
            return Ellipsis
        elif deserialize and '__passive_serialization__' in obj:
            # __passive_serialization__ is added by the JSON encoder when encoding
            # an object that has a `get_config()` method.
            try:
                return generic_utils.deserialize_keras_object(
                    obj,
                    module_objects=module_objects,
                    custom_objects=custom_objects)
            except ValueError:
                pass
    return obj
예제 #28
0
def test_MaskLoss():
    """Masked BCE equals plain BCE computed on the unmasked entries only."""
    l = closs.binary_crossentropy_masked
    y_pred = np.array([0, 0.2, 0.6, 0.4, 1])
    y_true = np.array([1, 0, -1, 1, 0.0])

    keep = y_true != MASK_VALUE
    y_true_mask = K.cast(y_true[keep], K.floatx())
    y_pred_mask = K.cast(y_pred[keep], K.floatx())

    res = K.eval(l(y_true, y_pred))
    res_mask = K.eval(kloss.binary_crossentropy(y_true_mask, y_pred_mask))
    assert np.allclose(res, res_mask)

    # test serialization: the round-tripped loss must compute the same value
    s = serialize_keras_object(l)
    a = deserialize_keras_object(s)
    res2 = K.eval(a(y_true, y_pred))
    assert np.allclose(res, res2)
예제 #29
0
def test_serialization():
    """GAMSmooth must survive model- and layer-level serialization."""
    seq_length = 100
    input_shape = (None, seq_length, 4)  # (batch_size, steps, input_dim)

    conv_l = kl.Conv1D(
        filters=15,
        kernel_size=11,
        padding="valid",
        activation="relu",
        batch_input_shape=input_shape,
    )

    # output_shape: (batch_size, new_steps, filters);
    # new_steps = length along the sequence, may change due to padding.
    model = Sequential()
    model.add(conv_l)
    model.add(cl.GAMSmooth())
    model.compile(optimizer="adam", loss="mse", metrics=["mse"])
    js = model.to_json()
    a = model_from_json(js)
    assert np.all(a.layers[1].get_weights()[0] == 0)

    # check just layer serialization:
    conv_l.build(input_shape)
    s = serialize_keras_object(cl.GAMSmooth())
    a = deserialize_keras_object(s, custom_objects={"Conv1D": kl.Conv1D})

    # serialization was successful
    assert isinstance(a.get_config(), dict)
예제 #30
0
def deserialize(name, custom_objects=None):
    """Return the loss function or class identified by `name`."""
    loss_kwargs = {
        'module_objects': globals(),
        'custom_objects': custom_objects,
        'printable_module_name': 'loss function',
    }
    return deserialize_keras_object(name, **loss_kwargs)
예제 #31
0
 def from_config(cls, config):
   """Recreate the wrapper by deserializing its stored inner layer."""
   inner_layer = generic_utils.deserialize_keras_object(config['inner_layer'])
   return cls(inner_layer)
예제 #32
0
def deserialize(name, custom_objects=None):
    """Inverse of `serialize`: map `name` to a loss function or class."""
    module_objects = globals()
    return deserialize_keras_object(name,
                                    module_objects=module_objects,
                                    custom_objects=custom_objects,
                                    printable_module_name='loss function')