Example #1
0
def serialize(optimizer):
    """Return the serializable representation of `optimizer`.

    Thin wrapper that delegates to `serialize_keras_object`.
    """
    serialized = serialize_keras_object(optimizer)
    return serialized
Example #2
0
def serialize(initializer):
    """Serialize `initializer` via `serialize_keras_object` and return the result."""
    result = serialize_keras_object(initializer)
    return result
Example #3
0
def serialize(regularizer):
    """Return the serialized form of `regularizer`.

    Delegates directly to `serialize_keras_object`.
    """
    serialized_form = serialize_keras_object(regularizer)
    return serialized_form
Example #4
0
def serialize(metric):
  """Serialize `metric` by delegating to `serialize_keras_object`."""
  serialized = serialize_keras_object(metric)
  return serialized
Example #5
0
 def get_config(self):
     """Return this wrapper's config dict.

     Merges the superclass config with the serialized
     `quantize_provider`; on key collision the entries from `config`
     (listed last) win, since later duplicate keys override earlier
     ones in the `dict(...)` construction.
     """
     base_config = super(QuantizeWrapper, self).get_config()
     config = {
         'quantize_provider': serialize_keras_object(self.quantize_provider)
     }
     # Concatenate item lists so the wrapper-specific keys take precedence.
     return dict(list(base_config.items()) + list(config.items()))
Example #6
0
def serialize(regularizer):
  """Return the serialized representation of `regularizer`."""
  config = serialize_keras_object(regularizer)
  return config
Example #7
0
def serialize(initializer):
  """Serialize `initializer`; a plain pass-through to `serialize_keras_object`."""
  serialized_initializer = serialize_keras_object(initializer)
  return serialized_initializer
Example #8
0
def serialize(constraint):
    """Return the serialized form of `constraint` using `serialize_keras_object`."""
    serialized = serialize_keras_object(constraint)
    return serialized
Example #9
0
def serialize(loss):
    """Serialize `loss` by handing it to `serialize_keras_object`."""
    result = serialize_keras_object(loss)
    return result
Example #10
0
 def get_config(self):
   """Return the wrapper's config dict.

   Combines the superclass config with the serialized wrapped
   `self.layer` under the 'layer' key; the 'layer' entry (listed
   last) overrides any same-named key from the base config.
   """
   config = {'layer': generic_utils.serialize_keras_object(self.layer)}
   base_config = super(Wrapper, self).get_config()
   # Item lists are concatenated base-first, so `config` keys win.
   return dict(list(base_config.items()) + list(config.items()))
Example #11
0
def serialize(metric):
    """Return the serialized representation of `metric`."""
    serialized_metric = serialize_keras_object(metric)
    return serialized_metric
Example #12
0
  def __init__(self,
               layer,
               merge_mode='concat',
               weights=None,
               backward_layer=None,
               **kwargs):
    """Build a `Bidirectional` wrapper around `layer`.

    Args:
      layer: a `Layer` instance used to derive the forward (and, by
        default, backward) layer. Anything else raises `ValueError`.
      merge_mode: how forward/backward outputs combine; must be one of
        'sum', 'mul', 'ave', 'concat' or None, else `ValueError`.
      weights: optional list of initial weights; the first half is
        assigned to the forward layer and the second half to the
        backward layer.
      backward_layer: optional `Layer` instance to run in reverse.
        When None, one is recreated from `layer`'s config with
        `go_backwards=True`.
      **kwargs: forwarded to the superclass `__init__`.

    Raises:
      ValueError: for a non-`Layer` `layer`/`backward_layer` or an
        invalid `merge_mode`.
    """
    if not isinstance(layer, Layer):
      raise ValueError(
          'Please initialize `Bidirectional` layer with a '
          '`Layer` instance. You passed: {input}'.format(input=layer))
    if backward_layer is not None and not isinstance(backward_layer, Layer):
      raise ValueError('`backward_layer` need to be a `Layer` instance. '
                       'You passed: {input}'.format(input=backward_layer))
    if merge_mode not in ['sum', 'mul', 'ave', 'concat', None]:
      raise ValueError('Invalid merge mode. '
                       'Merge mode should be one of '
                       '{"sum", "mul", "ave", "concat", None}')
    # We don't want to track `layer` since we're already tracking the two copies
    # of it we actually run.
    self._setattr_tracking = False
    super(Bidirectional, self).__init__(layer, **kwargs)
    self._setattr_tracking = True

    # Recreate the forward layer from the original layer config, so that it will
    # not carry over any state from the layer.
    self.forward_layer = self._recreate_layer_from_config(layer)

    if backward_layer is None:
      self.backward_layer = self._recreate_layer_from_config(
          layer, go_backwards=True)
    else:
      self.backward_layer = backward_layer
      # Keep the custom backward layer config, so that we can save it later. The
      # layer's name might be updated below with prefix 'backward_', and we want
      # to preserve the original config.
      self._backward_layer_config = generic_utils.serialize_keras_object(
          backward_layer)

    # Distinguish the two copies by name; note this mutates the private
    # `_name` attribute directly.
    self.forward_layer._name = 'forward_' + self.forward_layer.name
    self.backward_layer._name = 'backward_' + self.backward_layer.name

    # NOTE(review): `_verify_layer_config` is defined elsewhere; presumably it
    # checks forward/backward layer compatibility — confirm at its definition.
    self._verify_layer_config()

    def force_zero_output_for_mask(layer):
      # Force the zero_output_for_mask to be True if returning sequences.
      if getattr(layer, 'zero_output_for_mask', None) is not None:
        layer.zero_output_for_mask = layer.return_sequences

    force_zero_output_for_mask(self.forward_layer)
    force_zero_output_for_mask(self.backward_layer)

    self.merge_mode = merge_mode
    # Split supplied weights evenly: first half forward, second half backward.
    if weights:
      nw = len(weights)
      self.forward_layer.initial_weights = weights[:nw // 2]
      self.backward_layer.initial_weights = weights[nw // 2:]
    # Mirror key RNN attributes from the wrapped layer onto the wrapper.
    self.stateful = layer.stateful
    self.return_sequences = layer.return_sequences
    self.return_state = layer.return_state
    self.supports_masking = True
    self._trainable = True
    self._num_constants = 0
    self.input_spec = layer.input_spec
def serialize(learning_rate_schedule):
  """Return the serialized form of `learning_rate_schedule`."""
  serialized = generic_utils.serialize_keras_object(learning_rate_schedule)
  return serialized
Example #14
0
def get_serialized(obj):
    """Serialize `obj`, tolerating serialization failures.

    The resulting config dictionary may be used when reviving the
    object; if reviving from config fails at load time, the object is
    revived from the SavedModel instead.
    """
    with generic_utils.skip_failed_serialization():
        serialized = generic_utils.serialize_keras_object(obj)
    return serialized
Example #15
0
def serialize(constraint):
  """Serialize `constraint` through `serialize_keras_object`."""
  serialized_constraint = serialize_keras_object(constraint)
  return serialized_constraint
Example #16
0
def serialize(layer):
    """Return the serialized representation of `layer`."""
    serialized_layer = generic_utils.serialize_keras_object(layer)
    return serialized_layer
Example #17
0
def serialize(loss_scale):
  """Serialize `loss_scale`; delegates to `serialize_keras_object`."""
  result = serialize_keras_object(loss_scale)
  return result
 def _serialize_fn(obj):
     """Serialize `obj` when it is callable; otherwise return it unchanged."""
     if not callable(obj):
         return obj
     return generic_utils.serialize_keras_object(obj)
Example #19
0
def serialize(loss):
  """Return the serialized form of `loss`."""
  serialized_loss = serialize_keras_object(loss)
  return serialized_loss
Example #20
0
def serialize(loss_scale):
  """Return the serialized representation of `loss_scale`."""
  config = serialize_keras_object(loss_scale)
  return config
Example #21
0
def serialize(optimizer):
  """Serialize `optimizer` by delegating to `serialize_keras_object`."""
  serialized_optimizer = serialize_keras_object(optimizer)
  return serialized_optimizer
Example #22
0
def serialize(nets_fn):
    """Return the serialized form of `nets_fn` via `serialize_keras_object`."""
    serialized = serialize_keras_object(nets_fn)
    return serialized
def serialize(learning_rate_schedule):
  """Serialize `learning_rate_schedule` using `generic_utils.serialize_keras_object`."""
  result = generic_utils.serialize_keras_object(learning_rate_schedule)
  return result
Example #24
0
 def get_config(self):
     """Return a config dict holding only the serialized `inner_layer`.

     NOTE(review): unlike the other `get_config` examples here, this
     one does not merge in the superclass config.
     """
     return {
         'inner_layer':
         generic_utils.serialize_keras_object(self.inner_layer)
     }