Example #1
0
def _assert_in_default_state(t):
    """Verify that no distribution strategy or replica context is active.

    Args:
        t: a test-case instance; its `assert*` helpers are used to check
            that `ds_context` reports the default (no-strategy) state.
    """
    ctx = ds_context
    # The current replica context must be the default one, with no
    # cross-replica context entered.
    t.assertIs(ctx._get_default_replica_context(), ctx.get_replica_context())
    t.assertIs(None, ctx.get_cross_replica_context())
    t.assertFalse(ctx.in_cross_replica_context())
    # No explicit strategy has been entered: the default strategy is
    # returned, and has_strategy() reports False.
    t.assertIs(ctx._get_default_strategy(), ctx.get_strategy())
    t.assertFalse(ctx.has_strategy())
def _assert_in_default_state(t):
  """Check that `ds_context` is in its untouched default state.

  Args:
    t: test case whose assertion helpers perform the checks.
  """
  default_replica = ds_context._get_default_replica_context()
  # Default replica context is current; nothing cross-replica is active.
  t.assertIs(default_replica, ds_context.get_replica_context())
  t.assertIs(None, ds_context.get_cross_replica_context())
  t.assertFalse(ds_context.in_cross_replica_context())
  default_strategy = ds_context._get_default_strategy()
  # The default strategy is in effect, but no strategy was entered.
  t.assertIs(default_strategy, ds_context.get_strategy())
  t.assertFalse(ds_context.has_strategy())
Example #3
0
def save(model, filepath, overwrite, include_optimizer, signatures=None,
         options=None, save_traces=True):
  """Saves a model as a SavedModel to the filepath.

  Args:
    model: Keras model instance to be saved.
    filepath: String path to save the model.
    overwrite: whether to overwrite the existing filepath.
    include_optimizer: If True, save the model's optimizer state.
    signatures: Signatures to save with the SavedModel. Applicable to the 'tf'
      format only. Please see the `signatures` argument in
      `tf.saved_model.save` for details.
    options: (only applies to SavedModel format) `tf.saved_model.SaveOptions`
      object that specifies options for saving to SavedModel.
    save_traces: (only applies to SavedModel format) When enabled, the
      SavedModel will store the function traces for each layer. This
      can be disabled, so that only the configs of each layer are stored.
      Defaults to `True`. Disabling this will decrease serialization time
      and reduce file size, but it requires that all custom layers/models
      implement a `get_config()` method.

  Raises:
    ValueError: if the model's inputs have not been defined.
  """
  # Bail out early when the target exists and the user declines to overwrite.
  if not overwrite and os.path.exists(filepath):
    if not ask_to_proceed_with_overwrite(filepath):
      return

  # Tracing requires defined model inputs; only enforced when traces are saved.
  if save_traces and save_impl.should_skip_serialization(model):
    saving_utils.raise_model_input_error(model)

  # Temporarily detach the optimizer so its state is not serialized.
  orig_optimizer = None
  if not include_optimizer:
    orig_optimizer = model.optimizer
    model.optimizer = None

  # Trace all functions and signatures with `training=0` instead of using an
  # already-set learning phase placeholder.
  # This is needed for compatibility reasons until learning phase setting
  # is removed from the public apis.
  with K.deprecated_internal_learning_phase_scope(0):
    # When saving a model involving batch norm layer within a strategy scope,
    # the replica context is not available when calling `add_update()`, and
    # thus we use the default replica context here.
    with distribution_strategy_context._get_default_replica_context():  # pylint: disable=protected-access
      with utils.keras_option_scope(save_traces):
        save_lib.save(model, filepath, signatures, options)

  # Reattach the optimizer we detached above.
  if not include_optimizer:
    model.optimizer = orig_optimizer
  def testMergeCall(self):
    """merge_call from the default replica context invokes the merge fn.

    Verifies that, inside the merge function, the context switches to
    cross-replica mode with the default strategy, and that the default
    state is restored afterwards.
    """
    _assert_in_default_state(self)

    def check_and_tag(dist, suffix):
      # While merging we are cross-replica: no replica context, the
      # default strategy is current, yet has_strategy() is still False.
      self.assertIs(ds_context._get_default_strategy(), dist)
      self.assertIs(None, ds_context.get_replica_context())
      self.assertIs(dist, ds_context.get_cross_replica_context())
      self.assertTrue(ds_context.in_cross_replica_context())
      self.assertIs(dist, ds_context.get_strategy())
      self.assertFalse(ds_context.has_strategy())
      return "foo_" + suffix

    default_ctx = ds_context.get_replica_context()
    self.assertIs(ds_context._get_default_replica_context(), default_ctx)
    self.assertEqual("foo_bar",
                     default_ctx.merge_call(check_and_tag, args=("bar",)))
    _assert_in_default_state(self)
Example #5
0
  def testMergeCall(self):
    """Checks the context seen by a merge function run via merge_call."""
    _assert_in_default_state(self)

    def verify_merge_context(strategy, tag):
      # Inside the merge fn the cross-replica context is active for the
      # default strategy, but has_strategy() still reports False because
      # no strategy scope was explicitly entered.
      self.assertIs(ds_context._get_default_strategy(), strategy)
      self.assertIs(None, ds_context.get_replica_context())
      self.assertIs(strategy, ds_context.get_cross_replica_context())
      self.assertTrue(ds_context.in_cross_replica_context())
      self.assertIs(strategy, ds_context.get_strategy())
      self.assertFalse(ds_context.has_strategy())
      return "foo_" + tag

    replica_context = ds_context.get_replica_context()
    self.assertIs(ds_context._get_default_replica_context(), replica_context)
    result = replica_context.merge_call(verify_merge_context, args=("bar",))
    self.assertEqual("foo_bar", result)
    # After merge_call returns, everything is back to the default state.
    _assert_in_default_state(self)
Example #6
0
def save(model,
         filepath,
         overwrite,
         include_optimizer,
         signatures=None,
         options=None):
    """Saves a model as a SavedModel to the filepath.

    Args:
        model: Keras model instance to be saved.
        filepath: String path to save the model.
        overwrite: whether to overwrite the existing filepath.
        include_optimizer: If True, save the model's optimizer state.
        signatures: Signatures to save with the SavedModel. Applicable to
            the 'tf' format only. Please see the `signatures` argument in
            `tf.saved_model.save` for details.
        options: Optional `tf.saved_model.SaveOptions` object that
            specifies options for saving to SavedModel.

    Raises:
        ValueError: if the model's inputs have not been defined.
    """
    # Bail out early when the target exists and the user declines to
    # overwrite it.
    if not overwrite and os.path.exists(filepath):
        if not ask_to_proceed_with_overwrite(filepath):
            return

    # Serialization needs defined model inputs.
    if save_impl.should_skip_serialization(model):
        saving_utils.raise_model_input_error(model)

    # Temporarily detach the optimizer so its state is not serialized.
    orig_optimizer = None
    if not include_optimizer:
        orig_optimizer = model.optimizer
        model.optimizer = None

    # Trace all functions and signatures with `training=0` instead of using
    # the default learning phase placeholder.
    with K.learning_phase_scope(0):
        # When saving a model involving batch norm layer within a strategy
        # scope, the replica context is not available when calling
        # `add_update()`, and thus we use the default replica context here.
        with distribution_strategy_context._get_default_replica_context():  # pylint: disable=protected-access
            save_lib.save(model, filepath, signatures, options)

    # Reattach the optimizer we detached above.
    if not include_optimizer:
        model.optimizer = orig_optimizer