Example #1: test helper that clones and rebuilds a compiled model, first with fresh placeholders and then with new input tensors.
  def _clone_and_build_test_helper(self, model, model_type):
    inp = np.random.random((10, 4))
    out = np.random.random((10, 4))

    is_subclassed = (model_type == 'subclass')

    # With placeholder creation
    new_model = models.clone_and_build_model(
        model, compile_clone=True, in_place_reset=is_subclassed)

    self._assert_same_compile_params(new_model)
    new_model.train_on_batch(inp, out)
    new_model.evaluate(inp, out)

    # Create new tensors for inputs.
    input_a = keras.Input(shape=(4,), name='a')
    new_model = models.clone_and_build_model(
        model, input_tensors=input_a, compile_clone=True,
        in_place_reset=is_subclassed)
    self._assert_same_compile_params(new_model)
    new_model.train_on_batch(inp, out)
    new_model.evaluate(inp, out)

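    # target_tensors=None is explicit here; new target placeholders are still
    # created when the clone is compiled.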
    new_model = models.clone_and_build_model(
        model,
        input_tensors=input_a,
        target_tensors=None,
        compile_clone=True,
        in_place_reset=is_subclassed)
    self._assert_same_compile_params(new_model)
    new_model.train_on_batch(inp, out)
    new_model.evaluate(inp, out)
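
For context on the in_place_reset flag used throughout these helpers: a subclassed model has no static layer graph to clone from a config, so clone_and_build_model must reset and rebuild the clone in place. A minimal sketch of such a model (hypothetical, not taken from the tests above):

import tensorflow as tf

class SubclassedModel(tf.keras.Model):
  """Toy subclassed model; its topology exists only in Python code."""

  def __init__(self):
    super(SubclassedModel, self).__init__()
    self.dense = tf.keras.layers.Dense(4)

  def call(self, inputs):
    return self.dense(inputs)

Cloning such a model without in_place_reset=True is rejected, since there is no functional graph to traverse.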
Example #2: variant of the helper that clears the Keras session first and also clones with new target tensors.
  def _clone_and_build_test_helper(self, model, is_subclassed=False):
    inp = np.random.random((10, 4))
    out = np.random.random((10, 4))

    # Everything should work in a new session.
    keras.backend.clear_session()

    with self.cached_session():
      # With placeholder creation
      new_model = models.clone_and_build_model(
          model, compile_clone=True, in_place_reset=is_subclassed)

      self._assert_same_compile_params(new_model)
      new_model.train_on_batch(inp, out)
      new_model.evaluate(inp, out)

      # Create new tensors for inputs and targets
      input_a = keras.Input(shape=(4,), name='a')
      new_model = models.clone_and_build_model(
          model, input_tensors=input_a, compile_clone=True,
          in_place_reset=is_subclassed)
      self._assert_same_compile_params(new_model)
      new_model.train_on_batch(inp, out)
      new_model.evaluate(inp, out)

      target_a = keras.Input(shape=(4,), name='b')
      new_model = models.clone_and_build_model(
          model, input_tensors=input_a, target_tensors=[target_a],
          compile_clone=True, in_place_reset=is_subclassed)
      self._assert_same_compile_params(new_model)
      new_model.train_on_batch(inp, out)
      new_model.evaluate(inp, out)
Example #3: the same helper keyed on a model_type string, cloning with new input and target tensors.
  def _clone_and_build_test_helper(self, model, model_type):
    inp = np.random.random((10, 4))
    out = np.random.random((10, 4))

    is_subclassed = (model_type == 'subclass')

    # With placeholder creation
    new_model = models.clone_and_build_model(
        model, compile_clone=True, in_place_reset=is_subclassed)

    self._assert_same_compile_params(new_model)
    new_model.train_on_batch(inp, out)
    new_model.evaluate(inp, out)

    # Create new tensors for inputs and targets
    input_a = keras.Input(shape=(4,), name='a')
    new_model = models.clone_and_build_model(
        model, input_tensors=input_a, compile_clone=True,
        in_place_reset=is_subclassed)
    self._assert_same_compile_params(new_model)
    new_model.train_on_batch(inp, out)
    new_model.evaluate(inp, out)

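    # Now supply a new target tensor as well as the new input.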
    target_a = keras.Input(shape=(4,), name='b')
    new_model = models.clone_and_build_model(
        model, input_tensors=input_a, target_tensors=[target_a],
        compile_clone=True, in_place_reset=is_subclassed)
    self._assert_same_compile_params(new_model)
    new_model.train_on_batch(inp, out)
    new_model.evaluate(inp, out)
Example #4: estimator integration that clones and builds a Keras model for the given ModeKeys mode, compiling the clone except in PREDICT mode.
def _clone_and_build_model(mode,
                           keras_model,
                           custom_objects,
                           features=None,
                           labels=None):
    """Clone and build the given keras_model.

  Args:
    mode: training mode.
    keras_model: an instance of compiled keras model.
    custom_objects: Dictionary for custom objects.
    features: Dict of tensors.
    labels: Dict of tensors, or single tensor instance.

  Returns:
    The newly built model.
  """
    # Set to True during training, False for inference or testing.
    K.set_learning_phase(mode == model_fn_lib.ModeKeys.TRAIN)
    input_tensors, target_tensors = _convert_estimator_io_to_keras(
        keras_model, features, labels)
    return models.clone_and_build_model(
        keras_model,
        input_tensors,
        target_tensors,
        custom_objects,
        compile_clone=(mode != model_fn_lib.ModeKeys.PREDICT),
        in_place_reset=(not keras_model._is_graph_network))
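
The _convert_estimator_io_to_keras helper is internal to the estimator integration; roughly, it maps the estimator's feature and label structures onto the model's named inputs and outputs. A purely hypothetical illustration of the idea, not the real implementation:

def convert_io_sketch(keras_model, features, labels):
  # Order feature tensors by the model's declared input names.
  input_tensors = [features[name] for name in keras_model.input_names]
  # A single label tensor maps to the sole output; a dict maps by output name.
  if isinstance(labels, dict):
    target_tensors = [labels[name] for name in keras_model.output_names]
  else:
    target_tensors = [labels]
  return input_tensors, target_tensors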
Example #5: negative test; cloning an uncompiled model with compile_clone=True raises, and an uncompiled clone must itself be compiled before training or evaluation.
    def test_clone_and_build_non_compiled_model(self):
        inp = np.random.random((10, 4))
        out = np.random.random((10, 4))

        model = _get_model()

        with self.assertRaisesRegexp(ValueError, 'has not been compiled'):
            models.clone_and_build_model(model, compile_clone=True)

        is_subclassed = (testing_utils.get_model_type() == 'subclass')
        # With placeholder creation
        new_model = models.clone_and_build_model(model,
                                                 compile_clone=False,
                                                 in_place_reset=is_subclassed)
        with self.assertRaisesRegexp(RuntimeError, 'must compile'):
            new_model.evaluate(inp, out)
        with self.assertRaisesRegexp(RuntimeError, 'must compile'):
            new_model.train_on_batch(inp, out)
        new_model.compile(
            testing_utils.get_v2_optimizer('rmsprop'),
            'mse',
            run_eagerly=testing_utils.should_run_eagerly(),
            run_distributed=testing_utils.should_run_distributed())
        new_model.train_on_batch(inp, out)

        # Create new tensors for inputs and targets
        input_a = keras.Input(shape=(4, ))
        target_a = keras.Input(shape=(4, ))
        new_model = models.clone_and_build_model(model,
                                                 input_tensors=input_a,
                                                 target_tensors=[target_a],
                                                 compile_clone=False,
                                                 in_place_reset=is_subclassed)
        with self.assertRaisesRegexp(RuntimeError, 'must compile'):
            new_model.evaluate(inp, out)
        with self.assertRaisesRegexp(RuntimeError, 'must compile'):
            new_model.train_on_batch(inp, out)
        new_model.compile(
            testing_utils.get_v2_optimizer('rmsprop'),
            'mse',
            run_eagerly=testing_utils.should_run_eagerly(),
            run_distributed=testing_utils.should_run_distributed())
        new_model.train_on_batch(inp, out)
Example #6: cloning into a different graph fails with the original optimizer object but succeeds when an optimizer_config dictionary is passed instead.
 def test_clone_optimizer_in_different_graph(self):
   with ops.Graph().as_default():
     with self.session():
       model = testing_utils.get_small_sequential_mlp(3, 4)
       optimizer = keras.optimizer_v2.adam.Adam()
        model.compile(
            optimizer, 'mse', metrics=['acc', metrics.categorical_accuracy])
       model.fit(
           x=np.array([[1., 2., 3., 4.]]),
           y=np.array([[1., 1., 1., 1.]]),
           epochs=1)
       optimizer_config = optimizer.get_config()
   with ops.Graph().as_default():
     with self.session():
       with self.assertRaisesRegex(ValueError, 'Cannot use the given session'):
         models.clone_and_build_model(model, compile_clone=True)
       # The optimizer_config object allows the model to be cloned in a
       # different graph.
       models.clone_and_build_model(model, compile_clone=True,
                                    optimizer_config=optimizer_config)
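
The optimizer_config path works because get_config() returns a plain Python dictionary of hyperparameters with no graph-bound state, so the optimizer can be rebuilt inside the new graph. A minimal sketch of that round trip, assuming the TF 1.x keras.optimizer_v2 layout used above:

from tensorflow.python.keras.optimizer_v2 import adam

optimizer = adam.Adam(learning_rate=1e-3)
config = optimizer.get_config()           # plain dict; safe to carry across graphs
restored = adam.Adam.from_config(config)  # rebuilt inside the current graph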
Example #7: estimator integration that passes optimizer_config, tracks the framework's global step, and handles sample weights.
def _clone_and_build_model(mode,
                           keras_model,
                           custom_objects,
                           features=None,
                           labels=None,
                           optimizer_config=None):
    """Clone and build the given keras_model.

  Args:
    mode: training mode.
    keras_model: an instance of compiled keras model.
    custom_objects: Dictionary for custom objects.
    features: Dict of tensors.
    labels: Dict of tensors, or single tensor instance.
    optimizer_config: Optimizer config dictionary, returned by
      `optimizer.get_config()`. This is used when cloning a model with
      an optimizer. Since `_clone_and_build_model` is called in a different
      graph and session from the model, `optimizer.get_config()` may raise an
      error during the attempt to serialize the optimizer hyperparameter values.

  Returns:
    The newly built model.
  """
    # Set to True during training, False for inference or testing.
    K.set_learning_phase(mode == ModeKeys.TRAIN)
    input_tensors, target_tensors, sample_weight_tensors = (
        _convert_estimator_io_to_keras(keras_model, features, labels))

    compile_clone = (mode != ModeKeys.PREDICT)

    global_step = None
    if compile_clone:
        # Set iterations to the global step created by tf.train.create_global_step()
        # which is automatically run in the estimator framework.
        global_step = tf.compat.v1.train.get_or_create_global_step()
        K.track_variable(global_step)

    clone = models.clone_and_build_model(
        keras_model,
        input_tensors,
        target_tensors,
        custom_objects,
        compile_clone=compile_clone,
        in_place_reset=(not keras_model._is_graph_network),
        optimizer_iterations=global_step,
        optimizer_config=optimizer_config)

    if sample_weight_tensors is not None:
        sample_weight_tensors = training_utils.standardize_sample_weights(
            sample_weight_tensors, clone.output_names)
        # Update calculated loss (model.total_loss) to include sample weights.
        clone._compile_weights_loss_and_weighted_metrics(sample_weight_tensors)
    return clone
Example #8: negative test like Example #5, with a Sequential model built in one session and cloned in a fresh one.
  def test_clone_and_build_non_compiled_model(self):
    with self.cached_session():
      inp = np.random.random((10, 4))
      out = np.random.random((10, 4))

      model = keras.models.Sequential()
      model.add(keras.layers.Dense(4, input_shape=(4,)))
      model.add(keras.layers.BatchNormalization())
      model.add(keras.layers.Dropout(0.5))
      model.add(keras.layers.Dense(4))

    # Everything should work in a new session.
    keras.backend.clear_session()

    with self.cached_session():
      with self.assertRaisesRegexp(ValueError, 'has not been compiled'):
        models.clone_and_build_model(model, compile_clone=True)

      # With placeholder creation
      new_model = models.clone_and_build_model(model, compile_clone=False)
      with self.assertRaisesRegexp(RuntimeError, 'must compile'):
        new_model.evaluate(inp, out)
      with self.assertRaisesRegexp(RuntimeError, 'must compile'):
        new_model.train_on_batch(inp, out)
      new_model.compile('rmsprop', 'mse')
      new_model.train_on_batch(inp, out)

      # Create new tensors for inputs and targets
      input_a = keras.Input(shape=(4,))
      target_a = keras.Input(shape=(4,))
      new_model = models.clone_and_build_model(model, input_tensors=input_a,
                                               target_tensors=[target_a],
                                               compile_clone=False)
      with self.assertRaisesRegexp(RuntimeError, 'must compile'):
        new_model.evaluate(inp, out)
      with self.assertRaisesRegexp(RuntimeError, 'must compile'):
        new_model.train_on_batch(inp, out)
      new_model.compile('rmsprop', 'mse')
      new_model.train_on_batch(inp, out)
Example #9: variant of Example #5 without the run_distributed flag.
  def test_clone_and_build_non_compiled_model(self):
    inp = np.random.random((10, 4))
    out = np.random.random((10, 4))

    model = _get_model()

    with self.assertRaisesRegexp(ValueError, 'has not been compiled'):
      models.clone_and_build_model(model, compile_clone=True)

    is_subclassed = (testing_utils.get_model_type() == 'subclass')
    # With placeholder creation
    new_model = models.clone_and_build_model(
        model, compile_clone=False, in_place_reset=is_subclassed)
    with self.assertRaisesRegexp(RuntimeError, 'must compile'):
      new_model.evaluate(inp, out)
    with self.assertRaisesRegexp(RuntimeError, 'must compile'):
      new_model.train_on_batch(inp, out)
    new_model.compile(
        testing_utils.get_v2_optimizer('rmsprop'), 'mse',
        run_eagerly=testing_utils.should_run_eagerly())
    new_model.train_on_batch(inp, out)

    # Create new tensors for inputs and targets
    input_a = keras.Input(shape=(4,))
    target_a = keras.Input(shape=(4,))
    new_model = models.clone_and_build_model(
        model, input_tensors=input_a, target_tensors=[target_a],
        compile_clone=False, in_place_reset=is_subclassed)
    with self.assertRaisesRegexp(RuntimeError, 'must compile'):
      new_model.evaluate(inp, out)
    with self.assertRaisesRegexp(RuntimeError, 'must compile'):
      new_model.train_on_batch(inp, out)
    new_model.compile(
        testing_utils.get_v2_optimizer('rmsprop'), 'mse',
        run_eagerly=testing_utils.should_run_eagerly())
    new_model.train_on_batch(inp, out)
Example #10: verifies that a clone compiled with optimizer_iterations increments the supplied step variable.
  def assert_optimizer_iterations_increases(self, optimizer):
    model = _get_model()
    model.compile(
        optimizer, 'mse', metrics=['acc', metrics.categorical_accuracy],
        run_eagerly=testing_utils.should_run_eagerly())

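    # Reuse an existing variable as the clone's optimizer iteration counter.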
    global_step = keras.backend.variable(123, dtype=dtypes.int64)
    clone_model = models.clone_and_build_model(
        model, compile_clone=True, optimizer_iterations=global_step,
        in_place_reset=(testing_utils.get_model_type() == 'subclass'))

    inp = np.random.random((10, 4))
    out = np.random.random((10, 4))
    clone_model.train_on_batch(inp, out)

    self.assertEqual(K.eval(global_step), 124)
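
The assertion above relies on V2 optimizers exposing an iterations variable that increments once per parameter update; optimizer_iterations simply makes the clone count with a caller-supplied variable. A standalone illustration of the counter itself, no cloning involved (assumes TF 2.x-style eager execution):

import numpy as np
import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(4, input_shape=(4,))])
model.compile(tf.keras.optimizers.Adam(), 'mse')
model.train_on_batch(np.random.random((10, 4)), np.random.random((10, 4)))
print(int(model.optimizer.iterations.numpy()))  # 1 after a single batch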
Example #11: estimator integration that tracks the framework's global step as the clone's optimizer iterations.
def _clone_and_build_model(mode,
                           keras_model,
                           custom_objects,
                           features=None,
                           labels=None):
    """Clone and build the given keras_model.

  Args:
    mode: training mode.
    keras_model: an instance of compiled keras model.
    custom_objects: Dictionary for custom objects.
    features: Dict of tensors.
    labels: Dict of tensors, or single tensor instance.

  Returns:
    The newly built model.
  """
    # Set to True during training, False for inference or testing.
    K.set_learning_phase(mode == ModeKeys.TRAIN)
    input_tensors, target_tensors = _convert_estimator_io_to_keras(
        keras_model, features, labels)

    compile_clone = (mode != ModeKeys.PREDICT)

    global_step = None
    if compile_clone:
        # Set iterations to the global step created by tf.train.create_global_step()
        # which is automatically run in the estimator framework.
        global_step = training_util.get_or_create_global_step()
        K.track_variable(global_step)

    clone = models.clone_and_build_model(
        keras_model,
        input_tensors,
        target_tensors,
        custom_objects,
        compile_clone=compile_clone,
        in_place_reset=(not keras_model._is_graph_network),
        optimizer_iterations=global_step)

    return clone
Example #12: the same integration as Example #11, from a version that namespaces ModeKeys under model_fn_lib.
def _clone_and_build_model(mode,
                           keras_model,
                           custom_objects,
                           features=None,
                           labels=None):
  """Clone and build the given keras_model.

  Args:
    mode: training mode.
    keras_model: an instance of a compiled Keras model.
    custom_objects: Dictionary for custom objects.
    features: Dict of tensors.
    labels: Dict of tensors, or single tensor instance.

  Returns:
    The newly built model.
  """
  # Set to True during training, False for inference or testing.
  K.set_learning_phase(mode == model_fn_lib.ModeKeys.TRAIN)
  input_tensors, target_tensors = _convert_estimator_io_to_keras(
      keras_model, features, labels)

  compile_clone = (mode != model_fn_lib.ModeKeys.PREDICT)

  global_step = None
  if compile_clone:
    # Set iterations to the global step created by tf.train.create_global_step()
    # which is automatically run in the estimator framework.
    global_step = training_util.get_or_create_global_step()
    K.track_variable(global_step)

  clone = models.clone_and_build_model(
      keras_model, input_tensors, target_tensors, custom_objects,
      compile_clone=compile_clone,
      in_place_reset=(not keras_model._is_graph_network),
      optimizer_iterations=global_step)

  return clone
Example #13: functional-API variant of the optimizer_iterations test from Example #10.
  def assert_optimizer_iterations_increases(self, optimizer):
    with self.cached_session():
      input_a = keras.Input(shape=(4,))
      dense_1 = keras.layers.Dense(4,)
      dense_2 = keras.layers.Dense(4,)

      x_a = dense_1(input_a)
      x_a = keras.layers.Dropout(0.5)(x_a)
      x_a = keras.layers.BatchNormalization()(x_a)
      x_a = dense_2(x_a)
      model = keras.models.Model(input_a, x_a)
      model.compile(optimizer, 'mse',
                    metrics=['acc', metrics.categorical_accuracy])

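      # As above: reuse an existing backend variable as the iteration counter.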
      global_step = keras.backend.variable(123, dtype=dtypes.int64)
      clone_model = models.clone_and_build_model(
          model, compile_clone=True, optimizer_iterations=global_step)

      inp = np.random.random((10, 4))
      out = np.random.random((10, 4))
      clone_model.train_on_batch(inp, out)

      self.assertEqual(K.eval(global_step), 124)
Example #14: SavedModel export that clones the model into a blank graph, registers the global step, and saves or verifies variables.
def _export_mode(
    mode, has_saved_vars, builder, model, custom_objects, checkpoint_path):
  """Export a model, and optionally save new vars from the clone model.

  Args:
    mode: A `tf.estimator.ModeKeys` string.
    has_saved_vars: A `boolean` indicating whether the SavedModel has already
      exported variables.
    builder: A `SavedModelBuilder` object.
    model: A `tf.keras.Model` object.
    custom_objects: A dictionary mapping string names to custom classes
      or functions.
    checkpoint_path: String path to checkpoint.

  Raises:
    ValueError: If the train/eval mode is being exported, but the model does
      not have an optimizer.
  """
  compile_clone = (mode != model_fn_lib.ModeKeys.PREDICT)
  if compile_clone and not model.optimizer:
    raise ValueError(
        'Model does not have an optimizer. Cannot export mode %s' % mode)

  model_graph = ops.get_default_graph()
  with ops.Graph().as_default() as g:

    K.set_learning_phase(mode == model_fn_lib.ModeKeys.TRAIN)

    # Clone the model into blank graph. This will create placeholders for inputs
    # and targets.
    clone = models_lib.clone_and_build_model(
        model, custom_objects=custom_objects, compile_clone=compile_clone)

    # Make sure that iterations variable is added to the global step collection,
    # to ensure that, when the SavedModel graph is loaded, the iterations
    # variable is returned by `tf.train.get_global_step()`. This is required for
    # compatibility with the SavedModelEstimator.
    if compile_clone:
      g.add_to_collection(ops.GraphKeys.GLOBAL_STEP, clone.optimizer.iterations)

    # Extract update and train ops from train/test/predict functions.
    train_op = None
    if mode == model_fn_lib.ModeKeys.TRAIN:
      clone._make_train_function()
      train_op = clone.train_function.updates_op
    elif mode == model_fn_lib.ModeKeys.EVAL:
      clone._make_test_function()
    else:
      clone._make_predict_function()
    g.get_collection_ref(ops.GraphKeys.UPDATE_OPS).extend(clone.state_updates)

    clone_var_list = checkpointable_utils.named_saveables(clone)

    with session.Session().as_default():
      if has_saved_vars:
        # Confirm all variables in the clone have an entry in the checkpoint.
        status = clone.load_weights(checkpoint_path)
        status.assert_existing_objects_matched()
      else:
        # Confirm that variables between the clone and model match up exactly,
        # not counting optimizer objects. Optimizer objects are ignored because
        # if the model has not trained, the slot variables will not have been
        # created yet.
        # TODO(b/113179535): Replace with checkpointable equivalence.
        _assert_same_non_optimizer_objects(model, model_graph, clone, g)

        # TODO(b/113178242): Use value transfer for checkpointable objects.
        clone.load_weights(checkpoint_path)

        # Add graph and variables to SavedModel.
        # TODO(b/113134168): Switch to add_meta_graph_and_variables.
        clone.save_weights(checkpoint_path, save_format='tf', overwrite=True)
        builder._has_saved_variables = True

    # Add graph to the SavedModel builder.
    builder.add_meta_graph(
        model_fn_lib.EXPORT_TAG_MAP[mode],
        signature_def_map=_create_signature_def_map(clone, mode),
        saver=saver_lib.Saver(clone_var_list),
        init_op=variables.local_variables_initializer(),
        train_op=train_op)
    return None
Example #15: later export variant that accepts an explicit input_signature and runs under a learning-phase scope.
def _export_mode(mode, has_saved_vars, builder, model, custom_objects,
                 checkpoint_path, input_signature):
    """Exports a model, and optionally saves new vars from the clone model.

  Args:
    mode: A `tf.estimator.ModeKeys` string.
    has_saved_vars: A `boolean` indicating whether the SavedModel has already
      exported variables.
    builder: A `SavedModelBuilder` object.
    model: A `tf.keras.Model` object.
    custom_objects: A dictionary mapping string names to custom classes
      or functions.
    checkpoint_path: String path to checkpoint.
    input_signature: Nested TensorSpec containing the expected inputs. Can be
      `None`, in which case the signature will be inferred from the model.

  Raises:
    ValueError: If the train/eval mode is being exported, but the model does
      not have an optimizer.
  """
    from tensorflow.python.keras import models as models_lib  # pylint: disable=g-import-not-at-top
    compile_clone = (mode != mode_keys.ModeKeys.PREDICT)
    if compile_clone and not model.optimizer:
        raise ValueError(
            'Model does not have an optimizer. Cannot export mode %s' % mode)

    model_graph = ops.get_default_graph()
    with ops.Graph().as_default() as g, K.learning_phase_scope(
            mode == mode_keys.ModeKeys.TRAIN):

        if input_signature is None:
            input_tensors = None
        else:
            input_tensors = nest.map_structure(create_placeholder,
                                               input_signature)

        # Clone the model into blank graph. This will create placeholders for inputs
        # and targets.
        clone = models_lib.clone_and_build_model(model,
                                                 input_tensors=input_tensors,
                                                 custom_objects=custom_objects,
                                                 compile_clone=compile_clone)

        # Make sure that iterations variable is added to the global step collection,
        # to ensure that, when the SavedModel graph is loaded, the iterations
        # variable is returned by `tf.train.get_global_step()`. This is required for
        # compatibility with the SavedModelEstimator.
        if compile_clone:
            g.add_to_collection(ops.GraphKeys.GLOBAL_STEP,
                                clone.optimizer.iterations)

        # Extract update and train ops from train/test/predict functions.
        train_op = None
        if mode == mode_keys.ModeKeys.TRAIN:
            clone._make_train_function()
            train_op = clone.train_function.updates_op
        elif mode == mode_keys.ModeKeys.TEST:
            clone._make_test_function()
        else:
            clone._make_predict_function()
        g.get_collection_ref(ops.GraphKeys.UPDATE_OPS).extend(
            clone.state_updates)

        clone_var_list = checkpointable_utils.named_saveables(clone)

        with session.Session().as_default():
            if has_saved_vars:
                # Confirm all variables in the clone have an entry in the checkpoint.
                status = clone.load_weights(checkpoint_path)
                status.assert_existing_objects_matched()
            else:
                # Confirm that variables between the clone and model match up exactly,
                # not counting optimizer objects. Optimizer objects are ignored because
                # if the model has not trained, the slot variables will not have been
                # created yet.
                # TODO(b/113179535): Replace with checkpointable equivalence.
                _assert_same_non_optimizer_objects(model, model_graph, clone,
                                                   g)

                # TODO(b/113178242): Use value transfer for checkpointable objects.
                clone.load_weights(checkpoint_path)

                # Add graph and variables to SavedModel.
                # TODO(b/113134168): Switch to add_meta_graph_and_variables.
                clone.save_weights(checkpoint_path,
                                   save_format='tf',
                                   overwrite=True)
                builder._has_saved_variables = True

        # Add graph to the SavedModel builder.
        builder.add_meta_graph(model_utils.EXPORT_TAG_MAP[mode],
                               signature_def_map=_create_signature_def_map(
                                   clone, mode),
                               saver=saver_lib.Saver(clone_var_list),
                               init_op=variables.local_variables_initializer(),
                               train_op=train_op)
        return None
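
Taken together, the recurring pattern across these examples reduces to a short end-to-end flow. A sketch, assuming the TF 1.x-era tensorflow.python.keras.models module where clone_and_build_model lives:

import numpy as np
from tensorflow.python.keras import layers
from tensorflow.python.keras import models

model = models.Sequential([layers.Dense(4, input_shape=(4,))])
model.compile('rmsprop', 'mse')

# Clone with fresh input placeholders, copying the compile settings over.
clone = models.clone_and_build_model(model, compile_clone=True)
clone.train_on_batch(np.random.random((10, 4)), np.random.random((10, 4)))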