    def test_saving_sequential_model(self):
        with self.cached_session():
            model = keras.models.Sequential()
            model.add(keras.layers.Dense(2, input_shape=(3, )))
            model.add(keras.layers.RepeatVector(3))
            model.add(keras.layers.TimeDistributed(keras.layers.Dense(3)))
            model.compile(loss=keras.losses.MSE,
                          optimizer=rmsprop.RMSprop(lr=0.0001),
                          metrics=[keras.metrics.categorical_accuracy],
                          sample_weight_mode='temporal',
                          run_eagerly=testing_utils.should_run_eagerly(),
                          experimental_run_tf_function=testing_utils.should_run_tf_function())
            x = np.random.random((1, 3))
            y = np.random.random((1, 3, 3))
            model.train_on_batch(x, y)

            ref_y = model.predict(x)

            saved_model_dir = self._save_model_dir()
            keras_saved_model.export_saved_model(model, saved_model_dir)

            loaded_model = keras_saved_model.load_from_saved_model(
                saved_model_dir)
            y = loaded_model.predict(x)
            self.assertAllClose(ref_y, y, atol=1e-05)
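
For context (not part of the original tests): export_saved_model / load_from_saved_model above are the experimental Keras SavedModel helpers, which were later deprecated. A minimal sketch of the same round trip through the public tf.keras API (assuming TF 2.x; the path and variable names are illustrative only):

import numpy as np
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Dense(2, input_shape=(3,)),
    tf.keras.layers.RepeatVector(3),
    tf.keras.layers.TimeDistributed(tf.keras.layers.Dense(3)),
])
x = np.random.random((1, 3))
ref_y = model.predict(x)

path = "/tmp/sequential_saved_model"  # illustrative path
model.save(path, save_format="tf")    # export to the SavedModel format
loaded = tf.keras.models.load_model(path)
np.testing.assert_allclose(ref_y, loaded.predict(x), atol=1e-5)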

    def test_saving_functional_model(self):
        with self.cached_session():
            inputs = keras.layers.Input(shape=(3, ))
            x = keras.layers.Dense(2)(inputs)
            output = keras.layers.Dense(3)(x)

            model = keras.models.Model(inputs, output)
            model.compile(loss=keras.losses.MSE,
                          optimizer=rmsprop.RMSprop(lr=0.0001),
                          metrics=[keras.metrics.categorical_accuracy],
                          run_eagerly=testing_utils.should_run_eagerly(),
                          experimental_run_tf_function=testing_utils.should_run_tf_function())
            x = np.random.random((1, 3))
            y = np.random.random((1, 3))
            model.train_on_batch(x, y)

            ref_y = model.predict(x)

            saved_model_dir = self._save_model_dir()
            keras_saved_model.export_saved_model(model, saved_model_dir)
            loaded_model = keras_saved_model.load_from_saved_model(
                saved_model_dir)

            y = loaded_model.predict(x)
            self.assertAllClose(ref_y, y, atol=1e-05)

  def _load_and_run_model(self, distribution, saved_dir, predict_dataset,
                          output_name, experimental_run_tf_function):
    restored_keras_model = saved_model.load_from_saved_model(saved_dir)
    restored_keras_model._experimental_run_tf_function = (
        experimental_run_tf_function)
    return restored_keras_model.predict(predict_dataset,
                                        steps=test_base.PREDICT_STEPS)

Example #4

    def DISABLED_test_vocabulary_persistence_across_saving(self):
        vocab_data = ["earth", "wind", "and", "fire"]
        input_array = np.array([["earth", "wind", "and", "fire"],
                                ["fire", "and", "earth", "michigan"]])
        expected_output = [[2, 3, 4, 5], [5, 4, 2, 1]]

        # Build and validate a golden model.
        input_data = keras.Input(shape=(None, ), dtype=dtypes.string)
        layer = get_layer_class()(max_tokens=None)
        layer.set_vocabulary(vocab_data)
        int_data = layer(input_data)
        model = keras.Model(inputs=input_data, outputs=int_data)
        output_dataset = model.predict(input_array)
        self.assertAllEqual(output_dataset, expected_output)

        # Save the model to disk.
        output_path = os.path.join(self.get_temp_dir(), "tf_keras_saved_model")
        model.save(output_path, save_format="tf")
        loaded_model = saving.load_from_saved_model(
            output_path, custom_objects={"IndexLookup": get_layer_class()})

        # Ensure that the loaded model is unique (so that the save/load is real)
        self.assertIsNot(model, loaded_model)

        # Validate correctness of the new model.
        new_output_dataset = loaded_model.predict(input_array)
        self.assertAllEqual(new_output_dataset, expected_output)
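
The test above exercises the internal IndexLookup layer via get_layer_class(). As a rough public-API analogue (an assumption, not part of the original suite), the same vocabulary-persistence round trip can be sketched with tf.keras.layers.StringLookup (TF 2.6+); the path and names are illustrative:

import numpy as np
import tensorflow as tf

vocab = ["earth", "wind", "and", "fire"]
inputs = tf.keras.Input(shape=(None,), dtype=tf.string)
outputs = tf.keras.layers.StringLookup(vocabulary=vocab)(inputs)
model = tf.keras.Model(inputs, outputs)

data = np.array([["earth", "wind", "and", "fire"],
                 ["fire", "and", "earth", "michigan"]])
ref = model.predict(data)

path = "/tmp/string_lookup_saved_model"  # illustrative path
model.save(path, save_format="tf")
reloaded = tf.keras.models.load_model(path)
# The vocabulary is serialized with the SavedModel, so the lookups match exactly.
np.testing.assert_array_equal(ref, reloaded.predict(data))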

    def test_saving_with_tf_optimizer(self):
        model = keras.models.Sequential()
        model.add(keras.layers.Dense(2, input_shape=(3, )))
        model.add(keras.layers.Dense(3))
        model.compile(loss='mse',
                      optimizer=training_module.RMSPropOptimizer(0.1),
                      metrics=['acc'])

        x = np.random.random((1, 3))
        y = np.random.random((1, 3))
        model.train_on_batch(x, y)
        ref_y = model.predict(x)

        saved_model_dir = self._save_model_dir()
        keras_saved_model.export_saved_model(model, saved_model_dir)
        loaded_model = keras_saved_model.load_from_saved_model(saved_model_dir)
        loaded_model.compile(loss='mse',
                             optimizer=training_module.RMSPropOptimizer(0.1),
                             metrics=['acc'],
                             run_eagerly=testing_utils.should_run_eagerly(),
                             experimental_run_tf_function=testing_utils.should_run_tf_function())
        y = loaded_model.predict(x)
        self.assertAllClose(ref_y, y, atol=1e-05)

        # test that new updates are the same with both models
        x = np.random.random((1, 3))
        y = np.random.random((1, 3))

        ref_loss = model.train_on_batch(x, y)
        loss = loaded_model.train_on_batch(x, y)
        self.assertAllClose(ref_loss, loss, atol=1e-05)

        ref_y = model.predict(x)
        y = loaded_model.predict(x)
        self.assertAllClose(ref_y, y, atol=1e-05)

        # test saving/loading again
        saved_model_dir2 = self._save_model_dir('saved_model_2')
        keras_saved_model.export_saved_model(loaded_model, saved_model_dir2)
        loaded_model = keras_saved_model.load_from_saved_model(
            saved_model_dir2)
        y = loaded_model.predict(x)
        self.assertAllClose(ref_y, y, atol=1e-05)
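
The test above recompiles the loaded model before training it further. As a contrast (a sketch using the public TF 2.x API, not part of this test): saving with model.save() and a tf.keras optimizer also restores the compile state and optimizer weights, so the reloaded model can keep training without being recompiled. Path and names below are illustrative.

import numpy as np
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Dense(2, input_shape=(3,)),
    tf.keras.layers.Dense(3),
])
model.compile(loss="mse", optimizer=tf.keras.optimizers.RMSprop(0.1), metrics=["mae"])
x = np.random.random((1, 3))
y = np.random.random((1, 3))
model.train_on_batch(x, y)

path = "/tmp/tf_optimizer_saved_model"  # illustrative path
model.save(path, save_format="tf")
loaded = tf.keras.models.load_model(path)
loaded.train_on_batch(x, y)  # no recompile needed; the optimizer was restored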

def _save_restore_saved_model(model):
    tmpdir = tempfile.mkdtemp()
    saved_model_experimental.export_saved_model(model, tmpdir)

    with prune.prune_scope():
        loaded_model = saved_model_experimental.load_from_saved_model(tmpdir)

    loaded_model.compile(loss='categorical_crossentropy',
                         optimizer='sgd',
                         metrics=['accuracy'])
    return loaded_model
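
_save_restore_saved_model round-trips a (typically pruned) Keras model through the experimental SavedModel export, loading it inside prune_scope() so the pruning wrappers can be deserialized. A hypothetical usage sketch, assuming the TensorFlow Model Optimization pruning API (tfmot.sparsity.keras) and the helper defined above; the model and names are illustrative:

import tensorflow as tf
import tensorflow_model_optimization as tfmot

base = tf.keras.Sequential([
    tf.keras.layers.Dense(10, activation="softmax", input_shape=(4,)),
])
pruned = tfmot.sparsity.keras.prune_low_magnitude(base)
pruned.compile(loss="categorical_crossentropy", optimizer="sgd", metrics=["accuracy"])

restored = _save_restore_saved_model(pruned)  # a freshly loaded, recompiled copy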

Example #7

  def test_saving_sequential_model_without_compile(self):
    with self.cached_session():
      model = keras.models.Sequential()
      model.add(keras.layers.Dense(2, input_shape=(3,)))
      model.add(keras.layers.RepeatVector(3))
      model.add(keras.layers.TimeDistributed(keras.layers.Dense(3)))

      x = np.random.random((1, 3))
      ref_y = model.predict(x)

      saved_model_dir = self._save_model_dir()
      keras_saved_model.export_saved_model(model, saved_model_dir)
      loaded_model = keras_saved_model.load_from_saved_model(saved_model_dir)

      y = loaded_model.predict(x)
      self.assertAllClose(ref_y, y, atol=1e-05)

Example #8

  def test_saving_functional_model_without_compile(self):
    with self.cached_session():
      inputs = keras.layers.Input(shape=(3,))
      x = keras.layers.Dense(2)(inputs)
      output = keras.layers.Dense(3)(x)

      model = keras.models.Model(inputs, output)

      x = np.random.random((1, 3))
      y = np.random.random((1, 3))

      ref_y = model.predict(x)

      saved_model_dir = self._save_model_dir()
      keras_saved_model.export_saved_model(model, saved_model_dir)
      loaded_model = keras_saved_model.load_from_saved_model(saved_model_dir)

      y = loaded_model.predict(x)
      self.assertAllClose(ref_y, y, atol=1e-05)

Example #9

  def DISABLED_test_vocabulary_persistence_across_saving_with_tfidf(self):
    vocab_data = ["earth", "wind", "and", "fire"]
    tfidf_data = [.5, .25, .2, .125]
    input_array = np.array([["earth", "wind", "and", "earth"],
                            ["ohio", "fire", "earth", "michigan"]])

    # pyformat: disable
    # pylint: disable=bad-whitespace
    expected_output = [[ 0,  1, .25, .2,    0],
                       [.1, .5,   0,  0, .125]]
    # pylint: enable=bad-whitespace
    # pyformat: enable

    # Build and validate a golden model.
    input_data = keras.Input(shape=(None,), dtype=dtypes.string)
    layer = get_layer_class()(
        max_tokens=5,
        standardize=None,
        split=None,
        output_mode=index_lookup.TFIDF)
    layer.set_vocabulary(vocab_data, df_data=tfidf_data, oov_df_value=.05)

    int_data = layer(input_data)
    model = keras.Model(inputs=input_data, outputs=int_data)
    output_dataset = model.predict(input_array)
    self.assertAllClose(output_dataset, expected_output)

    # Save the model to disk.
    output_path = os.path.join(self.get_temp_dir(), "tf_keras_saved_model")
    model.save(output_path, save_format="tf")
    loaded_model = saving.load_from_saved_model(
        output_path, custom_objects={"IndexLookup": get_layer_class()})

    # Ensure that the loaded model is unique (so that the save/load is real)
    self.assertIsNot(model, loaded_model)

    # Validate correctness of the new model.
    new_output_dataset = loaded_model.predict(input_array)
    self.assertAllClose(new_output_dataset, expected_output)

  def _load_and_run_model(self, distribution, saved_dir, predict_dataset,
                          output_name, run_distributed):
    restored_keras_model = saved_model.load_from_saved_model(saved_dir)
    restored_keras_model._run_distributed = run_distributed
    return restored_keras_model.predict(predict_dataset,
                                        steps=test_base.PREDICT_STEPS)