Пример #1
0
 def test_model_to_estimator(self):
   """model_to_estimator yields a v1 Estimator from a Keras ranking model."""
   ranking_model = model.create_keras_model(
       network=self.network,
       loss=self.loss,
       metrics=self.eval_metrics,
       optimizer=self.optimizer,
       size_feature_name=_SIZE)
   converted = estimator_lib.model_to_estimator(
       model=ranking_model, config=self.config)
   self.assertIsInstance(converted, tf.compat.v1.estimator.Estimator)
Пример #2
0
 def test_model_compile_keras(self):
     """The compiled Keras ranker keeps the exact optimizer/loss objects."""
     # Build the training configuration (optimizer, loss, metrics) up front.
     rms_optimizer = tf.keras.optimizers.RMSprop()
     softmax_loss = losses.SoftmaxLoss()
     ndcg_metrics = [metrics.NDCGMetric("ndcg_5", topn=5)]
     ranker = model.create_keras_model(
         network=self.network,
         loss=softmax_loss,
         metrics=ndcg_metrics,
         optimizer=rms_optimizer,
         size_feature_name=None)
     # Identity (assertIs), not equality: the same objects must be wired in.
     self.assertIs(ranker.optimizer, rms_optimizer)
     self.assertIs(ranker.loss, softmax_loss)
Пример #3
0
 def test_create_keras_model_without_padding(self):
     """Without a size feature name, no example-list-size input is created."""
     scoring_network = _DummyUnivariateRankingNetwork(
         context_feature_columns=self.context_feature_columns,
         example_feature_columns=self.example_feature_columns)
     ranker = model_lib.create_keras_model(
         network=scoring_network,
         loss=self.loss,
         metrics=self.metrics,
         optimizer=self.optimizer,
         size_feature_name=None)
     # Compile arguments pass through unchanged.
     self.assertEqual(ranker.optimizer, self.optimizer)
     self.assertEqual(ranker.loss, self.loss)
     # size_feature_name=None must suppress the padding/size model input.
     self.assertNotIn("example_list_size", ranker.input_names)
Пример #4
0
    def test_model_to_estimator(self, weights_feature_name):
        """End-to-end: Keras model -> Estimator -> train/eval -> SavedModel."""
        keras_model = model.create_keras_model(
            network=self._network,
            loss=self._loss,
            metrics=self._eval_metrics,
            optimizer=self._optimizer,
            size_feature_name=_SIZE)
        estimator = estimator_lib.model_to_estimator(
            model=keras_model,
            config=self._config,
            weights_feature_name=weights_feature_name,
            custom_objects=self._custom_objects)
        self.assertIsInstance(estimator, tf.compat.v1.estimator.Estimator)

        # A single train step plus a short eval pass proves the converted
        # estimator is runnable.
        train_spec = tf.estimator.TrainSpec(
            input_fn=self._make_input_fn(weights_feature_name), max_steps=1)
        eval_spec = tf.estimator.EvalSpec(
            name='eval',
            input_fn=self._make_input_fn(weights_feature_name),
            steps=10)
        tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)

        # Build parsing specs for the ELWC serving receiver and export.
        context_spec = tf.feature_column.make_parse_example_spec(
            self._context_feature_columns.values())
        example_spec = tf.feature_column.make_parse_example_spec(
            self._example_feature_columns.values())
        receiver_fn = data.build_ranking_serving_input_receiver_fn(
            data.ELWC,
            context_feature_spec=context_spec,
            example_feature_spec=example_spec,
            size_feature_name=_SIZE)
        export_dir = os.path.join(tf.compat.v1.test.get_temp_dir(), 'export')
        estimator.export_saved_model(export_dir, receiver_fn)

        # The single training step must have left a checkpoint behind...
        self.assertTrue(
            tf.io.gfile.exists(
                os.path.join(estimator.model_dir, 'model.ckpt-1.meta')))
        # ...and the export must have produced a SavedModel graph.
        self.assertTrue(
            tf.io.gfile.exists(
                os.path.join(export_dir,
                             tf.io.gfile.listdir(export_dir)[0],
                             'saved_model.pb')))
Пример #5
0
  def test_model_to_json(self):
    """Round-trips the ranker through to_json / model_from_json."""
    dummy_network = _DummyUnivariateRankingNetwork(
        context_feature_columns=self.context_feature_columns,
        example_feature_columns=self.example_feature_columns,
        name="dummy_univariate_ranking_network")
    ranker = model_lib.create_keras_model(
        network=dummy_network,
        loss=self.loss,
        metrics=self.metrics,
        optimizer=self.optimizer,
        size_feature_name="example_list_size")

    # Custom layers must be registered for deserialization to succeed.
    restored_ranker = tf.keras.models.model_from_json(
        ranker.to_json(),
        custom_objects={
            "GenerateMask": feature.GenerateMask,
            "_DummyUnivariateRankingNetwork": _DummyUnivariateRankingNetwork,
        })
    # The restored model must score the same features identically.
    self.assertAllEqual(restored_ranker(self.features), ranker(self.features))
Пример #6
0
    def test_model_to_estimator_missing_custom_objects(self):
        """Conversion succeeds without custom objects, but running fails."""
        keras_model = model.create_keras_model(
            network=self._network,
            loss=self._loss,
            metrics=self._eval_metrics,
            optimizer=self._optimizer,
            size_feature_name=_SIZE)
        # custom_objects=None: conversion itself still works...
        estimator = estimator_lib.model_to_estimator(
            model=keras_model, config=self._config, custom_objects=None)
        self.assertIsInstance(estimator, tf.compat.v1.estimator.Estimator)

        train_spec = tf.estimator.TrainSpec(
            input_fn=self._make_input_fn(), max_steps=1)
        eval_spec = tf.estimator.EvalSpec(
            name='eval', input_fn=self._make_input_fn(), steps=10)

        # ...but actually training is expected to raise, since the custom
        # objects needed to rebuild the model are missing.
        with self.assertRaises(AttributeError):
            tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)
Пример #7
0
  def test_model_to_saved_model_dense_inputs(self):
    """Exports a dense-feature ranker to a SavedModel and verifies that the
    imported serving signature reproduces the in-process predictions.
    """
    # TODO: Add SavedModel support for sparse inputs.
    # After adding @tf.function decorator, _predict function breaks for
    # sparse inputs to Keras model.
    example_feature_columns = {}
    example_feature_columns.update(self.example_feature_columns)
    # Remove sparse feature based EmbeddingColumn.
    del example_feature_columns["unigrams"]
    network = _DummyUnivariateRankingNetwork(
        context_feature_columns=self.context_feature_columns,
        example_feature_columns=example_feature_columns)
    ranker = model_lib.create_keras_model(
        network=network,
        loss=self.loss,
        metrics=self.metrics,
        optimizer=self.optimizer,
        size_feature_name="example_list_size")

    # Parsing specs for the serving function are derived from the network's
    # (dense-only) feature columns.
    context_feature_spec = tf.feature_column.make_parse_example_spec(
        network.context_feature_columns.values())
    example_feature_spec = tf.feature_column.make_parse_example_spec(
        network.example_feature_columns.values())

    eval_batch_size = 2

    @tf.function(input_signature=[
        tf.TensorSpec(shape=(eval_batch_size,), dtype=tf.string)
    ])
    def _predict(serialized):
      # Parse serialized example-list protos into feature tensors and score
      # them with the ranker in inference mode.
      features = data.parse_from_example_list(
          serialized,
          context_feature_spec=context_feature_spec,
          example_feature_spec=example_feature_spec,
          size_feature_name="example_list_size")
      scores = ranker(inputs=features, training=False)
      return {"predictions": scores}

    ranker.infer_from_proto = _predict

    # Export under the test's temp dir rather than a fixed "/tmp/..." path,
    # so repeated or concurrent runs cannot collide on stale artifacts.
    export_dir = tf.compat.v1.test.get_temp_dir() + "/functional_keras_model"
    tf.saved_model.save(
        ranker,
        export_dir=export_dir,
        signatures={"predict": ranker.infer_from_proto})

    # Import ranker from SavedModel and run the exported signature.
    imported = tf.saved_model.load(export_dir)
    imported_ranker_predictor = imported.signatures["predict"]
    output = imported_ranker_predictor(
        tf.convert_to_tensor([
            EXAMPLE_LIST_PROTO_1.SerializeToString(),
            EXAMPLE_LIST_PROTO_2.SerializeToString(),
        ]))["predictions"]

    features = {}
    features.update(self.features)
    # TODO: Add SavedModel support for sparse inputs.
    # After adding @tf.function decorator, _predict function breaks for
    # sparse inputs to Keras model. Hence ranker is also created and called
    # only on dense features. Removing "unigrams", a sparse feature.
    del features["unigrams"]
    self.assertAllClose(
        ranker(features, training=False).numpy(), output.numpy())
Пример #8
0
    def test_model_to_estimator(self, weights_feature_name, serving_default):
        """Converts, trains and exports with a selectable serving signature."""
        keras_model = model.create_keras_model(
            network=self._network,
            loss=self._loss,
            metrics=self._eval_metrics,
            optimizer=self._optimizer,
            size_feature_name=_SIZE)
        estimator = estimator_lib.model_to_estimator(
            model=keras_model,
            config=self._config,
            weights_feature_name=weights_feature_name,
            custom_objects=self._custom_objects,
            serving_default=serving_default)
        self.assertIsInstance(estimator, tf_compat_v1_estimator.Estimator)

        # One train step and a short eval prove the estimator is runnable.
        train_spec = tf_estimator.TrainSpec(
            input_fn=self._make_input_fn(weights_feature_name), max_steps=1)
        eval_spec = tf_estimator.EvalSpec(
            name='eval',
            input_fn=self._make_input_fn(weights_feature_name),
            steps=10)
        tf_estimator.train_and_evaluate(estimator, train_spec, eval_spec)

        context_feature_spec = tf.feature_column.make_parse_example_spec(
            self._context_feature_columns.values())
        example_feature_spec = tf.feature_column.make_parse_example_spec(
            self._example_feature_columns.values())

        def _pointwise_serving_fn():
            # Serving receiver over individual serialized tf.Examples,
            # used when serving_default is not 'predict'.
            serialized = tf.compat.v1.placeholder(
                dtype=tf.string, shape=[None], name='input_ranking_tensor')
            receiver_tensors = {'input_ranking_data': serialized}
            features = data.parse_from_tf_example(
                serialized,
                context_feature_spec=context_feature_spec,
                example_feature_spec=example_feature_spec,
                size_feature_name=_SIZE)
            return tf_estimator.export.ServingInputReceiver(
                features, receiver_tensors)

        if serving_default == 'predict':
            serving_input_receiver_fn = (
                data.build_ranking_serving_input_receiver_fn(
                    data.ELWC,
                    context_feature_spec=context_feature_spec,
                    example_feature_spec=example_feature_spec,
                    size_feature_name=_SIZE))
        else:
            serving_input_receiver_fn = _pointwise_serving_fn

        export_dir = os.path.join(tf.compat.v1.test.get_temp_dir(), 'export')
        estimator.export_saved_model(export_dir, serving_input_receiver_fn)

        # Confirm model ran and created checkpoints and saved model.
        final_ckpt_path = os.path.join(estimator.model_dir,
                                       'model.ckpt-1.meta')
        self.assertTrue(tf.io.gfile.exists(final_ckpt_path))

        saved_model_pb = os.path.join(export_dir,
                                      tf.io.gfile.listdir(export_dir)[0],
                                      'saved_model.pb')
        self.assertTrue(tf.io.gfile.exists(saved_model_pb))