Example #1
 def test_build_ranking_serving_input_receiver_fn(self):
   serving_input_receiver_fn = (
       data_lib.build_ranking_serving_input_receiver_fn(
           data_format=data_lib.EIE,
           context_feature_spec=CONTEXT_FEATURE_SPEC,
           example_feature_spec=EXAMPLE_FEATURE_SPEC))
   serving_input_receiver = serving_input_receiver_fn()
   self.assertCountEqual(serving_input_receiver.features.keys(),
                         ["query_length", "unigrams", "utility"])
   self.assertCountEqual(serving_input_receiver.receiver_tensors.keys(),
                         ["input_ranking_data"])
   eie_input = [_example_in_example(CONTEXT_1, EXAMPLES_1).SerializeToString()]
   with tf.compat.v1.Session() as sess:
     sess.run(tf.compat.v1.local_variables_initializer())
     features = sess.run(
         serving_input_receiver.features,
         feed_dict={
             serving_input_receiver.receiver_tensors["input_ranking_data"]
             .name:
                 eie_input
         })
     # Test dense_shape, indices and values for a SparseTensor.
     self.assertAllEqual(features["unigrams"].dense_shape, [1, 2, 3])
     self.assertAllEqual(features["unigrams"].indices,
                         [[0, 0, 0], [0, 1, 0], [0, 1, 1], [0, 1, 2]])
     self.assertAllEqual(features["unigrams"].values,
                         [b"tensorflow", b"learning", b"to", b"rank"])
     # For Tensors with dense values, values can be directly checked.
     self.assertAllEqual(features["query_length"], [[3]])
     self.assertAllEqual(features["utility"], [[[0.], [1.]]])
Example #2
    def _make_serving_input_fn(self):
        """Returns `Estimator` `input_fn` for serving the model.

    Returns:
      `input_fn` that can be used in serving. The returned input_fn takes no
      arguments and returns `InputFnOps'.
    """
        context_feature_spec = tf.feature_column.make_parse_example_spec(
            self._context_feature_columns.values())
        example_feature_spec = tf.feature_column.make_parse_example_spec(
            self._example_feature_columns.values())

        if self._export_elwc:
            # Exports accept the `ExampleListWithContext` format during serving.
            return tfr_data.build_ranking_serving_input_receiver_fn(
                data_format=tfr_data.ELWC,
                context_feature_spec=context_feature_spec,
                example_feature_spec=example_feature_spec,
                size_feature_name=self._size_feature_name)
        else:
            # Exports accept `tf.Example` format during serving.
            feature_spec = {}
            feature_spec.update(example_feature_spec)
            feature_spec.update(context_feature_spec)
            return tf.estimator.export.build_parsing_serving_input_receiver_fn(
                feature_spec)
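
With the ELWC export path, serving-time requests carry serialized ExampleListWithContext protos. A hedged sketch of assembling one request, assuming the input_pb2 module shipped with the tensorflow-serving-api package; the feature names are illustrative:

import tensorflow as tf
from tensorflow_serving.apis import input_pb2

context = tf.train.Example(features=tf.train.Features(feature={
    "query_length": tf.train.Feature(
        int64_list=tf.train.Int64List(value=[3]))}))
item = tf.train.Example(features=tf.train.Features(feature={
    "utility": tf.train.Feature(
        float_list=tf.train.FloatList(value=[1.0]))}))

elwc = input_pb2.ExampleListWithContext()
elwc.context.CopyFrom(context)
elwc.examples.add().CopyFrom(item)
serialized_request = elwc.SerializeToString()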
Example #3
        def _make_serving_input_fn(listwise_inference):
            if listwise_inference:
                return data.build_ranking_serving_input_receiver_fn(
                    data.ELWC,
                    context_feature_spec=context_feature_spec,
                    example_feature_spec=example_feature_spec,
                    size_feature_name=_SIZE)
            else:
                feature_spec = {}
                feature_spec.update(context_feature_spec)
                feature_spec.update(example_feature_spec)

                def pointwise_serving_fn():
                    serialized = tf.compat.v1.placeholder(
                        dtype=tf.string,
                        shape=[None],
                        name='input_ranking_tensor')
                    receiver_tensors = {'input_ranking_data': serialized}
                    features = tf.compat.v1.io.parse_example(
                        serialized, feature_spec)
                    # Pointwise serving scores each tf.Example as a list of size 1.
                    features[_SIZE] = tf.ones((1,), dtype=tf.int32)
                    return tf.estimator.export.ServingInputReceiver(
                        features, receiver_tensors)

                return pointwise_serving_fn
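
A hedged, self-contained round trip through the same pointwise receiver logic, using a one-feature spec and a single serialized tf.Example (all names are illustrative; eager execution is disabled because the receiver relies on v1 placeholders):

import tensorflow as tf

tf.compat.v1.disable_eager_execution()

_SIZE = "example_list_size"
feature_spec = {"utility": tf.io.FixedLenFeature([1], tf.float32)}

serialized = tf.compat.v1.placeholder(
    dtype=tf.string, shape=[None], name="input_ranking_tensor")
features = tf.compat.v1.io.parse_example(serialized, feature_spec)
# Pointwise serving treats each tf.Example as a list of size 1.
features[_SIZE] = tf.ones((1,), dtype=tf.int32)
receiver = tf.estimator.export.ServingInputReceiver(
    features, {"input_ranking_data": serialized})

example = tf.train.Example(features=tf.train.Features(feature={
    "utility": tf.train.Feature(
        float_list=tf.train.FloatList(value=[0.5]))}))
with tf.compat.v1.Session() as sess:
    out = sess.run(
        receiver.features,
        feed_dict={serialized: [example.SerializeToString()]})
    # out["utility"] -> [[0.5]], out[_SIZE] -> [1]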
Example #4
    def test_model_to_estimator(self, weights_feature_name):
        keras_model = model.create_keras_model(network=self._network,
                                               loss=self._loss,
                                               metrics=self._eval_metrics,
                                               optimizer=self._optimizer,
                                               size_feature_name=_SIZE)
        estimator = estimator_lib.model_to_estimator(
            model=keras_model,
            config=self._config,
            weights_feature_name=weights_feature_name,
            custom_objects=self._custom_objects)
        self.assertIsInstance(estimator, tf.compat.v1.estimator.Estimator)

        # Train and export model.
        train_spec = tf.estimator.TrainSpec(
            input_fn=self._make_input_fn(weights_feature_name), max_steps=1)
        eval_spec = tf.estimator.EvalSpec(
            name='eval',
            input_fn=self._make_input_fn(weights_feature_name),
            steps=10)
        tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)

        context_feature_spec = tf.feature_column.make_parse_example_spec(
            self._context_feature_columns.values())
        example_feature_spec = tf.feature_column.make_parse_example_spec(
            self._example_feature_columns.values())

        serving_input_receiver_fn = (
            data.build_ranking_serving_input_receiver_fn(
                data.ELWC,
                context_feature_spec=context_feature_spec,
                example_feature_spec=example_feature_spec,
                size_feature_name=_SIZE))
        export_dir = os.path.join(tf.compat.v1.test.get_temp_dir(), 'export')
        estimator.export_saved_model(export_dir, serving_input_receiver_fn)

        # Confirm that the model trained, wrote checkpoints, and exported a
        # SavedModel.
        final_ckpt_path = os.path.join(estimator.model_dir,
                                       'model.ckpt-1.meta')
        self.assertTrue(tf.io.gfile.exists(final_ckpt_path))

        saved_model_pb = os.path.join(export_dir,
                                      tf.io.gfile.listdir(export_dir)[0],
                                      'saved_model.pb')
        self.assertTrue(tf.io.gfile.exists(saved_model_pb))
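
Once exported, the SavedModel can be loaded back and fed serialized ELWC protos. A hedged sketch below; saved_model_dir and serialized_elwc stand in for the export path and a serialized ExampleListWithContext, and the signature and input key, which mirror the receiver built above, can be confirmed with `saved_model_cli show --dir <export_dir> --all`:

import tensorflow as tf

loaded = tf.saved_model.load(saved_model_dir)
infer = loaded.signatures["serving_default"]
# The input key matches the receiver tensor name used at export time.
outputs = infer(input_ranking_data=tf.constant([serialized_elwc]))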
Example #5
        def _make_serving_input_fn(serving_default):
            if serving_default == 'predict':
                return data.build_ranking_serving_input_receiver_fn(
                    data.ELWC,
                    context_feature_spec=context_feature_spec,
                    example_feature_spec=example_feature_spec,
                    size_feature_name=_SIZE)
            else:

                def pointwise_serving_fn():
                    serialized = tf.compat.v1.placeholder(
                        dtype=tf.string,
                        shape=[None],
                        name='input_ranking_tensor')
                    receiver_tensors = {'input_ranking_data': serialized}
                    features = data.parse_from_tf_example(
                        serialized,
                        context_feature_spec=context_feature_spec,
                        example_feature_spec=example_feature_spec,
                        size_feature_name=_SIZE)
                    return tf_estimator.export.ServingInputReceiver(
                        features, receiver_tensors)

                return pointwise_serving_fn
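
Unlike Example #3, the pointwise branch here delegates to data.parse_from_tf_example, which can emit the _SIZE feature itself when size_feature_name is set, rather than adding a tf.ones tensor by hand. A hedged sketch of wiring either factory output into an export (the estimator and directory are illustrative):

serving_input_fn = _make_serving_input_fn(serving_default="predict")
estimator.export_saved_model("/tmp/ranking_export", serving_input_fn)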