Example 1
    def predict(self, features):
        self.assert_is_loaded()

        # Forcing both session and graph to be defaults here to force a graph
        # context if this ever gets used during Eager execution. Makes testing
        # easier too.
        with self._session.as_default(), self._graph.as_default():
            return self._session.run(
                self._predictions,
                tensorspec_utils.map_feed_dict(self._features, features))
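The with-block above is the usual TF1-style way to pin a specific graph and session as defaults before calling run(), so the snippet keeps working even if the surrounding program executes eagerly. A minimal standalone sketch of the same pattern, with made-up tensor names and shapes, might look like this:

    import tensorflow as tf  # TF1-style API; under TF2 these calls live in tf.compat.v1

    # Build a small graph that stands in for the loaded model.
    graph = tf.Graph()
    with graph.as_default():
      x = tf.placeholder(tf.float32, shape=(None, 8), name='x')
      y = tf.reduce_sum(x, axis=1, name='y')

    session = tf.Session(graph=graph)

    # Force both the session and the graph to be defaults, as in the snippet
    # above, before running the fetch.
    with session.as_default(), graph.as_default():
      result = session.run(y, feed_dict={x: [[1.0] * 8]})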
Example 2
    def predict(self, features):
        """Predicts based on feature input using the loaded model.

        Args:
          features: A dict containing the features used for predictions.

        Returns:
          The result of the queried model predictions.
        """
        self.assert_is_loaded()
        return self._sess.run(
            self._predictions,
            tensorspec_utils.map_feed_dict(self._features, features))
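Both predict variants build their feed dict with tensorspec_utils.map_feed_dict, pairing the placeholder structure stored in self._features with the caller's numpy values. A caller-side sketch is shown below; the model variable, how it was loaded, and the feature keys and shapes are assumptions for illustration only, not part of the snippets above.

    import numpy as np

    # Hypothetical usage: `model` is assumed to be an already-loaded instance
    # of a class exposing the predict() method shown above; the feature keys
    # and shapes are made up.
    features = {
        'state': np.zeros((1, 8), dtype=np.float32),
        'image': np.zeros((1, 64, 64, 3), dtype=np.uint8),
    }
    predictions = model.predict(features)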
Example 3
    def preprocess(preprocessor, feature_spec, label_spec, flatten=False):
      with tf.Session() as sess:
        feature_placeholders = tensorspec_utils.make_placeholders(
            feature_spec, batch_size=1)
        label_placeholders = None
        if label_spec is not None:
          label_placeholders = tensorspec_utils.make_placeholders(
              label_spec, batch_size=1)

        # Normally we want our features and labels to be flattened.
        # However, we also support non-flattened hierarchies.
        if flatten:
          feature_placeholders = tensorspec_utils.flatten_spec_structure(
              feature_placeholders)
          if label_spec is not None:
            label_placeholders = tensorspec_utils.flatten_spec_structure(
                label_placeholders)

        (features_preprocessed, labels_preprocessed) = preprocessor.preprocess(
            features=feature_placeholders,
            labels=label_placeholders,
            mode=tf.estimator.ModeKeys.TRAIN)

        # We create random numpy data matching the spec, either as a mapping
        # of {key: np.array} or as a namedtuple spec structure.
        np_feature_spec = tensorspec_utils.make_random_numpy(
            feature_spec, batch_size=1)
        if label_placeholders is not None:
          np_label_spec = tensorspec_utils.make_random_numpy(
              label_spec, batch_size=1)

        # We create our feed dict, which consists of
        # {placeholder: np.array} entries.
        feed_dict = tensorspec_utils.map_feed_dict(feature_placeholders,
                                                   np_feature_spec,
                                                   ignore_batch=True)
        if label_placeholders is not None:
          feed_dict = tensorspec_utils.map_feed_dict(label_placeholders,
                                                     np_label_spec,
                                                     feed_dict,
                                                     ignore_batch=True)

        fetch_results = [features_preprocessed]
        if label_placeholders is not None:
          fetch_results.append(labels_preprocessed)

        np_preprocessed = sess.run(
            fetch_results, feed_dict=feed_dict)

        np_features_preprocessed = np_preprocessed[0]
        if label_placeholders is not None:
          np_labels_preprocessed = np_preprocessed[1]

        np_feature_spec = tensorspec_utils.flatten_spec_structure(
            np_feature_spec)
        if label_placeholders is not None:
          np_label_spec = tensorspec_utils.flatten_spec_structure(np_label_spec)

        for key, value in np_feature_spec.items():
          np.testing.assert_allclose(value, np_features_preprocessed[key])

        if label_placeholders is not None:
          for key, value in np_label_spec.items():
            np.testing.assert_allclose(value, np_labels_preprocessed[key])
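A sketch of how this helper might be called from a test follows. The spec keys, shapes, and the preprocessor instance are assumptions for illustration, and the snippet assumes tensorspec_utils exposes TensorSpecStruct and ExtendedTensorSpec for declaring specs.

    # Hypothetical invocation of the helper above; nothing here is taken from
    # the snippet itself.
    feature_spec = tensorspec_utils.TensorSpecStruct()
    feature_spec['state'] = tensorspec_utils.ExtendedTensorSpec(
        shape=(8,), dtype=tf.float32, name='state')
    label_spec = tensorspec_utils.TensorSpecStruct()
    label_spec['action'] = tensorspec_utils.ExtendedTensorSpec(
        shape=(2,), dtype=tf.float32, name='action')

    # MyPreprocessor stands in for whatever preprocessor implementation is
    # under test; it only needs a preprocess(features, labels, mode) method.
    preprocess(MyPreprocessor(), feature_spec, label_spec, flatten=True)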