示例#1
0
  def _infer_model(self,
                   x=None, input_fn=None, feed_fn=None,
                   batch_size=None, axis=None, proba=False):
    """Build the prediction graph and run inference against the latest checkpoint.

    When `x` is given, it is converted into an `input_fn`/`feed_fn` pair.
    With a `feed_fn`, outputs are collected per feed and concatenated per key;
    without one, a single `infer` call is returned directly.  (`axis` and
    `proba` are accepted but not used in this body.)
    """
    # Converts inputs into tf.DataFrame / tf.Series.
    if batch_size is None:
      batch_size = -1
    if x is not None:
      input_fn, feed_fn = _get_predict_input_fn(x, batch_size)

    ckpt = saver.latest_checkpoint(self._model_dir)
    with ops.Graph().as_default() as graph:
      random_seed.set_random_seed(self._config.tf_random_seed)
      contrib_framework.create_global_step(graph)
      features, _ = input_fn()
      predictions = self._get_predict_ops(features)
      if not isinstance(predictions, dict):
        predictions = {'predictions': predictions}
      # TODO(ipolosukhin): Support batching
      if feed_fn is None:
        return infer(ckpt, predictions)
      # Drain feed_fn until it raises StopIteration or yields None,
      # collecting each batch's outputs per key.
      collected = {}
      while True:
        try:
          feed_dict = feed_fn()
        except StopIteration:
          break
        if feed_dict is None:
          break
        batch_out = infer(ckpt, predictions, feed_dict=feed_dict)
        for key in batch_out:
          collected.setdefault(key, []).append(batch_out[key])
      return {key: np.concatenate(chunks, axis=0)
              for key, chunks in collected.items()}
示例#2
0
  def _infer_model(self, x=None, input_fn=None, feed_fn=None, batch_size=None):
    """Run inference and return either a dict of outputs or a single array.

    When `x` is given, it is converted into an `input_fn`/`feed_fn` pair.
    If the predict ops are not already a dict they are wrapped under the
    'predictions' key and unwrapped again before returning.
    """
    # Converts inputs into tf.DataFrame / tf.Series.
    if batch_size is None:
      batch_size = -1
    if x is not None:
      input_fn, feed_fn = _get_predict_input_fn(x, None, batch_size)

    ckpt = saver.latest_checkpoint(self._model_dir)
    with ops.Graph().as_default() as graph:
      random_seed.set_random_seed(self._config.tf_random_seed)
      contrib_framework.create_global_step(graph)
      features = self._get_features_from_input_fn(input_fn)
      predictions = self._get_predict_ops(features)
      # Remember whether the caller gets a dict back or the bare tensor output.
      return_dict = isinstance(predictions, dict)
      if not return_dict:
        predictions = {'predictions': predictions}
      if feed_fn is None:
        preds = infer(ckpt, predictions)
      else:
        def _endless_feeds():
          while True:
            yield feed_fn()

        batches = graph_actions.run_feeds(
            output_dict=predictions,
            feed_dicts=_endless_feeds(),
            restore_checkpoint_path=ckpt)
        preds = {
            key: np.concatenate([batch[key] for batch in batches], axis=0)
            for key in predictions}
      return preds if return_dict else preds['predictions']
示例#3
0
    def _infer_model(self,
                     x=None,
                     input_fn=None,
                     feed_fn=None,
                     batch_size=None):
        """Run inference and return either a dict of outputs or a single array.

        When `x` is given, it is converted into an `input_fn`/`feed_fn` pair.
        With a `feed_fn`, batches are pulled until it raises StopIteration or
        yields None, and outputs are concatenated per key.
        """
        # Converts inputs into tf.DataFrame / tf.Series.
        if batch_size is None:
            batch_size = -1
        if x is not None:
            input_fn, feed_fn = _get_predict_input_fn(x, None, batch_size)

        ckpt = saver.latest_checkpoint(self._model_dir)
        with ops.Graph().as_default() as graph:
            random_seed.set_random_seed(self._config.tf_random_seed)
            contrib_framework.create_global_step(graph)
            features = self._get_features_from_input_fn(input_fn)
            predictions = self._get_predict_ops(features)
            # Remember whether the caller gets a dict or the bare output back.
            return_dict = isinstance(predictions, dict)
            if not return_dict:
                predictions = {'predictions': predictions}
            if feed_fn is None:
                preds = infer(ckpt, predictions)
            else:
                collected = {}
                while True:
                    try:
                        feed_dict = feed_fn()
                    except StopIteration:
                        break
                    if feed_dict is None:
                        break
                    batch_out = infer(ckpt, predictions, feed_dict=feed_dict)
                    for key in batch_out:
                        collected.setdefault(key, []).append(batch_out[key])
                preds = {key: np.concatenate(chunks, axis=0)
                         for key, chunks in collected.items()}
            return preds if return_dict else preds['predictions']
示例#4
0
    def _infer_model_single(self, checkpoint_path, predictions, feed_fn, return_dict):
        """Run one inference pass against `checkpoint_path`.

        With no `feed_fn`, a single `graph_actions.infer` call is made;
        otherwise feeds are streamed via `graph_actions.run_feeds` and the
        per-batch outputs concatenated key-wise.  Returns the full dict when
        `return_dict` is true, else only its "predictions" entry.
        """
        if feed_fn is not None:

            def _endless_feeds():
                while True:
                    yield feed_fn()

            batches = graph_actions.run_feeds(
                output_dict=predictions,
                feed_dicts=_endless_feeds(),
                restore_checkpoint_path=checkpoint_path,
            )
            results = {}
            for key in predictions:
                results[key] = np.concatenate([b[key] for b in batches], axis=0)
        else:
            results = graph_actions.infer(checkpoint_path, predictions)

        if not return_dict:
            return results["predictions"]
        return results
示例#5
0
  def _infer_model(self, x, batch_size=None, axis=None, proba=False):
    """Run a single inference pass for `x` against the latest checkpoint.

    `x` is converted into an `input_fn`/`feed_fn` pair; at most one feed dict
    is pulled from `feed_fn`.  (`axis` and `proba` are accepted but not used
    in this body.)
    """
    # Converts inputs into tf.DataFrame / tf.Series.
    if batch_size is None:
      batch_size = -1
    input_fn, feed_fn = _get_predict_input_fn(x, batch_size)

    ckpt = saver.latest_checkpoint(self._model_dir)
    with ops.Graph().as_default() as graph:
      random_seed.set_random_seed(self._config.tf_random_seed)
      contrib_framework.create_global_step(graph)
      features, _ = input_fn()
      feed_dict = None if feed_fn is None else feed_fn()
      predictions = self._get_predict_ops(features)
      if not isinstance(predictions, dict):
        predictions = {'predictions': predictions}
      # TODO(ipolosukhin): Support batching
      return infer(ckpt, predictions, feed_dict=feed_dict)
示例#6
0
    def _infer_model(self, input_fn, feed_fn=None, outputs=None):
        """Run inference, optionally restricted to the keys listed in `outputs`.

        Raises NotFittedError when no checkpoint exists in the model dir, and
        ValueError when `outputs` filters away every prediction key.  Returns
        a dict of outputs, or the bare output when the predict ops were not a
        dict to begin with.
        """
        # Check that model has been trained.
        checkpoint_path = saver.latest_checkpoint(self._model_dir)
        if not checkpoint_path:
            raise NotFittedError("Couldn't find trained model at %s." %
                                 self._model_dir)

        with ops.Graph().as_default() as graph:
            random_seed.set_random_seed(self._config.tf_random_seed)
            contrib_framework.create_global_step(graph)
            features = self._get_features_from_input_fn(input_fn)
            predictions = self._get_predict_ops(features)
            # If predictions is single output - wrap it into dict, and remember
            # to return not a dict.
            return_dict = isinstance(predictions, dict)
            if not return_dict:
                predictions = {'predictions': predictions}
            # Filter what to run predictions on, if outputs provided.
            if outputs:
                existing_keys = predictions.keys()
                predictions = {k: v for k, v in predictions.items()
                               if k in outputs}
                if not predictions:
                    raise ValueError(
                        'Expected to run at least one output from %s, '
                        'provided %s.' % (existing_keys, outputs))
            if feed_fn is None:
                preds = graph_actions.infer(checkpoint_path, predictions)
            else:
                # Named to avoid shadowing the `outputs` parameter.
                def _endless_feeds():
                    while True:
                        yield feed_fn()

                batches = graph_actions.run_feeds(
                    output_dict=predictions,
                    feed_dicts=_endless_feeds(),
                    restore_checkpoint_path=checkpoint_path)
                preds = {key: np.concatenate([b[key] for b in batches], axis=0)
                         for key in predictions}
            return preds if return_dict else preds['predictions']
示例#7
0
  def _infer_model_single(
      self, checkpoint_path, predictions, feed_fn, return_dict):
    """Run one inference pass against `checkpoint_path`.

    With no `feed_fn`, a single `graph_actions.infer` call is made; otherwise
    feeds are streamed via `graph_actions.run_feeds` and the per-batch outputs
    concatenated key-wise.  Returns the full dict when `return_dict` is true,
    else only its 'predictions' entry.
    """
    if feed_fn is not None:
      def _endless_feeds():
        while True:
          yield feed_fn()

      batches = graph_actions.run_feeds(
          output_dict=predictions,
          feed_dicts=_endless_feeds(),
          restore_checkpoint_path=checkpoint_path)
      results = {}
      for key in predictions:
        results[key] = np.concatenate([b[key] for b in batches], axis=0)
    else:
      results = graph_actions.infer(checkpoint_path, predictions)

    if not return_dict:
      return results['predictions']
    return results
示例#8
0
  def _infer_model(self, input_fn, feed_fn=None, outputs=None):
    """Run inference, optionally restricted to the keys listed in `outputs`.

    Raises NotFittedError when no checkpoint exists in the model dir, and
    ValueError when `outputs` filters away every prediction key.  Returns a
    dict of outputs, or the bare output when the predict ops were not a dict
    to begin with.
    """
    # Check that model has been trained.
    checkpoint_path = saver.latest_checkpoint(self._model_dir)
    if not checkpoint_path:
      raise NotFittedError("Couldn't find trained model at %s."
                           % self._model_dir)

    with ops.Graph().as_default() as graph:
      random_seed.set_random_seed(self._config.tf_random_seed)
      contrib_framework.create_global_step(graph)
      features = self._get_features_from_input_fn(input_fn)
      predictions = self._get_predict_ops(features)
      # If predictions is single output - wrap it into dict, and remember to
      # return not a dict.
      return_dict = isinstance(predictions, dict)
      if not return_dict:
        predictions = {'predictions': predictions}
      # Filter what to run predictions on, if outputs provided.
      if outputs:
        existing_keys = predictions.keys()
        predictions = {k: v for k, v in predictions.items() if k in outputs}
        if not predictions:
          raise ValueError('Expected to run at least one output from %s, '
                           'provided %s.' % (existing_keys, outputs))
      if feed_fn is None:
        preds = graph_actions.infer(checkpoint_path, predictions)
      else:
        # Named to avoid shadowing the `outputs` parameter.
        def _endless_feeds():
          while True:
            yield feed_fn()

        batches = graph_actions.run_feeds(
            output_dict=predictions,
            feed_dicts=_endless_feeds(),
            restore_checkpoint_path=checkpoint_path)
        preds = {key: np.concatenate([b[key] for b in batches], axis=0)
                 for key in predictions}
      return preds if return_dict else preds['predictions']