Example #1
 def infer_impl(self):
     """Performs inference on examples that require inference."""
     indices_to_infer = sorted(self.updated_example_indices)
     examples_to_infer = [
         self.json_to_proto(self.examples[index])
         for index in indices_to_infer
     ]
     infer_objs = []
     attribution_objs = []
     serving_bundle = inference_utils.ServingBundle(
         self.config.get('inference_address'),
         self.config.get('model_name'), self.config.get('model_type'),
         self.config.get('model_version'),
         self.config.get('model_signature'),
         self.config.get('uses_predict_api'),
         self.config.get('predict_input_tensor'),
         self.config.get('predict_output_tensor'),
         self.estimator_and_spec.get('estimator'),
         self.estimator_and_spec.get('feature_spec'),
         self.custom_predict_fn)
     (predictions,
      attributions) = (inference_utils.run_inference_for_inference_results(
          examples_to_infer, serving_bundle))
     infer_objs.append(predictions)
     attribution_objs.append(attributions)
     if ('inference_address_2' in self.config
             or self.compare_estimator_and_spec.get('estimator')
             or self.compare_custom_predict_fn):
         serving_bundle = inference_utils.ServingBundle(
             self.config.get('inference_address_2'),
             self.config.get('model_name_2'), self.config.get('model_type'),
             self.config.get('model_version_2'),
             self.config.get('model_signature_2'),
             self.config.get('uses_predict_api'),
             self.config.get('predict_input_tensor'),
             self.config.get('predict_output_tensor'),
             self.compare_estimator_and_spec.get('estimator'),
             self.compare_estimator_and_spec.get('feature_spec'),
             self.compare_custom_predict_fn)
         (predictions, attributions) = (
             inference_utils.run_inference_for_inference_results(
                 examples_to_infer, serving_bundle))
         infer_objs.append(predictions)
         attribution_objs.append(attributions)
     self.updated_example_indices = set()
     return {
         'inferences': {
             'indices': indices_to_infer,
             'results': infer_objs
         },
         'label_vocab': self.config.get('label_vocab'),
         'attributions': attribution_objs
     }
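
All of these snippets read their model settings from a `self.config` dictionary. Below is a minimal sketch of what that dictionary might contain, using only the keys accessed above; the concrete values (address, names, vocabulary) are illustrative assumptions, not values taken from the source.

    # Illustrative only: keys mirror the self.config lookups above; all values are assumed.
    config = {
        'inference_address': 'localhost:8500',    # assumed serving endpoint
        'model_name': 'my_model',                 # hypothetical model name
        'model_type': 'classification',           # assumed model type
        'model_version': None,                    # assumed: None/empty for default version
        'model_signature': None,
        'uses_predict_api': False,
        'predict_input_tensor': None,
        'predict_output_tensor': None,
        'label_vocab': ['negative', 'positive'],  # hypothetical label vocabulary
        # Optional second model, which enables the comparison branch above:
        # 'inference_address_2': 'localhost:8501',
        # 'model_name_2': 'my_other_model',
        # 'model_version_2': None,
        # 'model_signature_2': None,
    }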
Example #2
 def infer(self):
     indices_to_infer = sorted(self.updated_example_indices)
     examples_to_infer = [
         self.json_to_proto(self.examples[index])
         for index in indices_to_infer
     ]
     infer_objs = []
     serving_bundle = inference_utils.ServingBundle(
         self.config.get('inference_address'),
         self.config.get('model_name'), self.config.get('model_type'),
         self.config.get('model_version'),
         self.config.get('model_signature'),
         self.config.get('uses_predict_api'),
         self.config.get('predict_input_tensor'),
         self.config.get('predict_output_tensor'),
         self.estimator_and_spec.get('estimator'),
         self.estimator_and_spec.get('feature_spec'))
     infer_objs.append(
         inference_utils.run_inference_for_inference_results(
             examples_to_infer, serving_bundle))
     if ('inference_address_2' in self.config
             or self.compare_estimator_and_spec.get('estimator')):
         serving_bundle = inference_utils.ServingBundle(
             self.config.get('inference_address_2'),
             self.config.get('model_name_2'), self.config.get('model_type'),
             self.config.get('model_version_2'),
             self.config.get('model_signature_2'),
             self.config.get('uses_predict_api'),
             self.config.get('predict_input_tensor'),
             self.config.get('predict_output_tensor'),
             self.compare_estimator_and_spec.get('estimator'),
             self.compare_estimator_and_spec.get('feature_spec'))
         infer_objs.append(
             inference_utils.run_inference_for_inference_results(
                 examples_to_infer, serving_bundle))
     self.updated_example_indices = set()
     inferences = {
         'inferences': {
             'indices': indices_to_infer,
             'results': infer_objs
         },
         'label_vocab': self.config.get('label_vocab')
     }
     output.eval_js("""inferenceCallback('{inferences}')""".format(
         inferences=json.dumps(inferences)))
Example #3
    def _infer(self, request):
        """Returns JSON for the `vz-line-chart`s for a feature.

    Args:
      request: A request that should contain 'inference_address', 'model_name',
        'model_type, 'model_version', 'model_signature' and 'label_vocab_path'.

    Returns:
      A list of JSON objects, one for each chart.
    """
        label_vocab = inference_utils.get_label_vocab(
            request.args.get('label_vocab_path'))

        try:
            if request.method != 'GET':
                logger.error('%s requests are forbidden.', request.method)
                return http_util.Respond(request,
                                         {'error': 'invalid non-GET request'},
                                         'application/json',
                                         code=405)

            (inference_addresses, model_names, model_versions,
             model_signatures) = self._parse_request_arguments(request)

            indices_to_infer = sorted(self.updated_example_indices)
            examples_to_infer = [
                self.examples[index] for index in indices_to_infer
            ]
            infer_objs = []
            for model_num in xrange(len(inference_addresses)):
                serving_bundle = inference_utils.ServingBundle(
                    inference_addresses[model_num], model_names[model_num],
                    request.args.get('model_type'), model_versions[model_num],
                    model_signatures[model_num],
                    request.args.get('use_predict') == 'true',
                    request.args.get('predict_input_tensor'),
                    request.args.get('predict_output_tensor'))
                (predictions,
                 _) = inference_utils.run_inference_for_inference_results(
                     examples_to_infer, serving_bundle)
                infer_objs.append(predictions)

            resp = {'indices': indices_to_infer, 'results': infer_objs}
            self.updated_example_indices = set()
            return http_util.Respond(request, {
                'inferences': json.dumps(resp),
                'vocab': json.dumps(label_vocab)
            }, 'application/json')
        except common_utils.InvalidUserInputError as e:
            return http_util.Respond(request, {'error': e.message},
                                     'application/json',
                                     code=400)
        except AbortionError as e:
            return http_util.Respond(request, {'error': e.details},
                                     'application/json',
                                     code=400)
Example #4
 def _infer(self, change):
   indices_to_infer = sorted(self.updated_example_indices)
   examples_to_infer = [
     self.json_to_proto(self.examples[index]) for index in indices_to_infer]
   infer_objs = []
   serving_bundle = inference_utils.ServingBundle(
     self.config.get('inference_address'),
     self.config.get('model_name'),
     self.config.get('model_type'),
     self.config.get('model_version'),
     self.config.get('model_signature'),
     self.config.get('uses_predict_api'),
     self.config.get('predict_input_tensor'),
     self.config.get('predict_output_tensor'),
     self.estimator_and_spec.get('estimator'),
     self.estimator_and_spec.get('feature_spec'),
     self.custom_predict_fn)
   infer_objs.append(inference_utils.run_inference_for_inference_results(
     examples_to_infer, serving_bundle))
   if ('inference_address_2' in self.config or
       self.compare_estimator_and_spec.get('estimator') or
       self.compare_custom_predict_fn):
     serving_bundle = inference_utils.ServingBundle(
       self.config.get('inference_address_2'),
       self.config.get('model_name_2'),
       self.config.get('model_type'),
       self.config.get('model_version_2'),
       self.config.get('model_signature_2'),
       self.config.get('uses_predict_api'),
       self.config.get('predict_input_tensor'),
       self.config.get('predict_output_tensor'),
       self.compare_estimator_and_spec.get('estimator'),
       self.compare_estimator_and_spec.get('feature_spec'),
       self.compare_custom_predict_fn)
     infer_objs.append(inference_utils.run_inference_for_inference_results(
       examples_to_infer, serving_bundle))
   self.updated_example_indices = set()
   self.inferences = {
     'inferences': {'indices': indices_to_infer, 'results': infer_objs},
     'label_vocab': self.config.get('label_vocab')}
Example #5
 def infer_impl(self):
     """Performs inference on examples that require inference."""
     indices_to_infer = sorted(self.updated_example_indices)
     examples_to_infer = [
         self.json_to_proto(self.examples[index])
         for index in indices_to_infer
     ]
     infer_objs = []
     extra_output_objs = []
     serving_bundle = inference_utils.ServingBundle(
         self.config.get("inference_address"),
         self.config.get("model_name"),
         self.config.get("model_type"),
         self.config.get("model_version"),
         self.config.get("model_signature"),
         self.config.get("uses_predict_api"),
         self.config.get("predict_input_tensor"),
         self.config.get("predict_output_tensor"),
         self.estimator_and_spec.get("estimator"),
         self.estimator_and_spec.get("feature_spec"),
         self.custom_predict_fn,
     )
     (
         predictions,
         extra_output,
     ) = inference_utils.run_inference_for_inference_results(
         examples_to_infer, serving_bundle)
     infer_objs.append(predictions)
     extra_output_objs.append(extra_output)
     if ("inference_address_2" in self.config
             or self.compare_estimator_and_spec.get("estimator")
             or self.compare_custom_predict_fn):
         serving_bundle = inference_utils.ServingBundle(
             self.config.get("inference_address_2"),
             self.config.get("model_name_2"),
             self.config.get("model_type"),
             self.config.get("model_version_2"),
             self.config.get("model_signature_2"),
             self.config.get("uses_predict_api"),
             self.config.get("predict_input_tensor"),
             self.config.get("predict_output_tensor"),
             self.compare_estimator_and_spec.get("estimator"),
             self.compare_estimator_and_spec.get("feature_spec"),
             self.compare_custom_predict_fn,
         )
         (
             predictions,
             extra_output,
         ) = inference_utils.run_inference_for_inference_results(
             examples_to_infer, serving_bundle)
         infer_objs.append(predictions)
         extra_output_objs.append(extra_output)
     self.updated_example_indices = set()
     return {
         "inferences": {
             "indices": indices_to_infer,
             "results": infer_objs
         },
         "label_vocab": self.config.get("label_vocab"),
         "extra_outputs": extra_output_objs,
     }
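
Assuming an object `wit` that exposes this method, a minimal sketch of unpacking the dictionary it returns in the single-model case; the instance name is hypothetical, but the keys match the return statement above.

    # Illustrative only: 'wit' is a hypothetical instance exposing infer_impl().
    result = wit.infer_impl()
    indices = result['inferences']['indices']           # example indices that were inferred
    predictions = result['inferences']['results'][0]    # results for the first (only) model
    extra = result['extra_outputs'][0]                  # extra outputs for the first model
    vocab = result['label_vocab']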