Exemplo n.º 1
0
 def _predict_fn(self, arr: Union[np.ndarray, List]) -> np.ndarray:
     """Send *arr* to the remote predictor and return its output as an ndarray.

     Dispatches on ``self.protocol``:
       - seldon_grpc: delegates to ``self._grpc``.
       - seldon_http: POSTs a Seldon NDARRAY payload as JSON.
       - tensorflow_http: POSTs a TF-Serving ``{"instances": [...]}`` body.

     Raises:
         Exception: if the HTTP predictor responds with a non-200 status.
         NotImplementedError: if ``self.protocol`` matches no known protocol
             (previously this fell through and silently returned ``None``).
     """
     # isinstance() instead of type() == list; also accepts list subclasses.
     if isinstance(arr, list):
         arr = np.array(arr)
     if self.protocol == Protocol.seldon_grpc:
         return self._grpc(arr)
     if self.protocol == Protocol.seldon_http:
         payload = seldon.create_request(arr, seldon.SeldonPayload.NDARRAY)
         response_raw = requests.post(
             SELDON_PREDICTOR_URL_FORMAT.format(self.predictor_host),
             json=payload)
         # Guard clause: fail fast on a bad status, success path unindented.
         if response_raw.status_code != 200:
             raise Exception(
                 "Failed to get response from model return_code:%d" %
                 response_raw.status_code)
         rh = seldon.SeldonRequestHandler(response_raw.json())
         response_list = rh.extract_request()
         return np.array(response_list)
     if self.protocol == Protocol.tensorflow_http:
         # TF-Serving expects plain (JSON-serializable) lists, so convert
         # any ndarray rows; other row types are passed through unchanged.
         instances = [
             req_data.tolist() if isinstance(req_data, np.ndarray) else req_data
             for req_data in arr
         ]
         request = {"instances": instances}
         response = requests.post(
             TENSORFLOW_PREDICTOR_URL_FORMAT.format(self.predictor_host),
             json.dumps(request))
         if response.status_code != 200:
             raise Exception(
                 "Failed to get response from model return_code:%d" %
                 response.status_code)
         return np.array(response.json()["predictions"])
     # Explicit failure instead of the original implicit `return None`,
     # which contradicted the declared np.ndarray return type.
     raise NotImplementedError(
         "Unsupported protocol: %s" % self.protocol)
Exemplo n.º 2
0
 def explain(self, request: Dict, model_name=None) -> Any:
     """Run the configured explainer on *request* and return the explanation.

     Extracts the instances according to ``self.protocol`` (tensorflow_http,
     v2_http, or the Seldon default), calls ``self.wrapper.explain`` on them,
     and returns the explanation as a JSON-decoded dict.

     Raises:
         NotImplementedError: if ``self.method`` is not a supported
             explainer method.
     """
     # Set membership replaces the original six-way chained `is` comparison;
     # Enum members hash/compare by identity, so behavior is identical.
     supported = {
         ExplainerMethod.anchor_tabular,
         ExplainerMethod.anchor_images,
         ExplainerMethod.anchor_text,
         ExplainerMethod.kernel_shap,
         ExplainerMethod.integrated_gradients,
         ExplainerMethod.tree_shap,
     }
     if self.method not in supported:
         raise NotImplementedError
     if self.protocol == Protocol.tensorflow_http:
         explanation: Explanation = self.wrapper.explain(
             request["instances"])
     elif self.protocol == Protocol.v2_http:
         logging.info("model name %s", model_name)
         rh = v2.KFServingV2RequestHandler()
         # Stash request metadata for building the v2 response later.
         self.v2_model_name = model_name
         self.v2_name = rh.extract_name(request)
         self.v2_type = rh.extract_type(request)
         logging.info("v2 name from inputs %s:", self.v2_name)
         response_list = rh.extract_request(request)
         explanation = self.wrapper.explain(response_list)
     else:
         # Seldon protocol default.
         rh = seldon.SeldonRequestHandler(request)
         response_list = rh.extract_request()
         explanation = self.wrapper.explain(response_list)
     explanation_json = explanation.to_json()
     logging.info("Explanation: %s", explanation_json)
     # Round-trip through JSON so the caller gets plain dict/list types.
     return json.loads(explanation_json)
Exemplo n.º 3
0
 def explain(self, request: Dict) -> Any:
     """Run the configured explainer on *request* and return the explanation.

     Extracts the instances according to ``self.protocol`` (tensorflow_http
     or the Seldon default), calls ``self.wrapper.explain`` on them, and
     returns the explanation as a JSON-decoded dict.

     Raises:
         NotImplementedError: if ``self.method`` is not a supported
             explainer method.
     """
     # Set membership replaces the original five-way chained `is` comparison;
     # Enum members hash/compare by identity, so behavior is identical.
     supported = {
         ExplainerMethod.anchor_tabular,
         ExplainerMethod.anchor_images,
         ExplainerMethod.anchor_text,
         ExplainerMethod.kernel_shap,
         ExplainerMethod.integrated_gradients,
     }
     if self.method not in supported:
         raise NotImplementedError
     if self.protocol == Protocol.tensorflow_http:
         explanation: Explanation = self.wrapper.explain(request["instances"])
     else:
         # Seldon protocol default.
         rh = seldon.SeldonRequestHandler(request)
         response_list = rh.extract_request()
         explanation = self.wrapper.explain(response_list)
     explanation_json = explanation.to_json()
     logging.info("Explanation: %s", explanation_json)
     # Round-trip through JSON so the caller gets plain dict/list types.
     return json.loads(explanation_json)