def process_event(self, inputs: Union[List, Dict], headers: Dict) -> Dict:
    """
    Process the event and return Alibi Detect score

    Parameters
    ----------
    inputs
        Input data; must contain a "truth" key, and may contain
        "metrics" and "response" keys.
    headers
        Header options; may carry the request Puid and namespace used
        to look the original response up in Elasticsearch.

    Returns
    -------
    SeldonResponse response

    Raises
    ------
    SeldonMicroserviceException
        If no truth value is provided, or if the response could not be
        obtained either from the payload or from Elasticsearch.
    """
    logging.info("PROCESSING Feedback Event.")
    logging.info(str(headers))
    logging.info("----")

    metrics = []
    output = {}
    truth = None
    response = None
    error = None

    if "truth" not in inputs:
        raise SeldonMicroserviceException(
            f"No truth value provided in: {json.dumps(inputs)}",
            status_code=400,
            reason="NO_TRUTH_VALUE",
        )
    else:
        truth = inputs["truth"]

    # We automatically add any metrics provided in the incoming request
    if "metrics" in inputs:
        metrics.extend(inputs["metrics"])

    # If response is provided then we can perform a comparison
    if "response" in inputs:
        response = inputs["response"]
    elif REQUEST_ID_HEADER_NAME in headers:
        # Otherwise if UUID is provided we can fetch from elasticsearch
        if not self.elasticsearch_client:
            error = "Seldon-Puid provided but elasticsearch client not configured"
        else:
            try:
                seldon_puid = headers.get(REQUEST_ID_HEADER_NAME, "")
                seldon_namespace = headers.get(NAMESPACE_HEADER_NAME, "")
                # Currently only supports SELDON inference type (not kfserving)
                elasticsearch_index = f"inference-log-seldon-{seldon_namespace}-{SELDON_DEPLOYMENT_ID}-{SELDON_PREDICTOR_ID}"
                doc = self.elasticsearch_client.get(
                    index=elasticsearch_index, id=seldon_puid)
                # BUG FIX: the "response" lookup previously defaulted to
                # None, so a document without a "response" key raised
                # AttributeError on the chained .get instead of reaching
                # the "did not contain response value" error path below.
                # Defaulting to {} makes the chain yield None safely.
                response = (doc.get("_source", {})
                            .get("response", {})
                            .get("instance", None))
                if not response:
                    error = f"Elasticsearch index {elasticsearch_index} with id {seldon_puid} did not contain response value"
            except NotFoundError:
                error = f"Elasticsearch index {elasticsearch_index} with id {seldon_puid} not found"
    else:
        error = "Neither response nor request Puid provided in headers"

    if error:
        raise SeldonMicroserviceException(
            error, status_code=400, reason="METRICS_SERVER_ERROR")

    logging.error(f"{truth}, {response}")
    output = self.model.transform(truth, response)
    # An empty transform output is normalized to None for SeldonResponse
    seldon_response = SeldonResponse.create(output or None)
    seldon_response.metrics.extend(metrics)
    return seldon_response
def post(self):
    """
    Handle a POST request.

    Parses the request body, dispatches the extracted payload to the
    model's event handler, records any returned runtime metrics,
    optionally forwards a reply CloudEvent when a reply_url is
    configured, and writes the response data back to the client.
    """
    # Lazily load the model on first use.
    if not self.model.ready:
        self.model.load()

    try:
        parsed_body = json.loads(self.request.body)
    except json.decoder.JSONDecodeError as e:
        raise tornado.web.HTTPError(
            status_code=HTTPStatus.BAD_REQUEST,
            reason="Unrecognized request format: %s" % e,
        )

    # Extract the protocol-specific payload from the request body.
    handler: RequestHandler = get_request_handler(self.protocol, parsed_body)
    handler.validate()
    extracted_request = handler.extract_request()

    # Rebuild the incoming CloudEvent from the raw HTTP request.
    incoming_event = v02.Event()
    incoming_event = marshaller.NewDefaultHTTPMarshaller().FromRequest(
        incoming_event, self.request.headers, self.request.body, json.loads)
    logging.debug(json.dumps(incoming_event.Properties()))

    # Copy every request header into a plain dict for the model.
    header_map = {key: val for key, val in self.request.headers.get_all()}

    model_output = self.model.process_event(extracted_request, header_map)
    seldon_response = SeldonResponse.create(model_output)

    # Record runtime metrics when present and well-formed.
    runtime_metrics = seldon_response.metrics
    if runtime_metrics is not None:
        if validate_metrics(runtime_metrics):
            self.seldon_metrics.update(runtime_metrics, self.event_type)
        else:
            logging.error("Metrics returned are invalid: " +
                          str(runtime_metrics))

    if seldon_response.data is not None:
        serialized_data = json.dumps(seldon_response.data)

        # Create event from response if reply_url is active
        if not self.reply_url == "":
            # Reuse the incoming event id when available, otherwise mint one.
            incoming_id = incoming_event.EventID()
            if incoming_id is None or incoming_id == "":
                reply_event_id = uuid.uuid1().hex
            else:
                reply_event_id = incoming_id
            reply_event = (
                v02.Event()
                .SetContentType("application/json")
                .SetData(serialized_data)
                .SetEventID(reply_event_id)
                .SetSource(self.event_source)
                .SetEventType(self.event_type)
                .SetExtensions(incoming_event.Extensions()))
            logging.debug(json.dumps(reply_event.Properties()))
            sendCloudEvent(reply_event, self.reply_url)

    self.write(json.dumps(seldon_response.data))