import json
import logging

from flask import request

# args, request_util and model_name_service_map are assumed to be
# initialized at module level before this handler runs.


def do_inference(save_file_dir=None):
    if request.content_type.startswith("application/json"):
        # Process requests with json data
        json_data = json.loads(request.data)
    elif request.content_type.startswith("multipart/form-data"):
        # Process requests with raw image
        json_data = request_util.create_json_from_formdata_request(
            request,
            args.download_inference_images,
            save_file_dir=save_file_dir)
    else:
        logging.error("Unsupported content type: {}".format(
            request.content_type))
        return "Error, unsupported content type"

    # Fall back to the default model when the request names none
    if "model_name" in json_data:
        model_name = json_data.get("model_name", "")
    else:
        model_name = "default"

    # Reject unknown or empty model names instead of raising KeyError below
    if model_name not in model_name_service_map:
        logging.error("The model does not exist: {}".format(model_name))
        return "Error, the model does not exist: {}".format(model_name)

    inference_service = model_name_service_map[model_name]
    result = inference_service.inference(json_data)
    return result
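# A minimal client-side sketch for exercising the JSON branch above, assuming
# the handler is mounted at the server root; the URL, port and payload keys
# ("model_name", "data") are illustrative assumptions, not the project's
# actual route or model signature.
import requests

payload = {"model_name": "default", "data": [[1.0, 2.0]]}  # hypothetical input
response = requests.post("http://127.0.0.1:8500/", json=payload)
print(response.text)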
def do_inference(self):
    # 1. Check request data format
    if request.content_type.startswith("application/json"):
        # Process requests with json data
        try:
            json_data = request.json
            if not isinstance(json_data, dict):
                result = {
                    "error": "Invalid json data: {}".format(request.data)
                }
                return result, 400
        except Exception:
            result = {"error": "Invalid json data: {}".format(request.data)}
            return result, 400
    elif request.content_type.startswith("multipart/form-data"):
        # Process requests with raw image
        try:
            json_data = request_util.create_json_from_formdata_request(
                request,
                self.args.download_inference_images,
                save_file_dir=self.app.config["UPLOAD_FOLDER"])
        except Exception as e:
            result = {"error": "Invalid form-data: {}".format(e)}
            return result, 400
    else:
        logging.error("Unsupported content type: {}".format(
            request.content_type))
        return {"error": "Error, unsupported content type"}, 400

    # 2. Get model or use default one
    model_name = json_data.get("model_name", "default")
    if model_name not in self.manager.model_name_service_map:
        return {
            "error": "Invalid model name: {}, available models: {}".format(
                model_name, list(self.manager.model_name_service_map.keys()))
        }, 400

    # 3. Use initialized manager for inference
    try:
        result = self.manager.inference(model_name, json_data)
        return result, 200
    except Exception as e:
        # str(e) works on both Python 2 and 3; e.message was removed in Python 3
        result = {"error": str(e)}
        return result, 400
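# A minimal sketch of the manager dependency used above: an object that keeps
# model_name_service_map and dispatches inference(model_name, json_data) to
# the named service. The class name and the register helper are illustrative
# assumptions; only the two attributes referenced by do_inference are modeled.
class InferenceServiceManager(object):
    def __init__(self):
        # Maps model name to a service object exposing inference(json_data)
        self.model_name_service_map = {}

    def register(self, model_name, service):
        # Hypothetical helper for adding a loaded model service
        self.model_name_service_map[model_name] = service

    def inference(self, model_name, json_data):
        # Look up the named service and forward the decoded request to it
        return self.model_name_service_map[model_name].inference(json_data)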
def do_inference(save_file_dir=None):
    # Process requests with json data
    if request.content_type.startswith("application/json"):
        json_data = json.loads(request.data)
    # Process requests with raw image
    elif request.content_type.startswith("multipart/form-data"):
        # Get supported signatures to help refactor input data
        model_name = request.form.get("model_name", "default")
        support_signatures = None
        if model_name in model_name_service_map:
            support_signatures = model_name_service_map[model_name].get_detail(
            ).get("model_signature", None)
        json_data = request_util.create_json_from_formdata_request(
            request,
            support_signatures=support_signatures,
            save_file_dir=save_file_dir)
    else:
        logging.error("Unsupported content type: {}".format(
            request.content_type))
        return "Error, unsupported content type"

    # Request backend service with json data
    logging.debug("Constructed request data as json: {}".format(json_data))
    if "model_name" in json_data:
        model_name = json_data.get("model_name", "")
    else:
        model_name = "default"

    # Reject unknown or empty model names instead of raising KeyError below
    if model_name not in model_name_service_map:
        logging.error("The model does not exist: {}".format(model_name))
        return "Error, the model does not exist: {}".format(model_name)

    inference_service = model_name_service_map[model_name]
    result = inference_service.inference(json_data)
    return result
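# A minimal sketch of a multipart/form-data request against the handler above.
# The endpoint, the "image" field name and the file path are assumptions; the
# real field names must match the model signature that the handler looks up
# through get_detail().get("model_signature").
import requests

with open("test.png", "rb") as image_file:  # hypothetical image file
    response = requests.post(
        "http://127.0.0.1:8500/",  # hypothetical host and port
        data={"model_name": "default"},
        files={"image": image_file})
print(response.text)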