async def _batch_handler_template(self, requests, api_name):
    """Forward a merged batch of requests to the model server and fan the
    answer back out into one response per original request.

    On connection failure every caller receives a 503; if the merged payload
    cannot be split, every caller receives a 500.
    """
    headers = {self._MARSHAL_FLAG: "true"}
    api_url = f"http://{self.outbound_host}:{self.outbound_port}/{api_name}"

    with async_trace(
        ZIPKIN_API_URL,
        service_name=self.__class__.__name__,
        span_name=f"[2]merged {api_name}",
    ) as span_ctx:
        # propagate the trace context to the model server
        headers.update(make_http_headers(span_ctx))
        payload = DataLoader.merge_requests(requests)
        try:
            async with aiohttp.ClientSession() as session:
                async with session.post(
                    api_url, data=payload, headers=headers
                ) as remote_resp:
                    body = await remote_resp.read()
                    split = DataLoader.split_responses(body)
        except (aiohttp.ClientConnectorError, aiohttp.ServerDisconnectedError):
            # model server unreachable: one 503 per batched request
            return (aiohttp.web.HTTPServiceUnavailable,) * len(requests)
        if split is None:
            return (aiohttp.web.HTTPInternalServerError,) * len(requests)
        return tuple(
            aiohttp.web.Response(body=r.data, headers=r.headers, status=r.status)
            for r in split
        )
async def _batch_handler_template(self, requests, api_name):
    """Proxy a list of requests to the model server as one merged call.

    Args:
        requests: aiohttp requests to batch together.
        api_name: name of the target API on the model server.

    Raises:
        RemoteException: the model server answered with a non-200 status.
    """
    headers = {self._MARSHAL_FLAG: "true"}
    api_url = f"http://{self.outbound_host}:{self.outbound_port}/{api_name}"

    with async_trace(
        ZIPKIN_API_URL,
        service_name=self.__class__.__name__,
        span_name=f"[2]merged {api_name}",
    ) as span_ctx:
        # propagate the trace context to the model server
        headers.update(make_http_headers(span_ctx))
        merged_payload = DataLoader.merge_requests(requests)
        async with aiohttp.ClientSession() as session:
            async with session.post(
                api_url, data=merged_payload, headers=headers
            ) as resp:
                raw = await resp.read()
                if resp.status != 200:
                    raise RemoteException(
                        f"Bad response status from model server:\n{resp.status}\n{raw}",
                        payload=SimpleResponse(resp.status, resp.headers, raw),
                    )
                responses = DataLoader.split_responses(raw)
        return tuple(
            aiohttp.web.Response(body=r.data, headers=r.headers, status=r.status)
            for r in responses
        )
def api_func():
    # handle_request may raise 4xx or 5xx exception.
    try:
        # the marshal layer flags pre-merged batch payloads via a header
        batched = request.headers.get(self.request_header_flag)
        if not batched:
            return api.handle_request(request)
        split = DataLoader.split_requests(request.get_data())
        results = api.handle_batch_request(split)
        merged_body = DataLoader.merge_responses(results)
        return make_response(merged_body)
    except BentoMLException as e:
        log_exception(sys.exc_info())
        # surface client errors with details, except auth failures
        is_client_error = 400 <= e.status_code < 500
        if is_client_error and e.status_code not in (401, 403):
            return make_response(
                jsonify(
                    message="BentoService error handling API request: %s"
                    % str(e)
                ),
                e.status_code,
            )
        return make_response('', e.status_code)
    except Exception:  # pylint: disable=broad-except
        # For all unexpected error, return 500 by default. For example,
        # if users' model raises an error of division by zero.
        log_exception(sys.exc_info())
        return make_response(
            'An error has occurred in BentoML user code when handling this '
            'request, find the error details in server logs',
            500,
        )
def handle_batch_request(self, request):
    """Handle a marshaled batch request: split it into individual requests,
    run the handler over the whole batch inside a trace span, and merge the
    results back into a single response payload.

    params:
        * request: a request object whose ``.data`` holds the merged payload
    returns:
        merged response payload produced by ``DataLoader.merge_responses``
    """
    requests = DataLoader.split_requests(request.data)
    with trace(
        ZIPKIN_API_URL,
        service_name=self.__class__.__name__,
        # consistency fix: was `self._handler`, but the handler is accessed
        # as `self.handler` in the call below (and in the sibling
        # implementation of this method) — use the public attribute in both
        # places.
        span_name=f"call `{self.handler.__class__.__name__}`",
    ):
        responses = self.handler.handle_batch_request(requests, self.func)
    return DataLoader.merge_responses(responses)
def handle_batch_request(self, request: flask.Request):
    """Split a marshaled flask request into its batched parts, dispatch the
    whole batch to the handler under a trace span, and return the merged
    response payload.
    """
    from bentoml.marshal.utils import DataLoader

    batched = DataLoader.split_requests(request.get_data())
    with trace(
        service_name=self.__class__.__name__,
        span_name=f"call `{self.handler.__class__.__name__}`",
    ):
        results = self.handler.handle_batch_request(batched, self.func)
    return DataLoader.merge_responses(results)
async def _batch_handler_template(self, requests, api_name):
    '''
    batch request handler

    Merges the batched requests into one payload, POSTs it to the model
    server, and splits the merged answer back into one response per request.

    params:
        * requests: list of aiohttp request
        * api_name: called API name
    raise:
        * RemoteException: known exceptions from model server
        * Exception: other exceptions
    '''
    # marks the outbound call as a pre-merged batch for the model server
    headers = {self.request_header_flag: "true"}
    api_url = f"http://{self.outbound_host}:{self.outbound_port}/{api_name}"
    with async_trace(
        self.zipkin_api_url,
        service_name=self.__class__.__name__,
        span_name=f"[2]merged {api_name}",
    ) as trace_ctx:
        # propagate the trace context downstream via HTTP headers
        headers.update(make_http_headers(trace_ctx))
        reqs_s = DataLoader.merge_requests(requests)
        try:
            # auto_decompress=False — presumably to pass the server's body
            # through verbatim without re-encoding; TODO confirm intent
            async with aiohttp.ClientSession(
                auto_decompress=False) as client:
                async with client.post(api_url,
                                       data=reqs_s,
                                       headers=headers) as resp:
                    raw = await resp.read()
        except aiohttp.client_exceptions.ClientConnectionError as e:
            # model server unreachable: wrap as a 503 for every caller
            raise RemoteException(e, payload=HTTPResponse(
                status=503, body=b"Service Unavailable"))
        # NOTE(review): `resp` is read after its `async with` block exits —
        # relies on aiohttp keeping status/headers available after the
        # connection is released; confirm this holds for the pinned version.
        if resp.status != 200:
            raise RemoteException(
                f"Bad response status from model server:\n{resp.status}\n{raw}",
                payload=HTTPResponse(
                    status=resp.status,
                    headers=tuple(resp.headers.items()),
                    body=raw,
                ),
            )
        merged = DataLoader.split_responses(raw)
        # `i.status or 500`: a falsy/missing per-item status becomes a 500
        return tuple(
            aiohttp.web.Response(
                body=i.body, headers=i.headers, status=i.status or 500)
            for i in merged)
async def _batch_handler_template(self, requests, api_route):
    """Send the given requests to the model server as a single merged POST
    and split the merged answer back into individual responses.

    Args:
        requests: aiohttp requests to batch together.
        api_route: route of the target API on the model server.

    Raises:
        RemoteException: the model server was unreachable (wrapped as a
            503 payload) or answered with a non-200 status.
    """
    from aiohttp.client_exceptions import ClientConnectionError
    from aiohttp.web import Response

    headers = {MARSHAL_REQUEST_HEADER: "true"}
    api_url = f"http://{self.outbound_host}:{self.outbound_port}/{api_route}"

    with get_tracer().async_span(
        service_name=self.__class__.__name__,
        span_name=f"[2]merged {api_route}",
        request_headers=headers,
    ):
        payload = DataLoader.merge_requests(requests)
        try:
            # reuse the pooled outbound client rather than opening a session
            client = self.get_client()
            async with client.post(
                api_url, data=payload, headers=headers
            ) as resp:
                raw = await resp.read()
        except ClientConnectionError as e:
            raise RemoteException(
                e,
                payload=HTTPResponse(status=503, body=b"Service Unavailable"),
            )

        if resp.status != 200:
            raise RemoteException(
                f"Bad response status from model server:\n{resp.status}\n{raw}",
                payload=HTTPResponse(
                    status=resp.status,
                    headers=tuple(resp.headers.items()),
                    body=raw,
                ),
            )

        parts = DataLoader.split_responses(raw)
        # a falsy per-item status is surfaced as a 500
        return tuple(
            Response(body=p.body, headers=p.headers, status=p.status or 500)
            for p in parts
        )