Example #1
    def handle_batch_request(self, requests: Iterable[SimpleRequest],
                             func) -> Iterable[SimpleResponse]:

        datas = [r.data for r in requests]
        headers = [{hk.lower(): hv
                    for hk, hv in r.headers or tuple()} for r in requests]
        content_types = [
            h.get('content-type', 'application/json') for h in headers
        ]
        # TODO: check content_type

        df_conc, slices = read_dataframes_from_json_n_csv(datas, content_types)

        result_conc = func(df_conc)
        # TODO: check length

        results = [result_conc[s] if s is not None else BadResult for s in slices]

        responses = [SimpleResponse("bad request", None, 400)] * len(requests)
        for i, result in enumerate(results):
            if result is BadResult:
                continue
            json_output = api_func_result_to_json(
                result, pandas_dataframe_orient=self.output_orient)
            responses[i] = SimpleResponse(
                json_output, (("Content-Type", "application/json"), ), 200)
        return responses
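
These handlers exchange SimpleRequest and SimpleResponse objects whose definitions never appear in the examples. The sketch below is an assumption for orientation only: the field access (r.data, r.headers, r.formated_headers) mirrors how the examples use them, and the (status, headers, data) constructor order follows most of the later examples, while Example #1 above and Example #6 construct SimpleResponse in the older (body, headers, status) order.

    from typing import NamedTuple, Optional, Tuple

    # Hypothetical container sketch; the real classes are not shown in this file.
    class SimpleRequest(NamedTuple):
        data: bytes  # raw HTTP body
        headers: Optional[Tuple[Tuple[str, str], ...]] = None

        @property
        def formated_headers(self):
            # Lower-cased header lookup, matching r.formated_headers.get(...)
            # in the later examples (spelling kept as in the original code).
            return {k.lower(): v for k, v in self.headers or ()}

    class SimpleResponse(NamedTuple):
        status: int
        headers: Optional[Tuple[Tuple[str, str], ...]] = None
        data: Optional[str] = None
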
Example #2
    def handle_batch_request(self, requests: Iterable[SimpleRequest],
                             func: callable) -> Iterable[SimpleResponse]:
        """
        Batch version of handle_request
        """
        bad_resp = SimpleResponse(400, None, "Bad Input")
        responses = [bad_resp] * len(requests)

        input_datas = []
        ids = []
        for i, req in enumerate(requests):
            if not req.data:
                continue
            request = Request.from_values(
                input_stream=BytesIO(req.data),
                content_length=len(req.data),
                headers=req.headers,
            )
            try:
                input_data = self._load_image_data(request)
            except BadInput as e:
                responses[i] = SimpleResponse(400, None, str(e))
                continue

            input_datas.append(input_data)
            ids.append(i)

        results = func(input_datas) if input_datas else []
        for i, result in zip(ids, results):
            responses[i] = SimpleResponse(200, None,
                                          api_func_result_to_json(result))

        return responses
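
To make the calling convention concrete, here is a minimal hypothetical driver (run_batch and its arguments are illustrative, not part of the original code): it wraps raw request bodies in SimpleRequest objects, lets handle_batch_request make a single call to the user-supplied func over the merged batch, and receives one SimpleResponse per request, aligned by position.

    # Hypothetical driver: N queued requests in, one merged model call,
    # N responses out, in the same order as the inputs.
    def run_batch(handler, raw_bodies, func):
        requests = [
            SimpleRequest(body, (("Content-Type", "application/json"),))
            for body in raw_bodies
        ]
        responses = handler.handle_batch_request(requests, func)
        assert len(responses) == len(requests)
        return responses
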
Example #3
    def to_batch_response(
        self,
        result_conc,
        slices=None,
        fallbacks=None,
        requests: Iterable[SimpleRequest] = None,
    ) -> Iterable[SimpleResponse]:
        # TODO(bojiang): header content_type

        if slices is None:
            slices = [i for i, _ in enumerate(result_conc)]
        if fallbacks is None:
            fallbacks = [None] * len(slices)

        responses = [None] * len(slices)

        for i, (s, f) in enumerate(zip(slices, fallbacks)):
            if s is None:
                responses[i] = f
                continue

            result = result_conc[s]
            try:
                json_output = jsonize(result)
                responses[i] = SimpleResponse(
                    200, (("Content-Type", "application/json"),), json_output
                )
            except AssertionError as e:
                responses[i] = SimpleResponse(400, None, str(e))
            except Exception as e:  # pylint: disable=broad-except
                responses[i] = SimpleResponse(500, None, str(e))
        return responses
    def handle_batch_request(self, requests: Iterable[SimpleRequest],
                             func) -> Iterable[SimpleResponse]:

        datas = [r.data for r in requests]
        content_types = [
            r.formated_headers.get('content-type', 'application/json')
            for r in requests
        ]
        # TODO: check content_type

        df_conc, slices = read_dataframes_from_json_n_csv(datas, content_types)

        result_conc = func(df_conc)
        # TODO: check length

        results = [result_conc[s] if s is not None else BadResult for s in slices]

        responses = [SimpleResponse(400, None, "bad request")] * len(requests)
        for i, result in enumerate(results):
            if result is BadResult:
                continue
            json_output = api_func_result_to_json(
                result, pandas_dataframe_orient=self.output_orient)
            responses[i] = SimpleResponse(
                200, (("Content-Type", "application/json"), ), json_output)
        return responses
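
Both DataFrame handlers above depend on read_dataframes_from_json_n_csv, which is not reproduced here. The sketch below only illustrates the contract its usage implies, assuming pandas: parse each body according to its content type, concatenate everything into one DataFrame for a single model call, and return per-request slices, with None marking a body that could not be parsed.

    import io

    import pandas as pd

    # Hedged sketch of the helper's assumed contract; not the real implementation.
    def read_dataframes_from_json_n_csv_sketch(datas, content_types):
        frames, slices, offset = [], [], 0
        for data, ctype in zip(datas, content_types):
            try:
                if ctype == "text/csv":
                    df = pd.read_csv(io.BytesIO(data))
                else:  # default: application/json
                    df = pd.read_json(io.BytesIO(data))
            except Exception:  # unparsable body -> caller falls back to BadResult
                slices.append(None)
                continue
            frames.append(df)
            slices.append(slice(offset, offset + len(df)))
            offset += len(df)
        df_conc = pd.concat(frames, ignore_index=True) if frames else pd.DataFrame()
        return df_conc, slices
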
Example #5
    def handle_batch_request(self, requests: Iterable[SimpleRequest],
                             func) -> Iterable[SimpleResponse]:
        bad_resp = SimpleResponse(400, None, "Bad Input")
        instances_list = [None] * len(requests)
        fallbacks = [bad_resp] * len(requests)
        batch_flags = [None] * len(requests)

        for i, request in enumerate(requests):
            batch_flags[i] = self.is_batch_request(request)
            try:
                raw_str = request.data
                parsed_json = json.loads(raw_str)
                instances_list[i] = parsed_json
            except (json.JSONDecodeError, UnicodeDecodeError):
                fallbacks[i] = SimpleResponse(400, None, "Not a valid json")
            except Exception:  # pylint: disable=broad-except
                import traceback

                err = traceback.format_exc()
                fallbacks[i] = SimpleResponse(500, None,
                                              f"Internal Server Error: {err}")

        merged_instances, slices = concat_list(instances_list,
                                               batch_flags=batch_flags)
        merged_result = func(merged_instances)
        return self.output_adapter.to_batch_response(merged_result, slices,
                                                     fallbacks, requests)
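
concat_list appears in most of the JSON and TensorFlow handlers but is never shown. A hedged sketch of what its call sites imply, not the real implementation: each entry of instances_list is either None (the request already failed) or a payload, batch_flags[i] says whether that payload is already a batch or a single instance that must be wrapped, and the result is the flattened batch plus one slice per request (None for failed requests) so results can be scattered back afterwards.

    # Hedged sketch inferred from usage; the real concat_list is not shown here.
    def concat_list_sketch(instances_list, batch_flags=None):
        merged, slices = [], []
        for i, instances in enumerate(instances_list):
            if instances is None:
                slices.append(None)       # request already has a fallback response
                continue
            if batch_flags is not None and not batch_flags[i]:
                instances = [instances]   # single instance -> batch of one
            start = len(merged)
            merged.extend(instances)
            slices.append(slice(start, len(merged)))
        return merged, slices
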
Example #6
    def handle_batch_request(
        self, requests: Iterable[SimpleRequest], func
    ) -> Iterable[SimpleResponse]:
        """
        TODO(hrmthw):
        1. check content type
        1. specify batch dim
        1. output str format
        """
        import tensorflow as tf

        bad_resp = SimpleResponse(b"Bad Input", None, 400)
        instances_list = [None] * len(requests)
        responses = [bad_resp] * len(requests)

        for i, request in enumerate(requests):
            try:
                raw_str = request[0]  # .decode("utf-8")
                parsed_json = json.loads(raw_str)
                instances = parsed_json.get("instances")
                if instances is not None:
                    instances = decode_b64_if_needed(instances)
                    if not isinstance(instances, (list, tuple)):
                        instances = [instances]
                    instances_list[i] = instances

                elif parsed_json.get("inputs"):
                    responses[i] = SimpleResponse(
                        "Column format 'inputs' not implemented", None, 501,
                    )

            except (json.JSONDecodeError, UnicodeDecodeError):
                import traceback

                traceback.print_exc()

        merged_instances, slices = concat_list(instances_list)

        parsed_tensor = tf.constant(merged_instances)
        merged_result = func(parsed_tensor)
        merged_result = decode_tf_if_needed(merged_result)
        assert isinstance(merged_result, (list, tuple))

        for i, s in enumerate(slices):
            if s is None:
                # request failed earlier; keep its fallback response
                continue
            result_str = api_func_result_to_json(merged_result[s])
            responses[i] = SimpleResponse(result_str, dict(), 200)

        return responses
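
decode_b64_if_needed is assumed to follow the TensorFlow Serving convention in which binary values arrive as {"b64": "<base64 string>"} objects. The real helper is not shown; a minimal sketch of that assumption:

    import base64

    # Hedged sketch: recursively replace {"b64": ...} objects with decoded bytes.
    def decode_b64_if_needed_sketch(value):
        if isinstance(value, dict) and "b64" in value:
            return base64.b64decode(value["b64"])
        if isinstance(value, (list, tuple)):
            return [decode_b64_if_needed_sketch(v) for v in value]
        return value
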
Example #7
    def handle_batch_request(
        self, requests: Iterable[SimpleRequest], func
    ) -> Iterable[SimpleResponse]:
        bad_resp = SimpleResponse(400, None, "Bad Input")
        instances_list = [None] * len(requests)
        responses = [bad_resp] * len(requests)
        batch_flags = [None] * len(requests)

        for i, request in enumerate(requests):
            batch_flags[i] = (
                request.formated_headers.get(
                    self._BATCH_REQUEST_HEADER.lower(),
                    "true" if self.config.get('is_batch_input') else "false",
                )
                == "true"
            )
            try:
                raw_str = request.data
                parsed_json = json.loads(raw_str)
                if not batch_flags[i]:
                    parsed_json = (parsed_json,)
                instances_list[i] = parsed_json
            except (json.JSONDecodeError, UnicodeDecodeError):
                responses[i] = SimpleResponse(400, None, "Not a valid json")
            except Exception:  # pylint: disable=broad-except
                import traceback

                err = traceback.format_exc()
                responses[i] = SimpleResponse(
                    500, None, f"Internal Server Error: {err}"
                )

        merged_instances, slices = concat_list(instances_list)
        merged_result = func(merged_instances)
        if not isinstance(merged_result, (list, tuple)) or len(merged_result) != len(
            merged_instances
        ):
            raise ValueError(
                "The return value with JsonHandler must be list of jsonable objects, "
                "and have same length as the inputs."
            )

        for i, s in enumerate(slices):
            if s is None:
                continue
            result = merged_result[s]
            if not batch_flags[i]:
                result = result[0]
            result_str = api_func_result_to_json(result)
            responses[i] = SimpleResponse(200, dict(), result_str)

        return responses
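
Examples #5 and #11 call self.is_batch_request(request), while Example #7 above inlines the equivalent header check. A hedged sketch of the method, assuming _BATCH_REQUEST_HEADER and self.config exist on the handler:

    # Hedged sketch mirroring the inline check in Example #7.
    def is_batch_request(self, request):
        default = "true" if self.config.get("is_batch_input") else "false"
        flag = request.formated_headers.get(self._BATCH_REQUEST_HEADER.lower(), default)
        return flag == "true"
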
Example #8
    def handle_batch_request(self, requests: Iterable[SimpleRequest],
                             func) -> Iterable[SimpleResponse]:
        """
        TODO(hrmthw):
        1. specify batch dim
        1. output str format
        """
        import tensorflow as tf

        bad_resp = SimpleResponse(400, None, "input format error")
        instances_list = [None] * len(requests)
        responses = [bad_resp] * len(requests)
        batch_flags = [None] * len(requests)

        for i, request in enumerate(requests):
            try:
                raw_str = request.data
                batch_flags[i] = (request.formated_headers.get(
                    self._BATCH_REQUEST_HEADER.lower(),
                    "true" if self.config.get("is_batch_input") else "false",
                ) == "true")
                parsed_json = json.loads(raw_str)
                instances = parsed_json.get("instances")
                if instances is not None:
                    instances = decode_b64_if_needed(instances)
                    instances_list[i] = instances

                elif parsed_json.get("inputs"):
                    responses[i] = SimpleResponse(
                        501, None, "Column format 'inputs' not implemented")

            except (json.JSONDecodeError, UnicodeDecodeError):
                pass
            except Exception:  # pylint: disable=broad-except
                import traceback

                err = traceback.format_exc()
                responses[i] = SimpleResponse(500, None,
                                              f"Internal Server Error: {err}")

        merged_instances, slices = concat_list(instances_list,
                                               batch_flags=batch_flags)

        parsed_tensor = tf.constant(merged_instances)
        merged_result = func(parsed_tensor)
        return self.output_adapter.to_batch_response(merged_result,
                                                     slices=slices,
                                                     fallbacks=responses,
                                                     requests=requests)
Example #9
    async def _batch_handler_template(self, requests, api_name):
        '''
        Batch request handler.
        params:
            * requests: list of aiohttp requests
            * api_name: name of the API being called
        raises:
            * RemoteException: known exceptions from the model server
            * Exception: other exceptions
        '''
        headers = {self._MARSHAL_FLAG: "true"}
        api_url = f"http://{self.outbound_host}:{self.outbound_port}/{api_name}"

        with async_trace(
                ZIPKIN_API_URL,
                service_name=self.__class__.__name__,
                span_name=f"[2]merged {api_name}",
        ) as trace_ctx:
            headers.update(make_http_headers(trace_ctx))
            reqs_s = DataLoader.merge_requests(requests)
            async with aiohttp.ClientSession() as client:
                async with client.post(api_url, data=reqs_s,
                                       headers=headers) as resp:
                    raw = await resp.read()
            if resp.status != 200:
                raise RemoteException(
                    f"Bad response status from model server:\n{resp.status}\n{raw}",
                    payload=SimpleResponse(resp.status, resp.headers, raw),
                )
            merged = DataLoader.split_responses(raw)
            return tuple(
                aiohttp.web.Response(
                    body=i.data, headers=i.headers, status=i.status)
                for i in merged)
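
DataLoader.merge_requests and DataLoader.split_responses define the framing between the marshal process and the model server; neither is shown here. The sketch below is purely illustrative (pickle stands in for whatever framing the real DataLoader uses): merge_requests is assumed to take objects exposing .data and .headers, and split_responses must yield objects exposing .status, .headers and .data, which is how the handler above consumes them.

    import pickle

    # Illustrative framing only; not the real DataLoader wire format.
    class DataLoaderSketch:
        @staticmethod
        def merge_requests(requests):
            return pickle.dumps([(r.data, tuple(r.headers or ())) for r in requests])

        @staticmethod
        def split_responses(raw):
            return [SimpleResponse(status, headers, body)
                    for status, headers, body in pickle.loads(raw)]
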
Example #10
    def to_batch_response(
        self,
        result_conc,
        slices=None,
        fallbacks=None,
        requests: Iterable[SimpleRequest] = None,
    ) -> Iterable[SimpleResponse]:
        # TODO(bojiang): header content_type
        result_conc = decode_tf_if_needed(result_conc)

        assert isinstance(result_conc, (list, tuple))

        if slices is None:
            slices = [i for i, _ in enumerate(result_conc)]
        if fallbacks is None:
            fallbacks = [None] * len(slices)

        responses = [None] * len(slices)

        for i, (s, f) in enumerate(zip(slices, fallbacks)):
            if s is None:
                responses[i] = f
                continue
            result = result_conc[s]
            result_str = jsonize(result)
            responses[i] = SimpleResponse(200, dict(), result_str)

        return responses
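
decode_tf_if_needed, used in Examples #6 and #10, is assumed to turn TensorFlow tensors into plain Python containers and to decode byte strings so the result can be serialized to JSON. The real helper is not shown; a duck-typed sketch of that assumption:

    # Hedged sketch; works on anything exposing .numpy() / .tolist().
    def decode_tf_if_needed_sketch(result):
        if hasattr(result, "numpy"):      # tf.Tensor / EagerTensor
            result = result.numpy()
        if hasattr(result, "tolist"):     # numpy array or scalar
            result = result.tolist()
        if isinstance(result, bytes):
            return result.decode("utf-8")
        if isinstance(result, (list, tuple)):
            return [decode_tf_if_needed_sketch(r) for r in result]
        return result
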
Example #11
    def handle_batch_request(
        self, requests: Iterable[SimpleRequest], func
    ) -> Iterable[SimpleResponse]:
        """
        TODO(bojiang):
        1. specify batch dim
        """
        import tensorflow as tf

        bad_resp = SimpleResponse(400, None, "input format error")
        instances_list = [None] * len(requests)
        responses = [bad_resp] * len(requests)
        batch_flags = [None] * len(requests)

        for i, request in enumerate(requests):
            try:
                raw_str = request.data
                batch_flags[i] = self.is_batch_request(request)
                parsed_json = json.loads(raw_str, object_hook=b64_hook)
                instances = parsed_json.get("instances")
                if instances is not None:
                    instances_list[i] = instances

                elif parsed_json.get("inputs"):
                    responses[i] = SimpleResponse(
                        501, None, "Column format 'inputs' not implemented"
                    )

            except (json.JSONDecodeError, UnicodeDecodeError):
                pass
            except Exception:  # pylint: disable=broad-except
                import traceback

                err = traceback.format_exc()
                responses[i] = SimpleResponse(
                    500, None, f"Internal Server Error: {err}"
                )
        merged_instances, slices = concat_list(instances_list, batch_flags=batch_flags)
        parsed_tensor = tf.constant(merged_instances)
        merged_result = func(parsed_tensor)
        return self.output_adapter.to_batch_response(
            merged_result, slices=slices, fallbacks=responses, requests=requests
        )
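
b64_hook is passed to json.loads as an object_hook above, so it runs on every decoded JSON object. It is assumed to unwrap TensorFlow-Serving style {"b64": "..."} objects into raw bytes; the real helper is not shown, so the sketch below is an assumption:

    import base64

    # Hedged sketch of the assumed object_hook behavior.
    def b64_hook_sketch(obj):
        if isinstance(obj, dict) and "b64" in obj:
            return base64.b64decode(obj["b64"])
        return obj
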
Example #12
    def handle_batch_request(self, requests: Iterable[SimpleRequest],
                             func) -> Iterable[SimpleResponse]:
        bad_resp = SimpleResponse(400, None, "Bad Input")
        instances_list = [None] * len(requests)
        responses = [bad_resp] * len(requests)

        for i, request in enumerate(requests):
            try:
                raw_str = request.data
                parsed_json = json.loads(raw_str)
                instances_list[i] = parsed_json
            except (json.JSONDecodeError, UnicodeDecodeError):
                responses[i] = SimpleResponse(400, None,
                                              "not a valid json input")
            except Exception:  # pylint: disable=broad-except
                responses[i] = SimpleResponse(500, None,
                                              "internal server error")
                import traceback

                traceback.print_exc()

        merged_instances, slices = concat_list(instances_list)
        merged_result = func(merged_instances)
        if (not isinstance(merged_result, (list, tuple))
                or len(merged_result) != len(merged_instances)):
            raise ValueError(
                "The return value with JsonHandler must be list of jsonable objects, "
                "and have same length as the inputs.")

        for i, s in enumerate(slices):
            if s is None:
                continue
            result_str = api_func_result_to_json(merged_result[s])
            responses[i] = SimpleResponse(200, dict(), result_str)

        return responses
    def to_batch_response(
        self,
        result_conc,
        slices=None,
        fallbacks=None,
        requests: Iterable[SimpleRequest] = None,
    ) -> Iterable[SimpleResponse]:
        # TODO(bojiang): header content_type
        results = tf_to_numpy(result_conc)
        assert isinstance(results, np.ndarray)
        if slices is None:
            slices = [i for i in range(results.shape[0])]
        if fallbacks is None:
            fallbacks = itertools.repeat(None)
        responses = [None] * len(slices)

        for i, (s, f) in enumerate(zip(slices, fallbacks)):
            if s is None:
                responses[i] = f
                continue
            result = results[s]
            result_str = json.dumps(result, cls=TfTensorJsonEncoder)
            responses[i] = SimpleResponse(200, dict(), result_str)
        return responses
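
TfTensorJsonEncoder, used with json.dumps above, is assumed to make numpy arrays, numpy scalars, and byte strings JSON-serializable. The real encoder is not shown; a minimal sketch of that assumption:

    import json

    import numpy as np

    # Hedged sketch of the assumed encoder behavior.
    class TfTensorJsonEncoderSketch(json.JSONEncoder):
        def default(self, o):
            if isinstance(o, np.ndarray):
                return o.tolist()
            if isinstance(o, np.generic):
                return o.item()
            if isinstance(o, bytes):
                return o.decode("utf-8")
            return super().default(o)
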