Beispiel #1
0
    def postprocess(self, response: Response) -> Response:
        """Decode the raw tag sequences in ``response.data`` into offset results.

        For every (text, tags) pair the BILUO tag sequence is converted to an
        offset-based ``Sequence``; an undecodable sequence yields an empty
        ``Sequence`` flagged as failed so one bad item cannot abort the batch.
        """
        from tokenizer_tools.tagset.exceptions import TagSetDecodeError
        from tokenizer_tools.tagset.offset.sequence import Sequence

        results = []

        for text, raw_tags in zip(self.request_query, response.data):
            # normalize: the backend may hand back bytes instead of str
            decoded_tags = [
                tag.decode() if isinstance(tag, bytes) else tag
                for tag in raw_tags
            ]

            failed = False
            message = None
            try:
                # BILUO tag sequence -> offset annotation
                offset_seq = self.decoder.to_offset(decoded_tags, text)
            except TagSetDecodeError as err:
                # an invalid tag sequence raises; substitute an empty
                # result so the rest of the batch still succeeds
                offset_seq = Sequence(text)
                failed = True
                message = str(err)

            results.append(PredictResult(offset_seq, failed, message))

        response.update_data(results)

        return response
    def postprocess(self, response: Response) -> Response:
        """Map integer tag ids in ``response.data`` back to tag strings.

        Looks up every id of every sequence through the "tag" lookup table's
        ``inverse_lookup`` and stores the resulting string sequences back on
        the response.
        """
        tag_lookup_table = self.lookup_table_registry["tag"]

        # one string sequence per input id sequence (was a manual append loop)
        data_str_list = [
            [tag_lookup_table.inverse_lookup(i) for i in data_int]
            for data_int in response.data
        ]

        response.update_data(data_str_list)

        return response
Beispiel #3
0
 def __call__(self, response) -> Response:
     """Split a joint-model output into its NER and CLS parts.

     ``response`` is a two-element sequence: item 0 holds the NER tag
     tensor, item 1 the classification logits.  The argmax over the logits
     is wrapped as ``[[argmax]]`` under the ``"cls"`` key, while the NER
     tags become the response data.
     """
     ner_part = response[0].tolist()
     cls_label = np.argmax(response[1])

     result = Response([])
     # double-wrapped to keep the downstream batch structure
     result["cls"] = [[cls_label]]
     result.data = ner_part
     return result
def test_serving(datadir):
    """End-to-end check of the processor pipeline: a LookupProcessor wired
    into both the pre and post stages via the metadata description."""
    metadata = {
        "instance": {
            "LookupProcessor_0": {
                "class":
                "deliverable_model.builtin.processor.lookup_processor.LookupProcessor",
                "parameter": {
                    "lookup_table": ["vocabulary", "tag"],
                    "padding_parameter": {},
                },
            }
        },
        "pipeline": {
            "pre": ["LookupProcessor_0"],
            "post": ["LookupProcessor_0"]
        },
    }
    processor = Processor.load(datadir, metadata)

    processor.instance_processor()

    # preprocess: characters -> vocabulary ids
    processed_request = processor.call_preprocessor(Request(["abc", "cba"]))

    assert np.all(processed_request.query == [[1, 2, 3], [3, 2, 1]])

    # postprocess: tag ids -> tag strings
    processed_response = processor.call_postprocessor(
        Response([[1, 2, 3], [3, 2, 1]])
    )

    assert np.all(
        processed_response.data
        == [["tag-a", "tag-b", "tag-c"], ["tag-c", "tag-b", "tag-a"]]
    )
Beispiel #5
0
    def _make_response(self, response: Any) -> Response:
        """Build a ``Response`` from the serving result's "tags" output.

        The tensor proto is turned into a numpy array, every entry is
        decoded from bytes to str, and the nested Python list becomes the
        response payload.
        """
        tags_array = tf.make_ndarray(response.outputs["tags"])
        # elementwise bytes -> str decode across the whole array
        decode_all = np.vectorize(lambda b: b.decode())
        return Response(decode_all(tags_array).tolist())
    def inference(self, request: Request, batch_size=None) -> Response:
        """Run inference, optionally splitting the request into mini batches.

        With a falsy ``batch_size`` the request is forwarded unchanged;
        otherwise it is cut into ``batch_size``-sized sub-requests whose
        responses are merged back into a single ``Response``.
        """
        if not batch_size:
            # no mini-batching requested
            return self._do_inference(request)

        batcher = BatchingIterator(batch_size)

        partial_responses = [
            self._do_inference(Request.from_dict(chunk))
            for chunk in batcher(request)
        ]

        merged = merge_dict_list(*partial_responses)

        return Response.from_dict(merged)
Beispiel #7
0
def test_serving(datadir, tmpdir):
    """Check the LookupProcessor round trip: text -> ids and ids -> tags."""
    parameter = {
        "lookup_table": ["vocabulary", "tag"],
        "padding_parameter": {}
    }

    processor = LookupProcessor.load(parameter, datadir)

    # preprocess maps characters to vocabulary ids
    preprocessed = processor.preprocess(Request(["abc", "cba"]))

    assert np.all(preprocessed.query == [[1, 2, 3], [3, 2, 1]])

    # postprocess maps tag ids back to tag strings
    postprocessed = processor.postprocess(Response([[1, 2, 3], [3, 2, 1]]))

    assert np.all(
        postprocessed.data
        == [["tag-a", "tag-b", "tag-c"], ["tag-c", "tag-b", "tag-a"]]
    )
 def inference(self, request):
     """Fake inference: turn every id ``i`` of every query row into "tag-{i}"."""
     tagged = []
     for row in request.query:
         tagged.append(["tag-{}".format(i) for i in row])
     return Response(tagged)
    def inference(self, *args, **kwargs) -> Response:
        """Run the predictor over the first positional argument's query.

        NOTE(review): extra positional/keyword arguments are accepted but
        ignored — presumably kept for interface compatibility; confirm
        against callers.
        """
        request = args[0]
        return Response(self.predictor_func(request.query))
Beispiel #10
0
def converter_for_response(response: Any) -> Response:
    """Wrap the "tags" entry of a raw backend result in a ``Response``."""
    from deliverable_model.response import Response

    tags = response["tags"]
    return Response(tags)
Beispiel #11
0
def simple_converter_for_response(response: Any) -> "Response":
    """Wrap an arbitrary backend result in a ``Response`` unchanged."""
    from deliverable_model.response import Response

    wrapped = Response(response)
    return wrapped
    def call(self, response: Any) -> Response:
        """Extract the "crf" output tensor and return it as a ``Response``."""
        crf_proto = response.outputs["crf"]
        # tensor proto -> numpy -> nested Python lists
        return Response(tf.make_ndarray(crf_proto).tolist())
Beispiel #13
0
    def parse(self, request: Request) -> Response:
        """Run the predictor function over the request query and wrap the
        result in a ``Response``."""
        prediction = self.predictor_func(request.query)
        return Response(prediction)
 def call(self, response: Any) -> Response:
     """Pass-through converter: wrap the raw response in a ``Response``."""
     wrapped = Response(response)
     return wrapped