Example #1
0
 def from_http_request(self, req: HTTPRequest) -> MultiImgTask:
     """Build a MultiImgTask from an incoming HTTP request.

     With a single declared input, parsing is lenient: any uploaded
     multipart file is accepted regardless of its field name, and a
     non-multipart request body is treated as a raw image payload.
     With multiple declared inputs, only multipart/form-data is
     accepted and files are matched to ``self.input_names`` by field
     name.  Requests that cannot be matched are returned as a
     discarded task carrying HTTP status 400.

     :param req: the raw HTTP request to parse.
     :return: an InferenceTask holding a tuple of file-like objects,
         or a discarded task on invalid input.
     """
     if len(self.input_names) == 1:
         # Lenient parsing when only one input is declared: take any
         # uploaded file, whatever the form field is called.
         if req.parsed_headers.content_type == 'multipart/form-data':
             _, _, files = HTTPRequest.parse_form_data(req)
             if not any(files):
                 task = InferenceTask(data=None)
                 task.discard(
                     http_status=400,
                     # Fixed: trailing space added so the message does not
                     # render as "requires inputsfields ..." (matches the
                     # wording used by the multi-input branches below).
                     err_msg=
                     f"BentoML#{self.__class__.__name__} requires inputs "
                     f"fields {self.input_names}",
                 )
             else:
                 f = next(iter(files.values()))
                 task = InferenceTask(
                     context=InferenceContext(
                         http_headers=req.parsed_headers),
                     data=(f, ),
                 )
         else:
             # For image/* payloads posted directly as the request body.
             task = InferenceTask(
                 context=InferenceContext(http_headers=req.parsed_headers),
                 data=(io.BytesIO(req.body), ),
             )
     elif req.parsed_headers.content_type == 'multipart/form-data':
         _, _, files = HTTPRequest.parse_form_data(req)
         # Align uploaded files with the declared input order; fields
         # missing from the form become None.
         files = tuple(files.get(k) for k in self.input_names)
         if not any(files):
             task = InferenceTask(data=None)
             task.discard(
                 http_status=400,
                 err_msg=f"BentoML#{self.__class__.__name__} requires inputs "
                 f"fields {self.input_names}",
             )
         elif not all(files) and not self.allow_none:
             task = InferenceTask(data=None)
             task.discard(
                 http_status=400,
                 err_msg=f"BentoML#{self.__class__.__name__} requires inputs "
                 f"fields {self.input_names}",
             )
         else:
             task = InferenceTask(
                 context=InferenceContext(http_headers=req.parsed_headers),
                 data=files,
             )
     else:
         task = InferenceTask(data=None)
         task.discard(
             http_status=400,
             err_msg=
             f"BentoML#{self.__class__.__name__} with multiple inputs "
             "accepts requests with Content-Type: multipart/form-data only",
         )
     return task
Example #2
0
 def from_http_request(self, req: HTTPRequest) -> MultiFileTask:
     """Turn an HTTP request into a MultiFileTask.

     Only multipart/form-data requests are accepted.  Uploaded files
     are matched to ``self.input_names`` by form-field name; requests
     with no matching files, or with missing files when ``allow_none``
     is off, are discarded with HTTP status 400.
     """
     if req.headers.content_type != 'multipart/form-data':
         rejected = InferenceTask(data=None)
         rejected.discard(
             http_status=400,
             err_msg=
             f"BentoML#{self.__class__.__name__} only accepts requests "
             "with Content-Type: multipart/form-data",
         )
         return rejected

     _, _, uploads = HTTPRequest.parse_form_data(req)
     # Order files by the declared input names; absent fields are None.
     ordered = tuple(uploads.get(name) for name in self.input_names)

     nothing_uploaded = not any(ordered)
     partially_uploaded = not all(ordered) and not self.allow_none
     if nothing_uploaded or partially_uploaded:
         rejected = InferenceTask(data=None)
         rejected.discard(
             http_status=400,
             err_msg=f"BentoML#{self.__class__.__name__} requires inputs "
             f"fields {self.input_names}",
         )
         return rejected

     return InferenceTask(
         http_headers=req.headers,
         data=ordered,
     )
Example #3
0
 def from_http_request(self, req: HTTPRequest) -> InferenceTask[BinaryIO]:
     """Build a single-file InferenceTask from an HTTP request.

     A multipart/form-data request must carry exactly one file, which
     becomes the task data.  A non-multipart request with a non-empty
     body is wrapped in a BytesIO instead.  Anything else is discarded
     with HTTP status 400.

     :param req: the raw HTTP request to parse.
     :return: an InferenceTask holding a binary file-like object, or a
         discarded task on invalid input.
     """
     if req.parsed_headers.content_type == 'multipart/form-data':
         _, _, files = HTTPRequest.parse_form_data(req)
         if len(files) != 1:
             task = InferenceTask(data=None)
             task.discard(
                 http_status=400,
                 # Fixed garbled wording: "one and at least one" -> the
                 # intended "one and only one" (the check is len != 1).
                 err_msg=
                 f"BentoML#{self.__class__.__name__} requires one and"
                 " only one file at a time, if you just upgraded from"
                 " bentoml 0.7, you may need to use MultiFileAdapter instead",
             )
         else:
             input_file = next(iter(files.values()))
             task = InferenceTask(
                 context=InferenceContext(http_headers=req.parsed_headers),
                 data=input_file,
             )
     elif req.body:
         # Raw (non-multipart) upload: the whole body is the file.
         task = InferenceTask(
             context=InferenceContext(http_headers=req.parsed_headers),
             data=io.BytesIO(req.body),
         )
     else:
         task = InferenceTask(data=None)
         task.discard(
             http_status=400,
             err_msg=
             f'BentoML#{self.__class__.__name__} unexpected HTTP request'
             ' format',
         )
     return task
Example #4
0
 def from_http_request(self, req: HTTPRequest) -> InferenceTask[str]:
     """Build a text InferenceTask from an HTTP request.

     A multipart/form-data request must carry exactly one text file,
     whose encoding is sniffed with chardet; otherwise the raw body is
     decoded using the request's declared charset.  Both paths fall
     back to UTF-8 when no encoding is available.  Decode failures and
     unknown charsets are discarded with HTTP status 400.

     :param req: the raw HTTP request to parse.
     :return: an InferenceTask holding the decoded string, or a
         discarded task on invalid input.
     """
     if req.headers.content_type == 'multipart/form-data':
         _, _, files = HTTPRequest.parse_form_data(req)
         if len(files) != 1:
             return InferenceTask().discard(
                 http_status=400,
                 err_msg=
                 f"BentoML#{self.__class__.__name__} accepts one text file "
                 "at a time",
             )
         input_file = next(iter(files.values()))
         bytes_ = input_file.read()
         # chardet may return None for undecidable input; default to utf-8.
         charset = chardet.detect(bytes_)['encoding'] or "utf-8"
     else:
         bytes_ = req.body
         charset = req.headers.charset or "utf-8"
     try:
         return InferenceTask(
             http_headers=req.headers,
             data=bytes_.decode(charset),
         )
     except UnicodeDecodeError:
         return InferenceTask().discard(
             http_status=400,
             err_msg=
             f"{self.__class__.__name__}: UnicodeDecodeError for {req.body}",
         )
     except LookupError:
         # Fixed: charset lives on req.headers (see the non-multipart
         # branch above); `req.charset` would raise AttributeError here
         # and mask the intended 400 response.
         return InferenceTask().discard(
             http_status=400,
             err_msg=
             f"{self.__class__.__name__}: "
             f"Unsupported charset {req.headers.charset}",
         )