Beispiel #1
0
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        return MimeType(type='',
                        subtype='',
                        suffix='',
                        parameters=MultiDictProxy(MultiDict()))

    parts = mimetype.split(';')
    params = MultiDict()  # type: MultiDict[str]
    for item in parts[1:]:
        if not item:
            continue
        # str.partition always yields a (key, sep, value) triple, so the
        # conditional split plus cast() of the original is unnecessary;
        # a missing '=' simply gives an empty value.
        key, _, value = item.partition('=')
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = parts[0].strip().lower()
    if fulltype == '*':
        fulltype = '*/*'

    # Split "type/subtype+suffix"; partition returns '' for missing parts,
    # matching the original's explicit (x, '') fallbacks.
    mtype, _, stype = fulltype.partition('/')
    stype, _, suffix = stype.partition('+')

    return MimeType(type=mtype,
                    subtype=stype,
                    suffix=suffix,
                    parameters=MultiDictProxy(params))
class MultiDictMatcher(AttributeMatcher):
    """Matches a multidict against a set of expected key/value matchers."""

    def __init__(self, expected: DictOrTupleList) -> None:
        # Freeze the expectation into an immutable multidict view.
        self._expected = MultiDictProxy(MultiDict(expected))

    def match(self, actual: Union[MultiDict, MultiDictProxy]) -> bool:
        # Every expected entry must match the full list of values stored
        # under its key; plain values are wrapped in a ContainMatcher.
        def as_matcher(candidate):
            if isinstance(candidate, AttributeMatcher):
                return candidate
            return ContainMatcher(candidate)

        return all(
            as_matcher(val).match(actual.getall(key, []))
            for key, val in self._expected.items()
        )
Beispiel #3
0
    def query(self):
        """A MultiDictProxy representing parsed query parameters in decoded
        representation.

        Empty value if URL has no query part.

        """
        pairs = parse_qsl(self.raw_query_string, keep_blank_values=True)
        return MultiDictProxy(MultiDict(pairs))
 async def test_non_empty_query_result_raises_notfound(self):
     """Test that 404 is raised with empty query result."""
     # NOTE(review): despite the method name, _format_read_data is patched
     # to return an empty list — the 404 comes from the empty *result*,
     # not the empty query; consider renaming the test.
     operator = Operator(self.client)
     operator.db_service.query = MagicMock()
     # Empty query: no URL parameters supplied.
     query = MultiDictProxy(MultiDict([]))
     with patch(
         "metadata_backend.api.operators.Operator._format_read_data",
         return_value=[],
     ):
         with self.assertRaises(HTTPNotFound):
             await operator.query_metadata_database("study", query, 1, 10, [])
Beispiel #5
0
 def __init__(
     self,
     stream_id: int,
     headers: List[Tuple[str, str]],
     trailers: asyncio.Future,
     *,
     loop=None,
 ) -> None:
     """Wrap a response stream's id, headers and pending trailers future."""
     self._stream_id = stream_id
     # Immutable multidict view over the raw header pairs
     # (duplicate header names are preserved).
     self._headers = MultiDictProxy(MultiDict(headers))
     self._trailers = trailers
     # Status code comes from the ':status' pseudo-header
     # (HTTP/2-style header list — TODO confirm against caller).
     self._status = int(self._headers[':status'])
Beispiel #6
0
def parse_mimetype(mimetype: str) -> MimeType:
    """Split a MIME type string into its type, subtype, suffix and
    parameters.

    Returns a MimeType object; an empty input yields a MimeType whose
    fields are all empty.

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})
    """
    params: MultiDict[str] = MultiDict()

    if not mimetype:
        return MimeType(type="", subtype="", suffix="",
                        parameters=MultiDictProxy(params))

    # First segment is the full type; the rest are key=value parameters.
    head, *raw_params = mimetype.split(";")
    for raw in raw_params:
        if not raw:
            continue
        name, _, raw_value = raw.partition("=")
        params.add(name.lower().strip(), raw_value.strip(' "'))

    fulltype = head.strip().lower()
    if fulltype == "*":
        fulltype = "*/*"

    mtype, _, remainder = fulltype.partition("/")
    stype, _, suffix = remainder.partition("+")

    return MimeType(type=mtype, subtype=stype, suffix=suffix,
                    parameters=MultiDictProxy(params))
Beispiel #7
0
async def process_request_posting(bot: Bot,
                                  data: MultiDictProxy) -> web.Response:
    """Validate incoming posting parameters and hand them to the bot."""
    await log_info(
        bot, f"Получен запрос. параметры {quote_html(str(data.items()))}")

    text: typing.Optional[str] = data.get('text', None)
    photo_url: typing.Optional[str] = data.get('photo_url', None)

    # Guard clauses: at least one of text/photo_url must be present,
    # and text must fit the size limit.
    if text is None and photo_url is None:
        return web.Response(
            text="Sorry you don't send params text and/or photo_url",
            status=400)
    if text is not None and len(text) > TEXT_MAX_SIZE:
        return web.Response(
            text="Sorry your text len to long (more than 4095)", status=400)

    return await try_send_content(bot,
                                  text=text,
                                  photo_url=photo_url,
                                  parse_mode=data.get('parse_mode', None),
                                  button_text=data.get('button_text', None),
                                  button_url=data.get('button_url', None))
Beispiel #8
0
def _md_to_rowdict(input: MultiDictProxy, ignore=tuple()):
    ret = defaultdict(list)
    max_rows = 0
    for key, value in input.items():
        key = key.replace('transaction_', '')
        if key not in ignore:
            ret[key].append(value)
    for key, l in ret.items():
        max_rows = max(max_rows, len(l))
    ret = dict(filter(lambda l: len(l[1]) == max_rows, ret.items()))
    ret = [{key: ret[key][i]
            for key in ret.keys()} for i in range(len(list(ret.values())[0]))]
    return ret
Beispiel #9
0
async def io(req: Request):
    """Merge query-string and urlencoded-body parameters and print them.

    Keys shaped like ``auth[param]`` are renamed to ``auth_param`` before
    the merged, immutable mapping is printed.  Always responds 200 'OK'.
    """
    if req.content_type == 'application/x-www-form-urlencoded':
        charset = req.charset or 'utf-8'
        out = MultiDict(req.query)

        bytes_body = await req.read()
        if bytes_body:
            out.extend(
                parse_qsl(bytes_body.rstrip().decode(charset),
                          keep_blank_values=True,
                          encoding=charset))

        # Make immutable dict with auth[param] keys like auth_param.
        # Fixed: replaced the fragile `cond and a or b` idiom with an
        # explicit conditional expression (equivalent here, since the
        # renamed key always starts with 'auth_' and is therefore truthy).
        res = MultiDictProxy(
            MultiDict((
                'auth_' + k[5:-1]
                if k.startswith('auth[') and k.endswith(']') else k,
                v,
            ) for k, v in out.items()))

        print('\n'.join('{}: {}'.format(k, v) for k, v in res.items()))

    return Response(status=200, text='OK')
 async def test_working_query_params_are_passed_to_db_query(self):
     """Test that database is called with correct query."""
     operator = Operator(self.client)
     # One fake study document plus a zero-total count result; the two
     # aggregate calls (data page, then count) consume these in order.
     study_test = [
         {
             "publishDate": datetime.datetime(2020, 6, 14, 0, 0),
             "accessionId": "EDAG3945644754983408",
             "dateCreated": datetime.datetime(2020, 6, 14, 0, 0),
             "dateModified": datetime.datetime(2020, 6, 14, 0, 0),
         }
     ]
     study_total = [{"total": 0}]
     operator.db_service.do_aggregate.side_effect = [study_test, study_total]
     # 'studyAttributes' is a mapped query parameter, so it must be
     # expanded into an $or regex match over its .tag and .value fields.
     query = MultiDictProxy(MultiDict([("studyAttributes", "foo")]))
     await operator.query_metadata_database("study", query, 1, 10, [])
     calls = [
         call(
             "study",
             [
                 {
                     "$match": {
                         "$or": [
                             {"studyAttributes.tag": re.compile(".*foo.*", re.IGNORECASE)},
                             {"studyAttributes.value": re.compile(".*foo.*", re.IGNORECASE)},
                         ]
                     }
                 },
                 {"$redact": {"$cond": {"if": {}, "then": "$$DESCEND", "else": "$$PRUNE"}}},
                 {"$skip": 0},
                 {"$limit": 10},
                 {"$project": {"_id": 0}},
             ],
         ),
         call(
             "study",
             [
                 {
                     "$match": {
                         "$or": [
                             {"studyAttributes.tag": re.compile(".*foo.*", re.IGNORECASE)},
                             {"studyAttributes.value": re.compile(".*foo.*", re.IGNORECASE)},
                         ]
                     }
                 },
                 {"$redact": {"$cond": {"if": {}, "then": "$$DESCEND", "else": "$$PRUNE"}}},
                 {"$count": "total"},
             ],
         ),
     ]
     operator.db_service.do_aggregate.assert_has_calls(calls, any_order=True)
Beispiel #11
0
    def links(self) -> 'MultiDictProxy[MultiDictProxy[Union[str, URL]]]':
        """Parse the response's ``Link`` headers into a nested multidict.

        Each link is keyed by its 'rel' parameter (falling back to the raw
        target URL) and maps to an immutable multidict of the link's
        parameters plus a 'url' entry resolved against the response URL.
        Returns an empty mapping when no Link header is present.
        """
        links_str = ", ".join(self.headers.getall("link", []))

        if not links_str:
            return MultiDictProxy(MultiDict())

        links = MultiDict()  # type: MultiDict[MultiDictProxy[Union[str, URL]]]

        # Split only on commas that start a new "<url>..." entry, so commas
        # inside parameter values are left untouched.
        for val in re.split(r",(?=\s*<)", links_str):
            match = re.match(r"\s*<(.*)>(.*)", val)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            url, params_str = match.groups()
            # Drop the leading empty segment before the first ';'.
            params = params_str.split(";")[1:]

            link = MultiDict()  # type: MultiDict[Union[str, URL]]

            for param in params:
                # "key = value" with optional single/double quoting.
                match = re.match(
                    r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$",
                    param, re.M
                )
                if match is None:  # pragma: no cover
                    # the check exists to suppress mypy error
                    continue
                key, _, value, _ = match.groups()

                link.add(key, value)

            key = link.get("rel", url)  # type: ignore

            link.add("url", self.url.join(URL(url)))

            links.add(key, MultiDictProxy(link))

        return MultiDictProxy(links)
Beispiel #12
0
    async def _execute_action(self, schema: str, content: str, db_client: AsyncIOMotorClient, action: str) -> Dict:
        """Complete the command in the action set of the submission file.

        Only "add/modify/validate" actions are supported.

        :param schema: Schema type of the object in question
        :param content: Metadata object referred to in submission
        :param db_client: Database client for database operations
        :param action: Type of action to be done
        :raises: HTTPBadRequest if an incorrect or non-supported action is called
        :returns: Dict containing specific action that was completed
        """
        if action == "add":
            # Create a new metadata object and report its accession id.
            result = {
                "accessionId": await XMLOperator(db_client).create_metadata_object(schema, content),
                "schema": schema,
            }
            LOG.debug(f"added some content in {schema} ...")
            return result

        elif action == "modify":
            data_as_json = XMLToJSONParser().parse(schema, content)
            if "accessionId" in data_as_json:
                accession_id = data_as_json["accessionId"]
            else:
                # No accession id in the XML: resolve the object by alias.
                # More than one hit means the alias is ambiguous — reject.
                alias = data_as_json["alias"]
                query = MultiDictProxy(MultiDict([("alias", alias)]))
                data, _, _, _ = await Operator(db_client).query_metadata_database(schema, query, 1, 1, [])
                if len(data) > 1:
                    reason = "Alias in provided XML file corresponds with more than one existing metadata object."
                    LOG.error(reason)
                    raise web.HTTPBadRequest(reason=reason)
                accession_id = data[0]["accessionId"]
            # The id is passed separately below; drop it from the payload.
            data_as_json.pop("accessionId", None)
            result = {
                "accessionId": await Operator(db_client).update_metadata_object(schema, accession_id, data_as_json),
                "schema": schema,
            }
            LOG.debug(f"modified some content in {schema} ...")
            return result

        elif action == "validate":
            # Validation result is returned directly as parsed JSON.
            validator = await self._perform_validation(schema, content)
            return json.loads(validator.resp_body)

        else:
            reason = f"Action {action} in xml is not supported."
            LOG.error(reason)
            raise web.HTTPBadRequest(reason=reason)
Beispiel #13
0
 def scan_dict(self, post_dict: multidict.MultiDictProxy):
     """Scan each sufficiently long string field of a form-data dict for
     spam and return the list of findings."""
     findings = []
     for field, content in post_dict.items():
         # Skip empty, non-string, or too-short values.
         if not (content and isinstance(content, str)):
             continue
         if len(content) < MINIMUM_SCAN_LENGTH:
             continue
         raw = bytes(content, encoding="utf-8")
         findings.extend(self.scan_simple(f"formdata::{field}", raw))
         # Use the naïve scanner as well?
         if self.enable_naive:
             score = self.spamfilter.scan_text(content)
             if score >= self.naive_threshold:
                 findings.append(
                     f"Form element {field} has spam score of {score}, crosses threshold of {self.naive_threshold}!"
                 )
     return findings
 async def test_non_working_query_params_are_not_passed_to_db_query(self):
     """Test that database with empty query, when url params are wrong."""
     operator = Operator(self.client)
     # Fake data page and count results consumed by the two aggregates.
     study_test = [
         {
             "publishDate": datetime.datetime(2020, 6, 14, 0, 0),
             "accessionId": "EDAG3945644754983408",
             "dateCreated": datetime.datetime(2020, 6, 14, 0, 0),
             "dateModified": datetime.datetime(2020, 6, 14, 0, 0),
         }
     ]
     study_total = [{"total": 0}]
     operator.db_service.do_aggregate.side_effect = [study_test, study_total]
     # 'swag' is not a mapped query parameter, so $match must stay empty.
     query = MultiDictProxy(MultiDict([("swag", "littinen")]))
     with patch(
         "metadata_backend.api.operators.Operator._format_read_data",
         return_value=study_test,
     ):
         await operator.query_metadata_database("study", query, 1, 10, [])
     calls = [
         call(
             "study",
             [
                 {"$match": {}},
                 {"$redact": {"$cond": {"if": {}, "then": "$$DESCEND", "else": "$$PRUNE"}}},
                 {"$skip": 0},
                 {"$limit": 10},
                 {"$project": {"_id": 0}},
             ],
         ),
         call(
             "study",
             [
                 {"$match": {}},
                 {"$redact": {"$cond": {"if": {}, "then": "$$DESCEND", "else": "$$PRUNE"}}},
                 {"$count": "total"},
             ],
         ),
     ]
     operator.db_service.do_aggregate.assert_has_calls(calls, any_order=True)
     self.assertEqual(operator.db_service.do_aggregate.call_count, 2)
def parse_query_string(request, allow_none=False):
    """We are not using self.request.form (parsed by Zope Publisher)!!
    There is special meaning for colon(:) in key field. For example `field_name:list`
    treats data as List and it doesn't recognize FHIR search modifier like :not, :missing
    as a result, from colon(:) all chars are omitted.

    Another important reason, FHIR search supports duplicate keys (different values) in query string.

    Build Duplicate Key Query String ::
        >>> import requests
        >>> params = {'patient': 'P001', 'lastUpdated': ['2018-01-01', 'lt2018-09-10']}
        >>> requests.get(url, params=params)
        >>> REQUEST['QUERY_STRING']
        'patient=P001&lastUpdated=2018-01-01&lastUpdated=lt2018-09-10'

        >>> from six.moves.urllib.parse import urlencode
        >>> params = [('patient', 'P001'), ('lastUpdated', '2018-01-01'), ('lastUpdated', 'lt2018-09-10')]
        >>> urlencode(params)
        'patient=P001&lastUpdated=2018-01-01&lastUpdated=lt2018-09-10'


    param:request
    param:allow_none
    """
    query_string = request.get("QUERY_STRING", "")
    params = MultiDict()

    for q in query_string.split("&"):
        # Fixed: the original used q.split("=") and took parts[1], which
        # silently truncated any value containing '=' (e.g. base64
        # padding).  partition keeps the full remainder after the first '='.
        name, sep, raw_value = q.partition("=")
        param_name = unquote_plus(name)
        if not sep:
            # Bare token with no '=' at all: only kept when allow_none.
            if not allow_none:
                continue
            value = None
        else:
            # 'key=' yields None (matching the original behavior).
            value = unquote_plus(raw_value) if raw_value else None

        params.add(param_name, value)

    return MultiDictProxy(params)
Beispiel #16
0
    def post(self):
        """Return POST parameters.

        Lazily parses the request body as form data on first call and
        caches the resulting immutable multidict; bodyless methods and
        unsupported content types yield an empty mapping.
        """
        if self._post is not None:
            return self._post
        if self.method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if (content_type not in ('', 'application/x-www-form-urlencoded',
                                 'multipart/form-data')):
            # Unsupported content type: nothing to parse.
            self._post = MultiDictProxy(MultiDict())
            return self._post

        body = yield from self.read()
        content_charset = self.charset or 'utf-8'

        # Minimal CGI environment so FieldStorage can parse the body.
        environ = {
            'REQUEST_METHOD': self.method,
            'CONTENT_LENGTH': str(len(body)),
            'QUERY_STRING': '',
            'CONTENT_TYPE': self.headers.get(hdrs.CONTENT_TYPE)
        }

        # NOTE(review): the cgi module is deprecated since Python 3.11 and
        # removed in 3.13 — confirm the targeted interpreter versions.
        fs = cgi.FieldStorage(fp=io.BytesIO(body),
                              environ=environ,
                              keep_blank_values=True,
                              encoding=content_charset)

        supported_transfer_encoding = {
            'base64': binascii.a2b_base64,
            'quoted-printable': binascii.a2b_qp
        }

        out = MultiDict()
        _count = 1
        for field in fs.list or ():
            transfer_encoding = field.headers.get(
                hdrs.CONTENT_TRANSFER_ENCODING, None)
            if field.filename:
                # File upload: wrap in FileField; the field is also cached,
                # presumably to keep the underlying file object alive (see
                # the "file closed error" note) — confirm.
                ff = FileField(
                    field.name,
                    field.filename,
                    field.file,  # N.B. file closed error
                    field.type)
                if self._post_files_cache is None:
                    self._post_files_cache = {}
                self._post_files_cache[field.name + str(_count)] = field
                _count += 1
                out.add(field.name, ff)
            else:
                value = field.value
                if transfer_encoding in supported_transfer_encoding:
                    # binascii accepts bytes
                    value = value.encode('utf-8')
                    value = supported_transfer_encoding[transfer_encoding](
                        value)
                out.add(field.name, value)

        self._post = MultiDictProxy(out)
        return self._post
 def __init__(self, expected: DictOrTupleList) -> None:
     """Store the expected key/value pairs as an immutable multidict."""
     self._expected = MultiDictProxy(MultiDict(expected))
Beispiel #18
0
 def _mock_request_func(query_params=None):
     """Build a mocked request whose .query holds *query_params*.

     :param query_params: mapping of query parameters; defaults to
         {'companies': 'REA,CBA'} when omitted.
     """
     # Fixed: a dict literal as a default argument is shared between
     # calls (mutable-default pitfall); use None as a sentinel and build
     # the default fresh on every call.
     if query_params is None:
         query_params = {'companies': 'REA,CBA'}
     request = request_class.return_value
     request.app = app
     request.query = MultiDictProxy(MultiDict(query_params))
     return request
Beispiel #19
0
 def query(self):
     """Parsed query-string parameters as an immutable multidict."""
     pairs = parse_qsl(self.parsed_url.query, keep_blank_values=True)
     return MultiDictProxy(MultiDict(pairs))
Beispiel #20
0
 def receive_trailers(self, trailers: List[Tuple[str, str]]):
     """Resolve the pending trailers future with an immutable multidict
     built from the received trailer pairs."""
     proxy = MultiDictProxy(MultiDict(trailers))
     self._response_trailers.set_result(proxy)
Beispiel #21
0
 def receive_promise(self, headers: List[Tuple[str, str]]):
     # Store the promised request's headers as an immutable multidict.
     self._request_headers = MultiDictProxy(MultiDict(headers))
Beispiel #22
0
    async def post(self) -> Response:
        """Create a user from a JSON or urlencoded form and respond.

        Accepts 'application/json' or 'application/x-www-form-urlencoded'
        bodies (400 otherwise).  On success either redirects (303) to the
        form's 'next' URL or answers with JSON/plain text depending on the
        Accept header; a cookie carrying the user's id is set either way.
        """
        request = self.request
        ctype = request.headers.get('content-type')

        logger.debug('Request Content-Type: %s', ctype)

        form: MultiDictProxy

        if ctype == 'application/json':
            try:
                data: Any = await request.json()
                if not isinstance(data, dict):
                    raise ValueError('Invalid request type')
            except ValueError as e:
                logger.warning('Invalid request: %s', e)
                raise HTTPBadRequest(reason='Invalid request') from e
            else:
                # Normalize the JSON body to the same multidict shape
                # that a urlencoded form produces.
                form = MultiDictProxy(MultiDict(cast(Dict, data)))

        elif ctype == 'application/x-www-form-urlencoded':
            form = await self.request.post()

        else:
            raise HTTPBadRequest(reason='Invalid content type')

        logger.debug('Form is: %s', form)

        user = await self.request.app['store'].users.create(
            **{
                'username': form.get('username'),
                'password': form.get('password'),
                'given_name': form.get('given_name'),
                'patronymic': form.get('patronymic'),
                'family_name': form.get('family_name')
            })
        await self.request.app['store'].users.put(user)

        if 'next' in form:
            # Browser flow: bounce back to the caller with the state param.
            response = Response(status=303,
                                reason='See Other',
                                charset='utf-8',
                                headers={
                                    'Location':
                                    '{}?{}'.format(
                                        form.get('next'),
                                        urlencode({'state': form.get('state')},
                                                  doseq=True))
                                })
        else:
            # API flow: negotiate the response content type.
            accepts = parse_accept(request.headers.get('Accept'))
            ctype = choice_content_type(accepts,
                                        ['application/json', 'text/plain'])
            logger.debug('Content-type for response is: %s', ctype)

            if ctype == 'application/json':
                user_dict = asdict(user)
                # Never expose password-derived secrets.
                user_dict.pop('crypt')
                user_dict.pop('salt')
                response = json_response({
                    'meta': {
                        'status': 'ok'
                    },
                    'data': user_dict
                })

            else:
                response = Response(text='Login successful')

        self.set_cookie(self.request, response, user.get_id())

        return response
Beispiel #23
0
 def query(self) -> MultiDictProxy[str]:
     """A multidict with all the variables in the query string."""
     # Immutable view over the relative URL's already-parsed query.
     return MultiDictProxy(self._rel_url.query)
Beispiel #24
0
    async def post(self) -> 'MultiDictProxy[Union[str, bytes, FileField]]':
        """Return POST parameters.

        Lazily parses the body (urlencoded or multipart) on first call and
        caches the resulting immutable multidict; bodyless methods and
        unsupported content types yield an empty mapping.
        """
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if (content_type not in ('', 'application/x-www-form-urlencoded',
                                 'multipart/form-data')):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()  # type: MultiDict[Union[str, bytes, FileField]]

        if content_type == 'multipart/form-data':
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                field_ct = field.headers.get(hdrs.CONTENT_TYPE)

                if isinstance(field, BodyPartReader):
                    if field.filename and field_ct:
                        # store file in temp file
                        tmp = tempfile.TemporaryFile()
                        chunk = await field.read_chunk(size=2**16)
                        while chunk:
                            chunk = field.decode(chunk)
                            tmp.write(chunk)
                            size += len(chunk)
                            # Enforce the configured body-size limit while
                            # streaming so oversized uploads abort early.
                            if 0 < max_size < size:
                                raise HTTPRequestEntityTooLarge(
                                    max_size=max_size, actual_size=size)
                            chunk = await field.read_chunk(size=2**16)
                        tmp.seek(0)

                        ff = FileField(field.name, field.filename,
                                       cast(io.BufferedReader, tmp), field_ct,
                                       field.headers)
                        out.add(field.name, ff)
                    else:
                        # deal with ordinary data
                        value = await field.read(decode=True)
                        if field_ct is None or \
                                field_ct.startswith('text/'):
                            charset = field.get_charset(default='utf-8')
                            out.add(field.name, value.decode(charset))
                        else:
                            out.add(field.name, value)
                        size += len(value)
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(max_size=max_size,
                                                            actual_size=size)
                else:
                    raise ValueError(
                        'To decode nested multipart you need '
                        'to use custom reader', )

                field = await multipart.next()
        else:
            # application/x-www-form-urlencoded (or empty content type).
            data = await self.read()
            if data:
                charset = self.charset or 'utf-8'
                bytes_query = data.rstrip()
                try:
                    query = bytes_query.decode(charset)
                except LookupError:
                    # Unknown charset name in the request.
                    raise HTTPUnsupportedMediaType()
                out.extend(
                    parse_qsl(qs=query,
                              keep_blank_values=True,
                              encoding=charset))

        self._post = MultiDictProxy(out)
        return self._post
Beispiel #25
0
from multidict import MultiDict, MultiDictProxy

# Demo: a MultiDictProxy is a read-only view over a MultiDict.
dictx = MultiDict({})
print(dictx)
dictm = MultiDictProxy(dictx)
print(dictm)

    async def query_metadata_database(
            self, schema_type: str, que: MultiDictProxy, page_num: int,
            page_size: int,
            filter_objects: List) -> Tuple[List, int, int, int]:
        """Query database based on url query parameters.

        Url queries are mapped to mongodb queries based on query_map in
        apps config.

        :param schema_type: Schema type of the object to read.
        :param que: Dict containing query information
        :param page_size: Results per page
        :param page_num: Page number
        :param filter_objects: List of objects belonging to a user
        :raises: HTTPBadRequest if error happened when connection to database
        and HTTPNotFound error if object with given accession id is not found.
        :returns: Query result with pagination numbers
        """
        # Redact the query by checking the accessionId belongs to user
        # NOTE(review): the $in condition is only built when MORE than one
        # filter object is given (len > 1); with exactly one, the empty {}
        # condition always passes — confirm this is intentional and not an
        # off-by-one (>= 1 would filter for a single object too).
        redacted_content = {
            "$redact": {
                "$cond": {
                    "if": {
                        "$in": ["$accessionId", filter_objects]
                    } if len(filter_objects) > 1 else {},
                    "then": "$$DESCEND",
                    "else": "$$PRUNE",
                }
            }
        }
        # Generate mongodb query from query parameters
        mongo_query: Dict[Any, Any] = {}
        for query, value in que.items():
            # Unmapped parameters are silently ignored, leaving {} ($match
            # everything) when no known parameter is present.
            if query in query_map:
                regx = re.compile(f".*{value}.*", re.IGNORECASE)
                if isinstance(query_map[query], dict):
                    # Make or-query for keys in dictionary
                    base = query_map[query]["base"]  # type: ignore
                    if "$or" not in mongo_query:
                        mongo_query["$or"] = []
                    for key in query_map[query]["keys"]:  # type: ignore
                        if value.isdigit():
                            # Numeric values: match against the stringified
                            # field via $regexMatch so partial numbers hit.
                            regi = {
                                "$expr": {
                                    "$regexMatch": {
                                        "input": {
                                            "$toString": f"${base}.{key}"
                                        },
                                        "regex": f".*{int(value)}.*",
                                    }
                                }
                            }
                            mongo_query["$or"].append(regi)
                        else:
                            mongo_query["$or"].append({f"{base}.{key}": regx})
                else:
                    # Query with regex from just one field
                    mongo_query = {query_map[query]: regx}
        LOG.debug(f"Query construct: {mongo_query}")
        LOG.debug(f"redacted filter: {redacted_content}")
        skips = page_size * (page_num - 1)
        # Data page: match, redact, then paginate and strip Mongo's _id.
        aggregate_query = [
            {
                "$match": mongo_query
            },
            redacted_content,
            {
                "$skip": skips
            },
            {
                "$limit": page_size
            },
            {
                "$project": {
                    "_id": 0
                }
            },
        ]
        try:
            result_aggregate = await self.db_service.aggregate(
                schema_type, aggregate_query)
        except (ConnectionFailure, OperationFailure) as error:
            reason = f"Error happened while getting object: {error}"
            LOG.error(reason)
            raise web.HTTPBadRequest(reason=reason)
        data = await self._format_read_data(schema_type, result_aggregate)

        if not data:
            reason = f"could not find any data in {schema_type}."
            LOG.error(reason)
            raise web.HTTPNotFound(reason=reason)

        # Report the actual page size when the last page is short.
        page_size = len(data) if len(data) != page_size else page_size
        # Second aggregate: same match/redact, but only count the total.
        count_query = [{
            "$match": mongo_query
        }, redacted_content, {
            "$count": "total"
        }]
        total_objects = await self.db_service.aggregate(
            schema_type, count_query)

        LOG.debug(f"DB query: {que}")
        LOG.info(f"DB query successful for query on {schema_type} "
                 f"resulted in {total_objects[0]['total']}. "
                 f"Requested was page {page_num} and page size {page_size}.")
        return data, page_num, page_size, total_objects[0]["total"]
Beispiel #27
0
    def post(self):
        """Return POST parameters.

        Lazily parses the body (urlencoded or multipart) on first call and
        caches the result; bodyless methods and unsupported content types
        yield an empty mapping.
        """
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if (content_type not in ('', 'application/x-www-form-urlencoded',
                                 'multipart/form-data')):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()

        if content_type == 'multipart/form-data':
            multipart = yield from self.multipart()

            field = yield from multipart.next()
            while field is not None:
                size = 0
                max_size = self._client_max_size
                content_type = field.headers.get(hdrs.CONTENT_TYPE)

                if field.filename:
                    # store file in temp file
                    tmp = tempfile.TemporaryFile()
                    chunk = yield from field.read_chunk(size=2**16)
                    while chunk:
                        chunk = field.decode(chunk)
                        tmp.write(chunk)
                        size += len(chunk)
                        # Enforce the body-size limit while streaming.
                        if max_size > 0 and size > max_size:
                            raise ValueError(
                                'Maximum request body size exceeded')
                        chunk = yield from field.read_chunk(size=2**16)
                    tmp.seek(0)

                    ff = FileField(field.name, field.filename, tmp,
                                   content_type, field.headers)
                    out.add(field.name, ff)
                else:
                    value = yield from field.read(decode=True)
                    if content_type is None or \
                            content_type.startswith('text/'):
                        charset = field.get_charset(default='utf-8')
                        value = value.decode(charset)
                    out.add(field.name, value)
                    size += len(value)
                    if max_size > 0 and size > max_size:
                        raise ValueError('Maximum request body size exceeded')

                field = yield from multipart.next()
        else:
            # application/x-www-form-urlencoded (or empty content type).
            data = yield from self.read()
            if data:
                charset = self.charset or 'utf-8'
                out.extend(
                    parse_qsl(data.rstrip().decode(charset), encoding=charset))

        self._post = MultiDictProxy(out)
        return self._post
 def links(self) -> LinkMultiDict:
     """Convert stored links into the format returned by :attr:`ClientResponse.links`"""
     converted = MultiDict()
     for name, link in self._links:
         converted.add(name, MultiDictProxy(_to_url_multidict(link)))
     return MultiDictProxy(converted)
Beispiel #29
0
class BaseRequest(MutableMapping[str, Any], HeadersMixin):
    """An HTTP request as received by the server.

    Wraps the parsed ``RawRequestMessage`` plus the payload stream and
    transport, exposing read-only (``@reify``-cached) accessors for the
    method, URL, headers and body.  It also behaves as a ``MutableMapping``
    over per-request user state kept in ``self._state``.
    """

    # HTTP methods whose body .post() will attempt to parse as form data.
    POST_METHODS = {
        hdrs.METH_PATCH,
        hdrs.METH_POST,
        hdrs.METH_PUT,
        hdrs.METH_TRACE,
        hdrs.METH_DELETE,
    }

    # __slots__ keeps per-request memory low; ``_cache`` backs the
    # @reify-decorated properties below.
    __slots__ = (
        "_message",
        "_protocol",
        "_payload_writer",
        "_payload",
        "_headers",
        "_method",
        "_version",
        "_rel_url",
        "_post",
        "_read_bytes",
        "_state",
        "_cache",
        "_task",
        "_client_max_size",
        "_loop",
        "_transport_sslcontext",
        "_transport_peername",
        "_disconnection_waiters",
        "__weakref__",
    )

    def __init__(
        self,
        message: RawRequestMessage,
        payload: StreamReader,
        protocol: "RequestHandler",
        payload_writer: AbstractStreamWriter,
        task: "asyncio.Task[None]",
        loop: asyncio.AbstractEventLoop,
        *,
        client_max_size: int = 1024**2,
        state: Optional[Dict[str, Any]] = None,
        scheme: Optional[str] = None,
        host: Optional[str] = None,
        remote: Optional[str] = None,
    ) -> None:
        """Build a request around a parsed message and its transport.

        ``scheme``/``host``/``remote``, when given (e.g. by :meth:`clone`),
        are stored into ``self._cache`` under the matching property names so
        the corresponding ``@reify`` properties return the overrides.
        """
        super().__init__()
        if state is None:
            state = {}
        self._message = message
        self._protocol = protocol
        self._payload_writer = payload_writer

        self._payload = payload
        self._headers = message.headers
        self._method = message.method
        self._version = message.version
        self._rel_url = message.url
        # Lazily populated by .post(); None means "not parsed yet".
        self._post = (
            None
        )  # type: Optional[MultiDictProxy[Union[str, bytes, FileField]]]
        # Lazily populated by .read(); None means "body not read yet".
        self._read_bytes = None  # type: Optional[bytes]

        self._state = state
        self._cache = {}  # type: Dict[str, Any]
        self._task = task
        self._client_max_size = client_max_size
        self._loop = loop
        # Futures completed/cancelled by _cancel()/_finish() below.
        self._disconnection_waiters = set()  # type: Set[asyncio.Future[None]]

        transport = self._protocol.transport
        assert transport is not None
        self._transport_sslcontext = transport.get_extra_info("sslcontext")
        self._transport_peername = transport.get_extra_info("peername")

        if scheme is not None:
            self._cache["scheme"] = scheme
        if host is not None:
            self._cache["host"] = host
        if remote is not None:
            self._cache["remote"] = remote

    def clone(
        self,
        *,
        method: Union[str, _SENTINEL] = sentinel,
        rel_url: Union[StrOrURL, _SENTINEL] = sentinel,
        headers: Union[LooseHeaders, _SENTINEL] = sentinel,
        scheme: Union[str, _SENTINEL] = sentinel,
        host: Union[str, _SENTINEL] = sentinel,
        remote: Union[str, _SENTINEL] = sentinel,
    ) -> "BaseRequest":
        """Clone itself with replacement some attributes.

        Creates and returns a new instance of Request object. If no parameters
        are given, an exact copy is returned. If a parameter is not passed, it
        will reuse the one from the current request object.

        """

        # Cloning after the body was consumed would silently share a
        # half-read payload between the two requests.
        if self._read_bytes:
            raise RuntimeError("Cannot clone request "
                               "after reading its content")

        dct = {}  # type: Dict[str, Any]
        if method is not sentinel:
            dct["method"] = method
        if rel_url is not sentinel:
            new_url: URL = URL(rel_url)  # type: ignore[arg-type]
            dct["url"] = new_url
            dct["path"] = str(new_url)
        if headers is not sentinel:
            # a copy semantic
            new_headers = CIMultiDictProxy(
                CIMultiDict(headers)  # type: ignore[arg-type]
            )
            dct["headers"] = new_headers
            # Keep raw_headers in sync with the replaced headers.
            dct["raw_headers"] = tuple((k.encode("utf-8"), v.encode("utf-8"))
                                       for k, v in new_headers.items())

        message = self._message._replace(**dct)

        kwargs: Dict[str, str] = {}
        if scheme is not sentinel:
            kwargs["scheme"] = scheme  # type: ignore[assignment]
        if host is not sentinel:
            kwargs["host"] = host  # type: ignore[assignment]
        if remote is not sentinel:
            kwargs["remote"] = remote  # type: ignore[assignment]

        return self.__class__(
            message,
            self._payload,
            self._protocol,
            self._payload_writer,
            self._task,
            self._loop,
            client_max_size=self._client_max_size,
            state=self._state.copy(),
            **kwargs,
        )

    @property
    def task(self) -> "asyncio.Task[None]":
        """The task handling this request."""
        return self._task

    @property
    def protocol(self) -> "RequestHandler":
        """The protocol (request handler) that received this request."""
        return self._protocol

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        """Underlying transport, or None if the protocol is gone."""
        if self._protocol is None:
            return None
        return self._protocol.transport

    @property
    def writer(self) -> AbstractStreamWriter:
        """Writer used to send the response payload."""
        return self._payload_writer

    @reify
    def rel_url(self) -> URL:
        """Relative URL (path + query) of the request."""
        return self._rel_url

    # MutableMapping API
    # The request doubles as a dict of per-request user state.

    def __getitem__(self, key: str) -> Any:
        return self._state[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self._state[key] = value

    def __delitem__(self, key: str) -> None:
        del self._state[key]

    def __len__(self) -> int:
        return len(self._state)

    def __iter__(self) -> Iterator[str]:
        return iter(self._state)

    ########

    @reify
    def secure(self) -> bool:
        """A bool indicating if the request is handled with SSL."""
        return self.scheme == "https"

    @reify
    def forwarded(self) -> Tuple[Mapping[str, str], ...]:
        """A tuple containing all parsed Forwarded header(s).

        Makes an effort to parse Forwarded headers as specified by RFC 7239:

        - It adds one (immutable) dictionary per Forwarded 'field-value', ie
          per proxy. The element corresponds to the data in the Forwarded
          field-value added by the first proxy encountered by the client. Each
          subsequent item corresponds to those added by later proxies.
        - It checks that every value has valid syntax in general as specified
          in section 4: either a 'token' or a 'quoted-string'.
        - It un-escapes found escape sequences.
        - It does NOT validate 'by' and 'for' contents as specified in section
          6.
        - It does NOT validate 'host' contents (Host ABNF).
        - It does NOT validate 'proto' contents for valid URI scheme names.

        Returns a tuple containing one or more immutable dicts
        """
        elems = []
        for field_value in self._message.headers.getall(hdrs.FORWARDED, ()):
            length = len(field_value)
            pos = 0
            need_separator = False
            elem = {}  # type: Dict[str, str]
            # The proxy is appended first and mutated in place below.
            elems.append(types.MappingProxyType(elem))
            while 0 <= pos < length:
                match = _FORWARDED_PAIR_RE.match(field_value, pos)
                if match is not None:  # got a valid forwarded-pair
                    if need_separator:
                        # bad syntax here, skip to next comma
                        pos = field_value.find(",", pos)
                    else:
                        name, value, port = match.groups()
                        if value[0] == '"':
                            # quoted string: remove quotes and unescape
                            value = _QUOTED_PAIR_REPLACE_RE.sub(
                                r"\1", value[1:-1])
                        if port:
                            value += port
                        elem[name.lower()] = value
                        pos += len(match.group(0))
                        need_separator = True
                elif field_value[pos] == ",":  # next forwarded-element
                    need_separator = False
                    elem = {}
                    elems.append(types.MappingProxyType(elem))
                    pos += 1
                elif field_value[pos] == ";":  # next forwarded-pair
                    need_separator = False
                    pos += 1
                elif field_value[pos] in " \t":
                    # Allow whitespace even between forwarded-pairs, though
                    # RFC 7239 doesn't. This simplifies code and is in line
                    # with Postel's law.
                    pos += 1
                else:
                    # bad syntax here, skip to next comma
                    pos = field_value.find(",", pos)
        return tuple(elems)

    @reify
    def scheme(self) -> str:
        """A string representing the scheme of the request.

        Hostname is resolved in this order:

        - overridden value by .clone(scheme=new_scheme) call.
        - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise.

        'http' or 'https'.
        """
        if self._transport_sslcontext:
            return "https"
        else:
            return "http"

    @reify
    def method(self) -> str:
        """Read only property for getting HTTP method.

        The value is upper-cased str like 'GET', 'POST', 'PUT' etc.
        """
        return self._method

    @reify
    def version(self) -> HttpVersion:
        """Read only property for getting HTTP version of request.

        Returns aiohttp.protocol.HttpVersion instance.
        """
        return self._version

    @reify
    def host(self) -> str:
        """Hostname of the request.

        Hostname is resolved in this order:

        - overridden value by .clone(host=new_host) call.
        - HOST HTTP header
        - socket.getfqdn() value
        """
        host = self._message.headers.get(hdrs.HOST)
        if host is not None:
            return host
        return socket.getfqdn()

    @reify
    def remote(self) -> Optional[str]:
        """Remote IP of client initiated HTTP request.

        The IP is resolved in this order:

        - overridden value by .clone(remote=new_remote) call.
        - peername of opened socket
        """
        if self._transport_peername is None:
            return None
        # peername is (host, port[, ...]) for INET sockets, a plain
        # string for UNIX sockets.
        if isinstance(self._transport_peername, (list, tuple)):
            return str(self._transport_peername[0])
        return str(self._transport_peername)

    @reify
    def url(self) -> URL:
        """Absolute URL built from scheme, host and the relative URL."""
        url = URL.build(scheme=self.scheme, host=self.host)
        return url.join(self._rel_url)

    @reify
    def path(self) -> str:
        """The URL including *PATH INFO* without the host or scheme.

        E.g., ``/app/blog``
        """
        return self._rel_url.path

    @reify
    def path_qs(self) -> str:
        """The URL including PATH_INFO and the query string.

        E.g, /app/blog?id=10
        """
        return str(self._rel_url)

    @reify
    def raw_path(self) -> str:
        """The URL including raw *PATH INFO* without the host or scheme.
        Warning, the path is unquoted and may contains non valid URL characters

        E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``
        """
        return self._message.path

    @reify
    def query(self) -> MultiDictProxy[str]:
        """A multidict with all the variables in the query string."""
        return MultiDictProxy(self._rel_url.query)

    @reify
    def query_string(self) -> str:
        """The query string in the URL.

        E.g., id=10
        """
        return self._rel_url.query_string

    @reify
    def headers(self) -> "CIMultiDictProxy[str]":
        """A case-insensitive multidict proxy with all headers."""
        return self._headers

    @reify
    def raw_headers(self) -> RawHeaders:
        """A sequence of pairs for all headers."""
        return self._message.raw_headers

    @reify
    def if_modified_since(self) -> Optional[datetime.datetime]:
        """The value of If-Modified-Since HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))

    @reify
    def if_unmodified_since(self) -> Optional[datetime.datetime]:
        """The value of If-Unmodified-Since HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))

    @staticmethod
    def _etag_values(etag_header: str) -> Iterator[ETag]:
        """Extract `ETag` objects from raw header."""
        if etag_header == ETAG_ANY:
            yield ETag(
                is_weak=False,
                value=ETAG_ANY,
            )
        else:
            for match in LIST_QUOTED_ETAG_RE.finditer(etag_header):
                is_weak, value, garbage = match.group(2, 3, 4)
                # Any symbol captured by 4th group means
                # that the following sequence is invalid.
                if garbage:
                    break

                yield ETag(
                    is_weak=bool(is_weak),
                    value=value,
                )

    @classmethod
    def _if_match_or_none_impl(
            cls, header_value: Optional[str]) -> Optional[Tuple[ETag, ...]]:
        """Shared If-Match/If-None-Match parsing: None for missing header."""
        if not header_value:
            return None

        return tuple(cls._etag_values(header_value))

    @reify
    def if_match(self) -> Optional[Tuple[ETag, ...]]:
        """The value of If-Match HTTP header, or None.

        This header is represented as a `tuple` of `ETag` objects.
        """
        return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH))

    @reify
    def if_none_match(self) -> Optional[Tuple[ETag, ...]]:
        """The value of If-None-Match HTTP header, or None.

        This header is represented as a `tuple` of `ETag` objects.
        """
        return self._if_match_or_none_impl(self.headers.get(
            hdrs.IF_NONE_MATCH))

    @reify
    def if_range(self) -> Optional[datetime.datetime]:
        """The value of If-Range HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        return parse_http_date(self.headers.get(hdrs.IF_RANGE))

    @reify
    def keep_alive(self) -> bool:
        """Is keepalive enabled by client?"""
        return not self._message.should_close

    @reify
    def cookies(self) -> Mapping[str, str]:
        """Return request cookies.

        A read-only dictionary-like object.
        """
        raw = self.headers.get(hdrs.COOKIE, "")
        parsed = SimpleCookie(raw)  # type: SimpleCookie[str]
        return MappingProxyType(
            {key: val.value
             for key, val in parsed.items()})

    @reify
    def http_range(self) -> slice:
        """The content of Range HTTP header.

        Return a slice instance.

        """
        rng = self._headers.get(hdrs.RANGE)
        start, end = None, None
        if rng is not None:
            try:
                pattern = r"^bytes=(\d*)-(\d*)$"
                start, end = re.findall(pattern, rng)[0]
            except IndexError:  # pattern was not found in header
                raise ValueError("range not in acceptable format")

            end = int(end) if end else None
            start = int(start) if start else None

            if start is None and end is not None:
                # end with no start is to return tail of content
                start = -end
                end = None

            if start is not None and end is not None:
                # end is inclusive in range header, exclusive for slice
                end += 1

                if start >= end:
                    raise ValueError("start cannot be after end")

            if start is end is None:  # No valid range supplied
                raise ValueError("No start or end of range specified")

        return slice(start, end, 1)

    @reify
    def content(self) -> StreamReader:
        """Return raw payload stream."""
        return self._payload

    @property
    def can_read_body(self) -> bool:
        """Return True if request's HTTP BODY can be read, False otherwise."""
        return not self._payload.at_eof()

    @reify
    def body_exists(self) -> bool:
        """Return True if request has HTTP BODY, False otherwise."""
        return type(self._payload) is not EmptyStreamReader

    async def release(self) -> None:
        """Release request.

        Eat unread part of HTTP BODY if present.
        """
        while not self._payload.at_eof():
            await self._payload.readany()

    async def read(self) -> bytes:
        """Read request body if present.

        Returns bytes object with full request content.

        Raises HTTPRequestEntityTooLarge if the accumulated body exceeds
        the configured client_max_size (0 disables the check).
        """
        if self._read_bytes is None:
            body = bytearray()
            while True:
                chunk = await self._payload.readany()
                body.extend(chunk)
                if self._client_max_size:
                    body_size = len(body)
                    if body_size > self._client_max_size:
                        raise HTTPRequestEntityTooLarge(
                            max_size=self._client_max_size,
                            actual_size=body_size)
                if not chunk:
                    break
            # Cache so repeated read()/text()/post() calls reuse the bytes.
            self._read_bytes = bytes(body)
        return self._read_bytes

    async def text(self) -> str:
        """Return BODY as text using encoding from .charset.

        Raises HTTPUnsupportedMediaType for an unknown charset.
        """
        bytes_body = await self.read()
        encoding = self.charset or "utf-8"
        try:
            return bytes_body.decode(encoding)
        except LookupError:
            raise HTTPUnsupportedMediaType()

    async def json(
        self,
        *,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        content_type: Optional[str] = "application/json",
    ) -> Any:
        """Return BODY as JSON.

        When content_type is truthy, the request's Content-Type header must
        match it or HTTPBadRequest is raised; pass None to skip the check.
        """
        body = await self.text()
        if content_type:
            ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
            if not is_expected_content_type(ctype, content_type):
                raise HTTPBadRequest(text=("Attempt to decode JSON with "
                                           "unexpected mimetype: %s" % ctype))

        return loads(body)

    async def multipart(self) -> MultipartReader:
        """Return async iterator to process BODY as multipart."""
        return MultipartReader(self._headers, self._payload)

    async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
        """Return POST parameters.

        Parses urlencoded or multipart form bodies into a read-only
        multidict; file parts are spooled to temp files as FileField
        entries.  The result is cached, and an empty multidict is returned
        for non-POST methods or non-form content types.
        """
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if content_type not in (
                "",
                "application/x-www-form-urlencoded",
                "multipart/form-data",
        ):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()  # type: MultiDict[Union[str, bytes, FileField]]

        if content_type == "multipart/form-data":
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                field_ct = field.headers.get(hdrs.CONTENT_TYPE)

                if isinstance(field, BodyPartReader):
                    assert field.name is not None

                    # Note that according to RFC 7578, the Content-Type header
                    # is optional, even for files, so we can't assume it's
                    # present.
                    # https://tools.ietf.org/html/rfc7578#section-4.4
                    if field.filename:
                        # store file in temp file
                        tmp = tempfile.TemporaryFile()
                        chunk = await field.read_chunk(size=2**16)
                        while chunk:
                            chunk = field.decode(chunk)
                            tmp.write(chunk)
                            size += len(chunk)
                            if 0 < max_size < size:
                                # Release the temp file before bailing out.
                                tmp.close()
                                raise HTTPRequestEntityTooLarge(
                                    max_size=max_size, actual_size=size)
                            chunk = await field.read_chunk(size=2**16)
                        tmp.seek(0)

                        if field_ct is None:
                            field_ct = "application/octet-stream"

                        ff = FileField(
                            field.name,
                            field.filename,
                            cast(io.BufferedReader, tmp),
                            field_ct,
                            field.headers,
                        )
                        out.add(field.name, ff)
                    else:
                        # deal with ordinary data
                        value = await field.read(decode=True)
                        if field_ct is None or field_ct.startswith("text/"):
                            charset = field.get_charset(default="utf-8")
                            out.add(field.name, value.decode(charset))
                        else:
                            out.add(field.name, value)
                        size += len(value)
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(max_size=max_size,
                                                            actual_size=size)
                else:
                    raise ValueError(
                        "To decode nested multipart you need "
                        "to use custom reader", )

                field = await multipart.next()
        else:
            data = await self.read()
            if data:
                charset = self.charset or "utf-8"
                bytes_query = data.rstrip()
                try:
                    query = bytes_query.decode(charset)
                except LookupError:
                    raise HTTPUnsupportedMediaType()
                out.extend(
                    parse_qsl(qs=query,
                              keep_blank_values=True,
                              encoding=charset))

        self._post = MultiDictProxy(out)
        return self._post

    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """Extra info from protocol transport"""
        protocol = self._protocol
        if protocol is None:
            return default

        transport = protocol.transport
        if transport is None:
            return default

        return transport.get_extra_info(name, default)

    def __repr__(self) -> str:
        # Backslash-escape non-ASCII so repr is always printable.
        ascii_encodable_path = self.path.encode(
            "ascii", "backslashreplace").decode("ascii")
        return "<{} {} {} >".format(self.__class__.__name__, self._method,
                                    ascii_encodable_path)

    def __eq__(self, other: object) -> bool:
        # Identity comparison: each request object is unique.
        return id(self) == id(other)

    def __bool__(self) -> bool:
        return True

    async def _prepare_hook(self, response: StreamResponse) -> None:
        # Hook point; intentionally a no-op in the base class.
        return

    def _cancel(self, exc: BaseException) -> None:
        """Abort the payload with *exc* and wake disconnection waiters."""
        self._payload.set_exception(exc)
        for fut in self._disconnection_waiters:
            set_result(fut, None)

    def _finish(self) -> None:
        """Finalize the request: cancel waiters and close uploaded files."""
        for fut in self._disconnection_waiters:
            fut.cancel()

        if self._post is None or self.content_type != "multipart/form-data":
            return

        # NOTE: Release file descriptors for the
        # NOTE: `tempfile.Temporaryfile`-created `_io.BufferedRandom`
        # NOTE: instances of files sent within multipart request body
        # NOTE: via HTTP POST request.
        for file_name, file_field_object in self._post.items():
            if not isinstance(file_field_object, FileField):
                continue

            file_field_object.file.close()

    async def wait_for_disconnection(self) -> None:
        """Block until the client disconnects (or the request finishes)."""
        loop = asyncio.get_event_loop()
        fut = loop.create_future()  # type: asyncio.Future[None]
        self._disconnection_waiters.add(fut)
        try:
            await fut
        finally:
            self._disconnection_waiters.remove(fut)
Beispiel #30
0
    async def post(self) -> MultiDictProxy:
        """Return POST parameters.

        Parses an urlencoded or multipart form body into a read-only
        multidict and caches the result; file parts are spooled to
        temporary files and exposed as FileField entries.  Returns an
        empty multidict for non-POST methods or non-form content types.

        Raises HTTPRequestEntityTooLarge when the accumulated form data
        exceeds the configured client_max_size (0 disables the limit).
        """
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if (content_type not in ('', 'application/x-www-form-urlencoded',
                                 'multipart/form-data')):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()  # type: MultiDict

        if content_type == 'multipart/form-data':
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                content_type = field.headers.get(hdrs.CONTENT_TYPE)

                if field.filename:
                    # store file in temp file
                    tmp = tempfile.TemporaryFile()
                    chunk = await field.read_chunk(size=2**16)
                    while chunk:
                        chunk = field.decode(chunk)
                        tmp.write(chunk)
                        size += len(chunk)
                        if 0 < max_size < size:
                            # Close the temp file before raising, otherwise
                            # its file descriptor leaks until GC.
                            tmp.close()
                            raise HTTPRequestEntityTooLarge(max_size=max_size,
                                                            actual_size=size)
                        chunk = await field.read_chunk(size=2**16)
                    tmp.seek(0)

                    ff = FileField(field.name, field.filename,
                                   cast(io.BufferedReader, tmp), content_type,
                                   field.headers)
                    out.add(field.name, ff)
                else:
                    value = await field.read(decode=True)
                    # Content-Type is optional; absent or text/* parts are
                    # decoded to str, everything else stays bytes-decoded
                    # via the field charset.
                    if content_type is None or \
                            content_type.startswith('text/'):
                        charset = field.get_charset(default='utf-8')
                        value = value.decode(charset)
                    out.add(field.name, value)
                    size += len(value)
                    if 0 < max_size < size:
                        raise HTTPRequestEntityTooLarge(max_size=max_size,
                                                        actual_size=size)

                field = await multipart.next()
        else:
            data = await self.read()
            if data:
                charset = self.charset or 'utf-8'
                out.extend(
                    parse_qsl(data.rstrip().decode(charset),
                              keep_blank_values=True,
                              encoding=charset))

        self._post = MultiDictProxy(out)
        return self._post
Beispiel #31
0
    async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
        """Return POST parameters.

        Parses urlencoded or multipart form bodies into a read-only
        multidict and caches the result; file parts are spooled to temp
        files and exposed as FileField entries.  Non-POST methods and
        non-form content types yield an empty multidict.
        """
        # Return the cached result from a previous call, if any.
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if content_type not in (
                "",
                "application/x-www-form-urlencoded",
                "multipart/form-data",
        ):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()  # type: MultiDict[Union[str, bytes, FileField]]

        if content_type == "multipart/form-data":
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                field_ct = field.headers.get(hdrs.CONTENT_TYPE)

                if isinstance(field, BodyPartReader):
                    assert field.name is not None

                    # Note that according to RFC 7578, the Content-Type header
                    # is optional, even for files, so we can't assume it's
                    # present.
                    # https://tools.ietf.org/html/rfc7578#section-4.4
                    if field.filename:
                        # store file in temp file
                        tmp = tempfile.TemporaryFile()
                        chunk = await field.read_chunk(size=2**16)
                        while chunk:
                            chunk = field.decode(chunk)
                            tmp.write(chunk)
                            size += len(chunk)
                            if 0 < max_size < size:
                                # Release the temp file before bailing out.
                                tmp.close()
                                raise HTTPRequestEntityTooLarge(
                                    max_size=max_size, actual_size=size)
                            chunk = await field.read_chunk(size=2**16)
                        tmp.seek(0)

                        if field_ct is None:
                            field_ct = "application/octet-stream"

                        ff = FileField(
                            field.name,
                            field.filename,
                            cast(io.BufferedReader, tmp),
                            field_ct,
                            field.headers,
                        )
                        out.add(field.name, ff)
                    else:
                        # deal with ordinary data
                        value = await field.read(decode=True)
                        if field_ct is None or field_ct.startswith("text/"):
                            charset = field.get_charset(default="utf-8")
                            out.add(field.name, value.decode(charset))
                        else:
                            out.add(field.name, value)
                        size += len(value)
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(max_size=max_size,
                                                            actual_size=size)
                else:
                    raise ValueError(
                        "To decode nested multipart you need "
                        "to use custom reader", )

                field = await multipart.next()
        else:
            data = await self.read()
            if data:
                charset = self.charset or "utf-8"
                bytes_query = data.rstrip()
                try:
                    query = bytes_query.decode(charset)
                except LookupError:
                    # Unknown charset in the request headers.
                    raise HTTPUnsupportedMediaType()
                out.extend(
                    parse_qsl(qs=query,
                              keep_blank_values=True,
                              encoding=charset))

        self._post = MultiDictProxy(out)
        return self._post
Beispiel #32
0
def multi_dict_to_dict_of_lists(md: "MultiDictProxy") -> Dict[Any, List[Any]]:
    """Convert a multidict into a plain dict mapping each key to all its values.

    Keys appear in order of first occurrence; each key maps to the list of
    its values in original order.
    """
    # Fixed the malformed local annotation Dict[List[Any]] (Dict needs a
    # key and a value parameter) and use setdefault instead of get+rebind.
    result: Dict[Any, List[Any]] = {}
    for key, value in md.items():
        result.setdefault(key, []).append(value)
    return result