Example #1
File: view.py Project: EleComb/slim
 def params(self) -> "MultiDict[str]":
     if self._params_cache is None:
         self._params_cache = MultiDict(self._request.query)
     return self._params_cache
Example #2
def convertToDict(data):
    ''' Convert a parsed JSON object (dict) into a MultiDict of string values. '''
    md = MultiDict()
    for key in data:
        md.add(key, str(data[key]))
    return md
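
A quick usage sketch for the helper above (hypothetical values; convertToDict and MultiDict from the snippet are assumed to be in scope). Note that every value is coerced to str, so non-string JSON values are flattened to their string form:

payload = {'name': 'Lisa', 'age': 8}   # e.g. the result of json.loads(...)
md = convertToDict(payload)
assert md['name'] == 'Lisa'
assert md['age'] == '8'                # coerced to str by the helper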
Example #3
 def __init__(self, request, options: Dict, stream: Callable) -> None:
     self.request = request
     self.options = options
     self.stream = stream
     self.limit = request.cache.cfg.stream_buffer
     self.result = (MultiDict(), MultiDict())
Example #4
    def __init__(self,
                 method,
                 url,
                 *,
                 params=None,
                 headers=None,
                 skip_auto_headers=frozenset(),
                 data=None,
                 cookies=None,
                 auth=None,
                 version=http.HttpVersion11,
                 compress=None,
                 chunked=None,
                 expect100=False,
                 loop=None,
                 response_class=None,
                 proxy=None,
                 proxy_auth=None,
                 timer=None,
                 session=None,
                 auto_decompress=True,
                 ssl=None,
                 proxy_headers=None):

        if loop is None:
            loop = asyncio.get_event_loop()

        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy
        self._session = session
        if params:
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.url = url.with_fragment(None)
        self.original_url = url
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        self.response_class = response_class or ClientResponse
        self._timer = timer if timer is not None else TimerNoop()
        self._auto_decompress = auto_decompress
        self._ssl = ssl

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth, proxy_headers)

        self.update_body_from_data(data)
        if data or self.method not in self.GET_METHODS:
            self.update_transfer_encoding()
        self.update_expect_continue(expect100)
Example #5
 async def post(self) -> "MultiDict[str]":
     """Return POST parameters."""
     return MultiDict(parse_qsl(self._text, keep_blank_values=True))
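
For reference, a minimal standalone sketch of the parsing step used above: parse_qsl with keep_blank_values=True preserves both duplicate keys and empty values, and MultiDict keeps the duplicates apart:

from urllib.parse import parse_qsl
from multidict import MultiDict

md = MultiDict(parse_qsl('q=test1&q=test2&empty=', keep_blank_values=True))
assert md.getall('q') == ['test1', 'test2']   # duplicates preserved in order
assert md['empty'] == ''                      # blank value kept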
Example #6
    async def post(self):
        """Return POST parameters."""
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if (content_type not in ('', 'application/x-www-form-urlencoded',
                                 'multipart/form-data')):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()

        if content_type == 'multipart/form-data':
            multipart = await self.multipart()

            field = await multipart.next()
            while field is not None:
                size = 0
                max_size = self._client_max_size
                content_type = field.headers.get(hdrs.CONTENT_TYPE)

                if field.filename:
                    # store file in temp file
                    tmp = tempfile.TemporaryFile()
                    chunk = await field.read_chunk(size=2**16)
                    while chunk:
                        chunk = field.decode(chunk)
                        tmp.write(chunk)
                        size += len(chunk)
                        if 0 < max_size < size:
                            raise ValueError(
                                'Maximum request body size exceeded')
                        chunk = await field.read_chunk(size=2**16)
                    tmp.seek(0)

                    ff = FileField(field.name, field.filename, tmp,
                                   content_type, field.headers)
                    out.add(field.name, ff)
                else:
                    value = await field.read(decode=True)
                    if content_type is None or \
                            content_type.startswith('text/'):
                        charset = field.get_charset(default='utf-8')
                        value = value.decode(charset)
                    out.add(field.name, value)
                    size += len(value)
                    if 0 < max_size < size:
                        raise ValueError('Maximum request body size exceeded')

                field = await multipart.next()
        else:
            data = await self.read()
            if data:
                charset = self.charset or 'utf-8'
                out.extend(
                    parse_qsl(data.rstrip().decode(charset),
                              keep_blank_values=True,
                              encoding=charset))

        self._post = MultiDictProxy(out)
        return self._post
Example #7
    async def send_trailing_metadata(
        self,
        *,
        status: Status = Status.OK,
        status_message: Optional[str] = None,
        status_details: Any = None,
        metadata: Optional[_MetadataLike] = None,
    ) -> None:
        """Coroutine to send trailers with trailing metadata to the client.

        This coroutine allows sending trailers-only responses, in case of some
        failure conditions during handling current request, i.e. when
        ``status is not OK``.

        .. note:: This coroutine will be called implicitly at exit from
            request handler, with appropriate status code, if not called
            explicitly during handler execution.

        :param status: resulting status of this coroutine call
        :param status_message: description for a status
        :param metadata: custom trailing metadata, dict or list of pairs
        """
        if self._send_trailing_metadata_done:
            raise ProtocolError('Trailing metadata was already sent')

        if (not self._cardinality.server_streaming
                and not self._send_message_done and status is Status.OK):
            raise ProtocolError('Unary response with OK status requires '
                                'a single message to be sent')

        if self._send_initial_metadata_done:
            headers: _Headers = []
        else:
            # trailers-only response
            headers = [
                (':status', '200'),
                ('content-type', self._content_type),
            ]

        headers.append(('grpc-status', str(status.value)))
        if status_message is not None:
            headers.append(
                ('grpc-message', encode_grpc_message(status_message)))
        if (status_details is not None
                and self._status_details_codec is not None):
            status_details_bin = (encode_bin_value(
                self._status_details_codec.encode(
                    status,
                    status_message,
                    status_details,
                )).decode('ascii'))
            headers.append((_STATUS_DETAILS_KEY, status_details_bin))

        metadata = MultiDict(metadata or ())
        metadata, = await self._dispatch.send_trailing_metadata(metadata)
        headers.extend(encode_metadata(cast(_Metadata, metadata)))

        await self._stream.send_headers(headers, end_stream=True)
        self._send_trailing_metadata_done = True

        if status != Status.OK and self._stream.closable:
            self._stream.reset_nowait()
Example #8
    def parse_request_filters(cls, request: web.Request) -> RequestFilters:
        """
        Parse filters from request query string.

        .. hint::

            Please note, that the *filter* strategy is not defined by the
            JSON API specification and depends on the implementation.
            If you want to use another filter strategy,
            feel free to **override** this method.

        Returns a MultiDict with field names as keys and rules as values.
        Rule value is JSON deserialized from query string.

        Filters can be applied using the query string.

        .. code-block:: python3

            >>> from aiohttp_json_api.context import JSONAPIContext
            >>> from aiohttp.test_utils import make_mocked_request

            >>> request = make_mocked_request('GET', '/api/User/?filter[name]=endswith:"Simpson"')
            >>> JSONAPIContext.parse_request_filters(request)
            <MultiDict('name': FilterRule(name='endswith', value='Simpson'))>

            >>> request = make_mocked_request('GET', '/api/User/?filter[name]=endswith:"Simpson"&filter[name]=in:["Some","Names"]')
            >>> JSONAPIContext.parse_request_filters(request)
            <MultiDict('name': FilterRule(name='endswith', value='Simpson'), 'name': FilterRule(name='in', value=['Some', 'Names']))>

            >>> request = make_mocked_request('GET', '/api/User/?filter[name]=in:["Homer Simpson", "Darth Vader"]')
            >>> JSONAPIContext.parse_request_filters(request)
            <MultiDict('name': FilterRule(name='in', value=['Homer Simpson', 'Darth Vader']))>

            >>> request = make_mocked_request('GET', '/api/User/?filter[some-field]=startswith:"lisa"&filter[another-field]=lt:20')
            >>> JSONAPIContext.parse_request_filters(request)
            <MultiDict('some_field': FilterRule(name='startswith', value='lisa'), 'another_field': FilterRule(name='lt', value=20))>

        The general syntax is::

            "?filter[field]=name:rule"

        where *rule* is a JSON value.

        :raises HTTPBadRequest:
            If the rule of a filter is not a JSON object.
        :raises HTTPBadRequest:
            If a filter name contains invalid characters.
        """
        filters = MultiDict()  # type: MultiDict

        for key, value in request.query.items():
            key_match = re.fullmatch(cls.FILTER_KEY, key)
            value_match = re.fullmatch(cls.FILTER_VALUE, value)

            # The key indicates a filter, but the value is not correctly
            # formatted.
            if key_match and not value_match:
                field = key_match.group('field')
                raise HTTPBadRequest(
                    detail="The filter '{}' "
                           "is not correct applied.".format(field),
                    source_parameter=key
                )

            # The key indicates a filter and the filter name exists.
            elif key_match and value_match:
                field = key_match.group('field')
                name = value_match.group('name')
                value = value_match.group('value')
                try:
                    value = json.loads(value)
                except Exception as err:
                    logger.debug(str(err), exc_info=False)
                    raise HTTPBadRequest(
                        detail="The value '{}' "
                               "is not JSON serializable".format(value),
                        source_parameter=key
                    )
                filters.add(cls.convert_field_name(field),
                            FilterRule(name=name, value=value))

        return filters
Example #9
import platform

import pytest
from multidict import MultiDict
from yarl import URL

from aiohttp import helpers
from aiohttp.helpers import is_expected_content_type

IS_PYPY = platform.python_implementation() == 'PyPy'

# ------------------- parse_mimetype ----------------------------------


@pytest.mark.parametrize(
    'mimetype, expected',
    [('', helpers.MimeType('', '', '', MultiDict())),
     ('*', helpers.MimeType('*', '*', '', MultiDict())),
     ('application/json',
      helpers.MimeType('application', 'json', '', MultiDict())),
     ('application/json;  charset=utf-8',
      helpers.MimeType('application', 'json', '',
                       MultiDict({'charset': 'utf-8'}))),
     ('''application/json; charset=utf-8;''',
      helpers.MimeType('application', 'json', '',
                       MultiDict({'charset': 'utf-8'}))),
     ('ApPlIcAtIoN/JSON;ChaRseT="UTF-8"',
      helpers.MimeType('application', 'json', '',
                       MultiDict({'charset': 'UTF-8'}))),
     ('application/rss+xml',
      helpers.MimeType('application', 'rss', 'xml', MultiDict())),
     ('text/plain;base64',
Example #10
async def test_traceparent_handling(aiohttp_client, aioeapm):
    app = aioeapm.app
    client = await aiohttp_client(app)
    elasticapm_client = aioeapm.client
    with mock.patch(
            "elasticapm.contrib.aiohttp.middleware.TraceParent.from_string",
            wraps=TraceParent.from_string) as wrapped_from_string:
        resp = await client.get(
            "/boom",
            headers=(
                (constants.TRACEPARENT_HEADER_NAME,
                 "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-03"),
                (constants.TRACESTATE_HEADER_NAME, "foo=bar,bar=baz"),
                (constants.TRACESTATE_HEADER_NAME, "baz=bazzinga"),
            ),
        )

    transaction = elasticapm_client.events[constants.TRANSACTION][0]

    assert transaction["trace_id"] == "0af7651916cd43dd8448eb211c80319c"
    assert transaction["parent_id"] == "b7ad6b7169203331"
    assert "foo=bar,bar=baz,baz=bazzinga" in wrapped_from_string.call_args[0]


@pytest.mark.parametrize("headers,expected", ((MultiDict(
    (("a", "1"), ("a", "2"))), "1,2"), (MultiDict(), None)))
async def test_aiohttptraceparent_merge(headers, expected):
    result = AioHttpTraceParent.merge_duplicate_headers(headers, "a")
    assert result == expected
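
merge_duplicate_headers itself is not shown in this snippet; a hypothetical re-implementation consistent with the parametrized test above, using only the public MultiDict API, might look like:

from typing import Optional
from multidict import MultiDict

def merge_duplicate_headers(headers: MultiDict, key: str) -> Optional[str]:
    # Join every value stored under `key` with commas; None if absent.
    values = headers.getall(key, [])
    return ','.join(values) if values else None

assert merge_duplicate_headers(MultiDict((('a', '1'), ('a', '2'))), 'a') == '1,2'
assert merge_duplicate_headers(MultiDict(), 'a') is None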
Example #11
 def handler(request):
     data = yield from request.post()
     assert data == MultiDict([('q', 'test1'), ('q', 'test2')])
     return web.Response()
Example #12
    async def post(self) -> Response:
        request = self.request
        ctype = request.headers.get('content-type')

        logger.debug('Request Content-Type: %s', ctype)

        form: MultiDictProxy

        if ctype == 'application/json':
            try:
                data: Any = await request.json()
                if not isinstance(data, dict):
                    raise ValueError('Invalid request type')
            except ValueError as e:
                logger.warning('Invalid request: %s', e)
                raise HTTPBadRequest(reason='Invalid request') from e
            else:
                form = MultiDictProxy(MultiDict(cast(Dict, data)))

        elif ctype == 'application/x-www-form-urlencoded':
            form = await self.request.post()

        else:
            raise HTTPBadRequest(reason='Invalid content type')

        logger.debug('Form is: %s', form)

        user = await self.request.app['store'].users.create(
            **{
                'username': form.get('username'),
                'password': form.get('password'),
                'given_name': form.get('given_name'),
                'patronymic': form.get('patronymic'),
                'family_name': form.get('family_name')
            })
        await self.request.app['store'].users.put(user)

        if 'next' in form:
            response = Response(status=303,
                                reason='See Other',
                                charset='utf-8',
                                headers={
                                    'Location':
                                    '{}?{}'.format(
                                        form.get('next'),
                                        urlencode({'state': form.get('state')},
                                                  doseq=True))
                                })
        else:
            accepts = parse_accept(request.headers.get('Accept'))
            ctype = choice_content_type(accepts,
                                        ['application/json', 'text/plain'])
            logger.debug('Content-type for response is: %s', ctype)

            if ctype == 'application/json':
                user_dict = asdict(user)
                user_dict.pop('crypt')
                user_dict.pop('salt')
                response = json_response({
                    'meta': {
                        'status': 'ok'
                    },
                    'data': user_dict
                })

            else:
                response = Response(text='Login successful')

        self.set_cookie(self.request, response, user.get_id())

        return response
Example #13
async def links_download(request):

    data = request.query
    session = request.app['session']
    tmp_dir = request.app['config'].get('tmp_dir', '.')

    files_ids = []

    async with request.app['db'].acquire() as conn:
        if 'id' in data:

            id = data['id']
            if id.isdigit():
                id = int(id)
            else:
                return web.json_response(
                    status=400,
                    data=dict(error='File id must be integer'),
                )

            query = db.links.select().where(
                db.links.c.user_id == session['user']).where(
                    db.links.c.id == id)
            result = await conn.fetch(query)
            if result[0]['file_id']:
                files_ids.append(result[0]['file_id'])
        else:
            query = db.links.select().where(
                db.links.c.user_id == session['user'])
            result = await conn.fetch(query)
            for r in result:
                files_ids.append(r['file_id'])

    res_list = []

    for file_id in files_ids:
        res = await get_file(request, file_id, tmp_dir, session.get('user'))
        res_list.append(res)

    if len(res_list) == 1:
        file_name = res_list[0][0]
    else:
        file_name = os.path.join(tmp_dir, f'files_{session.get("user")}')
        shutil.make_archive(file_name, 'zip',
                            os.path.join(tmp_dir, str(session.get('user'))))
        file_name += '.zip'

    # TODO: This could be done better
    with open(file_name, 'rb') as f:
        content = f.read()
        response = web.Response(body=content,
                                headers=MultiDict({
                                    'Content-Disposition':
                                    'attachment; filename="%s"' %
                                    file_name.split('/')[-1],
                                }))

    os.remove(file_name)
    if file_name.endswith('.zip'):
        shutil.rmtree(os.path.join(tmp_dir, str(session.get('user'))))

    return response
Example #14
import logging

from aiohttp import web
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from multidict import MultiDict

from pulpcore.plugin.content import Handler, PathNotResolved
from pulpcore.plugin.models import ContentArtifact
from pulp_container.app.models import ContainerDistribution, Tag
from pulp_container.app.schema_convert import Schema2toSchema1ConverterWrapper
from pulp_container.app.utils import get_accepted_media_types
from pulp_container.constants import MEDIA_TYPE

log = logging.getLogger(__name__)

v2_headers = MultiDict()
v2_headers["Docker-Distribution-API-Version"] = "registry/2.0"


class ArtifactNotFound(Exception):
    """
    The artifact associated with a published-artifact does not exist.
    """

    pass


class Registry(Handler):
    """
    A set of handlers for the Container Registry v2 API.
    """
Example #15
    def collection(self, user_name, extra_params):
        collection_data = []
        plays_data = []

        if isinstance(extra_params, list):
            for params in extra_params:
                collection_data += self.client.collection(
                    user_name=user_name,
                    **params,
                )
        else:
            collection_data = self.client.collection(
                user_name=user_name,
                **extra_params,
            )

        # Dummy game for linking extra promos and accessories
        collection_data.append(
            _create_blank_collection(EXTRA_EXPANSIONS_GAME_ID,
                                     "ZZZ: Expansions without Game (A-I)"))

        params = {"subtype": "boardgameaccessory", "own": 1}
        accessory_collection = self.client.collection(user_name=user_name,
                                                      **params)
        accessory_list_data = self.client.game_list([
            game_in_collection["id"]
            for game_in_collection in accessory_collection
        ])
        accessory_collection_by_id = MultiDict()
        for acc in accessory_collection:
            accessory_collection_by_id.add(str(acc["id"]), acc)

        plays_data = self.client.plays(user_name=user_name)

        game_list_data = self.client.game_list([
            game_in_collection["id"] for game_in_collection in collection_data
        ])

        collection_by_id = MultiDict()
        for item in collection_data:
            item["players"] = []
            collection_by_id.add(str(item["id"]), item)

        for play in plays_data:
            play_id = str(play["game"]["gameid"])
            if play_id in collection_by_id:
                collection_by_id[play_id]["players"].extend(play["players"])

        games_data = list(
            filter(lambda x: x["type"] == "boardgame", game_list_data))
        expansions_data = list(
            filter(lambda x: x["type"] == "boardgameexpansion",
                   game_list_data))

        game_data_by_id = {}
        expansion_data_by_id = {}

        for game in games_data:
            game["accessories_collection"] = []
            game["expansions_collection"] = []
            game_data_by_id[game["id"]] = game

        for expansion in expansions_data:
            expansion["accessories_collection"] = []
            expansion["expansions_collection"] = []
            expansion_data_by_id[expansion["id"]] = expansion

        expansion_data_by_id = custom_expansion_mappings(expansion_data_by_id)

        for expansion_data in expansion_data_by_id.values():
            if is_promo_box(expansion_data):
                game_data_by_id[expansion_data["id"]] = expansion_data
            for expansion in expansion_data["expansions"]:
                id = expansion["id"]
                if expansion["inbound"] and id in expansion_data_by_id:
                    expansion_data_by_id[id]["expansions_collection"].append(
                        expansion_data)

        for accessory_data in accessory_list_data:
            own_game = False
            for accessory in accessory_data["accessories"]:
                id = accessory["id"]
                if accessory["inbound"]:
                    if id in game_data_by_id:
                        game_data_by_id[id]["accessories_collection"].append(
                            accessory_data)
                        own_game = True
                    elif id in expansion_data_by_id:
                        expansion_data_by_id[id][
                            "accessories_collection"].append(accessory_data)
                        own_game = True
            if not own_game:
                game_data_by_id[EXTRA_EXPANSIONS_GAME_ID][
                    "accessories_collection"].append(accessory_data)

        for expansion_data in expansion_data_by_id.values():
            own_base_game = False
            for expansion in expansion_data["expansions"]:
                id = expansion["id"]
                if expansion["inbound"]:
                    if id in game_data_by_id:
                        own_base_game = True
                        if not is_promo_box(expansion_data):
                            game_data_by_id[id][
                                "expansions_collection"].append(expansion_data)
                            game_data_by_id[id][
                                "expansions_collection"].extend(
                                    expansion_data_by_id[expansion_data["id"]]
                                    ["expansions_collection"])
                            game_data_by_id[id][
                                "accessories_collection"].extend(
                                    expansion_data_by_id[expansion_data["id"]]
                                    ["accessories_collection"])
                    elif id in expansion_data_by_id:
                        own_base_game = True
            if not own_base_game:
                id = EXTRA_EXPANSIONS_GAME_ID
                expansion_data["suggested_numplayers"] = []
                game_data_by_id[id]["expansions_collection"].append(
                    expansion_data)
                game_data_by_id[id]["expansions_collection"].extend(
                    expansion_data_by_id[
                        expansion_data["id"]]["expansions_collection"])
                game_data_by_id[id]["accessories_collection"].extend(
                    expansion_data_by_id[
                        expansion_data["id"]]["accessories_collection"])

        games_collection = list(
            filter(lambda x: x["id"] in game_data_by_id,
                   collection_by_id.values()))

        games = [
            BoardGame(game_data_by_id[collection["id"]],
                      collection,
                      expansions=[
                          BoardGame(expansion_data, collection)
                          for expansion_data in _uniq(game_data_by_id[
                              collection["id"]]["expansions_collection"])
                          for collection in collection_by_id.getall(
                              str(expansion_data["id"]))
                      ],
                      accessories=[
                          BoardGame(accessory_data, collection)
                          for accessory_data in _uniq(game_data_by_id[
                              collection["id"]]["accessories_collection"])
                          for collection in accessory_collection_by_id.getall(
                              str(accessory_data["id"]))
                      ]) for collection in games_collection
        ]

        newGames = []

        # Cleanup the game
        for game in games:
            for exp in game.expansions:
                exp.name = remove_prefix(exp.name, game)
            for acc in game.accessories:
                acc.name = remove_prefix(acc.name, game)
            contained_list = []
            for con in game.contained:
                if con["inbound"]:
                    con["name"] = remove_prefix(con["name"], game)
                    contained_list.append(con)
            game.contained = sorted(contained_list, key=lambda x: x["name"])

            integrates_list = []
            for integrate in game.integrates:
                # Filter integrates to owned games
                if str(integrate["id"]) in collection_by_id:
                    integrate["name"] = name_scrubber(integrate["name"])
                    integrates_list.append(integrate)
            game.integrates = sorted(integrates_list, key=lambda x: x["name"])

            for reimps in game.reimplements:
                reimps["name"] = name_scrubber(reimps["name"])
            for reimpby in game.reimplementedby:
                reimpby["name"] = name_scrubber(reimpby["name"])

            family_list = []
            for fam in game.families:
                newFam = family_filter(fam)
                if newFam:
                    family_list.append(newFam)
            game.families = family_list

            game.publishers = publisher_filter(game.publishers,
                                               collection_by_id[str(game.id)])

            # TODO This is terrible, but split the extra expansions by letter
            if game.id == EXTRA_EXPANSIONS_GAME_ID:

                game.description = ""
                game.players = []
                for exp in game.expansions:
                    exp.players.clear()

                newGame = copy.deepcopy(game)
                newGame.name = "ZZZ: Expansions without Game (J-Q)"
                newGame.collection_id = str(game.collection_id) + "jq"
                newGame.expansions = list(
                    filter(lambda x: re.search(r"^[j-qJ-Q]", x.name),
                           game.expansions))
                newGame.accessories = list(
                    filter(lambda x: re.search(r"^[j-qJ-Q]", x.name),
                           game.accessories))
                newGame.expansions = sorted(newGame.expansions,
                                            key=lambda x: x.name)
                newGame.accessories = sorted(newGame.accessories,
                                             key=lambda x: x.name)
                game.expansions = list(
                    set(game.expansions) - set(newGame.expansions))
                game.accessories = list(
                    set(game.accessories) - set(newGame.accessories))
                newGames.append(newGame)

                newGame = copy.deepcopy(game)
                newGame.name = "ZZZ: Expansions without Game (R-Z)"
                newGame.collection_id = str(game.collection_id) + "rz"
                newGame.expansions = list(
                    filter(lambda x: re.search(r"^[r-zR-Z]", x.name),
                           game.expansions))
                newGame.accessories = list(
                    filter(lambda x: re.search(r"^[r-zR-Z]", x.name),
                           game.accessories))
                newGame.expansions = sorted(newGame.expansions,
                                            key=lambda x: x.name)
                newGame.accessories = sorted(newGame.accessories,
                                             key=lambda x: x.name)
                game.expansions = list(
                    set(game.expansions) - set(newGame.expansions))
                game.accessories = list(
                    set(game.accessories) - set(newGame.accessories))
                newGames.append(newGame)

            # Resort the list after updating the names
            game.expansions = sorted(game.expansions, key=lambda x: x.name)
            game.accessories = sorted(game.accessories, key=lambda x: x.name)
            game.contained = sorted(game.contained, key=lambda x: x["name"])
            game.families = sorted(game.families, key=lambda x: x["name"])
            game.reimplements = sorted(game.reimplements,
                                       key=lambda x: x["name"])
            game.reimplementedby = sorted(game.reimplementedby,
                                          key=lambda x: x["name"])

        games.extend(newGames)

        return games
Example #16
def test_query_spaces():
    url = URL('http://example.com?a+b=c+d')
    assert url.query == MultiDict({'a b': 'c d'})
Example #17
 def query_parameters(self):
     return MultiDict(self._request.query.items())
Example #18
def test_query_empty():
    url = URL('http://example.com')
    assert isinstance(url.query, MultiDictProxy)
    assert url.query == MultiDict()
Example #19
def test_with_query_multidict():
    url = URL("http://example.com/path")
    q = MultiDict([("a", "b"), ("c", "d")])
    assert str(url.with_query(q)) == "http://example.com/path?a=b&c=d"
Example #20
def test_query():
    url = URL('http://example.com?a=1&b=2')
    assert url.query == MultiDict([('a', '1'), ('b', '2')])
Example #21
    def __init__(self,
                 method: str,
                 url: URL,
                 *,
                 params: Optional[Mapping[str, str]] = None,
                 headers: Optional[LooseHeaders] = None,
                 skip_auto_headers: Iterable[str] = frozenset(),
                 data: Any = None,
                 cookies: Optional[LooseCookies] = None,
                 auth: Optional[BasicAuth] = None,
                 version: http.HttpVersion = http.HttpVersion11,
                 compress: Optional[str] = None,
                 chunked: Optional[bool] = None,
                 expect100: bool = False,
                 loop: asyncio.AbstractEventLoop,
                 response_class: Optional[Type['ClientResponse']] = None,
                 proxy: Optional[URL] = None,
                 proxy_auth: Optional[BasicAuth] = None,
                 timer: Optional[BaseTimerContext] = None,
                 session: Optional['ClientSession'] = None,
                 ssl: Union[SSLContext, bool, Fingerprint, None] = None,
                 proxy_headers: Optional[LooseHeaders] = None,
                 traces: Optional[List['Trace']] = None):

        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy
        # FIXME: session is None in tests only, need to fix tests
        # assert session is not None
        self._session = cast('ClientSession', session)
        if params:
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.original_url = url
        self.url = url.with_fragment(None)
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        if response_class is None:
            real_response_class = ClientResponse
        else:
            real_response_class = response_class
        self.response_class = real_response_class  # type: Type[ClientResponse]
        self._timer = timer if timer is not None else TimerNoop()
        self._ssl = ssl

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth, proxy_headers)

        self.update_body_from_data(data)
        if data or self.method not in self.GET_METHODS:
            self.update_transfer_encoding()
        self.update_expect_continue(expect100)
        if traces is None:
            traces = []
        self._traces = traces
Example #22
def test_query_repeated_args():
    url = URL('http://example.com?a=1&b=2&a=3')
    assert url.query == MultiDict([('a', '1'), ('b', '2'), ('a', '3')])
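
The equality above is order- and multiplicity-sensitive; to read the repeated parameters back individually, MultiDict's documented getall method can be used:

from yarl import URL

url = URL('http://example.com?a=1&b=2&a=3')
assert url.query.getall('a') == ['1', '3']  # every occurrence, in order
assert url.query['a'] == '1'                # plain indexing gives the first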
Example #23
 def query(self) -> "MultiDict[str]":
     """Return a dictionary with the query variables."""
     return MultiDict(parse_qsl(self.query_string, keep_blank_values=True))
Example #24
def test_query_empty_arg():
    url = URL('http://example.com?a')
    assert url.query == MultiDict([('a', '')])
Example #25
    def post(self):
        """Return POST parameters."""
        if self._post is not None:
            return self._post
        if self.method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if (content_type not in ('', 'application/x-www-form-urlencoded',
                                 'multipart/form-data')):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        if self.content_type.startswith('multipart/'):
            warnings.warn(
                'To process multipart requests use .multipart'
                ' coroutine instead.', DeprecationWarning)

        body = yield from self.read()
        content_charset = self.charset or 'utf-8'

        environ = {
            'REQUEST_METHOD': self.method,
            'CONTENT_LENGTH': str(len(body)),
            'QUERY_STRING': '',
            'CONTENT_TYPE': self.headers.get(hdrs.CONTENT_TYPE)
        }

        fs = cgi.FieldStorage(fp=io.BytesIO(body),
                              environ=environ,
                              keep_blank_values=True,
                              encoding=content_charset)

        supported_transfer_encoding = {
            'base64': binascii.a2b_base64,
            'quoted-printable': binascii.a2b_qp
        }

        out = MultiDict()
        _count = 1
        for field in fs.list or ():
            transfer_encoding = field.headers.get(
                hdrs.CONTENT_TRANSFER_ENCODING, None)
            if field.filename:
                ff = FileField(
                    field.name,
                    field.filename,
                    field.file,  # N.B. file closed error
                    field.type)
                if self._post_files_cache is None:
                    self._post_files_cache = {}
                self._post_files_cache[field.name + str(_count)] = field
                _count += 1
                out.add(field.name, ff)
            else:
                value = field.value
                if transfer_encoding in supported_transfer_encoding:
                    # binascii accepts bytes
                    value = value.encode('utf-8')
                    value = supported_transfer_encoding[transfer_encoding](
                        value)
                out.add(field.name, value)

        self._post = MultiDictProxy(out)
        return self._post
Example #26
def test_with_query_multidict():
    url = URL('http://example.com/path')
    q = MultiDict([('a', 'b'), ('c', 'd')])
    assert str(url.with_query(q)) == 'http://example.com/path?a=b&c=d'
Example #27
	('bpm', 'beatsPerMinute'),
	('date', 'year'),
	('discnumber', 'disc_number', 'discNumber'),
	('disctotal', 'total_disc_count', 'totalDiscCount'),
	('tracknumber', 'track_number', 'trackNumber'),
	('tracktotal', 'total_track_count', 'totalTrackCount'),
]
_FIELD_MAP = [
	(field, alias)
	for group in _FIELD_MAP_GROUPS
	for field in group
	for alias in group
	if field != alias
]

FIELD_MAP = MultiDict(_FIELD_MAP)
"""~multidict.MultiDict: Mapping of field name aliases."""

# TODO: Support other/more metadata field names.
TEMPLATE_PATTERNS = {
	'%album%': ['album'],
	'%albumartist%': ['albumartist', 'album_artist', 'albumArtist'],
	'%artist%': ['artist'],
	'%date%': ['date'],
	'%disc%': ['discnumber', 'disc_number', 'discNumber'],
	'%disc2%': ['discnumber', 'disc_number', 'discNumber'],
	'%discnumber%': ['discnumber', 'disc_number', 'discNumber'],
	'%discnumber2%': ['discnumber', 'disc_number', 'discNumber'],
	'%genre%': ['genre'],
	'%title%': ['title'],
	'%track%': ['tracknumber', 'track_number', 'trackNumber'],
Example #28
    async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
        """Return POST parameters."""
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if content_type not in (
                "",
                "application/x-www-form-urlencoded",
                "multipart/form-data",
        ):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()  # type: MultiDict[Union[str, bytes, FileField]]

        if content_type == "multipart/form-data":
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                field_ct = field.headers.get(hdrs.CONTENT_TYPE)

                if isinstance(field, BodyPartReader):
                    assert field.name is not None

                    # Note that according to RFC 7578, the Content-Type header
                    # is optional, even for files, so we can't assume it's
                    # present.
                    # https://tools.ietf.org/html/rfc7578#section-4.4
                    if field.filename:
                        # store file in temp file
                        tmp = tempfile.TemporaryFile()
                        chunk = await field.read_chunk(size=2**16)
                        while chunk:
                            chunk = field.decode(chunk)
                            tmp.write(chunk)
                            size += len(chunk)
                            if 0 < max_size < size:
                                tmp.close()
                                raise HTTPRequestEntityTooLarge(
                                    max_size=max_size, actual_size=size)
                            chunk = await field.read_chunk(size=2**16)
                        tmp.seek(0)

                        if field_ct is None:
                            field_ct = "application/octet-stream"

                        ff = FileField(
                            field.name,
                            field.filename,
                            cast(io.BufferedReader, tmp),
                            field_ct,
                            field.headers,
                        )
                        out.add(field.name, ff)
                    else:
                        # deal with ordinary data
                        value = await field.read(decode=True)
                        if field_ct is None or field_ct.startswith("text/"):
                            charset = field.get_charset(default="utf-8")
                            out.add(field.name, value.decode(charset))
                        else:
                            out.add(field.name, value)
                        size += len(value)
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(max_size=max_size,
                                                            actual_size=size)
                else:
                    raise ValueError(
                        "To decode nested multipart you need "
                        "to use custom reader", )

                field = await multipart.next()
        else:
            data = await self.read()
            if data:
                charset = self.charset or "utf-8"
                bytes_query = data.rstrip()
                try:
                    query = bytes_query.decode(charset)
                except LookupError:
                    raise HTTPUnsupportedMediaType()
                out.extend(
                    parse_qsl(qs=query,
                              keep_blank_values=True,
                              encoding=charset))

        self._post = MultiDictProxy(out)
        return self._post
Example #29
async def test_merge_headers_with_multi_dict(create_session) -> None:
    session = await create_session(headers={"h1": "header1", "h2": "header2"})
    headers = session._prepare_headers(MultiDict([("h1", "h1")]))
    assert isinstance(headers, CIMultiDict)
    assert headers == {"h1": "h1", "h2": "header2"}
Example #30
    def _get_request_param(self, param_name):
        param_immutable = self.request.PARAMS.get(param_name, MultiDictProxy(MultiDict()))

        return param_immutable.copy()
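
For context, MultiDictProxy.copy() returns a plain, mutable MultiDict, so the helper above hands callers a value they can safely modify without touching the request's parameters. A minimal sketch:

from multidict import MultiDict, MultiDictProxy

proxy = MultiDictProxy(MultiDict(a='1'))
mutable = proxy.copy()                  # a detached, mutable MultiDict
mutable.add('a', '2')
assert mutable.getall('a') == ['1', '2']
assert proxy.getall('a') == ['1']       # the proxy is unchanged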