Exemple #1
0
async def ask_queries(reader, writer):
    """
    Probe the receiver with every known status query and collect replies.

    This set of commands seems to elicit all of the unique information I can
    get out of my receiver.  (Denon AVR-S730H)

    :param reader: stream the receiver's responses are read from
    :param writer: stream the query commands are written to
    :return: MultiDict mapping each stripped response line to the command
             that produced it
    """
    commands = [
        "PW?", "MV?", "CV?", "MU?", "SI?",
        "ZM?", "SV?", "SD?", "SLP?", "MS?",
        "MSQUICK ?", "PSLOM ?",
        "PSMULTEQ: ?", "PSDYNEQ ?", "PSREFLEV ?", "PSDYNVOL ?",
        "PSEFF ?", "PSDEL ?", "PSSWR ?", "PSRSTR ?",
        "Z2?", "Z2MU?", "Z2SLP?", "Z2QUICK ?", "TMAN?",
    ]

    facts = MultiDict()
    for cmd in commands:
        # The protocol terminates each command with a carriage return.
        writer.write(cmd.encode("ascii") + b"\r")
        for reply in await read_lines_until(reader, 0.1):
            facts.add(reply.strip(), cmd)
    return facts
Exemple #2
0
    def post(self):
        """Return POST parameters.

        Lazily parses the request body as form data and caches the
        result in ``self._post``; subsequent calls return the cache.
        Non-POST methods and unsupported content types produce an
        empty, read-only multidict.

        NOTE(review): this is an old-style ``yield from`` coroutine —
        callers must drive it with ``yield from`` / ``await``.
        """
        # Cached result from a previous call.
        if self._post is not None:
            return self._post
        if self.method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        # Only form-style bodies are parsed; anything else yields empty.
        if (content_type not in ('',
                                 'application/x-www-form-urlencoded',
                                 'multipart/form-data')):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        body = yield from self.read()
        content_charset = self.charset or 'utf-8'

        # Minimal CGI environment required by cgi.FieldStorage.
        environ = {'REQUEST_METHOD': self.method,
                   'CONTENT_LENGTH': str(len(body)),
                   'QUERY_STRING': '',
                   'CONTENT_TYPE': self.headers.get(hdrs.CONTENT_TYPE)}

        fs = cgi.FieldStorage(fp=io.BytesIO(body),
                              environ=environ,
                              keep_blank_values=True,
                              encoding=content_charset)

        # Decoders for Content-Transfer-Encoding values we understand.
        supported_transfer_encoding = {
            'base64': binascii.a2b_base64,
            'quoted-printable': binascii.a2b_qp
        }

        out = MultiDict()
        _count = 1
        for field in fs.list or ():
            transfer_encoding = field.headers.get(
                hdrs.CONTENT_TRANSFER_ENCODING, None)
            if field.filename:
                # File upload: wrap in FileField and keep the raw cgi field
                # alive in a cache so its file object is not closed early.
                ff = FileField(field.name,
                               field.filename,
                               field.file,  # N.B. file closed error
                               field.type)
                if self._post_files_cache is None:
                    self._post_files_cache = {}
                self._post_files_cache[field.name+str(_count)] = field
                _count += 1
                out.add(field.name, ff)
            else:
                value = field.value
                if transfer_encoding in supported_transfer_encoding:
                    # binascii accepts bytes
                    value = value.encode('utf-8')
                    value = supported_transfer_encoding[
                        transfer_encoding](value)
                out.add(field.name, value)

        self._post = MultiDictProxy(out)
        return self._post
Exemple #3
0
    def update_query(self, *args, **kwargs):
        """Return a new URL whose query is merged with the given parameters."""
        extra = self._get_str_query(*args, **kwargs)
        merged = MultiDict(self.query)
        merged.update(MultiDict(parse_qsl(extra, keep_blank_values=True)))
        return URL(
            self._val._replace(query=self._get_str_query(merged)),
            encoded=True,
        )
Exemple #4
0
    def __init__(self, method, url, *,
                 params=None, headers=None, skip_auto_headers=frozenset(),
                 data=None, cookies=None,
                 auth=None, version=http.HttpVersion11, compress=None,
                 chunked=None, expect100=False,
                 loop=None, response_class=None,
                 proxy=None, proxy_auth=None,
                 timer=None, session=None, auto_decompress=True,
                 verify_ssl=None, fingerprint=None, ssl_context=None,
                 proxy_headers=None):
        """Prepare an outgoing HTTP client request.

        ``verify_ssl=False`` and an explicit ``ssl_context`` are mutually
        exclusive.  ``url`` and ``proxy`` must already be ``URL`` instances.
        """
        if verify_ssl is False and ssl_context is not None:
            raise ValueError(
                "Either disable ssl certificate validation by "
                "verify_ssl=False or specify ssl_context, not both.")

        if loop is None:
            loop = asyncio.get_event_loop()

        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy
        self._session = session
        if params:
            # Merge the explicit params with any query already on the URL;
            # duplicate keys are preserved (MultiDict semantics).
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.url = url.with_fragment(None)  # fragment never goes on the wire
        self.original_url = url
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        self.response_class = response_class or ClientResponse
        self._timer = timer if timer is not None else TimerNoop()
        self._auto_decompress = auto_decompress
        self._verify_ssl = verify_ssl
        self._ssl_context = ssl_context

        if loop.get_debug():
            # Remember where the request was created for debug reports.
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # The update_* calls below are order-sensitive: headers must exist
        # before cookies/auth/proxy settings are layered on top of them.
        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth, proxy_headers)
        self.update_fingerprint(fingerprint)

        self.update_body_from_data(data)
        if data or self.method not in self.GET_METHODS:
            self.update_transfer_encoding()
        self.update_expect_continue(expect100)
Exemple #5
0
    def links(self):
        """Parse the response's ``Link`` headers into a read-only multidict.

        Each entry maps the link's ``rel`` value (or its URL when no ``rel``
        parameter is present) to a read-only multidict of the link's
        parameters plus a ``url`` key resolved against the response URL.
        """
        header = ", ".join(self.headers.getall("link", []))

        links = MultiDict()
        if not header:
            return MultiDictProxy(links)

        for entry in re.split(r",(?=\s*<)", header):
            url, params = re.match(r"\s*<(.*)>(.*)", entry).groups()

            link = MultiDict()
            for param in params.split(";")[1:]:
                name, _, value, _ = re.match(
                    r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$",
                    param, re.M
                ).groups()
                link.add(name, value)

            rel = link.get("rel", url)
            link.add("url", self.url.join(URL(url)))
            links.add(rel, MultiDictProxy(link))

        return MultiDictProxy(links)
    def implicit_tvals(self):
        """Expand each control's tval specs into concrete (tval, rval) pairs.

        Returns a dict mapping each expanded tval tuple to a dict of
        region-value tuples -> owning control.

        NOTE(review): ``tvals`` below appears unused — confirm before
        removing.  Several constructs (``available + map(...)``,
        ``len(tl_v)`` on a ``map`` result) only work on Python 2, where
        ``map``/``zip`` return lists — confirm the target interpreter.
        """
        tvals = dict()
        ret = defaultdict(NoClobberDict)

        tls = self.tlayers
        rls = self.rlayers

        # Category names of the t-layers, in layer order.
        tl_n = [t.category for t in tls]

        def _parse_spec(s):
            # "cat=a,b,c" -> ("cat", ["a", "b", "c"])
            c, v = s.split('=')
            return (c, v.split(','))

        for c in self.controls:
            tvs = []
            for specs, region in c.tval_specs:
                # Group the region's members by their r-layer value tuple.
                # NOTE(review): relies on this MultiDict keeping all values
                # per key so ``ws`` below iterates a collection — confirm
                # which MultiDict implementation is in scope.
                wsd = MultiDict()
                for w in region:
                    # wsd[self.values(w, rls)] = w
                    rv = tuple(zip([l.category for l in rls],
                                   self.values(w, rls)))
                    wsd[rv] = w


                for rl, ws in wsd.items():
                    # Distinct t-layer values observed in this group,
                    # one list per layer.
                    tl_v = map(list,
                               map(set, zip(*[self.values(w, tls)
                                              for w in sorted(ws)])))

                    assert len(tl_n) == len(tl_v)

                    # Explicit specs override the observed values per layer.
                    available = zip(tl_n, tl_v)
                    tmp = dict(available +
                               map(_parse_spec, specs.split('*')))

                    chunk = [(tl, rl) for tl in
                             list(product(*[tmp[t] for t in tl_n]))]
                    tvs.append(chunk)

            # Sanity check: no (tval, rval) pair may come from two specs.
            from itertools import combinations
            assert 0 == sum(map(len, [set.intersection(set(x), set(y))
                                      for x, y in list(combinations(tvs, 2))]))

            for tv, rv in sorted(set(sum(tvs, []))):
                ret[tv][rv] = c

        return dict([(k, dict(v)) for k, v in ret.items()])
Exemple #7
0
    def __init__(self, method, url, *,
                 params=None, headers=None, skip_auto_headers=frozenset(),
                 data=None, cookies=None,
                 auth=None, version=http.HttpVersion11, compress=None,
                 chunked=None, expect100=False,
                 loop=None, response_class=None,
                 proxy=None, proxy_auth=None, proxy_from_env=False,
                 timer=None, session=None, auto_decompress=True):
        """Prepare an outgoing HTTP client request.

        ``url`` and ``proxy`` must already be ``URL`` instances.
        """
        if loop is None:
            loop = asyncio.get_event_loop()

        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy
        self._session = session
        if params:
            # Merge explicit params with any query already on the URL;
            # duplicate keys are preserved (MultiDict semantics).
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.url = url.with_fragment(None)  # fragment never goes on the wire
        self.original_url = url
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        self.response_class = response_class or ClientResponse
        self._timer = timer if timer is not None else TimerNoop()
        self._auto_decompress = auto_decompress

        if loop.get_debug():
            # Remember where the request was created for debug reports.
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # Order-sensitive: headers must exist before cookies/auth/proxy
        # settings are layered on top of them.
        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth, proxy_from_env)

        self.update_body_from_data(data)
        self.update_transfer_encoding()
        self.update_expect_continue(expect100)
Exemple #8
0
 def info_data(self, request, **params):
     """Build a JSON-serialisable summary of *request*, merged with *params*."""
     data = {
         'method': request.method,
         'headers': self.getheaders(request),
         'pulsar': self.pulsar_info(request),
     }
     if request.method in ENCODE_URL_METHODS:
         data['args'] = as_dict(request.url_data)
     else:
         args, files = request.data_and_files()
         jfiles = MultiDict()
         if files:
             for name, part in files.items():
                 # Parts that are not valid text fall back to base64.
                 try:
                     payload = part.string()
                 except UnicodeError:
                     payload = part.base64()
                 jfiles.add(name, payload)
         data['args'] = as_dict(args)
         data['files'] = as_dict(jfiles)
     data.update(params)
     return data
Exemple #9
0
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        return MimeType(type='', subtype='', suffix='',
                        parameters=MultiDictProxy(MultiDict()))

    # First segment is the type itself; the rest are key=value parameters.
    head, *raw_params = mimetype.split(';')
    params = MultiDict()  # type: MultiDict[str]
    for item in raw_params:
        if not item:
            continue
        if '=' in item:
            name, value = item.split('=', 1)
        else:
            name, value = item, ''
        params.add(name.lower().strip(), value.strip(' "'))

    fulltype = head.strip().lower()
    if fulltype == '*':
        fulltype = '*/*'

    if '/' in fulltype:
        mtype, stype = fulltype.split('/', 1)
    else:
        mtype, stype = fulltype, ''
    if '+' in stype:
        stype, suffix = stype.split('+', 1)
    else:
        suffix = ''

    return MimeType(type=mtype, subtype=stype, suffix=suffix,
                    parameters=MultiDictProxy(params))
Exemple #10
0
    def links(self) -> 'MultiDictProxy[MultiDictProxy[Union[str, URL]]]':
        """Parse the ``Link`` response headers into a read-only multidict.

        Each entry maps the link's ``rel`` value (or its URL when no ``rel``
        parameter is present) to a read-only multidict of the link's
        parameters plus a ``url`` key resolved against the response URL.
        """
        header = ", ".join(self.headers.getall("link", []))

        if not header:
            return MultiDictProxy(MultiDict())

        links = MultiDict()  # type: MultiDict[MultiDictProxy[Union[str, URL]]]

        for entry in re.split(r",(?=\s*<)", header):
            match = re.match(r"\s*<(.*)>(.*)", entry)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            url, params_str = match.groups()

            link = MultiDict()  # type: MultiDict[Union[str, URL]]
            for param in params_str.split(";")[1:]:
                match = re.match(
                    r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$",
                    param, re.M
                )
                if match is None:  # pragma: no cover
                    # the check exists to suppress mypy error
                    continue
                name, _, value, _ = match.groups()
                link.add(name, value)

            rel = link.get("rel", url)  # type: ignore
            link.add("url", self.url.join(URL(url)))
            links.add(rel, MultiDictProxy(link))

        return MultiDictProxy(links)
Exemple #11
0
def test_query_empty_arg():
    """A bare query key with no value parses as the empty string."""
    parsed = URL("http://example.com?a")
    assert parsed.query == MultiDict([("a", "")])
Exemple #12
0
 def request(self, method, path, params=(), auth=None, **kwargs):
     """Issue a request, merging instance params and normalising auth."""
     # A dict auth is promoted to token-based authentication.
     if isinstance(auth, dict):
         auth = TokenAuth(auth)
     kwargs['auth'] = auth
     merged = MultiDict(params)
     merged.extend(self.params)
     url = urljoin(self.url, path).rstrip('/') + self.trailing
     return super().request(method, url, params=merged, **kwargs)
Exemple #13
0
from aiohttp import web
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from multidict import MultiDict

from pulpcore.plugin.content import Handler, PathNotResolved
from pulpcore.plugin.models import ContentArtifact
from pulp_container.app.models import ContainerDistribution, Tag
from pulp_container.app.schema_convert import Schema2toSchema1ConverterWrapper
from pulp_container.app.utils import get_accepted_media_types
from pulp_container.constants import MEDIA_TYPE

# Module-level logger for this registry handler module.
log = logging.getLogger(__name__)

# Sent with every response to advertise Docker Registry HTTP API v2 support.
v2_headers = MultiDict()
v2_headers["Docker-Distribution-API-Version"] = "registry/2.0"


class ArtifactNotFound(Exception):
    """
    Raised when the artifact associated with a published-artifact
    does not exist.
    """


class Registry(Handler):
    """
    A set of handlers for the Container Registry v2 API.
    """
Exemple #14
0
def test_query_spaces():
    """'+' in the raw query decodes to a space in both keys and values."""
    parsed = URL("http://example.com?a+b=c+d")
    assert parsed.query == MultiDict({"a b": "c d"})
Exemple #15
0
    async def post(self) -> MultiDictProxy:
        """Return POST parameters.

        Lazily parses the request body (urlencoded or multipart) and
        caches the result in ``self._post``.  Multipart file uploads are
        spooled to temporary files and wrapped in ``FileField``; the
        accumulated size is checked against ``self._client_max_size``.

        :raises HTTPRequestEntityTooLarge: when the body exceeds the
            configured client max size.
        """
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        # Only form-style bodies are parsed; anything else yields empty.
        if (content_type not in ('', 'application/x-www-form-urlencoded',
                                 'multipart/form-data')):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()  # type: MultiDict

        if content_type == 'multipart/form-data':
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                content_type = field.headers.get(hdrs.CONTENT_TYPE)

                if field.filename:
                    # store file in temp file
                    tmp = tempfile.TemporaryFile()
                    chunk = await field.read_chunk(size=2**16)
                    while chunk:
                        chunk = field.decode(chunk)
                        tmp.write(chunk)
                        size += len(chunk)
                        # max_size <= 0 means "no limit".
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(max_size=max_size,
                                                            actual_size=size)
                        chunk = await field.read_chunk(size=2**16)
                    tmp.seek(0)

                    ff = FileField(field.name, field.filename,
                                   cast(io.BufferedReader, tmp), content_type,
                                   field.headers)
                    out.add(field.name, ff)
                else:
                    # Plain field: decode text parts with their charset.
                    value = await field.read(decode=True)
                    if content_type is None or \
                            content_type.startswith('text/'):
                        charset = field.get_charset(default='utf-8')
                        value = value.decode(charset)
                    out.add(field.name, value)
                    size += len(value)
                    if 0 < max_size < size:
                        raise HTTPRequestEntityTooLarge(max_size=max_size,
                                                        actual_size=size)

                field = await multipart.next()
        else:
            # urlencoded (or empty content type): parse the whole body.
            data = await self.read()
            if data:
                charset = self.charset or 'utf-8'
                out.extend(
                    parse_qsl(data.rstrip().decode(charset),
                              keep_blank_values=True,
                              encoding=charset))

        self._post = MultiDictProxy(out)
        return self._post
Exemple #16
0
    def __init__(self,
                 method: str,
                 url: URL,
                 *,
                 params: Optional[Mapping[str, str]] = None,
                 headers: Optional[LooseHeaders] = None,
                 skip_auto_headers: Iterable[str] = frozenset(),
                 data: Any = None,
                 cookies: Optional[LooseCookies] = None,
                 auth: Optional[BasicAuth] = None,
                 version: http.HttpVersion = http.HttpVersion11,
                 compress: Optional[str] = None,
                 chunked: Optional[bool] = None,
                 expect100: bool = False,
                 loop: asyncio.AbstractEventLoop,
                 response_class: Optional[Type['ClientResponse']] = None,
                 proxy: Optional[URL] = None,
                 proxy_auth: Optional[BasicAuth] = None,
                 timer: Optional[BaseTimerContext] = None,
                 session: Optional['ClientSession'] = None,
                 ssl: Union[SSLContext, bool, Fingerprint, None] = None,
                 proxy_headers: Optional[LooseHeaders] = None,
                 traces: Optional[List['Trace']] = None):
        """Prepare an outgoing HTTP client request.

        ``url`` and ``proxy`` must already be ``URL`` instances.
        """
        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy
        # FIXME: session is None in tests only, need to fix tests
        # assert session is not None
        self._session = cast('ClientSession', session)
        if params:
            # Merge explicit params with any query already on the URL;
            # duplicate keys are preserved (MultiDict semantics).
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.original_url = url
        self.url = url.with_fragment(None)  # fragment never goes on the wire
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        if response_class is None:
            real_response_class = ClientResponse
        else:
            real_response_class = response_class
        self.response_class = real_response_class  # type: Type[ClientResponse]
        self._timer = timer if timer is not None else TimerNoop()
        self._ssl = ssl

        if loop.get_debug():
            # Remember where the request was created for debug reports.
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # Order-sensitive: headers must exist before cookies/auth/proxy
        # settings are layered on top of them.
        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth, proxy_headers)

        self.update_body_from_data(data)
        if data or self.method not in self.GET_METHODS:
            self.update_transfer_encoding()
        self.update_expect_continue(expect100)
        if traces is None:
            traces = []
        self._traces = traces
Exemple #17
0
class ServerProxy(object):
    """Asynchronous XML-RPC client proxy built on an aiohttp session.

    Remote methods are exposed via attribute or item access:
    ``await proxy.method(...)`` or ``await proxy["method"](...)``.
    """

    __slots__ = "client", "url", "loop", "headers", "encoding", "huge_tree"

    USER_AGENT = ("aiohttp XML-RPC client " "(Python: 3.9, version: 0.1)")

    def __init__(self,
                 url,
                 client=None,
                 headers=None,
                 encoding=None,
                 huge_tree=False,
                 **kwargs):
        """Create a proxy for the XML-RPC endpoint at *url*.

        :param url: endpoint URL (converted to str)
        :param client: optional aiohttp ClientSession; one is created from
            **kwargs when omitted
        :param headers: extra request headers merged over the defaults
        :param encoding: XML encoding used when serialising requests
        :param huge_tree: passed to lxml's parser to lift size limits
        """
        self.headers = MultiDict(headers or {})

        # Fill in defaults without clobbering caller-supplied values.
        self.headers.setdefault("Content-Type", "text/xml")
        self.headers.setdefault("User-Agent", self.USER_AGENT)

        self.encoding = encoding
        self.huge_tree = huge_tree

        self.url = str(url)
        self.client = client or aiohttp.client.ClientSession(**kwargs)

    @staticmethod
    def _make_request(method_name, *args, **kwargs):
        """Build the <methodCall> XML tree for one remote invocation.

        Positional args become one <param> each; kwargs, when present,
        are serialised together as a single trailing <param>.
        """
        root = etree.Element("methodCall")
        method_el = etree.Element("methodName")
        method_el.text = method_name

        root.append(method_el)

        params_el = etree.Element("params")
        root.append(params_el)

        for arg in args:
            param = etree.Element("param")
            val = etree.Element("value")
            param.append(val)
            params_el.append(param)
            val.append(py2xml(arg))

        if kwargs:
            param = etree.Element("param")
            val = etree.Element("value")
            param.append(val)
            params_el.append(param)
            val.append(py2xml(kwargs))

        return root

    def _parse_response(self, body, method_name):
        """Validate and decode an XML-RPC response body.

        Returns the decoded value (a list when the response carries more
        than one value).  Raises ValueError on schema-invalid XML, a
        mapped server exception on <fault>, and ParseError when the body
        contains neither params nor a fault.
        """
        try:
            if log.getEffectiveLevel() <= logging.DEBUG:
                log.debug("Server response: \n%s", body.decode())

            parser = etree.XMLParser(huge_tree=self.huge_tree)
            response = etree.fromstring(body, parser)
            schema.assertValid(response)
        except etree.DocumentInvalid:
            raise ValueError("Invalid body")

        result = response.xpath("//params/param/value")
        if result:
            if len(result) < 2:
                return xml2py(result[0])

            return [xml2py(item) for item in result]

        fault = response.xpath("//fault/value")
        if fault:
            err = xml2py(fault[0])

            raise xml2py_exception(
                err.get("faultCode", exceptions_edit.SystemError.code),
                err.get("faultString", "Unknown error"),
                default_exc_class=exceptions_edit.ServerError,
            )

        raise exceptions_edit.ParseError(
            'Respond body for method "%s" '
            "not contains any response.",
            method_name,
        )

    async def __remote_call(self, method_name, *args, **kwargs):
        """POST one serialised method call and decode the response."""
        async with self.client.post(
                str(self.url),
                data=etree.tostring(
                    self._make_request(method_name, *args, **kwargs),
                    xml_declaration=True,
                    encoding=self.encoding,
                ),
                headers=self.headers,
        ) as response:
            response.raise_for_status()

            return self._parse_response((await response.read()), method_name)

    def __getattr__(self, method_name):
        # Attribute access falls through to item access.
        return self[method_name]

    def __getitem__(self, method_name):
        # Return a callable bound to the remote method name.
        def method(*args, **kwargs):
            return self.__remote_call(method_name, *args, **kwargs)

        return method

    def __aenter__(self):
        # Delegate context management to the underlying client session.
        return self.client.__aenter__()

    def __aexit__(self, exc_type, exc_val, exc_tb):
        return self.client.__aexit__(exc_type, exc_val, exc_tb)

    def close(self):
        """Close the underlying aiohttp client session."""
        return self.client.close()
Exemple #18
0
@author: Paolo Cozzi <*****@*****.**>
"""

import aiohttp
import logging

from yarl import URL
from multidict import MultiDict

# Default request headers asking the remote API for JSON responses.
HEADERS = {
    'Accept': 'application/json',
}

# An empty multidict used as the default request params.
PARAMS = MultiDict([])

# Get an instance of a logger
logger = logging.getLogger(__name__)


async def parse_json(response, url, content_type='application/json'):
    """Helper function to parse json data"""
    logger.debug(f"Got response from {url}")

    try:
        payload = await response.json(content_type=content_type)
    except aiohttp.client_exceptions.ContentTypeError as exc:
        # Mismatched content type: log it and fall through (returns None).
        logger.error(repr(exc))
    else:
        return payload
 async def post(self) -> 'MultiDict[str]':
     """Return POST parameters."""
     pairs = parse_qsl(self._text, keep_blank_values=True)
     return MultiDict(pairs)
 def query(self) -> 'MultiDict[str]':
     """Return a dictionary with the query variables."""
     pairs = parse_qsl(self.query_string, keep_blank_values=True)
     return MultiDict(pairs)
async def test_traceparent_handling(aiohttp_client, aioeapm):
    """Incoming traceparent/tracestate headers reach TraceParent.from_string."""
    client = await aiohttp_client(aioeapm.app)
    patcher = mock.patch(
        "elasticapm.contrib.aiohttp.middleware.TraceParent.from_string",
        wraps=TraceParent.from_string)
    with patcher as wrapped_from_string:
        await client.get(
            "/boom",
            headers=(
                (constants.TRACEPARENT_HEADER_NAME,
                 "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-03"),
                (constants.TRACESTATE_HEADER_NAME, "foo=bar,bar=baz"),
                (constants.TRACESTATE_HEADER_NAME, "baz=bazzinga"),
            ),
        )

    transaction = aioeapm.client.events[constants.TRANSACTION][0]

    assert transaction["trace_id"] == "0af7651916cd43dd8448eb211c80319c"
    assert transaction["parent_id"] == "b7ad6b7169203331"
    assert "foo=bar,bar=baz,baz=bazzinga" in wrapped_from_string.call_args[0]


@pytest.mark.parametrize("headers,expected", ((MultiDict(
    (("a", "1"), ("a", "2"))), "1,2"), (MultiDict(), None)))
async def test_aiohttptraceparent_merge(headers, expected):
    result = AioHttpTraceParent.merge_duplicate_headers(headers, "a")
    assert result == expected
Exemple #22
0
 def query_parameters(self):
     """Return a mutable MultiDict copy of the request's query parameters."""
     items = self._request.query.items()
     return MultiDict(items)
Exemple #23
0
def test_query_nonascii():
    """Non-ASCII query keys and values survive URL parsing intact."""
    parsed = URL("http://example.com?ключ=знач")
    assert parsed.query == MultiDict({"ключ": "знач"})
Exemple #24
0
    def post(self):
        """Return POST parameters.

        Lazily parses the request body as form data and caches the
        result in ``self._post``; subsequent calls return the cache.
        Non-POST methods and unsupported content types produce an
        empty, read-only multidict.

        NOTE(review): this is an old-style ``yield from`` coroutine —
        callers must drive it with ``yield from`` / ``await``.
        """
        # Cached result from a previous call.
        if self._post is not None:
            return self._post
        if self.method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        # Only form-style bodies are parsed; anything else yields empty.
        if (content_type not in ('', 'application/x-www-form-urlencoded',
                                 'multipart/form-data')):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        body = yield from self.read()
        content_charset = self.charset or 'utf-8'

        # Minimal CGI environment required by cgi.FieldStorage.
        environ = {
            'REQUEST_METHOD': self.method,
            'CONTENT_LENGTH': str(len(body)),
            'QUERY_STRING': '',
            'CONTENT_TYPE': self.headers.get(hdrs.CONTENT_TYPE)
        }

        fs = cgi.FieldStorage(fp=io.BytesIO(body),
                              environ=environ,
                              keep_blank_values=True,
                              encoding=content_charset)

        # Decoders for Content-Transfer-Encoding values we understand.
        supported_transfer_encoding = {
            'base64': binascii.a2b_base64,
            'quoted-printable': binascii.a2b_qp
        }

        out = MultiDict()
        _count = 1
        for field in fs.list or ():
            transfer_encoding = field.headers.get(
                hdrs.CONTENT_TRANSFER_ENCODING, None)
            if field.filename:
                # File upload: wrap in FileField and keep the raw cgi field
                # alive in a cache so its file object is not closed early.
                ff = FileField(
                    field.name,
                    field.filename,
                    field.file,  # N.B. file closed error
                    field.type)
                if self._post_files_cache is None:
                    self._post_files_cache = {}
                self._post_files_cache[field.name + str(_count)] = field
                _count += 1
                out.add(field.name, ff)
            else:
                value = field.value
                if transfer_encoding in supported_transfer_encoding:
                    # binascii accepts bytes
                    value = value.encode('utf-8')
                    value = supported_transfer_encoding[transfer_encoding](
                        value)
                out.add(field.name, value)

        self._post = MultiDictProxy(out)
        return self._post
    def __init__(self,
                 method,
                 url,
                 *,
                 params=None,
                 headers=None,
                 skip_auto_headers=frozenset(),
                 data=None,
                 cookies=None,
                 auth=None,
                 version=http.HttpVersion11,
                 compress=None,
                 chunked=None,
                 expect100=False,
                 loop=None,
                 response_class=None,
                 proxy=None,
                 proxy_auth=None,
                 timer=None,
                 session=None,
                 auto_decompress=True,
                 verify_ssl=None,
                 fingerprint=None,
                 ssl_context=None,
                 proxy_headers=None):
        """Prepare an outgoing HTTP client request.

        ``verify_ssl=False`` and an explicit ``ssl_context`` are mutually
        exclusive.  ``url`` and ``proxy`` must already be ``URL`` instances.
        """
        if verify_ssl is False and ssl_context is not None:
            raise ValueError(
                "Either disable ssl certificate validation by "
                "verify_ssl=False or specify ssl_context, not both.")

        if loop is None:
            loop = asyncio.get_event_loop()

        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy
        self._session = session
        if params:
            # Merge explicit params with any query already on the URL;
            # duplicate keys are preserved (MultiDict semantics).
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.url = url.with_fragment(None)  # fragment never goes on the wire
        self.original_url = url
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        self.response_class = response_class or ClientResponse
        self._timer = timer if timer is not None else TimerNoop()
        self._auto_decompress = auto_decompress
        self._verify_ssl = verify_ssl
        self._ssl_context = ssl_context

        if loop.get_debug():
            # Remember where the request was created for debug reports.
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # Order-sensitive: headers must exist before cookies/auth/proxy
        # settings are layered on top of them.
        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth, proxy_headers)
        self.update_fingerprint(fingerprint)

        self.update_body_from_data(data)
        if data or self.method not in self.GET_METHODS:
            self.update_transfer_encoding()
        self.update_expect_continue(expect100)
def test_merge_headers_with_multi_dict(create_session):
    """Request headers given as a MultiDict must override session defaults.

    The session starts with ``h1``/``h2`` defaults; a MultiDict that
    redefines ``h1`` should replace it while ``h2`` survives, and the
    merged result must be a case-insensitive ``CIMultiDict``.
    """
    default_headers = {"h1": "header1", "h2": "header2"}
    session = create_session(headers=default_headers)
    request_headers = MultiDict([("h1", "h1")])
    merged = session._prepare_headers(request_headers)
    assert isinstance(merged, CIMultiDict)
    expected = {"h1": "h1", "h2": "header2"}
    assert merged == expected
Exemple #27
0
    def links(self) -> 'MultiDictProxy[MultiDictProxy[Union[str, URL]]]':
        """Parse the response's ``Link`` header(s) into a nested multidict.

        The outer mapping is keyed by each link's ``rel`` parameter
        (falling back to the raw target URL when ``rel`` is absent).
        Each value is a read-only multidict of that link's parameters
        plus a ``url`` entry holding the target URL joined against the
        response URL.  Returns an empty mapping when no ``Link`` header
        is present.
        """
        # Multiple Link headers are equivalent to one comma-joined value.
        links_str = ", ".join(self.headers.getall("link", []))

        if not links_str:
            return MultiDictProxy(MultiDict())

        links = MultiDict()  # type: MultiDict[MultiDictProxy[Union[str, URL]]]

        # Split only on commas that start a new "<url>..." link-value so
        # commas inside parameter values don't split a link apart.
        for val in re.split(r",(?=\s*<)", links_str):
            match = re.match(r"\s*<(.*)>(.*)", val)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            url, params_str = match.groups()
            # The first ";"-separated piece is the "<url>" part itself;
            # the remainder are key=value parameters.
            params = params_str.split(";")[1:]

            link = MultiDict()  # type: MultiDict[Union[str, URL]]

            for param in params:
                # key = value, with the value optionally quoted (group 2
                # captures the quote so group 4 can require a matching one).
                match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$",
                                 param, re.M)
                if match is None:  # pragma: no cover
                    # the check exists to suppress mypy error
                    continue
                key, _, value, _ = match.groups()

                link.add(key, value)

            key = link.get("rel", url)  # type: ignore

            # Resolve a possibly-relative target against the response URL.
            link.add("url", self.url.join(URL(url)))

            links.add(key, MultiDictProxy(link))

        return MultiDictProxy(links)
    logs.error(f"File {PATH_MODEL} doesn't exist, cannot load the model")
    sys.exit(1)

# test image with the digit "2" on it
if not os.path.isfile(PATH_IMAGE_TEST):
    logs.error(
        f"File {PATH_IMAGE_TEST} doesn't exist, cannot load the test image")
    sys.exit(1)

# wrk workaround to use GET request instead of POST
FileField = namedtuple("FileField",
                       ['name', 'file_name', 'content_type', 'file'])
# Read the image bytes inside a context manager so the file handle is
# closed immediately (the original `open(...).read()` leaked the handle
# for the lifetime of the process).
with open(PATH_IMAGE_TEST, 'rb') as _image_fp:
    _IMAGE_BYTES = _image_fp.read()
# Simulated request.post() payload, keyed like a real multipart form.
DATA = MultiDict({
    "image":
    FileField(name=POST_OBJ_KEY,
              content_type="image/jpeg",
              file_name="test_2.jpeg",
              file=_IMAGE_BYTES)
})


class Endpoint(Predictor):
    async def handler(self, request) -> web.json_response:
        """ Web server handler function """

        try:
            # data = await request.post()
            # workaround for wrk to use GET request instead of POST
            data = DATA.copy()
            if POST_OBJ_KEY not in data:
                return web.json_response({"data": None},
Exemple #29
0
    def post(self):
        """Return POST parameters as a read-only multidict (coroutine).

        Lazily parses the request body on first call and caches the
        result in ``self._post``.  Only form payloads are parsed
        (``application/x-www-form-urlencoded`` and
        ``multipart/form-data``); any other content type, or a method
        outside ``POST_METHODS``, yields an empty mapping.  Uploaded
        files are spooled to temporary files and exposed as
        ``FileField`` tuples.  Raises ``ValueError`` once the
        accumulated size of a part exceeds ``self._client_max_size``.
        """
        # Return the cached result if the body was already parsed.
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if (content_type not in ('', 'application/x-www-form-urlencoded',
                                 'multipart/form-data')):
            # Not a form payload -- nothing to parse.
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()

        if content_type == 'multipart/form-data':
            multipart = yield from self.multipart()

            field = yield from multipart.next()
            while field is not None:
                size = 0
                max_size = self._client_max_size
                # Rebinds ``content_type`` to the *part's* type from here
                # on; the outer request value is no longer needed.
                content_type = field.headers.get(hdrs.CONTENT_TYPE)

                if field.filename:
                    # store file in temp file
                    tmp = tempfile.TemporaryFile()
                    chunk = yield from field.read_chunk(size=2**16)
                    while chunk:
                        chunk = field.decode(chunk)
                        tmp.write(chunk)
                        size += len(chunk)
                        # max_size <= 0 disables the body size limit.
                        if max_size > 0 and size > max_size:
                            raise ValueError(
                                'Maximum request body size exceeded')
                        chunk = yield from field.read_chunk(size=2**16)
                    tmp.seek(0)

                    ff = FileField(field.name, field.filename, tmp,
                                   content_type, field.headers)
                    out.add(field.name, ff)
                else:
                    value = yield from field.read(decode=True)
                    # Text parts (or parts with no declared type) are
                    # decoded to str using the part's charset.
                    if content_type is None or \
                            content_type.startswith('text/'):
                        charset = field.get_charset(default='utf-8')
                        value = value.decode(charset)
                    out.add(field.name, value)
                    size += len(value)
                    if max_size > 0 and size > max_size:
                        raise ValueError('Maximum request body size exceeded')

                field = yield from multipart.next()
        else:
            # urlencoded (or empty content type): parse the whole body.
            data = yield from self.read()
            if data:
                charset = self.charset or 'utf-8'
                out.extend(
                    parse_qsl(data.rstrip().decode(charset),
                              keep_blank_values=True,
                              encoding=charset))

        self._post = MultiDictProxy(out)
        return self._post
Exemple #30
0
def test_query():
    """A URL's query string is exposed as an ordered multidict of pairs."""
    query = URL("http://example.com?a=1&b=2").query
    expected = MultiDict([("a", "1"), ("b", "2")])
    assert query == expected
Exemple #31
0
    async def post(self) -> MultiDictProxy:
        """Return POST parameters as a read-only multidict.

        Lazily parses the request body on first call and caches the
        result in ``self._post``.  Only form payloads are parsed
        (``application/x-www-form-urlencoded`` and
        ``multipart/form-data``); any other content type, or a method
        outside ``POST_METHODS``, yields an empty mapping.  Uploaded
        files are spooled to temporary files and exposed as
        ``FileField`` tuples.  Raises ``HTTPRequestEntityTooLarge``
        once a part exceeds ``self._client_max_size``.
        """
        # Return the cached result if the body was already parsed.
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if (content_type not in ('',
                                 'application/x-www-form-urlencoded',
                                 'multipart/form-data')):
            # Not a form payload -- nothing to parse.
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()  # type: MultiDict

        if content_type == 'multipart/form-data':
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                # Rebinds ``content_type`` to the *part's* type; the outer
                # request value is no longer needed at this point.
                content_type = field.headers.get(hdrs.CONTENT_TYPE)

                if field.filename:
                    # store file in temp file
                    tmp = tempfile.TemporaryFile()
                    chunk = await field.read_chunk(size=2**16)
                    while chunk:
                        chunk = field.decode(chunk)
                        tmp.write(chunk)
                        size += len(chunk)
                        # max_size <= 0 disables the body size limit.
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(
                                max_size=max_size,
                                actual_size=size
                            )
                        chunk = await field.read_chunk(size=2**16)
                    tmp.seek(0)

                    ff = FileField(field.name, field.filename,
                                   cast(io.BufferedReader, tmp),
                                   content_type, field.headers)
                    out.add(field.name, ff)
                else:
                    value = await field.read(decode=True)
                    # Text parts (or parts with no declared type) are
                    # decoded to str using the part's charset.
                    if content_type is None or \
                            content_type.startswith('text/'):
                        charset = field.get_charset(default='utf-8')
                        value = value.decode(charset)
                    out.add(field.name, value)
                    size += len(value)
                    if 0 < max_size < size:
                        raise HTTPRequestEntityTooLarge(
                            max_size=max_size,
                            actual_size=size
                        )

                field = await multipart.next()
        else:
            # urlencoded (or empty content type): parse the whole body.
            data = await self.read()
            if data:
                charset = self.charset or 'utf-8'
                out.extend(
                    parse_qsl(
                        data.rstrip().decode(charset),
                        keep_blank_values=True,
                        encoding=charset))

        self._post = MultiDictProxy(out)
        return self._post
Exemple #32
0
def test_query_empty():
    """A URL without a query string yields an empty read-only multidict."""
    query = URL("http://example.com").query
    assert isinstance(query, MultiDictProxy)
    assert query == MultiDict()
 def get_result_container(self) -> MutableMapping:
     """Create and return the mutable mapping used to collect results.

     A :class:`MultiDict` is used so repeated keys are all preserved.
     """
     container: MutableMapping = MultiDict()
     return container
    def invoke_method(
            self,
            app_id: str,
            method_name: str,
            data: Union[bytes, str, GrpcMessage],
            content_type: Optional[str] = None,
            metadata: Optional[MetadataTuple] = None,
            http_verb: Optional[str] = None,
            http_querystring: Optional[MetadataTuple] = None) -> InvokeMethodResponse:
        """Invoke a service method over HTTP.

        Synchronous wrapper around the async HTTP client: it builds the
        request, drives it to completion on an event loop, and returns
        the wrapped response.

        Args:
            app_id (str): Application Id.
            method_name (str): Method to be invoked.
            data (bytes or str or GrpcMessage, optional): Data for request's body.
            content_type (str, optional): Content type header.
            metadata (MetadataTuple, optional): Additional headers.
            http_verb (str, optional): HTTP verb for the request (defaults to GET).
            http_querystring (MetadataTuple, optional): Query parameters.

        Returns:
            InvokeMethodResponse: the response from the method invocation.
        """

        # Default to GET when no verb was supplied.
        verb = 'GET'
        if http_verb is not None:
            verb = http_verb

        # Flatten the metadata tuples into a plain header dict (later
        # duplicates overwrite earlier ones).
        headers = {}
        if metadata is not None:
            for key, value in metadata:
                headers[key] = value
        # MultiDict so the same query key may appear more than once.
        query_params: MultiDict = MultiDict()
        if http_querystring is not None:
            for key, value in http_querystring:
                query_params.add(key, value)

        if content_type is not None:
            headers[CONTENT_TYPE_HEADER] = content_type

        url = f'{self._client.get_api_url()}/invoke/{app_id}/method/{method_name}'

        # Serialize the payload: protobuf messages to wire bytes, str to
        # UTF-8; raw bytes pass through unchanged.
        if isinstance(data, GrpcMessage):
            body = data.SerializeToString()
        elif isinstance(data, str):
            body = data.encode('utf-8')
        else:
            body = data

        async def make_request():
            # Send the request and repackage the response, preserving
            # multi-valued headers via getall().
            resp_body, r = await self._client.send_bytes(
                method=verb,
                headers=headers,
                url=url,
                data=body,
                query_params=query_params)

            resp_data = InvokeMethodResponse(resp_body, r.content_type)
            for key in r.headers:
                resp_data.headers[key] = r.headers.getall(key)  # type: ignore
            return resp_data

        # NOTE(review): if this is called from a thread whose loop is
        # already running, run_until_complete() below will raise --
        # presumably this method is intended for synchronous callers
        # only; confirm against the callers.
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)

        return loop.run_until_complete(make_request())
Exemple #35
0
    async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
        """Return POST parameters as a read-only multidict.

        Lazily parses the request body on first call and caches the
        result in ``self._post``.  Only form payloads are parsed
        (``application/x-www-form-urlencoded`` and
        ``multipart/form-data``); any other content type, or a method
        outside ``POST_METHODS``, yields an empty mapping.  Uploaded
        files are spooled to temporary files and exposed as
        ``FileField`` tuples.  Raises ``HTTPRequestEntityTooLarge``
        when a part exceeds ``self._client_max_size``, ``ValueError``
        for nested multipart payloads, and
        ``HTTPUnsupportedMediaType`` for an unknown body charset.
        """
        # Return the cached result if the body was already parsed.
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if content_type not in (
            "",
            "application/x-www-form-urlencoded",
            "multipart/form-data",
        ):
            # Not a form payload -- nothing to parse.
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()  # type: MultiDict[Union[str, bytes, FileField]]

        if content_type == "multipart/form-data":
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                field_ct = field.headers.get(hdrs.CONTENT_TYPE)

                if isinstance(field, BodyPartReader):
                    assert field.name is not None

                    # Note that according to RFC 7578, the Content-Type header
                    # is optional, even for files, so we can't assume it's
                    # present.
                    # https://tools.ietf.org/html/rfc7578#section-4.4
                    if field.filename:
                        # store file in temp file
                        tmp = tempfile.TemporaryFile()
                        chunk = await field.read_chunk(size=2 ** 16)
                        while chunk:
                            chunk = field.decode(chunk)
                            tmp.write(chunk)
                            size += len(chunk)
                            # max_size <= 0 disables the body size limit.
                            if 0 < max_size < size:
                                tmp.close()
                                raise HTTPRequestEntityTooLarge(
                                    max_size=max_size, actual_size=size
                                )
                            chunk = await field.read_chunk(size=2 ** 16)
                        tmp.seek(0)

                        if field_ct is None:
                            field_ct = "application/octet-stream"

                        ff = FileField(
                            field.name,
                            field.filename,
                            cast(io.BufferedReader, tmp),
                            field_ct,
                            field.headers,
                        )
                        out.add(field.name, ff)
                    else:
                        # deal with ordinary data
                        value = await field.read(decode=True)
                        # Text parts (or parts with no declared type) are
                        # decoded to str; everything else stays bytes.
                        if field_ct is None or field_ct.startswith("text/"):
                            charset = field.get_charset(default="utf-8")
                            out.add(field.name, value.decode(charset))
                        else:
                            out.add(field.name, value)
                        size += len(value)
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(
                                max_size=max_size, actual_size=size
                            )
                else:
                    raise ValueError(
                        "To decode nested multipart you need " "to use custom reader",
                    )

                field = await multipart.next()
        else:
            # urlencoded (or empty content type): parse the whole body.
            data = await self.read()
            if data:
                charset = self.charset or "utf-8"
                bytes_query = data.rstrip()
                try:
                    query = bytes_query.decode(charset)
                except LookupError:
                    # Unknown codec name in the declared charset.
                    raise HTTPUnsupportedMediaType()
                out.extend(
                    parse_qsl(qs=query, keep_blank_values=True, encoding=charset)
                )

        self._post = MultiDictProxy(out)
        return self._post
Exemple #36
0
import pytest
from multidict import MultiDict
from yarl import URL

from aiohttp import helpers
from aiohttp.helpers import is_expected_content_type

IS_PYPY = platform.python_implementation() == "PyPy"

# ------------------- parse_mimetype ----------------------------------


@pytest.mark.parametrize(
    "mimetype, expected",
    [
        ("", helpers.MimeType("", "", "", MultiDict())),
        ("*", helpers.MimeType("*", "*", "", MultiDict())),
        ("application/json",
         helpers.MimeType("application", "json", "", MultiDict())),
        (
            "application/json;  charset=utf-8",
            helpers.MimeType("application", "json", "",
                             MultiDict({"charset": "utf-8"})),
        ),
        (
            """application/json; charset=utf-8;""",
            helpers.MimeType("application", "json", "",
                             MultiDict({"charset": "utf-8"})),
        ),
        (
            'ApPlIcAtIoN/JSON;ChaRseT="UTF-8"',
Exemple #37
0
class View(web.View):
    """Base class for HAL-JSON REST views.

    Extends :class:`aiohttp.web.View` with canonical-URL handling,
    default query parameters, ETag support, and HAL ``_links`` /
    ``_embedded`` rendering.  Subclasses are registered on the router
    via :meth:`add_to_router` and indexed in :attr:`PATHS` /
    :attr:`PATTERNS`.
    """

    SEGMENT_RE = re.compile(r'([^/{}]+)/?$')
    PATHS = {}      # plain path -> View subclass
    PATTERNS = {}   # compiled pattern -> View subclass

    def __init__(self,
                 request: web.Request,
                 match_dict: T.Optional[collections.abc.Mapping] = None,
                 embed=None):
        super().__init__(request)
        if match_dict is None:
            rel_url = request.rel_url
        else:
            rel_url = self.aiohttp_resource().url_for(**match_dict)
        if embed is not None:
            rel_url = rel_url.update_query(embed=embed)
        self.__rel_url = rel_url
        # Lazily-computed caches, filled by the corresponding properties.
        self.__embed = None
        self.__query = None
        self.__canonical_rel_url = None
        self.__etag = None
        self.__match_dict = dict(
            # Ugly: we're using non-public member ``_match()`` of
            # :class:`aiohttp.web.Resource`.  But most alternatives are
            # equally ugly.
            self.aiohttp_resource()._match(self.rel_url.raw_path))

    def __getitem__(self, item):
        # language=rst
        """Shorthand for ``self.match_dict[item]``"""
        return self.__match_dict[item]

    @property
    def match_dict(self):
        """Path parameters matched from the request URL."""
        return self.__match_dict

    def add_embed_to_url(self, url: web.URL, link_relation):
        """Propagate the ``embed`` sub-query for *link_relation* to *url*."""
        embed = self.embed.get(link_relation)
        if embed is None:
            return url
        return url.update_query(embed=embed)

    @property
    def rel_url(self) -> web.URL:
        # language=rst
        """The relative URL as passed to the constructor."""
        return self.__rel_url

    @property
    def canonical_rel_url(self) -> web.URL:
        # language=rst
        """Like :meth:`rel_url`, but with all default query parameters explicitly listed."""
        if self.__canonical_rel_url is None:
            self.__canonical_rel_url = self.__rel_url.with_query(self.query)
        # noinspection PyTypeChecker
        return self.__canonical_rel_url

    @property
    def to_link(self) -> T.Dict[str, str]:
        """The HAL JSON link object to this resource."""
        result = {'href': str(self.canonical_rel_url)}
        if self.link_name is not None:
            result['name'] = self.link_name
        if self.link_title is not None:
            result['title'] = self.link_title
        return result

    async def etag(self) -> T.Union[None, bool, str]:
        # language=rst
        """

        Return values have the following meanings:

        ``True``
            Resource exists but doesn't support ETags
        ``False``
            Resource doesn't exist and doesn't support ETags
        ``None``
            Resource doesn't exist and supports ETags.
        ETag string:
            Resource exists and supports ETags.

        """
        return isinstance(self.aiohttp_resource(), web.PlainResource)

    @property
    def query(self):
        # language=rst
        """Like ``self.rel_url.query``, but with default parameters added.

        These default parameters are retrieved from the swagger definition.

        """
        if self.__query is None:
            # Start from the defaults, then let the actual request query
            # override them.
            self.__query = MultiDict(self.default_query_params)
            self.__query.update(self.__rel_url.query)
        return self.__query

    @property
    def embed(self):
        """Parsed ``embed`` query parameter (cached)."""
        if self.__embed is None:
            embed = ','.join(self.query.getall('embed', default=[]))
            self.__embed = parse_embed(embed)
        return self.__embed

    @staticmethod
    def construct_resource_for(request: web.Request, rel_url: web.URL) \
            -> T.Optional[web.View]:
        # language=rst
        """

        .. deprecated:: v0

            Only used by :meth:`PlainView._links` and
            :meth:`DynamicView._links`, which are themselves deprecated.

        """
        for resource in request.app.router.resources():
            match_dict = resource._match(rel_url.raw_path)
            if match_dict is not None:
                if hasattr(resource, 'rest_utils_class'):
                    return resource.rest_utils_class(request, rel_url)
                _logger.error(
                    "Path %s doesn't resolve to rest_utils.Resource.",
                    str(rel_url))
                return None
        return None

    @property
    def default_query_params(self) -> T.Dict[str, str]:
        """Default query parameters; subclasses may override."""
        return {}

    @classmethod
    def add_to_router(cls,
                      router: web.UrlDispatcher,
                      path: str,
                      expect_handler: T.Optional[T.Callable] = None):
        # language=rst
        """Adds this View class to the aiohttp router."""
        cls._aiohttp_resource = router.add_resource(path)
        # Register the current class in the appropriate registry:
        if isinstance(cls._aiohttp_resource, web.DynamicResource):
            View.PATTERNS[cls._aiohttp_resource.get_info()['pattern']] = cls
        elif isinstance(cls._aiohttp_resource, web.PlainResource):
            View.PATHS[cls._aiohttp_resource.get_info()['path']] = cls
        else:
            _logger.critical(
                "aiohttp router method 'add_resource()' returned resource object of unexpected type %s",
                cls._aiohttp_resource.__class__)
        cls._aiohttp_resource.rest_utils_class = cls
        cls._aiohttp_resource.add_route('*',
                                        cls,
                                        expect_handler=expect_handler)
        return cls._aiohttp_resource

    @classmethod
    def aiohttp_resource(
            cls) -> T.Union[web.PlainResource, web.DynamicResource]:
        assert hasattr(cls, '_aiohttp_resource'), \
            "%s.aiohttp_resource() called before .add_to_router()" % cls
        return cls._aiohttp_resource

    @property
    def link_name(self) -> T.Optional[str]:
        # language=rst
        """A more or less unique name for the resource.

        This default implementation returns the last path segment of the url of
        this resource if that last path segment is templated.  Otherwise `None`
        is returned (in which case there's no `name` attribute in link objects
        for this resource).  See also :meth:`to_link`.

        Subclasses can override this default implementation.

        """
        formatter = self.aiohttp_resource().get_info().get('formatter')
        if formatter is not None and re.search(r'\}[^/]*/?$', formatter):
            return self.rel_url.name or self.rel_url.parent.name
        return None

    @property
    def link_title(self) -> T.Optional[str]:
        # language=rst
        """The title of this resource, to be used in link objects.

        This default implementation returns `None`, and there's no `title`
        attribute in HAL link objects.  See also :meth:`to_link`.

        Subclasses can override this default implementation.

        """
        return None

    async def attributes(self):
        # language=rst
        """

        This default implementation returns *no* attributes, ie. an empty
        `dict`.

        Most subclasses should override this default implementation.

        """
        return {}

    async def _links(self) -> T.Dict[str, T.Any]:
        # language=rst
        """

        Called by :meth:`.links` and :meth:`.embedded`.  See the
        documentation of these methods for more info.

        Most subclasses should override this default implementation.

        :returns: This method must return a dict.  The values must have one of
            the following types:

            -   asynchronous generator of `.View` objects
            -   generator of `.View` objects
            -   a `.View` object
            -   a *link object*
            -   Iterable of `.View`\s and/or *link objects* (may be mixed)

            where *link object* means a HALJSON link object, ie. a `dict` with
            at least a key ``href``.

        """
        return {}

    async def embedded(self) -> T.Dict[str, T.Any]:
        """Those ``_links`` entries the client asked to embed."""
        result = {}
        _links = await self._links()
        for key, value in _links.items():
            if key in self.embed:
                if (inspect.isasyncgen(value) or inspect.isgenerator(value)
                        or isinstance(value, View)
                        or isinstance(value, collections.abc.Iterable)):
                    result[key] = value
                else:
                    _logger.error("Don't know how to embed object: %s", value)
        return result

    async def links(self) -> T.Dict[str, T.Any]:
        """Render the ``_links`` entries not embedded as HAL link objects."""
        result = {}
        _links = await self._links()
        for key, value in _links.items():
            if isinstance(value, View):
                if key not in self.embed:
                    result[key] = value.to_link
            elif inspect.isasyncgen(value):
                if key not in self.embed:

                    async def g1(resources):
                        async for resource in resources:
                            yield resource.to_link

                    result[key] = g1(value)
            elif inspect.isgenerator(value):
                if key not in self.embed:

                    def g2(resources):
                        for resource in resources:
                            yield resource.to_link

                    result[key] = g2(value)
            # ``collections.Mapping`` was removed in Python 3.10; use the
            # ``collections.abc`` alias as the rest of this class does.
            elif isinstance(value, collections.abc.Mapping):
                if key in self.embed:
                    _logger.info(
                        'Client asked to embed unembeddable object: %s', value)
                result[key] = value
            elif isinstance(value, collections.abc.Iterable):

                def g3(key, value):
                    for o in value:
                        if not isinstance(o, View):
                            if key in self.embed:
                                _logger.info(
                                    'Client asked to embed unembeddable object: %s',
                                    o)
                            yield o
                        elif key not in self.embed:
                            yield o.to_link

                result[key] = g3(key, value)
            elif key not in self.embed:
                _logger.error("Don't know how to render object as link: %s",
                              value)
        return result

    async def get(self) -> web.StreamResponse:

        # Assert we're not calling `get()` recursively within a single request:
        assert 'GET_IN_PROGRESS' not in self.request
        self.request['GET_IN_PROGRESS'] = True

        etag = await self.etag()
        if not etag:
            raise web.HTTPNotFound()
        assert_preconditions(self.request, etag)
        if self.request.method == 'GET':
            data = await self.to_dict()
        response = web.StreamResponse()
        if isinstance(await self.etag(), str):
            response.headers.add('ETag', await self.etag())
        response.content_type = self.request[BEST_CONTENT_TYPE]
        response.enable_compression()
        if str(self.canonical_rel_url) != str(self.request.rel_url):
            response.headers.add('Content-Location',
                                 str(self.canonical_rel_url))
        await response.prepare(self.request)
        # NOTE(review): in aiohttp >= 3, StreamResponse.write() and
        # write_eof() are coroutines and must be awaited -- confirm which
        # aiohttp version this project targets.
        if self.request.method == 'GET':
            async for chunk in _json.json_encode(data):
                response.write(chunk)
        response.write_eof()
        del self.request['GET_IN_PROGRESS']
        return response

    async def head(self):
        """HEAD is served by the GET path (body generation is skipped there)."""
        return await self.get()

    async def to_dict(self):
        """Assemble the full HAL document: attributes, _etag, _links, _embedded."""
        result = await self.attributes()
        if isinstance(await self.etag(), str):
            result['_etag'] = await self.etag()
        result['_links'] = await self.links()
        if 'self' not in result['_links']:
            result['_links']['self'] = self.to_link
        result['_embedded'] = await self.embedded()
        if len(result['_embedded']) == 0:
            del result['_embedded']
        return result
Exemple #38
0
def test_query_repeated_args():
    # A repeated query key ("a") must survive parsing with both values,
    # in document order.
    parsed = URL("http://example.com?a=1&b=2&a=3")
    expected = MultiDict([("a", "1"), ("b", "2"), ("a", "3")])
    assert parsed.query == expected
Exemple #39
0
    def __init__(self, method: str, url: URL, *,
                 params: Optional[Mapping[str, str]]=None,
                 headers: Optional[LooseHeaders]=None,
                 skip_auto_headers: Iterable[str]=frozenset(),
                 data: Any=None,
                 cookies: Optional[LooseCookies]=None,
                 auth: Optional[BasicAuth]=None,
                 version: http.HttpVersion=http.HttpVersion11,
                 compress: Optional[str]=None,
                 chunked: Optional[bool]=None,
                 expect100: bool=False,
                 loop: Optional[asyncio.AbstractEventLoop]=None,
                 response_class: Optional[Type['ClientResponse']]=None,
                 proxy: Optional[URL]=None,
                 proxy_auth: Optional[BasicAuth]=None,
                 timer: Optional[BaseTimerContext]=None,
                 session: Optional['ClientSession']=None,
                 ssl: Union[SSLContext, bool, Fingerprint, None]=None,
                 proxy_headers: Optional[LooseHeaders]=None,
                 traces: Optional[List['Trace']]=None):
        """Build a client HTTP request object.

        Normalizes the URL (merging ``params`` into any existing query,
        stripping the fragment), uppercases the method, then delegates to
        the ``update_*`` helpers to populate version, headers, cookies,
        auth, proxy, body, and transfer-encoding state.
        """
        if loop is None:
            loop = asyncio.get_event_loop()

        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy
        # FIXME: session is None in tests only, need to fix tests
        # assert session is not None
        self._session = cast('ClientSession', session)
        if params:
            # Merge explicit `params` with any query string already present
            # on the URL: `with_query(params)` alone would have replaced it,
            # so extend a MultiDict of the original query with the new one.
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.original_url = url
        # The fragment is never sent on the wire, so strip it for the
        # request URL while keeping `original_url` intact.
        self.url = url.with_fragment(None)
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        if response_class is None:
            real_response_class = ClientResponse
        else:
            real_response_class = response_class
        self.response_class = real_response_class  # type: Type[ClientResponse]
        self._timer = timer if timer is not None else TimerNoop()
        self._ssl = ssl

        if loop.get_debug():
            # In debug mode record where the request was created, for
            # "request was not closed" style diagnostics.
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # NOTE: the update_* calls below are order-sensitive (e.g. headers
        # must exist before auto headers / auth are applied).
        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth, proxy_headers)

        self.update_body_from_data(data)
        # GET-like methods without a payload need no transfer encoding.
        if data or self.method not in self.GET_METHODS:
            self.update_transfer_encoding()
        self.update_expect_continue(expect100)
        if traces is None:
            traces = []
        self._traces = traces
Exemple #40
0
    async def http_handler(request: aiohttp.web.Request):
        """Generic HTTP entry point for Nawah module methods.

        Resolves ``/<module>/<method>`` from the URL path, enforces a
        per-IP call quota, validates path args, authenticates via the
        optional ``X-Auth-*`` headers (falling back to an anonymous
        session), invokes the module method, and renders the result as
        JSON, file content, or a raw message.
        """
        # Default response headers (CORS included) shared by every reply.
        headers = MultiDict([
            ('Server', 'Nawah'),
            ('Powered-By', 'Nawah, https://nawah.masaar.com'),
            ('Access-Control-Allow-Origin', '*'),
            ('Access-Control-Allow-Methods', 'GET,POST,OPTIONS'),
            (
                'Access-Control-Allow-Headers',
                'Content-Type,X-Auth-Bearer,X-Auth-Token,X-Auth-App',
            ),
            ('Access-Control-Expose-Headers', 'Content-Disposition'),
        ])

        logger.debug(
            f'Received new {request.method} request: {request.match_info}')

        # CORS pre-flight: short-circuit before quota/auth checks.
        if request.method == 'OPTIONS':
            return aiohttp.web.Response(
                status=200,
                headers=headers,
                body=JSONEncoder().encode({
                    'status': 200,
                    'msg': 'OPTIONS request is allowed.',
                }),
            )

        # [DOC] Check for IP quota
        if str(request.remote) not in ip_quota:
            # First request from this IP: start a fresh quota window.
            ip_quota[str(request.remote)] = {
                'counter': Config.quota_ip_min,
                'last_check': datetime.datetime.utcnow(),
            }
        else:
            # NOTE(review): the window is 259 seconds (~4.3 min), which
            # reads oddly next to a `quota_ip_min` (per-minute?) counter —
            # confirm the intended window length.
            if (datetime.datetime.utcnow() -
                    ip_quota[str(request.remote)]['last_check']).seconds > 259:
                ip_quota[str(request.remote
                             )]['last_check'] = datetime.datetime.utcnow()
                ip_quota[str(request.remote)]['counter'] = Config.quota_ip_min
            else:
                if ip_quota[str(request.remote)]['counter'] - 1 <= 0:
                    logger.warning(
                        f'Denying \'{request.method}\' request from \'{request.remote}\' for hitting IP quota.'
                    )
                    headers['Content-Type'] = 'application/json; charset=utf-8'
                    return aiohttp.web.Response(
                        status=429,
                        headers=headers,
                        body=JSONEncoder().encode({
                            'status': 429,
                            'msg': 'You have hit calls quota from this IP.',
                            'args': {
                                'code': 'CORE_REQ_IP_QUOTA_HIT'
                            },
                        }),
                    )
                else:
                    ip_quota[str(request.remote)]['counter'] -= 1

        # Path layout: /<module>/<method>/...; any matched path params
        # become the request args dict.
        module = request.url.parts[1].lower()
        method = request.url.parts[2].lower()
        request_args = dict(request.match_info.items())

        # [DOC] Extract Args Sets based on request.method
        args_sets = Config.modules[module].methods[method].query_args
        args_sets = cast(List[Dict[str, ATTR]], args_sets)

        # [DOC] Attempt to validate query as doc
        # NOTE(review): the condition below is a tautology — it compares
        # args_set against itself, so it is always true and only the FIRST
        # args_set is ever validated.  It presumably should compare
        # args_set.keys() with request_args.keys(); confirm and fix.
        for args_set in args_sets:
            if len(args_set.keys()) == len(args_set.keys()) and sum(
                    1 for arg in args_set.keys()
                    if arg in args_set.keys()) == len(args_set.keys()):
                # [DOC] Check presence and validate all attrs in doc args
                try:
                    exception: Exception
                    await validate_doc(mode='create',
                                       doc=request_args,
                                       attrs=args_set)  # type: ignore
                except InvalidAttrException as e:
                    exception = e
                    headers['Content-Type'] = 'application/json; charset=utf-8'
                    return aiohttp.web.Response(
                        status=400,
                        headers=headers,
                        body=JSONEncoder().encode({
                            'status': 400,
                            'msg':
                            f'{str(e)} for \'{request.method}\' request on module \'{Config.modules[module].package_name.upper()}_{module.upper()}\'.',
                            'args': {
                                'code':
                                f'{Config.modules[module].package_name.upper()}_{module.upper()}_INVALID_ATTR'
                            },
                        }).encode('utf-8'),
                    )
                except ConvertAttrException as e:
                    exception = e
                    headers['Content-Type'] = 'application/json; charset=utf-8'
                    return aiohttp.web.Response(
                        status=400,
                        headers=headers,
                        body=JSONEncoder().encode({
                            'status': 400,
                            'msg':
                            f'{str(e)} for \'{request.method}\' request on module \'{Config.modules[module].package_name.upper()}_{module.upper()}\'.',
                            'args': {
                                'code':
                                f'{Config.modules[module].package_name.upper()}_{module.upper()}_CONVERT_INVALID_ATTR'
                            },
                        }).encode('utf-8'),
                    )
                break

        # Build the per-request environment around a fresh data connection.
        conn = Data.create_conn()
        env: NAWAH_ENV = {
            'conn': conn,
            'REMOTE_ADDR': request.remote,
            'client_app': '__public',
        }

        # NOTE(review): bare except — any error (not just missing headers)
        # silently blanks both values.
        try:
            env['HTTP_USER_AGENT'] = request.headers['user-agent']
            env['HTTP_ORIGIN'] = request.headers['origin']
        except:
            env['HTTP_USER_AGENT'] = ''
            env['HTTP_ORIGIN'] = ''

        if 'X-Auth-Bearer' in request.headers or 'X-Auth-Token' in request.headers:
            logger.debug('Detected \'X-Auth\' header[s].')
            # All three X-Auth headers must be supplied together.
            if ('X-Auth-Bearer' not in request.headers
                    or 'X-Auth-Token' not in request.headers
                    or 'X-Auth-App' not in request.headers):
                logger.debug(
                    'Denying request due to missing \'X-Auth\' header.')
                headers['Content-Type'] = 'application/json; charset=utf-8'
                return aiohttp.web.Response(
                    status=400,
                    headers=headers,
                    body=JSONEncoder().encode({
                        'status':
                        400,
                        'msg':
                        'One \'X-Auth\' headers was set but not the other.',
                    }).encode('utf-8'),
                )
            # When client apps are configured, the app id must be known and
            # (for web apps) the request origin must match its origin list.
            if len(Config.client_apps.keys()) and (
                    request.headers['X-Auth-App']
                    not in Config.client_apps.keys() or
                (Config.client_apps[request.headers['X-Auth-App']]['type']
                 == 'web' and env['HTTP_ORIGIN'] not in Config.client_apps[
                     request.headers['X-Auth-App']]['origin'])):
                logger.debug('Denying request due to unauthorised client_app.')
                headers['Content-Type'] = 'application/json; charset=utf-8'
                return aiohttp.web.Response(
                    status=403,
                    headers=headers,
                    body=JSONEncoder().encode({
                        'status': 403,
                        'msg': 'X-Auth headers could not be verified.',
                        'args': {
                            'code': 'CORE_SESSION_INVALID_XAUTH'
                        },
                    }).encode('utf-8'),
                )
            try:
                session_results = await Config.modules['session'].read(
                    skip_events=[Event.PERM],
                    env=env,
                    query=[{
                        '_id': request.headers['X-Auth-Bearer'],
                    }],
                )
            except:
                # NOTE(review): `exception` is only assigned inside the
                # args-validation loop above; if that never ran, the
                # f-string below raises NameError inside this handler.
                headers['Content-Type'] = 'application/json; charset=utf-8'
                if Config.debug:
                    return aiohttp.web.Response(
                        status=500,
                        headers=headers,
                        body=JSONEncoder().encode({
                            'status': 500,
                            'msg':
                            f'Unexpected error has occurred [{str(exception)}].',
                            'args': {
                                'code': 'CORE_SERVER_ERROR',
                                'err': str(exception)
                            },
                        }).encode('utf-8'),
                    )
                else:
                    return aiohttp.web.Response(
                        status=500,
                        headers=headers,
                        body=JSONEncoder().encode({
                            'status': 500,
                            'msg': 'Unexpected error has occurred.',
                            'args': {
                                'code': 'CORE_SERVER_ERROR'
                            },
                        }).encode('utf-8'),
                    )

            # Session must exist and the presented token must match its hash.
            if not session_results.args.count or not pbkdf2_sha512.verify(
                    request.headers['X-Auth-Token'],
                    session_results.args.docs[0].token_hash,
            ):
                logger.debug(
                    'Denying request due to missing failed Call Authorisation.'
                )
                headers['Content-Type'] = 'application/json; charset=utf-8'
                return aiohttp.web.Response(
                    status=403,
                    headers=headers,
                    body=JSONEncoder().encode({
                        'status': 403,
                        'msg': 'X-Auth headers could not be verified.',
                        'args': {
                            'code': 'CORE_SESSION_INVALID_XAUTH'
                        },
                    }).encode('utf-8'),
                )
            else:
                session = session_results.args.docs[0]
                session_results = await Config.modules['session'].reauth(
                    skip_events=[Event.PERM],
                    env=env,
                    query=[{
                        '_id': request.headers['X-Auth-Bearer'],
                        'token': request.headers['X-Auth-Token'],
                    }],
                )
                # NOTE(review): this "denying" line is logged even when
                # reauth succeeds — it probably belongs inside the `if`.
                logger.debug('Denying request due to fail to reauth.')
                if session_results.status != 200:
                    headers['Content-Type'] = 'application/json; charset=utf-8'
                    return aiohttp.web.Response(
                        status=403,
                        headers=headers,
                        body=JSONEncoder().encode(session_results).encode(
                            'utf-8'),
                    )
                else:
                    session = session_results.args.session
        else:
            # No X-Auth headers: build an anonymous user/session.
            anon_user = _compile_anon_user()
            anon_session = _compile_anon_session()
            anon_session['user'] = DictObj(anon_user)
            session = DictObj(anon_session)

        env['session'] = session

        # Body parsing: try JSON first, then multipart/form-data; fall back
        # to an empty doc on any failure.
        doc_content = await request.content.read()
        try:
            doc = json.loads(doc_content)
        except:
            try:
                multipart_content_type = request.headers['Content-Type']
                # Map each multipart part's form-data name to its content.
                doc = {
                    part.headers[b'Content-Disposition'].decode(
                        'utf-8').replace('form-data; name=',
                                         '').replace('"', '').split(';')[0]:
                    part.content
                    for part in decoder.MultipartDecoder(
                        doc_content, multipart_content_type).parts
                }
            except Exception as e:
                doc = {}

        # Dispatch to the resolved module method.
        results = await Config.modules[module].methods[method](
            env=env, query=[request_args], doc=doc)

        logger.debug('Closing connection.')
        env['conn'].close()

        # Render results according to results.args['return']:
        # 'json' (default), 'file', or 'msg'.
        if 'return' not in results.args or results.args['return'] == 'json':
            if 'return' in results.args:
                del results.args['return']
            headers['Content-Type'] = 'application/json; charset=utf-8'
            if results.status == 404:
                return aiohttp.web.Response(
                    status=results.status,
                    headers=headers,
                    body=JSONEncoder().encode({
                        'status':
                        404,
                        'msg':
                        'Requested content not found.'
                    }).encode('utf-8'),
                )
            else:
                return aiohttp.web.Response(
                    status=results.status,
                    headers=headers,
                    body=JSONEncoder().encode(results),
                )
        elif results.args['return'] == 'file':
            del results.args['return']
            # Serve the stored file with long-lived caching headers.
            expiry_time = datetime.datetime.utcnow() + datetime.timedelta(
                days=30)
            headers['lastModified'] = str(results.args.docs[0].lastModified)
            headers['Content-Type'] = results.args.docs[0].type
            headers['Cache-Control'] = 'public, max-age=31536000'
            headers['Expires'] = expiry_time.strftime(
                '%a, %d %b %Y %H:%M:%S GMT')
            return aiohttp.web.Response(
                status=results.status,
                headers=headers,
                body=results.args.docs[0].content,
            )
        elif results.args['return'] == 'msg':
            del results.args['return']
            headers['Content-Type'] = 'application/json; charset=utf-8'
            return aiohttp.web.Response(status=results.status,
                                        headers=headers,
                                        body=results.msg)

        # Unknown 'return' value: reject the request.
        headers['Content-Type'] = 'application/json; charset=utf-8'
        return aiohttp.web.Response(
            status=405,
            headers=headers,
            body=JSONEncoder().encode({
                'status': 405,
                'msg': 'METHOD NOT ALLOWED'
            }),
        )
Exemple #41
0
import pytest
from multidict import MultiDict
from yarl import URL

from aiohttp import helpers
from aiohttp.helpers import is_expected_content_type

# True when the running interpreter is PyPy.
IS_PYPY = platform.python_implementation() == 'PyPy'

# ------------------- parse_mimetype ----------------------------------


@pytest.mark.parametrize(
    'mimetype, expected',
    [('', helpers.MimeType('', '', '', MultiDict())),
     ('*', helpers.MimeType('*', '*', '', MultiDict())),
     ('application/json',
      helpers.MimeType('application', 'json', '', MultiDict())),
     ('application/json;  charset=utf-8',
      helpers.MimeType('application', 'json', '',
                       MultiDict({'charset': 'utf-8'}))),
     ('''application/json; charset=utf-8;''',
      helpers.MimeType('application', 'json', '',
                       MultiDict({'charset': 'utf-8'}))),
     ('ApPlIcAtIoN/JSON;ChaRseT="UTF-8"',
      helpers.MimeType('application', 'json', '',
                       MultiDict({'charset': 'UTF-8'}))),
     ('application/rss+xml',
      helpers.MimeType('application', 'rss', 'xml', MultiDict())),
     ('text/plain;base64',
Exemple #42
0
async def add_pins(multihash_args, pin_type, ipfs_url, django_url, auth):
    """Pin each multihash in ``multihash_args`` as ``pin_type``.

    Reads the current pin state from django, rejects direct-pin requests
    for hashes already pinned recursively, pins any new hashes in ipfs and
    records them in django, and deletes direct pins that were upgraded to
    recursive.  Returns an error ``web.Response`` on failure, ``None`` on
    success.
    """
    # Get pins from django
    ret = await _get_pins_django(django_url,
                                 auth,
                                 include_mfs=(pin_type == 'mfs'))
    if not isinstance(ret, dict):
        # Helper returned an upstream error response; propagate it.
        return web.Response(status=ret.status, text=await ret.text())
    django_pins = ret

    # If any one of the pins fails, need to return error, and not pin any of
    # the other hashes
    for multihash in multihash_args:
        django_pin_types = django_pins.get(multihash, [])
        if pin_type == 'direct' and 'recursive' in django_pin_types:
            error_msg = {
                'Message': f'pin: {multihash} already pinned recursively',
                'Code': 0,
                'Type': 'error'
            }
            return web.json_response(error_msg, status=500)

    add_pins = []
    delete_pins = []
    for multihash in multihash_args:
        django_pin_types = django_pins.get(multihash, [])
        if pin_type in django_pin_types:
            # Already pinned with the requested type; nothing to do.
            pass
        else:
            ret = await _pins_from_multihash(multihash, ipfs_url, pin_type)
            if not isinstance(ret, list):
                return web.Response(status=ret.status, text=await ret.text())
            pins = ret
            if pin_type == 'recursive' and 'direct' in django_pin_types:
                # Upgrading a direct pin to recursive: queue the old direct
                # pin for deletion.
                delete_pins.append({
                    'multihash': multihash,
                    'pin_type': 'direct'
                })
            add_pins += pins

    if len(add_pins) > 0:
        # Add new pins to ipfs as direct pins.
        pin_url = f'{ipfs_url}/api/v0/pin/add'
        pin_params = MultiDict()
        for pin in add_pins:
            # BUG FIX: use .add(), not item assignment.  MultiDict's
            # __setitem__ replaces the existing 'arg' entry, so the old code
            # only ever sent the LAST multihash to /pin/add; .add() builds a
            # repeated 'arg' query parameter for every pin.
            pin_params.add('arg', pin['multihash'])

        async with app['session'].request('POST', pin_url,
                                          params=pin_params) as resp:
            if resp.status != 200:
                return web.Response(status=resp.status, text=await resp.text())

        # Add new pins to django
        # NOTE: if this fails, we should roll back the direct pins we added
        # to ipfs, but we can also let the garbage collector take care of it
        resp = await _add_pins_django(add_pins, django_url, auth)
        if resp.status != 201:
            return web.Response(status=resp.status, text=await resp.text())

    # Delete replaced pins (this will not affect disk usage, since duplicates
    # shouldn't count)
    if len(delete_pins) > 0:
        await _delete_pins_django(delete_pins, django_url, auth)
Exemple #43
0
    def post(self):
        """Return POST parameters.

        Lazily reads and parses the request body, caching the resulting
        read-only multidict in ``self._post``.  Only bodies with an empty,
        urlencoded, or multipart content type are parsed; anything else
        yields an empty multidict.  Multipart file fields are spooled to
        temp files and wrapped in ``FileField``.
        """
        if self._post is not None:
            # Already parsed for this request; return the cached proxy.
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if (content_type not in ('',
                                 'application/x-www-form-urlencoded',
                                 'multipart/form-data')):
            # Unsupported body type: expose an empty (not None) result.
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()

        if content_type == 'multipart/form-data':
            multipart = yield from self.multipart()

            field = yield from multipart.next()
            while field is not None:
                # NOTE(review): `size` resets per field, so client_max_size
                # appears to bound each field rather than the whole body —
                # confirm this is intended.
                size = 0
                max_size = self._client_max_size
                content_type = field.headers.get(hdrs.CONTENT_TYPE)

                if field.filename:
                    # store file in temp file
                    tmp = tempfile.TemporaryFile()
                    chunk = yield from field.read_chunk(size=2**16)
                    while chunk:
                        chunk = field.decode(chunk)
                        tmp.write(chunk)
                        size += len(chunk)
                        if max_size > 0 and size > max_size:
                            raise ValueError(
                                'Maximum request body size exceeded')
                        chunk = yield from field.read_chunk(size=2**16)
                    # Rewind so consumers can read the spooled file.
                    tmp.seek(0)

                    ff = FileField(field.name, field.filename,
                                   tmp, content_type, field.headers)
                    out.add(field.name, ff)
                else:
                    value = yield from field.read(decode=True)
                    # Text parts are decoded with the field's charset;
                    # binary parts are kept as bytes.
                    if content_type is None or \
                            content_type.startswith('text/'):
                        charset = field.get_charset(default='utf-8')
                        value = value.decode(charset)
                    out.add(field.name, value)
                    size += len(value)
                    if max_size > 0 and size > max_size:
                        raise ValueError(
                            'Maximum request body size exceeded')

                field = yield from multipart.next()
        else:
            # Empty or urlencoded content type: parse as a query string.
            # NOTE(review): no client_max_size check on this branch.
            data = yield from self.read()
            if data:
                charset = self.charset or 'utf-8'
                out.extend(
                    parse_qsl(
                        data.rstrip().decode(charset),
                        keep_blank_values=True,
                        encoding=charset))

        self._post = MultiDictProxy(out)
        return self._post
class ServerProxy(object):
    """Asynchronous JSON-RPC client proxy.

    Attribute (or item) access produces a callable ``Method`` stub that
    POSTs a JSON-RPC request to ``url`` via an aiohttp client session.
    ``_batch_call`` sends several prepared methods in one request.
    """
    __slots__ = 'client', 'url', 'loop', 'headers', 'loads', 'dumps'

    USER_AGENT = u'aiohttp JSON-RPC client (Python: {0}, version: {1})'.format(__pyversion__, __version__)

    def __init__(self, url, client=None, loop=None, headers=None,
                 loads=json.loads, dumps=json.dumps, **kwargs):
        """Create a proxy for the JSON-RPC endpoint at ``url``.

        ``client`` and ``loop`` default to a new ClientSession and the
        current event loop; ``loads``/``dumps`` allow custom JSON codecs.
        Extra ``kwargs`` are forwarded to the ClientSession constructor.
        """
        self.headers = MultiDict(headers or {})

        # Caller-supplied headers win; these are only fallbacks.
        self.headers.setdefault('Content-Type', 'application/json')
        self.headers.setdefault('User-Agent', self.USER_AGENT)

        self.url = str(url)
        self.loop = loop or asyncio.get_event_loop()
        self.client = client or aiohttp.client.ClientSession(loop=self.loop, **kwargs)

        self.loads = loads
        self.dumps = dumps

    @staticmethod
    def _parse_response(response):
        """Extract the 'result' from a decoded JSON-RPC response dict,
        raising the mapped exception when an 'error' member is present."""
        log.debug("Server response: \n%r", response)

        if 'error' in response:
            error = response['error']

            if not isinstance(error, dict):
                # Malformed error member: nothing to map, raise generically.
                raise Exception
            else:
                raise json2py_exception(
                    error.get('code', exceptions.SystemError.code),
                    error.get('message', 'Unknown error'),
                    default_exc_class=exceptions.ServerError
                )
        return response.get('result')

    @asyncio.coroutine
    def __remote_call(self, json_request):
        """POST a single JSON-RPC request and return its parsed result."""
        response = yield from self.client.post(
            str(self.url),
            headers=self.headers,
            data=self.dumps(py2json(json_request)),
        )

        response.raise_for_status()

        return self._parse_response(
            self.loads((yield from response.read()).decode())
        )

    @asyncio.coroutine
    def _batch_call(self, prepared_methods):
        """POST a batch of prepared calls; return a result-or-exception
        per entry, in response order."""
        request = []

        for idx, req in enumerate(prepared_methods):
            if isinstance(req, Method):
                req = req.prepare()

            # JSON-RPC ids are 1-based positions within the batch.
            req['id'] = idx + 1
            request.append(req)

        response = yield from self.client.post(
            str(self.url),
            headers=self.headers,
            data=self.dumps(py2json(request)),
        )

        response.raise_for_status()

        responses = []
        data = self.loads((yield from response.read()).decode())

        for response in data:
            try:
                responses.append(self._parse_response(response))
            except Exception as e:
                # Keep per-entry failures in place instead of aborting
                # the whole batch.
                responses.append(e)

        return responses

    def __getattr__(self, method_name) -> Method:
        # proxy.some_method is sugar for proxy['some_method'].
        return self[method_name]

    def __getitem__(self, method_name) -> Method:
        return Method(method_name, self.__remote_call)

    def close(self):
        """Close the underlying aiohttp client session."""
        return self.client.close()