Example #1
class HttpTunnel(RequestBase):
    first_line = None
    data = None
    decompress = False
    method = 'CONNECT'

    def __init__(self, client, req):
        self.client = client
        self.key = req
        self.headers = CIMultiDict(client.DEFAULT_TUNNEL_HEADERS)

    def __repr__(self):
        return 'Tunnel %s' % self.url
    __str__ = __repr__

    def encode(self):
        self.headers['host'] = self.key.netloc
        self.first_line = 'CONNECT http://%s:%s HTTP/1.1' % self.key.address
        buffer = [self.first_line.encode('ascii'), b'\r\n']
        buffer.extend((('%s: %s\r\n' % (name, value)).encode(CHARSET)
                       for name, value in self.headers.items()))
        buffer.append(b'\r\n')
        return b''.join(buffer)

    def has_header(self, header_name):
        return header_name in self.headers

    def get_header(self, header_name, default=None):
        return self.headers.get(header_name, default)

    def remove_header(self, header_name):
        self.headers.pop(header_name, None)
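
A minimal, self-contained sketch (assuming only the multidict package; the header names and values here are made up) of the case-insensitive behaviour that has_header(), get_header() and encode() above rely on:

from multidict import CIMultiDict

headers = CIMultiDict({'Connection': 'keep-alive'})
headers['host'] = 'example.com:8080'       # stored under exactly this casing

# lookups ignore case, so any spelling reaches the same entry
assert headers.get('Host') == 'example.com:8080'
assert 'CONNECTION' in headers

# serialization as in encode(): one "name: value\r\n" pair per header
wire = b''.join(('%s: %s\r\n' % (name, value)).encode('ascii')
                for name, value in headers.items())
assert wire == b'Connection: keep-alive\r\nhost: example.com:8080\r\n'
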
Example #2
class HttpTunnel(RequestBase):
    first_line = None
    data = None
    decompress = False
    method = 'CONNECT'

    def __init__(self, client, req):
        self.client = client
        self.key = req
        self.headers = CIMultiDict(client.DEFAULT_TUNNEL_HEADERS)

    def __repr__(self):
        return 'Tunnel %s' % self.url

    __str__ = __repr__

    def encode(self):
        self.headers['host'] = self.key.netloc
        self.first_line = 'CONNECT http://%s:%s HTTP/1.1' % self.key.address
        buffer = [self.first_line.encode('ascii'), b'\r\n']
        buffer.extend((('%s: %s\r\n' % (name, value)).encode(CHARSET)
                       for name, value in self.headers.items()))
        buffer.append(b'\r\n')
        return b''.join(buffer)

    def has_header(self, header_name):
        return header_name in self.headers

    def get_header(self, header_name, default=None):
        return self.headers.get(header_name, default)

    def remove_header(self, header_name):
        self.headers.pop(header_name, None)
Example #3
def headers_preserve_casing():
    try:
        from multidict import CIMultiDict
    except ImportError:
        return True

    d = CIMultiDict()
    d.update({"X-NewRelic-ID": "value"})
    return "X-NewRelic-ID" in dict(d.items())
Example #4
def headers_preserve_casing():
    try:
        from multidict import CIMultiDict
    except ImportError:
        return True

    d = CIMultiDict()
    d.update({'X-NewRelic-ID': 'value'})
    return 'X-NewRelic-ID' in dict(d.items())
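
For context, a hedged sketch of what this check is probing (multidict only; the header name is the one used above): CIMultiDict compares keys case-insensitively, but recent multidict releases keep the exact casing a key was inserted with, so it survives a round trip through dict(d.items()):

from multidict import CIMultiDict

d = CIMultiDict()
d['X-NewRelic-ID'] = 'value'

assert d.get('x-newrelic-id') == 'value'     # case-insensitive access
assert list(d.keys()) == ['X-NewRelic-ID']   # original casing preserved on iteration
assert 'X-NewRelic-ID' in dict(d.items())    # the expression the helper returns
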
Example #5
 async def start(self, connection, read_until_eof=False):
     # vk.com returns a URL like this: http://REDIRECT_URI#access_token=...
     # but aiohttp drops everything after '#' by default
     await super().start(connection, read_until_eof)
     headers = CIMultiDict(self.headers)
     location = headers.get(hdrs.LOCATION, None)
     if location is not None:
         headers[hdrs.LOCATION] = location.replace('#', '?')
     self.headers = CIMultiDictProxy(headers)
     self.raw_headers = tuple(headers.items())
     return self
Example #6
 async def start(self, connection, read_until_eof=False):
     # vk.com returns a URL like this: http://REDIRECT_URI#access_token=...
     # but aiohttp drops everything after '#' by default
     await super().start(connection, read_until_eof)
     headers = CIMultiDict(self.headers)
     location = headers.get(hdrs.LOCATION, None)
     if location:
         headers[hdrs.LOCATION] = location.replace('#', '?')
     self.headers = CIMultiDictProxy(headers)
     self.raw_headers = tuple(headers.items())
     return self
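
The copy-modify-rewrap dance above is needed because aiohttp exposes response headers as a read-only CIMultiDictProxy. A small sketch of the same idea using only multidict (the URL is made up):

from multidict import CIMultiDict, CIMultiDictProxy

original = CIMultiDictProxy(
    CIMultiDict(Location='http://example.com/cb#access_token=abc'))

editable = CIMultiDict(original)             # proxies are immutable, so copy first
location = editable.get('Location')
if location is not None:
    editable['Location'] = location.replace('#', '?')

patched = CIMultiDictProxy(editable)         # publish a read-only view again
assert patched['location'] == 'http://example.com/cb?access_token=abc'
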
Example #7
 def _prepare_headers(self, headers):
     """ Add default headers and transform it to CIMultiDict
     """
     # Convert headers to MultiDict
     result = CIMultiDict(self._default_headers)
     if headers:
         if not isinstance(headers, (MultiDictProxy, MultiDict)):
             headers = CIMultiDict(headers)
         added_names = set()
         for key, value in headers.items():
             if key in added_names:
                 result.add(key, value)
             else:
                 result[key] = value
                 added_names.add(key)
     return result
Example #8
 def _prepare_headers(self, headers):
     """ Add default headers and transform it to CIMultiDict
     """
     # Convert headers to MultiDict
     result = CIMultiDict(self._default_headers)
     if headers:
         if not isinstance(headers, (MultiDictProxy, MultiDict)):
             headers = CIMultiDict(headers)
         added_names = set()
         for key, value in headers.items():
             if key in added_names:
                 result.add(key, value)
             else:
                 result[key] = value
                 added_names.add(key)
     return result
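
A self-contained sketch of the merge rule _prepare_headers() implements (only multidict is assumed; the default and incoming headers are hypothetical): the first occurrence of a name replaces any default, and repeats of the same name are kept as additional values:

from multidict import CIMultiDict

defaults = CIMultiDict({'Accept': '*/*', 'User-Agent': 'demo/1.0'})

incoming = CIMultiDict()
incoming.add('Accept', 'application/json')   # first occurrence overrides the default
incoming.add('X-Tag', 'alpha')
incoming.add('X-Tag', 'beta')                # repeated name: both values should survive

result = CIMultiDict(defaults)
added_names = set()
for key, value in incoming.items():
    if key in added_names:
        result.add(key, value)               # keep duplicates after the first
    else:
        result[key] = value                  # replace the default on first sight
        added_names.add(key)

assert result.getall('Accept') == ['application/json']
assert result.getall('X-Tag') == ['alpha', 'beta']
assert result['User-Agent'] == 'demo/1.0'    # untouched default is kept
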
Example #9
    def maker(method, path, headers=None, *,
              version=HttpVersion(1, 1), closing=False,
              sslcontext=None,
              secure_proxy_ssl_header=None):
        if version < HttpVersion(1, 1):  # noqa
            closing = True

        if headers is None:
            headers = {}
        headers = CIMultiDict(headers)

        app = mock.Mock()
        app._debug = False
        app.on_response_prepare = Signal(app)

        if "HOST" not in headers:
            headers["HOST"] = "test.local"  # noqa

        message = RawRequestMessage(method, path, version, headers,
                                    [(k.encode('utf-8'), v.encode('utf-8'))
                                     for k, v in headers.items()],
                                    closing, False)
        payload = mock.Mock()
        transport = mock.Mock()

        def get_extra_info(key):  # noqa
            if key == 'sslcontext':
                return sslcontext
            else:
                return None

        transport.get_extra_info.side_effect = get_extra_info
        writer = mock.Mock()
        reader = mock.Mock()
        req = Request(app, message, payload,
                      transport, reader, writer,
                      secure_proxy_ssl_header=secure_proxy_ssl_header)

        return req
Example #10
class ClientRequest:

    GET_METHODS = {hdrs.METH_GET, hdrs.METH_HEAD, hdrs.METH_OPTIONS}
    POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
    ALL_METHODS = GET_METHODS.union(POST_METHODS).union(
        {hdrs.METH_DELETE, hdrs.METH_TRACE})

    DEFAULT_HEADERS = {
        hdrs.ACCEPT: '*/*',
        hdrs.ACCEPT_ENCODING: 'gzip, deflate',
    }

    SERVER_SOFTWARE = HttpMessage.SERVER_SOFTWARE

    body = b''
    auth = None
    response = None
    response_class = None

    _writer = None  # async task for streaming data
    _continue = None  # waiter future for '100 Continue' response

    # N.B.
    # Adding a __del__ method that closes self._writer doesn't make sense,
    # because _writer is an instance method and thus keeps a reference to
    # self; the finalizer will not be called until the writer has finished.

    def __init__(self,
                 method,
                 url,
                 *,
                 params=None,
                 headers=None,
                 skip_auto_headers=frozenset(),
                 data=None,
                 cookies=None,
                 auth=None,
                 encoding='utf-8',
                 version=http.HttpVersion11,
                 compress=None,
                 chunked=None,
                 expect100=False,
                 loop=None,
                 response_class=None,
                 proxy=None,
                 proxy_auth=None,
                 timer=None):

        if loop is None:
            loop = asyncio.get_event_loop()

        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy

        if params:
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.url = url.with_fragment(None)
        self.original_url = url
        self.method = method.upper()
        self.encoding = encoding
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.response_class = response_class or ClientResponse
        self._timer = timer if timer is not None else _TimeServiceTimeoutNoop()

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth)

        self.update_body_from_data(data, skip_auto_headers)
        self.update_transfer_encoding()
        self.update_expect_continue(expect100)

    @property
    def host(self):
        return self.url.host

    @property
    def port(self):
        return self.url.port

    def update_host(self, url):
        """Update destination host, port and connection type (ssl)."""
        # get host/port
        if not url.host:
            raise ValueError('Host could not be detected.')

        # basic auth info
        username, password = url.user, url.password
        if username:
            self.auth = helpers.BasicAuth(username, password or '')

        # Record entire netloc for usage in host header

        scheme = url.scheme
        self.ssl = scheme in ('https', 'wss')

    def update_version(self, version):
        """Convert request version to two elements tuple.

        parser HTTP version '1.1' => (1, 1)
        """
        if isinstance(version, str):
            v = [l.strip() for l in version.split('.', 1)]
            try:
                version = int(v[0]), int(v[1])
            except ValueError:
                raise ValueError(
                    'Can not parse http version number: {}'.format(
                        version)) from None
        self.version = version

    def update_headers(self, headers):
        """Update request headers."""
        self.headers = CIMultiDict()
        if headers:
            if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
                headers = headers.items()

            for key, value in headers:
                self.headers.add(key, value)

    def update_auto_headers(self, skip_auto_headers):
        self.skip_auto_headers = skip_auto_headers
        used_headers = set(self.headers) | skip_auto_headers

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers.add(hdr, val)

        # add host
        if hdrs.HOST not in used_headers:
            netloc = self.url.raw_host
            if not self.url.is_default_port():
                netloc += ':' + str(self.url.port)
            self.headers[hdrs.HOST] = netloc

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = self.SERVER_SOFTWARE

    def update_cookies(self, cookies):
        """Update request cookies header."""
        if not cookies:
            return

        c = SimpleCookie()
        if hdrs.COOKIE in self.headers:
            c.load(self.headers.get(hdrs.COOKIE, ''))
            del self.headers[hdrs.COOKIE]

        for name, value in cookies.items():
            if isinstance(value, Morsel):
                # Preserve coded_value
                mrsl_val = value.get(value.key, Morsel())
                mrsl_val.set(value.key, value.value, value.coded_value)
                c[name] = mrsl_val
            else:
                c[name] = value

        self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()

    def update_content_encoding(self, data):
        """Set request content encoding."""
        if not data:
            return

        enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower()
        if enc:
            if self.compress is not False:
                self.compress = enc
                # enable chunked, no need to deal with length
                self.chunked = True
        elif self.compress:
            if not isinstance(self.compress, str):
                self.compress = 'deflate'
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length

    def update_auth(self, auth):
        """Set basic auth."""
        if auth is None:
            auth = self.auth
        if auth is None:
            return

        if not isinstance(auth, helpers.BasicAuth):
            raise TypeError('BasicAuth() tuple is required instead')

        self.headers[hdrs.AUTHORIZATION] = auth.encode()

    def update_body_from_data(self, data, skip_auto_headers):
        if not data:
            return

        if isinstance(data, str):
            data = data.encode(self.encoding)

        if isinstance(data, (bytes, bytearray)):
            self.body = data
            if (hdrs.CONTENT_TYPE not in self.headers
                    and hdrs.CONTENT_TYPE not in skip_auto_headers):
                self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream'
            if hdrs.CONTENT_LENGTH not in self.headers and not self.chunked:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

        elif isinstance(data, (asyncio.StreamReader, streams.StreamReader,
                               streams.DataQueue)):
            self.body = data

        elif asyncio.iscoroutine(data):
            self.body = data
            if (hdrs.CONTENT_LENGTH not in self.headers
                    and self.chunked is None):
                self.chunked = True

        elif isinstance(data, io.IOBase):
            assert not isinstance(data, io.StringIO), \
                'attempt to send text data instead of binary'
            self.body = data
            if not self.chunked and isinstance(data, io.BytesIO):
                # Not chunking if content-length can be determined
                size = len(data.getbuffer())
                self.headers[hdrs.CONTENT_LENGTH] = str(size)
                self.chunked = False
            elif (not self.chunked
                  and isinstance(data,
                                 (io.BufferedReader, io.BufferedRandom))):
                # Not chunking if content-length can be determined
                try:
                    size = os.fstat(data.fileno()).st_size - data.tell()
                    self.headers[hdrs.CONTENT_LENGTH] = str(size)
                    self.chunked = False
                except OSError:
                    # data.fileno() is not supported, e.g.
                    # io.BufferedReader(io.BytesIO(b'data'))
                    self.chunked = True
            else:
                self.chunked = True

            if hasattr(data, 'mode'):
                if data.mode == 'r':
                    raise ValueError('file {!r} should be open in binary mode'
                                     ''.format(data))
            if (hdrs.CONTENT_TYPE not in self.headers
                    and hdrs.CONTENT_TYPE not in skip_auto_headers
                    and hasattr(data, 'name')):
                mime = mimetypes.guess_type(data.name)[0]
                mime = 'application/octet-stream' if mime is None else mime
                self.headers[hdrs.CONTENT_TYPE] = mime

        elif isinstance(data, MultipartWriter):
            self.body = data.serialize()
            self.headers.update(data.headers)
            self.chunked = True

        else:
            if not isinstance(data, helpers.FormData):
                data = helpers.FormData(data)

            self.body = data(self.encoding)

            if (hdrs.CONTENT_TYPE not in self.headers
                    and hdrs.CONTENT_TYPE not in skip_auto_headers):
                self.headers[hdrs.CONTENT_TYPE] = data.content_type

            if data.is_multipart:
                self.chunked = True
            else:
                if (hdrs.CONTENT_LENGTH not in self.headers
                        and not self.chunked):
                    self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_transfer_encoding(self):
        """Analyze transfer-encoding header."""
        te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower()

        if self.chunked:
            if hdrs.CONTENT_LENGTH in self.headers:
                del self.headers[hdrs.CONTENT_LENGTH]
            if 'chunked' not in te:
                self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'

        else:
            if 'chunked' in te:
                self.chunked = True
            else:
                self.chunked = None
                if hdrs.CONTENT_LENGTH not in self.headers:
                    self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_expect_continue(self, expect=False):
        if expect:
            self.headers[hdrs.EXPECT] = '100-continue'
        elif self.headers.get(hdrs.EXPECT, '').lower() == '100-continue':
            expect = True

        if expect:
            self._continue = helpers.create_future(self.loop)

    def update_proxy(self, proxy, proxy_auth):
        if proxy and not proxy.scheme == 'http':
            raise ValueError("Only http proxies are supported")
        if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
            raise ValueError("proxy_auth must be None or BasicAuth() tuple")
        self.proxy = proxy
        self.proxy_auth = proxy_auth

    @asyncio.coroutine
    def write_bytes(self, request, conn):
        """Support coroutines that yields bytes objects."""
        # 100 response
        if self._continue is not None:
            yield from request.drain()
            yield from self._continue

        try:
            if asyncio.iscoroutine(self.body):
                exc = None
                value = None
                stream = self.body

                while True:
                    try:
                        if exc is not None:
                            result = stream.throw(exc)
                        else:
                            result = stream.send(value)
                    except StopIteration as exc:
                        if isinstance(exc.value, bytes):
                            yield from request.write(exc.value)
                        break
                    except:
                        self.response.close()
                        raise

                    if isinstance(result, asyncio.Future):
                        exc = None
                        value = None
                        try:
                            value = yield result
                        except Exception as err:
                            exc = err
                    elif isinstance(result, (bytes, bytearray)):
                        yield from request.write(result)
                        value = None
                    else:
                        raise ValueError('Bytes object is expected, got: %s.' %
                                         type(result))

            elif isinstance(self.body,
                            (asyncio.StreamReader, streams.StreamReader)):
                chunk = yield from self.body.read(streams.DEFAULT_LIMIT)
                while chunk:
                    yield from request.write(chunk, drain=True)
                    chunk = yield from self.body.read(streams.DEFAULT_LIMIT)

            elif isinstance(self.body, streams.DataQueue):
                while True:
                    try:
                        chunk = yield from self.body.read()
                        if not chunk:
                            break
                        yield from request.write(chunk)
                    except streams.EofStream:
                        break

            elif isinstance(self.body, io.IOBase):
                chunk = self.body.read(streams.DEFAULT_LIMIT)
                while chunk:
                    request.write(chunk)
                    chunk = self.body.read(streams.DEFAULT_LIMIT)
            else:
                if isinstance(self.body, (bytes, bytearray)):
                    self.body = (self.body, )

                for chunk in self.body:
                    request.write(chunk)

        except Exception as exc:
            new_exc = aiohttp.ClientRequestError(
                'Can not write request body for %s' % self.url)
            new_exc.__context__ = exc
            new_exc.__cause__ = exc
            conn.protocol.set_exception(new_exc)
        else:
            try:
                yield from request.write_eof()
            except Exception as exc:
                new_exc = aiohttp.ClientRequestError(
                    'Can not write request body for %s' % self.url)
                new_exc.__context__ = exc
                new_exc.__cause__ = exc
                conn.protocol.set_exception(new_exc)

        self._writer = None

    def send(self, conn):
        # Specify request target:
        # - CONNECT request must send authority form URI
        # - not CONNECT proxy must send absolute form URI
        # - most common is origin form URI
        if self.method == hdrs.METH_CONNECT:
            path = '{}:{}'.format(self.url.raw_host, self.url.port)
        elif self.proxy and not self.ssl:
            path = str(self.url)
        else:
            path = self.url.raw_path
            if self.url.raw_query_string:
                path += '?' + self.url.raw_query_string

        request = http.Request(conn.writer,
                               self.method,
                               path,
                               self.version,
                               loop=self.loop)

        if self.compress:
            request.enable_compression(self.compress)

        if self.chunked is not None:
            request.enable_chunking()

        # set default content-type
        if (self.method in self.POST_METHODS
                and hdrs.CONTENT_TYPE not in self.skip_auto_headers
                and hdrs.CONTENT_TYPE not in self.headers):
            self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream'

        for k, value in self.headers.items():
            request.add_header(k, value)
        request.send_headers()

        self._writer = helpers.ensure_future(self.write_bytes(request, conn),
                                             loop=self.loop)

        self.response = self.response_class(self.method,
                                            self.original_url,
                                            writer=self._writer,
                                            continue100=self._continue,
                                            timer=self._timer)

        self.response._post_init(self.loop)
        return self.response

    @asyncio.coroutine
    def close(self):
        if self._writer is not None:
            try:
                yield from self._writer
            finally:
                self._writer = None

    def terminate(self):
        if self._writer is not None:
            if not self.loop.is_closed():
                self._writer.cancel()
            self._writer = None
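
update_headers() above pushes every (name, value) pair through CIMultiDict.add(), so a caller may pass the same header name twice and both values survive, while update_auto_headers() only fills in defaults for names that are absent. A minimal sketch of that add() behaviour (multidict only, made-up values):

from multidict import CIMultiDict

headers = CIMultiDict()
for key, value in [('Accept', 'text/html'), ('accept', 'application/json')]:
    headers.add(key, value)                  # add() keeps every occurrence

assert headers.getall('Accept') == ['text/html', 'application/json']
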
Example #11
class HttpMessage(PayloadWriter):
    """HttpMessage allows to write headers and payload to a stream."""

    HOP_HEADERS = None  # Must be set by subclass.

    SERVER_SOFTWARE = 'Python/{0[0]}.{0[1]} aiohttp/{1}'.format(
        sys.version_info, aiohttp.__version__)

    upgrade = False  # Connection: UPGRADE
    websocket = False  # Upgrade: WEBSOCKET
    has_chunked_hdr = False  # Transfer-encoding: chunked

    def __init__(self, transport, version, close, loop=None):
        super().__init__(transport, loop)

        self.version = version
        self.closing = close
        self.keepalive = None
        self.length = None
        self.headers = CIMultiDict()
        self.headers_sent = False

    @property
    def body_length(self):
        return self.output_length

    def force_close(self):
        self.closing = True
        self.keepalive = False

    def keep_alive(self):
        if self.keepalive is None:
            if self.version < HttpVersion10:
                # keep alive not supported at all
                return False
            if self.version == HttpVersion10:
                if self.headers.get(hdrs.CONNECTION) == 'keep-alive':
                    return True
                else:  # no headers means we close for Http 1.0
                    return False
            else:
                return not self.closing
        else:
            return self.keepalive

    def is_headers_sent(self):
        return self.headers_sent

    def add_header(self, name, value):
        """Analyze headers. Calculate content length,
        removes hop headers, etc."""
        assert not self.headers_sent, 'headers have been sent already'
        assert isinstance(name, str), \
            'Header name should be a string, got {!r}'.format(name)
        assert set(name).issubset(ASCIISET), \
            'Header name should contain ASCII chars, got {!r}'.format(name)
        assert isinstance(value, str), \
            'Header {!r} should have string value, got {!r}'.format(
                name, value)

        name = istr(name)
        value = value.strip()

        if name == hdrs.CONTENT_LENGTH:
            self.length = int(value)

        if name == hdrs.TRANSFER_ENCODING:
            self.has_chunked_hdr = value.lower() == 'chunked'

        if name == hdrs.CONNECTION:
            val = value.lower()
            # handle websocket
            if 'upgrade' in val:
                self.upgrade = True
            # connection keep-alive
            elif 'close' in val:
                self.keepalive = False
            elif 'keep-alive' in val:
                self.keepalive = True

        elif name == hdrs.UPGRADE:
            if 'websocket' in value.lower():
                self.websocket = True
            self.headers[name] = value

        elif name not in self.HOP_HEADERS:
            # ignore hop-by-hop headers
            self.headers.add(name, value)

    def add_headers(self, *headers):
        """Adds headers to a HTTP message."""
        for name, value in headers:
            self.add_header(name, value)

    def send_headers(self, _sep=': ', _end='\r\n'):
        """Writes headers to a stream. Constructs payload writer."""
        # Chunked responses are only for HTTP/1.1 (or newer) clients,
        # and only when no Content-Length header is set.
        # Do not use chunked responses when the response is guaranteed to
        # not have a response body (304, 204).
        assert not self.headers_sent, 'headers have been sent already'
        self.headers_sent = True

        if not self.chunked and self.autochunked():
            self.enable_chunking()

        if self.chunked:
            self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'

        self._add_default_headers()

        # status + headers
        headers = self.status_line + ''.join(
            [k + _sep + v + _end for k, v in self.headers.items()])
        headers = headers.encode('utf-8') + b'\r\n'

        self.buffer_data(headers)

    def _add_default_headers(self):
        # set the connection header
        connection = None
        if self.upgrade:
            connection = 'Upgrade'
        elif not self.closing if self.keepalive is None else self.keepalive:
            if self.version == HttpVersion10:
                connection = 'keep-alive'
        else:
            if self.version == HttpVersion11:
                connection = 'close'

        if connection is not None:
            self.headers[hdrs.CONNECTION] = connection
Example #12
    async def handleRequest(self, reader, writer):
        # connection id, for debugging
        connid = self.nextConnId
        self.nextConnId += 1

        logger.debug(f'{connid}: new incoming connection')
        # simple HTTP parsing, yes this is a terrible idea.
        l = await reader.readline()
        if l == bytes():
            raise BadRequest(f'{connid}: unexpected eof')
        l = l.rstrip(b'\r\n')
        try:
            method, rawPath, proto = l.split(b' ')
            logger.debug(f'{connid}: got {method} {rawPath} {proto}')
        except ValueError:
            logger.error(f'{connid}: cannot split line {l}')
            raise
        reqUrl = furl(rawPath.decode('utf-8'))

        headers = CIMultiDict()
        while True:
            if len(headers) > 100:
                raise BadRequest('too many headers')

            l = await reader.readline()
            if l == bytes():
                raise BadRequest(f'{connid}: unexpected eof in headers')
            l = l.rstrip(b'\r\n')
            logger.debug(f'{connid}: got header line {l!r}')
            # end of headers?
            if l == bytes():
                break
            try:
                key, value = l.decode('utf-8').split(':', 1)
                headers.add(key.strip(), value.strip())
            except ValueError:
                logger.error(f'cannot parse {l}')

        logger.debug(f'{connid}: {rawPath} {method} got headers {headers}')

        route = None
        try:
            netloc = headers['host']
            reqUrl = reqUrl.set(scheme='http', netloc=netloc)
            logger.debug(f'got request url {reqUrl}')
            routeKey = None
            for d in self.domain:
                m = parse(d, reqUrl.netloc)
                if m is not None:
                    routeKey = RouteKey(key=m['key'], user=m['user'])
                    logger.debug(f'{connid}: got route key {routeKey}')
                    break
            route = self.routes[routeKey]
        except (KeyError, ValueError):
            logger.info(f'{connid}: cannot find route for {reqUrl}')
            self.status['noroute'] += 1
            # error is written to client later

        # is this a non-forwarded request?
        segments = reqUrl.path.segments
        if len(segments) > 0 and segments[0] == '_conductor':
            if segments[1] == 'auth':
                logger.info(f'authorization request for {reqUrl.netloc}')
                try:
                    nextLoc = reqUrl.query.params['next'].encode('utf-8')
                except KeyError:
                    nextLoc = b'/'
                writer.write(b'\r\n'.join([
                    b'HTTP/1.0 302 Found', b'Location: ' + nextLoc,
                    b'Set-Cookie: authorization=' +
                    segments[2].encode('utf-8') + b'; HttpOnly; Path=/',
                    b'Cache-Control: no-store', b'',
                    b'Follow the white rabbit.'
                ]))
            elif segments[1] == 'status':
                writer.write(
                    b'HTTP/1.0 200 OK\r\nContent-Type: application/json\r\n\r\n'
                )
                self.status['routesTotal'] = len(self.routes)
                writer.write(
                    json.dumps(self.status, ensure_ascii=True).encode('ascii'))
            else:
                writer.write(
                    b'HTTP/1.0 404 Not Found\r\nContent-Type: plain/text\r\n\r\nNot found'
                )
            writer.close()
            return

        if not route:
            writer.write(
                b'HTTP/1.0 404 Not Found\r\nConnection: close\r\n\r\n')
            writer.close()
            return

        # check authorization
        cookies = BaseCookie()
        try:
            cookies.load(headers['Cookie'])
        except KeyError:
            # will be rejected later
            pass
        authorized = False
        for c in cookies.values():
            # Only hashed authorization is available to server.
            if c.key == 'authorization' and self.hashKey(
                    c.value) == route.auth:
                authorized = True
                break
        try:
            # do not forward auth cookie to the application, so it can’t leak it.
            del cookies['authorization']
            headers['Cookie'] = cookies.output(header='', sep='')
        except KeyError:
            # nonexistent cookie is fine
            pass

        if not authorized:
            logger.info(
                f'{connid}-{reqUrl}: not authorized, cookies sent {cookies.values()}'
            )
            writer.write(
                b'HTTP/1.0 403 Unauthorized\r\nContent-Type: plain/text\r\nConnection: close\r\n\r\nUnauthorized'
            )
            writer.close()
            self.status['unauthorized'] += 1
            return

        # try opening the socket
        try:
            start = time.time()
            sockreader, sockwriter = await asyncio.open_unix_connection(
                path=route.socket)
            end = time.time()
            logger.debug(f'opening socket took {end-start}s')
        except (ConnectionRefusedError, FileNotFoundError, PermissionError):
            logger.info(f'{connid}-{reqUrl}: route {routeKey} is broken')
            writer.write(
                b'HTTP/1.0 502 Bad Gateway\r\nConnection: close\r\n\r\n')
            writer.close()
            self.status['broken'] += 1
            return

        # some headers are fixed
        # not parsing body, so we cannot handle more than one request per connection
        # XXX: this is super-inefficient
        if ('Upgrade' not in headers
                or headers['Upgrade'].lower() != 'websocket'):
            headers['Connection'] = 'close'

        # write http banner plus headers
        sockwriter.write(method + b' ' + rawPath + b' ' + proto + b'\r\n')
        for k, v in headers.items():
            sockwriter.write(f'{k}: {v}\r\n'.encode('utf-8'))
        sockwriter.write(b'\r\n')

        async def beforeABClose(result):
            if result == 0:
                # no response received from client
                logger.info(
                    f'{connid}-{reqUrl}: route {routeKey} got no result from server'
                )
                writer.write(
                    b'HTTP/1.0 502 Bad Gateway\r\nConnection: close\r\n\r\n')

        await proxy((sockreader, sockwriter, 'sock'), (reader, writer, 'web'),
                    logger=logger,
                    logPrefix=connid,
                    beforeABClose=beforeABClose)
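
One detail the proxy above relies on: assigning headers['Connection'] = 'close' on a CIMultiDict replaces every existing Connection value instead of appending another one. A quick sketch (multidict only):

from multidict import CIMultiDict

headers = CIMultiDict()
headers.add('Connection', 'keep-alive')
headers.add('connection', 'TE')              # second value, different casing

headers['Connection'] = 'close'              # __setitem__ drops all previous values

assert headers.getall('Connection') == ['close']
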
Example #13
def make_mocked_request(method, path, headers=None, *,
                        version=HttpVersion(1, 1), closing=False,
                        app=None,
                        writer=sentinel,
                        payload_writer=sentinel,
                        protocol=sentinel,
                        transport=sentinel,
                        payload=sentinel,
                        sslcontext=None,
                        secure_proxy_ssl_header=None,
                        client_max_size=1024**2,
                        loop=...):
    """Creates mocked web.Request testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    """

    task = mock.Mock()
    if loop is ...:
        loop = mock.Mock()
        loop.create_future.return_value = ()

    if version < HttpVersion(1, 1):
        closing = True

    if headers:
        headers = CIMultiDict(headers)
        raw_hdrs = tuple(
            (k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items())
    else:
        headers = CIMultiDict()
        raw_hdrs = ()

    chunked = 'chunked' in headers.get(hdrs.TRANSFER_ENCODING, '').lower()

    message = RawRequestMessage(
        method, path, version, headers,
        raw_hdrs, closing, False, False, chunked, URL(path))
    if app is None:
        app = _create_app_mock()

    if protocol is sentinel:
        protocol = mock.Mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if writer is sentinel:
        writer = mock.Mock()
        writer.transport = transport

    if payload_writer is sentinel:
        payload_writer = mock.Mock()
        payload_writer.write_eof.side_effect = noop
        payload_writer.drain.side_effect = noop

    protocol.transport = transport
    protocol.writer = writer

    if payload is sentinel:
        payload = mock.Mock()

    time_service = mock.Mock()
    time_service.time.return_value = 12345
    time_service.strtime.return_value = "Tue, 15 Nov 1994 08:12:31 GMT"

    @contextmanager
    def timeout(*args, **kw):
        yield

    time_service.timeout = mock.Mock()
    time_service.timeout.side_effect = timeout

    req = Request(message, payload,
                  protocol, payload_writer, time_service, task, loop,
                  secure_proxy_ssl_header=secure_proxy_ssl_header,
                  client_max_size=client_max_size)

    match_info = UrlMappingMatchInfo({}, mock.Mock())
    match_info.add_app(app)
    req._match_info = match_info

    return req
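
Both mock helpers in this section turn the CIMultiDict into the raw (bytes, bytes) pairs that RawRequestMessage expects. A tiny sketch of that conversion (multidict only; the header values are made up):

from multidict import CIMultiDict

headers = CIMultiDict({'Host': 'test.local', 'X-Custom': 'yes'})
raw_hdrs = tuple((k.encode('utf-8'), v.encode('utf-8'))
                 for k, v in headers.items())

assert raw_hdrs == ((b'Host', b'test.local'), (b'X-Custom', b'yes'))
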
Example #14
class Message:
    def __init__(
        self,
        headers=None,
        payload=None,
        from_details=None,
        to_details=None,
        contact_details=None,
    ):

        if headers:
            self.headers = headers
        else:
            self.headers = CIMultiDict()

        if from_details:
            self._from_details = from_details
        elif 'From' not in self.headers:
            raise ValueError('From header or from_details is required')

        if to_details:
            self._to_details = to_details
        elif 'To' not in self.headers:
            raise ValueError('To header or to_details is required')

        if contact_details:
            self._contact_details = contact_details

        self._payload = payload
        self._raw_payload = None

        if 'Via' not in self.headers:
            self.headers['Via'] = 'SIP/2.0/%(protocol)s ' + \
                utils.format_host_and_port(self.contact_details['uri']['host'],
                                           self.contact_details['uri']['port']) + \
                ';branch=%s' % utils.gen_branch(10)

    @property
    def auth(self):
        if not hasattr(self, '_auth'):
            self._auth = Auth.from_message(self)
        return self._auth

    @property
    def payload(self):
        if self._payload:
            return self._payload
        elif self._raw_payload:
            self._payload = self._raw_payload.decode()
            return self._payload
        else:
            return ''

    @payload.setter
    def payload(self, payload):
        self._payload = payload

    @property
    def from_details(self):
        if not hasattr(self, '_from_details'):
            self._from_details = Contact.from_header(self.headers['From'])
        return self._from_details

    @from_details.setter
    def from_details(self, from_details):
        self._from_details = from_details

    @property
    def to_details(self):
        if not hasattr(self, '_to_details'):
            self._to_details = Contact.from_header(self.headers['To'])
        return self._to_details

    @to_details.setter
    def to_details(self, to_details):
        self._to_details = to_details

    @property
    def contact_details(self):
        if not hasattr(self, '_contact_details'):
            if 'Contact' in self.headers:
                self._contact_details = Contact.from_header(
                    self.headers['Contact'])
            else:
                self._contact_details = None
        return self._contact_details

    @contact_details.setter
    def contact_details(self, contact_details):
        self._contact_details = contact_details

    @property
    def content_type(self):
        return self.headers['Content-Type']

    @content_type.setter
    def content_type(self, content_type):
        self.headers['Content-Type'] = content_type

    @property
    def cseq(self):
        if not hasattr(self, '_cseq'):
            self._cseq = int(self.headers['CSeq'].split(' ')[0])
        return self._cseq

    @cseq.setter
    def cseq(self, cseq):
        self._cseq = int(cseq)

    @property
    def method(self):
        if not hasattr(self, '_method'):
            self._method = self.headers['CSeq'].split(' ')[1]
        return self._method

    @method.setter
    def method(self, method):
        self._method = method

    def __str__(self):
        if self._payload:
            self._raw_payload = self._payload.encode()
        elif not self._raw_payload:
            self._raw_payload = b''

        msg = self._make_headers()
        return msg + self.payload

    def encode(self, *args, **kwargs):
        if self._payload:
            self._raw_payload = self._payload.encode(*args, **kwargs)
        elif not self._raw_payload:
            self._raw_payload = b''

        msg = self._make_headers()
        return msg.encode(*args, **kwargs) + self._raw_payload

    def _make_headers(self):
        if hasattr(self, '_from_details'):
            self.headers['From'] = str(self.from_details)

        if hasattr(self, '_to_details'):
            self.headers['To'] = str(self.to_details)

        if hasattr(self, '_contact_details') and 'Contact' not in self.headers:
            self.headers['Contact'] = str(self.contact_details)

        if hasattr(self, '_cseq'):
            self.headers['CSeq'] = '%s %s' % (self.cseq, self.method)
        elif hasattr(self, '_method'):
            self.headers['CSeq'] = '%s %s' % (self.cseq, self.method)

        self.headers['Content-Length'] = str(len(self._raw_payload))
        if 'Max-Forwards' not in self.headers:
            self.headers['Max-Forwards'] = '70'
        if 'Call-ID' not in self.headers:
            self.headers['Call-ID'] = uuid.uuid4()

        return self._format_headers()

    def _format_headers(self):
        msg = []
        for k, v in sorted(self.headers.items()):
            if k == 'Via':
                if isinstance(v, (list, tuple)):
                    msg = ['%s: %s' % (k, i) for i in v] + msg
                else:
                    msg.insert(0, '%s: %s' % (k, v))
            else:
                if isinstance(v, (list, tuple)):
                    msg.extend(['%s: %s' % (k, i) for i in v])
                else:
                    msg.append('%s: %s' % (k, v))
        msg.append(utils.EOL)
        return utils.EOL.join(msg)

    def parsed_xml(self):
        if 'Content-Type' not in self.headers:
            return None
        if not self.headers['Content-Type'].endswith('+xml'):
            return None
        return PyQuery(self.payload).remove_namespaces()

    @classmethod
    def from_raw_headers(cls, raw_headers):
        headers = CIMultiDict()
        decoded_headers = raw_headers.decode().split(utils.EOL)
        for line in decoded_headers[1:]:
            k, v = line.split(': ', 1)
            if k in headers:
                o = headers.setdefault(k, [])
                if not isinstance(o, list):
                    o = [o]
                o.append(v)
                headers[k] = o
            else:
                headers[k] = v

        m = FIRST_LINE_PATTERN['response']['regex'].match(decoded_headers[0])
        if m:
            d = m.groupdict()
            return Response(status_code=int(d['status_code']),
                            status_message=d['status_message'],
                            headers=headers,
                            first_line=decoded_headers[0])
        else:
            m = FIRST_LINE_PATTERN['request']['regex'].match(
                decoded_headers[0])
            if m:
                d = m.groupdict()
                cseq, _ = headers['CSeq'].split()
                return Request(method=d['method'],
                               headers=headers,
                               cseq=int(cseq),
                               first_line=decoded_headers[0])
            else:
                LOG.debug(decoded_headers)
                raise ValueError('Not a SIP message')
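
from_raw_headers() above folds repeated SIP header names (such as Via) into a Python list stored under a single CIMultiDict entry. A simplified sketch of that folding, leaving out the first-line regexes and the Request/Response classes (the raw header text is made up):

from multidict import CIMultiDict

raw = ('Via: SIP/2.0/UDP a.example\r\n'
       'Via: SIP/2.0/UDP b.example\r\n'
       'To: <sip:bob@example.org>')

headers = CIMultiDict()
for line in raw.split('\r\n'):
    key, value = line.split(': ', 1)
    if key in headers:                       # repeated name: fold into a list
        existing = headers[key]
        if not isinstance(existing, list):
            existing = [existing]
        existing.append(value)
        headers[key] = existing
    else:
        headers[key] = value

assert headers['via'] == ['SIP/2.0/UDP a.example', 'SIP/2.0/UDP b.example']
assert headers['To'] == '<sip:bob@example.org>'
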
Example #15
class HttpMessage(ABC):
    """HttpMessage allows to write headers and payload to a stream.

    For example, lets say we want to read file then compress it with deflate
    compression and then send it with chunked transfer encoding, code may look
    like this:

       >>> response = aiohttp.Response(transport, 200)

    We have to use deflate compression first:

      >>> response.add_compression_filter('deflate')

    Then we want to split the output stream into chunks of 1024 bytes:

      >>> response.add_chunking_filter(1024)

    We can add headers to the response with the add_headers() method.
    add_headers() does not send data to the transport; send_headers() sends
    the request/response line and then the headers:

      >>> response.add_headers(
      ...     ('Content-Disposition', 'attachment; filename="..."'))
      >>> response.send_headers()

    Now we can use the chunked writer to write the stream to a network stream.
    The first call to write() sends the response status line and headers;
    add_header() and add_headers() are unavailable at this stage:

    >>> with open('...', 'rb') as fp:
    ...     chunk = fp.read(8192)
    ...     while chunk:
    ...         response.write(chunk)
    ...         chunk = fp.read(8192)

    >>> response.write_eof()

    """

    writer = None

    # 'filter' is used for altering write() behaviour:
    # add_chunking_filter splits incoming data into chunks and
    # add_compression_filter adds deflate/gzip compression.
    filter = None

    HOP_HEADERS = None  # Must be set by subclass.

    SERVER_SOFTWARE = 'Python/{0[0]}.{0[1]} aiohttp/{1}'.format(
        sys.version_info, aiohttp.__version__)

    upgrade = False  # Connection: UPGRADE
    websocket = False  # Upgrade: WEBSOCKET
    has_chunked_hdr = False  # Transfer-encoding: chunked

    # subclass can enable auto sending headers with write() call,
    # this is useful for wsgi's start_response implementation.
    _send_headers = False

    def __init__(self, transport, version, close):
        self.transport = transport
        self._version = version
        self.closing = close
        self.keepalive = None
        self.chunked = False
        self.length = None
        self.headers = CIMultiDict()
        self.headers_sent = False
        self.output_length = 0
        self.headers_length = 0
        self._output_size = 0

    @property
    @abstractmethod
    def status_line(self):
        return b''

    @abstractmethod
    def autochunked(self):
        return False

    @property
    def version(self):
        return self._version

    @property
    def body_length(self):
        return self.output_length - self.headers_length

    def force_close(self):
        self.closing = True
        self.keepalive = False

    def enable_chunked_encoding(self):
        self.chunked = True

    def keep_alive(self):
        if self.keepalive is None:
            if self.version < HttpVersion10:
                # keep alive not supported at all
                return False
            if self.version == HttpVersion10:
                if self.headers.get(hdrs.CONNECTION) == 'keep-alive':
                    return True
                else:  # no headers means we close for Http 1.0
                    return False
            else:
                return not self.closing
        else:
            return self.keepalive

    def is_headers_sent(self):
        return self.headers_sent

    def add_header(self, name, value):
        """Analyze headers. Calculate content length,
        removes hop headers, etc."""
        assert not self.headers_sent, 'headers have been sent already'
        assert isinstance(name, str), \
            'Header name should be a string, got {!r}'.format(name)
        assert set(name).issubset(ASCIISET), \
            'Header name should contain ASCII chars, got {!r}'.format(name)
        assert isinstance(value, str), \
            'Header {!r} should have string value, got {!r}'.format(
                name, value)

        name = upstr(name)
        value = value.strip()

        if name == hdrs.CONTENT_LENGTH:
            self.length = int(value)

        if name == hdrs.TRANSFER_ENCODING:
            self.has_chunked_hdr = value.lower().strip() == 'chunked'

        if name == hdrs.CONNECTION:
            val = value.lower()
            # handle websocket
            if 'upgrade' in val:
                self.upgrade = True
            # connection keep-alive
            elif 'close' in val:
                self.keepalive = False
            elif 'keep-alive' in val:
                self.keepalive = True

        elif name == hdrs.UPGRADE:
            if 'websocket' in value.lower():
                self.websocket = True
                self.headers[name] = value

        elif name not in self.HOP_HEADERS:
            # ignore hop-by-hop headers
            self.headers.add(name, value)

    def add_headers(self, *headers):
        """Adds headers to a HTTP message."""
        for name, value in headers:
            self.add_header(name, value)

    def send_headers(self, _sep=': ', _end='\r\n'):
        """Writes headers to a stream. Constructs payload writer."""
        # Chunked responses are only for HTTP/1.1 (or newer) clients,
        # and only when no Content-Length header is set.
        # Do not use chunked responses when the response is guaranteed to
        # not have a response body (304, 204).
        assert not self.headers_sent, 'headers have been sent already'
        self.headers_sent = True

        if self.chunked or self.autochunked():
            self.writer = self._write_chunked_payload()
            self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'

        elif self.length is not None:
            self.writer = self._write_length_payload(self.length)

        else:
            self.writer = self._write_eof_payload()

        next(self.writer)

        self._add_default_headers()

        # status + headers
        headers = self.status_line + ''.join(
            [k + _sep + v + _end for k, v in self.headers.items()])
        headers = headers.encode('utf-8') + b'\r\n'

        self.output_length += len(headers)
        self.headers_length = len(headers)
        self.transport.write(headers)

    def _add_default_headers(self):
        # set the connection header
        connection = None
        if self.upgrade:
            connection = 'upgrade'
        elif not self.closing if self.keepalive is None else self.keepalive:
            if self.version == HttpVersion10:
                connection = 'keep-alive'
        else:
            if self.version == HttpVersion11:
                connection = 'close'

        if connection is not None:
            self.headers[hdrs.CONNECTION] = connection

    def write(self,
              chunk,
              *,
              drain=False,
              EOF_MARKER=EOF_MARKER,
              EOL_MARKER=EOL_MARKER):
        """Writes chunk of data to a stream by using different writers.

        writer uses filter to modify chunk of data.
        write_eof() indicates end of stream.
        writer can't be used after write_eof() method being called.
        write() return drain future.
        """
        assert (isinstance(chunk, (bytes, bytearray))
                or chunk is EOF_MARKER), chunk

        size = self.output_length

        if self._send_headers and not self.headers_sent:
            self.send_headers()

        assert self.writer is not None, 'send_headers() is not called.'

        if self.filter:
            chunk = self.filter.send(chunk)
            while chunk not in (EOF_MARKER, EOL_MARKER):
                if chunk:
                    self.writer.send(chunk)
                chunk = next(self.filter)
        else:
            if chunk is not EOF_MARKER:
                self.writer.send(chunk)

        self._output_size += self.output_length - size

        if self._output_size > 64 * 1024:
            if drain:
                self._output_size = 0
                return self.transport.drain()

        return ()

    def write_eof(self):
        self.write(EOF_MARKER)
        try:
            self.writer.throw(aiohttp.EofStream())
        except StopIteration:
            pass

        return self.transport.drain()

    def _write_chunked_payload(self):
        """Write data in chunked transfer encoding."""
        while True:
            try:
                chunk = yield
            except aiohttp.EofStream:
                self.transport.write(b'0\r\n\r\n')
                self.output_length += 5
                break

            chunk = bytes(chunk)
            chunk_len = '{:x}\r\n'.format(len(chunk)).encode('ascii')
            self.transport.write(chunk_len + chunk + b'\r\n')
            self.output_length += len(chunk_len) + len(chunk) + 2

    def _write_length_payload(self, length):
        """Write specified number of bytes to a stream."""
        while True:
            try:
                chunk = yield
            except aiohttp.EofStream:
                break

            if length:
                l = len(chunk)
                if length >= l:
                    self.transport.write(chunk)
                    self.output_length += l
                    length = length - l
                else:
                    self.transport.write(chunk[:length])
                    self.output_length += length
                    length = 0

    def _write_eof_payload(self):
        while True:
            try:
                chunk = yield
            except aiohttp.EofStream:
                break

            self.transport.write(chunk)
            self.output_length += len(chunk)

    @wrap_payload_filter
    def add_chunking_filter(self,
                            chunk_size=16 * 1024,
                            *,
                            EOF_MARKER=EOF_MARKER,
                            EOL_MARKER=EOL_MARKER):
        """Split incoming stream into chunks."""
        buf = bytearray()
        chunk = yield

        while True:
            if chunk is EOF_MARKER:
                if buf:
                    yield buf

                yield EOF_MARKER

            else:
                buf.extend(chunk)

                while len(buf) >= chunk_size:
                    chunk = bytes(buf[:chunk_size])
                    del buf[:chunk_size]
                    yield chunk

                chunk = yield EOL_MARKER

    @wrap_payload_filter
    def add_compression_filter(self,
                               encoding='deflate',
                               *,
                               EOF_MARKER=EOF_MARKER,
                               EOL_MARKER=EOL_MARKER):
        """Compress incoming stream with deflate or gzip encoding."""
        zlib_mode = (16 + zlib.MAX_WBITS
                     if encoding == 'gzip' else -zlib.MAX_WBITS)
        zcomp = zlib.compressobj(wbits=zlib_mode)

        chunk = yield
        while True:
            if chunk is EOF_MARKER:
                yield zcomp.flush()
                chunk = yield EOF_MARKER

            else:
                yield zcomp.compress(chunk)
                chunk = yield EOL_MARKER
class ClientRequest:

    GET_METHODS = {hdrs.METH_GET, hdrs.METH_HEAD, hdrs.METH_OPTIONS}
    POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
    ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE, hdrs.METH_TRACE})

    DEFAULT_HEADERS = {hdrs.ACCEPT: "*/*", hdrs.ACCEPT_ENCODING: "gzip, deflate"}

    SERVER_SOFTWARE = HttpMessage.SERVER_SOFTWARE

    body = b""
    auth = None
    response = None
    response_class = None

    _writer = None  # async task for streaming data
    _continue = None  # waiter future for '100 Continue' response

    # N.B.
    # Adding a __del__ method that closes self._writer doesn't make sense,
    # because _writer is an instance method and thus keeps a reference to
    # self; the finalizer will not be called until the writer has finished.

    def __init__(
        self,
        method,
        url,
        *,
        params=None,
        headers=None,
        skip_auto_headers=frozenset(),
        data=None,
        cookies=None,
        auth=None,
        encoding="utf-8",
        version=aiohttp.HttpVersion11,
        compress=None,
        chunked=None,
        expect100=False,
        loop=None,
        response_class=None
    ):

        if loop is None:
            loop = asyncio.get_event_loop()

        self.url = url
        self.method = method.upper()
        self.encoding = encoding
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.response_class = response_class or ClientResponse

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        self.update_version(version)
        self.update_host(url)
        self.update_path(params)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding()
        self.update_auth(auth)

        self.update_body_from_data(data, skip_auto_headers)
        self.update_transfer_encoding()
        self.update_expect_continue(expect100)

    def update_host(self, url):
        """Update destination host, port and connection type (ssl)."""
        url_parsed = urllib.parse.urlsplit(url)

        # check for network location part
        netloc = url_parsed.netloc
        if not netloc:
            raise ValueError("Host could not be detected.")

        # get host/port
        host = url_parsed.hostname
        if not host:
            raise ValueError("Host could not be detected.")

        try:
            port = url_parsed.port
        except ValueError:
            raise ValueError("Port number could not be converted.") from None

        # check domain idna encoding
        try:
            netloc = netloc.encode("idna").decode("utf-8")
            host = host.encode("idna").decode("utf-8")
        except UnicodeError:
            raise ValueError("URL has an invalid label.")

        # basic auth info
        username, password = url_parsed.username, url_parsed.password
        if username:
            self.auth = helpers.BasicAuth(username, password or "")
            netloc = netloc.split("@", 1)[1]

        # Record entire netloc for usage in host header
        self.netloc = netloc

        scheme = url_parsed.scheme
        self.ssl = scheme in ("https", "wss")

        # set port number if it isn't already set
        if not port:
            if self.ssl:
                port = HTTPS_PORT
            else:
                port = HTTP_PORT

        self.host, self.port, self.scheme = host, port, scheme

    def update_version(self, version):
        """Convert request version to two elements tuple.

        parser HTTP version '1.1' => (1, 1)
        """
        if isinstance(version, str):
            v = [l.strip() for l in version.split(".", 1)]
            try:
                version = int(v[0]), int(v[1])
            except ValueError:
                raise ValueError("Can not parse http version number: {}".format(version)) from None
        self.version = version

    def update_path(self, params):
        """Build path."""
        # extract path
        scheme, netloc, path, query, fragment = urllib.parse.urlsplit(self.url)
        if not path:
            path = "/"

        if isinstance(params, collections.abc.Mapping):
            params = list(params.items())

        if params:
            if not isinstance(params, str):
                params = urllib.parse.urlencode(params)
            if query:
                query = "%s&%s" % (query, params)
            else:
                query = params

        self.path = urllib.parse.urlunsplit(("", "", helpers.requote_uri(path), query, fragment))
        self.url = urllib.parse.urlunsplit((scheme, netloc, self.path, "", ""))

    def update_headers(self, headers):
        """Update request headers."""
        self.headers = CIMultiDict()
        if headers:
            if isinstance(headers, dict):
                headers = headers.items()
            elif isinstance(headers, (MultiDictProxy, MultiDict)):
                headers = headers.items()

            for key, value in headers:
                self.headers.add(key, value)

    def update_auto_headers(self, skip_auto_headers):
        self.skip_auto_headers = skip_auto_headers
        used_headers = set(self.headers) | skip_auto_headers

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers.add(hdr, val)

        # add host
        if hdrs.HOST not in used_headers:
            self.headers[hdrs.HOST] = self.netloc

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = self.SERVER_SOFTWARE

    def update_cookies(self, cookies):
        """Update request cookies header."""
        if not cookies:
            return

        c = http.cookies.SimpleCookie()
        if hdrs.COOKIE in self.headers:
            c.load(self.headers.get(hdrs.COOKIE, ""))
            del self.headers[hdrs.COOKIE]

        if isinstance(cookies, dict):
            cookies = cookies.items()

        for name, value in cookies:
            if isinstance(value, http.cookies.Morsel):
                c[value.key] = value.value
            else:
                c[name] = value

        self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()

    def update_content_encoding(self):
        """Set request content encoding."""
        enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower()
        if enc:
            if self.compress is not False:
                self.compress = enc
                # enable chunked, no need to deal with length
                self.chunked = True
        elif self.compress:
            if not isinstance(self.compress, str):
                self.compress = "deflate"
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length

    def update_auth(self, auth):
        """Set basic auth."""
        if auth is None:
            auth = self.auth
        if auth is None:
            return

        if not isinstance(auth, helpers.BasicAuth):
            warnings.warn("BasicAuth() tuple is required instead ", DeprecationWarning)
            auth = helpers.BasicAuth(*auth)

        self.headers[hdrs.AUTHORIZATION] = auth.encode()

    def update_body_from_data(self, data, skip_auto_headers):
        if not data:
            return

        if isinstance(data, str):
            data = data.encode(self.encoding)

        if isinstance(data, (bytes, bytearray)):
            self.body = data
            if hdrs.CONTENT_TYPE not in self.headers and hdrs.CONTENT_TYPE not in skip_auto_headers:
                self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"
            if hdrs.CONTENT_LENGTH not in self.headers and not self.chunked:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

        elif isinstance(data, (asyncio.StreamReader, streams.DataQueue)):
            self.body = data

        elif asyncio.iscoroutine(data):
            self.body = data
            if hdrs.CONTENT_LENGTH not in self.headers and self.chunked is None:
                self.chunked = True

        elif isinstance(data, io.IOBase):
            assert not isinstance(data, io.StringIO), "attempt to send text data instead of binary"
            self.body = data
            if not self.chunked and isinstance(data, io.BytesIO):
                # Not chunking if content-length can be determined
                size = len(data.getbuffer())
                self.headers[hdrs.CONTENT_LENGTH] = str(size)
                self.chunked = False
            elif not self.chunked and isinstance(data, io.BufferedReader):
                # Not chunking if content-length can be determined
                try:
                    size = os.fstat(data.fileno()).st_size - data.tell()
                    self.headers[hdrs.CONTENT_LENGTH] = str(size)
                    self.chunked = False
                except OSError:
                    # data.fileno() is not supported, e.g.
                    # io.BufferedReader(io.BytesIO(b'data'))
                    self.chunked = True
            else:
                self.chunked = True

            if hasattr(data, "mode"):
                if data.mode == "r":
                    raise ValueError("file {!r} should be open in binary mode" "".format(data))
            if (
                hdrs.CONTENT_TYPE not in self.headers
                and hdrs.CONTENT_TYPE not in skip_auto_headers
                and hasattr(data, "name")
            ):
                mime = mimetypes.guess_type(data.name)[0]
                mime = "application/octet-stream" if mime is None else mime
                self.headers[hdrs.CONTENT_TYPE] = mime

        elif isinstance(data, MultipartWriter):
            self.body = data.serialize()
            self.headers.update(data.headers)
            self.chunked = self.chunked or 8192

        else:
            if not isinstance(data, helpers.FormData):
                data = helpers.FormData(data)

            self.body = data(self.encoding)

            if hdrs.CONTENT_TYPE not in self.headers and hdrs.CONTENT_TYPE not in skip_auto_headers:
                self.headers[hdrs.CONTENT_TYPE] = data.content_type

            if data.is_multipart:
                self.chunked = self.chunked or 8192
            else:
                if hdrs.CONTENT_LENGTH not in self.headers and not self.chunked:
                    self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_transfer_encoding(self):
        """Analyze transfer-encoding header."""
        te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()

        if self.chunked:
            if hdrs.CONTENT_LENGTH in self.headers:
                del self.headers[hdrs.CONTENT_LENGTH]
            if "chunked" not in te:
                self.headers[hdrs.TRANSFER_ENCODING] = "chunked"

            self.chunked = self.chunked if type(self.chunked) is int else 8192
        else:
            if "chunked" in te:
                self.chunked = 8192
            else:
                self.chunked = None
                if hdrs.CONTENT_LENGTH not in self.headers:
                    self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_expect_continue(self, expect=False):
        if expect:
            self.headers[hdrs.EXPECT] = "100-continue"
        elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue":
            expect = True

        if expect:
            self._continue = asyncio.Future(loop=self.loop)

    @asyncio.coroutine
    def write_bytes(self, request, reader):
        """Support coroutines that yields bytes objects."""
        # 100 response
        if self._continue is not None:
            yield from self._continue

        try:
            if asyncio.iscoroutine(self.body):
                request.transport.set_tcp_nodelay(True)
                exc = None
                value = None
                stream = self.body

                while True:
                    try:
                        if exc is not None:
                            result = stream.throw(exc)
                        else:
                            result = stream.send(value)
                    except StopIteration as exc:
                        if isinstance(exc.value, bytes):
                            yield from request.write(exc.value, drain=True)
                        break
                    except:
                        self.response.close()
                        raise

                    if isinstance(result, asyncio.Future):
                        exc = None
                        value = None
                        try:
                            value = yield result
                        except Exception as err:
                            exc = err
                    elif isinstance(result, (bytes, bytearray)):
                        yield from request.write(result, drain=True)
                        value = None
                    else:
                        raise ValueError("Bytes object is expected, got: %s." % type(result))

            elif isinstance(self.body, asyncio.StreamReader):
                request.transport.set_tcp_nodelay(True)
                chunk = yield from self.body.read(streams.DEFAULT_LIMIT)
                while chunk:
                    yield from request.write(chunk, drain=True)
                    chunk = yield from self.body.read(streams.DEFAULT_LIMIT)

            elif isinstance(self.body, streams.DataQueue):
                request.transport.set_tcp_nodelay(True)
                while True:
                    try:
                        chunk = yield from self.body.read()
                        if chunk is EOF_MARKER:
                            break
                        yield from request.write(chunk, drain=True)
                    except streams.EofStream:
                        break

            elif isinstance(self.body, io.IOBase):
                chunk = self.body.read(self.chunked)
                while chunk:
                    request.write(chunk)
                    chunk = self.body.read(self.chunked)
                request.transport.set_tcp_nodelay(True)

            else:
                if isinstance(self.body, (bytes, bytearray)):
                    self.body = (self.body,)

                for chunk in self.body:
                    request.write(chunk)
                request.transport.set_tcp_nodelay(True)

        except Exception as exc:
            new_exc = aiohttp.ClientRequestError("Can not write request body for %s" % self.url)
            new_exc.__context__ = exc
            new_exc.__cause__ = exc
            reader.set_exception(new_exc)
        else:
            assert request.transport.tcp_nodelay
            try:
                ret = request.write_eof()
                # NB: in asyncio 3.4.1+ StreamWriter.drain() is coroutine
                # see bug #170
                if asyncio.iscoroutine(ret) or isinstance(ret, asyncio.Future):
                    yield from ret
            except Exception as exc:
                new_exc = aiohttp.ClientRequestError("Can not write request body for %s" % self.url)
                new_exc.__context__ = exc
                new_exc.__cause__ = exc
                reader.set_exception(new_exc)

        self._writer = None

    def send(self, writer, reader):
        writer.set_tcp_cork(True)
        request = aiohttp.Request(writer, self.method, self.path, self.version)

        if self.compress:
            request.add_compression_filter(self.compress)

        if self.chunked is not None:
            request.enable_chunked_encoding()
            request.add_chunking_filter(self.chunked)

        # set default content-type
        if (
            self.method in self.POST_METHODS
            and hdrs.CONTENT_TYPE not in self.skip_auto_headers
            and hdrs.CONTENT_TYPE not in self.headers
        ):
            self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"

        for k, value in self.headers.items():
            request.add_header(k, value)
        request.send_headers()

        self._writer = helpers.ensure_future(self.write_bytes(request, reader), loop=self.loop)

        self.response = self.response_class(
            self.method, self.url, self.host, writer=self._writer, continue100=self._continue
        )
        self.response._post_init(self.loop)
        return self.response

    @asyncio.coroutine
    def close(self):
        if self._writer is not None:
            try:
                yield from self._writer
            finally:
                self._writer = None

    def terminate(self):
        if self._writer is not None:
            if hasattr(self.loop, "is_closed"):
                if not self.loop.is_closed():
                    self._writer.cancel()
            else:
                self._writer.cancel()
            self._writer = None
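As a quick illustration of the header preparation performed by update_headers() and update_auto_headers() above: caller-supplied headers are copied into a CIMultiDict first, and the DEFAULT_HEADERS are only added when their name is neither already present nor listed in skip_auto_headers (the membership check uses the key strings exactly as in the code above). This is a minimal self-contained sketch; prepare_headers is an illustrative name, not part of aiohttp.

from multidict import CIMultiDict

DEFAULT_HEADERS = {'Accept': '*/*', 'Accept-Encoding': 'gzip, deflate'}

def prepare_headers(user_headers=None, skip_auto_headers=frozenset()):
    headers = CIMultiDict()
    for key, value in (user_headers or {}).items():
        headers.add(key, value)

    used = set(headers) | set(skip_auto_headers)
    for name, value in DEFAULT_HEADERS.items():
        if name not in used:          # a default only fills a real gap
            headers.add(name, value)
    return headers

for key, value in prepare_headers({'Accept': 'application/json'}).items():
    print(key, '->', value)
# Accept -> application/json   (the caller's value wins)
# Accept-Encoding -> gzip, deflate

print(prepare_headers(skip_auto_headers={'Accept-Encoding'}))
# only Accept: */* is auto-added; Accept-Encoding was explicitly skipped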
Exemple #17
0
class BaseResponse:
    __slots__ = ("body", "status", "content_type", "headers", "_cookies",
                 "_protocol")

    charset = 'utf-8'
    max_cookie_size = 4093

    def __init__(self,
                 body=None,
                 status=200,
                 headers=None,
                 content_type="text/plain"):
        self.content_type = content_type
        self.body = self._encode_body(body)
        self.status = status
        self.headers = CIMultiDict(headers or {})
        self._cookies = None
        self._protocol = None

    def set_protocol(self, protocol):
        self._protocol = protocol

    def has_protocol(self):
        return self._protocol is not None

    @staticmethod
    def _encode_body(data):
        try:
            if not isinstance(data, bytes):
                return data.encode()
            return data
        except AttributeError:
            return str(data or "").encode()

    def _parse_headers(self):
        headers = b""
        for name, value in self.headers.items():
            try:
                headers += b"%b: %b\r\n" % (
                    name.encode(),
                    value.encode(self.charset),
                )
            except AttributeError:
                headers += b"%b: %b\r\n" % (
                    str(name).encode(),
                    str(value).encode(self.charset),
                )
        return headers

    def set_cookie(self,
                   key,
                   value="",
                   max_age=None,
                   expires=None,
                   path="/",
                   domain=None,
                   secure=False,
                   httponly=False,
                   samesite=None):
        cookie = http.cookies.SimpleCookie()
        cookie[key] = value
        if max_age is not None:
            cookie[key]["max-age"] = max_age  # type: ignore
        if expires is not None:
            cookie[key]["expires"] = expires  # type: ignore
        if path is not None:
            cookie[key]["path"] = path
        if domain is not None:
            cookie[key]["domain"] = domain
        if secure:
            cookie[key]["secure"] = True
        if httponly:
            cookie[key]["httponly"] = True
        if samesite:
            cookie[key]["samesite"] = samesite
        cookie_val = cookie.output(header="").strip()
        self.headers.add('Set-Cookie', cookie_val)

    def delete_cookie(self, key, path="/", domain=None):
        self.set_cookie(key, expires=0, max_age=0, path=path, domain=domain)

    def get_headers(self,
                    version="1.1",
                    keep_alive=False,
                    keep_alive_timeout=None):
        timeout_header = b""
        if keep_alive and keep_alive_timeout is not None:
            timeout_header = b"Keep-Alive: %d\r\n" % keep_alive_timeout
        self.headers["Content-Type"] = self.headers.get(
            "Content-Type", self.content_type)
        if self.status in (304, 412):
            self.headers = remove_entity_headers(self.headers)
        headers = self._parse_headers()
        if self.status == 200:
            description = b"OK"
        else:
            description = STATUS_TEXT.get(self.status, b"UNKNOWN RESPONSE")

        return (b"HTTP/%b %d %b\r\n"
                b"Connection: %b\r\n"
                b"%b"
                b"%b\r\n") % (version.encode(), self.status, description,
                              b"keep-alive" if keep_alive else b"close",
                              timeout_header, headers)

    async def output(self,
                     version="1.1",
                     keep_alive=False,
                     keep_alive_timeout=None):
        if has_message_body(self.status):
            body = self.body
            self.headers["Content-Length"] = self.headers.get(
                "Content-Length", len(self.body))
        else:
            body = b""
        return self.get_headers(version, keep_alive,
                                keep_alive_timeout) + b"%b" % body
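Why set_cookie() uses headers.add() rather than item assignment: every cookie must become its own Set-Cookie header, and a CIMultiDict can hold several values under the same name where a plain dict would keep only the last one. Below is a small self-contained sketch of that pattern; add_cookie is an illustrative helper, not part of the class above.

import http.cookies

from multidict import CIMultiDict

def add_cookie(headers, key, value, **attrs):
    """Render one cookie with SimpleCookie and append it as a Set-Cookie header."""
    cookie = http.cookies.SimpleCookie()
    cookie[key] = value
    for name, attr_value in attrs.items():      # e.g. path='/', max_age=3600
        cookie[key][name.replace('_', '-')] = attr_value
    headers.add('Set-Cookie', cookie.output(header='').strip())

headers = CIMultiDict()
add_cookie(headers, 'session', 'abc123', path='/', max_age=3600)
add_cookie(headers, 'theme', 'dark')

for value in headers.getall('Set-Cookie'):
    print(value)
# prints something like:
#   session=abc123; Max-Age=3600; Path=/
#   theme=dark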
Exemple #18
0
class ClientRequest:

    GET_METHODS = {hdrs.METH_GET, hdrs.METH_HEAD, hdrs.METH_OPTIONS}
    POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
    ALL_METHODS = GET_METHODS.union(POST_METHODS).union(
        {hdrs.METH_DELETE, hdrs.METH_TRACE})

    DEFAULT_HEADERS = {
        hdrs.ACCEPT: '*/*',
        hdrs.ACCEPT_ENCODING: 'gzip, deflate',
    }

    SERVER_SOFTWARE = HttpMessage.SERVER_SOFTWARE

    body = b''
    auth = None
    response = None
    response_class = None

    _writer = None  # async task for streaming data
    _continue = None  # waiter future for '100 Continue' response

    # N.B.
    # Adding __del__ method with self._writer closing doesn't make sense
    # because _writer is instance method, thus it keeps a reference to self.
    # Until writer has finished finalizer will not be called.

    def __init__(self, method, url, *,
                 params=None, headers=None, skip_auto_headers=frozenset(),
                 data=None, cookies=None,
                 auth=None, encoding='utf-8',
                 version=aiohttp.HttpVersion11, compress=None,
                 chunked=None, expect100=False,
                 loop=None, response_class=None,
                 proxy=None, proxy_auth=None,
                 timeout=5*60):

        if loop is None:
            loop = asyncio.get_event_loop()

        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy

        if params:
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.url = url.with_fragment(None)
        self.method = method.upper()
        self.encoding = encoding
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.response_class = response_class or ClientResponse
        self._timeout = timeout

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth)

        self.update_body_from_data(data, skip_auto_headers)
        self.update_transfer_encoding()
        self.update_expect_continue(expect100)

    @property
    def host(self):
        return self.url.host

    @property
    def port(self):
        return self.url.port

    def update_host(self, url):
        """Update destination host, port and connection type (ssl)."""
        # get host/port
        if not url.host:
            raise ValueError('Host could not be detected.')

        # basic auth info
        username, password = url.user, url.password
        if username:
            self.auth = helpers.BasicAuth(username, password or '')

        # Record entire netloc for usage in host header

        scheme = url.scheme
        self.ssl = scheme in ('https', 'wss')

    def update_version(self, version):
        """Convert request version to two elements tuple.

        parser HTTP version '1.1' => (1, 1)
        """
        if isinstance(version, str):
            v = [l.strip() for l in version.split('.', 1)]
            try:
                version = int(v[0]), int(v[1])
            except ValueError:
                raise ValueError(
                    'Can not parse http version number: {}'
                    .format(version)) from None
        self.version = version

    def update_headers(self, headers):
        """Update request headers."""
        self.headers = CIMultiDict()
        if headers:
            if isinstance(headers, dict):
                headers = headers.items()
            elif isinstance(headers, (MultiDictProxy, MultiDict)):
                headers = headers.items()

            for key, value in headers:
                self.headers.add(key, value)

    def update_auto_headers(self, skip_auto_headers):
        self.skip_auto_headers = skip_auto_headers
        used_headers = set(self.headers) | skip_auto_headers

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers.add(hdr, val)

        # add host
        if hdrs.HOST not in used_headers:
            netloc = self.url.host
            if not self.url.is_default_port():
                netloc += ':' + str(self.url.port)
            self.headers[hdrs.HOST] = netloc

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = self.SERVER_SOFTWARE

    def update_cookies(self, cookies):
        """Update request cookies header."""
        if not cookies:
            return

        c = http.cookies.SimpleCookie()
        if hdrs.COOKIE in self.headers:
            c.load(self.headers.get(hdrs.COOKIE, ''))
            del self.headers[hdrs.COOKIE]

        for name, value in cookies.items():
            if isinstance(value, http.cookies.Morsel):
                c[value.key] = value.value
            else:
                c[name] = value

        self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()

    def update_content_encoding(self, data):
        """Set request content encoding."""
        if not data:
            return

        enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower()
        if enc:
            if self.compress is not False:
                self.compress = enc
                # enable chunked, no need to deal with length
                self.chunked = True
        elif self.compress:
            if not isinstance(self.compress, str):
                self.compress = 'deflate'
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length

    def update_auth(self, auth):
        """Set basic auth."""
        if auth is None:
            auth = self.auth
        if auth is None:
            return

        if not isinstance(auth, helpers.BasicAuth):
            raise TypeError('BasicAuth() tuple is required instead')

        self.headers[hdrs.AUTHORIZATION] = auth.encode()

    def update_body_from_data(self, data, skip_auto_headers):
        if not data:
            return

        if isinstance(data, str):
            data = data.encode(self.encoding)

        if isinstance(data, (bytes, bytearray)):
            self.body = data
            if (hdrs.CONTENT_TYPE not in self.headers and
                    hdrs.CONTENT_TYPE not in skip_auto_headers):
                self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream'
            if hdrs.CONTENT_LENGTH not in self.headers and not self.chunked:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

        elif isinstance(data, (asyncio.StreamReader, streams.StreamReader,
                               streams.DataQueue)):
            self.body = data

        elif asyncio.iscoroutine(data):
            self.body = data
            if (hdrs.CONTENT_LENGTH not in self.headers and
                    self.chunked is None):
                self.chunked = True

        elif isinstance(data, io.IOBase):
            assert not isinstance(data, io.StringIO), \
                'attempt to send text data instead of binary'
            self.body = data
            if not self.chunked and isinstance(data, io.BytesIO):
                # Not chunking if content-length can be determined
                size = len(data.getbuffer())
                self.headers[hdrs.CONTENT_LENGTH] = str(size)
                self.chunked = False
            elif (not self.chunked and
                  isinstance(data, (io.BufferedReader, io.BufferedRandom))):
                # Not chunking if content-length can be determined
                try:
                    size = os.fstat(data.fileno()).st_size - data.tell()
                    self.headers[hdrs.CONTENT_LENGTH] = str(size)
                    self.chunked = False
                except OSError:
                    # data.fileno() is not supported, e.g.
                    # io.BufferedReader(io.BytesIO(b'data'))
                    self.chunked = True
            else:
                self.chunked = True

            if hasattr(data, 'mode'):
                if data.mode == 'r':
                    raise ValueError('file {!r} should be open in binary mode'
                                     ''.format(data))
            if (hdrs.CONTENT_TYPE not in self.headers and
                hdrs.CONTENT_TYPE not in skip_auto_headers and
                    hasattr(data, 'name')):
                mime = mimetypes.guess_type(data.name)[0]
                mime = 'application/octet-stream' if mime is None else mime
                self.headers[hdrs.CONTENT_TYPE] = mime

        elif isinstance(data, MultipartWriter):
            self.body = data.serialize()
            self.headers.update(data.headers)
            self.chunked = self.chunked or 8192

        else:
            if not isinstance(data, helpers.FormData):
                data = helpers.FormData(data)

            self.body = data(self.encoding)

            if (hdrs.CONTENT_TYPE not in self.headers and
                    hdrs.CONTENT_TYPE not in skip_auto_headers):
                self.headers[hdrs.CONTENT_TYPE] = data.content_type

            if data.is_multipart:
                self.chunked = self.chunked or 8192
            else:
                if (hdrs.CONTENT_LENGTH not in self.headers and
                        not self.chunked):
                    self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_transfer_encoding(self):
        """Analyze transfer-encoding header."""
        te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower()

        if self.chunked:
            if hdrs.CONTENT_LENGTH in self.headers:
                del self.headers[hdrs.CONTENT_LENGTH]
            if 'chunked' not in te:
                self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'

            self.chunked = self.chunked if type(self.chunked) is int else 8192
        else:
            if 'chunked' in te:
                self.chunked = 8192
            else:
                self.chunked = None
                if hdrs.CONTENT_LENGTH not in self.headers:
                    self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_expect_continue(self, expect=False):
        if expect:
            self.headers[hdrs.EXPECT] = '100-continue'
        elif self.headers.get(hdrs.EXPECT, '').lower() == '100-continue':
            expect = True

        if expect:
            self._continue = helpers.create_future(self.loop)

    def update_proxy(self, proxy, proxy_auth):
        if proxy and proxy.scheme != 'http':
            raise ValueError("Only http proxies are supported")
        if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
            raise ValueError("proxy_auth must be None or BasicAuth() tuple")
        self.proxy = proxy
        self.proxy_auth = proxy_auth

    @asyncio.coroutine
    def write_bytes(self, request, reader):
        """Support coroutines that yields bytes objects."""
        # 100 response
        if self._continue is not None:
            yield from self._continue

        try:
            if asyncio.iscoroutine(self.body):
                request.transport.set_tcp_nodelay(True)
                exc = None
                value = None
                stream = self.body

                while True:
                    try:
                        if exc is not None:
                            result = stream.throw(exc)
                        else:
                            result = stream.send(value)
                    except StopIteration as exc:
                        if isinstance(exc.value, bytes):
                            yield from request.write(exc.value, drain=True)
                        break
                    except:
                        self.response.close()
                        raise

                    if isinstance(result, asyncio.Future):
                        exc = None
                        value = None
                        try:
                            value = yield result
                        except Exception as err:
                            exc = err
                    elif isinstance(result, (bytes, bytearray)):
                        yield from request.write(result, drain=True)
                        value = None
                    else:
                        raise ValueError(
                            'Bytes object is expected, got: %s.' %
                            type(result))

            elif isinstance(self.body, (asyncio.StreamReader,
                                        streams.StreamReader)):
                request.transport.set_tcp_nodelay(True)
                chunk = yield from self.body.read(streams.DEFAULT_LIMIT)
                while chunk:
                    yield from request.write(chunk, drain=True)
                    chunk = yield from self.body.read(streams.DEFAULT_LIMIT)

            elif isinstance(self.body, streams.DataQueue):
                request.transport.set_tcp_nodelay(True)
                while True:
                    try:
                        chunk = yield from self.body.read()
                        if chunk is EOF_MARKER:
                            break
                        yield from request.write(chunk, drain=True)
                    except streams.EofStream:
                        break

            elif isinstance(self.body, io.IOBase):
                chunk = self.body.read(self.chunked)
                while chunk:
                    request.write(chunk)
                    chunk = self.body.read(self.chunked)
                request.transport.set_tcp_nodelay(True)

            else:
                if isinstance(self.body, (bytes, bytearray)):
                    self.body = (self.body,)

                for chunk in self.body:
                    request.write(chunk)
                request.transport.set_tcp_nodelay(True)

        except Exception as exc:
            new_exc = aiohttp.ClientRequestError(
                'Can not write request body for %s' % self.url)
            new_exc.__context__ = exc
            new_exc.__cause__ = exc
            reader.set_exception(new_exc)
        else:
            assert request.transport.tcp_nodelay
            try:
                ret = request.write_eof()
                # NB: in asyncio 3.4.1+ StreamWriter.drain() is coroutine
                # see bug #170
                if (asyncio.iscoroutine(ret) or
                        isinstance(ret, asyncio.Future)):
                    yield from ret
            except Exception as exc:
                new_exc = aiohttp.ClientRequestError(
                    'Can not write request body for %s' % self.url)
                new_exc.__context__ = exc
                new_exc.__cause__ = exc
                reader.set_exception(new_exc)

        self._writer = None

    def send(self, writer, reader):
        writer.set_tcp_cork(True)
        path = self.url.raw_path
        if self.url.raw_query_string:
            path += '?' + self.url.raw_query_string
        request = aiohttp.Request(writer, self.method, path,
                                  self.version)

        if self.compress:
            request.add_compression_filter(self.compress)

        if self.chunked is not None:
            request.enable_chunked_encoding()
            request.add_chunking_filter(self.chunked)

        # set default content-type
        if (self.method in self.POST_METHODS and
                hdrs.CONTENT_TYPE not in self.skip_auto_headers and
                hdrs.CONTENT_TYPE not in self.headers):
            self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream'

        for k, value in self.headers.items():
            request.add_header(k, value)
        request.send_headers()

        self._writer = helpers.ensure_future(
            self.write_bytes(request, reader), loop=self.loop)

        self.response = self.response_class(
            self.method, self.url,
            writer=self._writer, continue100=self._continue,
            timeout=self._timeout)
        self.response._post_init(self.loop)
        return self.response

    @asyncio.coroutine
    def close(self):
        if self._writer is not None:
            try:
                yield from self._writer
            finally:
                self._writer = None

    def terminate(self):
        if self._writer is not None:
            if not self.loop.is_closed():
                self._writer.cancel()
            self._writer = None
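The params handling in __init__ above merges extra parameters with a query string that is already part of the URL instead of overwriting it. A self-contained sketch of that merge using yarl and multidict follows; merge_params is an illustrative name.

from multidict import MultiDict
from yarl import URL

def merge_params(url, params):
    """Combine extra query parameters with the query already in the URL."""
    q = MultiDict(url.query)            # existing query pairs
    url2 = url.with_query(params)       # let yarl encode the new ones
    q.extend(url2.query)
    return url.with_query(q).with_fragment(None)

url = URL('http://example.com/search?q=python#results')
print(merge_params(url, {'page': '2'}))
# http://example.com/search?q=python&page=2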
Exemple #19
0
def make_mocked_request(method,
                        path,
                        headers=None,
                        *,
                        version=HttpVersion(1, 1),
                        closing=False,
                        app=None,
                        writer=sentinel,
                        payload_writer=sentinel,
                        protocol=sentinel,
                        transport=sentinel,
                        payload=sentinel,
                        sslcontext=None,
                        client_max_size=1024**2):
    """
    XXX copied from aiohttp but using guillotina request object
    Creates mocked web.Request testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    """

    task = mock.Mock()
    loop = mock.Mock()
    loop.create_future.return_value = ()

    if version < HttpVersion(1, 1):
        closing = True

    if headers is None:
        headers = {}
    if 'Host' not in headers:
        headers['Host'] = 'localhost'
    headers = CIMultiDict(headers)
    raw_hdrs = tuple(
        (k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items())

    chunked = 'chunked' in headers.get(hdrs.TRANSFER_ENCODING, '').lower()

    message = RawRequestMessage(method, path, version, headers, raw_hdrs,
                                closing, False, False, chunked, URL(path))
    if app is None:
        app = test_utils._create_app_mock()

    if protocol is sentinel:
        protocol = mock.Mock()

    if transport is sentinel:
        transport = test_utils._create_transport(sslcontext)

    if writer is sentinel:
        writer = mock.Mock()
        writer.transport = transport

    if payload_writer is sentinel:
        payload_writer = mock.Mock()
        payload_writer.write_eof.side_effect = noop
        payload_writer.drain.side_effect = noop

    protocol.transport = transport
    protocol.writer = writer

    if payload is sentinel:
        payload = mock.Mock()

    time_service = mock.Mock()
    time_service.time.return_value = 12345
    time_service.strtime.return_value = "Tue, 15 Nov 1994 08:12:31 GMT"

    @contextmanager
    def timeout(*args, **kw):
        yield

    time_service.timeout = mock.Mock()
    time_service.timeout.side_effect = timeout

    req = Request(message,
                  payload,
                  protocol,
                  payload_writer,
                  time_service,
                  task,
                  client_max_size=client_max_size)

    match_info = UrlMappingMatchInfo({}, mock.Mock())
    match_info.add_app(app)
    req._match_info = match_info

    return req
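A hedged usage sketch (not from the source) of the helper above: it builds a request object entirely from mocks, so plain request attributes can be asserted without starting a server. The attributes checked here (method, headers) follow directly from the RawRequestMessage constructed above; the test name is hypothetical.

def test_mocked_request_basics():
    # No transport, no server: everything behind the request is a mock.
    req = make_mocked_request('GET', '/db/item', headers={'Host': 'example.com'})

    assert req.method == 'GET'
    assert req.headers['Host'] == 'example.com'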
Exemple #20
0
class Message:
    def __init__(self,
                 headers=None,
                 payload=None,
                 from_details=None,
                 to_details=None,
                 contact_details=None,
                 ):

        if headers:
            self.headers = headers
        else:
            self.headers = CIMultiDict()

        if from_details:
            self._from_details = from_details
        elif 'From' not in self.headers:
            raise ValueError('From header or from_details is required')

        if to_details:
            self._to_details = to_details
        elif 'To' not in self.headers:
            raise ValueError('To header or to_details is required')

        if contact_details:
            self._contact_details = contact_details

        self._payload = payload
        self._raw_payload = None

        if 'Via' not in self.headers:
            self.headers['Via'] = 'SIP/2.0/%(protocol)s ' + \
                utils.format_host_and_port(self.contact_details['uri']['host'],
                                           self.contact_details['uri']['port']) + \
                ';branch=%s' % utils.gen_branch(10)

    @property
    def payload(self):
        if self._payload:
            return self._payload
        elif self._raw_payload:
            self._payload = self._raw_payload.decode()
            return self._payload
        else:
            return ''

    @payload.setter
    def payload(self, payload):
        self._payload = payload

    @property
    def from_details(self):
        if not hasattr(self, '_from_details'):
            self._from_details = Contact.from_header(self.headers['From'])
        return self._from_details

    @from_details.setter
    def from_details(self, from_details):
        self._from_details = from_details

    @property
    def to_details(self):
        if not hasattr(self, '_to_details'):
            self._to_details = Contact.from_header(self.headers['To'])
        return self._to_details

    @to_details.setter
    def to_details(self, to_details):
        self._to_details = to_details

    @property
    def contact_details(self):
        if not hasattr(self, '_contact_details'):
            if 'Contact' in self.headers:
                self._contact_details = Contact.from_header(self.headers['Contact'])
            else:
                self._contact_details = None
        return self._contact_details

    @contact_details.setter
    def contact_details(self, contact_details):
        self._contact_details = contact_details

    @property
    def content_type(self):
        return self.headers['Content-Type']

    @content_type.setter
    def content_type(self, content_type):
        self.headers['Content-Type'] = content_type

    @property
    def cseq(self):
        if not hasattr(self, '_cseq'):
            self._cseq = int(self.headers['CSeq'].split(' ')[0])
        return self._cseq

    @cseq.setter
    def cseq(self, cseq):
        self._cseq = int(cseq)

    @property
    def method(self):
        if not hasattr(self, '_method'):
            self._method = self.headers['CSeq'].split(' ')[1]
        return self._method

    @method.setter
    def method(self, method):
        self._method = method

    def __str__(self):
        if self._payload:
            self._raw_payload = self._payload.encode()
        elif not self._raw_payload:
            self._raw_payload = b''

        msg = self._make_headers()
        return msg + self.payload

    def encode(self, *args, **kwargs):
        if self._payload:
            self._raw_payload = self._payload.encode(*args, **kwargs)
        elif not self._raw_payload:
            self._raw_payload = b''

        msg = self._make_headers()
        return msg.encode(*args, **kwargs) + self._raw_payload

    def _make_headers(self):
        if hasattr(self, '_from_details'):
            self.headers['From'] = str(self.from_details)

        if hasattr(self, '_to_details'):
            self.headers['To'] = str(self.to_details)

        if hasattr(self, '_contact_details'):
            self.headers['Contact'] = str(self.contact_details)

        if hasattr(self, '_cseq') or hasattr(self, '_method'):
            self.headers['CSeq'] = '%s %s' % (self.cseq, self.method)

        self.headers['Content-Length'] = str(len(self._raw_payload))
        if 'Max-Forwards' not in self.headers:
            self.headers['Max-Forwards'] = '70'
        if 'Call-ID' not in self.headers:
            self.headers['Call-ID'] = uuid.uuid4()

        return self._format_headers()

    def _format_headers(self):
        msg = []
        for k, v in sorted(self.headers.items()):
            if k == 'Via':
                if isinstance(v, (list, tuple)):
                    msg = ['%s: %s' % (k, i) for i in v] + msg
                else:
                    msg.insert(0, '%s: %s' % (k, v))
            else:
                if isinstance(v, (list, tuple)):
                    msg.extend(['%s: %s' % (k, i) for i in v])
                else:
                    msg.append('%s: %s' % (k, v))
        msg.append(utils.EOL)
        return utils.EOL.join(msg)

    def parsed_xml(self):
        if 'Content-Type' not in self.headers:
            return None
        if not self.headers['Content-Type'].endswith('+xml'):
            return None
        return PyQuery(self.payload).remove_namespaces()

    @classmethod
    def from_raw_headers(cls, raw_headers):
        headers = CIMultiDict()
        decoded_headers = raw_headers.decode().split(utils.EOL)
        for line in decoded_headers[1:]:
            k, v = line.split(': ', 1)
            if k in headers:
                o = headers.setdefault(k, [])
                if not isinstance(o, list):
                    o = [o]
                o.append(v)
                headers[k] = o
            else:
                headers[k] = v

        m = FIRST_LINE_PATTERN['response']['regex'].match(decoded_headers[0])
        if m:
            d = m.groupdict()
            return Response(status_code=int(d['status_code']),
                            status_message=d['status_message'],
                            headers=headers,
                            first_line=decoded_headers[0])
        else:
            m = FIRST_LINE_PATTERN['request']['regex'].match(decoded_headers[0])
            if m:
                d = m.groupdict()
                cseq, _ = headers['CSeq'].split()
                return Request(method=d['method'],
                               headers=headers,
                               cseq=int(cseq),
                               first_line=decoded_headers[0])
            else:
                LOG.debug(decoded_headers)
                raise ValueError('Not a SIP message')
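The from_raw_headers() classmethod above folds repeated SIP headers (several Via lines, for instance) into a list stored under a single key of the CIMultiDict. Below is a self-contained sketch of just that folding step, with the Request/Response wrappers and the EOL constant from the source left out.

from multidict import CIMultiDict

RAW = (b"SIP/2.0 200 OK\r\n"
       b"Via: SIP/2.0/UDP host1;branch=z9hG4bKa\r\n"
       b"Via: SIP/2.0/UDP host2;branch=z9hG4bKb\r\n"
       b"CSeq: 1 REGISTER\r\n")

def parse_headers(raw):
    headers = CIMultiDict()
    for line in raw.decode().split('\r\n')[1:]:   # skip the status line
        if not line:
            continue
        key, value = line.split(': ', 1)
        if key in headers:
            existing = headers[key]
            if not isinstance(existing, list):
                existing = [existing]
            existing.append(value)
            headers[key] = existing                # repeated header -> list
        else:
            headers[key] = value
    return headers

headers = parse_headers(RAW)
print(headers['CSeq'])       # 1 REGISTER
print(len(headers['Via']))   # 2 -- both Via values are kept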
Exemple #21
0
class HttpMessage(PayloadWriter):
    """HttpMessage allows to write headers and payload to a stream."""

    HOP_HEADERS = None  # Must be set by subclass.

    SERVER_SOFTWARE = 'Python/{0[0]}.{0[1]} aiohttp/{1}'.format(
        sys.version_info, aiohttp.__version__)

    upgrade = False  # Connection: UPGRADE
    websocket = False  # Upgrade: WEBSOCKET
    has_chunked_hdr = False  # Transfer-encoding: chunked

    def __init__(self, transport, version, close, loop=None):
        super().__init__(transport, loop)

        self.version = version
        self.closing = close
        self.keepalive = None
        self.length = None
        self.headers = CIMultiDict()
        self.headers_sent = False

    @property
    def body_length(self):
        return self.output_length

    def force_close(self):
        self.closing = True
        self.keepalive = False

    def keep_alive(self):
        if self.keepalive is None:
            if self.version < HttpVersion10:
                # keep alive not supported at all
                return False
            if self.version == HttpVersion10:
                if self.headers.get(hdrs.CONNECTION) == 'keep-alive':
                    return True
                else:  # no headers means we close for Http 1.0
                    return False
            else:
                return not self.closing
        else:
            return self.keepalive

    def is_headers_sent(self):
        return self.headers_sent

    def add_header(self, name, value):
        """Analyze headers. Calculate content length,
        removes hop headers, etc."""
        assert not self.headers_sent, 'headers have been sent already'
        assert isinstance(name, str), \
            'Header name should be a string, got {!r}'.format(name)
        assert set(name).issubset(ASCIISET), \
            'Header name should contain ASCII chars, got {!r}'.format(name)
        assert isinstance(value, str), \
            'Header {!r} should have string value, got {!r}'.format(
                name, value)

        name = istr(name)
        value = value.strip()

        if name == hdrs.CONTENT_LENGTH:
            self.length = int(value)

        if name == hdrs.TRANSFER_ENCODING:
            self.has_chunked_hdr = value.lower() == 'chunked'

        if name == hdrs.CONNECTION:
            val = value.lower()
            # handle websocket
            if 'upgrade' in val:
                self.upgrade = True
            # connection keep-alive
            elif 'close' in val:
                self.keepalive = False
            elif 'keep-alive' in val:
                self.keepalive = True

        elif name == hdrs.UPGRADE:
            if 'websocket' in value.lower():
                self.websocket = True
            self.headers[name] = value

        elif name not in self.HOP_HEADERS:
            # ignore hop-by-hop headers
            self.headers.add(name, value)

    def add_headers(self, *headers):
        """Adds headers to a HTTP message."""
        for name, value in headers:
            self.add_header(name, value)

    def send_headers(self, _sep=': ', _end='\r\n'):
        """Writes headers to a stream. Constructs payload writer."""
        # Chunked responses are only for HTTP/1.1 clients or newer,
        # and only when no Content-Length header is set.
        # Do not use chunked responses when the response is guaranteed to
        # not have a response body (304, 204).
        assert not self.headers_sent, 'headers have been sent already'
        self.headers_sent = True

        if not self.chunked and self.autochunked():
            self.enable_chunking()

        if self.chunked:
            self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'

        self._add_default_headers()

        # status + headers
        headers = self.status_line + ''.join(
            [k + _sep + v + _end for k, v in self.headers.items()])
        headers = headers.encode('utf-8') + b'\r\n'

        self.buffer_data(headers)

    def _add_default_headers(self):
        # set the connection header
        connection = None
        if self.upgrade:
            connection = 'Upgrade'
        elif not self.closing if self.keepalive is None else self.keepalive:
            if self.version == HttpVersion10:
                connection = 'keep-alive'
        else:
            if self.version == HttpVersion11:
                connection = 'close'

        if connection is not None:
            self.headers[hdrs.CONNECTION] = connection
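The Connection-header rules spread across keep_alive() and _add_default_headers() above can be hard to follow inline. The function below restates them as a plain, self-contained decision helper; connection_header is an illustrative name, and versions are (major, minor) tuples as in aiohttp's HttpVersion.

def connection_header(version, keepalive, closing, upgrade=False):
    """Pick the Connection header value, mirroring _add_default_headers()."""
    if upgrade:
        return 'Upgrade'
    wants_keepalive = (not closing) if keepalive is None else keepalive
    if wants_keepalive:
        # HTTP/1.0 must opt in to keep-alive explicitly
        return 'keep-alive' if version == (1, 0) else None
    # HTTP/1.1 keeps connections open by default, so closing must be explicit
    return 'close' if version == (1, 1) else None

print(connection_header((1, 0), keepalive=True, closing=False))   # keep-alive
print(connection_header((1, 1), keepalive=None, closing=True))    # close
print(connection_header((1, 1), keepalive=None, closing=False))   # None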
Exemple #22
0
class HTTPMessage:
	def __init__(self):
		self.version = "HTTP/1.1"
	
		self.headers = CIMultiDict()
		self.body = b""
		
		self.rawform = None
		self.form = None
		self.json = None
		self.xml = None
		self.files = None
		self.text = None
		
		self.boundary = "--------BOUNDARY--------"
		
	def check_version(self):
		if not self.version.startswith("HTTP/"):
			raise HTTPError("HTTP version must start with HTTP/")
		if self.version not in ["HTTP/1.0", "HTTP/1.1"]:
			raise HTTPError("HTTP version not supported")
	
	def transfer_encodings(self):
		encoding = self.headers.get("Transfer-Encoding", "identity")
		return [enc.strip() for enc in encoding.split(",")]
	
	def is_chunked(self):
		return "chunked" in self.transfer_encodings()
		
	def parse_body(self):
		type, param = parseheader(self.headers.get("Content-Type", ""))
		
		is_json = type == "application/json" or type.endswith("+json")
		is_xml = type in XML_TYPES or type.endswith("+xml")
		is_text = type in TEXT_TYPES or type.startswith("text/") or is_json or is_xml
		
		if is_text:
			try:
				self.text = self.body.decode(param.get("charset", "UTF-8"))
			except UnicodeDecodeError:
				raise HTTPError("Failed to decode HTTP body")
		
		if type == "application/x-www-form-urlencoded":
			self.form = formdecode(self.text)
			self.rawform = formdecode(self.text, False)
		
		if is_json:
			try:
				self.json = json.loads(self.text)
			except json.JSONDecodeError:
				raise HTTPError("Failed to decode JSON body")
		
		if is_xml:
			try:
				self.xml = xml.parse(self.text)
			except ValueError as e:
				raise HTTPError("Failed to decode XML body: %s" %e)
		
		if type.startswith("multipart/form-data"):
			if "boundary" not in param:
				raise HTTPError("multipart/form-data required boundary parameter")
			self.boundary = param["boundary"]
			self.files = self.parse_files(self.body)
	
	def parse_files(self, data):
		split = b"--%s" %self.boundary.encode()
		parts = data.split(split)
		
		if parts[-1] != b"--\r\n" or parts[0] != b"":
			raise HTTPError("Failed to decode multipart body")
		
		files = MultiDict()
		for part in parts[1:-1]:
			if part[:2] != b"\r\n" or part[-2:] != b"\r\n":
				raise HTTPError("Failed to decode multipart body")
			part = part[2:-2]
			
			if not b"\r\n\r\n" in part:
				raise HTTPError("Failed to decode multipart body")
			
			head, body = part.split(b"\r\n\r\n", 1)
			try:
				lines = head.decode().split("\r\n")
			except UnicodeDecodeError:
				raise HTTPError("Failed to decode multipart body")
			
			headers = {}
			for header in lines:
				if not ": " in header:
					raise HTTPError("Invalid line in multipart headers")
				key, value = header.split(": ", 1)
				headers[key] = value
			
			if "Content-Disposition" not in headers:
				raise HTTPError("Expected Content-Disposition header in multipart data")
				
			type, param = parseheader(headers["Content-Disposition"])
			if type != "form-data":
				raise HTTPError("Expected form-data header in multipart data")
			
			if "name" not in param:
				raise HTTPError("Expected name parameter in Content-Disposition header")
			files[param["name"]] = body
				
		return files
		
	def encode_body(self):
		text = self.text
		body = self.body
		
		if self.rawform is not None:
			if "Content-Type" not in self.headers:
				self.headers["Content-Type"] = "application/x-www-form-urlencoded"
			text = formencode(self.rawform, False)
		
		elif self.form is not None:
			if "Content-Type" not in self.headers:
				self.headers["Content-Type"] = "application/x-www-form-urlencoded"
			text = formencode(self.form)
		
		elif self.json is not None:
			if "Content-Type" not in self.headers:
				self.headers["Content-Type"] = "application/json"
			text = json.dumps(self.json)
			
		elif self.xml is not None:
			if "Content-Type" not in self.headers:
				self.headers["Content-Type"] = "application/xml"
			text = self.xml.encode()
			
		elif self.files is not None:
			if "Content-Type" not in self.headers:
				self.headers["Content-Type"] = "multipart/form-data"
			self.headers["Content-Type"] += "; boundary=%s" %self.boundary
			
			text = None
			body = b""
			for name, data in self.files.items():
				name = name.replace('"', '\\"')
				body += b"--%s\r\n" %self.boundary.encode()
				body += b"Content-Disposition: form-data; name=\"%s\"\r\n\r\n" %name.encode()
				body += data + b"\r\n"
			body += b"--%s--\r\n" %self.boundary.encode()
		
		if text is not None:
			if "Content-Type" not in self.headers:
				self.headers["Content-Type"] = "text/plain"
			body = text.encode()
		
		if body and "Content-Type" not in self.headers:
			self.headers["Content-Type"] = "application/octet-stream"
		
		if self.is_chunked():
			if not body:
				return b"0\r\n\r\n"
			return b"%x\r\n" %len(body) + body + b"\r\n0\r\n\r\n"
		else:
			if body:
				self.headers["Content-Length"] = len(body)
			return body
	
	def encode_start_line(self):
		raise NotImplementedError("%s.encode_start_line" %self.__class__.__name__)
	
	def encode_headers(self):
		self.encode_body()
		
		lines = [self.encode_start_line()]
		for key, value in self.headers.items():
			lines.append("%s: %s" %(key, value))
		
		text = "\r\n".join(lines) + "\r\n\r\n"
		return text.encode()
	
	def encode(self):
		return self.encode_headers() + self.encode_body()
	
	@classmethod
	def parse(cls, data, head=False):
		parser = HTTPParser(cls, head)
		parser.update(data)
		parser.eof()
		
		if parser.buffer:
			raise HTTPError("Got more data than expected")
		
		return parser.message
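When Transfer-Encoding: chunked is in effect, encode_body() above frames the whole payload as a single hex-sized chunk followed by the zero-length terminator chunk. A minimal self-contained sketch of that framing; frame_chunked is an illustrative name.

def frame_chunked(body):
    """Frame a body as one chunk plus the terminating zero-length chunk."""
    if not body:
        return b"0\r\n\r\n"
    return b"%x\r\n" % len(body) + body + b"\r\n0\r\n\r\n"

print(frame_chunked(b"hello world"))   # b'b\r\nhello world\r\n0\r\n\r\n'
print(frame_chunked(b""))              # b'0\r\n\r\n'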
Exemple #23
0
def make_mocked_request(method, path, headers=None, *,
                        version=HttpVersion(1, 1), closing=False,
                        app=None,
                        writer=sentinel,
                        protocol=sentinel,
                        transport=sentinel,
                        payload=sentinel,
                        sslcontext=None,
                        secure_proxy_ssl_header=None):
    """Creates mocked web.Request testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    """

    if version < HttpVersion(1, 1):
        closing = True

    if headers:
        headers = CIMultiDict(headers)
        raw_hdrs = tuple(
            (k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items())
    else:
        headers = CIMultiDict()
        raw_hdrs = ()

    chunked = 'chunked' in headers.get(hdrs.TRANSFER_ENCODING, '').lower()

    message = RawRequestMessage(
        method, path, version, headers,
        raw_hdrs, closing, False, False, chunked, URL(path))
    if app is None:
        app = _create_app_mock()

    if protocol is sentinel:
        protocol = mock.Mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if writer is sentinel:
        writer = mock.Mock()
        writer.transport = transport

    protocol.transport = transport
    protocol.writer = writer

    if payload is sentinel:
        payload = mock.Mock()

    time_service = mock.Mock()
    time_service.time.return_value = 12345
    time_service.strtime.return_value = "Tue, 15 Nov 1994 08:12:31 GMT"

    @contextmanager
    def timeout(*args, **kw):
        yield

    time_service.timeout = mock.Mock()
    time_service.timeout.side_effect = timeout

    task = mock.Mock()
    loop = mock.Mock()
    loop.create_future.return_value = ()

    req = Request(message, payload, protocol,
                  time_service, task, loop=loop,
                  secure_proxy_ssl_header=secure_proxy_ssl_header)

    match_info = UrlMappingMatchInfo({}, mock.Mock())
    match_info.add_app(app)
    req._match_info = match_info

    return req
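
A hedged usage sketch for the helper above, assuming it is imported from aiohttp.test_utils as in upstream aiohttp; the path and header name are illustrative only.

from aiohttp.test_utils import make_mocked_request

# Build a fake GET request without starting a server and inspect it.
req = make_mocked_request('GET', '/widgets?limit=10',
                          headers={'X-Custom-Token': 'abc'})
assert req.method == 'GET'
assert req.headers['X-CUSTOM-TOKEN'] == 'abc'  # CIMultiDict lookup is case-insensitive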
Exemple #24
0
def make_mocked_request(method,
                        path,
                        headers=None,
                        *,
                        match_info=sentinel,
                        version=HttpVersion(1, 1),
                        closing=False,
                        app=None,
                        writer=sentinel,
                        protocol=sentinel,
                        transport=sentinel,
                        payload=sentinel,
                        sslcontext=None,
                        client_max_size=1024**2,
                        loop=...):
    """Creates mocked web.Request testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    """

    task = mock.Mock()
    if loop is ...:
        loop = mock.Mock()
        loop.create_future.return_value = ()

    if version < HttpVersion(1, 1):
        closing = True

    if headers:
        headers = CIMultiDict(headers)
        raw_hdrs = tuple(
            (k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items())
    else:
        headers = CIMultiDict()
        raw_hdrs = ()

    chunked = 'chunked' in headers.get(hdrs.TRANSFER_ENCODING, '').lower()

    message = RawRequestMessage(method, path, version, headers, raw_hdrs,
                                closing, False, False, chunked, URL(path))
    if app is None:
        app = _create_app_mock()

    if protocol is sentinel:
        protocol = mock.Mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if writer is sentinel:
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport

    protocol.transport = transport
    protocol.writer = writer

    if payload is sentinel:
        payload = mock.Mock()

    req = Request(message,
                  payload,
                  protocol,
                  writer,
                  task,
                  loop,
                  client_max_size=client_max_size)

    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock())
    match_info.add_app(app)
    req._match_info = match_info

    return req
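
The Transfer-Encoding check above works regardless of header-name casing because CIMultiDict keys are case-insensitive; a small standalone illustration (assumes only the multidict package):

from multidict import CIMultiDict

headers = CIMultiDict({'Transfer-Encoding': 'Chunked'})
# Lookup ignores the key's case, so the same check matches however it was spelled.
assert headers.get('transfer-encoding') == 'Chunked'
assert 'chunked' in headers.get('Transfer-Encoding', '').lower()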
Exemple #25
0
class HTTPResponse(object):
    def __init__(self,
                 body=None,
                 status=200,
                 headers=None,
                 content_type="text/plain",
                 body_bytes=b""):
        self.content_type = content_type
        self.headers = headers

        if body is not None:
            self.body = self.encode_body(body)
        else:
            self.body = body_bytes

        self.status = status
        self.headers = CIMultiDict(headers or {})

    def encode_body(self, data):
        try:
            return data.encode()
        except AttributeError:
            return str(data).encode()

    def parse_headers(self):
        headers = b""
        for key, value in self.headers.items():
            try:
                headers += b"%b: %b\r\n" % (key.encode(),
                                            value.encode("utf-8"))
            except AttributeError:
                headers += b"%b: %b\r\n" % (str(key).encode(),
                                            str(value).encode("utf-8"))

        return headers

    def has_message_body(self):
        """
        According to the RFC sections below, a message body and Content-Length
        SHOULD NOT be included in responses with status 1xx, 204 or 304.
        https://tools.ietf.org/html/rfc2616#section-4.4
        https://tools.ietf.org/html/rfc2616#section-4.3
        """
        return self.status not in (204, 304) and not (100 <= self.status < 200)

    def output(self, version="1.1", keep_alive=False, keep_alive_timeout=None):
        """
        TODO: keep-alive is not supported yet
        :param version:
        :return:
        """
        timeout_header = b""
        if keep_alive and keep_alive_timeout is not None:
            timeout_header = b"Keep-Alive: %d\r\n" % keep_alive_timeout

        body = b""
        if self.has_message_body():
            body = self.body
            self.headers["Content-Length"] = self.headers.get(
                "Content-Length", len(self.body))

        self.headers["Content-Type"] = self.headers.get(
            "Content-Type", self.content_type)

        if self.status in (304, 412):
            self.headers = remove_entity_headers(self.headers)

        headers = self.parse_headers()

        if self.status == 200:
            status_msg = b"OK"
        else:
            status_msg = STATUS_CODES.get(self.status, b"UNKNOWN RESPONSE")
        """
        The message is structured as follows:
        HTTP/1.1 200 OK                     # status line
        Connection: close                   # headers
        Content-Type: text/plain
                                            # blank line
        xxxxxbody part                      # body content
        """
        return (b"HTTP/%b %d %b\r\n"
                b"Connection: %b\r\n"
                b"%b"
                b"%b\r\n"
                b"%b") % (version.encode(), self.status, status_msg,
                          b"keep-alive" if keep_alive else b"close",
                          timeout_header, headers, body)
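
A hedged usage sketch for the HTTPResponse class above, assuming its module-level dependencies (CIMultiDict, STATUS_CODES, remove_entity_headers) are importable; it only exercises the status-200 path shown in output(), and the body text is illustrative.

resp = HTTPResponse(body="hello", status=200, content_type="text/plain")
raw = resp.output(version="1.1", keep_alive=False)
assert raw.startswith(b"HTTP/1.1 200 OK\r\nConnection: close\r\n")
assert raw.endswith(b"\r\n\r\nhello")  # blank line separates headers from the body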
Exemple #26
0
class ClientRequest:

    GET_METHODS = {hdrs.METH_GET, hdrs.METH_HEAD, hdrs.METH_OPTIONS}
    POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
    ALL_METHODS = GET_METHODS.union(POST_METHODS).union(
        {hdrs.METH_DELETE, hdrs.METH_TRACE})

    DEFAULT_HEADERS = {
        hdrs.ACCEPT: '*/*',
        hdrs.ACCEPT_ENCODING: 'gzip, deflate',
    }

    SERVER_SOFTWARE = HttpMessage.SERVER_SOFTWARE

    body = b''
    auth = None
    response = None
    response_class = None

    _writer = None  # async task for streaming data
    _continue = None  # waiter future for '100 Continue' response

    # N.B.
    # Adding a __del__ method that closes self._writer doesn't make sense,
    # because _writer wraps an instance method and thus keeps a reference to
    # self. Until the writer has finished, the finalizer will not be called.

    def __init__(self,
                 method,
                 url,
                 *,
                 params=None,
                 headers=None,
                 skip_auto_headers=frozenset(),
                 data=None,
                 cookies=None,
                 auth=None,
                 encoding='utf-8',
                 version=aiohttp.HttpVersion11,
                 compress=None,
                 chunked=None,
                 expect100=False,
                 loop=None,
                 response_class=None,
                 proxy=None,
                 proxy_auth=None,
                 timeout=5 * 60):

        if loop is None:
            loop = asyncio.get_event_loop()

        self.url = url
        self.method = method.upper()
        self.encoding = encoding
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.response_class = response_class or ClientResponse
        self._timeout = timeout

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        self.update_version(version)
        self.update_host(url)
        self.update_path(params)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth)

        self.update_body_from_data(data, skip_auto_headers)
        self.update_transfer_encoding()
        self.update_expect_continue(expect100)

    def update_host(self, url):
        """Update destination host, port and connection type (ssl)."""
        url_parsed = urllib.parse.urlsplit(url)

        # check for network location part
        netloc = url_parsed.netloc
        if not netloc:
            raise ValueError('Host could not be detected.')

        # get host/port
        host = url_parsed.hostname
        if not host:
            raise ValueError('Host could not be detected.')

        try:
            port = url_parsed.port
        except ValueError:
            raise ValueError('Port number could not be converted.') from None

        # check domain idna encoding
        try:
            host = host.encode('idna').decode('utf-8')
            netloc = self.make_netloc(host, url_parsed.port)
        except UnicodeError:
            raise ValueError('URL has an invalid label.')

        # basic auth info
        username, password = url_parsed.username, url_parsed.password
        if username:
            self.auth = helpers.BasicAuth(username, password or '')

        # Record entire netloc for usage in host header
        self.netloc = netloc

        scheme = url_parsed.scheme
        self.ssl = scheme in ('https', 'wss')

        # set port number if it isn't already set
        if not port:
            if self.ssl:
                port = HTTPS_PORT
            else:
                port = HTTP_PORT

        self.host, self.port, self.scheme = host, port, scheme

    def make_netloc(self, host, port):
        ret = host
        if port:
            ret = ret + ':' + str(port)
        return ret

    def update_version(self, version):
        """Convert request version to two elements tuple.

        parser HTTP version '1.1' => (1, 1)
        """
        if isinstance(version, str):
            v = [l.strip() for l in version.split('.', 1)]
            try:
                version = int(v[0]), int(v[1])
            except ValueError:
                raise ValueError(
                    'Can not parse http version number: {}'.format(
                        version)) from None
        self.version = version

    def update_path(self, params):
        """Build path."""
        # extract path
        scheme, netloc, path, query, fragment = urllib.parse.urlsplit(self.url)
        if not path:
            path = '/'

        if isinstance(params, collections.Mapping):
            params = list(params.items())

        if params:
            if not isinstance(params, str):
                params = urllib.parse.urlencode(params)
            if query:
                query = '%s&%s' % (query, params)
            else:
                query = params

        self.path = urllib.parse.urlunsplit(
            ('', '', helpers.requote_uri(path), query, ''))
        self.url = urllib.parse.urlunsplit(
            (scheme, netloc, self.path, '', fragment))

    def update_headers(self, headers):
        """Update request headers."""
        self.headers = CIMultiDict()
        if headers:
            if isinstance(headers, dict):
                headers = headers.items()
            elif isinstance(headers, (MultiDictProxy, MultiDict)):
                headers = headers.items()

            for key, value in headers:
                self.headers.add(key, value)

    def update_auto_headers(self, skip_auto_headers):
        self.skip_auto_headers = skip_auto_headers
        used_headers = set(self.headers) | skip_auto_headers

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers.add(hdr, val)

        # add host
        if hdrs.HOST not in used_headers:
            self.headers[hdrs.HOST] = self.netloc

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = self.SERVER_SOFTWARE

    def update_cookies(self, cookies):
        """Update request cookies header."""
        if not cookies:
            return

        c = http.cookies.SimpleCookie()
        if hdrs.COOKIE in self.headers:
            c.load(self.headers.get(hdrs.COOKIE, ''))
            del self.headers[hdrs.COOKIE]

        if isinstance(cookies, dict):
            cookies = cookies.items()

        for name, value in cookies:
            if isinstance(value, http.cookies.Morsel):
                c[value.key] = value.value
            else:
                c[name] = value

        self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()

    def update_content_encoding(self, data):
        """Set request content encoding."""
        if not data:
            return

        enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower()
        if enc:
            if self.compress is not False:
                self.compress = enc
                # enable chunked, no need to deal with length
                self.chunked = True
        elif self.compress:
            if not isinstance(self.compress, str):
                self.compress = 'deflate'
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length

    def update_auth(self, auth):
        """Set basic auth."""
        if auth is None:
            auth = self.auth
        if auth is None:
            return

        if not isinstance(auth, helpers.BasicAuth):
            raise TypeError('BasicAuth() tuple is required instead')

        self.headers[hdrs.AUTHORIZATION] = auth.encode()

    def update_body_from_data(self, data, skip_auto_headers):
        if not data:
            return

        if isinstance(data, str):
            data = data.encode(self.encoding)

        if isinstance(data, (bytes, bytearray)):
            self.body = data
            if (hdrs.CONTENT_TYPE not in self.headers
                    and hdrs.CONTENT_TYPE not in skip_auto_headers):
                self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream'
            if hdrs.CONTENT_LENGTH not in self.headers and not self.chunked:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

        elif isinstance(
                data,
            (asyncio.StreamReader, streams.StreamReader, streams.DataQueue)):
            self.body = data

        elif asyncio.iscoroutine(data):
            self.body = data
            if (hdrs.CONTENT_LENGTH not in self.headers
                    and self.chunked is None):
                self.chunked = True

        elif isinstance(data, io.IOBase):
            assert not isinstance(data, io.StringIO), \
                'attempt to send text data instead of binary'
            self.body = data
            if not self.chunked and isinstance(data, io.BytesIO):
                # Not chunking if content-length can be determined
                size = len(data.getbuffer())
                self.headers[hdrs.CONTENT_LENGTH] = str(size)
                self.chunked = False
            elif not self.chunked and isinstance(data, io.BufferedReader):
                # Not chunking if content-length can be determined
                try:
                    size = os.fstat(data.fileno()).st_size - data.tell()
                    self.headers[hdrs.CONTENT_LENGTH] = str(size)
                    self.chunked = False
                except OSError:
                    # data.fileno() is not supported, e.g.
                    # io.BufferedReader(io.BytesIO(b'data'))
                    self.chunked = True
            else:
                self.chunked = True

            if hasattr(data, 'mode'):
                if data.mode == 'r':
                    raise ValueError('file {!r} should be open in binary mode'
                                     ''.format(data))
            if (hdrs.CONTENT_TYPE not in self.headers
                    and hdrs.CONTENT_TYPE not in skip_auto_headers
                    and hasattr(data, 'name')):
                mime = mimetypes.guess_type(data.name)[0]
                mime = 'application/octet-stream' if mime is None else mime
                self.headers[hdrs.CONTENT_TYPE] = mime

        elif isinstance(data, MultipartWriter):
            self.body = data.serialize()
            self.headers.update(data.headers)
            self.chunked = self.chunked or 8192

        else:
            if not isinstance(data, helpers.FormData):
                data = helpers.FormData(data)

            self.body = data(self.encoding)

            if (hdrs.CONTENT_TYPE not in self.headers
                    and hdrs.CONTENT_TYPE not in skip_auto_headers):
                self.headers[hdrs.CONTENT_TYPE] = data.content_type

            if data.is_multipart:
                self.chunked = self.chunked or 8192
            else:
                if (hdrs.CONTENT_LENGTH not in self.headers
                        and not self.chunked):
                    self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_transfer_encoding(self):
        """Analyze transfer-encoding header."""
        te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower()

        if self.chunked:
            if hdrs.CONTENT_LENGTH in self.headers:
                del self.headers[hdrs.CONTENT_LENGTH]
            if 'chunked' not in te:
                self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'

            self.chunked = self.chunked if type(self.chunked) is int else 8192
        else:
            if 'chunked' in te:
                self.chunked = 8192
            else:
                self.chunked = None
                if hdrs.CONTENT_LENGTH not in self.headers:
                    self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_expect_continue(self, expect=False):
        if expect:
            self.headers[hdrs.EXPECT] = '100-continue'
        elif self.headers.get(hdrs.EXPECT, '').lower() == '100-continue':
            expect = True

        if expect:
            self._continue = helpers.create_future(self.loop)

    def update_proxy(self, proxy, proxy_auth):
        if proxy and not proxy.startswith('http://'):
            raise ValueError("Only http proxies are supported")
        if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
            raise ValueError("proxy_auth must be None or BasicAuth() tuple")
        self.proxy = proxy
        self.proxy_auth = proxy_auth

    @asyncio.coroutine
    def write_bytes(self, request, reader):
        """Support coroutines that yields bytes objects."""
        # 100 response
        if self._continue is not None:
            yield from self._continue

        try:
            if asyncio.iscoroutine(self.body):
                request.transport.set_tcp_nodelay(True)
                exc = None
                value = None
                stream = self.body

                while True:
                    try:
                        if exc is not None:
                            result = stream.throw(exc)
                        else:
                            result = stream.send(value)
                    except StopIteration as exc:
                        if isinstance(exc.value, bytes):
                            yield from request.write(exc.value, drain=True)
                        break
                    except:
                        self.response.close()
                        raise

                    if isinstance(result, asyncio.Future):
                        exc = None
                        value = None
                        try:
                            value = yield result
                        except Exception as err:
                            exc = err
                    elif isinstance(result, (bytes, bytearray)):
                        yield from request.write(result, drain=True)
                        value = None
                    else:
                        raise ValueError('Bytes object is expected, got: %s.' %
                                         type(result))

            elif isinstance(self.body,
                            (asyncio.StreamReader, streams.StreamReader)):
                request.transport.set_tcp_nodelay(True)
                chunk = yield from self.body.read(streams.DEFAULT_LIMIT)
                while chunk:
                    yield from request.write(chunk, drain=True)
                    chunk = yield from self.body.read(streams.DEFAULT_LIMIT)

            elif isinstance(self.body, streams.DataQueue):
                request.transport.set_tcp_nodelay(True)
                while True:
                    try:
                        chunk = yield from self.body.read()
                        if chunk is EOF_MARKER:
                            break
                        yield from request.write(chunk, drain=True)
                    except streams.EofStream:
                        break

            elif isinstance(self.body, io.IOBase):
                chunk = self.body.read(self.chunked)
                while chunk:
                    request.write(chunk)
                    chunk = self.body.read(self.chunked)
                request.transport.set_tcp_nodelay(True)

            else:
                if isinstance(self.body, (bytes, bytearray)):
                    self.body = (self.body, )

                for chunk in self.body:
                    request.write(chunk)
                request.transport.set_tcp_nodelay(True)

        except Exception as exc:
            new_exc = aiohttp.ClientRequestError(
                'Can not write request body for %s' % self.url)
            new_exc.__context__ = exc
            new_exc.__cause__ = exc
            reader.set_exception(new_exc)
        else:
            assert request.transport.tcp_nodelay
            try:
                ret = request.write_eof()
                # NB: in asyncio 3.4.1+ StreamWriter.drain() is coroutine
                # see bug #170
                if (asyncio.iscoroutine(ret)
                        or isinstance(ret, asyncio.Future)):
                    yield from ret
            except Exception as exc:
                new_exc = aiohttp.ClientRequestError(
                    'Can not write request body for %s' % self.url)
                new_exc.__context__ = exc
                new_exc.__cause__ = exc
                reader.set_exception(new_exc)

        self._writer = None

    def send(self, writer, reader):
        writer.set_tcp_cork(True)
        request = aiohttp.Request(writer, self.method, self.path, self.version)

        if self.compress:
            request.add_compression_filter(self.compress)

        if self.chunked is not None:
            request.enable_chunked_encoding()
            request.add_chunking_filter(self.chunked)

        # set default content-type
        if (self.method in self.POST_METHODS
                and hdrs.CONTENT_TYPE not in self.skip_auto_headers
                and hdrs.CONTENT_TYPE not in self.headers):
            self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream'

        for k, value in self.headers.items():
            request.add_header(k, value)
        request.send_headers()

        self._writer = helpers.ensure_future(self.write_bytes(request, reader),
                                             loop=self.loop)

        self.response = self.response_class(self.method,
                                            self.url,
                                            self.host,
                                            writer=self._writer,
                                            continue100=self._continue,
                                            timeout=self._timeout)
        self.response._post_init(self.loop)
        return self.response

    @asyncio.coroutine
    def close(self):
        if self._writer is not None:
            try:
                yield from self._writer
            finally:
                self._writer = None

    def terminate(self):
        if self._writer is not None:
            if not self.loop.is_closed():
                self._writer.cancel()
            self._writer = None
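
A standalone sketch of the version-string handling in update_version above; parse_http_version is a hypothetical helper that mirrors that logic outside the class.

def parse_http_version(version):
    # '1.1' -> (1, 1); tuples pass through unchanged, as in update_version.
    if isinstance(version, str):
        major, minor = (part.strip() for part in version.split('.', 1))
        try:
            return int(major), int(minor)
        except ValueError:
            raise ValueError(
                'Can not parse http version number: {}'.format(version)) from None
    return version

assert parse_http_version('1.1') == (1, 1)
assert parse_http_version((1, 0)) == (1, 0)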
Exemple #27
0
def make_mocked_request(method, path, headers=None, *,
                        match_info=sentinel,
                        version=HttpVersion(1, 1), closing=False,
                        app=None,
                        writer=sentinel,
                        protocol=sentinel,
                        transport=sentinel,
                        payload=sentinel,
                        sslcontext=None,
                        client_max_size=1024**2,
                        loop=...):
    """Creates mocked web.Request testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    """

    task = mock.Mock()
    if loop is ...:
        loop = mock.Mock()
        loop.create_future.return_value = ()

    if version < HttpVersion(1, 1):
        closing = True

    if headers:
        headers = CIMultiDict(headers)
        raw_hdrs = tuple(
            (k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items())
    else:
        headers = CIMultiDict()
        raw_hdrs = ()

    chunked = 'chunked' in headers.get(hdrs.TRANSFER_ENCODING, '').lower()

    message = RawRequestMessage(
        method, path, version, headers,
        raw_hdrs, closing, False, False, chunked, URL(path))
    if app is None:
        app = _create_app_mock()

    if protocol is sentinel:
        protocol = mock.Mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if writer is sentinel:
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport

    protocol.transport = transport
    protocol.writer = writer

    if payload is sentinel:
        payload = mock.Mock()

    req = Request(message, payload,
                  protocol, writer, task, loop,
                  client_max_size=client_max_size)

    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock())
    match_info.add_app(app)
    req._match_info = match_info

    return req
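
This variant also accepts match_info; a hedged usage sketch, again assuming the aiohttp.test_utils helper, with a purely illustrative route value.

from aiohttp.test_utils import make_mocked_request

# Simulate a request that the router would have matched against /users/{user_id}.
req = make_mocked_request('GET', '/users/42', match_info={'user_id': '42'})
assert req.match_info['user_id'] == '42'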
Exemple #28
0
class HttpMessage(ABC):
    """HttpMessage allows to write headers and payload to a stream.

    For example, let's say we want to read a file, compress it with deflate
    compression and then send it with chunked transfer encoding; the code may
    look like this:

       >>> response = aiohttp.Response(transport, 200)

    We have to use deflate compression first:

      >>> response.add_compression_filter('deflate')

    Then we want to split the output stream into chunks of 1024 bytes:

      >>> response.add_chunking_filter(1024)

    We can add headers to the response with the add_headers() method.
    add_headers() does not send data to the transport; send_headers() sends the
    request/response line and then sends the headers:

      >>> response.add_headers(
      ...     ('Content-Disposition', 'attachment; filename="..."'))
      >>> response.send_headers()

    Now we can use the chunked writer to write the stream to a network stream.
    The first call to the write() method sends the response status line and
    headers; add_header() and add_headers() are unavailable at this stage:

    >>> with open('...', 'rb') as f:
    ...     chunk = f.read(8192)
    ...     while chunk:
    ...         response.write(chunk)
    ...         chunk = f.read(8192)

    >>> response.write_eof()

    """

    writer = None

    # 'filter' is being used for altering write() behaviour,
    # add_chunking_filter splits incoming data into chunks and
    # add_compression_filter adds deflate/gzip compression.
    filter = None

    HOP_HEADERS = None  # Must be set by subclass.

    SERVER_SOFTWARE = 'Python/{0[0]}.{0[1]} aiohttp/{1}'.format(
        sys.version_info, aiohttp.__version__)

    upgrade = False  # Connection: UPGRADE
    websocket = False  # Upgrade: WEBSOCKET
    has_chunked_hdr = False  # Transfer-encoding: chunked

    # subclass can enable auto sending headers with write() call,
    # this is useful for wsgi's start_response implementation.
    _send_headers = False

    def __init__(self, transport, version, close):
        self.transport = transport
        self._version = version
        self.closing = close
        self.keepalive = None
        self.chunked = False
        self.length = None
        self.headers = CIMultiDict()
        self.headers_sent = False
        self.output_length = 0
        self.headers_length = 0
        self._output_size = 0
        self._cache = {}

    @property
    @abstractmethod
    def status_line(self):
        return b''

    @abstractmethod
    def autochunked(self):
        return False

    @property
    def version(self):
        return self._version

    @property
    def body_length(self):
        return self.output_length - self.headers_length

    def force_close(self):
        self.closing = True
        self.keepalive = False

    def enable_chunked_encoding(self):
        self.chunked = True

    def keep_alive(self):
        if self.keepalive is None:
            if self.version < HttpVersion10:
                # keep alive not supported at all
                return False
            if self.version == HttpVersion10:
                if self.headers.get(hdrs.CONNECTION) == 'keep-alive':
                    return True
                else:  # no keep-alive header means we close for HTTP/1.0
                    return False
            else:
                return not self.closing
        else:
            return self.keepalive

    def is_headers_sent(self):
        return self.headers_sent

    def add_header(self, name, value):
        """Analyze headers. Calculate content length,
        removes hop headers, etc."""
        assert not self.headers_sent, 'headers have been sent already'
        assert isinstance(name, str), \
            'Header name should be a string, got {!r}'.format(name)
        assert set(name).issubset(ASCIISET), \
            'Header name should contain ASCII chars, got {!r}'.format(name)
        assert isinstance(value, str), \
            'Header {!r} should have string value, got {!r}'.format(
                name, value)

        name = istr(name)
        value = value.strip()

        if name == hdrs.CONTENT_LENGTH:
            self.length = int(value)

        if name == hdrs.TRANSFER_ENCODING:
            self.has_chunked_hdr = value.lower().strip() == 'chunked'

        if name == hdrs.CONNECTION:
            val = value.lower()
            # handle websocket
            if 'upgrade' in val:
                self.upgrade = True
            # connection keep-alive
            elif 'close' in val:
                self.keepalive = False
            elif 'keep-alive' in val:
                self.keepalive = True

        elif name == hdrs.UPGRADE:
            if 'websocket' in value.lower():
                self.websocket = True
                self.headers[name] = value

        elif name not in self.HOP_HEADERS:
            # ignore hop-by-hop headers
            self.headers.add(name, value)

    def add_headers(self, *headers):
        """Adds headers to a HTTP message."""
        for name, value in headers:
            self.add_header(name, value)

    def send_headers(self, _sep=': ', _end='\r\n'):
        """Writes headers to a stream. Constructs payload writer."""
        # Chunked responses are only for HTTP/1.1 clients or newer,
        # and only when no Content-Length header is set.
        # Do not use chunked responses when the response is guaranteed
        # not to have a response body (304, 204).
        assert not self.headers_sent, 'headers have been sent already'
        self.headers_sent = True

        if self.chunked or self.autochunked():
            self.writer = self._write_chunked_payload()
            self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'

        elif self.length is not None:
            self.writer = self._write_length_payload(self.length)

        else:
            self.writer = self._write_eof_payload()

        next(self.writer)

        self._add_default_headers()

        # status + headers
        headers = self.status_line + ''.join(
            [k + _sep + v + _end for k, v in self.headers.items()])
        headers = headers.encode('utf-8') + b'\r\n'

        self.output_length += len(headers)
        self.headers_length = len(headers)
        self.transport.write(headers)

    def _add_default_headers(self):
        # set the connection header
        connection = None
        if self.upgrade:
            connection = 'Upgrade'
        elif not self.closing if self.keepalive is None else self.keepalive:
            if self.version == HttpVersion10:
                connection = 'keep-alive'
        else:
            if self.version == HttpVersion11:
                connection = 'close'

        if connection is not None:
            self.headers[hdrs.CONNECTION] = connection

    def write(self, chunk, *,
              drain=False, EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER):
        """Writes chunk of data to a stream by using different writers.

        writer uses filter to modify chunk of data.
        write_eof() indicates end of stream.
        writer can't be used after write_eof() method being called.
        write() return drain future.
        """
        assert (isinstance(chunk, (bytes, bytearray)) or
                chunk is EOF_MARKER), chunk

        size = self.output_length

        if self._send_headers and not self.headers_sent:
            self.send_headers()

        if self.filter:
            chunk = self.filter.send(chunk)
            while chunk not in (EOF_MARKER, EOL_MARKER):
                if chunk:
                    self.writer.send(chunk)
                chunk = next(self.filter)
        else:
            if chunk is not EOF_MARKER:
                self.writer.send(chunk)

        self._output_size += self.output_length - size

        if self._output_size > 64 * 1024:
            if drain:
                self._output_size = 0
                return self.transport.drain()

        return ()

    def write_eof(self):
        self.write(EOF_MARKER)
        try:
            self.writer.throw(aiohttp.EofStream())
        except StopIteration:
            pass

        return self.transport.drain()

    def _write_chunked_payload(self):
        """Write data in chunked transfer encoding."""
        while True:
            try:
                chunk = yield
            except aiohttp.EofStream:
                self.transport.write(b'0\r\n\r\n')
                self.output_length += 5
                break

            chunk = bytes(chunk)
            chunk_len = '{:x}\r\n'.format(len(chunk)).encode('ascii')
            self.transport.write(chunk_len + chunk + b'\r\n')
            self.output_length += len(chunk_len) + len(chunk) + 2

    def _write_length_payload(self, length):
        """Write specified number of bytes to a stream."""
        while True:
            try:
                chunk = yield
            except aiohttp.EofStream:
                break

            if length:
                l = len(chunk)
                if length >= l:
                    self.transport.write(chunk)
                    self.output_length += l
                    length = length-l
                else:
                    self.transport.write(chunk[:length])
                    self.output_length += length
                    length = 0

    def _write_eof_payload(self):
        while True:
            try:
                chunk = yield
            except aiohttp.EofStream:
                break

            self.transport.write(chunk)
            self.output_length += len(chunk)

    @wrap_payload_filter
    def add_chunking_filter(self, chunk_size=16*1024, *,
                            EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER):
        """Split incoming stream into chunks."""
        buf = bytearray()
        chunk = yield

        while True:
            if chunk is EOF_MARKER:
                if buf:
                    yield buf

                yield EOF_MARKER

            else:
                buf.extend(chunk)

                while len(buf) >= chunk_size:
                    chunk = bytes(buf[:chunk_size])
                    del buf[:chunk_size]
                    yield chunk

                chunk = yield EOL_MARKER

    @wrap_payload_filter
    def add_compression_filter(self, encoding='deflate', *,
                               EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER):
        """Compress incoming stream with deflate or gzip encoding."""
        zlib_mode = (16 + zlib.MAX_WBITS
                     if encoding == 'gzip' else -zlib.MAX_WBITS)
        zcomp = zlib.compressobj(wbits=zlib_mode)

        chunk = yield
        while True:
            if chunk is EOF_MARKER:
                yield zcomp.flush()
                chunk = yield EOF_MARKER

            else:
                yield zcomp.compress(chunk)
                chunk = yield EOL_MARKER
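
A small standalone illustration of the wbits selection used by add_compression_filter above: 16 + MAX_WBITS selects gzip framing, -MAX_WBITS selects raw deflate; make_compressor is a hypothetical helper.

import zlib

def make_compressor(encoding='deflate'):
    # Same mode selection as add_compression_filter above.
    zlib_mode = (16 + zlib.MAX_WBITS
                 if encoding == 'gzip' else -zlib.MAX_WBITS)
    return zlib.compressobj(wbits=zlib_mode)

comp = make_compressor('gzip')
data = comp.compress(b'payload') + comp.flush()
assert data[:2] == b'\x1f\x8b'  # gzip member header magic bytes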
Exemple #29
0
class Message:
    def __init__(self,
                 # from_uri,
                 # to_uri,
                 content_type=None,
                 headers=None,
                 payload=None):
        # self.from_uri = from_uri
        # self.to_uri = to_uri
        if headers:
            self.headers = headers
        else:
            self.headers = CIMultiDict()

        for direction in ('From', 'To', 'Contact'): # parse From, To, and Contact headers
            direction_attribute = '%s_details' % direction.lower()
            if direction in self.headers:
                if not hasattr(self, direction_attribute):
                    setattr(self,
                            direction_attribute,
                            Contact.from_header(self.headers[direction]))
            elif hasattr(self, direction_attribute):
                contact = getattr(self, direction_attribute)
                self.headers[direction] = str(contact)
            elif direction != 'Contact':
                raise ValueError('You must have a "%s" header or details.' % direction)

        if content_type:
            self.headers['Content-Type'] = content_type
        self.payload = payload

        # Build the message
        if 'Via' not in self.headers:
            self.headers['Via'] = 'SIP/2.0/%(protocol)s '+'%s:%s;branch=%s' % (self.contact_details['uri']['host'],
                                                                               self.contact_details['uri']['port'],
                                                                               utils.gen_branch(10))
        if 'Max-Forwards' not in self.headers:
            self.headers['Max-Forwards'] = '70'
        if 'Call-ID' not in self.headers:
            self.headers['Call-ID'] = uuid.uuid4()
        if 'User-Agent' not in self.headers:
            self.headers['User-Agent'] = 'Python/{0[0]}.{0[1]}.{0[2]} aiosip/{1}'.format(
                sys.version_info, aiosip.__version__)
        if 'Content-Length' not in self.headers:
            payload_len = len(self.payload.encode()) if self.payload else 0
            self.headers['Content-Length'] = payload_len

    @property
    def cseq(self):
        if not hasattr(self, '_cseq'):
            self._cseq = int(self.headers['CSeq'].split(' ')[0])
        return self._cseq

    @property
    def method(self):
        if not hasattr(self, '_method'):
            self._method = self.headers['CSeq'].split(' ')[1]
        return self._method

    def __str__(self):
        msg = []
        for k, v in sorted(self.headers.items()):
            if isinstance(v, (list, tuple)):
                msg.extend(['%s: %s' % (k, i) for i in v])
            else:
                msg.append('%s: %s' % (k, v))
        if self.payload:
            msg.append('%s%s' % (utils.EOL, self.payload))
        else:
            msg.append(utils.EOL)
        return utils.EOL.join(msg)

    def parsed_xml(self):
        if 'Content-Type' not in self.headers:
            return None
        if not self.headers['Content-Type'].endswith('+xml'):
            return None
        return PyQuery(self.payload).remove_namespaces()

    @classmethod
    def from_raw_message(cls, raw_message):
        lines = raw_message.split(utils.EOL)
        first_line = lines.pop(0)
        headers = CIMultiDict()
        payload = ''
        reading_headers = True
        for line in lines:
            if reading_headers:
                if ': ' in line:
                    k, v = line.split(': ', 1)
                    if k in headers:
                        o = headers.setdefault(k, [])
                        if not isinstance(o, list):
                            o = [o]
                        o.append(v)
                        headers[k] = o
                    else:
                        headers[k] = v
                else:  # Finish to parse headers
                    reading_headers = False
            else: # @todo: use content length to read payload
                payload += line  # reading payload
        if payload == '':
            payload = None

        m = FIRST_LINE_PATTERN['response']['regex'].match(first_line)
        if m:
            d = m.groupdict()
            return Response(status_code=int(d['status_code']),
                            status_message=d['status_message'],
                            headers=headers,
                            payload=payload)
        else:
            m = FIRST_LINE_PATTERN['request']['regex'].match(first_line)
            if m:
                d = m.groupdict()
                return Request(method=d['method'],
                               headers=headers,
                               payload=payload)
            else:
                raise ValueError('Not a SIP message')
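
A small standalone sketch of the repeated-header handling above: from_raw_message folds repeated header lines into a single list value, which __str__ later expands back into one line per value (the Via values here are illustrative).

from multidict import CIMultiDict

headers = CIMultiDict()
for k, v in [('Via', 'SIP/2.0/UDP a.example'), ('Via', 'SIP/2.0/UDP b.example')]:
    if k in headers:
        existing = headers[k]
        # Fold repeated headers into one list value, as from_raw_message does.
        headers[k] = (existing if isinstance(existing, list) else [existing]) + [v]
    else:
        headers[k] = v

assert headers['Via'] == ['SIP/2.0/UDP a.example', 'SIP/2.0/UDP b.example']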