Example 1
    async def start(self, connection, read_until_eof=False):
        """Start response processing."""
        self._closed = False
        self._protocol = connection.protocol
        self._connection = connection

        connection.protocol.set_response_params(
            timer=self._timer,
            skip_payload=self.method.lower() == 'head',
            skip_status_codes=(204, 304),
            read_until_eof=read_until_eof,
            auto_decompress=self._auto_decompress)

        with self._timer:
            while True:
                # read response
                try:
                    (message, payload) = await self._protocol.read()
                except http.HttpProcessingError as exc:
                    raise ClientResponseError(
                        self.request_info, self.history,
                        code=exc.code,
                        message=exc.message, headers=exc.headers) from exc

                if (message.code < 100 or
                        message.code > 199 or message.code == 101):
                    break

                if self._continue is not None:
                    set_result(self._continue, True)
                    self._continue = None

        # payload eof handler
        payload.on_eof(self._response_eof)

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason

        # headers
        self.headers = CIMultiDictProxy(message.headers)
        self.raw_headers = tuple(message.raw_headers)

        # payload
        self.content = payload

        # cookies
        for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
            try:
                self.cookies.load(hdr)
            except CookieError as exc:
                client_logger.warning(
                    'Can not load response cookies: %s', exc)
        return self
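This start() implementation freezes the parsed headers in a CIMultiDictProxy and then walks every Set-Cookie value via getall(). A minimal standalone sketch of that lookup pattern using the multidict package (the header values below are made up for illustration):

from multidict import CIMultiDict, CIMultiDictProxy

# Build a mutable multidict the way a parser would; add() keeps duplicate keys.
parsed = CIMultiDict()
parsed.add('Content-Type', 'text/html; charset=utf-8')
parsed.add('Set-Cookie', 'a=1; Path=/')
parsed.add('Set-Cookie', 'b=2; Path=/')

# Expose it read-only, as start() does with message.headers.
headers = CIMultiDictProxy(parsed)

assert headers['content-type'] == 'text/html; charset=utf-8'   # case-insensitive lookup
assert headers.getall('set-cookie') == ['a=1; Path=/', 'b=2; Path=/']
assert headers.getall('X-Missing', []) == []                    # default avoids KeyError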
Example 2
    def start(self, connection, read_until_eof=False):
        """Start response processing."""
        self._setup_connection(connection)

        while True:
            httpstream = self._reader.set_parser(self._response_parser)

            # read response
            with Timeout(self._timeout, loop=self._loop):
                message = yield from httpstream.read()
            if message.code != 100:
                break

            if self._continue is not None and not self._continue.done():
                self._continue.set_result(True)
                self._continue = None

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason
        self._should_close = message.should_close

        # headers
        self.headers = CIMultiDictProxy(message.headers)
        self.raw_headers = tuple(message.raw_headers)

        # payload
        rwb = self._need_parse_response_body()
        self._reader.set_parser(
            aiohttp.HttpPayloadParser(message,
                                      readall=read_until_eof,
                                      response_with_body=rwb),
            self.content)

        # cookies
        self.cookies = http.cookies.SimpleCookie()
        if hdrs.SET_COOKIE in self.headers:
            for hdr in self.headers.getall(hdrs.SET_COOKIE):
                try:
                    self.cookies.load(hdr)
                except http.cookies.CookieError as exc:
                    client_logger.warning(
                        'Can not load response cookies: %s', exc)
        return self
Example 3
class ClientResponse(HeadersMixin):

    # from the Status-Line of the response
    version = None  # HTTP-Version
    status = None   # Status-Code
    reason = None   # Reason-Phrase

    content = None  # Payload stream
    headers = None  # Response headers, CIMultiDictProxy
    raw_headers = None  # Response raw headers, a sequence of pairs

    _connection = None  # current connection
    flow_control_class = StreamReader  # reader flow control
    _reader = None     # input stream
    _source_traceback = None
    # set up by ClientRequest after ClientResponse object creation
    # the post-init stage avoids changing the ctor signature
    _loop = None
    _closed = True  # to allow __del__ for a not-properly-initialized response
    _session = None

    def __init__(self, method, url, *,
                 writer=None, continue100=None, timer=None,
                 request_info=None, auto_decompress=True):
        assert isinstance(url, URL)

        self.method = method
        self.headers = None
        self.cookies = SimpleCookie()

        self._url = url
        self._content = None
        self._writer = writer
        self._continue = continue100
        self._closed = True
        self._history = ()
        self._request_info = request_info
        self._timer = timer if timer is not None else TimerNoop()
        self._auto_decompress = auto_decompress
        self._cache = {}  # required for @reify method decorator

    @property
    def url(self):
        return self._url

    @property
    def url_obj(self):
        warnings.warn(
            "Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
        return self._url

    @property
    def host(self):
        return self._url.host

    @property
    def _headers(self):
        return self.headers

    @property
    def request_info(self):
        return self._request_info

    @reify
    def content_disposition(self):
        raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
        if raw is None:
            return None
        disposition_type, params = multipart.parse_content_disposition(raw)
        params = MappingProxyType(params)
        filename = multipart.content_disposition_filename(params)
        return ContentDisposition(disposition_type, params, filename)

    def _post_init(self, loop, session):
        self._loop = loop
        self._session = session  # store a reference to session #1985
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __del__(self, _warnings=warnings):
        if self._loop is None:
            return  # not started
        if self._closed:
            return

        if self._connection is not None:
            self._connection.release()
            self._cleanup_writer()

            if self._loop.get_debug():
                if PY_36:
                    kwargs = {'source': self}
                else:
                    kwargs = {}
                _warnings.warn("Unclosed response {!r}".format(self),
                               ResourceWarning,
                               **kwargs)
                context = {'client_response': self,
                           'message': 'Unclosed response'}
                if self._source_traceback:
                    context['source_traceback'] = self._source_traceback
                self._loop.call_exception_handler(context)

    def __repr__(self):
        out = io.StringIO()
        ascii_encodable_url = str(self.url)
        if self.reason:
            ascii_encodable_reason = self.reason.encode('ascii',
                                                        'backslashreplace') \
                .decode('ascii')
        else:
            ascii_encodable_reason = self.reason
        print('<ClientResponse({}) [{} {}]>'.format(
            ascii_encodable_url, self.status, ascii_encodable_reason),
            file=out)
        print(self.headers, file=out)
        return out.getvalue()

    @property
    def connection(self):
        return self._connection

    @property
    def history(self):
        """A sequence of of responses, if redirects occurred."""
        return self._history

    async def start(self, connection, read_until_eof=False):
        """Start response processing."""
        self._closed = False
        self._protocol = connection.protocol
        self._connection = connection

        connection.protocol.set_response_params(
            timer=self._timer,
            skip_payload=self.method.lower() == 'head',
            skip_status_codes=(204, 304),
            read_until_eof=read_until_eof,
            auto_decompress=self._auto_decompress)

        with self._timer:
            while True:
                # read response
                try:
                    (message, payload) = await self._protocol.read()
                except http.HttpProcessingError as exc:
                    raise ClientResponseError(
                        self.request_info, self.history,
                        code=exc.code,
                        message=exc.message, headers=exc.headers) from exc

                if (message.code < 100 or
                        message.code > 199 or message.code == 101):
                    break

                if self._continue is not None:
                    set_result(self._continue, True)
                    self._continue = None

        # payload eof handler
        payload.on_eof(self._response_eof)

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason

        # headers
        self.headers = CIMultiDictProxy(message.headers)
        self.raw_headers = tuple(message.raw_headers)

        # payload
        self.content = payload

        # cookies
        for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
            try:
                self.cookies.load(hdr)
            except CookieError as exc:
                client_logger.warning(
                    'Can not load response cookies: %s', exc)
        return self

    def _response_eof(self):
        if self._closed:
            return

        if self._connection is not None:
            # websocket, protocol could be None because
            # connection could be detached
            if (self._connection.protocol is not None and
                    self._connection.protocol.upgraded):
                return

            self._connection.release()
            self._connection = None

        self._closed = True
        self._cleanup_writer()

    @property
    def closed(self):
        return self._closed

    def close(self):
        if self._closed:
            return

        self._closed = True
        if self._loop is None or self._loop.is_closed():
            return

        if self._connection is not None:
            self._connection.close()
            self._connection = None
        self._cleanup_writer()
        self._notify_content()

    def release(self):
        if self._closed:
            return noop()

        self._closed = True
        if self._connection is not None:
            self._connection.release()
            self._connection = None

        self._cleanup_writer()
        self._notify_content()
        return noop()

    def raise_for_status(self):
        if 400 <= self.status:
            raise ClientResponseError(
                self.request_info,
                self.history,
                code=self.status,
                message=self.reason,
                headers=self.headers)

    def _cleanup_writer(self):
        if self._writer is not None:
            self._writer.cancel()
        self._writer = None
        self._session = None

    def _notify_content(self):
        content = self.content
        if content and content.exception() is None and not content.is_eof():
            content.set_exception(
                ClientConnectionError('Connection closed'))

    async def wait_for_close(self):
        if self._writer is not None:
            try:
                await self._writer
            finally:
                self._writer = None
        self.release()

    async def read(self):
        """Read response payload."""
        if self._content is None:
            try:
                self._content = await self.content.read()
            except Exception:
                self.close()
                raise

        return self._content

    def get_encoding(self):
        ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
        mimetype = helpers.parse_mimetype(ctype)

        encoding = mimetype.parameters.get('charset')
        if encoding:
            try:
                codecs.lookup(encoding)
            except LookupError:
                encoding = None
        if not encoding:
            if mimetype.type == 'application' and mimetype.subtype == 'json':
                # RFC 7159 states that the default encoding is UTF-8.
                encoding = 'utf-8'
            else:
                encoding = chardet.detect(self._content)['encoding']
        if not encoding:
            encoding = 'utf-8'

        return encoding

    async def text(self, encoding=None, errors='strict'):
        """Read response payload and decode."""
        if self._content is None:
            await self.read()

        if encoding is None:
            encoding = self.get_encoding()

        return self._content.decode(encoding, errors=errors)

    async def json(self, *, encoding=None, loads=json.loads,
                   content_type='application/json'):
        """Read and decodes JSON response."""
        if self._content is None:
            await self.read()

        if content_type:
            ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
            if content_type not in ctype:
                raise ContentTypeError(
                    self.request_info,
                    self.history,
                    message=('Attempt to decode JSON with '
                             'unexpected mimetype: %s' % ctype),
                    headers=self.headers)

        stripped = self._content.strip()
        if not stripped:
            return None

        if encoding is None:
            encoding = self.get_encoding()

        return loads(stripped.decode(encoding))

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # similar to _RequestContextManager, we do not need to check
        # for exceptions; the response object can close the connection
        # if its state is broken
        self.release()
Example 4
 async def send(self, request, **config):
     response = await super(AiohttpTestTransport, self).send(request, **config)
     if not isinstance(response.headers, CIMultiDictProxy):
         response.headers = CIMultiDictProxy(CIMultiDict(response.internal_response.headers))
         response.content_type = response.headers.get("content-type")
     return response
Example 5
 def headers(self):
     """A case-insensitive multidict proxy with all headers."""
     return CIMultiDictProxy(self._message.headers)
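Handing out a CIMultiDictProxy instead of the underlying CIMultiDict keeps callers from mutating the message's header storage. A short sketch of that contract (header names and values here are arbitrary):

from multidict import CIMultiDict, CIMultiDictProxy

storage = CIMultiDict(Host='example.com')
headers = CIMultiDictProxy(storage)      # what the property above returns

try:
    headers['Host'] = 'other.invalid'    # proxies are read-only
except TypeError:
    pass

mutable = CIMultiDict(headers)           # take a mutable copy instead
mutable['Host'] = 'other.example'
assert headers['Host'] == 'example.com'  # original storage is untouched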
Example 6
class ClientResponse(HeadersMixin):

    # from the Status-Line of the response
    version = None  # HTTP-Version
    status = None   # Status-Code
    reason = None   # Reason-Phrase

    content = None  # Payload stream
    headers = None  # Response headers, CIMultiDictProxy
    raw_headers = None  # Response raw headers, a sequence of pairs

    _connection = None  # current connection
    flow_control_class = FlowControlStreamReader  # reader flow control
    _reader = None     # input stream
    _source_traceback = None
    # set up by ClientRequest after ClientResponse object creation
    # the post-init stage avoids changing the ctor signature
    _loop = None
    _closed = True  # to allow __del__ for a not-properly-initialized response
    _session = None

    def __init__(self, method, url, *,
                 writer=None, continue100=None, timer=None,
                 request_info=None, auto_decompress=True):
        assert isinstance(url, URL)

        self.method = method
        self.headers = None
        self.cookies = SimpleCookie()

        self._url = url
        self._content = None
        self._writer = writer
        self._continue = continue100
        self._closed = True
        self._history = ()
        self._request_info = request_info
        self._timer = timer if timer is not None else TimerNoop()
        self._auto_decompress = auto_decompress

    @property
    def url(self):
        return self._url

    @property
    def url_obj(self):
        warnings.warn(
            "Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
        return self._url

    @property
    def host(self):
        return self._url.host

    @property
    def _headers(self):
        return self.headers

    @property
    def request_info(self):
        return self._request_info

    def _post_init(self, loop, session):
        self._loop = loop
        self._session = session  # store a reference to session #1985
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __del__(self, _warnings=warnings):
        if self._loop is None:
            return  # not started
        if self._closed:
            return

        if self._connection is not None:
            self._connection.release()
            self._cleanup_writer()

            # warn
            if __debug__:
                if self._loop.get_debug():
                    _warnings.warn("Unclosed response {!r}".format(self),
                                   ResourceWarning)
                    context = {'client_response': self,
                               'message': 'Unclosed response'}
                    if self._source_traceback:
                        context['source_traceback'] = self._source_traceback
                    self._loop.call_exception_handler(context)

    def __repr__(self):
        out = io.StringIO()
        ascii_encodable_url = str(self.url)
        if self.reason:
            ascii_encodable_reason = self.reason.encode('ascii',
                                                        'backslashreplace') \
                .decode('ascii')
        else:
            ascii_encodable_reason = self.reason
        print('<ClientResponse({}) [{} {}]>'.format(
            ascii_encodable_url, self.status, ascii_encodable_reason),
            file=out)
        print(self.headers, file=out)
        return out.getvalue()

    @property
    def connection(self):
        return self._connection

    @property
    def history(self):
        """A sequence of of responses, if redirects occurred."""
        return self._history

    @asyncio.coroutine
    def start(self, connection, read_until_eof=False):
        """Start response processing."""
        self._closed = False
        self._protocol = connection.protocol
        self._connection = connection

        connection.protocol.set_response_params(
            timer=self._timer,
            skip_payload=self.method.lower() == 'head',
            skip_status_codes=(204, 304),
            read_until_eof=read_until_eof,
            auto_decompress=self._auto_decompress)

        with self._timer:
            while True:
                # read response
                try:
                    (message, payload) = yield from self._protocol.read()
                except http.HttpProcessingError as exc:
                    raise ClientResponseError(
                        self.request_info, self.history,
                        code=exc.code,
                        message=exc.message, headers=exc.headers) from exc

                if (message.code < 100 or
                        message.code > 199 or message.code == 101):
                    break

                if self._continue is not None and not self._continue.done():
                    self._continue.set_result(True)
                    self._continue = None

        # payload eof handler
        payload.on_eof(self._response_eof)

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason

        # headers
        self.headers = CIMultiDictProxy(message.headers)
        self.raw_headers = tuple(message.raw_headers)

        # payload
        self.content = payload

        # cookies
        for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
            try:
                self.cookies.load(hdr)
            except CookieError as exc:
                client_logger.warning(
                    'Can not load response cookies: %s', exc)
        return self

    def _response_eof(self):
        if self._closed:
            return

        if self._connection is not None:
            # websocket, protocol could be None because
            # connection could be detached
            if (self._connection.protocol is not None and
                    self._connection.protocol.upgraded):
                return

            self._connection.release()
            self._connection = None

        self._closed = True
        self._cleanup_writer()

    @property
    def closed(self):
        return self._closed

    def close(self):
        if self._closed:
            return

        self._closed = True
        if self._loop is None or self._loop.is_closed():
            return

        if self._connection is not None:
            self._connection.close()
            self._connection = None
        self._cleanup_writer()
        self._notify_content()

    def release(self):
        if self._closed:
            return noop()

        self._closed = True
        if self._connection is not None:
            self._connection.release()
            self._connection = None

        self._cleanup_writer()
        self._notify_content()
        return noop()

    def raise_for_status(self):
        if 400 <= self.status:
            raise ClientResponseError(
                self.request_info,
                self.history,
                code=self.status,
                message=self.reason,
                headers=self.headers)

    def _cleanup_writer(self):
        if self._writer is not None and not self._writer.done():
            self._writer.cancel()
        self._writer = None
        self._session = None

    def _notify_content(self):
        content = self.content
        if content and content.exception() is None and not content.is_eof():
            content.set_exception(
                ClientConnectionError('Connection closed'))

    @asyncio.coroutine
    def wait_for_close(self):
        if self._writer is not None:
            try:
                yield from self._writer
            finally:
                self._writer = None
        self.release()

    @asyncio.coroutine
    def read(self):
        """Read response payload."""
        if self._content is None:
            try:
                self._content = yield from self.content.read()
            except BaseException:
                self.close()
                raise

        return self._content

    def _get_encoding(self):
        ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
        mtype, stype, _, params = helpers.parse_mimetype(ctype)

        encoding = params.get('charset')
        if not encoding:
            if mtype == 'application' and stype == 'json':
                # RFC 7159 states that the default encoding is UTF-8.
                encoding = 'utf-8'
            else:
                encoding = chardet.detect(self._content)['encoding']
        if not encoding:
            encoding = 'utf-8'

        return encoding

    @asyncio.coroutine
    def text(self, encoding=None, errors='strict'):
        """Read response payload and decode."""
        if self._content is None:
            yield from self.read()

        if encoding is None:
            encoding = self._get_encoding()

        return self._content.decode(encoding, errors=errors)

    @asyncio.coroutine
    def json(self, *, encoding=None, loads=json.loads,
             content_type='application/json'):
        """Read and decodes JSON response."""
        if self._content is None:
            yield from self.read()

        if content_type:
            ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
            if content_type not in ctype:
                raise ContentTypeError(
                    self.request_info,
                    self.history,
                    message=('Attempt to decode JSON with '
                             'unexpected mimetype: %s' % ctype),
                    headers=self.headers)

        stripped = self._content.strip()
        if not stripped:
            return None

        if encoding is None:
            encoding = self._get_encoding()

        return loads(stripped.decode(encoding))

    if PY_35:
        @asyncio.coroutine
        def __aenter__(self):
            return self

        @asyncio.coroutine
        def __aexit__(self, exc_type, exc_val, exc_tb):
            # similar to _RequestContextManager, we do not need to check
            # for exceptions; the response object can close the connection
            # if its state is broken
            self.release()
Example 7
def test_response_with_immutable_headers() -> None:
    resp = Response(text='text',
                    headers=CIMultiDictProxy(CIMultiDict({'Header': 'Value'})))
    assert resp.headers == {'Header': 'Value',
                            'Content-Type': 'text/plain; charset=utf-8'}
Example 8
class ClientResponse(HeadersMixin):

    # from the Status-Line of the response
    version = None  # HTTP-Version
    status = None   # Status-Code
    reason = None   # Reason-Phrase

    cookies = None  # Response cookies (Set-Cookie)
    content = None  # Payload stream
    headers = None  # Response headers, CIMultiDictProxy
    raw_headers = None  # Response raw headers, a sequence of pairs

    _connection = None  # current connection
    flow_control_class = FlowControlStreamReader  # reader flow control
    _reader = None     # input stream
    _response_parser = aiohttp.HttpResponseParser()
    _source_traceback = None
    # set up by ClientRequest after ClientResponse object creation
    # the post-init stage avoids changing the ctor signature
    _loop = None
    _closed = True  # to allow __del__ for a not-properly-initialized response

    def __init__(self, method, url, *, writer=None, continue100=None,
                 timeout=5*60):
        assert isinstance(url, URL)

        self.method = method
        self._url_obj = url
        self._content = None
        self._writer = writer
        self._continue = continue100
        self._closed = False
        self._should_close = True  # override by message.should_close later
        self._history = ()
        self._timeout = timeout

    @property
    def url_obj(self):
        return self._url_obj

    @property
    def url(self):
        return str(self._url_obj)

    @property
    def host(self):
        warnings.warn("Deprecated, use .url_obj.host",
                      DeprecationWarning,
                      stacklevel=2)
        return self._url_obj.host

    def _post_init(self, loop):
        self._loop = loop
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __del__(self, _warnings=warnings):
        if self._loop is None:
            return  # not started
        if self._closed:
            return
        self.close()

        _warnings.warn("Unclosed response {!r}".format(self),
                       ResourceWarning)
        context = {'client_response': self,
                   'message': 'Unclosed response'}
        if self._source_traceback:
            context['source_traceback'] = self._source_traceback
        self._loop.call_exception_handler(context)

    def __repr__(self):
        out = io.StringIO()
        ascii_encodable_url = str(self.url)
        if self.reason:
            ascii_encodable_reason = self.reason.encode('ascii',
                                                        'backslashreplace') \
                .decode('ascii')
        else:
            ascii_encodable_reason = self.reason
        print('<ClientResponse({}) [{} {}]>'.format(
            ascii_encodable_url, self.status, ascii_encodable_reason),
            file=out)
        print(self.headers, file=out)
        return out.getvalue()

    @property
    def connection(self):
        return self._connection

    @property
    def history(self):
        """A sequence of of responses, if redirects occurred."""
        return self._history

    def waiting_for_continue(self):
        return self._continue is not None

    def _setup_connection(self, connection):
        self._reader = connection.reader
        self._connection = connection
        self.content = self.flow_control_class(
            connection.reader, loop=connection.loop, timeout=self._timeout)

    def _need_parse_response_body(self):
        return (self.method.lower() != 'head' and
                self.status not in [204, 304])

    @asyncio.coroutine
    def start(self, connection, read_until_eof=False):
        """Start response processing."""
        self._setup_connection(connection)

        while True:
            httpstream = self._reader.set_parser(self._response_parser)

            # read response
            with Timeout(self._timeout, loop=self._loop):
                message = yield from httpstream.read()
            if message.code != 100:
                break

            if self._continue is not None and not self._continue.done():
                self._continue.set_result(True)
                self._continue = None

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason
        self._should_close = message.should_close

        # headers
        self.headers = CIMultiDictProxy(message.headers)
        self.raw_headers = tuple(message.raw_headers)

        # payload
        rwb = self._need_parse_response_body()
        self._reader.set_parser(
            aiohttp.HttpPayloadParser(message,
                                      readall=read_until_eof,
                                      response_with_body=rwb),
            self.content)

        # cookies
        self.cookies = http.cookies.SimpleCookie()
        if hdrs.SET_COOKIE in self.headers:
            for hdr in self.headers.getall(hdrs.SET_COOKIE):
                try:
                    self.cookies.load(hdr)
                except http.cookies.CookieError as exc:
                    client_logger.warning(
                        'Can not load response cookies: %s', exc)
        return self

    def close(self):
        if self._closed:
            return

        self._closed = True

        if self._loop is None or self._loop.is_closed():
            return

        if self._connection is not None:
            self._connection.close()
            self._connection = None
        self._cleanup_writer()
        self._notify_content()

    @asyncio.coroutine
    def release(self):
        if self._closed:
            return
        try:
            content = self.content
            if content is not None and not content.at_eof():
                chunk = yield from content.readany()
                while chunk is not EOF_MARKER or chunk:
                    chunk = yield from content.readany()
        except Exception:
            self._connection.close()
            self._connection = None
            raise
        finally:
            self._closed = True
            if self._connection is not None:
                self._connection.release()
                if self._reader is not None:
                    self._reader.unset_parser()
                self._connection = None
            self._cleanup_writer()
            self._notify_content()

    def raise_for_status(self):
        if 400 <= self.status:
            raise aiohttp.HttpProcessingError(
                code=self.status,
                message=self.reason)

    def _cleanup_writer(self):
        if self._writer is not None and not self._writer.done():
            self._writer.cancel()
        self._writer = None

    def _notify_content(self):
        content = self.content
        if content and content.exception() is None and not content.is_eof():
            content.set_exception(
                aiohttp.ClientDisconnectedError('Connection closed'))

    @asyncio.coroutine
    def wait_for_close(self):
        if self._writer is not None:
            try:
                yield from self._writer
            finally:
                self._writer = None
        yield from self.release()

    @asyncio.coroutine
    def read(self):
        """Read response payload."""
        if self._content is None:
            try:
                self._content = yield from self.content.read()
            except BaseException:
                self.close()
                raise
            else:
                yield from self.release()

        return self._content

    def _get_encoding(self):
        ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
        mtype, stype, _, params = helpers.parse_mimetype(ctype)

        encoding = params.get('charset')
        if not encoding:
            if mtype == 'application' and stype == 'json':
                # RFC 7159 states that the default encoding is UTF-8.
                encoding = 'utf-8'
            else:
                encoding = chardet.detect(self._content)['encoding']
        if not encoding:
            encoding = 'utf-8'

        return encoding

    @asyncio.coroutine
    def text(self, encoding=None):
        """Read response payload and decode."""
        if self._content is None:
            yield from self.read()

        if encoding is None:
            encoding = self._get_encoding()

        return self._content.decode(encoding)

    @asyncio.coroutine
    def json(self, *, encoding=None, loads=json.loads):
        """Read and decodes JSON response."""
        if self._content is None:
            yield from self.read()

        ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
        if 'json' not in ctype:
            client_logger.warning(
                'Attempt to decode JSON with unexpected mimetype: %s', ctype)

        stripped = self._content.strip()
        if not stripped:
            return None

        if encoding is None:
            encoding = self._get_encoding()

        return loads(stripped.decode(encoding))

    if PY_35:
        @asyncio.coroutine
        def __aenter__(self):
            return self

        @asyncio.coroutine
        def __aexit__(self, exc_type, exc_val, exc_tb):
            if exc_type is None:
                yield from self.release()
            else:
                self.close()
Example 9
 def request_info(self) -> RequestInfo:
     headers = CIMultiDictProxy(self.headers)  # type: CIMultiDictProxy[str]
     return RequestInfo(self.url, self.method, headers, self.original_url)
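Note that CIMultiDictProxy is a live read-only view over the wrapped CIMultiDict, not a snapshot: later changes to the underlying dict stay visible through the proxy, while the proxy itself rejects writes. A quick illustration (header names are arbitrary):

from multidict import CIMultiDict, CIMultiDictProxy

request_headers = CIMultiDict({'User-Agent': 'demo/1.0'})
view = CIMultiDictProxy(request_headers)

request_headers['Accept'] = 'application/json'  # mutate the underlying dict
assert view['accept'] == 'application/json'     # the proxy sees the change

try:
    view['Accept'] = 'text/html'                # but cannot be written through
except TypeError:
    pass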
Example 10
    def _build_response(self,
                        url: 'Union[URL, str]',
                        method: str = hdrs.METH_GET,
                        request_headers: Dict = None,
                        status: int = 200,
                        body: str = '',
                        content_type: str = 'application/json',
                        payload: Dict = None,
                        headers: Dict = None,
                        response_class: 'ClientResponse' = None,
                        reason: Optional[str] = None) -> ClientResponse:
        if response_class is None:
            response_class = ClientResponse
        if payload is not None:
            body = json.dumps(payload)
        if not isinstance(body, bytes):
            body = str.encode(body)
        if request_headers is None:
            request_headers = {}
        kwargs = {}
        if AIOHTTP_VERSION >= StrictVersion('3.1.0'):
            loop = Mock()
            loop.get_debug = Mock()
            loop.get_debug.return_value = True
            kwargs['request_info'] = Mock(
                url=url,
                method=method,
                headers=CIMultiDictProxy(CIMultiDict(**request_headers)),
            )
            kwargs['writer'] = Mock()
            kwargs['continue100'] = None
            kwargs['timer'] = TimerNoop()
            if AIOHTTP_VERSION < StrictVersion('3.3.0'):
                kwargs['auto_decompress'] = True
            kwargs['traces'] = []
            kwargs['loop'] = loop
            kwargs['session'] = None
        else:
            loop = None
        # We need to initialize headers manually
        _headers = CIMultiDict({hdrs.CONTENT_TYPE: content_type})
        if headers:
            _headers.update(headers)
        raw_headers = self._build_raw_headers(_headers)
        resp = response_class(method, url, **kwargs)

        for hdr in _headers.getall(hdrs.SET_COOKIE, ()):
            resp.cookies.load(hdr)

        if AIOHTTP_VERSION >= StrictVersion('3.3.0'):
            # Reified attributes
            resp._headers = _headers
            resp._raw_headers = raw_headers
        else:
            resp.headers = _headers
            resp.raw_headers = raw_headers
        resp.status = status
        resp.reason = reason
        resp.content = stream_reader_factory(loop)
        resp.content.feed_data(body)
        resp.content.feed_eof()
        return resp
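The _build_raw_headers helper is not shown in this example. aiohttp's ClientResponse.raw_headers is a tuple of (bytes, bytes) pairs, so a plausible sketch of such a helper could look like the following; this is a hypothetical illustration, not the library's or this project's actual code:

from multidict import CIMultiDict

def build_raw_headers(headers: CIMultiDict) -> tuple:
    # Hypothetical helper: encode each header pair the way aiohttp
    # exposes them in ClientResponse.raw_headers.
    return tuple(
        (name.encode('utf-8'), value.encode('utf-8'))
        for name, value in headers.items()
    )

raw = build_raw_headers(CIMultiDict({'Content-Type': 'application/json'}))
# e.g. ((b'Content-Type', b'application/json'),)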
Example 11
    def parse_headers(
            self,
            lines: List[bytes]) -> Tuple['CIMultiDictProxy[str]', RawHeaders]:
        headers = CIMultiDict()  # type: CIMultiDict[str]
        raw_headers = []

        lines_idx = 1
        line = lines[1]
        line_count = len(lines)

        while line:
            # Parse initial header name : value pair.
            try:
                bname, bvalue = line.split(b':', 1)
            except ValueError:
                raise InvalidHeader(line) from None

            bname = bname.strip(b' \t')
            bvalue = bvalue.lstrip()
            if HDRRE.search(bname):
                raise InvalidHeader(bname)
            if len(bname) > self.max_field_size:
                raise LineTooLong(
                    "request header name {}".format(
                        bname.decode("utf8", "xmlcharrefreplace")),
                    str(self.max_field_size), str(len(bname)))

            header_length = len(bvalue)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines
            continuation = line and line[0] in (32, 9)  # (' ', '\t')

            if continuation:
                bvalue_lst = [bvalue]
                while continuation:
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise LineTooLong(
                            'request header field {}'.format(
                                bname.decode("utf8", "xmlcharrefreplace")),
                            str(self.max_field_size), str(header_length))
                    bvalue_lst.append(line)

                    # next line
                    lines_idx += 1
                    if lines_idx < line_count:
                        line = lines[lines_idx]
                        if line:
                            continuation = line[0] in (32, 9)  # (' ', '\t')
                    else:
                        line = b''
                        break
                bvalue = b''.join(bvalue_lst)
            else:
                if header_length > self.max_field_size:
                    raise LineTooLong(
                        'request header field {}'.format(
                            bname.decode("utf8", "xmlcharrefreplace")),
                        str(self.max_field_size), str(header_length))

            bvalue = bvalue.strip()
            name = bname.decode('utf-8', 'surrogateescape')
            value = bvalue.decode('utf-8', 'surrogateescape')

            headers.add(name, value)
            raw_headers.append((bname, bvalue))

        return (CIMultiDictProxy(headers), tuple(raw_headers))
Example 12
 async def headers(self) -> MultiMapping[str]:
     return CIMultiDictProxy(CIMultiDict(self._headers))
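Unlike the property in Example 5, this variant copies self._headers into a fresh CIMultiDict before freezing it, so the returned proxy is a detached snapshot rather than a live view. A small sketch of the difference (values are arbitrary):

from multidict import CIMultiDict, CIMultiDictProxy

source = CIMultiDict(a='1')
snapshot = CIMultiDictProxy(CIMultiDict(source))  # copy, then freeze

source['a'] = '2'
assert snapshot['a'] == '1'   # the snapshot is detached from later changes

live = CIMultiDictProxy(source)
assert live['a'] == '2'       # a plain proxy would have tracked them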
Example 13
async def execute_event(
    app_config: AppConfig,
    event_name: str,
    payload: Optional[EventPayload],
    mocks: Optional[List[Callable[[ModuleType, EventContext], None]]] = None,
    *,
    fields: Optional[Dict[str, str]] = None,
    upload: Optional[Dict[str, bytes]] = None,
    preprocess: bool = False,
    postprocess: bool = False,
    context: Optional[EventContext] = None,
    **kwargs
) -> Union[Optional[EventPayload], List[EventPayload], Tuple[
        Optional[EventPayload], EventPayload, PostprocessHook], Tuple[
            List[EventPayload], EventPayload, PostprocessHook]]:
    """
    Test executes an app event.

    Notice that the event implementation file needs to be saved to disk, since this simulates
    execution the way the engine actually executes events. Writing to a stream will be ignored.

    :param app_config: AppConfig, load using `app_config = config('path/to/app-config.json')`
    :param event_name: str, name of the event / module to execute
    :param payload: test payload to send to initial step
    :param mocks: lists of functions to execute in order to mock functionality
    :param postprocess: enables testing __postprocess__ called with last step result or
        result before a SHUFFLE step if present.
    :param context: EventContext, optional EventContext to use when calling event. If not provided
        a default context will be created.
    :param kwargs: that will be forwarded to the initial step of the event
    :return: the results of executing the event; for simple events this is a single object,
        for events with an initial Spawn[...] the results are collected as a list.
        If postprocess is true, a tuple of 3 elements is returned: the first element is the results
        as described above, the second is the output of the call to __postprocess__, and the third
        is a PostprocessHook with the response information used during the call to __postprocess__
    """
    async def _postprocess(hook: PostprocessHook,
                           results: List[EventPayload]) -> EventPayload:
        assert context is not None
        pp_payload = results[-1] if len(results) > 0 else None
        return await handler.postprocess(context=context,
                                         payload=pp_payload,
                                         response=hook)

    async def _preprocess(hook: PreprocessHook,
                          payload: EventPayload) -> EventPayload:
        assert context is not None
        return await handler.preprocess(context=context,
                                        query_args=kwargs,
                                        payload=payload,
                                        request=hook)

    if context is None:
        context = create_test_context(app_config, event_name)

    event_info = app_config.events[event_name]
    impl = find_event_handler(app_config=app_config,
                              event_name=event_name,
                              event_info=event_info)
    effective_events = {
        **split_event_stages(app_config.app, event_name, event_info, impl)
    }
    handler = EventHandler(
        app_config=app_config,
        plugins=[],
        effective_events=effective_events,
        settings=app_config.effective_settings  # type: ignore
    )

    preprocess_hook, postprocess_hook = None, None
    if preprocess:
        preprocess_hook = PreprocessHook(
            headers=CIMultiDictProxy(CIMultiDict()),
            multipart_reader=MockMultipartReader(fields or {}, upload
                                                 or {}),  # type: ignore
            file_hook_factory=MockFileHook,
            payload_raw=b''
            if payload is None else Payload.to_json(payload).encode())
    if postprocess:
        postprocess_hook = PostprocessHook()
    if mocks is not None:
        _apply_mocks(context, handler, event_name, effective_events,
                     preprocess_hook, postprocess_hook, mocks)

    datatype = find_datatype_handler(app_config=app_config,
                                     event_name=event_name,
                                     event_info=event_info)
    if preprocess_hook:
        payload = await _preprocess(preprocess_hook, payload)
        if postprocess_hook and preprocess_hook.status is not None:
            postprocess_hook.set_status(preprocess_hook.status)
    elif datatype is None:
        if payload is not None:
            return (payload, payload,
                    postprocess_hook) if postprocess else payload
    elif not (datatype is DataObject or isinstance(payload, datatype)):
        return (payload, payload, postprocess_hook) if postprocess else payload

    on_queue, pp_result, pp_called = [payload], None, False
    for effective_event_name, event_info in effective_events.items():
        context = create_test_context(app_config,
                                      effective_event_name,
                                      track_ids=context.track_ids,
                                      auth_info=context.auth_info)
        stage_results = []
        for elem in on_queue:
            async for res in handler.handle_async_event(context=context,
                                                        query_args=kwargs,
                                                        payload=elem):
                stage_results.append(res)
        on_queue = stage_results if len(stage_results) > 0 else on_queue
        if postprocess_hook and not pp_called:
            pp_called = True
            pp_result = await _postprocess(postprocess_hook, on_queue)
        kwargs = {}

    if postprocess:
        if len(on_queue) == 0:
            return None, pp_result, postprocess_hook
        if len(on_queue) == 1:
            return on_queue[0], pp_result, postprocess_hook
        return list(on_queue), pp_result, postprocess_hook

    if len(on_queue) == 0:
        return None
    if len(on_queue) == 1:
        return on_queue[0]
    return list(on_queue)
Example 14
class ClientResponse(HeadersMixin):

    # from the Status-Line of the response
    version = None  # HTTP-Version
    status = None  # Status-Code
    reason = None  # Reason-Phrase

    content = None  # Payload stream
    headers = None  # Response headers, CIMultiDictProxy
    raw_headers = None  # Response raw headers, a sequence of pairs

    _connection = None  # current connection
    _reader = None  # input stream
    _source_traceback = None
    # set up by ClientRequest after ClientResponse object creation
    # the post-init stage avoids changing the ctor signature
    _closed = True  # to allow __del__ for a not-properly-initialized response
    _released = False

    def __init__(self, method, url, *, writer, continue100, timer,
                 request_info, auto_decompress, traces, loop, session):
        assert isinstance(url, URL)

        self.method = method
        self.headers = None
        self.cookies = SimpleCookie()

        self._real_url = url
        self._url = url.with_fragment(None)
        self._body = None
        self._writer = writer
        self._continue = continue100  # None by default
        self._closed = True
        self._history = ()
        self._request_info = request_info
        self._timer = timer if timer is not None else TimerNoop()
        self._auto_decompress = auto_decompress  # True by default
        self._cache = {}  # required for @reify method decorator
        self._traces = traces
        self._loop = loop
        self._session = session  # store a reference to session #1985
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    @property
    def url(self):
        return self._url

    @property
    def url_obj(self):
        warnings.warn("Deprecated, use .url #1654",
                      DeprecationWarning,
                      stacklevel=2)
        return self._url

    @property
    def real_url(self):
        return self._real_url

    @property
    def host(self):
        return self._url.host

    @property
    def _headers(self):
        return self.headers

    @property
    def request_info(self):
        return self._request_info

    @reify
    def content_disposition(self):
        raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
        if raw is None:
            return None
        disposition_type, params = multipart.parse_content_disposition(raw)
        params = MappingProxyType(params)
        filename = multipart.content_disposition_filename(params)
        return ContentDisposition(disposition_type, params, filename)

    def __del__(self, _warnings=warnings):
        if self._closed:
            return

        if self._connection is not None:
            self._connection.release()
            self._cleanup_writer()

            if self._loop.get_debug():
                if PY_36:
                    kwargs = {'source': self}
                else:
                    kwargs = {}
                _warnings.warn("Unclosed response {!r}".format(self),
                               ResourceWarning, **kwargs)
                context = {
                    'client_response': self,
                    'message': 'Unclosed response'
                }
                if self._source_traceback:
                    context['source_traceback'] = self._source_traceback
                self._loop.call_exception_handler(context)

    def __repr__(self):
        out = io.StringIO()
        ascii_encodable_url = str(self.url)
        if self.reason:
            ascii_encodable_reason = self.reason.encode('ascii',
                                                        'backslashreplace') \
                .decode('ascii')
        else:
            ascii_encodable_reason = self.reason
        print('<ClientResponse({}) [{} {}]>'.format(ascii_encodable_url,
                                                    self.status,
                                                    ascii_encodable_reason),
              file=out)
        print(self.headers, file=out)
        return out.getvalue()

    @property
    def connection(self):
        return self._connection

    @property
    def history(self):
        """A sequence of of responses, if redirects occurred."""
        return self._history

    @property
    def links(self):
        links_str = ", ".join(self.headers.getall("link", []))

        links = MultiDict()

        if not links_str:
            return MultiDictProxy(links)

        for val in re.split(r",(?=\s*<)", links_str):
            url, params = re.match(r"\s*<(.*)>(.*)", val).groups()
            params = params.split(";")[1:]

            link = MultiDict()

            for param in params:
                key, _, value, _ = re.match(
                    r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param,
                    re.M).groups()

                link.add(key, value)

            key = link.get("rel", url)

            link.add("url", self.url.join(URL(url)))

            links.add(key, MultiDictProxy(link))

        return MultiDictProxy(links)

    async def start(self, connection, read_until_eof=False):
        """Start response processing."""
        self._closed = False
        self._protocol = connection.protocol
        self._connection = connection

        connection.protocol.set_response_params(
            timer=self._timer,
            skip_payload=self.method.lower() == 'head',
            read_until_eof=read_until_eof,
            auto_decompress=self._auto_decompress)

        with self._timer:
            while True:
                # read response
                try:
                    (message, payload) = await self._protocol.read()
                except http.HttpProcessingError as exc:
                    raise ClientResponseError(self.request_info,
                                              self.history,
                                              status=exc.code,
                                              message=exc.message,
                                              headers=exc.headers) from exc

                if (message.code < 100 or message.code > 199
                        or message.code == 101):
                    break

                if self._continue is not None:
                    set_result(self._continue, True)
                    self._continue = None

        # payload eof handler
        payload.on_eof(self._response_eof)

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason

        # headers
        self.headers = CIMultiDictProxy(message.headers)
        self.raw_headers = tuple(message.raw_headers)

        # payload
        self.content = payload

        # cookies
        for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
            try:
                self.cookies.load(hdr)
            except CookieError as exc:
                client_logger.warning('Can not load response cookies: %s', exc)
        return self

    def _response_eof(self):
        if self._closed:
            return

        if self._connection is not None:
            # websocket, protocol could be None because
            # connection could be detached
            if (self._connection.protocol is not None
                    and self._connection.protocol.upgraded):
                return

            self._connection.release()
            self._connection = None

        self._closed = True
        self._cleanup_writer()

    @property
    def closed(self):
        return self._closed

    def close(self):
        if not self._released:
            self._notify_content()
        if self._closed:
            return

        self._closed = True
        if self._loop is None or self._loop.is_closed():
            return

        if self._connection is not None:
            self._connection.close()
            self._connection = None
        self._cleanup_writer()

    def release(self):
        if not self._released:
            self._notify_content()
        if self._closed:
            return noop()

        self._closed = True
        if self._connection is not None:
            self._connection.release()
            self._connection = None

        self._cleanup_writer()
        return noop()

    def raise_for_status(self):
        if 400 <= self.status:
            raise ClientResponseError(self.request_info,
                                      self.history,
                                      status=self.status,
                                      message=self.reason,
                                      headers=self.headers)

    def _cleanup_writer(self):
        if self._writer is not None:
            self._writer.cancel()
        self._writer = None
        self._session = None

    def _notify_content(self):
        content = self.content
        if content and content.exception() is None:
            content.set_exception(ClientConnectionError('Connection closed'))
        self._released = True

    async def wait_for_close(self):
        if self._writer is not None:
            try:
                await self._writer
            finally:
                self._writer = None
        self.release()

    async def read(self):
        """Read response payload."""
        if self._body is None:
            try:
                self._body = await self.content.read()
                for trace in self._traces:
                    await trace.send_response_chunk_received(self._body)
            except BaseException:
                self.close()
                raise
        elif self._released:
            raise ClientConnectionError('Connection closed')

        return self._body

    def get_encoding(self):
        ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
        mimetype = helpers.parse_mimetype(ctype)

        encoding = mimetype.parameters.get('charset')
        if encoding:
            try:
                codecs.lookup(encoding)
            except LookupError:
                encoding = None
        if not encoding:
            if mimetype.type == 'application' and mimetype.subtype == 'json':
                # RFC 7159 states that the default encoding is UTF-8.
                encoding = 'utf-8'
            else:
                encoding = chardet.detect(self._body)['encoding']
        if not encoding:
            encoding = 'utf-8'

        return encoding

    async def text(self, encoding=None, errors='strict'):
        """Read response payload and decode."""
        if self._body is None:
            await self.read()

        if encoding is None:
            encoding = self.get_encoding()

        return self._body.decode(encoding, errors=errors)

    async def json(self,
                   *,
                   encoding=None,
                   loads=json.loads,
                   content_type='application/json'):
        """Read and decodes JSON response."""
        if self._body is None:
            await self.read()

        if content_type:
            ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
            if not _is_expected_content_type(ctype, content_type):
                raise ContentTypeError(self.request_info,
                                       self.history,
                                       message=('Attempt to decode JSON with '
                                                'unexpected mimetype: %s' %
                                                ctype),
                                       headers=self.headers)

        stripped = self._body.strip()
        if not stripped:
            return None

        if encoding is None:
            encoding = self.get_encoding()

        return loads(stripped.decode(encoding))

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # similar to _RequestContextManager, we do not need to check
        # for exceptions: the response object can close the connection
        # itself if its state is broken
        self.release()
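
A minimal usage sketch for the ClientResponse above, assuming a plain
aiohttp.ClientSession and a purely illustrative URL: the async context
manager releases the connection on exit, raise_for_status() raises
ClientResponseError for 4xx/5xx, and json() enforces the expected
content type unless content_type is overridden.

import asyncio
import aiohttp

async def fetch_json(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:   # __aexit__ calls release()
            resp.raise_for_status()            # ClientResponseError on 4xx/5xx
            # content_type=None skips the strict application/json check
            return await resp.json(content_type=None)

# asyncio.run(fetch_json('https://example.com/api'))  # illustrative URL
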
Exemplo n.º 15
0
    def parse_headers(self, lines):
        """Parses RFC 5322 headers from a stream.

        Line continuations are supported. Returns list of header name
        and value pairs. Header name is in upper case.
        """
        headers = CIMultiDict()
        raw_headers = []

        lines_idx = 1
        line = lines[1]
        line_count = len(lines)

        while line:
            # Parse initial header name : value pair.
            try:
                bname, bvalue = line.split(b':', 1)
            except ValueError:
                raise InvalidHeader(line) from None

            bname = bname.strip(b' \t')
            bvalue = bvalue.lstrip()
            if HDRRE.search(bname):
                raise InvalidHeader(bname)
            if len(bname) > self.max_field_size:
                raise LineTooLong(
                    "request header name {}".format(
                        bname.decode("utf8", "xmlcharrefreplace")),
                    self.max_field_size, len(bname))

            header_length = len(bvalue)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines
            continuation = line and line[0] in (32, 9)  # (' ', '\t')

            if continuation:
                bvalue = [bvalue]
                while continuation:
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise LineTooLong(
                            'request header field {}'.format(
                                bname.decode("utf8", "xmlcharrefreplace")),
                            self.max_field_size, header_length)
                    bvalue.append(line)

                    # next line
                    lines_idx += 1
                    if lines_idx < line_count:
                        line = lines[lines_idx]
                        if line:
                            continuation = line[0] in (32, 9)  # (' ', '\t')
                    else:
                        line = b''
                        break
                bvalue = b''.join(bvalue)
            else:
                if header_length > self.max_field_size:
                    raise LineTooLong(
                        'request header field {}'.format(
                            bname.decode("utf8", "xmlcharrefreplace")),
                        self.max_field_size, header_length)

            bvalue = bvalue.strip()
            name = bname.decode('utf-8', 'surrogateescape')
            value = bvalue.decode('utf-8', 'surrogateescape')

            headers.add(name, value)
            raw_headers.append((bname, bvalue))

        close_conn = None
        encoding = None
        upgrade = False
        chunked = False
        raw_headers = tuple(raw_headers)
        headers = CIMultiDictProxy(headers)

        # keep-alive
        conn = headers.get(hdrs.CONNECTION)
        if conn:
            v = conn.lower()
            if v == 'close':
                close_conn = True
            elif v == 'keep-alive':
                close_conn = False
            elif v == 'upgrade':
                upgrade = True

        # encoding
        enc = headers.get(hdrs.CONTENT_ENCODING)
        if enc:
            enc = enc.lower()
            if enc in ('gzip', 'deflate', 'br'):
                encoding = enc

        # chunking
        te = headers.get(hdrs.TRANSFER_ENCODING)
        if te and 'chunked' in te.lower():
            chunked = True

        return headers, raw_headers, close_conn, encoding, upgrade, chunked
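
A hypothetical harness for parse_headers above (it also relies on the
imports the example itself uses). The method expects the raw message
split into lines: lines[0] is the start line (parsing begins at index 1)
and an empty line terminates the header block. The _HeaderParser shim
and the max_field_size value are assumptions for illustration; the real
parsers configure these in their constructors.

class _HeaderParser:
    """Minimal stand-in carrying the attribute parse_headers needs."""
    max_field_size = 8190
    # paste the parse_headers method from the example above here

lines = [b'HTTP/1.1 200 OK',
         b'Content-Type: text/html; charset=utf-8',
         b'Transfer-Encoding: chunked',
         b'Connection: keep-alive',
         b'']  # empty line ends the header block

headers, raw, close, enc, upgrade, chunked = _HeaderParser().parse_headers(lines)
# headers['Content-Type'] == 'text/html; charset=utf-8'
# chunked is True, close is False, upgrade is False
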
Exemplo n.º 16
0
class ClientResponse:

    # from the Status-Line of the response
    version = None  # HTTP-Version
    status = None  # Status-Code
    reason = None  # Reason-Phrase

    cookies = None  # Response cookies (Set-Cookie)
    content = None  # Payload stream
    headers = None  # Response headers, CIMultiDictProxy
    raw_headers = None  # Response raw headers, a sequence of pairs

    _connection = None  # current connection
    flow_control_class = FlowControlStreamReader  # reader flow control
    _reader = None  # input stream
    _response_parser = aiohttp.HttpResponseParser()
    _source_traceback = None
    # set up by ClientRequest after ClientResponse object creation;
    # the post-init stage avoids changing the ctor signature
    _loop = None
    _closed = True  # allow __del__ for a not-properly-initialized response

    def __init__(self, method, url, host="", *, writer=None, continue100=None):
        super().__init__()

        self.method = method
        self.url = url
        self.host = host
        self._content = None
        self._writer = writer
        self._continue = continue100
        self._closed = False
        self._should_close = True  # override by message.should_close later
        self._history = ()

    def _post_init(self, loop):
        self._loop = loop
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __del__(self, _warnings=warnings):
        if self._closed:
            return
        self.close()

        _warnings.warn("Unclosed response {!r}".format(self), ResourceWarning)
        context = {"client_response": self, "message": "Unclosed response"}
        if self._source_traceback:
            context["source_traceback"] = self._source_traceback
        self._loop.call_exception_handler(context)

    def __repr__(self):
        out = io.StringIO()
        print("<ClientResponse({}) [{} {}]>".format(self.url, self.status, self.reason), file=out)
        print(self.headers, file=out)
        return out.getvalue()

    @property
    def connection(self):
        return self._connection

    @property
    def history(self):
        """A sequence of of responses, if redirects occured."""
        return self._history

    def waiting_for_continue(self):
        return self._continue is not None

    def _setup_connection(self, connection):
        self._reader = connection.reader
        self._connection = connection
        self.content = self.flow_control_class(connection.reader, loop=connection.loop)

    def _need_parse_response_body(self):
        return self.method.lower() != "head" and self.status not in [204, 304]

    @asyncio.coroutine
    def start(self, connection, read_until_eof=False):
        """Start response processing."""
        self._setup_connection(connection)

        while True:
            httpstream = self._reader.set_parser(self._response_parser)

            # read response
            message = yield from httpstream.read()
            if message.code != 100:
                break

            if self._continue is not None and not self._continue.done():
                self._continue.set_result(True)
                self._continue = None

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason
        self._should_close = message.should_close

        # headers
        self.headers = CIMultiDictProxy(message.headers)
        self.raw_headers = tuple(message.raw_headers)

        # payload
        response_with_body = self._need_parse_response_body()
        self._reader.set_parser(
            aiohttp.HttpPayloadParser(message, readall=read_until_eof, response_with_body=response_with_body),
            self.content,
        )

        # cookies
        self.cookies = http.cookies.SimpleCookie()
        if hdrs.SET_COOKIE in self.headers:
            for hdr in self.headers.getall(hdrs.SET_COOKIE):
                try:
                    self.cookies.load(hdr)
                except http.cookies.CookieError as exc:
                    client_logger.warning("Can not load response cookies: %s", exc)
        return self

    def close(self, force=True):
        if not force:
            warnings.warn("force parameter should be True", DeprecationWarning, stacklevel=2)
        if self._closed:
            return

        self._closed = True

        if hasattr(self._loop, "is_closed"):
            if self._loop.is_closed():
                return

        if self._connection is not None:
            self._connection.close()
            self._connection = None
        self._cleanup_writer()

    @asyncio.coroutine
    def release(self):
        if self._closed:
            return
        try:
            content = self.content
            if content is not None and not content.at_eof():
                chunk = yield from content.readany()
                while chunk is not EOF_MARKER or chunk:
                    chunk = yield from content.readany()
        except Exception:
            self._connection.close()
            self._connection = None
            raise
        finally:
            self._closed = True
            if self._connection is not None:
                self._connection.release()
                if self._reader is not None:
                    self._reader.unset_parser()
                self._connection = None
            self._cleanup_writer()

    def _cleanup_writer(self):
        if self._writer is not None and not self._writer.done():
            self._writer.cancel()
        self._writer = None

    @asyncio.coroutine
    def wait_for_close(self):
        if self._writer is not None:
            try:
                yield from self._writer
            finally:
                self._writer = None
        yield from self.release()

    @asyncio.coroutine
    def read(self, decode=False):
        """Read response payload."""
        if self._content is None:
            try:
                self._content = yield from self.content.read()
            except BaseException:
                self.close()
                raise
            else:
                yield from self.release()

        data = self._content

        if decode:
            warnings.warn(".read(True) is deprecated. use .json() instead", DeprecationWarning)
            return (yield from self.json())

        return data

    def _get_encoding(self):
        ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
        mtype, stype, _, params = helpers.parse_mimetype(ctype)

        encoding = params.get("charset")
        if not encoding:
            encoding = chardet.detect(self._content)["encoding"]
        if not encoding:
            encoding = "utf-8"

        return encoding

    @asyncio.coroutine
    def text(self, encoding=None):
        """Read response payload and decode."""
        if self._content is None:
            yield from self.read()

        if encoding is None:
            encoding = self._get_encoding()

        return self._content.decode(encoding)

    @asyncio.coroutine
    def json(self, *, encoding=None, loads=json.loads):
        """Read and decodes JSON response."""
        if self._content is None:
            yield from self.read()

        ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
        if "json" not in ctype:
            client_logger.warning("Attempt to decode JSON with unexpected mimetype: %s", ctype)

        stripped = self._content.strip()
        if not stripped:
            return None

        if encoding is None:
            encoding = self._get_encoding()

        return loads(stripped.decode(encoding))

    if PY_35:

        @asyncio.coroutine
        def __aenter__(self):
            return self

        @asyncio.coroutine
        def __aexit__(self, exc_type, exc_val, exc_tb):
            if exc_type is None:
                yield from self.release()
            else:
                self.close()
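
A sketch of how this generator-based ClientResponse was consumed before
async/await, in the same @asyncio.coroutine / yield from style the class
is written in; aiohttp.request was the module-level helper of the old
releases this example targets, and the URL is illustrative.

import asyncio
import aiohttp

@asyncio.coroutine
def fetch(url):
    resp = yield from aiohttp.request('GET', url)  # returns a started response
    body = yield from resp.read()  # read() releases or closes the connection
    return body

# asyncio.get_event_loop().run_until_complete(fetch('http://example.com/'))
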
Exemplo n.º 17
0
def make_mocked_request(method, path, headers=None, *,
                        match_info=sentinel,
                        version=HttpVersion(1, 1), closing=False,
                        app=None,
                        writer=sentinel,
                        protocol=sentinel,
                        transport=sentinel,
                        payload=sentinel,
                        sslcontext=None,
                        client_max_size=1024**2,
                        loop=...):
    """Creates mocked web.Request testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    """

    task = mock.Mock()
    if loop is ...:
        loop = mock.Mock()
        loop.create_future.return_value = ()

    if version < HttpVersion(1, 1):
        closing = True

    if headers:
        headers = CIMultiDictProxy(CIMultiDict(headers))
        raw_hdrs = tuple(
            (k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items())
    else:
        headers = CIMultiDictProxy(CIMultiDict())
        raw_hdrs = ()

    chunked = 'chunked' in headers.get(hdrs.TRANSFER_ENCODING, '').lower()

    message = RawRequestMessage(
        method, path, version, headers,
        raw_hdrs, closing, False, False, chunked, URL(path))
    if app is None:
        app = _create_app_mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if protocol is sentinel:
        protocol = mock.Mock()
        protocol.transport = transport

    if writer is sentinel:
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport

    protocol.transport = transport
    protocol.writer = writer

    if payload is sentinel:
        payload = mock.Mock()

    req = Request(message, payload,
                  protocol, writer, task, loop,
                  client_max_size=client_max_size)

    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock())
    match_info.add_app(app)
    req._match_info = match_info

    return req
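
A hedged pytest-style sketch using make_mocked_request above (it ships
as aiohttp.test_utils.make_mocked_request) to exercise a handler without
starting a server; the handler, header and assertions are illustrative,
and the test assumes an async-capable runner such as pytest-asyncio.

from aiohttp import web
from aiohttp.test_utils import make_mocked_request

async def handler(request):
    name = request.headers.get('X-Name', 'anonymous')  # hypothetical header
    return web.Response(text='hello ' + name)

async def test_handler():
    req = make_mocked_request('GET', '/', headers={'X-Name': 'bob'})
    resp = await handler(req)
    assert resp.status == 200
    assert resp.text == 'hello bob'
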
Exemplo n.º 18
0
class ClientResponse(HeadersMixin):

    # from the Status-Line of the response
    version = None  # HTTP-Version
    status = None  # Status-Code
    reason = None  # Reason-Phrase

    content = None  # Payload stream
    headers = None  # Response headers, CIMultiDictProxy
    raw_headers = None  # Response raw headers, a sequence of pairs

    _connection = None  # current connection
    flow_control_class = FlowControlStreamReader  # reader flow control
    _reader = None  # input stream
    _response_parser = aiohttp.HttpResponseParser()
    _source_traceback = None
    # set up by ClientRequest after ClientResponse object creation;
    # the post-init stage avoids changing the ctor signature
    _loop = None
    _closed = True  # allow __del__ for a not-properly-initialized response

    def __init__(self,
                 method,
                 url,
                 *,
                 writer=None,
                 continue100=None,
                 timeout=5 * 60):
        assert isinstance(url, URL)

        self.method = method
        self._url_obj = url
        self._content = None
        self._writer = writer
        self._continue = continue100
        self._closed = False
        self._should_close = True  # override by message.should_close later
        self._history = ()
        self._timeout = timeout
        self.cookies = http.cookies.SimpleCookie()

    @property
    def url_obj(self):
        return self._url_obj

    @property
    def url(self):
        warnings.warn("Deprecated, use .url_obj",
                      DeprecationWarning,
                      stacklevel=2)
        return str(self._url_obj)

    @property
    def host(self):
        warnings.warn("Deprecated, use .url_obj.host",
                      DeprecationWarning,
                      stacklevel=2)
        return self._url_obj.host

    def _post_init(self, loop):
        self._loop = loop
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __del__(self, _warnings=warnings):
        if self._loop is None:
            return  # not started
        if self._closed:
            return
        self.close()

        _warnings.warn("Unclosed response {!r}".format(self), ResourceWarning)
        context = {'client_response': self, 'message': 'Unclosed response'}
        if self._source_traceback:
            context['source_traceback'] = self._source_traceback
        self._loop.call_exception_handler(context)

    def __repr__(self):
        out = io.StringIO()
        ascii_encodable_url = str(self.url)
        if self.reason:
            ascii_encodable_reason = self.reason.encode('ascii',
                                                        'backslashreplace') \
                .decode('ascii')
        else:
            ascii_encodable_reason = self.reason
        print('<ClientResponse({}) [{} {}]>'.format(ascii_encodable_url,
                                                    self.status,
                                                    ascii_encodable_reason),
              file=out)
        print(self.headers, file=out)
        return out.getvalue()

    @property
    def connection(self):
        return self._connection

    @property
    def history(self):
        """A sequence of of responses, if redirects occurred."""
        return self._history

    def _setup_connection(self, connection):
        self._reader = connection.reader
        self._connection = connection
        self.content = self.flow_control_class(connection.reader,
                                               loop=connection.loop,
                                               timeout=self._timeout)

    def _need_parse_response_body(self):
        return (self.method.lower() != 'head'
                and self.status not in [204, 304])

    @asyncio.coroutine
    def start(self, connection, read_until_eof=False):
        """Start response processing."""
        self._setup_connection(connection)

        while True:
            httpstream = self._reader.set_parser(self._response_parser)

            # read response
            with Timeout(self._timeout, loop=self._loop):
                message = yield from httpstream.read()
            if message.code < 100 or message.code > 199 or message.code == 101:
                break

            if self._continue is not None and not self._continue.done():
                self._continue.set_result(True)
                self._continue = None

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason
        self._should_close = message.should_close

        # headers
        self.headers = CIMultiDictProxy(message.headers)
        self.raw_headers = tuple(message.raw_headers)

        # payload
        rwb = self._need_parse_response_body()
        self._reader.set_parser(
            aiohttp.HttpPayloadParser(message,
                                      readall=read_until_eof,
                                      response_with_body=rwb), self.content)

        # cookies
        for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
            try:
                self.cookies.load(hdr)
            except http.cookies.CookieError as exc:
                client_logger.warning('Can not load response cookies: %s', exc)
        return self

    def close(self):
        if self._closed:
            return

        self._closed = True

        if self._loop is None or self._loop.is_closed():
            return

        if self._connection is not None:
            self._connection.close()
            self._connection = None
        self._cleanup_writer()
        self._notify_content()

    @asyncio.coroutine
    def release(self, *, consume=False):
        if self._closed:
            return
        try:
            content = self.content
            if content is not None:
                if consume:
                    while not content.at_eof():
                        yield from content.readany()
                else:
                    close = False
                    if content.exception() is not None:
                        close = True
                    else:
                        content.read_nowait()
                        if not content.at_eof():
                            close = True
                    if close and self._connection is not None:
                        self._connection.close()
                        self._connection = None
        except Exception:
            self._connection.close()
            self._connection = None
            raise
        finally:
            self._closed = True
            if self._connection is not None:
                self._connection.release()
                if self._reader is not None:
                    self._reader.unset_parser()
                self._connection = None
            self._cleanup_writer()
            self._notify_content()

    def raise_for_status(self):
        if 400 <= self.status:
            raise aiohttp.HttpProcessingError(code=self.status,
                                              message=self.reason)

    def _cleanup_writer(self):
        if self._writer is not None and not self._writer.done():
            self._writer.cancel()
        self._writer = None

    def _notify_content(self):
        content = self.content
        if content and content.exception() is None and not content.is_eof():
            content.set_exception(
                aiohttp.ClientDisconnectedError('Connection closed'))

    @asyncio.coroutine
    def wait_for_close(self):
        if self._writer is not None:
            try:
                yield from self._writer
            finally:
                self._writer = None
        yield from self.release()

    @asyncio.coroutine
    def read(self):
        """Read response payload."""
        if self._content is None:
            try:
                self._content = yield from self.content.read()
            except BaseException:
                self.close()
                raise
            else:
                yield from self.release()

        return self._content

    def _get_encoding(self):
        ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
        mtype, stype, _, params = helpers.parse_mimetype(ctype)

        encoding = params.get('charset')
        if not encoding:
            if mtype == 'application' and stype == 'json':
                # RFC 7159 states that the default encoding is UTF-8.
                encoding = 'utf-8'
            else:
                encoding = chardet.detect(self._content)['encoding']
        if not encoding:
            encoding = 'utf-8'

        return encoding

    @asyncio.coroutine
    def text(self, encoding=None, errors='strict'):
        """Read response payload and decode."""
        if self._content is None:
            yield from self.read()

        if encoding is None:
            encoding = self._get_encoding()

        return self._content.decode(encoding, errors=errors)

    @asyncio.coroutine
    def json(self, *, encoding=None, loads=json.loads):
        """Read and decodes JSON response."""
        if self._content is None:
            yield from self.read()

        ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
        if 'json' not in ctype:
            client_logger.warning(
                'Attempt to decode JSON with unexpected mimetype: %s', ctype)

        stripped = self._content.strip()
        if not stripped:
            return None

        if encoding is None:
            encoding = self._get_encoding()

        return loads(stripped.decode(encoding))

    if PY_35:

        @asyncio.coroutine
        def __aenter__(self):
            return self

        @asyncio.coroutine
        def __aexit__(self, exc_type, exc_val, exc_tb):
            if exc_type is None:
                yield from self.release()
            else:
                self.close()
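
Compared with the release() in Exemplo n.º 16, which drains the payload
unconditionally, this variant's release(consume=...) by default only
reads data that is already buffered and closes the connection if
anything is left unread; consume=True drains to EOF so the keep-alive
connection can be reused. A small sketch in the same yield from style,
assuming the module-level aiohttp.request helper of the same era and an
illustrative URL.

import asyncio
import aiohttp

@asyncio.coroutine
def discard_body(url):
    resp = yield from aiohttp.request('GET', url)
    # consume=True reads the payload to EOF so the connection goes back
    # to the pool instead of being closed with data still pending.
    yield from resp.release(consume=True)
    return resp.status
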
Exemplo n.º 19
0
def make_mocked_request(method: str,
                        path: str,
                        headers: Any = None,
                        *,
                        match_info: Any = sentinel,
                        version: HttpVersion = HttpVersion(1, 1),
                        closing: bool = False,
                        app: Any = None,
                        writer: Any = sentinel,
                        protocol: Any = sentinel,
                        transport: Any = sentinel,
                        payload: Any = sentinel,
                        sslcontext: Optional[SSLContext] = None,
                        client_max_size: int = 1024**2,
                        loop: Any = ...) -> Any:
    """Creates mocked web.Request testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    """

    task = mock.Mock()
    if loop is ...:
        loop = mock.Mock()
        loop.create_future.return_value = ()

    if version < HttpVersion(1, 1):
        closing = True

    if headers:
        headers = CIMultiDictProxy(CIMultiDict(headers))
        raw_hdrs = tuple(
            (k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items())
    else:
        headers = CIMultiDictProxy(CIMultiDict())
        raw_hdrs = ()

    chunked = 'chunked' in headers.get(hdrs.TRANSFER_ENCODING, '').lower()

    message = RawRequestMessage(method, path, version, headers, raw_hdrs,
                                closing, False, False, chunked, URL(path))
    if app is None:
        app = _create_app_mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if protocol is sentinel:
        protocol = mock.Mock()
        protocol.transport = transport

    if writer is sentinel:
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport

    protocol.transport = transport
    protocol.writer = writer

    if payload is sentinel:
        payload = mock.Mock()

    req = Request(message,
                  payload,
                  protocol,
                  writer,
                  task,
                  loop,
                  client_max_size=client_max_size)

    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock())
    match_info.add_app(app)
    req._match_info = match_info

    return req
Exemplo n.º 20
0
    async def headers(self) -> MultiMapping[str]:
        headers: MultiMapping[str] = await self._rq.headers()
        new_headers: CIMultiDict[str] = CIMultiDict(headers)
        for h, v in self._headers.items():
            new_headers.add(h, v)
        return CIMultiDictProxy(new_headers)
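
A small illustration of the merging behaviour in the headers() override
above (names and values are made up): CIMultiDict.add() appends to
existing entries rather than replacing them, and CIMultiDictProxy gives
a read-only, case-insensitive view of the result.

from multidict import CIMultiDict, CIMultiDictProxy

original = CIMultiDict({'Accept': 'text/html'})
extra = {'X-Trace-Id': 'abc123'}              # hypothetical injected header

merged = CIMultiDict(original)
for name, value in extra.items():
    merged.add(name, value)

proxy = CIMultiDictProxy(merged)              # read-only, case-insensitive
assert proxy['accept'] == 'text/html'
assert proxy.get('x-trace-id') == 'abc123'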