async def start(self, connection, read_until_eof=False):
    """Start response processing.

    Reads the status line and headers from *connection*, skipping any
    interim ``100 Continue`` messages, installs the payload parser and
    collects response cookies.

    :param connection: connection whose ``_reader`` stream feeds the
        parsers (attached via ``_setup_connection``).
    :param read_until_eof: forwarded to ``HttpPayloadParser`` as
        ``readall`` -- read the body until EOF when the message has no
        explicit length framing.
    :return: ``self``.
    """
    self._setup_connection(connection)

    message = None
    while True:
        httpstream = self._reader.set_parser(self._response_parser)

        # read response
        message = await httpstream.read()
        if message.code != 100:
            break

        # interim "100 Continue": release any pending waiter on the
        # continue future, then loop to read the real response
        if self._continue is not None and not self._continue.done():
            self._continue.set_result(True)
            self._continue = None

    # response status
    self.version = message.version
    self.status = message.code
    self.reason = message.reason
    self._should_close = message.should_close

    # headers (case-insensitive multidict proxy over the parsed headers)
    self.headers = CIMultiDictProxy(message.headers)

    # payload
    response_with_body = self._need_parse_response_body()
    self._reader.set_parser(
        aiohttp.HttpPayloadParser(
            message, compression=False, readall=read_until_eof,
            response_with_body=response_with_body), self.content)

    # cookies
    self.cookies = http.cookies.SimpleCookie()
    if aiohttp.client.hdrs.SET_COOKIE in self.headers:
        for hdr in self.headers.getall(aiohttp.client.hdrs.SET_COOKIE):
            try:
                self.cookies.load(hdr)
            except http.cookies.CookieError as exc:
                # a malformed cookie must not abort response processing
                aiohttp.log.client_logger.warning(
                    'Can not load response cookies: %s', exc)

    return self
def start(self, connection, read_until_eof=False):
    """Start response processing.

    Reads the status line and headers from *connection*, skipping any
    interim ``100 Continue`` messages, installs the payload parser,
    collects response cookies and shares them back to the connection.

    :param connection: connection whose ``_reader`` stream feeds the
        parsers (attached via ``_setup_connection``).
    :param read_until_eof: forwarded to ``HttpPayloadParser`` as
        ``readall``.
    :return: ``self``.
    """
    self._setup_connection(connection)

    while True:
        httpstream = self._reader.set_parser(self._response_parser)

        # read response
        self.message = yield from httpstream.read()
        if self.message.code != 100:
            break

        # interim "100 Continue": release any pending waiter on the
        # continue future, then loop to read the real response
        if self._continue is not None and not self._continue.done():
            self._continue.set_result(True)
            self._continue = None

    # response status
    self.version = self.message.version
    self.status = self.message.code
    self.reason = self.message.reason

    # headers
    self.headers = CaseInsensitiveMultiDict(
        self.message.headers.items(getall=True))

    # payload -- HEAD responses carry no body by definition
    response_with_body = self.method.lower() != 'head'
    self._reader.set_parser(
        aiohttp.HttpPayloadParser(self.message,
                                  readall=read_until_eof,
                                  response_with_body=response_with_body),
        self.content)

    # cookies
    self.cookies = http.cookies.SimpleCookie()
    if 'SET-COOKIE' in self.headers:
        for hdr in self.headers.getall('SET-COOKIE'):
            try:
                self.cookies.load(hdr)
            except http.cookies.CookieError as exc:
                # a malformed cookie must not abort response processing
                client_logger.warning(
                    'Can not load response cookies: %s', exc)
        # make the received cookies visible to the connection pool
        connection.share_cookies(self.cookies)

    return self
def start(self, connection, read_until_eof=False):
    """Start response processing.

    Reads the status line and headers (timed by ``self._timer``),
    skipping interim 1xx informational responses other than ``101``,
    then installs the payload parser and collects response cookies.

    :param connection: connection whose ``_reader`` stream feeds the
        parsers (attached via ``_setup_connection``).
    :param read_until_eof: forwarded to ``HttpPayloadParser`` as
        ``readall``.
    :return: ``self``.
    """
    self._setup_connection(connection)

    with self._timer:
        while True:
            httpstream = self._reader.set_parser(self._response_parser)

            # read response
            message = yield from httpstream.read()
            # any final status ends the loop; 101 also ends it (the
            # protocol is being switched), other 1xx are skipped
            if (message.code < 100 or message.code > 199 or
                    message.code == 101):
                break

            # interim response: release any pending waiter on the
            # continue future, then loop for the next message
            if self._continue is not None and not self._continue.done():
                self._continue.set_result(True)
                self._continue = None

    # response status
    self.version = message.version
    self.status = message.code
    self.reason = message.reason
    self._should_close = message.should_close

    # headers (parsed view plus the raw wire form)
    self.headers = CIMultiDictProxy(message.headers)
    self.raw_headers = tuple(message.raw_headers)

    # payload
    rwb = self._need_parse_response_body()
    self._reader.set_parser(
        aiohttp.HttpPayloadParser(message,
                                  readall=read_until_eof,
                                  response_with_body=rwb),
        self.content)

    # cookies
    # NOTE(review): assumes self.cookies is pre-created elsewhere
    # (e.g. in __init__); this version does not instantiate it here
    for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
        try:
            self.cookies.load(hdr)
        except CookieError as exc:
            # a malformed cookie must not abort response processing
            client_logger.warning(
                'Can not load response cookies: %s', exc)

    return self
def start(self, connection, read_until_eof=False):
    """Start response processing.

    Reads the status line and headers from *connection*'s reader,
    skipping interim ``100 Continue`` messages, copies the headers
    onto this object, installs the payload parser and loads response
    cookies.

    :param connection: connection object; its ``reader`` is used for
        all parsing and the connection itself is retained on ``self``.
    :param read_until_eof: forwarded to ``HttpPayloadParser`` as
        ``readall``.
    :return: ``self``.
    """
    self._reader = connection.reader
    self._connection = connection

    while True:
        httpstream = self._reader.set_parser(self._response_parser)

        # read response
        self.message = yield from httpstream.read()
        if self.message.code != 100:
            break

        # interim "100 Continue": release any pending waiter on the
        # continue future, then loop to read the real response
        if self._continue is not None and not self._continue.done():
            self._continue.set_result(True)
            self._continue = None

    # response status
    self.version = self.message.version
    self.status = self.message.code
    self.reason = self.message.reason

    # headers
    for hdr, val in self.message.headers:
        self.add_header(hdr, val)

    # payload
    self.content = self._reader.set_parser(
        aiohttp.HttpPayloadParser(self.message, readall=read_until_eof))

    # cookies
    self.cookies = http.cookies.SimpleCookie()
    if 'Set-Cookie' in self:
        for hdr in self.get_all('Set-Cookie'):
            try:
                self.cookies.load(hdr)
            except http.cookies.CookieError as exc:
                # a malformed cookie must not abort response processing;
                # logging.warn() is a deprecated alias -- use warning()
                logging.warning('Can not load response cookies: %s', exc)

    return self
def start(self, stream, transport, read_until_eof=False):
    """Start response processing.

    Reads the status line and headers from *stream*, skipping interim
    ``100 Continue`` messages, copies the headers onto this object,
    installs the payload parser and loads response cookies.

    :param stream: parser stream the response is read from.
    :param transport: transport the response arrived on; stored as-is.
    :param read_until_eof: forwarded to ``HttpPayloadParser`` as
        ``readall``.
    :return: ``self``.
    """
    # local import: this block has no module-level logger in scope
    import logging

    self.stream = stream
    self.transport = transport

    while True:
        httpstream = stream.set_parser(self._response_parser)

        # read response
        self.message = yield from httpstream.read()
        if self.message.code != 100:
            break

        # interim "100 Continue": release any pending waiter on the
        # continue future, then loop to read the real response
        if self._continue is not None and not self._continue.done():
            self._continue.set_result(True)
            self._continue = None

    # response status
    self.version = self.message.version
    self.status = self.message.code
    self.reason = self.message.reason

    # headers
    for hdr, val in self.message.headers:
        self.add_header(hdr, val)

    # payload
    self.content = stream.set_parser(
        aiohttp.HttpPayloadParser(self.message, readall=read_until_eof))

    # cookies
    self.cookies = http.cookies.SimpleCookie()
    if 'Set-Cookie' in self:
        for hdr in self.get_all('Set-Cookie'):
            try:
                self.cookies.load(hdr)
            except http.cookies.CookieError as exc:
                # BUGFIX: a malformed Set-Cookie header previously
                # raised out of start() and aborted the response
                logging.warning('Can not load response cookies: %s', exc)

    return self
def start(self):
    """Start processing of incoming requests.

    It reads request line, request headers and request payload, then
    calls handle_request() method. Subclass has to override
    handle_request(). start() handles various exceptions in request
    or response handling. Connection is being closed always unless
    keep_alive(True) specified.
    """
    reader = self.reader
    self.writer.set_tcp_nodelay(True)

    try:
        while not self._closing:
            message = None
            self._keepalive = False
            self._request_count += 1
            self._reading_request = False
            payload = None

            # slow request timer: bound how long we wait for the
            # request headers to arrive
            with Timeout(max(self._slow_request_timeout,
                             self._keepalive_timeout),
                         loop=self._loop):
                # read request headers
                httpstream = reader.set_parser(self._request_parser)
                message = yield from httpstream.read()

            # request may not have payload
            try:
                content_length = int(
                    message.headers.get(hdrs.CONTENT_LENGTH, 0))
            except ValueError:
                # non-numeric Content-Length is a protocol error
                raise errors.InvalidHeader(hdrs.CONTENT_LENGTH) from None

            # a body parser is only installed when something signals a
            # body: a positive length, CONNECT, legacy websocket key,
            # or chunked transfer encoding
            if (content_length > 0 or
                    message.method == hdrs.METH_CONNECT or
                    hdrs.SEC_WEBSOCKET_KEY1 in message.headers or
                    'chunked' in message.headers.get(
                        hdrs.TRANSFER_ENCODING, '')):
                payload = streams.FlowControlStreamReader(
                    reader, loop=self._loop)
                reader.set_parser(aiohttp.HttpPayloadParser(message),
                                  payload)
            else:
                payload = EMPTY_PAYLOAD

            yield from self.handle_request(message, payload)

            if payload and not payload.is_eof():
                # handler returned without draining the body; the
                # stream position is unknown, so drop the connection
                self.log_debug('Uncompleted request.')
                self._closing = True
            else:
                reader.unset_parser()
                if not self._keepalive or not self._keepalive_timeout:
                    self._closing = True
    except asyncio.CancelledError:
        self.log_debug('Request handler cancelled.')
        return
    except asyncio.TimeoutError:
        self.log_debug('Request handler timed out.')
        return
    except errors.ClientDisconnectedError:
        self.log_debug('Ignored premature client disconnection #1.')
        return
    except errors.HttpProcessingError as exc:
        yield from self.handle_error(exc.code, message,
                                     None, exc, exc.headers,
                                     exc.message)
    except Exception as exc:
        # last resort: report an internal server error to the client
        yield from self.handle_error(500, message, None, exc)
    finally:
        self._request_handler = None
        if self.transport is None:
            self.log_debug('Ignored premature client disconnection #2.')
        else:
            self.transport.close()
def start(self):
    """Start processing of incoming requests.

    It reads request line, request headers and request payload, then
    calls handle_request() method. Subclass has to override
    handle_request(). start() handles various exceptions in request
    or response handling. Connection is being closed always unless
    keep_alive(True) specified.
    """
    reader = self.reader

    while True:
        message = None
        self._keep_alive = False
        self._request_count += 1
        self._reading_request = False
        payload = None

        try:
            # start slow request timer
            if self._timeout and self._timeout_handle is None:
                self._timeout_handle = self._loop.call_later(
                    self._timeout, self.cancel_slow_request)

            # read request headers
            httpstream = reader.set_parser(self._request_parser)
            message = yield from httpstream.read()

            # cancel slow request timer
            if self._timeout_handle is not None:
                self._timeout_handle.cancel()
                self._timeout_handle = None

            # request may not have payload: a body parser is only
            # installed when Content-Length, a legacy websocket key,
            # or chunked transfer encoding signals one
            if (message.headers.get(hdrs.CONTENT_LENGTH, 0) or
                    hdrs.SEC_WEBSOCKET_KEY1 in message.headers or
                    'chunked' in message.headers.get(
                        hdrs.TRANSFER_ENCODING, '')):
                payload = streams.FlowControlStreamReader(
                    reader, loop=self._loop)
                reader.set_parser(aiohttp.HttpPayloadParser(message),
                                  payload)
            else:
                payload = EMPTY_PAYLOAD

            yield from self.handle_request(message, payload)
        except (asyncio.CancelledError, errors.ClientDisconnectedError):
            if self.debug:
                self.log_exception(
                    'Ignored premature client disconnection.')
            break
        except errors.HttpProcessingError as exc:
            if self.transport is not None:
                yield from self.handle_error(exc.code, message,
                                             None, exc, exc.headers)
        except Exception as exc:
            # last resort: report an internal server error to client
            yield from self.handle_error(500, message, None, exc)
        finally:
            if self.transport is None:
                self.log_debug('Ignored premature client disconnection.')
                break

            if payload and not payload.is_eof():
                # handler returned without draining the body; the
                # stream position is unknown, so drop the connection
                self.log_debug('Uncompleted request.')
                self._request_handler = None
                self.transport.close()
                break
            else:
                reader.unset_parser()

                if self._request_handler:
                    if self._keep_alive and self._keep_alive_period:
                        # close the idle connection after the
                        # keep-alive period unless a new request lands
                        self.log_debug(
                            'Start keep-alive timer for %s sec.',
                            self._keep_alive_period)
                        self._keep_alive_handle = self._loop.call_later(
                            self._keep_alive_period, self.transport.close)
                    elif self._keep_alive and self._keep_alive_on:
                        # do nothing, rely on kernel or upstream server
                        pass
                    else:
                        self.log_debug('Close client connection.')
                        self._request_handler = None
                        self.transport.close()
                        break
                else:
                    # connection is closed
                    break
def start(self):
    """Start processing of incoming requests.

    It reads request line, request headers and request payload, then
    calls handle_request() method. Subclass has to override
    handle_request(). start() handles various exceptions in request
    or response handling. Connection is being closed always unless
    keep_alive(True) specified.
    """
    reader = self.reader

    while True:
        message = None
        self._keep_alive = False
        self._request_count += 1
        self._reading_request = False
        payload = None

        try:
            # wait for the first bytes of a new request before
            # touching the timers
            prefix = reader.set_parser(self._request_prefix)
            yield from prefix.read()
            self._reading_request = True

            # stop keep-alive timer
            if self._keep_alive_handle is not None:
                self._keep_alive_handle.cancel()
                self._keep_alive_handle = None

            # start slow request timer
            if self._timeout and self._timeout_handle is None:
                self._timeout_handle = self._loop.call_later(
                    self._timeout, self.cancel_slow_request)

            # read request headers
            httpstream = reader.set_parser(self._request_parser)
            message = yield from httpstream.read()

            # cancel slow request timer
            if self._timeout_handle is not None:
                self._timeout_handle.cancel()
                self._timeout_handle = None

            # a payload parser is always installed in this version
            payload = streams.FlowControlStreamReader(
                reader, loop=self._loop)
            reader.set_parser(aiohttp.HttpPayloadParser(message), payload)

            # handle_request() may be a plain function, a coroutine
            # or return a future -- await only when awaitable
            handler = self.handle_request(message, payload)
            if (asyncio.iscoroutine(handler) or
                    isinstance(handler, asyncio.Future)):
                yield from handler
        except (asyncio.CancelledError, errors.ClientDisconnectedError):
            self.log_debug('Ignored premature client disconnection.')
            break
        except errors.HttpProcessingError as exc:
            if self.transport is not None:
                yield from self.handle_error(exc.code, message,
                                             None, exc, exc.headers)
        except Exception as exc:
            # last resort: report an internal server error to client
            yield from self.handle_error(500, message, None, exc)
        finally:
            if self.transport is None:
                self.log_debug('Ignored premature client disconnection.')
                break

            if payload and not payload.is_eof():
                # handler returned without draining the body; the
                # stream position is unknown, so drop the connection
                self.log_debug('Uncompleted request.')
                self._request_handler = None
                self.transport.close()
                break
            else:
                reader.unset_parser()

                if self._request_handler:
                    if self._keep_alive and self._keep_alive_period:
                        # close the idle connection after the
                        # keep-alive period unless a new request lands
                        self.log_debug(
                            'Start keep-alive timer for %s sec.',
                            self._keep_alive_period)
                        self._keep_alive_handle = self._loop.call_later(
                            self._keep_alive_period, self.transport.close)
                    else:
                        self.log_debug('Close client connection.')
                        self._request_handler = None
                        self.transport.close()
                        break
                else:
                    # connection is closed
                    break