def __call__(self, out, buf):
    # payload params
    length = self.message.headers.get('CONTENT-LENGTH', self.length)
    if 'SEC-WEBSOCKET-KEY1' in self.message.headers:
        length = 8

    # payload decompression wrapper
    if self.compression and self.message.compression:
        out = DeflateBuffer(out, self.message.compression)

    # payload parser
    if 'chunked' in self.message.headers.get('TRANSFER-ENCODING', ''):
        yield from self.parse_chunked_payload(out, buf)

    elif length is not None:
        try:
            length = int(length)
        except ValueError:
            raise errors.InvalidHeader('CONTENT-LENGTH') from None

        if length < 0:
            raise errors.InvalidHeader('CONTENT-LENGTH')
        elif length > 0:
            yield from self.parse_length_payload(out, buf, length)
    else:
        if self.readall and getattr(self.message, 'code', 0) != 204:
            yield from self.parse_eof_payload(out, buf)
        elif getattr(self.message, 'method', None) in ('PUT', 'POST'):
            logging.warn(  # pragma: no cover
                'Content-Length or Transfer-Encoding header is required')

    out.feed_eof()
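

# A minimal, self-contained sketch (not aiohttp's actual DeflateBuffer) of
# what a decompression wrapper like the one used above presumably does: it
# sits in front of an output buffer and inflates gzip/deflate bodies with
# zlib before forwarding them. The _DeflateSketch name and the
# feed_data()/feed_eof() interface are illustrative assumptions.
import zlib


class _DeflateSketch:

    def __init__(self, out, encoding):
        self.out = out
        # gzip needs the header-aware mode; 'deflate' bodies are commonly raw
        wbits = (16 + zlib.MAX_WBITS) if encoding == 'gzip' else -zlib.MAX_WBITS
        self.zlib = zlib.decompressobj(wbits)

    def feed_data(self, chunk):
        # inflate each incoming chunk and forward it downstream
        self.out.feed_data(self.zlib.decompress(chunk))

    def feed_eof(self):
        # flush whatever the decompressor still buffers, then signal EOF
        self.out.feed_data(self.zlib.flush())
        self.out.feed_eof()


# usage sketch (sink is any object with feed_data()/feed_eof()):
#   comp = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
#   buf = _DeflateSketch(sink, 'deflate')
#   buf.feed_data(comp.compress(b'hello world') + comp.flush())
#   buf.feed_eof()   # sink has received b'hello world'
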
def __call__(self, out, buf):
    # payload params
    chunked = False
    length = self.length
    for name, value in self.message.headers:
        if name == 'CONTENT-LENGTH':
            length = value
        elif name == 'TRANSFER-ENCODING':
            chunked = value.lower() == 'chunked'
        elif name == 'SEC-WEBSOCKET-KEY1':
            length = 8

    # payload decompression wrapper
    if self.compression and self.message.compression:
        out = DeflateBuffer(out, self.message.compression)

    # payload parser
    if chunked:
        yield from self.parse_chunked_payload(out, buf)

    elif length is not None:
        try:
            length = int(length)
        except ValueError:
            raise errors.InvalidHeader('CONTENT-LENGTH') from None

        if length < 0:
            raise errors.InvalidHeader('CONTENT-LENGTH')
        elif length > 0:
            yield from self.parse_length_payload(out, buf, length)
    else:
        if self.readall:
            yield from self.parse_eof_payload(out, buf)
        elif self.message.method in ('PUT', 'POST'):
            logging.warn(
                'Content-Length or Transfer-Encoding header is required')

    out.feed_eof()
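

# Both variants above derive the body-framing strategy from the headers:
# "Transfer-Encoding: chunked" wins, otherwise an explicit Content-Length,
# otherwise read-until-EOF (or no body at all). A standalone sketch of that
# precedence with a hypothetical framing_mode() helper, assuming headers is
# a plain dict keyed by upper-cased names:
def framing_mode(headers, readall=False):
    if 'chunked' in headers.get('TRANSFER-ENCODING', '').lower():
        return 'chunked', None

    length = headers.get('CONTENT-LENGTH')
    if length is not None:
        length = int(length)  # the real parser raises errors.InvalidHeader
        if length < 0:
            raise ValueError('CONTENT-LENGTH must be non-negative')
        return ('length', length) if length else ('empty', 0)

    return ('eof', None) if readall else ('empty', 0)


# framing_mode({'CONTENT-LENGTH': '42'})          -> ('length', 42)
# framing_mode({'TRANSFER-ENCODING': 'chunked'})  -> ('chunked', None)
# framing_mode({}, readall=True)                  -> ('eof', None)
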
def start(self):
    """Start processing of incoming requests.

    It reads the request line, request headers and request payload, then
    calls the handle_request() method. Subclasses have to override
    handle_request(). start() handles various exceptions raised during
    request or response handling. The connection is always closed unless
    keep_alive(True) is specified.
    """
    reader = self.reader
    self.writer.set_tcp_nodelay(True)

    try:
        while not self._closing:
            message = None
            self._keepalive = False
            self._request_count += 1
            self._reading_request = False
            payload = None

            # slow request timer
            with Timeout(max(self._slow_request_timeout,
                             self._keepalive_timeout),
                         loop=self._loop):
                # read request headers
                httpstream = reader.set_parser(self._request_parser)
                message = yield from httpstream.read()

            # request may not have payload
            try:
                content_length = int(
                    message.headers.get(hdrs.CONTENT_LENGTH, 0))
            except ValueError:
                raise errors.InvalidHeader(hdrs.CONTENT_LENGTH) from None

            if (content_length > 0 or
                    message.method == hdrs.METH_CONNECT or
                    hdrs.SEC_WEBSOCKET_KEY1 in message.headers or
                    'chunked' in message.headers.get(
                        hdrs.TRANSFER_ENCODING, '')):
                payload = streams.FlowControlStreamReader(
                    reader, loop=self._loop)
                reader.set_parser(
                    aiohttp.HttpPayloadParser(message), payload)
            else:
                payload = EMPTY_PAYLOAD

            yield from self.handle_request(message, payload)

            if payload and not payload.is_eof():
                self.log_debug('Uncompleted request.')
                self._closing = True
            else:
                reader.unset_parser()
                if not self._keepalive or not self._keepalive_timeout:
                    self._closing = True

    except asyncio.CancelledError:
        self.log_debug('Request handler cancelled.')
        return
    except asyncio.TimeoutError:
        self.log_debug('Request handler timed out.')
        return
    except errors.ClientDisconnectedError:
        self.log_debug('Ignored premature client disconnection #1.')
        return
    except errors.HttpProcessingError as exc:
        yield from self.handle_error(exc.code, message, None, exc,
                                     exc.headers, exc.message)
    except Exception as exc:
        yield from self.handle_error(500, message, None, exc)
    finally:
        self._request_handler = None
        if self.transport is None:
            self.log_debug('Ignored premature client disconnection #2.')
        else:
            self.transport.close()
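

# The "slow request timer" above bounds how long the server will wait for a
# client to send the request line and headers. A minimal standalone sketch of
# the same idea using only asyncio.wait_for and modern async/await syntax
# (the code above uses aiohttp's Timeout context manager); the function name
# and the 15-second figure are illustrative assumptions.
import asyncio

SLOW_REQUEST_TIMEOUT = 15.0


async def read_request_head(reader: asyncio.StreamReader) -> bytes:
    try:
        # everything up to the blank line, or TimeoutError if the client stalls
        return await asyncio.wait_for(
            reader.readuntil(b'\r\n\r\n'), SLOW_REQUEST_TIMEOUT)
    except asyncio.TimeoutError:
        # mirror the handler above: give up on slow clients
        return b''
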
def data_received(self, data, SEP=b'\r\n',
                  CONTENT_LENGTH=hdrs.CONTENT_LENGTH,
                  METH_CONNECT=hdrs.METH_CONNECT,
                  SEC_WEBSOCKET_KEY1=hdrs.SEC_WEBSOCKET_KEY1):
    if self._closing:
        return

    while self._messages:
        if self._waiters:
            waiter = self._waiters.popleft()
            message = self._messages.popleft()
            waiter.set_result(message)
        else:
            break

    # read HTTP message (request line + headers), \r\n\r\n
    # and split by lines
    if self._payload_parser is None and not self._upgrade:
        if self._message_tail:
            data, self._message_tail = self._message_tail + data, b''

        start_pos = 0
        while True:
            pos = data.find(SEP, start_pos)
            if pos >= start_pos:
                # line found
                self._message_lines.append(data[start_pos:pos])

                # \r\n\r\n found
                start_pos = pos + 2
                if data[start_pos:start_pos+2] == SEP:
                    self._message_lines.append(b'')

                    msg = None
                    try:
                        msg = self._request_parser.parse_message(
                            self._message_lines)

                        # payload length
                        length = msg.headers.get(CONTENT_LENGTH)
                        if length is not None:
                            try:
                                length = int(length)
                            except ValueError:
                                raise errors.InvalidHeader(CONTENT_LENGTH)
                            if length < 0:
                                raise errors.InvalidHeader(CONTENT_LENGTH)

                        # do not support old websocket spec
                        if SEC_WEBSOCKET_KEY1 in msg.headers:
                            raise errors.InvalidHeader(SEC_WEBSOCKET_KEY1)
                    except errors.HttpProcessingError as exc:
                        # something happened during parsing
                        self._closing = True
                        self._request_handlers.append(
                            ensure_future(
                                self.handle_error(
                                    exc.code, msg, None, exc,
                                    exc.headers, exc.message),
                                loop=self._loop))
                        return
                    except Exception as exc:
                        self._closing = True
                        self._request_handlers.append(
                            ensure_future(
                                self.handle_error(500, msg, None, exc),
                                loop=self._loop))
                        return
                    else:
                        self._request_count += 1
                        self._reading_request = True
                        self._message_lines.clear()

                        self._upgrade = msg.upgrade

                        # calculate payload
                        empty_payload = True
                        if ((length is not None and length > 0) or
                                msg.chunked):
                            payload = streams.FlowControlStreamReader(
                                self, loop=self._loop)
                            payload_parser = HttpPayloadParser(
                                payload, length=length,
                                chunked=msg.chunked, method=msg.method,
                                compression=msg.compression)
                            if not payload_parser.done:
                                empty_payload = False
                                self._payload_parser = payload_parser
                        elif msg.method == METH_CONNECT:
                            empty_payload = False
                            payload = streams.FlowControlStreamReader(
                                self, loop=self._loop)
                            self._payload_parser = HttpPayloadParser(
                                payload, method=msg.method,
                                compression=msg.compression, readall=True)
                        else:
                            payload = EMPTY_PAYLOAD

                        if self._waiters:
                            waiter = self._waiters.popleft()
                            waiter.set_result((msg, payload))
                        elif self._max_concurrent_handlers:
                            self._max_concurrent_handlers -= 1
                            handler = ensure_future(
                                self.start(msg, payload), loop=self._loop)
                            self._request_handlers.append(handler)
                        else:
                            self._messages.append((msg, payload))

                    start_pos = start_pos + 2
                    if start_pos < len(data):
                        if empty_payload and not self._upgrade:
                            continue

                        self.data_received(data[start_pos:])
                    return
            else:
                self._message_tail = data[start_pos:]
                return

    # no parser, just store
    elif self._payload_parser is None and self._upgrade:
        assert not self._message_lines
        if data:
            self._message_tail += data

    # feed payload
    else:
        assert not self._message_lines
        if data:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self._payload_parser = None

                if tail:
                    super().data_received(tail)
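

# data_received() above hand-rolls the header/body split: it accumulates raw
# bytes, cuts them into CRLF-terminated lines, treats an empty line as the end
# of the header block, and keeps any incomplete remainder in _message_tail for
# the next TCP chunk. A standalone sketch of that buffering rule (the helper
# name is an illustrative assumption, not aiohttp API):
def split_header_block(buffered, data):
    """Return (header_lines, leftover) once the blank line arrives,
    else (None, bytes_to_buffer)."""
    data = buffered + data
    end = data.find(b'\r\n\r\n')
    if end < 0:
        # header block not complete yet; buffer everything for the next chunk
        return None, data
    head, leftover = data[:end], data[end + 4:]
    return head.split(b'\r\n'), leftover


# split_header_block(b'', b'GET / HTTP/1.1\r\nHost: a\r\n\r\nBODY')
#   -> ([b'GET / HTTP/1.1', b'Host: a'], b'BODY')
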
def parse_headers(self, lines):
    """Parses RFC 2822 headers from a list of header lines.

    Line continuations are supported. Returns a multidict of header
    name/value pairs (names upper-cased), the connection-close flag
    and the content encoding.
    """
    close_conn = None
    encoding = None
    headers = []

    lines_idx = 1
    line = lines[1]

    while line:
        header_length = len(line)

        # Parse initial header name : value pair.
        try:
            name, value = line.split(':', 1)
        except ValueError:
            raise errors.InvalidHeader(line) from None

        name = name.strip(' \t').upper()
        if HDRRE.search(name):
            raise errors.InvalidHeader(name)

        # next line
        lines_idx += 1
        line = lines[lines_idx]

        # consume continuation lines
        continuation = line and line[0] in CONTINUATION

        if continuation:
            value = [value]
            while continuation:
                header_length += len(line)
                if header_length > self.max_field_size:
                    raise errors.LineTooLong(
                        'limit request headers fields size')
                value.append(line)

                # next line
                lines_idx += 1
                line = lines[lines_idx]
                continuation = line[0] in CONTINUATION
            value = '\r\n'.join(value)
        else:
            if header_length > self.max_field_size:
                raise errors.LineTooLong(
                    'limit request headers fields size')

        value = value.strip()

        # keep-alive and encoding
        if name == 'CONNECTION':
            v = value.lower()
            if v == 'close':
                close_conn = True
            elif v == 'keep-alive':
                close_conn = False
        elif name == 'CONTENT-ENCODING':
            enc = value.lower()
            if enc in ('gzip', 'deflate'):
                encoding = enc

        headers.append((name, value))

    return multidict.MultiDict(headers), close_conn, encoding
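

# parse_headers() above treats a line that starts with a space or tab as a
# continuation (obs-fold) of the previous header value. A minimal standalone
# sketch of that unfolding rule; it joins folded parts with a single space,
# whereas the parser above preserves the CRLF, and it assumes lines is the
# header block already split on CRLF and terminated by an empty string.
def unfold_headers(lines):
    headers = []
    for line in lines:
        if not line:
            break
        if line[0] in (' ', '\t') and headers:
            # continuation line: extend the previous header's value
            name, value = headers[-1]
            headers[-1] = (name, value + ' ' + line.strip())
        else:
            name, value = line.split(':', 1)
            headers.append((name.strip().upper(), value.strip()))
    return headers


# unfold_headers(['Host: example.com',
#                 'X-Folded: first part',
#                 '\tsecond part',
#                 '']) ->
#   [('HOST', 'example.com'), ('X-FOLDED', 'first part second part')]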