Example #1
    def data_received(self, data):
        self._raw.put_nowait(data)  # keep every raw chunk as received
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)
        if self._is_proxy:  # when proxying, do not parse; just pass the data through
            return
        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # requests count
        self.state['requests_count'] = self.state['requests_count'] + 1

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserInvalidMethodError:  # e.g. a CONNECT request
            pass
        except HttpParserUpgrade:  # CONNECT / protocol upgrade request
            pass
        except HttpParserError:
            message = 'Bad Request'
            if self._debug:
                message += '\n' + traceback.format_exc()
            exception = InvalidUsage(message)
            self.write_error(exception)
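
For context, a minimal sketch of how httptools drives these callbacks outside of Sanic: HttpRequestParser, feed_data and the on_* hooks are the real httptools API, while the EchoHeaders class and the sample request bytes are purely illustrative.

    from httptools import HttpRequestParser

    class EchoHeaders:
        """Illustrative protocol object; httptools calls the on_* hooks on it."""

        def __init__(self):
            self.headers = []
            self.parser = HttpRequestParser(self)

        def on_header(self, name: bytes, value: bytes):
            # Invoked once per header line, with raw bytes
            self.headers.append((name.decode(), value.decode()))

        def on_headers_complete(self):
            print("headers complete:", self.headers)

    proto = EchoHeaders()
    proto.parser.feed_data(b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")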
Example #2
    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # requests count
        self.state['requests_count'] = self.state['requests_count'] + 1

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            message = 'Bad Request'
            if self._debug:
                message += '\n' + traceback.format_exc()
            exception = InvalidUsage(message)
            self.write_error(exception)
Example #3
    def on_header(self, name, value):
        """
        补全 HTTP 请求的 head 信息
        """
        if name == b'Content-Length' and int(value) > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        self.headers.append((name.decode(), value.decode('utf-8')))
Example #4
    def on_header(self, name, value):
        self._header_fragment += name

        if value is not None:
            if self._header_fragment == b'Content-Length' \
                    and int(value) > self.request_max_size:
                exception = PayloadTooLarge('Payload Too Large')
                self.write_error(exception)

            self.headers.append(
                (self._header_fragment.decode().casefold(), value.decode()))

            self._header_fragment = b''
Example #5
    def on_header(self, name, value):
        self._header_fragment += name

        if value is not None:
            if (self._header_fragment == b"Content-Length"
                    and int(value) > self.request_max_size):
                self.write_error(PayloadTooLarge("Payload Too Large"))
            try:
                value = value.decode()
            except UnicodeDecodeError:
                value = value.decode("latin_1")
            self.headers.append(
                (self._header_fragment.decode().casefold(), value))

            self._header_fragment = b""
Example #6
    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            exception = InvalidUsage('Bad Request')
            self.write_error(exception)
Example #7
    def data_received(self, data):
        """
        接受数据
        """
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:    # request payload too large
            # PayloadTooLarge is added in exceptions.py
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        # Create the parser the first time data is received
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # Parse the request chunk
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            exception = InvalidUsage('Bad Request')
            self.write_error(exception)
Example #8
    def on_header(self, name, value):
        if name == b'Content-Length' and int(value) > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        self.headers.append((name.decode().casefold(), value.decode()))
Example #9
    async def read(self) -> Optional[bytes]:
        """
        Read some bytes of request body.
        """

        # Send a 100-continue if needed
        if self.expecting_continue:
            self.expecting_continue = False
            await self._send(HTTP_CONTINUE)

        # Receive request body chunk
        buf = self.recv_buffer
        if self.request_bytes_left == 0 and self.request_body == "chunked":
            # Process a chunk header: \r\n<size>[;<chunk extensions>]\r\n
            while True:
                pos = buf.find(b"\r\n", 3)

                if pos != -1:
                    break

                if len(buf) > 64:
                    self.keep_alive = False
                    raise InvalidUsage("Bad chunked encoding")

                await self._receive_more()

            try:
                size = int(buf[2:pos].split(b";", 1)[0].decode(), 16)
            except Exception:
                self.keep_alive = False
                raise InvalidUsage("Bad chunked encoding")

            del buf[:pos + 2]

            if size <= 0:
                self.request_body = None

                if size < 0:
                    self.keep_alive = False
                    raise InvalidUsage("Bad chunked encoding")

                return None

            self.request_bytes_left = size
            self.request_bytes += size

        # Request size limit
        if self.request_bytes > self.request_max_size:
            self.keep_alive = False
            raise PayloadTooLarge("Request body exceeds the size limit")

        # End of request body?
        if not self.request_bytes_left:
            self.request_body = None
            return None

        # At this point we are good to read/return up to request_bytes_left
        if not buf:
            await self._receive_more()

        data = bytes(buf[:self.request_bytes_left])
        size = len(data)

        del buf[:size]

        self.request_bytes_left -= size

        return data
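
Because read() returns None at end of body and raises on oversized or malformed input, callers typically drain it in a loop. A usage sketch; consume_body is a hypothetical helper, not part of the code above:

    async def consume_body(stream) -> bytes:
        # Drain the request body using the read() coroutine shown above
        body = b""
        while True:
            chunk = await stream.read()
            if chunk is None:        # end of body (or no body at all)
                break
            body += chunk
        return body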
Example #10
    async def http1_request_header(self):
        """
        Receive and parse request header into self.request.
        """
        HEADER_MAX_SIZE = min(8192, self.request_max_size)
        # Receive until full header is in buffer
        buf = self.recv_buffer
        pos = 0

        while True:
            pos = buf.find(b"\r\n\r\n", pos)
            if pos != -1:
                break

            pos = max(0, len(buf) - 3)
            if pos >= HEADER_MAX_SIZE:
                break

            await self._receive_more()

        if pos >= HEADER_MAX_SIZE:
            raise PayloadTooLarge("Request header exceeds the size limit")

        # Parse header content
        try:
            head = buf[:pos]
            raw_headers = head.decode(errors="surrogateescape")
            reqline, *split_headers = raw_headers.split("\r\n")
            method, self.url, protocol = reqline.split(" ")

            if protocol == "HTTP/1.1":
                self.keep_alive = True
            elif protocol == "HTTP/1.0":
                self.keep_alive = False
            else:
                raise Exception  # Raise a Bad Request on try-except

            self.head_only = method.upper() == "HEAD"
            request_body = False
            headers = []

            for name, value in (h.split(":", 1) for h in split_headers):
                name, value = h = name.lower(), value.lstrip()

                if name in ("content-length", "transfer-encoding"):
                    request_body = True
                elif name == "connection":
                    self.keep_alive = value.lower() == "keep-alive"

                headers.append(h)
        except Exception:
            raise InvalidUsage("Bad Request")

        headers_instance = Header(headers)
        self.upgrade_websocket = (headers_instance.get(
            "upgrade", "").lower() == "websocket")

        # Prepare a Request object
        request = self.protocol.request_class(
            url_bytes=self.url.encode(),
            headers=headers_instance,
            head=bytes(head),
            version=protocol[5:],
            method=method,
            transport=self.protocol.transport,
            app=self.protocol.app,
        )

        # Prepare for request body
        self.request_bytes_left = self.request_bytes = 0
        if request_body:
            headers = request.headers
            expect = headers.get("expect")

            if expect is not None:
                if expect.lower() == "100-continue":
                    self.expecting_continue = True
                else:
                    raise HeaderExpectationFailed(f"Unknown Expect: {expect}")

            if headers.get("transfer-encoding") == "chunked":
                self.request_body = "chunked"
                pos -= 2  # One CRLF stays in buffer
            else:
                self.request_body = True
                self.request_bytes_left = self.request_bytes = int(
                    headers["content-length"])

        # Remove header and its trailing CRLF
        del buf[:pos + 4]
        self.stage = Stage.HANDLER
        self.request, request.stream = request, self
        self.protocol.state["requests_count"] += 1
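
The request-line and header parsing inside the try block can be hard to follow amid the protocol plumbing. A standalone sketch of just that split, run on a made-up request head (the logic mirrors the code above):

    head = b"GET /path HTTP/1.1\r\nHost: example.com\r\nContent-Length: 0"
    raw_headers = head.decode(errors="surrogateescape")
    reqline, *split_headers = raw_headers.split("\r\n")
    method, url, protocol = reqline.split(" ")
    headers = [
        (name.lower(), value.lstrip())
        for name, value in (h.split(":", 1) for h in split_headers)
    ]
    print(method, url, protocol)  # GET /path HTTP/1.1
    print(headers)  # [('host', 'example.com'), ('content-length', '0')]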
Example #11
    async def read(self) -> Optional[bytes]:  # no cov
        """
        Read some bytes of request body.
        """

        # Send a 100-continue if needed
        if self.expecting_continue:
            self.expecting_continue = False
            await self._send(HTTP_CONTINUE)

        # Receive request body chunk
        buf = self.recv_buffer
        if self.request_bytes_left == 0 and self.request_body == "chunked":
            # Process a chunk header: \r\n<size>[;<chunk extensions>]\r\n
            while True:
                pos = buf.find(b"\r\n", 3)

                if pos != -1:
                    break

                if len(buf) > 64:
                    self.keep_alive = False
                    raise InvalidUsage("Bad chunked encoding")

                await self._receive_more()

            try:
                size = int(buf[2:pos].split(b";", 1)[0].decode(), 16)
            except Exception:
                self.keep_alive = False
                raise InvalidUsage("Bad chunked encoding")

            if size <= 0:
                self.request_body = None

                if size < 0:
                    self.keep_alive = False
                    raise InvalidUsage("Bad chunked encoding")

                # Consume CRLF, chunk size 0 and the two CRLF that follow
                pos += 4
                # Might need to wait for the final CRLF
                while len(buf) < pos:
                    await self._receive_more()
                del buf[:pos]
                return None

            # Remove CRLF, chunk size and the CRLF that follows
            del buf[:pos + 2]

            self.request_bytes_left = size
            self.request_bytes += size

        # Request size limit
        if self.request_bytes > self.request_max_size:
            self.keep_alive = False
            raise PayloadTooLarge("Request body exceeds the size limit")

        # End of request body?
        if not self.request_bytes_left:
            self.request_body = None
            return None

        # At this point we are good to read/return up to request_bytes_left
        if not buf:
            await self._receive_more()

        data = bytes(buf[:self.request_bytes_left])
        size = len(data)

        del buf[:size]

        self.request_bytes_left -= size

        await self.dispatch(
            "http.lifecycle.read_body",
            inline=True,
            context={"body": data},
        )

        return data
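
For reference, the chunk-header handling above expects a "\r\n<hex size>[;extensions]\r\n" prefix: the size is parsed as base-16 and anything after ";" is discarded. A minimal illustration with a made-up buffer:

    buf = bytearray(b"\r\n1a;name=value\r\n")
    pos = buf.find(b"\r\n", 3)                              # end of the size line
    size = int(buf[2:pos].split(b";", 1)[0].decode(), 16)   # hex chunk size
    print(size)                                             # -> 26 bytes in this chunk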