def _read_chunked(rfile, limit=sys.maxsize):
    """
    Stream the chunks of an HTTP/1.1 chunked-transfer-encoded body.

    Args:
        rfile: the input file
        limit: maximum number of body bytes to accept

    Yields:
        The payload of each non-empty chunk, in order.

    Raises:
        exceptions.HttpException: on premature EOF or when the body
            grows beyond *limit*.
        exceptions.HttpSyntaxException: on malformed chunk framing.
    """
    consumed = 0
    while True:
        size_line = rfile.readline(128)
        if not size_line:
            raise exceptions.HttpException("Connection closed prematurely")
        # Bare line separators between chunks carry no data; skip them.
        if size_line in (b"\r\n", b"\n"):
            continue
        try:
            chunk_len = int(size_line, 16)
        except ValueError:
            raise exceptions.HttpSyntaxException(
                "Invalid chunked encoding length: {}".format(size_line))
        consumed += chunk_len
        if consumed > limit:
            raise exceptions.HttpException(
                "HTTP Body too large. Limit is {}, "
                "chunked content longer than {}".format(limit, consumed))
        data = rfile.read(chunk_len)
        # Every chunk must be terminated by CRLF.
        if rfile.readline(5) != b"\r\n":
            raise exceptions.HttpSyntaxException("Malformed chunked body")
        if chunk_len == 0:
            # The zero-length chunk marks the end of the body.
            return
        yield data
def read_body(rfile, expected_size, limit=None, max_chunk_size=4096):
    """
    Read an HTTP message body.

    Args:
        rfile: The input stream.
        expected_size: The expected body size (see :py:meth:`expected_body_size`).
            ``None`` selects chunked transfer encoding; a negative value
            means "read until EOF".
        limit: Maximum body size. ``None``, ``0`` or a negative value
            disable the limit.
        max_chunk_size: Maximum chunk size that gets yielded.

    Returns:
        A generator that yields byte chunks of the content.

    Raises:
        exceptions.HttpException, if an error occurs

    Caveats:
        max_chunk_size is not considered if the transfer encoding is chunked.
    """
    if not limit or limit < 0:
        limit = sys.maxsize
    if not max_chunk_size:
        max_chunk_size = limit

    if expected_size is None:
        # Chunked transfer encoding: the framing dictates chunk sizes.
        yield from _read_chunked(rfile, limit)
    elif expected_size >= 0:
        # limit was normalized above, so it can never be None here —
        # the original redundant `limit is not None` guard is dropped.
        if expected_size > limit:
            raise exceptions.HttpException(
                "HTTP Body too large. "
                "Limit is {}, content length was advertised as {}".format(
                    limit, expected_size))
        bytes_left = expected_size
        while bytes_left:
            chunk_size = min(bytes_left, max_chunk_size)
            content = rfile.read(chunk_size)
            if len(content) < chunk_size:
                raise exceptions.HttpException("Unexpected EOF")
            yield content
            bytes_left -= chunk_size
    else:
        # No advertised size: read until EOF, but never beyond the limit.
        bytes_left = limit
        while bytes_left:
            chunk_size = min(bytes_left, max_chunk_size)
            content = rfile.read(chunk_size)
            if not content:
                return
            yield content
            bytes_left -= chunk_size
        # We consumed exactly `limit` bytes; anything further is too much.
        not_done = rfile.read(1)
        if not_done:
            raise exceptions.HttpException(
                "HTTP body too large. Limit is {}.".format(limit))
def http_connect(self, connect_to):
    """
    Issue an HTTP CONNECT to the proxy for ``connect_to`` — a
    ``(host, port)`` pair — and verify that it answers 200.

    Raises:
        PathocError: if the proxy rejects or mishandles the CONNECT.
    """
    host, port = connect_to
    connect_req = net_http.Request(
        host=host,
        port=port,
        method=b'CONNECT',
        scheme=b"",
        authority=f"{host}:{port}".encode(),
        path=b"",
        http_version=b'HTTP/1.1',
        headers=((b"Host", host.encode("idna")),),
        content=b'',
        trailers=None,
        timestamp_start=0,
        timestamp_end=0,
    )
    self.wfile.write(net_http.http1.assemble_request(connect_req))
    self.wfile.flush()
    try:
        resp = self.protocol.read_response(self.rfile, connect_req)
        if resp.status_code != 200:
            raise exceptions.HttpException("Unexpected status code: %s" % resp.status_code)
    except exceptions.HttpException as e:
        raise PathocError(
            "Proxy CONNECT failed: %s" % repr(e)
        )
def assemble_request(request):
    """
    Assemble a complete HTTP/1 request (head + body) as bytes.

    Args:
        request: a request object whose ``.data`` carries headers,
            content and trailers.

    Raises:
        exceptions.HttpException: if the content is missing, in which
            case nothing sensible can be assembled.
    """
    if request.data.content is None:
        raise exceptions.HttpException(
            "Cannot assemble flow with missing content")
    head = assemble_request_head(request)
    # Pass trailers through, matching assemble_response and the
    # three-argument signature of assemble_body.
    body = b"".join(
        assemble_body(
            request.data.headers, [request.data.content],
            request.data.trailers))
    return head + body
def assemble_response(response):
    """
    Assemble a complete HTTP/1 response (head + body) as bytes.

    Raises:
        exceptions.HttpException: if the response content is missing.
    """
    if response.data.content is None:
        raise exceptions.HttpException(
            "Cannot assemble flow with missing content")
    data = response.data
    body_chunks = assemble_body(data.headers, [data.content], data.trailers)
    return assemble_response_head(response) + b"".join(body_chunks)
def read_raw_frame(rfile):
    """
    Read one raw HTTP/2 frame from *rfile* without parsing it.

    Args:
        rfile: input with a ``safe_read(n)`` method.

    Returns:
        A two-element list ``[header, body]``: the 9-byte frame header
        and the frame payload.

    Raises:
        exceptions.HttpException: if the length field spells "HTT",
            i.e. the peer is actually speaking HTTP/1.1.
    """
    header = rfile.safe_read(9)
    # The first three header bytes are the big-endian payload length;
    # int.from_bytes replaces the codecs hex round-trip.
    length = int.from_bytes(header[:3], "big")
    if length == 4740180:  # 0x485454 == b"HTT": start of an HTTP/1.1 line
        raise exceptions.HttpException(
            "Length field looks more like HTTP/1.1:\n{}".format(rfile.read(-1)))
    body = rfile.safe_read(length)
    return [header, body]
def http_connect(self, connect_to):
    """
    Send a raw HTTP/1.1 CONNECT for ``connect_to`` — a ``(host, port)``
    pair — and check that the proxy answers 200.

    Raises:
        PathocError: if the proxy CONNECT fails.
    """
    host, port = connect_to
    request_line = b'CONNECT %s:%d HTTP/1.1\r\n' % (host.encode("idna"), port)
    # A blank line terminates the (header-less) request.
    self.wfile.write(request_line + b'\r\n')
    self.wfile.flush()
    try:
        resp = self.protocol.read_response(self.rfile, treq(method=b"CONNECT"))
        if resp.status_code != 200:
            raise exceptions.HttpException(
                "Unexpected status code: %s" % resp.status_code)
    except exceptions.HttpException as e:
        raise PathocError("Proxy CONNECT failed: %s" % repr(e))
def assemble_body(headers, body_chunks, trailers):
    """
    Yield the on-the-wire representation of an HTTP/1 message body.

    If the transfer-encoding header contains "chunked", every non-empty
    chunk is framed with a hex length line, followed by the terminating
    zero-length chunk (which carries the trailers, if any). Otherwise
    the chunks are passed through verbatim; trailers are only legal
    with chunked encoding.
    """
    transfer_encoding = headers.get("transfer-encoding", "").lower()
    if "chunked" not in transfer_encoding:
        if trailers:
            raise exceptions.HttpException(
                "Sending HTTP/1.1 trailer headers requires transfer-encoding: chunked"
            )
        yield from body_chunks
        return
    for data in body_chunks:
        if not data:
            # Zero-length data would prematurely terminate the body.
            continue
        yield b"%x\r\n%s\r\n" % (len(data), data)
    if trailers:
        yield b"0\r\n%s\r\n" % trailers
    else:
        yield b"0\r\n\r\n"
def http_connect(self, connect_to):
    """
    Ask the upstream proxy to CONNECT to ``connect_to`` — a
    ``(host, port)`` pair — and verify the 200 reply.

    Raises:
        PathocError: if the proxy does not answer 200.
    """
    host, port = connect_to
    connect_req = net_http.Request(
        first_line_format='authority',
        method='CONNECT',
        scheme=None,
        host=host.encode("idna"),
        port=port,
        path=None,
        http_version='HTTP/1.1',
        content=b'',
    )
    self.wfile.write(net_http.http1.assemble_request(connect_req))
    self.wfile.flush()
    try:
        resp = self.protocol.read_response(self.rfile, connect_req)
        if resp.status_code != 200:
            raise exceptions.HttpException(
                "Unexpected status code: %s" % resp.status_code)
    except exceptions.HttpException as e:
        raise PathocError("Proxy CONNECT failed: %s" % repr(e))
def read_frame(rfile, parse=True):
    """
    Reads a full HTTP/2 frame from a file-like object.

    Args:
        rfile: input with a ``safe_read(n)`` method.
        parse: if True, also parse the frame via ``Frame``.

    Returns:
        A ``(frame, raw_bytes)`` tuple of the parsed frame (``None``
        when *parse* is False) and the consumed bytes.

    Raises:
        exceptions.HttpException: if the length field spells "HTT",
            i.e. the peer actually sent an HTTP/1.1 response.
    """
    header = rfile.safe_read(9)
    # The first three header bytes are the big-endian payload length;
    # int.from_bytes replaces the codecs hex round-trip.
    length = int.from_bytes(header[:3], "big")
    if length == 4740180:  # 0x485454 == b"HTT": start of an HTTP/1.1 line
        raise exceptions.HttpException(
            "Length field looks more like HTTP/1.1:\n{}".format(
                rfile.read(-1)))
    body = rfile.safe_read(length)
    if parse:
        frame, _ = Frame.parse_frame_header(header)
        frame.parse_body(memoryview(body))
    else:
        frame = None
    return frame, b''.join([header, body])