def _read_chunked(rfile, limit=sys.maxsize): """ Read a HTTP body with chunked transfer encoding. Args: rfile: the input file limit: A positive integer """ total = 0 while True: line = rfile.readline(128) if line == b"": raise exceptions.HttpException("Connection closed prematurely") if line != b"\r\n" and line != b"\n": try: length = int(line, 16) except ValueError: raise exceptions.HttpSyntaxException("Invalid chunked encoding length: {}".format(line)) total += length if total > limit: raise exceptions.HttpException( "HTTP Body too large. Limit is {}, " "chunked content longer than {}".format(limit, total) ) chunk = rfile.read(length) suffix = rfile.readline(5) if suffix != b"\r\n": raise exceptions.HttpSyntaxException("Malformed chunked body") if length == 0: return yield chunk
def read_body(rfile, expected_size, limit=None, max_chunk_size=4096):
    """
    Read an HTTP message body

    Args:
        rfile: The input stream
        expected_size: The expected body size (see :py:meth:`expected_body_size`)
        limit: Maximum body size
        max_chunk_size: Maximum chunk size that gets yielded

    Returns:
        A generator that yields byte chunks of the content.

    Raises:
        exceptions.HttpException, if an error occurs

    Caveats:
        max_chunk_size is not considered if the transfer encoding is chunked.
    """
    # Normalize limit: 0, None, or a negative value all mean "unlimited".
    if not limit or limit < 0:
        limit = sys.maxsize
    if not max_chunk_size:
        max_chunk_size = limit

    if expected_size is None:
        # Chunked transfer encoding; the size limit is enforced inside
        # _read_chunked.
        for x in _read_chunked(rfile, limit):
            yield x
    elif expected_size >= 0:
        # `limit` is guaranteed to be an int here (normalized above), so the
        # original dead "limit is not None" guard has been removed.
        if expected_size > limit:
            raise exceptions.HttpException(
                "HTTP Body too large. "
                "Limit is {}, content length was advertised as {}".format(
                    limit, expected_size)
            )
        bytes_left = expected_size
        while bytes_left:
            chunk_size = min(bytes_left, max_chunk_size)
            content = rfile.read(chunk_size)
            # A short read means the peer closed before sending the
            # advertised Content-Length worth of data.
            if len(content) < chunk_size:
                raise exceptions.HttpException("Unexpected EOF")
            yield content
            bytes_left -= chunk_size
    else:
        # Unknown size (e.g. HTTP/1.0 close-delimited body): read until EOF,
        # but never accept more than `limit` bytes.
        bytes_left = limit
        while bytes_left:
            chunk_size = min(bytes_left, max_chunk_size)
            content = rfile.read(chunk_size)
            if not content:
                return
            yield content
            bytes_left -= chunk_size
        # Budget exhausted: one extra readable byte means the body exceeds
        # the limit.
        not_done = rfile.read(1)
        if not_done:
            raise exceptions.HttpException(
                "HTTP body too large. Limit is {}.".format(limit))
def assemble_request(request):
    """
    Serialize a request into raw bytes: head followed by the assembled body.

    Raises:
        exceptions.HttpException: if the request carries no content.
    """
    if request.data.content is None:
        raise exceptions.HttpException(
            "Cannot assemble flow with missing content")
    head_bytes = assemble_request_head(request)
    body_bytes = b"".join(
        assemble_body(request.data.headers, [request.data.content])
    )
    return head_bytes + body_bytes
def http_connect(self, connect_to):
    """
    Issue an HTTP CONNECT request to the proxy for (host, port) in
    `connect_to`, and verify a 200 response.

    Raises:
        PathocError: if the proxy rejects or fails the CONNECT.
    """
    request = 'CONNECT %s:%s HTTP/1.1\r\n' % tuple(connect_to) + '\r\n'
    self.wfile.write(request)
    self.wfile.flush()
    try:
        response = self.protocol.read_response(
            self.rfile, treq(method="CONNECT")
        )
        if response.status_code != 200:
            raise exceptions.HttpException(
                "Unexpected status code: %s" % response.status_code
            )
    except exceptions.HttpException as e:
        # six.reraise: raise a PathocError in both Python 2 and 3.
        six.reraise(
            PathocError,
            PathocError("Proxy CONNECT failed: %s" % repr(e)),
        )
def http_connect(self, connect_to):
    """
    Issue an HTTP CONNECT request to the proxy for (host, port) in
    `connect_to`, and verify a 200 response.

    Raises:
        PathocError: if the proxy rejects or fails the CONNECT.
    """
    host = connect_to[0]
    port = connect_to[1]
    # The hostname is IDNA-encoded to bytes for the wire format.
    request = b'CONNECT %s:%d HTTP/1.1\r\n' % (host.encode("idna"), port)
    self.wfile.write(request + b'\r\n')
    self.wfile.flush()
    try:
        response = self.protocol.read_response(
            self.rfile, treq(method=b"CONNECT")
        )
        if response.status_code != 200:
            raise exceptions.HttpException(
                "Unexpected status code: %s" % response.status_code
            )
    except exceptions.HttpException as e:
        raise PathocError("Proxy CONNECT failed: %s" % repr(e))