def test_parser_upgrade_response_1(self):
    """An upgrade response must raise HttpParserUpgrade carrying the
    offset of the first byte that belongs to the upgraded protocol."""
    handler = mock.Mock()
    seen_headers = {}
    handler.on_header.side_effect = seen_headers.__setitem__
    parser = httptools.HttpResponseParser(handler)

    try:
        parser.feed_data(UPGRADE_RESPONSE1)
    except httptools.HttpParserUpgrade as ex:
        offset = ex.args[0]
    else:
        self.fail('HttpParserUpgrade was not raised')

    # Everything past the reported offset is payload for the upgraded
    # protocol, not HTTP.
    self.assertEqual(UPGRADE_RESPONSE1[offset:], b'data')

    self.assertEqual(parser.get_http_version(), '1.1')
    self.assertEqual(parser.get_status_code(), 101)
    handler.on_status.assert_called_once_with(b'Switching Protocols')
    handler.on_headers_complete.assert_called_once_with()
    self.assertEqual(handler.on_header.call_count, 6)
    self.assertEqual(len(seen_headers), 6)
    handler.on_message_complete.assert_called_once_with()
def test_parser_response_1(self):
    """Head and body fed separately (via memoryview / bytearray);
    trailing garbage after the final message must be rejected."""
    handler = mock.Mock()
    seen_headers = {}
    handler.on_header.side_effect = seen_headers.__setitem__
    parser = httptools.HttpResponseParser(handler)

    # Feed the head through a memoryview to exercise buffer-protocol input.
    parser.feed_data(memoryview(RESPONSE1_HEAD))
    self.assertEqual(parser.get_http_version(), '1.1')
    self.assertEqual(parser.get_status_code(), 200)
    handler.on_status.assert_called_once_with(b'OK')
    handler.on_headers_complete.assert_called_once_with()
    self.assertEqual(handler.on_header.call_count, 8)
    self.assertEqual(len(seen_headers), 8)
    self.assertEqual(seen_headers.get(b'Connection'), b'close')
    self.assertEqual(seen_headers.get(b'Content-Type'),
                     b'text/html; charset=UTF-8')
    self.assertFalse(handler.on_body.called)

    # The body arrives as a bytearray.
    parser.feed_data(bytearray(RESPONSE1_BODY))
    handler.on_body.assert_called_once_with(RESPONSE1_BODY)
    handler.on_message_complete.assert_called_once_with()
    self.assertFalse(handler.on_url.called)
    self.assertFalse(handler.on_chunk_header.called)
    self.assertFalse(handler.on_chunk_complete.called)

    # Any bytes after the terminal response must fail to parse.
    with self.assertRaisesRegex(
            httptools.HttpParserError, 'Expected HTTP/'):
        parser.feed_data(b'12123123')
def test_full_request_lifecycle_handler_baseexception(transport, container):
    """A handler raising BaseException must still yield a 500 response
    without closing the transport."""
    protocol = aioweb.protocol.HttpProtocol(container=container)
    with unittest.mock.patch("asyncio.create_task") as create_task_mock:
        protocol.connection_made(transport)
        coro = create_task_mock.call_args.args[0]
        #
        # Start the worker coroutine; it should suspend waiting for input
        #
        coro.send(None)
        #
        # Deliver one complete request
        #
        request = b'''GET / HTTP/1.1
Host: example.com
Content-Length: 3

XYZ'''
        protocol.data_received(request.replace(b'\n', b'\r\n'))
        #
        # Instruct the dummy handler to raise, then simulate the event loop
        # rescheduling the coroutine so the handler actually runs
        #
        container.set_exception(BaseException())
        coro.send(None)
        #
        # The handler must have been invoked despite the exception
        #
        assert container._request is not None
        #
        # Inspect the request object the handler received
        #
        request = container._request
        assert isinstance(request, aioweb.request.Request)
        headers = request.headers()
        assert headers is not None
        assert isinstance(headers, dict)
        assert "Host" in headers
        assert headers["Host"] == b"example.com"
        assert request.http_version() == "1.1"
        #
        # Something must have been written back to the transport, and it
        # must parse as a valid HTTP response with status 500
        #
        assert len(transport._data) > 0
        parser_helper = ParserHelper()
        parser = httptools.HttpResponseParser(parser_helper)
        parser.feed_data(transport._data)
        assert parser.get_status_code() == 500
        #
        # The connection must stay open
        #
        assert not transport._is_closing
def reset(self):
    """Restore per-request state so this connection can be reused."""
    # httptools parsers are stateful; a fresh one is needed per request.
    self.parser = httptools.HttpResponseParser(self)
    self.headers.clear()
    self.response = None
    self.writing_paused = False
    self.writable.set()
    self._connection_lost = False
    self._pending_task = None
    self._can_release = False
def data_received(self, data):
    """Feed incoming bytes into the HTTP response parser, creating the
    parser lazily on the first chunk."""
    if self.parser is None:
        self.headers = []
        self.parser = httptools.HttpResponseParser(self)
    try:
        self.parser.feed_data(data)
    except HttpParserError:
        # NOTE(review): malformed input is only reported to stdout here;
        # the connection is left as-is.
        print('Bad Request')
def reset(self) -> None:
    """Reset all per-request state in preparation for connection reuse."""
    # httptools parsers are stateful; build a fresh one per request.
    self.parser = httptools.HttpResponseParser(self)  # type: ignore
    self.headers = []
    self.request = None
    self.response = None
    self.expect_100_continue = False
    self.writing_paused = False
    self.writable.set()
    self._connection_lost = False
    self._pending_task = None
    self._can_release = False
    self._upgraded = False
def test_parser_response_5(self):
    """Parsing must succeed when optional callbacks are disabled (None)."""
    handler = mock.Mock()
    # Disable every optional callback; only on_message_complete remains
    # an auto-created Mock attribute.
    for name in ('on_status', 'on_header', 'on_body',
                 'on_headers_complete', 'on_chunk_header',
                 'on_chunk_complete'):
        setattr(handler, name, None)
    parser = httptools.HttpResponseParser(handler)
    parser.feed_data(RESPONSE1_HEAD)
    parser.feed_data(RESPONSE1_BODY)
    handler.on_message_complete.assert_called_once_with()
def test_parser_response_cb_on_message_complete_1(self):
    """An exception raised inside on_message_complete must surface as
    HttpParserCallbackError with the original error chained as context."""
    class Error(Exception):
        pass

    handler = mock.Mock()
    handler.on_message_complete.side_effect = Error()
    parser = httptools.HttpResponseParser(handler)
    try:
        parser.feed_data(RESPONSE1_HEAD + RESPONSE1_BODY)
    except httptools.HttpParserCallbackError as ex:
        self.assertIsInstance(ex.__context__, Error)
    else:
        self.fail('HttpParserCallbackError was not raised')
def test_parser_response_3(self):
    """Every user callback that raises must be reported as a
    HttpParserCallbackError, one subtest per callback."""
    callbacks = {'on_header', 'on_headers_complete',
                 'on_body', 'on_message_complete'}
    for cbname in callbacks:
        with self.subTest('{} callback fails correctly'.format(cbname)):
            with self.assertRaisesRegex(httptools.HttpParserCallbackError,
                                        'callback error'):
                handler = mock.Mock()
                getattr(handler, cbname).side_effect = Exception()
                parser = httptools.HttpResponseParser(handler)
                parser.feed_data(RESPONSE1_HEAD + RESPONSE1_BODY)
def __init__(self, path, host="127.0.0.1", port=None, timeout=10):
    """Start an asynchronous connection to *host*:*port* (default 80) and
    prepare to parse the HTTP response for *path*.

    A timeout handle is armed when *timeout* is truthy.
    """
    self._buffers = []
    self._transport = None
    self._host = host
    self._path = path
    self._waiter = asyncio.Future()
    self._parser = httptools.HttpResponseParser(self)
    self._timeout_handle = None

    event_loop = asyncio.get_running_loop()
    # Kick off the connection in the background; _connect_cb handles
    # both success and failure of the connect attempt.
    connect = event_loop.create_connection(lambda: self, host, port or 80)
    event_loop.create_task(connect).add_done_callback(self._connect_cb)
    if timeout:
        self._timeout_handle = event_loop.call_later(
            timeout, self._on_timeout)
def __init__(self, loop, pool):
    """Initialize a pooled connection bound to *loop* and owned by *pool*."""
    self.loop = loop
    # Weak reference so the pool can be garbage-collected independently
    # of its connections.
    self.pool = weakref.ref(pool)

    # connection-level state
    self.transport = None
    self.open = False
    self.writing_paused = False
    self.writable = asyncio.Event()
    self.ready = asyncio.Event()
    self.response_ready = asyncio.Event()

    # per request state
    self.headers = []
    self.response = None
    self.parser = httptools.HttpResponseParser(self)
    self._connection_lost = False
    self._pending_task = None
    self._can_release = False
async def http_request(self, uri: str, method: str = "GET",
                       headers: list = None, response_cls=None):
    """Send an HTTP/1.1 request for *uri* over this connection and return
    the parsed response object.

    *headers* is an optional list of raw header lines (str or bytes);
    *response_cls* defaults to HTTPResponse and receives parser callbacks.
    Raises Exception for unknown URL schemes or a truncated response.
    """
    import httptools

    response_cls = response_cls or HTTPResponse
    parsed = parse.urlparse(uri)
    host, _, port_text = parsed.netloc.partition(":")
    try:
        port = int(port_text)
    except ValueError:
        # No explicit port in the URL -- fall back to the scheme default.
        if parsed.scheme == "http":
            port = 80
        elif parsed.scheme == "https":
            port = 443
        else:
            raise Exception(f"unknown scheme: {parsed.scheme}")

    await self.connect((host, port))
    await self.init()

    header_lines = [f"Host: {self.target_address}".encode()]
    for header in headers or ():
        header_lines.append(
            header.encode() if isinstance(header, str) else header)

    # Request line uses the absolute URL (proxy-style request target).
    request_bytes = b"%b %b %b\r\n%b\r\n\r\n" % (
        method.upper().encode(),
        parsed.geturl().encode(),
        b"HTTP/1.1",
        b"\r\n".join(header_lines),
    )
    await self.sendall(request_bytes)

    response = response_cls(self)
    parser = httptools.HttpResponseParser(response)
    while not response.done:
        data = await self.recv(gvars.PACKET_SIZE)
        if not data:
            raise Exception("Incomplete response")
        parser.feed_data(data)
    return response
def __init__(self, loop, pool) -> None:
    """Initialize a pooled connection bound to *loop* and owned by *pool*."""
    self.loop = loop
    # Weak reference so the pool can be collected independently of its
    # connections.
    self.pool = weakref.ref(pool)

    # connection-level state
    self.transport = None
    self.open = False
    self.writing_paused = False
    self.writable = asyncio.Event()
    self.ready = asyncio.Event()
    self.response_ready = asyncio.Event()
    self.request_timeout = 20

    # per request state
    self.expect_100_continue = False
    self.request = None
    self.headers = []
    self.response = None
    self.parser = httptools.HttpResponseParser(self)  # type: ignore
    self._connection_lost = False
    self._pending_task = None
    self._can_release = False
    self._upgraded = False
async def request(method, url, headers=None, data=""):
    """Perform a single HTTP/1.1 request and return the parsed Response.

    Args:
        method:  HTTP method string, e.g. "GET".
        url:     Full URL; https URLs get a default SSL context.
        headers: Optional mapping of extra header fields; entries override
                 the built-in defaults (User-Agent, Host, Content-Length).
        data:    Request body as a string.

    Returns:
        A Response with status_code populated from the parser.
    """
    sslcontext = None
    parsed_url = httptools.parse_url(url.encode())
    ip = parsed_url.host.decode()

    if parsed_url.schema == b"https":
        # SERVER_AUTH: we are a client verifying the server's certificate.
        # (The original used CLIENT_AUTH, which configures a *server-side*
        # context, and only created a context when no port was given.)
        sslcontext = ssl.create_default_context(
            purpose=ssl.Purpose.SERVER_AUTH)
        default_port = 443
    else:
        default_port = 80

    port = parsed_url.port
    # Depending on the httptools version the port may come back as an int
    # or as bytes; int() accepts both (the original's .decode() crashed on
    # the int form).
    port = int(port) if port else default_port

    path = (parsed_url.path or b"/").decode()
    reader, writer = await asyncio.wait_for(
        asyncio.open_connection(ip, port, ssl=sslcontext), timeout=30)

    # Build the header set; caller-supplied headers override the defaults.
    # (The original shadowed the `headers` parameter with this dict, so
    # caller headers were silently discarded.)
    request_headers = {
        "User-Agent": "aioclient",
        "Host": ip,
        "Content-Length": len(data),
    }
    request_headers.update(headers or {})
    header_raw = "".join(f"{k}:{v}\r\n" for k, v in request_headers.items())
    http_raw = f"{method} {path} HTTP/1.1\r\n{header_raw}\r\n{data}".encode()

    response = Response()
    parser = httptools.HttpResponseParser(response)
    writer.write(http_raw)
    while True:
        chunk = await reader.read(100)
        parser.feed_data(chunk)
        # NOTE(review): a short read is used as an end-of-response
        # heuristic; responses arriving in sub-100-byte chunks may be
        # truncated. Kept pending a protocol-aware completion signal.
        if len(chunk) < 100:
            break
    response.status_code = parser.get_status_code()
    writer.close()
    return response
def __init__(self, url: str, connection: Connection, request: Request,
             chunk_size: int = 1 * 1024 * 1024, decode: bool = True):
    """Incrementally parsed HTTP response for *request* fetched via
    *connection*.

    Args:
        url:        The URL this response belongs to.
        connection: Underlying connection the bytes are read from.
        request:    The originating Request object.
        chunk_size: Read granularity in bytes (default 1 MiB).
        decode:     Whether body content should be decoded.
    """
    self._connection = connection
    self._headers = Headers()
    self._content = bytearray()
    # noinspection PyArgumentList
    # C-accelerated httptools parser drives the response parsing.
    self._parser = httptools.HttpResponseParser(self)
    # Coordinates parsing progress; we start out expecting the headers.
    self._parser_status = ResponseStatus.PENDING_HEADERS
    self._cookies = None
    self._status_code = None
    self._chunk_size = chunk_size
    self._decode = decode
    self._decoder = None
    self._encoding = None
    self.request = request
    self.url = url
def __init__(self, sock, *, buffer_size=DEFAULT_BUFFER_SIZE, timeout=None):
    """Wrap *sock* with an incremental HTTP response reader."""
    self._sock = sock
    self._parser = httptools.HttpResponseParser(self)

    # options
    self.buffer_size = buffer_size
    self.timeout = timeout

    # primary attrs, filled in by parser callbacks
    self.version = None
    self.status = None
    self.reason = b''
    self.headers = []

    # temp attrs used while parsing is in flight
    self.current_buffer_size = self.buffer_size
    self.header_name = b''
    self.body_chunks = []

    # state flags
    self.started = False
    self.headers_completed = False
    self.completed = False
def __init__(self, consul_backend):
    """Protocol that parses HTTP responses coming from *consul_backend*."""
    self._consul_backend = consul_backend
    self._parser = httptools.HttpResponseParser(self)
    self._transport = None
    # presumably tracks Consul's blocking-query modify index — verify
    # against the response-handling code
    self._last_modify_index = None
    self._buffers = []
def _build_parser(callbacks):
    """Return a new httptools response parser wired to *callbacks*."""
    parser = httptools.HttpResponseParser(callbacks)
    return parser
def test_pipelining(transport, container):
    """Two pipelined requests delivered before the worker coroutine runs
    must both be handled, in order, producing two responses."""
    protocol = aioweb.protocol.HttpProtocol(container=container)
    with unittest.mock.patch("asyncio.create_task") as create_task_mock:
        protocol.connection_made(transport)
        coro = create_task_mock.call_args.args[0]
        #
        # Deliver a first, complete request before the coroutine ever runs
        #
        request = b'''GET / HTTP/1.1
Host: example1.com
Content-Length: 3

XYZ'''
        protocol.data_received(request.replace(b'\n', b'\r\n'))
        assert protocol.get_state() == aioweb.protocol.ConnectionState.PENDING
        #
        # Deliver a second complete request right behind it
        #
        request = b'''GET / HTTP/1.1
Host: example2.com
Content-Length: 3

123'''
        protocol.data_received(request.replace(b'\n', b'\r\n'))
        assert protocol.get_state() == aioweb.protocol.ConnectionState.PENDING
        #
        # Simulate the event loop scheduling the coroutine for the first
        # time. It should block on the queue only after draining both
        # records, i.e. the handler runs twice
        #
        coro.send(None)
        assert container._request_count == 2
        #
        # Inspect both recorded requests and the replies they produced
        #
        expectations = ((b"example1.com", b"XYZ"), (b"example2.com", b"123"))
        for idx, (expected_host, expected_body) in enumerate(expectations):
            request = container._requests[idx]
            assert isinstance(request, aioweb.request.Request)
            assert request.headers()['Host'] == expected_host
            assert container._replies[idx] == expected_body
        #
        # Two responses must have been written to the transport; each must
        # parse as a valid 200 response carrying the echoed body
        #
        for idx, expected_body in enumerate((b"XYZ", b"123")):
            helper = ParserHelper()
            response_parser = httptools.HttpResponseParser(helper)
            response_parser.feed_data(transport._messages[idx])
            assert response_parser.get_status_code() == 200
            assert bytes(helper._body) == expected_body
def test_full_request_lifecycle_http11(transport, container):
    """Full HTTP/1.1 lifecycle: headers first, body completed later; the
    body future resolves once the last byte arrives and a 200 response is
    written without closing the connection."""
    protocol = aioweb.protocol.HttpProtocol(container=container)
    with unittest.mock.patch("asyncio.create_task") as create_task_mock:
        protocol.connection_made(transport)
        coro = create_task_mock.call_args.args[0]
        #
        # Start the worker coroutine; it should suspend waiting for input
        #
        coro.send(None)
        #
        # Deliver the complete header section but only one body byte
        #
        request = b'''GET / HTTP/1.1
Host: example.com
Content-Length: 3

X'''
        protocol.data_received(request.replace(b'\n', b'\r\n'))
        assert protocol.get_state() == aioweb.protocol.ConnectionState.BODY
        #
        # Rescheduling the coroutine must invoke the handler even though
        # the body is not complete yet
        #
        coro.send(None)
        assert container._request is not None
        #
        # Inspect the request object the handler received
        #
        request = container._request
        assert isinstance(request, aioweb.request.Request)
        headers = request.headers()
        assert headers is not None
        assert isinstance(headers, dict)
        assert "Host" in headers
        assert headers["Host"] == b"example.com"
        assert request.http_version() == "1.1"
        #
        # The body future must still be pending...
        #
        future = request.body().send(None)
        assert not future.done()
        #
        # ...until the remaining two bytes arrive
        #
        request = b'YZ'
        protocol.data_received(request)
        assert protocol.get_state() == aioweb.protocol.ConnectionState.PENDING
        body = future.result()
        assert body == b"XYZ"
        #
        # A response must have been written; it must parse as a valid 200
        # carrying the handler-provided body
        #
        assert len(transport._data) > 0
        parser_helper = ParserHelper()
        parser = httptools.HttpResponseParser(parser_helper)
        parser.feed_data(transport._data)
        assert parser.get_status_code() == 200
        assert parser_helper._body == b"abc"
        #
        # The connection must stay open
        #
        assert not transport._is_closing
def test_parser_response_4(self):
    """A status code outside the valid range must be rejected."""
    parser = httptools.HttpResponseParser(None)
    malformed = b'HTTP/1.1 1299 FOOSPAM\r\n'
    with self.assertRaises(httptools.HttpParserInvalidStatusError):
        parser.feed_data(malformed)
def test_parser_response_2(self):
    """feed_data() must reject str input with a TypeError."""
    parser = httptools.HttpResponseParser(None)
    with self.assertRaisesRegex(TypeError, 'a bytes-like object'):
        parser.feed_data('')