def test_https_proxy(strategy, tctx):
    """Test a CONNECT request, followed by a HTTP GET /"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    tctx.options.connection_strategy = strategy

    # Client establishes a tunnel via CONNECT first.
    (
        playbook
        >> DataReceived(tctx.client, b"CONNECT example.proxy:80 HTTP/1.1\r\n\r\n")
        << http.HttpConnectHook(Placeholder())
        >> reply()
    )
    if strategy == "eager":
        # With an eager strategy the upstream connection is opened right away.
        (
            playbook
            << OpenConnection(server)
            >> reply(None)
        )
    (
        playbook
        << SendData(tctx.client, b'HTTP/1.1 200 Connection established\r\n\r\n')
        >> DataReceived(tctx.client, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
        << layer.NextLayerHook(Placeholder())
        >> reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
    )
    if strategy == "lazy":
        # With a lazy strategy the connection is only opened once a request needs it.
        (
            playbook
            << OpenConnection(server)
            >> reply(None)
        )
    (
        playbook
        << SendData(server, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
        >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!")
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        << http.HttpResponseHook(flow)
        >> reply()
        << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!")
    )
    assert playbook
def test_cancel_during_response_hook(tctx):
    """
    Test that we properly handle the case of the following event sequence:
        - we receive a server response
        - we trigger the response hook
        - the client cancels the stream
        - the response hook completes

    Given that we have already triggered the response hook,
    we don't want to trigger the error hook.
    """
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)
    assert (
        playbook
        >> DataReceived(
            tctx.client,
            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize(),
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server, b'GET / HTTP/1.1\r\nHost: example.com\r\n\r\n')
        >> DataReceived(server, b"HTTP/1.1 204 No Content\r\n\r\n")
        << http.HttpResponseHeadersHook(flow)
        << CloseConnection(server)
        # reply(to=-2) answers the headers hook, not the most recent command.
        >> reply(to=-2)
        << http.HttpResponseHook(flow)
        # The client cancels the stream while the response hook is still pending.
        >> DataReceived(tctx.client, cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
        >> reply(to=-2)
    )
def test_response_streaming(tctx):
    """Test HTTP response streaming"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)

    def enable_streaming(flow: HTTPFlow):
        # Uppercase each streamed chunk so we can tell streaming happened.
        flow.response.stream = lambda x: x.upper()

    assert (
        Playbook(http.HttpLayer(tctx, HTTPMode.regular))
        >> DataReceived(
            tctx.client,
            b"GET http://example.com/largefile HTTP/1.1\r\nHost: example.com\r\n\r\n",
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server, b"GET /largefile HTTP/1.1\r\nHost: example.com\r\n\r\n")
        >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 6\r\n\r\nabc")
        << http.HttpResponseHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
        # The first body chunk is forwarded (transformed) before the body is complete.
        << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 6\r\n\r\nABC")
        >> DataReceived(server, b"def")
        << SendData(tctx.client, b"DEF")
        << http.HttpResponseHook(flow)
        >> reply()
    )
def test_stream_modify(tctx):
    """Test HTTP stream modification"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)

    def enable_streaming(flow: HTTPFlow):
        # Wrap every streamed chunk in brackets; applies to whichever
        # direction is currently being processed.
        if flow.response is None:
            flow.request.stream = lambda x: b"[" + x + b"]"
        else:
            flow.response.stream = lambda x: b"[" + x + b"]"

    assert (
        Playbook(http.HttpLayer(tctx, HTTPMode.regular))
        >> DataReceived(
            tctx.client,
            b"POST http://example.com/ HTTP/1.1\r\n"
            b"Host: example.com\r\n"
            b"Transfer-Encoding: chunked\r\n\r\n"
            b"3\r\nabc\r\n"
            b"0\r\n\r\n",
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
        << OpenConnection(server)
        >> reply(None)
        # The modified chunks are re-chunked: "[abc]" plus the "[]" wrapper of the
        # final empty chunk.
        << SendData(
            server,
            b"POST / HTTP/1.1\r\n"
            b"Host: example.com\r\n"
            b"Transfer-Encoding: chunked\r\n\r\n"
            b"5\r\n[abc]\r\n"
            b"2\r\n[]\r\n",
        )
        << http.HttpRequestHook(flow)
        >> reply()
        << SendData(server, b"0\r\n\r\n")
        >> DataReceived(
            server,
            b"HTTP/1.1 200 OK\r\n"
            b"Transfer-Encoding: chunked\r\n\r\n"
            b"3\r\ndef\r\n"
            b"0\r\n\r\n",
        )
        << http.HttpResponseHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
        << SendData(
            tctx.client,
            b"HTTP/1.1 200 OK\r\n"
            b"Transfer-Encoding: chunked\r\n\r\n"
            b"5\r\n[def]\r\n"
            b"2\r\n[]\r\n",
        )
        << http.HttpResponseHook(flow)
        >> reply()
        << SendData(tctx.client, b"0\r\n\r\n")
    )
def test_h2_to_h1(tctx):
    """Test HTTP/2 -> HTTP/1 request translation"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    conn, playbook = h2_client(tctx)

    conn.send_headers(1, example_request_headers, end_stream=True)
    response = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(tctx.client, conn.data_to_send())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server, b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\n")
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        >> DataReceived(server, b"Hello World!")
        << http.HttpResponseHook(flow)
        << CloseConnection(server)
        # Answer the response hook (second-to-last command).
        >> reply(to=-2)
        << SendData(tctx.client, response)
    )
    # Feed the proxy's raw h2 output back into the client connection
    # and verify the translated response.
    events = conn.receive_data(response())
    assert event_types(events) == [
        h2.events.ResponseReceived,
        h2.events.DataReceived,
        h2.events.DataReceived,
        h2.events.StreamEnded,
    ]
    resp: h2.events.ResponseReceived = events[0]
    body: h2.events.DataReceived = events[1]
    assert resp.headers == [(b':status', b'200'), (b'content-length', b'12')]
    assert body.data == b"Hello World!"
def test_simple(tctx):
    """A plain HTTP/2 request/response roundtrip through the proxy."""
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)
    initial = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(
            tctx.client,
            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize(),
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None, side_effect=make_h2)
        << SendData(server, initial)
    )
    frames = decode_frames(initial())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
    sff = FrameFactory()
    assert (
        playbook
        # a conforming h2 server would send settings first, we disregard this for now.
        >> DataReceived(server, sff.build_headers_frame(example_response_headers).serialize())
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        >> DataReceived(
            server,
            sff.build_data_frame(b"Hello, World!", flags=["END_STREAM"]).serialize(),
        )
        << http.HttpResponseHook(flow)
        >> reply()
        << SendData(
            tctx.client,
            cff.build_headers_frame(example_response_headers).serialize()
            + cff.build_data_frame(b"Hello, World!").serialize()
            + cff.build_data_frame(b"", flags=["END_STREAM"]).serialize(),
        )
    )
    assert flow().request.url == "http://example.com/"
    assert flow().response.text == "Hello, World!"
def test_h1_to_h2(tctx):
    """Test HTTP/1 -> HTTP/2 request translation"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    # An h2 "server" we decode the proxy's upstream bytes with.
    conf = h2.config.H2Configuration(client_side=False)
    conn = h2.connection.H2Connection(conf)
    conn.initiate_connection()

    request = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(tctx.client, b"GET http://example.com/ HTTP/1.1\r\nHost: example.com\r\n\r\n")
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None, side_effect=make_h2)
        << SendData(server, request)
    )
    events = conn.receive_data(request())
    assert event_types(events) == [
        h2.events.RemoteSettingsChanged,
        h2.events.RequestReceived,
        h2.events.StreamEnded,
    ]

    conn.send_headers(1, example_response_headers)
    conn.send_data(1, b"Hello World!", end_stream=True)

    settings_ack = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(server, conn.data_to_send())
        << http.HttpResponseHeadersHook(flow)
        # The proxy acknowledges the server's SETTINGS frame.
        << SendData(server, settings_ack)
        >> reply(to=-2)
        << http.HttpResponseHook(flow)
        >> reply()
        << SendData(tctx.client, b"HTTP/1.1 200 OK\r\n\r\nHello World!")
        << CloseConnection(tctx.client)
    )
    assert settings_ack() == b'\x00\x00\x00\x04\x01\x00\x00\x00\x00'
def test_response_streaming(tctx, why, transfer_encoding):
    """Test HTTP response streaming"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    if why.startswith("body_size"):
        tctx.options.stream_large_bodies = why.replace("body_size=", "")

    def enable_streaming(flow: HTTPFlow):
        # Streaming is enabled either here (addon-style) or via
        # stream_large_bodies above.
        if why == "addon":
            flow.response.stream = True

    assert (
        playbook
        >> DataReceived(
            tctx.client,
            b"GET http://example.com/largefile HTTP/1.1\r\nHost: example.com\r\n\r\n",
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server, b"GET /largefile HTTP/1.1\r\nHost: example.com\r\n\r\n")
        >> DataReceived(server, b"HTTP/1.1 200 OK\r\n")
    )
    if transfer_encoding == "identity":
        playbook >> DataReceived(server, b"Content-Length: 6\r\n\r\n"
                                         b"abc")
    else:
        playbook >> DataReceived(server, b"Transfer-Encoding: chunked\r\n\r\n"
                                         b"3\r\nabc\r\n")
    playbook << http.HttpResponseHeadersHook(flow)
    playbook >> reply(side_effect=enable_streaming)
    if transfer_encoding == "identity":
        playbook << SendData(tctx.client, b"HTTP/1.1 200 OK\r\n"
                                          b"Content-Length: 6\r\n\r\n"
                                          b"abc")
        playbook >> DataReceived(server, b"def")
        playbook << SendData(tctx.client, b"def")
    else:
        if why == "body_size=3":
            # With a 3-byte streaming threshold both chunks are buffered and
            # forwarded as one larger chunk.
            playbook >> DataReceived(server, b"3\r\ndef\r\n")
            playbook << SendData(tctx.client, b"HTTP/1.1 200 OK\r\n"
                                              b"Transfer-Encoding: chunked\r\n\r\n"
                                              b"6\r\nabcdef\r\n")
        else:
            playbook << SendData(tctx.client, b"HTTP/1.1 200 OK\r\n"
                                              b"Transfer-Encoding: chunked\r\n\r\n"
                                              b"3\r\nabc\r\n")
            playbook >> DataReceived(server, b"3\r\ndef\r\n")
            playbook << SendData(tctx.client, b"3\r\ndef\r\n")
        playbook >> DataReceived(server, b"0\r\n\r\n")
    playbook << http.HttpResponseHook(flow)
    playbook >> reply()
    if transfer_encoding == "chunked":
        playbook << SendData(tctx.client, b"0\r\n\r\n")
    assert playbook
def test_upgrade(tctx):
    """Test a HTTP -> WebSocket upgrade"""
    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    flow = Placeholder(HTTPFlow)
    assert (
        Playbook(http.HttpLayer(tctx, HTTPMode.transparent))
        >> DataReceived(tctx.client,
                        b"GET / HTTP/1.1\r\n"
                        b"Connection: upgrade\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Sec-WebSocket-Version: 13\r\n"
                        b"\r\n")
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << SendData(tctx.server,
                    b"GET / HTTP/1.1\r\n"
                    b"Connection: upgrade\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Sec-WebSocket-Version: 13\r\n"
                    b"\r\n")
        >> DataReceived(tctx.server,
                        b"HTTP/1.1 101 Switching Protocols\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Connection: Upgrade\r\n"
                        b"\r\n")
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        << http.HttpResponseHook(flow)
        >> reply()
        << SendData(tctx.client,
                    b"HTTP/1.1 101 Switching Protocols\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Connection: Upgrade\r\n"
                    b"\r\n")
        # After the 101 the connection is handed over to the websocket layer.
        << websocket.WebsocketStartHook(flow)
        >> reply()
        >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello world"))
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.server, masked(b"\x81\x0bhello world"))
        >> DataReceived(tctx.server, b"\x82\nhello back")
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.client, b"\x82\nhello back")
        >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello again"))
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.server, masked(b"\x81\x0bhello again"))
    )
    assert len(flow().websocket.messages) == 3
    assert flow().websocket.messages[0].content == b"hello world"
    assert flow().websocket.messages[0].from_client
    assert flow().websocket.messages[0].type == Opcode.TEXT
    assert flow().websocket.messages[1].content == b"hello back"
    assert flow().websocket.messages[1].from_client is False
    assert flow().websocket.messages[1].type == Opcode.BINARY
def test_upgrade_streamed(tctx):
    """If the HTTP response is streamed, we may get early data from the client."""
    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    flow = Placeholder(HTTPFlow)

    def enable_streaming(flow: HTTPFlow):
        flow.response.stream = True

    assert (
        Playbook(http.HttpLayer(tctx, HTTPMode.transparent))
        >> DataReceived(tctx.client,
                        b"GET / HTTP/1.1\r\n"
                        b"Connection: upgrade\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Sec-WebSocket-Version: 13\r\n"
                        b"\r\n")
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << SendData(tctx.server,
                    b"GET / HTTP/1.1\r\n"
                    b"Connection: upgrade\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Sec-WebSocket-Version: 13\r\n"
                    b"\r\n")
        >> DataReceived(tctx.server,
                        b"HTTP/1.1 101 Switching Protocols\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Connection: Upgrade\r\n"
                        b"\r\n")
        << http.HttpResponseHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
        << SendData(tctx.client,
                    b"HTTP/1.1 101 Switching Protocols\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Connection: Upgrade\r\n"
                    b"\r\n")
        << http.HttpResponseHook(flow)
        >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello world"))  # early !!
        >> reply(to=-2)
        << websocket.WebsocketStartHook(flow)
        >> reply()
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.server, masked(b"\x81\x0bhello world"))
        >> DataReceived(tctx.server, b"\x82\nhello back")
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.client, b"\x82\nhello back")
        >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello again"))
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.server, masked(b"\x81\x0bhello again"))
    )
def test_response_trailers(tctx: Context, open_h2_server_conn: Server, stream):
    """HTTP/2 response trailers are surfaced on the flow and can be modified."""
    playbook, cff = start_h2_client(tctx)
    tctx.server = open_h2_server_conn
    sff = FrameFactory()

    def enable_streaming(flow: HTTPFlow):
        flow.response.stream = bool(stream)

    flow = Placeholder(HTTPFlow)
    (
        playbook
        >> DataReceived(
            tctx.client,
            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize(),
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << SendData(tctx.server, Placeholder(bytes))
        # a conforming h2 server would send settings first, we disregard this for now.
        >> DataReceived(
            tctx.server,
            sff.build_headers_frame(example_response_headers).serialize()
            + sff.build_data_frame(b"Hello, World!").serialize(),
        )
        << http.HttpResponseHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
    )
    if stream:
        # In streaming mode headers and body are forwarded before the trailers arrive.
        playbook << SendData(
            tctx.client,
            cff.build_headers_frame(example_response_headers).serialize()
            + cff.build_data_frame(b"Hello, World!").serialize(),
        )
    assert (
        playbook
        >> DataReceived(
            tctx.server,
            cff.build_headers_frame(example_response_trailers, flags=["END_STREAM"]).serialize()
            if False else
            sff.build_headers_frame(example_response_trailers, flags=["END_STREAM"]).serialize(),
        )
        << http.HttpResponseHook(flow)
    )
    assert flow().response.trailers
    del flow().response.trailers["resp-trailer-a"]
    if stream:
        assert (
            playbook
            >> reply()
            << SendData(
                tctx.client,
                cff.build_headers_frame(example_response_trailers[1:], flags=["END_STREAM"]).serialize(),
            )
        )
    else:
        assert (
            playbook
            >> reply()
            << SendData(
                tctx.client,
                cff.build_headers_frame(example_response_headers).serialize()
                + cff.build_data_frame(b"Hello, World!").serialize()
                + cff.build_headers_frame(example_response_trailers[1:], flags=["END_STREAM"]).serialize(),
            )
        )
def test_upgrade(tctx, proto):
    """Test a HTTP -> WebSocket upgrade with different protocols enabled"""
    if proto != "websocket":
        tctx.options.websocket = False
    if proto != "tcp":
        tctx.options.rawtcp = False

    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    http_flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.transparent))
    (
        playbook
        >> DataReceived(tctx.client,
                        b"GET / HTTP/1.1\r\n"
                        b"Connection: upgrade\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Sec-WebSocket-Version: 13\r\n"
                        b"\r\n")
        << http.HttpRequestHeadersHook(http_flow)
        >> reply()
        << http.HttpRequestHook(http_flow)
        >> reply()
        << SendData(tctx.server,
                    b"GET / HTTP/1.1\r\n"
                    b"Connection: upgrade\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Sec-WebSocket-Version: 13\r\n"
                    b"\r\n")
        >> DataReceived(tctx.server,
                        b"HTTP/1.1 101 Switching Protocols\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Connection: Upgrade\r\n"
                        b"\r\n")
        << http.HttpResponseHeadersHook(http_flow)
        >> reply()
        << http.HttpResponseHook(http_flow)
        >> reply()
        << SendData(tctx.client,
                    b"HTTP/1.1 101 Switching Protocols\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Connection: Upgrade\r\n"
                    b"\r\n")
    )
    # Which layer takes over depends on which protocols are enabled.
    if proto == "websocket":
        assert playbook << WebsocketStartHook(Placeholder(WebSocketFlow))
    elif proto == "tcp":
        assert playbook << TcpStartHook(Placeholder(TCPFlow))
    else:
        assert (
            playbook
            << Log("Sent HTTP 101 response, but no protocol is enabled to upgrade to.", "warn")
            << CloseConnection(tctx.client)
        )
def test_no_normalization(tctx, normalize):
    """Test that we don't normalize headers when we just pass them through."""
    tctx.options.normalize_outbound_headers = normalize
    tctx.options.validate_inbound_headers = False
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook, cff = start_h2_client(tctx)

    request_headers = list(example_request_headers) + [(b"Should-Not-Be-Capitalized! ", b" :) ")]
    request_headers_lower = [(k.lower(), v) for (k, v) in request_headers]
    response_headers = list(example_response_headers) + [(b"Same", b"Here")]
    response_headers_lower = [(k.lower(), v) for (k, v) in response_headers]

    initial = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(
            tctx.client,
            cff.build_headers_frame(request_headers, flags=["END_STREAM"]).serialize(),
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None, side_effect=make_h2)
        << SendData(server, initial)
    )
    frames = decode_frames(initial())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
    # BUGFIX: the ternary must be parenthesized. Without parentheses the
    # expression parses as `(decoded == lower) if normalize else request_headers`,
    # so for normalize=False the assert only checked that request_headers is
    # truthy and never compared the decoded headers at all.
    assert hpack.hpack.Decoder().decode(frames[1].data, True) == (
        request_headers_lower if normalize else request_headers
    )

    sff = FrameFactory()
    (
        playbook
        >> DataReceived(
            server,
            sff.build_headers_frame(response_headers, flags=["END_STREAM"]).serialize(),
        )
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        << http.HttpResponseHook(flow)
        >> reply()
    )
    if normalize:
        playbook << Log("Lowercased 'Same' header as uppercase is not allowed with HTTP/2.")
    hdrs = response_headers_lower if normalize else response_headers
    assert playbook << SendData(
        tctx.client,
        cff.build_headers_frame(hdrs, flags=["END_STREAM"]).serialize(),
    )

    assert flow().request.headers.fields == ((b"Should-Not-Be-Capitalized! ", b" :) "),)
    assert flow().response.headers.fields == ((b"Same", b"Here"),)
def test_no_normalization(tctx):
    """Test that we don't normalize headers when we just pass them through."""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook, cff = start_h2_client(tctx)

    request_headers = example_request_headers + (
        (b"Should-Not-Be-Capitalized! ", b" :) "),
    )
    response_headers = example_response_headers + (
        (b"Same", b"Here"),
    )

    initial = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(
            tctx.client,
            cff.build_headers_frame(request_headers, flags=["END_STREAM"]).serialize(),
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None, side_effect=make_h2)
        << SendData(server, initial)
    )
    frames = decode_frames(initial())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
    # The oddly-cased/padded header must survive the proxy untouched.
    assert hpack.hpack.Decoder().decode(frames[1].data, True) == list(request_headers)

    sff = FrameFactory()
    assert (
        playbook
        >> DataReceived(
            server,
            sff.build_headers_frame(response_headers, flags=["END_STREAM"]).serialize(),
        )
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        << http.HttpResponseHook(flow)
        >> reply()
        << SendData(
            tctx.client,
            cff.build_headers_frame(response_headers).serialize()
            + cff.build_data_frame(b"", flags=["END_STREAM"]).serialize(),
        )
    )
    assert flow().request.headers.fields == ((b"Should-Not-Be-Capitalized! ", b" :) "),)
    assert flow().response.headers.fields == ((b"Same", b"Here"),)
def test_http_proxy(tctx):
    """Test a simple HTTP GET / request"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    assert (
        Playbook(http.HttpLayer(tctx, HTTPMode.regular))
        >> DataReceived(
            tctx.client,
            b"GET http://example.com/foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n",
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
        >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World")
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        # The body arrives in two pieces; the response hook fires once it is complete.
        >> DataReceived(server, b"!")
        << http.HttpResponseHook(flow)
        >> reply()
        << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!")
    )
    assert server().address == ("example.com", 80)
def test_kill_flow(tctx, when):
    """Test that we properly kill flows if instructed to do so"""
    server = Placeholder(Server)
    connect_flow = Placeholder(HTTPFlow)
    flow = Placeholder(HTTPFlow)

    def kill(flow: HTTPFlow):
        # Can't use flow.kill() here because that currently still depends on a reply object.
        flow.error = Error(Error.KILLED_MESSAGE)

    def assert_kill(err_hook: bool = True):
        # Reply to the pending hook with the kill side effect and expect the
        # client connection to be closed (optionally after the error hook).
        playbook >> reply(side_effect=kill)
        if err_hook:
            playbook << http.HttpErrorHook(flow)
            playbook >> reply()
        playbook << CloseConnection(tctx.client)
        assert playbook

    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    assert (
        playbook
        >> DataReceived(tctx.client, b"CONNECT example.com:80 HTTP/1.1\r\n\r\n")
        << http.HttpConnectHook(connect_flow)
    )
    if when == "http_connect":
        return assert_kill(False)
    assert (
        playbook
        >> reply()
        << SendData(tctx.client, b'HTTP/1.1 200 Connection established\r\n\r\n')
        >> DataReceived(tctx.client, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
        << layer.NextLayerHook(Placeholder())
        >> reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
        << http.HttpRequestHeadersHook(flow)
    )
    if when == "requestheaders":
        return assert_kill()
    assert (playbook >> reply() << http.HttpRequestHook(flow))
    if when == "request":
        return assert_kill()
    if when == "script-response-responseheaders":
        # An addon sets a response directly, so we jump straight to responseheaders.
        assert (
            playbook
            >> reply(side_effect=lambda f: setattr(f, "response", Response.make()))
            << http.HttpResponseHeadersHook(flow)
        )
        return assert_kill()
    assert (
        playbook
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
        >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World")
        << http.HttpResponseHeadersHook(flow)
    )
    if when == "responseheaders":
        return assert_kill()
    if when == "response":
        assert (
            playbook
            >> reply()
            >> DataReceived(server, b"!")
            << http.HttpResponseHook(flow)
        )
        return assert_kill(False)
    elif when == "error":
        assert (
            playbook
            >> reply()
            >> ConnectionClosed(server)
            << CloseConnection(server)
            << http.HttpErrorHook(flow)
        )
        return assert_kill(False)
    else:
        raise AssertionError
def test_request_streaming(tctx, response):
    """
    Test HTTP request streaming

    This is a bit more contrived as we may receive server data while we are
    still sending the request.
    """
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    def enable_streaming(flow: HTTPFlow):
        # Uppercase streamed request chunks so we can tell streaming happened.
        flow.request.stream = lambda x: x.upper()

    assert (
        playbook
        >> DataReceived(tctx.client,
                        b"POST http://example.com/ HTTP/1.1\r\n"
                        b"Host: example.com\r\n"
                        b"Content-Length: 6\r\n\r\n"
                        b"abc")
        << http.HttpRequestHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server,
                    b"POST / HTTP/1.1\r\n"
                    b"Host: example.com\r\n"
                    b"Content-Length: 6\r\n\r\n"
                    b"ABC")
    )
    if response == "normal response":
        assert (
            playbook
            >> DataReceived(tctx.client, b"def")
            << SendData(server, b"DEF")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
        )
    elif response == "early response":
        # We may receive a response before we have finished sending our request.
        # We continue sending unless the server closes the connection.
        # https://tools.ietf.org/html/rfc7231#section-6.5.11
        assert (
            playbook
            >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            >> DataReceived(tctx.client, b"def")
            << SendData(server, b"DEF")  # Important: no request hook here!
        )
    elif response == "early close":
        assert (
            playbook
            >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            >> ConnectionClosed(server)
            << CloseConnection(server)
            << CloseConnection(tctx.client)
        )
    elif response == "early kill":
        err = Placeholder(bytes)
        assert (
            playbook
            >> ConnectionClosed(server)
            << CloseConnection(server)
            << http.HttpErrorHook(flow)
            >> reply()
            << SendData(tctx.client, err)
            << CloseConnection(tctx.client)
        )
        assert b"502 Bad Gateway" in err()
    else:  # pragma: no cover
        assert False
def _test_cancel(stream_req, stream_resp, draw):
    """
    Test that we don't raise an exception if someone disconnects.
    """
    tctx = context.Context(
        connection.Client(("client", 1234), ("127.0.0.1", 8080), 1605699329), opts
    )
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)

    def maybe_stream(flow: HTTPFlow):
        if stream_req:
            flow.request.stream = True
        if stream_resp and flow.response:
            flow.response.stream = True

    hook_req_headers = http.HttpRequestHeadersHook(flow)
    hook_req = http.HttpRequestHook(flow)
    hook_resp_headers = http.HttpResponseHeadersHook(flow)
    hook_resp = http.HttpResponseHook(flow)
    hook_error = http.HttpErrorHook(flow)
    openconn = OpenConnection(server)
    send_upstream = SendData(server, Placeholder(bytes))

    data_req = DataReceived(tctx.client, cff.build_headers_frame(example_request_headers).serialize())
    data_reqbody = DataReceived(tctx.client, cff.build_data_frame(b"foo", flags=["END_STREAM"]).serialize())
    data_resp = DataReceived(server, cff.build_headers_frame(example_response_headers).serialize())
    data_respbody = DataReceived(server, cff.build_data_frame(b"bar", flags=["END_STREAM"]).serialize())

    client_disc = ConnectionClosed(tctx.client)
    client_rst = DataReceived(tctx.client, cff.build_rst_stream_frame(1).serialize())
    server_disc = ConnectionClosed(server)
    server_rst = DataReceived(server, cff.build_rst_stream_frame(1).serialize())

    evts: Dict[str, Tuple[Any, Any, Any]] = {}
    # precondition, but-not-after-this
    evts["data_req"] = data_req, None, client_disc
    evts["data_reqbody"] = data_reqbody, data_req, client_disc
    evts["reply_hook_req_headers"] = reply(to=hook_req_headers, side_effect=maybe_stream), hook_req_headers, None
    evts["reply_hook_req"] = reply(to=hook_req), hook_req, None
    evts["reply_openconn"] = reply(None, to=openconn, side_effect=make_h2), openconn, None
    evts["data_resp"] = data_resp, send_upstream, server_disc
    evts["data_respbody"] = data_respbody, data_resp, server_disc
    evts["reply_hook_resp_headers"] = reply(to=hook_resp_headers, side_effect=maybe_stream), hook_resp_headers, None
    evts["reply_hook_resp"] = reply(to=hook_resp), hook_resp, None
    evts["reply_hook_error"] = reply(to=hook_error), hook_error, None

    evts["err_client_disc"] = client_disc, None, None
    evts["err_client_rst"] = client_rst, None, client_disc
    evts["err_server_disc"] = server_disc, send_upstream, None
    evts["err_server_rst"] = server_rst, send_upstream, server_disc

    def eq_maybe(a, b):
        # _eq helpfully raises a TypeError when placeholder types don't match
        # that is useful in (test) development, but may happen legitimately when fuzzing here.
        try:
            return _eq(a, b)
        except TypeError:
            return False

    while evts:
        candidates = []
        for name, (evt, precon, negprecon) in evts.items():
            precondition_ok = (
                precon is None or any(eq_maybe(x, precon) for x in playbook.actual)
            )
            neg_precondition_ok = (
                negprecon is None or not any(eq_maybe(x, negprecon) for x in playbook.actual)
            )
            if precondition_ok and neg_precondition_ok:
                # crude hack to increase fuzzing efficiency: make it more likely that we progress.
                for i in range(1 if name.startswith("err_") else 3):
                    candidates.append((name, evt))
        if not candidates:
            break

        name, evt = draw(candidates)
        del evts[name]
        try:
            assert playbook >> evt
        except AssertionError:
            if any(
                isinstance(x, _TracebackInPlaybook)
                for x in playbook.actual
            ):
                raise
            else:
                # add commands that the server issued.
                playbook.expected.extend(playbook.actual[len(playbook.expected):])
def test_request_streaming(tctx, why, transfer_encoding, response):
    """
    Test HTTP request streaming

    This is a bit more contrived as we may receive server data while we are
    still sending the request.
    """
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    if why.startswith("body_size"):
        tctx.options.stream_large_bodies = why.replace("body_size=", "")

    def enable_streaming(flow: HTTPFlow):
        # Streaming is enabled either here (addon-style) or via
        # stream_large_bodies above.
        if why == "addon":
            flow.request.stream = True

    playbook >> DataReceived(tctx.client,
                             b"POST http://example.com/ HTTP/1.1\r\n"
                             b"Host: example.com\r\n")
    if transfer_encoding == "identity":
        playbook >> DataReceived(tctx.client, b"Content-Length: 9\r\n\r\n"
                                              b"abc")
    else:
        playbook >> DataReceived(tctx.client, b"Transfer-Encoding: chunked\r\n\r\n"
                                              b"3\r\nabc\r\n")
    playbook << http.HttpRequestHeadersHook(flow)
    playbook >> reply(side_effect=enable_streaming)

    # With a 3-byte streaming threshold and chunked encoding, the layer needs a
    # second chunk before the threshold triggers and the connection is opened.
    needs_more_data_before_open = (why == "body_size=3" and transfer_encoding == "chunked")
    if needs_more_data_before_open:
        playbook >> DataReceived(tctx.client, b"3\r\ndef\r\n")

    playbook << OpenConnection(server)
    playbook >> reply(None)
    playbook << SendData(server, b"POST / HTTP/1.1\r\n"
                                 b"Host: example.com\r\n")
    if transfer_encoding == "identity":
        playbook << SendData(server, b"Content-Length: 9\r\n\r\n"
                                     b"abc")
        playbook >> DataReceived(tctx.client, b"def")
        playbook << SendData(server, b"def")
    else:
        if needs_more_data_before_open:
            # Both buffered chunks are merged into one.
            playbook << SendData(server, b"Transfer-Encoding: chunked\r\n\r\n"
                                         b"6\r\nabcdef\r\n")
        else:
            playbook << SendData(server, b"Transfer-Encoding: chunked\r\n\r\n"
                                         b"3\r\nabc\r\n")
            playbook >> DataReceived(tctx.client, b"3\r\ndef\r\n")
            playbook << SendData(server, b"3\r\ndef\r\n")

    if response == "normal response":
        if transfer_encoding == "identity":
            playbook >> DataReceived(tctx.client, b"ghi")
            playbook << SendData(server, b"ghi")
        else:
            playbook >> DataReceived(tctx.client, b"3\r\nghi\r\n0\r\n\r\n")
            playbook << SendData(server, b"3\r\nghi\r\n")
        playbook << http.HttpRequestHook(flow)
        playbook >> reply()
        if transfer_encoding == "chunked":
            playbook << SendData(server, b"0\r\n\r\n")
        assert (
            playbook
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
        )
    elif response == "early response":
        # We may receive a response before we have finished sending our request.
        # We continue sending unless the server closes the connection.
        # https://tools.ietf.org/html/rfc7231#section-6.5.11
        assert (
            playbook
            >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
        )
        if transfer_encoding == "identity":
            playbook >> DataReceived(tctx.client, b"ghi")
            playbook << SendData(server, b"ghi")
        else:
            playbook >> DataReceived(tctx.client, b"3\r\nghi\r\n0\r\n\r\n")
            playbook << SendData(server, b"3\r\nghi\r\n")
        playbook << http.HttpRequestHook(flow)
        playbook >> reply()
        if transfer_encoding == "chunked":
            playbook << SendData(server, b"0\r\n\r\n")
        assert playbook
    elif response == "early close":
        assert (
            playbook
            >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            >> ConnectionClosed(server)
            << CloseConnection(server)
            << CloseConnection(tctx.client)
        )
    elif response == "early kill":
        err = Placeholder(bytes)
        assert (
            playbook
            >> ConnectionClosed(server)
            << CloseConnection(server)
            << http.HttpErrorHook(flow)
            >> reply()
            << SendData(tctx.client, err)
            << CloseConnection(tctx.client)
        )
        assert b"502 Bad Gateway" in err()
    else:  # pragma: no cover
        assert False