def test_multiple_server_connections(tctx):
    """Test multiple requests being rewritten to different targets."""
    server1 = Placeholder(Server)
    server2 = Placeholder(Server)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=False)

    def redirect(to: str):
        # Factory for a request-hook side effect that rewrites the target URL.
        def set_url(flow: HTTPFlow):
            flow.request.url = to
        return set_url

    # First request: rewritten to one.redirect, which opens server1.
    playbook >> DataReceived(tctx.client, b"GET http://example.com/ HTTP/1.1\r\nHost: example.com\r\n\r\n")
    playbook << http.HttpRequestHook(Placeholder())
    playbook >> reply(side_effect=redirect("http://one.redirect/"))
    playbook << OpenConnection(server1)
    playbook >> reply(None)
    playbook << SendData(server1, b"GET / HTTP/1.1\r\nHost: one.redirect\r\n\r\n")
    playbook >> DataReceived(server1, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
    playbook << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
    assert playbook

    # Second request on the same client connection: rewritten to a different
    # host, so a second upstream connection (server2) must be opened.
    playbook >> DataReceived(tctx.client, b"GET http://example.com/ HTTP/1.1\r\nHost: example.com\r\n\r\n")
    playbook << http.HttpRequestHook(Placeholder())
    playbook >> reply(side_effect=redirect("http://two.redirect/"))
    playbook << OpenConnection(server2)
    playbook >> reply(None)
    playbook << SendData(server2, b"GET / HTTP/1.1\r\nHost: two.redirect\r\n\r\n")
    playbook >> DataReceived(server2, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
    playbook << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
    assert playbook

    assert server1().address == ("one.redirect", 80)
    assert server2().address == ("two.redirect", 80)
def test_no_normalization(tctx):
    """Test that we don't normalize headers when we just pass them through."""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook, cff = start_h2_client(tctx)
    # Deliberately non-conforming header names/values: they must survive the
    # proxy byte-for-byte when mitmproxy merely relays them.
    request_headers = example_request_headers + (
        (b"Should-Not-Be-Capitalized! ", b" :) "),
    )
    response_headers = example_response_headers + (
        (b"Same", b"Here"),
    )
    initial = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(request_headers, flags=["END_STREAM"]).serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None, side_effect=make_h2)
        << SendData(server, initial)
    )
    frames = decode_frames(initial())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
    # Decode with raw=True so hpack hands back the literal header bytes.
    assert hpack.hpack.Decoder().decode(frames[1].data, True) == list(request_headers)
    sff = FrameFactory()
    assert (
        playbook
        >> DataReceived(server,
                        sff.build_headers_frame(response_headers, flags=["END_STREAM"]).serialize())
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        << http.HttpResponseHook(flow)
        >> reply()
        << SendData(tctx.client,
                    cff.build_headers_frame(response_headers).serialize() +
                    cff.build_data_frame(b"", flags=["END_STREAM"]).serialize())
    )
    # The flow objects must also carry the unmodified header fields.
    assert flow().request.headers.fields == ((b"Should-Not-Be-Capitalized! ", b" :) "),)
    assert flow().response.headers.fields == ((b"Same", b"Here"),)
def test_response_trailers(tctx: Context, open_h2_server_conn: Server, stream):
    """HTTP/2 response trailers pass through (and can be edited in the
    response hook) in both streamed and buffered mode."""
    playbook, cff = start_h2_client(tctx)
    tctx.server = open_h2_server_conn
    sff = FrameFactory()

    def enable_streaming(flow: HTTPFlow):
        flow.response.stream = bool(stream)

    flow = Placeholder(HTTPFlow)
    (playbook
     >> DataReceived(tctx.client,
                     cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize())
     << http.HttpRequestHeadersHook(flow)
     >> reply()
     << http.HttpRequestHook(flow)
     >> reply()
     << SendData(tctx.server, Placeholder(bytes))
     # a conforming h2 server would send settings first, we disregard this for now.
     >> DataReceived(tctx.server,
                     sff.build_headers_frame(example_response_headers).serialize() +
                     sff.build_data_frame(b"Hello, World!").serialize())
     << http.HttpResponseHeadersHook(flow)
     >> reply(side_effect=enable_streaming))
    if stream:
        # Streaming mode: headers + body are already forwarded to the client
        # before the trailers arrive.
        playbook << SendData(
            tctx.client,
            cff.build_headers_frame(example_response_headers).serialize() +
            cff.build_data_frame(b"Hello, World!").serialize())
    assert (playbook
            >> DataReceived(tctx.server,
                            sff.build_headers_frame(example_response_trailers, flags=["END_STREAM"]).serialize())
            << http.HttpResponseHook(flow))
    assert flow().response.trailers
    # Drop one trailer in the response hook; only the remaining trailer(s)
    # (example_response_trailers[1:]) must be forwarded.
    del flow().response.trailers["resp-trailer-a"]
    if stream:
        assert (playbook
                >> reply()
                << SendData(tctx.client,
                            cff.build_headers_frame(example_response_trailers[1:], flags=["END_STREAM"]).serialize()))
    else:
        # Buffered mode: everything is sent in one go after the response hook.
        assert (playbook
                >> reply()
                << SendData(tctx.client,
                            cff.build_headers_frame(example_response_headers).serialize() +
                            cff.build_data_frame(b"Hello, World!").serialize() +
                            cff.build_headers_frame(example_response_trailers[1:], flags=["END_STREAM"]).serialize()))
def test_upgrade(tctx):
    """Test a HTTP -> WebSocket upgrade"""
    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    flow = Placeholder(HTTPFlow)
    assert (
        Playbook(http.HttpLayer(tctx, HTTPMode.transparent))
        >> DataReceived(tctx.client,
                        b"GET / HTTP/1.1\r\n"
                        b"Connection: upgrade\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Sec-WebSocket-Version: 13\r\n"
                        b"\r\n")
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << SendData(tctx.server,
                    b"GET / HTTP/1.1\r\n"
                    b"Connection: upgrade\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Sec-WebSocket-Version: 13\r\n"
                    b"\r\n")
        >> DataReceived(tctx.server,
                        b"HTTP/1.1 101 Switching Protocols\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Connection: Upgrade\r\n"
                        b"\r\n")
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        << http.HttpResponseHook(flow)
        >> reply()
        << SendData(tctx.client,
                    b"HTTP/1.1 101 Switching Protocols\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Connection: Upgrade\r\n"
                    b"\r\n")
        # 101 accepted: the WebSocket layer takes over the connection.
        << websocket.WebsocketStartHook(flow)
        >> reply()
        # client -> server message (client-originated frames are masked).
        >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello world"))
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.server, masked(b"\x81\x0bhello world"))
        # server -> client message (unmasked, binary opcode 0x82).
        >> DataReceived(tctx.server, b"\x82\nhello back")
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.client, b"\x82\nhello back")
        >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello again"))
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.server, masked(b"\x81\x0bhello again")))
    assert len(flow().websocket.messages) == 3
    assert flow().websocket.messages[0].content == b"hello world"
    assert flow().websocket.messages[0].from_client
    assert flow().websocket.messages[0].type == Opcode.TEXT
    assert flow().websocket.messages[1].content == b"hello back"
    assert flow().websocket.messages[1].from_client is False
    assert flow().websocket.messages[1].type == Opcode.BINARY
    assert flow().live
def test_http_proxy(tctx):
    """Test a simple HTTP GET / request"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    # Absolute-form request from the client; the proxy rewrites it to
    # origin-form before forwarding upstream.
    playbook >> DataReceived(tctx.client, b"GET http://example.com/foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
    playbook << http.HttpRequestHeadersHook(flow)
    playbook >> reply()
    playbook << http.HttpRequestHook(flow)
    playbook >> reply()
    playbook << OpenConnection(server)
    playbook >> reply(None)
    playbook << SendData(server, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
    # Response body arrives in two pieces; the flow is only complete (and the
    # response hook fires) once the full Content-Length has been read.
    playbook >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World")
    playbook << http.HttpResponseHeadersHook(flow)
    playbook >> reply()
    playbook >> DataReceived(server, b"!")
    playbook << http.HttpResponseHook(flow)
    playbook >> reply()
    playbook << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!")
    assert playbook

    assert server().address == ("example.com", 80)
def test_upgrade_streamed(tctx):
    """If the HTTP response is streamed, we may get early data from the client."""
    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    flow = Placeholder(HTTPFlow)

    def enable_streaming(flow: HTTPFlow):
        flow.response.stream = True

    assert (
        Playbook(http.HttpLayer(tctx, HTTPMode.transparent))
        >> DataReceived(tctx.client,
                        b"GET / HTTP/1.1\r\n"
                        b"Connection: upgrade\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Sec-WebSocket-Version: 13\r\n"
                        b"\r\n")
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << SendData(tctx.server,
                    b"GET / HTTP/1.1\r\n"
                    b"Connection: upgrade\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Sec-WebSocket-Version: 13\r\n"
                    b"\r\n")
        >> DataReceived(tctx.server,
                        b"HTTP/1.1 101 Switching Protocols\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Connection: Upgrade\r\n"
                        b"\r\n")
        << http.HttpResponseHeadersHook(flow)
        # Streaming means the 101 is relayed before the response hook returns.
        >> reply(side_effect=enable_streaming)
        << SendData(tctx.client,
                    b"HTTP/1.1 101 Switching Protocols\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Connection: Upgrade\r\n"
                    b"\r\n")
        << http.HttpResponseHook(flow)
        >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello world"))  # early !!
        # reply(to=-2) answers the still-pending HttpResponseHook above;
        # only then does the WebSocket layer start and consume the early data.
        >> reply(to=-2)
        << websocket.WebsocketStartHook(flow)
        >> reply()
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.server, masked(b"\x81\x0bhello world"))
        >> DataReceived(tctx.server, b"\x82\nhello back")
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.client, b"\x82\nhello back")
        >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello again"))
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.server, masked(b"\x81\x0bhello again")))
def test_upgrade(tctx, proto):
    """Test a HTTP -> WebSocket upgrade with different protocols enabled"""
    # Disable all upgrade targets except the one under test.
    if proto != "websocket":
        tctx.options.websocket = False
    if proto != "tcp":
        tctx.options.rawtcp = False
    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    http_flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.transparent))
    (playbook
     >> DataReceived(tctx.client,
                     b"GET / HTTP/1.1\r\n"
                     b"Connection: upgrade\r\n"
                     b"Upgrade: websocket\r\n"
                     b"Sec-WebSocket-Version: 13\r\n"
                     b"\r\n")
     << http.HttpRequestHeadersHook(http_flow)
     >> reply()
     << http.HttpRequestHook(http_flow)
     >> reply()
     << SendData(tctx.server,
                 b"GET / HTTP/1.1\r\n"
                 b"Connection: upgrade\r\n"
                 b"Upgrade: websocket\r\n"
                 b"Sec-WebSocket-Version: 13\r\n"
                 b"\r\n")
     >> DataReceived(tctx.server,
                     b"HTTP/1.1 101 Switching Protocols\r\n"
                     b"Upgrade: websocket\r\n"
                     b"Connection: Upgrade\r\n"
                     b"\r\n")
     << http.HttpResponseHeadersHook(http_flow)
     >> reply()
     << http.HttpResponseHook(http_flow)
     >> reply()
     << SendData(tctx.client,
                 b"HTTP/1.1 101 Switching Protocols\r\n"
                 b"Upgrade: websocket\r\n"
                 b"Connection: Upgrade\r\n"
                 b"\r\n"))
    # Depending on which protocol layers are enabled, the 101 either hands
    # off to WebSocket, falls back to raw TCP, or cannot be handled at all.
    if proto == "websocket":
        assert playbook << WebsocketStartHook(http_flow)
    elif proto == "tcp":
        assert playbook << TcpStartHook(Placeholder(TCPFlow))
    else:
        assert (playbook
                << Log("Sent HTTP 101 response, but no protocol is enabled to upgrade to.", "warn")
                << CloseConnection(tctx.client))
def test_http_server_aborts(tctx, stream):
    """Test handling of the case where a server aborts during response transmission."""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    def enable_streaming(flow: HTTPFlow):
        flow.response.stream = True

    assert (playbook
            >> DataReceived(tctx.client,
                            b"GET http://example.com/ HTTP/1.1\r\n"
                            b"Host: example.com\r\n\r\n")
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server,
                        b"GET / HTTP/1.1\r\n"
                        b"Host: example.com\r\n\r\n")
            # The server promises 6 body bytes but only delivers 3 before dying.
            >> DataReceived(server,
                            b"HTTP/1.1 200 OK\r\n"
                            b"Content-Length: 6\r\n"
                            b"\r\n"
                            b"abc")
            << http.HttpResponseHeadersHook(flow))
    if stream:
        # Streaming: the partial response has already been forwarded.
        assert (playbook
                >> reply(side_effect=enable_streaming)
                << SendData(tctx.client,
                            b"HTTP/1.1 200 OK\r\n"
                            b"Content-Length: 6\r\n"
                            b"\r\n"
                            b"abc"))
    else:
        assert playbook >> reply()
    assert (playbook
            >> ConnectionClosed(server)
            << CloseConnection(server)
            << http.HttpErrorHook(flow))
    if stream:
        # Headers already went out, so we can only terminate the client side.
        assert (playbook
                >> reply()
                << CloseConnection(tctx.client))
    else:
        # Buffered: we can still replace the response with an error page.
        error_html = Placeholder(bytes)
        assert (playbook
                >> reply()
                << SendData(tctx.client, error_html)
                << CloseConnection(tctx.client))
        assert b"502 Bad Gateway" in error_html()
        assert b"peer closed connection" in error_html()

    assert "peer closed connection" in flow().error.msg
def test_redirect(strategy, https_server, https_client, tctx, monkeypatch):
    """Test redirects between http:// and https:// in regular proxy mode."""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    tctx.options.connection_strategy = strategy
    p = Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=False)
    if https_server:
        # Stub out TLS so no real handshake with the upstream server is needed.
        monkeypatch.setattr(tls, "ServerTLSLayer", tls.MockTLSLayer)

    def redirect(flow: HTTPFlow):
        # Request-hook side effect: rewrite the target, possibly switching scheme.
        if https_server:
            flow.request.url = "https://redirected.site/"
        else:
            flow.request.url = "http://redirected.site/"

    if https_client:
        # Client speaks through a CONNECT tunnel first.
        p >> DataReceived(tctx.client, b"CONNECT example.com:80 HTTP/1.1\r\n\r\n")
        if strategy == "eager":
            p << OpenConnection(Placeholder())
            p >> reply(None)
        p << SendData(tctx.client, b'HTTP/1.1 200 Connection established\r\n\r\n')
        p >> DataReceived(tctx.client, b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        p << layer.NextLayerHook(Placeholder())
        p >> reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
    else:
        p >> DataReceived(tctx.client, b"GET http://example.com/ HTTP/1.1\r\nHost: example.com\r\n\r\n")
    p << http.HttpRequestHook(flow)
    p >> reply(side_effect=redirect)
    p << OpenConnection(server)
    p >> reply(None)
    p << SendData(server, b"GET / HTTP/1.1\r\nHost: redirected.site\r\n\r\n")
    p >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!")
    p << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!")
    assert p

    # The scheme of the rewritten URL determines the upstream port.
    if https_server:
        assert server().address == ("redirected.site", 443)
    else:
        assert server().address == ("redirected.site", 80)
def test_response_streaming(tctx):
    """Test HTTP response streaming"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)

    def enable_streaming(flow: HTTPFlow):
        # Uppercasing each chunk makes it visible that the stream fn ran.
        flow.response.stream = lambda x: x.upper()

    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    playbook >> DataReceived(tctx.client, b"GET http://example.com/largefile HTTP/1.1\r\nHost: example.com\r\n\r\n")
    playbook << http.HttpRequestHeadersHook(flow)
    playbook >> reply()
    playbook << http.HttpRequestHook(flow)
    playbook >> reply()
    playbook << OpenConnection(server)
    playbook >> reply(None)
    playbook << SendData(server, b"GET /largefile HTTP/1.1\r\nHost: example.com\r\n\r\n")
    # First body chunk is forwarded (transformed) immediately once streaming
    # is enabled in the response-headers hook.
    playbook >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 6\r\n\r\nabc")
    playbook << http.HttpResponseHeadersHook(flow)
    playbook >> reply(side_effect=enable_streaming)
    playbook << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 6\r\n\r\nABC")
    # Remaining chunk completes the body; only then does the response hook fire.
    playbook >> DataReceived(server, b"def")
    playbook << SendData(tctx.client, b"DEF")
    playbook << http.HttpResponseHook(flow)
    playbook >> reply()
    assert playbook
def test_upstream_error(tctx):
    """A failed upstream connect is reported to the h2 client as a 502."""
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)
    err = Placeholder(bytes)

    request_bytes = cff.build_headers_frame(
        example_request_headers, flags=["END_STREAM"]).serialize()

    playbook >> DataReceived(tctx.client, request_bytes)
    playbook << http.HttpRequestHeadersHook(flow)
    playbook >> reply()
    playbook << http.HttpRequestHook(flow)
    playbook >> reply()
    playbook << OpenConnection(server)
    # Connection attempt fails with this error string.
    playbook >> reply("oops server <> error")
    playbook << http.HttpErrorHook(flow)
    playbook >> reply()
    playbook << SendData(tctx.client, err)
    assert playbook

    frames = decode_frames(err())
    assert [type(f) for f in frames] == [
        hyperframe.frame.HeadersFrame,
        hyperframe.frame.DataFrame,
    ]
    data_frame = frames[1]
    assert isinstance(data_frame, hyperframe.frame.DataFrame)
    # The error page body carries both the status and the original error text.
    assert b"502 Bad Gateway" in data_frame.data
    assert b"server <> error" in data_frame.data
def test_simple(tctx):
    """A plain HTTP/2 request/response roundtrip through the proxy."""
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)
    initial = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None, side_effect=make_h2)
        << SendData(server, initial)
    )
    # The proxy's first upstream write must be its SETTINGS frame followed by
    # the translated request headers.
    frames = decode_frames(initial())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
    sff = FrameFactory()
    assert (
        playbook
        # a conforming h2 server would send settings first, we disregard this for now.
        >> DataReceived(server, sff.build_headers_frame(example_response_headers).serialize())
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        >> DataReceived(server, sff.build_data_frame(b"Hello, World!", flags=["END_STREAM"]).serialize())
        << http.HttpResponseHook(flow)
        >> reply()
        << SendData(tctx.client,
                    cff.build_headers_frame(example_response_headers).serialize() +
                    cff.build_data_frame(b"Hello, World!").serialize() +
                    cff.build_data_frame(b"", flags=["END_STREAM"]).serialize())
    )
    assert flow().request.url == "http://example.com/"
    assert flow().response.text == "Hello, World!"
def test_body_size_limit(tctx, where, transfer_encoding):
    """Test HTTP request body_size_limit"""
    tctx.options.body_size_limit = "3"
    err = Placeholder(bytes)
    flow = Placeholder(HTTPFlow)
    # 6 bytes of body against a 3-byte limit, framed either way.
    if transfer_encoding == "identity":
        body = b"Content-Length: 6\r\n\r\nabcdef"
    else:
        body = b"Transfer-Encoding: chunked\r\n\r\n6\r\nabcdef"
    if where == "request":
        # Oversized request body: reject with 413 before contacting upstream.
        assert (
            Playbook(http.HttpLayer(tctx, HTTPMode.regular))
            >> DataReceived(tctx.client,
                            b"POST http://example.com/ HTTP/1.1\r\n"
                            b"Host: example.com\r\n" + body)
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpErrorHook(flow)
            >> reply()
            << SendData(tctx.client, err)
            << CloseConnection(tctx.client))
        assert b"413 Payload Too Large" in err()
        assert b"body_size_limit" in err()
    else:
        # Oversized response body: surfaced to the client as a 502.
        server = Placeholder(Server)
        assert (
            Playbook(http.HttpLayer(tctx, HTTPMode.regular))
            >> DataReceived(tctx.client,
                            b"GET http://example.com/ HTTP/1.1\r\n"
                            b"Host: example.com\r\n\r\n")
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server,
                        b"GET / HTTP/1.1\r\n"
                        b"Host: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\n" + body)
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpErrorHook(flow)
            >> reply()
            << SendData(tctx.client, err)
            << CloseConnection(tctx.client)
            << CloseConnection(server))
        assert b"502 Bad Gateway" in err()
        assert b"body_size_limit" in err()
def test_kill_stream(tctx):
    """Test that we can kill individual streams."""
    playbook, cff = start_h2_client(tctx)
    flow1 = Placeholder(HTTPFlow)
    flow2 = Placeholder(HTTPFlow)
    req_headers_hook_1 = http.HttpRequestHeadersHook(flow1)

    def kill(flow: HTTPFlow):
        # Can't use flow.kill() here because that currently still depends on a reply object.
        flow.error = Error(Error.KILLED_MESSAGE)

    server = Placeholder(Server)
    data_req1 = Placeholder(bytes)
    assert (playbook
            # Two concurrent streams (ids 1 and 3) arrive in one read.
            >> DataReceived(tctx.client,
                            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"], stream_id=1).serialize() +
                            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"], stream_id=3).serialize())
            << req_headers_hook_1
            << http.HttpRequestHeadersHook(flow2)
            # Kill only stream 3: it gets RST_STREAM, stream 1 is unaffected.
            >> reply(side_effect=kill)
            << http.HttpErrorHook(flow2)
            >> reply()
            << SendData(tctx.client, cff.build_rst_stream_frame(3, error_code=ErrorCodes.INTERNAL_ERROR).serialize())
            # Now answer the still-pending headers hook for stream 1.
            >> reply(to=req_headers_hook_1)
            << http.HttpRequestHook(flow1)
            >> reply()
            << OpenConnection(server)
            >> reply(None, side_effect=make_h2)
            << SendData(server, data_req1)
            )
    # Stream 1 proceeds normally: SETTINGS + request headers go upstream.
    frames = decode_frames(data_req1())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
def test_https_proxy(strategy, tctx):
    """Test a CONNECT request, followed by a HTTP GET /"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    tctx.options.connection_strategy = strategy

    (playbook
     >> DataReceived(tctx.client, b"CONNECT example.proxy:80 HTTP/1.1\r\n\r\n")
     << http.HttpConnectHook(Placeholder())
     >> reply())
    if strategy == "eager":
        # Eager: connect upstream as soon as the CONNECT is accepted.
        (playbook
         << OpenConnection(server)
         >> reply(None))
    (playbook
     << SendData(tctx.client, b'HTTP/1.1 200 Connection established\r\n\r\n')
     >> DataReceived(tctx.client, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
     << layer.NextLayerHook(Placeholder())
     >> reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
     << http.HttpRequestHeadersHook(flow)
     >> reply()
     << http.HttpRequestHook(flow)
     >> reply())
    if strategy == "lazy":
        # Lazy: connect upstream only when the first request needs it.
        (playbook
         << OpenConnection(server)
         >> reply(None))
    (playbook
     << SendData(server, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
     >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!")
     << http.HttpResponseHeadersHook(flow)
     >> reply()
     << http.HttpResponseHook(flow)
     >> reply()
     << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!"))
    assert playbook
def test_request_trailers(tctx: Context, open_h2_server_conn: Server, stream):
    """HTTP/2 request trailers are forwarded in both streamed and buffered mode."""
    playbook, cff = start_h2_client(tctx)
    tctx.server = open_h2_server_conn

    def enable_streaming(flow: HTTPFlow):
        flow.request.stream = bool(stream)

    flow = Placeholder(HTTPFlow)
    server_data1 = Placeholder(bytes)
    server_data2 = Placeholder(bytes)
    (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers).serialize() +
                        cff.build_data_frame(b"Hello, World!").serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
    )
    if stream:
        # Streaming: headers + body go upstream before the trailers arrive.
        playbook << SendData(tctx.server, server_data1)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_trailers, flags=["END_STREAM"]).serialize())
        << http.HttpRequestHook(flow)
        >> reply()
        << SendData(tctx.server, server_data2)
    )
    # server_data1 stays unset in buffered mode, hence the setdefault fallback.
    frames = decode_frames(server_data1.setdefault(b"") + server_data2())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
        hyperframe.frame.DataFrame,
        hyperframe.frame.HeadersFrame,
    ]
def test_request_streaming(tctx, response):
    """
    Test HTTP request streaming

    This is a bit more contrived as we may receive server data while we are still sending the request.
    """
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    def enable_streaming(flow: HTTPFlow):
        # Uppercase each chunk so the transform is visible in the output.
        flow.request.stream = lambda x: x.upper()

    assert (
        playbook
        >> DataReceived(tctx.client,
                        b"POST http://example.com/ HTTP/1.1\r\n"
                        b"Host: example.com\r\n"
                        b"Content-Length: 6\r\n\r\n"
                        b"abc")
        << http.HttpRequestHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        # With streaming enabled, the partial body is forwarded immediately.
        << SendData(server,
                    b"POST / HTTP/1.1\r\n"
                    b"Host: example.com\r\n"
                    b"Content-Length: 6\r\n\r\n"
                    b"ABC")
    )
    if response == "normal response":
        assert (
            playbook
            >> DataReceived(tctx.client, b"def")
            << SendData(server, b"DEF")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
        )
    elif response == "early response":
        # We may receive a response before we have finished sending our request.
        # We continue sending unless the server closes the connection.
        # https://tools.ietf.org/html/rfc7231#section-6.5.11
        assert (
            playbook
            >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            >> DataReceived(tctx.client, b"def")
            << SendData(server, b"DEF")  # Important: no request hook here!
        )
    elif response == "early close":
        assert (
            playbook
            >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            >> ConnectionClosed(server)
            << CloseConnection(server)
            << CloseConnection(tctx.client)
        )
    elif response == "early kill":
        # Server dies before sending any response: report a 502.
        err = Placeholder(bytes)
        assert (
            playbook
            >> ConnectionClosed(server)
            << CloseConnection(server)
            << http.HttpErrorHook(flow)
            >> reply()
            << SendData(tctx.client, err)
            << CloseConnection(tctx.client)
        )
        assert b"502 Bad Gateway" in err()
    else:  # pragma: no cover
        assert False
def test_http2_client_aborts(tctx, stream, when, how):
    """
    Test handling of the case where a client aborts during request or response transmission.

    If the client aborts the request transmission, we must trigger an error hook,
    if the client disconnects during response transmission, no error hook is triggered.
    """
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook, cff = start_h2_client(tctx)
    resp = Placeholder(bytes)

    def enable_request_streaming(flow: HTTPFlow):
        flow.request.stream = True

    def enable_response_streaming(flow: HTTPFlow):
        flow.response.stream = True

    # Request headers arrive without END_STREAM: the body is still pending.
    assert (
        playbook
        >> DataReceived(tctx.client, cff.build_headers_frame(example_request_headers).serialize())
        << http.HttpRequestHeadersHook(flow)
    )
    if stream and when == "request":
        # Streaming requests connect upstream before the body is complete.
        assert (
            playbook
            >> reply(side_effect=enable_request_streaming)
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server,
                        b"GET / HTTP/1.1\r\n"
                        b"Host: example.com\r\n\r\n")
        )
    else:
        assert playbook >> reply()
    if when == "request":
        # Abort mid-request, either via RST_STREAM or a full disconnect.
        if "RST" in how:
            playbook >> DataReceived(tctx.client, cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
        else:
            playbook >> ConnectionClosed(tctx.client)
            playbook << CloseConnection(tctx.client)
        if stream:
            playbook << CloseConnection(server)
        playbook << http.HttpErrorHook(flow)
        playbook >> reply()
        if how == "RST+disconnect":
            playbook >> ConnectionClosed(tctx.client)
            playbook << CloseConnection(tctx.client)
        assert playbook
        assert "stream reset" in flow().error.msg or "peer closed connection" in flow().error.msg
        return
    # Complete the request, then abort during the response instead.
    assert (
        playbook
        >> DataReceived(tctx.client, cff.build_data_frame(b"", flags=["END_STREAM"]).serialize())
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server,
                    b"GET / HTTP/1.1\r\n"
                    b"Host: example.com\r\n\r\n")
        >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 6\r\n\r\n123")
        << http.HttpResponseHeadersHook(flow)
    )
    if stream:
        assert (
            playbook
            >> reply(side_effect=enable_response_streaming)
            << SendData(tctx.client, resp)
        )
    else:
        assert playbook >> reply()
    if "RST" in how:
        playbook >> DataReceived(tctx.client, cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
    else:
        playbook >> ConnectionClosed(tctx.client)
        playbook << CloseConnection(tctx.client)
    playbook << CloseConnection(server)
    assert (
        playbook
        << http.HttpErrorHook(flow)
        >> reply()
    )
    if how == "RST+disconnect":
        assert (
            playbook
            >> ConnectionClosed(tctx.client)
            << CloseConnection(tctx.client)
        )
    if "RST" in how:
        assert "stream reset" in flow().error.msg
    else:
        assert "peer closed connection" in flow().error.msg
def _test_cancel(stream_req, stream_resp, draw):
    """
    Test that we don't raise an exception if someone disconnects.
    """
    # Fresh context per run so fuzzed state never leaks between cases.
    tctx = context.Context(connection.Client(("client", 1234), ("127.0.0.1", 8080), 1605699329), opts)
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)

    def maybe_stream(flow: HTTPFlow):
        if stream_req:
            flow.request.stream = True
        if stream_resp and flow.response:
            flow.response.stream = True

    hook_req_headers = http.HttpRequestHeadersHook(flow)
    hook_req = http.HttpRequestHook(flow)
    hook_resp_headers = http.HttpResponseHeadersHook(flow)
    hook_resp = http.HttpResponseHook(flow)
    hook_error = http.HttpErrorHook(flow)
    openconn = OpenConnection(server)
    send_upstream = SendData(server, Placeholder(bytes))

    data_req = DataReceived(tctx.client, cff.build_headers_frame(example_request_headers).serialize())
    data_reqbody = DataReceived(tctx.client, cff.build_data_frame(b"foo", flags=["END_STREAM"]).serialize())
    data_resp = DataReceived(server, cff.build_headers_frame(example_response_headers).serialize())
    data_respbody = DataReceived(server, cff.build_data_frame(b"bar", flags=["END_STREAM"]).serialize())

    client_disc = ConnectionClosed(tctx.client)
    client_rst = DataReceived(tctx.client, cff.build_rst_stream_frame(1).serialize())
    server_disc = ConnectionClosed(server)
    server_rst = DataReceived(server, cff.build_rst_stream_frame(1).serialize())

    # Each entry maps a name to (event, precondition, but-not-after-this):
    # the event may only fire after its precondition has been observed and
    # before its negative precondition has been observed.
    evts: Dict[str, Tuple[Any, Any, Any]] = {}
    # precondition, but-not-after-this
    evts["data_req"] = data_req, None, client_disc
    evts["data_reqbody"] = data_reqbody, data_req, client_disc
    evts["reply_hook_req_headers"] = reply(to=hook_req_headers, side_effect=maybe_stream), hook_req_headers, None
    evts["reply_hook_req"] = reply(to=hook_req), hook_req, None
    evts["reply_openconn"] = reply(None, to=openconn, side_effect=make_h2), openconn, None
    evts["data_resp"] = data_resp, send_upstream, server_disc
    evts["data_respbody"] = data_respbody, data_resp, server_disc
    evts["reply_hook_resp_headers"] = reply(to=hook_resp_headers, side_effect=maybe_stream), hook_resp_headers, None
    evts["reply_hook_resp"] = reply(to=hook_resp), hook_resp, None
    evts["reply_hook_error"] = reply(to=hook_error), hook_error, None

    evts["err_client_disc"] = client_disc, None, None
    evts["err_client_rst"] = client_rst, None, client_disc
    evts["err_server_disc"] = server_disc, send_upstream, None
    evts["err_server_rst"] = server_rst, send_upstream, server_disc

    def eq_maybe(a, b):
        # _eq helpfully raises a TypeError when placeholder types don't match
        # that is useful in (test) development, but may happen legitimately when fuzzing here.
        try:
            return _eq(a, b)
        except TypeError:
            return False

    while evts:
        candidates = []
        for name, (evt, precon, negprecon) in evts.items():
            precondition_ok = (
                precon is None or any(eq_maybe(x, precon) for x in playbook.actual)
            )
            neg_precondition_ok = (
                negprecon is None or not any(eq_maybe(x, negprecon) for x in playbook.actual)
            )
            if precondition_ok and neg_precondition_ok:
                # crude hack to increase fuzzing efficiency: make it more likely that we progress.
                for i in range(1 if name.startswith("err_") else 3):
                    candidates.append((name, evt))
        if not candidates:
            break
        name, evt = draw(candidates)
        del evts[name]
        try:
            assert playbook >> evt
        except AssertionError:
            if any(
                isinstance(x, _TracebackInPlaybook) for x in playbook.actual
            ):
                # A real crash inside the layer: surface it.
                raise
            else:
                # add commands that the server issued.
                playbook.expected.extend(playbook.actual[len(playbook.expected):])
def test_request_streaming(tctx, why, transfer_encoding, response):
    """
    Test HTTP request streaming

    This is a bit more contrived as we may receive server data while we are still sending the request.
    """
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    # Streaming is triggered either by an addon or by the stream_large_bodies
    # option ("body_size=<n>" parametrization).
    if why.startswith("body_size"):
        tctx.options.stream_large_bodies = why.replace("body_size=", "")

    def enable_streaming(flow: HTTPFlow):
        if why == "addon":
            flow.request.stream = True

    playbook >> DataReceived(tctx.client,
                             b"POST http://example.com/ HTTP/1.1\r\n"
                             b"Host: example.com\r\n")
    if transfer_encoding == "identity":
        playbook >> DataReceived(tctx.client,
                                 b"Content-Length: 9\r\n\r\n"
                                 b"abc")
    else:
        playbook >> DataReceived(tctx.client,
                                 b"Transfer-Encoding: chunked\r\n\r\n"
                                 b"3\r\nabc\r\n")
    playbook << http.HttpRequestHeadersHook(flow)
    playbook >> reply(side_effect=enable_streaming)

    # With a 3-byte threshold and chunked framing, a second chunk must arrive
    # before the size limit is exceeded and streaming kicks in.
    needs_more_data_before_open = (why == "body_size=3" and transfer_encoding == "chunked")
    if needs_more_data_before_open:
        playbook >> DataReceived(tctx.client, b"3\r\ndef\r\n")

    playbook << OpenConnection(server)
    playbook >> reply(None)
    playbook << SendData(server,
                         b"POST / HTTP/1.1\r\n"
                         b"Host: example.com\r\n")
    if transfer_encoding == "identity":
        playbook << SendData(server,
                             b"Content-Length: 9\r\n\r\n"
                             b"abc")
        playbook >> DataReceived(tctx.client, b"def")
        playbook << SendData(server, b"def")
    else:
        if needs_more_data_before_open:
            # Both buffered chunks are forwarded merged into one.
            playbook << SendData(server,
                                 b"Transfer-Encoding: chunked\r\n\r\n"
                                 b"6\r\nabcdef\r\n")
        else:
            playbook << SendData(server,
                                 b"Transfer-Encoding: chunked\r\n\r\n"
                                 b"3\r\nabc\r\n")
            playbook >> DataReceived(tctx.client, b"3\r\ndef\r\n")
            playbook << SendData(server, b"3\r\ndef\r\n")

    if response == "normal response":
        if transfer_encoding == "identity":
            playbook >> DataReceived(tctx.client, b"ghi")
            playbook << SendData(server, b"ghi")
        else:
            playbook >> DataReceived(tctx.client, b"3\r\nghi\r\n0\r\n\r\n")
            playbook << SendData(server, b"3\r\nghi\r\n")
        # The request hook fires once the complete body has been read.
        playbook << http.HttpRequestHook(flow)
        playbook >> reply()
        if transfer_encoding == "chunked":
            playbook << SendData(server, b"0\r\n\r\n")
        assert (
            playbook
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n"))
    elif response == "early response":
        # We may receive a response before we have finished sending our request.
        # We continue sending unless the server closes the connection.
        # https://tools.ietf.org/html/rfc7231#section-6.5.11
        assert (playbook
                >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
                << http.HttpResponseHeadersHook(flow)
                >> reply()
                << http.HttpResponseHook(flow)
                >> reply()
                << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n"))
        if transfer_encoding == "identity":
            playbook >> DataReceived(tctx.client, b"ghi")
            playbook << SendData(server, b"ghi")
        else:
            playbook >> DataReceived(tctx.client, b"3\r\nghi\r\n0\r\n\r\n")
            playbook << SendData(server, b"3\r\nghi\r\n")
        playbook << http.HttpRequestHook(flow)
        playbook >> reply()
        if transfer_encoding == "chunked":
            playbook << SendData(server, b"0\r\n\r\n")
        assert playbook
    elif response == "early close":
        assert (playbook
                >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
                << http.HttpResponseHeadersHook(flow)
                >> reply()
                << http.HttpResponseHook(flow)
                >> reply()
                << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
                >> ConnectionClosed(server)
                << CloseConnection(server)
                << CloseConnection(tctx.client))
    elif response == "early kill":
        # Server dies before any response: report a 502 to the client.
        err = Placeholder(bytes)
        assert (playbook
                >> ConnectionClosed(server)
                << CloseConnection(server)
                << http.HttpErrorHook(flow)
                >> reply()
                << SendData(tctx.client, err)
                << CloseConnection(tctx.client))
        assert b"502 Bad Gateway" in err()
    else:  # pragma: no cover
        assert False
def test_kill_flow(tctx, when):
    """Test that we properly kill flows if instructed to do so.

    Parametrized over *when*: the hook at which the flow is killed
    ("http_connect", "requestheaders", "request",
    "script-response-responseheaders", "responseheaders", "response",
    or "error"). Each branch drives the playbook up to that hook, then
    kills the flow and asserts the client connection is closed.
    """
    server = Placeholder(Server)
    connect_flow = Placeholder(HTTPFlow)
    flow = Placeholder(HTTPFlow)

    def kill(flow: HTTPFlow) -> None:
        # Can't use flow.kill() here because that currently still depends on a reply object.
        flow.error = Error(Error.KILLED_MESSAGE)

    def assert_kill(err_hook: bool = True) -> None:
        # Kill the flow from the pending hook reply; unless err_hook is
        # False, an HttpErrorHook is expected before the client close.
        playbook >> reply(side_effect=kill)
        if err_hook:
            playbook << http.HttpErrorHook(flow)
            playbook >> reply()
        playbook << CloseConnection(tctx.client)
        assert playbook

    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    # Client opens a tunnel; first interception point is the CONNECT hook.
    assert (playbook
            >> DataReceived(tctx.client, b"CONNECT example.com:80 HTTP/1.1\r\n\r\n")
            << http.HttpConnectHook(connect_flow))
    if when == "http_connect":
        # No HttpErrorHook is expected when killing at the CONNECT stage.
        return assert_kill(False)
    # Tunnel established; the inner request reaches the requestheaders hook.
    assert (playbook
            >> reply()
            << SendData(tctx.client, b'HTTP/1.1 200 Connection established\r\n\r\n')
            >> DataReceived(tctx.client, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
            << layer.NextLayerHook(Placeholder())
            >> reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
            << http.HttpRequestHeadersHook(flow))
    if when == "requestheaders":
        return assert_kill()
    assert (playbook
            >> reply()
            << http.HttpRequestHook(flow))
    if when == "request":
        return assert_kill()
    if when == "script-response-responseheaders":
        # An addon sets flow.response in the request hook, so the response
        # headers hook fires without any server connection being opened.
        assert (playbook
                >> reply(side_effect=lambda f: setattr(f, "response", Response.make()))
                << http.HttpResponseHeadersHook(flow))
        return assert_kill()
    # Forward the request upstream and receive the response headers
    # (Content-Length: 12, but only 11 body bytes so far — body incomplete).
    assert (playbook
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World")
            << http.HttpResponseHeadersHook(flow))
    if when == "responseheaders":
        return assert_kill()
    if when == "response":
        # Final body byte arrives, completing the response before the kill.
        assert (playbook
                >> reply()
                >> DataReceived(server, b"!")
                << http.HttpResponseHook(flow))
        return assert_kill(False)
    elif when == "error":
        # Server closes mid-body; killing from the error hook itself, so
        # no further HttpErrorHook is expected.
        assert (playbook
                >> reply()
                >> ConnectionClosed(server)
                << CloseConnection(server)
                << http.HttpErrorHook(flow))
        return assert_kill(False)
    else:
        raise AssertionError
def test_upstream_proxy(tctx, redirect, scheme):
    """Test that an upstream HTTP proxy is used.

    Parametrized over *scheme* ("http" vs. a CONNECT-tunneled scheme) and
    *redirect* (falsy for no redirect, "change-destination" to rewrite the
    request target, "change-proxy" to rewrite the upstream proxy address).
    Verifies that the first request goes through proxy:8080 and that a
    second, possibly redirected, request opens/reuses the right connection.
    """
    server = Placeholder(Server)
    server2 = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    tctx.options.mode = "upstream:http://proxy:8080"
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.upstream), hooks=False)

    if scheme == "http":
        # Plain HTTP: the absolute-form request is forwarded to the proxy as-is.
        assert (
            playbook
            >> DataReceived(tctx.client, b"GET http://example.com/ HTTP/1.1\r\nHost: example.com\r\n\r\n")
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET http://example.com/ HTTP/1.1\r\nHost: example.com\r\n\r\n")
        )
    else:
        # Tunneled: client CONNECTs, then the proxy layer issues its own
        # CONNECT to the upstream proxy before forwarding the origin-form request.
        assert (
            playbook
            >> DataReceived(tctx.client, b"CONNECT example.com:443 HTTP/1.1\r\n\r\n")
            << SendData(tctx.client, b"HTTP/1.1 200 Connection established\r\n\r\n")
            >> DataReceived(tctx.client, b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
            << layer.NextLayerHook(Placeholder())
            >> reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"CONNECT example.com:443 HTTP/1.1\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 Connection established\r\n\r\n")
            << SendData(server, b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        )

    playbook >> DataReceived(server, b"HTTP/1.1 418 OK\r\nContent-Length: 0\r\n\r\n")
    playbook << SendData(tctx.client, b"HTTP/1.1 418 OK\r\nContent-Length: 0\r\n\r\n")
    assert playbook
    # First request must have gone through the configured upstream proxy.
    assert server().address == ("proxy", 8080)

    # Second request on the same client connection.
    if scheme == "http":
        playbook >> DataReceived(tctx.client, b"GET http://example.com/two HTTP/1.1\r\nHost: example.com\r\n\r\n")
    else:
        playbook >> DataReceived(tctx.client, b"GET /two HTTP/1.1\r\nHost: example.com\r\n\r\n")

    assert (playbook << http.HttpRequestHook(flow))
    # Mutate the flow from "inside" the request hook before replying.
    if redirect == "change-destination":
        flow().request.host = "other-server"
        flow().request.host_header = "example.com"
    elif redirect == "change-proxy":
        flow().server_conn.via = ServerSpec("http", address=("other-proxy", 1234))
    playbook >> reply()

    if redirect:
        # Protocol-wise we wouldn't need to open a new connection for plain http host redirects,
        # but we disregard this edge case to simplify implementation.
        playbook << OpenConnection(server2)
        playbook >> reply(None)
    else:
        # No redirect: the existing connection is reused.
        server2 = server

    if scheme == "http":
        if redirect == "change-destination":
            playbook << SendData(server2, b"GET http://other-server/two HTTP/1.1\r\nHost: example.com\r\n\r\n")
        else:
            playbook << SendData(server2, b"GET http://example.com/two HTTP/1.1\r\nHost: example.com\r\n\r\n")
    else:
        # A fresh tunnel needs a new CONNECT handshake before the request.
        if redirect == "change-destination":
            playbook << SendData(server2, b"CONNECT other-server:443 HTTP/1.1\r\n\r\n")
            playbook >> DataReceived(server2, b"HTTP/1.1 200 Connection established\r\n\r\n")
        elif redirect == "change-proxy":
            playbook << SendData(server2, b"CONNECT example.com:443 HTTP/1.1\r\n\r\n")
            playbook >> DataReceived(server2, b"HTTP/1.1 200 Connection established\r\n\r\n")
        playbook << SendData(server2, b"GET /two HTTP/1.1\r\nHost: example.com\r\n\r\n")

    playbook >> DataReceived(server2, b"HTTP/1.1 418 OK\r\nContent-Length: 0\r\n\r\n")
    playbook << SendData(tctx.client, b"HTTP/1.1 418 OK\r\nContent-Length: 0\r\n\r\n")
    assert playbook

    # Only a proxy change moves the upstream address; a destination change
    # still goes through the originally configured proxy.
    if redirect == "change-proxy":
        assert server2().address == ("other-proxy", 1234)
    else:
        assert server2().address == ("proxy", 8080)

    assert (
        playbook
        >> ConnectionClosed(tctx.client)
        << CloseConnection(tctx.client)
    )