def test_server_unreachable(tctx, connect):
    """Test the scenario where the target server is unreachable."""
    tctx.options.connection_strategy = "eager"
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    err = Placeholder(bytes)  # captures the error response bytes sent to the client
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=False)
    if connect:
        playbook >> DataReceived(tctx.client, b"CONNECT example.com:443 HTTP/1.1\r\n\r\n")
    else:
        playbook >> DataReceived(tctx.client, b"GET http://example.com/ HTTP/1.1\r\n\r\n")

    playbook << OpenConnection(server)
    # eager strategy: the connection attempt happens immediately and fails.
    playbook >> reply("Connection failed")
    if not connect:
        # Our API isn't ideal here, there is no error hook for CONNECT requests currently.
        # We could fix this either by having CONNECT request go through all our regular hooks,
        # or by adding dedicated ok/error hooks.
        playbook << http.HttpErrorHook(flow)
        playbook >> reply()
    playbook << SendData(tctx.client, err)
    if not connect:
        playbook << CloseConnection(tctx.client)
    assert playbook
    if not connect:
        assert flow().error

    assert b"502 Bad Gateway" in err()
    assert b"Connection failed" in err()
def test_https_proxy(strategy, tctx):
    """Test a CONNECT request, followed by a HTTP GET /"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    tctx.options.connection_strategy = strategy

    (playbook
     >> DataReceived(tctx.client, b"CONNECT example.proxy:80 HTTP/1.1\r\n\r\n")
     << http.HttpConnectHook(Placeholder())
     >> reply())
    if strategy == "eager":
        # eager: the upstream connection is opened before replying to CONNECT.
        (playbook
         << OpenConnection(server)
         >> reply(None))
    (playbook
     << SendData(tctx.client, b'HTTP/1.1 200 Connection established\r\n\r\n')
     >> DataReceived(tctx.client, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
     << layer.NextLayerHook(Placeholder())
     >> reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
     << http.HttpRequestHeadersHook(flow)
     >> reply()
     << http.HttpRequestHook(flow)
     >> reply())
    if strategy == "lazy":
        # lazy: the upstream connection is only opened once a request needs it.
        (playbook
         << OpenConnection(server)
         >> reply(None))
    (playbook
     << SendData(server, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
     >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!")
     << http.HttpResponseHeadersHook(flow)
     >> reply()
     << http.HttpResponseHook(flow)
     >> reply()
     << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!"))
    assert playbook
def test_cancel_then_server_disconnect(tctx):
    """
    Test that we properly handle the case of the following event sequence:
        - client cancels a stream
        - we start an error hook
        - server disconnects
        - error hook completes.
    """
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers,
                                                flags=["END_STREAM"]).serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server, b'GET / HTTP/1.1\r\nHost: example.com\r\n\r\n')
        # client cancels the stream via RST_STREAM...
        >> DataReceived(tctx.client,
                        cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
        << CloseConnection(server)
        << http.HttpErrorHook(flow)
        >> reply()
        # ...and the server disconnect afterwards must produce no further actions.
        >> ConnectionClosed(server)
        << None
    )
def test_disconnect_while_intercept(tctx):
    """Test a server disconnect while a request is intercepted."""
    tctx.options.connection_strategy = "eager"

    server1 = Placeholder(Server)
    server2 = Placeholder(Server)
    flow = Placeholder(HTTPFlow)

    assert (
        Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=False)
        >> DataReceived(tctx.client, b"CONNECT example.com:80 HTTP/1.1\r\n\r\n")
        << http.HttpConnectHook(Placeholder(HTTPFlow))
        >> reply()
        << OpenConnection(server1)
        >> reply(None)
        << SendData(tctx.client, b'HTTP/1.1 200 Connection established\r\n\r\n')
        >> DataReceived(tctx.client, b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        << layer.NextLayerHook(Placeholder())
        >> reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
        << http.HttpRequestHook(flow)
        # server1 goes away while the request hook is still pending...
        >> ConnectionClosed(server1)
        << CloseConnection(server1)
        # ...then the request hook completes (reply targets the hook, 3 events back)
        >> reply(to=-3)
        # and a fresh connection is opened to replay the request.
        << OpenConnection(server2)
        >> reply(None)
        << SendData(server2, b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        >> DataReceived(server2, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
        << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n"))
    assert server1() != server2()
    assert flow().server_conn == server2()
def test_transparent_tcp(tctx: Context, monkeypatch, connection_strategy):
    """Transparent mode: the original destination is resolved from the client socket
    (monkeypatched here) and raw TCP data is relayed once the connection is open."""
    monkeypatch.setattr(platform, "original_addr", lambda sock: ("address", 22))

    flow = Placeholder(TCPFlow)
    tctx.options.connection_strategy = connection_strategy

    sock = object()  # opaque stand-in for the client socket handed to original_addr
    playbook = Playbook(modes.TransparentProxy(tctx))
    (
        playbook
        << GetSocket(tctx.client)
        >> reply(sock)
    )
    if connection_strategy == "lazy":
        # lazy: nothing further happens until actual data arrives.
        assert playbook
    else:
        assert (
            playbook
            << OpenConnection(tctx.server)
            >> reply(None)
            >> DataReceived(tctx.server, b"hello")
            << NextLayerHook(Placeholder(NextLayer))
            >> reply_next_layer(tcp.TCPLayer)
            << TcpStartHook(flow)
            >> reply()
            << TcpMessageHook(flow)
            >> reply()
            << SendData(tctx.client, b"hello")
        )
        assert flow().messages[0].content == b"hello"
        assert not flow().messages[0].from_client

    assert tctx.server.address == ("address", 22)
def test_h2_to_h1(tctx):
    """Test HTTP/2 -> HTTP/1 request translation"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    conn, playbook = h2_client(tctx)

    conn.send_headers(1, example_request_headers, end_stream=True)
    response = Placeholder(bytes)  # captures the raw h2 bytes sent back to the client
    assert (
        playbook
        >> DataReceived(tctx.client, conn.data_to_send())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server, b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\n")
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        >> DataReceived(server, b"Hello World!")
        << http.HttpResponseHook(flow)
        << CloseConnection(server)
        # reply to the response hook (two events back) after the close command
        >> reply(to=-2)
        << SendData(tctx.client, response))

    events = conn.receive_data(response())
    assert event_types(events) == [
        h2.events.ResponseReceived, h2.events.DataReceived,
        h2.events.DataReceived, h2.events.StreamEnded
    ]

    resp: h2.events.ResponseReceived = events[0]
    body: h2.events.DataReceived = events[1]
    assert resp.headers == [(b':status', b'200'), (b'content-length', b'12')]
    assert body.data == b"Hello World!"
def test_simple(tctx):
    """A simple HTTP/2 request/response roundtrip through the proxy."""
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)
    initial = Placeholder(bytes)  # first bytes sent to the server (preface/settings + headers)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers,
                                                flags=["END_STREAM"]).serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None, side_effect=make_h2)
        << SendData(server, initial))
    frames = decode_frames(initial())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
    sff = FrameFactory()
    assert (
        playbook
        # a conforming h2 server would send settings first, we disregard this for now.
        >> DataReceived(server,
                        sff.build_headers_frame(example_response_headers).serialize())
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        >> DataReceived(server,
                        sff.build_data_frame(b"Hello, World!",
                                             flags=["END_STREAM"]).serialize())
        << http.HttpResponseHook(flow)
        >> reply()
        << SendData(tctx.client,
                    cff.build_headers_frame(example_response_headers).serialize()
                    + cff.build_data_frame(b"Hello, World!").serialize()
                    + cff.build_data_frame(b"", flags=["END_STREAM"]).serialize()))
    assert flow().request.url == "http://example.com/"
    assert flow().response.text == "Hello, World!"
def test_kill_stream(tctx):
    """Test that we can kill individual streams."""
    playbook, cff = start_h2_client(tctx)
    flow1 = Placeholder(HTTPFlow)
    flow2 = Placeholder(HTTPFlow)

    req_headers_hook_1 = http.HttpRequestHeadersHook(flow1)

    def kill(flow: HTTPFlow):
        # Can't use flow.kill() here because that currently still depends on a reply object.
        flow.error = Error(Error.KILLED_MESSAGE)

    server = Placeholder(Server)
    data_req1 = Placeholder(bytes)
    assert (
        playbook
        # two concurrent requests on streams 1 and 3; stream 3 gets killed.
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers,
                                                flags=["END_STREAM"],
                                                stream_id=1).serialize()
                        + cff.build_headers_frame(example_request_headers,
                                                  flags=["END_STREAM"],
                                                  stream_id=3).serialize())
        << req_headers_hook_1
        << http.HttpRequestHeadersHook(flow2)
        >> reply(side_effect=kill)
        << http.HttpErrorHook(flow2)
        >> reply()
        # only stream 3 is reset; stream 1 proceeds normally.
        << SendData(tctx.client,
                    cff.build_rst_stream_frame(
                        3, error_code=ErrorCodes.INTERNAL_ERROR).serialize())
        >> reply(to=req_headers_hook_1)
        << http.HttpRequestHook(flow1)
        >> reply()
        << OpenConnection(server)
        >> reply(None, side_effect=make_h2)
        << SendData(server, data_req1))
    frames = decode_frames(data_req1())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
def test_stream_concurrent_get_connection(tctx):
    """Test that an immediate second request for the same domain does not trigger a second connection attempt."""
    playbook, cff = start_h2_client(tctx)
    playbook.hooks = False

    server = Placeholder(Server)
    data = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers,
                                                flags=["END_STREAM"],
                                                stream_id=1).serialize())
        << (o := OpenConnection(server))
        # second request arrives while the first OpenConnection is still pending...
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers,
                                                flags=["END_STREAM"],
                                                stream_id=3).serialize())
        >> reply(None, to=o, side_effect=make_h2)
        << SendData(server, data))
    # ...both requests are multiplexed onto the single connection.
    frames = decode_frames(data())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
        hyperframe.frame.HeadersFrame,
    ]
def test_no_data_on_closed_stream(self, tctx):
    """After we cancel a stream with RST_STREAM, late DATA frames from the server
    must be answered with STREAM_CLOSED and produce no ResponseData event."""
    frame_factory = FrameFactory()
    req = Request.make("GET", "http://example.com/")
    resp = {":status": 200}
    assert (
        Playbook(Http2Client(tctx))
        << SendData(tctx.server, Placeholder(bytes))  # preamble + initial settings frame
        >> DataReceived(tctx.server,
                        frame_factory.build_settings_frame({}, ack=True).serialize())
        >> http.RequestHeaders(1, req, end_stream=True)
        << SendData(tctx.server,
                    b"\x00\x00\x06\x01\x05\x00\x00\x00\x01\x82\x86\x84\\\x81\x07")
        >> http.RequestEndOfMessage(1)
        >> DataReceived(tctx.server,
                        frame_factory.build_headers_frame(resp).serialize())
        << http.ReceiveHttp(Placeholder(http.ResponseHeaders))
        # client side cancels the request...
        >> http.RequestProtocolError(1, "cancelled",
                                     code=status_codes.CLIENT_CLOSED_REQUEST)
        << SendData(tctx.server,
                    frame_factory.build_rst_stream_frame(
                        1, ErrorCodes.CANCEL).serialize())
        # ...so a late DATA frame is rejected with STREAM_CLOSED.
        >> DataReceived(tctx.server,
                        frame_factory.build_data_frame(b"foo").serialize())
        << SendData(tctx.server,
                    frame_factory.build_rst_stream_frame(
                        1, ErrorCodes.STREAM_CLOSED).serialize())
    )  # important: no ResponseData event here!
def test_request_trailers(tctx: Context, open_h2_server_conn: Server, stream):
    """HTTP/2 request trailers are forwarded to the server, both in streaming
    and non-streaming mode."""
    playbook, cff = start_h2_client(tctx)
    tctx.server = open_h2_server_conn

    def enable_streaming(flow: HTTPFlow):
        flow.request.stream = bool(stream)

    flow = Placeholder(HTTPFlow)
    server_data1 = Placeholder(bytes)  # bytes sent while streaming (before trailers)
    server_data2 = Placeholder(bytes)  # bytes sent once the request hook completes
    (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers).serialize()
                        + cff.build_data_frame(b"Hello, World!").serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
    )
    if stream:
        playbook << SendData(tctx.server, server_data1)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_trailers,
                                                flags=["END_STREAM"]).serialize())
        << http.HttpRequestHook(flow)
        >> reply()
        << SendData(tctx.server, server_data2))

    # server_data1 stays unset (b"") in the non-streaming case.
    frames = decode_frames(server_data1.setdefault(b"") + server_data2())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
        hyperframe.frame.DataFrame,
        hyperframe.frame.HeadersFrame,
    ]
def test_stream_modify(tctx):
    """Test HTTP stream modification"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)

    def enable_streaming(flow: HTTPFlow):
        # wrap each streamed chunk in brackets, for request and response alike.
        if flow.response is None:
            flow.request.stream = lambda x: b"[" + x + b"]"
        else:
            flow.response.stream = lambda x: b"[" + x + b"]"

    assert (
        Playbook(http.HttpLayer(tctx, HTTPMode.regular))
        >> DataReceived(tctx.client,
                        b"POST http://example.com/ HTTP/1.1\r\n"
                        b"Host: example.com\r\n"
                        b"Transfer-Encoding: chunked\r\n\r\n"
                        b"3\r\nabc\r\n"
                        b"0\r\n\r\n")
        << http.HttpRequestHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
        << OpenConnection(server)
        >> reply(None)
        # the modified chunk plus the bracket-wrapped empty final chunk
        << SendData(server,
                    b"POST / HTTP/1.1\r\n"
                    b"Host: example.com\r\n"
                    b"Transfer-Encoding: chunked\r\n\r\n"
                    b"5\r\n[abc]\r\n"
                    b"2\r\n[]\r\n")
        << http.HttpRequestHook(flow)
        >> reply()
        << SendData(server, b"0\r\n\r\n")
        >> DataReceived(server,
                        b"HTTP/1.1 200 OK\r\n"
                        b"Transfer-Encoding: chunked\r\n\r\n"
                        b"3\r\ndef\r\n"
                        b"0\r\n\r\n")
        << http.HttpResponseHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
        << SendData(tctx.client,
                    b"HTTP/1.1 200 OK\r\n"
                    b"Transfer-Encoding: chunked\r\n\r\n"
                    b"5\r\n[def]\r\n"
                    b"2\r\n[]\r\n")
        << http.HttpResponseHook(flow)
        >> reply()
        << SendData(tctx.client, b"0\r\n\r\n"))
def test_response_streaming(tctx, why, transfer_encoding):
    """Test HTTP response streaming"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    if why.startswith("body_size"):
        # streaming triggered by the stream_large_bodies option rather than an addon
        tctx.options.stream_large_bodies = why.replace("body_size=", "")

    def enable_streaming(flow: HTTPFlow):
        if why == "addon":
            flow.response.stream = True

    assert (
        playbook
        >> DataReceived(
            tctx.client,
            b"GET http://example.com/largefile HTTP/1.1\r\nHost: example.com\r\n\r\n"
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server, b"GET /largefile HTTP/1.1\r\nHost: example.com\r\n\r\n")
        >> DataReceived(server, b"HTTP/1.1 200 OK\r\n"))
    if transfer_encoding == "identity":
        playbook >> DataReceived(server,
                                 b"Content-Length: 6\r\n\r\n"
                                 b"abc")
    else:
        playbook >> DataReceived(server,
                                 b"Transfer-Encoding: chunked\r\n\r\n"
                                 b"3\r\nabc\r\n")
    playbook << http.HttpResponseHeadersHook(flow)
    playbook >> reply(side_effect=enable_streaming)

    if transfer_encoding == "identity":
        playbook << SendData(tctx.client,
                             b"HTTP/1.1 200 OK\r\n"
                             b"Content-Length: 6\r\n\r\n"
                             b"abc")
        playbook >> DataReceived(server, b"def")
        playbook << SendData(tctx.client, b"def")
    else:
        if why == "body_size=3":
            # the size threshold is only hit after the second chunk, so both
            # chunks are forwarded together in one combined chunk.
            playbook >> DataReceived(server, b"3\r\ndef\r\n")
            playbook << SendData(tctx.client,
                                 b"HTTP/1.1 200 OK\r\n"
                                 b"Transfer-Encoding: chunked\r\n\r\n"
                                 b"6\r\nabcdef\r\n")
        else:
            playbook << SendData(tctx.client,
                                 b"HTTP/1.1 200 OK\r\n"
                                 b"Transfer-Encoding: chunked\r\n\r\n"
                                 b"3\r\nabc\r\n")
            playbook >> DataReceived(server, b"3\r\ndef\r\n")
            playbook << SendData(tctx.client, b"3\r\ndef\r\n")
        playbook >> DataReceived(server, b"0\r\n\r\n")

    playbook << http.HttpResponseHook(flow)
    playbook >> reply()
    if transfer_encoding == "chunked":
        playbook << SendData(tctx.client, b"0\r\n\r\n")
    assert playbook
def test_simple(self, tctx, pipeline):
    """A POST followed by a GET, either pipelined in one receive or sent
    separately — both must yield the same event stream."""
    hdrs1 = Placeholder(RequestHeaders)
    hdrs2 = Placeholder(RequestHeaders)
    req2 = (
        b"GET http://example.com/two HTTP/1.1\r\n"
        b"Host: example.com\r\n"
        b"\r\n"
    )
    playbook = Playbook(Http1Server(tctx))
    (
        playbook
        >> DataReceived(tctx.client,
                        b"POST http://example.com/one HTTP/1.1\r\n"
                        b"Content-Length: 3\r\n"
                        b"\r\n"
                        b"abc" + (req2 if pipeline else b""))
        << ReceiveHttp(hdrs1)
        << ReceiveHttp(RequestData(1, b"abc"))
        << ReceiveHttp(RequestEndOfMessage(1))
        >> ResponseHeaders(1, http.Response.make(200))
        << SendData(tctx.client, b'HTTP/1.1 200 OK\r\ncontent-length: 0\r\n\r\n')
        >> ResponseEndOfMessage(1)
    )
    if not pipeline:
        playbook >> DataReceived(tctx.client, req2)
    assert (
        playbook
        << ReceiveHttp(hdrs2)
        << ReceiveHttp(RequestEndOfMessage(3))
    )
def test_reverse_proxy(tctx, keep_host_header):
    """Test mitmproxy in reverse proxy mode.

     - make sure that we connect to the right host
     - make sure that we respect keep_host_header
     - make sure that we include non-standard ports in the host header (#4280)
    """
    server = Placeholder(Server)
    tctx.options.mode = "reverse:http://localhost:8000"
    tctx.options.connection_strategy = "lazy"
    tctx.options.keep_host_header = keep_host_header
    assert (
        Playbook(modes.ReverseProxy(tctx), hooks=False)
        >> DataReceived(tctx.client,
                        b"GET /foo HTTP/1.1\r\n"
                        b"Host: example.com\r\n\r\n")
        << NextLayerHook(Placeholder(NextLayer))
        >> reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
        << OpenConnection(server)
        >> reply(None)
        # without keep_host_header, the Host header is rewritten to the
        # configured upstream including its non-default port.
        << SendData(server,
                    b"GET /foo HTTP/1.1\r\n"
                    b"Host: "
                    + (b"example.com" if keep_host_header else b"localhost:8000")
                    + b"\r\n\r\n")
        >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
        << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
    )
    assert server().address == ("localhost", 8000)
def test_http_client_aborts(tctx, stream):
    """Test handling of the case where a client aborts during request transmission."""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=True)

    def enable_streaming(flow: HTTPFlow):
        flow.request.stream = True

    assert (
        playbook
        >> DataReceived(tctx.client,
                        b"POST http://example.com/ HTTP/1.1\r\n"
                        b"Host: example.com\r\n"
                        b"Content-Length: 6\r\n"
                        b"\r\n"
                        b"abc")
        << http.HttpRequestHeadersHook(flow))
    if stream:
        # streaming: the partial body is forwarded upstream right away.
        assert (
            playbook
            >> reply(side_effect=enable_streaming)
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server,
                        b"POST / HTTP/1.1\r\n"
                        b"Host: example.com\r\n"
                        b"Content-Length: 6\r\n"
                        b"\r\n"
                        b"abc"))
    else:
        assert playbook >> reply()
    # the client disconnects before the body is complete.
    (playbook
     >> ConnectionClosed(tctx.client)
     << CloseConnection(tctx.client))
    if stream:
        playbook << CloseConnection(server)
    assert (
        playbook
        << http.HttpErrorHook(flow)
        >> reply()
        << None)

    assert "peer closed connection" in flow().error.msg
def test_h1_to_h2(tctx):
    """Test HTTP/1 -> HTTP/2 request translation"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    # act as the upstream h2 server ourselves.
    conf = h2.config.H2Configuration(client_side=False)
    conn = h2.connection.H2Connection(conf)
    conn.initiate_connection()

    request = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        b"GET http://example.com/ HTTP/1.1\r\nHost: example.com\r\n\r\n")
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None, side_effect=make_h2)
        << SendData(server, request))
    events = conn.receive_data(request())
    assert event_types(events) == [
        h2.events.RemoteSettingsChanged, h2.events.RequestReceived, h2.events.StreamEnded
    ]

    conn.send_headers(1, example_response_headers)
    conn.send_data(1, b"Hello World!", end_stream=True)

    settings_ack = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(server, conn.data_to_send())
        << http.HttpResponseHeadersHook(flow)
        # the SETTINGS ack goes out before the headers hook is replied to.
        << SendData(server, settings_ack)
        >> reply(to=-2)
        << http.HttpResponseHook(flow)
        >> reply()
        # no content-length on the h1 side, so the response is framed by close.
        << SendData(tctx.client, b"HTTP/1.1 200 OK\r\n\r\nHello World!")
        << CloseConnection(tctx.client))
    assert settings_ack() == b'\x00\x00\x00\x04\x01\x00\x00\x00\x00'
def test_rst_then_close(tctx):
    """
    Test that we properly handle the case of a client that first causes protocol errors and then disconnects.

    Adapted from h2spec http2/5.1/5.
    """
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)

    assert (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers,
                                                flags=["END_STREAM"]).serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        # a DATA frame on the half-closed stream is a protocol error...
        >> DataReceived(tctx.client,
                        cff.build_data_frame(b"unexpected data frame").serialize())
        << SendData(tctx.client,
                    cff.build_rst_stream_frame(
                        1, ErrorCodes.STREAM_CLOSED).serialize())
        # ...and then the client goes away entirely.
        >> ConnectionClosed(tctx.client)
        << CloseConnection(tctx.client)
        >> reply("connection cancelled", to=-5)
        << http.HttpErrorHook(flow)
        >> reply())
    assert flow().error.msg == "connection cancelled"
def test_proxy_chain(tctx, strategy):
    """A second CONNECT issued inside an established CONNECT tunnel is rejected
    with 502 Bad Gateway (we are not an upstream/regular proxy at that point)."""
    server = Placeholder(Server)
    tctx.options.connection_strategy = strategy
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=False)

    playbook >> DataReceived(tctx.client, b"CONNECT proxy:8080 HTTP/1.1\r\n\r\n")
    if strategy == "eager":
        playbook << OpenConnection(server)
        playbook >> reply(None)
    playbook << SendData(tctx.client, b"HTTP/1.1 200 Connection established\r\n\r\n")

    playbook >> DataReceived(tctx.client, b"CONNECT second-proxy:8080 HTTP/1.1\r\n\r\n")
    playbook << layer.NextLayerHook(Placeholder())
    playbook >> reply_next_layer(
        lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
    playbook << SendData(
        tctx.client,
        b"HTTP/1.1 502 Bad Gateway\r\n"
        b"content-length: 198\r\n"
        b"\r\n"
        b"mitmproxy received an HTTP CONNECT request even though it is not running in regular/upstream mode. "
        b"This usually indicates a misconfiguration, please see the mitmproxy mode documentation for details."
    )
    assert playbook
def test_early_server_data(tctx):
    """The server may send data (its SETTINGS frame) before our request hook
    has finished; that must not break frame ordering on the server connection."""
    playbook, cff = start_h2_client(tctx)
    sff = FrameFactory()

    # server connection is already open and negotiated to h2.
    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    tctx.server.alpn = b"h2"

    flow = Placeholder(HTTPFlow)
    server1 = Placeholder(bytes)
    server2 = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers,
                                                flags=["END_STREAM"]).serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << (h := http.HttpRequestHook(flow))
        # Surprise! We get data from the server before the request hook finishes.
        >> DataReceived(tctx.server, sff.build_settings_frame({}).serialize())
        << SendData(tctx.server, server1)
        # Request hook finishes...
        >> reply(to=h)
        << SendData(tctx.server, server2)
    )
    # our own settings plus the ack of the server's settings go out first...
    assert [type(x) for x in decode_frames(server1())] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.SettingsFrame,
    ]
    # ...the request headers only after the hook completed.
    assert [type(x) for x in decode_frames(server2())] == [
        hyperframe.frame.HeadersFrame,
    ]
def test_upstream_error(tctx):
    """A failed upstream connection is reported to the HTTP/2 client as a
    502 Bad Gateway response carrying the error message."""
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)
    err = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers,
                                                flags=["END_STREAM"]).serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply("oops server <> error")
        << http.HttpErrorHook(flow)
        >> reply()
        << SendData(tctx.client, err)
    )
    frames = decode_frames(err())
    assert [type(x) for x in frames] == [
        hyperframe.frame.HeadersFrame,
        hyperframe.frame.DataFrame,
    ]
    d = frames[1]
    assert isinstance(d, hyperframe.frame.DataFrame)
    assert b"502 Bad Gateway" in d.data
    assert b"server <> error" in d.data
def test_cancel_during_response_hook(tctx):
    """
    Test that we properly handle the case of the following event sequence:
        - we receive a server response
        - we trigger the response hook
        - the client cancels the stream
        - the response hook completes

    Given that we have already triggered the response hook, we don't want to trigger the error hook.
    """
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers,
                                                flags=["END_STREAM"]).serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server, b'GET / HTTP/1.1\r\nHost: example.com\r\n\r\n')
        >> DataReceived(server, b"HTTP/1.1 204 No Content\r\n\r\n")
        << http.HttpResponseHeadersHook(flow)
        << CloseConnection(server)
        >> reply(to=-2)
        << http.HttpResponseHook(flow)
        # client cancels while the response hook is pending...
        >> DataReceived(tctx.client,
                        cff.build_rst_stream_frame(
                            1, ErrorCodes.CANCEL).serialize())
        # ...the hook still completes; no error hook follows.
        >> reply(to=-2))
def test_response_trailers(tctx: Context, open_h2_server_conn: Server, stream):
    """HTTP/2 response trailers can be inspected and modified in the response
    hook, in both streaming and non-streaming mode."""
    playbook, cff = start_h2_client(tctx)
    tctx.server = open_h2_server_conn
    sff = FrameFactory()

    def enable_streaming(flow: HTTPFlow):
        flow.response.stream = bool(stream)

    flow = Placeholder(HTTPFlow)
    (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers,
                                                flags=["END_STREAM"]).serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << SendData(tctx.server, Placeholder(bytes))
        # a conforming h2 server would send settings first, we disregard this for now.
        >> DataReceived(tctx.server,
                        sff.build_headers_frame(example_response_headers).serialize()
                        + sff.build_data_frame(b"Hello, World!").serialize())
        << http.HttpResponseHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
    )
    if stream:
        # streaming: headers and body are forwarded before the trailers arrive.
        playbook << SendData(
            tctx.client,
            cff.build_headers_frame(example_response_headers).serialize()
            + cff.build_data_frame(b"Hello, World!").serialize()
        )
    assert (
        playbook
        >> DataReceived(tctx.server,
                        sff.build_headers_frame(example_response_trailers,
                                                flags=["END_STREAM"]).serialize())
        << http.HttpResponseHook(flow)
    )
    assert flow().response.trailers
    # drop one trailer in the hook; only the remaining trailer must be forwarded.
    del flow().response.trailers["resp-trailer-a"]
    if stream:
        assert (
            playbook
            >> reply()
            << SendData(tctx.client,
                        cff.build_headers_frame(example_response_trailers[1:],
                                                flags=["END_STREAM"]).serialize())
        )
    else:
        assert (
            playbook
            >> reply()
            << SendData(tctx.client,
                        cff.build_headers_frame(example_response_headers).serialize()
                        + cff.build_data_frame(b"Hello, World!").serialize()
                        + cff.build_headers_frame(example_response_trailers[1:],
                                                  flags=["END_STREAM"]).serialize()))
def test_upgrade(tctx):
    """Test a HTTP -> WebSocket upgrade"""
    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    http_flow = Placeholder(HTTPFlow)
    flow = Placeholder(WebSocketFlow)
    assert (
        Playbook(http.HttpLayer(tctx, HTTPMode.transparent))
        >> DataReceived(tctx.client,
                        b"GET / HTTP/1.1\r\n"
                        b"Connection: upgrade\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Sec-WebSocket-Version: 13\r\n"
                        b"\r\n")
        << http.HttpRequestHeadersHook(http_flow)
        >> reply()
        << http.HttpRequestHook(http_flow)
        >> reply()
        << SendData(tctx.server,
                    b"GET / HTTP/1.1\r\n"
                    b"Connection: upgrade\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Sec-WebSocket-Version: 13\r\n"
                    b"\r\n")
        >> DataReceived(tctx.server,
                        b"HTTP/1.1 101 Switching Protocols\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Connection: Upgrade\r\n"
                        b"\r\n")
        << http.HttpResponseHeadersHook(http_flow)
        >> reply()
        << http.HttpResponseHook(http_flow)
        >> reply()
        << SendData(tctx.client,
                    b"HTTP/1.1 101 Switching Protocols\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Connection: Upgrade\r\n"
                    b"\r\n")
        # after the 101 handshake, the websocket layer takes over.
        << websocket.WebsocketStartHook(flow)
        >> reply()
        >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello world"))
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.server, masked(b"\x81\x0bhello world"))
        >> DataReceived(tctx.server, b"\x82\nhello back")
        << websocket.WebsocketMessageHook(flow)
        >> reply()
        << SendData(tctx.client, b"\x82\nhello back")
    )
    assert flow().handshake_flow == http_flow()
    assert len(flow().messages) == 2
    # opcode 0x81 = text frame (str content), 0x82 = binary frame (bytes content)
    assert flow().messages[0].content == "hello world"
    assert flow().messages[0].from_client
    assert flow().messages[1].content == b"hello back"
    assert flow().messages[1].from_client is False
def test_socks5_success(address: str, packed: bytes, tctx: Context):
    """A successful SOCKS5 handshake: greeting, CONNECT to *address*:0x1234,
    then the trailing application data is handed to the next layer."""
    tctx.options.connection_strategy = "eager"
    playbook = Playbook(modes.Socks5Proxy(tctx))
    server = Placeholder(Server)
    nextlayer = Placeholder(NextLayer)
    assert (
        playbook
        >> DataReceived(tctx.client, CLIENT_HELLO)
        << SendData(tctx.client, SERVER_HELLO)
        # CONNECT request with application data already appended after the address.
        >> DataReceived(tctx.client,
                        b"\x05\x01\x00" + packed + b"\x12\x34applicationdata")
        << OpenConnection(server)
        >> reply(None)
        << SendData(tctx.client, b"\x05\x00\x00\x01\x00\x00\x00\x00\x00\x00")
        << NextLayerHook(nextlayer))
    assert server().address == (address, 0x1234)
    assert nextlayer().data_client() == b"applicationdata"
def test_upgrade(tctx, proto):
    """Test a HTTP -> WebSocket upgrade with different protocols enabled"""
    if proto != "websocket":
        tctx.options.websocket = False
    if proto != "tcp":
        tctx.options.rawtcp = False

    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    http_flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.transparent))
    (
        playbook
        >> DataReceived(tctx.client,
                        b"GET / HTTP/1.1\r\n"
                        b"Connection: upgrade\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Sec-WebSocket-Version: 13\r\n"
                        b"\r\n")
        << http.HttpRequestHeadersHook(http_flow)
        >> reply()
        << http.HttpRequestHook(http_flow)
        >> reply()
        << SendData(tctx.server,
                    b"GET / HTTP/1.1\r\n"
                    b"Connection: upgrade\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Sec-WebSocket-Version: 13\r\n"
                    b"\r\n")
        >> DataReceived(tctx.server,
                        b"HTTP/1.1 101 Switching Protocols\r\n"
                        b"Upgrade: websocket\r\n"
                        b"Connection: Upgrade\r\n"
                        b"\r\n")
        << http.HttpResponseHeadersHook(http_flow)
        >> reply()
        << http.HttpResponseHook(http_flow)
        >> reply()
        << SendData(tctx.client,
                    b"HTTP/1.1 101 Switching Protocols\r\n"
                    b"Upgrade: websocket\r\n"
                    b"Connection: Upgrade\r\n"
                    b"\r\n")
    )
    # which layer takes over depends on the enabled protocol options.
    if proto == "websocket":
        assert playbook << WebsocketStartHook(Placeholder(WebSocketFlow))
    elif proto == "tcp":
        assert playbook << TcpStartHook(Placeholder(TCPFlow))
    else:
        # neither websocket nor rawtcp enabled: warn and close.
        assert (
            playbook
            << Log("Sent HTTP 101 response, but no protocol is enabled to upgrade to.", "warn")
            << CloseConnection(tctx.client)
        )
def test_http_proxy_tcp(tctx, mode, close_first):
    """Test TCP over HTTP CONNECT."""
    server = Placeholder(Server)

    if mode == "upstream":
        tctx.options.mode = "upstream:http://proxy:8080"
        toplayer = http.HttpLayer(tctx, HTTPMode.upstream)
    else:
        tctx.options.mode = "regular"
        toplayer = http.HttpLayer(tctx, HTTPMode.regular)

    playbook = Playbook(toplayer, hooks=False)
    assert (
        playbook
        >> DataReceived(tctx.client, b"CONNECT example:443 HTTP/1.1\r\n\r\n")
        << SendData(tctx.client, b"HTTP/1.1 200 Connection established\r\n\r\n")
        # non-HTTP data inside the tunnel -> handled as raw TCP.
        >> DataReceived(tctx.client, b"this is not http")
        << layer.NextLayerHook(Placeholder())
        >> reply_next_layer(lambda ctx: TCPLayer(ctx, ignore=True))
        << OpenConnection(server)
    )
    playbook >> reply(None)
    if mode == "upstream":
        # in upstream mode the CONNECT is forwarded to the next proxy first.
        playbook << SendData(server, b"CONNECT example:443 HTTP/1.1\r\n\r\n")
        playbook >> DataReceived(server, b"HTTP/1.1 200 Connection established\r\n\r\n")
    assert (
        playbook
        << SendData(server, b"this is not http")
        >> DataReceived(server, b"true that")
        << SendData(tctx.client, b"true that")
    )

    if mode == "regular":
        assert server().address == ("example", 443)
    else:
        assert server().address == ("proxy", 8080)

    # half-close handling must work regardless of which side closes first.
    if close_first == "client":
        a, b = tctx.client, server
    else:
        a, b = server, tctx.client
    assert (
        playbook
        >> ConnectionClosed(a)
        << CloseConnection(b)
        >> ConnectionClosed(b)
        << CloseConnection(a)
    )
def _h2_response(chunks):
    """Helper: set up a regular-mode HTTP layer whose upstream speaks HTTP/2
    and feed *chunks* of server bytes directly into the layer.

    Events produced by the layer are deliberately discarded; this is used to
    exercise parsing of (possibly malformed) h2 response data.
    """
    tctx = context.Context(
        connection.Client(("client", 1234), ("127.0.0.1", 8080), 1605699329), opts)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=False)
    server = Placeholder(connection.Server)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        b"GET http://example.com/ HTTP/1.1\r\nHost: example.com\r\n\r\n")
        << OpenConnection(server)
        >> reply(None, side_effect=make_h2)
        << SendData(server, Placeholder())
    )
    for chunk in chunks:
        # bypass the playbook and push raw data straight into the layer.
        for _ in playbook.layer.handle_event(events.DataReceived(server(), chunk)):
            pass
def test_upgrade_denied(self, tctx):
    """If the server answers an Upgrade request with a plain 200 (no 101),
    the connection stays HTTP/1 and subsequent requests are parsed normally."""
    assert (
        Playbook(Http1Server(tctx))
        >> DataReceived(tctx.client,
                        b"GET http://example.com/ HTTP/1.1\r\n"
                        b"Connection: Upgrade\r\n"
                        b"Upgrade: websocket\r\n"
                        b"\r\n")
        << ReceiveHttp(Placeholder(RequestHeaders))
        << ReceiveHttp(RequestEndOfMessage(1))
        >> ResponseHeaders(1, http.Response.make(200))
        << SendData(tctx.client, b'HTTP/1.1 200 OK\r\ncontent-length: 0\r\n\r\n')
        >> ResponseEndOfMessage(1)
        # a second request on the same connection is still handled as HTTP/1.
        >> DataReceived(tctx.client, b"GET / HTTP/1.1\r\n\r\n")
        << ReceiveHttp(Placeholder(RequestHeaders))
        << ReceiveHttp(RequestEndOfMessage(3)))
def test_no_normalization(tctx, normalize):
    """Test that we don't normalize headers when we just pass them through."""
    tctx.options.normalize_outbound_headers = normalize
    tctx.options.validate_inbound_headers = False

    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook, cff = start_h2_client(tctx)

    request_headers = list(example_request_headers) + [
        (b"Should-Not-Be-Capitalized! ", b" :) ")
    ]
    request_headers_lower = [(k.lower(), v) for (k, v) in request_headers]
    response_headers = list(example_response_headers) + [(b"Same", b"Here")]
    response_headers_lower = [(k.lower(), v) for (k, v) in response_headers]

    initial = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(request_headers,
                                                flags=["END_STREAM"]).serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None, side_effect=make_h2)
        << SendData(server, initial)
    )
    frames = decode_frames(initial())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
    # with normalization enabled the forwarded headers are lowercased,
    # otherwise they go out exactly as received.
    assert hpack.hpack.Decoder().decode(frames[1].data, True) == request_headers_lower if normalize else request_headers

    sff = FrameFactory()
    (
        playbook
        >> DataReceived(server,
                        sff.build_headers_frame(response_headers,
                                                flags=["END_STREAM"]).serialize())
        << http.HttpResponseHeadersHook(flow)
        >> reply()
        << http.HttpResponseHook(flow)
        >> reply()
    )
    if normalize:
        playbook << Log("Lowercased 'Same' header as uppercase is not allowed with HTTP/2.")
    hdrs = response_headers_lower if normalize else response_headers
    assert playbook << SendData(tctx.client,
                                cff.build_headers_frame(hdrs,
                                                        flags=["END_STREAM"]).serialize())

    # the flow object itself always keeps the original casing.
    assert flow().request.headers.fields == ((b"Should-Not-Be-Capitalized! ", b" :) "),)
    assert flow().response.headers.fields == ((b"Same", b"Here"),)