Example 1
def test_kill_stream(tctx):
    """Test that we can kill individual streams."""
    playbook, cff = start_h2_client(tctx)
    flow1 = Placeholder(HTTPFlow)
    flow2 = Placeholder(HTTPFlow)

    req_headers_hook_1 = http.HttpRequestHeadersHook(flow1)

    def kill(flow: HTTPFlow):
        # Can't use flow.kill() here because that currently still depends on a reply object.
        flow.error = Error(Error.KILLED_MESSAGE)

    server = Placeholder(Server)
    data_req1 = Placeholder(bytes)

    assert (
            playbook
            >> DataReceived(
                tctx.client,
                cff.build_headers_frame(example_request_headers, flags=["END_STREAM"], stream_id=1).serialize() +
                cff.build_headers_frame(example_request_headers, flags=["END_STREAM"], stream_id=3).serialize())
            << req_headers_hook_1
            << http.HttpRequestHeadersHook(flow2)
            >> reply(side_effect=kill)
            << http.HttpErrorHook(flow2)
            >> reply()
            << SendData(tctx.client,
                        cff.build_rst_stream_frame(3, error_code=ErrorCodes.INTERNAL_ERROR).serialize())
            >> reply(to=req_headers_hook_1)
            << http.HttpRequestHook(flow1)
            >> reply()
            << OpenConnection(server)
            >> reply(None, side_effect=make_h2)
            << SendData(server, data_req1)
    )
    frames = decode_frames(data_req1())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
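
Note: decode_frames is one of several shared helpers that these examples rely on but do not show. As a point of reference, here is a minimal sketch of such a helper, assuming hyperframe's Frame.parse_frame_header / parse_body API (the actual helper in the test suite may differ in detail):

import hyperframe.frame


def decode_frames(data: bytes) -> list:
    """Split a raw byte stream into hyperframe frame objects (sketch)."""
    # Skip the HTTP/2 client connection preface if present.
    if data.startswith(b"PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n"):
        data = data[24:]
    frames = []
    while data:
        # Every HTTP/2 frame starts with a fixed 9-byte header.
        frame, length = hyperframe.frame.Frame.parse_frame_header(memoryview(data[:9]))
        frame.parse_body(memoryview(data[9:9 + length]))
        frames.append(frame)
        data = data[9 + length:]
    return frames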
Example 2
def test_response_streaming(tctx):
    """Test HTTP response streaming"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)

    def enable_streaming(flow: HTTPFlow):
        flow.response.stream = lambda x: x.upper()

    assert (
            Playbook(http.HttpLayer(tctx, HTTPMode.regular))
            >> DataReceived(tctx.client, b"GET http://example.com/largefile HTTP/1.1\r\nHost: example.com\r\n\r\n")
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET /largefile HTTP/1.1\r\nHost: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 6\r\n\r\nabc")
            << http.HttpResponseHeadersHook(flow)
            >> reply(side_effect=enable_streaming)
            << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 6\r\n\r\nABC")
            >> DataReceived(server, b"def")
            << SendData(tctx.client, b"DEF")
            << http.HttpResponseHook(flow)
            >> reply()
    )
Example 3
def test_https_proxy(strategy, tctx):
    """Test a CONNECT request, followed by a HTTP GET /"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    tctx.options.connection_strategy = strategy

    (
            playbook
            >> DataReceived(tctx.client, b"CONNECT example.proxy:80 HTTP/1.1\r\n\r\n")
            << http.HttpConnectHook(Placeholder())
            >> reply()
    )
    if strategy == "eager":
        playbook << OpenConnection(server) >> reply(None)
    (
            playbook
            << SendData(tctx.client, b'HTTP/1.1 200 Connection established\r\n\r\n')
            >> DataReceived(tctx.client, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
            << layer.NextLayerHook(Placeholder())
            >> reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
    )
    if strategy == "lazy":
        playbook << OpenConnection(server) >> reply(None)
    (
            playbook
            << SendData(server, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!")
    )
    assert playbook
Example 4
def test_upstream_error(tctx):
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)
    err = Placeholder(bytes)
    assert (
            playbook
            >> DataReceived(tctx.client,
                            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize())
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply("oops server <> error")
            << http.HttpErrorHook(flow)
            >> reply()
            << SendData(tctx.client, err)
    )
    frames = decode_frames(err())
    assert [type(x) for x in frames] == [
        hyperframe.frame.HeadersFrame,
        hyperframe.frame.DataFrame,
    ]
    d = frames[1]
    assert isinstance(d, hyperframe.frame.DataFrame)
    assert b"502 Bad Gateway" in d.data
    assert b"server &lt;&gt; error" in d.data
Example 5
def test_simple(tctx):
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)
    initial = Placeholder(bytes)
    assert (
            playbook
            >> DataReceived(tctx.client,
                            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize())
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None, side_effect=make_h2)
            << SendData(server, initial)
    )
    frames = decode_frames(initial())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
    sff = FrameFactory()
    assert (
            playbook
            # a conforming h2 server would send settings first, we disregard this for now.
            >> DataReceived(server, sff.build_headers_frame(example_response_headers).serialize())
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            >> DataReceived(server, sff.build_data_frame(b"Hello, World!", flags=["END_STREAM"]).serialize())
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client,
                        cff.build_headers_frame(example_response_headers).serialize() +
                        cff.build_data_frame(b"Hello, World!").serialize() +
                        cff.build_data_frame(b"", flags=["END_STREAM"]).serialize())
    )
    assert flow().request.url == "http://example.com/"
    assert flow().response.text == "Hello, World!"
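
The start_h2_client and make_h2 helpers used throughout the HTTP/2 examples are likewise assumed rather than shown. Roughly, start_h2_client switches the client connection to HTTP/2 and replays the connection preface plus a SETTINGS exchange, while make_h2 is an OpenConnection side effect that marks the new server connection as having negotiated h2 via ALPN. A hedged sketch using the same test scaffolding names as the examples (the exact playbook exchange in the real helpers may differ):

def make_h2(open_connection: OpenConnection) -> None:
    # Pretend the freshly opened server connection negotiated HTTP/2 via ALPN.
    open_connection.connection.alpn = b"h2"


def start_h2_client(tctx: Context) -> tuple:
    # Client speaks HTTP/2: replay the preface and an empty SETTINGS exchange.
    tctx.client.alpn = b"h2"
    frame_factory = FrameFactory()
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    assert (
            playbook
            << SendData(tctx.client, Placeholder(bytes))  # our initial SETTINGS frame
            >> DataReceived(tctx.client, frame_factory.preamble())
            >> DataReceived(tctx.client, frame_factory.build_settings_frame({}, ack=True).serialize())
    )
    return playbook, frame_factory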
Example 6
def test_rst_then_close(tctx):
    """
    Test that we properly handle the case of a client that first causes protocol errors and then disconnects.

    Adapted from h2spec http2/5.1/5.
    """
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)

    assert (
            playbook
            >> DataReceived(tctx.client,
                            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize())
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> DataReceived(tctx.client, cff.build_data_frame(b"unexpected data frame").serialize())
            << SendData(tctx.client, cff.build_rst_stream_frame(1, ErrorCodes.STREAM_CLOSED).serialize())
            >> ConnectionClosed(tctx.client)
            << CloseConnection(tctx.client)
            >> reply("connection cancelled", to=-5)
            << http.HttpErrorHook(flow)
            >> reply()
    )
    assert flow().error.msg == "connection cancelled"
Example 7
def test_early_server_data(tctx):
    playbook, cff = start_h2_client(tctx)
    sff = FrameFactory()

    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    tctx.server.alpn = b"h2"

    flow = Placeholder(HTTPFlow)
    server1 = Placeholder(bytes)
    server2 = Placeholder(bytes)
    assert (
            playbook
            >> DataReceived(tctx.client,
                            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize())
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << (h := http.HttpRequestHook(flow))
            # Surprise! We get data from the server before the request hook finishes.
            >> DataReceived(tctx.server, sff.build_settings_frame({}).serialize())
            << SendData(tctx.server, server1)
            # Request hook finishes...
            >> reply(to=h)
            << SendData(tctx.server, server2)
    )
    assert [type(x) for x in decode_frames(server1())] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.SettingsFrame,
    ]
    assert [type(x) for x in decode_frames(server2())] == [
        hyperframe.frame.HeadersFrame,
    ]
Example 8
def test_h1_to_h2(tctx):
    """Test HTTP/1 -> HTTP/2 request translation"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)

    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    conf = h2.config.H2Configuration(client_side=False)
    conn = h2.connection.H2Connection(conf)
    conn.initiate_connection()

    request = Placeholder(bytes)
    assert (
            playbook
            >> DataReceived(tctx.client, b"GET http://example.com/ HTTP/1.1\r\nHost: example.com\r\n\r\n")
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None, side_effect=make_h2)
            << SendData(server, request)
    )
    events = conn.receive_data(request())
    assert event_types(events) == [
        h2.events.RemoteSettingsChanged, h2.events.RequestReceived,
        h2.events.StreamEnded
    ]

    conn.send_headers(1, example_response_headers)
    conn.send_data(1, b"Hello World!", end_stream=True)
    settings_ack = Placeholder(bytes)
    assert (
            playbook
            >> DataReceived(server, conn.data_to_send())
            << http.HttpResponseHeadersHook(flow)
            << SendData(server, settings_ack)
            >> reply(to=-2)
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 200 OK\r\n\r\nHello World!")
            << CloseConnection(tctx.client)
    )
    assert settings_ack() == b'\x00\x00\x00\x04\x01\x00\x00\x00\x00'
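
The event_types helper used here (and in the HTTP/2 -> HTTP/1 example further down) presumably just maps h2 events to their classes so they can be compared against a list of expected types, e.g. this trivial sketch:

def event_types(events) -> list:
    # Collapse a list of h2.events objects into their classes for easy comparison.
    return [type(x) for x in events]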
Example 9
def test_cancel_then_server_disconnect(tctx):
    """
    Test that we properly handle the case of the following event sequence:
        - client cancels a stream
        - we start an error hook
        - server disconnects
        - error hook completes.
    """
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)

    assert (
            playbook
            >> DataReceived(tctx.client,
                            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize())
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b'GET / HTTP/1.1\r\nHost: example.com\r\n\r\n')
            >> DataReceived(tctx.client, cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
            << CloseConnection(server)
            << http.HttpErrorHook(flow)
            >> reply()
            >> ConnectionClosed(server)
            << None
    )
Example 10
def test_request_trailers(tctx: Context, open_h2_server_conn: Server, stream):
    playbook, cff = start_h2_client(tctx)
    tctx.server = open_h2_server_conn

    def enable_streaming(flow: HTTPFlow):
        flow.request.stream = bool(stream)

    flow = Placeholder(HTTPFlow)
    server_data1 = Placeholder(bytes)
    server_data2 = Placeholder(bytes)
    (
            playbook
            >> DataReceived(tctx.client,
                            cff.build_headers_frame(example_request_headers).serialize() +
                            cff.build_data_frame(b"Hello, World!").serialize())
            << http.HttpRequestHeadersHook(flow)
            >> reply(side_effect=enable_streaming)
    )
    if stream:
        playbook << SendData(tctx.server, server_data1)
    assert (
            playbook
            >> DataReceived(tctx.client,
                            cff.build_headers_frame(example_request_trailers, flags=["END_STREAM"]).serialize())
            << http.HttpRequestHook(flow)
            >> reply()
            << SendData(tctx.server, server_data2)
    )
    frames = decode_frames(server_data1.setdefault(b"") + server_data2())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
        hyperframe.frame.DataFrame,
        hyperframe.frame.HeadersFrame,
    ]
Example 11
def test_h2_to_h1(tctx):
    """Test HTTP/2 -> HTTP/1 request translation"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)

    conn, playbook = h2_client(tctx)

    conn.send_headers(1, example_request_headers, end_stream=True)
    response = Placeholder(bytes)
    assert (
            playbook
            >> DataReceived(tctx.client, conn.data_to_send())
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            >> DataReceived(server, b"Hello World!")
            << http.HttpResponseHook(flow)
            << CloseConnection(server)
            >> reply(to=-2)
            << SendData(tctx.client, response)
    )
    events = conn.receive_data(response())
    assert event_types(events) == [
        h2.events.ResponseReceived, h2.events.DataReceived,
        h2.events.DataReceived, h2.events.StreamEnded
    ]
    resp: h2.events.ResponseReceived = events[0]
    body: h2.events.DataReceived = events[1]
    assert resp.headers == [(b':status', b'200'), (b'content-length', b'12')]
    assert body.data == b"Hello World!"
Example 12
def test_cancel_during_response_hook(tctx):
    """
    Test that we properly handle the case of the following event sequence:
        - we receive a server response
        - we trigger the response hook
        - the client cancels the stream
        - the response hook completes

    Given that we have already triggered the response hook, we don't want to trigger the error hook.
    """
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)

    assert (
            playbook
            >> DataReceived(tctx.client,
                            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize())
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b'GET / HTTP/1.1\r\nHost: example.com\r\n\r\n')
            >> DataReceived(server, b"HTTP/1.1 204 No Content\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            << CloseConnection(server)
            >> reply(to=-2)
            << http.HttpResponseHook(flow)
            >> DataReceived(tctx.client, cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
            >> reply(to=-2)
    )
Example 13
def test_stream_modify(tctx):
    """Test HTTP stream modification"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)

    def enable_streaming(flow: HTTPFlow):
        if flow.response is None:
            flow.request.stream = lambda x: b"[" + x + b"]"
        else:
            flow.response.stream = lambda x: b"[" + x + b"]"

    assert (
            Playbook(http.HttpLayer(tctx, HTTPMode.regular))
            >> DataReceived(tctx.client, b"POST http://example.com/ HTTP/1.1\r\n"
                                         b"Host: example.com\r\n"
                                         b"Transfer-Encoding: chunked\r\n\r\n"
                                         b"3\r\nabc\r\n"
                                         b"0\r\n\r\n")
            << http.HttpRequestHeadersHook(flow)
            >> reply(side_effect=enable_streaming)
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"POST / HTTP/1.1\r\n"
                                b"Host: example.com\r\n"
                                b"Transfer-Encoding: chunked\r\n\r\n"
                                b"5\r\n[abc]\r\n"
                                b"2\r\n[]\r\n")
            << http.HttpRequestHook(flow)
            >> reply()
            << SendData(server, b"0\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\n"
                                    b"Transfer-Encoding: chunked\r\n\r\n"
                                    b"3\r\ndef\r\n"
                                    b"0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply(side_effect=enable_streaming)
            << SendData(tctx.client, b"HTTP/1.1 200 OK\r\n"
                                     b"Transfer-Encoding: chunked\r\n\r\n"
                                     b"5\r\n[def]\r\n"
                                     b"2\r\n[]\r\n")
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"0\r\n\r\n")
    )
Example 14
def test_response_streaming(tctx, why, transfer_encoding):
    """Test HTTP response streaming"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    if why.startswith("body_size"):
        tctx.options.stream_large_bodies = why.replace("body_size=", "")

    def enable_streaming(flow: HTTPFlow):
        if why == "addon":
            flow.response.stream = True

    assert (
            playbook
            >> DataReceived(tctx.client,
                            b"GET http://example.com/largefile HTTP/1.1\r\nHost: example.com\r\n\r\n")
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET /largefile HTTP/1.1\r\nHost: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\n")
    )
    if transfer_encoding == "identity":
        playbook >> DataReceived(server, b"Content-Length: 6\r\n\r\n" b"abc")
    else:
        playbook >> DataReceived(
            server, b"Transfer-Encoding: chunked\r\n\r\n"
            b"3\r\nabc\r\n")

    playbook << http.HttpResponseHeadersHook(flow)
    playbook >> reply(side_effect=enable_streaming)

    if transfer_encoding == "identity":
        playbook << SendData(
            tctx.client, b"HTTP/1.1 200 OK\r\n"
            b"Content-Length: 6\r\n\r\n"
            b"abc")
        playbook >> DataReceived(server, b"def")
        playbook << SendData(tctx.client, b"def")
    else:
        if why == "body_size=3":
            playbook >> DataReceived(server, b"3\r\ndef\r\n")
            playbook << SendData(
                tctx.client, b"HTTP/1.1 200 OK\r\n"
                b"Transfer-Encoding: chunked\r\n\r\n"
                b"6\r\nabcdef\r\n")
        else:
            playbook << SendData(
                tctx.client, b"HTTP/1.1 200 OK\r\n"
                b"Transfer-Encoding: chunked\r\n\r\n"
                b"3\r\nabc\r\n")
            playbook >> DataReceived(server, b"3\r\ndef\r\n")
            playbook << SendData(tctx.client, b"3\r\ndef\r\n")
        playbook >> DataReceived(server, b"0\r\n\r\n")

    playbook << http.HttpResponseHook(flow)
    playbook >> reply()

    if transfer_encoding == "chunked":
        playbook << SendData(tctx.client, b"0\r\n\r\n")

    assert playbook
Example 15
def test_http_client_aborts(tctx, stream):
    """Test handling of the case where a client aborts during request transmission."""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=True)

    def enable_streaming(flow: HTTPFlow):
        flow.request.stream = True

    assert (
            playbook
            >> DataReceived(tctx.client, b"POST http://example.com/ HTTP/1.1\r\n"
                                         b"Host: example.com\r\n"
                                         b"Content-Length: 6\r\n"
                                         b"\r\n"
                                         b"abc")
            << http.HttpRequestHeadersHook(flow)
    )
    if stream:
        assert (
            playbook
            >> reply(side_effect=enable_streaming)
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"POST / HTTP/1.1\r\n"
                                b"Host: example.com\r\n"
                                b"Content-Length: 6\r\n"
                                b"\r\n"
                                b"abc")
        )
    else:
        assert playbook >> reply()
    (playbook >> ConnectionClosed(tctx.client) << CloseConnection(tctx.client))
    if stream:
        playbook << CloseConnection(server)
    assert (playbook << http.HttpErrorHook(flow) >> reply() << None)

    assert "peer closed connection" in flow().error.msg
Example 16
def test_upgrade(tctx):
    """Test a HTTP -> WebSocket upgrade"""
    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    flow = Placeholder(HTTPFlow)
    assert (
            Playbook(http.HttpLayer(tctx, HTTPMode.transparent))
            >> DataReceived(tctx.client,
                            b"GET / HTTP/1.1\r\n"
                            b"Connection: upgrade\r\n"
                            b"Upgrade: websocket\r\n"
                            b"Sec-WebSocket-Version: 13\r\n"
                            b"\r\n")
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << SendData(tctx.server, b"GET / HTTP/1.1\r\n"
                                     b"Connection: upgrade\r\n"
                                     b"Upgrade: websocket\r\n"
                                     b"Sec-WebSocket-Version: 13\r\n"
                                     b"\r\n")
            >> DataReceived(tctx.server, b"HTTP/1.1 101 Switching Protocols\r\n"
                                         b"Upgrade: websocket\r\n"
                                         b"Connection: Upgrade\r\n"
                                         b"\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 101 Switching Protocols\r\n"
                                     b"Upgrade: websocket\r\n"
                                     b"Connection: Upgrade\r\n"
                                     b"\r\n")
            << websocket.WebsocketStartHook(flow)
            >> reply()
            >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello world"))
            << websocket.WebsocketMessageHook(flow)
            >> reply()
            << SendData(tctx.server, masked(b"\x81\x0bhello world"))
            >> DataReceived(tctx.server, b"\x82\nhello back")
            << websocket.WebsocketMessageHook(flow)
            >> reply()
            << SendData(tctx.client, b"\x82\nhello back")
            >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello again"))
            << websocket.WebsocketMessageHook(flow)
            >> reply()
            << SendData(tctx.server, masked(b"\x81\x0bhello again"))
    )
    assert len(flow().websocket.messages) == 3
    assert flow().websocket.messages[0].content == b"hello world"
    assert flow().websocket.messages[0].from_client
    assert flow().websocket.messages[0].type == Opcode.TEXT
    assert flow().websocket.messages[1].content == b"hello back"
    assert flow().websocket.messages[1].from_client is False
    assert flow().websocket.messages[1].type == Opcode.BINARY
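
The masked_bytes and masked helpers used in the WebSocket upgrade examples are not shown either. Client-to-server WebSocket frames must carry a masking key (RFC 6455), and the proxy re-masks forwarded frames with a key of its own choosing, so a plausible shape is: masked_bytes builds a concretely masked frame to inject as client input, while masked returns a comparator that matches the proxy's output regardless of which masking key it picked. A rough sketch under those assumptions (not the actual implementation; it assumes payloads shorter than 126 bytes, i.e. a two-byte frame header):

import secrets


def masked_bytes(unmasked: bytes) -> bytes:
    # Set the MASK bit and mask the payload with a random four-byte key.
    header = bytes([unmasked[0], unmasked[1] | 0x80])
    key = secrets.token_bytes(4)
    payload = bytes(b ^ key[i % 4] for i, b in enumerate(unmasked[2:]))
    return header + key + payload


class masked:
    # Equality helper: unmask whatever was actually sent and compare the plaintext frame.
    def __init__(self, unmasked: bytes):
        self.unmasked = unmasked

    def __eq__(self, other) -> bool:
        other = bytes(other)
        header = bytes([other[0], other[1] & 0x7F])  # clear the MASK bit
        key, payload = other[2:6], other[6:]
        return self.unmasked == header + bytes(b ^ key[i % 4] for i, b in enumerate(payload))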
Example 17
def test_stream_concurrency(tctx):
    """Test that we can send an intercepted request with a lower stream id than one that has already been sent."""
    playbook, cff = start_h2_client(tctx)
    flow1 = Placeholder(HTTPFlow)
    flow2 = Placeholder(HTTPFlow)

    reqheadershook1 = http.HttpRequestHeadersHook(flow1)
    reqheadershook2 = http.HttpRequestHeadersHook(flow2)
    reqhook1 = http.HttpRequestHook(flow1)
    reqhook2 = http.HttpRequestHook(flow2)

    server = Placeholder(Server)
    data_req1 = Placeholder(bytes)
    data_req2 = Placeholder(bytes)

    assert (playbook
            >> DataReceived(
                tctx.client,
                cff.build_headers_frame(example_request_headers, flags=["END_STREAM"], stream_id=1).serialize() +
                cff.build_headers_frame(example_request_headers, flags=["END_STREAM"], stream_id=3).serialize())
            << reqheadershook1
            << reqheadershook2
            >> reply(to=reqheadershook1)
            << reqhook1
            >> reply(to=reqheadershook2)
            << reqhook2
            # req 2 overtakes 1 and we already have a reply:
            >> reply(to=reqhook2)
            << OpenConnection(server)
            >> reply(None, side_effect=make_h2)
            << SendData(server, data_req2)
            >> reply(to=reqhook1)
            << SendData(server, data_req1)
            )
    frames = decode_frames(data_req2())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
    frames = decode_frames(data_req1())
    assert [type(x) for x in frames] == [
        hyperframe.frame.HeadersFrame,
    ]
Example 18
def test_upgrade_streamed(tctx):
    """If the HTTP response is streamed, we may get early data from the client."""
    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    flow = Placeholder(HTTPFlow)

    def enable_streaming(flow: HTTPFlow):
        flow.response.stream = True

    assert (
            Playbook(http.HttpLayer(tctx, HTTPMode.transparent))
            >> DataReceived(tctx.client,
                            b"GET / HTTP/1.1\r\n"
                            b"Connection: upgrade\r\n"
                            b"Upgrade: websocket\r\n"
                            b"Sec-WebSocket-Version: 13\r\n"
                            b"\r\n")
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << SendData(tctx.server, b"GET / HTTP/1.1\r\n"
                                     b"Connection: upgrade\r\n"
                                     b"Upgrade: websocket\r\n"
                                     b"Sec-WebSocket-Version: 13\r\n"
                                     b"\r\n")
            >> DataReceived(tctx.server, b"HTTP/1.1 101 Switching Protocols\r\n"
                                         b"Upgrade: websocket\r\n"
                                         b"Connection: Upgrade\r\n"
                                         b"\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply(side_effect=enable_streaming)
            << SendData(tctx.client, b"HTTP/1.1 101 Switching Protocols\r\n"
                                     b"Upgrade: websocket\r\n"
                                     b"Connection: Upgrade\r\n"
                                     b"\r\n")
            << http.HttpResponseHook(flow)
            >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello world"))  # early !!
            >> reply(to=-2)
            << websocket.WebsocketStartHook(flow)
            >> reply()
            << websocket.WebsocketMessageHook(flow)
            >> reply()
            << SendData(tctx.server, masked(b"\x81\x0bhello world"))
            >> DataReceived(tctx.server, b"\x82\nhello back")
            << websocket.WebsocketMessageHook(flow)
            >> reply()
            << SendData(tctx.client, b"\x82\nhello back")
            >> DataReceived(tctx.client, masked_bytes(b"\x81\x0bhello again"))
            << websocket.WebsocketMessageHook(flow)
            >> reply()
            << SendData(tctx.server, masked(b"\x81\x0bhello again"))
    )
Example 19
def test_response_trailers(tctx: Context, open_h2_server_conn: Server, stream):
    playbook, cff = start_h2_client(tctx)
    tctx.server = open_h2_server_conn
    sff = FrameFactory()

    def enable_streaming(flow: HTTPFlow):
        flow.response.stream = bool(stream)

    flow = Placeholder(HTTPFlow)
    (
        playbook
        >> DataReceived(tctx.client,
                        cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize())
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << SendData(tctx.server, Placeholder(bytes))
        # a conforming h2 server would send settings first, we disregard this for now.
        >> DataReceived(tctx.server, sff.build_headers_frame(example_response_headers).serialize() +
                        sff.build_data_frame(b"Hello, World!").serialize())
        << http.HttpResponseHeadersHook(flow)
        >> reply(side_effect=enable_streaming)
    )
    if stream:
        playbook << SendData(
            tctx.client,
            cff.build_headers_frame(example_response_headers).serialize() +
            cff.build_data_frame(b"Hello, World!").serialize()
        )
    assert (
        playbook
        >> DataReceived(tctx.server, sff.build_headers_frame(example_response_trailers, flags=["END_STREAM"]).serialize())
        << http.HttpResponseHook(flow)
    )
    assert flow().response.trailers
    del flow().response.trailers["resp-trailer-a"]
    if stream:
        assert (
            playbook
            >> reply()
            << SendData(tctx.client,
                        cff.build_headers_frame(example_response_trailers[1:], flags=["END_STREAM"]).serialize())
        )
    else:
        assert (
            playbook
            >> reply()
            << SendData(tctx.client,
                        cff.build_headers_frame(example_response_headers).serialize() +
                        cff.build_data_frame(b"Hello, World!").serialize() +
                        cff.build_headers_frame(example_response_trailers[1:], flags=["END_STREAM"]).serialize()))
Example 20
def test_upgrade(tctx, proto):
    """Test a HTTP -> WebSocket upgrade with different protocols enabled"""
    if proto != "websocket":
        tctx.options.websocket = False
    if proto != "tcp":
        tctx.options.rawtcp = False

    tctx.server.address = ("example.com", 80)
    tctx.server.state = ConnectionState.OPEN
    http_flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.transparent))
    (
            playbook
            >> DataReceived(tctx.client,
                            b"GET / HTTP/1.1\r\n"
                            b"Connection: upgrade\r\n"
                            b"Upgrade: websocket\r\n"
                            b"Sec-WebSocket-Version: 13\r\n"
                            b"\r\n")
            << http.HttpRequestHeadersHook(http_flow)
            >> reply()
            << http.HttpRequestHook(http_flow)
            >> reply()
            << SendData(tctx.server, b"GET / HTTP/1.1\r\n"
                                     b"Connection: upgrade\r\n"
                                     b"Upgrade: websocket\r\n"
                                     b"Sec-WebSocket-Version: 13\r\n"
                                     b"\r\n")
            >> DataReceived(tctx.server, b"HTTP/1.1 101 Switching Protocols\r\n"
                                         b"Upgrade: websocket\r\n"
                                         b"Connection: Upgrade\r\n"
                                         b"\r\n")
            << http.HttpResponseHeadersHook(http_flow)
            >> reply()
            << http.HttpResponseHook(http_flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 101 Switching Protocols\r\n"
                                     b"Upgrade: websocket\r\n"
                                     b"Connection: Upgrade\r\n"
                                     b"\r\n")
    )
    if proto == "websocket":
        assert playbook << WebsocketStartHook(Placeholder(WebSocketFlow))
    elif proto == "tcp":
        assert playbook << TcpStartHook(Placeholder(TCPFlow))
    else:
        assert (
            playbook
            << Log("Sent HTTP 101 response, but no protocol is enabled to upgrade to.", "warn")
            << CloseConnection(tctx.client)
        )
Example 21
def test_body_size_limit(tctx, where, transfer_encoding):
    """Test HTTP request body_size_limit"""
    tctx.options.body_size_limit = "3"
    err = Placeholder(bytes)
    flow = Placeholder(HTTPFlow)

    if transfer_encoding == "identity":
        body = b"Content-Length: 6\r\n\r\nabcdef"
    else:
        body = b"Transfer-Encoding: chunked\r\n\r\n6\r\nabcdef"

    if where == "request":
        assert (
            Playbook(http.HttpLayer(tctx, HTTPMode.regular))
            >> DataReceived(tctx.client, b"POST http://example.com/ HTTP/1.1\r\n"
                                         b"Host: example.com\r\n" + body)
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpErrorHook(flow)
            >> reply()
            << SendData(tctx.client, err)
            << CloseConnection(tctx.client)
        )
        assert b"413 Payload Too Large" in err()
        assert b"body_size_limit" in err()
    else:
        server = Placeholder(Server)
        assert (
            Playbook(http.HttpLayer(tctx, HTTPMode.regular))
            >> DataReceived(tctx.client, b"GET http://example.com/ HTTP/1.1\r\n"
                                         b"Host: example.com\r\n\r\n")
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET / HTTP/1.1\r\n"
                                b"Host: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\n" + body)
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpErrorHook(flow)
            >> reply()
            << SendData(tctx.client, err)
            << CloseConnection(tctx.client)
            << CloseConnection(server)
        )
        assert b"502 Bad Gateway" in err()
        assert b"body_size_limit" in err()
Example 22
def test_no_normalization(tctx, normalize):
    """Test that we don't normalize headers when we just pass them through."""
    tctx.options.normalize_outbound_headers = normalize
    tctx.options.validate_inbound_headers = False

    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook, cff = start_h2_client(tctx)

    request_headers = list(example_request_headers) + [(b"Should-Not-Be-Capitalized! ", b" :) ")]
    request_headers_lower = [(k.lower(), v) for (k, v) in request_headers]
    response_headers = list(example_response_headers) + [(b"Same", b"Here")]
    response_headers_lower = [(k.lower(), v) for (k, v) in response_headers]

    initial = Placeholder(bytes)
    assert (
            playbook
            >> DataReceived(tctx.client,
                            cff.build_headers_frame(request_headers, flags=["END_STREAM"]).serialize())
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None, side_effect=make_h2)
            << SendData(server, initial)
    )
    frames = decode_frames(initial())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
    assert hpack.hpack.Decoder().decode(frames[1].data, True) == (
        request_headers_lower if normalize else request_headers
    )

    sff = FrameFactory()
    (
            playbook
            >> DataReceived(server, sff.build_headers_frame(response_headers, flags=["END_STREAM"]).serialize())
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
    )
    if normalize:
        playbook << Log("Lowercased 'Same' header as uppercase is not allowed with HTTP/2.")
    hdrs = response_headers_lower if normalize else response_headers
    assert playbook << SendData(tctx.client, cff.build_headers_frame(hdrs, flags=["END_STREAM"]).serialize())

    assert flow().request.headers.fields == ((b"Should-Not-Be-Capitalized! ", b" :) "),)
    assert flow().response.headers.fields == ((b"Same", b"Here"),)
Example 23
def test_transparent_sni(tctx):
    """Test that we keep the SNI in lazy transparent mode."""
    tctx.client.sni = "example.com"
    tctx.server.address = ("192.0.2.42", 443)
    tctx.server.tls = True

    flow = Placeholder(HTTPFlow)

    server = Placeholder(Server)
    assert (Playbook(http.HttpLayer(tctx, HTTPMode.transparent)) >>
            DataReceived(tctx.client, b"GET / HTTP/1.1\r\n\r\n") <<
            http.HttpRequestHeadersHook(flow) >> reply() <<
            http.HttpRequestHook(flow) >> reply() << OpenConnection(server))
    assert server().address == ("192.0.2.42", 443)
    assert server().sni == "example.com"
Example 24
def test_no_normalization(tctx):
    """Test that we don't normalize headers when we just pass them through."""

    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook, cff = start_h2_client(tctx)

    request_headers = example_request_headers + (
        (b"Should-Not-Be-Capitalized! ", b" :) "),
    )
    response_headers = example_response_headers + (
        (b"Same", b"Here"),
    )

    initial = Placeholder(bytes)
    assert (
            playbook
            >> DataReceived(tctx.client,
                            cff.build_headers_frame(request_headers, flags=["END_STREAM"]).serialize())
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None, side_effect=make_h2)
            << SendData(server, initial)
    )
    frames = decode_frames(initial())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
    assert hpack.hpack.Decoder().decode(frames[1].data, True) == list(request_headers)

    sff = FrameFactory()
    assert (
            playbook
            >> DataReceived(server, sff.build_headers_frame(response_headers, flags=["END_STREAM"]).serialize())
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client,
                        cff.build_headers_frame(response_headers).serialize() +
                        cff.build_data_frame(b"", flags=["END_STREAM"]).serialize())
    )
    assert flow().request.headers.fields == ((b"Should-Not-Be-Capitalized! ", b" :) "),)
    assert flow().response.headers.fields == ((b"Same", b"Here"),)
Example 25
def test_request_stream_modify(tctx):
    """Test HTTP response streaming"""
    server = Placeholder(Server)

    def enable_streaming(flow: HTTPFlow):
        flow.request.stream = lambda x: x.upper()

    assert (
            Playbook(http.HttpLayer(tctx, HTTPMode.regular))
            >> DataReceived(tctx.client, b"POST http://example.com/ HTTP/1.1\r\n"
                                         b"Host: example.com\r\n"
                                         b"Content-Length: 6\r\n\r\n"
                                         b"abc")
            << http.HttpRequestHeadersHook(Placeholder(HTTPFlow))
            >> reply(side_effect=enable_streaming)
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"POST / HTTP/1.1\r\n"
                                b"Host: example.com\r\n"
                                b"Content-Length: 6\r\n\r\n"
                                b"ABC")
    )
Example 26
def test_http_proxy(tctx):
    """Test a simple HTTP GET / request"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    assert (
            Playbook(http.HttpLayer(tctx, HTTPMode.regular))
            >> DataReceived(tctx.client,
                            b"GET http://example.com/foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            >> DataReceived(server, b"!")
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World!")
    )
    assert server().address == ("example.com", 80)
Example 27
def test_http_server_aborts(tctx, stream):
    """Test handling of the case where a server aborts during response transmission."""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    def enable_streaming(flow: HTTPFlow):
        flow.response.stream = True

    assert (
            playbook
            >> DataReceived(tctx.client, b"GET http://example.com/ HTTP/1.1\r\n"
                                         b"Host: example.com\r\n\r\n")
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET / HTTP/1.1\r\n"
                                b"Host: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\n"
                                    b"Content-Length: 6\r\n"
                                    b"\r\n"
                                    b"abc")
            << http.HttpResponseHeadersHook(flow)
    )
    if stream:
        assert (
            playbook
            >> reply(side_effect=enable_streaming)
            << SendData(tctx.client, b"HTTP/1.1 200 OK\r\n"
                                     b"Content-Length: 6\r\n"
                                     b"\r\n"
                                     b"abc")
        )
    else:
        assert playbook >> reply()
    assert (playbook >> ConnectionClosed(server) << CloseConnection(server) <<
            http.HttpErrorHook(flow))
    if stream:
        assert (playbook >> reply() << CloseConnection(tctx.client))
    else:
        error_html = Placeholder(bytes)
        assert (playbook >> reply() << SendData(tctx.client, error_html) <<
                CloseConnection(tctx.client))
        assert b"502 Bad Gateway" in error_html()
        assert b"peer closed connection" in error_html()

    assert "peer closed connection" in flow().error.msg
Example 28
def test_http2_client_aborts(tctx, stream, when, how):
    """
    Test handling of the case where a client aborts during request or response transmission.

    If the client aborts the request transmission, we must trigger an error hook;
    if the client disconnects during response transmission, no error hook is triggered.
    """
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook, cff = start_h2_client(tctx)
    resp = Placeholder(bytes)

    def enable_request_streaming(flow: HTTPFlow):
        flow.request.stream = True

    def enable_response_streaming(flow: HTTPFlow):
        flow.response.stream = True

    assert (
            playbook
            >> DataReceived(tctx.client, cff.build_headers_frame(example_request_headers).serialize())
            << http.HttpRequestHeadersHook(flow)
    )
    if stream and when == "request":
        assert (
                playbook
                >> reply(side_effect=enable_request_streaming)
                << http.HttpRequestHook(flow)
                >> reply()
                << OpenConnection(server)
                >> reply(None)
                << SendData(server, b"GET / HTTP/1.1\r\n"
                                    b"Host: example.com\r\n\r\n")
        )
    else:
        assert playbook >> reply()

    if when == "request":
        if "RST" in how:
            playbook >> DataReceived(tctx.client, cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
        else:
            playbook >> ConnectionClosed(tctx.client)
            playbook << CloseConnection(tctx.client)

        if stream:
            playbook << CloseConnection(server)
        playbook << http.HttpErrorHook(flow)
        playbook >> reply()

        if how == "RST+disconnect":
            playbook >> ConnectionClosed(tctx.client)
            playbook << CloseConnection(tctx.client)

        assert playbook
        assert "stream reset" in flow().error.msg or "peer closed connection" in flow().error.msg
        return

    assert (
            playbook
            >> DataReceived(tctx.client, cff.build_data_frame(b"", flags=["END_STREAM"]).serialize())
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET / HTTP/1.1\r\n"
                                b"Host: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 6\r\n\r\n123")
            << http.HttpResponseHeadersHook(flow)
    )
    if stream:
        assert (
                playbook
                >> reply(side_effect=enable_response_streaming)
                << SendData(tctx.client, resp)
        )
    else:
        assert playbook >> reply()

    if "RST" in how:
        playbook >> DataReceived(tctx.client, cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
    else:
        playbook >> ConnectionClosed(tctx.client)
        playbook << CloseConnection(tctx.client)

    assert (
            playbook
            << CloseConnection(server)
            << http.HttpErrorHook(flow)
            >> reply()
    )

    if how == "RST+disconnect":
        assert (
                playbook
                >> ConnectionClosed(tctx.client)
                << CloseConnection(tctx.client)
        )

    if "RST" in how:
        assert "stream reset" in flow().error.msg
    else:
        assert "peer closed connection" in flow().error.msg
Example 29
def test_kill_flow(tctx, when):
    """Test that we properly kill flows if instructed to do so"""
    server = Placeholder(Server)
    connect_flow = Placeholder(HTTPFlow)
    flow = Placeholder(HTTPFlow)

    def kill(flow: HTTPFlow):
        # Can't use flow.kill() here because that currently still depends on a reply object.
        flow.error = Error(Error.KILLED_MESSAGE)

    def assert_kill(err_hook: bool = True):
        playbook >> reply(side_effect=kill)
        if err_hook:
            playbook << http.HttpErrorHook(flow)
            playbook >> reply()
        playbook << CloseConnection(tctx.client)
        assert playbook

    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    assert (playbook
            >> DataReceived(tctx.client, b"CONNECT example.com:80 HTTP/1.1\r\n\r\n")
            << http.HttpConnectHook(connect_flow))
    if when == "http_connect":
        return assert_kill(False)
    assert (playbook
            >> reply()
            << SendData(tctx.client, b'HTTP/1.1 200 Connection established\r\n\r\n')
            >> DataReceived(tctx.client, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
            << layer.NextLayerHook(Placeholder())
            >> reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
            << http.HttpRequestHeadersHook(flow))
    if when == "requestheaders":
        return assert_kill()
    assert (playbook
            >> reply()
            << http.HttpRequestHook(flow))
    if when == "request":
        return assert_kill()
    if when == "script-response-responseheaders":
        assert (playbook
                >> reply(side_effect=lambda f: setattr(f, "response", Response.make()))
                << http.HttpResponseHeadersHook(flow))
        return assert_kill()
    assert (playbook
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World")
            << http.HttpResponseHeadersHook(flow))
    if when == "responseheaders":
        return assert_kill()

    if when == "response":
        assert (playbook
                >> reply()
                >> DataReceived(server, b"!")
                << http.HttpResponseHook(flow))
        return assert_kill(False)
    elif when == "error":
        assert (playbook
                >> reply()
                >> ConnectionClosed(server)
                << CloseConnection(server)
                << http.HttpErrorHook(flow))
        return assert_kill(False)
    else:
        raise AssertionError
Example 30
def test_request_streaming(tctx, response):
    """
    Test HTTP request streaming

    This is a bit more contrived as we may receive server data while we are still sending the request.
    """
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    def enable_streaming(flow: HTTPFlow):
        flow.request.stream = lambda x: x.upper()

    assert (
            playbook
            >> DataReceived(tctx.client, b"POST http://example.com/ HTTP/1.1\r\n"
                                         b"Host: example.com\r\n"
                                         b"Content-Length: 6\r\n\r\n"
                                         b"abc")
            << http.HttpRequestHeadersHook(flow)
            >> reply(side_effect=enable_streaming)
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"POST / HTTP/1.1\r\n"
                                b"Host: example.com\r\n"
                                b"Content-Length: 6\r\n\r\n"
                                b"ABC")
    )
    if response == "normal response":
        assert (
                playbook
                >> DataReceived(tctx.client, b"def")
                << SendData(server, b"DEF")
                >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
                << http.HttpResponseHeadersHook(flow)
                >> reply()
                << http.HttpResponseHook(flow)
                >> reply()
                << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
        )
    elif response == "early response":
        # We may receive a response before we have finished sending our request.
        # We continue sending unless the server closes the connection.
        # https://tools.ietf.org/html/rfc7231#section-6.5.11
        assert (
                playbook
                >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
                << http.HttpResponseHeadersHook(flow)
                >> reply()
                << http.HttpResponseHook(flow)
                >> reply()
                << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
                >> DataReceived(tctx.client, b"def")
                << SendData(server, b"DEF")  # Important: no request hook here!
        )
    elif response == "early close":
        assert (
                playbook
                >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
                << http.HttpResponseHeadersHook(flow)
                >> reply()
                << http.HttpResponseHook(flow)
                >> reply()
                << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
                >> ConnectionClosed(server)
                << CloseConnection(server)
                << CloseConnection(tctx.client)
        )
    elif response == "early kill":
        err = Placeholder(bytes)
        assert (
                playbook
                >> ConnectionClosed(server)
                << CloseConnection(server)
                << http.HttpErrorHook(flow)
                >> reply()
                << SendData(tctx.client, err)
                << CloseConnection(tctx.client)
        )
        assert b"502 Bad Gateway" in err()
    else:  # pragma: no cover
        assert False