Example #1
def test_http_client_aborts(tctx, stream):
    """Test handling of the case where a client aborts during request transmission."""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=True)

    def enable_streaming(flow: HTTPFlow):
        flow.request.stream = True

    assert (playbook >> DataReceived(
        tctx.client, b"POST http://example.com/ HTTP/1.1\r\n"
        b"Host: example.com\r\n"
        b"Content-Length: 6\r\n"
        b"\r\n"
        b"abc") << http.HttpRequestHeadersHook(flow))
    if stream:
        assert (playbook >> reply(side_effect=enable_streaming) <<
                OpenConnection(server) >> reply(None) << SendData(
                    server, b"POST / HTTP/1.1\r\n"
                    b"Host: example.com\r\n"
                    b"Content-Length: 6\r\n"
                    b"\r\n"
                    b"abc"))
    else:
        assert playbook >> reply()
    (playbook >> ConnectionClosed(tctx.client) << CloseConnection(tctx.client))
    if stream:
        playbook << CloseConnection(server)
    assert (playbook << http.HttpErrorHook(flow) >> reply() << None)

    assert "peer closed connection" in flow().error.msg
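All of these examples drive mitmproxy's sans-io HTTP layers through the same playbook DSL: >> feeds an event into the layer, << states the command the layer is expected to emit next, reply() answers the most recent hook or command, and evaluating the playbook in a bare assert replays and verifies the accumulated exchange. A condensed sketch of the pattern, reusing only names that already appear in these examples (imports and fixtures are assumed to come from the surrounding test module):

playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=False)
server = Placeholder(Server)
playbook >> DataReceived(tctx.client, b"GET http://example.com/ HTTP/1.1\r\n\r\n")  # event in
playbook << OpenConnection(server)  # command the layer must emit next
playbook >> reply(None)             # answer it: the connection was opened without error
assert playbook                     # replay the exchange and fail on any mismatch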
Example #2
def test_server_unreachable(tctx, connect):
    """Test the scenario where the target server is unreachable."""
    tctx.options.connection_strategy = "eager"
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    err = Placeholder(bytes)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=False)
    if connect:
        playbook >> DataReceived(tctx.client,
                                 b"CONNECT example.com:443 HTTP/1.1\r\n\r\n")
    else:
        playbook >> DataReceived(tctx.client,
                                 b"GET http://example.com/ HTTP/1.1\r\n\r\n")

    playbook << OpenConnection(server)
    playbook >> reply("Connection failed")
    if not connect:
        # Our API isn't ideal here: there is currently no error hook for CONNECT requests.
        # We could fix this either by having CONNECT requests go through all our regular hooks,
        # or by adding dedicated ok/error hooks.
        playbook << http.HttpErrorHook(flow)
        playbook >> reply()
    playbook << SendData(tctx.client, err)
    if not connect:
        playbook << CloseConnection(tctx.client)

    assert playbook
    if not connect:
        assert flow().error
    assert b"502 Bad Gateway" in err()
    assert b"Connection failed" in err()
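Placeholder deserves a short note: it declares a typed slot whose concrete value is only known once the playbook has run, and calling the placeholder afterwards yields the captured value. The idiom, as observed in the example above (a sketch of the semantics seen here, not a verbatim API reference):

err = Placeholder(bytes)                # typed slot, unresolved at declaration time
flow = Placeholder(HTTPFlow)
playbook << SendData(tctx.client, err)  # the playbook binds err to the bytes actually sent
assert playbook                         # run the exchange
assert b"502 Bad Gateway" in err()      # call the placeholder to read the captured bytes
assert flow().error                     # same for the captured flow object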
Example #3
def assert_kill(err_hook: bool = True):
    playbook >> reply(side_effect=kill)
    if err_hook:
        playbook << http.HttpErrorHook(flow)
        playbook >> reply()
    playbook << CloseConnection(tctx.client)
    assert playbook
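This fragment shows the assert_kill helper in isolation; Example #12 below contains it in full context, together with the kill side effect it relies on.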
Example #4
def test_kill_stream(tctx):
    """Test that we can kill individual streams."""
    playbook, cff = start_h2_client(tctx)
    flow1 = Placeholder(HTTPFlow)
    flow2 = Placeholder(HTTPFlow)

    req_headers_hook_1 = http.HttpRequestHeadersHook(flow1)

    def kill(flow: HTTPFlow):
        # Can't use flow.kill() here because that currently still depends on a reply object.
        flow.error = Error(Error.KILLED_MESSAGE)

    server = Placeholder(Server)
    data_req1 = Placeholder(bytes)

    assert (
        playbook
        >> DataReceived(
            tctx.client,
            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"], stream_id=1).serialize()
            + cff.build_headers_frame(example_request_headers, flags=["END_STREAM"], stream_id=3).serialize(),
        )
        << req_headers_hook_1
        << http.HttpRequestHeadersHook(flow2)
        >> reply(side_effect=kill)
        << http.HttpErrorHook(flow2)
        >> reply()
        << SendData(tctx.client, cff.build_rst_stream_frame(3, error_code=ErrorCodes.INTERNAL_ERROR).serialize())
        >> reply(to=req_headers_hook_1)
        << http.HttpRequestHook(flow1)
        >> reply()
        << OpenConnection(server)
        >> reply(None, side_effect=make_h2)
        << SendData(server, data_req1)
    )
    frames = decode_frames(data_req1())
    assert [type(x) for x in frames] == [
        hyperframe.frame.SettingsFrame,
        hyperframe.frame.HeadersFrame,
    ]
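The make_h2 side effect used to answer OpenConnection above is defined outside these excerpts; presumably it marks the freshly opened server connection as HTTP/2 by setting its negotiated ALPN. A minimal sketch under that assumption:

def make_h2(open_connection: OpenConnection) -> None:
    # Assumed behaviour: pretend the upstream server negotiated HTTP/2 via ALPN,
    # so the proxy speaks h2 on the server connection.
    open_connection.connection.alpn = b"h2"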
Example #5
def test_cancel_then_server_disconnect(tctx):
    """
    Test that we properly handle the case of the following event sequence:
        - client cancels a stream
        - we start an error hook
        - server disconnects
        - error hook completes.
    """
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)

    assert (
        playbook
        >> DataReceived(
            tctx.client,
            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize(),
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server, b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        >> DataReceived(tctx.client, cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
        << CloseConnection(server)
        << http.HttpErrorHook(flow)
        >> reply()
        >> ConnectionClosed(server)
        << None
    )
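example_request_headers (and the example_response_headers used in Example #14) are likewise defined outside these excerpts. For HTTP/2 they would be sequences of pseudo-header/value byte pairs roughly like the following (illustrative values, not the exact fixtures):

example_request_headers = (
    (b":method", b"GET"),
    (b":scheme", b"http"),
    (b":path", b"/"),
    (b":authority", b"example.com"),
)
example_response_headers = (
    (b":status", b"200"),
)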
Example #6
def test_rst_then_close(tctx):
    """
    Test that we properly handle the case of a client that first causes protocol errors and then disconnects.

    Adapted from h2spec http2/5.1/5.
    """
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)

    assert (
        playbook
        >> DataReceived(
            tctx.client,
            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize(),
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> DataReceived(tctx.client, cff.build_data_frame(b"unexpected data frame").serialize())
        << SendData(tctx.client, cff.build_rst_stream_frame(1, ErrorCodes.STREAM_CLOSED).serialize())
        >> ConnectionClosed(tctx.client)
        << CloseConnection(tctx.client)
        >> reply("connection cancelled", to=-5)
        << http.HttpErrorHook(flow)
        >> reply()
    )
    assert flow().error.msg == "connection cancelled"
Example #7
def test_body_size_limit(tctx, where, transfer_encoding):
    """Test HTTP request body_size_limit"""
    tctx.options.body_size_limit = "3"
    err = Placeholder(bytes)
    flow = Placeholder(HTTPFlow)

    if transfer_encoding == "identity":
        body = b"Content-Length: 6\r\n\r\nabcdef"
    else:
        body = b"Transfer-Encoding: chunked\r\n\r\n6\r\nabcdef"

    if where == "request":
        assert (
            Playbook(http.HttpLayer(tctx, HTTPMode.regular))
            >> DataReceived(
                tctx.client,
                b"POST http://example.com/ HTTP/1.1\r\n"
                b"Host: example.com\r\n" + body,
            )
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpErrorHook(flow)
            >> reply()
            << SendData(tctx.client, err)
            << CloseConnection(tctx.client)
        )
        assert b"413 Payload Too Large" in err()
        assert b"body_size_limit" in err()
    else:
        server = Placeholder(Server)
        assert (
            Playbook(http.HttpLayer(tctx, HTTPMode.regular))
            >> DataReceived(
                tctx.client,
                b"GET http://example.com/ HTTP/1.1\r\n"
                b"Host: example.com\r\n\r\n",
            )
            << http.HttpRequestHeadersHook(flow)
            >> reply()
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\n" + body)
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpErrorHook(flow)
            >> reply()
            << SendData(tctx.client, err)
            << CloseConnection(tctx.client)
            << CloseConnection(server)
        )
        assert b"502 Bad Gateway" in err()
        assert b"body_size_limit" in err()
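Several examples take extra arguments such as where, transfer_encoding, stream, connect, when and how. These are not fixtures; in the originating suite they are presumably supplied via pytest parametrization. A sketch for this example, with the values inferred from the branches above:

import pytest

@pytest.mark.parametrize("transfer_encoding", ["identity", "chunked"])
@pytest.mark.parametrize("where", ["request", "response"])
def test_body_size_limit(tctx, where, transfer_encoding):
    ...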
Example #8
def test_server_aborts(tctx, data):
    """Test the scenario where the server doesn't serve a response"""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    err = Placeholder(bytes)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=False)
    assert (playbook >> DataReceived(
        tctx.client,
        b"GET http://example.com/ HTTP/1.1\r\nHost: example.com\r\n\r\n") <<
            OpenConnection(server) >> reply(None) << SendData(
                server, b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n"))
    if data:
        playbook >> DataReceived(server, data)
    assert (playbook >> ConnectionClosed(server) << CloseConnection(server)
            << http.HttpErrorHook(flow) >> reply() << SendData(
                tctx.client, err) << CloseConnection(tctx.client))
    assert flow().error
    assert b"502 Bad Gateway" in err()
Example #9
def test_http_server_aborts(tctx, stream):
    """Test handling of the case where a server aborts during response transmission."""
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    def enable_streaming(flow: HTTPFlow):
        flow.response.stream = True

    assert (
        playbook
        >> DataReceived(
            tctx.client,
            b"GET http://example.com/ HTTP/1.1\r\n"
            b"Host: example.com\r\n\r\n",
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply(None)
        << SendData(server, b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        >> DataReceived(
            server,
            b"HTTP/1.1 200 OK\r\n"
            b"Content-Length: 6\r\n"
            b"\r\n"
            b"abc",
        )
        << http.HttpResponseHeadersHook(flow)
    )
    if stream:
        assert (playbook >> reply(side_effect=enable_streaming) << SendData(
            tctx.client, b"HTTP/1.1 200 OK\r\n"
            b"Content-Length: 6\r\n"
            b"\r\n"
            b"abc"))
    else:
        assert playbook >> reply()
    assert (playbook >> ConnectionClosed(server) << CloseConnection(server) <<
            http.HttpErrorHook(flow))
    if stream:
        assert (playbook >> reply() << CloseConnection(tctx.client))
    else:
        error_html = Placeholder(bytes)
        assert (playbook >> reply() << SendData(tctx.client, error_html) <<
                CloseConnection(tctx.client))
        assert b"502 Bad Gateway" in error_html()
        assert b"peer closed connection" in error_html()

    assert "peer closed connection" in flow().error.msg
Example #10
def test_upstream_error(tctx):
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)
    err = Placeholder(bytes)
    assert (
        playbook
        >> DataReceived(
            tctx.client,
            cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize(),
        )
        << http.HttpRequestHeadersHook(flow)
        >> reply()
        << http.HttpRequestHook(flow)
        >> reply()
        << OpenConnection(server)
        >> reply("oops server <> error")
        << http.HttpErrorHook(flow)
        >> reply()
        << SendData(tctx.client, err)
    )
    frames = decode_frames(err())
    assert [type(x) for x in frames] == [
        hyperframe.frame.HeadersFrame,
        hyperframe.frame.DataFrame,
    ]
    d = frames[1]
    assert isinstance(d, hyperframe.frame.DataFrame)
    assert b"502 Bad Gateway" in d.data
    assert b"server &lt;&gt; error" in d.data
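Note how the error string passed to reply(), "oops server <> error", reappears as server &lt;&gt; error: the 502 body sent to the client is an HTML error page, so the message is HTML-escaped before being embedded.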
Example #11
def test_http2_client_aborts(tctx, stream, when, how):
    """
    Test handling of the case where a client aborts during request or response transmission.

    If the client aborts during request transmission, we must trigger an error hook;
    if the client disconnects during response transmission, no error hook is triggered.
    """
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook, cff = start_h2_client(tctx)
    resp = Placeholder(bytes)

    def enable_request_streaming(flow: HTTPFlow):
        flow.request.stream = True

    def enable_response_streaming(flow: HTTPFlow):
        flow.response.stream = True

    assert (
            playbook
            >> DataReceived(tctx.client, cff.build_headers_frame(example_request_headers).serialize())
            << http.HttpRequestHeadersHook(flow)
    )
    if stream and when == "request":
        assert (
                playbook
                >> reply(side_effect=enable_request_streaming)
                << http.HttpRequestHook(flow)
                >> reply()
                << OpenConnection(server)
                >> reply(None)
                << SendData(server, b"GET / HTTP/1.1\r\n"
                                    b"Host: example.com\r\n\r\n")
        )
    else:
        assert playbook >> reply()

    if when == "request":
        if "RST" in how:
            playbook >> DataReceived(tctx.client, cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
        else:
            playbook >> ConnectionClosed(tctx.client)
            playbook << CloseConnection(tctx.client)

        if stream:
            playbook << CloseConnection(server)
        playbook << http.HttpErrorHook(flow)
        playbook >> reply()

        if how == "RST+disconnect":
            playbook >> ConnectionClosed(tctx.client)
            playbook << CloseConnection(tctx.client)

        assert playbook
        assert "stream reset" in flow().error.msg or "peer closed connection" in flow().error.msg
        return

    assert (
            playbook
            >> DataReceived(tctx.client, cff.build_data_frame(b"", flags=["END_STREAM"]).serialize())
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET / HTTP/1.1\r\n"
                                b"Host: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 6\r\n\r\n123")
            << http.HttpResponseHeadersHook(flow)
    )
    if stream:
        assert (
                playbook
                >> reply(side_effect=enable_response_streaming)
                << SendData(tctx.client, resp)
        )
    else:
        assert playbook >> reply()

    if "RST" in how:
        playbook >> DataReceived(tctx.client, cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
    else:
        playbook >> ConnectionClosed(tctx.client)
        playbook << CloseConnection(tctx.client)

    assert (
            playbook
            << CloseConnection(server)
            << http.HttpErrorHook(flow)
            >> reply()
    )

    if how == "RST+disconnect":
        assert (
                playbook
                >> ConnectionClosed(tctx.client)
                << CloseConnection(tctx.client)
        )

    if "RST" in how:
        assert "stream reset" in flow().error.msg
    else:
        assert "peer closed connection" in flow().error.msg
Example #12
def test_kill_flow(tctx, when):
    """Test that we properly kill flows if instructed to do so"""
    server = Placeholder(Server)
    connect_flow = Placeholder(HTTPFlow)
    flow = Placeholder(HTTPFlow)

    def kill(flow: HTTPFlow):
        # Can't use flow.kill() here because that currently still depends on a reply object.
        flow.error = Error(Error.KILLED_MESSAGE)

    def assert_kill(err_hook: bool = True):
        playbook >> reply(side_effect=kill)
        if err_hook:
            playbook << http.HttpErrorHook(flow)
            playbook >> reply()
        playbook << CloseConnection(tctx.client)
        assert playbook

    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    assert (playbook
            >> DataReceived(tctx.client, b"CONNECT example.com:80 HTTP/1.1\r\n\r\n")
            << http.HttpConnectHook(connect_flow))
    if when == "http_connect":
        return assert_kill(False)
    assert (playbook
            >> reply()
            << SendData(tctx.client, b'HTTP/1.1 200 Connection established\r\n\r\n')
            >> DataReceived(tctx.client, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
            << layer.NextLayerHook(Placeholder())
            >> reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
            << http.HttpRequestHeadersHook(flow))
    if when == "requestheaders":
        return assert_kill()
    assert (playbook
            >> reply()
            << http.HttpRequestHook(flow))
    if when == "request":
        return assert_kill()
    if when == "script-response-responseheaders":
        assert (playbook
                >> reply(side_effect=lambda f: setattr(f, "response", Response.make()))
                << http.HttpResponseHeadersHook(flow))
        return assert_kill()
    assert (playbook
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
            >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World")
            << http.HttpResponseHeadersHook(flow))
    if when == "responseheaders":
        return assert_kill()

    if when == "response":
        assert (playbook
                >> reply()
                >> DataReceived(server, b"!")
                << http.HttpResponseHook(flow))
        return assert_kill(False)
    elif when == "error":
        assert (playbook
                >> reply()
                >> ConnectionClosed(server)
                << CloseConnection(server)
                << http.HttpErrorHook(flow))
        return assert_kill(False)
    else:
        raise AssertionError
Example #13
def test_request_streaming(tctx, response):
    """
    Test HTTP request streaming

    This is a bit more contrived as we may receive server data while we are still sending the request.
    """
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    def enable_streaming(flow: HTTPFlow):
        flow.request.stream = lambda x: x.upper()

    assert (
            playbook
            >> DataReceived(tctx.client, b"POST http://example.com/ HTTP/1.1\r\n"
                                         b"Host: example.com\r\n"
                                         b"Content-Length: 6\r\n\r\n"
                                         b"abc")
            << http.HttpRequestHeadersHook(flow)
            >> reply(side_effect=enable_streaming)
            << http.HttpRequestHook(flow)
            >> reply()
            << OpenConnection(server)
            >> reply(None)
            << SendData(server, b"POST / HTTP/1.1\r\n"
                                b"Host: example.com\r\n"
                                b"Content-Length: 6\r\n\r\n"
                                b"ABC")
    )
    if response == "normal response":
        assert (
                playbook
                >> DataReceived(tctx.client, b"def")
                << SendData(server, b"DEF")
                >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
                << http.HttpResponseHeadersHook(flow)
                >> reply()
                << http.HttpResponseHook(flow)
                >> reply()
                << SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
        )
    elif response == "early response":
        # We may receive a response before we have finished sending our request.
        # We continue sending unless the server closes the connection.
        # https://tools.ietf.org/html/rfc7231#section-6.5.11
        assert (
                playbook
                >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
                << http.HttpResponseHeadersHook(flow)
                >> reply()
                << http.HttpResponseHook(flow)
                >> reply()
                << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
                >> DataReceived(tctx.client, b"def")
                << SendData(server, b"DEF")  # Important: no request hook here!
        )
    elif response == "early close":
        assert (
                playbook
                >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
                << http.HttpResponseHeadersHook(flow)
                >> reply()
                << http.HttpResponseHook(flow)
                >> reply()
                << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
                >> ConnectionClosed(server)
                << CloseConnection(server)
                << CloseConnection(tctx.client)
        )
    elif response == "early kill":
        err = Placeholder(bytes)
        assert (
                playbook
                >> ConnectionClosed(server)
                << CloseConnection(server)
                << http.HttpErrorHook(flow)
                >> reply()
                << SendData(tctx.client, err)
                << CloseConnection(tctx.client)
        )
        assert b"502 Bad Gateway" in err()
    else:  # pragma: no cover
        assert False
Example #14
def _test_cancel(stream_req, stream_resp, draw):
    """
    Test that we don't raise an exception if someone disconnects.
    """
    tctx = context.Context(connection.Client(("client", 1234), ("127.0.0.1", 8080), 1605699329), opts)
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)

    def maybe_stream(flow: HTTPFlow):
        if stream_req:
            flow.request.stream = True
        if stream_resp and flow.response:
            flow.response.stream = True

    hook_req_headers = http.HttpRequestHeadersHook(flow)
    hook_req = http.HttpRequestHook(flow)
    hook_resp_headers = http.HttpResponseHeadersHook(flow)
    hook_resp = http.HttpResponseHook(flow)
    hook_error = http.HttpErrorHook(flow)
    openconn = OpenConnection(server)
    send_upstream = SendData(server, Placeholder(bytes))

    data_req = DataReceived(tctx.client, cff.build_headers_frame(example_request_headers).serialize())
    data_reqbody = DataReceived(tctx.client, cff.build_data_frame(b"foo", flags=["END_STREAM"]).serialize())
    data_resp = DataReceived(server, cff.build_headers_frame(example_response_headers).serialize())
    data_respbody = DataReceived(server, cff.build_data_frame(b"bar", flags=["END_STREAM"]).serialize())

    client_disc = ConnectionClosed(tctx.client)
    client_rst = DataReceived(tctx.client, cff.build_rst_stream_frame(1).serialize())
    server_disc = ConnectionClosed(server)
    server_rst = DataReceived(server, cff.build_rst_stream_frame(1).serialize())

    evts: Dict[str, Tuple[Any, Any, Any]] = {}
    # Each entry is (event, precondition, negative precondition): the event may only be
    # drawn after its precondition has occurred and never after its negative precondition.
    evts["data_req"] = data_req, None, client_disc
    evts["data_reqbody"] = data_reqbody, data_req, client_disc
    evts["reply_hook_req_headers"] = reply(to=hook_req_headers, side_effect=maybe_stream), hook_req_headers, None
    evts["reply_hook_req"] = reply(to=hook_req), hook_req, None
    evts["reply_openconn"] = reply(None, to=openconn, side_effect=make_h2), openconn, None
    evts["data_resp"] = data_resp, send_upstream, server_disc
    evts["data_respbody"] = data_respbody, data_resp, server_disc
    evts["reply_hook_resp_headers"] = reply(to=hook_resp_headers, side_effect=maybe_stream), hook_resp_headers, None
    evts["reply_hook_resp"] = reply(to=hook_resp), hook_resp, None
    evts["reply_hook_error"] = reply(to=hook_error), hook_error, None

    evts["err_client_disc"] = client_disc, None, None
    evts["err_client_rst"] = client_rst, None, client_disc
    evts["err_server_disc"] = server_disc, send_upstream, None
    evts["err_server_rst"] = server_rst, send_upstream, server_disc

    def eq_maybe(a, b):
        # _eq helpfully raises a TypeError when placeholder types don't match
        # that is useful in (test) development, but may happen legitimately when fuzzing here.
        try:
            return _eq(a, b)
        except TypeError:
            return False

    while evts:
        candidates = []
        for name, (evt, precon, negprecon) in evts.items():
            precondition_ok = (
                    precon is None or any(eq_maybe(x, precon) for x in playbook.actual)
            )
            neg_precondition_ok = (
                    negprecon is None or not any(eq_maybe(x, negprecon) for x in playbook.actual)
            )
            if precondition_ok and neg_precondition_ok:
                # crude hack to increase fuzzing efficiency: make it more likely that we progress.
                for i in range(1 if name.startswith("err_") else 3):
                    candidates.append((name, evt))
        if not candidates:
            break

        name, evt = draw(candidates)
        del evts[name]
        try:
            assert playbook >> evt
        except AssertionError:
            if any(
                    isinstance(x, _TracebackInPlaybook)
                    for x in playbook.actual
            ):
                raise
            else:
                # add commands that the server issued.
                playbook.expected.extend(playbook.actual[len(playbook.expected):])
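The draw argument of _test_cancel is a callable that picks one candidate (name, event) pair per iteration, so the helper can be driven by a fuzzer. One plausible harness (not necessarily the original one) built on hypothesis:

import hypothesis.strategies as st
from hypothesis import given

@given(stream_req=st.booleans(), stream_resp=st.booleans(), data=st.data())
def test_cancel_fuzzed(stream_req, stream_resp, data):
    # Delegate each choice to hypothesis so that failing event orderings shrink nicely.
    _test_cancel(stream_req, stream_resp,
                 lambda candidates: data.draw(st.sampled_from(candidates)))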
Example #15
def test_request_streaming(tctx, why, transfer_encoding, response):
    """
    Test HTTP request streaming

    This is a bit more contrived as we may receive server data while we are still sending the request.
    """
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    if why.startswith("body_size"):
        tctx.options.stream_large_bodies = why.replace("body_size=", "")

    def enable_streaming(flow: HTTPFlow):
        if why == "addon":
            flow.request.stream = True

    playbook >> DataReceived(
        tctx.client, b"POST http://example.com/ HTTP/1.1\r\n"
        b"Host: example.com\r\n")
    if transfer_encoding == "identity":
        playbook >> DataReceived(tctx.client, b"Content-Length: 9\r\n\r\n"
                                 b"abc")
    else:
        playbook >> DataReceived(
            tctx.client, b"Transfer-Encoding: chunked\r\n\r\n"
            b"3\r\nabc\r\n")

    playbook << http.HttpRequestHeadersHook(flow)
    playbook >> reply(side_effect=enable_streaming)

    needs_more_data_before_open = (why == "body_size=3"
                                   and transfer_encoding == "chunked")
    if needs_more_data_before_open:
        playbook >> DataReceived(tctx.client, b"3\r\ndef\r\n")

    playbook << OpenConnection(server)
    playbook >> reply(None)
    playbook << SendData(server, b"POST / HTTP/1.1\r\n"
                         b"Host: example.com\r\n")

    if transfer_encoding == "identity":
        playbook << SendData(server, b"Content-Length: 9\r\n\r\n" b"abc")
        playbook >> DataReceived(tctx.client, b"def")
        playbook << SendData(server, b"def")
    else:
        if needs_more_data_before_open:
            playbook << SendData(
                server, b"Transfer-Encoding: chunked\r\n\r\n"
                b"6\r\nabcdef\r\n")
        else:
            playbook << SendData(
                server, b"Transfer-Encoding: chunked\r\n\r\n"
                b"3\r\nabc\r\n")
            playbook >> DataReceived(tctx.client, b"3\r\ndef\r\n")
            playbook << SendData(server, b"3\r\ndef\r\n")

    if response == "normal response":
        if transfer_encoding == "identity":
            playbook >> DataReceived(tctx.client, b"ghi")
            playbook << SendData(server, b"ghi")
        else:
            playbook >> DataReceived(tctx.client, b"3\r\nghi\r\n0\r\n\r\n")
            playbook << SendData(server, b"3\r\nghi\r\n")

        playbook << http.HttpRequestHook(flow)
        playbook >> reply()
        if transfer_encoding == "chunked":
            playbook << SendData(server, b"0\r\n\r\n")
        assert (
            playbook >> DataReceived(
                server, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n") <<
            http.HttpResponseHeadersHook(flow) >> reply() <<
            http.HttpResponseHook(flow) >> reply() << SendData(
                tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n"))
    elif response == "early response":
        # We may receive a response before we have finished sending our request.
        # We continue sending unless the server closes the connection.
        # https://tools.ietf.org/html/rfc7231#section-6.5.11
        assert (
            playbook
            >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
        )
        if transfer_encoding == "identity":
            playbook >> DataReceived(tctx.client, b"ghi")
            playbook << SendData(server, b"ghi")
        else:
            playbook >> DataReceived(tctx.client, b"3\r\nghi\r\n0\r\n\r\n")
            playbook << SendData(server, b"3\r\nghi\r\n")
        playbook << http.HttpRequestHook(flow)
        playbook >> reply()
        if transfer_encoding == "chunked":
            playbook << SendData(server, b"0\r\n\r\n")
        assert playbook
    elif response == "early close":
        assert (
            playbook
            >> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            << http.HttpResponseHeadersHook(flow)
            >> reply()
            << http.HttpResponseHook(flow)
            >> reply()
            << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
            >> ConnectionClosed(server)
            << CloseConnection(server)
            << CloseConnection(tctx.client)
        )
    elif response == "early kill":
        err = Placeholder(bytes)
        assert (playbook >> ConnectionClosed(server) << CloseConnection(server)
                << http.HttpErrorHook(flow) >> reply() << SendData(
                    tctx.client, err) << CloseConnection(tctx.client))
        assert b"502 Bad Gateway" in err()
    else:  # pragma: no cover
        assert False