Example #1
def no_flow_hooks():
    assert (
        Playbook(tcp.TCPLayer(tctx, ignore=ignore), hooks=True) <<
        OpenConnection(tctx.server) >> reply(None) >> DataReceived(
            tctx.client, b"hello!") << SendData(tctx.server, b"hello!"))
Example #2
def _test_cancel(stream_req, stream_resp, draw):
    """
    Test that we don't raise an exception if someone disconnects.
    """
    tctx = context.Context(
        context.Client(("client", 1234), ("127.0.0.1", 8080), 1605699329),
        opts)
    playbook, cff = start_h2_client(tctx)
    flow = Placeholder(HTTPFlow)
    server = Placeholder(Server)

    def maybe_stream(flow: HTTPFlow):
        if stream_req:
            flow.request.stream = True
        if stream_resp and flow.response:
            flow.response.stream = True

    hook_req_headers = http.HttpRequestHeadersHook(flow)
    hook_req = http.HttpRequestHook(flow)
    hook_resp_headers = http.HttpResponseHeadersHook(flow)
    hook_resp = http.HttpResponseHook(flow)
    hook_error = http.HttpErrorHook(flow)
    openconn = OpenConnection(server)
    send_upstream = SendData(server, Placeholder(bytes))

    data_req = DataReceived(
        tctx.client,
        cff.build_headers_frame(example_request_headers).serialize())
    data_reqbody = DataReceived(
        tctx.client,
        cff.build_data_frame(b"foo", flags=["END_STREAM"]).serialize())
    data_resp = DataReceived(
        server,
        cff.build_headers_frame(example_response_headers).serialize())
    data_respbody = DataReceived(
        server,
        cff.build_data_frame(b"bar", flags=["END_STREAM"]).serialize())

    client_disc = ConnectionClosed(tctx.client)
    client_rst = DataReceived(tctx.client,
                              cff.build_rst_stream_frame(1).serialize())
    server_disc = ConnectionClosed(server)
    server_rst = DataReceived(server,
                              cff.build_rst_stream_frame(1).serialize())

    evts: Dict[str, Tuple[Any, Any, Any]] = {}
    # tuple layout: (event, precondition, negative precondition: "but not after this")
    evts["data_req"] = data_req, None, client_disc
    evts["data_reqbody"] = data_reqbody, data_req, client_disc
    evts["reply_hook_req_headers"] = reply(
        to=hook_req_headers, side_effect=maybe_stream), hook_req_headers, None
    evts["reply_hook_req"] = reply(to=hook_req), hook_req, None
    evts["reply_openconn"] = reply(None, to=openconn,
                                   side_effect=make_h2), openconn, None
    evts["data_resp"] = data_resp, send_upstream, server_disc
    evts["data_respbody"] = data_respbody, data_resp, server_disc
    evts["reply_hook_resp_headers"] = reply(
        to=hook_resp_headers,
        side_effect=maybe_stream), hook_resp_headers, None
    evts["reply_hook_resp"] = reply(to=hook_resp), hook_resp, None
    evts["reply_hook_error"] = reply(to=hook_error), hook_error, None

    evts["err_client_disc"] = client_disc, None, None
    evts["err_client_rst"] = client_rst, None, client_disc
    evts["err_server_disc"] = server_disc, send_upstream, None
    evts["err_server_rst"] = server_rst, send_upstream, server_disc

    def eq_maybe(a, b):
        # _eq helpfully raises a TypeError when placeholder types don't match.
        # That is useful during test development, but can happen legitimately when fuzzing here.
        try:
            return _eq(a, b)
        except TypeError:
            return False

    while evts:
        candidates = []
        for name, (evt, precon, negprecon) in evts.items():
            precondition_ok = (precon is None or any(
                eq_maybe(x, precon) for x in playbook.actual))
            neg_precondition_ok = (
                negprecon is None
                or not any(eq_maybe(x, negprecon) for x in playbook.actual))
            if precondition_ok and neg_precondition_ok:
                # crude hack to increase fuzzing efficiency: make it more likely that we progress.
                for i in range(1 if name.startswith("err_") else 3):
                    candidates.append((name, evt))
        if not candidates:
            break

        name, evt = draw(candidates)
        del evts[name]
        try:
            assert playbook >> evt
        except AssertionError:
            if any(
                    isinstance(x, _TracebackInPlaybook)
                    for x in playbook.actual):
                raise
            else:
                # add commands that the server issued.
                playbook.expected.extend(
                    playbook.actual[len(playbook.expected):])
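Nothing in this excerpt shows how `_test_cancel` is invoked; the only contract visible above is that `draw` picks one `(name, event)` pair from the candidate list. A hypothetical, deterministic driver (the test name, seed range, and use of `random.Random` are illustrative, not taken from the original suite) could look like this:

import random

import pytest


@pytest.mark.parametrize("stream_req", [True, False])
@pytest.mark.parametrize("stream_resp", [True, False])
@pytest.mark.parametrize("seed", range(16))
def test_cancel_deterministic(stream_req, stream_resp, seed):
    # random.Random(seed).choice satisfies the draw contract used above:
    # it receives the candidate list and returns one (name, event) tuple.
    _test_cancel(stream_req, stream_resp, draw=random.Random(seed).choice)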
Example #3
def test_kill_flow(tctx, when):
    """Test that we properly kill flows if instructed to do so"""
    server = Placeholder(Server)
    connect_flow = Placeholder(HTTPFlow)
    flow = Placeholder(HTTPFlow)

    def kill(flow: HTTPFlow):
        # Can't use flow.kill() here because that currently still depends on a reply object.
        flow.error = Error(Error.KILLED_MESSAGE)

    def assert_kill(err_hook: bool = True):
        playbook >> reply(side_effect=kill)
        if err_hook:
            playbook << http.HttpErrorHook(flow)
            playbook >> reply()
        playbook << CloseConnection(tctx.client)
        assert playbook

    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
    assert (playbook >> DataReceived(
        tctx.client, b"CONNECT example.com:80 HTTP/1.1\r\n\r\n") <<
            http.HttpConnectHook(connect_flow))
    if when == "http_connect":
        return assert_kill(False)
    assert (
        playbook >> reply() << SendData(
            tctx.client,
            b'HTTP/1.1 200 Connection established\r\n\r\n') >> DataReceived(
                tctx.client,
                b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n") <<
        layer.NextLayerHook(Placeholder()) >>
        reply_next_layer(lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent))
        << http.HttpRequestHeadersHook(flow))
    if when == "requestheaders":
        return assert_kill()
    assert (playbook >> reply() << http.HttpRequestHook(flow))
    if when == "request":
        return assert_kill()
    if when == "script-response-responseheaders":
        assert (playbook >> reply(
            side_effect=lambda f: setattr(f, "response", HTTPResponse.make()))
                << http.HttpResponseHeadersHook(flow))
        return assert_kill()
    assert (
        playbook >> reply() << OpenConnection(server) >>
        reply(None) << SendData(
            server, b"GET /foo?hello=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")
        >> DataReceived(
            server,
            b"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World") <<
        http.HttpResponseHeadersHook(flow))
    if when == "responseheaders":
        return assert_kill()

    if when == "response":
        assert (playbook >> reply() >> DataReceived(server, b"!") <<
                http.HttpResponseHook(flow))
        return assert_kill(False)
    elif when == "error":
        assert (playbook >> reply() >> ConnectionClosed(server) <<
                CloseConnection(server) << http.HttpErrorHook(flow))
        return assert_kill(False)
    else:
        raise AssertionError
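The original decorators are not part of this excerpt, but the body above branches on exactly seven `when` values. A plausible parametrization (hypothetical wrapper name; the `tctx` fixture is assumed to come from the suite's conftest) would be:

import pytest


@pytest.mark.parametrize("when", [
    "http_connect",
    "requestheaders",
    "request",
    "script-response-responseheaders",
    "responseheaders",
    "response",
    "error",
])
def test_kill_flow_at(tctx, when):
    test_kill_flow(tctx, when)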
Example #4
def test_upstream_proxy(tctx, redirect, scheme):
    """Test that an upstream HTTP proxy is used."""
    server = Placeholder(Server)
    server2 = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    tctx.options.mode = "upstream:http://proxy:8080"
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.upstream), hooks=False)

    if scheme == "http":
        assert (playbook >> DataReceived(
            tctx.client,
            b"GET http://example.com/ HTTP/1.1\r\nHost: example.com\r\n\r\n"
        ) << OpenConnection(server) >> reply(None) << SendData(
            server,
            b"GET http://example.com/ HTTP/1.1\r\nHost: example.com\r\n\r\n"))

    else:
        assert (playbook >> DataReceived(
            tctx.client,
            b"CONNECT example.com:443 HTTP/1.1\r\n\r\n") << SendData(
                tctx.client, b"HTTP/1.1 200 Connection established\r\n\r\n") >>
                DataReceived(tctx.client,
                             b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n") <<
                layer.NextLayerHook(Placeholder()) >> reply_next_layer(
                    lambda ctx: http.HttpLayer(ctx, HTTPMode.transparent)) <<
                OpenConnection(server) >> reply(None) << SendData(
                    server, b"CONNECT example.com:443 HTTP/1.1\r\n\r\n")
                >> DataReceived(
                    server, b"HTTP/1.1 200 Connection established\r\n\r\n") <<
                SendData(server,
                         b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n"))

    playbook >> DataReceived(server,
                             b"HTTP/1.1 418 OK\r\nContent-Length: 0\r\n\r\n")
    playbook << SendData(tctx.client,
                         b"HTTP/1.1 418 OK\r\nContent-Length: 0\r\n\r\n")

    assert playbook
    assert server().address == ("proxy", 8080)

    if scheme == "http":
        playbook >> DataReceived(
            tctx.client,
            b"GET http://example.com/two HTTP/1.1\r\nHost: example.com\r\n\r\n"
        )
    else:
        playbook >> DataReceived(
            tctx.client, b"GET /two HTTP/1.1\r\nHost: example.com\r\n\r\n")

    assert (playbook << http.HttpRequestHook(flow))
    if redirect == "change-destination":
        flow().request.host = "other-server"
        flow().request.host_header = "example.com"
    elif redirect == "change-proxy":
        flow().server_conn.via = ServerSpec("http",
                                            address=("other-proxy", 1234))
    playbook >> reply()

    if redirect:
        # Protocol-wise we wouldn't need to open a new connection for plain http host redirects,
        # but we disregard this edge case to simplify implementation.
        playbook << OpenConnection(server2)
        playbook >> reply(None)
    else:
        server2 = server

    if scheme == "http":
        if redirect == "change-destination":
            playbook << SendData(
                server2,
                b"GET http://other-server/two HTTP/1.1\r\nHost: example.com\r\n\r\n"
            )
        else:
            playbook << SendData(
                server2,
                b"GET http://example.com/two HTTP/1.1\r\nHost: example.com\r\n\r\n"
            )
    else:
        if redirect == "change-destination":
            playbook << SendData(server2,
                                 b"CONNECT other-server:443 HTTP/1.1\r\n\r\n")
            playbook >> DataReceived(
                server2, b"HTTP/1.1 200 Connection established\r\n\r\n")
        elif redirect == "change-proxy":
            playbook << SendData(server2,
                                 b"CONNECT example.com:443 HTTP/1.1\r\n\r\n")
            playbook >> DataReceived(
                server2, b"HTTP/1.1 200 Connection established\r\n\r\n")
        playbook << SendData(
            server2, b"GET /two HTTP/1.1\r\nHost: example.com\r\n\r\n")

    playbook >> DataReceived(server2,
                             b"HTTP/1.1 418 OK\r\nContent-Length: 0\r\n\r\n")
    playbook << SendData(tctx.client,
                         b"HTTP/1.1 418 OK\r\nContent-Length: 0\r\n\r\n")

    assert playbook

    if redirect == "change-proxy":
        assert server2().address == ("other-proxy", 1234)
    else:
        assert server2().address == ("proxy", 8080)

    assert (playbook >> ConnectionClosed(tctx.client) << CloseConnection(
        tctx.client))
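This test distinguishes two schemes and three redirect behaviours (no redirect, new destination, new upstream proxy). Stacked pytest parametrize decorators produce the full 2x3 matrix; the decorators below are a hypothetical reconstruction, not taken from the excerpt, with an empty string standing in for "no redirect" to match the falsy check above:

import pytest


@pytest.mark.parametrize("redirect", ["", "change-destination", "change-proxy"])
@pytest.mark.parametrize("scheme", ["http", "https"])
def test_upstream_proxy_matrix(tctx, redirect, scheme):
    test_upstream_proxy(tctx, redirect, scheme)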
Example #5
def test_request_streaming(tctx, response):
    """
    Test HTTP request streaming

    This is a bit more contrived as we may receive server data while we are still sending the request.
    """
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))

    def enable_streaming(flow: HTTPFlow):
        flow.request.stream = lambda x: x.upper()

    assert (playbook >> DataReceived(
        tctx.client, b"POST http://example.com/ HTTP/1.1\r\n"
        b"Host: example.com\r\n"
        b"Content-Length: 6\r\n\r\n"
        b"abc") << http.HttpRequestHeadersHook(flow) >>
            reply(side_effect=enable_streaming) << http.HttpRequestHook(flow)
            >> reply() << OpenConnection(server) >> reply(None) << SendData(
                server, b"POST / HTTP/1.1\r\n"
                b"Host: example.com\r\n"
                b"Content-Length: 6\r\n\r\n"
                b"ABC"))
    if response == "normal response":
        assert (
            playbook >> DataReceived(tctx.client, b"def") << SendData(
                server, b"DEF") >> DataReceived(
                    server, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n") <<
            http.HttpResponseHeadersHook(flow) >> reply() <<
            http.HttpResponseHook(flow) >> reply() << SendData(
                tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n"))
    elif response == "early response":
        # We may receive a response before we have finished sending our request.
        # We continue sending unless the server closes the connection.
        # https://tools.ietf.org/html/rfc7231#section-6.5.11
        assert (playbook >> DataReceived(
            server,
            b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n"
        ) << http.HttpResponseHeadersHook(flow) >> reply(
        ) << http.HttpResponseHook(flow) >> reply() << SendData(
            tctx.client,
            b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n"
        ) >> DataReceived(tctx.client, b"def") << SendData(
            server, b"DEF")  # Important: no request hook here!
                )
    elif response == "early close":
        assert (playbook >> DataReceived(
            server,
            b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n"
        ) << http.HttpResponseHeadersHook(flow) >> reply(
        ) << http.HttpResponseHook(flow) >> reply() << SendData(
            tctx.client,
            b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n"
        ) >> ConnectionClosed(server) << CloseConnection(server) <<
                CloseConnection(tctx.client))
    elif response == "early kill":
        err = Placeholder(bytes)
        assert (playbook >> ConnectionClosed(server) << CloseConnection(server)
                << http.HttpErrorHook(flow) >> reply() << SendData(
                    tctx.client, err) << CloseConnection(tctx.client))
        assert b"502 Bad Gateway" in err()
    else:  # pragma: no cover
        assert False
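The enable_streaming side effect above shows the two forms flow.request.stream takes in these tests: True for pass-through streaming, or a callable applied to every streamed chunk (here an uppercasing lambda, which is why the proxied body becomes "ABC"/"DEF"). A hypothetical variant callback, assuming HTTPFlow is imported as in the surrounding module:

def enable_redacting_stream(flow: HTTPFlow):
    # Any callable assigned to flow.request.stream receives each request body
    # chunk as bytes and returns the (possibly rewritten) bytes to forward.
    flow.request.stream = lambda chunk: chunk.replace(b"secret", b"******")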
Example #6
def test_http2_client_aborts(tctx, stream, when, how):
    """
    Test handling of the case where a client aborts during request or response transmission.

    If the client aborts the request transmission, we must trigger an error hook;
    if the client disconnects during response transmission, no error hook is triggered.
    """
    server = Placeholder(Server)
    flow = Placeholder(HTTPFlow)
    playbook, cff = start_h2_client(tctx)
    resp = Placeholder(bytes)

    def enable_request_streaming(flow: HTTPFlow):
        flow.request.stream = True

    def enable_response_streaming(flow: HTTPFlow):
        flow.response.stream = True

    assert (playbook >> DataReceived(
        tctx.client,
        cff.build_headers_frame(example_request_headers).serialize()) <<
            http.HttpRequestHeadersHook(flow))
    if stream and when == "request":
        assert (playbook >> reply(side_effect=enable_request_streaming) <<
                http.HttpRequestHook(flow) >> reply() << OpenConnection(server)
                >> reply(None) << SendData(
                    server, b"GET / HTTP/1.1\r\n"
                    b"Host: example.com\r\n\r\n"))
    else:
        assert playbook >> reply()

    if when == "request":
        if "RST" in how:
            playbook >> DataReceived(
                tctx.client,
                cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
        else:
            playbook >> ConnectionClosed(tctx.client)
            playbook << CloseConnection(tctx.client)

        if stream:
            playbook << CloseConnection(server)
        playbook << http.HttpErrorHook(flow)
        playbook >> reply()

        if how == "RST+disconnect":
            playbook >> ConnectionClosed(tctx.client)
            playbook << CloseConnection(tctx.client)

        assert playbook
        assert "stream reset" in flow(
        ).error.msg or "peer closed connection" in flow().error.msg
        return

    assert (playbook >> DataReceived(
        tctx.client,
        cff.build_data_frame(b"", flags=["END_STREAM"]).serialize()) <<
            http.HttpRequestHook(flow) >> reply() << OpenConnection(server) >>
            reply(None) << SendData(
                server, b"GET / HTTP/1.1\r\n"
                b"Host: example.com\r\n\r\n") >> DataReceived(
                    server, b"HTTP/1.1 200 OK\r\nContent-Length: 6\r\n\r\n123")
            << http.HttpResponseHeadersHook(flow))
    if stream:
        assert (playbook >> reply(side_effect=enable_response_streaming) <<
                SendData(tctx.client, resp))
    else:
        assert playbook >> reply()

    if "RST" in how:
        playbook >> DataReceived(
            tctx.client,
            cff.build_rst_stream_frame(1, ErrorCodes.CANCEL).serialize())
    else:
        playbook >> ConnectionClosed(tctx.client)
        playbook << CloseConnection(tctx.client)

    assert (playbook << CloseConnection(server) << http.HttpErrorHook(flow) >>
            reply())

    if how == "RST+disconnect":
        assert (playbook >> ConnectionClosed(tctx.client) << CloseConnection(
            tctx.client))

    if "RST" in how:
        assert "stream reset" in flow().error.msg
    else:
        assert "peer closed connection" in flow().error.msg