Example 1
def test_two_responses_chunked_in_parts():
    msg = b"HTTP/1.1 200 OK\r\n" + \
          b"Content-Type: text/plain; charset=utf-8\r\n" + \
          b"Transfer-Encoding: chunked\r\n" + \
          b"\r\n" + \
          b"4\r\n" + \
          b"Wiki\r\n" + \
          b"5\r\n" + \
          b"pedia\r\n" + \
          b"E\r\n" + \
          b" in\r\n" + \
          b"\r\n" + \
          b"chunks.\r\n" + \
          b"0\r\n" + \
          b"\r\n"

    msg = msg * 2
    parser = intialize_parser(get_http_request)

    msgs = chunks(msg, 15)
    parsed_messages = []

    for data in msgs:
        parsed_messages += parse(parser, data)

    assert len(parsed_messages) == 2
    for parsed_message in parsed_messages:
        assert parsed_message.headers[
            b'Content-Type'] == b"text/plain; charset=utf-8"
        assert parsed_message.body == b"Wikipedia in\r\n\r\nchunks."
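Several of these examples split the raw message with a `chunks` helper that is not shown on this page. A minimal sketch of what it is assumed to do (slice a bytes object into consecutive pieces of at most `size` bytes):

def chunks(data, size):
    # Yield consecutive slices of `data`, each at most `size` bytes long.
    for i in range(0, len(data), size):
        yield data[i:i + size]

With `chunks(msg, 15)` the parser therefore receives each message in 15-byte pieces, which is what the *_in_parts / *_in_pieces tests exercise.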
Example 2
def parse_message_pairs(stream: BufferedIOBase):
    parser = intialize_parser(parse_message_pair)

    data = stream.read(1024)
    while data:
        for rr in parse(parser, data):
            yield rr
        data = stream.read(1024)
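A possible way to drive this generator, assuming the captured traffic is already available as a bytes object (`captured_bytes` and `handle_pair` are placeholders, not names from the project):

import io

# io.BytesIO is a BufferedIOBase, so it can stand in for a real stream here.
with io.BytesIO(captured_bytes) as stream:
    for pair in parse_message_pairs(stream):
        handle_pair(pair)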
Example 3
def test_one_response_no_length():
    msg = b"HTTP/1.1 200 OK\r\n" + \
          b"Content-Type: text/plain; charset=utf-8\r\n" + \
          b"\r\n" + \
          b"abcd\r\n"

    parser = intialize_parser(get_http_request)
    parsed_messages = []
    parsed_messages += parse(parser, msg)
    # The message has neither Content-Length nor chunked encoding, so we must
    # explicitly close the parser; otherwise it cannot know where the body ends.
    parsed_messages += parse(parser, None)

    assert len(parsed_messages) == 1
    for parsed_message in parsed_messages:
        assert parsed_message.headers[
            b'Content-Type'] == b"text/plain; charset=utf-8"
        assert parsed_message.body == b"abcd\r\n"
Example 4
def test_two_requests_whole():
    msg = b"GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\nGET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n"

    parser = intialize_parser(get_http_request)
    parsed_messages = list(parse(parser, msg))

    assert len(parsed_messages) == 2
    for parsed_message in parsed_messages:
        assert parsed_message.headers[b'Host'] == b"www.example.com"
        assert parsed_message.body is None
        assert parsed_message.method == b"GET"
        assert parsed_message.path == b"/"
Example 5
def test_two_responses_whole():
    msg = b"HTTP/1.1 200 OK\r\n" + \
          b"Content-Type: text/plain; charset=utf-8\r\n" + \
          b"Content-Length: 6\r\n" + \
          b"\r\n" + \
          b"abcd\r\n"

    msg = msg * 2
    parser = intialize_parser(get_http_request)
    parsed_messages = list(parse(parser, msg))

    assert len(parsed_messages) == 2
    for parsed_message in parsed_messages:
        assert parsed_message.headers[
            b'Content-Type'] == b"text/plain; charset=utf-8"
        assert parsed_message.body == b"abcd\r\n"
Example 6
def test_two_requests_in_pieces():
    msg = b"GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\nGET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n"
    msgs = chunks(msg, 15)

    parser = intialize_parser(get_http_request)
    parsed_messages = []

    for data in msgs:
        parsed_messages += parse(parser, data)

    assert len(parsed_messages) == 2
    for parsed_message in parsed_messages:
        assert parsed_message.headers[b'Host'] == b"www.example.com"
        assert parsed_message.body is None
        assert parsed_message.method == b"GET"
        assert parsed_message.path == b"/"
Example 7
async def proxy_data(reader, writer, connection_string, pairer, processor):
    try:
        parser = intialize_parser(http_parser.get_http_request)
        while True:
            data = await reader.read(BUFFER_SIZE)

            for msg in parse(parser, data):
                msg = processor.process_message(msg)
                pairer.add_message(msg)
                # Use a distinct name for the outgoing bytes so the `data`
                # read above is not shadowed before the EOF check below.
                for out in msg.to_bytes():
                    writer.write(out)
                await writer.drain()

            if not data:
                break
    except Exception as e:
        logger.info('proxy_task exception {}'.format(e))
    finally:
        writer.close()
        logger.info('close connection {}'.format(connection_string))
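One plausible way this coroutine gets wired up, sketched here with asyncio streams; `MessagePairer`, `MessageProcessor`, `REMOTE_HOST` and `REMOTE_PORT` are placeholders for the project's real classes and configuration, which are not shown on this page:

import asyncio

async def handle_client(client_reader, client_writer):
    # Open the upstream connection and proxy each direction with its own task.
    remote_reader, remote_writer = await asyncio.open_connection(
        REMOTE_HOST, REMOTE_PORT)
    pairer = MessagePairer()
    processor = MessageProcessor()
    await asyncio.gather(
        proxy_data(client_reader, remote_writer,
                   'client -> remote', pairer, processor),
        proxy_data(remote_reader, client_writer,
                   'remote -> client', pairer, processor),
    )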
Example 8
def test_two_responses_in_pieces():
    msg = b"HTTP/1.1 200 OK\r\n" + \
          b"Content-Type: text/plain; charset=utf-8\r\n" + \
          b"Content-Length: 6\r\n" + \
          b"\r\n" + \
          b"abcd\r\n"

    msg = msg * 3
    parser = intialize_parser(get_http_request)
    msgs = chunks(msg, 15)
    parsed_messages = []

    for data in msgs:
        parsed_messages += parse(parser, data)

    assert len(parsed_messages) == 3
    for parsed_message in parsed_messages:
        assert parsed_message.headers[
            b'Content-Type'] == b"text/plain; charset=utf-8"
        assert parsed_message.body == b"abcd\r\n"
Example 9
def test_one_response_chunked_whole():
    msg = b"HTTP/1.1 200 OK\r\n" + \
          b"Content-Type: text/plain; charset=utf-8\r\n" + \
          b"Transfer-Encoding: chunked\r\n" + \
          b"\r\n" + \
          b"4\r\n" + \
          b"Wiki\r\n" + \
          b"5\r\n" + \
          b"pedia\r\n" + \
          b"E\r\n" + \
          b" in\r\n" + \
          b"\r\n" + \
          b"chunks.\r\n" + \
          b"0\r\n" + \
          b"\r\n"

    parser = intialize_parser(get_http_request)
    parsed_messages = list(parse(parser, msg))
    assert len(parsed_messages) == 1
    for parsed_message in parsed_messages:
        assert parsed_message.headers[
            b'Content-Type'] == b"text/plain; charset=utf-8"
        assert parsed_message.body == b"Wikipedia in\r\n\r\nchunks."