Example #1
def test_headers_without_deadline():
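    # Request.to_headers() should emit the pseudo-headers, the gRPC-specific
    # headers and the custom metadata in this exact order, and must not add
    # a grpc-timeout header when no deadline is set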
    metadata = Metadata([('chagga', 'chrome')])

    assert Request(
        'flysch', 'plains', 'slaps', content_type='pemako', metadata=metadata,
    ).to_headers() == [
        (':method', 'flysch'),
        (':scheme', 'plains'),
        (':path', 'slaps'),
        ('te', 'trailers'),
        ('content-type', 'pemako'),
        ('chagga', 'chrome'),
    ]

    assert Request(
        'flysch', 'plains', 'slaps', authority='sleev', content_type='pemako',
        message_type='deltic', message_encoding='eutexia',
        message_accept_encoding='glyptic', user_agent='chrisom',
        metadata=metadata,
    ).to_headers() == [
        (':method', 'flysch'),
        (':scheme', 'plains'),
        (':path', 'slaps'),
        (':authority', 'sleev'),
        ('te', 'trailers'),
        ('content-type', 'pemako'),
        ('grpc-message-type', 'deltic'),
        ('grpc-encoding', 'eutexia'),
        ('grpc-accept-encoding', 'glyptic'),
        ('user-agent', 'chrisom'),
        ('chagga', 'chrome'),
    ]
Example #2
async def test_send_trailing_metadata_on_closed_stream(loop):
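    # the server-side stream should still be able to send trailing metadata
    # after the client has half-closed the stream with END_STREAM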
    client_h2c, server_h2c = create_connections()

    to_client_transport = TransportStub(client_h2c)
    to_server_transport = TransportStub(server_h2c)

    client_conn = Connection(client_h2c, to_server_transport, loop=loop)
    server_conn = Connection(server_h2c, to_client_transport, loop=loop)

    server_proc = EventsProcessor(DummyHandler(), server_conn)
    client_proc = EventsProcessor(DummyHandler(), client_conn)

    request = Request(method='POST', scheme='http', path='/',
                      content_type='application/grpc+proto',
                      authority='test.com')
    client_h2_stream = client_conn.create_stream()
    await client_h2_stream.send_request(request.to_headers(),
                                        _processor=client_proc)

    request = DummyRequest(value='ping')
    await send_message(client_h2_stream, ProtoCodec(), request, DummyRequest,
                       end=True)
    to_server_transport.process(server_proc)

    server_h2_stream = server_proc.handler.stream
    request_metadata = decode_metadata(server_proc.handler.headers)

    send_trailing_metadata_done = False
    async with Stream(server_h2_stream, Cardinality.UNARY_UNARY, ProtoCodec(),
                      DummyRequest, DummyReply,
                      metadata=request_metadata) as server_stream:
        await server_stream.send_trailing_metadata(status=Status.UNKNOWN)
        send_trailing_metadata_done = True

    assert send_trailing_metadata_done
Example #3
async def test_exit_and_connection_was_closed(loop):
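    # an error raised inside the server-side Stream block should be
    # suppressed on exit when the client has already closed the connection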
    client_h2c, server_h2c = create_connections()

    to_client_transport = TransportStub(client_h2c)
    to_server_transport = TransportStub(server_h2c)

    client_conn = Connection(client_h2c, to_server_transport, loop=loop)
    server_conn = Connection(server_h2c, to_client_transport, loop=loop)

    server_proc = EventsProcessor(DummyHandler(), server_conn)
    client_proc = EventsProcessor(DummyHandler(), client_conn)

    request = Request('POST', 'http', '/',
                      content_type='application/grpc+proto',
                      authority='test.com')
    client_h2_stream = client_conn.create_stream()
    await client_h2_stream.send_request(request.to_headers(),
                                        _processor=client_proc)

    request = DummyRequest(value='ping')
    await send_message(client_h2_stream, ProtoCodec(), request, DummyRequest,
                       end=True)
    to_server_transport.process(server_proc)

    server_h2_stream = server_proc.handler.stream
    request_metadata = Metadata.from_headers(server_proc.handler.headers)

    async with Stream(server_h2_stream, Cardinality.UNARY_UNARY, ProtoCodec(),
                      DummyRequest, DummyReply,
                      metadata=request_metadata) as server_stream:
        await server_stream.recv_message()
        client_h2c.close_connection()
        to_server_transport.process(server_proc)

        raise ServerError()  # should be suppressed
Example #4
async def test_send_headers_into_closed_stream(loop):
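    # sending headers on a stream that the h2 connection no longer tracks
    # should raise StreamClosedError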
    client_h2c, server_h2c = create_connections()

    to_client_transport = TransportStub(client_h2c)
    server_conn = Connection(server_h2c, to_client_transport, loop=loop)

    to_server_transport = TransportStub(server_h2c)
    client_conn = Connection(client_h2c, to_server_transport, loop=loop)

    client_processor = EventsProcessor(DummyHandler(), client_conn)
    client_stream = client_conn.create_stream()

    server_processor = EventsProcessor(DummyHandler(), server_conn)

    request = Request(method='POST',
                      scheme='http',
                      path='/',
                      content_type='application/grpc+proto',
                      authority='test.com')
    await client_stream.send_request(request.to_headers(),
                                     _processor=client_processor)

    to_server_transport.process(server_processor)

    server_stream, = server_processor.streams.values()
    server_stream._h2_connection.streams.pop(server_stream.id)
    with pytest.raises(StreamClosedError):
        await server_stream.send_headers([(':status', '200')])
Example #5
async def test_exit_and_connection_was_broken(loop):
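    # a WriteError raised by the transport while the server-side Stream
    # context manager exits should propagate to the caller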
    client_h2c, server_h2c = create_connections()

    to_client_transport = TransportStub(client_h2c)
    to_server_transport = TransportStub(server_h2c)

    client_conn = Connection(client_h2c, to_server_transport, loop=loop)
    server_conn = Connection(server_h2c, to_client_transport, loop=loop)

    server_proc = EventsProcessor(DummyHandler(), server_conn)
    client_proc = EventsProcessor(DummyHandler(), client_conn)

    request = Request(method='POST', scheme='http', path='/',
                      content_type='application/grpc+proto',
                      authority='test.com')
    client_h2_stream = client_conn.create_stream()
    await client_h2_stream.send_request(request.to_headers(),
                                        _processor=client_proc)

    request = DummyRequest(value='ping')
    await send_message(client_h2_stream, ProtoCodec(), request, DummyRequest,
                       end=True)
    to_server_transport.process(server_proc)

    server_h2_stream = server_proc.handler.stream
    request_metadata = decode_metadata(server_proc.handler.headers)

    with pytest.raises(WriteError):
        async with Stream(server_h2_stream, Cardinality.UNARY_UNARY,
                          ProtoCodec(), DummyRequest, DummyReply,
                          metadata=request_metadata) as server_stream:
            await server_stream.recv_message()

            # simulate broken connection
            to_client_transport.__raise_on_write__(WriteError)
Example #6
async def test_exit_and_connection_was_broken(loop):
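    # variant of the previous example (Stream/send_message signatures without
    # an explicit codec): a transport WriteError raised while the server-side
    # Stream exits should propagate to the caller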
    client_h2c, server_h2c = create_connections()

    to_client_transport = TransportStub(client_h2c)
    to_server_transport = TransportStub(server_h2c)

    client_conn = Connection(client_h2c, to_server_transport, loop=loop)
    server_conn = Connection(server_h2c, to_client_transport, loop=loop)

    server_proc = EventsProcessor(DummyHandler(), server_conn)
    client_proc = EventsProcessor(DummyHandler(), client_conn)

    request = Request('POST', 'http', '/', authority='test.com')
    client_h2_stream = client_conn.create_stream()
    await client_h2_stream.send_request(request.to_headers(),
                                        _processor=client_proc)

    request = SavoysRequest(kyler='cloth')
    await send_message(client_h2_stream, request, SavoysRequest, end=True)
    to_server_transport.process(server_proc)

    server_h2_stream = server_proc.handler.stream
    request_metadata = Metadata.from_headers(server_proc.handler.headers)

    with pytest.raises(WriteError):
        async with Stream(server_h2_stream,
                          Cardinality.UNARY_UNARY,
                          SavoysRequest,
                          SavoysReply,
                          metadata=request_metadata) as server_stream:
            await server_stream.recv_message()

            # simulate broken connection
            to_client_transport.__raise_on_write__(WriteError)
Example #7
async def test_recv_data_larger_than_window_size(loop):
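    # when a message is larger than the flow-control window, the server
    # should acknowledge the partial data so the client can send the rest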
    client_h2c, server_h2c = create_connections()

    to_client_transport = TransportStub(client_h2c)
    server_conn = Connection(server_h2c, to_client_transport, loop=loop)

    to_server_transport = TransportStub(server_h2c)
    client_conn = Connection(client_h2c, to_server_transport, loop=loop)

    client_processor = EventsProcessor(DummyHandler(), client_conn)
    client_stream = client_conn.create_stream()

    request = Request(method='POST',
                      scheme='http',
                      path='/',
                      content_type='application/grpc+proto',
                      authority='test.com')
    await client_stream.send_request(request.to_headers(),
                                     _processor=client_processor)

    initial_window = server_h2c.local_settings.initial_window_size
    assert (client_h2c.local_flow_control_window(
        client_stream.id) == initial_window)

    # data should be bigger than window size
    data = b'0' * (initial_window + 1)
    size = len(data)

    # sending less than a full message
    await client_stream.send_data(data[:initial_window - 1])

    # let the server process its events
    server_processor = EventsProcessor(DummyHandler(), server_conn)
    for event in to_server_transport.events():
        server_processor.process(event)

    # checking window size was decreased
    assert client_h2c.local_flow_control_window(client_stream.id) == 1

    # simulate that the server is waiting for the size of a message and
    # should acknowledge that size as soon as it is received
    server_stream, = server_processor.streams.values()
    recv_task = loop.create_task(server_stream.recv_data(size))
    await asyncio.wait([recv_task], timeout=.01, loop=loop)
    assert server_stream.__buffer__._read_size == size
    assert server_stream.__buffer__._size == initial_window - 1

    # check that the server acknowledged the partially received data
    assert client_h2c.local_flow_control_window(client_stream.id) > 1

    # send the remaining data; recv_task should now finish
    await client_stream.send_data(data[initial_window - 1:])
    for event in to_server_transport.events():
        server_processor.process(event)
    await asyncio.wait_for(recv_task, 0.01, loop=loop)
    assert server_stream.__buffer__._size == 0
Example #8
async def test_stream_release(loop):
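    # streams should stay registered in their EventsProcessor while the
    # request is in flight and disappear once each side releases its stream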
    client_h2c, server_h2c = create_connections()

    to_client_transport = TransportStub(client_h2c)
    server_conn = Connection(server_h2c, to_client_transport, loop=loop)

    to_server_transport = TransportStub(server_h2c)
    client_conn = Connection(client_h2c, to_server_transport, loop=loop)

    client_processor = EventsProcessor(DummyHandler(), client_conn)
    client_stream = client_conn.create_stream()

    server_processor = EventsProcessor(DummyHandler(), server_conn)

    request = Request(method='POST',
                      scheme='http',
                      path='/',
                      content_type='application/grpc+proto',
                      authority='test.com')

    assert not client_processor.streams
    client_release_stream = await client_stream.send_request(
        request.to_headers(),
        _processor=client_processor,
    )
    assert client_release_stream and client_processor.streams

    # sending data and closing stream on the client-side
    msg = b'message'
    await client_stream.send_data(msg, end_stream=True)
    events1 = to_server_transport.process(server_processor)
    assert any(isinstance(e, StreamEnded) for e in events1), events1

    # intentionally send a stream-specific frame after the stream was
    # half-closed
    client_h2c.increment_flow_control_window(10, stream_id=client_stream.id)
    client_conn.flush()
    events2 = to_server_transport.process(server_processor)
    assert any(isinstance(e, WindowUpdated) for e in events2), events2

    server_stream, = server_processor.streams.values()
    await server_stream.recv_data(len(msg))
    await server_stream.end()

    events3 = to_client_transport.process(client_processor)
    assert any(isinstance(e, StreamEnded) for e in events3), events3

    # simulating request handler exit by releasing server-side stream
    server_processor.handler.release_stream()
    assert not server_processor.streams

    # simulating call exit by releasing client-side stream
    assert client_processor.streams
    client_release_stream()
    assert not client_processor.streams
Example #9
async def test_send_data_larger_than_frame_size(loop):
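    # sending a payload larger than max_outbound_frame_size should succeed;
    # it is expected to be split into several DATA frames internally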
    client_h2c, server_h2c = create_connections()

    transport = TransportStub(server_h2c)
    conn = Connection(client_h2c, transport, loop=loop)
    stream = conn.create_stream()

    request = Request('POST', 'http', '/', authority='test.com')
    processor = EventsProcessor(DummyHandler(), conn)

    await stream.send_request(request.to_headers(), _processor=processor)
    await stream.send_data(b'0' * (client_h2c.max_outbound_frame_size + 1))
Example #10
def test_headers_with_deadline():
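    # with 0.1s remaining on the deadline, to_headers() must place
    # 'grpc-timeout: 100m' right after the pseudo-headers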
    deadline = Mock()
    deadline.time_remaining.return_value = 0.1

    metadata = Metadata([('dominic', 'lovech')])

    assert Request(
        'briana',
        'dismal',
        'dost',
        authority='lemnos',
        content_type='gazebos',
        metadata=metadata,
        deadline=deadline,
    ).to_headers() == [
        (':method', 'briana'),
        (':scheme', 'dismal'),
        (':path', 'dost'),
        (':authority', 'lemnos'),
        ('grpc-timeout', '100m'),
        ('te', 'trailers'),
        ('content-type', 'gazebos'),
        ('dominic', 'lovech'),
    ]

    assert Request(
        'briana',
        'dismal',
        'dost',
        authority='edges',
        content_type='gazebos',
        message_type='dobson',
        message_encoding='patera',
        message_accept_encoding='shakers',
        user_agent='dowlin',
        metadata=metadata,
        deadline=deadline,
    ).to_headers() == [
        (':method', 'briana'),
        (':scheme', 'dismal'),
        (':path', 'dost'),
        (':authority', 'edges'),
        ('grpc-timeout', '100m'),
        ('te', 'trailers'),
        ('content-type', 'gazebos'),
        ('grpc-message-type', 'dobson'),
        ('grpc-encoding', 'patera'),
        ('grpc-accept-encoding', 'shakers'),
        ('user-agent', 'dowlin'),
        ('dominic', 'lovech'),
    ]
Example #11
def _broken_stream():
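    # helper: a client Stream whose channel raises IOError on connect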
    class BrokenChannel:
        def __connect__(self):
            raise IOError('Intentionally broken connection')

    request = Request('POST', 'http', '/foo/bar', authority='test.com')
    return Stream(BrokenChannel(), request, SavoysRequest, SavoysReply)
Example #12
def _broken_stream():
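    # helper: same broken-channel Stream, but the Request is built with
    # content_type instead of authority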
    class BrokenChannel:
        def __connect__(self):
            raise IOError('Intentionally broken connection')

    request = Request('POST', 'http', '/foo/bar', content_type=CONTENT_TYPE)
    return Stream(BrokenChannel(), request, SavoysRequest, SavoysReply)
Example #13
def _stub(loop):
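    # helper: wires a client Stream, a ServerStub and a ChannelStub around
    # a single in-memory H2Protocol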
    protocol = H2Protocol(Handler(),
                          H2Configuration(header_encoding='utf-8'),
                          loop=loop)
    channel = ChannelStub(protocol)
    request = Request('POST', 'http', '/foo/bar', authority='test.com')
    stream = Stream(channel, request, SavoysRequest, SavoysReply)
    server = ServerStub(protocol)
    return Stub(stream, server, channel)
Example #14
async def test_initial_window_size_update(loop):
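    # send_data should block while the flow-control window is exhausted and
    # finish once the server raises INITIAL_WINDOW_SIZE and the connection
    # window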
    client_h2c, server_h2c = create_connections()

    to_client_transport = TransportStub(client_h2c)
    server_conn = Connection(server_h2c, to_client_transport, loop=loop)

    to_server_transport = TransportStub(server_h2c)
    client_conn = Connection(client_h2c, to_server_transport, loop=loop)

    client_processor = EventsProcessor(DummyHandler(), client_conn)
    client_stream = client_conn.create_stream()

    request = Request(method='POST',
                      scheme='http',
                      path='/',
                      content_type='application/grpc+proto',
                      authority='test.com')
    await client_stream.send_request(request.to_headers(),
                                     _processor=client_processor)

    # data should be bigger than window size
    initial_window = server_h2c.local_settings.initial_window_size
    data = b'0' * (initial_window + 1)

    assert (client_h2c.local_flow_control_window(
        client_stream.id) == initial_window)

    # send_data should wait until the settings/window are updated
    send_task = loop.create_task(client_stream.send_data(data))
    await asyncio.wait([send_task], timeout=0.01)

    assert client_h2c.local_flow_control_window(client_stream.id) == 0

    # update the settings and window; this should increase the stream window size
    server_h2c.update_settings(
        {SettingCodes.INITIAL_WINDOW_SIZE: initial_window + 1})
    server_h2c.increment_flow_control_window(1, stream_id=None)
    server_conn.flush()
    to_client_transport.process(client_processor)

    assert client_h2c.local_flow_control_window(client_stream.id) == 1
    await asyncio.wait([send_task], timeout=0.01)

    assert send_task.done()
Example #15
def _stream(stub):
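    # helper: a Stream over a mocked protocol whose create_stream()
    # returns the provided stub, so no real connection is made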
    stream_mock = Mock()
    stream_mock.processor.create_stream.return_value = stub

    class Channel:
        async def __connect__(self):
            return stream_mock

    request = Request('POST', 'http', '/foo/bar', content_type=CONTENT_TYPE)
    return Stream(Channel(), request, SavoysRequest, SavoysReply)
Example #16
async def test_outbound_streams_limit(stub, loop):
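    # with MAX_CONCURRENT_STREAMS=1 only one outbound stream may be open at
    # a time, so the second call has to wait until the first one completes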
    stub.server.connection.update_settings({
        SettingCodes.MAX_CONCURRENT_STREAMS: 1,
    })
    stub.server.flush()

    request = Request('POST',
                      'http',
                      '/foo/bar',
                      content_type=CONTENT_TYPE,
                      authority='test.com')

    async def worker1():
        s1 = Stream(stub.channel, request, SavoysRequest, SavoysReply)
        async with s1:
            await s1.send_message(SavoysRequest(kyler='bhatta'), end=True)
            assert await s1.recv_message() == SavoysReply(benito='giselle')

    async def worker2():
        s2 = Stream(stub.channel, request, SavoysRequest, SavoysReply)
        async with s2:
            await s2.send_message(SavoysRequest(kyler='bhatta'), end=True)
            assert await s2.recv_message() == SavoysReply(benito='giselle')

    def send_response(stream_id):
        stub.server.connection.send_headers(
            stream_id,
            [(':status', '200'), ('content-type', CONTENT_TYPE)],
        )
        stub.server.connection.send_data(
            stream_id,
            encode_message(SavoysReply(benito='giselle')),
        )
        stub.server.connection.send_headers(
            stream_id,
            [('grpc-status', str(Status.OK.value))],
            end_stream=True,
        )
        stub.server.flush()

    w1 = loop.create_task(worker1())
    w2 = loop.create_task(worker2())

    done, pending = await asyncio.wait([w1, w2], loop=loop, timeout=0.001)
    assert not done and pending == {w1, w2}

    send_response(1)
    await asyncio.wait_for(w1, 0.1, loop=loop)

    send_response(3)
    await asyncio.wait_for(w2, 0.1, loop=loop)
Example #17
async def test_connection_error():
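    # an IOError raised in __connect__ should propagate out of the Stream
    # context manager with its original message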
    request = Request('POST', 'http', '/foo/bar',
                      content_type='application/grpc+proto',
                      authority='test.com')

    class BrokenChannel:
        def __connect__(self):
            raise IOError('Intentionally broken connection')

    stream = Stream(BrokenChannel(), request, ProtoCodec(),
                    DummyRequest, DummyReply)

    with pytest.raises(IOError) as err:
        async with stream:
            await stream.send_request()
    err.match('Intentionally broken connection')