Example 1
def test_transport_registry_get_event_transports(redis_default_config):
    registry = TransportRegistry().load_config(redis_default_config)
    debug_transport = DebugEventTransport()
    redis_transport = RedisEventTransport(consumer_group_prefix="foo",
                                          consumer_name="bar")

    registry.set_event_transport("redis1", redis_transport)
    registry.set_event_transport("redis2", redis_transport)
    registry.set_event_transport("debug1", debug_transport)
    registry.set_event_transport("debug2", debug_transport)
    transports = registry.get_event_transports(
        ["default", "foo", "bar", "redis1", "redis2", "debug1", "debug2"])

    default_redis_transport = registry.get_event_transport("default")

    transports = dict(transports)
    assert set(
        transports[default_redis_transport]) == {"default", "foo", "bar"}
    assert set(transports[debug_transport]) == {"debug1", "debug2"}
    assert set(transports[redis_transport]) == {"redis1", "redis2"}
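For reference, get_event_transports() yields (transport, api_names) pairs grouped by transport instance, which is why the test can build a dict keyed on each instance. A minimal sketch reusing the registry from the test above:

# Sketch only: iterate the grouped pairs that the test converts to a dict.
for transport, api_names in registry.get_event_transports(["redis1", "redis2"]):
    print(type(transport).__name__, sorted(api_names))  # one pair; both names map to one instance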
Example 2
async def test_history_get_subset_multiple_batches(
    redis_event_transport: RedisEventTransport, redis_client
):
    message = EventMessage(native_id="", api_name="my_api", event_name="my_event")
    data = ByFieldMessageSerializer()(message)
    await redis_client.xadd("my_api.my_event:stream", data, message_id=b"1-0")
    await redis_client.xadd("my_api.my_event:stream", data, message_id=b"2-0")
    await redis_client.xadd("my_api.my_event:stream", data, message_id=b"3-0")
    await redis_client.xadd("my_api.my_event:stream", data, message_id=b"4-0")
    await redis_client.xadd("my_api.my_event:stream", data, message_id=b"5-0")

    messages = redis_event_transport.history(
        "my_api",
        "my_event",
        batch_size=2,
        start=datetime.fromtimestamp(0.002),
        stop=datetime.fromtimestamp(0.004),
    )
    message_ids = {m.native_id async for m in messages}
    assert message_ids == {"2-0", "3-0", "4-0"}
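Outside a test, history() can be consumed as a plain async iterator of EventMessage objects. A minimal usage sketch, assuming a configured RedisEventTransport (note that both the start and stop bounds behave inclusively in the test above):

from datetime import datetime

async def print_event_history(transport):
    # Sketch: iterate all events on my_api.my_event from the epoch until now.
    messages = transport.history(
        "my_api",
        "my_event",
        start=datetime.fromtimestamp(0),
        stop=datetime.now(),
        batch_size=100,
    )
    async for message in messages:
        print(message.native_id)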
Example 3
async def test_from_config(redis_client):
    await redis_client.select(5)
    host, port = redis_client.address
    transport = RedisEventTransport.from_config(
        config=Config.load_dict({}),
        url=f"redis://127.0.0.1:{port}/5",
        connection_parameters=dict(maxsize=123),
        batch_size=123,
        # Non-default serializers, even though they wouldn't make sense in this context
        serializer="lightbus.serializers.BlobMessageSerializer",
        deserializer="lightbus.serializers.BlobMessageDeserializer",
    )
    with await transport.connection_manager() as transport_client:
        assert transport_client.connection.address == ("127.0.0.1", port)
        assert transport_client.connection.db == 5
        await transport_client.set("x", 1)
        assert await redis_client.get("x")

    assert transport._redis_pool.connection.maxsize == 123
    assert isinstance(transport.serializer, BlobMessageSerializer)
    assert isinstance(transport.deserializer, BlobMessageDeserializer)
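The serializer/deserializer arguments take dotted import paths, which from_config() resolves and instantiates, as the final two isinstance() assertions show. Constructing them by hand is the equivalent (sketch):

from lightbus.serializers import BlobMessageSerializer, BlobMessageDeserializer

# The dotted paths passed to from_config() above resolve to these classes.
serializer = BlobMessageSerializer()
deserializer = BlobMessageDeserializer()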
Example 4
async def test_cleanup_group_deleted(redis_event_transport: RedisEventTransport, redis_client):
    """Test that a whole group gets deleted when all the consumers get cleaned up"""
    # Add a couple of messages for our consumers to fetch
    await redis_client.xadd("test_stream", {"noop": ""})
    await redis_client.xadd("test_stream", {"noop": ""})
    # Create the group
    await redis_client.xgroup_create("test_stream", "test_group", latest_id="0")

    # Create consumer test_consumer_1 by performing a read.
    await redis_client.xread_group(
        group_name="test_group",
        consumer_name="test_consumer_1",
        streams=["test_stream"],
        latest_ids=[">"],
        timeout=None,
        count=1,
    )

    # Create consumer test_consumer_2 by performing a read.
    await redis_client.xread_group(
        group_name="test_group",
        consumer_name="test_consumer_2",
        streams=["test_stream"],
        latest_ids=[">"],
        timeout=None,
        count=1,
    )

    # Wait a moment to force test_consumer_1 & test_consumer_2 to look old
    await asyncio.sleep(0.100)

    # Set a very low ttl
    redis_event_transport.consumer_ttl = 0.050

    # Run the cleanup
    await redis_event_transport._cleanup(stream_names=["test_stream"])

    groups = await redis_client.xinfo_groups("test_stream")
    assert len(groups) == 0, groups
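The idle time that _cleanup() compares against consumer_ttl can be inspected directly. A small sketch using the same xinfo_consumers call that appears in a later example (the b"idle" field is part of Redis's standard XINFO CONSUMERS reply):

async def show_consumer_idle(redis_client, stream, group):
    # Each entry is a mapping with bytes keys; Redis reports idle time in milliseconds.
    for consumer in await redis_client.xinfo_consumers(stream, group):
        print(consumer[b"name"], consumer[b"idle"], "ms idle")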
Example 5
async def test_reconnect_while_listening_dead_server(
        standalone_redis_server: StandaloneRedisServer, create_redis_pool,
        dummy_api, error_queue):
    redis_url = f"redis://127.0.0.1:{standalone_redis_server.port}/0"
    standalone_redis_server.start()

    redis_event_transport = RedisEventTransport(
        redis_pool=await create_redis_pool(address=redis_url),
        consumption_restart_delay=0.0001,
        service_name="test",
        consumer_name="test",
        stream_use=StreamUse.PER_EVENT,
    )

    async def co_enqueue():
        redis_client = await aioredis.create_redis(address=redis_url)
        try:
            while True:
                await asyncio.sleep(0.1)
                logging.info("test_reconnect_while_listening: Sending message")
                await redis_client.xadd(
                    "my.dummy.my_event:stream",
                    fields={
                        b"api_name": b"my.dummy",
                        b"event_name": b"my_event",
                        b"id": b"123",
                        b"version": b"1",
                        b":field": b'"value"',
                    },
                )
        finally:
            redis_client.close()

    total_messages = 0

    async def co_consume():
        nonlocal total_messages

        consumer = redis_event_transport.consume([("my.dummy", "my_event")],
                                                 "test_listener",
                                                 error_queue=error_queue)
        async for messages_ in consumer:
            total_messages += len(messages_)
            await redis_event_transport.acknowledge(*messages_)
            logging.info(
                f"Received {len(messages_)} messages. Total now at {total_messages}"
            )

    # Start enqueuing and consuming events
    enqueue_task = asyncio.ensure_future(co_enqueue())
    consume_task = asyncio.ensure_future(co_consume())

    await asyncio.sleep(0.2)
    assert total_messages > 0

    # Stop enqueuing and stop the server
    await cancel(enqueue_task)
    standalone_redis_server.stop()

    # We don't get any more messages
    total_messages = 0
    await asyncio.sleep(0.2)
    assert total_messages == 0

    try:
        # Now start the server again, and start emitting messages
        standalone_redis_server.start()
        enqueue_task = asyncio.ensure_future(co_enqueue())
        total_messages = 0
        await asyncio.sleep(0.2)
        # ... the consumer has auto-reconnected and received some messages
        assert total_messages > 0
    finally:
        await cancel(enqueue_task, consume_task)
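The cancel() helper used throughout these examples is lightbus's own. A minimal stand-in that behaves equivalently for these tests:

import asyncio

async def cancel(*tasks):
    # Stand-in sketch: request cancellation, then await each task, swallowing
    # the resulting CancelledError so callers can continue.
    for task in tasks:
        task.cancel()
    for task in tasks:
        try:
            await task
        except asyncio.CancelledError:
            pass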
Example 6
async def test_reclaim_pending_messages(redis_client, redis_pool, error_queue):
    """Test that unacked messages belonging to this consumer get reclaimed on startup"""

    # Add a message
    await redis_client.xadd(
        "my.dummy.my_event:stream",
        fields={
            b"api_name": b"my.dummy",
            b"event_name": b"my_event",
            b"id": b"123",
            b"version": b"1",
            b":field": b'"value"',
        },
    )
    # Create the consumer group
    await redis_client.xgroup_create(stream="my.dummy.my_event:stream",
                                     group_name="test_service-test_listener",
                                     latest_id="0")

    # Claim it in the name of ourselves
    await redis_client.xread_group(
        group_name="test_service-test_listener",
        consumer_name="good_consumer",
        streams=["my.dummy.my_event:stream"],
        latest_ids=[">"],
    )

    event_transport = RedisEventTransport(
        redis_pool=redis_pool,
        service_name="test_service",
        consumer_name="good_consumer",
        stream_use=StreamUse.PER_EVENT,
    )
    consumer = event_transport.consume(
        listen_for=[("my.dummy", "my_event")],
        since="0",
        listener_name="test_listener",
        error_queue=error_queue,
    )

    messages = []

    async def consume():
        async for messages_ in consumer:
            messages.extend(messages_)
            await event_transport.acknowledge(*messages_)

    task = asyncio.ensure_future(consume())
    await asyncio.sleep(0.1)
    await cancel(task)

    assert len(messages) == 1
    assert messages[0].api_name == "my.dummy"
    assert messages[0].event_name == "my_event"
    assert messages[0].kwargs == {"field": "value"}
    assert messages[0].native_id
    assert isinstance(messages[0].native_id, str)

    # Now check that redis believes the message has been consumed
    total_pending, *_ = await redis_client.xpending(
        stream="my.dummy.my_event:stream",
        group_name="test_service-test_listener")
    assert total_pending == 0
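The consume/sleep/cancel pattern above recurs in the remaining examples. Factored into a hypothetical helper (the name and signature are illustrative, not lightbus API):

import asyncio

async def collect_for(transport, consumer, duration=0.1):
    # Drain `consumer` for `duration` seconds, acknowledging each batch,
    # then cancel and return everything received.
    messages = []

    async def _consume():
        async for batch in consumer:
            messages.extend(batch)
            await transport.acknowledge(*batch)

    task = asyncio.ensure_future(_consume())
    await asyncio.sleep(duration)
    await cancel(task)  # lightbus's helper; a stand-in is sketched after Example 5
    return messages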
Example 7
async def test_reclaim_lost_messages_consume(redis_client, redis_pool,
                                             error_queue):
    """Test that messages which another consumer has timed out on can be reclaimed

    Unlike the above test, we call consume() here, not _reclaim_lost_messages()
    """

    # Add a message
    await redis_client.xadd(
        "my.dummy.my_event:stream",
        fields={
            b"api_name": b"my.dummy",
            b"event_name": b"my_event",
            b"id": b"123",
            b"version": b"1",
            b":field": b'"value"',
        },
    )
    # Create the consumer group
    await redis_client.xgroup_create(stream="my.dummy.my_event:stream",
                                     group_name="test_service-test_listener",
                                     latest_id="0")

    # Claim it in the name of another consumer
    await redis_client.xread_group(
        group_name="test_service-test_listener",
        consumer_name="bad_consumer",
        streams=["my.dummy.my_event:stream"],
        latest_ids=[">"],
    )
    # Sleep a moment to fake a short timeout
    await asyncio.sleep(0.1)

    event_transport = RedisEventTransport(
        redis_pool=redis_pool,
        service_name="test_service",
        consumer_name="good_consumer",
        acknowledgement_timeout=0.01,  # deliberately short for the sake of testing
        stream_use=StreamUse.PER_EVENT,
    )
    consumer = event_transport.consume(
        listen_for=[("my.dummy", "my_event")],
        since="0",
        listener_name="test_listener",
        error_queue=error_queue,
    )

    messages = []

    async def consume():
        async for messages_ in consumer:
            messages.extend(messages_)
            # Ack the messages, otherwise the message will get picked up in the
            # claiming (good) and then, because it hasn't been acked, get picked
            # up by the consume too (bad).
            await event_transport.acknowledge(*messages_)

    task = asyncio.ensure_future(consume())
    await asyncio.sleep(0.1)
    await cancel(task)

    assert len(messages) == 1
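In Redis terms, the test relies on the fact that a message read via XREADGROUP but never XACKed stays in the group's pending list, from where a consumer that has been idle too long can have it claimed away. A sketch of inspecting that state with the same xpending call used in the previous example:

async def show_pending(redis_client, stream, group):
    # XPENDING summary form: the first element of the reply is the total number
    # of read-but-unacknowledged messages in the group.
    total_pending, *_ = await redis_client.xpending(stream=stream, group_name=group)
    print(f"{total_pending} message(s) pending in {group}")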
Example 8
async def test_reclaim_lost_messages_different_event(redis_client: Redis,
                                                     redis_pool):
    """Test that messages which another consumer has timed out on can be reclaimed

    However, in this case we have a single stream for an entire API. The stream
    has a lost message for an event we are not listening for. In this case the
    event shouldn't be claimed
    """

    # Add a message
    await redis_client.xadd(
        "my.dummy.*:stream",
        fields={
            b"api_name": b"my.dummy",
            b"event_name": b"my_event",
            b"id": b"123",
            b"version": b"1",
            b":field": b'"value"',
        },
    )
    # Create the consumer group
    await redis_client.xgroup_create(stream="my.dummy.*:stream",
                                     group_name="test_service",
                                     latest_id="0")

    # Claim it in the name of another consumer
    result = await redis_client.xread_group(
        group_name="test_service",
        consumer_name="bad_consumer",
        streams=["my.dummy.*:stream"],
        latest_ids=[">"],
    )
    assert result, "Didn't actually manage to claim any message"

    # Sleep a moment to fake a short timeout
    await asyncio.sleep(0.1)

    event_transport = RedisEventTransport(
        redis_pool=redis_pool,
        service_name="test_service",
        consumer_name="good_consumer",
        acknowledgement_timeout=0.01,  # deliberately short for the sake of testing
        stream_use=StreamUse.PER_API,
    )
    reclaimer = event_transport._reclaim_lost_messages(
        stream_names=["my.dummy.*:stream"],
        consumer_group="test_service",
        expected_events={"another_event"},  # NOTE: this is NOT the event we created above
    )

    reclaimed_messages = []
    async for messages_ in reclaimer:
        reclaimed_messages.extend(messages_)
        for message in messages_:
            await event_transport.acknowledge(message)

    assert len(reclaimed_messages) == 0

    result = await redis_client.xinfo_consumers("my.dummy.*:stream",
                                                "test_service")
    consumer_info = {r[b"name"]: r for r in result}

    assert consumer_info[b"bad_consumer"][b"pending"] == 0
    assert consumer_info[b"good_consumer"][b"pending"] == 0
Example 9
async def test_consume_events_since_datetime(
    redis_event_transport: RedisEventTransport, redis_client, error_queue
):
    await redis_client.xadd(
        "my.dummy.my_event:stream",
        fields={
            b"api_name": b"my.dummy",
            b"event_name": b"my_event",
            b"id": b"123",
            b"version": b"1",
            b":field": b'"1"',
        },
        message_id="1515000001000-0",
    )
    await redis_client.xadd(
        "my.dummy.my_event:stream",
        fields={
            b"api_name": b"my.dummy",
            b"event_name": b"my_event",
            b"id": b"123",
            b"version": b"1",
            b":field": b'"2"',
        },
        message_id="1515000002000-0",
    )
    await redis_client.xadd(
        "my.dummy.my_event:stream",
        fields={
            b"api_name": b"my.dummy",
            b"event_name": b"my_event",
            b"id": b"123",
            b"version": b"1",
            b":field": b'"3"',
        },
        message_id="1515000003000-0",
    )

    # 1515000001500-0 -> 2018-01-03T17:20:01.500Z (500ms = 500,000 microseconds)
    since_datetime = datetime(2018, 1, 3, 17, 20, 1, 500000)
    consumer = redis_event_transport.consume(
        [("my.dummy", "my_event")],
        listener_name="test",
        since=since_datetime,
        forever=False,
        error_queue=error_queue,
    )

    events = []

    async def co():
        async for messages in consumer:
            events.extend(messages)
            await redis_event_transport.acknowledge(*messages)

    task = asyncio.ensure_future(co())
    await asyncio.sleep(0.1)
    await cancel(task)

    assert len(events) == 2
    assert events[0].kwargs["field"] == "2"
    assert events[1].kwargs["field"] == "3"
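Redis stream IDs have the form "<milliseconds-since-epoch>-<sequence>", so a `since` datetime ultimately boils down to a millisecond timestamp. An illustrative conversion (assuming UTC; not lightbus's exact code):

from datetime import datetime, timezone

def datetime_to_stream_id(dt):
    # Treat a naive datetime as UTC and take its millisecond timestamp,
    # matching the 1515000001500-0 -> 2018-01-03T17:20:01.500Z comment above.
    ms = int(dt.replace(tzinfo=timezone.utc).timestamp() * 1000)
    return f"{ms}-0"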
Example 10
async def test_consume_events_since_id(
    redis_event_transport: RedisEventTransport, redis_client, error_queue
):
    await redis_client.xadd(
        "my.dummy.my_event:stream",
        fields={
            b"api_name": b"my.dummy",
            b"event_name": b"my_event",
            b"id": b"123",
            b"version": b"1",
            b":field": b'"1"',
        },
        message_id="1515000001000-0",
    )
    await redis_client.xadd(
        "my.dummy.my_event:stream",
        fields={
            b"api_name": b"my.dummy",
            b"event_name": b"my_event",
            b"id": b"123",
            b"version": b"1",
            b":field": b'"2"',
        },
        message_id="1515000002000-0",
    )
    await redis_client.xadd(
        "my.dummy.my_event:stream",
        fields={
            b"api_name": b"my.dummy",
            b"event_name": b"my_event",
            b"id": b"123",
            b"version": b"1",
            b":field": b'"3"',
        },
        message_id="1515000003000-0",
    )

    consumer = redis_event_transport.consume(
        [("my.dummy", "my_event")],
        "cg",
        since="1515000001500-0",
        forever=False,
        error_queue=error_queue,
    )

    events = []

    async def co():
        async for messages in consumer:
            events.extend(messages)
            await redis_event_transport.acknowledge(*messages)

    task = asyncio.ensure_future(co())
    await asyncio.sleep(0.1)
    await cancel(task)

    message_ids = [m.native_id for m in events if isinstance(m, EventMessage)]
    assert len(message_ids) == 2
    assert len(events) == 2
    assert events[0].kwargs["field"] == "2"
    assert events[1].kwargs["field"] == "3"
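Same scenario as Example 9, but with `since` given directly as a stream ID rather than a datetime; both forms land on the same starting point. Checking the equivalence with the datetime_to_stream_id sketch after Example 9:

from datetime import datetime

# The `since` datetime in Example 9 and the `since` ID here are equivalent.
assert datetime_to_stream_id(datetime(2018, 1, 3, 17, 20, 1, 500000)) == "1515000001500-0"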