Code Example #1
File: test_app.py Project: Extintor/kafkaesk
    async def test_configure_kafka_consumer(self):
        app = kafkaesk.Application(
            kafka_settings={
                "max_partition_fetch_bytes": 100,
                "fetch_max_wait_ms": 100,
                "metadata_max_age_ms": 100,
                # invalid for consumer so should not be applied here
                "max_batch_size": 100,
            }
        )
        # verify it is created correctly
        app.consumer_factory(group_id="foobar")

        # now, validate the wiring
        with patch("kafkaesk.app.aiokafka.AIOKafkaConsumer") as mock:
            app.consumer_factory(group_id="foobar")
            mock.assert_called_with(
                bootstrap_servers=None,
                loop=ANY,
                group_id="foobar",
                api_version="auto",
                enable_auto_commit=False,
                max_partition_fetch_bytes=100,
                fetch_max_wait_ms=100,
                metadata_max_age_ms=100,
            )
Code Example #2
File: test_app.py Project: onna/kafkaesk
    async def test_publish_injects_tracing(self):
        app = kafkaesk.Application(kafka_servers=["foo"])
        producer = AsyncMock()
        producer.send.return_value = fut = asyncio.Future()
        fut.set_result("ok")
        app._get_producer = AsyncMock(return_value=producer)
        config = Config(
            config={
                "sampler": {
                    "type": "const",
                    "param": 1
                },
                "logging": True,
                "propagation": "b3"
            },
            service_name="test_service",
            scope_manager=ContextVarsScopeManager(),
        )
        # this call also sets opentracing.tracer
        tracer = config.initialize_tracer()

        span = tracer.start_span(operation_name="dummy")
        tracer.scope_manager.activate(span, True)

        future = await app.raw_publish("foobar", b"foobar")
        await future

        headers = producer.mock_calls[0].kwargs["headers"]
        assert str(span).startswith(headers[0][1].decode())
Code Example #3
File: test_app.py Project: onna/kafkaesk
    async def test_publish_propagates_headers(self):
        app = kafkaesk.Application(kafka_servers=["foo"])

        class Foo(pydantic.BaseModel):
            bar: str

        producer = AsyncMock()
        producer.send.return_value = fut = asyncio.Future()
        fut.set_result("ok")
        app._get_producer = AsyncMock(return_value=producer)
        app._topic_mng = MagicMock()
        app._topic_mng.get_topic_id.return_value = "foobar"
        app._topic_mng.topic_exists = AsyncMock(return_value=True)

        future = await app.publish("foobar",
                                   Foo(bar="foo"),
                                   headers=[("foo", b"bar")])
        _ = await future

        producer.send.assert_called_with(
            "foobar",
            value=b'{"schema":"Foo:1","data":{"bar":"foo"}}',
            key=None,
            headers=[("foo", b"bar")],
        )
Code Example #4
File: test_app.py Project: Extintor/kafkaesk
    def test_configure(self):
        app = kafkaesk.Application()
        app.configure(
            kafka_servers=["kafka_servers"],
            topic_prefix="topic_prefix",
            kafka_settings={"kafka_settings": "kafka_settings"},
            api_version="api_version",
            replication_factor="replication_factor",
        )
        assert app._kafka_servers == ["kafka_servers"]
        assert app._topic_prefix == "topic_prefix"
        assert app._kafka_settings == {"kafka_settings": "kafka_settings"}
        assert app._kafka_api_version == "api_version"
        assert app._replication_factor == "replication_factor"

        # now make sure None values do not overwrite the existing configuration
        app.configure(
            kafka_servers=None,
            topic_prefix=None,
            kafka_settings=None,
            api_version=None,
            replication_factor=None,
        )
        assert app._kafka_servers == ["kafka_servers"]
        assert app._topic_prefix == "topic_prefix"
        assert app._kafka_settings == {"kafka_settings": "kafka_settings"}
        assert app._kafka_api_version == "api_version"
        assert app._replication_factor == "replication_factor"
Code Example #5
async def app(kafka, topic_prefix):
    yield kafkaesk.Application(
        [f"{kafka[0]}:{kafka[1]}"],
        topic_prefix=topic_prefix,
        kafka_settings={
            "metadata_max_age_ms": 500,
        },
    )
Code Example #6
async def test_consume_every_message_once_during_rebalance(kafka, topic_prefix):
    """
    Even without reassignment, some messages can appear to be redelivered;
    this can sometimes be seen even with a single consumer and no rebalance.
    """
    consumed = {}

    def record_msg(record):
        key = f"{record.partition}-{record.offset}"
        if key not in consumed:
            consumed[key] = 0
        consumed[key] += 1

    apps = []
    for idx in range(5):
        app = kafkaesk.Application(
            [f"{kafka[0]}:{kafka[1]}"],
            topic_prefix=topic_prefix,
        )
        app.schema(streams=[TOPIC])(Foo)
        app.id = idx

        @app.subscribe(TOPIC, group=GROUP)
        async def consumer(ob: Foo, record, app):
            record_msg(record)

        await app.initialize()
        apps.append(app)

    consumer_tasks = []
    for app in apps:
        consumer_tasks.append(asyncio.create_task(app.consume_forever()))

    await asyncio.sleep(1)
    produce = asyncio.create_task(producer(apps[0], TOPIC))
    await asyncio.sleep(5)

    # cycle through each, destroying...
    for idx in range(5):
        await apps[idx].stop()
        await asyncio.sleep(1)
        assert consumer_tasks[idx].done()

        # start again
        consumer_tasks[idx] = asyncio.create_task(apps[idx].consume_forever())

    produce.cancel()

    for idx in range(5):
        await apps[idx].stop()

    assert len(consumed) > 100
    # now check that we always consumed a message only once

    for v in consumed.values():
        assert v == 1
Code Example #7
File: test_app.py Project: Extintor/kafkaesk
    async def test_consumer_health_check_raises_exception(self):
        app = kafkaesk.Application()
        subscription_consumer = kafkaesk.SubscriptionConsumer(
            app, kafkaesk.Subscription("foo", lambda: 1, "group")
        )
        app._subscription_consumers.append(subscription_consumer)
        subscription_consumer._consumer = AsyncMock()
        subscription_consumer._consumer._client.ready.return_value = False
        with pytest.raises(kafkaesk.exceptions.ConsumerUnhealthyException):
            await app.health_check()
Code Example #8
File: test_app.py Project: Extintor/kafkaesk
    async def test_publish_injects_tracing(self):
        app = kafkaesk.Application(kafka_servers=["foo"])
        producer = AsyncMock()
        app._get_producer = AsyncMock(return_value=producer)

        tracer = opentracing.global_tracer()
        tracer._scope_manager = ContextVarsScopeManager()

        tracer.scope_manager.activate("foobar", True)

        with patch.object(tracer, "inject") as mock:
            await app.raw_publish("foobar", b"foobar")
            mock.assert_called_once()
Code Example #9
File: test_handler.py Project: onna/kafkaesk
async def test_handler_initializes_applogger(kafka, logger):
    app = kafkaesk.Application(
        [f"{kafka[0]}:{kafka[1]}"],
        topic_prefix=uuid.uuid4().hex,
        kafka_settings={"metadata_max_age_ms": 500},
    )

    handler = PydanticKafkaeskHandler(app, "log.test")
    logger.addHandler(handler)

    logger.error("Hi!")

    await asyncio.sleep(0.1)
    assert app._initialized
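
In this test the logger comes from a pytest fixture. A minimal sketch of wiring the same handler onto a standard-library logger outside of a test might look like the following; the import path, broker address, logger name, and stream name are assumptions, and as in the async test above this relies on a running asyncio event loop:

import logging

import kafkaesk

# Import path is an assumption; the test above references
# PydanticKafkaeskHandler directly.
from kafkaesk.ext.logging.handler import PydanticKafkaeskHandler

app = kafkaesk.Application(["localhost:9092"])  # broker address is illustrative

logger = logging.getLogger("my.service")  # logger name is illustrative
logger.setLevel(logging.ERROR)
logger.addHandler(PydanticKafkaeskHandler(app, "log.test"))

# As in the test above, emitting a record lazily initializes the app and
# publishes the log entry to the "log.test" stream.
logger.error("Hi!")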
Code Example #10
File: test_app.py Project: onna/kafkaesk
    async def test_consumer_health_check_raises_exception(self):
        app = kafkaesk.Application()
        subscription = kafkaesk.Subscription("test_consumer",
                                             lambda record: 1,
                                             "group",
                                             topics=["foo"])

        subscription_consumer = kafkaesk.BatchConsumer(
            subscription=subscription,
            app=app,
        )
        app._subscription_consumers.append(subscription_consumer)
        subscription_consumer._consumer = AsyncMock()
        subscription_consumer._consumer._client.ready.return_value = False
        with pytest.raises(kafkaesk.exceptions.ConsumerUnhealthyException):
            await app.health_check()
Code Example #11
File: test_app.py Project: Extintor/kafkaesk
    async def test_publish_configured_retention_policy(self):
        app = kafkaesk.Application(kafka_servers=["foo"])

        @app.schema(retention=100)
        class Foo(pydantic.BaseModel):
            bar: str

        producer = AsyncMock()
        app._get_producer = AsyncMock(return_value=producer)
        app._topic_mng = MagicMock()
        app._topic_mng.get_topic_id.return_value = "foobar"
        app._topic_mng.topic_exists = AsyncMock(return_value=False)
        app._topic_mng.create_topic = AsyncMock()

        await app.publish("foobar", Foo(bar="foo"), headers={"foo": b"bar"})
        app._topic_mng.create_topic.assert_called_with(
            "foobar", replication_factor=None, retention_ms=100 * 1000
        )
Code Example #12
File: test_app.py Project: onna/kafkaesk
    async def test_configure_kafka_producer(self):
        app = kafkaesk.Application(
            kafka_settings={
                "metadata_max_age_ms": 100,
                "max_batch_size": 100,
                # invalid for producer so should not be applied here
                "max_partition_fetch_bytes": 100,
            })
        # verify it is created correctly
        app.producer_factory()

        # now, validate the wiring
        with patch("kafkaesk.app.aiokafka.AIOKafkaProducer") as mock:
            app.producer_factory()
            mock.assert_called_with(
                bootstrap_servers=None,
                loop=ANY,
                api_version="auto",
                metadata_max_age_ms=100,
                max_batch_size=100,
            )
Code Example #13
async def test_many_consumers_rebalancing(kafka, topic_prefix):
    apps = []
    for idx in range(5):
        app = kafkaesk.Application(
            [f"{kafka[0]}:{kafka[1]}"],
            topic_prefix=topic_prefix,
        )
        app.schema(streams=[TOPIC])(Foo)
        app.id = idx

        @app.subscribe(TOPIC, group=GROUP)
        async def consumer(ob: Foo, record, app):
            ...

        await app.initialize()
        apps.append(app)

    produce = asyncio.create_task(producer(apps[0], TOPIC))

    consumer_tasks = []
    for app in apps:
        consumer_tasks.append(asyncio.create_task(app.consume_forever()))

    await asyncio.sleep(5)

    # cycle through each, destroying...
    for idx in range(5):
        await apps[idx].stop()
        await asyncio.sleep(1)
        assert consumer_tasks[idx].done()

        # start again
        consumer_tasks[idx] = asyncio.create_task(apps[idx].consume_forever())

    produce.cancel()

    for idx in range(5):
        await apps[idx].stop()
Code Example #14
File: test_app.py Project: Extintor/kafkaesk
    async def test_consumer_health_check_producer_healthy(self):
        app = kafkaesk.Application()
        app._producer = MagicMock()
        app._producer._sender.sender_task.done.return_value = False
        await app.health_check()
Code Example #15
File: fixtures.py Project: dmanchon/kafkaesk
async def app(kafka):
    yield kafkaesk.Application(
        [f"{kafka[0]}:{kafka[1]}"],
        topic_prefix=uuid.uuid4().hex,
        kafka_settings={"metadata_max_age_ms": 500},
    )
Code Example #16
File: test_app.py Project: Extintor/kafkaesk
    async def test_consumer_health_check(self):
        app = kafkaesk.Application()
        subscription_consumer = AsyncMock()
        app._subscription_consumers.append(subscription_consumer)
        subscription_consumer.consumer._client.ready.return_value = True
        await app.health_check()
Code Example #17
File: app.py Project: tysongg/kafkaesk_log_generator
import kafkaesk
import os

kafka_servers = os.environ.get("KAFKA_SERVERS", "localhost:9092").split(",")

app = kafkaesk.Application(kafka_servers=kafka_servers)
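
This snippet only constructs the Application. A minimal sketch of how such an app might then register a schema, subscribe, and publish, composed from the APIs exercised in the tests above (app.schema, app.subscribe, app.publish, app.initialize, app.stop), could look like the following; the Log model, the "log.test" stream, and the "example_group" group are illustrative assumptions, not part of the project:

import asyncio
import os

import kafkaesk
import pydantic

kafka_servers = os.environ.get("KAFKA_SERVERS", "localhost:9092").split(",")
app = kafkaesk.Application(kafka_servers=kafka_servers)


# The Log model and the "log.test" stream are hypothetical names
# used only for illustration.
@app.schema(streams=["log.test"])
class Log(pydantic.BaseModel):
    message: str


@app.subscribe("log.test", group="example_group")
async def on_log(ob: Log, record, app):
    # Same callback signature as the rebalance tests above; actually running
    # this handler would require app.consume_forever().
    print(record.offset, ob.message)


async def main():
    await app.initialize()
    # publish() returns a future for the broker acknowledgement,
    # as seen in test_publish_propagates_headers above.
    fut = await app.publish("log.test", Log(message="hello"))
    await fut
    await app.stop()  # stop() is the shutdown call used in the tests above


if __name__ == "__main__":
    asyncio.run(main())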
Code Example #18
File: test_app.py Project: Extintor/kafkaesk
    async def test_consumer_health_check_producer_unhealthy(self):
        app = kafkaesk.Application()
        app._producer = MagicMock()
        app._producer._sender.sender_task.done.return_value = True
        with pytest.raises(kafkaesk.exceptions.ProducerUnhealthyException):
            await app.health_check()
Code Example #19
File: test_app.py Project: Extintor/kafkaesk
    async def test_initialize_with_unconfigured_app_raises_exception(self):
        app = kafkaesk.Application()
        with pytest.raises(kafkaesk.exceptions.AppNotConfiguredException):
            await app.initialize()