Example #1
async def test_get_registered_schema_missing():
    app = Application()

    class Foo(pydantic.BaseModel):
        bar: str

    assert app.get_schema_reg(Foo) is None
Example #2
    async def test_emit_swallow_ex(self):
        sub = SubscriptionConsumer(
            Application(),
            Subscription("foo", lambda record: 1, "group"),
            event_handlers={"event": [AsyncMock(side_effect=Exception)]},
        )
        await sub.emit("event", "foo", "bar")
Example #3
async def test_slow_messages(app: Application):
    consumed = []

    @app.schema("Slow", streams=["foo.bar"])
    class Slow(pydantic.BaseModel):
        latency: float

    @app.subscribe("foo.bar", group="test_group", concurrency=10, timeout_seconds=0.045)
    async def consumer(data: Slow, record: aiokafka.ConsumerRecord):
        try:
            await asyncio.sleep(data.latency)
            consumed.append(("ok", data.latency, record.topic))
        except asyncio.CancelledError:
            consumed.append(("cancelled", data.latency, record.topic))

    async with app:
        for idx in range(10):
            await app.publish("foo.bar", Slow(latency=idx * 0.01))
            await asyncio.sleep(0.01)
        await app.flush()

        fut = asyncio.create_task(app.consume_for(num_messages=8, seconds=2))
        await fut

        assert len([x for x in consumed if x[0] == "ok"]) == 5
        assert len([x for x in consumed if x[0] == "cancelled"]) == 5
Example #4
File: test_run.py  Project: onna/kafkaesk
async def test_run_exits_cleanly_while_consuming(kafka, topic_prefix):
    kserver = f"{kafka[0]}:{kafka[1]}"
    app = Application(
        [kserver],
        topic_prefix=topic_prefix,
    )
    async with app:
        pro = asyncio.create_task(producer(app, TOPIC))

        proc = await asyncio.create_subprocess_exec(
            "kafkaesk",
            "tests.acceptance.test_run:test_app",
            "--kafka-servers",
            kserver,
            "--topic-prefix",
            topic_prefix,
            # cwd=_test_dir,
        )

        await asyncio.sleep(5)
        pro.cancel()

        proc.send_signal(signal.SIGINT)
        await proc.wait()

        assert proc.returncode == 0

        results = await app.topic_mng.list_consumer_group_offsets(GROUP)
        topic_id = app.topic_mng.get_topic_id(TOPIC)
        count = 0
        for tp, pos in results.items():
            if tp.topic != topic_id:
                continue
            count += pos.offset
        assert count > 0
Example #5
async def test_do_not_require_schema_name():
    app = Application()

    @app.schema()
    class Foo(pydantic.BaseModel):
        bar: str

    assert "Foo:1" in app._schemas
Example #6
    async def test_auto_commit_can_be_disabled(self, subscription_conf):
        sub = BatchConsumer(
            subscription=subscription_conf,
            app=Application(kafka_servers=["foobar"]),
            auto_commit=False,
        )
        await sub._maybe_commit()
        assert sub._last_commit == 0
Example #7
    async def test_emit_swallow_ex(self, subscription_conf):
        sub = BatchConsumer(
            subscription=subscription_conf,
            app=Application(kafka_servers=["foobar"]),
            event_handlers={"event": [AsyncMock(side_effect=Exception)]},
        )

        await sub.emit("event", "foo", "bar")
Example #8
    async def test_emit_raises_stop(self):
        sub = SubscriptionConsumer(
            Application(),
            Subscription("foo", lambda record: 1, "group"),
            event_handlers={"event": [AsyncMock(side_effect=StopConsumer)]},
        )
        with pytest.raises(StopConsumer):
            await sub.emit("event", "foo", "bar")
Example #9
    async def test_emit(self):
        probe = AsyncMock()
        sub = SubscriptionConsumer(
            Application(),
            Subscription("foo", lambda record: 1, "group"),
            event_handlers={"event": [probe]},
        )
        await sub.emit("event", "foo", "bar")
        probe.assert_called_with("foo", "bar")
Example #10
    async def test_emit_raises_stop(self, subscription_conf):
        sub = BatchConsumer(
            subscription=subscription_conf,
            app=Application(kafka_servers=["foobar"]),
            event_handlers={"event": [AsyncMock(side_effect=StopConsumer)]},
        )

        with pytest.raises(StopConsumer):
            await sub.emit("event", "foo", "bar")
Example #11
    async def test_emit(self, subscription_conf):
        probe = AsyncMock()

        sub = BatchConsumer(
            subscription=subscription_conf,
            app=Application(kafka_servers=["foobar"]),
            event_handlers={"event": [probe]},
        )
        await sub.emit("event", "foo", "bar")
        probe.assert_called_with("foo", "bar")
Example #12
async def test_not_allowed_to_register_same_schema_twice():
    app = Application()

    @app.schema("Foo", version=1)
    class Foo1(pydantic.BaseModel):
        bar: str

    with pytest.raises(SchemaConflictException):

        @app.schema("Foo", version=1)
        class Foo2(pydantic.BaseModel):
            foo: str
Example #13
    async def test_retries_on_connection_failure(self):
        sub = SubscriptionConsumer(
            Application(),
            Subscription("foo", lambda record: 1, "group"),
        )
        run_mock = AsyncMock()
        sleep = AsyncMock()
        run_mock.side_effect = [aiokafka.errors.KafkaConnectionError, StopConsumer]
        with patch.object(sub, "initialize", AsyncMock()), patch.object(
            sub, "finalize", AsyncMock()
        ), patch.object(sub, "_run", run_mock), patch("kafkaesk.subscription.asyncio.sleep", sleep):
            await sub()
            sleep.assert_called_once()
            assert len(run_mock.mock_calls) == 2
Example #14
async def test_health_check_should_fail_with_unhandled(app: Application):
    @app.subscribe(TOPIC, group=TOPIC)
    async def consume(data):
        raise Exception("failure!")

    async with app:
        produce = asyncio.create_task(producer(app, TOPIC))
        fut = asyncio.create_task(app.consume_forever())
        await fut

        with pytest.raises(ConsumerUnhealthyException):
            await app.health_check()

        produce.cancel()
Example #15
    async def test_finalize_handles_exceptions(self):
        sub = SubscriptionConsumer(
            Application(),
            Subscription("foo", lambda record: 1, "group"),
        )
        consumer = AsyncMock()
        consumer.stop.side_effect = Exception
        consumer.commit.side_effect = Exception
        retry_policy = AsyncMock()
        retry_policy.finalize.side_effect = Exception

        sub._consumer = consumer
        sub._retry_policy = retry_policy
        await sub.finalize()

        consumer.stop.assert_called_once()
        consumer.commit.assert_called_once()
        retry_policy.finalize.assert_called_once()
Example #16
File: logger.py  Project: onna/kafkaesk
async def test_log() -> None:
    app = Application(kafka_servers=["localhost:9092"])

    logger = logging.getLogger("kafkaesk.ext.logging.kafka")
    handler = PydanticKafkaeskHandler(app, "logging.test")
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    stream_logger = logging.getLogger("kafkaesk.ext.logging.stream")
    stream_handler = PydanticStreamHandler()
    stream_logger.addHandler(stream_handler)
    stream_logger.setLevel(logging.DEBUG)

    @app.subscribe("logging.test", group="example.logging.consumer")
    async def consume(data: PydanticLogModel) -> None:
        stream_logger.info(data.json())

    async with app:
        logger.debug("Log Message", UserLog(user="******"))
        await app.flush()
        await app.consume_for(1, seconds=5)
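
The UserLog model passed as the log argument above is not defined in this snippet. A minimal stand-in, assuming only the user field that the call supplies, could look like this (the real model's name and fields may differ):

import pydantic


class UserLog(pydantic.BaseModel):
    # hypothetical stand-in for the UserLog model referenced in the example
    # above; the real model may define additional fields
    user: str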
Example #17
    async def test_run_exits_when_fut_closed_fut(self):
        sub = SubscriptionConsumer(
            Application(),
            Subscription("foo", lambda record: 1, "group"),
        )
        consumer = AsyncMock()
        consumer.getmany.return_value = {"": [record_factory() for _ in range(10)]}
        sub._consumer = consumer
        sub._running = True

        async def _handle_message(record):
            await asyncio.sleep(0.03)

        with patch.object(sub, "_handle_message", _handle_message):
            task = asyncio.create_task(sub._run())
            await asyncio.sleep(0.01)
            stop_task = asyncio.create_task(sub.stop())
            await asyncio.sleep(0.01)
            sub._close_fut.set_result(None)

            await asyncio.wait([stop_task, task])
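
record_factory() above comes from the test suite's helpers and is not shown here. A rough stand-in, assuming it only needs to build an aiokafka ConsumerRecord filled with placeholder values, might be:

import aiokafka


def record_factory() -> aiokafka.ConsumerRecord:
    # hypothetical replacement for the test helpers' record_factory;
    # every field value below is a placeholder
    return aiokafka.ConsumerRecord(
        topic="foo",
        partition=0,
        offset=0,
        timestamp=0,
        timestamp_type=0,
        key=None,
        value=b"{}",
        checksum=None,
        serialized_key_size=0,
        serialized_value_size=2,
        headers=[],
    )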
Example #18
from kafkaesk import Application
from models import SimpleMessage
from models import SimpleTweet

import asyncio

app = Application(kafka_servers=["localhost:9092"])


@app.subscribe("content", group="example_content_group")
async def messages(data: SimpleMessage) -> None:
    print("SimpleMesage")
    print("------------")
    print(f"Message: {data.message}")
    print(f"Meta: {data.meta}")
    print("- - -")


@app.subscribe("content", group="example_content_group")
async def tweets(data: SimpleTweet) -> None:
    print("SimpleTweet")
    print("-----------")
    print(f"Message: {data.message}")
    print(f"Likes & Retweets: {data.likes} - {data.retweets}")
    print("- - -")


async def main() -> None:
    async with app:
        await app.consume_forever()


if __name__ == "__main__":
    # run the consumer until interrupted
    asyncio.run(main())
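
SimpleMessage and SimpleTweet are imported from a models module that is not part of this snippet. A minimal sketch, assuming only the fields the two handlers read (the defaults are my own), might be:

from typing import Optional

import pydantic


class SimpleMessage(pydantic.BaseModel):
    # hypothetical sketch: only the fields used by messages() above
    message: str
    meta: Optional[str] = None


class SimpleTweet(pydantic.BaseModel):
    # hypothetical sketch: only the fields used by tweets() above
    message: str
    likes: int = 0
    retweets: int = 0

The real module presumably also registers these models with app.schema(...) so they can be published to and read from the content stream.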
Example #19
@pytest.fixture
def subscription(subscription_conf):
    yield BatchConsumer(
        subscription=subscription_conf,
        app=Application(kafka_servers=["foobar"]),
    )
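
The subscription_conf fixture consumed by the BatchConsumer tests is not included in these snippets. A plausible stand-in, modeled on the Subscription(...) arguments used by the SubscriptionConsumer tests above, could be:

import pytest

from kafkaesk.subscription import Subscription  # assumed import path


@pytest.fixture
def subscription_conf():
    # hypothetical fixture: arguments copied verbatim from the
    # SubscriptionConsumer tests in these examples
    return Subscription("foo", lambda record: 1, "group")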
Example #20
File: test_run.py  Project: onna/kafkaesk
from .produce import Foo
from .produce import producer
from kafkaesk import Application

import asyncio
import pytest
import signal

TOPIC = "test-run"
GROUP = "test-run2"

pytestmark = pytest.mark.asyncio

test_app = Application()

test_app.schema(streams=[TOPIC])(Foo)


@test_app.subscribe(TOPIC, group=GROUP)
async def _consumer(ob: Foo, record, app):
    ...


async def test_run_exits_cleanly_while_consuming(kafka, topic_prefix):
    kserver = f"{kafka[0]}:{kafka[1]}"
    app = Application(
        [kserver],
        topic_prefix=topic_prefix,
    )
    async with app:
        pro = asyncio.create_task(producer(app, TOPIC))
Example #21
from kafkaesk import Application
from pydantic import BaseModel

import asyncio

app = Application()


@app.schema("Foobar")
class Foobar(BaseModel):
    foo: str
    bar: str


@app.subscribe("content.*", group="example_content_group")
async def messages(data: Foobar):
    print(f"{data.foo}: {data.bar}")


async def generate_data():
    app.configure(kafka_servers=["localhost:9092"])
    async with app:
        for idx in range(1000):
            await app.publish("content.foo", Foobar(foo=str(idx), bar="yo"))


if __name__ == "__main__":
    asyncio.run(generate_data())
Example #22
@pytest.fixture
def subscription():
    yield SubscriptionConsumer(
        Application(kafka_servers=["foobar"]), Subscription("foo", lambda record: 1, "group")
    )