Example #1
import ast
import asyncio
import json
from json import JSONDecodeError
from typing import List

from aiokafka import AIOKafkaConsumer
from aiokafka.structs import ConsumerRecord, TopicPartition

# `settings` and `logger` are module-level objects of the original project.


async def _get_kafka_messages(topic: str, start: int) -> List[ConsumerRecord]:
    def _value_deserializer(value):
        # Try JSON first, fall back to a Python-literal payload.
        value = value.decode("utf-8")
        try:
            return json.loads(value)
        except JSONDecodeError:
            return ast.literal_eval(value)

    loop = asyncio.get_event_loop()
    consumer = AIOKafkaConsumer(
        topic, value_deserializer=_value_deserializer,
        loop=loop, bootstrap_servers=settings.KAFKA_SERVER,
    )

    await consumer.start()
    try:
        partitions = consumer.partitions_for_topic(topic)
        tps = [TopicPartition(topic, p) for p in partitions]

        # Find the first offset at or after the `start` timestamp (ms) in each
        # partition; fall back to the end offset if no such message exists.
        offsets = await consumer.offsets_for_times({tp: start for tp in tps})
        for tp, offset in offsets.items():
            offset = offset.offset if offset else (await consumer.end_offsets([tp]))[tp]
            consumer.seek(tp, offset)

        # Fetch whatever is available within one minute.
        records = await consumer.getmany(*tps, timeout_ms=1000*60)

        messages = []
        for tp in tps:
            messages += records.get(tp, [])
        logger.info(f"Got kafka messages {messages} by key {topic}")
        return messages
    finally:
        # Will leave consumer group; perform autocommit if enabled.
        await consumer.stop()
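A minimal driver sketch for the helper above; the entry-point name and topic are hypothetical, and `start` is an epoch timestamp in milliseconds, as expected by offsets_for_times:

import time

async def main():
    # Fetch everything produced during the last hour (timestamp in ms).
    one_hour_ago_ms = int(time.time() * 1000) - 60 * 60 * 1000
    for record in await _get_kafka_messages("my_topic", one_hour_ago_ms):
        print(record.offset, record.value)

asyncio.run(main())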
Example #2
async def produce_and_consume():
    # Produce
    producer = AIOKafkaProducer(bootstrap_servers='localhost:9093',
                                security_protocol="SSL",
                                ssl_context=context)

    await producer.start()
    try:
        msg = await producer.send_and_wait('my_topic',
                                           b"Super Message",
                                           partition=0)
    finally:
        await producer.stop()

    consumer = AIOKafkaConsumer("my_topic",
                                bootstrap_servers='localhost:9093',
                                security_protocol="SSL",
                                ssl_context=context)
    await consumer.start()
    try:
        consumer.seek(TopicPartition('my_topic', 0), msg.offset)
        fetch_msg = await consumer.getone()
    finally:
        await consumer.stop()

    print("Success", msg, fetch_msg)
Example #3
async def seek_to_offset(consumer: AIOKafkaConsumer, topic: str, start: int = -1):
    """
    Seek to the last message in the topic.
    """
    partition_number, offset = -1, -1
    # Loop through the partitions and find the one with the latest offset.
    for p in consumer.partitions_for_topic(topic):
        tp = TopicPartition(topic, p)
        committed = await consumer.committed(tp)  # committed offset (unused, kept from the original)
        await consumer.seek_to_end(tp)
        last_offset = await consumer.position(tp)
        # print("topic: {} partition: {} committed: {} last: {}".format(topic, p, committed, last_offset))
        if offset < last_offset:
            offset = last_offset
            partition_number = p
    # Rewind `start` messages from the end of the most recent partition.
    tp = TopicPartition(topic, partition_number)
    consumer.seek(tp, offset - start)
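Usage sketch (hypothetical caller; assumes the consumer is already started and assigned to the topic). Note that with the default start=-1 the computed position is the end offset plus one, so a caller that wants the last existing record would pass start=1:

async def read_last_message(consumer: AIOKafkaConsumer):
    # Position on the most recent record of "my_topic", then fetch it.
    await seek_to_offset(consumer, "my_topic", start=1)
    return await consumer.getone()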
Example #4
    async def _pull(self, partitions):
        logging.info(f"thread:{self.name} pull start... ")

        consumer = AIOKafkaConsumer(loop=asyncio.get_running_loop(),
                                    bootstrap_servers=self.queue_brokers,
                                    value_deserializer=_value_serializer,
                                    enable_auto_commit=True)
        try:
            await consumer.start()
            # Manual partition assignment: no consumer group rebalancing.
            consumer.assign(list(partitions.keys()))

            # Resume every partition from its stored offset, if one is known.
            for tp, offset in partitions.items():
                if offset > 0:
                    consumer.seek(tp, offset)

            async for msg in consumer:
                if self._running is False:
                    logging.info(f"thread:{self.name} stop running")
                    break

                acr_meta = await self._process(msg)
                if acr_meta is not None:
                    logging.debug(
                        f"thread:{self.name} pull msg {msg.topic}, {msg.partition}"
                    )
                    await self.acr_queue.put(acr_meta)

        except asyncio.CancelledError:
            logging.info(f"thread:{self.name} pull stop")
        except Exception:
            logging.error(
                f"thread:{self.name} exception stop {traceback.format_exc()}")
        finally:
            # Leave the group and release connections on the way out.
            await consumer.stop()

        logging.info(f"thread:{self.name} over... ")
Example #5
async def produce_and_consume(loop):
    # Produce
    producer = AIOKafkaProducer(
        loop=loop, bootstrap_servers='localhost:9093',
        security_protocol="SSL", ssl_context=context)

    await producer.start()
    try:
        msg = await producer.send_and_wait(
            'my_topic', b"Super Message", partition=0)
    finally:
        await producer.stop()

    consumer = AIOKafkaConsumer(
        "my_topic", loop=loop, bootstrap_servers='localhost:9093',
        security_protocol="SSL", ssl_context=context)
    await consumer.start()
    try:
        consumer.seek(TopicPartition('my_topic', 0), msg.offset)
        fetch_msg = await consumer.getone()
    finally:
        await consumer.stop()

    print("Success", msg, fetch_msg)