Example #1
async def consumer_main(*, loop, topic_name, consumer_settings,
                        schema_registry_url):
    """Main asyncio-based function for the single-topic consumer.
    """
    logger = structlog.get_logger(__name__)

    topic_name = topic_name.replace('_', '-').lower()

    async with aiohttp.ClientSession() as httpsession:
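        # Fetch the Avro schema for the topic's value subject; fastavro needs
        # it below to decode the schemaless-encoded messages.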
        schema, schema_id = await get_schema(
            topic_name + '-value',
            httpsession,
            schema_registry_url)

    # Start up the Kafka consumer
    consumer = aiokafka.AIOKafkaConsumer(loop=loop, **consumer_settings)

    # Main loop for consuming messages
    try:
        await consumer.start()

        # Subscribe to all topics in the experiment
        consumer.subscribe([topic_name])
        logger.info(f'Started consumer for topic {topic_name}')

        while True:
            async for message in consumer:
                value_fh = BytesIO(message.value)
                value_fh.seek(0)
                value = fastavro.schemaless_reader(
                    value_fh,
                    schema)
                logger.info("Received message", message=value)
    finally:
        await consumer.stop()
Example #2
 def _create_worker_consumer(
         self,
         transport: 'Transport',
         loop: asyncio.AbstractEventLoop) -> aiokafka.AIOKafkaConsumer:
     isolation_level: str = 'read_uncommitted'
     conf = self.app.conf
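     # Read only committed messages when the app processes transactionally.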
     if self.consumer.in_transaction:
         isolation_level = 'read_committed'
     self._assignor = self.app.assignor
     auth_settings = credentials_to_aiokafka_auth(
         conf.broker_credentials, conf.ssl_context)
     max_poll_interval = conf.broker_max_poll_interval or 0
     return aiokafka.AIOKafkaConsumer(
         loop=loop,
         client_id=conf.broker_client_id,
         group_id=conf.id,
         bootstrap_servers=server_list(
             transport.url, transport.default_port),
         partition_assignment_strategy=[self._assignor],
         enable_auto_commit=False,
         auto_offset_reset=conf.consumer_auto_offset_reset,
         max_poll_records=conf.broker_max_poll_records,
         max_poll_interval_ms=int(max_poll_interval * 1000.0),
         max_partition_fetch_bytes=conf.consumer_max_fetch_size,
         fetch_max_wait_ms=1500,
         request_timeout_ms=int(conf.broker_request_timeout * 1000.0),
         check_crcs=conf.broker_check_crcs,
         session_timeout_ms=int(conf.broker_session_timeout * 1000.0),
         heartbeat_interval_ms=int(conf.broker_heartbeat_interval * 1000.0),
         isolation_level=isolation_level,
         traced_from_parent_span=self.traced_from_parent_span,
         start_rebalancing_span=self.start_rebalancing_span,
         start_coordinator_span=self.start_coordinator_span,
         **auth_settings,
     )
Example #3
 def _create_worker_consumer(
         self, transport: 'Transport',
         loop: asyncio.AbstractEventLoop) -> aiokafka.AIOKafkaConsumer:
     isolation_level: str = 'read_uncommitted'
     conf = self.app.conf
     if self.consumer.in_transaction:
         isolation_level = 'read_committed'
     self._assignor = self.app.assignor
     return aiokafka.AIOKafkaConsumer(
         loop=loop,
         client_id=conf.broker_client_id,
         group_id=conf.id,
         bootstrap_servers=server_list(transport.url,
                                       transport.default_port),
         partition_assignment_strategy=[self._assignor],
         enable_auto_commit=False,
         auto_offset_reset=conf.consumer_auto_offset_reset,
         max_poll_records=conf.broker_max_poll_records,
         max_partition_fetch_bytes=conf.consumer_max_fetch_size,
         fetch_max_wait_ms=1500,
         request_timeout_ms=int(conf.broker_request_timeout * 1000.0),
         check_crcs=conf.broker_check_crcs,
         session_timeout_ms=int(conf.broker_session_timeout * 1000.0),
         heartbeat_interval_ms=int(conf.broker_heartbeat_interval * 1000.0),
         security_protocol="SSL" if conf.ssl_context else "PLAINTEXT",
         ssl_context=conf.ssl_context,
         isolation_level=isolation_level,
     )
Example #4
async def history(request: web.Request):
    """
    Get historic data from the given topic

    get params:
    - start: the start timestamp in milliseconds since the Unix epoch
    - end: (optional) the end timestamp in milliseconds since the Unix epoch
    """
    topic = request.match_info['id']
    try:
        start = int(request.query['start'])
        end = int(request.query.get('end', 9999999999999))
    except (KeyError, ValueError):
        raise web.HTTPBadRequest()
    if topic not in request.app['topics']:
        raise web.HTTPNotFound()
    consumer = aiokafka.AIOKafkaConsumer(
        topic,
        loop=asyncio.get_event_loop(),
        api_version='2.2.0',
        bootstrap_servers=request.app['settings'].KAFKA_SERVER)
    await consumer.start()
    topic_partition = aiokafka.TopicPartition(topic, 0)
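    # Find the earliest offset whose timestamp is >= start and seek to it.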
    offsets = await consumer.offsets_for_times({topic_partition: start})
    offset = offsets[topic_partition].offset
    consumer.seek(topic_partition, offset)
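    # The response body starts with the topic name, then the raw message bytes.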
    data = topic.encode('utf-8')
    try:
        async for msg in consumer:
            if msg.timestamp > end:
                break
            data += msg.value
    finally:
        await consumer.stop()
    return web.Response(body=data)
Example #5
async def kafka_consumer_factory(topic, config):
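    # Build a real SSLContext from the nested config entry before connecting.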
    if config["ssl_context"]:
        config = dict(config,
                      ssl_context=create_ssl_context(**config["ssl_context"]))
    consumer = aiokafka.AIOKafkaConsumer(topic, **config)
    await consumer.start()
    return consumer
Example #6
async def main(source_topic, sink_topic):
    consumer = aiokafka.AIOKafkaConsumer(
        group_id="events_cons_explain",
        enable_auto_commit=False,
        auto_offset_reset="earliest",
    )
    await consumer.start()
    consumer.subscribe([source_topic])

    producer = aiokafka.AIOKafkaProducer()
    await producer.start()

    mem = defaultdict(list)

    async def handle_msg(msg):
        val = mem[msg.key.decode()]
        new_val = int(msg.value.decode())
        if (
                new_val not in val
        ):  # dedupe: a crash between this append and consumer.commit() would redeliver the message
            val.append(new_val)
        print(f"Out value {msg.key.decode()} -> {shortlist(val)}")
        await producer.send(sink_topic,
                            value=json.dumps(val).encode(),
                            key=msg.key)

    try:
        async for msg in consumer:
            print(f"In value {msg.key.decode()} -> {msg.value.decode()}")
            await handle_msg(msg)
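            # Commit offsets only after the message is fully handled
            # (at-least-once delivery).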
            await consumer.commit()

    finally:
        await consumer.stop()
        await producer.stop()
Example #7
 def create_consumer(self, *topics, **kwargs):
     consumer = aiokafka.AIOKafkaConsumer(
         *topics,
         loop=self.Loop,
         bootstrap_servers=self.get_bootstrap_servers(),
         enable_auto_commit=False,
         **kwargs)
     return consumer
Example #8
 def __init__(self, group_id, mem_unique_id, auto_offset_reset, topic):
     self._consumer = aiokafka.AIOKafkaConsumer(
         group_id=group_id,
         enable_auto_commit=False,
         auto_offset_reset=auto_offset_reset)
     self._topic = topic
     self._mem = Mem(group_id_id=mem_unique_id)
     self._lock = asyncio.Lock()
     self._rbl = RebalanceListener(self._lock, self._mem)

 def _setup_connection(self) -> aiokafka.AIOKafkaConsumer:
     if self.settings.kafka_ssl_auth:
         ssl_context = create_ssl_context(
             cafile="init/kafka/ca.pem",
             certfile="init/kafka/service.cert",
             keyfile="init/kafka/service.key",
         )
         connection = aiokafka.AIOKafkaConsumer(
             *self.settings.metrics_topics.split(","),
             bootstrap_servers=self.settings.bootstrap_servers,
             ssl_context=ssl_context,
             security_protocol="SSL",
         )
     else:
         connection = aiokafka.AIOKafkaConsumer(
             *self.settings.metrics_topics.split(","),
             bootstrap_servers=self.settings.bootstrap_servers,
         )
     return connection
Example #10
def consumer(queue, bootstrap_servers, group_id, name="reader", loop=loop):
    """
    consumer returns a wrapped kafka consumer that will reconnect.
    """
    return ReconnectingClient(
        aiokafka.AIOKafkaConsumer(
            queue, bootstrap_servers=bootstrap_servers, group_id=group_id, loop=loop
        ),
        name,
    )
Example #11
    async def consume(self, attempts_count=100, initial_timeout=2, max_timeout=120):
        loop = asyncio.get_running_loop()

        topics = self.dispatcher.received_topics
        running_task = None
        start_consumer = True
        timeout = initial_timeout
        consumer = aiokafka.AIOKafkaConsumer(
            *topics,
            loop=loop,
            bootstrap_servers=self.url,
            group_id=self.group_id,
            enable_auto_commit=False,
            auto_offset_reset=self.auto_offset_reset,
            **self._aiokafka_options,
        )
        try:
            while True:
                try:
                    if start_consumer:
                        await consumer.start()
                        start_consumer = False
                        timeout = initial_timeout
                        logger.info("Kafka Consumer started")
                    async for msg in consumer:
                        for _ in range(attempts_count):
                            try:
                                running_task = asyncio.create_task(
                                    self.handle_msg(msg, consumer)
                                )
                                # Must be waited here, otherwise handling will run in
                                # parallel and will incorrectly commit offsets for not
                                # finished tasks
                                await asyncio.shield(running_task)
                                running_task = None
                                timeout = initial_timeout
                                break
                            except asyncio.CancelledError:
                                if running_task:
                                    await asyncio.wait({running_task}, timeout=timeout)
                                raise
                            except Exception as e:
                                logger.exception(e)
                                await asyncio.sleep(timeout)
                        else:
                            raise AttemptsFinished()

                except (gaierror, KafkaConnectionError):
                    start_consumer = True
                    await consumer.stop()
                    await asyncio.sleep(timeout)
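                # Back off exponentially between retries, capped at max_timeout.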
                if timeout < max_timeout:
                    timeout *= 2
        finally:
            await consumer.stop()
Example #12
    async def __run(self) -> None:
        consumer = aiokafka.AIOKafkaConsumer(
            bootstrap_servers=cast(List[str], self._app._kafka_servers),
            loop=asyncio.get_event_loop(),
            group_id=self._subscription.group,
            api_version=self._app._kafka_api_version,
            **self._app._kafka_settings or {},
        )
        pattern = fnmatch.translate(self._app.topic_mng.get_topic_id(self._subscription.stream_id))
        listener = CustomConsumerRebalanceListener(consumer, self._app, self._subscription.group)
        consumer.subscribe(pattern=pattern, listener=listener)
        await consumer.start()

        msg_handler = _raw_msg_handler
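        # Inspect the subscriber function's first parameter annotation to
        # choose a more specific handler than the raw default.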
        sig = inspect.signature(self._subscription.func)
        param_name = [k for k in sig.parameters.keys()][0]
        annotation = sig.parameters[param_name].annotation
        if annotation and annotation != sig.empty:
            if annotation == bytes:
                msg_handler = _bytes_msg_handler  # type: ignore
            elif annotation == aiokafka.structs.ConsumerRecord:
                msg_handler = _record_msg_handler  # type: ignore
            else:
                msg_handler = partial(_pydantic_msg_handler, annotation)  # type: ignore

        await self.emit("started", subscription_consumer=self)
        try:
            # Consume messages
            async for record in consumer:
                try:
                    logger.info(f"Handling msg: {record}")
                    msg_data = orjson.loads(record.value)
                    it = iter(sig.parameters.keys())
                    kwargs: Dict[str, Any] = {next(it): msg_handler(record, msg_data)}
                    for key in it:
                        if key == "schema":
                            kwargs["schema"] = msg_data["schema"]
                        elif key == "record":
                            kwargs["record"] = record
                    await self._subscription.func(**kwargs)
                except UnhandledMessage:
                    # unhandled messages are logged and skipped rather than failing hard
                    logger.warning(f"Could not process msg: {record}", exc_info=True)
                finally:
                    await self.emit("message", record=record)
        finally:
            try:
                await consumer.commit()
            except Exception:
                logger.info("Could not commit current offsets", exc_info=True)
            try:
                await consumer.stop()
            except Exception:
                logger.warning("Could not properly stop consumer", exc_info=True)
Example #13
async def consume_for_simple_topics(*, loop, httpsession, consumer_settings,
                                    schema_registry_url, topic_name):
    consumer_settings.update({
        'group_id': topic_name,
        'client_id': f'{topic_name}-0'  # always only one consumer per topic
    })
    logger = structlog.get_logger(__name__).bind(
        role='consumer',
        group=consumer_settings['group_id'],
        client_id=consumer_settings['client_id'])
    logger.info(f'Getting schemas for topic {topic_name}')

    registry_headers = {'Accept': 'application/vnd.schemaregistry.v1+json'}
    schema_uri = URITemplate(schema_registry_url +
                             '/subjects{/subject}/versions/latest')

    # Get schemas
    r = await httpsession.get(schema_uri.expand(subject=f'{topic_name}-key'),
                              headers=registry_headers)
    data = await r.json()

    r = await httpsession.get(schema_uri.expand(subject=f'{topic_name}-value'),
                              headers=registry_headers)
    data = await r.json()
    value_schema = fastavro.parse_schema(json.loads(data['schema']))

    # Start up the Kafka consumer
    consumer = aiokafka.AIOKafkaConsumer(loop=loop, **consumer_settings)

    # Main loop for consuming messages
    try:
        await consumer.start()

        # Subscribe to all topics in the experiment
        consumer.subscribe([topic_name])

        logger.info(f'Started consumer for topic {topic_name}')
        while True:
            async for message in consumer:
                value_fh = BytesIO(message.value)
                value_fh.seek(0)
                value = fastavro.schemaless_reader(value_fh, value_schema)
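                # End-to-end latency: now minus the producer timestamp
                # embedded in the message value.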
                now = datetime.datetime.now(datetime.timezone.utc)
                latency = now - value['timestamp']
                latency_millisec = latency.total_seconds() * 1000
                CONSUMED.inc()  # increment prometheus consumption counter
                LATENCY.observe(latency_millisec / 1000)
                LATENCY_SUMMARY.observe(latency_millisec / 1000)
                logger.debug('latency',
                             latency_millisec=latency_millisec,
                             topic=message.topic)
    finally:
        await consumer.stop()
Example #14
async def spectrogram(request: web.Request):
    """
    Create a spectrogram from the data in the given topic

    post params:
    - start: the start timestamp in milliseconds since the Unix epoch
    - end: (optional) the end timestamp in milliseconds since the Unix epoch
    - channel: id of the channel in the topic to plot
    - projectId: id of the project where the plot should be saved
    """
    post = await request.post()
    datasource_id = request.match_info['id']
    datasource = request.app['datasources'].get_source(datasource_id)
    input_byte_format = datasource.input_byte_format
    topic = datasource.topic

    try:
        channel_id = int(post.get('channel_id'))
        start = int(post.get('start'))
        end = int(post.get('end', 9999999999999))
    except (TypeError, ValueError):
        raise web.HTTPBadRequest()
    if topic not in request.app['topics']:
        raise web.HTTPNotFound()

    consumer = aiokafka.AIOKafkaConsumer(
        topic,
        loop=asyncio.get_event_loop(),
        api_version='2.2.0',
        bootstrap_servers=request.app['settings'].KAFKA_SERVER)
    await consumer.start()
    topic_partition = aiokafka.TopicPartition(topic, 0)
    offsets = await consumer.offsets_for_times({topic_partition: start})
    if offsets is None:
        raise web.HTTPNotFound()
    offset = offsets[topic_partition].offset
    consumer.seek(topic_partition, offset)
    data = []
    try:
        async for msg in consumer:
            if msg.timestamp > end:
                break
            for i, message in enumerate(
                    struct.iter_unpack(input_byte_format, msg.value)):
                data.append(message[1 + channel_id])
    finally:
        await consumer.stop()
    if len(data) == 0:
        raise web.HTTPBadRequest()
    resp = make_spectrogram(data, (end - start) / 1000)
    return web.json_response(resp, dumps=dumps)
Example #15
 def _create_client_consumer(
         self, app: AppT,
         transport: 'Transport') -> aiokafka.AIOKafkaConsumer:
     return aiokafka.AIOKafkaConsumer(
         loop=self.loop,
         client_id=app.conf.broker_client_id,
         bootstrap_servers=server_list(transport.url,
                                       transport.default_port),
         enable_auto_commit=True,
         auto_offset_reset='earliest',
         check_crcs=app.conf.broker_check_crcs,
     )

 def create_consumer(self, *topics, **kwargs):
     consumer = aiokafka.AIOKafkaConsumer(
         *topics,
         loop=self.Loop,
         bootstrap_servers=self.get_bootstrap_servers(),
         enable_auto_commit=False,
         security_protocol=self.Config.get('security_protocol'),
         sasl_mechanism=self.Config.get('sasl_mechanism'),
         sasl_plain_username=self.Config.get('sasl_plain_username') or None,
         sasl_plain_password=self.Config.get('sasl_plain_password') or None,
         **kwargs)
     return consumer

 async def create_consumer_instance(topics, loop, config=None):
     print('Creating Kafka consumer')
     config = config or {}
     if Kafka._consumer is None:
         if 'bootstrap_servers' not in config:
             raise Exception('No bootstrap servers have been configured')
         consumer = aiokafka.AIOKafkaConsumer(
             loop=loop,
             bootstrap_servers=config['bootstrap_servers'],
             value_deserializer=deserializer)
         await consumer.start()
         consumer.subscribe(topics)
         Kafka._consumer = consumer
     return Kafka._consumer
Example #18
File: app.py Project: Extintor/kafkaesk
 def consumer_factory(self, group_id: str) -> aiokafka.AIOKafkaConsumer:
     return aiokafka.AIOKafkaConsumer(
         bootstrap_servers=cast(List[str], self._kafka_servers),
         loop=asyncio.get_event_loop(),
         group_id=group_id,
         api_version=self._kafka_api_version,
         enable_auto_commit=False,
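         # Forward only the settings that the aiokafka consumer accepts.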
         **{
             k: v
             for k, v in self.kafka_settings.items()
             if k in _aiokafka_consumer_settings
         },
     )
Example #19
File: aiokafka.py Project: whoshuu/faust
 def _create_client_consumer(
         self, app: AppT,
         transport: 'Transport') -> aiokafka.AIOKafkaConsumer:
     return aiokafka.AIOKafkaConsumer(
         loop=self.loop,
         client_id=app.conf.broker_client_id,
         bootstrap_servers=server_list(transport.url,
                                       transport.default_port),
         enable_auto_commit=True,
         auto_offset_reset='earliest',
         check_crcs=app.conf.broker_check_crcs,
         security_protocol="SSL" if app.conf.ssl_context else "PLAINTEXT",
         ssl_context=app.conf.ssl_context,
     )
Example #20
async def fft_from_source(request: web.Request):
    datasource_id = request.match_info['id']
    datasource = request.app['datasources'].get_source(datasource_id)
    input_byte_format = datasource.input_byte_format
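    # Number of leading metadata fields before the channel values in each record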
    metadata = len(datasource.input_names) - len(datasource.output_names)
    topic = datasource.topic
    try:
        channel_id = int(request.match_info['channel_id'])
        start = int(request.match_info['start'])
        end = int(request.match_info['end'])
        sample_spacing = float(request.match_info['sample_spacing'])
    except ValueError:
        raise web.HTTPBadRequest()
    if topic not in request.app['topics']:
        raise web.HTTPNotFound()

    consumer = aiokafka.AIOKafkaConsumer(
        topic,
        loop=asyncio.get_event_loop(),
        api_version='2.2.0',
        bootstrap_servers=request.app['settings'].KAFKA_SERVER)
    await consumer.start()
    topic_partition = aiokafka.TopicPartition(topic, 0)
    offsets = await consumer.offsets_for_times({topic_partition: start})
    if offsets is None:
        print("offsets is none")
        raise web.HTTPNotFound()
    offset = offsets[topic_partition].offset
    consumer.seek(topic_partition, offset)
    data = []
    try:
        async for msg in consumer:
            if msg.timestamp > end:
                break
            for i, message in enumerate(
                    struct.iter_unpack(input_byte_format, msg.value)):
                data.append(message[metadata + channel_id])
    finally:
        await consumer.stop()
    if len(data) == 0:
        raise web.HTTPBadRequest()
    resp = get_fft(data, sample_spacing)
    return web.json_response(resp, dumps=dumps)
Example #21
async def consume():
    consumer = aiokafka.AIOKafkaConsumer("welcomerMain",
                                         loop=loop,
                                         bootstrap_servers="localhost:9092")
    await consumer.start()
    print("Started consuming")
    try:
        # Consume messages
        async for msg in consumer:
            data = msgpack.unpackb(msg.value)
            print(json.dumps(data))
            # print("consumed: ", msg.topic, msg.partition, msg.offset,
            #       msg.key, msg.value, msg.timestamp)
    finally:
        # Will leave consumer group; perform autocommit if enabled.
        await consumer.stop()
Example #22
File: kafka.py Project: fossabot/vk.py
 def __init__(self,
              vk,
              *topics: str,
              group_id: str,
              bootstrap_servers: str = "localhost"):
     if aiokafka:
         self._vk = vk
         self.consumer = aiokafka.AIOKafkaConsumer(
             *topics,
             loop=vk.loop,
             bootstrap_servers=bootstrap_servers,
             group_id=group_id)
     else:
          raise RuntimeWarning(
              "Please install aiokafka (pip install aiokafka) to use this extension"
          )
Example #23
 def _create_client_consumer(
         self, transport: 'Transport',
         loop: asyncio.AbstractEventLoop) -> aiokafka.AIOKafkaConsumer:
     conf = self.app.conf
     return aiokafka.AIOKafkaConsumer(
         loop=loop,
         client_id=conf.broker_client_id,
         bootstrap_servers=server_list(transport.url,
                                       transport.default_port),
         request_timeout_ms=int(conf.broker_request_timeout * 1000.0),
         enable_auto_commit=True,
         max_poll_records=conf.broker_max_poll_records,
         auto_offset_reset=conf.consumer_auto_offset_reset,
         check_crcs=conf.broker_check_crcs,
         security_protocol="SSL" if conf.ssl_context else "PLAINTEXT",
         ssl_context=conf.ssl_context,
     )
Example #24
async def initialize():
    log.debug("Initializing the kafka consumer....")

    global consumer
    global loop

    group_id = f'{KAFKA_CONSUMER_GROUP_PREFIX}-{randint(0, 10000)}'
    log.debug(f'Initializing KafkaConsumer for topic {KAFKA_TOPIC}, '
              f'group_id {group_id} and using '
              f'bootstrap servers {KAFKA_BOOTSTRAP_SERVERS}')

    consumer = aiokafka.AIOKafkaConsumer(
        KAFKA_TOPIC,
        loop=loop,
        bootstrap_servers=KAFKA_BOOTSTRAP_SERVERS,
        group_id=group_id)
    # get cluster layout and join group
    await consumer.start()

    partitions: Set[TopicPartition] = consumer.assignment()
    nr_partitions = len(partitions)
    if nr_partitions != 1:
        log.warning(
            f'Found {nr_partitions} partitions for topic {KAFKA_TOPIC}. Expecting '
            f'only one, remaining partitions will be ignored!')
    for tp in partitions:

        # get the log_end_offset
        end_offset_dict = await consumer.end_offsets([tp])
        end_offset = end_offset_dict[tp]

        if end_offset == 0:
            log.warning(
                f'Topic ({KAFKA_TOPIC}) has no messages (log_end_offset: '
                f'{end_offset}), skipping initialization ...')
            return

        # The real work is the check above: if there are no messages (e.g.
        # after a graceful shutdown) there is nothing to initialize from, and
        # the consumer task just starts consuming new messages.

        log.debug(
            f'Found log_end_offset: {end_offset} seeking to {end_offset-1}')
        consumer.seek(tp, end_offset - 1)
        msg = await consumer.getone()
        log.info(f'Initializing API with data from msg: {msg}')
        return
Example #25
async def consume_from_kafka(app: web.Application):
    """The function responsible for delivering data to the connected clients."""
    consumer = aiokafka.AIOKafkaConsumer(
        loop=asyncio.get_event_loop(),
        bootstrap_servers=app['settings'].KAFKA_SERVER)
    try:
        while True:
            try:
                await consumer.start()
                break
            except KafkaConnectionError:
                logger.exception(
                    'Could not connect to Kafka server, retrying in 30 seconds'
                )
                await asyncio.sleep(30)
        logger.info('Connected to Kafka server')
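        # Subscribe to every topic on the broker and fan incoming messages
        # out to the connected websocket subscribers.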
        consumer.subscribe(pattern='.*')
        while True:
            try:
                messages: Dict[
                    aiokafka.TopicPartition,
                    List[aiokafka.ConsumerRecord]] = await consumer.getmany(
                        timeout_ms=1000)
                for topic, topic_messages in messages.items():
                    for subscriber in app['subscribers'][topic.topic]:
                        # print("subscriber", topic.topic, subscriber)
                        await subscriber.receive(
                            topic.topic,
                            b''.join(message.value
                                     for message in topic_messages))
            except KafkaConnectionError:
                logger.exception(
                    'Lost connection to kafka server, waiting 10 seconds before retrying'
                )
                await asyncio.sleep(10)
            except ConnectionAbortedError:
                logger.warning(
                    'Got a connection aborted error when trying to send to a websocket'
                )
    except asyncio.CancelledError:
        pass
    except Exception:
        logger.exception('Exception in client consumer process')
    finally:
        await consumer.stop()
示例#26
0
 def _create_client_consumer(
         self, transport: 'Transport',
         loop: asyncio.AbstractEventLoop) -> aiokafka.AIOKafkaConsumer:
     conf = self.app.conf
     auth_settings = credentials_to_aiokafka_auth(conf.broker_credentials,
                                                  conf.ssl_context)
     return aiokafka.AIOKafkaConsumer(
         loop=loop,
         client_id=conf.broker_client_id,
         bootstrap_servers=server_list(transport.url,
                                       transport.default_port),
         request_timeout_ms=int(conf.broker_request_timeout * 1000.0),
         enable_auto_commit=True,
         max_poll_records=conf.broker_max_poll_records,
         auto_offset_reset=conf.consumer_auto_offset_reset,
         check_crcs=conf.broker_check_crcs,
         **auth_settings,
     )
Example #27
async def consume(out_chan: ac.Chan) -> None:
    loop = asyncio.get_running_loop()
    consumer = aiokafka.AIOKafkaConsumer('salutations',
                                         loop=loop,
                                         bootstrap_servers='kafka:9092',
                                         group_id="salutated",
                                         auto_offset_reset="earliest")
    logging.info("Starting consumer")
    await consumer.start()
    try:
        logging.info("Consumer started")
        async for msg in consumer:
            out_chan.put((msg.key, msg.value))
    except asyncio.CancelledError:
        logging.info("Consumer cancelled")
    finally:
        logging.info("Stopping consumer")
        await consumer.stop()

async def initialize():
    loop = asyncio.get_event_loop()
    global consumer
    group_id = f'{KAFKA_CONSUMER_GROUP_PREFIX}-{randint(0, 10000)}'
    log.debug(
        f'Initializing KafkaConsumer for topic {KAFKA_TOPIC}, group_id {group_id}'
        f' and using bootstrap servers {KAFKA_BOOTSTRAP_SERVERS}')
    consumer = aiokafka.AIOKafkaConsumer(
        KAFKA_TOPIC,
        loop=loop,
        bootstrap_servers=KAFKA_BOOTSTRAP_SERVERS,
        group_id=group_id)
    # get cluster layout and join group
    await consumer.start()

    partitions: Set[TopicPartition] = consumer.assignment()
    nr_partitions = len(partitions)
    if nr_partitions != 1:
        log.warning(
            f'Found {nr_partitions} partitions for topic {KAFKA_TOPIC}. Expecting '
            f'only one, remaining partitions will be ignored!')
    for tp in partitions:

        # get the log_end_offset
        end_offset_dict = await consumer.end_offsets([tp])
        end_offset = end_offset_dict[tp]

        if end_offset == 0:
            log.warning(
                f'Topic ({KAFKA_TOPIC}) has no messages (log_end_offset: '
                f'{end_offset}), skipping initialization ...')
            return

        log.debug(
            f'Found log_end_offset: {end_offset} seeking to {end_offset-1}')
        consumer.seek(tp, end_offset - 1)
        msg = await consumer.getone()
        log.info(f'Initializing API with data from msg: {msg}')

        # update the API state
        _update_state(msg)
        return
Example #29
	def __init__(self, app, pipeline, connection, id=None, config=None):
		super().__init__(app, pipeline, id=id, config=config)

		topics = re.split(r'\s*,\s*', self.Config['topic'])

		self._group_id = self.Config['group_id']
		if len(self._group_id) == 0:
			self._group_id = None

		self.Connection = pipeline.locate_connection(app, connection)
		self.Consumer = aiokafka.AIOKafkaConsumer(
			*topics,
			loop = app.Loop,
			bootstrap_servers = self.Connection.get_bootstrap_servers(),
			client_id = self.Config['client_id'],
			group_id = self._group_id,
			max_partition_fetch_bytes = int(self.Config['max_partition_fetch_bytes']),
			auto_offset_reset = self.Config['auto_offset_reset'],
			api_version = self.Config['api_version'],
			enable_auto_commit=False
		)
Example #30
async def main():
    loop = asyncio.get_event_loop()
    consumer = aiokafka.AIOKafkaConsumer(
        "my_topic4",
        loop=loop,
        bootstrap_servers="128.0.255.10:9092",
        # Consumer must be in a group to commit
        group_id="test",
        # Autocommit every second
        auto_commit_interval_ms=1000,
        # If no committed offset is found, start from the beginning
        auto_offset_reset="earliest")
    await consumer.start()
    try:
        async for msg in consumer:
            print("{}:{:d}:{:d}: key={} value={} timestamp_ms={}".format(
                msg.topic, msg.partition, msg.offset, msg.key, msg.value,
                msg.timestamp))
    finally:
        await consumer.stop()