Code Example #1
File: msgkafka.py Project: VivekJaganath/pg-scrambLe
    async def aioread(self, topic, loop=None, callback=None, *args):
        """
        Asyncio read from one or several topics. It blocks
        :param topic: can be str: single topic; or str list: several topics
        :param loop: asyncio loop
        :param callback: callback function that will handle the message in kafka bus
        :param args: optional positional arguments for the callback function
        :return: topic, key, message
        """

        if not loop:
            loop = self.loop
        try:
            if isinstance(topic, (list, tuple)):
                topic_list = topic
            else:
                topic_list = (topic, )

            self.consumer = AIOKafkaConsumer(loop=loop,
                                             bootstrap_servers=self.broker)
            await self.consumer.start()
            self.consumer.subscribe(topic_list)

            async for message in self.consumer:
                if callback:
                    # safe_load avoids executing arbitrary YAML tags from the bus
                    callback(message.topic, yaml.safe_load(message.key),
                             yaml.safe_load(message.value), *args)
                else:
                    return (message.topic, yaml.safe_load(message.key),
                            yaml.safe_load(message.value))
        except KafkaError as e:
            raise MsgException(str(e))
        finally:
            await self.consumer.stop()
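A minimal usage sketch for aioread above, assuming a connected instance of the surrounding message-bus class named msg_bus; the instance and topic names are illustrative, not part of the original code:

import asyncio

def on_message(topic, key, value):
    # Called synchronously for every message on the subscribed topics
    print(f"{topic}: {key} -> {value}")

async def run():
    # Blocks until the task is cancelled or a KafkaError is raised
    await msg_bus.aioread(["alarms", "events"], callback=on_message)

asyncio.run(run())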
Code Example #2
    def __init__(self,
                 kafka_servers='localhost:9092',
                 in_topic='aiokafkarpc_in',
                 out_topic='aiokafkarpc_out',
                 out_partitions=(0, ),
                 translation_table=[],
                 *,
                 loop):
        self.call = CallObj(self._call_wrapper)

        self._topic_in = in_topic
        self._loop = loop
        self._waiters = {}
        self._out_topic = out_topic
        self._out_partitions = out_partitions

        default, ext_hook = get_msgpack_hooks(translation_table)
        self.__consumer = AIOKafkaConsumer(
            self._out_topic,
            loop=loop,
            bootstrap_servers=kafka_servers,
            group_id=None,
            key_deserializer=lambda x: x.decode("utf-8"),
            value_deserializer=lambda x: msgpack.unpackb(
                x, ext_hook=ext_hook, encoding="utf-8"))

        self.__producer = AIOKafkaProducer(
            bootstrap_servers=kafka_servers,
            loop=loop,
            key_serializer=lambda x: x.encode("utf-8"),
            value_serializer=lambda x: msgpack.packb(x, default=default))
Code Example #3
File: worker.py Project: bobzsj87/aiokafkadaemon
    @classmethod
    def make_consumer(cls, loop, broker_addr, group_id, sasl_opts=None):
        """
        Creates and connects Kafka  consumer to the broker
        :param loop:
        :param broker_addr:
        :return:
        """
        logger.debug('Creating instance of kafka consumer')
        if not sasl_opts:
            consumer = AIOKafkaConsumer(loop=loop,
                                        bootstrap_servers=broker_addr,
                                        group_id=group_id,
                                        session_timeout_ms=60000)
        else:
            consumer = AIOKafkaConsumer(
                loop=loop,
                bootstrap_servers=broker_addr,
                group_id=group_id,
                session_timeout_ms=60000,
                sasl_mechanism='PLAIN',
                sasl_plain_username=sasl_opts['username'],
                sasl_plain_password=sasl_opts['password'],
                security_protocol='SASL_SSL',
                ssl_context=ssl.SSLContext(ssl.PROTOCOL_TLSv1_2))

        logger.info('Created consumer for kafka on {}'.format(broker_addr))
        return consumer
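A usage sketch, assuming make_consumer is exposed on a class named Worker (the class name is illustrative). Note that the returned consumer is not yet connected:

import asyncio

async def run_worker():
    loop = asyncio.get_event_loop()
    consumer = Worker.make_consumer(loop, 'localhost:9092', 'my-group')
    # Despite the log line, the network connection only happens here
    await consumer.start()
    try:
        async for msg in consumer:
            print(msg.value)
    finally:
        await consumer.stop()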
Code Example #4
File: rpc.py Project: pabraksas/aiokafka_rpc
    def __init__(self,
                 rpc_obj,
                 kafka_servers='localhost:9092',
                 in_topic='aiokafkarpc_in',
                 out_topic='aiokafkarpc_out',
                 translation_table=[],
                 *,
                 loop):
        self._tasks = {}
        self._loop = loop
        self._topic_out = out_topic
        self._rpc_obj = rpc_obj
        self._res_queue = asyncio.Queue(loop=loop)

        default, ext_hook = get_msgpack_hooks(translation_table)
        self.__consumer = AIOKafkaConsumer(
            in_topic,
            loop=loop,
            bootstrap_servers=kafka_servers,
            group_id=in_topic + '-group',
            key_deserializer=lambda x: x.decode("utf-8"),
            value_deserializer=lambda x: msgpack.unpackb(
                x, ext_hook=ext_hook, encoding="utf-8"))

        self.__producer = AIOKafkaProducer(
            bootstrap_servers=kafka_servers,
            loop=loop,
            key_serializer=lambda x: x.encode("utf-8"),
            value_serializer=lambda x: msgpack.packb(x, default=default))
Code Example #5
File: kafka.py Project: slaven92/quiz-backend
class KafkaBackend(BroadcastBackend):
    def __init__(self, url: str):
        self._servers = [urlparse(url).netloc]
        self._consumer_channels: typing.Set = set()

    async def connect(self) -> None:
        loop = asyncio.get_event_loop()
        self._producer = AIOKafkaProducer(loop=loop,
                                          bootstrap_servers=self._servers)
        self._consumer = AIOKafkaConsumer(loop=loop,
                                          bootstrap_servers=self._servers)
        await self._producer.start()
        await self._consumer.start()

    async def disconnect(self) -> None:
        await self._producer.stop()
        await self._consumer.stop()

    async def subscribe(self, channel: str) -> None:
        self._consumer_channels.add(channel)
        self._consumer.subscribe(topics=self._consumer_channels)

    async def unsubscribe(self, channel: str) -> None:
        self._consumer_channels.discard(channel)
        # AIOKafkaConsumer.unsubscribe() is synchronous and must not be awaited
        if self._consumer_channels:
            self._consumer.subscribe(topics=self._consumer_channels)
        else:
            self._consumer.unsubscribe()

    async def publish(self, channel: str, message: typing.Any) -> None:
        await self._producer.send_and_wait(channel, message.encode("utf8"))

    async def next_published(self) -> Event:
        message = await self._consumer.getone()
        return Event(channel=message.topic,
                     message=message.value.decode("utf8"))
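A lifecycle sketch for this backend; the URL and channel name are illustrative, and the timing between subscribe and publish is simplified:

import asyncio

async def demo():
    backend = KafkaBackend("kafka://localhost:9092")
    await backend.connect()
    try:
        await backend.subscribe("chatroom")
        await backend.publish("chatroom", "hello")
        event = await backend.next_published()
        print(event.channel, event.message)
    finally:
        await backend.disconnect()

asyncio.run(demo())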
Code Example #6
async def _get_kafka_messages(topic: str, start: int) -> List[ConsumerRecord]:
    def _value_deserializer(value):
        value = value.decode("utf-8")
        try:
            return json.loads(value)
        except JSONDecodeError:
            return ast.literal_eval(value)

    loop = asyncio.get_event_loop()
    consumer = AIOKafkaConsumer(
        topic, value_deserializer=_value_deserializer,
        loop=loop, bootstrap_servers=settings.KAFKA_SERVER,
    )

    await consumer.start()
    try:
        partitions = consumer.partitions_for_topic(topic)
        tps = [TopicPartition(topic, p) for p in partitions]

        offsets = await consumer.offsets_for_times({tp: start for tp in tps})
        for tp, offset in offsets.items():
            offset = offset.offset if offset else (await consumer.end_offsets([tp]))[tp]
            consumer.seek(tp, offset)

        records = await consumer.getmany(*tps, timeout_ms=1000*60)

        messages = []
        for tp in tps:
            messages += records.get(tp, [])
        logger.info(f"Got kafka messages {messages} by key {topic}")
        return messages
    finally:
        # Will leave consumer group; perform autocommit if enabled.
        await consumer.stop()
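offsets_for_times expects epoch timestamps in milliseconds, so a caller sketch might look like this (the topic name is illustrative):

import asyncio
import time

async def recent_messages():
    # Fetch everything produced during the last five minutes
    start_ms = int(time.time() * 1000) - 5 * 60 * 1000
    return await _get_kafka_messages("my-events", start_ms)

asyncio.run(recent_messages())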
Code Example #7
async def produce_and_consume():
    # Produce
    producer = AIOKafkaProducer(bootstrap_servers='localhost:9093',
                                security_protocol="SSL",
                                ssl_context=context)

    await producer.start()
    try:
        msg = await producer.send_and_wait('my_topic',
                                           b"Super Message",
                                           partition=0)
    finally:
        await producer.stop()

    consumer = AIOKafkaConsumer("my_topic",
                                bootstrap_servers='localhost:9093',
                                security_protocol="SSL",
                                ssl_context=context)
    await consumer.start()
    try:
        consumer.seek(TopicPartition('my_topic', 0), msg.offset)
        fetch_msg = await consumer.getone()
    finally:
        await consumer.stop()

    print("Success", msg, fetch_msg)
Code Example #8
    async def __consume(self, loop) -> _QueueInternalResult:
        def deserializer(serialized):
            return json.loads(serialized)

        consumer = None
        try:
            consumer = AIOKafkaConsumer(
                self._config.optional_param["topic_name"],
                loop=loop,
                group_id="youyaku_ai_group",
                # isolation_level="read_committed",
                bootstrap_servers=self._config.get_url(),
                value_deserializer=deserializer,
                auto_offset_reset="earliest",
                enable_auto_commit=False,
            )
            await consumer.start()

            # Check the consumer position against the end offset; if there is
            # no new data, return an empty result.
            # TODO: only a single partition is handled here; multi-partition
            # support is still needed.
            partition = list(consumer.assignment())[0]
            position = await consumer.position(partition=partition)
            offset_dict = await consumer.end_offsets(partitions=[partition])
            end = offset_dict[partition]
            if position == end:
                return _QueueInternalResult(result=[], e=None)

            # Fetch a single message
            data = await consumer.getone()
            messages = [data.value]
            await consumer.commit()
        except Exception as e:
            return _QueueInternalResult(result=None, e=e)
        finally:
            if consumer is not None:
                await consumer.stop()
        return _QueueInternalResult(result=messages, e=None)
Code Example #9
File: kafka.py Project: slaven92/quiz-backend
 async def connect(self) -> None:
     loop = asyncio.get_event_loop()
     self._producer = AIOKafkaProducer(loop=loop,
                                       bootstrap_servers=self._servers)
     self._consumer = AIOKafkaConsumer(loop=loop,
                                       bootstrap_servers=self._servers)
     await self._producer.start()
     await self._consumer.start()
Code Example #10
class BaseKafkaTableBuilder(object):
    """Table builder.

    Builds table using single consumer consuming linearly
    from raw topic.
    """
    def __init__(self, topic, loop):
        self.topic = topic
        self.consumer = None
        self.messages = []
        self.loop = loop
        self.table = defaultdict(int)
        self.key_tps = defaultdict(set)
        self._assignment = None

    async def build(self):
        await self._init_consumer()
        await self._build_table()

    def get_key(self, message):
        return json.loads(message.key.decode())

    def get_value(self, message):
        return json.loads(message.value.decode())

    async def _init_consumer(self):
        if not self.consumer:
            self.consumer = AIOKafkaConsumer(
                self.topic,
                loop=self.loop,
                bootstrap_servers=bootstrap_servers,
                auto_offset_reset="earliest",
            )
            await self.consumer.start()
            self._assignment = self.consumer.assignment()

    async def _build_table(self):
        while True:
            message = await self.consumer.getone()
            self.messages.append(message)
            await self._apply(message)
            if await self._positions() == self._highwaters():
                print("Done building table")
                return

    async def _apply(self, message):
        print(message)

    async def _positions(self):
        assert self.consumer
        return {
            tp: await self.consumer.position(tp)
            for tp in self._assignment
        }

    def _highwaters(self):
        assert self.consumer
        return {tp: self.consumer.highwater(tp) for tp in self._assignment}
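A sketch of how a concrete builder might specialize this class; the subclass, topic name, and counting logic are illustrative:

import asyncio

class CountTableBuilder(BaseKafkaTableBuilder):
    async def _apply(self, message):
        # Count occurrences per key and remember which partition served it
        key = self.get_key(message)
        self.table[key] += 1
        self.key_tps[key].add(message.partition)

async def build_counts(loop):
    builder = CountTableBuilder("raw-events", loop)
    await builder.build()
    return builder.table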
Code Example #11
File: __init__.py Project: onna/guillotina_kafka
 async def init(self):
     if self._consumer is None:
         self._consumer = AIOKafkaConsumer(**self.config)
         if isinstance(self.topics, str):
             self._consumer.subscribe(pattern=self.topics)
         if isinstance(self.topics, (list, set, tuple)):
             self._consumer.subscribe(topics=self.topics)
         await self._consumer.start()
     return self._consumer
Code Example #12
 async def _create_consumer(self):
     self.consumer = AIOKafkaConsumer(
         self.topic,
         loop=asyncio.get_event_loop(),
         bootstrap_servers=self.bootstrap_servers,
         group_id=self.group,
         enable_auto_commit=False,
     )
Code Example #13
File: msgkafka.py Project: TCSOSM-20/common
    async def aioread(self,
                      topic,
                      loop=None,
                      callback=None,
                      aiocallback=None,
                      group_id=None,
                      from_beginning=None,
                      **kwargs):
        """
        Asyncio read from one or several topics.
        :param topic: can be str: single topic; or str list: several topics
        :param loop: asyncio loop. Deprecated; in the near future the loop must be provided inside the config at connect
        :param callback: synchronous callback function that will handle the message in kafka bus
        :param aiocallback: async callback function that will handle the message in kafka bus
        :param group_id: kafka group_id to use. Can be False (force group_id to None), None (use the general
                         group_id provided in the config at connect time), or a group_id string
        :param from_beginning: if True, messages are obtained from the beginning instead of only new ones.
                               If a group_id is supplied, only messages not yet processed by another worker
                               in the group are obtained. If group_id is None, all messages stored in kafka
                               are obtained.
        :param kwargs: optional keyword arguments for callback function
        :return: If no callback defined, it returns (topic, key, message)
        """

        if not loop:
            loop = self.loop
        if group_id is False:
            group_id = None
        elif group_id is None:
            group_id = self.group_id
        try:
            if isinstance(topic, (list, tuple)):
                topic_list = topic
            else:
                topic_list = (topic, )
            self.consumer = AIOKafkaConsumer(
                loop=loop,
                bootstrap_servers=self.broker,
                group_id=group_id,
                auto_offset_reset="earliest" if from_beginning else "latest")
            await self.consumer.start()
            self.consumer.subscribe(topic_list)

            async for message in self.consumer:
                if callback:
                    callback(message.topic, yaml.safe_load(message.key),
                             yaml.safe_load(message.value), **kwargs)
                elif aiocallback:
                    await aiocallback(message.topic,
                                      yaml.safe_load(message.key),
                                      yaml.safe_load(message.value), **kwargs)
                else:
                    return message.topic, yaml.safe_load(
                        message.key), yaml.safe_load(message.value)
        except KafkaError as e:
            raise MsgException(str(e))
        finally:
            await self.consumer.stop()
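A usage sketch for this newer signature with an async callback, assuming a connected instance named msg_bus; all names are illustrative:

async def on_event(topic, key, value, **kwargs):
    # Awaited for every message received on the subscribed topics
    print(topic, key, value)

async def run():
    await msg_bus.aioread(
        ["ns", "nsi"],            # topic list is illustrative
        aiocallback=on_event,
        group_id=False,           # False forces group_id=None (no consumer group)
        from_beginning=True,      # read from the earliest retained offset
    )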
Code Example #14
 async def _init_consumer(self):
     if not self.consumer:
         self.consumer = AIOKafkaConsumer(
             self.topic,
             loop=self.loop,
             bootstrap_servers=bootstrap_servers,
             auto_offset_reset='earliest',
         )
         await self.consumer.start()
         self._assignment = self.consumer.assignment()
Code Example #15
async def kafka_consumer(
    kafka_servers: str,
    kafka_topic: str,
    queue: asyncio.Queue[Response],
    *,
    deserializer: Optional[Callable] = None,
    kafka_ssl_cafile: Optional[str] = None,
    kafka_ssl_certfile: Optional[str] = None,
    kafka_ssl_keyfile: Optional[str] = None,
) -> None:
    """
    kafka_consumer reads data from kafka and sends it to a queue
    """
    loop = asyncio.get_event_loop()
    kafka_kwargs = {
        "loop": loop,
        "bootstrap_servers": kafka_servers,
        "client_id": "client-storage",
        "group_id": "my-group",
        "enable_auto_commit": True,
        "auto_commit_interval_ms": 1000,  # Autocommit every second
        "auto_offset_reset": "earliest",  # start from beginning
        "value_deserializer": deserializer,
    }
    if not kafka_ssl_cafile:
        consumer = AIOKafkaConsumer(kafka_topic, **kafka_kwargs)
    else:
        context = create_ssl_context(
            cafile=kafka_ssl_cafile,
            certfile=kafka_ssl_certfile,
            keyfile=kafka_ssl_keyfile,
        )
        consumer = AIOKafkaConsumer(
            kafka_topic,
            security_protocol="SSL",
            ssl_context=context,
            **kafka_kwargs,
        )
    await consumer.start()
    try:
        # Consume messages
        async for msg in consumer:
            if msg.value is not None:
                logger.debug(f"Message received: {msg.value} at {msg.timestamp}")
                try:
                    asyncio.get_event_loop().call_soon_threadsafe(
                        queue.put_nowait, msg.value
                    )
                except asyncio.QueueFull as err:
                    logger.error("queue is full, cannot send a response: %s", err)
    finally:
        # Will leave consumer group; perform autocommit if enabled.
        await consumer.stop()
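A launch sketch, assuming JSON-encoded values and no TLS; the server, topic, and queue size are placeholders:

import asyncio
import json

async def main():
    queue: asyncio.Queue = asyncio.Queue(maxsize=1000)
    task = asyncio.ensure_future(kafka_consumer(
        "localhost:9092",
        "responses",
        queue,
        deserializer=lambda raw: json.loads(raw.decode("utf-8")),
    ))
    print(await queue.get())  # block until the consumer delivers a message
    task.cancel()

asyncio.run(main())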
Code Example #16
async def consume():
    consumer = AIOKafkaConsumer(
        loop=loop, bootstrap_servers='localhost:9092',
        metadata_max_age_ms=5000, group_id="test2")
    consumer.subscribe(pattern="test*")
    # Get cluster layout and topic/partition allocation
    await consumer.start()
    try:
        async for msg in consumer:
            print(msg.value)
    finally:
        await consumer.stop()
Code Example #17
    def __init__(
        self,
        settings: object,
        app: BaseApp,
        serializer: BaseEventSerializer,
        event_topics: List[str],
        event_group: str,
        position: str,
    ) -> None:
        if not hasattr(settings, 'KAFKA_BOOTSTRAP_SERVER'):
            raise Exception('Missing KAFKA_BOOTSTRAP_SERVER config')

        self.max_retries = 10
        if hasattr(settings, 'EVENTY_CONSUMER_MAX_RETRIES'):
            self.max_retries = settings.EVENTY_CONSUMER_MAX_RETRIES

        self.retry_interval = 1000
        if hasattr(settings, 'EVENTY_CONSUMER_RETRY_INTERVAL'):
            self.retry_interval = settings.EVENTY_CONSUMER_RETRY_INTERVAL

        self.retry_backoff_coeff = 2
        if hasattr(settings, 'EVENTY_CONSUMER_RETRY_BACKOFF_COEFF'):
            self.retry_backoff_coeff = settings.EVENTY_CONSUMER_RETRY_BACKOFF_COEFF

        self.app = app
        self.event_topics = event_topics
        self.event_group = event_group
        self.position = position
        self.consumer = None
        self.current_position_checkpoint_callback = None
        self.end_position_checkpoint_callback = None
        bootstrap_servers = settings.KAFKA_BOOTSTRAP_SERVER

        consumer_args: Dict[str, Any]
        consumer_args = {
            'loop': asyncio.get_event_loop(),
            'bootstrap_servers': [bootstrap_servers],
            'enable_auto_commit': False,
            'group_id': self.event_group,
            'value_deserializer': serializer.decode,
            'auto_offset_reset': self.position
        }

        try:
            self.consumer = AIOKafkaConsumer(*self.event_topics,
                                             **consumer_args)

        except Exception as e:
            logger.error(
                f"Unable to connect to the Kafka broker {bootstrap_servers} : {e}"
            )
            raise e
Code Example #18
async def consume():
    consumer = AIOKafkaConsumer(loop=loop,
                                bootstrap_servers='localhost:9092',
                                metadata_max_age_ms=5000,
                                group_id="test2")
    consumer.subscribe(pattern="test*")
    # Get cluster layout and topic/partition allocation
    await consumer.start()
    try:
        async for msg in consumer:
            print(msg.value)
    finally:
        await consumer.stop()
Code Example #19
 async def aioread(self, topic, loop=None):
     if not loop:
         loop = self.loop
     self.consumer = AIOKafkaConsumer(loop=loop,
                                      bootstrap_servers=self.broker)
     await self.consumer.start()
     self.consumer.subscribe([topic])
     try:
         async for message in self.consumer:
             # safe_load avoids executing arbitrary YAML tags
             return yaml.safe_load(message.key), yaml.safe_load(message.value)
     except KafkaError as e:
         raise MsgException(str(e))
     finally:
         await self.consumer.stop()
Code Example #20
async def _consumer_benchmark(brokers, topic, num_messages, msg_size, num_runs):
    loop = asyncio.get_event_loop()
    consumer = AIOKafkaConsumer(
        topic, group_id=str(uuid.uuid1()),
        auto_offset_reset="earliest",
        enable_auto_commit=False,
        loop=loop
    )

    await consumer.start()

    print("Starting benchmark for AIOKafka Consumer.")
    run_times = []

    try:
        for _ in range(num_runs):
            run_start_time = time.time()
            await _consume(consumer, num_messages)
            run_time_taken = time.time() - run_start_time
            run_times.append(run_time_taken)
    except asyncio.CancelledError:
        pass
    finally:
        await consumer.stop()

    utils.print_results(
        "AIOKafka Consumer", run_times, num_messages, msg_size
    )
Code Example #21
async def consume_urls(app):
    urls_consumer = AIOKafkaConsumer(
        'screenshots',
        loop=app.get('loop'),
        bootstrap_servers=BOOTSTRAP_SERVERS,  #["10.199.13.36:9091", "10.199.13.37:9092", "10.199.13.38:9093"],
        auto_offset_reset='earliest',
        value_deserializer=lambda m: json.loads(m.decode('utf8')),
        consumer_timeout_ms=60000*60,
        group_id='screenshots_recever_1'
    )
    await urls_consumer.start()
    try:
        async for message in urls_consumer:
            real_time = '{0:%Y-%m-%d %H:%M:%S}'.format(
                datetime.datetime.fromtimestamp((message.timestamp // 1000) - 10)
            )
            task_id = message.value.get('task_id', '')
            url = unquote(message.value.get('url', ''))
            screen_id = message.value.get('screen_id', 'empty_id')
            await app['to_process'].put({
                'value': message.value,
                'task_id': task_id,
                'real_time': real_time,
                'url': url,
                'screen_id': screen_id,
                'timestamp': message.timestamp
            })
    except Exception:
        app['logger'].exception('urls_consumer exception')
    finally:
        app['logger'].debug('urls_consumer stopped')
        await urls_consumer.stop()
Code Example #22
async def main():
    data_array_size = 10
    # Create both clients outside the try block so the finally clause never
    # references unbound names if construction itself fails
    consumer = AIOKafkaConsumer(
        'noaa-json-us-az',
        loop=loop,
        bootstrap_servers='localhost:9092',
        group_id="alert-group-v4",
        auto_offset_reset="earliest"  # earliest or latest
    )
    producer = AIOKafkaProducer(loop=loop,
                                bootstrap_servers='localhost:9092')
    try:
        # Get cluster layout and join group
        await consumer.start()

        # Get cluster layout and initial topic/partition leadership information
        await producer.start()

        while True:
            data_array = await consume(consumer, data_array_size)
            await produce(producer, process(data_array))
            # sleep(0.2)
    finally:
        await consumer.stop()
        await producer.stop()
Code Example #23
File: consumer.py Project: yoophi/kafka-python-poc
async def consume():
    consumer = AIOKafkaConsumer(
        'my_topic',
        loop=loop,
        bootstrap_servers='localhost:9092',
        group_id="my-group")
    # Get cluster layout and join group `my-group`
    await consumer.start()
    try:
        # Consume messages
        msg = await consumer.getone()
        logger.info(msg)
        logger.info(f'msg.offset = {msg.offset}')  # Unique msg autoincrement ID in this topic-partition.
        logger.info(f'msg.value = {msg.value}')

        tp = TopicPartition(msg.topic, msg.partition)

        position = await consumer.position(tp)
        # Position is the next fetched offset
        assert position == msg.offset + 1

        committed = await consumer.committed(tp)
        logger.info(f'committed = {committed}')
        # print(committed)

    finally:
        # Will leave consumer group; perform autocommit if enabled.
        await consumer.stop()
Code Example #24
async def consume():
    logger.info(
        f"Creating MongoDB connection: "
        f"DB {settings.MONGO_DB_NAME} | COLLECTION {settings.MONGO_COLLECTION}"
    )
    db_client = motor.motor_asyncio.AsyncIOMotorClient(settings.MONGO_DB_CONN)
    db = db_client[settings.MONGO_DB_NAME]
    collection = db[settings.MONGO_COLLECTION]

    logger.info("Creating consumer...")
    consumer = AIOKafkaConsumer(settings.KAFKA_TOPIC,
                                bootstrap_servers=settings.KAFKA_CONN,
                                group_id="my-group")
    logger.info(f"Get cluster layout and join group `my-group`...")
    await consumer.start()
    logger.info(
        f"Started consuming for {settings.KAFKA_CONN} [{settings.KAFKA_TOPIC}]..."
    )
    try:
        # Consume messages
        async for msg in consumer:
            logger.info(f"consumed:\n"
                        f" topic: {msg.topic}\n"
                        f" partition: {msg.partition}\n"
                        f" offset: {msg.offset}\n"
                        f" key: {msg.key}\n"
                        f" value: {msg.value}\n"
                        f" timestamp: {msg.timestamp}\n"
                        f" headers: {msg.headers}\n")
            payload = json.loads(msg.value.decode("utf-8"))
            await update_playlist(collection, playlist_id=payload["id"])

    finally:
        # Will leave consumer group; perform autocommit if enabled.
        await consumer.stop()
Code Example #25
    async def bench_simple(self):
        topic = self._topic
        loop = asyncio.get_event_loop()

        consumer = AIOKafkaConsumer(topic,
                                    group_id="test_group",
                                    auto_offset_reset="earliest",
                                    enable_auto_commit=False,
                                    bootstrap_servers=self._bootstrap_servers,
                                    loop=loop)
        await consumer.start()

        # We start from after producer connect
        reporter_task = loop.create_task(self._stats_report(loop.time()))
        try:
            total_msgs = 0
            while True:
                msg_set = await consumer.getmany(timeout_ms=1000)
                if not msg_set:
                    break
                for msgs in msg_set.values():
                    len_msgs = len(msgs)
                    self._stats[-1]['count'] += len_msgs
                    total_msgs += len_msgs
                if total_msgs > self._num:
                    break
        except asyncio.CancelledError:
            pass
        finally:
            await consumer.stop()
            reporter_task.cancel()
            await reporter_task
Code Example #26
async def main():
    await init()
    
    where_clause = '''
        ?submeter  rdf:type/rdfs:subClassOf* brick:Meter .
        ?mainmeter rdf:type/rdfs:subClassOf* brick:Meter .
        
        ?mainmeter brick:feedsElectricity ?submeter .
        ?mainmeter rdf:label ?label .
    '''
    result = await query('?label', where_clause)
    print(json.dumps(result, sort_keys=True, indent=4, separators=(',', ': ')))
    
    print('Found %d matches' % len(result['results']))
    topic = result['results'][0][0]
    print('Using "%s"' % topic)
    
    consumer = AIOKafkaConsumer(topic, loop=loop, bootstrap_servers=broker)
    await consumer.start()
    
    async for msg in consumer:
        value = json.loads(msg.value.decode('utf-8'))
        print('%d <- %s' % (value, topic))
    
    await consumer.stop()
    await finalize()
Code Example #27
    async def consume(self):
        consumer = AIOKafkaConsumer(
            'start-analysis',
            loop=self.loop,
            bootstrap_servers=os.environ.get(
                'brokerurl'),  #dev 3.22.100.122:9092 #prod 13.59.52.241:9092
            group_id="1")
        # Get cluster layout and join group `my-group`
        await consumer.start()
        try:
            # Consume messages
            async for msg in consumer:
                data = json.loads(msg.value)
                print(data)
                #this is logic for clear analysis - should be moved elsewhere
                if data['messageId'] == 'clear_analysis':
                    print(data)
                    self.deltable.generate_names(
                        data['params']['cameraId'],
                        data['params']['iotCoreTopic'])
                #message id to drop all these tables
                elif data['messageId'] == 'delete_table':
                    self.drop.generate_tables(data['params']['cameraId'],
                                              data['params']['iotCoreTopic'])
                else:
                    # with open("sample.json", "w") as outfile:
                    # 	json.dump(data, outfile)

                    self.q.put(msg.value)
                    self.db_q.put(msg.value)

        finally:
            #Will leave consumer group; perform autocommit if enabled.
            await consumer.stop()
Code Example #28
File: kafka.py Project: timkpaine/tributary
        async def _listen(
            servers=servers,
            group=group,
            topics=topics,
            json=json,
            wrap=wrap,
            interval=interval,
        ):
            if self._consumer is None:

                self._consumer = AIOKafkaConsumer(*topics,
                                                  bootstrap_servers=servers,
                                                  group_id=group,
                                                  **consumer_kwargs)

                # Get cluster layout and join group `my-group`
                await self._consumer.start()

            async for msg in self._consumer:
                # Consume messages
                # msg.topic, msg.partition, msg.offset, msg.key, msg.value, msg.timestamp

                if json:
                    msg.value = JSON.loads(msg.value)
                if wrap:
                    msg.value = [msg.value]
                yield msg

            # Will leave consumer group; perform autocommit if enabled.
            await self._consumer.stop()
Code Example #29
File: kafka_listener.py Project: abaiken/koku
def asyncio_sources_thread(event_loop):  # pragma: no cover
    """
    Configure Sources listener thread function to run the asyncio event loop.

    Args:
        event_loop: Asyncio event loop.

    Returns:
        None

    """
    try:
        cost_management_type_id = SourcesHTTPClient(Config.SOURCES_FAKE_HEADER).\
            get_cost_management_application_type_id()

        load_process_queue()
        while True:
            consumer = AIOKafkaConsumer(
                Config.SOURCES_TOPIC,
                loop=event_loop, bootstrap_servers=Config.SOURCES_KAFKA_ADDRESS, group_id='hccm-sources'
            )
            event_loop.create_task(listen_for_messages(consumer, cost_management_type_id, PENDING_PROCESS_QUEUE))
            event_loop.create_task(process_messages(PENDING_PROCESS_QUEUE))
            event_loop.create_task(synchronize_sources(PROCESS_QUEUE, cost_management_type_id))
            event_loop.run_forever()
    except SourcesIntegrationError as error:
        err_msg = f'Kafka Connection Failure: {str(error)}. Reconnecting...'
        LOG.error(err_msg)
        time.sleep(Config.RETRY_SECONDS)
    except SourcesHTTPClientError as error:
        LOG.error(f'Unable to connect to Sources REST API.  Check configuration and restart server... Error: {error}')
        exit(0)
    except KeyboardInterrupt:
        exit(0)
Code Example #30
async def fetch(to, _from, value, timeout_ms=600 * 1000, loop=loop):
    id = str(uuid.uuid4())

    consumer = AIOKafkaConsumer(_from, loop=loop, bootstrap_servers=kafka_host)

    await consumer.start()
    await asyncio.sleep(0.5)

    await push(to, value, id)

    try:
        end_time = time.time() + timeout_ms / 1000

        while time.time() <= end_time:
            result = await consumer.getmany(timeout_ms=timeout_ms)
            for messages in result.values():
                for msg in messages:
                    key = decode_key(msg.key)
                    if key == id:
                        # the finally clause below stops the consumer
                        return msgpack.unpackb(msg.value, raw=False)
    finally:
        await consumer.stop()

    raise KafkaTransportError("Fetch timeout")
Code Example #31
    async def listen_for_order_book_snapshots(self,
                                              ev_loop: asyncio.BaseEventLoop,
                                              output: asyncio.Queue):
        """
        Listens to real-time order book snapshot messages from Radar Relay.
        """
        while True:
            try:
                consumer: AIOKafkaConsumer = AIOKafkaConsumer(
                    self.SNAPSHOT_TOPIC_NAME,
                    loop=ev_loop,
                    bootstrap_servers=conf.kafka_2["bootstrap_servers"])
                await consumer.start()
                partition: TopicPartition = list(consumer.assignment())[0]
                await consumer.seek_to_end(partition)

                while True:
                    response: Dict[
                        TopicPartition,
                        List[ConsumerRecord]] = await consumer.getmany(
                            partition, timeout_ms=1000)
                    if partition in response:
                        for record in response[partition]:
                            output.put_nowait(
                                self.order_book_class.
                                snapshot_message_from_kafka(record))
            except asyncio.CancelledError:
                raise
            except Exception:
                self.logger().error("Unknown error. Retrying after 5 seconds.",
                                    exc_info=True)
                await asyncio.sleep(5.0)
Code Example #32
File: server.py Project: petrjanda/pysk
def start_server(conf, handler):
    async def process(consumer):
        while True:
            msg = None
            try:
                msg = await consumer.getone()
                value = json.loads(msg.value.decode('utf-8'))
                resp = await handler(value['body']) 

                payload = dict(
                  id=value['id'],
                  body=resp
                )

                await producer.send(value['respond_to'], str.encode(json.dumps(payload)))
            except Exception as err:
                # msg stays None when getone() itself raised
                details = (msg.key, msg.value, msg.offset) if msg else ()
                print("Error processing:", *details, err)

    loop = asyncio.get_event_loop()

    producer = AIOKafkaProducer(
        loop=loop, 
        bootstrap_servers=conf['kafka_url']
    )

    loop.run_until_complete(producer.start())

    consumer = AIOKafkaConsumer(
        'input', 
        loop=loop, 
        bootstrap_servers=conf['kafka_url']
    )

    loop.run_until_complete(consumer.start())

    c_task = loop.create_task(process(consumer))

    try:
        loop.run_forever()
    finally:
        loop.run_until_complete(producer.stop())
        loop.run_until_complete(consumer.stop())
        c_task.cancel()
        loop.close()
Code Example #33
async def consume(loop):
    consumer = AIOKafkaConsumer(
        loop=loop, bootstrap_servers='localhost:9092',
        group_id="my_group",           # Consumer must be in a group to commit
        enable_auto_commit=False,      # Will disable autocommit
        auto_offset_reset="none",
        key_deserializer=lambda key: key.decode("utf-8") if key else "",
    )
    await consumer.start()

    local_state = LocalState()
    listener = RebalanceListener(consumer, local_state)
    consumer.subscribe(topics=["test"], listener=listener)

    save_task = loop.create_task(save_state_every_second(local_state))

    try:

        while True:
            try:
                msg_set = await consumer.getmany(timeout_ms=1000)
            except OffsetOutOfRangeError as err:
                # This means that saved file is outdated and should be
                # discarded
                tps = err.args[0].keys()
                local_state.discard_state(tps)
                await consumer.seek_to_beginning(*tps)
                continue

            for tp, msgs in msg_set.items():
                counts = Counter()
                for msg in msgs:
                    print("Process", tp, msg.key)
                    counts[msg.key] += 1
                local_state.add_counts(tp, counts, msg.offset)

    finally:
        await consumer.stop()
        save_task.cancel()
        await save_task
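RebalanceListener and LocalState are not shown in this snippet; a minimal sketch of the listener half, using aiokafka's ConsumerRebalanceListener interface. The LocalState methods called here (dump_local_state, load_local_state, get_last_offset) are assumptions about the missing class:

from aiokafka import ConsumerRebalanceListener

class RebalanceListener(ConsumerRebalanceListener):
    def __init__(self, consumer, local_state):
        self.consumer = consumer
        self.local_state = local_state

    async def on_partitions_revoked(self, revoked):
        # Persist counts before the partitions are taken away
        self.local_state.dump_local_state()

    async def on_partitions_assigned(self, assigned):
        # Reload state and resume from the last processed offset
        self.local_state.load_local_state(assigned)
        for tp in assigned:
            last_offset = self.local_state.get_last_offset(tp)
            if last_offset < 0:
                await self.consumer.seek_to_beginning(tp)
            else:
                self.consumer.seek(tp, last_offset + 1)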
Code Example #34
async def produce_and_consume(loop):
    # Produce
    producer = AIOKafkaProducer(
        loop=loop, bootstrap_servers='localhost:9093',
        security_protocol="SSL", ssl_context=context)

    await producer.start()
    try:
        msg = await producer.send_and_wait(
            'my_topic', b"Super Message", partition=0)
    finally:
        await producer.stop()

    consumer = AIOKafkaConsumer(
        "my_topic", loop=loop, bootstrap_servers='localhost:9093',
        security_protocol="SSL", ssl_context=context)
    await consumer.start()
    try:
        consumer.seek(TopicPartition('my_topic', 0), msg.offset)
        fetch_msg = await consumer.getone()
    finally:
        await consumer.stop()

    print("Success", msg, fetch_msg)
Code Example #35
File: kafka.py Project: nautilus/nautilus
    def __init__(self):
        # a dictionary to keep the question/answer correlation ids
        self._request_handlers = {}
        self._pending_outbound = {}
        # if there is no loop assigned
        if not self.loop:
            # use the current one
            self.loop = asyncio.get_event_loop()

        # a placeholder for the event consumer task
        self._consumer_task = None

        # create a consumer instance
        self._consumer = AIOKafkaConsumer(
            self.consumer_channel,
            loop=self.loop,
            bootstrap_servers=self.server,
            auto_offset_reset=self.initial_offset
        )
        self._producer = AIOKafkaProducer(loop=self.loop, bootstrap_servers=self.server)
Code Example #36
File: client.py Project: petrjanda/pysk
def start_server(conf):
    loop = asyncio.get_event_loop()
    responses = {}

    async def consume_task(consumer):
        while True:
            try:
                msg = await consumer.getone()
                data = json.loads(msg.value.decode('utf-8'))
                f = responses[data['id']]
                f.set_result(data['body'])

            except Exception as err:
                print("error while consuming message: ", err)

    async def handle(request):
        body = await request.json()
        id = str(uuid.uuid4())

        payload = dict(
          id=id,
          body=body,
          respond_to=conf['kafka_output_topic']
        )

        f = asyncio.Future()

        responses[id] = f

        req = await produce(conf['kafka_input_topic'], str.encode(json.dumps(payload)))
        resp = await f

        # resp = "ok"

        return Response(body=str(resp).encode('utf-8'))

    async def init(loop):
        app = Application(loop=loop)
        app.router.add_route('POST', '/query', handle)

        handler = app.make_handler()
        srv = await loop.create_server(handler, conf['http_hostname'], conf['http_port'])
        return srv, handler

    async def produce(topic, msg):
        return await(await producer.send(topic, msg))

    producer = AIOKafkaProducer(loop=loop, bootstrap_servers=os.environ.get('KAFKA_URL'))
    consumer = AIOKafkaConsumer('output', loop=loop, bootstrap_servers=os.environ.get('KAFKA_URL'))

    loop.run_until_complete(consumer.start())
    loop.run_until_complete(producer.start())
    srv, handler = loop.run_until_complete(init(loop))

    c_task = loop.create_task(consume_task(consumer))

    try:
        loop.run_forever()
    except KeyboardInterrupt:
        loop.run_until_complete(handler.finish_connections())
        loop.run_until_complete(producer.stop())
        loop.run_until_complete(consumer.stop())

        c_task.cancel()
        loop.close()
Code Example #37
File: kafka.py Project: nautilus/nautilus
class KafkaBroker:
    """
        This class handles two way communication with the kafka
        server. Also allows for a question/answer interface served
        over the kafka stream.

        Args:

            server (str): The location of the kafka stream.

            consumer_channel (optional, str): The channel to listen for events
                on.

            consumer_pattern (optional, regex): A regex pattern to match against
                the action types. The action handler is called for every matching
                event. If none is provided, the action handler is called for every
                action.

            producer_channel (optional, str): The default channel to use when
                producing events.

            initial_offset (optional, one of 'latest' or 'earliest'): Where to
                start on the event stream when run.

            loop (optional, asyncio.EventLoop): The event loop that the broker should
                run on.


        Example:

            .. code-block:: python

                from .kafka import KafkaBroker


                class ActionHandler(KafkaBroker):

                    consumer_channel = 'myEvents'
                    server = 'localhost:9092'

                    async def handle_message(self, action_type, payload, **kwds):
                        print("recieved action with type: {}".format(action_type))
                        print("and payload: {}".format(payload))

    """
    loop = None
    server = None
    consumer_channel = None
    producer_channel = None
    initial_offset = 'latest'
    consumer_pattern = None


    def __init__(self):
        # a dictionary to keep the question/answer correlation ids
        self._request_handlers = {}
        self._pending_outbound = {}
        # if there is no loop assigned
        if not self.loop:
            # use the current one
            self.loop = asyncio.get_event_loop()

        # a placeholder for the event consumer task
        self._consumer_task = None

        # create a consumer instance
        self._consumer = AIOKafkaConsumer(
            self.consumer_channel,
            loop=self.loop,
            bootstrap_servers=self.server,
            auto_offset_reset=self.initial_offset
        )
        self._producer = AIOKafkaProducer(loop=self.loop, bootstrap_servers=self.server)


    def start(self):
        """
            This function starts the broker's interaction with the kafka stream
        """
        self.loop.run_until_complete(self._consumer.start())
        self.loop.run_until_complete(self._producer.start())
        self._consumer_task = self.loop.create_task(self._consume_event_callback())


    def stop(self):
        """
            This method stops the broker's interaction with the kafka stream
        """
        self.loop.run_until_complete(self._consumer.stop())
        self.loop.run_until_complete(self._producer.stop())

        # attempt
        try:
            # to cancel the service
            self._consumer_task.cancel()
        # if there was no service
        except AttributeError:
            # keep going
            pass


    async def send(self, payload='', action_type='', channel=None, **kwds):
        """
            This method sends a message over the kafka stream.
        """
        # use a custom channel if one was provided
        channel = channel or self.producer_channel

        # serialize the action type for the
        message = serialize_action(action_type=action_type, payload=payload, **kwds)
        # send the message
        return await self._producer.send(channel, message.encode())


    async def ask(self, action_type, **kwds):
        # create a correlation id for the question
        correlation_id = uuid.uuid4()
        # make sure its unique
        while correlation_id in self._request_handlers:
            # create a new correlation id
            correlation_id = uuid.uuid4()
        # use the integer form of the uuid
        correlation_id = correlation_id.int

        # create a future to wait on before we ask the question
        question_future = asyncio.Future()
        # register the future's callback with the request handler
        self._request_handlers[correlation_id] = question_future.set_result
        # add the entry to the outbound dictionary
        self._pending_outbound[correlation_id] = action_type

        # publish the question
        await self.send(
            correlation_id=correlation_id,
            action_type=action_type,
            **kwds
        )

        # return the response
        return await question_future


    ## internal implementations


    async def handle_message(self, props, action_type=None, payload=None, **kwds):
        raise NotImplementedError()


    async def _consume_event_callback(self):
        # continuously loop
        while True:

            # grab the next message
            msg = await self._consumer.getone()
            # parse the message as json
            message = hydrate_action(msg.value.decode())
            # the correlation_id associated with this message
            correlation_id = message.get('correlation_id')
            # the action type of the message
            action_type = message['action_type']
            # if there is a consumer pattern
            if self.consumer_pattern:
                # if the action_type does not satisfy the pattern
                if not re.match(self.consumer_pattern, message['action_type']):
                    # don't do anything
                    continue

            # if we know how to respond to this message
            if correlation_id and correlation_id in self._request_handlers \
                and action_type != self._pending_outbound[correlation_id]:

                # pass the message to the handler
                self._request_handlers[correlation_id](message['payload'])
                # remove the entry in the handler dict
                del self._request_handlers[correlation_id]
                del self._pending_outbound[correlation_id]

            # otherwise there was no correlation id, pass it along to the general handlers
            else:
                # build the dictionary of message properties
                message_props = {
                    'correlation_id': correlation_id
                }

                # pass it to the handler
                await self.handle_message(
                    props=message_props,
                    **message
                )
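A run sketch assembled from the docstring example above; the channel and server values are illustrative:

class ActionHandler(KafkaBroker):
    server = 'localhost:9092'
    consumer_channel = 'myEvents'
    producer_channel = 'myEvents'

    async def handle_message(self, props, action_type=None, payload=None, **kwds):
        print("received action with type: {}".format(action_type))
        print("and payload: {}".format(payload))

broker = ActionHandler()
broker.start()   # starts consumer, producer, and the consume task on broker.loop
try:
    broker.loop.run_forever()
finally:
    broker.stop()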