Example 1
    def make_producer(cls, loop, broker_addr, sasl_opts=None):
        """
        Create an AIOKafkaProducer instance.
        :param loop: asyncio event loop the producer will run on
        :param broker_addr: Kafka bootstrap server address
        :param sasl_opts: optional dict with 'username' and 'password' keys;
            when given, the producer connects over SASL_SSL
        :return: an unstarted AIOKafkaProducer
        """
        logger.debug('Creating instance of producer')
        if not sasl_opts:
            producer = AIOKafkaProducer(loop=loop,
                                        bootstrap_servers=broker_addr,
                                        compression_type='snappy')
        else:
            producer = AIOKafkaProducer(
                loop=loop,
                bootstrap_servers=broker_addr,
                compression_type='snappy',
                sasl_mechanism='PLAIN',
                sasl_plain_username=sasl_opts['username'],
                sasl_plain_password=sasl_opts['password'],
                security_protocol='SASL_SSL',
                ssl_context=ssl.SSLContext(ssl.PROTOCOL_TLSv1_2))

        logger.info('Producer created for kafka on {}'.format(broker_addr))
        return producer
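A minimal usage sketch for the factory above (the owning class name `KafkaClient` is an assumption; the snippet shows only the classmethod):

import asyncio

async def run():
    loop = asyncio.get_running_loop()
    # Hypothetical owner class; the 'snappy' compression set in the factory
    # requires the python-snappy package to be installed.
    producer = KafkaClient.make_producer(loop, 'localhost:9092')
    await producer.start()  # actually connect to the broker
    try:
        await producer.send_and_wait('my_topic', b'hello')
    finally:
        await producer.stop()  # flush pending messages and disconnect

asyncio.run(run())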
Example 2
def create_producer(settings: EnvSettings, **kwargs) -> AIOKafkaProducer:
    if all((settings.kafka_keyfile, settings.kafka_cafile,
            settings.kafka_certfile)):
        return AIOKafkaProducer(
            bootstrap_servers=settings.kafka_bootstrap_servers,
            security_protocol='SSL',
            ssl_context=ssl_context(settings),
            **kwargs)

    return AIOKafkaProducer(bootstrap_servers=settings.kafka_bootstrap_servers,
                            **kwargs)
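The `ssl_context` helper is not shown; a plausible implementation, assuming aiokafka's bundled helper and the same EnvSettings fields:

from aiokafka.helpers import create_ssl_context

def ssl_context(settings: EnvSettings):
    # Build a client-side TLS context from the PEM files in the settings.
    return create_ssl_context(cafile=settings.kafka_cafile,
                              certfile=settings.kafka_certfile,
                              keyfile=settings.kafka_keyfile)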
Example 3
 def __init__(
     self,
     hass,
     ip_address,
     port,
     topic,
     entities_filter,
     security_protocol,
     username,
     password,
 ):
     """Initialize."""
     self._encoder = DateTimeJSONEncoder()
     self._entities_filter = entities_filter
     self._hass = hass
     ssl_context = ssl_util.client_context()
     self._producer = AIOKafkaProducer(
         loop=hass.loop,
         bootstrap_servers=f"{ip_address}:{port}",
         compression_type="gzip",
         security_protocol=security_protocol,
         ssl_context=ssl_context,
         sasl_mechanism="PLAIN",
         sasl_plain_username=username,
         sasl_plain_password=password,
     )
     self._topic = topic
Example 4
async def _producer_benchmark(brokers, topic, num_messages, msg_size, num_runs):
    payload = bytearray(b"m" * msg_size)
    producer_config = dict(
        bootstrap_servers=brokers,
    )

    loop = asyncio.get_event_loop()
    producer = AIOKafkaProducer(loop=loop, **producer_config)
    await producer.start()

    print("Starting benchmark for AIOKafka Producer.")
    run_times = []

    try:
        for _ in range(num_runs):
            run_start_time = time.time()
            await _produce(producer, topic, payload, num_messages)
            run_time_taken = time.time() - run_start_time
            run_times.append(run_time_taken)
    except asyncio.CancelledError:
        pass
    finally:
        await producer.stop()

    utils.print_results(
        "AIOKafka Producer", run_times, num_messages, msg_size
    )
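The `_produce` helper is not shown; a plausible sketch consistent with the call above, which queues every message and then waits for all delivery futures:

import asyncio

async def _produce(producer, topic, payload, num_messages):
    # producer.send() returns a delivery future once the message is queued.
    futures = [await producer.send(topic, payload) for _ in range(num_messages)]
    await asyncio.gather(*futures)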
Example 5
async def init_producer(app):
    """Initialize and cleanup the aiokafka Producer instance

    Notes
    -----
    Use this function as a cleanup context, see
    https://aiohttp.readthedocs.io/en/stable/web_reference.html#aiohttp.web.Application.cleanup_ctx

    To access the producer:

    .. code-block:: python

       producer = app['templatebot/producer']
    """
    # Startup phase
    logger = structlog.get_logger(app["root"]["api.lsst.codes/loggerName"])
    logger.info("Starting Kafka producer")
    loop = asyncio.get_running_loop()
    producer = AIOKafkaProducer(
        loop=loop,
        bootstrap_servers=app["root"]["templatebot/brokerUrl"],
        ssl_context=app["root"]["templatebot/kafkaSslContext"],
        security_protocol=app["root"]["templatebot/kafkaProtocol"],
    )
    await producer.start()
    app["templatebot/producer"] = producer
    logger.info("Finished starting Kafka producer")

    yield

    # Cleanup phase
    logger.info("Shutting down Kafka producer")
    await producer.stop()
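A registration sketch for this cleanup context on an aiohttp application (the code before `yield` runs at startup, the code after it at shutdown; the snippet's app["root"] configuration keys are assumed to be populated elsewhere):

from aiohttp import web

app = web.Application()
app.cleanup_ctx.append(init_producer)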
Example 6
 def __init__(self, topic, bootstrap_servers=BOOTSTRAP_SERVERS, **kwargs):
     self.loop = asyncio.get_event_loop()
     producer = AIOKafkaProducer(loop=self.loop,
                                 bootstrap_servers=bootstrap_servers,
                                 **kwargs)
     self.topic = topic
     super(MQWriter, self).__init__(producer, 'AIOKafka Producer')
Example 7
async def produce_rand_int():
    producer = AIOKafkaProducer(loop=loop, bootstrap_servers='localhost:9092')

    # Get cluster layout and initial topic/partition leadership information
    await producer.start()
    try:
        batch = producer.create_batch()

        # Populate the batch. The append() method will return metadata for the
        # added message or None if batch is full.
        for i in range(2):
            metadata = batch.append(value=b"msg %04d" % i,
                                    key=None,
                                    timestamp=None)
            assert metadata is not None

        # Optionally close the batch to prevent further appends. If left
        # open, producer.send() may append more messages to the batch.
        batch.close()

        # Add the batch to partition 1's submission queue. If this method
        # times out, we can say for sure that the batch will never be sent.
        fut = await producer.send_batch(batch, "my_topic", partition=1)

        # Batch will either be delivered or an unrecoverable error will occur.
        # Cancelling this future will not cancel the send.
        record = await fut
        logger.info(record)

    finally:
        # Wait for all pending messages to be delivered or expire.
        await producer.stop()
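The coroutine references a module-level `loop` and `logger`; a minimal driver supplying both:

import asyncio
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
loop = asyncio.get_event_loop()
loop.run_until_complete(produce_rand_int())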
Example 8
async def main():
    data_array_size = 10
    try:
        consumer = AIOKafkaConsumer(
            'noaa-json-us-az',
            loop=loop,
            bootstrap_servers='localhost:9092',
            group_id="alert-group-v4",
            auto_offset_reset="earliest"  # earliest or latest 
        )
        # Get cluster layout and join group
        await consumer.start()

        producer = AIOKafkaProducer(loop=loop,
                                    bootstrap_servers='localhost:9092')
        # Get cluster layout and initial topic/partition leadership information
        await producer.start()

        while True:
            data_array = await consume(consumer, data_array_size)
            await produce(producer, process(data_array))
            # sleep(0.2)
    finally:
        await consumer.stop()
        await producer.stop()
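The `consume` and `produce` helpers are not shown (`process` is domain-specific); plausible sketches consistent with the calls above, with a placeholder output topic:

async def consume(consumer, n):
    # Collect the next n raw message values from the consumer.
    data = []
    async for msg in consumer:
        data.append(msg.value)
        if len(data) >= n:
            break
    return data

async def produce(producer, records):
    for record in records:
        await producer.send_and_wait('noaa-alerts-out', record)  # hypothetical topic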
Example 9
async def produce_and_consume():
    # Produce
    producer = AIOKafkaProducer(loop=asyncio.get_event_loop(),
                                bootstrap_servers=CONFLUENT_KAFKA_BROKER,
                                security_protocol='SASL_SSL',
                                ssl_context=ssl_context,
                                sasl_mechanism='PLAIN',
                                sasl_plain_password='******',
                                sasl_plain_username='******')

    await producer.start()
    try:
        msg = await producer.send_and_wait('test',
                                           b"Super Message",
                                           partition=0)
    finally:
        await producer.stop()

    consumer = AIOKafkaConsumer('test',
                                loop=asyncio.get_event_loop(),
                                bootstrap_servers=CONFLUENT_KAFKA_BROKER,
                                security_protocol='SASL_SSL',
                                ssl_context=ssl_context,
                                sasl_mechanism='PLAIN',
                                sasl_plain_password='******',
                                sasl_plain_username='******')
    await consumer.start()
    try:
        async for msg in consumer:
            print("consumed: ", msg.topic, msg.partition, msg.offset, msg.key,
                  msg.value, msg.timestamp)
    finally:
        await consumer.stop()
Example 10
 def __init__(self, kafka_config: KafkaConfig) -> None:
     self._producer = AIOKafkaProducer(
         bootstrap_servers=kafka_config.bootstrap_servers,
         security_protocol=kafka_config.security_protocol,
         ssl_context=kafka_config.ssl_context,
     )
     self._kafka_config = kafka_config
Example 11
    async def aiowrite(self, topic, key, msg, loop=None):
        """
        Asyncio write
        :param topic: str kafka topic
        :param key: str kafka key
        :param msg: str or dictionary  kafka message
        :param loop: asyncio loop (deprecated; the loop should be provided in the config at connect time)
        :return: None
        """

        if not loop:
            loop = self.loop
        try:
            self.producer = AIOKafkaProducer(loop=loop,
                                             key_serializer=str.encode,
                                             value_serializer=str.encode,
                                             bootstrap_servers=self.broker)
            await self.producer.start()
            await self.producer.send(topic=topic,
                                     key=key,
                                     value=yaml.safe_dump(
                                         msg, default_flow_style=True))
        except Exception as e:
            raise MsgException(
                "Error publishing topic '{}', key '{}': {}".format(
                    topic, key, e))
        finally:
            await self.producer.stop()
Example 12
    def __init__(self,
                 kafka_servers='localhost:9092',
                 in_topic='aiokafkarpc_in',
                 out_topic='aiokafkarpc_out',
                 out_partitions=(0, ),
                 translation_table=(),
                 *,
                 loop):
        self.call = CallObj(self._call_wrapper)

        self._topic_in = in_topic
        self._loop = loop
        self._waiters = {}
        self._out_topic = out_topic
        self._out_partitions = out_partitions

        default, ext_hook = get_msgpack_hooks(translation_table)
        self.__consumer = AIOKafkaConsumer(
            self._out_topic,
            loop=loop,
            bootstrap_servers=kafka_servers,
            group_id=None,
            key_deserializer=lambda x: x.decode("utf-8"),
            value_deserializer=lambda x: msgpack.unpackb(
                x, ext_hook=ext_hook, encoding="utf-8"))

        self.__producer = AIOKafkaProducer(
            bootstrap_servers=kafka_servers,
            loop=loop,
            key_serializer=lambda x: x.encode("utf-8"),
            value_serializer=lambda x: msgpack.packb(x, default=default))
Example 13
    def __init__(self,
                 rpc_obj,
                 kafka_servers='localhost:9092',
                 in_topic='aiokafkarpc_in',
                 out_topic='aiokafkarpc_out',
                 translation_table=(),
                 *,
                 loop):
        self._tasks = {}
        self._loop = loop
        self._topic_out = out_topic
        self._rpc_obj = rpc_obj
        self._res_queue = asyncio.Queue(loop=loop)

        default, ext_hook = get_msgpack_hooks(translation_table)
        self.__consumer = AIOKafkaConsumer(
            in_topic,
            loop=loop,
            bootstrap_servers=kafka_servers,
            group_id=in_topic + '-group',
            key_deserializer=lambda x: x.decode("utf-8"),
            value_deserializer=lambda x: msgpack.unpackb(
                x, ext_hook=ext_hook, encoding="utf-8"))

        self.__producer = AIOKafkaProducer(
            bootstrap_servers=kafka_servers,
            loop=loop,
            key_serializer=lambda x: x.encode("utf-8"),
            value_serializer=lambda x: msgpack.packb(x, default=default))
Example 14
async def main(kafka_topic=KAFKA_TOPIC, sites=SITES, run_total=None):

    ssl_context = None
    security_protocol = 'PLAINTEXT'
    if KAFKA_SSL_CA is not None:
        ssl_context = create_ssl_context(cafile=KAFKA_SSL_CA,
                                         certfile=KAFKA_SSL_CERT,
                                         keyfile=KAFKA_SSL_KEY)
        security_protocol = 'SSL'

    producer = AIOKafkaProducer(bootstrap_servers=KAFKA_HOST,
                                value_serializer=serializer,
                                security_protocol=security_protocol,
                                ssl_context=ssl_context)

    msh = Scheduler()
    try:
        for site in sites:
            url = site['url']
            log.info(f'Monitoring {url}')
            msh.add_job(
                CronJob(run_total=run_total,
                        name=f'check_{url}').every().second.go(
                            check_site, producer, site, kafka_topic))
        await producer.start()
        await msh.start()
    except Exception as e:
        log.error(f'Got error starting scheduler: {e}')
        raise
    finally:
        await producer.stop()
Example 15
 def __init__(self):
     """Create a report slice state machine."""
     self.processor_name = PROCESSOR_NAME
     state_functions = {
         ReportSlice.RETRY_VALIDATION: self.transition_to_validated,
         ReportSlice.NEW: self.transition_to_started,
         ReportSlice.STARTED: self.transition_to_hosts_uploaded,
         ReportSlice.VALIDATED: self.transition_to_hosts_uploaded,
         ReportSlice.HOSTS_UPLOADED: self.archive_report_and_slices,
         ReportSlice.FAILED_VALIDATION: self.archive_report_and_slices,
         ReportSlice.FAILED_HOSTS_UPLOAD: self.archive_report_and_slices
     }
     state_metrics = {ReportSlice.FAILED_VALIDATION: FAILED_TO_VALIDATE}
     self.producer = AIOKafkaProducer(
         loop=SLICE_PROCESSING_LOOP,
         bootstrap_servers=INSIGHTS_KAFKA_ADDRESS,
         max_request_size=KAFKA_PRODUCER_OVERRIDE_MAX_REQUEST_SIZE)
     super().__init__(
         pre_delegate=self.pre_delegate,
         state_functions=state_functions,
         state_metrics=state_metrics,
         async_states=[ReportSlice.STARTED, ReportSlice.VALIDATED],
         object_prefix='REPORT SLICE',
         object_class=ReportSlice,
         object_serializer=ReportSliceSerializer)
Example 16
async def send(loop, total_events=10):
    producer = AIOKafkaProducer(loop=loop, bootstrap_servers='localhost:9092')
    # Get cluster layout and initial topic/partition leadership information
    await producer.start()

    for event_number in range(1, total_events + 1):
        # Produce message
        print(f"Sending event number {event_number}")

        user = UserModel(name=random.choice([
            "Juan",
            "Peter",
            "Michael",
            "Moby",
            "Kim",
        ]),
                         age=random.randint(1, 50))

        # create the message
        message = user.serialize()

        await producer.send_and_wait("my_topic", message)
        # sleep for 2 seconds
        await asyncio.sleep(2)
    # Wait for all pending messages to be delivered or expire.
    await producer.stop()
    print("Stopping producer...")
Example 17
    async def bench_simple(self):
        payload = bytearray(b"m" * self._size)
        topic = self._topic
        partition = self._partition
        loop = asyncio.get_event_loop()

        producer = AIOKafkaProducer(loop=loop, **self._producer_kwargs)
        await producer.start()

        # We start from after producer connect
        reporter_task = loop.create_task(self._stats_report(loop.time()))
        transaction_size = self.transaction_size

        try:
            if not self._is_transactional:
                for i in range(self._num):
                    # payload[i % self._size] = random.randint(0, 255)
                    await producer.send(topic, payload, partition=partition)
                    self._stats[-1]['count'] += 1
            else:
                for i in range(self._num // transaction_size):
                    # payload[i % self._size] = random.randint(0, 255)
                    async with producer.transaction():
                        for _ in range(transaction_size):
                            await producer.send(topic,
                                                payload,
                                                partition=partition)
                            self._stats[-1]['count'] += 1
        except asyncio.CancelledError:
            pass
        finally:
            await producer.stop()
            reporter_task.cancel()
            await reporter_task
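For the transactional branch, aiokafka requires a `transactional_id` on the producer; presumably `_producer_kwargs` carries it, for example:

producer_kwargs = dict(
    bootstrap_servers='localhost:9092',
    transactional_id='bench-producer-1',  # required for producer.transaction()
)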
Example 18
async def producer() -> AsyncGenerator[AIOKafkaProducer, None]:
    """Pytest fixture for a Kafka producer.

    Yields
    ------
    aiokafka.AIOKafkaProducer
        The AIOKafkaProducer, which is already "started" by awaiting its
        `~aiokafka.AIOKafkaProducer.start` method.

    Notes
    -----
    This producer uses the "SAFIR_KAFKA_BROKER_URL" environment variable to
    connect to the Kafka brokers. This is the same environment variable that
    your app already uses to connect to Kafka brokers.

    This producer assumes that the Kafka security protocol is ``PLAINTEXT``.
    """
    producer = AIOKafkaProducer(
        loop=asyncio.get_running_loop(),
        bootstrap_servers=str(os.getenv("SAFIR_KAFKA_BROKER_URL")),
        security_protocol="PLAINTEXT",
    )
    await producer.start()
    yield producer

    await producer.stop()
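A usage sketch, assuming the generator is registered as a fixture (e.g. decorated with pytest-asyncio's fixture decorator); the topic name is a placeholder:

import pytest

@pytest.mark.asyncio
async def test_sends_message(producer):
    # The fixture yields an already-started producer.
    await producer.send_and_wait('test.topic', b'hello')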
Example 19
async def produce_and_consume():
    # Produce
    producer = AIOKafkaProducer(bootstrap_servers='localhost:9093',
                                security_protocol="SSL",
                                ssl_context=context)

    await producer.start()
    try:
        msg = await producer.send_and_wait('my_topic',
                                           b"Super Message",
                                           partition=0)
    finally:
        await producer.stop()

    consumer = AIOKafkaConsumer("my_topic",
                                bootstrap_servers='localhost:9093',
                                security_protocol="SSL",
                                ssl_context=context)
    await consumer.start()
    try:
        consumer.seek(TopicPartition('my_topic', 0), msg.offset)
        fetch_msg = await consumer.getone()
    finally:
        await consumer.stop()

    print("Success", msg, fetch_msg)
Example 20
async def check_websites(run_forever: bool = True):
    """Listen to the event stream for any website check events.

    :param run_forever: Set to `False` to exit after all pending events have been processed.
    """
    # Setup the Kafka consumer.
    consumer = AIOKafkaConsumer(
        config.CHECK_WEBSITES_TOPIC,
        bootstrap_servers=config.KAFKA_URI,
        security_protocol='SSL',
        ssl_context=config.get_kafka_security_context(),
        group_id='check_websites_group'
    )
    producer = AIOKafkaProducer(
        bootstrap_servers=config.KAFKA_URI,
        security_protocol='SSL',
        ssl_context=config.get_kafka_security_context()
    )
    async with consumer, producer:
        while True:
            async for event in consumer:
                # Get the website status.
                website = Website(**json.loads(event.value.decode()))
                website_status = await check_website_status(website)
                # Write the status to the write status event stream.
                await producer.send(config.WRITE_WEBSITE_STATUS_TOPIC, website_status.json().encode())
            if not run_forever:
                break
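A minimal entry point for the coroutine above:

import asyncio

if __name__ == '__main__':
    asyncio.run(check_websites())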
Example 21
async def send_confirmation(request_id, status):  # pragma: no cover
    """
    Send kafka validation message to Insights Upload service.

    When a new file lands for topic 'hccm' we must validate it
    so that it will be made permanently available to other
    apps listening on the 'platform.upload.available' topic.

    Args:
        request_id (String): Request ID for file being confirmed.
        status (String): Either 'success' or 'failure'

    Returns:
        None

    """
    producer = AIOKafkaProducer(
        loop=EVENT_LOOP, bootstrap_servers=Config.INSIGHTS_KAFKA_ADDRESS)
    try:
        await producer.start()
    except (KafkaError, TimeoutError) as err:
        await producer.stop()
        LOG.exception(str(err))
        KAFKA_CONNECTION_ERRORS_COUNTER.inc()
        raise KafkaMsgHandlerError(
            'Unable to connect to kafka server.  Closing producer.')

    try:
        validation = {'request_id': request_id, 'validation': status}
        msg = bytes(json.dumps(validation), 'utf-8')
        LOG.info('Validating message: %s', str(msg))
        await producer.send_and_wait(VALIDATION_TOPIC, msg)
        LOG.info('Validating message complete.')
    finally:
        await producer.stop()
Example 22
    async def produce_msg(self):
        """启动生产者并发送消息"""
        # 定义一个生产者
        producer = AIOKafkaProducer(loop=self.loop,
                                    bootstrap_servers=self.hosts)
        await producer.start()

        copied_list = self.msg_list.copy()
        # After each send, clear the original list to avoid re-sending messages.
        self.msg_list = list()
        try:
            batch = producer.create_batch()
            for msg in copied_list:
                metadata = batch.append(key=None,
                                        value=msg.encode('utf-8'),
                                        timestamp=None)
                if metadata is None:
                    partitions = await producer.partitions_for(self.topic)
                    partition = random.choice(tuple(partitions))
                    await producer.send_batch(batch,
                                              self.topic,
                                              partition=partition)
                    logger.warning(
                        f'Sent {batch.record_count()} messages to topic:{self.topic}, partition:{partition}'
                    )
                    batch = producer.create_batch()
            partitions = await producer.partitions_for(self.topic)
            partition = random.choice(tuple(partitions))
            await producer.send_batch(batch, self.topic, partition=partition)
            logger.warning(
                f'Sent {len(copied_list)} messages:{copied_list} to topic:[{self.topic}], partition:{partition}'
            )
        finally:
            # Wait for all pending messages to be delivered or expire.
            await producer.stop()
Example 23
async def start_kafka_producer(app):
    print('starting Kafka producer')
    producer = AIOKafkaProducer(loop=asyncio.get_running_loop(),
                                bootstrap_servers=KAFKA_BOOTSTRAP_SERVER)
    await producer.start()
    print('started Kafka producer')
    app['producer'] = producer
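A registration sketch for the aiohttp startup signal; since the snippet has no matching stop, a hypothetical cleanup hook is sketched alongside it:

from aiohttp import web

app = web.Application()
app.on_startup.append(start_kafka_producer)

async def stop_kafka_producer(app):
    await app['producer'].stop()

app.on_cleanup.append(stop_kafka_producer)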
Example 24
    async def _run_producer(records):
        client = AIOKafkaProducer(loop=loop,
                                  bootstrap_servers=server,
                                  acks=acks,
                                  max_request_size=max_request_size)
        pending_records = []

        await client.start()
        gen = to_agen(records, loop, get_feedback_observer)
        print("started producer")
        async for record in gen:
            fut = await send_record(client, record[0], record[1], record[2],
                                    record[3])

            pending_records.append(fut)
            if len(pending_records) > 10000:
                _pending_records = pending_records.copy()
                pending_records = []
                await asyncio.gather(*_pending_records)

        # flush pending writes on completion
        print("producer completed")
        _pending_records = pending_records.copy()
        pending_records = []
        await asyncio.gather(*_pending_records)

        await client.flush()
        await client.stop()
        print("producer closed")
Example 25
async def main():
    ## kafka producer initialization
    kafkaproducer = AIOKafkaProducer(bootstrap_servers=KAFKA_ENDPOINT)
    print("------ Starting Kafka Producer ------")
    await kafkaproducer.start()
    while True:
        ### Your business logic Starts Here ###
        #message = "This is a sample message"
        message = {
            "event_timestamp": "2021-09-21T19:51:32.903077",
            "event_id": "3aa9152d70f543e7bbe61bfa1d62d0e7",
            "event_vehicle_detected_plate_number": "DAN54P",
            "event_vehicle_lpn_detection_status": "Successful",
            "stationa1": "true",
            "stationa5201": "false",
            "stationa13": "false",
            "stationa2": "false",
            "stationa23": "false",
            "stationb313": "false",
            "stationa4202": "false",
            "stationa41": "false",
            "stationb504": "false",
        }
        await asyncio.sleep(5)  # non-blocking pause; time.sleep() would block the event loop
        ### Your business logic ends here ###
        try:
            ## Sending message to Kafka Topic
            response = await kafkaproducer.send_and_wait(KAFKA_TOPIC, json.dumps(message).encode('utf-8'))
            await asyncio.sleep(1)
            print("[producer] Message successfully written to Kafka Topic ...")
            print("[producer] Printing RecordMetadata ...")
            print(response)
            print("------------------------------------------------")
        except asyncio.CancelledError:
            print("[producer] cancelled...")
Example 26
 async def async_test_upload_to_host_inventory_via_kafka(self):
     """Test uploading to inventory via kafka."""
     self.processor.report_or_slice = self.report_slice
     hosts = {
         str(self.uuid): {'bios_uuid': str(self.uuid), 'name': 'value'},
         str(self.uuid2): {'insights_client_id': 'value', 'name': 'foo'},
         str(self.uuid3): {'ip_addresses': 'value', 'name': 'foo'},
         str(self.uuid4): {'mac_addresses': 'value', 'name': 'foo'},
         str(self.uuid5): {'vm_uuid': 'value', 'name': 'foo'},
         str(self.uuid6): {'etc_machine_id': 'value'},
         str(self.uuid7): {'subscription_manager_id': 'value'},
         str(self.uuid8): {'system_profile': {'os_release': '7',
                                              'os_kernel_version': '2.6.32'}
                           }}
     test_producer = AIOKafkaProducer(
         loop=report_slice_processor.SLICE_PROCESSING_LOOP,
         bootstrap_servers=report_slice_processor.INSIGHTS_KAFKA_ADDRESS
     )
     test_producer.start = CoroutineMock()
     test_producer.send = CoroutineMock()
     test_producer.stop = CoroutineMock()
     with patch('processor.report_slice_processor.AIOKafkaProducer',
                return_value=test_producer):
         with patch('processor.report_slice_processor.asyncio.wait',
                    side_effect=None):
             # Although we are not asserting any results, the test here is
             # that no error was raised.
             await self.processor._upload_to_host_inventory_via_kafka(hosts)
Example 27
async def init():
    global producer

    loop = asyncio.get_event_loop()

    producer = AIOKafkaProducer(loop=loop, bootstrap_servers=broker)
    await producer.start()
Example 28
    async def async_test_upload_to_host_inventory_via_kafka_send_exception(self):
        """Test uploading to inventory via kafka."""
        self.processor.report_or_slice = self.report_slice
        hosts = {str(self.uuid): {'bios_uuid': str(self.uuid), 'name': 'value'},
                 str(self.uuid2): {'insights_client_id': 'value', 'name': 'foo'},
                 str(self.uuid3): {'ip_addresses': 'value', 'name': 'foo'},
                 str(self.uuid4): {'mac_addresses': 'value', 'name': 'foo'},
                 str(self.uuid5): {'vm_uuid': 'value', 'name': 'foo'},
                 str(self.uuid6): {'etc_machine_id': 'value'},
                 str(self.uuid7): {'subscription_manager_id': 'value'}}
        test_producer = AIOKafkaProducer(
            loop=report_slice_processor.SLICE_PROCESSING_LOOP,
            bootstrap_servers=report_slice_processor.INSIGHTS_KAFKA_ADDRESS
        )

        # test KafkaConnectionException
        def raise_error():
            """Raise a general error."""
            raise Exception('Test')

        test_producer.start = CoroutineMock()
        test_producer.send = CoroutineMock(side_effect=raise_error)
        test_producer.stop = CoroutineMock()
        with self.assertRaises(msg_handler.KafkaMsgHandlerError):
            with patch('processor.report_slice_processor.AIOKafkaProducer',
                       return_value=test_producer):
                await self.processor._upload_to_host_inventory_via_kafka(hosts)
Example 29
 def __init__(self):
     """Create a report processor."""
     self.processor_name = PROCESSOR_NAME
     state_functions = {
         Report.NEW: self.transition_to_started,
         Report.STARTED: self.transition_to_downloaded,
         Report.DOWNLOADED: self.transition_to_validated,
         Report.VALIDATED: self.transition_to_validation_reported,
         Report.VALIDATION_REPORTED: self.archive_report_and_slices,
         Report.FAILED_DOWNLOAD: self.archive_report_and_slices,
         Report.FAILED_VALIDATION: self.archive_report_and_slices,
         Report.FAILED_VALIDATION_REPORTING: self.archive_report_and_slices
     }
     state_metrics = {
         Report.FAILED_DOWNLOAD: FAILED_TO_DOWNLOAD,
         Report.FAILED_VALIDATION: FAILED_TO_VALIDATE
     }
     self.async_states = [Report.VALIDATED]
     self.producer = AIOKafkaProducer(
         loop=REPORT_PROCESSING_LOOP,
         bootstrap_servers=INSIGHTS_KAFKA_ADDRESS)
     super().__init__(pre_delegate=self.pre_delegate,
                      state_functions=state_functions,
                      state_metrics=state_metrics,
                      async_states=self.async_states,
                      object_prefix='REPORT',
                      object_class=Report,
                      object_serializer=ReportSerializer)
Example 30
 def __init__(self, topic, bootstrap_servers=CFG.bootstrap_servers, **kwargs):
     self.loop = asyncio.get_event_loop()
     producer = AIOKafkaProducer(loop=self.loop,
                                 bootstrap_servers=bootstrap_servers,
                                 security_protocol=CFG.security_protocol,
                                 ssl_context=CFG.ssl_context,
                                 sasl_mechanism=CFG.sasl_mechanism,
                                 sasl_plain_username=CFG.sasl_plain_username,
                                 sasl_plain_password=CFG.sasl_plain_password,
                                 **kwargs)
     self.topic = topic
     super().__init__(producer, 'AIOKafka Producer')