def __init__(self, topic):
        """Set up a Kafka producer for *topic* from the [kafka] config group.

        Reads connection and delivery options from ``cfg.CONF.kafka`` and
        builds the underlying producer via ``client_factory``.

        :param topic: name of the Kafka topic this publisher writes to
        :raises Exception: when ``[kafka] uri`` is not configured
        """
        kafka_conf = cfg.CONF.kafka

        # A broker URI is mandatory; fail fast with a configuration hint.
        if not kafka_conf.uri:
            raise Exception('Kafka is not configured correctly! '
                            'Use configuration file to specify Kafka '
                            'uri, for example: '
                            'uri=192.168.1.191:9092')

        self.topic = topic
        # Mirror the delivery-related [kafka] options onto the instance
        # one-for-one (same attribute name on both sides).
        for option in ('uri', 'group', 'wait_time', 'is_async', 'ack_time',
                       'max_retry', 'auto_commit', 'compact', 'partitions',
                       'drop_data'):
            setattr(self, option, getattr(kafka_conf, option))

        producer_config = {
            'queue.buffering.max.messages':
                kafka_conf.queue_buffering_max_messages,
        }
        self._producer = client_factory.get_kafka_producer(
            self.uri, kafka_conf.legacy_kafka_client_enabled,
            **producer_config)
# --- Example 2 (scraper separator; original text: "Beispiel #2" / vote count "0") ---
    def __init__(self):
        """Initialize the log publisher.

        Pulls the target topics and the per-message size limit from
        configuration, then creates the Kafka producer used for publishing.
        """
        kafka_conf = CONF.kafka

        self._topics = kafka_conf.logs_topics
        self.max_message_size = CONF.log_publisher.max_message_size

        self._kafka_publisher = client_factory.get_kafka_producer(
            kafka_conf.uri, kafka_conf.legacy_kafka_client_enabled)

        LOG.info('Initializing LogPublisher <%s>', self)
 def __init__(self):
     """Wire up the alarm-processing engine.

     Creates the statsd client, the Kafka consumer for the alarm topic
     (coordinated through ZooKeeper), the Kafka producer, and the alarm
     and notification processors.
     """
     kafka_conf = CONF.kafka
     zk_conf = CONF.zookeeper

     self._statsd = get_statsd_client()

     self._consumer = client_factory.get_kafka_consumer(
         kafka_conf.url, kafka_conf.group, kafka_conf.alarm_topic,
         zk_conf.url, zk_conf.notification_path,
         kafka_conf.legacy_kafka_client_enabled)
     self._producer = client_factory.get_kafka_producer(
         kafka_conf.url, kafka_conf.legacy_kafka_client_enabled)

     self._alarms = ap.AlarmProcessor()
     self._notifier = np.NotificationProcessor()
# --- Example 4 (scraper separator; original text: "Beispiel #4" / vote count "0") ---
    def __init__(self, period):
        """Set up the periodic-notification engine for one *period*.

        Looks up the Kafka topic and ZooKeeper path configured for this
        period, then builds the statsd client, Kafka consumer/producer,
        notification processor, and DB repository.

        :param period: key into ``CONF.kafka.periodic`` /
            ``CONF.zookeeper.periodic_path`` selecting the schedule
        """
        kafka_conf = CONF.kafka
        zk_conf = CONF.zookeeper

        self._period = period
        self._topic_name = kafka_conf.periodic[period]

        self._statsd = get_statsd_client()

        self._consumer = client_factory.get_kafka_consumer(
            kafka_conf.url, kafka_conf.group, self._topic_name,
            zk_conf.url, zk_conf.periodic_path[period],
            kafka_conf.legacy_kafka_client_enabled)
        self._producer = client_factory.get_kafka_producer(
            kafka_conf.url, kafka_conf.legacy_kafka_client_enabled)

        self._notifier = notification_processor.NotificationProcessor()
        self._db_repo = get_db_repo()
    def __init__(self):
        """Set up the notification-retry engine.

        Builds the statsd client, a Kafka consumer on the retry topic
        (coordinated via the ZooKeeper retry path), a Kafka producer, the
        notification processor, and the DB repository.
        """
        kafka_conf = CONF.kafka
        zk_conf = CONF.zookeeper

        self._statsd = get_statsd_client()

        self._consumer = client_factory.get_kafka_consumer(
            kafka_conf.url,
            kafka_conf.group,
            kafka_conf.notification_retry_topic,
            zk_conf.url,
            zk_conf.notification_retry_path,
            kafka_conf.legacy_kafka_client_enabled)
        self._producer = client_factory.get_kafka_producer(
            kafka_conf.url,
            kafka_conf.legacy_kafka_client_enabled)

        self._notifier = notification_processor.NotificationProcessor()
        self._db_repo = get_db_repo()