Example n. 1
0
    def __init__(self, kafka_conf, zookeeper_conf, repository):
        """Wire up the Kafka consumer, the backing repository and metrics.

        :param kafka_conf: config with ``topic``, ``batch_size``, ``uri``,
            ``zookeeper_path``, ``group_id`` and ``max_wait_time_seconds``
        :param zookeeper_conf: config with a ``uri`` attribute
        :param repository: zero-argument callable that builds the repository
        """
        # Buffer of measurements waiting to be flushed to the repository.
        self._data_points = []
        self._kafka_topic = kafka_conf.topic
        self._batch_size = kafka_conf.batch_size

        # Flush buffered points both on partition rebalance and on offset
        # commit so nothing is lost or double-written across either event.
        self._consumer = consumer.KafkaConsumer(
            kafka_conf.uri,
            zookeeper_conf.uri,
            kafka_conf.zookeeper_path,
            kafka_conf.group_id,
            kafka_conf.topic,
            repartition_callback=self._flush,
            commit_callback=self._flush,
            commit_timeout=kafka_conf.max_wait_time_seconds)

        self.repository = repository()
        self._start_time = time.time()
        self._end_time = 0

        # Prometheus metrics aggregated across worker processes.
        self.registry = CollectorRegistry()
        multiprocess.MultiProcessCollector(self.registry)
        self.message_counter = Counter('monasca_persister_message_count_total',
                                       'total count of messages', ['version'])
        # Pre-create the labelled child so the series exists from startup.
        self.message_counter.labels(version='v1.0')
        self.message_counter_per_topic = Counter(
            'monasca_persister_message_count_per_topic',
            'total number of messages processed from one topic', ['topic'])
        self.message_process_rate_gauge = Gauge(
            'monasca_persister_messages_processed_per_sec',
            'messages processed per second from one topic', ['topic'],
            multiprocess_mode='all')
 def __init__(self):
     self._statsd = get_statsd_client()
     self._consumer = consumer.KafkaConsumer(
         CONF.kafka.url, ','.join(CONF.zookeeper.url),
         CONF.zookeeper.notification_path, CONF.kafka.group,
         CONF.kafka.alarm_topic)
     self._producer = producer.KafkaProducer(CONF.kafka.url)
     self._alarms = ap.AlarmProcessor()
     self._notifier = np.NotificationProcessor()
Example n. 3
0
    def __init__(self):
        """Set up retry-queue processing: statsd, Kafka I/O, notifier, DB."""
        self._statsd = get_statsd_client()

        # Consume the dedicated retry topic under its own zookeeper path.
        self._consumer = consumer.KafkaConsumer(
            CONF.kafka.url,
            ','.join(CONF.zookeeper.url),
            CONF.zookeeper.notification_retry_path,
            CONF.kafka.group,
            CONF.kafka.notification_retry_topic)
        self._producer = producer.KafkaProducer(CONF.kafka.url)

        self._notifier = notification_processor.NotificationProcessor()
        self._db_repo = get_db_repo()
    def __init__(self, period):
        """Set up processing for one periodic-notification interval.

        :param period: key into ``CONF.kafka.periodic`` /
            ``CONF.zookeeper.periodic_path`` selecting topic and zk path
        """
        self._period = period
        self._topic_name = CONF.kafka.periodic[period]
        self._statsd = get_statsd_client()

        # Each period gets its own topic and its own zookeeper offset path.
        self._consumer = consumer.KafkaConsumer(
            CONF.kafka.url,
            ','.join(CONF.zookeeper.url),
            CONF.zookeeper.periodic_path[period],
            CONF.kafka.group,
            self._topic_name)
        self._producer = producer.KafkaProducer(CONF.kafka.url)

        self._notifier = notification_processor.NotificationProcessor()
        self._db_repo = get_db_repo()
Example n. 5
0
    def setUp(self):
        """Stub out the kafka/kazoo modules used by the consumer, then build
        a KafkaConsumer under test against fake connection settings."""
        # Create and immediately start each patcher, keeping the mock handle.
        self.kafka_client_patcher = mock.patch(
            'monasca_common.kafka.consumer.kafka_client')
        self.mock_kafka_client = self.kafka_client_patcher.start()

        self.kafka_common_patcher = mock.patch(
            'monasca_common.kafka.consumer.kafka_common')
        self.mock_kafka_common = self.kafka_common_patcher.start()

        self.kafka_consumer_patcher = mock.patch(
            'monasca_common.kafka.consumer.kafka_consumer')
        self.mock_kafka_consumer = self.kafka_consumer_patcher.start()

        self.kazoo_patcher = mock.patch(
            'monasca_common.kafka.consumer.KazooClient')
        self.kazoo_patcher.start()

        # Handles to the mocked classes' instances for later assertions.
        self.client = self.mock_kafka_client.KafkaClient.return_value
        self.consumer = self.mock_kafka_consumer.SimpleConsumer.return_value

        self.monasca_kafka_consumer = consumer.KafkaConsumer(
            FAKE_KAFKA_URL, FAKE_ZOOKEEPER_URL, FAKE_ZOOKEEPER_PATH,
            FAKE_KAFKA_CONSUMER_GROUP, FAKE_KAFKA_TOPIC)
Example n. 6
0
    def __init__(self, kafka_conf, zookeeper_conf, repository):
        # NOTE(review): this copy of the constructor appears truncated at the
        # end of the visible excerpt (the earlier copy continues past the
        # repository assignment); comments only, code left untouched.

        # Buffer of measurements waiting to be flushed to the repository.
        self._data_points = []

        self._kafka_topic = kafka_conf.topic

        self._batch_size = kafka_conf.batch_size

        # Flush buffered points both on partition rebalance and on offset
        # commit so nothing is lost or double-written across either event.
        self._consumer = consumer.KafkaConsumer(
            kafka_conf.uri,
            zookeeper_conf.uri,
            kafka_conf.zookeeper_path,
            kafka_conf.group_id,
            kafka_conf.topic,
            repartition_callback=self._flush,
            commit_callback=self._flush,
            commit_timeout=kafka_conf.max_wait_time_seconds)

        # `repository` is a zero-argument callable building the backend.
        self.repository = repository()