def __init__(self, config):
    """Wire up the notification engine from *config*.

    Builds the statsd client, the Kafka consumer (reading the alarm
    topic) and producer, and the alarm/notification processors.
    """
    kafka = config['kafka']
    zookeeper = config['zookeeper']
    # Destination topics for successfully sent and to-be-retried messages.
    self._topics = {
        'notification_topic': kafka['notification_topic'],
        'retry_topic': kafka['notification_retry_topic'],
    }
    self._statsd = monascastatsd.Client(
        name='monasca', dimensions=BaseProcessor.dimensions)
    self._consumer = KafkaConsumer(
        kafka['url'],
        zookeeper['url'],
        zookeeper['notification_path'],
        kafka['group'],
        kafka['alarm_topic'])
    self._producer = KafkaProducer(kafka['url'])
    # TTL guards against acting on stale alarms.
    self._alarm_ttl = config['processors']['alarm']['ttl']
    self._alarms = AlarmProcessor(self._alarm_ttl, config)
    self._notifier = NotificationProcessor(config['notification_types'])
def __init__(self, config, interval):
    """Wire up a periodic-notification engine for the given *interval*.

    The interval selects both the Kafka topic and the zookeeper
    coordination path from the configuration.
    """
    kafka = config['kafka']
    zookeeper = config['zookeeper']
    self._topic_name = kafka['periodic'][interval]
    self._statsd = monascastatsd.Client(
        name='monasca', dimensions=BaseProcessor.dimensions)
    # Each interval coordinates its consumers under its own zk path.
    periodic_path = zookeeper['periodic_path'][interval]
    self._consumer = KafkaConsumer(
        kafka['url'],
        zookeeper['url'],
        periodic_path,
        kafka['group'],
        self._topic_name)
    self._producer = KafkaProducer(kafka['url'])
    self._notifier = NotificationProcessor(config['notification_types'])
    self._db_repo = get_db_repo(config)
def setUp(self):
    """Patch the kafka/kazoo layers and build a consumer under test."""
    # Patch each kafka module and start the patch immediately.
    self.kafka_client_patcher = mock.patch('kafka.client')
    self.mock_kafka_client = self.kafka_client_patcher.start()

    self.kafka_common_patcher = mock.patch('kafka.common')
    self.mock_kafka_common = self.kafka_common_patcher.start()

    self.kafka_consumer_patcher = mock.patch('kafka.consumer')
    self.mock_kafka_consumer = self.kafka_consumer_patcher.start()

    # Zookeeper client is patched so no real connection is attempted.
    self.kazoo_patcher = mock.patch(
        'monasca_common.kafka.consumer.KazooClient')
    self.kazoo_patcher.start()

    # Handles to the mocks the consumer under test will receive.
    self.client = self.mock_kafka_client.KafkaClient.return_value
    self.consumer = self.mock_kafka_consumer.SimpleConsumer.return_value

    self.monasca_kafka_consumer = KafkaConsumer(
        FAKE_KAFKA_URL, FAKE_ZOOKEEPER_URL, FAKE_ZOOKEEPER_PATH,
        FAKE_KAFKA_CONSUMER_GROUP, FAKE_KAFKA_TOPIC)
def __init__(self, kafka_conf, zookeeper_conf, repository):
    """Create a persister that batches consumed points into *repository*.

    The consumer flushes buffered data points on both repartition and
    commit via the ``_flush`` callback.
    """
    # Buffer of points accumulated between flushes.
    self._data_points = []
    self._kafka_topic = kafka_conf.topic
    self._database_batch_size = kafka_conf.database_batch_size
    flush = self._flush
    self._consumer = KafkaConsumer(
        kafka_conf.uri,
        zookeeper_conf.uri,
        kafka_conf.zookeeper_path,
        kafka_conf.group_id,
        kafka_conf.topic,
        repartition_callback=flush,
        commit_callback=flush,
        commit_timeout=kafka_conf.max_wait_time_seconds)
    # *repository* is a factory; instantiate it here.
    self.repository = repository()
def __init__(self, config):
    """Wire up the retry engine from *config*.

    Consumes from the notification retry topic and re-sends failed
    notifications up to the configured maximum number of attempts.
    """
    retry = config['retry']
    kafka = config['kafka']
    zookeeper = config['zookeeper']
    self._retry_interval = retry['interval']
    self._retry_max = retry['max_attempts']
    # Destination topics for successfully sent and to-be-retried messages.
    self._topics = {
        'notification_topic': kafka['notification_topic'],
        'retry_topic': kafka['notification_retry_topic'],
    }
    self._statsd = monascastatsd.Client(
        name='monasca', dimensions=BaseProcessor.dimensions)
    self._consumer = KafkaConsumer(
        kafka['url'],
        zookeeper['url'],
        zookeeper['notification_retry_path'],
        kafka['group'],
        kafka['notification_retry_topic'])
    self._producer = KafkaProducer(kafka['url'])
    self._notifier = NotificationProcessor(config['notification_types'])