def __init__(self):
    """Wire up statsd, the legacy Kafka clients and the alarm processors.

    Reads all connection settings from the global CONF object; the
    consumer coordinates partition ownership through zookeeper.
    """
    self._statsd = get_statsd_client()
    # Legacy consumer takes the zookeeper ensemble as one comma-joined string.
    zookeeper_hosts = ','.join(CONF.zookeeper.url)
    self._consumer = consumer.KafkaConsumer(
        CONF.kafka.url,
        zookeeper_hosts,
        CONF.zookeeper.notification_path,
        CONF.kafka.group,
        CONF.kafka.alarm_topic)
    self._producer = producer.KafkaProducer(CONF.kafka.url)
    self._alarms = ap.AlarmProcessor()
    self._notifier = np.NotificationProcessor()
def __init__(self):
    """Wire up statsd, the Kafka clients and the alarm/notification processors.

    The client factory selects between the legacy and the new Kafka
    client implementation based on the configured flag.
    """
    self._statsd = get_statsd_client()
    # Hoist the flag so both factory calls visibly use the same setting.
    use_legacy_client = CONF.kafka.legacy_kafka_client_enabled
    self._consumer = client_factory.get_kafka_consumer(
        CONF.kafka.url,
        CONF.kafka.group,
        CONF.kafka.alarm_topic,
        CONF.zookeeper.url,
        CONF.zookeeper.notification_path,
        use_legacy_client)
    self._producer = client_factory.get_kafka_producer(
        CONF.kafka.url,
        use_legacy_client)
    self._alarms = ap.AlarmProcessor()
    self._notifier = np.NotificationProcessor()
def __init__(self):
    """Wire up statsd, the legacy Kafka clients, the notifier and the DB repo.

    This instance consumes from the notification retry topic; zookeeper
    coordinates the legacy consumer's partition ownership.
    """
    self._statsd = get_statsd_client()
    zookeeper_hosts = ','.join(CONF.zookeeper.url)
    self._consumer = consumer.KafkaConsumer(
        CONF.kafka.url,
        zookeeper_hosts,
        CONF.zookeeper.notification_retry_path,
        CONF.kafka.group,
        CONF.kafka.notification_retry_topic)
    self._producer = producer.KafkaProducer(CONF.kafka.url)
    self._notifier = notification_processor.NotificationProcessor()
    self._db_repo = get_db_repo()
def _start_processor(self, notifications, mock_log, mock_smtp, mock_statsd,
                     mock_pymsql):
    """Start the processor with the proper mocks and push *notifications*.

    :param notifications: iterable of notifications fed to ``send``
    :param mock_log: mocked log module; warn/error are redirected to
        ``self.trap`` because the real log runs in another thread and
        cannot be asserted on directly
    :param mock_smtp: mocked smtplib module; ``SMTP`` is replaced by the
        test stub
    :param mock_statsd: mocked statsd client (injected by decorator,
        unused here)
    :param mock_pymsql: mocked DB driver (injected by decorator, unused
        here; NOTE(review): name looks like a typo for "pymysql" — kept
        unchanged because decorator order determines the argument)
    """
    # Since the log runs in another thread we can't mock it directly;
    # instead redirect the logging methods into a queue we can inspect.
    mock_log.warn = self.trap.append
    mock_log.error = self.trap.append
    mock_smtp.SMTP = self._smtpStub
    # Patch the plugin-config hook via a context manager so the class
    # attribute is restored afterwards. The original assignment
    # (np.NotificationProcessor.insert_configured_plugins = mock.Mock())
    # permanently mutated the class and leaked into every later test.
    with mock.patch.object(np.NotificationProcessor,
                           'insert_configured_plugins'):
        processor = np.NotificationProcessor()
        processor.send(notifications)
def _start_processor(self, notifications, mock_log, mock_smtp, mock_statsd):
    """Start the processor with the proper mocks and push *notifications*.

    The log runs in another thread and cannot be asserted on directly,
    so warn/error are redirected into ``self.trap`` instead.
    """
    mock_log.warn = self.trap.append
    mock_log.error = self.trap.append
    mock_smtp.SMTP = self._smtpStub
    # Minimal processor configuration: only the email section is needed.
    config = {"email": self.email_config}
    processor = notification_processor.NotificationProcessor(config)
    processor.send(notifications)
def __init__(self, period):
    """Wire up the Kafka clients and processors for one periodic interval.

    :param period: key into the periodic topic/zookeeper-path config maps
    """
    self._topic_name = CONF.kafka.periodic[period]
    self._statsd = get_statsd_client()
    # Hoist the flag so both factory calls visibly use the same setting.
    use_legacy_client = CONF.kafka.legacy_kafka_client_enabled
    self._consumer = client_factory.get_kafka_consumer(
        CONF.kafka.url,
        CONF.kafka.group,
        self._topic_name,
        CONF.zookeeper.url,
        CONF.zookeeper.periodic_path[period],
        use_legacy_client)
    self._producer = client_factory.get_kafka_producer(
        CONF.kafka.url,
        use_legacy_client)
    self._notifier = notification_processor.NotificationProcessor()
    self._db_repo = get_db_repo()
    self._period = period
def __init__(self):
    """Wire up statsd, the Kafka clients, the notifier and the DB repo.

    This instance consumes from the notification retry topic; the client
    factory picks the legacy or new Kafka client per the configured flag.
    """
    self._statsd = get_statsd_client()
    use_legacy_client = CONF.kafka.legacy_kafka_client_enabled
    self._consumer = client_factory.get_kafka_consumer(
        CONF.kafka.url,
        CONF.kafka.group,
        CONF.kafka.notification_retry_topic,
        CONF.zookeeper.url,
        CONF.zookeeper.notification_retry_path,
        use_legacy_client)
    self._producer = client_factory.get_kafka_producer(
        CONF.kafka.url,
        use_legacy_client)
    self._notifier = notification_processor.NotificationProcessor()
    self._db_repo = get_db_repo()
def __init__(self, period):
    """Wire up the legacy Kafka clients and processors for one interval.

    :param period: key into the periodic topic/zookeeper-path config maps
    """
    self._topic_name = CONF.kafka.periodic[period]
    self._statsd = get_statsd_client()
    # Legacy consumer takes the zookeeper ensemble as one comma-joined string.
    zookeeper_hosts = ','.join(CONF.zookeeper.url)
    zookeeper_path = CONF.zookeeper.periodic_path[period]
    self._consumer = consumer.KafkaConsumer(
        CONF.kafka.url,
        zookeeper_hosts,
        zookeeper_path,
        CONF.kafka.group,
        self._topic_name)
    self._producer = producer.KafkaProducer(CONF.kafka.url)
    self._notifier = notification_processor.NotificationProcessor()
    self._db_repo = get_db_repo()
    self._period = period