def test_statsd_default_connection(self):
    """A client built from an empty config gets only the default
    dimensions and base name -- no explicit host/port kwargs.
    """
    empty_config = {}
    client_path = 'monasca_notification.common.utils.monascastatsd.Client'
    with patch(client_path) as mock_client:
        utils.get_statsd_client(empty_config)
        mock_client.assert_called_once_with(
            dimensions=utils.NOTIFICATION_DIMENSIONS,
            name=self.base_name)
def test_statsd_default_connection(self):
    """With no overrides, the client is created against the default
    statsd endpoint (127.0.0.1:8125) with the default dimensions.
    """
    client_path = 'monasca_notification.common.utils.monascastatsd.Client'
    with patch(client_path) as mock_client:
        utils.get_statsd_client()
        mock_client.assert_called_once_with(
            dimensions=utils.NOTIFICATION_DIMENSIONS,
            name=self.base_name,
            host='127.0.0.1',
            port=8125)
def test_statsd_update_dimmensions(self):
    """Extra dimensions passed by the caller are merged on top of the
    defaults before the client is constructed.

    NOTE(review): "dimmensions" is a typo in the test name; kept as-is
    so the test id stays stable.
    """
    empty_config = {}
    # Equivalent to copy()+update(): defaults first, extras override.
    expected_dimensions = dict(utils.NOTIFICATION_DIMENSIONS,
                               **self.extra_dimensions)
    client_path = 'monasca_notification.common.utils.monascastatsd.Client'
    with patch(client_path) as mock_client:
        utils.get_statsd_client(empty_config,
                                dimensions=self.extra_dimensions)
        mock_client.assert_called_once_with(
            dimensions=expected_dimensions,
            name=self.base_name)
def test_statsd_update_dimmensions(self):
    """Caller-supplied dimensions are merged over the defaults and the
    client still targets the default statsd endpoint.

    NOTE(review): "dimmensions" is a typo in the test name; kept as-is
    so the test id stays stable.
    """
    # Equivalent to copy()+update(): defaults first, extras override.
    expected_dimensions = dict(utils.NOTIFICATION_DIMENSIONS,
                               **self.extra_dimensions)
    client_path = 'monasca_notification.common.utils.monascastatsd.Client'
    with patch(client_path) as mock_client:
        utils.get_statsd_client(dimensions=self.extra_dimensions)
        mock_client.assert_called_once_with(
            dimensions=expected_dimensions,
            name=self.base_name,
            host='127.0.0.1',
            port=8125)
def test_statsd_config_connection(self):
    """host/port from the config dict's 'statsd' section are forwarded
    to the client constructor.
    """
    hostname = 'www.example.org'
    port_number = 9999
    config = {'statsd': {'host': hostname, 'port': port_number}}
    client_path = 'monasca_notification.common.utils.monascastatsd.Client'
    with patch(client_path) as mock_client:
        utils.get_statsd_client(config)
        mock_client.assert_called_once_with(
            dimensions=utils.NOTIFICATION_DIMENSIONS,
            name=self.base_name,
            port=port_number,
            host=hostname)
def test_statsd_config_connection(self):
    """host/port overridden in the oslo 'statsd' option group are
    forwarded to the client constructor.
    """
    hostname = 'www.example.org'
    port_number = 9999
    self.conf_override(group='statsd', host=hostname, port=port_number)
    client_path = 'monasca_notification.common.utils.monascastatsd.Client'
    with patch(client_path) as mock_client:
        utils.get_statsd_client()
        mock_client.assert_called_once_with(
            dimensions=utils.NOTIFICATION_DIMENSIONS,
            name=self.base_name,
            port=port_number,
            host=hostname)
def __init__(self):
    """Wire up the alarm engine: statsd metrics, a Kafka consumer on
    the alarm topic, a Kafka producer, and the alarm/notification
    processors.

    NOTE(review): assumes CONF.zookeeper.url is an iterable of host
    strings -- they are comma-joined for the consumer; confirm against
    the option definition.
    """
    kafka = CONF.kafka
    zookeeper = CONF.zookeeper
    zk_hosts = ','.join(zookeeper.url)
    self._statsd = get_statsd_client()
    self._consumer = consumer.KafkaConsumer(
        kafka.url,
        zk_hosts,
        zookeeper.notification_path,
        kafka.group,
        kafka.alarm_topic)
    self._producer = producer.KafkaProducer(kafka.url)
    self._alarms = ap.AlarmProcessor()
    self._notifier = np.NotificationProcessor()
def __init__(self, config):
    """Set up the notification engine from a config dict: statsd,
    notifier plugins, the DB repo, and failure/invalid-type counters.

    :param config: dict with at least a 'notification_types' section
        (consumed by the notifiers module).
    """
    statsd = get_statsd_client(config)
    self.statsd = statsd
    notifiers.init(statsd)
    # Plugins are loaded and then configured from the same section.
    notification_types = config['notification_types']
    notifiers.load_plugins(notification_types)
    notifiers.config(notification_types)
    self._db_repo = get_db_repo(config)
    self.insert_configured_plugins()
    self._invalid_type_count = statsd.get_counter(name='invalid_type_count')
    self._sent_failed_count = statsd.get_counter(name='sent_failed_count')
def __init__(self):
    """Set up the notification engine from global oslo config: statsd,
    notifier plugins, and the DB repo.
    """
    statsd = get_statsd_client()
    self.statsd = statsd
    notifiers.init(statsd)
    # Plugins pull their own settings from the global CONF.
    notifiers.load_plugins()
    notifiers.config()
    self._db_repo = get_db_repo()
    self.insert_configured_plugins()
def __init__(self):
    """Wire up the alarm engine via the kafka client factory: statsd
    metrics, consumer/producer (legacy or new client per config), and
    the alarm/notification processors.
    """
    kafka = CONF.kafka
    zookeeper = CONF.zookeeper
    self._statsd = get_statsd_client()
    self._consumer = client_factory.get_kafka_consumer(
        kafka.url,
        kafka.group,
        kafka.alarm_topic,
        zookeeper.url,
        zookeeper.notification_path,
        kafka.legacy_kafka_client_enabled)
    self._producer = client_factory.get_kafka_producer(
        kafka.url,
        kafka.legacy_kafka_client_enabled)
    self._alarms = ap.AlarmProcessor()
    self._notifier = np.NotificationProcessor()
def __init__(self):
    """Wire up the retry engine: statsd metrics, a Kafka consumer on
    the notification-retry topic, a producer, the notification
    processor and the DB repo.

    NOTE(review): assumes CONF.zookeeper.url is an iterable of host
    strings -- they are comma-joined for the consumer.
    """
    kafka = CONF.kafka
    zookeeper = CONF.zookeeper
    zk_hosts = ','.join(zookeeper.url)
    self._statsd = get_statsd_client()
    self._consumer = consumer.KafkaConsumer(
        kafka.url,
        zk_hosts,
        zookeeper.notification_retry_path,
        kafka.group,
        kafka.notification_retry_topic)
    self._producer = producer.KafkaProducer(kafka.url)
    self._notifier = notification_processor.NotificationProcessor()
    self._db_repo = get_db_repo()
def __init__(self):
    """Build the retry engine's collaborators: statsd client, retry
    Kafka consumer/producer, notification processor and DB repo.

    NOTE(review): assumes CONF.zookeeper.url is an iterable of host
    strings -- they are comma-joined for the consumer.
    """
    self._statsd = get_statsd_client()
    zookeeper_hosts = ','.join(CONF.zookeeper.url)
    self._consumer = consumer.KafkaConsumer(
        CONF.kafka.url,
        zookeeper_hosts,
        CONF.zookeeper.notification_retry_path,
        CONF.kafka.group,
        CONF.kafka.notification_retry_topic)
    self._producer = producer.KafkaProducer(CONF.kafka.url)
    self._notifier = notification_processor.NotificationProcessor()
    self._db_repo = get_db_repo()
def __init__(self, period):
    """Wire up a periodic-notification engine for one period via the
    kafka client factory.

    :param period: key into CONF.kafka.periodic / CONF.zookeeper.periodic_path
        selecting this engine's topic and zookeeper path.
    """
    kafka = CONF.kafka
    zookeeper = CONF.zookeeper
    self._topic_name = kafka.periodic[period]
    self._statsd = get_statsd_client()
    self._consumer = client_factory.get_kafka_consumer(
        kafka.url,
        kafka.group,
        self._topic_name,
        zookeeper.url,
        zookeeper.periodic_path[period],
        kafka.legacy_kafka_client_enabled)
    self._producer = client_factory.get_kafka_producer(
        kafka.url,
        kafka.legacy_kafka_client_enabled)
    self._notifier = notification_processor.NotificationProcessor()
    self._db_repo = get_db_repo()
    self._period = period
def __init__(self):
    """Wire up the retry engine via the kafka client factory: statsd
    metrics, retry-topic consumer/producer (legacy or new client per
    config), notification processor and DB repo.
    """
    kafka = CONF.kafka
    zookeeper = CONF.zookeeper
    self._statsd = get_statsd_client()
    self._consumer = client_factory.get_kafka_consumer(
        kafka.url,
        kafka.group,
        kafka.notification_retry_topic,
        zookeeper.url,
        zookeeper.notification_retry_path,
        kafka.legacy_kafka_client_enabled)
    self._producer = client_factory.get_kafka_producer(
        kafka.url,
        kafka.legacy_kafka_client_enabled)
    self._notifier = notification_processor.NotificationProcessor()
    self._db_repo = get_db_repo()
def __init__(self, period):
    """Wire up a periodic-notification engine for one period (legacy
    Kafka client).

    :param period: key into CONF.kafka.periodic / CONF.zookeeper.periodic_path
        selecting this engine's topic and zookeeper path.

    NOTE(review): assumes CONF.zookeeper.url is an iterable of host
    strings -- they are comma-joined for the consumer.
    """
    kafka = CONF.kafka
    zookeeper = CONF.zookeeper
    self._topic_name = kafka.periodic[period]
    self._statsd = get_statsd_client()
    zookeeper_path = zookeeper.periodic_path[period]
    self._consumer = consumer.KafkaConsumer(
        kafka.url,
        ','.join(zookeeper.url),
        zookeeper_path,
        kafka.group,
        self._topic_name)
    self._producer = producer.KafkaProducer(kafka.url)
    self._notifier = notification_processor.NotificationProcessor()
    self._db_repo = get_db_repo()
    self._period = period
def __init__(self, alarm_ttl, config):
    """Set up the expiration engine from a config dict.

    :param alarm_ttl: time-to-live applied to alarms
        (units defined by the caller -- not visible here).
    :param config: dict consumed by the statsd and DB helpers.
    """
    self._alarm_ttl = alarm_ttl
    # Both helpers read their settings from the same config dict.
    self._statsd = get_statsd_client(config)
    self._db_repo = get_db_repo(config)
def __init__(self):
    """Set up the expiration engine from global oslo config.

    The alarm TTL comes from the 'alarm_processor' option group; the
    statsd and DB helpers read their own settings from CONF.
    """
    self._alarm_ttl = CONF.alarm_processor.ttl
    self._statsd = get_statsd_client()
    self._db_repo = get_db_repo()