Example #1
    def __init__(self, config, period):
        super(PeriodicEngine, self).__init__(config, config['kafka']['periodic'][period],
                                             config['zookeeper']['periodic_path'][period])

        self._notifier = notification_processor.NotificationProcessor(config)
        self._db_repo = get_db_repo(config)
        self._period = period
Example #2
    def __init__(self, config):
        self.statsd = monascastatsd.Client(name='monasca',
                                           dimensions=BaseProcessor.dimensions)
        notifiers.init(self.statsd)
        notifiers.load_plugins(config['notification_types'])
        notifiers.config(config['notification_types'])
        self._db_repo = get_db_repo(config)
        self.insert_configured_plugins()
Example #3
    def __init__(self):
        self.statsd = get_statsd_client()
        notifiers.init(self.statsd)

        notifiers.load_plugins()
        notifiers.config()

        self._db_repo = get_db_repo()
        self.insert_configured_plugins()
Example #4
    def __init__(self, config):
        self.statsd = get_statsd_client(config)
        notifiers.init(self.statsd)
        notifiers.load_plugins(config['notification_types'])
        notifiers.config(config['notification_types'])
        self._db_repo = get_db_repo(config)
        self.insert_configured_plugins()
        self._invalid_type_count = self.statsd.get_counter(name='invalid_type_count')
        self._sent_failed_count = self.statsd.get_counter(name='sent_failed_count')
Example #5
    def __init__(self):
        self.statsd = get_statsd_client()
        notifiers.init(self.statsd)

        notifiers.load_plugins()
        notifiers.config()

        self._db_repo = get_db_repo()
        self.insert_configured_plugins()
Example #6
    def __init__(self):
        self._statsd = get_statsd_client()

        self._consumer = consumer.KafkaConsumer(
            CONF.kafka.url, ','.join(CONF.zookeeper.url),
            CONF.zookeeper.notification_retry_path, CONF.kafka.group,
            CONF.kafka.notification_retry_topic)
        self._producer = producer.KafkaProducer(CONF.kafka.url)

        self._notifier = notification_processor.NotificationProcessor()
        self._db_repo = get_db_repo()
Example #7
    def __init__(self, config):
        super(RetryEngine, self).__init__(config, config['kafka']['notification_retry_topic'],
                                          config['zookeeper']['notification_retry_path'])

        self._retry_interval = config['retry']['interval']
        self._retry_max = config['retry']['max_attempts']

        self._topics = {}
        self._topics['notification_topic'] = config['kafka']['notification_topic']
        self._topics['retry_topic'] = config['kafka']['notification_retry_topic']

        self._notifier = notification_processor.NotificationProcessor(config)
        self._db_repo = get_db_repo(config)
Example #8
    def __init__(self):
        self._statsd = get_statsd_client()

        self._consumer = consumer.KafkaConsumer(
            CONF.kafka.url,
            ','.join(CONF.zookeeper.url),
            CONF.zookeeper.notification_retry_path,
            CONF.kafka.group,
            CONF.kafka.notification_retry_topic
        )
        self._producer = producer.KafkaProducer(CONF.kafka.url)

        self._notifier = notification_processor.NotificationProcessor()
        self._db_repo = get_db_repo()
Example #9
    def __init__(self, period):
        self._topic_name = CONF.kafka.periodic[period]

        self._statsd = get_statsd_client()

        self._consumer = client_factory.get_kafka_consumer(
            CONF.kafka.url, CONF.kafka.group, self._topic_name,
            CONF.zookeeper.url, CONF.zookeeper.periodic_path[period],
            CONF.kafka.legacy_kafka_client_enabled)
        self._producer = client_factory.get_kafka_producer(
            CONF.kafka.url, CONF.kafka.legacy_kafka_client_enabled)

        self._notifier = notification_processor.NotificationProcessor()
        self._db_repo = get_db_repo()
        self._period = period
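Several of these constructors (Example #6 onward) pull their settings from oslo.config-style CONF groups named kafka and zookeeper rather than from a config dict. A minimal sketch of how such options could be registered, using only the option names visible in the snippets, might look like the following; the types and defaults here are assumptions, not the project's actual definitions:

# Hypothetical oslo.config registration covering only the options that the
# CONF-based constructors above actually read. Names mirror the attribute
# accesses in the snippets; types and defaults are guesses.
from oslo_config import cfg

CONF = cfg.CONF

kafka_opts = [
    cfg.StrOpt('url', default='127.0.0.1:9092', help='Kafka broker address'),
    cfg.StrOpt('group', default='monasca-notification', help='Consumer group'),
    cfg.DictOpt('periodic', default={'60': '60-seconds-notifications'},
                help='Map of period to periodic notification topic'),
    cfg.StrOpt('notification_retry_topic', default='retry-notifications'),
    cfg.BoolOpt('legacy_kafka_client_enabled', default=True),
]

zookeeper_opts = [
    cfg.ListOpt('url', default=['127.0.0.1:2181'], help='ZooKeeper hosts'),
    cfg.DictOpt('periodic_path', default={'60': '/notification/60_seconds'}),
    cfg.StrOpt('notification_retry_path', default='/notification/retry'),
]

CONF.register_opts(kafka_opts, group='kafka')
CONF.register_opts(zookeeper_opts, group='zookeeper')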
Example #10
    def __init__(self):
        self._statsd = get_statsd_client()

        self._consumer = client_factory.get_kafka_consumer(
            CONF.kafka.url,
            CONF.kafka.group,
            CONF.kafka.notification_retry_topic,
            CONF.zookeeper.url,
            CONF.zookeeper.notification_retry_path,
            CONF.kafka.legacy_kafka_client_enabled)
        self._producer = client_factory.get_kafka_producer(
            CONF.kafka.url,
            CONF.kafka.legacy_kafka_client_enabled)

        self._notifier = notification_processor.NotificationProcessor()
        self._db_repo = get_db_repo()
Example #11
    def __init__(self, period):
        self._topic_name = CONF.kafka.periodic[period]

        self._statsd = get_statsd_client()

        zookeeper_path = CONF.zookeeper.periodic_path[period]
        self._consumer = consumer.KafkaConsumer(CONF.kafka.url,
                                                ','.join(CONF.zookeeper.url),
                                                zookeeper_path,
                                                CONF.kafka.group,
                                                self._topic_name)

        self._producer = producer.KafkaProducer(CONF.kafka.url)

        self._notifier = notification_processor.NotificationProcessor()
        self._db_repo = get_db_repo()
        self._period = period
Example #12
    def __init__(self, config, interval):
        self._topic_name = config['kafka']['periodic'][interval]

        self._statsd = monascastatsd.Client(name='monasca',
                                            dimensions=BaseProcessor.dimensions)

        zookeeper_path = config['zookeeper']['periodic_path'][interval]
        self._consumer = KafkaConsumer(config['kafka']['url'],
                                       config['zookeeper']['url'],
                                       zookeeper_path,
                                       config['kafka']['group'],
                                       self._topic_name)

        self._producer = KafkaProducer(config['kafka']['url'])

        self._notifier = NotificationProcessor(config['notification_types'])
        self._db_repo = get_db_repo(config)
Example #13
    def __init__(self, period):
        self._topic_name = CONF.kafka.periodic[period]

        self._statsd = get_statsd_client()

        zookeeper_path = CONF.zookeeper.periodic_path[period]
        self._consumer = consumer.KafkaConsumer(CONF.kafka.url,
                                                ','.join(CONF.zookeeper.url),
                                                zookeeper_path,
                                                CONF.kafka.group,
                                                self._topic_name)

        self._producer = producer.KafkaProducer(CONF.kafka.url)

        self._notifier = notification_processor.NotificationProcessor()
        self._db_repo = get_db_repo()
        self._period = period
Example #14
    def __init__(self, config, interval):
        self._topic_name = config['kafka']['periodic'][interval]

        self._statsd = monascastatsd.Client(
            name='monasca', dimensions=BaseProcessor.dimensions)

        zookeeper_path = config['zookeeper']['periodic_path'][interval]
        self._consumer = KafkaConsumer(config['kafka']['url'],
                                       config['zookeeper']['url'],
                                       zookeeper_path,
                                       config['kafka']['group'],
                                       self._topic_name)

        self._producer = KafkaProducer(config['kafka']['url'])

        self._notifier = NotificationProcessor(config['notification_types'])
        self._db_repo = get_db_repo(config)
Example #15
    def __init__(self, config):
        self._retry_interval = config['retry']['interval']
        self._retry_max = config['retry']['max_attempts']

        self._topics = {}
        self._topics['notification_topic'] = config['kafka'][
            'notification_topic']
        self._topics['retry_topic'] = config['kafka'][
            'notification_retry_topic']

        self._statsd = monascastatsd.Client(
            name='monasca', dimensions=BaseProcessor.dimensions)

        self._consumer = KafkaConsumer(
            config['kafka']['url'], config['zookeeper']['url'],
            config['zookeeper']['notification_retry_path'],
            config['kafka']['group'],
            config['kafka']['notification_retry_topic'])

        self._producer = KafkaProducer(config['kafka']['url'])

        self._notifier = NotificationProcessor(config)
        self._db_repo = get_db_repo(config)
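The dictionary-driven constructors, by contrast, index into a nested config mapping. Collecting the keys they access gives roughly the following shape; this is a minimal sketch inferred from the lookups in the snippets, not the full configuration file, and the values are illustrative placeholders:

# Minimal config dict covering only the keys these constructors read.
config = {
    'kafka': {
        'url': '127.0.0.1:9092',
        'group': 'monasca-notification',
        'notification_topic': 'alarm-notifications',
        'notification_retry_topic': 'retry-notifications',
        'periodic': {60: '60-seconds-notifications'},
    },
    'zookeeper': {
        'url': '127.0.0.1:2181',
        'notification_retry_path': '/notification/retry',
        'periodic_path': {60: '/notification/60_seconds'},
    },
    'retry': {
        'interval': 30,
        'max_attempts': 5,
    },
    # Passed through to notifiers.load_plugins()/config() and the
    # notification processor; its exact contents depend on the enabled plugins.
    'notification_types': {},
}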
Example #16
    def __init__(self, alarm_ttl, config):
        self._alarm_ttl = alarm_ttl
        self._statsd = get_statsd_client(config)
        self._db_repo = get_db_repo(config)
Example #17
    def __init__(self, alarm_ttl, config):
        self._alarm_ttl = alarm_ttl
        self._statsd = monascastatsd.Client(
            name='monasca', dimensions=BaseProcessor.dimensions)
        self._db_repo = get_db_repo(config)
Example #18
    def __init__(self):
        self._alarm_ttl = CONF.alarm_processor.ttl
        self._statsd = get_statsd_client()
        self._db_repo = get_db_repo()
Example #19
    def __init__(self):
        self._alarm_ttl = CONF.alarm_processor.ttl
        self._statsd = get_statsd_client()
        self._db_repo = get_db_repo()
Example #20
    def __init__(self, alarm_ttl, config):
        self._alarm_ttl = alarm_ttl
        self._statsd = monascastatsd.Client(name='monasca',
                                            dimensions=BaseProcessor.dimensions)
        self._db_repo = get_db_repo(config)
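All of the variants lean on shared helpers such as get_statsd_client and get_db_repo, which presumably return a configured monascastatsd client and a database repository. To exercise one of these constructors in isolation (for example in a unit test), hypothetical stand-ins along the following lines would satisfy the calls made in the snippets:

# Hypothetical stand-ins for the shared helpers used throughout these
# snippets, enough to exercise a constructor without statsd or a database.
import collections


class FakeCounter(object):
    """Mimics the counter objects returned by statsd.get_counter()."""
    def __init__(self):
        self.value = 0

    def increment(self, value=1):
        self.value += value


class FakeStatsdClient(object):
    """Mimics the small slice of the statsd client API the snippets use."""
    def __init__(self):
        self._counters = collections.defaultdict(FakeCounter)

    def get_counter(self, name):
        return self._counters[name]


def get_statsd_client(config=None):
    # The real helper presumably builds a monascastatsd.Client.
    return FakeStatsdClient()


def get_db_repo(config=None):
    # The real helper returns a configured alarm/notification repository;
    # any object is enough for the attribute assignments above.
    return object()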