def __init__(self, config):
    """Build the kafka consumer/producer and processors from *config*.

    Args:
        config: parsed configuration mapping with 'kafka', 'zookeeper',
            'mysql', 'processors' and 'notification_types' sections.
    """
    kafka_cfg = config['kafka']
    zk_cfg = config['zookeeper']
    mysql_cfg = config['mysql']

    self._topics = {
        'notification_topic': kafka_cfg['notification_topic'],
        'retry_topic': kafka_cfg['notification_retry_topic'],
    }

    self._statsd = monascastatsd.Client(name='monasca',
                                        dimensions=BaseProcessor.dimensions)

    self._consumer = KafkaConsumer(kafka_cfg['url'],
                                   zk_cfg['url'],
                                   zk_cfg['notification_path'],
                                   kafka_cfg['group'],
                                   kafka_cfg['alarm_topic'])
    self._producer = KafkaProducer(kafka_cfg['url'])

    # The mysql 'ssl' section is optional; AlarmProcessor accepts None.
    ssl_section = mysql_cfg.get('ssl')

    self._alarms = AlarmProcessor(config['processors']['alarm']['ttl'],
                                  mysql_cfg['host'],
                                  mysql_cfg['user'],
                                  mysql_cfg['passwd'],
                                  mysql_cfg['db'],
                                  ssl_section)
    self._notifier = NotificationProcessor(config['notification_types'])
class NotificationEngine(object):
    """Turns alarms read from kafka into notifications and sends them.

    Sent notifications are published to the notification topic; failed
    ones go to the retry topic for later reprocessing.
    """

    def __init__(self, config):
        """Wire up consumer, producer and processors from *config*."""
        kafka = config['kafka']
        mysql = config['mysql']

        self._topics = {
            'notification_topic': kafka['notification_topic'],
            'retry_topic': kafka['notification_retry_topic'],
        }

        self._statsd = monascastatsd.Client(
            name='monasca', dimensions=BaseProcessor.dimensions)

        self._consumer = KafkaConsumer(kafka['url'],
                                       config['zookeeper']['url'],
                                       config['zookeeper']['notification_path'],
                                       kafka['group'],
                                       kafka['alarm_topic'])
        self._producer = KafkaProducer(kafka['url'])

        # Optional mysql ssl section; AlarmProcessor takes None when absent.
        self._alarms = AlarmProcessor(config['processors']['alarm']['ttl'],
                                      mysql['host'],
                                      mysql['user'],
                                      mysql['passwd'],
                                      mysql['db'],
                                      mysql.get('ssl'))
        self._notifier = NotificationProcessor(config['notification_types'])

    def run(self):
        """Consume alarms forever, publishing send results and committing."""
        finished_count = self._statsd.get_counter(name='alarms_finished_count')
        for alarm in self._consumer:
            notifications, partition, _offset = \
                self._alarms.to_notification(alarm)
            if notifications:
                sent, failed = self._notifier.send(notifications)
                self._producer.publish(self._topics['notification_topic'],
                                       sent)
                self._producer.publish(self._topics['retry_topic'], failed)
            # NOTE(review): source formatting was collapsed to one line, so
            # the placement of commit/increment relative to the `if` above is
            # reconstructed (commit every alarm so offsets always advance) —
            # confirm against the original layout.
            self._consumer.commit([partition])
            finished_count.increment()
def __init__(self, config):
    """Set up retry limits, kafka plumbing and the notifier from *config*.

    Args:
        config: parsed configuration mapping with 'retry', 'kafka',
            'zookeeper' and 'notification_types' sections.
    """
    retry_cfg = config['retry']
    kafka_cfg = config['kafka']

    self._retry_interval = retry_cfg['interval']
    self._retry_max = retry_cfg['max_attempts']

    self._topics = {
        'notification_topic': kafka_cfg['notification_topic'],
        'retry_topic': kafka_cfg['notification_retry_topic'],
    }

    self._statsd = monascastatsd.Client(name='monasca',
                                        dimensions=BaseProcessor.dimensions)

    # The retry engine consumes from the retry topic, not the alarm topic.
    self._consumer = KafkaConsumer(kafka_cfg['url'],
                                   config['zookeeper']['url'],
                                   config['zookeeper']['notification_retry_path'],
                                   kafka_cfg['group'],
                                   kafka_cfg['notification_retry_topic'])
    self._producer = KafkaProducer(kafka_cfg['url'])

    self._notifier = NotificationProcessor(config['notification_types'])
def __init__(self, config):
    """Initialize retry state and kafka endpoints from the *config* mapping."""
    self._retry_interval = config['retry']['interval']
    self._retry_max = config['retry']['max_attempts']

    self._topics = {}
    self._topics['notification_topic'] = config['kafka']['notification_topic']
    self._topics['retry_topic'] = config['kafka']['notification_retry_topic']

    self._statsd = monascastatsd.Client(
        name='monasca',
        dimensions=BaseProcessor.dimensions)

    # Consume previously-failed notifications from the retry topic.
    self._consumer = KafkaConsumer(
        config['kafka']['url'],
        config['zookeeper']['url'],
        config['zookeeper']['notification_retry_path'],
        config['kafka']['group'],
        config['kafka']['notification_retry_topic'])
    self._producer = KafkaProducer(config['kafka']['url'])

    self._notifier = NotificationProcessor(config['notification_types'])
class RetryEngine(object):
    """Re-sends notifications that failed, up to a configured retry limit."""

    def __init__(self, config):
        """Set up retry limits, kafka plumbing and the notifier."""
        self._retry_interval = config['retry']['interval']
        self._retry_max = config['retry']['max_attempts']

        kafka = config['kafka']
        self._topics = {
            'notification_topic': kafka['notification_topic'],
            'retry_topic': kafka['notification_retry_topic'],
        }

        self._statsd = monascastatsd.Client(
            name='monasca', dimensions=BaseProcessor.dimensions)

        # This engine reads from the retry topic rather than the alarm topic.
        self._consumer = KafkaConsumer(kafka['url'],
                                       config['zookeeper']['url'],
                                       config['zookeeper']['notification_retry_path'],
                                       kafka['group'],
                                       kafka['notification_retry_topic'])
        self._producer = KafkaProducer(kafka['url'])

        self._notifier = NotificationProcessor(config['notification_types'])

    def run(self):
        """Replay failed notifications, pacing, re-queuing or giving up."""
        for raw in self._consumer:
            partition = raw[0]
            offset = raw[1].offset
            payload = json.loads(raw[1].message.value)

            ntype = payload['type']
            name = payload['name']
            addr = payload['address']
            notification = Notification(ntype, partition, offset, name, addr,
                                        payload['retry_count'],
                                        payload['raw_alarm'])

            # Honor the retry interval: sleep off any remaining wait time
            # since the notification was last attempted.
            remaining = self._retry_interval - (
                time.time() - payload['notification_timestamp'])
            if remaining > 0:
                time.sleep(remaining)

            sent, failed = self._notifier.send([notification])

            if sent:
                self._producer.publish(self._topics['notification_topic'],
                                       sent)

            if failed:
                notification.retry_count += 1
                notification.notification_timestamp = time.time()
                if notification.retry_count < self._retry_max:
                    log.error("retry failed for {} with name {} "
                              "at {}. "
                              "Saving for later retry.".format(ntype, name,
                                                               addr))
                    self._producer.publish(self._topics['retry_topic'],
                                           [notification])
                else:
                    # Exhausted all attempts; drop it and just log.
                    log.error("retry failed for {} with name {} "
                              "at {} after {} retries. "
                              "Giving up on retry."
                              .format(ntype, name, addr, self._retry_max))

            self._consumer.commit([partition])
class RetryEngine(object):
    """Consumes the retry topic and re-attempts failed notifications."""

    def __init__(self, config):
        """Read retry settings and build kafka consumer/producer/notifier."""
        self._retry_interval = config['retry']['interval']
        self._retry_max = config['retry']['max_attempts']

        self._topics = {}
        self._topics['notification_topic'] = config['kafka'][
            'notification_topic']
        self._topics['retry_topic'] = config['kafka'][
            'notification_retry_topic']

        self._statsd = monascastatsd.Client(
            name='monasca', dimensions=BaseProcessor.dimensions)

        # Source of work is the retry topic, tracked under its own zk path.
        self._consumer = KafkaConsumer(
            config['kafka']['url'],
            config['zookeeper']['url'],
            config['zookeeper']['notification_retry_path'],
            config['kafka']['group'],
            config['kafka']['notification_retry_topic'])
        self._producer = KafkaProducer(config['kafka']['url'])

        self._notifier = NotificationProcessor(config['notification_types'])

    def run(self):
        """Loop forever over retry messages, resending each notification."""
        for raw_notification in self._consumer:
            partition, kafka_msg = raw_notification[0], raw_notification[1]
            offset = kafka_msg.offset
            data = json.loads(kafka_msg.message.value)

            ntype, name, addr = data['type'], data['name'], data['address']
            notification = Notification(ntype, partition, offset, name, addr,
                                        data['retry_count'],
                                        data['raw_alarm'])

            # Delay until the configured retry interval has elapsed since
            # the last attempt at this notification.
            wait_duration = self._retry_interval - (
                time.time() - data['notification_timestamp'])
            if wait_duration > 0:
                time.sleep(wait_duration)

            sent, failed = self._notifier.send([notification])

            if sent:
                self._producer.publish(self._topics['notification_topic'],
                                       sent)

            if failed:
                notification.retry_count += 1
                notification.notification_timestamp = time.time()
                if notification.retry_count < self._retry_max:
                    log.error("retry failed for {} with name {} "
                              "at {}. "
                              "Saving for later retry.".format(
                                  ntype, name, addr))
                    self._producer.publish(self._topics['retry_topic'],
                                           [notification])
                else:
                    # Retry budget exhausted — log and abandon the message.
                    log.error("retry failed for {} with name {} "
                              "at {} after {} retries. "
                              "Giving up on retry.".format(
                                  ntype, name, addr, self._retry_max))

            self._consumer.commit([partition])