def __init__(self, connection):
    """Attach a simple-queue publisher to the configured AMQP queue."""
    config.register_opts(Messaging.amqp_opts)
    queue_name = CONF.amqp_queue
    self.queue = connection.SimpleQueue(queue_name)
    LOG.info('Configured direct publisher on queue "%s"', queue_name)
def __init__(self, host=None, port=None, root=None):
    """API client endpoint settings; falls back to CONF for omitted values."""
    config.register_opts(ApiClient.api_opts)
    self.host = host if host else CONF.api_host
    self.port = port if port else CONF.api_port
    self.root = root if root else CONF.api_root
def shared_component(self):
    """Register a shared option under the 'alert-test' section."""
    self.shared_opts = dict(foo='qux')
    config.register_opts(self.shared_opts, section='alert-test')
def __init__(self, host=None, port=None, protocol=None, prefix=None):
    """Open a socket to a Carbon (Graphite) server.

    Any argument not supplied falls back to the corresponding CONF
    setting.  self._connected records whether connect() succeeded so
    callers can check before sending.
    """
    config.register_opts(Carbon.carbon_opts)

    self.host = host or CONF.carbon_host
    self.port = port or CONF.carbon_port
    self.protocol = protocol or CONF.carbon_protocol
    self.prefix = prefix or CONF.graphite_prefix

    if self.protocol not in ['udp', 'tcp']:
        LOG.error("Protocol must be one of: udp, tcp")
        return

    LOG.debug('Carbon setup to send %s packets to %s:%s', self.protocol, self.host, self.port)

    self.addr = (self.host, int(self.port))

    # BUG FIX: the original tested the 'protocol' *argument* here instead
    # of self.protocol, so a 'udp' protocol taken from CONF.carbon_protocol
    # wrongly opened a TCP socket (compare the threaded variant, which
    # correctly tests self.protocol).
    if self.protocol == 'udp':
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    else:
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    try:
        self.socket.connect(self.addr)
        self._connected = True
    except socket.error:
        self._connected = False
        LOG.warning('Carbon server %s not responding on TCP port %s', self.host, self.port)
def __init__(self, host=None, port=None, version='v2'):
    """API client endpoint settings with an API version."""
    config.register_opts(ApiClient.api_opts)
    self.host = host if host else CONF.api_host
    self.port = port if port else CONF.api_port
    # NOTE(review): with the 'v2' default, CONF.api_version is only
    # consulted when a caller explicitly passes a falsy version --
    # confirm this is intended.
    self.version = version if version else CONF.api_version
def __init__(self):
    """Register AMQP options and open the messaging connection."""
    config.register_opts(Messaging.amqp_opts)
    # Both attributes are expected to be populated by connect().
    self.connection = None
    self.channel = None
    self.connect()
def __init__(self, host=None, port=None, protocol=None, prefix=None):
    """Open a socket to a Carbon server, guarded by a lock for shared use.

    Arguments default to the CONF carbon settings.  self._connected
    records whether the initial connect() succeeded.
    """
    config.register_opts(Carbon.carbon_opts)

    self.host = host if host else CONF.carbon_host
    self.port = port if port else CONF.carbon_port
    self.protocol = protocol if protocol else CONF.carbon_protocol
    self.prefix = prefix if prefix else CONF.graphite_prefix
    self.lock = threading.Lock()

    if self.protocol not in ['udp', 'tcp']:
        LOG.error("Protocol must be one of: udp, tcp")
        return

    LOG.info('Carbon setup to send %s packets to %s:%s', self.protocol, self.host, self.port)

    self.addr = (self.host, int(self.port))

    if self.protocol == 'udp':
        sock_type = socket.SOCK_DGRAM
    else:
        sock_type = socket.SOCK_STREAM
    self.socket = socket.socket(socket.AF_INET, sock_type)

    try:
        # Short timeout so a dead Carbon server does not block start-up.
        self.socket.settimeout(0.5)
        self.socket.connect(self.addr)
        self._connected = True
    except (socket.error, socket.timeout):
        self._connected = False
        LOG.warning('Carbon server %s not responding on TCP port %s', self.host, self.port)
    finally:
        # Restore blocking mode whatever the outcome of connect().
        self.socket.settimeout(None)
def __init__(self):
    """Register Mongo options and establish the database connection."""
    config.register_opts(Mongo.mongo_opts)
    # Both attributes are expected to be populated by connect().
    self.db = None
    self.conn = None
    self.connect()
def __init__(self, alert):
    """Build the email subject line and plain-text body describing an alert."""
    config.register_opts(Mailer.mailer_opt)

    self.subject = '[%s] %s: %s %s on %s %s' % (
        alert.status, alert.environment,
        alert.severity, alert.event,
        ','.join(alert.service), alert.resource)

    # Banner header repeating the subject line.
    self.text = "-" * 60 + "\n"
    self.text += "[%s] %s: %s %s on %s %s\n" % (
        alert.status, alert.environment,
        alert.severity, alert.event,
        ','.join(alert.service), alert.resource)
    self.text += "-" * 60 + "\n\n"
    self.text += "Alert Details\n\n"
    self.text += "Alert ID: %s\n" % alert.get_id()
    self.text += "Create Time: %s\n" % alert.get_date('create_time', 'rfc')
    self.text += "Environment: %s\n" % alert.environment
    self.text += "Services: %s\n" % ", ".join(alert.service)
    self.text += "Resource: %s\n" % alert.resource
    self.text += "Event: %s\n" % alert.event
    self.text += "Group: %s\n" % alert.group
    self.text += "Value: %s\n" % alert.value
    self.text += "Severity: %s -> %s\n" % (alert.previous_severity, alert.severity)
    self.text += "Status: %s\n" % alert.status
    self.text += "Text: %s\n" % alert.text
    self.text += "Duplicate Count: %s\n" % alert.duplicate_count
    self.text += "Origin: %s\n" % alert.origin
    self.text += "Tags: %s\n" % ", ".join(alert.tags)
    # Render each attribute, splitting camelCase keys into title-cased words.
    for k, v in alert.attributes.items():
        self.text += "%s: %s\n" % (re.sub('([a-z0-9])([A-Z])', r'\1 \2', k).title(), v)
    self.text += "\n"
    # self.graph_url is always set so downstream code can test it directly.
    if 'graphUrl' in alert.attributes:
        self.text += "Graph\n\n"
        self.text += '%s\n\n' % alert.attributes['graphUrl']
        self.graph_url = alert.attributes['graphUrl']
    else:
        self.graph_url = None
    if alert.raw_data:
        self.text += "Raw Data\n\n"
        self.text += "%s\n\n" % alert.raw_data
    self.text += "JSON Format\n\n"
    self.text += "%s\n\n" % alert.get_body()
    self.text += "To acknowledge this alert visit this URL:\n"
    # NOTE(review): the alert id is appended directly to dashboard_url with
    # no '?id=' separator, unlike the other mailer variant -- confirm the
    # expected URL format.
    self.text += "%s%s\n\n" % (CONF.dashboard_url, alert.get_id())
    self.text += "Generated by %s on %s at %s\n" % (
        os.path.basename(sys.argv[0]), os.uname()[1],
        datetime.datetime.now().strftime("%a %d %b %H:%M:%S"))

    LOG.debug('Email Text: %s', self.text)
def __init__(self, resource, event, correlate=None, group=None, value=None,
             status=status_code.UNKNOWN, severity=severity_code.NORMAL,
             previous_severity=severity_code.UNKNOWN, environment=None,
             service=None, text=None, event_type=None, tags=None, origin=None,
             repeat=False, duplicate_count=0, threshold_info='n/a',
             summary=None, timeout=86400, alertid=None, last_receive_id=None,
             create_time=None, expire_time=None, receive_time=None,
             last_receive_time=None, trend_indication=None, raw_data=None,
             more_info=None, graph_urls=None, history=None):
    """Create an alert, filling in defaults for every omitted argument.

    resource and event are mandatory; ValueError is raised when either
    is missing.  Timestamps, id and summary are generated when not
    supplied.
    """
    config.register_opts(Alert.alert_opts)

    prog = os.path.basename(sys.argv[0])

    if not resource:
        raise ValueError('Missing mandatory value for resource')
    if not event:
        raise ValueError('Missing mandatory value for event')

    self.resource = resource
    self.event = event
    self.correlate = correlate or list()
    self.group = group or 'Misc'
    # Numeric values are normalised to two decimal places.
    if isfloat(value):
        self.value = '%.2f' % float(value)
    else:
        self.value = value or 'n/a'
    self.status = status
    self.severity = severity
    self.previous_severity = previous_severity
    self.environment = environment or ['PROD']
    self.service = service or ['Undefined']
    self.text = text or ''
    self.event_type = event_type or 'exceptionAlert'
    self.tags = tags or dict()
    self.origin = origin or '%s/%s' % (prog, os.uname()[1])
    self.repeat = repeat
    self.duplicate_count = duplicate_count
    self.threshold_info = threshold_info
    self.summary = summary or '%s - %s %s is %s on %s %s' % (
        ','.join(self.environment), self.severity.capitalize(), self.event,
        self.value, ','.join(self.service), self.resource)
    self.timeout = timeout or CONF.global_timeout
    self.alertid = alertid or str(uuid4())
    if last_receive_id:
        self.last_receive_id = last_receive_id
    else:
        # A brand-new alert is its own last-received event.
        self.last_receive_id = self.alertid
    self.create_time = create_time or datetime.datetime.utcnow()
    self.expire_time = expire_time or self.create_time + datetime.timedelta(seconds=self.timeout)
    # The following attributes are only set when a value is supplied, so
    # attribute *presence* is meaningful to consumers (hasattr checks).
    if receive_time:
        self.receive_time = receive_time
    if last_receive_time:
        self.last_receive_time = last_receive_time
    if trend_indication:
        self.trend_indication = trend_indication
    self.raw_data = raw_data
    self.more_info = more_info
    self.graph_urls = graph_urls or list()
    if history:
        self.history = history
def __init__(self):
    """Register AMQP options and open the messaging connection."""
    config.register_opts(Messaging.amqp_opts)
    # Debug mode enables verbose logging on all loggers (root logger '').
    if CONF.debug:
        setup_logging(loglevel='DEBUG', loggers=[''])
    self.connection = None
    self.connect()
def test_sys_options(self):
    """Options registered under 'alert-test' are readable via CONF."""
    config.register_opts(self.TEST_OPTS, section='alert-test')
    expected = [
        ('host', 'host44'),
        ('port', 55),
        ('ack', False),
        ('locations', ['london', 'paris']),
        ('global_timeout', 86400),
        ('none', None),
    ]
    for name, want in expected:
        self.assertEqual(getattr(CONF, name), want)
def __init__(self, prog, **kwargs):
    """Initialise the Dynect polling daemon state."""
    config.register_opts(DynectDaemon.dynect_opts)
    Daemon.__init__(self, prog, kwargs)
    self.updating = False
    self.info = {}
    self.last_info = {}
    # De-duplication helper with an explicit threshold of 10.
    self.dedup = DeDup(threshold=10)
def __init__(self, by_value=None, threshold=None, duration=None):
    """Configure de-duplication settings, defaulting from CONF."""
    config.register_opts(DeDup.dedup_opts)
    cls = self.__class__
    # Settings live on the class so every DeDup instance shares them.
    cls.by_value = by_value if by_value else CONF.dedup_by_value
    cls.threshold = threshold if threshold else CONF.dedup_threshold
    cls.duration = duration if duration else CONF.dedup_duration
    LOG.info('De-duplicate alerts based on: by_value=%s, threshold=%s, duration=%s',
             cls.by_value, cls.threshold, cls.duration)
def __init__(self, prog, **kwargs):
    """Initialise the AWS monitoring daemon state."""
    config.register_opts(AwsDaemon.aws_opts)
    Daemon.__init__(self, prog, kwargs)
    self.lookup = {}
    self.info = {}
    self.last = {}
    self.dedup = DeDup()
def __init__(self, channel, name=None):
    """Create a durable direct-exchange JSON publisher on the channel."""
    config.register_opts(Messaging.amqp_opts)
    self.channel = channel
    self.exchange_name = name if name else CONF.amqp_queue
    exchange = Exchange(name=self.exchange_name, type='direct',
                        channel=channel, durable=True)
    self.exchange = exchange
    self.producer = Producer(exchange=exchange, channel=channel,
                             serializer='json')
    LOG.info('Configured direct publisher on queue %s', CONF.amqp_queue)
def __init__(self, channel, name=None):
    """Create a fanout-exchange JSON publisher on the channel."""
    config.register_opts(Messaging.amqp_opts)
    self.channel = channel
    self.exchange_name = name if name else CONF.amqp_topic
    exchange = Exchange(name=self.exchange_name, type='fanout',
                        channel=channel)
    self.exchange = exchange
    self.producer = Producer(exchange=exchange, channel=channel,
                             serializer='json')
    LOG.info('Configured fanout publisher on topic "%s"', CONF.amqp_topic)
def __init__(self, connection):
    """Create a fanout publisher on a fresh channel from the connection."""
    config.register_opts(Messaging.amqp_opts)
    channel = connection.channel()
    self.channel = channel
    self.exchange_name = CONF.amqp_topic
    self.exchange = Exchange(name=self.exchange_name, type='fanout',
                             channel=channel)
    self.producer = Producer(exchange=self.exchange, channel=channel)
    LOG.info('Configured fanout publisher on topic "%s"', CONF.amqp_topic)
def __init__(self, alert):
    """Build the email subject line and plain-text body describing an alert."""
    config.register_opts(Mailer.mailer_opt)

    self.subject = '[%s] %s: %s %s on %s %s' % (alert.status, alert.environment, alert.severity, alert.event, ','.join(alert.service), alert.resource)

    # Banner header repeating the subject line.
    self.text = "-" * 60 + "\n"
    self.text += "[%s] %s: %s %s on %s %s\n" % (alert.status, alert.environment, alert.severity, alert.event, ','.join(alert.service), alert.resource)
    self.text += "-" * 60 + "\n\n"
    self.text += "Alert Details\n\n"
    self.text += "Alert ID: %s\n" % alert.get_id()
    self.text += "Create Time: %s\n" % alert.get_date('create_time', 'rfc')
    self.text += "Environment: %s\n" % alert.environment
    self.text += "Services: %s\n" % ", ".join(alert.service)
    self.text += "Resource: %s\n" % alert.resource
    self.text += "Event: %s\n" % alert.event
    self.text += "Group: %s\n" % alert.group
    self.text += "Value: %s\n" % alert.value
    self.text += "Severity: %s -> %s\n" % (alert.previous_severity, alert.severity)
    self.text += "Status: %s\n" % alert.status
    self.text += "Text: %s\n" % alert.text
    self.text += "Duplicate Count: %s\n" % alert.duplicate_count
    self.text += "Origin: %s\n" % alert.origin
    self.text += "Tags: %s\n" % ", ".join(alert.tags)
    # Render each attribute, splitting camelCase keys into title-cased words.
    for k, v in alert.attributes.items():
        self.text += "%s: %s\n" % (re.sub('([a-z0-9])([A-Z])', r'\1 \2', k).title(), v)
    self.text += "\n"
    # self.graph_url is always set so downstream code can test it directly.
    if 'graphUrl' in alert.attributes:
        self.text += "Graph\n\n"
        self.text += '%s\n\n' % alert.attributes['graphUrl']
        self.graph_url = alert.attributes['graphUrl']
    else:
        self.graph_url = None
    if alert.raw_data:
        self.text += "Raw Data\n\n"
        self.text += "%s\n\n" % alert.raw_data
    self.text += "JSON Format\n\n"
    self.text += "%s\n\n" % alert.get_body()
    self.text += "To acknowledge this alert visit this URL:\n"
    self.text += "%s?id=%s\n\n" % (CONF.dashboard_url, alert.get_id())
    self.text += "Generated by %s on %s at %s\n" % (
        os.path.basename(sys.argv[0]), os.uname()[1],
        datetime.datetime.now().strftime("%a %d %b %H:%M:%S"))

    LOG.debug('Email Text: %s', self.text)
def __init__(self): config.register_opts(Mongo.mongo_opts) # Connect to MongoDB try: self.conn = pymongo.MongoClient(CONF.mongo_host, CONF.mongo_port) # version >= 2.4 except AttributeError: self.conn = pymongo.Connection(CONF.mongo_host, CONF.mongo_port) # version < 2.4 except Exception, e: LOG.error('MongoDB Client connection error : %s', e) sys.exit(1)
def __init__(self, by_value=None, threshold=None, duration=None):
    """Configure class-wide de-duplication parameters."""
    config.register_opts(DeDup.dedup_opts)
    cls = self.__class__
    # Stored on the class: every instance sees the same settings.
    cls.by_value = by_value or CONF.dedup_by_value
    cls.threshold = threshold or CONF.dedup_threshold
    cls.duration = duration or CONF.dedup_duration
    LOG.info('De-duplicate alerts based on: by_value=%s, threshold=%s, duration=%s',
             cls.by_value, cls.threshold, cls.duration)
def __init__(self, tokens=None, limit=None, rate=None):
    """Token-bucket rate limiter thread state."""
    config.register_opts(LeakyBucket.token_opts)
    self.tokens = tokens if tokens else CONF.token_limit
    # Bucket capacity defaults to the initial token count.
    self.limit = limit or tokens or CONF.token_limit
    self.rate = rate if rate else float(CONF.token_rate)
    threading.Thread.__init__(self)
    self.shuttingdown = False
    self.running = False
def __init__(self, host=None, port=None, rate=1, prefix=None):
    """UDP client for sending StatsD metric packets."""
    config.register_opts(StatsD.statsd_opts)
    self.host = host if host else CONF.statsd_host
    self.port = port if port else CONF.statsd_port
    self.rate = rate
    self.prefix = prefix if prefix else CONF.graphite_prefix
    LOG.info('Statsd setup to send packets to %s:%s with sample rate of %d',
             self.host, self.port, self.rate)
    self.addr = (self.host, int(self.port))
    # StatsD is UDP-based; no connect() is needed.
    self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def __init__(self, host=None, port=None, rate=1, prefix=None):
    """UDP client for sending StatsD metric packets."""
    config.register_opts(StatsD.statsd_opts)
    self.rate = rate
    self.host = host or CONF.statsd_host
    self.port = port or CONF.statsd_port
    self.prefix = prefix or CONF.graphite_prefix
    LOG.debug('Statsd setup to send packets to %s:%s with sample rate of %d',
              self.host, self.port, self.rate)
    self.addr = (self.host, int(self.port))
    # StatsD is UDP-based; no connect() is needed.
    self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def setup(name):
    """Setup logging.

    Attaches syslog, watched-file and (optionally) coloured stderr
    handlers to the named logger, then sets the level from CONF.
    """
    config.register_opts(log_opts)

    # Route uncaught exceptions through the named logger.
    sys.excepthook = _create_logging_excepthook(name)

    log_root = getLogger(name)

    if CONF.use_syslog:
        facility = CONF.syslog_facility
        try:
            syslog = logging.handlers.SysLogHandler(address='/dev/log', facility=facility)
        except IOError:
            # Best effort: carry on without syslog if the socket is unavailable.
            pass
        else:
            log_root.addHandler(syslog)

    logpath = _get_log_file_path()
    if logpath:
        try:
            filelog = logging.handlers.WatchedFileHandler(logpath, encoding='utf-8')
        except IOError:
            raise
        log_root.addHandler(filelog)

    # TODO(nsatterl): test mode like openstack??

    # Apply the default format to every handler attached so far.
    for handler in log_root.handlers:
        log_format = _DEFAULT_LOG_FORMAT
        date_format = _DEFAULT_LOG_DATE_FORMAT
        handler.setFormatter(
            logging.Formatter(fmt=log_format, datefmt=date_format))

    if CONF.use_stderr:
        # Added after the formatting loop, so the stream handler keeps its
        # own colour format rather than the default one.
        streamlog = ColorHandler()
        color_fmt = logging.Formatter("%(color)s" + _DEFAULT_LOG_FORMAT + "\033[0m")
        streamlog.setFormatter(color_fmt)
        log_root.addHandler(streamlog)

    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)
def __init__(self, alert):
    """Build the email subject line and plain-text body describing an alert."""
    config.register_opts(Mailer.mailer_opt)

    self.subject = '[%s] %s' % (alert.status, alert.summary)

    # Banner header repeating the subject line.
    self.text = "-" * 60 + "\n"
    self.text += "[%s] %s\n" % (alert.status, alert.summary)
    self.text += "-" * 60 + "\n\n"
    self.text += "Alert Details\n\n"
    self.text += "Alert ID: %s\n" % alert.get_id()
    self.text += "Create Time: %s\n" % alert.get_create_time()
    self.text += "Environment: %s\n" % ", ".join(alert.environment)
    self.text += "Service: %s\n" % ", ".join(alert.service)
    self.text += "Resource: %s\n" % alert.resource
    self.text += "Event: %s\n" % alert.event
    self.text += "Group: %s\n" % alert.group
    self.text += "Value: %s\n" % alert.value
    self.text += "Severity: %s -> %s\n" % (alert.previous_severity, alert.severity)
    self.text += "Status: %s\n" % alert.status
    self.text += "Text: %s\n" % alert.text
    self.text += "Threshold Info: %s\n" % alert.threshold_info
    self.text += "Duplicate Count: %s\n" % alert.duplicate_count
    self.text += "Origin: %s\n" % alert.origin
    # Tags are a mapping here; rendered as key=value pairs.
    self.text += "Tags: %s\n" % ", ".join(k + '=' + v for k, v in alert.tags.items())
    self.text += "More Info: %s\n\n" % alert.more_info
    # graph_urls may be absent on some alert objects, hence hasattr.
    if hasattr(alert, 'graph_urls'):
        self.text += "Graphs\n\n"
        for graph in alert.graph_urls:
            self.text += '%s\n' % graph
        self.text += "\n"
    if CONF.debug:
        self.text += "Raw Alert\n\n"
        self.text += "%s\n\n" % alert.get_body()
    self.text += "To acknowledge this alert visit this URL:\n"
    self.text += "%s?id=%s\n\n" % (CONF.dashboard_url, alert.get_id())
    self.text += "Generated by %s on %s at %s\n" % (
        os.path.basename(sys.argv[0]), os.uname()[1],
        datetime.datetime.now().strftime("%a %d %b %H:%M:%S"))

    LOG.debug('Email Text: %s', self.text)

    self.graph_urls = alert.graph_urls if hasattr(alert, 'graph_urls') else None
def setup(name):
    """Setup logging.

    Configures syslog, watched-file and optional coloured stderr handlers
    on the named logger, then sets the log level from CONF flags.
    """
    config.register_opts(log_opts)

    # Route uncaught exceptions through the named logger.
    sys.excepthook = _create_logging_excepthook(name)

    log_root = getLogger(name)

    if CONF.use_syslog:
        facility = CONF.syslog_facility
        try:
            syslog = logging.handlers.SysLogHandler(address='/dev/log', facility=facility)
        except IOError:
            # Best effort: carry on without syslog if the socket is unavailable.
            pass
        else:
            log_root.addHandler(syslog)

    logpath = _get_log_file_path()
    if logpath:
        try:
            filelog = logging.handlers.WatchedFileHandler(logpath, encoding='utf-8')
        except IOError:
            raise
        log_root.addHandler(filelog)

    # TODO(nsatterl): test mode like openstack??

    # Apply the default format to every handler attached so far.
    for handler in log_root.handlers:
        log_format = _DEFAULT_LOG_FORMAT
        date_format = _DEFAULT_LOG_DATE_FORMAT
        handler.setFormatter(logging.Formatter(fmt=log_format, datefmt=date_format))

    if CONF.use_stderr:
        # Added after the formatting loop, so the stream handler keeps its
        # own colour format rather than the default one.
        streamlog = ColorHandler()
        color_fmt = logging.Formatter("%(color)s" + _DEFAULT_LOG_FORMAT + "\033[0m")
        streamlog.setFormatter(color_fmt)
        log_root.addHandler(streamlog)

    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)
def __init__(self, host=None, port=None, protocol=None):
    """UDP sender for Ganglia gmetric packets."""
    config.register_opts(Gmetric.ganglia_opts)
    self.host = host if host else CONF.gmetric_host
    self.port = port if port else CONF.gmetric_port
    self.protocol = protocol if protocol else CONF.gmetric_protocol
    if self.protocol not in PROTOCOLS:
        LOG.error("Protocol must be one of: %s", ','.join(PROTOCOLS))
        return
    LOG.debug('Gmetric setup to send %s packets to %s:%s',
              self.protocol, self.host, self.port)
    self.hostport = (self.host, int(self.port))
    self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    if self.protocol == 'multicast':
        # Allow multicast packets to cross up to 20 router hops.
        self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 20)
def setup(name):
    """Setup logging.

    Attaches a syslog handler (when enabled) to the named logger, using
    the platform-appropriate syslog socket.
    """
    config.register_opts(log_opts)

    # Route uncaught exceptions through the named logger.
    sys.excepthook = _create_logging_excepthook(name)

    log_root = getLogger(name)
    if CONF.use_syslog:
        # macOS exposes the syslog socket at a different path.
        # NOTE(review): the local name 'socket' shadows the stdlib socket
        # module inside this function.
        if sys.platform == "darwin":
            socket = '/var/run/syslog'
        else:
            socket = '/dev/log'
        facility = CONF.syslog_facility
        try:
            syslog = logging.handlers.SysLogHandler(address=socket, facility=facility)
        except IOError, e:
            # Best effort: report the failure but keep running without syslog.
            print >>sys.stderr, 'ERROR - Failed to log to syslog socket %s: %s' % (socket, e)
        else:
            log_root.addHandler(syslog)
def normalise_alert(alert, trapoid=None, facility=None, level=None, **kwargs): """ Transforms alert based on configuration contained in YAML file. """ config.register_opts(Transformers.transform_opts) if not CONF.yaml_config: return LOG.info('Transform alert %s using %s', alert.id, CONF.yaml_config) if not os.path.exists(CONF.yaml_config): return suppress = False try: conf = yaml.load(open(CONF.yaml_config)) LOG.info('Loaded %d transformer configurations OK', len(conf)) except Exception, e: LOG.error('Failed to load transformer configuration %s: %s', CONF.yaml_config, e) raise RuntimeError
def __init__(self, prog, **kwargs):
    """Register IRC bot options and initialise the base daemon."""
    config.register_opts(IrcbotDaemon.ircbot_opts)
    # kwargs is passed as a single positional dict, matching the Daemon
    # base-class convention used throughout this codebase.
    Daemon.__init__(self, prog, kwargs)
def __init__(self, resource, event, correlate=None, group=None, value=None,
             status=status_code.UNKNOWN, severity=severity_code.NORMAL,
             previous_severity=severity_code.UNKNOWN, environment=None,
             service=None, text=None, event_type=None, tags=None, origin=None,
             repeat=False, duplicate_count=0, threshold_info='n/a',
             summary=None, timeout=86400, alertid=None, last_receive_id=None,
             create_time=None, expire_time=None, receive_time=None,
             last_receive_time=None, trend_indication=None, raw_data=None,
             more_info=None, graph_urls=None, history=None):
    """Create an alert, filling in defaults for every omitted argument.

    resource and event are mandatory; ValueError is raised when either
    is missing.  Timestamps, id and summary are generated when not
    supplied.
    """
    config.register_opts(Alert.alert_opts)

    prog = os.path.basename(sys.argv[0])

    if not resource:
        raise ValueError('Missing mandatory value for resource')
    if not event:
        raise ValueError('Missing mandatory value for event')

    self.resource = resource
    self.event = event
    self.correlate = correlate or list()
    self.group = group or 'Misc'
    # Numeric values are normalised to two decimal places.
    if isfloat(value):
        self.value = '%.2f' % float(value)
    else:
        self.value = value or 'n/a'
    self.status = status
    self.severity = severity
    self.previous_severity = previous_severity
    self.environment = environment or ['PROD']
    self.service = service or ['Undefined']
    self.text = text or ''
    self.event_type = event_type or 'exceptionAlert'
    self.tags = tags or dict()
    self.origin = origin or '%s/%s' % (prog, os.uname()[1])
    self.repeat = repeat
    self.duplicate_count = duplicate_count
    self.threshold_info = threshold_info
    self.summary = summary or '%s - %s %s is %s on %s %s' % (
        ','.join(self.environment), self.severity.capitalize(), self.event,
        self.value, ','.join(self.service), self.resource)
    self.timeout = timeout or CONF.global_timeout
    self.alertid = alertid or str(uuid4())
    if last_receive_id:
        self.last_receive_id = last_receive_id
    else:
        # A brand-new alert is its own last-received event.
        self.last_receive_id = self.alertid
    self.create_time = create_time or datetime.datetime.utcnow()
    self.expire_time = expire_time or self.create_time + datetime.timedelta(
        seconds=self.timeout)
    # The following attributes are only set when a value is supplied, so
    # attribute *presence* is meaningful to consumers (hasattr checks).
    if receive_time:
        self.receive_time = receive_time
    if last_receive_time:
        self.last_receive_time = last_receive_time
    if trend_indication:
        self.trend_indication = trend_indication
    self.raw_data = raw_data
    self.more_info = more_info
    self.graph_urls = graph_urls or list()
    if history:
        self.history = history
def __init__(self, prog, **kwargs):
    """Register alerta daemon options and initialise the base daemon."""
    config.register_opts(AlertaDaemon.alerta_opts)
    # kwargs is passed as a single positional dict (Daemon convention).
    Daemon.__init__(self, prog, kwargs)
def __init__(self):
    """Register STOMP messaging options and configure stomp.py logging."""
    config.register_opts(Messaging.mq_opts)
    logging.setup('stomp.py')
def __init__(self, prog, **kwargs):
    """Register SolarWinds options and initialise the base daemon."""
    config.register_opts(SolarWindsDaemon.solarwinds_opts)
    # kwargs is passed as a single positional dict (Daemon convention).
    Daemon.__init__(self, prog, kwargs)
def __init__(self, prog, **kwargs):
    """Register PagerDuty options and initialise the base daemon."""
    config.register_opts(PagerDutyDaemon.pagerduty_opts)
    # kwargs is passed as a single positional dict (Daemon convention).
    Daemon.__init__(self, prog, kwargs)
def __init__(self, prog, **kwargs):
    """Register pinger options and initialise the base daemon."""
    config.register_opts(PingerDaemon.pinger_opts)
    # kwargs is passed as a single positional dict (Daemon convention).
    Daemon.__init__(self, prog, kwargs)
def __init__(self, prog, **kwargs):
    """Register URL monitor options and initialise the base daemon."""
    config.register_opts(UrlmonDaemon.urlmon_opts)
    # kwargs is passed as a single positional dict (Daemon convention).
    Daemon.__init__(self, prog, kwargs)
def __init__(self):
    """Register the Nagios checker options; no other setup is required."""
    config.register_opts(CheckerClient.nagios_opts)
def test_interpolation(self):
    """Register interpolation options under the 'alert-test' section."""
    config.register_opts(self.INTER_OPTS, section='alert-test')
def __init__(self, prog, **kwargs):
    """Register logger daemon options and initialise the base daemon."""
    config.register_opts(LoggerDaemon.logger_opts)
    # kwargs is passed as a single positional dict (Daemon convention).
    Daemon.__init__(self, prog, kwargs)
def __init__(self, prog, **kwargs):
    """Register CloudWatch options and initialise the base daemon."""
    config.register_opts(CloudWatchDaemon.cloudwatch_opts)
    # kwargs is passed as a single positional dict (Daemon convention).
    Daemon.__init__(self, prog, kwargs)
def shared_component(self):
    """Register the shared 'foo' option in the 'alert-test' section."""
    self.shared_opts = dict(foo='qux')
    config.register_opts(self.shared_opts, section='alert-test')