def create_alert():
    """Create a new alert from the JSON request body.

    Returns an error envelope when the payload cannot be parsed; on
    success falls through to the code that follows (not shown here).
    """
    try:
        new_alert = Alert.parse_alert(request.data)
    except ValueError as e:
        # BUG FIX: 'except ValueError, e' is Python-2-only syntax;
        # 'as e' is valid on Python 2.6+ and required on Python 3.
        return jsonify(response={"status": "error", "message": str(e)})
def create_alert():
    """Create a new alert from the JSON request body.

    Returns a flat error response when the payload cannot be parsed; on
    success falls through to the code that follows (not shown here).
    """
    try:
        incoming_alert = Alert.parse_alert(request.data)
    except ValueError as e:
        # BUG FIX: 'except ValueError, e' is Python-2-only syntax;
        # 'as e' is valid on Python 2.6+ and required on Python 3.
        return jsonify(status="error", message=str(e))
def on_message(self, headers, body):
    """Queue alerts for e-mail delivery when their severity warrants it.

    Important alerts (or alerts that just dropped from an important
    severity) are placed on hold in ``self.onhold``; cleared alerts are
    de-queued, repeats extend the hold time.
    """
    LOG.debug("Received: %s", body)
    try:
        alert = Alert.parse_alert(body)
    except ValueError:
        return

    alertid = alert.get_id()
    severity = alert.get_severity()
    previous_severity = alert.previous_severity

    important = (severity_code.CRITICAL, severity_code.MAJOR)
    if severity in important:
        LOG.info('%s : Queue email because alert severity is important',
                 alertid)
    elif previous_severity in important:
        LOG.info('%s : Queue email because alert severity was important',
                 alertid)
    else:
        LOG.info('%s : Do not queue email, not important enough', alertid)
        return

    hold_time = time.time() + _EMAIL_HOLD_TIME
    if alertid not in self.onhold:
        LOG.info('%s : Queued alert on hold until %s', alertid, hold_time)
        self.onhold[alertid] = (alert, hold_time)
    elif severity == severity_code.NORMAL:
        LOG.info('%s : De-queue alert because it has been cleared', alertid)
        del self.onhold[alertid]
    else:
        LOG.info('%s : Extend queue on-hold time to %s', alertid, hold_time)
        self.onhold[alertid] = (alert, hold_time)
def on_message(self, headers, body):
    """Parse an inbound heartbeat or alert and put it on the work queue.

    Messages with malformed headers or unparseable alert bodies are
    rejected and counted via statsd.
    """
    if 'type' not in headers or 'correlation-id' not in headers:
        LOG.warning(
            'Malformed header missing "type" or "correlation-id": %s',
            headers)
        self.statsd.metric_send('alerta.alerts.rejected', 1)
        return

    LOG.info("Received %s %s", headers['type'], headers['correlation-id'])
    LOG.debug("Received body : %s", body)

    if headers['type'] == 'Heartbeat':
        heartbeat = Heartbeat.parse_heartbeat(body)
        if heartbeat:
            heartbeat.receive_now()
            LOG.debug('Queueing successfully parsed heartbeat %s',
                      heartbeat.get_body())
            self.queue.put(heartbeat)
        return

    # Everything that is not a heartbeat is treated as an alert.
    try:
        alert = Alert.parse_alert(body)
    except ValueError:
        self.statsd.metric_send('alerta.alerts.rejected', 1)
        return
    if alert:
        alert.receive_now()
        LOG.debug('Queueing successfully parsed alert %s', alert.get_body())
        self.queue.put(alert)
def on_message(self, headers, body):
    """Validate headers, then queue the parsed heartbeat or alert."""
    header_missing = 'type' not in headers or 'correlation-id' not in headers
    if header_missing:
        LOG.warning('Malformed header missing "type" or "correlation-id": %s',
                    headers)
        self.statsd.metric_send('alerta.alerts.rejected', 1)
        return

    LOG.info("Received %s %s", headers['type'], headers['correlation-id'])
    LOG.debug("Received body : %s", body)

    if headers['type'] != 'Heartbeat':
        # Non-heartbeat messages are parsed as alerts; parse failures
        # are rejected and counted via statsd.
        try:
            incoming = Alert.parse_alert(body)
        except ValueError:
            self.statsd.metric_send('alerta.alerts.rejected', 1)
            return
        if incoming:
            incoming.receive_now()
            LOG.debug('Queueing successfully parsed alert %s',
                      incoming.get_body())
            self.queue.put(incoming)
    else:
        hb = Heartbeat.parse_heartbeat(body)
        if hb:
            hb.receive_now()
            LOG.debug('Queueing successfully parsed heartbeat %s',
                      hb.get_body())
            self.queue.put(hb)
def create_alert():
    """Create a new alert from the JSON request body.

    Returns an error envelope when the payload cannot be parsed; on
    success falls through to the code that follows (not shown here).
    """
    try:
        new_alert = Alert.parse_alert(request.data)
    except ValueError as e:
        # BUG FIX: 'except ValueError, e' is Python-2-only syntax;
        # 'as e' is valid on Python 2.6+ and required on Python 3.
        return jsonify(response={"status": "error", "message": str(e)})
def on_message(self, headers, body):
    """Forward 'pagerduty'-tagged alerts to PagerDuty by status.

    OPEN triggers an incident, ACK acknowledges it, CLOSED resolves it;
    any other status is ignored.
    """
    LOG.debug("Received: %s", body)
    try:
        alert = Alert.parse_alert(body)
    except ValueError:
        return
    if 'pagerduty' not in alert.tags:
        return

    # Dispatch table replaces the original if/elif chain.
    dispatch = {
        status_code.OPEN: self.pd.trigger_event,
        status_code.ACK: self.pd.acknowledge_event,
        status_code.CLOSED: self.pd.resolve_event,
    }
    handler = dispatch.get(alert.status)
    if handler is not None:
        handler(alert)
def on_message(self, headers, body):
    """Index each received alert as a logstash-style document in elasticsearch.

    Parse failures are silently dropped; indexing failures are logged
    and the message is discarded.
    """
    LOG.debug("Received: %s", body)
    try:
        logAlert = Alert.parse_alert(body)
    except ValueError:
        return
    if not logAlert:
        return

    LOG.info('%s : [%s] %s', logAlert.last_receive_id, logAlert.status,
             logAlert.summary)

    # resource appears to be "host:path" -- TODO confirm against producers
    source_host, _, source_path = logAlert.resource.partition(':')
    document = {
        '@message': logAlert.summary,
        '@source': logAlert.resource,
        '@source_host': source_host,
        '@source_path': source_path,
        '@tags': logAlert.tags,
        '@timestamp': logAlert.last_receive_time,
        '@type': logAlert.event_type,
        '@fields': logAlert.get_body()
    }
    LOG.debug('Index payload %s', document)

    # es_index is used as a strftime pattern so indexes roll over by date.
    index_url = "http://%s:%s/%s/%s" % (
        CONF.es_host, CONF.es_port,
        datetime.datetime.utcnow().strftime(CONF.es_index),
        logAlert.event_type)
    LOG.debug('Index URL: %s', index_url)

    try:
        response = urllib2.urlopen(
            index_url, json.dumps(document, cls=DateEncoder)).read()
    except Exception as e:
        # FIX: 'except X, e' is Python-2-only; 'as e' works on 2.6+ and 3.x.
        LOG.error('%s : Alert indexing to %s failed - %s',
                  logAlert.last_receive_id, index_url, e)
        return

    try:
        es_id = json.loads(response)['_id']
        LOG.info('%s : Alert indexed at %s/%s',
                 logAlert.last_receive_id, index_url, es_id)
    except Exception as e:
        # BUG FIX: format string has two placeholders but only the
        # exception was supplied, which itself raised a logging error;
        # also corrected the 'reponse' typo.
        LOG.error('%s : Could not parse elasticsearch response: %s',
                  logAlert.last_receive_id, e)
def on_message(self, headers, body):
    """Send a rate-limited IRC notification for a received alert.

    Drops the message when the token bucket is empty or the body cannot
    be parsed; send failures are logged, not raised.
    """
    if not self.tokens.get_token():
        LOG.warning('%s : No tokens left, rate limiting this alert',
                    headers['correlation-id'])
        return
    LOG.debug("Received: %s", body)
    try:
        ircAlert = Alert.parse_alert(body)
    except ValueError:
        return
    if ircAlert:
        LOG.info('%s : Send IRC message to %s', ircAlert.get_id(),
                 CONF.irc_channel)
        try:
            msg = 'PRIVMSG %s :%s [%s] %s' % (CONF.irc_channel,
                                              ircAlert.get_id(short=True),
                                              ircAlert.status,
                                              ircAlert.summary)
            self.irc.send(msg + '\r\n')
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax;
            # 'as e' is valid on Python 2.6+ and required on Python 3.
            LOG.error('%s : IRC send failed - %s', ircAlert.get_id(), e)
def on_message(self, headers, body):
    """Send a rate-limited IRC notification for a received alert.

    Drops the message when the token bucket is empty or the body cannot
    be parsed; send failures are logged, not raised.
    """
    if not self.tokens.get_token():
        LOG.warning('%s : No tokens left, rate limiting this alert',
                    headers['correlation-id'])
        return
    LOG.debug("Received: %s", body)
    try:
        ircAlert = Alert.parse_alert(body)
    except ValueError:
        return
    if ircAlert:
        LOG.info('%s : Send IRC message to %s', ircAlert.get_id(),
                 CONF.irc_channel)
        try:
            msg = 'PRIVMSG %s :%s [%s] %s' % (CONF.irc_channel,
                                              ircAlert.get_id(short=True),
                                              ircAlert.status,
                                              ircAlert.summary)
            self.irc.send(msg + '\r\n')
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax;
            # 'as e' is valid on Python 2.6+ and required on Python 3.
            LOG.error('%s : IRC send failed - %s', ircAlert.get_id(), e)
def on_message(self, headers, body):
    """Queue parsed heartbeats and alert-typed messages; ignore the rest.

    Unparseable alerts are rejected and counted via statsd.
    """
    # ROBUSTNESS FIX: guard against malformed headers, consistent with
    # the sibling consumers -- previously a missing 'type' or
    # 'correlation-id' key raised an unhandled KeyError below.
    if 'type' not in headers or 'correlation-id' not in headers:
        LOG.warning('Malformed header missing "type" or "correlation-id": %s',
                    headers)
        self.statsd.metric_send('alerta.alerts.rejected', 1)
        return

    LOG.info("Received %s %s", headers['type'], headers['correlation-id'])
    LOG.debug("Received body : %s", body)

    if headers['type'] == 'Heartbeat':
        heartbeat = Heartbeat.parse_heartbeat(body)
        if heartbeat:
            heartbeat.receive_now()
            LOG.debug('Queueing successfully parsed heartbeat %s',
                      heartbeat.get_body())
            self.queue.put(heartbeat)
    elif headers['type'].endswith('Alert'):
        try:
            alert = Alert.parse_alert(body)
        except ValueError:
            self.statsd.metric_send('alerta.alerts.rejected', 1)
            return
        if alert:
            alert.receive_now()
            LOG.debug('Queueing successfully parsed alert %s',
                      alert.get_body())
            self.queue.put(alert)
def on_message(self, headers, body):
    """Index each received alert as a logstash-style document in elasticsearch.

    Parse failures are silently dropped; indexing failures are logged
    and the message is discarded.
    """
    LOG.debug("Received: %s", body)
    try:
        logAlert = Alert.parse_alert(body)
    except ValueError:
        return
    if not logAlert:
        return

    LOG.info('%s : [%s] %s', logAlert.last_receive_id, logAlert.status,
             logAlert.summary)

    # resource appears to be "host:path" -- TODO confirm against producers
    source_host, _, source_path = logAlert.resource.partition(':')
    document = {
        '@message': logAlert.summary,
        '@source': logAlert.resource,
        '@source_host': source_host,
        '@source_path': source_path,
        '@tags': logAlert.tags,
        '@timestamp': logAlert.last_receive_time,
        '@type': logAlert.event_type,
        '@fields': logAlert.get_body()
    }
    LOG.debug('Index payload %s', document)

    # es_index is used as a strftime pattern so indexes roll over by date.
    index_url = "http://%s:%s/%s/%s" % (
        CONF.es_host, CONF.es_port,
        datetime.datetime.utcnow().strftime(CONF.es_index),
        logAlert.event_type)
    LOG.debug('Index URL: %s', index_url)

    try:
        response = urllib2.urlopen(
            index_url, json.dumps(document, cls=DateEncoder)).read()
    except Exception as e:
        # FIX: 'except X, e' is Python-2-only; 'as e' works on 2.6+ and 3.x.
        LOG.error('%s : Alert indexing to %s failed - %s',
                  logAlert.last_receive_id, index_url, e)
        return

    try:
        es_id = json.loads(response)['_id']
        LOG.info('%s : Alert indexed at %s/%s',
                 logAlert.last_receive_id, index_url, es_id)
    except Exception as e:
        # BUG FIX: format string has two placeholders but only the
        # exception was supplied, which itself raised a logging error;
        # also corrected the 'reponse' typo.
        LOG.error('%s : Could not parse elasticsearch response: %s',
                  logAlert.last_receive_id, e)
def on_message(self, headers, body):
    """Decide whether an incoming alert should be held for e-mail delivery.

    Important (or recently important) alerts go on hold in
    ``self.onhold``; cleared alerts are removed, repeats refresh the
    hold deadline.
    """
    LOG.debug("Received: %s", body)
    try:
        incoming = Alert.parse_alert(body)
    except ValueError:
        return

    alertid = incoming.get_id()
    severity = incoming.get_severity()
    was_severity = incoming.previous_severity

    if severity in (severity_code.CRITICAL, severity_code.MAJOR):
        LOG.info('%s : Queue email because alert severity is important',
                 alertid)
    elif was_severity in (severity_code.CRITICAL, severity_code.MAJOR):
        LOG.info('%s : Queue email because alert severity was important',
                 alertid)
    else:
        LOG.info('%s : Do not queue email, not important enough', alertid)
        return

    hold_time = time.time() + _EMAIL_HOLD_TIME
    already_held = alertid in self.onhold

    if already_held and severity == severity_code.NORMAL:
        LOG.info('%s : De-queue alert because it has been cleared', alertid)
        del self.onhold[alertid]
        return

    if already_held:
        LOG.info('%s : Extend queue on-hold time to %s', alertid, hold_time)
    else:
        LOG.info('%s : Queued alert on hold until %s', alertid, hold_time)
    self.onhold[alertid] = (incoming, hold_time)
def on_message(self, headers, body):
    """Relay alert status changes to PagerDuty, skipping webhook echoes.

    Alerts originating from 'pagerduty/webhook' are ignored to avoid
    re-triggering incidents from PagerDuty's own updates.
    """
    LOG.debug("Received: %s", body)
    try:
        pdAlert = Alert.parse_alert(body)
    except ValueError:
        return
    # do not trigger new incidents from updates
    if pdAlert.origin == 'pagerduty/webhook':
        return
    # IDIOM FIX: test membership on the mapping itself -- '.keys()'
    # built a throwaway list on Python 2 for the same result.
    if 'pagerduty' not in pdAlert.tags:
        return

    LOG.info('PagerDuty Incident %s status %s', pdAlert.get_id(),
             pdAlert.status)
    incident_key = pdAlert.get_id()
    if pdAlert.status == status_code.OPEN:
        self.pd.trigger_event(pdAlert, incident_key=incident_key)
    elif pdAlert.status == status_code.ACK:
        self.pd.acknowledge_event(pdAlert, incident_key=incident_key)
    elif pdAlert.status == status_code.CLOSED:
        self.pd.resolve_event(pdAlert, incident_key=incident_key)