def create_alert(alert, alert_config):
    """Instantiate a single Alerter from its name and configuration.

    :param alert: Alert name (a key into alerts_mapping) or a dotted module path
        resolvable by get_module.
    :param alert_config: Configuration dictionary passed to the Alerter constructor.
    :return: The instantiated alert object.
    :raises EAException: If the resolved class is not a subclass of alerts.Alerter.
    """
    alert_class = alerts_mapping.get(alert) or get_module(alert)
    if not issubclass(alert_class, alerts.Alerter):
        raise EAException('Alert module %s is not a subclass of Alerter' % (alert))
    # (Removed a block of commented-out required-option validation dead code.)
    return alert_class(alert_config)
def load_global_config():
    """Load the global configuration from ``global_config.json`` in the cwd.

    :return: The parsed configuration dictionary (``{}`` only if parsing never ran).
    :raises EAException: If the file contents cannot be parsed as JSON.
    """
    filename = "global_config.json"
    global_config = {}
    try:
        # Use a context manager so the file handle is always closed (the old
        # code leaked it), and json.load instead of json.loads(..., encoding=...)
        # -- the `encoding` keyword was removed from json.loads in Python 3.9.
        with open(filename) as fh:
            global_config = json.load(fh)
    except ValueError as e:
        raise EAException('Could not parse file %s: %s' % (filename, e))
    return global_config
def __init__(self, *args):
    """Validate percentage thresholds and build the aggregation query element."""
    super(PercentageMatchRule, self).__init__(*args)
    self.ts_field = self.rules.get('timestamp_field', '@timestamp')
    # At least one bound is mandatory; otherwise the rule can never match.
    has_bound = 'max_percentage' in self.rules or 'min_percentage' in self.rules
    if not has_bound:
        raise EAException("PercentageMatchRule must have at least one of either min_percentage or max_percentage")
    self.match_bucket_filter = self.rules['match_bucket_filter']
    self.rules['aggregation_query_element'] = self.generate_aggregation_query()
def alert(self, matches):
    """Run the configured external command, optionally piping matches as JSON.

    :param matches: List of match dictionaries; matches[0] fills %-style
        placeholders in the configured command arguments.
    :raises EAException: If command formatting fails or the command cannot start.
    """
    # Format the command and arguments
    try:
        command = [command_arg % matches[0] for command_arg in self.rule['command']]
        self.last_command = command
    except KeyError as e:
        raise EAException("Error formatting command: %s" % (e))

    # Run command and pipe data
    try:
        subp = subprocess.Popen(command, stdin=subprocess.PIPE)
        if self.rule.get('pipe_match_json'):
            # NOTE(review): matches may contain datetimes json can't serialize
            # -- a sibling implementation uses a DateTimeEncoder; confirm.
            match_json = json.dumps(matches) + '\n'
            stdout, stderr = subp.communicate(input=match_json)
        else:
            # Bug fix: close the child's stdin even when nothing is piped,
            # otherwise the pipe is leaked and a command that reads stdin
            # blocks forever waiting for EOF.
            subp.stdin.close()
    except OSError as e:
        raise EAException("Error while running command %s: %s" % (' '.join(command), e))
def __init__(self, *args):
    """Validate metric aggregation settings and build the query element."""
    super(MetricAggregationRule, self).__init__(*args)
    self.ts_field = self.rules.get('timestamp_field', '@timestamp')
    # Require at least one threshold, otherwise no match is possible.
    if not ('max_threshold' in self.rules or 'min_threshold' in self.rules):
        raise EAException("MetricAggregationRule must have at least one of either max_threshold or min_threshold")
    agg_key = self.rules['metric_agg_key']
    agg_type = self.rules['metric_agg_type']
    # Composite key under which the aggregated metric is reported.
    self.metric_key = agg_key + '_' + agg_type
    if agg_type not in self.allowed_aggregations:
        raise EAException("metric_agg_type must be one of %s" % (str(self.allowed_aggregations)))
    self.rules['aggregation_query_element'] = self.generate_aggregation_query()
def add_count_data(self, data):
    """ Add count data to the rule. Data should be of the form {ts: count}. """
    # Only one timestamp/count pair may be submitted per call.
    if len(data) > 1:
        raise EAException('add_count_data can only accept one count at a time')
    for ts, count in data.iteritems():
        # Events are (document, count) tuples accumulated under a single
        # 'all' bucket, windowed by the rule's configured timeframe.
        event = ({self.ts_field: ts}, count)
        self.occurrences.setdefault('all', EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event)
        self.check_for_match()
def alert(self, matches):
    """Send an email alert for the given matches.

    Builds the message body (appending a JIRA ticket link if the pipeline
    created one), connects to the SMTP host (SSL or STARTTLS as configured),
    authenticates if an auth file is set, and sends to the To/CC/BCC list.

    :param matches: List of match dictionaries for this alert.
    :raises EAException: If connecting to or authenticating with SMTP fails.
    """
    body = self.create_alert_body(matches)

    # Add JIRA ticket if it exists
    if self.pipeline is not None and 'jira_ticket' in self.pipeline:
        url = '%s/browse/%s' % (self.pipeline['jira_server'], self.pipeline['jira_ticket'])
        body += '\nJIRA ticket: %s' % (url)

    to_addr = self.rule['email']
    email_msg = MIMEText(body.encode('UTF-8'), _charset='UTF-8')
    email_msg['Subject'] = self.create_title(matches)
    email_msg['To'] = ', '.join(self.rule['email'])
    email_msg['From'] = self.from_addr
    email_msg['Reply-To'] = self.rule.get('email_reply_to', email_msg['To'])
    if self.rule.get('cc'):
        email_msg['CC'] = ','.join(self.rule['cc'])
        to_addr = to_addr + self.rule['cc']
    if self.rule.get('bcc'):
        # BCC addresses go to the envelope only, never into headers.
        to_addr = to_addr + self.rule['bcc']
    try:
        if self.smtp_ssl:
            if self.smtp_port:
                self.smtp = SMTP_SSL(self.smtp_host, self.smtp_port)
            else:
                self.smtp = SMTP_SSL(self.smtp_host)
        else:
            if self.smtp_port:
                self.smtp = SMTP(self.smtp_host, self.smtp_port)
            else:
                self.smtp = SMTP(self.smtp_host)
            # Upgrade plaintext connections when the server supports STARTTLS.
            self.smtp.ehlo()
            if self.smtp.has_extn('STARTTLS'):
                self.smtp.starttls()
        if 'smtp_auth_file' in self.rule:
            self.smtp.login(self.user, self.password)
    # Bug fix: SMTPAuthenticationError is a subclass of SMTPException, so it
    # must be caught first -- previously this handler was unreachable and auth
    # failures produced the generic "Error connecting" message.
    except SMTPAuthenticationError as e:
        raise EAException("SMTP username/password rejected: %s" % (e))
    except (SMTPException, error) as e:
        raise EAException("Error connecting to SMTP host: %s" % (e))
    self.smtp.sendmail(self.from_addr, to_addr, email_msg.as_string())
    self.smtp.close()
    elastalert_logger.info("Sent email to %s" % (self.rule['email']))
def alert(self, matches):
    """Build an OpsGenie alert payload from the matches and POST it.

    :param matches: List of match dictionaries; matches[0] supplies values
        for templated fields (message, tags, alias, responders).
    :raises EAException: If the HTTP request to OpsGenie fails.
    """
    body = ''
    for match in matches:
        body += unicode(BasicMatchString(self.rule, match))
        # Separate text of aggregated alerts with dashes
        if len(matches) > 1:
            body += '\n----------------------------------------\n'
    if self.custom_message is None:
        self.message = self.create_title(matches)
    else:
        self.message = self.custom_message.format(**matches[0])
    # Resolve templated responder/team lists against the matches, falling
    # back to the configured defaults.
    self.recipients = self._parse_responders(self.recipients, self.recipients_args, matches, self.default_reciepients)
    self.teams = self._parse_responders(self.teams, self.teams_args, matches, self.default_teams)
    post = {}
    post['message'] = self.message
    if self.account:
        post['user'] = self.account
    if self.recipients:
        post['responders'] = [{'username': r, 'type': 'user'} for r in self.recipients]
    if self.teams:
        post['teams'] = [{'name': r, 'type': 'team'} for r in self.teams]
    post['description'] = body
    post['source'] = 'ElastAlert'
    # Tags may contain format placeholders filled from the first match.
    for i, tag in enumerate(self.tags):
        self.tags[i] = tag.format(**matches[0])
    post['tags'] = self.tags
    # Only forward the priority when it is a value OpsGenie accepts.
    if self.priority and self.priority not in ('P1', 'P2', 'P3', 'P4', 'P5'):
        logging.warn("Priority level does not appear to be specified correctly. \
Please make sure to set it to a value between P1 and P5")
    else:
        post['priority'] = self.priority
    if self.alias is not None:
        post['alias'] = self.alias.format(**matches[0])
    logging.debug(json.dumps(post))
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'GenieKey {}'.format(self.api_key),
    }
    # set https proxy, if it was provided
    proxies = {'https': self.opsgenie_proxy} if self.opsgenie_proxy else None
    try:
        r = requests.post(self.to_addr, json=post, headers=headers, proxies=proxies)
        logging.debug('request response: {0}'.format(r))
        # OpsGenie answers 202 Accepted on success.
        if r.status_code != 202:
            elastalert_logger.info("Error response from {0} \n "
                                   "API Response: {1}".format(self.to_addr, r))
        r.raise_for_status()
        logging.info("Alert sent to OpsGenie")
    except Exception as err:
        raise EAException("Error sending alert: {0}".format(err))
def load_alerts(rule, alert_field):
    """Convert a rule's alert configuration into instantiated Alerter objects.

    Alerts may be named globally (a plain string, configured from the rule
    itself) or inline (a {name: config} dict whose config is merged over a
    copy of the rule). Required options are validated in both cases.

    :param rule: The rule configuration dictionary.
    :param alert_field: A single alert entry or a list of them.
    :return: List of instantiated alert objects.
    :raises EAException: On unknown alert classes, missing options, or
        instantiation errors.
    """
    reqs = rule['type'].required_options
    try:
        # Convert all alerts into Alerter objects
        global_alerts = []
        inline_alerts = []
        if type(alert_field) != list:
            alert_field = [alert_field]
        for alert in alert_field:
            if isinstance(alert, basestring):
                global_alerts.append(alerts_mapping[alert] if alert in alerts_mapping else get_module(alert))
                if not issubclass(global_alerts[-1], alerts.Alerter):
                    raise EAException('Alert module %s is not a subclass of Alerter' % (alert))
            elif isinstance(alert, dict):
                # Py2: the dict has exactly one key, the alert's name.
                alert_name = alert.keys()[0]
                # Each Inline Alert is a tuple, in the form (alert_configuration, alert_class_object)
                if alert_name in alerts_mapping:
                    inline_alerts.append((alert[alert_name], alerts_mapping[alert_name]))
                else:
                    inline_alerts.append((alert[alert_name], get_module(alert_name)))
                if not issubclass(inline_alerts[-1][1], alerts.Alerter):
                    raise EAException('Alert module %s is not a subclass of Alerter' % (alert))
        # Instantiate inline alerts over a per-alert merged copy of the rule.
        alert_field = []
        for (alert_config, alert) in inline_alerts:
            copied_conf = copy.copy(rule)
            rule_reqs = alert.required_options
            copied_conf.update(alert_config)
            if rule_reqs - frozenset(copied_conf.keys()):
                raise EAException('Missing required option(s): %s' % (', '.join(rule_reqs - frozenset(copied_conf.keys()))))
            alert_field.append(alert(copied_conf))
        # Global alerts accumulate required options and share the rule config.
        for alert in global_alerts:
            reqs = reqs.union(alert.required_options)
            if reqs - frozenset(rule.keys()):
                raise EAException('Missing required option(s): %s' % (', '.join(reqs - frozenset(rule.keys()))))
            else:
                alert_field.append(alert(rule))
    except (KeyError, EAException) as e:
        raise EAException('Error initiating alert %s: %s' % (rule['alert'], e))
    return alert_field
def __init__(self, *args):
    """Validate cardinality bounds and set up tracking state."""
    super(CardinalityRule, self).__init__(*args)
    # One of the two cardinality bounds is mandatory.
    bounds = ('max_cardinality', 'min_cardinality')
    if not any(key in self.rules for key in bounds):
        raise EAException("CardinalityRule must have one of either max_cardinality or min_cardinality")
    self.ts_field = self.rules.get('timestamp_field', '@timestamp')
    self.cardinality_field = self.rules['cardinality_field']
    # Per-query-key caches of observed values and first-seen timestamps.
    self.cardinality_cache = {}
    self.first_event = {}
    self.timeframe = self.rules['timeframe']
def __init__(self, rule):
    """Configure the JIRA client and static issue fields from the rule.

    :param rule: Rule configuration dictionary containing the jira_* settings.
    :raises EAException: If connecting to the JIRA server fails.
    """
    super(JiraAlerter, self).__init__(rule)
    self.server = self.rule['jira_server']
    self.get_account(self.rule['jira_account_file'])
    self.project = self.rule['jira_project']
    self.issue_type = self.rule['jira_issuetype']
    self.component = self.rule.get('jira_component')
    self.label = self.rule.get('jira_label')
    self.assignee = self.rule.get('jira_assignee')
    self.max_age = self.rule.get('jira_max_age', 30)
    self.priority = self.rule.get('jira_priority')
    self.bump_tickets = self.rule.get('jira_bump_tickets', False)
    self.bump_not_in_statuses = self.rule.get('jira_bump_not_in_statuses')
    self.bump_in_statuses = self.rule.get('jira_bump_in_statuses')

    # Warn when both status filters are set; overlapping statuses guarantee
    # that no ticket can ever match both filters.
    if self.bump_in_statuses and self.bump_not_in_statuses:
        msg = 'Both jira_bump_in_statuses (%s) and jira_bump_not_in_statuses (%s) are set.' % \
            (','.join(self.bump_in_statuses), ','.join(self.bump_not_in_statuses))
        # Bug fix: previously intersected bump_in_statuses with itself, so a
        # genuine conflict between the two lists was never reported.
        intersection = list(set(self.bump_in_statuses) & set(self.bump_not_in_statuses))
        if intersection:
            msg = '%s Both have common statuses of (%s). As such, no tickets will ever be found.' % (
                msg, ','.join(intersection))
        msg += ' This should be simplified to use only one or the other.'
        logging.warning(msg)

    self.jira_args = {'project': {'key': self.project},
                      'issuetype': {'name': self.issue_type}}
    if self.component:
        self.jira_args['components'] = [{'name': self.component}]
    if self.label:
        self.jira_args['labels'] = [self.label]
    if self.assignee:
        self.jira_args['assignee'] = {'name': self.assignee}

    try:
        self.client = JIRA(self.server, basic_auth=(self.user, self.password))
        self.get_priorities()
    except JIRAError as e:
        # JIRAError may contain HTML, pass along only first 1024 chars
        raise EAException("Error connecting to JIRA: %s" % (str(e)[:1024]))

    try:
        if self.priority is not None:
            self.jira_args['priority'] = {'id': self.priority_ids[self.priority]}
    except KeyError:
        logging.error("Priority %s not found. Valid priorities are %s" % (self.priority, self.priority_ids.keys()))
def get_dashboard(self, rule, db_name):
    """ Download dashboard which matches use_kibana_dashboard from elasticsearch. """
    es = Elasticsearch(host=rule['es_host'], port=rule['es_port'])
    if not db_name:
        raise EAException("use_kibana_dashboard undefined")
    # Look the dashboard up by its document id in the kibana-int index.
    query = {'query': {'term': {'_id': db_name}}}
    try:
        res = es.search(index='kibana-int', doc_type='dashboard', body=query, _source_include=['dashboard'])
    except ElasticsearchException as e:
        raise EAException("Error querying for dashboard: %s" % (e))
    hits = res['hits']['hits']
    if not hits:
        raise EAException("Could not find dashboard named %s" % (db_name))
    # The dashboard definition is stored as a JSON string inside the doc.
    return json.loads(hits[0]['_source']['dashboard'])
def load_modules(rule, args=None): """ Loads things that could be modules. Enhancements, alerts and rule type. """ # Convert rule type into RuleType object if rule['type'] in rules_mapping: rule['type'] = rules_mapping[rule['type']] else: rule['type'] = get_module(rule['type']) if not issubclass(rule['type'], rule_types.RuleType): raise EAException('Rule module %s is not a subclass of RuleType' % (rule['type'])) # Instantiate rule try: rule['type'] = rule['type'](rule, args) except (KeyError, EAException) as e: raise EAException('Error initializing rule %s: %s' % (rule['name'], e)) # Convert rule type into RuleType object rule['actions'] = load_alerts(rule, alert_field=rule['actions'])
def yield_dir_rules(conf, use_rule=None):
    """Yield (path, parsed_yaml) pairs for every rule file found on disk."""
    for rule_path in get_file_paths(conf, use_rule):
        with open(rule_path) as fh:
            contents = fh.read()
        try:
            yield rule_path, yaml_loader(contents)
        except yaml.scanner.ScannerError as e:
            raise EAException('Could not parse file %s: %s' % (rule_path, e))
def get_account(self, account_file):
    """ Gets the username and password from an account file.

    :param account_file: Name of the file which contains user and password information.
    """
    account_conf = yaml_loader(account_file)
    # Both credentials are mandatory.
    for field in ('user', 'password'):
        if field not in account_conf:
            raise EAException('Account file must have user and password fields')
    self.user = account_conf['user']
    self.password = account_conf['password']
def alert(self, matches):
    """Run the configured external command for these matches.

    Optionally pipes the matches to the command's stdin as JSON and, when
    fail_on_non_zero_exit is set, raises on a non-zero exit status.

    :param matches: List of match dictionaries; matches[0] fills %-style
        placeholders in the configured command arguments.
    :raises EAException: On formatting errors, launch failure, or (when
        configured) a non-zero exit code.
    """
    # Format the command and arguments
    try:
        command = [command_arg % matches[0] for command_arg in self.rule['command']]
        self.last_command = command
    except KeyError as e:
        raise EAException("Error formatting command: %s" % (e))
    # Run command and pipe data
    try:
        subp = subprocess.Popen(command, stdin=subprocess.PIPE, shell=self.shell)
        if self.rule.get('pipe_match_json'):
            match_json = json.dumps(matches, cls=DateTimeEncoder) + '\n'
            stdout, stderr = subp.communicate(input=match_json)
        # NOTE(review): when pipe_match_json is unset, the child's stdin pipe
        # is never written or closed before wait(); a command that reads
        # stdin could block indefinitely here -- confirm intended.
        if self.rule.get("fail_on_non_zero_exit", False) and subp.wait():
            raise EAException("Non-zero exit code while running command %s" % (' '.join(command)))
    except OSError as e:
        raise EAException("Error while running command %s: %s" % (' '.join(command), e))
def get_module(module_name):
    """Import and return the object named by a dotted path.

    ``module_name`` should look like ``'module.file.object'``: everything
    before the final dot is imported as a module and the final component is
    looked up on it.

    :return: The resolved object.
    :raises EAException: If the path is malformed or the import/lookup fails.
    """
    try:
        path, attr = module_name.rsplit('.', 1)
        loaded = __import__(path, globals(), locals(), [attr])
        target = getattr(loaded, attr)
    except (ImportError, AttributeError, ValueError) as e:
        raise EAException("Could not import module %s: %s" % (module_name, e))
    return target
def yield_rules(conf, use_rule=None):
    """Yield (key, parsed_rule) pairs from the configured rule source.

    :param conf: Global configuration; ``conf['rules_type']`` selects the
        source ('api' or 'dir').
    :param use_rule: Optional filter forwarded to the source generator.
    :raises EAException: If ``rules_type`` is not a recognized source.
    """
    rules_type = conf['rules_type']
    if rules_type == "api":
        rules = yield_api_rules(conf, use_rule=use_rule)
    elif rules_type == "dir":
        rules = yield_dir_rules(conf, use_rule=use_rule)
    else:
        # Bug fix: `rules_type` was referenced here without ever being bound,
        # so invalid configs raised NameError instead of this EAException.
        raise EAException("'{rules_type}' is an invalid_rules_type".format(
            rules_type=rules_type))
    for k, v in rules:
        yield k, parse_rule(k, v)
def alert(self, matches):
    """Build an OpsGenie alert payload from the matches and POST it.

    :param matches: List of match dictionaries; matches[0] supplies values
        for templated fields (message, alias).
    :raises EAException: If the HTTP request to OpsGenie fails.
    """
    body = ''
    for match in matches:
        body += unicode(BasicMatchString(self.rule, match))
        # Separate text of aggregated alerts with dashes
        if len(matches) > 1:
            body += '\n----------------------------------------\n'
    if self.custom_message is None:
        self.message = self.create_title(matches)
    else:
        self.message = self.custom_message.format(**matches[0])

    post = {}
    post['message'] = self.message
    if self.account:
        post['user'] = self.account
    if self.recipients:
        post['responders'] = self._fill_responders(self.recipients, 'user')
    if self.teams:
        post['teams'] = self._fill_responders(self.teams, 'team')
    post['description'] = body
    post['source'] = 'ElastAlert'
    post['tags'] = self.tags
    if self.alias is not None:
        post['alias'] = self.alias.format(**matches[0])
    logging.debug(json.dumps(post))

    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'GenieKey {}'.format(self.api_key),
    }
    # set https proxy, if it was provided
    proxies = {'https': self.opsgenie_proxy} if self.opsgenie_proxy else None
    try:
        r = requests.post(self.to_addr, json=post, headers=headers, proxies=proxies)
        logging.debug('request response: {0}'.format(r))
        # OpsGenie answers 202 Accepted on success.
        if r.status_code != 202:
            elastalert_logger.info("Error response from {0} \n "
                                   "API Response: {1}".format(self.to_addr, r))
        r.raise_for_status()
        logging.info("Alert sent to OpsGenie")
    except Exception as err:
        raise EAException("Error sending alert: {0}".format(err))
def load_modules(rule, args=None): """ Loads things that could be modules. Enhancements, alerts and rule type. """ # Set match enhancements match_enhancements = [] for enhancement_name in rule.get('match_enhancements', []): if enhancement_name in dir(enhancements): enhancement = getattr(enhancements, enhancement_name) else: enhancement = get_module(enhancement_name) if not issubclass(enhancement, enhancements.BaseEnhancement): raise EAException( "Enhancement module %s not a subclass of BaseEnhancement" % (enhancement_name)) match_enhancements.append(enhancement(rule)) rule['match_enhancements'] = match_enhancements # Convert rule type into RuleType object if rule['type'] in rules_mapping: rule['type'] = rules_mapping[rule['type']] else: rule['type'] = get_module(rule['type']) if not issubclass(rule['type'], ruletypes.RuleType): raise EAException('Rule module %s is not a subclass of RuleType' % (rule['type'])) # Make sure we have required alert and type options reqs = rule['type'].required_options if reqs - frozenset(rule.keys()): raise EAException('Missing required option(s): %s' % (', '.join(reqs - frozenset(rule.keys())))) # Instantiate rule try: rule['type'] = rule['type'](rule, args) except (KeyError, EAException) as e: raise EAException('Error initializing rule %s: %s' % (rule['name'], e)), None, sys.exc_info()[2] # Instantiate alerts only if we're not in debug mode # In debug mode alerts are not actually sent so don't bother instantiating them if not args or not args.debug: rule['alert'] = load_alerts(rule, alert_field=rule['alert'])
def normalize_config(alert):
    """Alert config entries are either "alertType" or {"alertType": {"key": "data"}}.
    This function normalizes them both to the latter format.
    """
    # NOTE(review): `rule` is a free variable here -- this function appears
    # to depend on an enclosing scope or module global providing it; confirm
    # it is defined wherever normalize_config is used.
    if isinstance(alert, basestring):
        return alert, rule
    elif isinstance(alert, dict):
        # Py2: pull the single (name, config) pair out of the dict.
        name, config = iter(alert.items()).next()
        config_copy = copy.copy(rule)
        config_copy.update(config)  # warning, this (intentionally) mutates the rule dict
        return name, config_copy
    else:
        raise EAException()
def load_rules_configuration(filename):
    """Load the global rules configuration file and apply defaults.

    Environment variables listed in env_settings override file values. The
    parsed config is also deep-copied into the module-level base_config.

    :param filename: Path to the YAML configuration file.
    :return: The configuration dictionary.
    :raises EAException: If required globals are missing or time values are invalid.
    """
    conf = yaml_loader(filename)
    # Environment variables take precedence over file settings.
    for env_var, conf_var in env_settings.items():
        if env_var in os.environ:
            conf[conf_var] = os.environ[env_var]

    # Make sure we have all required globals
    if required_globals - frozenset(conf.keys()):
        # Bug fix: the template previously hard-coded "(unknown)" while still
        # passing `filename` as an unused kwarg; include the filename again.
        raise EAException('{filename} must contain {key}'.format(
            filename=filename, key=', '.join(required_globals - frozenset(conf.keys()))))

    conf.setdefault('max_query_size', 10000)
    conf.setdefault('scroll_keepalive', '30s')
    conf.setdefault('disable_rules_on_error', True)
    conf.setdefault('scan_subdirectories', True)
    conf.setdefault('rules_type', 'dir')

    # Convert run_every, buffer_time into a timedelta object
    try:
        conf['run_every'] = datetime.timedelta(**conf['run_every'])
        conf['buffer_time'] = datetime.timedelta(**conf['buffer_time'])
        if 'alert_time_limit' in conf:
            conf['alert_time_limit'] = datetime.timedelta(**conf['alert_time_limit'])
        else:
            conf['alert_time_limit'] = datetime.timedelta(days=2)
        if 'old_query_limit' in conf:
            conf['old_query_limit'] = datetime.timedelta(**conf['old_query_limit'])
        else:
            conf['old_query_limit'] = datetime.timedelta(weeks=1)
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % (e))

    # Keep a pristine copy for later resets/merges.
    global base_config
    base_config = copy.deepcopy(conf)
    return conf
def __init__(self, rule, args=None):
    """Resolve the tracked fields and preload the existing term set."""
    super(NewTermsRule, self).__init__(rule, args)
    self.seen_values = {}
    # Allow the use of query_key or fields
    if 'fields' in self.rules:
        self.fields = self.rules['fields']
    else:
        if 'query_key' not in self.rules:
            raise EAException("fields or query_key must be specified")
        self.fields = self.rules['query_key']
    if not self.fields:
        raise EAException("fields must not be an empty list")
    # Normalize a single field name into a one-element list.
    if type(self.fields) != list:
        self.fields = [self.fields]
    if self.rules.get('use_terms_query') and len(self.fields) != 1:
        raise EAException("use_terms_query can only be used with one field at a time")
    try:
        self.get_all_terms(args)
    except Exception as e:
        # Refuse to start if we cannot get existing terms
        raise EAException('Error searching for existing terms: %s' % (e))
def load_modules(rule): """ Loads things that could be modules. Enhancements, alerts and rule type. """ # Set match enhancements match_enhancements = [] for enhancement_name in rule.get('match_enhancements', []): if enhancement_name in dir(enhancements): enhancement = getattr(enhancements, enhancement_name) else: enhancement = get_module(enhancement_name) if not issubclass(enhancement, enhancements.BaseEnhancement): raise EAException( "Enhancement module %s not a subclass of BaseEnhancement" % (enhancement_name)) match_enhancements.append(enhancement(rule)) rule['match_enhancements'] = match_enhancements # Convert all alerts into Alerter objects rule_alerts = [] if type(rule['alert']) != list: rule['alert'] = [rule['alert']] for alert in rule['alert']: if alert in alerts_mapping: rule_alerts.append(alerts_mapping[alert]) else: rule_alerts.append(get_module(alert)) if not issubclass(rule_alerts[-1], alerts.Alerter): raise EAException( 'Alert module %s is not a subclass of Alerter' % (alert)) rule['alert'] = rule_alerts # Convert rule type into RuleType object if rule['type'] in rules_mapping: rule['type'] = rules_mapping[rule['type']] else: rule['type'] = get_module(rule['type']) if not issubclass(rule['type'], ruletypes.RuleType): raise EAException('Rule module %s is not a subclass of RuleType' % (rule['type'])) # Make sure we have required alert and type options reqs = rule['type'].required_options for alert in rule['alert']: reqs = reqs.union(alert.required_options) if reqs - frozenset(rule.keys()): raise EAException('Missing required option(s): %s' % (', '.join(reqs - frozenset(rule.keys())))) # Instantiate alert try: rule['alert'] = [alert(rule) for alert in rule['alert']] except (KeyError, EAException) as e: raise EAException('Error initiating alert %s: %s' % (rule['alert'], e)) # Instantiate rule try: rule['type'] = rule['type'](rule) except (KeyError, EAException) as e: raise EAException('Error initializing rule %s: %s' % (rule['name'], e))
def check_ryver_response(self, response):
    """Raise EAException with the best available detail for a failed Ryver call.

    Attempts to extract Ryver's own error details from a 400 response body to
    produce a clearer message; otherwise defers to the normal HTTP error path.

    :param response: The requests.Response returned by the Ryver API call.
    :raises EAException: If the response indicates an HTTP error.
    """
    # Early status code check to try to produce a better error message out
    # of the Ryver error message.
    # This assumes the actual HTTP error has the correct "Ryver error
    # message" format (undocumented). Otherwise, this fails badly.
    if response.status_code == 400:
        try:
            message = "Error {} sending message to Ryver on {}: {}".format(
                response.status_code, response.url,
                ", ".join(d['message'] for d in response.json()['error']['details']))
        except Exception:
            # Bug fix: narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed. If anything went wrong trying
            # to manage this error, skip custom formatting and let the normal
            # HTTP error handler take over.
            pass
        else:
            raise EAException(message)
    try:
        response.raise_for_status()
    except requests.HTTPError as e:
        raise EAException("Error posting to Ryver: {}".format(e))
def __init__(self, *args):
    """Translate bucket_interval into an ES interval string and validate timing."""
    super(BaseAggregationRule, self).__init__(*args)
    bucket_interval = self.rules.get('bucket_interval')
    if bucket_interval:
        # First matching unit wins, checked from smallest to largest,
        # mirroring the original seconds -> weeks precedence.
        units = (('seconds', 's'), ('minutes', 'm'), ('hours', 'h'),
                 ('days', 'd'), ('weeks', 'w'))
        for unit, suffix in units:
            if unit in bucket_interval:
                self.rules['bucket_interval_period'] = str(bucket_interval[unit]) + suffix
                break
        else:
            raise EAException("Unsupported window size")
    # The query window must align exactly with the bucket interval.
    if self.rules.get('use_run_every_query_size'):
        if total_seconds(self.rules['run_every']) % total_seconds(self.rules['bucket_interval_timedelta']) != 0:
            raise EAException("run_every must be evenly divisible by bucket_interval if specified")
    else:
        if total_seconds(self.rules['buffer_time']) % total_seconds(self.rules['bucket_interval_timedelta']) != 0:
            raise EAException("Buffer_time must be evenly divisible by bucket_interval if specified")
def load_configuration(filename):
    """ Load a yaml rule file and fill in the relevant fields with objects.

    :param filename: The name of a rule configuration file.
    :return: The rule configuration, a dictionary.
    """
    try:
        rule = yaml_loader(filename)
    except yaml.scanner.ScannerError as e:
        raise EAException('Could not parse file %s: %s' % (filename, e))
    # Record only the file's basename for later reference.
    rule['rule_file'] = os.path.split(filename)[-1]
    # Fill in defaults and instantiate rule/alert objects in place.
    load_options(rule)
    load_modules(rule)
    return rule
def alert(self, matches):
    """Send the rendered alert body to Ryver and verify the response."""
    raw_body = self.create_alert_body(matches)
    trimmed_body = self.fit_body(raw_body)  # limit body size
    payload = self.content_factory(trimmed_body)
    try:
        response = requests.post(self.url, headers=self.headers, json=payload)
    except requests.RequestException as e:
        raise EAException("Error while contacting Ryver: {}".format(e))
    self.check_ryver_response(response)
    elastalert_logger.info(self.log_message)
def alert(self, matches):
    """Convert matches into Alertmanager alert objects and POST them.

    Each match's kubernetes metadata becomes alert labels (keys conformed
    via self.conform_key), with summary/description annotations and a
    generator URL pointing back at Kibana.

    :param matches: List of match dictionaries from elasticsearch.
    :raises EAException: If posting to the Alertmanager API fails.
    """
    alerts = []
    for match in matches:
        myalert = {}
        myalert['labels'] = {}
        #elastalert_logger.info("!! : %s" % pprint.pformat(match))
        #elastalert_logger.info("?? : %s" % pprint.pformat(self.rule))
        for key, val in match["kubernetes"].iteritems():
            if key != 'labels':
                key = self.conform_key(key)
                myalert['labels'][key] = val
            else:
                # Flatten kubernetes.labels into the top-level label set.
                for key2, val2 in match["kubernetes"]["labels"].iteritems():
                    key2 = self.conform_key(key2)
                    myalert['labels'][key2] = val2
        myalert['labels']['_index'] = match['_index']
        myalert['labels']['timestamp'] = match['@timestamp']
        myalert['labels']['severity'] = self.rule.get('severity')
        myalert['labels']['alertname'] = self.rule.get('name')
        myalert['annotations'] = {}
        myalert['annotations']['summary'] = "Log Matched: " + match['log']
        myalert['annotations']['description'] = self.rule.get('description')
        myalert['generatorURL'] = "https://" + self.rule.get('es_host') + match['kibana_link']
        # NOTE(review): assumes @timestamp is second-precision UTC ("...Z");
        # sub-second timestamps would make strptime raise -- confirm format.
        timestamp = datetime.strptime(match['@timestamp'], "%Y-%m-%dT%H:%M:%SZ").isoformat('T')
        myalert['startsAt'] = timestamp + "Z"  # < stupid hack
        #myalert['endsAt'] = ""
        alerts.append(myalert)
    # Compact JSON encoding for the Alertmanager v1 API.
    bodydata = simplejson.dumps(alerts, separators=(',', ':'), sort_keys=False)
    headers = {'content-type': 'application/json'}
    #elastalert_logger.info("body : %s" % bodydata)
    #elastalert_logger.info("@@ : %s" % pprint.pformat(bodydata))
    try:
        response = requests.post(self.alertmanager_url + '/api/v1/alerts', data=bodydata, headers=headers)
        response.raise_for_status()
    except RequestException as e:
        raise EAException("Error posting to alertmanager: %s" % e)
    elastalert_logger.info("Alert sent to AlertManager")
def load_configuration(filename, conf, args=None):
    """ Load a yaml rule file and fill in the relevant fields with objects.

    :param filename: The name of a rule configuration file.
    :param conf: The global configuration dictionary, used for populating defaults.
    :param args: Optional parsed command-line arguments forwarded to loaders.
    :return: The rule configuration, a dictionary.
    :raises EAException: If the YAML file cannot be parsed.
    """
    try:
        rule = yaml_loader(filename)
    except yaml.scanner.ScannerError as e:
        raise EAException('Could not parse file %s: %s' % (filename, e))
    # Unlike the basename-only variant elsewhere, the full path is recorded here.
    rule['rule_file'] = filename
    # Fill in defaults and instantiate rule/alert objects in place.
    load_options(rule, conf, args)
    load_modules(rule, args)
    return rule