def __init__(self, rule):
    """Initialize the JIRA alerter: read rule settings, connect to the JIRA
    server, and pre-resolve priorities and arbitrary fields.

    :param rule: Parsed rule configuration dictionary.
    :raises EAException: If connecting to JIRA fails.
    """
    super(JiraAlerter, self).__init__(rule)
    self.server = self.rule['jira_server']
    self.get_account(self.rule['jira_account_file'])
    self.project = self.rule['jira_project']
    self.issue_type = self.rule['jira_issuetype']

    # Deferred settings refer to values that can only be resolved when a match
    # is found and as such loading them will be delayed until we find a match
    self.deferred_settings = []

    # We used to support only a single component. This allows us to maintain backwards compatibility
    # while also giving the user-facing API a more representative name
    self.components = self.rule.get('jira_components', self.rule.get('jira_component'))

    # We used to support only a single label. This allows us to maintain backwards compatibility
    # while also giving the user-facing API a more representative name
    self.labels = self.rule.get('jira_labels', self.rule.get('jira_label'))

    self.description = self.rule.get('jira_description', '')
    self.assignee = self.rule.get('jira_assignee')
    self.max_age = self.rule.get('jira_max_age', 30)
    self.priority = self.rule.get('jira_priority')
    self.bump_tickets = self.rule.get('jira_bump_tickets', False)
    self.bump_not_in_statuses = self.rule.get('jira_bump_not_in_statuses')
    self.bump_in_statuses = self.rule.get('jira_bump_in_statuses')
    self.bump_after_inactivity = self.rule.get('jira_bump_after_inactivity', 0)
    self.bump_only = self.rule.get('jira_bump_only', False)
    self.transition = self.rule.get('jira_transition_to', False)
    self.watchers = self.rule.get('jira_watchers')
    self.client = None

    if self.bump_in_statuses and self.bump_not_in_statuses:
        msg = 'Both jira_bump_in_statuses (%s) and jira_bump_not_in_statuses (%s) are set.' % \
              (','.join(self.bump_in_statuses), ','.join(self.bump_not_in_statuses))
        # BUG FIX: the original intersected bump_in_statuses with itself
        # (set(self.bump_in_statuses) & set(self.bump_in_statuses)), so the
        # "common statuses" warning below always fired for any non-empty list
        # rather than only when the two lists genuinely overlap.
        intersection = list(set(self.bump_in_statuses) & set(self.bump_not_in_statuses))
        if intersection:
            msg = '%s Both have common statuses of (%s). As such, no tickets will ever be found.' % (
                msg, ','.join(intersection))
        msg += ' This should be simplified to use only one or the other.'
        elastalert_logger.warning(msg)

    self.reset_jira_args()

    try:
        self.client = JIRA(self.server, basic_auth=(self.user, self.password))
        self.get_priorities()
        self.jira_fields = self.client.fields()
        self.get_arbitrary_fields()
    except JIRAError as e:
        # JIRAError may contain HTML, pass along only first 1024 chars
        raise EAException("Error connecting to JIRA: %s" % (str(e)[:1024])).with_traceback(sys.exc_info()[2])

    self.set_priority()
def adjust_deprecated_values(rule):
    """Rewrite deprecated 'simple' alerter settings to their 'post' equivalents.

    Mutates *rule* in place; rules of any other type are left untouched.

    :param rule: Rule configuration dictionary to fix up.
    """
    # From rename of simple HTTP alerter
    if rule.get('type') == 'simple':
        rule['type'] = 'post'
        if 'simple_proxy' in rule:
            rule['http_post_proxy'] = rule['simple_proxy']
        if 'simple_webhook_url' in rule:
            rule['http_post_url'] = rule['simple_webhook_url']
        # Typo fix in the warning text: "comptability" -> "compatibility"
        elastalert_logger.warning(
            '"simple" alerter has been renamed "post" and compatibility may be removed in a future release.'
        )
def __init__(self, *args):
    """Set up the command alerter, normalizing a string command to list form."""
    super(CommandAlerter, self).__init__(*args)
    self.last_command = []
    self.shell = False
    try:
        command = self.rule['command']
        if isinstance(command, str):
            # A bare string is executed through the shell; a '%' suggests
            # old-style substitution of match fields, which is injectable.
            self.shell = True
            if '%' in command:
                elastalert_logger.warning('Warning! You could be vulnerable to shell injection!')
            self.rule['command'] = [command]
    except KeyError as e:
        raise EAException("Error formatting command: %s" % (e))
def _parse_responders(self, responders, responder_args, matches, default_responders): if responder_args: formated_responders = list() responders_values = dict((k, lookup_es_key(matches[0], v)) for k, v in responder_args.items()) responders_values = dict( (k, v) for k, v in responders_values.items() if v) for responder in responders: responder = str(responder) try: formated_responders.append( responder.format(**responders_values)) except KeyError as error: elastalert_logger.warning( "OpsGenieAlerter: Cannot create responder for OpsGenie Alert. Key not foud: %s. " % (error)) if not formated_responders: elastalert_logger.warning( "OpsGenieAlerter: no responders can be formed. Trying the default responder " ) if not default_responders: elastalert_logger.warning( "OpsGenieAlerter: default responder not set. Falling back" ) formated_responders = responders else: formated_responders = default_responders responders = formated_responders return responders
def alert(self, matches):
    """Send one Zabbix metric (value '1') per match, clocked at the match timestamp.

    Matches is a list of match dictionaries; it contains more than one match
    when the alert has the aggregation option set.

    :raises EAException: If sending to Zabbix fails.
    """
    metrics = []
    for match in matches:
        ts_raw = match[self.timestamp_field]
        # A value without a ':' time separator or without a '-' date separator
        # is treated as an epoch timestamp already.
        if ':' not in ts_raw or '-' not in ts_raw:
            ts_epoch = int(ts_raw)
        else:
            try:
                ts_epoch = int(datetime.strptime(ts_raw, self.timestamp_strptime).timestamp())
            except ValueError:
                # Fall back to an ISO-8601 layout with a UTC offset
                ts_epoch = int(datetime.strptime(ts_raw, '%Y-%m-%dT%H:%M:%S%z').timestamp())
        metrics.append(ZabbixMetric(host=self.zbx_host, key=self.zbx_key, value='1', clock=ts_epoch))

    try:
        sender = ZabbixSender(zabbix_server=self.zbx_sender_host, zabbix_port=self.zbx_sender_port)
        response = sender.send(metrics)
        if response.failed:
            elastalert_logger.warning(
                "Missing zabbix host '%s' or host's item '%s', alert will be discarded" % (self.zbx_host, self.zbx_key))
        else:
            elastalert_logger.info("Alert sent to Zabbix")
    except Exception as e:
        raise EAException("Error sending alert to Zabbix: %s" % e)
def alert(self, matches):
    """Post the aggregated matches to OpsGenie as a single alert.

    Matches is a list of match dictionaries; it contains more than one match
    when the alert has the aggregation option set.

    :raises EAException: If the HTTP request to OpsGenie fails.
    """
    body = ''
    for match in matches:
        body += str(BasicMatchString(self.rule, match))
        # Separate text of aggregated alerts with dashes
        if len(matches) > 1:
            body += '\n----------------------------------------\n'

    if self.custom_message is None:
        self.message = self.create_title(matches)
    else:
        self.message = self.custom_message.format(**matches[0])
    self.recipients = self._parse_responders(self.recipients, self.recipients_args, matches, self.default_reciepients)
    self.teams = self._parse_responders(self.teams, self.teams_args, matches, self.default_teams)

    post = {}
    post['message'] = self.message
    if self.account:
        post['user'] = self.account
    if self.recipients:
        post['responders'] = [{'username': r, 'type': 'user'} for r in self.recipients]
    if self.teams:
        post['teams'] = [{'name': r, 'type': 'team'} for r in self.teams]
    if self.description:
        post['description'] = self.description.format(**matches[0])
    else:
        post['description'] = body
    if self.entity:
        post['entity'] = self.entity.format(**matches[0])
    if self.source:
        post['source'] = self.source.format(**matches[0])
    # Idiom fix: the original used enumerate() but never used the index
    post['tags'] = [tag.format(**matches[0]) for tag in self.tags]
    priority = self.priority
    if priority:
        priority = priority.format(**matches[0])
    if priority and priority not in ('P1', 'P2', 'P3', 'P4', 'P5'):
        # Invalid priority is dropped from the payload (OpsGenie would reject it)
        elastalert_logger.warning(
            "Priority level does not appear to be specified correctly. "
            "Please make sure to set it to a value between P1 and P5")
    else:
        post['priority'] = priority
    if self.alias is not None:
        post['alias'] = self.alias.format(**matches[0])

    details = self.get_details(matches)
    if details:
        post['details'] = details

    elastalert_logger.debug(json.dumps(post))

    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'GenieKey {}'.format(self.api_key),
    }
    # set https proxy, if it was provided
    proxies = {'https': self.opsgenie_proxy} if self.opsgenie_proxy else None

    try:
        r = requests.post(self.to_addr, json=post, headers=headers, proxies=proxies)
        elastalert_logger.debug('request response: {0}'.format(r))
        # OpsGenie's create-alert endpoint answers 202 Accepted on success
        if r.status_code != 202:
            elastalert_logger.info("Error response from {0} \n "
                                   "API Response: {1}".format(self.to_addr, r))
            r.raise_for_status()
        elastalert_logger.info("Alert sent to OpsGenie")
    except Exception as err:
        raise EAException("Error sending alert: {0}".format(err))
def load_options(self, rule, conf, filename, args=None):
    """ Converts time objects, sets defaults, and validates some settings.

    :param rule: A dictionary of parsed YAML from a rule config file.
    :param conf: The global configuration dictionary, used for populating defaults.
    :param filename: Name of the rule
    :param args: Arguments
    :raises EAException: On schema violations, bad time formats, or missing
        required options.
    """
    self.adjust_deprecated_values(rule)

    try:
        self.rule_schema.validate(rule)
    except jsonschema.ValidationError as e:
        raise EAException("Invalid Rule file: %s\n%s" % (filename, e))

    try:
        # Set all time based parameters
        if 'timeframe' in rule:
            rule['timeframe'] = datetime.timedelta(**rule['timeframe'])
        if 'realert' in rule:
            rule['realert'] = datetime.timedelta(**rule['realert'])
        else:
            # Aggregated rules default to no realert window, others to 1 minute
            if 'aggregation' in rule:
                rule['realert'] = datetime.timedelta(minutes=0)
            else:
                rule['realert'] = datetime.timedelta(minutes=1)
        # A schedule-based aggregation stays a dict; only convert plain windows
        if 'aggregation' in rule and not rule['aggregation'].get('schedule'):
            rule['aggregation'] = datetime.timedelta(**rule['aggregation'])
        if 'query_delay' in rule:
            rule['query_delay'] = datetime.timedelta(**rule['query_delay'])
        if 'buffer_time' in rule:
            rule['buffer_time'] = datetime.timedelta(**rule['buffer_time'])
        if 'run_every' in rule:
            rule['run_every'] = datetime.timedelta(**rule['run_every'])
        if 'bucket_interval' in rule:
            rule['bucket_interval_timedelta'] = datetime.timedelta(**rule['bucket_interval'])
        if 'exponential_realert' in rule:
            rule['exponential_realert'] = datetime.timedelta(**rule['exponential_realert'])
        if 'kibana_discover_from_timedelta' in rule:
            rule['kibana_discover_from_timedelta'] = datetime.timedelta(**rule['kibana_discover_from_timedelta'])
        if 'kibana_discover_to_timedelta' in rule:
            rule['kibana_discover_to_timedelta'] = datetime.timedelta(**rule['kibana_discover_to_timedelta'])
    except (KeyError, TypeError) as e:
        raise EAException('Invalid time format used: %s' % e)

    # Set defaults, copy defaults from config.yaml
    for key, val in list(self.base_config.items()):
        rule.setdefault(key, val)
    rule.setdefault('name', os.path.splitext(filename)[0])
    rule.setdefault('realert', datetime.timedelta(seconds=0))
    rule.setdefault('aggregation', datetime.timedelta(seconds=0))
    rule.setdefault('query_delay', datetime.timedelta(seconds=0))
    rule.setdefault('timestamp_field', '@timestamp')
    rule.setdefault('filter', [])
    rule.setdefault('timestamp_type', 'iso')
    rule.setdefault('timestamp_format', '%Y-%m-%dT%H:%M:%SZ')
    rule.setdefault('_source_enabled', True)
    rule.setdefault('use_local_time', True)
    rule.setdefault('description', "")
    rule.setdefault('jinja_root_name', "_data")
    rule.setdefault('query_timezone', "")

    # Set timestamp_type conversion function, used when generating queries and processing hits
    rule['timestamp_type'] = rule['timestamp_type'].strip().lower()
    if rule['timestamp_type'] == 'iso':
        rule['ts_to_dt'] = ts_to_dt
        rule['dt_to_ts'] = dt_to_ts
    elif rule['timestamp_type'] == 'unix':
        rule['ts_to_dt'] = unix_to_dt
        rule['dt_to_ts'] = dt_to_unix
    elif rule['timestamp_type'] == 'unix_ms':
        rule['ts_to_dt'] = unixms_to_dt
        rule['dt_to_ts'] = dt_to_unixms
    elif rule['timestamp_type'] == 'custom':
        def _ts_to_dt_with_format(ts):
            return ts_to_dt_with_format(ts, ts_format=rule['timestamp_format'])

        def _dt_to_ts_with_format(dt):
            ts = dt_to_ts_with_format(dt, ts_format=rule['timestamp_format'])
            if 'timestamp_format_expr' in rule:
                # eval expression passing 'ts' and 'dt'
                # NOTE(review): eval of a config-supplied expression — rule files
                # must come from a trusted source; flagged rather than replaced.
                return eval(rule['timestamp_format_expr'], {'ts': ts, 'dt': dt})
            else:
                return ts

        rule['ts_to_dt'] = _ts_to_dt_with_format
        rule['dt_to_ts'] = _dt_to_ts_with_format
    else:
        raise EAException('timestamp_type must be one of iso, unix, or unix_ms')

    # Add support for client ssl certificate auth
    if 'verify_certs' in conf:
        rule.setdefault('verify_certs', conf.get('verify_certs'))
        rule.setdefault('ca_certs', conf.get('ca_certs'))
        rule.setdefault('client_cert', conf.get('client_cert'))
        rule.setdefault('client_key', conf.get('client_key'))

    # Make sure we have required options
    if self.required_locals - frozenset(list(rule.keys())):
        raise EAException('Missing required option(s): %s' % (', '.join(self.required_locals - frozenset(list(rule.keys())))))

    # Idiom fix: isinstance() instead of type(...) != list
    if 'include' in rule and not isinstance(rule['include'], list):
        raise EAException('include option must be a list')

    # Normalize a list query_key into a compound key plus a joined string
    raw_query_key = rule.get('query_key')
    if isinstance(raw_query_key, list):
        if len(raw_query_key) > 1:
            rule['compound_query_key'] = raw_query_key
            rule['query_key'] = ','.join(raw_query_key)
        elif len(raw_query_key) == 1:
            rule['query_key'] = raw_query_key[0]
        else:
            # An empty list means no query_key at all
            del rule['query_key']

    if isinstance(rule.get('aggregation_key'), list):
        rule['compound_aggregation_key'] = rule['aggregation_key']
        rule['aggregation_key'] = ','.join(rule['aggregation_key'])

    if isinstance(rule.get('compare_key'), list):
        rule['compound_compare_key'] = rule['compare_key']
        rule['compare_key'] = ','.join(rule['compare_key'])
    elif 'compare_key' in rule:
        rule['compound_compare_key'] = [rule['compare_key']]

    # Add QK, CK and timestamp to include
    include = rule.get('include', ['*'])
    if 'query_key' in rule:
        include.append(rule['query_key'])
    if 'compound_query_key' in rule:
        include += rule['compound_query_key']
    if 'compound_aggregation_key' in rule:
        include += rule['compound_aggregation_key']
    if 'compare_key' in rule:
        include.append(rule['compare_key'])
    if 'compound_compare_key' in rule:
        include += rule['compound_compare_key']
    if 'top_count_keys' in rule:
        include += rule['top_count_keys']
    include.append(rule['timestamp_field'])
    rule['include'] = list(set(include))

    # Check that query_key is set if use_terms_query
    if rule.get('use_terms_query'):
        if 'query_key' not in rule:
            raise EAException('query_key must be specified with use_terms_query')

    # Warn if use_strf_index is used with %y, %M or %D
    # (%y = short year, %M = minutes, %D = full date)
    if rule.get('use_strftime_index'):
        for token in ['%y', '%M', '%D']:
            if token in rule.get('index'):
                elastalert_logger.warning('Did you mean to use %s in the index? '
                                          'The index will be formatted like %s' % (
                                              token, datetime.datetime.now().strftime(rule.get('index'))))

    if rule.get('scan_entire_timeframe') and not rule.get('timeframe'):
        raise EAException('scan_entire_timeframe can only be used if there is a timeframe specified')

    self.load_jinja_template(rule)