Example #1
def check_files():
    print("Note: This tool is for testing filters and config syntax. It will not process data or alert.\n")
    parser = argparse.ArgumentParser(description='Validate a rule configuration')
    parser.add_argument('files', metavar='file', type=str, nargs='+', help='rule configuration filename')
    parser.add_argument('--schema-only', action='store_true', help='Show only schema errors; do not run query')
    parser.add_argument('--days', type=int, default=[1, 7], nargs='+', help='Query the previous N days with this rule')
    args = parser.parse_args()

    for filename in args.files:
        with open(filename) as fh:
            conf = yaml.load(fh, Loader=yaml.FullLoader)
        load_options(conf)
        print("Successfully loaded %s\n" % (conf['name']))

        if args.schema_only:
            continue

        es_client = Elasticsearch(host=conf['es_host'], port=conf['es_port'])
        for days in args.days:
            start_time = ts_now() - datetime.timedelta(days=days)
            end_time = ts_now()
            ts = conf.get('timestamp_field', '@timestamp')
            query = ElastAlerter.get_query(conf['filter'], starttime=start_time, endtime=end_time, timestamp_field=ts)
            index = ElastAlerter.get_index(conf, start_time, end_time)
            try:
                res = es_client.search(index, size=1000, body=query)
            except Exception as e:
                print("Error running your filter:")
                print(repr(e)[:2048])
                exit(1)

            num_hits = len(res['hits']['hits'])
            print("Got %s hits from the last %s day%s" % (num_hits if num_hits != 1000 else '1000+', days,
                                                          's' if days > 1 else ''))

        if num_hits:
            print("\nAvailable terms in first hit:")
            terms = res['hits']['hits'][0]['_source']
            print_terms(terms, '')

            pk = conf.get('primary_key')
            ck = conf.get('compare_key')
            if pk and not lookup_es_key(terms, pk):
                print("Warning: primary key %s is either missing or null!" % (pk))
            if ck and not lookup_es_key(terms, ck):
                print("Warning: compare key %s is either missing or null!" % (ck))

            include = conf.get('include')
            if include:
                for term in include:
                    if not lookup_es_key(terms, term) and '*' not in term:
                        print("Included term %s may be missing or null" % (term))

            for term in conf.get('top_count_keys', []):
                # If the index starts with 'logstash', fields with .raw will be available but won't be in _source
                if term not in terms and not (term.endswith('.raw') and term[:-4] in terms and index.startswith('logstash')):
                    print("top_count_key %s may be missing" % (term))
        print('')
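Every example below leans on lookup_es_key, which resolves a dotted key path against a (possibly nested) match document. The following is a minimal sketch of the semantics the tests in this collection exercise — it tries the literal key first (so composite keys like 'ts.value' in Example #3 resolve), then splits at the first dot and recurses. The real ElastAlert helper also handles array indices (see Example #20) and other cases this toy version omits.

def lookup_es_key_sketch(document, key):
    # Minimal stand-in, not the real implementation: literal key first,
    # then walk one dotted segment at a time; None when anything is missing.
    if not isinstance(document, dict):
        return None
    if key in document:
        return document[key]
    head, _, rest = key.partition('.')
    if not rest or head not in document:
        return None
    return lookup_es_key_sketch(document[head], rest)

record = {'Fields': {'ts.value': 12467267}}
assert lookup_es_key_sketch(record, 'Fields.ts.value') == 12467267
assert lookup_es_key_sketch(record, 'Fields.ts') is None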
Example #2
def test_looking_up_missing_keys(ea):
    record = {
        'Message': '12345',
        'Fields': {
            'severity': 'large',
            'user': '******'
        }
    }

    assert lookup_es_key(record, 'Fields.ts') is None
Example #3
def test_looking_up_nested_composite_keys(ea):
    expected = 12467267
    record = {
        'Message': '12345',
        'Fields': {
            'ts.value': expected,
            'severity': 'large',
            'user': '******'
        }
    }

    assert lookup_es_key(record, 'Fields.ts.value') == expected
Example #5
    def _add_custom_alert_text(self):
        missing = self.rule.get('alert_missing_value', '<MISSING VALUE>')
        alert_text = str(self.rule.get('alert_text', ''))
        if self.rule.get('alert_text_type') == 'alert_text_jinja':
            #  Top fields are accessible via `{{field_name}}` or `{{jinja_root_name['field_name']}}`
            #  The `jinja_root_name` dict is useful when accessing *fields with dots in their keys*,
            #  as Jinja treats a dot as nested field access.
            template_values = self.rule | self.match
            alert_text = self.rule.get("jinja_template").render(
                template_values | {self.rule['jinja_root_name']: template_values})
        elif 'alert_text_args' in self.rule:
            alert_text_args = self.rule.get('alert_text_args')
            alert_text_values = [lookup_es_key(self.match, arg) for arg in alert_text_args]

            # Support referencing other top-level rule properties
            # This technically may not work if there is a top-level rule property with the same name
            # as an es result key, since it would have been matched in the lookup_es_key call above
            for i, text_value in enumerate(alert_text_values):
                if text_value is None:
                    alert_value = self.rule.get(alert_text_args[i])
                    if alert_value:
                        alert_text_values[i] = alert_value

            alert_text_values = [missing if val is None else val for val in alert_text_values]
            alert_text = alert_text.format(*alert_text_values)
        elif 'alert_text_kw' in self.rule:
            kw = {}
            for name, kw_name in list(self.rule.get('alert_text_kw').items()):
                val = lookup_es_key(self.match, name)

                # Support referencing other top-level rule properties
                # This technically may not work if there is a top-level rule property with the same name
                # as an es result key, since it would have been matched in the lookup_es_key call above
                if val is None:
                    val = self.rule.get(name)

                kw[kw_name] = missing if val is None else val
            alert_text = alert_text.format(**kw)

        self.text += alert_text
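The jinja_root_name comment above is easiest to see in isolation. A small sketch with invented field names, where '_data' stands in for rule['jinja_root_name']: a dotted key cannot be written as {{event.action}}, because Jinja treats the dot as attribute access, so it is reached through the root dict instead.

from jinja2 import Template

match = {'host': 'web-1', 'event.action': 'login'}  # hypothetical match fields
root_name = '_data'  # stands in for self.rule['jinja_root_name']

template = Template("{{host}}: {{ _data['event.action'] }}")
# Mirrors the snippet's template_values | {root_name: template_values} merge
print(template.render(match | {root_name: match}))  # web-1: login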
Example #6
    def lookup_field(self, match: dict, field_name: str, default):
        """Populates a field with values depending on the contents of the Elastalert match
        provided to it.

        Uses a similar algorithm to that implemented to populate the `alert_text_args`.
        First checks any fields found in the match provided, then any fields defined in
        the rule, finally returning the default value provided if no value can be found.
        """
        field_value = lookup_es_key(match, field_name)
        if field_value is None:
            field_value = self.rule.get(field_name, default)

        return field_value
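The docstring spells out the precedence: match value first, rule value second, and the caller's default only when both are absent. A minimal stand-alone illustration of that order, with dict.get standing in for lookup_es_key:

def lookup_field_sketch(match, rule, field_name, default):
    # match wins, then the rule, then the provided default
    value = match.get(field_name)  # stand-in for lookup_es_key
    if value is None:
        value = rule.get(field_name, default)
    return value

rule = {'owner': 'ops'}
assert lookup_field_sketch({'severity': 'high'}, rule, 'severity', 'n/a') == 'high'
assert lookup_field_sketch({}, rule, 'owner', 'n/a') == 'ops'
assert lookup_field_sketch({}, rule, 'region', 'n/a') == 'n/a'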
Example #7
    def add_data(self, data):
        compare_keys = self.rules['tuplefields']
        compare_values = self.rules['tuplecheck']

        for event in data:
            key_tuple = ''

            # Match the values of the defined keys
            # tuplefields:
            # - keyA
            # - keyB
            # - keyC
            # {"keyA" : "A", "keyB" : "B", "keyC" : "C"}
            # to a string in this format
            # A/B/C
            for key in compare_keys:
                es_key = lookup_es_key(event, key)
                if es_key:
                    key_tuple = (es_key if len(key_tuple) == 0 else '%s/%s' %
                                 (key_tuple, es_key))

            if key_tuple not in self.found_tuples:
                self.found_tuples.append(key_tuple)

        missing = []

        # Check for expected documents
        for value in compare_values:
            if value not in self.found_tuples:
                missing.append(value)

        if missing:
            self.add_match({
                'direction': 'configured_but_not_found',
                'missing_values': missing
            })

        if self.rules.get('allow_unconfigured') is False:
            unexpected = []

            # Check for unexpected documents
            for value in self.found_tuples:
                if value not in compare_values:
                    unexpected.append(value)

            if unexpected:
                self.add_match({
                    'direction': 'found_but_not_configured',
                    'unexpected_values': unexpected
                })
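As the inline comment sketches, the per-event key is simply the looked-up values joined with '/'. The same joining behavior in miniature, on a flat event with dict.get standing in for lookup_es_key:

event = {'keyA': 'A', 'keyB': 'B', 'keyC': 'C'}
compare_keys = ['keyA', 'keyB', 'keyC']

# Skip missing keys, exactly like the loop above skips falsy lookups
key_tuple = '/'.join(str(event.get(k)) for k in compare_keys if event.get(k))
assert key_tuple == 'A/B/C'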
Example #8
    def populate_fields(self, matches):
        alert_fields = []
        missing = self.rule.get('alert_missing_value', '<MISSING VALUE>')
        for field in self.mattermost_msg_fields:
            field = copy.copy(field)
            if 'args' in field:
                args_values = [lookup_es_key(matches[0], arg) or missing for arg in field['args']]
                if 'value' in field:
                    field['value'] = field['value'].format(*args_values)
                else:
                    field['value'] = "\n".join(str(arg) for arg in args_values)
                del field['args']
            alert_fields.append(field)
        return alert_fields
Example #9
    def populate_links(self, matches):
        alert_links = []
        if self.ms_teams_index_pattern_url != '':
            document_id = lookup_es_key(matches[0], 'UniqueId')
            my_url = '%s%s' % (self.ms_teams_index_pattern_url, document_id)
            name = "Discover in Kibana"

            current_link_pattern = copy.copy(self.link_pattern)
            current_target_pattern = copy.copy(self.target_pattern)

            current_link_pattern['name'] = name

            current_target_pattern['uri'] = my_url
            current_link_pattern['targets'] = [current_target_pattern]

            alert_links.append(current_link_pattern)
        if self.ms_teams_alert_links != '':
            for arg in self.ms_teams_alert_links:
                link_url = lookup_es_key(matches[0], arg['value'])
                name = arg['name']

                current_link_pattern = copy.copy(self.link_pattern)
                current_target_pattern = copy.copy(self.target_pattern)

                if link_url != '' and link_url is not None:
                    current_link_pattern['name'] = name

                    current_target_pattern['uri'] = link_url
                    current_link_pattern['targets'] = [current_target_pattern]

                    alert_links.append(current_link_pattern)
        return alert_links
Example #10
    def get_details(self, matches):
        details = {}

        for key, value in self.opsgenie_details.items():

            if isinstance(value, dict):
                if 'field' in value:
                    field_value = lookup_es_key(matches[0], value['field'])
                    if field_value is not None:
                        details[key] = str(field_value)

            elif isinstance(value, str):
                details[key] = os.path.expandvars(value)

        return details
Example #11
    def create_default_title(self, matches, for_search=False):
        # If there is a query_key, use that in the title

        if 'query_key' in self.rule and lookup_es_key(matches[0],
                                                      self.rule['query_key']):
            title = 'ElastAlert: %s matched %s' % (lookup_es_key(
                matches[0], self.rule['query_key']), self.rule['name'])
        else:
            title = 'ElastAlert: %s' % (self.rule['name'])

        if for_search:
            return title

        timestamp = matches[0].get(self.rule['timestamp_field'])
        if timestamp:
            title += ' - %s' % (pretty_ts(timestamp,
                                          self.rule.get('use_local_time')))

        # Add count for spikes
        count = matches[0].get('spike_count')
        if count:
            title += ' - %s+ events' % (count)

        return title
Example #12
    def alert(self, matches):
        """ Each match will trigger a POST to the specified endpoint(s). """
        for match in matches:
            payload = match if self.post_all_values else {}
            for post_key, post_value in list(self.post_payload.items()):
                post_key_template = Template(post_key)
                post_key_res = post_key_template.render(**match)
                post_value_template = Template(post_value)
                post_value_res = post_value_template.render(**match)
                payload[post_key_res] = post_value_res

            for post_key, es_key in list(self.post_raw_fields.items()):
                payload[post_key] = lookup_es_key(match, es_key)

            headers = {
                "Content-Type": "application/json",
                "Accept": "application/json;charset=utf-8"
            }
            if self.post_ca_certs:
                verify = self.post_ca_certs
            else:
                verify = not self.post_ignore_ssl_errors
            if self.post_ignore_ssl_errors:
                requests.packages.urllib3.disable_warnings()

            for header_key, header_value in list(
                    self.post_http_headers.items()):
                header_key_template = Template(header_key)
                header_key_res = header_key_template.render(**match)
                header_value_template = Template(header_value)
                header_value_res = header_value_template.render(**match)
                headers[header_key_res] = header_value_res

            proxies = {'https': self.post_proxy} if self.post_proxy else None
            for url in self.post_url:
                try:
                    response = requests.post(url,
                                             data=json.dumps(
                                                 payload, cls=DateTimeEncoder),
                                             headers=headers,
                                             proxies=proxies,
                                             timeout=self.timeout,
                                             verify=verify)
                    response.raise_for_status()
                except RequestException as e:
                    raise EAException("Error posting HTTP Post 2 alert: %s" %
                                      e)
            elastalert_logger.info("HTTP Post 2 alert sent.")
Example #13
    def get_incident_key(self, matches):
        if self.pagerduty_incident_key_args:
            incident_key_values = [lookup_es_key(matches[0], arg) for arg in self.pagerduty_incident_key_args]

            # Populate values with rule level properties too
            for i, key_value in enumerate(incident_key_values):
                if key_value is None:
                    rule_value = self.rule.get(self.pagerduty_incident_key_args[i])
                    if rule_value:
                        incident_key_values[i] = rule_value

            missing = self.rule.get('alert_missing_value', '<MISSING VALUE>')
            incident_key_values = [missing if val is None else val for val in incident_key_values]
            return self.pagerduty_incident_key.format(*incident_key_values)
        else:
            return self.pagerduty_incident_key
Example #14
    def resolve_formatted_key(self, key, args, matches):
        if args:
            key_values = [lookup_es_key(matches[0], arg) for arg in args]

            # Populate values with rule level properties too
            for i, key_value in enumerate(key_values):
                if key_value is None:
                    rule_value = self.rule.get(args[i])
                    if rule_value:
                        key_values[i] = rule_value

            missing = self.rule.get('alert_missing_value', '<MISSING VALUE>')
            key_values = [missing if val is None else val for val in key_values]
            return key.format(*key_values)
        else:
            return key
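Examples #13 and #14 (and #22 below) share one pattern: positional format arguments are resolved from the match, then from rule-level properties, and finally replaced by a missing-value placeholder before str.format. A compact stand-alone sketch of that chain, with invented names and dict.get standing in for lookup_es_key:

def resolve_formatted_sketch(key, args, match, rule, missing='<MISSING VALUE>'):
    values = [match.get(arg) if match.get(arg) is not None else rule.get(arg)
              for arg in args]
    return key.format(*(missing if v is None else v for v in values))

match = {'host': 'web-1'}
rule = {'name': 'disk-full'}
print(resolve_formatted_sketch('{0}/{1}/{2}', ['name', 'host', 'dc'], match, rule))
# disk-full/web-1/<MISSING VALUE>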
Example #15
def test_setting_keys(ea):
    expected = 12467267
    record = {
        'Message': '12345',
        'Fields': {
            'ts': 'fail',
            'severity': 'large',
            'user': '******'
        }
    }

    # Set the value
    assert set_es_key(record, 'Fields.ts', expected)

    # Get the value again
    assert lookup_es_key(record, 'Fields.ts') == expected
Example #17
    def get_json_payload(self, match):
        """
            Builds the API Create Alert body, as in
            http://alerta.readthedocs.io/en/latest/api/reference.html#create-an-alert

            For the values that could have references to fields on the match, resolve those references.

        """

        # Using default text and event title if not defined in rule
        alerta_text = self.rule['type'].get_match_str([match]) if self.text == '' else resolve_string(self.text, match, self.missing_text)
        alerta_event = self.create_default_title([match]) if self.event == '' else resolve_string(self.event, match, self.missing_text)

        match_timestamp = lookup_es_key(match, self.rule.get('timestamp_field', '@timestamp'))
        if match_timestamp is None:
            match_timestamp = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
        if self.use_match_timestamp:
            createTime = ts_to_dt(match_timestamp).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
        else:
            createTime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")

        alerta_payload_dict = {
            'resource': resolve_string(self.resource, match, self.missing_text),
            'severity': resolve_string(self.severity, match),
            'timeout': self.timeout,
            'createTime': createTime,
            'type': self.type,
            'environment': resolve_string(self.environment, match, self.missing_text),
            'origin': resolve_string(self.origin, match, self.missing_text),
            'group': resolve_string(self.group, match, self.missing_text),
            'event': alerta_event,
            'text': alerta_text,
            'value': resolve_string(self.value, match, self.missing_text),
            'service': [resolve_string(a_service, match, self.missing_text) for a_service in self.service],
            'tags': [resolve_string(a_tag, match, self.missing_text) for a_tag in self.tags],
            'correlate': [resolve_string(an_event, match, self.missing_text) for an_event in self.correlate],
            'attributes': dict(list(zip(self.attributes_keys,
                                        [resolve_string(a_value, match, self.missing_text) for a_value in self.attributes_values]))),
            'rawData': self.create_alert_body([match]),
        }

        try:
            payload = json.dumps(alerta_payload_dict, cls=DateTimeEncoder)
        except Exception as e:
            raise Exception("Error building Alerta request: %s" % e)
        return payload
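resolve_string is used throughout this snippet but never shown. A plausible minimal stand-in, assuming %(field)s-style substitution with a missing-text fallback — the real helper may well differ (for example around nested fields):

import re

def resolve_string_sketch(text, match, missing_text='<MISSING VALUE>'):
    # Replace %(field)s references with values from the match,
    # falling back to missing_text for absent fields.
    return re.sub(r'%\((\w+)\)s',
                  lambda m: str(match.get(m.group(1), missing_text)),
                  text)

print(resolve_string_sketch('host=%(hostname)s dc=%(dc)s', {'hostname': 'web-1'}))
# host=web-1 dc=<MISSING VALUE>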
Example #18
    def add_match(self, match):
        # TODO this is not technically correct
        # if the term changes multiple times before an alert is sent
        # this data will be overwritten with the most recent change
        change = self.change_map.get(
            hashable(lookup_es_key(match, self.rules['query_key'])))
        extra = {}
        if change:
            extra = {
                'value': change[0],
                'start_time': change[1],
                'duration': change[2]
            }
            elastalert_logger.debug("Description of the changed records: " +
                                    str(dict(list(match.items()) + list(extra.items()))))
        super(BlacklistDurationRule,
              self).add_match(dict(list(match.items()) + list(extra.items())))
Example #19
    def alert(self, matches):
        headers = {'content-type': 'application/json'}
        proxies = {'https': self.proxies} if self.proxies else None
        auth = HTTPBasicAuth(self.alertmanager_basic_auth_login,
                             self.alertmanager_basic_auth_password
                             ) if self.alertmanager_basic_auth_login else None

        self.labels.update({
            label: self._json_or_string(lookup_es_key(matches[0], term))
            for label, term in self.fields.items()
        })
        self.labels.update(alertname=self.alertname,
                           elastalert_rule=self.rule.get('name'))
        self.annotations.update({
            self.title_labelname:
            self.create_title(matches),
            self.body_labelname:
            self.create_alert_body(matches)
        })
        payload = {'annotations': self.annotations, 'labels': self.labels}

        for host in self.hosts:
            try:
                url = '{}/api/{}/alerts'.format(host, self.api_version)

                if self.ca_certs:
                    verify = self.ca_certs
                else:
                    verify = not self.ignore_ssl_errors
                if self.ignore_ssl_errors:
                    requests.packages.urllib3.disable_warnings()

                response = requests.post(url,
                                         data=json.dumps([payload],
                                                         cls=DateTimeEncoder),
                                         headers=headers,
                                         verify=verify,
                                         proxies=proxies,
                                         timeout=self.timeout,
                                         auth=auth)

                warnings.resetwarnings()
                response.raise_for_status()
            except RequestException as e:
                raise EAException("Error posting to Alertmanager: %s" % e)
        elastalert_logger.info("Alert sent to Alertmanager")
Example #20
def test_looking_up_arrays(ea):
    record = {
        'flags': [1, 2, 3],
        'objects': [
            {'foo': 'bar'},
            {'foo': [{'bar': 'baz'}]},
            {'foo': {'bar': 'baz'}}
        ]
    }
    assert lookup_es_key(record, 'flags[0]') == 1
    assert lookup_es_key(record, 'flags[1]') == 2
    assert lookup_es_key(record, 'objects[0]foo') == 'bar'
    assert lookup_es_key(record, 'objects[1]foo[0]bar') == 'baz'
    assert lookup_es_key(record, 'objects[2]foo.bar') == 'baz'
    assert lookup_es_key(record, 'objects[1]foo[1]bar') is None
    assert lookup_es_key(record, 'objects[1]foo[0]baz') is None
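The bracket syntax in this test implies the path walker also understands list indices. A hedged sketch of one way to tokenize and walk such paths — not the real implementation, and it would misread dict keys made only of digits:

import re

def walk_path_sketch(document, key):
    # 'objects[1]foo[0]bar' -> ['objects', 1, 'foo', 0, 'bar']
    tokens = [int(t) if t.isdigit() else t
              for t in re.split(r'\[(\d+)\]|\.', key) if t]
    for token in tokens:
        try:
            document = document[token]
        except (KeyError, IndexError, TypeError):
            return None
    return document

record = {'objects': [{'foo': 'bar'}, {'foo': [{'bar': 'baz'}]}]}
assert walk_path_sketch(record, 'objects[1]foo[0]bar') == 'baz'
assert walk_path_sketch(record, 'objects[0]foo') == 'bar'
assert walk_path_sketch(record, 'objects[1]foo[1]bar') is None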
Example #21
    def alert(self, matches):
        """ Each match will trigger a POST to the specified endpoint(s). """
        body = self.create_alert_body(matches)
        title = self.create_title(matches)
        for match in matches:
            payload = match if self.post_all_values else {}
            for post_key, es_key in self.post_payload.items():
                payload[post_key] = lookup_es_key(match, es_key)
            headers = {"Content-type": "application/x-www-form-urlencoded"}
            data = self.post_static_payload
            data['message'] = body
            data['title'] = title

            try:
                conn = http.client.HTTPSConnection("api.pushover.net", 443)
                conn.request("POST", "/1/messages.json",
                             urllib.parse.urlencode(data), headers)

            except http.client.HTTPException as e:
                raise EAException("Error posting Pushover alert: %s" % e)
            elastalert_logger.info("Pushover alert sent.")
Example #22
    def create_custom_title(self, matches):
        opsgenie_subject = str(self.rule['opsgenie_subject'])

        if self.opsgenie_subject_args:
            opsgenie_subject_values = [
                lookup_es_key(matches[0], arg)
                for arg in self.opsgenie_subject_args
            ]

            for i, subject_value in enumerate(opsgenie_subject_values):
                if subject_value is None:
                    alert_value = self.rule.get(self.opsgenie_subject_args[i])
                    if alert_value:
                        opsgenie_subject_values[i] = alert_value

            opsgenie_subject_values = [
                '<MISSING VALUE>' if val is None else val
                for val in opsgenie_subject_values
            ]
            return opsgenie_subject.format(*opsgenie_subject_values)

        return opsgenie_subject
Example #23
    def alert(self, matches):
        """ Will trigger a POST to the specified endpoint(s) containing all matches. """

        matches_payloads = []

        for match in matches:
            match_payload = match if self.post_all_values else {}
            match_payload.update(self.post_static_payload)
            for post_key, es_key in list(self.post_payload.items()):
                match_payload[post_key] = lookup_es_key(match, es_key)

            matches_payloads.append(match_payload)

        payload = {
            'rule': self.rule['name'],
            'title': self.create_title(matches),
            'body': self.create_alert_body(matches),
            'matches': matches_payloads,
        }

        headers = {
            "Content-Type": "application/json",
            "Accept": "application/json;charset=utf-8"
        }
        headers.update(self.post_http_headers)
        proxies = {'https': self.post_proxy} if self.post_proxy else None
        for url in self.post_url:
            try:
                response = requests.post(url,
                                         data=json.dumps(payload,
                                                         cls=DateTimeEncoder),
                                         headers=headers,
                                         proxies=proxies,
                                         timeout=self.timeout)
                response.raise_for_status()
            except RequestException as e:
                raise EAException("Error posting HTTP Post alert: %s" % e)
        elastalert_logger.info("HTTP Post alert sent.")
Example #24
    def add_data(self, data):
        if 'query_key' in self.rules:
            qk = self.rules['query_key']
        else:
            qk = None

        for event in data:
            if qk:
                key = hashable(lookup_es_key(event, qk))
            else:
                # If no query_key, we use the key 'all' for all events
                key = 'all'

            # Store the timestamps of recent occurrences, per key
            self.occurrences.setdefault(
                key, EventWindow(self.timeframe(key),
                                 getTimestamp=self.get_ts)).append((event, 1))
            self.check_for_match(key, end=False)

        # We call this multiple times with the 'end' parameter because subclasses
        # may or may not want to check while only partial data has been added
        if data and key in self.occurrences:  # could have been emptied by previous check
            self.check_for_match(key, end=True)
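The setdefault call above lazily creates one sliding window per query key. The same accumulation pattern in miniature, with a plain list standing in for EventWindow:

occurrences = {}
events = [('web-1', 'e1'), ('web-2', 'e2'), ('web-1', 'e3')]  # invented (key, event) pairs
for key, event in events:
    occurrences.setdefault(key, []).append(event)
assert occurrences == {'web-1': ['e1', 'e3'], 'web-2': ['e2']}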
Example #25
    def get_aggregation_summary_text(self, matches):
        text = ''
        if 'aggregation' in self.rule and 'summary_table_fields' in self.rule:
            summary_table_type = self.rule.get('summary_table_type', 'ascii')

            # Type independent prefix
            text = self.rule.get('summary_prefix', '')
            # If a prefix is set, ensure there is a newline between it and the hardcoded
            # 'Aggregation resulted in...' header below
            if text != '':
                text += "\n"

            summary_table_fields = self.rule['summary_table_fields']
            if not isinstance(summary_table_fields, list):
                summary_table_fields = [summary_table_fields]

            # Include a count aggregation so that we can see at a glance how many of each aggregation_key were encountered
            summary_table_fields_with_count = summary_table_fields + ['count']
            text += "Aggregation resulted in the following data for summary_table_fields ==> {0}:\n\n".format(
                summary_table_fields_with_count
            )

            # Prepare match_aggregation used in both table types
            match_aggregation = {}

            # Maintain an aggregate count for each unique key encountered in the aggregation period
            for match in matches:
                key_tuple = tuple([str(lookup_es_key(match, key)) for key in summary_table_fields])
                if key_tuple not in match_aggregation:
                    match_aggregation[key_tuple] = 1
                else:
                    match_aggregation[key_tuple] = match_aggregation[key_tuple] + 1

            # Limit number of rows
            if 'summary_table_max_rows' in self.rule:
                max_rows = self.rule['summary_table_max_rows']
                match_aggregation = {k: v for k, v in Counter(match_aggregation).most_common(max_rows)}

            # Type dependent table style
            if summary_table_type == 'ascii':
                text_table = Texttable(max_width=self.get_aggregation_summary_text__maximum_width())
                text_table.header(summary_table_fields_with_count)
                # Format all fields as 'text' to avoid long numbers being shown as scientific notation
                text_table.set_cols_dtype(['t' for i in summary_table_fields_with_count])

                for keys, count in match_aggregation.items():
                    text_table.add_row(list(keys) + [count])
                text += text_table.draw() + '\n\n'

            elif summary_table_type == 'markdown':
                # Adapted from https://github.com/codazoda/tomark/blob/master/tomark/tomark.py
                # Create table header
                text += '| ' + ' | '.join(map(str, summary_table_fields_with_count)) + ' |\n'
                # Create header separator
                text += '|-----' * len(summary_table_fields_with_count) + '|\n'
                # Create table row
                for keys, count in match_aggregation.items():
                    markdown_row = ""
                    for key in keys:
                        markdown_row += '| ' + str(key) + ' '
                    text += markdown_row + '| ' + str(count) + ' |\n'
                text += '\n'

            # max_rows message
            if 'summary_table_max_rows' in self.rule:
                text += f"Showing top {self.rule['summary_table_max_rows']} rows"
                text += "\n"

            # Type independent suffix
            text += self.rule.get('summary_suffix', '')
        return str(text)
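A few sample matches make the markdown branch's output shape concrete. A stand-alone sketch of the same header/separator/row construction, with invented fields:

from collections import Counter

matches = [{'host': 'web-1', 'user': 'alice'},
           {'host': 'web-1', 'user': 'alice'},
           {'host': 'web-2', 'user': 'bob'}]
fields = ['host', 'user']

counts = Counter(tuple(str(m.get(f)) for f in fields) for m in matches)
lines = ['| ' + ' | '.join(fields + ['count']) + ' |',
         '|-----' * (len(fields) + 1) + '|']
for keys, count in counts.items():
    lines.append('| ' + ' | '.join(keys) + ' | ' + str(count) + ' |')
print('\n'.join(lines))
# | host | user | count |
# |-----|-----|-----|
# | web-1 | alice | 2 |
# | web-2 | bob | 1 |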
Example #26
    def test_file(self, conf, args):
        """ Loads a rule config file, performs a query over the last day (args.days), lists available keys
        and prints the number of results. """
        if args.schema_only:
            return []

        # Set up Elasticsearch client and query
        es_client = elasticsearch_client(conf)

        try:
            is_five = es_client.info()['version']['number'].startswith('5')
        except Exception as e:
            print("Error connecting to ElasticSearch:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            if args.stop_error:
                exit(1)
            return None

        if is_five:
            ElastAlerter.modify_rule_for_ES5(conf)

        start_time = ts_now() - datetime.timedelta(days=args.days)
        end_time = ts_now()
        ts = conf.get('timestamp_field', '@timestamp')
        query = ElastAlerter.get_query(conf['filter'],
                                       starttime=start_time,
                                       endtime=end_time,
                                       timestamp_field=ts,
                                       five=is_five)
        print('test query: ' + str(query))
        index = ElastAlerter.get_index(conf, start_time, end_time)

        # Get one document for schema
        try:
            res = es_client.search(index,
                                   size=1,
                                   body=query,
                                   ignore_unavailable=True)
            print('test res: ' + str(res))
        except Exception as e:
            print("Error running your filter:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            if args.stop_error:
                exit(1)
            return None
        num_hits = len(res['hits']['hits'])
        if not num_hits:
            return []

        terms = res['hits']['hits'][0]['_source']
        doc_type = res['hits']['hits'][0]['_type']

        # Get a count of all docs
        count_query = ElastAlerter.get_query(conf['filter'],
                                             starttime=start_time,
                                             endtime=end_time,
                                             timestamp_field=ts,
                                             sort=False,
                                             five=is_five)
        try:
            res = es_client.count(index,
                                  doc_type=doc_type,
                                  body=count_query,
                                  ignore_unavailable=True)
        except Exception as e:
            print("Error querying Elasticsearch:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            if args.stop_error:
                exit(1)
            return None

        num_hits = res['count']
        print("Got %s hits from the last %s day%s" %
              (num_hits, args.days, 's' if args.days > 1 else ''))
        print("\nAvailable terms in first hit:")
        print_terms(terms, '')

        # Check for missing keys
        pk = conf.get('primary_key')
        ck = conf.get('compare_key')
        if pk and not lookup_es_key(terms, pk):
            print("Warning: primary key %s is either missing or null!" % (pk),
                  file=sys.stderr)
        if ck and not lookup_es_key(terms, ck):
            print("Warning: compare key %s is either missing or null!" % (ck),
                  file=sys.stderr)

        include = conf.get('include')
        if include:
            for term in include:
                if not lookup_es_key(terms, term) and '*' not in term:
                    print("Included term %s may be missing or null" % (term),
                          file=sys.stderr)

        for term in conf.get('top_count_keys', []):
            # If the index starts with 'logstash', fields with .raw will be available but won't be in _source
            if term not in terms and not (term.endswith('.raw')
                                          and term[:-4] in terms
                                          and index.startswith('logstash')):
                print("top_count_key %s may be missing" % (term),
                      file=sys.stderr)
        print('')  # Newline

        # Download up to 10,000 documents to save
        if args.save and not args.count:
            try:
                res = es_client.search(index,
                                       size=10000,
                                       body=query,
                                       ignore_unavailable=True)
            except Exception as e:
                print("Error running your filter:", file=sys.stderr)
                print(repr(e)[:2048], file=sys.stderr)
                if args.stop_error:
                    exit(1)
                return None
            num_hits = len(res['hits']['hits'])
            print("Downloaded %s documents to save" % (num_hits))
            return res['hits']['hits']
Example #27
    def alert(self, matches):
        # Reset arbitrary fields to pick up changes
        self.get_arbitrary_fields()
        if self.deferred_settings:
            fields = self.client.fields()
            for jira_field in self.deferred_settings:
                value = lookup_es_key(matches[0], self.rule[jira_field][1:])
                self.set_jira_arg(jira_field, value, fields)

        title = self.create_title(matches)

        if self.bump_tickets:
            ticket = self.find_existing_ticket(matches)
            if ticket:
                inactivity_datetime = ts_now() - datetime.timedelta(
                    days=self.bump_after_inactivity)
                if ts_to_dt(ticket.fields.updated) >= inactivity_datetime:
                    if self.pipeline is not None:
                        self.pipeline['jira_ticket'] = None
                        self.pipeline['jira_server'] = self.server
                    return None
                elastalert_logger.info('Commenting on existing ticket %s' %
                                       (ticket.key))
                for match in matches:
                    try:
                        self.comment_on_ticket(ticket, match)
                    except JIRAError as e:
                        elastalert_logger.exception(
                            "Error while commenting on ticket %s: %s" %
                            (ticket, e))
                    if self.labels:
                        for label in self.labels:
                            try:
                                ticket.fields.labels.append(label)
                            except JIRAError as e:
                                elastalert_logger.exception(
                                    "Error while appending labels to ticket %s: %s"
                                    % (ticket, e))
                if self.transition:
                    elastalert_logger.info('Transitioning existing ticket %s' %
                                           (ticket.key))
                    try:
                        self.transition_ticket(ticket)
                    except JIRAError as e:
                        elastalert_logger.exception(
                            "Error while transitioning ticket %s: %s" %
                            (ticket, e))

                if self.pipeline is not None:
                    self.pipeline['jira_ticket'] = ticket
                    self.pipeline['jira_server'] = self.server
                return None
        if self.bump_only:
            return None

        self.jira_args['summary'] = title
        self.jira_args['description'] = self.create_alert_body(matches)

        try:
            self.issue = self.client.create_issue(**self.jira_args)

            # You cannot add watchers on initial creation, only as a follow-up action
            if self.watchers:
                for watcher in self.watchers:
                    try:
                        self.client.add_watcher(self.issue.key, watcher)
                    except Exception as ex:
                        # Re-raise the exception, preserve the stack-trace, and give some
                        # context as to which watcher failed to be added
                        raise Exception(
                            "Exception encountered when trying to add '{0}' as a watcher. Does the user exist?\n{1}"
                            .format(watcher,
                                    ex)).with_traceback(sys.exc_info()[2])

        except JIRAError as e:
            raise EAException(
                "Error creating JIRA ticket using jira_args (%s): %s" %
                (self.jira_args, e))
        elastalert_logger.info("Opened Jira ticket: %s" % (self.issue))

        if self.pipeline is not None:
            self.pipeline['jira_ticket'] = self.issue
            self.pipeline['jira_server'] = self.server
Example #28
    def test_file(self, conf, args):
        """ Loads a rule config file, performs a query over the last day (args.days), lists available keys
        and prints the number of results. """
        if args.schema_only:
            return []

        # Set up Elasticsearch client and query
        es_client = elasticsearch_client(conf)

        try:
            ElastAlerter.modify_rule_for_ES5(conf)
        except Exception as e:
            print("Error connecting to ElasticSearch:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            if args.stop_error:
                exit(1)
            return None

        start_time = ts_now() - datetime.timedelta(days=args.days)
        end_time = ts_now()
        ts = conf.get('timestamp_field', '@timestamp')
        query = ElastAlerter.get_query(
            conf['filter'],
            starttime=start_time,
            endtime=end_time,
            timestamp_field=ts,
            to_ts_func=conf['dt_to_ts'],
            five=conf['five']
        )
        index = ElastAlerter.get_index(conf, start_time, end_time)

        # Get one document for schema
        try:
            res = es_client.search(index, size=1, body=query, ignore_unavailable=True)
        except Exception as e:
            print("Error running your filter:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            if args.stop_error:
                exit(1)
            return None
        num_hits = len(res['hits']['hits'])
        if not num_hits:
            return []

        terms = res['hits']['hits'][0]['_source']
        doc_type = res['hits']['hits'][0]['_type']

        # Get a count of all docs
        count_query = ElastAlerter.get_query(
            conf['filter'],
            starttime=start_time,
            endtime=end_time,
            timestamp_field=ts,
            to_ts_func=conf['dt_to_ts'],
            sort=False,
            five=conf['five']
        )
        try:
            res = es_client.count(index, doc_type=doc_type, body=count_query, ignore_unavailable=True)
        except Exception as e:
            print("Error querying Elasticsearch:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            if args.stop_error:
                exit(1)
            return None

        num_hits = res['count']

        if args.formatted_output:
            self.formatted_output['hits'] = num_hits
            self.formatted_output['days'] = args.days
            self.formatted_output['terms'] = terms.keys()
            self.formatted_output['result'] = terms
        else:
            print("Got %s hits from the last %s day%s" % (num_hits, args.days, 's' if args.days > 1 else ''))
            print("\nAvailable terms in first hit:")
            print_terms(terms, '')

        # Check for missing keys
        pk = conf.get('primary_key')
        ck = conf.get('compare_key')
        if pk and not lookup_es_key(terms, pk):
            print("Warning: primary key %s is either missing or null!" % (pk), file=sys.stderr)
        if ck and not lookup_es_key(terms, ck):
            print("Warning: compare key %s is either missing or null!" % (ck), file=sys.stderr)

        include = conf.get('include')
        if include:
            for term in include:
                if not lookup_es_key(terms, term) and '*' not in term:
                    print("Included term %s may be missing or null" % (term), file=sys.stderr)

        for term in conf.get('top_count_keys', []):
            # If the index starts with 'logstash', fields with .raw will be available but won't be in _source
            if term not in terms and not (term.endswith('.raw') and term[:-4] in terms and index.startswith('logstash')):
                print("top_count_key %s may be missing" % (term), file=sys.stderr)
        if not args.formatted_output:
            print('')  # Newline

        # Download up to max_query_size (defaults to 10,000) documents to save
        if (args.save or args.formatted_output) and not args.count:
            try:
                res = es_client.search(index, size=args.max_query_size, body=query, ignore_unavailable=True)
            except Exception as e:
                print("Error running your filter:", file=sys.stderr)
                print(repr(e)[:2048], file=sys.stderr)
                if args.stop_error:
                    exit(1)
                return None
            num_hits = len(res['hits']['hits'])

            if args.save:
                print("Downloaded %s documents to save" % (num_hits))
            return res['hits']['hits']
Example #29
    def test_file(self, conf, args):
        """ Loads a rule config file, performs a query over the last day (args.days), lists available keys
        and prints the number of results. """
        load_options(conf, {})
        print("Successfully loaded %s\n" % (conf['name']))

        if args.schema_only:
            return []

        # Set up elasticsearch client and query
        es_config = ElastAlerter.build_es_conn_config(conf)
        es_client = ElastAlerter.new_elasticsearch(es_config)
        start_time = ts_now() - datetime.timedelta(days=args.days)
        end_time = ts_now()
        ts = conf.get('timestamp_field', '@timestamp')
        query = ElastAlerter.get_query(conf['filter'], starttime=start_time, endtime=end_time, timestamp_field=ts)
        index = ElastAlerter.get_index(conf, start_time, end_time)

        # Get one document for schema
        try:
            res = es_client.search(index, size=1, body=query, ignore_unavailable=True)
        except Exception as e:
            print("Error running your filter:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            return None
        num_hits = len(res['hits']['hits'])
        if not num_hits:
            return []

        terms = res['hits']['hits'][0]['_source']
        doc_type = res['hits']['hits'][0]['_type']

        # Get a count of all docs
        count_query = ElastAlerter.get_query(conf['filter'], starttime=start_time, endtime=end_time, timestamp_field=ts, sort=False)
        count_query = {'query': {'filtered': count_query}}
        try:
            res = es_client.count(index, doc_type=doc_type, body=count_query, ignore_unavailable=True)
        except Exception as e:
            print("Error querying Elasticsearch:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            return None

        num_hits = res['count']
        print("Got %s hits from the last %s day%s" % (num_hits, args.days, 's' if args.days > 1 else ''))
        print("\nAvailable terms in first hit:")
        print_terms(terms, '')

        # Check for missing keys
        pk = conf.get('primary_key')
        ck = conf.get('compare_key')
        if pk and not lookup_es_key(terms, pk):
            print("Warning: primary key %s is either missing or null!" % (pk), file=sys.stderr)
        if ck and not lookup_es_key(terms, ck):
            print("Warning: compare key %s is either missing or null!" % (ck), file=sys.stderr)

        include = conf.get('include')
        if include:
            for term in include:
                if not lookup_es_key(terms, term) and '*' not in term:
                    print("Included term %s may be missing or null" % (term), file=sys.stderr)

        for term in conf.get('top_count_keys', []):
            # If the index starts with 'logstash', fields with .raw will be available but won't be in _source
            if term not in terms and not (term.endswith('.raw') and term[:-4] in terms and index.startswith('logstash')):
                print("top_count_key %s may be missing" % (term), file=sys.stderr)
        print('')  # Newline

        # Download up to 10,000 documents to save
        if args.save and not args.count:
            try:
                res = es_client.search(index, size=10000, body=query, ignore_unavailable=True)
            except Exception as e:
                print("Error running your filter:", file=sys.stderr)
                print(repr(e)[:2048], file=sys.stderr)
                return None
            num_hits = len(res['hits']['hits'])
            print("Downloaded %s documents to save" % (num_hits))
            return res['hits']['hits']

        return None
Example #30
    def test_file(self, conf):
        """Loads a rule config file, performs a query over the last day (self.args.days), lists available keys
        and prints the number of results."""
        if self.args.schema_only:
            return []

        # Set up Elasticsearch client and query
        es_client = elasticsearch_client(conf)

        ts = conf.get('timestamp_field', '@timestamp')
        query = ElastAlerter.get_query(conf['filter'],
                                       starttime=self.starttime,
                                       endtime=self.endtime,
                                       timestamp_field=ts,
                                       to_ts_func=conf['dt_to_ts'])
        index = ElastAlerter.get_index(conf, self.starttime, self.endtime)

        # Get one document for schema
        try:
            res = es_client.search(index=index,
                                   size=1,
                                   body=query,
                                   ignore_unavailable=True)
        except Exception as e:
            print("Error running your filter:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            if self.args.stop_error:
                exit(3)
            return None
        num_hits = len(res['hits']['hits'])
        if not num_hits:
            print("Didn't get any results.")
            return []

        terms = res['hits']['hits'][0]['_source']

        # Get a count of all docs
        count_query = ElastAlerter.get_query(conf['filter'],
                                             starttime=self.starttime,
                                             endtime=self.endtime,
                                             timestamp_field=ts,
                                             to_ts_func=conf['dt_to_ts'],
                                             sort=False)
        try:
            res = es_client.count(index=index,
                                  body=count_query,
                                  ignore_unavailable=True)
        except Exception as e:
            print("Error querying Elasticsearch:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            if self.args.stop_error:
                exit(2)
            return None

        num_hits = res['count']

        if self.args.formatted_output:
            self.formatted_output['hits'] = num_hits
            self.formatted_output['days'] = self.args.days
            self.formatted_output['terms'] = list(terms.keys())
            self.formatted_output['result'] = terms
        else:
            print(
                "Got %s hits from the last %s day%s" %
                (num_hits, self.args.days, "s" if self.args.days > 1 else ""))
            print("\nAvailable terms in first hit:")
            print_terms(terms, '')

        # Check for missing keys
        pk = conf.get('primary_key')
        ck = conf.get('compare_key')
        if pk and not lookup_es_key(terms, pk):
            print("Warning: primary key %s is either missing or null!" % (pk),
                  file=sys.stderr)
        if ck and not lookup_es_key(terms, ck):
            print("Warning: compare key %s is either missing or null!" % (ck),
                  file=sys.stderr)

        include = conf.get('include')
        if include:
            for term in include:
                if not lookup_es_key(terms, term) and '*' not in term:
                    print("Included term %s may be missing or null" % (term),
                          file=sys.stderr)

        for term in conf.get('top_count_keys', []):
            # If the index starts with 'logstash', fields with .raw will be available but won't be in _source
            if term not in terms and not (term.endswith('.raw')
                                          and term[:-4] in terms
                                          and index.startswith('logstash')):
                print("top_count_key %s may be missing" % (term),
                      file=sys.stderr)
        if not self.args.formatted_output:
            print('')  # Newline

        # Download up to max_query_size (defaults to 10,000) documents to save
        if (self.args.save
                or self.args.formatted_output) and not self.args.count:
            try:
                res = es_client.search(index=index,
                                       size=self.args.max_query_size,
                                       body=query,
                                       ignore_unavailable=True)
            except Exception as e:
                print("Error running your filter:", file=sys.stderr)
                print(repr(e)[:2048], file=sys.stderr)
                if self.args.stop_error:
                    exit(2)
                return None
            num_hits = len(res['hits']['hits'])

            if self.args.save:
                print("Downloaded %s documents to save" % (num_hits))
            return res['hits']['hits']
Example #31
    def alert(self, matches):
        body = self.create_alert_body(matches)
        title = self.create_title(matches)

        # post to mattermost
        headers = {'content-type': 'application/json'}
        # set https proxy, if it was provided
        proxies = {'https': self.mattermost_proxy} if self.mattermost_proxy else None
        payload = {
            'username': self.mattermost_username_override,
            'attachments': [
                {
                    'fallback': "{0}: {1}".format(title, self.mattermost_msg_pretext),
                    'color': self.mattermost_msg_color,
                    'title': title,
                    'pretext': self.mattermost_msg_pretext,
                    'fields': []
                }
            ]
        }

        if self.rule.get('alert_text_type') == 'alert_text_only':
            payload['attachments'][0]['text'] = body
        else:
            payload['text'] = body

        if self.mattermost_msg_fields != '':
            payload['attachments'][0]['fields'] = self.populate_fields(matches)

        if self.mattermost_icon_url_override != '':
            payload['icon_url'] = self.mattermost_icon_url_override

        if self.mattermost_channel_override != '':
            payload['channel'] = self.mattermost_channel_override

        if self.mattermost_title != '':
            payload['attachments'][0]['title'] = self.mattermost_title

        if self.mattermost_title_link != '':
            payload['attachments'][0]['title_link'] = self.mattermost_title_link

        if self.mattermost_footer != '':
            payload['attachments'][0]['footer'] = self.mattermost_footer

        if self.mattermost_footer_icon != '':
            payload['attachments'][0]['footer_icon'] = self.mattermost_footer_icon

        if self.mattermost_image_url != '':
            payload['attachments'][0]['image_url'] = self.mattermost_image_url

        if self.mattermost_thumb_url != '':
            payload['attachments'][0]['thumb_url'] = self.mattermost_thumb_url

        if self.mattermost_author_name != '':
            payload['attachments'][0]['author_name'] = self.mattermost_author_name

        if self.mattermost_author_link != '':
            payload['attachments'][0]['author_link'] = self.mattermost_author_link

        if self.mattermost_author_icon != '':
            payload['attachments'][0]['author_icon'] = self.mattermost_author_icon

        if self.mattermost_attach_kibana_discover_url:
            kibana_discover_url = lookup_es_key(matches[0], 'kibana_discover_url')
            if kibana_discover_url:
                payload['attachments'].append({
                    'color': self.mattermost_kibana_discover_color,
                    'title': self.mattermost_kibana_discover_title,
                    'title_link': kibana_discover_url
                })

        for url in self.mattermost_webhook_url:
            try:
                if self.mattermost_ignore_ssl_errors:
                    requests.urllib3.disable_warnings()

                response = requests.post(
                    url, data=json.dumps(payload, cls=DateTimeEncoder),
                    headers=headers, verify=not self.mattermost_ignore_ssl_errors,
                    proxies=proxies)

                warnings.resetwarnings()
                response.raise_for_status()
            except RequestException as e:
                raise EAException("Error posting to Mattermost: %s" % e)
        elastalert_logger.info("Alert sent to Mattermost")
Example #32
    def populate_title(self, matches):
        return lookup_es_key(matches[0], self.ms_teams_alert_title)
Example #33
    def alert(self, matches):
        body = self.my_create_alert_body(matches)

        # Add JIRA ticket if it exists
        if self.pipeline is not None and 'jira_ticket' in self.pipeline:
            url = '%s/browse/%s' % (self.pipeline['jira_server'],
                                    self.pipeline['jira_ticket'])
            body += '\nJIRA ticket: %s' % (url)

        to_addr = self.rule['email']
        if 'email_from_field' in self.rule:
            recipient = lookup_es_key(matches[0],
                                      self.rule['email_from_field'])
            if isinstance(recipient, str):
                if '@' in recipient:
                    to_addr = [recipient]
                elif 'email_add_domain' in self.rule:
                    to_addr = [recipient + self.rule['email_add_domain']]
            elif isinstance(recipient, list):
                to_addr = recipient
                if 'email_add_domain' in self.rule:
                    to_addr = [
                        name + self.rule['email_add_domain']
                        for name in to_addr
                    ]
        if self.rule.get('email_format') == 'html':
            # Pass the str body directly; _charset handles the UTF-8 encoding
            email_msg = MIMEText(body, 'html', _charset='UTF-8')
        else:
            email_msg = MIMEText(body, _charset='UTF-8')
        email_msg['Subject'] = self.create_title(matches)
        email_msg['To'] = ', '.join(to_addr)
        email_msg['From'] = self.from_addr
        email_msg['Reply-To'] = self.rule.get('email_reply_to',
                                              email_msg['To'])
        email_msg['Date'] = formatdate()
        if self.rule.get('cc'):
            email_msg['CC'] = ','.join(self.rule['cc'])
            to_addr = to_addr + self.rule['cc']
        if self.rule.get('bcc'):
            to_addr = to_addr + self.rule['bcc']

        try:
            if self.smtp_ssl:
                if self.smtp_port:
                    self.smtp = SMTP_SSL(self.smtp_host,
                                         self.smtp_port,
                                         keyfile=self.smtp_key_file,
                                         certfile=self.smtp_cert_file)
                else:
                    self.smtp = SMTP_SSL(self.smtp_host,
                                         keyfile=self.smtp_key_file,
                                         certfile=self.smtp_cert_file)
            else:
                if self.smtp_port:
                    self.smtp = SMTP(self.smtp_host, self.smtp_port)
                else:
                    self.smtp = SMTP(self.smtp_host)
                self.smtp.ehlo()
                if self.smtp.has_extn('STARTTLS'):
                    self.smtp.starttls(keyfile=self.smtp_key_file,
                                       certfile=self.smtp_cert_file)
            if 'smtp_auth_file' in self.rule:
                self.smtp.login(self.user, self.password)
        # Catch the auth-specific subclass first: SMTPAuthenticationError
        # inherits from SMTPException, so the reverse order never fires.
        except SMTPAuthenticationError as e:
            raise EAException("SMTP username/password rejected: %s" % (e))
        except (SMTPException, error) as e:
            raise EAException("Error connecting to SMTP host: %s" % (e))
        self.smtp.sendmail(self.from_addr, to_addr, email_msg.as_string())
        self.smtp.close()

        elastalert_logger.info("Sent email to %s" % (to_addr))
Example #34
    def alert(self, matches):
        body = self.create_alert_body(matches)

        # post to pagerduty
        headers = {'content-type': 'application/json'}
        if self.pagerduty_api_version == 'v2':

            custom_details_payload = {'information': body} if self.pagerduty_v2_payload_include_all_info else {}
            if self.pagerduty_v2_payload_custom_details:
                for match in matches:
                    for custom_details_key, es_key in list(self.pagerduty_v2_payload_custom_details.items()):
                        custom_details_payload[custom_details_key] = lookup_es_key(match, es_key)

            payload = {
                'routing_key': self.pagerduty_service_key,
                'event_action': self.pagerduty_event_type,
                'dedup_key': self.get_incident_key(matches),
                'client': self.pagerduty_client_name,
                'payload': {
                    'class': self.resolve_formatted_key(self.pagerduty_v2_payload_class,
                                                        self.pagerduty_v2_payload_class_args,
                                                        matches),
                    'component': self.resolve_formatted_key(self.pagerduty_v2_payload_component,
                                                            self.pagerduty_v2_payload_component_args,
                                                            matches),
                    'group': self.resolve_formatted_key(self.pagerduty_v2_payload_group,
                                                        self.pagerduty_v2_payload_group_args,
                                                        matches),
                    'severity': self.pagerduty_v2_payload_severity,
                    'source': self.resolve_formatted_key(self.pagerduty_v2_payload_source,
                                                         self.pagerduty_v2_payload_source_args,
                                                         matches),
                    'summary': self.create_title(matches),
                    'custom_details': custom_details_payload,
                },
            }
            match_timestamp = lookup_es_key(matches[0], self.rule.get('timestamp_field', '@timestamp'))
            if match_timestamp:
                payload['payload']['timestamp'] = match_timestamp
        else:
            payload = {
                'service_key': self.pagerduty_service_key,
                'description': self.create_title(matches),
                'event_type': self.pagerduty_event_type,
                'incident_key': self.get_incident_key(matches),
                'client': self.pagerduty_client_name,
                'details': {
                    "information": body,
                },
            }

        # set https proxy, if it was provided
        proxies = {'https': self.pagerduty_proxy} if self.pagerduty_proxy else None
        try:
            response = requests.post(
                self.url,
                data=json.dumps(payload, cls=DateTimeEncoder, ensure_ascii=False),
                headers=headers,
                proxies=proxies
            )
            response.raise_for_status()
        except RequestException as e:
            raise EAException("Error posting to pagerduty: %s" % e)

        if self.pagerduty_event_type == 'trigger':
            elastalert_logger.info("Trigger sent to PagerDuty")
        if self.pagerduty_event_type == 'resolve':
            elastalert_logger.info("Resolve sent to PagerDuty")
        if self.pagerduty_event_type == 'acknowledge':
            elastalert_logger.info("acknowledge sent to PagerDuty")
Example #35
    def alert(self, matches):
        alerts = []

        qk = self.rule.get('query_key', None)

        fullmessage = {}
        for match in matches:
            if qk is not None:
                resmatch = lookup_es_key(match, qk)
            else:
                resmatch = None

            if resmatch is not None:
                elastalert_logger.info(
                    'Alert for %s, %s at %s:' %
                    (self.rule['name'], resmatch,
                     lookup_es_key(match, self.rule['timestamp_field'])))
                alerts.append(
                    'Alert for %s, %s at %s:' %
                    (self.rule['name'], resmatch,
                     lookup_es_key(match, self.rule['timestamp_field'])))
                fullmessage['match'] = resmatch
            else:
                elastalert_logger.info(
                    'Rule %s generated an alert at %s:' %
                    (self.rule['name'],
                     lookup_es_key(match, self.rule['timestamp_field'])))
                alerts.append(
                    'Rule %s generated an alert at %s:' %
                    (self.rule['name'],
                     lookup_es_key(match, self.rule['timestamp_field'])))
                fullmessage['match'] = lookup_es_key(
                    match, self.rule['timestamp_field'])
            elastalert_logger.info(str(BasicMatchString(self.rule, match)))

        fullmessage['alerts'] = alerts
        fullmessage['rule'] = self.rule['name']
        fullmessage['rule_file'] = self.rule['rule_file']

        # Note: `match` here is the last item from the loop above
        fullmessage['matching'] = str(BasicMatchString(self.rule, match))
        fullmessage['alertDate'] = datetime.datetime.now().strftime(
            "%Y-%m-%d %H:%M:%S")
        fullmessage['body'] = self.create_alert_body(matches)

        fullmessage['matches'] = matches

        self.stomp_hostname = self.rule.get('stomp_hostname', 'localhost')
        self.stomp_hostport = self.rule.get('stomp_hostport', '61613')
        self.stomp_login = self.rule.get('stomp_login', 'admin')
        self.stomp_password = self.rule.get('stomp_password', 'admin')
        self.stomp_destination = self.rule.get('stomp_destination',
                                               '/queue/ALERT')
        self.stomp_ssl = self.rule.get('stomp_ssl', False)

        try:
            conn = stomp.Connection(
                [(self.stomp_hostname, self.stomp_hostport)],
                use_ssl=self.stomp_ssl)

            conn.connect(self.stomp_login, self.stomp_password)
            # Ensure the CONNECTED frame is received; otherwise the disconnect call will fail.
            time.sleep(1)
            conn.send(self.stomp_destination, json.dumps(fullmessage))
            conn.disconnect()
        except Exception as e:
            raise EAException("Error posting to Stomp: %s" % e)
        elastalert_logger.info("Alert sent to Stomp")
Example #36
    def alert(self, matches):
        body = self.create_alert_body(matches)

        body = self.format_body(body)
        # post to slack
        headers = {'content-type': 'application/json'}
        # set https proxy, if it was provided
        proxies = {'https': self.slack_proxy} if self.slack_proxy else None
        payload = {
            'username':
            self.slack_username_override,
            'parse':
            self.slack_parse_override,
            'text':
            self.slack_text_string,
            'attachments': [{
                'color': self.slack_msg_color,
                'title': self.create_title(matches),
                'text': body,
                'mrkdwn_in': ['text', 'pretext'],
                'fields': []
            }]
        }

        # if fields are defined, populate notable fields for the alert
        if self.slack_alert_fields != '':
            payload['attachments'][0]['fields'] = self.populate_fields(matches)

        if self.slack_icon_url_override != '':
            payload['icon_url'] = self.slack_icon_url_override
        else:
            payload['icon_emoji'] = self.slack_emoji_override

        if self.slack_title != '':
            payload['attachments'][0]['title'] = self.slack_title

        if self.slack_title_link != '':
            payload['attachments'][0]['title_link'] = self.slack_title_link

        if self.slack_footer != '':
            payload['attachments'][0]['footer'] = self.slack_footer

        if self.slack_footer_icon != '':
            payload['attachments'][0]['footer_icon'] = self.slack_footer_icon

        if self.slack_image_url != '':
            payload['attachments'][0]['image_url'] = self.slack_image_url

        if self.slack_thumb_url != '':
            payload['attachments'][0]['thumb_url'] = self.slack_thumb_url

        if self.slack_author_name != '':
            payload['attachments'][0]['author_name'] = self.slack_author_name

        if self.slack_author_link != '':
            payload['attachments'][0]['author_link'] = self.slack_author_link

        if self.slack_author_icon != '':
            payload['attachments'][0]['author_icon'] = self.slack_author_icon

        if self.slack_msg_pretext != '':
            payload['attachments'][0]['pretext'] = self.slack_msg_pretext

        if self.slack_attach_kibana_discover_url:
            kibana_discover_url = lookup_es_key(matches[0],
                                                'kibana_discover_url')
            if kibana_discover_url:
                payload['attachments'].append({
                    'color': self.slack_kibana_discover_color,
                    'title': self.slack_kibana_discover_title,
                    'title_link': kibana_discover_url
                })

        if self.slack_attach_jira_ticket_url and self.pipeline is not None and 'jira_ticket' in self.pipeline:
            jira_url = '%s/browse/%s' % (self.pipeline['jira_server'],
                                         self.pipeline['jira_ticket'])

            payload['attachments'].append({
                'color': self.slack_jira_ticket_color,
                'title': self.slack_jira_ticket_title,
                'title_link': jira_url
            })

        for url in self.slack_webhook_url:
            for channel_override in self.slack_channel_override:
                try:
                    if self.slack_ca_certs:
                        verify = self.slack_ca_certs
                    else:
                        verify = not self.slack_ignore_ssl_errors
                    if self.slack_ignore_ssl_errors:
                        requests.packages.urllib3.disable_warnings()
                    payload['channel'] = channel_override
                    response = requests.post(url,
                                             data=json.dumps(
                                                 payload, cls=DateTimeEncoder),
                                             headers=headers,
                                             verify=verify,
                                             proxies=proxies,
                                             timeout=self.slack_timeout)
                    warnings.resetwarnings()
                    response.raise_for_status()
                except RequestException as e:
                    raise EAException("Error posting to slack: %s" % e)
        elastalert_logger.info("Alert '%s' sent to Slack" % self.rule['name'])
Example #37
    def test_file(self, args):
        """ Loads a rule config file, performs a query over the last day (args.days), lists available keys
        and prints the number of results. """
        filename = args.file
        with open(filename) as fh:
            conf = yaml.load(fh, Loader=yaml.FullLoader)
        load_options(conf)
        print("Successfully loaded %s\n" % (conf["name"]))

        if args.schema_only:
            return []

        # Set up elasticsearch client and query
        es_client = Elasticsearch(host=conf["es_host"], port=conf["es_port"])
        start_time = ts_now() - datetime.timedelta(days=args.days)
        end_time = ts_now()
        ts = conf.get("timestamp_field", "@timestamp")
        query = ElastAlerter.get_query(conf["filter"], starttime=start_time, endtime=end_time, timestamp_field=ts)
        index = ElastAlerter.get_index(conf, start_time, end_time)

        # Get one document for schema
        try:
            res = es_client.search(index, size=1, body=query, ignore_unavailable=True)
        except Exception as e:
            print("Error running your filter:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            return None
        num_hits = len(res["hits"]["hits"])
        if not num_hits:
            return []

        terms = res["hits"]["hits"][0]["_source"]
        doc_type = res["hits"]["hits"][0]["_type"]

        # Get a count of all docs
        count_query = ElastAlerter.get_query(
            conf["filter"], starttime=start_time, endtime=end_time, timestamp_field=ts, sort=False
        )
        count_query = {"query": {"filtered": count_query}}
        try:
            res = es_client.count(index, doc_type=doc_type, body=count_query, ignore_unavailable=True)
        except Exception as e:
            print("Error querying Elasticsearch:", file=sys.stderr)
            print(repr(e)[:2048], file=sys.stderr)
            return None

        num_hits = res["count"]
        print("Got %s hits from the last %s day%s" % (num_hits, args.days, "s" if args.days > 1 else ""))
        print("\nAvailable terms in first hit:")
        print_terms(terms, "")

        # Check for missing keys
        pk = conf.get("primary_key")
        ck = conf.get("compare_key")
        if pk and not lookup_es_key(terms, pk):
            print("Warning: primary key %s is either missing or null!" % (pk), file=sys.stderr)
        if ck and not lookup_es_key(terms, ck):
            print("Warning: compare key %s is either missing or null!" % (ck), file=sys.stderr)

        include = conf.get("include")
        if include:
            for term in include:
                if not lookup_es_key(terms, term) and "*" not in term:
                    print("Included term %s may be missing or null" % (term), file=sys.stderr)

        for term in conf.get("top_count_keys", []):
            # If the index starts with 'logstash', .raw fields are queryable but won't appear in _source
            if term not in terms and not (
                term.endswith(".raw") and term[:-4] in terms and index.startswith("logstash")
            ):
                print("top_count_key %s may be missing" % (term), file=sys.stderr)
        print("")  # Newline

        # Download up to 10,000 documents to save
        if args.save and not args.count:
            try:
                res = es_client.search(index, size=10000, body=query, ignore_unavailable=True)
            except Exception as e:
                print("Error running your filter:", file=sys.stderr)
                print(repr(e)[:2048], file=sys.stderr)
                return None
            num_hits = len(res["hits"]["hits"])
            print("Downloaded %s documents to save" % (num_hits))
            return res["hits"]["hits"]

        return None
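test_file only needs an argparse-style object carrying the attributes it reads. A hypothetical driver (the class name MockElastAlerter and the rule path are assumptions):

import argparse

args = argparse.Namespace(
    file='example_rules/example_frequency.yaml',  # rule to validate
    days=1,             # size of the query window
    schema_only=False,  # True would stop after config validation
    save=False,         # True would download up to 10,000 documents
    count=False,        # suppresses the download even when save is set
)
hits = MockElastAlerter().test_file(args)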