Example #1
    def handle_event(self, event, value, qk='all'):
        self.first_event.setdefault(qk, event)

        self.ref_windows.setdefault(
            qk, EventWindow(self.timeframe, getTimestamp=self.get_ts))
        self.cur_windows.setdefault(
            qk,
            EventWindow(self.timeframe, self.ref_windows[qk].append,
                        self.get_ts))

        self.cur_windows[qk].append((event, value))

        # Don't alert if ref window has not yet been filled for this key AND
        if (event[self.ts_field] - self.first_event[qk][self.ts_field]
                < self.rules['timeframe'] * 2):
            # ElastAlert has not been running long enough for any alerts OR
            if not self.ref_window_filled_once:
                elastalert_logger.info(
                    'SpikeAggregationRule.handle_event reference window not filled'
                )
                return
            # This rule is not using alert_on_new_data (with query_key) OR
            if not (self.rules.get('query_key')
                    and self.rules.get('alert_on_new_data')):
                elastalert_logger.info(
                    'SpikeAggregationRule.handle_event not alerting on new data'
                )
                return
            # An alert for this qk has recently fired
            if (qk in self.skip_checks
                    and event[self.ts_field] < self.skip_checks[qk]):
                elastalert_logger.info(
                    'SpikeAggregationRule.handle_event recent alert')
                return
        else:
            self.ref_window_filled_once = True

        # Average the values in each window: EventWindow.count() is really a
        # running total of the appended values, so dividing by the number of
        # entries gives the mean.
        reference = self.ref_windows[qk].count() / len(self.ref_windows[qk].data)
        current = self.cur_windows[qk].count() / len(self.cur_windows[qk].data)

        if self.event_matches(reference, current):
            # skip over placeholder events which have count=0
            for match, value in self.cur_windows[qk].data:
                if value:
                    break

            self.add_match(match, qk)
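
The `event_matches` helper that decides whether the averaged windows amount to a spike is not included in this example. A minimal sketch of what it could look like, assuming ratio-based `spike_height` and `spike_type` options in the style of ElastAlert's SpikeRule (both option names are assumptions here, not taken from the code above):

    def event_matches(self, reference, current):
        # Sketch only: `spike_height` and `spike_type` are assumed options.
        spike_type = self.rules.get('spike_type', 'both')
        height = self.rules['spike_height']
        if reference == 0:
            # A spike "up" from nothing: any non-zero current average matches.
            return current > 0 and spike_type in ('up', 'both')
        ratio = current / reference
        if spike_type in ('up', 'both') and ratio >= height:
            return True
        if spike_type in ('down', 'both') and ratio <= 1.0 / height:
            return True
        return False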
Example #2
    def add_terms_data(self, terms):
        for timestamp, buckets in terms.items():
            for bucket in buckets:
                event = ({
                    self.ts_field: timestamp,
                    self.rules['query_key']: bucket['key']
                }, bucket['doc_count'])
                self.occurrences.setdefault(
                    bucket['key'],
                    EventWindow(self.timeframe(bucket['key']),
                                getTimestamp=self.get_ts)).append(event)
                self.check_for_match(bucket['key'])
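
The shape of `terms` is implied by the loop above: a mapping of timestamp to a list of terms-aggregation buckets, each carrying a `key` and a `doc_count`, as an Elasticsearch terms aggregation returns them. A hypothetical call (the `rule` instance and the host names are made up for illustration):

    terms = {
        ts_to_dt('2014-01-01T10:00:00'): [
            {'key': 'host-1', 'doc_count': 3},
            {'key': 'host-2', 'doc_count': 7},
        ],
    }
    rule.add_terms_data(terms)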
Example #3
    def add_count_data(self, data):
        """ Add count data to the rule. Data should be of the form {ts: count}. """
        if len(data) > 1:
            raise EAException(
                'add_count_data can only accept one count at a time')

        (ts, count), = data.items()

        event = ({self.ts_field: ts}, count)
        self.occurrences.setdefault(
            'all', EventWindow(self.timeframe('all'),
                               getTimestamp=self.get_ts)).append(event)
        self.check_for_match('all')
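
Following the documented `{ts: count}` shape, a single-entry call such as the one below (the `rule` instance and the values are illustrative) appends one weighted event to the 'all' window; passing more than one entry raises `EAException`:

    rule.add_count_data({ts_to_dt('2014-01-01T10:00:00'): 5})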
Example #4
    def garbage_collect(self, ts):
        # We add an event with a count of zero to the EventWindow for each key.
        # This will cause the EventWindow to remove events that occurred
        # more than one `timeframe` ago, and call onRemoved on them.
        default = ['all'] if 'query_key' not in self.rules else []
        # Copy the keys: check_for_match may pop entries from self.occurrences.
        for key in list(self.occurrences.keys()) or default:
            self.occurrences.setdefault(
                key, EventWindow(self.timeframe(key), getTimestamp=self.get_ts)
            ).append(({self.ts_field: ts}, 0))
            self.first_event.setdefault(key, ts)
            self.check_for_match(key)
        super(ProfiledThresholdRule, self).garbage_collect(ts)
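
`check_for_match`, called throughout these examples, is not shown. A minimal sketch for a threshold-style rule, assuming a `num_events` option in the spirit of ElastAlert's FrequencyRule (the option name and the `end` handling are assumptions, not taken from the code above):

    def check_for_match(self, key, end=False):
        # Sketch only: fire once the running total of values in the window
        # reaches the assumed `num_events` threshold, then drop the window.
        # The zero-count events appended by garbage_collect only trigger
        # pruning; they add nothing to count(). `end` is accepted to match
        # the call sites in Example #5 but is ignored here.
        if self.occurrences[key].count() >= self.rules['num_events']:
            event = self.occurrences[key].data[-1][0]
            self.add_match(event)
            self.occurrences.pop(key)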
Example #5
    def add_data(self, data):
        if 'query_key' in self.rules:
            qk = self.rules['query_key']
        else:
            qk = None

        for event in data:
            if qk:
                key = hashable(lookup_es_key(event, qk))
            else:
                # If no query_key, we use the key 'all' for all events
                key = 'all'

            # Store the timestamps of recent occurrences, per key
            self.occurrences.setdefault(
                key, EventWindow(self.timeframe(key),
                                 getTimestamp=self.get_ts)).append((event, 1))
            self.check_for_match(key, end=False)

        # We call this multiple times with the 'end' parameter because subclasses
        # may or may not want to check while only partial data has been added
        if key in self.occurrences:  # could have been emptied by previous check
            self.check_for_match(key, end=True)
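
A hypothetical call showing how events are grouped per query_key (the `rule` instance, the `hostname` field, the host values, and the `@timestamp` field name are made up for illustration; with this setup each host gets its own EventWindow):

    rule.rules['query_key'] = 'hostname'
    rule.add_data([
        {'@timestamp': ts_to_dt('2014-01-01T10:00:00'), 'hostname': 'web-1'},
        {'@timestamp': ts_to_dt('2014-01-01T10:01:00'), 'hostname': 'web-2'},
    ])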
Example #6
def test_eventwindow():
    timeframe = datetime.timedelta(minutes=10)
    window = EventWindow(timeframe, getTimestamp=lambda e: e[0]['@timestamp'])
    timestamps = [ts_to_dt(x) for x in ['2014-01-01T10:00:00',
                                        '2014-01-01T10:05:00',
                                        '2014-01-01T10:03:00',
                                        '2014-01-01T09:55:00',
                                        '2014-01-01T10:09:00']]
    for ts in timestamps:
        window.append([{'@timestamp': ts}, 1])

    timestamps.sort()
    for exp, actual in zip(timestamps[1:], window.data):
        assert actual[0]['@timestamp'] == exp

    window.append([{'@timestamp': ts_to_dt('2014-01-01T10:14:00')}, 1])
    timestamps.append(ts_to_dt('2014-01-01T10:14:00'))
    for exp, actual in zip(timestamps[3:], window.data):
        assert actual[0]['@timestamp'] == exp
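
Example #1 passes `self.ref_windows[qk].append` as the second positional argument to EventWindow, and the comment in Example #4 mentions onRemoved. Assuming that argument is indeed an on-removal callback, two windows can be chained so that events expiring from the current window flow into the reference window. A small sketch under that assumption:

    def get_ts(event):
        return event[0]['@timestamp']

    ref_window = EventWindow(datetime.timedelta(minutes=10), getTimestamp=get_ts)
    cur_window = EventWindow(datetime.timedelta(minutes=10), ref_window.append, get_ts)

    # Appending an event 15 minutes later prunes the 10:00 event from cur_window;
    # the assumed callback hands it to ref_window instead of discarding it.
    cur_window.append(({'@timestamp': ts_to_dt('2014-01-01T10:00:00')}, 1))
    cur_window.append(({'@timestamp': ts_to_dt('2014-01-01T10:15:00')}, 1))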