Example #1
def test_slack_uses_custom_slack_channel():
    rule = {
        'name': 'Test Rule',
        'type': 'any',
        'slack_webhook_url': ['http://please.dontgohere.slack'],
        'slack_channel_override': '#test-alert',
        'alert': []
    }
    load_modules(rule)
    alert = SlackAlerter(rule)
    match = {
        '@timestamp': '2016-01-01T00:00:00',
        'somefield': 'foobarbaz'
    }
    with mock.patch('requests.post') as mock_post_request:
        alert.alert([match])

    expected_data = {
        'username': '******',
        'channel': '#test-alert',
        'icon_emoji': ':ghost:',
        'attachments': [
            {
                'color': 'danger',
                'title': rule['name'],
                'text': BasicMatchString(rule, match).__str__(),
                'fields': []
            }
        ],
        'text': '',
        'parse': 'none'
    }
    mock_post_request.assert_called_once_with(rule['slack_webhook_url'][0], data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
Example #2
def test_pagerduty_alerter():
    rule = {
        'name': 'Test PD Rule',
        'type': 'any',
        'pagerduty_service_key': 'magicalbadgers',
        'pagerduty_client_name': 'ponies inc.',
        'alert': []
    }
    load_modules(rule)
    alert = PagerDutyAlerter(rule)
    match = {
        '@timestamp': '2017-01-01T00:00:00',
        'somefield': 'foobarbaz'
    }
    with mock.patch('requests.post') as mock_post_request:
        alert.alert([match])
    expected_data = {
        'client': 'ponies inc.',
        'description': 'Test PD Rule',
        'details': {
            'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n'
        },
        'event_type': 'trigger',
        'incident_key': '',
        'service_key': 'magicalbadgers',
    }
    mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
Example #3
def test_slack_uses_rule_name_when_custom_title_is_not_provided():
    rule = {
        'name': 'Test Rule',
        'type': 'any',
        'slack_webhook_url': ['http://please.dontgohere.slack'],
        'alert': []
    }
    load_modules(rule)
    alert = SlackAlerter(rule)
    match = {
        '@timestamp': '2016-01-01T00:00:00',
        'somefield': 'foobarbaz'
    }
    with mock.patch('requests.post') as mock_post_request:
        alert.alert([match])

    expected_data = {
        'username': '******',
        'icon_emoji': ':ghost:',
        'attachments': [
            {
                'color': 'danger',
                'title': rule['name'],
                'text': BasicMatchString(rule, match).__str__(),
                'fields': []
            }
        ]
    }
    mock_post_request.assert_called_once_with(rule['slack_webhook_url'][0], data=json.dumps(expected_data), headers={'content-type': 'application/json'}, proxies=None)
Example #4
def test_slack_uses_custom_slack_channel():
    rule = {
        'name': 'Test Rule',
        'type': 'any',
        'slack_webhook_url': ['http://please.dontgohere.slack'],
        'slack_channel_override': '#test-alert',
        'alert': []
    }
    load_modules(rule)
    alert = SlackAlerter(rule)
    match = {
        '@timestamp': '2016-01-01T00:00:00',
        'somefield': 'foobarbaz'
    }
    with mock.patch('requests.post') as mock_post_request:
        alert.alert([match])

    expected_data = {
        'username': '******',
        'channel': '#test-alert',
        'icon_emoji': ':ghost:',
        'attachments': [
            {
                'color': 'danger',
                'title': rule['name'],
                'text': BasicMatchString(rule, match).__str__(),
                'fields': []
            }
        ],
        'text': '',
        'parse': 'none'
    }
    mock_post_request.assert_called_once_with(rule['slack_webhook_url'][0], data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
Example #5
def test_simple_alerter():
    rule = {
        'name': 'Test Simple Rule',
        'type': 'any',
        'simple_webhook_url': 'http://test.webhook.url',
        'alert_subject': 'Cool subject',
        'alert': []
    }
    load_modules(rule)
    alert = SimplePostAlerter(rule)
    match = {
        '@timestamp': '2017-01-01T00:00:00',
        'somefield': 'foobarbaz'
    }
    with mock.patch('requests.post') as mock_post_request:
        alert.alert([match])
    expected_data = {
        'rule': rule['name'],
        'matches': [match]
    }
    mock_post_request.assert_called_once_with(
        rule['simple_webhook_url'],
        data=mock.ANY,
        headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'},
        proxies=None
    )
    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
Example #6
def test_ms_teams_uses_color_and_fixed_width_text():
    rule = {
        'name': 'Test Rule',
        'type': 'any',
        'ms_teams_webhook_url': 'http://test.webhook.url',
        'ms_teams_alert_summary': 'Alert from ElastAlert',
        'ms_teams_alert_fixed_width': True,
        'ms_teams_theme_color': '#124578',
        'alert_subject': 'Cool subject',
        'alert': []
    }
    load_modules(rule)
    alert = MsTeamsAlerter(rule)
    match = {'@timestamp': '2016-01-01T00:00:00', 'somefield': 'foobarbaz'}
    with mock.patch('requests.post') as mock_post_request:
        alert.alert([match])
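    # Build the expected fixed-width body: backticks in the match string are
    # replaced with quotes, each line is wrapped in its own ``` block, and
    # empty blocks left by blank lines are stripped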
    body = BasicMatchString(rule, match).__str__()
    body = body.replace('`', "'")
    body = "```{0}```".format('```\n\n```'.join(
        x for x in body.split('\n'))).replace('\n``````', '')
    expected_data = {
        '@type': 'MessageCard',
        '@context': 'http://schema.org/extensions',
        'summary': rule['ms_teams_alert_summary'],
        'title': rule['alert_subject'],
        'themeColor': '#124578',
        'text': body
    }
    mock_post_request.assert_called_once_with(
        rule['ms_teams_webhook_url'],
        data=mock.ANY,
        headers={'content-type': 'application/json'},
        proxies=None)
    assert expected_data == json.loads(
        mock_post_request.call_args_list[0][1]['data'])
Example #7
def test_simple_alerter():
    rule = {
        'name': 'Test Simple Rule',
        'type': 'any',
        'simple_webhook_url': 'http://test.webhook.url',
        'alert_subject': 'Cool subject',
        'alert': []
    }
    load_modules(rule)
    alert = SimplePostAlerter(rule)
    match = {
        '@timestamp': '2017-01-01T00:00:00',
        'somefield': 'foobarbaz'
    }
    with mock.patch('requests.post') as mock_post_request:
        alert.alert([match])
    expected_data = {
        'rule': rule['name'],
        'matches': [match]
    }
    mock_post_request.assert_called_once_with(
        rule['simple_webhook_url'],
        data=mock.ANY,
        headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'},
        proxies=None
    )
    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
Example #8
def test_ms_teams():
    rule = {
        'name': 'Test Rule',
        'type': 'any',
        'ms_teams_webhook_url': 'http://test.webhook.url',
        'ms_teams_alert_summary': 'Alert from ElastAlert',
        'alert_subject': 'Cool subject',
        'alert': []
    }
    load_modules(rule)
    alert = MsTeamsAlerter(rule)
    match = {
        '@timestamp': '2016-01-01T00:00:00',
        'somefield': 'foobarbaz'
    }
    with mock.patch('requests.post') as mock_post_request:
        alert.alert([match])

    expected_data = {
        '@type': 'MessageCard',
        '@context': 'http://schema.org/extensions',
        'summary': rule['ms_teams_alert_summary'],
        'title': rule['alert_subject'],
        'text': BasicMatchString(rule, match).__str__()
    }
    mock_post_request.assert_called_once_with(
        rule['ms_teams_webhook_url'],
        data=mock.ANY,
        headers={'content-type': 'application/json'},
        proxies=None
    )
    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
Example #9
def test_slack_uses_custom_title():
    rule = {
        'name': 'Test Rule',
        'type': 'any',
        'slack_webhook_url': 'http://please.dontgohere.slack',
        'alert_subject': 'Cool subject',
        'alert': []
    }
    load_modules(rule)
    alert = SlackAlerter(rule)
    match = {'@timestamp': '2016-01-01T00:00:00', 'somefield': 'foobarbaz'}
    with mock.patch('requests.post') as mock_post_request:
        alert.alert([match])

    mock_post_request.assert_called_once_with(
        rule['slack_webhook_url'],
        data=mock.ANY,
        headers={'content-type': 'application/json'},
        proxies=None)
    assert rule['alert_subject'] in mock_post_request.call_args_list[0][1]['data']
Example #10
def test_load_inline_alert_rule():
    test_rule_copy = copy.deepcopy(test_rule)
    test_rule_copy['alert'] = [{
        'email': {
            'email': '*****@*****.**'
        }
    }, {
        'email': {
            'email': '*****@*****.**'
        }
    }]
    test_config_copy = copy.deepcopy(test_config)
    with mock.patch('elastalert.config.yaml_loader') as mock_open:
        mock_open.side_effect = [test_config_copy, test_rule_copy]
        load_modules(test_rule_copy)
        assert isinstance(test_rule_copy['alert'][0],
                          elastalert.alerts.EmailAlerter)
        assert isinstance(test_rule_copy['alert'][1],
                          elastalert.alerts.EmailAlerter)
        assert '*****@*****.**' in test_rule_copy['alert'][0].rule['email']
        assert '*****@*****.**' in test_rule_copy['alert'][1].rule['email']
Example #11
def test_load_inline_alert_rule():
    test_rule_copy = copy.deepcopy(test_rule)
    test_rule_copy['alert'] = [
        {
            'email': {
                'email': '*****@*****.**'
            }
        },
        {
            'email': {
                'email': '*****@*****.**'
            }
        }
    ]
    test_config_copy = copy.deepcopy(test_config)
    with mock.patch('elastalert.config.yaml_loader') as mock_open:
        mock_open.side_effect = [test_config_copy, test_rule_copy]
        load_modules(test_rule_copy)
        assert isinstance(test_rule_copy['alert'][0], elastalert.alerts.EmailAlerter)
        assert isinstance(test_rule_copy['alert'][1], elastalert.alerts.EmailAlerter)
        assert '*****@*****.**' in test_rule_copy['alert'][0].rule['email']
        assert '*****@*****.**' in test_rule_copy['alert'][1].rule['email']
Example #12
    def run_elastalert(self, rule, conf, args):
        """ Creates an ElastAlert instance and run's over for a specific rule using either real or mock data. """

        # Load and instantiate rule
        # Pass an args namespace indicating whether we are alerting or not;
        # this prevents unnecessary initialization of unused alerters
        load_modules_args = argparse.Namespace()
        load_modules_args.debug = not args.alert
        load_modules(rule, load_modules_args)
        conf['rules'] = [rule]

        # If using mock data, make sure it's sorted and find appropriate time range
        timestamp_field = rule.get('timestamp_field', '@timestamp')
        if args.json:
            if not self.data:
                return None
            try:
                self.data.sort(key=lambda x: x[timestamp_field])
                starttime = ts_to_dt(self.data[0][timestamp_field])
                endtime = self.data[-1][timestamp_field]
                endtime = ts_to_dt(endtime) + datetime.timedelta(seconds=1)
            except KeyError as e:
                print("All documents must have a timestamp and _id: %s" % (e), file=sys.stderr)
                if args.stop_error:
                    exit(1)
                return None

            # Create mock _id for documents if it's missing
            used_ids = []

            def get_id():
                _id = ''.join([random.choice(string.ascii_letters) for i in range(16)])
                if _id in used_ids:
                    return get_id()
                used_ids.append(_id)
                return _id

            for doc in self.data:
                doc.update({'_id': doc.get('_id', get_id())})
        else:
            if args.end:
                if args.end == 'NOW':
                    endtime = ts_now()
                else:
                    try:
                        endtime = ts_to_dt(args.end)
                    except (TypeError, ValueError):
                        self.handle_error("%s is not a valid ISO8601 timestamp (YYYY-MM-DDTHH:MM:SS+XX:00)" % (args.end))
                        exit(1)
            else:
                endtime = ts_now()
            if args.start:
                try:
                    starttime = ts_to_dt(args.start)
                except (TypeError, ValueError):
                    self.handle_error("%s is not a valid ISO8601 timestamp (YYYY-MM-DDTHH:MM:SS+XX:00)" % (args.start))
                    exit(1)
            else:
                # if days given as command line argument
                if args.days > 0:
                    starttime = endtime - datetime.timedelta(days=args.days)
                else:
                    # if timeframe is given in rule
                    if 'timeframe' in rule:
                        starttime = endtime - datetime.timedelta(seconds=rule['timeframe'].total_seconds() * 1.01)
                    # default is 1 day / 24 hours
                    else:
                        starttime = endtime - datetime.timedelta(days=1)

        # Set run_every to cover the entire time range unless count query, terms query or agg query used
        # This is to prevent query segmenting which unnecessarily slows down tests
        if not rule.get('use_terms_query') and not rule.get('use_count_query') and not rule.get('aggregation_query_element'):
            conf['run_every'] = endtime - starttime

        # Instantiate ElastAlert to use mock config and special rule
        with mock.patch('elastalert.elastalert.get_rule_hashes'):
            with mock.patch('elastalert.elastalert.load_rules') as load_conf:
                load_conf.return_value = conf
                if args.alert:
                    client = ElastAlerter(['--verbose'])
                else:
                    client = ElastAlerter(['--debug'])

        # Replace get_hits_* functions to use mock data
        if args.json:
            self.mock_elastalert(client)

        # Mock writeback to return empty results
        client.writeback_es = mock.MagicMock()
        client.writeback_es.search.return_value = {"hits": {"hits": []}}

        with mock.patch.object(client, 'writeback') as mock_writeback:
            client.run_rule(rule, endtime, starttime)

            if mock_writeback.call_count:

                if args.formatted_output:
                    self.formatted_output['writeback'] = {}
                else:
                    print("\nWould have written the following documents to writeback index (default is elastalert_status):\n")

                errors = False
                for call in mock_writeback.call_args_list:
                    if args.formatted_output:
                        self.formatted_output['writeback'][call[0][0]] = json.loads(json.dumps(call[0][1], default=str))
                    else:
                        print("%s - %s\n" % (call[0][0], call[0][1]))

                    if call[0][0] == 'elastalert_error':
                        errors = True
                if errors and args.stop_error:
                    exit(1)
Example #13
    def run_elastalert(self, rule, conf, args):
        """ Creates an ElastAlert instance and run's over for a specific rule using either real or mock data. """

        # Load and instantiate rule
        load_modules(rule)
        conf['rules'] = [rule]

        # If using mock data, make sure it's sorted and find appropriate time range
        timestamp_field = rule.get('timestamp_field', '@timestamp')
        if args.json:
            if not self.data:
                return None
            try:
                self.data.sort(key=lambda x: x[timestamp_field])
                starttime = ts_to_dt(self.data[0][timestamp_field])
                endtime = self.data[-1][timestamp_field]
                endtime = ts_to_dt(endtime) + datetime.timedelta(seconds=1)
            except KeyError as e:
                print("All documents must have a timestamp and _id: %s" % (e),
                      file=sys.stderr)
                if args.stop_error:
                    exit(1)
                return None

            # Create mock _id for documents if it's missing
            used_ids = []

            def get_id():
                _id = ''.join(
                    [random.choice(string.ascii_letters) for i in range(16)])
                if _id in used_ids:
                    return get_id()
                used_ids.append(_id)
                return _id

            for doc in self.data:
                doc.update({'_id': doc.get('_id', get_id())})
        else:
            endtime = ts_now()
            starttime = endtime - datetime.timedelta(days=args.days)

        # Set run_every to cover the entire time range unless count query, terms query or agg query used
        # This is to prevent query segmenting which unnecessarily slows down tests
        if not rule.get('use_terms_query') and not rule.get(
                'use_count_query') and not rule.get(
                    'aggregation_query_element'):
            conf['run_every'] = endtime - starttime

        # Instantiate ElastAlert to use mock config and special rule
        with mock.patch('elastalert.elastalert.get_rule_hashes'):
            with mock.patch('elastalert.elastalert.load_rules') as load_conf:
                load_conf.return_value = conf
                if args.alert:
                    client = ElastAlerter(['--verbose'])
                else:
                    client = ElastAlerter(['--debug'])

        # Replace get_hits_* functions to use mock data
        if args.json:
            self.mock_elastalert(client)

        # Mock writeback to return empty results
        client.writeback_es = mock.MagicMock()
        client.writeback_es.search.return_value = {"hits": {"hits": []}}

        with mock.patch.object(client, 'writeback') as mock_writeback:
            client.run_rule(rule, endtime, starttime)

            if mock_writeback.call_count:
                print(
                    "\nWould have written the following documents to writeback index (default is elastalert_status):\n"
                )
                errors = False
                for call in mock_writeback.call_args_list:
                    print("%s - %s\n" % (call[0][0], call[0][1]))
                    if call[0][0] == 'elastalert_error':
                        errors = True
                if errors and args.stop_error:
                    exit(1)
Example #14
    def run_elastalert(self, rule, args):
        """ Creates an ElastAlert instance and run's over for a specific rule using either real or mock data. """
        # Mock configuration. Nothing here is used except run_every
        conf = {'rules_folder': 'rules',
                'run_every': datetime.timedelta(minutes=5),
                'buffer_time': datetime.timedelta(minutes=45),
                'alert_time_limit': datetime.timedelta(hours=24),
                'es_host': 'es',
                'es_port': 14900,
                'writeback_index': 'wb',
                'max_query_size': 100000,
                'old_query_limit': datetime.timedelta(weeks=1),
                'disable_rules_on_error': False}

        # Load and instantiate rule
        load_options(rule, conf)
        load_modules(rule)
        conf['rules'] = [rule]

        # If using mock data, make sure it's sorted and find appropriate time range
        timestamp_field = rule.get('timestamp_field', '@timestamp')
        if args.json:
            if not self.data:
                return
            try:
                self.data.sort(key=lambda x: x[timestamp_field])
                starttime = ts_to_dt(self.data[0][timestamp_field])
                endtime = self.data[-1][timestamp_field]
                endtime = ts_to_dt(endtime) + datetime.timedelta(seconds=1)
            except KeyError as e:
                print("All documents must have a timestamp and _id: %s" % (e), file=sys.stderr)
                return

            # Create mock _id for documents if it's missing
            used_ids = []

            def get_id():
                _id = ''.join([random.choice(string.ascii_letters) for i in range(16)])
                if _id in used_ids:
                    return get_id()
                used_ids.append(_id)
                return _id

            for doc in self.data:
                doc.update({'_id': doc.get('_id', get_id())})
        else:
            endtime = ts_now()
            starttime = endtime - datetime.timedelta(days=args.days)

        # Set run_every to cover the entire time range unless use_count_query or use_terms_query is set
        # This is to prevent query segmenting which unnecessarily slows down tests
        if not rule.get('use_terms_query') and not rule.get('use_count_query'):
            conf['run_every'] = endtime - starttime

        # Instantiate ElastAlert to use mock config and special rule
        with mock.patch('elastalert.elastalert.get_rule_hashes'):
            with mock.patch('elastalert.elastalert.load_rules') as load_conf:
                load_conf.return_value = conf
                if args.alert:
                    client = ElastAlerter(['--verbose'])
                else:
                    client = ElastAlerter(['--debug'])

        # Replace get_hits_* functions to use mock data
        if args.json:
            self.mock_elastalert(client)

        # Mock writeback for both real data and json data
        client.writeback_es = None
        with mock.patch.object(client, 'writeback') as mock_writeback:
            client.run_rule(rule, endtime, starttime)

            if mock_writeback.call_count:
                print("\nWould have written the following documents to elastalert_status:\n")
                for call in mock_writeback.call_args_list:
                    print("%s - %s\n" % (call[0][0], call[0][1]))
Example #15
    def run_elastalert(self, rule, conf, args):
        """ Creates an ElastAlert instance and run's over for a specific rule using either real or mock data. """

        # Load and instantiate rule
        # Pass an args namespace indicating whether we are alerting or not;
        # this prevents unnecessary initialization of unused alerters
        load_modules_args = argparse.Namespace()
        load_modules_args.debug = not args.alert
        load_modules(rule, load_modules_args)
        conf['rules'] = [rule]

        # If using mock data, make sure it's sorted and find appropriate time range
        timestamp_field = rule.get('timestamp_field', '@timestamp')
        if args.json:
            if not self.data:
                return None
            try:
                self.data.sort(key=lambda x: x[timestamp_field])
                starttime = ts_to_dt(self.data[0][timestamp_field])
                endtime = self.data[-1][timestamp_field]
                endtime = ts_to_dt(endtime) + datetime.timedelta(seconds=1)
            except KeyError as e:
                print("All documents must have a timestamp and _id: %s" % (e),
                      file=sys.stderr)
                if args.stop_error:
                    exit(1)
                return None

            # Create mock _id for documents if it's missing
            used_ids = []

            def get_id():
                _id = ''.join(
                    [random.choice(string.ascii_letters) for i in range(16)])
                if _id in used_ids:
                    return get_id()
                used_ids.append(_id)
                return _id

            for doc in self.data:
                doc.update({'_id': doc.get('_id', get_id())})
        else:
            if args.end:
                if args.end == 'NOW':
                    endtime = ts_now()
                else:
                    try:
                        endtime = ts_to_dt(args.end)
                    except (TypeError, ValueError):
                        self.handle_error(
                            "%s is not a valid ISO8601 timestamp (YYYY-MM-DDTHH:MM:SS+XX:00)"
                            % (args.end))
                        exit(1)
            else:
                endtime = ts_now()
            if args.start:
                try:
                    starttime = ts_to_dt(args.start)
                except (TypeError, ValueError):
                    self.handle_error(
                        "%s is not a valid ISO8601 timestamp (YYYY-MM-DDTHH:MM:SS+XX:00)"
                        % (args.start))
                    exit(1)
            else:
                # if days given as command line argument
                if args.days > 0:
                    starttime = endtime - datetime.timedelta(days=args.days)
                else:
                    # if timeframe is given in rule
                    if 'timeframe' in rule:
                        starttime = endtime - datetime.timedelta(
                            seconds=rule['timeframe'].total_seconds() * 1.01)
                    # default is 1 day / 24 hours
                    else:
                        starttime = endtime - datetime.timedelta(days=1)

        # Set run_every to cover the entire time range unless count query, terms query or agg query used
        # This is to prevent query segmenting which unnecessarily slows down tests
        if not rule.get('use_terms_query') and not rule.get(
                'use_count_query') and not rule.get(
                    'aggregation_query_element'):
            conf['run_every'] = endtime - starttime

        # Instantiate ElastAlert to use mock config and special rule
        with mock.patch('elastalert.elastalert.get_rule_hashes'):
            with mock.patch('elastalert.elastalert.load_rules') as load_conf:
                load_conf.return_value = conf
                if args.alert:
                    client = ElastAlerter(['--verbose'])
                else:
                    client = ElastAlerter(['--debug'])

        # Replace get_hits_* functions to use mock data
        if args.json:
            self.mock_elastalert(client)

        # Mock writeback to return empty results
        client.writeback_es = mock.MagicMock()
        client.writeback_es.search.return_value = {"hits": {"hits": []}}

        with mock.patch.object(client, 'writeback') as mock_writeback:
            client.run_rule(rule, endtime, starttime)

            if mock_writeback.call_count:

                if args.formatted_output:
                    self.formatted_output['writeback'] = {}
                else:
                    print(
                        "\nWould have written the following documents to writeback index (default is elastalert_status):\n"
                    )

                errors = False
                for call in mock_writeback.call_args_list:
                    if args.formatted_output:
                        self.formatted_output['writeback'][
                            call[0][0]] = json.loads(
                                json.dumps(call[0][1], default=str))
                    else:
                        print("%s - %s\n" % (call[0][0], call[0][1]))

                    if call[0][0] == 'elastalert_error':
                        errors = True
                if errors and args.stop_error:
                    exit(1)
Example #16
    def run_elastalert(self, rule, args):
        """ Creates an ElastAlert instance and runs it for a specific rule using either real or mock data. """
        # Mock configuration. Nothing here is used except run_every
        conf = {'rules_folder': 'rules',
                'run_every': datetime.timedelta(minutes=5),
                'buffer_time': datetime.timedelta(minutes=45),
                'alert_time_limit': datetime.timedelta(hours=24),
                'es_host': 'es',
                'es_port': 14900,
                'writeback_index': 'wb',
                'max_query_size': 10000,
                'old_query_limit': datetime.timedelta(weeks=1),
                'disable_rules_on_error': False}

        # Load and instantiate rule
        load_options(rule, conf)
        load_modules(rule)
        conf['rules'] = [rule]

        # If using mock data, make sure it's sorted and find appropriate time range
        timestamp_field = rule.get('timestamp_field', '@timestamp')
        if args.json:
            if not self.data:
                return None
            try:
                self.data.sort(key=lambda x: x[timestamp_field])
                starttime = ts_to_dt(self.data[0][timestamp_field])
                endtime = self.data[-1][timestamp_field]
                endtime = ts_to_dt(endtime) + datetime.timedelta(seconds=1)
            except KeyError as e:
                print("All documents must have a timestamp and _id: %s" % (e), file=sys.stderr)
                return None

            # Create mock _id for documents if it's missing
            used_ids = []

            def get_id():
                _id = ''.join([random.choice(string.ascii_letters) for i in range(16)])
                if _id in used_ids:
                    return get_id()
                used_ids.append(_id)
                return _id

            for doc in self.data:
                doc.update({'_id': doc.get('_id', get_id())})
        else:
            endtime = ts_now()
            starttime = endtime - datetime.timedelta(days=args.days)

        # Set run_every to cover the entire time range unless use_count_query or use_terms_query is set
        # This is to prevent query segmenting which unnecessarily slows down tests
        if not rule.get('use_terms_query') and not rule.get('use_count_query'):
            conf['run_every'] = endtime - starttime

        # Instantiate ElastAlert to use mock config and special rule
        with mock.patch('elastalert.elastalert.get_rule_hashes'):
            with mock.patch('elastalert.elastalert.load_rules') as load_conf:
                load_conf.return_value = conf
                if args.alert:
                    client = ElastAlerter(['--verbose'])
                else:
                    client = ElastAlerter(['--debug'])

        # Replace get_hits_* functions to use mock data
        if args.json:
            self.mock_elastalert(client)

        # Mock writeback for both real data and json data
        client.writeback_es = None
        with mock.patch.object(client, 'writeback') as mock_writeback:
            client.run_rule(rule, endtime, starttime)

            if mock_writeback.call_count:
                print("\nWould have written the following documents to elastalert_status:\n")
                for call in mock_writeback.call_args_list:
                    print("%s - %s\n" % (call[0][0], call[0][1]))
Example #17
    def run_elastalert(self, rule, conf, args):
        """ Creates an ElastAlert instance and run's over for a specific rule using either real or mock data. """

        # Load and instantiate rule
        load_modules(rule)
        conf['rules'] = [rule]

        # If using mock data, make sure it's sorted and find appropriate time range
        timestamp_field = rule.get('timestamp_field', '@timestamp')
        if args.json:
            if not self.data:
                return None
            try:
                self.data.sort(key=lambda x: x[timestamp_field])
                starttime = ts_to_dt(self.data[0][timestamp_field])
                endtime = self.data[-1][timestamp_field]
                endtime = ts_to_dt(endtime) + datetime.timedelta(seconds=1)
            except KeyError as e:
                print("All documents must have a timestamp and _id: %s" % (e), file=sys.stderr)
                return None

            # Create mock _id for documents if it's missing
            used_ids = []

            def get_id():
                _id = ''.join([random.choice(string.ascii_letters) for i in range(16)])
                if _id in used_ids:
                    return get_id()
                used_ids.append(_id)
                return _id

            for doc in self.data:
                doc.update({'_id': doc.get('_id', get_id())})
        else:
            endtime = ts_now()
            starttime = endtime - datetime.timedelta(days=args.days)

        # Set run_every to cover the entire time range unless count query, terms query or agg query used
        # This is to prevent query segmenting which unnecessarily slows down tests
        if not rule.get('use_terms_query') and not rule.get('use_count_query') and not rule.get('aggregation_query_element'):
            conf['run_every'] = endtime - starttime

        # Instantiate ElastAlert to use mock config and special rule
        with mock.patch('elastalert.elastalert.get_rule_hashes'):
            with mock.patch('elastalert.elastalert.load_rules') as load_conf:
                load_conf.return_value = conf
                if args.alert:
                    client = ElastAlerter(['--verbose'])
                else:
                    client = ElastAlerter(['--debug'])

        # Replace get_hits_* functions to use mock data
        if args.json:
            self.mock_elastalert(client)

        # Mock writeback to return empty results
        client.writeback_es = mock.MagicMock()
        client.writeback_es.search.return_value = {"hits": {"hits": []}}

        with mock.patch.object(client, 'writeback') as mock_writeback:
            client.run_rule(rule, endtime, starttime)

            if mock_writeback.call_count:
                print("\nWould have written the following documents to writeback index (default is elastalert_status):\n")
                for call in mock_writeback.call_args_list:
                    print("%s - %s\n" % (call[0][0], call[0][1]))