def test_beginning_time_seconds(self):
    """Events older than the seconds=10 query window are excluded."""
    query = SearchQuery(seconds=10)
    query.add_must(ExistsMatch('summary'))
    assert query.date_timedelta == {'seconds': 10}

    # Base event timestamped now: inside the window.
    event = {
        "utctimestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    }
    self.populate_test_event(event)

    # 11 seconds old: just outside the 10 second window.
    event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'seconds': 11})
    self.populate_test_event(event)

    # 9 seconds old: still inside the window.
    event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'seconds': 9})
    self.populate_test_event(event)

    self.flush(self.event_index_name)
    results = query.execute(self.es_client)
    # Only the "now" and 9-second-old events match.
    assert len(results['hits']) == 2
def test_execute_without_size(self):
    """execute() without an explicit size returns at most the 1000 default."""
    # Index more events than the default result size.
    for num in range(0, 1200):
        self.populate_example_event()
    # Flush so the documents are searchable before querying — the
    # sibling tests (e.g. test_execute_with_size) flush here too;
    # without it the query can race the index refresh.
    self.flush(self.event_index_name)
    query = SearchQuery()
    query.add_must(ExistsMatch('summary'))
    results = query.execute(self.es_client)
    # Default query size caps the hits at 1000.
    assert len(results['hits']) == 1000
def test_writing_event_defaults(self):
    """Saving an empty event populates all default MozDef fields."""
    query = SearchQuery()
    default_event = {}
    self.populate_test_event(default_event)
    self.flush(self.event_index_name)
    query.add_must(ExistsMatch('summary'))
    results = query.execute(self.es_client)
    assert len(results['hits']) == 1
    assert sorted(results['hits'][0].keys()) == [
        '_id', '_index', '_score', '_source', '_type'
    ]
    saved_event = results['hits'][0]['_source']
    # Every default field the pipeline should add to an empty event.
    # (The original asserted 'category' twice; the duplicate is removed.)
    expected_fields = [
        'category', 'details', 'hostname', 'mozdefhostname', 'processid',
        'processname', 'receivedtimestamp', 'severity', 'source',
        'summary', 'tags', 'timestamp', 'utctimestamp',
    ]
    for field in expected_fields:
        assert field in saved_event
def test_writing_with_type(self):
    """An event supplied with _type and _source keeps its custom type."""
    query = SearchQuery()
    event = {
        "_type": "example",
        "_source": {
            "receivedtimestamp": UnitTestSuite.current_timestamp(),
            "summary": "Test summary",
            "details": {
                "note": "Example note",
            }
        }
    }
    self.populate_test_event(event)
    self.flush(self.event_index_name)

    query.add_must(ExistsMatch('summary'))
    results = query.execute(self.es_client)

    assert len(results['hits']) == 1
    hit = results['hits'][0]
    assert sorted(hit.keys()) == [
        '_id', '_index', '_score', '_source', '_type'
    ]
    # The explicit _type and the _source payload are preserved verbatim.
    assert hit['_type'] == 'example'
    assert hit['_source']['summary'] == 'Test summary'
    assert hit['_source']['details'] == {"note": "Example note"}
def test_without_time_defined(self):
    """With no time window set, events of any age are returned."""
    query = SearchQuery()
    query.add_must(ExistsMatch('summary'))
    assert query.date_timedelta == {}

    # Current event.
    event = {
        "utctimestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    }
    self.populate_test_event(event)

    # 11 days old on both timestamps.
    event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'days': 11})
    event['receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'days': 11})
    self.populate_test_event(event)

    # 9 days old on both timestamps.
    event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'days': 9})
    event['receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'days': 9})
    self.populate_test_event(event)

    self.flush(self.event_index_name)
    results = query.execute(self.es_client)
    # All three events match because no timeframe was applied.
    assert len(results['hits']) == 3
def search_and_verify_event(self, expected_event):
    """Flush the events index, query for tagged events, and verify
    that the single stored event matches expected_event."""
    self.flush('events')
    query = SearchQuery(minutes=5)
    query.add_must(ExistsMatch('tags'))
    results = query.execute(self.es_client)
    assert len(results['hits']) == 1
    self.verify_event(results['hits'][0]['_source'], expected_event)
def main(self):
    # Addon auth failures with a bad password over the last 10 minutes.
    search_query = SearchQuery(minutes=10)
    search_query.add_must(TermMatch('_type', 'addons'))
    search_query.add_must(TermMatch('details.signatureid', 'authfail'))
    search_query.add_must(ExistsMatch('details.sourceipaddress'))
    search_query.add_must(PhraseMatch('details.msg', "The password was incorrect"))
    search_query.add_must(ExistsMatch('details.suser'))
    self.filtersManual(search_query)
    # Aggregate by source user, keeping at most 15 sample events per user.
    self.searchEventsAggregated('details.suser', samplesLimit=15)
    # Fire once a user accumulates 20 or more matching events.
    self.walkAggregations(threshold=20)
def test_execute_with_size(self):
    """An explicit size= caps the number of returned hits."""
    for _ in range(30):
        self.populate_example_event()
    self.flush(self.event_index_name)
    query = SearchQuery()
    query.add_must(ExistsMatch('summary'))
    results = query.execute(self.es_client, size=12)
    # 30 events were indexed, but only size=12 come back.
    assert len(results['hits']) == 12
def test_aggregation_with_aggregation_size(self):
    """An Aggregation with an explicit size limits the bucket count."""
    for num in range(0, 100):
        event = {'keyname': 'value' + str(num)}
        self.populate_test_event(event)
    # Flush so the documents are searchable before querying — the
    # other tests in this suite flush after populating; without it
    # the aggregation can race the index refresh.
    self.flush(self.event_index_name)
    search_query = SearchQuery()
    search_query.add_must(ExistsMatch('keyname'))
    # Request only the top 2 aggregation buckets out of 100 values.
    search_query.add_aggregation(Aggregation('keyname', 2))
    results = search_query.execute(self.es_client)
    assert len(results['aggregations']['keyname']['terms']) == 2
def searchEventsAggregated(self, aggregationPath, samplesLimit=5): """ Search events, aggregate matching ES filters by aggregationPath, store them in self.aggregations as a list of dictionaries keys: value: the text value that was found in the aggregationPath count: the hitcount of the text value events: the sampled list of events that matched allevents: the unsample, total list of matching events aggregationPath can be key.subkey.subkey to specify a path to a dictionary value relative to the _source that's returned from elastic search. ex: details.sourceipaddress """ # We automatically add the key that we're matching on # for aggregation, as a query requirement aggreg_key_exists = ExistsMatch(aggregationPath) if aggreg_key_exists not in self.main_query.must: self.main_query.add_must(aggreg_key_exists) try: esresults = self.main_query.execute(self.es, indices=self.event_indices) results = esresults['hits'] # List of aggregation values that can be counted/summarized by Counter # Example: ['*****@*****.**','*****@*****.**', '*****@*****.**'] for an email aggregField aggregationValues = [] for r in results: aggregationValues.append( getValueByPath(r['_source'], aggregationPath)) # [{value:'*****@*****.**',count:1337,events:[...]}, ...] aggregationList = [] for i in Counter(aggregationValues).most_common(): idict = { 'value': i[0], 'count': i[1], 'events': [], 'allevents': [] } for r in results: if getValueByPath(r['_source'], aggregationPath).encode( 'ascii', 'ignore') == i[0]: # copy events detail into this aggregation up to our samples limit if len(idict['events']) < samplesLimit: idict['events'].append(r) # also copy all events to a non-sampled list # so we mark all events as alerted and don't re-alert idict['allevents'].append(r) aggregationList.append(idict) self.aggregations = aggregationList self.log.debug(self.aggregations) except Exception as e: self.log.error('Error while searching events in ES: {0}'.format(e))
def query_tests(self):
    """Map each query object to the list of events it should match."""
    summary_events = [
        {'summary': 'test'},
        {'summary': 'example test summary'},
    ]
    note_events = [
        {
            'summary': 'garbage summary',
            'details': {
                'note': 'test'
            }
        },
    ]
    return {
        ExistsMatch('summary'): summary_events,
        ExistsMatch('details.note'): note_events,
    }
def main(self):
    # Squid proxy events with a proxy action, over the last 5 minutes.
    search_query = SearchQuery(minutes=5)
    search_query.add_must(TermMatch('category', 'squid'))
    search_query.add_must(ExistsMatch('details.proxyaction'))
    self.filtersManual(search_query)
    # Alert on each matching event individually.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    # CEF events flagging sensitive-file access in the last 15 minutes.
    search_query = SearchQuery(minutes=15)
    search_query.add_must(TermMatch('_type', 'cef'))
    search_query.add_must(ExistsMatch('details.dhost'))
    search_query.add_must(PhraseMatch("details.signatureid", "sensitivefiles"))
    self.filtersManual(search_query)
    # Aggregate by destination host, keeping up to 30 samples per bucket.
    self.searchEventsAggregated('details.dhost', samplesLimit=30)
    # A single matching event is enough to alert.
    self.walkAggregations(threshold=1)
def test_time_received_timestamp(self):
    """Events with either timestamp missing or old are still counted
    as long as one qualifying timestamp is within the window."""
    query = SearchQuery(seconds=10)
    query.add_must(ExistsMatch('summary'))
    assert query.date_timedelta == {'seconds': 10}

    # Event carrying only a receivedtimestamp.
    self.populate_test_event({
        "receivedtimestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    })

    # Event carrying only a utctimestamp.
    self.populate_test_event({
        "utctimestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    })

    # Event carrying both timestamps, current.
    event = {
        "utctimestamp": UnitTestSuite.current_timestamp(),
        "receivedtimestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    }
    self.populate_test_event(event)

    # Push receivedtimestamp outside the 10 second window.
    event['receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'seconds': 11})
    self.populate_test_event(event)

    # utctimestamp 9 seconds old (receivedtimestamp still 11 seconds old).
    event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'seconds': 9})
    self.populate_test_event(event)

    self.flush(self.event_index_name)
    results = query.execute(self.es_client)
    assert len(results['hits']) == 5
def main(self):
    # NSM brointel hits against the abuse.ch SSL blocklist, last 15 minutes.
    search_query = SearchQuery(minutes=15)
    search_query.add_must(TermMatch('eventsource', 'nsm'))
    search_query.add_must(TermMatch('category', 'brointel'))
    search_query.add_must(TermMatch('details.sources', 'abuse.ch SSLBL'))
    search_query.add_must(ExistsMatch('details.sourceipaddress'))
    self.filtersManual(search_query)
    # Alert on each matching event individually.
    self.searchEventsSimple()
    self.walkEvents()
def test_simple_writing_event_string(self):
    """A JSON string body is saved, indexed, and given the default type."""
    body = json.dumps({"key": "example value for string of json test"})
    self.es_client.save_event(body=body)
    self.flush(self.event_index_name)

    assert self.get_num_events() == 1

    query = SearchQuery()
    query.add_must(ExistsMatch('key'))
    results = query.execute(self.es_client)

    assert len(results['hits']) == 1
    hit = results['hits'][0]
    assert sorted(hit.keys()) == ['_id', '_index', '_score', '_source', '_type']
    assert hit['_source']['key'] == 'example value for string of json test'
    # No type was specified, so the default 'event' type is used.
    assert hit['_type'] == 'event'
def main(self):
    # Bro intel indicator hits from the known sensors, last 2 minutes.
    search_query = SearchQuery(minutes=2)
    search_query.add_must(TermMatch('category', 'bro'))
    search_query.add_must(TermMatch('source', 'intel'))
    search_query.add_must(ExistsMatch('details.seenindicator'))
    search_query.add_must(
        TermsMatch('hostname', ['sensor1', 'sensor2', 'sensor3']))
    self.filtersManual(search_query)
    # Aggregate on the indicator seen, keeping at most 10 sample events.
    self.searchEventsAggregated('details.seenindicator', samplesLimit=10)
    # Alert once an indicator shows up in 10 or more events.
    self.walkAggregations(threshold=10)
def main(self):
    self.parse_config('correlated_alerts.conf', ['url'])
    # CrowdStrike correlated-alert notices from NSM, last 15 minutes.
    search_query = SearchQuery(minutes=15)
    search_query.add_must(TermMatch('_type', 'bro'))
    search_query.add_must(TermMatch('eventsource', 'nsm'))
    search_query.add_must(TermMatch('category', 'bronotice'))
    search_query.add_must(ExistsMatch('details.sourceipaddress'))
    search_query.add_must(
        PhraseMatch('details.note', 'CrowdStrike::Correlated_Alerts'))
    self.filtersManual(search_query)
    # Alert on each matching event individually.
    self.searchEventsSimple()
    self.walkEvents()
def test_writing_with_source(self):
    """An event wrapped in _source is saved with the default 'event' type."""
    query = SearchQuery()
    default_event = {
        "_source": {
            "receivedtimestamp": UnitTestSuite.current_timestamp(),
            "summary": "Test summary",
            "details": {
                "note": "Example note",
            }
        }
    }
    self.populate_test_event(default_event)
    # Flush so the document is searchable before querying — the mirror
    # test (test_writing_with_type) flushes here; without it the query
    # can race the index refresh.
    self.flush(self.event_index_name)
    query.add_must(ExistsMatch('summary'))
    results = query.execute(self.es_client)
    assert len(results['hits']) == 1
    # No explicit _type was given, so the default 'event' type applies.
    assert results['hits'][0]['_type'] == 'event'
def main(self):
    self.parse_config('httpauthbruteforce.conf', ['url'])
    # HTTP auth bruteforcing attacker notices from NSM, last 15 minutes.
    search_query = SearchQuery(minutes=15)
    search_query.add_must(TermMatch('_type', 'bro'))
    search_query.add_must(TermMatch('eventsource', 'nsm'))
    search_query.add_must(TermMatch('category', 'bronotice'))
    search_query.add_must(ExistsMatch('details.sourceipaddress'))
    search_query.add_must(
        PhraseMatch('details.note', 'AuthBruteforcing::HTTP_AuthBruteforcing_Attacker'))
    self.filtersManual(search_query)
    # Alert on each matching event individually.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    self.parse_config('unauth_scan.conf', ['nsm_host', 'url'])
    # Address-scan notices from the configured NSM host, last 2 minutes.
    search_query = SearchQuery(minutes=2)
    search_query.add_must(TermMatch('_type', 'bro'))
    search_query.add_must(TermMatch('category', 'bronotice'))
    search_query.add_must(TermMatch('eventsource', 'nsm'))
    search_query.add_must(TermMatch('hostname', self.config.nsm_host))
    search_query.add_must(ExistsMatch('details.indicators'))
    search_query.add_must(PhraseMatch('details.note', 'Scan::Address_Scan'))
    self.filtersManual(search_query)
    # Alert on each matching event individually.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    self.parse_config('sshbruteforce_bro.conf', ['url'])
    # SSH password-guessing notices from NSM, last 15 minutes.
    search_query = SearchQuery(minutes=15)
    search_query.add_must(TermMatch('_type', 'bro'))
    search_query.add_must(TermMatch('eventsource', 'nsm'))
    search_query.add_must(TermMatch('category', 'bronotice'))
    search_query.add_must(ExistsMatch('details.sourceipaddress'))
    search_query.add_must(
        PhraseMatch('details.note', 'SSH::Password_Guessing'))
    self.filtersManual(search_query)
    # Alert on each matching event individually.
    self.searchEventsSimple()
    self.walkEvents()
def test_without_utctimestamp(self):
    """Events lacking utctimestamp never match a time-bounded query."""
    query = SearchQuery(days=10)
    query.add_must(ExistsMatch('summary'))
    assert query.date_timedelta == {'days': 10}

    # Only 'timestamp' is set; the time filter looks at utctimestamp.
    self.populate_test_event({
        "timestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    })
    self.flush(self.event_index_name)

    results = query.execute(self.es_client)
    assert len(results['hits']) == 0
def main(self):
    self.parse_config('httperrors.conf', ['url'])
    # Excessive HTTP error notices from NSM, last 15 minutes.
    search_query = SearchQuery(minutes=15)
    search_query.add_must(TermMatch('_type', 'bro'))
    search_query.add_must(TermMatch('eventsource', 'nsm'))
    search_query.add_must(TermMatch('category', 'bronotice'))
    search_query.add_must(ExistsMatch('details.sourceipaddress'))
    search_query.add_must(
        PhraseMatch('details.note', 'MozillaHTTPErrors::Excessive_HTTP_Errors_Attacker'))
    self.filtersManual(search_query)
    # Alert on each matching event individually.
    self.searchEventsSimple()
    self.walkEvents()
def verify_events(options):
    """Log an error for every recent event type missing a required field."""
    es_client = ElasticsearchClient(options.esservers)
    for required_field in options.required_fields:
        logger.debug('Looking for events without ' + required_field)
        search_query = SearchQuery(hours=12)
        search_query.add_must_not(ExistsMatch(required_field))
        # Exclude all events that are mozdef related health and stats
        search_query.add_must_not(TermMatch('_type', 'mozdefstats'))
        search_query.add_must_not(TermMatch('_type', 'mozdefhealth'))
        search_query.add_aggregation(Aggregation('_type'))
        # We don't care about the actual events, we only want the numbers
        results = search_query.execute(es_client, size=1)
        for bucket in results['aggregations']['_type']['terms']:
            logger.error(
                "Found {0} bad events of _type '{1}' missing '{2}' field".
                format(bucket['count'], bucket['key'], required_field))
def test_simple_query_execute(self):
    """End-to-end check of the result structure for a basic query."""
    query = SearchQuery()
    query.add_must(ExistsMatch('note'))
    assert query.date_timedelta == {}

    self.populate_example_event()
    self.flush(self.event_index_name)
    results = query.execute(self.es_client)

    assert results.keys() == ['hits', 'meta']
    assert results['meta'].keys() == ['timed_out']
    assert results['meta']['timed_out'] is False
    assert len(results['hits']) == 1

    hit = results['hits'][0]
    assert hit.keys() == ['_score', '_type', '_id', '_source', '_index']
    assert type(hit['_id']) == unicode
    assert hit['_type'] == 'event'
    assert hit['_index'] == datetime.now().strftime("events-%Y%m%d")

    source = hit['_source']
    assert source.keys() == ['note', 'details', 'summary']
    assert source['note'] == 'Example note'
    assert source['summary'] == 'Test Summary'
    assert source['details'].keys() == ['information']
    assert source['details']['information'] == 'Example information'

    # Unknown keys behave like a plain dict and raise KeyError.
    with pytest.raises(KeyError):
        results['abcdefg']
    with pytest.raises(KeyError):
        results['abcdefg']['test']
def test_array_input(self):
    """add_should accepts a list of queries and stores them verbatim."""
    clauses = [ExistsMatch('note'), TermMatch('note', 'test')]
    self.query.add_should(clauses)
    assert self.query.should == clauses
def test_simple_input(self):
    """add_should wraps a single query in a list."""
    clause = ExistsMatch('note')
    self.query.add_should(clause)
    assert self.query.should == [clause]
def test_array_input(self):
    """add_must_not accepts a list of queries and stores them verbatim."""
    clauses = [ExistsMatch('note'), TermMatch('note', 'test')]
    self.query.add_must_not(clauses)
    assert self.query.must_not == clauses
def test_simple_input(self):
    """add_must_not wraps a single query in a list."""
    clause = ExistsMatch('note')
    self.query.add_must_not(clause)
    assert self.query.must_not == [clause]