def main(self):
    """Alert on proxy TCP_DENIED events whose destination ends in a
    configured executable file extension.
    """
    self.parse_config("proxy_drop_executable.conf", ["extensions"])

    search_query = SearchQuery(minutes=20)
    search_query.add_must(
        [
            TermMatch("category", "proxy"),
            TermMatch("details.proxyaction", "TCP_DENIED"),
        ]
    )

    # Only notify on certain file extensions from config.
    # BUGFIX: raw string so "\." is a literal escaped dot in the regex
    # instead of an invalid (DeprecationWarning) string escape.
    filename_regex = r"/.*\.({0})/".format(self.config.extensions.replace(",", "|"))
    search_query.add_must(
        [QueryStringMatch("details.destination: {}".format(filename_regex))]
    )

    self.filtersManual(search_query)
    # Search aggregations on source IP, keep at most 10 sample events.
    self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
    # Alert every time (threshold=1) an aggregation matches.
    self.walkAggregations(threshold=1)
def test_writing_with_type(self):
    """An event stored with an explicit _type keeps that type and its _source fields."""
    query = SearchQuery()
    default_event = {
        "_type": "example",
        "_source": {
            "receivedtimestamp": UnitTestSuite.current_timestamp(),
            "summary": "Test summary",
            "details": {
                "note": "Example note",
            }
        }
    }
    self.populate_test_event(default_event)
    self.refresh(self.event_index_name)

    query.add_must(ExistsMatch('summary'))
    results = query.execute(self.es_client)

    assert len(results['hits']) == 1
    hit = results['hits'][0]
    assert sorted(hit.keys()) == ['_id', '_index', '_score', '_source', '_type']
    assert hit['_type'] == 'example'
    assert hit['_source']['summary'] == 'Test summary'
    assert hit['_source']['details'] == {"note": "Example note"}
def getESAlerts(es):
    """Fetch up to 10000 alerts from the last 50 minutes."""
    search_query = SearchQuery(minutes=50)
    # A search query must contain at least one matcher; every alert has
    # a summary, so this effectively matches all recent alerts.
    search_query.add_must(ExistsMatch('summary'))
    return search_query.execute(es, indices=['alerts'], size=10000)
def esSearch(es, macassignments=None):
    '''Search ES for an event that ties a username to a mac address
    This example searches for junos wifi correlations on authentication success
    Expecting an event like: user: [email protected]; mac: 5c:f9:38:b1:de:cf; author reason: roamed session; ssid: ANSSID; AP 46/2\n

    Returns a dict keyed on "username macaddress" with correlation details,
    or None if Elasticsearch could not be reached.
    '''
    usermacre = re.compile(r'''user: (?P<username>.*?); mac: (?P<macaddress>.*?); ''', re.IGNORECASE)
    correlations = {}

    search_query = SearchQuery(minutes=options.correlationminutes)
    search_query.add_must(TermMatch('details.program', 'AUTHORIZATION-SUCCESS'))
    search_query.add_must_not(PhraseMatch('summary', 'last-resort'))

    try:
        full_results = search_query.execute(es)
        results = full_results['hits']

        for r in results:
            fields = re.search(usermacre, r['_source']['summary'])
            if not fields:
                continue
            key = '{0} {1}'.format(fields.group('username'), fields.group('macaddress'))
            if key in correlations:
                continue
            # Map the OUI (first three octets) to an owning entity if known.
            # BUGFIX: tolerate macassignments=None (the documented default)
            # instead of raising TypeError on the membership test.
            oui = fields.group('macaddress')[0:8].lower()
            if macassignments and oui in macassignments:
                entity = macassignments[oui]
            else:
                entity = 'unknown'
            correlations[key] = dict(
                username=fields.group('username'),
                macaddress=fields.group('macaddress'),
                entity=entity,
                utctimestamp=r['_source']['utctimestamp'])
        return correlations
    except ElasticsearchBadServer:
        logger.error('Elastic Search server could not be reached, check network connectivity')
def test_simple_query_execute(self):
    """A bare ExistsMatch('note') query returns the example event with expected metadata."""
    query = SearchQuery()
    query.add_must(ExistsMatch('note'))
    assert query.date_timedelta == {}

    self.populate_example_event()
    self.refresh(self.event_index_name)
    results = query.execute(self.es_client)

    assert sorted(results.keys()) == ['hits', 'meta']
    assert list(results['meta'].keys()) == ['timed_out']
    assert results['meta']['timed_out'] is False
    assert len(results['hits']) == 1

    hit = results['hits'][0]
    assert sorted(hit.keys()) == ['_id', '_index', '_score', '_source']
    assert type(hit['_id']) == str
    assert hit['_index'] == datetime.now().strftime("events-%Y%m%d")
    assert hit['_source']['note'] == 'Example note'
    assert hit['_source']['summary'] == 'Test Summary'
    assert hit['_source']['type'] == 'event'
    assert list(hit['_source']['details'].keys()) == ['information']
    assert hit['_source']['details']['information'] == 'Example information'

    with pytest.raises(KeyError):
        results['abcdefg']
    with pytest.raises(KeyError):
        results['abcdefg']['test']
def test_without_time_defined(self):
    """A query with no time window matches events regardless of their age."""
    query = SearchQuery()
    query.add_must(ExistsMatch('summary'))
    assert query.date_timedelta == {}

    event = {
        "utctimestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    }
    self.populate_test_event(event)

    # Same dict mutated in place: an 11-day-old copy...
    subtract = UnitTestSuite.subtract_from_timestamp
    event['utctimestamp'] = subtract({'days': 11})
    event['receivedtimestamp'] = subtract({'days': 11})
    self.populate_test_event(event)

    # ...and a 9-day-old copy.
    event['utctimestamp'] = subtract({'days': 9})
    event['receivedtimestamp'] = subtract({'days': 9})
    self.populate_test_event(event)

    self.refresh(self.event_index_name)
    results = query.execute(self.es_client)
    assert len(results['hits']) == 3
def main(self):
    """Alert on sshd 'session opened' events for watched users outside the
    configured expected-scan time windows."""
    # OR together one PhraseMatch per configured user.
    superquery = None
    for user in self._config['users'].values():
        if superquery is None:
            superquery = PhraseMatch('summary', user)
        else:
            superquery |= PhraseMatch('summary', user)

    search_query = SearchQuery(minutes=10)
    search_query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('summary:"session opened"')
    ])

    # Exclude every configured expected-scan window for today.
    for expectedtime in self._config['scan_expected'].values():
        window_start = datetime.datetime.now().replace(
            hour=int(expectedtime['start_hour']),
            minute=int(expectedtime['start_minute']),
            second=int(expectedtime['start_second'])).isoformat()
        window_end = datetime.datetime.now().replace(
            hour=int(expectedtime['end_hour']),
            minute=int(expectedtime['end_minute']),
            second=int(expectedtime['end_second'])).isoformat()
        search_query.add_must_not([
            RangeMatch('utctimestamp', window_start, window_end)
        ])

    search_query.add_must(superquery)
    self.filtersManual(search_query)
    self.searchEventsAggregated('details.program', samplesLimit=10)
    self.walkAggregations(threshold=1)
def process_alert(self):
    """Walk every event from the last 20 minutes matching the configured watch term."""
    search_query = SearchQuery(minutes=20)
    search_query.add_must(QueryStringMatch(str(self.watchterm)))
    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def kibanaDashboards():
    """Return a JSON list of Kibana dashboards (name and id)."""
    resultsList = []
    try:
        es_client = ElasticsearchClient(
            (list('{0}'.format(s) for s in options.esservers)))
        search_query = SearchQuery()
        search_query.add_must(TermMatch('type', 'dashboard'))
        results = search_query.execute(es_client, indices=['.kibana'])

        for dashboard in results['hits']:
            # Strip the "dashboard:" document-id prefix if present.
            dashboard_id = dashboard['_id']
            if dashboard_id.startswith('dashboard:'):
                dashboard_id = dashboard_id.replace('dashboard:', '')
            entry = {
                'name': dashboard['_source']['dashboard']['title'],
                'id': dashboard_id
            }
            resultsList.append(entry)
    except ElasticsearchInvalidIndex as e:
        logger.error('Kibana dashboard index not found: {0}\n'.format(e))
    except Exception as e:
        logger.error('Kibana dashboard received error: {0}\n'.format(e))

    return json.dumps(resultsList)
def test_aggregation_non_existing_term(self):
    """Aggregating on a field no event contains yields an empty terms list."""
    events = [
        {"test": "value", "note": "abvc"},
        {"test": "value", "note": "abvc"},
        {"test": "value", "note": "think"},
        {"test": "value", "summary": "think"},
    ]
    for event in events:
        self.populate_test_object(event)
    self.refresh(self.event_index_name)

    search_query = SearchQuery()
    search_query.add_must(TermMatch('test', 'value'))
    search_query.add_aggregation(Aggregation('example'))
    results = search_query.execute(self.es_client)

    assert sorted(results.keys()) == ['aggregations', 'hits', 'meta']
    assert len(results['hits']) == 4
    assert list(results['aggregations'].keys()) == ['example']
    assert list(results['aggregations']['example'].keys()) == ['terms']
    assert results['aggregations']['example']['terms'] == []
def main(self):
    """Alert on squid TCP_DENIED CONNECT attempts, excluding configured ports."""
    self.parse_config('proxy_drop_non_standard_port.conf', ['excludedports'])

    query = SearchQuery(minutes=20)
    query.add_must([
        TermMatch('category', 'squid'),
        TermMatch('tags', 'squid'),
        TermMatch('details.proxyaction', 'TCP_DENIED/-'),
        TermMatch('details.tcpaction', 'CONNECT')
    ])

    # Exclude destinations on the ports listed in config.
    port_regex = "/.*:({0})/".format(self.config.excludedports.replace(',', '|'))
    query.add_must_not([QueryStringMatch('details.destination: {}'.format(port_regex))])

    self.filtersManual(query)
    # Aggregate by source IP, keeping up to 10 sample events per bucket.
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    # Fire on every aggregation with at least one event.
    self.walkAggregations(threshold=1)
def test_without_time_defined(self):
    """Without a time window, events of any age are returned."""
    query = SearchQuery()
    query.add_must(ExistsMatch('summary'))
    assert query.date_timedelta == {}

    event = {
        "utctimestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    }
    self.populate_test_event(event)

    # Re-populate the same dict at 11 days old, then at 9 days old.
    for days in (11, 9):
        event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp({'days': days})
        event['receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp({'days': days})
        self.populate_test_event(event)

    self.refresh(self.event_index_name)
    results = query.execute(self.es_client)
    assert len(results['hits']) == 3
def test_beginning_time_seconds_received_timestamp(self):
    """With a 10-second window, only events received within 10s are matched."""
    query = SearchQuery(seconds=10)
    query.add_must(ExistsMatch('summary'))
    assert query.date_timedelta == {'seconds': 10}

    event = {
        "receivedtimestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    }
    self.populate_test_event(event)

    subtract = UnitTestSuite.subtract_from_timestamp
    # 11 seconds old: outside the window.
    event['receivedtimestamp'] = subtract({'seconds': 11})
    event['utctimestamp'] = subtract({'seconds': 11})
    self.populate_test_event(event)

    # 9 seconds old: inside the window.
    event['receivedtimestamp'] = subtract({'seconds': 9})
    event['utctimestamp'] = subtract({'seconds': 9})
    self.populate_test_event(event)

    self.refresh(self.event_index_name)
    results = query.execute(self.es_client)
    assert len(results['hits']) == 2
def main(self):
    """Alert on squid TCP_DENIED requests to configured exfil domains."""
    self.parse_config('proxy_drop_exfil_domains.conf', ['exfil_domains'])

    query = SearchQuery(minutes=20)
    query.add_must([
        TermMatch('category', 'squid'),
        TermMatch('tags', 'squid'),
        TermMatch('details.proxyaction', "TCP_DENIED/-")
    ])

    # Restrict to destinations starting with a domain from the config.
    domain_regex = "/({0}).*/".format(self.config.exfil_domains.replace(',', '|'))
    query.add_must([QueryStringMatch('details.destination: {}'.format(domain_regex))])

    self.filtersManual(query)
    # Aggregate by source IP, keeping up to 10 sample events per bucket.
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    # Fire on every aggregation with at least one event.
    self.walkAggregations(threshold=1)
def process_alert(self, term):
    """Walk every event from the last 20 minutes matching `term`."""
    query = SearchQuery(minutes=20)
    query.add_must(QueryStringMatch(str(term)))
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on proxy TCP_DENIED requests for configured executable extensions."""
    self.parse_config("proxy_drop_executable.conf", ["extensions"])

    query = SearchQuery(minutes=20)
    query.add_must([
        TermMatch("category", "proxy"),
        TermMatch("details.proxyaction", "TCP_DENIED"),
    ])

    # Match destinations ending in one of the configured extensions.
    extensions = self.config.extensions.replace(",", "|")
    filename_regex = r"/.*\.({0})/".format(extensions)
    query.add_must([QueryStringMatch("details.destination: {}".format(filename_regex))])

    self.filtersManual(query)
    # Aggregate by source IP with up to 10 sample events per bucket.
    self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
    # Alert on every matching aggregation.
    self.walkAggregations(threshold=1)
def test_simple_aggregation_note_field(self):
    """Aggregating on 'note' buckets events by exact value, most frequent first.

    Dict views are wrapped in list()/sorted() so the key comparisons work
    on Python 3, consistent with the py3-style tests in this file.
    """
    events = [
        {"test": "value", "note": "abvc"},
        {"test": "value", "note": "abvc"},
        {"test": "value", "note": "think"},
        {"test": "value", "summary": "think"},
        {"test": "value", "note": "abvc space line"},
    ]
    for event in events:
        self.populate_test_object(event)
    self.refresh(self.event_index_name)

    search_query = SearchQuery()
    search_query.add_must(TermMatch('test', 'value'))
    search_query.add_aggregation(Aggregation('note'))
    results = search_query.execute(self.es_client)

    assert list(results['aggregations'].keys()) == ['note']
    assert list(results['aggregations']['note'].keys()) == ['terms']
    terms = results['aggregations']['note']['terms']
    assert len(terms) == 3
    assert sorted(terms[0].keys()) == ['count', 'key']
    assert terms[0]['count'] == 2
    assert terms[0]['key'] == 'abvc'
    assert terms[1]['count'] == 1
    assert terms[1]['key'] == 'abvc space line'
    assert terms[2]['count'] == 1
    assert terms[2]['key'] == 'think'
def test_writing_event_defaults(self):
    """Saving an empty event populates all default top-level fields."""
    query = SearchQuery()
    self.populate_test_event({})
    self.refresh(self.event_index_name)

    query.add_must(ExistsMatch('summary'))
    results = query.execute(self.es_client)
    assert len(results['hits']) == 1
    assert sorted(results['hits'][0].keys()) == [
        '_id', '_index', '_score', '_source', '_type'
    ]

    saved_event = results['hits'][0]['_source']
    expected_fields = [
        'category', 'details', 'hostname', 'mozdefhostname', 'processid',
        'processname', 'receivedtimestamp', 'severity', 'source',
        'summary', 'tags', 'timestamp', 'utctimestamp',
    ]
    for field in expected_fields:
        assert field in saved_event
def test_aggregation_multiple_layers(self):
    """Aggregating on a nested field ('details.ip') buckets events by its values.

    Dict views are wrapped in list() so the key assertions work on
    Python 3, consistent with the py3-style tests in this file.
    """
    events = [
        {"test": "value", "details": {"ip": "127.0.0.1"}},
        {"test": "value", "details": {"ip": "127.0.0.1"}},
        {"test": "value", "details": {"ip": "192.168.1.1"}},
    ]
    for event in events:
        self.populate_test_object(event)
    self.refresh(self.event_index_name)

    search_query = SearchQuery()
    search_query.add_must(TermMatch('test', 'value'))
    search_query.add_aggregation(Aggregation('details.ip'))
    results = search_query.execute(self.es_client)

    assert list(results['aggregations'].keys()) == ['details.ip']
    assert list(results['aggregations']['details.ip'].keys()) == ['terms']
    terms = results['aggregations']['details.ip']['terms']
    assert len(terms) == 2
    assert terms[0]['count'] == 2
    assert terms[0]['key'] == "127.0.0.1"
    assert terms[1]['count'] == 1
    assert terms[1]['key'] == "192.168.1.1"
def test_aggregation_non_existing_layers_term(self):
    """Aggregating on a missing nested field yields an empty terms list.

    Dict views are wrapped in list() so the key assertions work on
    Python 3, consistent with the py3-style tests in this file.
    """
    events = [
        {"test": "value", "note": "abvc"},
        {"test": "value", "note": "abvc"},
        {"test": "value", "note": "think"},
        {"test": "value", "summary": "think"},
    ]
    for event in events:
        self.populate_test_object(event)
    self.refresh(self.event_index_name)

    search_query = SearchQuery()
    search_query.add_must(TermMatch('test', 'value'))
    search_query.add_aggregation(Aggregation('details.ipinformation'))
    results = search_query.execute(self.es_client)

    assert list(results['aggregations'].keys()) == ['details.ipinformation']
    assert list(results['aggregations']['details.ipinformation'].keys()) == ['terms']
    assert len(results['aggregations']['details.ipinformation']['terms']) == 0
def main(self):
    """Alert on publickey SSH logins to releng signing hosts, minus configured exclusions."""
    search_query = SearchQuery(minutes=15)
    self.config = self.parse_json_alert_config('ssh_access_signreleng.json')

    # An empty string means no notification channel is configured.
    if self.config['channel'] == '':
        self.config['channel'] = None

    search_query.add_must([
        TermMatch('tags', 'releng'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('hostname: /{}/'.format(self.config['hostfilter'])),
        PhraseMatch('summary', 'Accepted publickey for ')
    ])

    # Each exclusion entry ANDs its key/value PhraseMatches together,
    # then the combined matcher is added as a must_not clause.
    for exclusion in self.config['exclusions']:
        exclusion_query = None
        for key, value in exclusion.items():
            phrase = PhraseMatch(key, value)
            exclusion_query = phrase if exclusion_query is None else exclusion_query + phrase
        search_query.add_must_not(exclusion_query)

    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def kibanaDashboards():
    """Return a JSON list of Kibana dashboards with direct URLs."""
    resultsList = []
    try:
        es_client = ElasticsearchClient((list('{0}'.format(s) for s in options.esservers)))
        search_query = SearchQuery()
        search_query.add_must(TermMatch('_type', 'dashboard'))
        results = search_query.execute(es_client, indices=['.kibana'])

        for dashboard in results['hits']:
            url = "%s#/%s/%s" % (options.kibanaurl, "dashboard", dashboard['_id'])
            resultsList.append({
                'name': dashboard['_source']['title'],
                'url': url
            })
    except ElasticsearchInvalidIndex as e:
        sys.stderr.write('Kibana dashboard index not found: {0}\n'.format(e))
    except Exception as e:
        sys.stderr.write('Kibana dashboard received error: {0}\n'.format(e))

    return json.dumps(resultsList)
def test_simple_query_execute(self):
    """A bare ExistsMatch('note') query returns the example event.

    Updated for Python 3: `unicode` no longer exists (ids are `str`) and
    dict views must be wrapped in sorted()/list() before comparing against
    lists, consistent with the py3-style tests in this file.
    """
    query = SearchQuery()
    query.add_must(ExistsMatch('note'))
    assert query.date_timedelta == {}

    self.populate_example_event()
    self.refresh(self.event_index_name)
    results = query.execute(self.es_client)

    assert sorted(results.keys()) == ['hits', 'meta']
    assert list(results['meta'].keys()) == ['timed_out']
    assert results['meta']['timed_out'] is False
    assert len(results['hits']) == 1

    hit = results['hits'][0]
    assert sorted(hit.keys()) == ['_id', '_index', '_score', '_source', '_type']
    assert type(hit['_id']) == str
    assert hit['_type'] == 'event'
    assert hit['_index'] == datetime.now().strftime("events-%Y%m%d")
    assert hit['_source']['note'] == 'Example note'
    assert hit['_source']['summary'] == 'Test Summary'
    assert list(hit['_source']['details'].keys()) == ['information']
    assert hit['_source']['details']['information'] == 'Example information'

    with pytest.raises(KeyError):
        results['abcdefg']
    with pytest.raises(KeyError):
        results['abcdefg']['test']
def test_writing_event_defaults(self):
    """An empty event gets all default fields filled in on write."""
    query = SearchQuery()
    self.populate_test_event({})
    self.refresh(self.event_index_name)

    query.add_must(ExistsMatch('summary'))
    results = query.execute(self.es_client)
    assert len(results['hits']) == 1
    hit = results['hits'][0]
    assert sorted(hit.keys()) == ['_id', '_index', '_score', '_source', '_type']

    saved_event = hit['_source']
    for field in ('category', 'details', 'hostname', 'mozdefhostname',
                  'processid', 'processname', 'receivedtimestamp', 'severity',
                  'source', 'summary', 'tags', 'timestamp', 'utctimestamp'):
        assert field in saved_event
def test_beginning_time_seconds_received_timestamp(self):
    """Only events received within the 10-second window are matched."""
    query = SearchQuery(seconds=10)
    query.add_must(ExistsMatch('summary'))
    assert query.date_timedelta == {'seconds': 10}

    event = {
        "receivedtimestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    }
    self.populate_test_event(event)

    # Re-populate at 11 seconds old (outside window), then 9 (inside).
    for age in ({'seconds': 11}, {'seconds': 9}):
        event['receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp(age)
        event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(age)
        self.populate_test_event(event)

    self.refresh(self.event_index_name)
    results = query.execute(self.es_client)
    assert len(results['hits']) == 2
def main(self):
    """Alert on publickey SSH logins to releng signing hosts, minus configured exclusions.

    BUGFIX: dict.iteritems() does not exist on Python 3; use items(),
    consistent with the other signreleng alert in this codebase.
    """
    search_query = SearchQuery(minutes=15)
    self.config = self.parse_json_alert_config('ssh_access_signreleng.json')

    # An empty string means no IRC channel is configured.
    if self.config['ircchannel'] == '':
        self.config['ircchannel'] = None

    search_query.add_must([
        TermMatch('tags', 'releng'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('hostname: /{}/'.format(self.config['hostfilter'])),
        PhraseMatch('summary', 'Accepted publickey for ')
    ])

    # Each exclusion entry ANDs its key/value PhraseMatches together,
    # then the combined matcher is added as a must_not clause.
    for exclusion in self.config['exclusions']:
        exclusion_query = None
        for key, value in exclusion.items():
            phrase_exclusion = PhraseMatch(key, value)
            if exclusion_query is None:
                exclusion_query = phrase_exclusion
            else:
                exclusion_query = exclusion_query + phrase_exclusion
        search_query.add_must_not(exclusion_query)

    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def search_and_verify_event(self, expected_event):
    """Fetch the single recent tagged event and verify it against expected_event."""
    self.refresh('events')
    query = SearchQuery(minutes=5)
    query.add_must(ExistsMatch('tags'))
    results = query.execute(self.es_client)
    assert len(results['hits']) == 1
    self.verify_event(results['hits'][0]['_source'], expected_event)
def test_execute_without_size(self):
    """Without an explicit size, execute() caps results at 1000 hits."""
    for _ in range(1200):
        self.populate_example_event()
    self.refresh(self.event_index_name)

    query = SearchQuery()
    query.add_must(ExistsMatch('summary'))
    results = query.execute(self.es_client)
    assert len(results['hits']) == 1000
def main(self):
    """Walk every cloudtrail event from the last hour."""
    query = SearchQuery(hours=1)
    query.add_must([TermMatch('source', 'cloudtrail')])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Aggregate events older than one day, bucketed by mozdefhostname."""
    query = SearchQuery(hours=6)
    # Anything with a utctimestamp before this cutoff is "old".
    cutoff = toUTC(datetime.now() - timedelta(days=1)).isoformat()
    query.add_must(LessThanMatch('utctimestamp', cutoff))
    self.filtersManual(query)
    self.searchEventsAggregated('mozdefhostname', samplesLimit=1000)
    self.walkAggregations(threshold=1)
def process_alert(self, alert_config):
    """Run one configured alert: its query string over its time window."""
    self.current_alert_time_window = int(alert_config['time_window'])
    self.current_alert_time_type = alert_config['time_window_type']
    # e.g. {'minutes': 30} -> SearchQuery(minutes=30)
    window = {self.current_alert_time_type: self.current_alert_time_window}
    search_query = SearchQuery(**window)
    search_query.add_must(QueryStringMatch(str(alert_config['search_query'])))
    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents(description=alert_config['description'])
def find(qes: QueryInterface, username: str, index: str) -> Optional[Entry]:
    '''Retrieve the locality state for one user from ElasticSearch.
    '''
    search = SearchQuery()
    search.add_must([
        TermMatch('type_', 'locality'),
        TermMatch('username', username),
    ])
    return qes(search, index)
def get_num_events(self):
    """Return the number of documents of _type 'event', or 0 if none exist."""
    self.refresh('events')
    search_query = SearchQuery()
    search_query.add_must(TermMatch('_type', 'event'))
    search_query.add_aggregation(Aggregation('_type'))
    results = search_query.execute(self.es_client)
    terms = results['aggregations']['_type']['terms']
    if terms:
        return terms[0]['count']
    return 0
def main(self):
    """Alert when one client accumulates enough LDAP invalid-credential failures."""
    self.parse_config('ldap_password_spray.conf', ['threshold_count', 'search_depth_min'])

    query = SearchQuery(minutes=int(self.config.search_depth_min))
    query.add_must([
        TermMatch('category', 'ldap'),
        TermMatch('details.response.error', 'LDAP_INVALID_CREDENTIALS')
    ])

    self.filtersManual(query)
    # Aggregate failures per client, keeping up to 10 sample events.
    self.searchEventsAggregated('details.client', samplesLimit=10)
    self.walkAggregations(threshold=int(self.config.threshold_count))
def getSqsStats(es):
    """Return the latest mozdefhealth SQS events from the last 15 minutes."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('type', 'mozdefhealth'),
        TermMatch('category', 'mozdef'),
        TermMatch('tags', 'sqs-latest'),
    ])
    return query.execute(es, indices=['mozdefstate'])['hits']
def getSqsStats(es):
    """Return the latest mozdefhealth SQS events from the last 15 minutes."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('_type', 'mozdefhealth'),
        TermMatch('category', 'mozdef'),
        TermMatch('tags', 'sqs-latest'),
    ])
    return query.execute(es, indices=['mozdefstate'])['hits']
def main(self):
    """Aggregate successful publickey SSH logins per host over the last 15 minutes."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        PhraseMatch('summary', 'Accepted publickey')
    ])
    self.filtersManual(query)
    self.searchEventsAggregated('hostname', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    """Walk every mig-runner sshioc event from the last 30 minutes."""
    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('tags', 'mig-runner-sshioc'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def test_aggregation_with_aggregation_size(self):
    """An explicit aggregation size caps the number of term buckets returned."""
    for num in range(100):
        self.populate_test_object({'keyname': 'value' + str(num)})
    self.refresh(self.event_index_name)

    search_query = SearchQuery()
    search_query.add_must(ExistsMatch('keyname'))
    # 100 distinct values, but only 2 buckets requested.
    search_query.add_aggregation(Aggregation('keyname', 2))
    results = search_query.execute(self.es_client)
    assert len(results['aggregations']['keyname']['terms']) == 2
def main(self):
    """Aggregate successful publickey SSH logins per host over the last 2 minutes."""
    query = SearchQuery(minutes=2)
    query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        PhraseMatch('summary', 'Accepted publickey')
    ])
    self.filtersManual(query)
    self.searchEventsAggregated('hostname', samplesLimit=10)
    self.walkAggregations(threshold=1)
def process_alert(self, alert_config):
    """Run one configured alert: field filters + query string, aggregated."""
    search_query = SearchQuery(minutes=int(alert_config.time_window))
    # Configured field/value filters plus the free-form query string.
    terms = [TermMatch(i[0], i[1]) for i in alert_config.filters]
    terms.append(QueryStringMatch(str(alert_config.search_string)))
    search_query.add_must(terms)
    self.filtersManual(search_query)
    self.searchEventsAggregated(alert_config.aggregation_key,
                                samplesLimit=int(alert_config.num_samples))
    self.walkAggregations(threshold=int(alert_config.num_aggregations),
                          config=alert_config)
def main(self):
    """Alert on PutBucketPolicy cloudtrail events that carry a policy principal."""
    query = SearchQuery(minutes=20)
    query.add_must([
        TermMatch('source', 'cloudtrail'),
        TermMatch('details.eventname', 'PutBucketPolicy'),
        ExistsMatch('details.requestparameters.bucketpolicy.statement.principal')
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on escalated user-feedback events from the last 30 minutes."""
    self._config = self.parse_json_alert_config('feedback_events.json')
    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('category', 'user_feedback'),
        TermMatch('details.action', 'escalate')
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def test_aggregation_query_execute(self):
    """A 'note' aggregation over two identical events yields one bucket of count 2.

    Updated for Python 3: `unicode` is replaced by `str`, dict views are
    wrapped in sorted()/list() before comparison, and the terms sort uses
    an explicit key so sorting a list of dicts cannot raise TypeError.
    """
    query = SearchQuery()
    query.add_must(ExistsMatch('note'))
    query.add_aggregation(Aggregation('note'))
    assert query.date_timedelta == {}

    self.populate_example_event()
    self.populate_example_event()
    self.refresh(self.event_index_name)
    results = query.execute(self.es_client)

    assert sorted(results.keys()) == ['aggregations', 'hits', 'meta']
    assert list(results['meta'].keys()) == ['timed_out']
    assert results['meta']['timed_out'] is False
    assert len(results['hits']) == 2

    # Both hits are copies of the same example event.
    for hit in results['hits']:
        assert sorted(hit.keys()) == ['_id', '_index', '_score', '_source', '_type']
        assert type(hit['_id']) == str
        assert hit['_type'] == TMP_DOC_TYPE
        assert hit['_index'] == datetime.now().strftime("events-%Y%m%d")
        assert hit['_source']['note'] == 'Example note'
        assert hit['_source']['summary'] == 'Test Summary'
        assert hit['_source']['type'] == 'event'
        assert list(hit['_source']['details'].keys()) == ['information']
        assert hit['_source']['details']['information'] == 'Example information'

    assert list(results['aggregations'].keys()) == ['note']
    assert list(results['aggregations']['note'].keys()) == ['terms']
    terms = results['aggregations']['note']['terms']
    assert len(terms) == 1
    terms.sort(key=lambda term: term['key'])
    assert terms[0]['count'] == 2
    assert terms[0]['key'] == 'Example note'
def main(self):
    """Alert on LDAP group modifications from the last 15 minutes."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'modify'),
        PhraseMatch("summary", "groups")
    ])
    self.filtersManual(query)
    # Walk each matching event individually.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on ssh processes spawned under sftp within the last 5 minutes."""
    query = SearchQuery(minutes=5)
    query.add_must([
        TermMatch('category', 'execve'),
        TermMatch('processname', 'audisp-json'),
        TermMatch('details.processname', 'ssh'),
        PhraseMatch('details.parentprocess', 'sftp')
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on LDAP delete operations from the last 15 minutes."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'delete')
    ])
    self.filtersManual(query)
    # Walk each matching event individually.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on cloudtrail StopLogging calls that were not access-denied."""
    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('source', 'cloudtrail'),
        TermMatch('details.eventname', 'StopLogging')
    ])
    # Denied attempts did not actually stop logging; skip them.
    query.add_must_not(TermMatch('errorcode', 'AccessDenied'))
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on LDAP admin changes that replace pwdAccountLockedTime."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch("details.actor", "cn=admin,dc=mozilla"),
        PhraseMatch('details.changepairs', 'replace:pwdAccountLockedTime')
    ])
    self.filtersManual(query)
    # Walk each matching event individually.
    self.searchEventsSimple()
    self.walkEvents()