def test_beginning_time_hours(self):
    """SearchQuery(hours=10) must only match events newer than 10 hours."""
    query = SearchQuery(hours=10)
    assert query.date_timedelta == {'hours': 10}

    default_event = {
        "utctimestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    }
    self.populate_test_event(default_event)

    # Event just outside the 10 hour window -- should be excluded.
    # (Named for consistency with the seconds-based sibling test.)
    too_old_event = default_event
    too_old_event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'hours': 11})
    self.populate_test_event(too_old_event)

    # Event just inside the window -- should be included.
    not_old_event = default_event
    not_old_event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'hours': 9})
    self.populate_test_event(not_old_event)

    # Flush before searching so all three documents are visible; the
    # sibling time-window tests do this, and without it the query can
    # race the indexer and flake.
    self.flush(self.event_index_name)

    query.add_must(ExistsMatch('summary'))
    results = query.execute(self.es_client)
    assert len(results['hits']) == 2
def kibanaDashboards():
    """Return a JSON array of saved Kibana dashboards.

    Each entry has 'name' (dashboard title) and 'url' (direct link built
    from options.kibanaurl). Returns an empty JSON list if the .kibana
    index is missing or any other error occurs; errors are logged to
    stderr rather than raised.
    """
    resultsList = []
    try:
        # str(s) is equivalent to the previous '{0}'.format(s) coercion,
        # without the throwaway generator/list wrapping.
        es_client = ElasticsearchClient([str(s) for s in options.esservers])
        search_query = SearchQuery()
        search_query.add_must(TermMatch('_type', 'dashboard'))
        results = search_query.execute(es_client, indices=['.kibana'])
        for dashboard in results['hits']:
            resultsList.append({
                'name': dashboard['_source']['title'],
                'url': "%s/%s/%s" % (options.kibanaurl,
                                     "dashboard",
                                     dashboard['_source']['title'])
            })
    except ElasticsearchInvalidIndex as e:
        sys.stderr.write('Kibana dashboard index not found: {0}\n'.format(e))
    except Exception as e:
        # Broad catch is deliberate: this feeds a UI endpoint and must
        # degrade to an empty list rather than error out.
        sys.stderr.write('Kibana dashboard received error: {0}\n'.format(e))
    return json.dumps(resultsList)
def test_execute_without_size(self):
    """With no explicit size=, execute() caps results at the default 1000."""
    for num in range(0, 1200):
        self.populate_example_event()
    # Flush so all 1200 documents are searchable before querying --
    # mirrors test_execute_with_size; without it the hit count can come
    # up short and the test flakes.
    self.flush(self.event_index_name)
    query = SearchQuery()
    query.add_must(ExistsMatch('summary'))
    results = query.execute(self.es_client)
    assert len(results['hits']) == 1000
def test_writing_event_defaults(self):
    """Saving an empty event populates every default field on write."""
    query = SearchQuery()
    default_event = {}
    self.populate_test_event(default_event)
    self.flush(self.event_index_name)

    query.add_must(ExistsMatch('summary'))
    results = query.execute(self.es_client)
    assert len(results['hits']) == 1
    assert sorted(results['hits'][0].keys()) == [
        '_id', '_index', '_score', '_source', '_type'
    ]
    saved_event = results['hits'][0]['_source']
    # Every field the write path should have defaulted. The original
    # asserted 'category' twice; the duplicate is removed here.
    expected_fields = [
        'category', 'details', 'hostname', 'mozdefhostname', 'processid',
        'processname', 'receivedtimestamp', 'severity', 'source',
        'summary', 'tags', 'timestamp', 'utctimestamp',
    ]
    for field in expected_fields:
        assert field in saved_event
def test_writing_with_type(self):
    """An event saved with an explicit _type keeps it on read-back."""
    search = SearchQuery()
    typed_event = {
        "_type": "example",
        "_source": {
            "receivedtimestamp": UnitTestSuite.current_timestamp(),
            "summary": "Test summary",
            "details": {
                "note": "Example note",
            }
        }
    }
    self.populate_test_event(typed_event)
    self.flush(self.event_index_name)

    search.add_must(ExistsMatch('summary'))
    results = search.execute(self.es_client)
    assert len(results['hits']) == 1

    hit = results['hits'][0]
    assert sorted(hit.keys()) == [
        '_id', '_index', '_score', '_source', '_type'
    ]
    assert hit['_type'] == 'example'
    assert hit['_source']['summary'] == 'Test summary'
    assert hit['_source']['details'] == {
        "note": "Example note"
    }
def test_without_time_defined(self):
    """A SearchQuery with no time window matches events of any age."""
    query = SearchQuery()
    query.add_must(ExistsMatch('summary'))
    assert query.date_timedelta == {}

    base_event = {
        "utctimestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    }
    self.populate_test_event(base_event)

    # An event dated 11 days back...
    base_event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'days': 11})
    self.populate_test_event(base_event)

    # ...and one 9 days back; with no window, all three should match.
    recent_event = base_event
    recent_event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'days': 9})
    self.populate_test_event(recent_event)
    self.flush(self.event_index_name)

    results = query.execute(self.es_client)
    assert len(results['hits']) == 3
def esSearch(es):
    """Aggregate event counts per category over the configured window.

    Returns a mozdef 'stats' event dict ready for insertion, or None
    (implicitly) when Elasticsearch is unreachable.
    """
    search_query = SearchQuery(minutes=options.aggregationminutes)
    search_query.add_aggregation(Aggregation('category'))
    try:
        results = search_query.execute(es)
        host = socket.gethostname()
        stats_event = {
            'utctimestamp': toUTC(datetime.now()).isoformat(),
            'category': 'stats',
            'hostname': host,
            'mozdefhostname': host,
            'severity': 'INFO',
            'source': 'mozdef',
            'tags': ['mozdef', 'stats'],
            'summary': 'Aggregated category counts',
            'processid': os.getpid(),
            'processname': sys.argv[0],
            'details': {'counts': []},
        }
        # One {category: count} entry per aggregation bucket.
        for bucket in results['aggregations']['category']['terms']:
            stats_event['details']['counts'].append(
                {bucket['key']: bucket['count']})
        return stats_event
    except ElasticsearchBadServer:
        logger.error(
            'Elastic Search server could not be reached, check network connectivity'
        )
def test_beginning_time_seconds_received_timestamp(self):
    """seconds=10 window filters on receivedtimestamp; 11s-old is dropped."""
    query = SearchQuery(seconds=10)
    query.add_must(ExistsMatch('summary'))
    assert query.date_timedelta == {'seconds': 10}

    event = {
        "receivedtimestamp": UnitTestSuite.current_timestamp(),
        "summary": "Test summary",
        "details": {
            "note": "Example note",
        }
    }
    self.populate_test_event(event)

    # Just outside the window -> filtered out.
    event['receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'seconds': 11})
    self.populate_test_event(event)

    # Just inside the window -> kept.
    event['receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp(
        {'seconds': 9})
    self.populate_test_event(event)

    self.flush(self.event_index_name)
    results = query.execute(self.es_client)
    assert len(results['hits']) == 2
def search_and_verify_event(self, expected_event):
    """Find the single tagged event from the last 5 minutes and verify
    its _source against expected_event."""
    self.flush('events')
    query = SearchQuery(minutes=5)
    query.add_must(ExistsMatch('tags'))
    hits = query.execute(self.es_client)['hits']
    assert len(hits) == 1
    self.verify_event(hits[0]['_source'], expected_event)
def main(self):
    # Match every cloudtrail-sourced event from the last hour and walk
    # each hit individually.
    query = SearchQuery(hours=1)
    query.add_must([TermMatch('source', 'cloudtrail')])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def test_execute_with_size(self):
    """An explicit size= caps the number of returned hits."""
    for _ in range(0, 30):
        self.populate_example_event()
    self.flush(self.event_index_name)

    query = SearchQuery()
    query.add_must(ExistsMatch('summary'))
    hits = query.execute(self.es_client, size=12)['hits']
    assert len(hits) == 12
def main(self):
    # Events indexed in the last 6 hours whose utctimestamp is more than
    # a day old (stale/lagging clocks), aggregated per mozdefhostname.
    query = SearchQuery(hours=6)
    cutoff = toUTC(datetime.now() - timedelta(days=1)).isoformat()
    query.add_must(LessThanMatch('utctimestamp', cutoff))
    self.filtersManual(query)
    self.searchEventsAggregated('mozdefhostname', samplesLimit=1000)
    self.walkAggregations(threshold=1)
def get_object_by_id(self, object_id, indices):
    """Fetch a single document by its _id from the given indices.

    Returns the first matching hit dict, or None when no document with
    that id exists.
    """
    search_query = SearchQuery()
    search_query.add_must(TermMatch('_id', object_id))
    hits = search_query.execute(self, indices=indices)['hits']
    # Truthiness instead of len(...) == 0; first hit wins since _id is
    # unique within an index.
    return hits[0] if hits else None
def get_object_by_id(self, object_id, indices):
    """Look up one document by _id across indices.

    Returns the matching hit dict, or None if nothing matched.
    """
    search_query = SearchQuery()
    search_query.add_must(TermMatch('_id', object_id))
    hits = search_query.execute(self, indices=indices)['hits']
    # Empty-list truthiness replaces the explicit len() == 0 check.
    return hits[0] if hits else None
def test_aggregation_without_must_fields(self):
    """An aggregation-only query (no must clauses) still counts events."""
    event = self.generate_default_event()
    # The generated utctimestamp field is called here, so it is
    # presumably a timestamp factory callable -- confirm against
    # generate_default_event.
    event['_source']['utctimestamp'] = event['_source']['utctimestamp']()
    self.populate_test_event(event)

    query = SearchQuery(minutes=10)
    query.add_aggregation(Aggregation('summary'))
    aggregations = query.execute(self.es_client)['aggregations']
    assert aggregations['summary']['terms'][0]['count'] == 1
def searchForSSHKeys(es):
    """Return raw results for sshd public-key acceptance events from the
    last 5 minutes."""
    query = SearchQuery(minutes=5)
    query.add_must([
        TermMatch('_type', 'event'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('summary:found matching key accepted publickey'),
    ])
    return query.execute(es)
def getFrontendStats(es):
    """Return the hits list of the latest mozdefhealth events from the
    last 15 minutes of the 'events' index."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('_type', 'mozdefhealth'),
        TermMatch('category', 'mozdef'),
        TermMatch('tags', 'latest'),
    ])
    return query.execute(es, indices=['events'])['hits']
def process_alert(self, alert_config):
    """Run one generically-configured aggregated alert search."""
    query = SearchQuery(minutes=int(alert_config.time_window))
    # Each configured (field, value) filter becomes an exact-term
    # clause; the free-form search string is appended last.
    clauses = [TermMatch(f[0], f[1]) for f in alert_config.filters]
    clauses.append(QueryStringMatch(str(alert_config.search_string)))
    query.add_must(clauses)
    self.filtersManual(query)
    self.searchEventsAggregated(alert_config.aggregation_key,
                                samplesLimit=int(alert_config.num_samples))
    self.walkAggregations(threshold=int(alert_config.num_aggregations),
                          config=alert_config)
def main(self):
    # Walk every geomodel notice event from the last half hour.
    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('_type', 'event'),
        TermMatch('category', 'geomodelnotice'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    # sshd publickey logins via syslog over the last 2 minutes,
    # aggregated per destination hostname.
    query = SearchQuery(minutes=2)
    query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        PhraseMatch('summary', 'Accepted publickey')
    ])
    self.filtersManual(query)
    self.searchEventsAggregated('details.hostname', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    # Walk each mig-runner-sshioc tagged event from the last 30 minutes.
    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('tags', 'mig-runner-sshioc'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    # Squid events carrying a proxyaction detail, last 5 minutes.
    query = SearchQuery(minutes=5)
    query.add_must([
        TermMatch('category', 'squid'),
        ExistsMatch('details.proxyaction'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    # Alert-specific settings live in the JSON config.
    self._config = self.parse_json_alert_config('feedback_events.json')
    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('category', 'user_feedback'),
        TermMatch('details.action', 'escalate')
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    # LDAP admin unlocking accounts (pwdAccountLockedTime replaced),
    # last 15 minutes; walk each matching event.
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch("details.actor", "cn=admin,dc=mozilla"),
        PhraseMatch('details.changepairs', 'replace:pwdAccountLockedTime')
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    # ssh processes spawned from sftp parents (audisp-json execve
    # events), last 5 minutes.
    query = SearchQuery(minutes=5)
    query.add_must([
        TermMatch('category', 'execve'),
        TermMatch('processname', 'audisp-json'),
        TermMatch('details.processname', 'ssh'),
        PhraseMatch('details.parentprocess', 'sftp')
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    # Failsafe Duo logins over the last 15 minutes, aggregated per
    # hostname with at most 10 sample events kept per bucket; a single
    # matching event is enough to alert.
    query = SearchQuery(minutes=15)
    query.add_must(PhraseMatch('summary', 'Failsafe Duo login'))
    self.filtersManual(query)
    self.searchEventsAggregated('details.hostname', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    # LDAP 'add' changes from the last 15 minutes; one alert per event.
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'add')
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    # LDAP group modifications from the last 15 minutes; one alert
    # per event.
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'modify'),
        PhraseMatch("summary", "groups")
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    # 'url' is the only required config key for this alert.
    self.parse_config('duo_authfail.conf', ['url'])
    # Duo FRAUD results with both source IP and username present,
    # last 15 minutes.
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'event'),
        ExistsMatch('details.sourceipaddress'),
        ExistsMatch('details.username'),
        PhraseMatch('details.result', 'FRAUD')
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    # CEF sensitive-file signatures from the last 15 minutes,
    # aggregated per destination host (up to 30 samples each); any
    # single match raises an alert.
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('_type', 'cef'),
        ExistsMatch('details.dhost'),
        PhraseMatch("details.signatureid", "sensitivefiles")
    ])
    self.filtersManual(query)
    self.searchEventsAggregated('details.dhost', samplesLimit=30)
    self.walkAggregations(threshold=1)
def main(self):
    # OR together one phrase match per configured user. With an empty
    # user list this stays None, matching the original behavior.
    user_query = None
    for user in self._config['users']:
        clause = PhraseMatch('summary', user)
        user_query = clause if user_query is None else user_query | clause

    # Daily window (05:50-06:00 local) excluded from alerting.
    window_start = datetime.datetime.now().replace(
        hour=5, minute=50, second=0).isoformat()
    window_end = datetime.datetime.now().replace(
        hour=6, minute=0, second=0).isoformat()

    query = SearchQuery(minutes=5)
    query.add_must([
        TermMatch('_type', 'event'),
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('summary:"session opened"'),
    ])
    query.add_must_not([RangeMatch('utctimestamp', window_start, window_end)])
    query.add_must(user_query)
    self.filtersManual(query)
    self.searchEventsAggregated('details.hostname', samplesLimit=10)
    self.walkAggregations(threshold=1)