def main(self):
    """Alert on recent successful SSH public-key logins, grouped by host."""
    # Look back over the last 15 minutes of syslog sshd events.
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        PhraseMatch('summary', 'Accepted publickey'),
    ])
    self.filtersManual(query)
    # Aggregate by hostname, keeping at most 10 sample events each,
    # and fire for every aggregation with at least one event.
    self.searchEventsAggregated('hostname', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    """Detect LDAP password spraying: many invalid-credential errors per client."""
    # Threshold and look-back window come from the alert's config file.
    self.parse_config('ldap_password_spray.conf',
                      ['threshold_count', 'search_depth_min'])
    query = SearchQuery(minutes=int(self.config.search_depth_min))
    query.add_must([
        TermMatch('category', 'ldap'),
        TermMatch('details.response.error', 'LDAP_INVALID_CREDENTIALS'),
    ])
    self.filtersManual(query)
    # Group failures by the originating client, keeping up to 10 samples.
    self.searchEventsAggregated('details.client', samplesLimit=10)
    self.walkAggregations(threshold=int(self.config.threshold_count))
def main(self):
    """Alert on events tagged by the mig-runner sshkey module."""
    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('category', 'event'),
        TermMatch('tags', 'mig-runner-sshkey'),
    ])
    self.filtersManual(query)
    # Emit one alert per matching event.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on user feedback events where the user chose to escalate."""
    # Alert-specific settings are loaded from a JSON config file.
    self._config = self.parse_json_alert_config('feedback_events.json')
    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('category', 'user_feedback'),
        TermMatch('details.action', 'escalate'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Detect brute forcing of a single Auth0 account via repeated failed logins."""
    self.parse_config('auth0_bruteforce_user.conf',
                      ['threshold_count', 'search_depth_min', 'severity'])
    query = SearchQuery(minutes=int(self.config.search_depth_min))
    # Skip records that carry no username at all.
    query.add_must_not(TermMatch('details.username', ''))
    query.add_must([
        TermMatch('tags', 'auth0'),
        TermMatch('details.eventname', 'Failed Login (wrong password)'),
    ])
    self.filtersManual(query)
    # Group failures per username; alert when the configured count is reached.
    self.searchEventsAggregated('details.username', samplesLimit=10)
    self.walkAggregations(threshold=int(self.config.threshold_count))
def main(self):
    """Alert when audisp-json records ssh being executed with an sftp parent process."""
    query = SearchQuery(minutes=5)
    query.add_must([
        TermMatch('category', 'execve'),
        TermMatch('processname', 'audisp-json'),
        TermMatch('details.processname', 'ssh'),
        PhraseMatch('details.parentprocess', 'sftp'),
    ])
    self.filtersManual(query)
    # One alert per matching event.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on CloudTrail PutBucketPolicy calls whose request carries a
    bucket-policy statement principal."""
    query = SearchQuery(minutes=20)
    query.add_must([
        TermMatch('source', 'cloudtrail'),
        TermMatch('details.eventname', 'PutBucketPolicy'),
        ExistsMatch(
            'details.requestparameters.bucketpolicy.statement.principal'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on LDAP modify operations whose summary mentions groups."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'modify'),
        PhraseMatch('summary', 'groups'),
    ])
    self.filtersManual(query)
    # Emit one alert per matching event.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on CloudTrail StopLogging calls that were not access-denied."""
    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('source', 'cloudtrail'),
        TermMatch('eventName', 'StopLogging'),
    ])
    # A denied attempt never actually stopped logging; skip those.
    query.add_must_not(TermMatch('errorCode', 'AccessDenied'))
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert when an S3 bucket is created with a public-read-write ACL."""
    query = SearchQuery(minutes=20)
    query.add_must([
        TermMatch('source', 'cloudtrail'),
        TermMatch('details.eventname', 'CreateBucket'),
        TermMatch('details.requestparameters.x-amz-acl', 'public-read-write'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on LDAP changes by the admin DN that replace pwdAccountLockedTime."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.actor', 'cn=admin,dc=mozilla'),
        PhraseMatch('details.changepairs', 'replace:pwdAccountLockedTime'),
    ])
    self.filtersManual(query)
    # One alert per matching event.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on every LDAP add operation seen in the last 15 minutes."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'add'),
    ])
    self.filtersManual(query)
    # One alert per matching event.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on Bro notices flagging an HTTP auth brute-force attacker."""
    self.parse_config('http_auth_bruteforce.conf', ['url'])
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('source', 'notice'),
        PhraseMatch('details.note',
                    'AuthBruteforcing::HTTP_AuthBruteforcing_Attacker'),
    ])
    self.filtersManual(query)
    # One alert per matching notice.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on geomodel notices, skipping configured user exclusions."""
    self.parse_config('geomodel.conf', ['exclusions', 'url'])
    query = SearchQuery(minutes=30)
    query.add_must([TermMatch('category', 'geomodelnotice')])
    # 'exclusions' is a comma-separated list of summary values (users) that
    # should never trigger this alert.
    for excluded in self.config.exclusions.split(','):
        query.add_must_not(TermMatch('summary', excluded))
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def enrich(
    alert: dict,
    search_window_hours: int,
    vpn_ip_cidrs: types.List[str],
    search_fn: types.Callable[[SearchQuery], types.List[dict]],
) -> dict:
    '''Search for events that describe an assignment of a VPN IP address
    to the sourceipaddress in an alert.  Returns the alert unchanged when
    no source IP is present, the IP is outside the VPN ranges, or no
    assignment event is found; otherwise attaches details.vpnassignment.
    '''
    details = alert.get('details', {})
    source_ip = details.get('sourceipaddress')

    # Nothing to enrich without a source IP inside the VPN address space.
    if source_ip is None:
        return alert
    if netaddr.IPAddress(source_ip) not in netaddr.IPSet(vpn_ip_cidrs):
        return alert

    query = SearchQuery({
        'hours': search_window_hours,
    })
    query.add_must([
        TermMatch('tags', 'vpn'),
        TermMatch('tags', 'netfilter'),
        TermMatch('details.success', 'true'),
        TermMatch('details.vpnip', source_ip),
        PhraseMatch('summary', 'netfilter add upon connection'),
    ])

    matches = [hit.get('_source', {}) for hit in search_fn(query)]
    # Most recent assignment first.
    matches.sort(key=lambda evt: toUTC(evt['utctimestamp']), reverse=True)

    if not matches:
        return alert

    newest = matches[0]
    details['vpnassignment'] = {
        'username': newest['details']['username'],
        'originalip': newest['details']['sourceipaddress'],
    }
    alert['details'] = details

    return alert
def main(self):
    """Aggregate audited write events by original user, skipping noisy processes."""
    self.parse_config('write_audit.conf', ['skipprocess'])
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'write'),
        TermMatch('details.auditkey', 'audit'),
    ])
    # 'skipprocess' is a whitespace-separated list of process names to ignore.
    for name in self.config.skipprocess.split():
        query.add_must_not(PhraseMatch('details.processname', name))
    self.filtersManual(query)
    # Alert once a user accumulates 2 or more audited writes.
    self.searchEventsAggregated('details.originaluser', samplesLimit=10)
    self.walkAggregations(threshold=2)
def main(self):
    """Alert on NSM brointel events sourced from the abuse.ch SSLBL feed."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('eventsource', 'nsm'),
        TermMatch('category', 'brointel'),
        TermMatch('details.sources', 'abuse.ch SSLBL'),
        # Only events that identify the offending source address.
        ExistsMatch('details.sourceipaddress'),
    ])
    self.filtersManual(query)
    # One alert per matching event.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Detect LDAP brute forcing: repeated invalid-credential errors per user."""
    self.parse_config('ldap_bruteforce.conf',
                      ['threshold_count', 'search_depth_min', 'host_exclusions'])
    query = SearchQuery(minutes=int(self.config.search_depth_min))
    # Ignore records with an empty user field.
    query.add_must_not(TermMatch('details.user', ''))
    query.add_must([
        TermMatch('category', 'ldap'),
        TermMatch('details.response.error', 'LDAP_INVALID_CREDENTIALS'),
    ])
    # 'host_exclusions' is a comma-separated list of servers to ignore.
    for server in self.config.host_exclusions.split(','):
        query.add_must_not([TermMatch('details.server', server)])
    self.filtersManual(query)
    self.searchEventsAggregated('details.user', samplesLimit=10)
    self.walkAggregations(threshold=int(self.config.threshold_count))
def main(self):
    """Aggregate Bro address-scan notices by source IP, constrained by
    configured include/exclude source patterns."""
    query = SearchQuery(minutes=1)
    query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('details.source', 'notice'),
        PhraseMatch('details.note', 'Scan::Address_Scan'),
        # Only sources matching the configured pattern...
        QueryStringMatch('details.sourceipaddress: {}'.format(
            self._config['sourcemustmatch'])),
    ])
    # ...and never sources matching the configured negative pattern.
    query.add_must_not([
        QueryStringMatch('details.sourceipaddress: {}'.format(
            self._config['sourcemustnotmatch'])),
    ])
    self.filtersManual(query)
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    """Alert on Bro SSH password-guessing notices."""
    self.parse_config('ssh_bruteforce_bro.conf', ['url'])
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('source', 'notice'),
        PhraseMatch('details.note', 'SSH::Password_Guessing'),
    ])
    self.filtersManual(query)
    # One alert per matching notice.
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Aggregate CloudTrail Describe calls by source IPv4 address."""
    # Look back over the last 5 minutes of events.
    query = SearchQuery(minutes=5)
    query.add_must([
        TermMatch('source', 'cloudtrail'),
        TermMatch('details.eventverb', 'Describe'),
        ExistsMatch('details.sourceipv4address'),
    ])
    self.filtersManual(query)
    # Aggregate on the caller's IPv4 address, keeping at most 2 samples,
    # and alert once an address has 5 or more matching events.
    self.searchEventsAggregated('details.sourceipv4address', samplesLimit=2)
    self.walkAggregations(threshold=5)
def main(self):
    """Alert on Bro notices flagging excessive HTTP errors from an attacker."""
    self.parse_config('http_errors.conf', ['url'])
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('source', 'notice'),
        PhraseMatch('details.note',
                    'MozillaHTTPErrors::Excessive_HTTP_Errors_Attacker'),
    ])
    self.filtersManual(query)
    # One alert per matching notice.
    self.searchEventsSimple()
    self.walkEvents()
def test_aggregation_non_existing_layers_term(self):
    """Aggregating on a dotted field that no document contains returns an
    empty terms bucket list."""
    events = [
        {"test": "value", "note": "abvc"},
        {"test": "value", "note": "abvc"},
        {"test": "value", "note": "think"},
        {"test": "value", "summary": "think"},
    ]
    for event in events:
        self.populate_test_object(event)
    self.refresh(self.event_index_name)
    search_query = SearchQuery()
    search_query.add_must(TermMatch('test', 'value'))
    search_query.add_aggregation(Aggregation('details.ipinformation'))
    results = search_query.execute(self.es_client)
    # BUGFIX: dict.keys() returns a view in Python 3, and a view never
    # compares equal to a list, so the original assertions were always
    # False. Materialize the views first (matching the sibling
    # test_aggregation_non_existing_term).
    assert list(results['aggregations'].keys()) == ['details.ipinformation']
    assert list(results['aggregations']['details.ipinformation'].keys()) == [
        'terms'
    ]
    assert len(
        results['aggregations']['details.ipinformation']['terms']) == 0
def kibanaDashboards():
    """Return a JSON-encoded list of {'name', 'id'} dicts for every Kibana
    dashboard found in the .kibana index; errors are logged and any
    dashboards gathered so far are still returned."""
    dashboards = []
    try:
        es_client = ElasticsearchClient(
            (list('{0}'.format(s) for s in options.esservers)))
        query = SearchQuery()
        query.add_must(TermMatch('type', 'dashboard'))
        results = query.execute(es_client, indices=['.kibana'])
        for hit in results['hits']:
            # Kibana prefixes document ids with 'dashboard:'; strip it.
            doc_id = hit['_id']
            if doc_id.startswith('dashboard:'):
                doc_id = doc_id.replace('dashboard:', '')
            dashboards.append({
                'name': hit['_source']['dashboard']['title'],
                'id': doc_id,
            })
    except ElasticsearchInvalidIndex as e:
        logger.error('Kibana dashboard index not found: {0}\n'.format(e))
    except Exception as e:
        logger.error('Kibana dashboard received error: {0}\n'.format(e))
    return json.dumps(dashboards)
def test_aggregation_non_existing_term(self):
    """Aggregating on a field no document has yields an empty terms list
    while the hits themselves are unaffected."""
    docs = [
        {"test": "value", "note": "abvc"},
        {"test": "value", "note": "abvc"},
        {"test": "value", "note": "think"},
        {"test": "value", "summary": "think"},
    ]
    for doc in docs:
        self.populate_test_object(doc)
    self.refresh(self.event_index_name)

    search_query = SearchQuery()
    search_query.add_must(TermMatch('test', 'value'))
    search_query.add_aggregation(Aggregation('example'))
    results = search_query.execute(self.es_client)

    assert sorted(results.keys()) == ['aggregations', 'hits', 'meta']
    assert len(results['hits']) == 4
    assert list(results['aggregations'].keys()) == ['example']
    assert list(results['aggregations']['example'].keys()) == ['terms']
    assert results['aggregations']['example']['terms'] == []
def test_array_input(self):
    """add_should accepts a list of queries and stores it verbatim."""
    query_list = [
        ExistsMatch('note'),
        TermMatch('note', 'test'),
    ]
    self.query.add_should(query_list)
    assert self.query.should == query_list
def kibanaDashboards():
    """Return a JSON-encoded list of {'name', 'url'} dicts for every Kibana
    dashboard in the .kibana index; errors are written to stderr and any
    dashboards gathered so far are still returned."""
    dashboards = []
    try:
        es_client = ElasticsearchClient(
            list('{0}'.format(s) for s in options.esservers))
        query = SearchQuery()
        # Dashboards are stored under '_type' in this index schema.
        query.add_must(TermMatch('_type', 'dashboard'))
        results = query.execute(es_client, indices=['.kibana'])
        for hit in results['hits']:
            dashboards.append({
                'name': hit['_source']['title'],
                'url': "%s#/%s/%s" % (
                    options.kibanaurl,
                    "dashboard",
                    hit['_id']
                ),
            })
    except ElasticsearchInvalidIndex as e:
        sys.stderr.write('Kibana dashboard index not found: {0}\n'.format(e))
    except Exception as e:
        sys.stderr.write('Kibana dashboard received error: {0}\n'.format(e))
    return json.dumps(dashboards)
def main(self):
    """Detect repeated Duo OpenVPN authentication failures per user."""
    query = SearchQuery(minutes=2)
    query.add_must([
        TermMatch('category', 'event'),
        TermMatch('tags', 'duosecurity'),
        PhraseMatch('details.integration', 'global and external openvpn'),
        PhraseMatch('details.result', 'FAILURE'),
    ])
    self.filtersManual(query)
    # Group failures by username, keeping at most 5 sample events,
    # and alert once a user accumulates 5 or more failures.
    self.searchEventsAggregated('details.username', samplesLimit=5)
    self.walkAggregations(threshold=5)
def test_array_input(self):
    """add_must_not accepts a list of queries and stores it verbatim."""
    query_list = [
        ExistsMatch('note'),
        TermMatch('note', 'test'),
    ]
    self.query.add_must_not(query_list)
    assert self.query.must_not == query_list
def main(self):
    """Aggregate Bro intel notices by seen indicator across a fixed sensor set."""
    query = SearchQuery(minutes=2)
    query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('source', 'intel'),
        ExistsMatch('details.seenindicator'),
        # Only consider events reported by these sensors.
        TermsMatch('hostname', ['sensor1', 'sensor2', 'sensor3']),
    ])
    self.filtersManual(query)
    # Group by indicator, keeping up to 10 samples; alert at 10+ events.
    self.searchEventsAggregated('details.seenindicator', samplesLimit=10)
    self.walkAggregations(threshold=10)