def esLdapResults(begindateUTC=None, enddateUTC=None):
    '''an ES query/facet to count success/failed logins'''
    resultsList = list()
    if begindateUTC is None:
        begindateUTC = datetime.now() - timedelta(hours=1)
        begindateUTC = toUTC(begindateUTC)
    if enddateUTC is None:
        enddateUTC = datetime.now()
        enddateUTC = toUTC(enddateUTC)
    try:
        es_client = ElasticsearchClient(list('{0}'.format(s) for s in options.esservers))

        search_query = SearchQuery()
        range_match = RangeMatch('utctimestamp', begindateUTC, enddateUTC)
        search_query.add_must(range_match)
        search_query.add_must(TermMatch('tags', 'ldap'))
        search_query.add_must(TermMatch('details.result', 'LDAP_INVALID_CREDENTIALS'))
        search_query.add_aggregation(Aggregation('details.result'))
        search_query.add_aggregation(Aggregation('details.dn'))
        results = search_query.execute(es_client, indices=['events'])

        stoplist = ('o', 'mozilla', 'dc', 'com', 'mozilla.com',
                    'mozillafoundation.org', 'org', 'mozillafoundation')

        for t in results['aggregations']['details.dn']['terms']:
            if t['key'] in stoplist:
                continue
            failures = 0
            success = 0
            dn = t['key']

            # per-dn follow-up query: count each result type for this dn
            details_query = SearchQuery()
            details_query.add_must(range_match)
            details_query.add_must(TermMatch('tags', 'ldap'))
            details_query.add_must(TermMatch('details.dn', dn))
            details_query.add_aggregation(Aggregation('details.result'))
            details_results = details_query.execute(es_client)

            for bucket in details_results['aggregations']['details.result']['terms']:
                if bucket['key'].upper() == 'LDAP_SUCCESS':
                    success = bucket['count']
                if bucket['key'].upper() == 'LDAP_INVALID_CREDENTIALS':
                    failures = bucket['count']

            resultsList.append(
                dict(
                    dn=dn,
                    failures=failures,
                    success=success,
                    begin=begindateUTC.isoformat(),
                    end=enddateUTC.isoformat()
                )
            )

        return json.dumps(resultsList)
    except Exception as e:
        sys.stderr.write('Error trying to get ldap results: {0}\n'.format(e))
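# Usage sketch (hypothetical): decode the JSON summary produced by
# esLdapResults() and print per-dn counts. Assumes the surrounding module's
# imports (json) and a configured options.esservers; the keys used below
# (dn, failures, success) come straight from the dicts built above.
summary = esLdapResults()
if summary is not None:  # the function returns None if the ES query raised
    for entry in json.loads(summary):
        print('{0}: {1} failures / {2} successes'.format(
            entry['dn'], entry['failures'], entry['success']))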
def main(self):
    self.parse_config('proxy_drop_non_standard_port.conf', ['excludedports'])

    search_query = SearchQuery(minutes=20)
    search_query.add_must([
        TermMatch('category', 'squid'),
        TermMatch('tags', 'squid'),
        TermMatch('details.proxyaction', 'TCP_DENIED/-'),
        TermMatch('details.tcpaction', 'CONNECT')
    ])

    # Only notify on certain ports from config
    port_regex = "/.*:({0})/".format(
        self.config.excludedports.replace(',', '|'))
    search_query.add_must_not(
        [QueryStringMatch('details.destination: {}'.format(port_regex))])

    self.filtersManual(search_query)

    # Search aggregations on field 'details.sourceipaddress', keep at most
    # 10 samples of events per aggregation
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    # alert when >= threshold matching events in an aggregation; every
    # match should alert here, so the threshold is 1
    self.walkAggregations(threshold=1)
def main(self):
    # OR together a PhraseMatch for every configured user so a single
    # clause covers them all
    superquery = None
    run = 0
    for user in self._config['users'].values():
        if run == 0:
            superquery = PhraseMatch('summary', user)
        else:
            superquery |= PhraseMatch('summary', user)
        run += 1

    search_query = SearchQuery(minutes=10)
    search_query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('summary:"session opened"')
    ])

    # Exclude sessions that fall inside any expected scan window
    for expectedtime in self._config['scan_expected'].values():
        r1 = datetime.datetime.now().replace(
            hour=int(expectedtime['start_hour']),
            minute=int(expectedtime['start_minute']),
            second=int(expectedtime['start_second'])).isoformat()
        r2 = datetime.datetime.now().replace(
            hour=int(expectedtime['end_hour']),
            minute=int(expectedtime['end_minute']),
            second=int(expectedtime['end_second'])).isoformat()
        search_query.add_must_not([
            RangeMatch('utctimestamp', r1, r2)
        ])

    search_query.add_must(superquery)

    self.filtersManual(search_query)
    self.searchEventsAggregated('details.program', samplesLimit=10)
    self.walkAggregations(threshold=1)
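# Illustrative only: the shape of the 'scan_expected' config consumed above.
# Each entry names a daily window; the inner keys are exactly the ones the
# loop reads, and the values pass through int(). The entry name and times
# shown here are made up.
# scan_expected = {
#     'nightly_scan': {
#         'start_hour': '2', 'start_minute': '0', 'start_second': '0',
#         'end_hour': '3', 'end_minute': '30', 'end_second': '0',
#     },
# }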
def main(self): self.parse_config("proxy_drop_executable.conf", ["extensions"]) search_query = SearchQuery(minutes=20) search_query.add_must([ TermMatch("category", "proxy"), TermMatch("details.proxyaction", "TCP_DENIED"), ]) # Only notify on certain file extensions from config filename_regex = r"/.*\.({0})/".format( self.config.extensions.replace(",", "|")) search_query.add_must([ QueryStringMatch("details.destination: {}".format(filename_regex)) ]) self.filtersManual(search_query) # Search aggregations on field 'hostname', keep X samples of # events at most self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10) # alert when >= X matching events in an aggregation # I think it makes sense to alert every time here self.walkAggregations(threshold=1)
def main(self):
    search_query = SearchQuery(minutes=15)
    self.config = self.parse_json_alert_config('ssh_access_signreleng.json')

    if self.config['channel'] == '':
        self.config['channel'] = None

    search_query.add_must([
        TermMatch('tags', 'releng'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('hostname: /{}/'.format(self.config['hostfilter'])),
        PhraseMatch('summary', 'Accepted publickey for ')
    ])

    # Combine each exclusion's key/value pairs into a single clause,
    # then exclude that clause from the search
    for exclusion in self.config['exclusions']:
        exclusion_query = None
        for key, value in exclusion.items():
            phrase_exclusion = PhraseMatch(key, value)
            if exclusion_query is None:
                exclusion_query = phrase_exclusion
            else:
                exclusion_query = exclusion_query + phrase_exclusion
        search_query.add_must_not(exclusion_query)

    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    self.parse_config('proxy_drop_exfil_domains.conf', ['exfil_domains'])

    search_query = SearchQuery(minutes=20)
    search_query.add_must([
        TermMatch('category', 'squid'),
        TermMatch('tags', 'squid'),
        TermMatch('details.proxyaction', "TCP_DENIED/-")
    ])

    # Only notify on certain domains listed in the config
    domain_regex = "/({0}).*/".format(
        self.config.exfil_domains.replace(',', '|'))
    search_query.add_must(
        [QueryStringMatch('details.destination: {}'.format(domain_regex))])

    self.filtersManual(search_query)

    # Search aggregations on field 'details.sourceipaddress', keep at most
    # 10 samples of events per aggregation
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    # alert when >= threshold matching events in an aggregation; every
    # match should alert here, so the threshold is 1
    self.walkAggregations(threshold=1)
def test_populated_array(self):
    self.query.add_should(ExistsMatch('details'))
    self.query.add_should([ExistsMatch('note'), TermMatch('note', 'test')])
    assert self.query.should == [
        ExistsMatch('details'),
        ExistsMatch('note'),
        TermMatch('note', 'test')
    ]
def find(qes: QueryInterface, username: str, index: str) -> Optional[Entry]:
    '''Retrieve the locality state for one user from ElasticSearch.
    '''
    search = SearchQuery()
    search.add_must([
        TermMatch('type_', 'locality'),
        TermMatch('username', username)
    ])

    return qes(search, index)
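# Usage sketch (hypothetical): look up one user's locality state. `query_es`
# stands in for whatever QueryInterface implementation the caller supplies
# (e.g. a closure around an ES client); the username and index name below
# are made up for illustration.
state = find(query_es, 'jdoe', 'localities')
if state is None:
    print('no recorded locality state for jdoe')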
def main(self):
    self.parse_config('ldap_password_spray.conf',
                      ['threshold_count', 'search_depth_min'])

    search_query = SearchQuery(minutes=int(self.config.search_depth_min))
    search_query.add_must([
        TermMatch('category', 'ldap'),
        TermMatch('details.response.error', 'LDAP_INVALID_CREDENTIALS')
    ])

    self.filtersManual(search_query)
    self.searchEventsAggregated('details.client', samplesLimit=10)
    self.walkAggregations(threshold=int(self.config.threshold_count))
def getSqsStats(es):
    search_query = SearchQuery(minutes=15)
    search_query.add_must([
        TermMatch('type', 'mozdefhealth'),
        TermMatch('category', 'mozdef'),
        TermMatch('tags', 'sqs-latest'),
    ])
    results = search_query.execute(es, indices=['mozdefstate'])
    return results['hits']
def test_populated_array(self):
    self.query.add_must_not(ExistsMatch('details'))
    self.query.add_must_not([ExistsMatch('note'), TermMatch('note', 'test')])
    assert self.query.must_not == [
        ExistsMatch('details'),
        ExistsMatch('note'),
        TermMatch('note', 'test')
    ]
def main(self):
    search_query = SearchQuery(minutes=2)
    search_query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        PhraseMatch('summary', 'Accepted publickey')
    ])

    self.filtersManual(search_query)
    self.searchEventsAggregated('hostname', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    self._config = self.parse_json_alert_config('feedback_events.json')

    search_query = SearchQuery(minutes=30)
    search_query.add_must([
        TermMatch('category', 'user_feedback'),
        TermMatch('details.action', 'escalate')
    ])

    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    search_query = SearchQuery(minutes=30)
    search_query.add_must([
        TermMatch('category', 'event'),
        TermMatch('tags', 'mig-runner-sshkey')
    ])

    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    search_query = SearchQuery(minutes=30)
    search_query.add_must([
        TermMatch('source', 'cloudtrail'),
        TermMatch('eventName', 'StopLogging')
    ])
    search_query.add_must_not(TermMatch('errorCode', 'AccessDenied'))

    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    self.parse_config('auth0_bruteforce_user.conf',
                      ['threshold_count', 'search_depth_min', 'severity'])

    search_query = SearchQuery(minutes=int(self.config.search_depth_min))
    search_query.add_must_not(TermMatch('details.username', ''))
    search_query.add_must([
        TermMatch('tags', 'auth0'),
        TermMatch('details.eventname', 'Failed Login (wrong password)'),
    ])

    self.filtersManual(search_query)
    self.searchEventsAggregated('details.username', samplesLimit=10)
    self.walkAggregations(threshold=int(self.config.threshold_count))
def main(self):
    search_query = SearchQuery(minutes=5)
    search_query.add_must([
        TermMatch('category', 'execve'),
        TermMatch('processname', 'audisp-json'),
        TermMatch('details.processname', 'ssh'),
        PhraseMatch('details.parentprocess', 'sftp')
    ])

    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    search_query = SearchQuery(minutes=15)
    search_query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'add')
    ])

    self.filtersManual(search_query)

    # Search events
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    search_query = SearchQuery(minutes=20)
    search_query.add_must([
        TermMatch('source', 'cloudtrail'),
        TermMatch('details.eventname', 'PutBucketPolicy'),
        ExistsMatch('details.requestparameters.bucketpolicy.statement.principal')
    ])

    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self): search_query = SearchQuery(minutes=15) search_query.add_must([ TermMatch('category', 'ldapChange'), TermMatch("details.actor", "cn=admin,dc=mozilla"), PhraseMatch('details.changepairs', 'replace:pwdAccountLockedTime') ]) self.filtersManual(search_query) # Search events self.searchEventsSimple() self.walkEvents()
def main(self):
    search_query = SearchQuery(minutes=20)
    search_query.add_must([
        TermMatch('source', 'cloudtrail'),
        TermMatch('details.eventname', 'CreateBucket'),
        TermMatch('details.requestparameters.x-amz-acl', 'public-read-write'),
    ])

    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    search_query = SearchQuery(minutes=15)
    search_query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'modify'),
        PhraseMatch('summary', 'groups')
    ])

    self.filtersManual(search_query)

    # Search events
    self.searchEventsSimple()
    self.walkEvents()
def enrich(
    alert: dict,
    search_window_hours: int,
    vpn_ip_cidrs: types.List[str],
    search_fn: types.Callable[[SearchQuery], types.List[dict]],
) -> dict:
    '''Search for events that describe an assignment of a VPN IP address
    to the sourceipaddress in an alert.
    '''
    details = alert.get('details', {})
    source_ip = details.get('sourceipaddress')

    if source_ip is None:
        return alert

    if netaddr.IPAddress(source_ip) not in netaddr.IPSet(vpn_ip_cidrs):
        return alert

    search_vpn_assignment = SearchQuery({
        'hours': search_window_hours,
    })
    search_vpn_assignment.add_must([
        TermMatch('tags', 'vpn'),
        TermMatch('tags', 'netfilter'),
        TermMatch('details.success', 'true'),
        TermMatch('details.vpnip', source_ip),
        PhraseMatch('summary', 'netfilter add upon connection'),
    ])

    assign_events = sorted(
        [hit.get('_source', {}) for hit in search_fn(search_vpn_assignment)],
        key=lambda evt: toUTC(evt['utctimestamp']),
        reverse=True,  # Sort into descending order from most recent to least.
    )

    if len(assign_events) == 0:
        return alert

    event = assign_events[0]

    details['vpnassignment'] = {
        'username': event['details']['username'],
        'originalip': event['details']['sourceipaddress'],
    }
    alert['details'] = details

    return alert
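# Usage sketch (hypothetical): call enrich() with a stub search function.
# A real search_fn would execute the SearchQuery against ES and return hits;
# the alert, CIDR, and window below are made up for illustration.
def _search(query: SearchQuery) -> list:
    return []  # pretend no VPN assignment events were found

alert = {'details': {'sourceipaddress': '10.48.75.2'}}
enriched = enrich(alert, search_window_hours=24,
                  vpn_ip_cidrs=['10.48.75.0/24'], search_fn=_search)
# With no assignment events, the alert comes back unchanged; with hits,
# it gains a details.vpnassignment block.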
def main(self):
    self.parse_config('geomodel.conf', ['exclusions', 'url'])

    search_query = SearchQuery(minutes=30)
    search_query.add_must([TermMatch('category', 'geomodelnotice')])

    # Allow the ability to ignore certain users
    for exclusion in self.config.exclusions.split(','):
        search_query.add_must_not(TermMatch('summary', exclusion))

    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    self.parse_config('http_auth_bruteforce.conf', ['url'])

    search_query = SearchQuery(minutes=15)
    search_query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('source', 'notice'),
        PhraseMatch('details.note',
                    'AuthBruteforcing::HTTP_AuthBruteforcing_Attacker')
    ])

    self.filtersManual(search_query)

    # Search events
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    self.parse_config('ldap_bruteforce.conf',
                      ['threshold_count', 'search_depth_min', 'host_exclusions'])

    search_query = SearchQuery(minutes=int(self.config.search_depth_min))
    search_query.add_must_not(TermMatch('details.user', ''))
    search_query.add_must([
        TermMatch('category', 'ldap'),
        TermMatch('details.response.error', 'LDAP_INVALID_CREDENTIALS'),
    ])

    for host_exclusion in self.config.host_exclusions.split(','):
        search_query.add_must_not([TermMatch('details.server', host_exclusion)])

    self.filtersManual(search_query)
    self.searchEventsAggregated('details.user', samplesLimit=10)
    self.walkAggregations(threshold=int(self.config.threshold_count))
def main(self):
    search_query = SearchQuery(minutes=15)
    search_query.add_must([
        TermMatch('eventsource', 'nsm'),
        TermMatch('category', 'brointel'),
        TermMatch('details.sources', 'abuse.ch SSLBL'),
        ExistsMatch('details.sourceipaddress')
    ])

    self.filtersManual(search_query)

    # Search events
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    self.parse_config('write_audit.conf', ['skipprocess'])

    search_query = SearchQuery(minutes=15)
    search_query.add_must([
        TermMatch('category', 'write'),
        TermMatch('details.auditkey', 'audit'),
    ])

    for processname in self.config.skipprocess.split():
        search_query.add_must_not(
            PhraseMatch('details.processname', processname))

    self.filtersManual(search_query)
    self.searchEventsAggregated('details.originaluser', samplesLimit=10)
    self.walkAggregations(threshold=2)
def main(self):
    search_query = SearchQuery(minutes=1)
    search_query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('details.source', 'notice'),
        PhraseMatch('details.note', 'Scan::Address_Scan'),
        QueryStringMatch('details.sourceipaddress: {}'.format(
            self._config['sourcemustmatch']))
    ])
    search_query.add_must_not([
        QueryStringMatch('details.sourceipaddress: {}'.format(
            self._config['sourcemustnotmatch']))
    ])

    self.filtersManual(search_query)
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    search_query = SearchQuery(minutes=2)
    search_query.add_must([
        TermMatch('category', 'event'),
        TermMatch('tags', 'duosecurity'),
        PhraseMatch('details.integration', 'global and external openvpn'),
        PhraseMatch('details.result', 'FAILURE')
    ])

    self.filtersManual(search_query)

    # Search aggregations on field 'details.username', keep at most
    # 5 samples of events per aggregation
    self.searchEventsAggregated('details.username', samplesLimit=5)
    # alert when >= 5 matching events in an aggregation
    self.walkAggregations(threshold=5)