def query_tests(self):
    """Return a mapping of PhraseMatch queries to the documents each should match."""
    run_query = PhraseMatch('summary', 'test run')
    abc_query = PhraseMatch('summary', 'test abc')
    plain_query = PhraseMatch('summary', 'test')
    return {
        run_query: [
            {'summary': 'test sample run'},
            {'notes': 'test run'},
            {'summary': 'example test running'},
        ],
        abc_query: [
            {'summary': 'example summary test'},
            {'notes': 'we are test here source'},
        ],
        plain_query: [
            {'summary': 'we are testing'},
        ],
    }
def main(self):
    """Alert on sshd 'session opened' syslog events for configured users
    that occur outside the configured expected scan windows.
    """
    # OR together a PhraseMatch for every configured user.
    superquery = None
    for user in self._config['users'].values():
        user_match = PhraseMatch('summary', user)
        superquery = user_match if superquery is None else superquery | user_match

    search_query = SearchQuery(minutes=10)
    search_query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('summary:"session opened"')
    ])

    # Exclude events that fall inside any expected scan window (today's
    # date with the configured start/end times).
    for window in self._config['scan_expected'].values():
        window_start = datetime.datetime.now().replace(
            hour=int(window['start_hour']),
            minute=int(window['start_minute']),
            second=int(window['start_second'])).isoformat()
        window_end = datetime.datetime.now().replace(
            hour=int(window['end_hour']),
            minute=int(window['end_minute']),
            second=int(window['end_second'])).isoformat()
        search_query.add_must_not([
            RangeMatch('utctimestamp', window_start, window_end)
        ])

    search_query.add_must(superquery)
    self.filtersManual(search_query)
    self.searchEventsAggregated('details.program', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    """Alert on releng sshd publickey logins, honoring configured exclusions."""
    search_query = SearchQuery(minutes=15)
    self.config = self.parse_json_alert_config(
        'ssh_access_signreleng.json')

    # An empty channel in the config means "no channel".
    if self.config['channel'] == '':
        self.config['channel'] = None

    search_query.add_must([
        TermMatch('tags', 'releng'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('hostname: /{}/'.format(
            self.config['hostfilter'])),
        PhraseMatch('summary', 'Accepted publickey for ')
    ])

    # Each exclusion entry's field/value pairs are ANDed together and
    # then excluded from the results.
    for exclusion in self.config['exclusions']:
        combined = None
        for field, value in exclusion.items():
            term = PhraseMatch(field, value)
            combined = term if combined is None else combined + term
        search_query.add_must_not(combined)

    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def query_tests(self):
    """Return [query, matching-documents] pairs used to exercise PhraseMatch."""
    def docs(*summaries):
        # Helper: wrap each summary string in a one-field document.
        return [{'summary': text} for text in summaries]

    return [
        [
            PhraseMatch('summary', 'test run'),
            docs('test run', 'this is test run source', 'this is test run'),
        ],
        [
            PhraseMatch('summary', 'test'),
            docs('test here', 'we are test here source', 'this is test'),
        ],
        [
            PhraseMatch('summary', '/test/abc'),
            docs('/test/abc', '/test/abc/def', 'path /test/abc'),
        ],
    ]
def main(self):
    """Alert on repeated duosecurity FAILURE results from the
    'global and external openvpn' integration, aggregated per user.
    """
    query = SearchQuery(minutes=2)
    must_clauses = [
        TermMatch('category', 'event'),
        TermMatch('tags', 'duosecurity'),
        PhraseMatch('details.integration', 'global and external openvpn'),
        PhraseMatch('details.result', 'FAILURE'),
    ]
    query.add_must(must_clauses)
    self.filtersManual(query)
    # Search aggregations on field 'username', keep X samples of events at most
    self.searchEventsAggregated('details.username', samplesLimit=5)
    # alert when >= X matching events in an aggregation
    self.walkAggregations(threshold=5)
def main(self):
    """Alert when an interface enters promiscuous mode, excluding veth* devices."""
    query = SearchQuery(minutes=2)
    query.add_must([
        TermMatch('category', 'promiscuous'),
        PhraseMatch('summary', 'promiscuous'),
        PhraseMatch('summary', 'on'),
    ])
    # Virtual ethernet (veth*) devices are excluded from this alert.
    query.add_must_not([
        QueryStringMatch('details.dev: veth*'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def esSearch(es, macassignments=None):
    '''Search ES for an event that ties a username to a mac address.

    This example searches for junos wifi correlations on authentication
    success, expecting an event like:
    user: [email protected]; mac: 5c:f9:38:b1:de:cf; author reason: roamed session; ssid: ANSSID; AP 46/2\n

    :param es: elasticsearch client passed to SearchQuery.execute
    :param macassignments: optional dict mapping a lowercased OUI prefix
        (first 8 chars of the mac) to an owning entity; prefixes not found
        (or macassignments=None) resolve to entity='unknown'
    :return: dict keyed by '<username> <macaddress>' with correlation
        details, or None if elasticsearch could not be reached
    '''
    usermacre = re.compile(r'''user: (?P<username>.*?); mac: (?P<macaddress>.*?); ''', re.IGNORECASE)
    correlations = {}

    search_query = SearchQuery(minutes=options.correlationminutes)
    search_query.add_must(TermMatch('details.program', 'AUTHORIZATION-SUCCESS'))
    search_query.add_must_not(PhraseMatch('summary', 'last-resort'))

    try:
        full_results = search_query.execute(es)
        results = full_results['hits']

        for r in results:
            fields = re.search(usermacre, r['_source']['summary'])
            if fields is None:
                continue
            username = fields.group('username')
            macaddress = fields.group('macaddress')
            key = '{0} {1}'.format(username, macaddress)
            if key in correlations:
                # Keep only the first (most relevant) event per user/mac pair.
                continue
            oui = macaddress[0:8].lower()
            # Guard against the default macassignments=None, which previously
            # raised an uncaught TypeError on the membership test.
            if macassignments and oui in macassignments:
                entity = macassignments[oui]
            else:
                entity = 'unknown'
            correlations[key] = dict(
                username=username,
                macaddress=macaddress,
                entity=entity,
                utctimestamp=r['_source']['utctimestamp'])
        return correlations
    except ElasticsearchBadServer:
        logger.error('Elastic Search server could not be reached, check network connectivity')
def main(self):
    """Alert per host when syslog reports a device entered promiscuous mode,
    excluding veth* interfaces.
    """
    query = SearchQuery(minutes=2)
    query.add_must([
        TermMatch('category', 'syslog'),
        PhraseMatch('summary', 'promiscuous'),
        PhraseMatch('summary', 'entered'),
    ])
    # veth* interfaces are excluded from this alert.
    query.add_must_not([
        QueryStringMatch('summary: veth*'),
    ])
    self.filtersManual(query)
    self.searchEventsAggregated('hostname', samplesLimit=10)
    self.walkAggregations(threshold=1)
def searchESForBROAttackers(es, threshold):
    """Return bronotice attacker events whose hit count exceeds threshold."""
    search_query = SearchQuery(hours=2)
    search_query.add_must([
        PhraseMatch('category', 'bronotice'),
        PhraseMatch('details.note', 'MozillaHTTPErrors::Excessive_HTTP_Errors_Attacker')
    ])
    results = search_query.execute(es)['hits']

    # Hit count is buried in the 'sub' field as: 'sub': u'6 in 1.0 hr, eps: 0'
    # — cull the records for hitcounts over the threshold before returning.
    return [
        record for record in results
        if int(record['_source']['details']['sub'].split()[0]) > threshold
    ]
def main(self):
    """Alert per host on sshd 'Accepted publickey' logins seen in syslog."""
    query = SearchQuery(minutes=2)
    query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        PhraseMatch('summary', 'Accepted publickey'),
    ])
    self.filtersManual(query)
    # Aggregate by hostname, keeping at most 10 sample events per bucket.
    self.searchEventsAggregated('hostname', samplesLimit=10)
    # Any matching event raises the alert.
    self.walkAggregations(threshold=1)
def main(self):
    """Deadman check: look for each configured host's Bro heartbeat notice
    and walk the (possibly empty) results per host.
    """
    self.parse_config('deadman.conf', ['url', 'hosts'])
    for host in self.config.hosts.split(","):
        self.log.debug('Checking deadman for host: {0}'.format(host))
        query = SearchQuery(minutes=20)
        query.add_must([
            PhraseMatch("details.note", "MozillaAlive::Bro_Is_Watching_You"),
            PhraseMatch("hostname", host),
            TermMatch('category', 'bro'),
            TermMatch('source', 'notice'),
        ])
        self.filtersManual(query)
        # Search events
        self.searchEventsSimple()
        self.walkEvents(hostname=host)
def main(self):
    """Alert on ssh login bruteforcing, aggregated by source IP address,
    skipping any configured allowlisted hosts.
    """
    self.parse_config('bruteforce_ssh.conf', ['skiphosts'])

    query = SearchQuery(minutes=2)
    query.add_must([
        PhraseMatch('summary', 'failed'),
        TermMatch('details.program', 'sshd'),
        TermsMatch('summary', ['login', 'invalid', 'ldap_count_entries', 'publickey', 'keyboard']),
    ])
    # Configured addresses never trigger this alert.
    for skip_ip in self.config.skiphosts.split():
        query.add_must_not(PhraseMatch('summary', skip_ip))

    self.filtersManual(query)
    # Search aggregations on field 'sourceipaddress', keep X samples of
    # events at most
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    # alert when >= X matching events in an aggregation
    self.walkAggregations(threshold=10)
def main(self):
    """Alert on publickey logins for the configured user on filtered hosts,
    skipping configured summary patterns.
    """
    self.config_file = './unauth_ssh.conf'
    self.config = None
    self.initConfiguration()

    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('hostname: /{}/'.format(self.config.hostfilter)),
        PhraseMatch('summary', 'Accepted publickey for {}'.format(self.config.user)),
    ])
    for skipped in self.config.skiphosts:
        query.add_must_not(PhraseMatch('summary', skipped))

    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert whenever a Failsafe Duo login is seen."""
    query = SearchQuery(minutes=15)
    query.add_must(PhraseMatch('summary', 'Failsafe Duo login'))
    self.filtersManual(query)
    # Search aggregations on field 'sourceipaddress', keep X samples of
    # events at most
    self.searchEventsAggregated('hostname', samplesLimit=10)
    # alert when >= X matching events in an aggregation
    # in this case, always
    self.walkAggregations(threshold=1)
def main(self):
    """Alert when audisp-json records an execve of ssh with an sftp parent process."""
    query = SearchQuery(minutes=5)
    query.add_must([
        TermMatch('category', 'execve'),
        TermMatch('processname', 'audisp-json'),
        TermMatch('details.processname', 'ssh'),
        PhraseMatch('details.parentprocess', 'sftp'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on ldapChange 'modify' events whose summary mentions groups."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'modify'),
        PhraseMatch("summary", "groups"),
    ])
    self.filtersManual(query)
    # Search events
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert when the LDAP admin replaces pwdAccountLockedTime on an account."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch("details.actor", "cn=admin,dc=mozilla"),
        PhraseMatch('details.changepairs', 'replace:pwdAccountLockedTime'),
    ])
    self.filtersManual(query)
    # Search events
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert per original user on strace usage, excluding configured hosts."""
    self.parse_config('trace_audit.conf', ['hostfilter'])

    query = SearchQuery(minutes=5)
    query.add_must([
        TermMatch('details.processname', 'strace'),
    ])
    # Hosts listed in the config never trigger this alert.
    for excluded_host in self.config.hostfilter.split():
        query.add_must_not(PhraseMatch('hostname', excluded_host))

    self.filtersManual(query)
    self.searchEventsAggregated('details.originaluser', samplesLimit=10)
    self.walkAggregations(threshold=1)
def enrich(
    alert: dict,
    search_window_hours: int,
    vpn_ip_cidrs: types.List[str],
    search_fn: types.Callable[[SearchQuery], types.List[dict]],
) -> dict:
    '''Search for events that describe an assignment of a VPN IP address
    to the sourceipaddress in an alert.
    '''
    details = alert.get('details', {})
    source_ip = details.get('sourceipaddress')

    # Only enrich alerts whose source IP exists and lies in a VPN range.
    if source_ip is None or netaddr.IPAddress(source_ip) not in netaddr.IPSet(vpn_ip_cidrs):
        return alert

    assignment_query = SearchQuery({
        'hours': search_window_hours,
    })
    assignment_query.add_must([
        TermMatch('tags', 'vpn'),
        TermMatch('tags', 'netfilter'),
        TermMatch('details.success', 'true'),
        TermMatch('details.vpnip', source_ip),
        PhraseMatch('summary', 'netfilter add upon connection'),
    ])

    assignments = [hit.get('_source', {}) for hit in search_fn(assignment_query)]
    # Sort into descending order from most recent to least.
    assignments.sort(key=lambda evt: toUTC(evt['utctimestamp']), reverse=True)

    if not assignments:
        return alert

    newest = assignments[0]
    details['vpnassignment'] = {
        'username': newest['details']['username'],
        'originalip': newest['details']['sourceipaddress'],
    }
    alert['details'] = details
    return alert
def main(self):
    """Alert on Duo authentication events flagged as FRAUD."""
    self.parse_config('duo_authfail.conf', ['url'])

    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'authentication'),
        ExistsMatch('details.sourceipaddress'),
        ExistsMatch('details.username'),
        PhraseMatch('details.result', 'FRAUD'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on Bro HTTP auth-bruteforcing attacker notices."""
    self.parse_config('http_auth_bruteforce.conf', ['url'])

    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('source', 'notice'),
        PhraseMatch('details.note', 'AuthBruteforcing::HTTP_AuthBruteforcing_Attacker'),
    ])
    self.filtersManual(query)
    # Search events
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert per original user on audited writes, skipping configured processes."""
    self.parse_config('write_audit.conf', ['skipprocess'])

    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'write'),
        TermMatch('details.auditkey', 'audit'),
    ])
    # Processes listed in the config never trigger this alert.
    for ignored_process in self.config.skipprocess.split():
        query.add_must_not(PhraseMatch('details.processname', ignored_process))

    self.filtersManual(query)
    self.searchEventsAggregated('details.originaluser', samplesLimit=10)
    self.walkAggregations(threshold=2)
def main(self):
    """Alert per source IP on Bro address-scan notices, constrained to the
    configured source ranges.
    """
    query = SearchQuery(minutes=1)
    query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('details.source', 'notice'),
        PhraseMatch('details.note', 'Scan::Address_Scan'),
        QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustmatch'])),
    ])
    # Sources matching this pattern are explicitly excluded.
    query.add_must_not([
        QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustnotmatch'])),
    ])
    self.filtersManual(query)
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    """Alert on Bro excessive-HTTP-errors attacker notices."""
    self.parse_config('http_errors.conf', ['url'])

    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('source', 'notice'),
        PhraseMatch('details.note', 'MozillaHTTPErrors::Excessive_HTTP_Errors_Attacker'),
    ])
    self.filtersManual(query)
    # Search events
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert per destination IP on ssh password-auth policy violations."""
    query = SearchQuery(hours=4)
    query.add_must([
        TermMatch('category', 'ssh_password_auth_policy_violation'),
        PhraseMatch('tags', 'ssh_password_auth_policy_violation'),
    ])
    self.filtersManual(query)
    # Search aggregations on field 'sourceipaddress', keep X samples of
    # events at most
    self.searchEventsAggregated('details.destinationipaddress', samplesLimit=100)
    # alert when >= X matching events in an aggregation
    self.walkAggregations(threshold=1)
def main(self):
    """Alert on Bro SSH password-guessing notices."""
    self.parse_config('ssh_bruteforce_bro.conf', ['url'])

    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('source', 'notice'),
        PhraseMatch('details.note', 'SSH::Password_Guessing'),
    ])
    self.filtersManual(query)
    # Search events
    self.searchEventsSimple()
    self.walkEvents()
def onMessage(self, message):
    """Enrich an alert message with recent sshd publickey logins on the
    hostname that appears most often in the alert's events.
    """
    hostname = _most_common_hostname(message.get('events', []))

    login_query = SearchQuery(hours=self._config.search_window_hours)
    login_query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('hostname', hostname),
        TermMatch('details.program', 'sshd'),
        PhraseMatch('summary', 'Accepted publickey for '),
    ])
    results = login_query.execute(
        self._es_client,
        indices=self._config.indices_to_search)

    events = [hit.get('_source', {}) for hit in results.get('hits', [])]
    return enrich(message, events)
def main(self):
    """Alert aggregated by email on LDAP group modifications, excluding
    service binds and member deletions.
    """
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'modify'),
        PhraseMatch("summary", "groups"),
    ])
    # ignore test accounts and attempts to create accounts that already exist.
    query.add_must_not([
        WildcardMatch('details.actor', '*bind*'),
        WildcardMatch('details.changepairs', 'delete:*member*'),
    ])
    self.filtersManual(query)
    self.searchEventsAggregated('details.email', samplesLimit=50)
    self.walkAggregations(threshold=1, config={})
def main(self):
    """Alert per event on LDAP group modifications, excluding service binds
    and member deletions.
    """
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'modify'),
        PhraseMatch("summary", "groups"),
    ])
    # ignore test accounts and attempts to create accounts that already exist.
    query.add_must_not([
        WildcardMatch('details.actor', '*bind*'),
        WildcardMatch('details.changepairs', '*delete:member*'),
    ])
    self.filtersManual(query)
    # Search events
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert per client IP on bursts of firefox account creations,
    excluding restmail.net test addresses.
    """
    query = SearchQuery(minutes=10)
    query.add_must([
        TermMatch('tags', 'firefoxaccounts'),
        PhraseMatch('details.action', 'accountCreate'),
    ])
    # ignore test accounts and attempts to create accounts that already exist.
    query.add_must_not([
        WildcardMatch('details.email', '*restmail.net'),
    ])
    self.filtersManual(query)
    # Search aggregations on field 'ip', keep X samples of events at most
    self.searchEventsAggregated('details.ip', samplesLimit=10)
    # alert when >= X matching events in an aggregation
    self.walkAggregations(threshold=10)