def main(self):
    search_query = SearchQuery(minutes=1)
    search_query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('details.source', 'notice'),
        PhraseMatch('details.note', 'Scan::Address_Scan'),
        QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustmatch']))
    ])
    search_query.add_must_not([
        QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustnotmatch']))
    ])
    self.filtersManual(search_query)
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    superquery = None
    run = 0
    for user in self._config['users'].values():
        if run == 0:
            superquery = PhraseMatch('summary', user)
        else:
            superquery |= PhraseMatch('summary', user)
        run += 1
    search_query = SearchQuery(minutes=10)
    search_query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('summary:"session opened"')
    ])
    for expectedtime in self._config['scan_expected'].values():
        r1 = datetime.datetime.now().replace(
            hour=int(expectedtime['start_hour']),
            minute=int(expectedtime['start_minute']),
            second=int(expectedtime['start_second'])).isoformat()
        r2 = datetime.datetime.now().replace(
            hour=int(expectedtime['end_hour']),
            minute=int(expectedtime['end_minute']),
            second=int(expectedtime['end_second'])).isoformat()
        search_query.add_must_not([
            RangeMatch('utctimestamp', r1, r2)
        ])
    search_query.add_must(superquery)
    self.filtersManual(search_query)
    self.searchEventsAggregated('details.program', samplesLimit=10)
    self.walkAggregations(threshold=1)
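A minimal sketch of the configuration shape this alert appears to expect, with hypothetical values: 'users' maps labels to usernames that are OR-ed into the summary match, and 'scan_expected' maps labels to daily time windows excluded via RangeMatch. Only the key names are taken from the code above; everything else is illustrative.

# Hypothetical example config (names and values are illustrative only); the
# keys mirror those read above from _config['users'] and _config['scan_expected'].
example_config = {
    'users': {
        'scanner': 'svc-scanner',  # matched with PhraseMatch('summary', ...)
    },
    'scan_expected': {
        'nightly_scan': {
            'start_hour': '1', 'start_minute': '0', 'start_second': '0',
            'end_hour': '2', 'end_minute': '30', 'end_second': '0',
        },
    },
}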
def main(self):
    self.parse_config("proxy_drop_executable.conf", ["extensions"])
    search_query = SearchQuery(minutes=20)
    search_query.add_must([
        TermMatch("category", "proxy"),
        TermMatch("details.proxyaction", "TCP_DENIED"),
    ])
    # Only notify on certain file extensions from config
    filename_regex = r"/.*\.({0})/".format(
        self.config.extensions.replace(",", "|"))
    search_query.add_must([
        QueryStringMatch("details.destination: {}".format(filename_regex))
    ])
    self.filtersManual(search_query)
    # Search aggregations on field 'details.sourceipaddress', keep X samples
    # of events at most
    self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
    # alert when >= X matching events in an aggregation
    # I think it makes sense to alert every time here
    self.walkAggregations(threshold=1)
def process_alert(self, term):
    search_query = SearchQuery(minutes=20)
    content = QueryStringMatch(str(term))
    search_query.add_must(content)
    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    self.parse_config('proxy_drop_non_standard_port.conf', ['excludedports'])
    search_query = SearchQuery(minutes=20)
    search_query.add_must([
        TermMatch('category', 'squid'),
        TermMatch('tags', 'squid'),
        TermMatch('details.proxyaction', 'TCP_DENIED/-'),
        TermMatch('details.tcpaction', 'CONNECT')
    ])
    # Do not notify on ports excluded via the config
    port_regex = "/.*:({0})/".format(
        self.config.excludedports.replace(',', '|'))
    search_query.add_must_not(
        [QueryStringMatch('details.destination: {}'.format(port_regex))])
    self.filtersManual(search_query)
    # Search aggregations on field 'details.sourceipaddress', keep X samples
    # of events at most
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    # alert when >= X matching events in an aggregation
    # I think it makes sense to alert every time here
    self.walkAggregations(threshold=1)
def main(self):
    self.parse_config('proxy_drop_exfil_domains.conf', ['exfil_domains'])
    search_query = SearchQuery(minutes=20)
    search_query.add_must([
        TermMatch('category', 'squid'),
        TermMatch('tags', 'squid'),
        TermMatch('details.proxyaction', "TCP_DENIED/-")
    ])
    # Only notify on certain domains listed in the config
    domain_regex = "/({0}).*/".format(
        self.config.exfil_domains.replace(',', '|'))
    search_query.add_must(
        [QueryStringMatch('details.destination: {}'.format(domain_regex))])
    self.filtersManual(search_query)
    # Search aggregations on field 'details.sourceipaddress', keep X samples
    # of events at most
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    # alert when >= X matching events in an aggregation
    # I think it makes sense to alert every time here
    self.walkAggregations(threshold=1)
def main(self):
    search_query = SearchQuery(minutes=15)
    self.config = self.parse_json_alert_config(
        'ssh_access_signreleng.json')
    if self.config['channel'] == '':
        self.config['channel'] = None
    search_query.add_must([
        TermMatch('tags', 'releng'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('hostname: /{}/'.format(
            self.config['hostfilter'])),
        PhraseMatch('summary', 'Accepted publickey for ')
    ])
    for exclusion in self.config['exclusions']:
        exclusion_query = None
        for key, value in exclusion.items():
            phrase_exclusion = PhraseMatch(key, value)
            if exclusion_query is None:
                exclusion_query = phrase_exclusion
            else:
                exclusion_query = exclusion_query + phrase_exclusion
        search_query.add_must_not(exclusion_query)
    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def process_alert(self, alert_config):
    self.current_alert_time_window = int(alert_config['time_window'])
    self.current_alert_time_type = alert_config['time_window_type']
    search_query_time_window = {
        self.current_alert_time_type: self.current_alert_time_window
    }
    search_query = SearchQuery(**search_query_time_window)
    search_query.add_must(QueryStringMatch(str(alert_config['search_query'])))
    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents(description=alert_config['description'])
def process_alert(self, alert_config):
    search_query = SearchQuery(minutes=int(alert_config.time_window))
    terms = []
    for i in alert_config.filters:
        terms.append(TermMatch(i[0], i[1]))
    terms.append(QueryStringMatch(str(alert_config.search_string)))
    search_query.add_must(terms)
    self.filtersManual(search_query)
    self.searchEventsAggregated(alert_config.aggregation_key,
                                samplesLimit=int(alert_config.num_samples))
    self.walkAggregations(threshold=int(alert_config.num_aggregations),
                          config=alert_config)
def process_alert(self, alert_config):
    # Set instance variable to populate event attributes about an alert
    self.custom_alert_name = "{0}:{1}".format(
        self.classname(), alert_config['custom_alert_name'])
    search_query = SearchQuery(minutes=int(alert_config.time_window))
    terms = []
    for i in alert_config.filters:
        terms.append(TermMatch(i[0], i[1]))
    terms.append(QueryStringMatch(str(alert_config.search_string)))
    search_query.add_must(terms)
    self.filtersManual(search_query)
    self.searchEventsAggregated(alert_config.aggregation_key,
                                samplesLimit=int(alert_config.num_samples))
    self.walkAggregations(threshold=int(alert_config.num_aggregations),
                          config=alert_config)
def main(self):
    search_query = SearchQuery(minutes=2)
    search_query.add_must([
        TermMatch('category', 'promiscuous'),
        PhraseMatch('summary', 'promiscuous'),
        PhraseMatch('summary', 'on')
    ])
    search_query.add_must_not([
        QueryStringMatch('details.dev: veth*'),
    ])
    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    search_query = SearchQuery(minutes=2)
    search_query.add_must([
        TermMatch('category', 'syslog'),
        PhraseMatch('summary', 'promiscuous'),
        PhraseMatch('summary', 'entered')
    ])
    search_query.add_must_not([
        QueryStringMatch('summary: veth*'),
    ])
    self.filtersManual(search_query)
    self.searchEventsAggregated('hostname', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    with open(_CONFIG_FILE) as cfg_file:
        self.config = json.load(cfg_file)
    query_string = ' OR '.join([
        '{0}: {1}'.format(_IAM_USER_KEY, user)
        for user in self.config['rootUsers']
    ])
    query = SearchQuery(**self.config['searchWindow'])
    query.add_must([
        QueryStringMatch(query_string),
        TermMatch(_AWS_EVENT_KEY, _ATTACH_POLICY_ACTION)
    ])
    self.filtersManual(query)
    self.searchEventsAggregated(_AGGREGATE_KEY, samplesLimit=10)
    self.walkAggregations(threshold=1)
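For reference, a hypothetical shape for the JSON file behind _CONFIG_FILE, inferred only from the two keys read above: 'rootUsers' feeds the OR-ed query string, and 'searchWindow' is unpacked directly into SearchQuery keyword arguments. The names and values below are illustrative, not the actual config.

# Hypothetical contents of the file referenced by _CONFIG_FILE (illustrative only).
example_config = {
    'rootUsers': ['root_account_alice', 'root_account_bob'],
    'searchWindow': {'minutes': 20},  # passed through as SearchQuery(**searchWindow)
}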
def main(self):
    search_query = SearchQuery(minutes=20)
    search_query.add_must([
        TermMatch("category", "proxy"),
        TermMatch("details.proxyaction", "TCP_DENIED"),
    ])
    # Match on everything that looks like the first octet of either an IPv4
    # or an IPv6 address. This will over-match, but gets weeded out below.
    ip_regex = "/[0-9a-fA-F]{1,4}.*/"
    search_query.add_must(
        [QueryStringMatch("details.host: {}".format(ip_regex))])
    self.filtersManual(search_query)
    self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    search_query = SearchQuery(minutes=20)
    search_query.add_must([
        TermMatch('category', 'squid'),
        TermMatch('tags', 'squid'),
        TermMatch('details.proxyaction', 'TCP_DENIED/-')
    ])
    # Match on 1.1.1.1, http://1.1.1.1, or https://1.1.1.1
    # This will over-match on short 3-char domains like foo.bar.baz.com,
    # but will get weeded out below
    ip_regex = r'/.*\..{1,3}\..{1,3}\..{1,3}(:.*|\/.*)/'
    search_query.add_must([
        QueryStringMatch('details.destination: {}'.format(ip_regex))
    ])
    self.filtersManual(search_query)
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    # look for events in last X mins
    search_query = SearchQuery(minutes=5)
    search_query.add_must([
        TermMatch('category', 'execve'),
        TermMatch('details.user', 'confluence'),
        QueryStringMatch('hostname: /.*(mana|confluence).*/')
    ])
    search_query.add_must_not(TermMatch('details.originaluser', 'root'))
    self.filtersManual(search_query)
    # Search aggregations on field 'hostname', keep X samples of
    # events at most
    self.searchEventsAggregated('hostname', samplesLimit=10)
    # alert when >= X matching events in an aggregation
    # in this case, always
    self.walkAggregations(threshold=1)
def main(self):
    self.config_file = './unauth_ssh.conf'
    self.config = None
    self.initConfiguration()
    search_query = SearchQuery(minutes=30)
    search_query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('hostname: /{}/'.format(self.config.hostfilter)),
        PhraseMatch('summary', 'Accepted publickey for {}'.format(self.config.user))
    ])
    for x in self.config.skiphosts:
        search_query.add_must_not(PhraseMatch('summary', x))
    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def query_tests(self):
    tests = {
        QueryStringMatch('summary: test'): [
            {'summary': 'test'},
        ],
        QueryStringMatch('summary: test conf'): [
            {'summary': 'test'},
            {'summary': 'conf'},
            {'summary': 'test conf'},
        ],
        QueryStringMatch(hostname_test_regex): [
            {'hostname': 'host.groupa.test.def.subdomain.company.com'},
            {'hostname': 'host.groupa.test.def.subdomain.company.com'},
            {'hostname': 'host.groupa.subdomain.domain.company.com'},
            {'hostname': 'host.groupa.subdomain.domain1.company.com'},
            {'hostname': 'host.groupa.subdomain.company.com'},
            {'hostname': 'host1.groupa.subdomain.company.com'},
            {'hostname': 'host1.groupa.test.subdomain.company.com'},
            {'hostname': 'host-1.groupa.test.subdomain.domain.company.com'},
            {'hostname': 'host-v2-test6.groupa.test.subdomain.domain.company.com'},
            {'hostname': 'host1.groupa.subdomain.domain.company.com'},
            {'hostname': 'someotherhost1.hgi.groupa.subdomain.domain1.company.com'},
            {'hostname': 'host2.groupb.subdomain.domain.company.com'},
        ],
        QueryStringMatch(filename_matcher): [
            {'summary': 'test.exe'},
            {'summary': 'test.sh'},
        ],
    }
    return tests
def query_tests(self):
    tests = {
        QueryStringMatch('summary: test'): [
            {'summary': 'example summary'},
            {'summary': 'example summary tes'},
            {'summary': 'testing'},
            {'note': 'test'},
        ],
        QueryStringMatch('summary: test conf'): [
            {'summary': 'testing'},
            {'summary': 'configuration'},
            {'summary': 'testing configuration'},
        ],
        QueryStringMatch(hostname_test_regex): [
            {'hostname': ''},
            {'hostname': 'host.subdomain.company.com'},
            {'hostname': 'host.subdomain.domain1.company.com'},
            {'hostname': 'groupa.abc.company.com'},
            {'hostname': 'asub.subdomain.company.com'},
            {'hostname': 'example.com'},
            {'hostname': 'abc.company.com'},
            {'hostname': 'host1.groupa.asubdomain.company.com'},
            {'hostname': 'host1.groupa.subdomaina.company.com'},
            {'hostname': 'host1.groupaa.subdomain.company.com'},
            {'hostname': 'host1.agroupb.subdomain.company.com'},
        ],
        QueryStringMatch(filename_matcher): [
            {'summary': 'test.exe.abcd'},
            {'summary': 'testexe'},
            {'summary': 'test.1234'},
            {'summary': '.exe.test'},
        ],
    }
    return tests
def enrich(alert, search_window_hours, search_fn):
    '''Search for events describing the DHCP assignment for an IP in an alert
    and add information to the alert's details and summary.
    '''
    # First, we must find the MAC address that requested the offending IP
    # address listed in the alert.
    ip = alert['events'][0]['documentsource']['details']['sourceipaddress']

    search_mac_assignment = SearchQuery({'hours': search_window_hours})
    search_mac_assignment.add_must([
        TermMatch('source', 'dhcp'),
        TermMatch('details.assigned_addr', ip)
    ])

    assign_events = sorted(
        [
            hit.get('_source', {})
            for hit in search_fn(search_mac_assignment).get('hits', [])
        ],
        key=lambda evt: evt['details']['ts'],
        reverse=True)  # Sort into descending order from most recent to least.

    if len(assign_events) > 0:
        mac = assign_events[0]['details']['mac']
    else:
        return alert

    # Next, we attempt to look up the name of the user who owns the MAC address
    # in question. When we cannot find a user, we substitute the string
    # "(no user found)"
    no_user_found = '(no user found)'

    search_mac_owner = SearchQuery({'hours': search_window_hours})
    query = 'source:local1 AND "{}"'.format(mac)
    search_mac_owner.add_must(QueryStringMatch(query))

    user_events = sorted(
        [
            hit.get('_source', {})
            for hit in search_fn(search_mac_owner).get('hits', [])
        ],
        key=lambda evt: toUTC(evt['receivedtimestamp']),
        reverse=True)  # Sort into descending order from most recent to least.

    if len(user_events) > 0:
        summary_dict = _comma_eq_dict(user_events[0]['summary'])
        user = summary_dict.get('user_name', no_user_found)
    else:
        return alert

    # Finally, add the details.ipassignment fields and append to the summary.
    if 'details' not in alert:
        alert['details'] = {}

    alert['details']['ipassignment'] = {
        'mac': mac,
        'user': user
    }

    if user != no_user_found:
        alert['details']['username'] = user

    alert['summary'] += '; IP assigned to {} ({})'.format(user, mac)

    return alert
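A brief usage sketch for enrich(), assuming a hypothetical wrapper named example_search_fn and an existing ElasticsearchClient instance named es_client with an 'events' index; the only contract visible above is that search_fn accepts a SearchQuery and returns a result dict exposing a 'hits' list.

# Hypothetical usage sketch; es_client, the index name, and the 24-hour window
# are assumptions for illustration, not part of the enrich() code above.
def example_search_fn(query):
    # Return the raw result dict so enrich() can call .get('hits', []) on it.
    return query.execute(es_client, indices=['events'])

enriched_alert = enrich(alert, search_window_hours=24, search_fn=example_search_fn)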
def query_tests(self):
    tests = [
        [
            QueryStringMatch('summary: test'),
            [
                {'summary': 'example summary'},
                {'summary': 'example summary tes'},
                {'summary': 'testing'},
                {'note': 'test'},
            ]
        ],
        [
            QueryStringMatch('summary: test conf'),
            [
                {'summary': 'testing'},
                {'summary': 'configuration'},
                {'summary': 'testing configuration'},
            ]
        ],
        [
            QueryStringMatch(hostname_test_regex),
            [
                {'hostname': ''},
                {'hostname': 'host.subdomain.company.com'},
                {'hostname': 'host.subdomain.domain1.company.com'},
                {'hostname': 'groupa.abc.company.com'},
                {'hostname': 'asub.subdomain.company.com'},
                {'hostname': 'example.com'},
                {'hostname': 'abc.company.com'},
                {'hostname': 'host1.groupa.asubdomain.company.com'},
                {'hostname': 'host1.groupa.subdomaina.company.com'},
                {'hostname': 'host1.groupaa.subdomain.company.com'},
                {'hostname': 'host1.agroupb.subdomain.company.com'},
            ]
        ],
        [
            QueryStringMatch(filename_matcher),
            [
                {'summary': 'test.exe.abcd'},
                {'summary': 'testexe'},
                {'summary': 'test.1234'},
                {'summary': '.exe.test'},
            ]
        ],
        [
            QueryStringMatch(ip_matcher),
            [
                {'destination': 'https://foo.bar.mozilla.com/somepath'},
                {'destination': 'foo.bar.mozilla.com:80'},
                {'destination': 'http://example.com/somepath'},
                {'destination': 'example.com:443'},
            ]
        ],
    ]
    return tests
def query_tests(self):
    tests = [
        [
            QueryStringMatch('summary: test'),
            [
                {'summary': 'test'},
            ]
        ],
        [
            QueryStringMatch('summary: test conf'),
            [
                {'summary': 'test'},
                {'summary': 'conf'},
                {'summary': 'test conf'},
            ]
        ],
        [
            QueryStringMatch(hostname_test_regex),
            [
                {'hostname': 'host.groupa.test.def.subdomain.company.com'},
                {'hostname': 'host.groupa.test.def.subdomain.company.com'},
                {'hostname': 'host.groupa.subdomain.domain.company.com'},
                {'hostname': 'host.groupa.subdomain.domain1.company.com'},
                {'hostname': 'host.groupa.subdomain.company.com'},
                {'hostname': 'host1.groupa.subdomain.company.com'},
                {'hostname': 'host1.groupa.test.subdomain.company.com'},
                {'hostname': 'host-1.groupa.test.subdomain.domain.company.com'},
                {'hostname': 'host-v2-test6.groupa.test.subdomain.domain.company.com'},
                {'hostname': 'host1.groupa.subdomain.domain.company.com'},
                {'hostname': 'someotherhost1.hgi.groupa.subdomain.domain1.company.com'},
                {'hostname': 'host2.groupb.subdomain.domain.company.com'},
            ]
        ],
        [
            QueryStringMatch(filename_matcher),
            [
                {'summary': 'test.exe'},
                {'summary': 'test.sh'},
            ]
        ],
        [
            QueryStringMatch(ip_matcher),
            [
                {'destination': 'http://1.2.3.4/somepath'},
                {'destination': 'https://1.2.3.4/somepath'},
                {'destination': '1.2.3.4/somepath'},
                {'destination': '1.2.3.4/somepath'},
                {'destination': '1.2.3.4:443'},
                {'destination': '1.2.3.4:80'},
                # Over-match examples (which need to be validated further in alerts)
                {'destination': 'https://foo.bar.baz.com/somepath'},
                {'destination': 'foo.bar.baz.com:80'},
            ]
        ],
    ]
    return tests
def onMessage(self, request, response):
    '''
    request: http://bottlepy.org/docs/dev/api.html#the-request-object
    response: http://bottlepy.org/docs/dev/api.html#the-response-object
    '''
    # an ES query/facet to count success/failed logins
    # oriented to the data sent via auth02mozdef.py
    begindateUTC = None
    enddateUTC = None
    resultsList = list()

    if begindateUTC is None:
        begindateUTC = datetime.now() - timedelta(hours=12)
        begindateUTC = toUTC(begindateUTC)

    if enddateUTC is None:
        enddateUTC = datetime.now()
        enddateUTC = toUTC(enddateUTC)

    es_client = ElasticsearchClient(
        list('{0}'.format(s) for s in self.restoptions['esservers']))

    search_query = SearchQuery()
    # a query to tally users with failed logins
    date_range_match = RangeMatch('utctimestamp', begindateUTC, enddateUTC)
    search_query.add_must(date_range_match)
    search_query.add_must(TermMatch('tags', 'auth0'))
    search_query.add_must(QueryStringMatch('failed'))
    search_query.add_must(ExistsMatch('details.username'))
    search_query.add_aggregation(Aggregation('details.type'))
    search_query.add_aggregation(Aggregation('details.username'))

    results = search_query.execute(es_client, indices=['events', 'events-previous'])

    # any usernames or words to ignore
    # especially useful if ES is analyzing the username field and breaking apart
    # user@somewhere.com into user, somewhere, and .com
    stoplist = self.options.ignoreusernames.split(',')

    # walk the aggregated failed users
    # and look for successes/failures
    for t in results['aggregations']['details.username']['terms']:
        if t['key'] in stoplist:
            continue

        failures = 0
        success = 0
        username = t['key']

        details_query = SearchQuery()
        details_query.add_must(date_range_match)
        details_query.add_must(TermMatch('tags', 'auth0'))
        details_query.add_must(TermMatch('details.username', username))
        details_query.add_aggregation(Aggregation('details.type'))

        results = details_query.execute(es_client)

        # details.type is usually "Success Login" or "Failed Login"
        for t in results['aggregations']['details.type']['terms']:
            if 'success' in t['key'].lower():
                success = t['count']
            if 'fail' in t['key'].lower():
                failures = t['count']

        resultsList.append(
            dict(username=username,
                 failures=failures,
                 success=success,
                 begin=begindateUTC.isoformat(),
                 end=enddateUTC.isoformat()))

    response.body = json.dumps(resultsList)
    response.status = 200

    return (request, response)