def main(self):
    """Alert on accepted SSH publickey logins matching the releng host filter."""
    query = SearchQuery(minutes=15)
    self.config = self.parse_json_alert_config(
        'ssh_access_signreleng.json')
    # An empty configured channel means "no channel".
    if self.config['channel'] == '':
        self.config['channel'] = None
    query.add_must([
        TermMatch('tags', 'releng'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('hostname: /{}/'.format(
            self.config['hostfilter'])),
        PhraseMatch('summary', 'Accepted publickey for '),
    ])
    # Each exclusion entry is a dict of field -> phrase. AND the phrases
    # together, then exclude any event matching the combined clause.
    for exclusion in self.config['exclusions']:
        combined = None
        for field, phrase in exclusion.items():
            clause = PhraseMatch(field, phrase)
            combined = clause if combined is None else combined + clause
        query.add_must_not(combined)
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on sshd 'session opened' events for watched users outside
    their expected scan windows."""
    # OR together a summary match for every configured user.
    user_query = None
    for user in self._config['users'].values():
        if user_query is None:
            user_query = PhraseMatch('summary', user)
        else:
            user_query |= PhraseMatch('summary', user)
    query = SearchQuery(minutes=10)
    query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('summary:"session opened"'),
    ])
    # Exclude events inside each expected scan window, anchored to today.
    for window in self._config['scan_expected'].values():
        window_start = datetime.datetime.now().replace(
            hour=int(window['start_hour']),
            minute=int(window['start_minute']),
            second=int(window['start_second'])).isoformat()
        window_end = datetime.datetime.now().replace(
            hour=int(window['end_hour']),
            minute=int(window['end_minute']),
            second=int(window['end_second'])).isoformat()
        query.add_must_not([
            RangeMatch('utctimestamp', window_start, window_end)
        ])
    query.add_must(user_query)
    self.filtersManual(query)
    self.searchEventsAggregated('details.program', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    """Alert on squid TCP_DENIED CONNECT attempts to non-excluded ports."""
    self.parse_config('proxy_drop_non_standard_port.conf', ['excludedports'])
    query = SearchQuery(minutes=20)
    query.add_must([
        TermMatch('category', 'squid'),
        TermMatch('tags', 'squid'),
        TermMatch('details.proxyaction', 'TCP_DENIED/-'),
        TermMatch('details.tcpaction', 'CONNECT'),
    ])
    # Build an alternation regex from the comma-separated port list and
    # drop any destination ending in one of those ports.
    port_regex = "/.*:({0})/".format(
        self.config.excludedports.replace(',', '|'))
    query.add_must_not(
        [QueryStringMatch('details.destination: {}'.format(port_regex))])
    self.filtersManual(query)
    # Aggregate by source IP, keeping at most 10 sample events,
    # and alert on every aggregation bucket.
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    self.walkAggregations(threshold=1)
def esSearch(es, macassignments=None):
    '''
    Search ES for events that tie a username to a mac address.

    This example searches for junos wifi correlations on authentication
    success. Expecting an event like:
    user: [email protected]; mac: 5c:f9:38:b1:de:cf; author reason: roamed session; ssid: ANSSID; AP 46/2\n

    macassignments: optional dict mapping a lowercased OUI prefix (first 8
    chars of the mac) to an owning entity name.
    Returns a dict keyed by 'username macaddress' with correlation details,
    or None if the ES server cannot be reached (logged as an error).
    '''
    # Pull username and mac out of the summary text.
    usermacre=re.compile(r'''user: (?P<username>.*?); mac: (?P<macaddress>.*?); ''',re.IGNORECASE)
    correlations={}

    search_query = SearchQuery(minutes=options.correlationminutes)
    search_query.add_must(TermMatch('details.program', 'AUTHORIZATION-SUCCESS'))
    # 'last-resort' auth events are not real user<->mac correlations.
    search_query.add_must_not(PhraseMatch('summary', 'last-resort'))

    try:
        full_results = search_query.execute(es)
        results = full_results['hits']

        for r in results:
            fields = re.search(usermacre,r['_source']['summary'])
            if fields:
                # Keep only the first (most recent) sighting of each
                # username/mac pair.
                if '{0} {1}'.format(fields.group('username'),fields.group('macaddress')) not in correlations:
                    # Look up the owning entity by OUI prefix, if known.
                    if fields.group('macaddress')[0:8].lower() in macassignments:
                        entity=macassignments[fields.group('macaddress')[0:8].lower()]
                    else:
                        entity='unknown'
                    correlations['{0} {1}'.format(fields.group('username'),fields.group('macaddress'))]=dict(username=fields.group('username'),
                                                                                                            macaddress=fields.group('macaddress'),
                                                                                                            entity=entity,
                                                                                                            utctimestamp=r['_source']['utctimestamp'])
        return correlations

    except ElasticsearchBadServer:
        # NOTE(review): falls through and implicitly returns None here;
        # callers should handle a None result.
        logger.error('Elastic Search server could not be reached, check network connectivity')
def main(self):
    """Alert on accepted SSH publickey logins matching the releng host filter.

    Fix: dict.iteritems() does not exist in Python 3; use items(), which is
    what the sibling version of this alert in this file already does.
    """
    search_query = SearchQuery(minutes=15)
    self.config = self.parse_json_alert_config('ssh_access_signreleng.json')
    # An empty configured channel means "no channel".
    if self.config['ircchannel'] == '':
        self.config['ircchannel'] = None
    search_query.add_must([
        TermMatch('tags', 'releng'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('hostname: /{}/'.format(self.config['hostfilter'])),
        PhraseMatch('summary', 'Accepted publickey for ')
    ])
    # Each exclusion entry is a dict of field -> phrase. AND the phrases
    # together, then exclude any event matching the combined clause.
    for exclusion in self.config['exclusions']:
        exclusion_query = None
        for key, value in exclusion.items():  # was .iteritems() (Python 2 only)
            phrase_exclusion = PhraseMatch(key, value)
            if exclusion_query is None:
                exclusion_query = phrase_exclusion
            else:
                exclusion_query = exclusion_query + phrase_exclusion
        search_query.add_must_not(exclusion_query)
    self.filtersManual(search_query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on repeated auth0 failed logins per username."""
    self.parse_config(
        'auth0_bruteforce_user.conf',
        ['threshold_count', 'search_depth_min', 'severity'])
    query = SearchQuery(minutes=int(self.config.search_depth_min))
    # Ignore events with an empty username.
    query.add_must_not(TermMatch('details.username', ''))
    query.add_must([
        TermMatch('tags', 'auth0'),
        TermMatch('details.eventname', 'Failed Login (wrong password)'),
    ])
    self.filtersManual(query)
    # Aggregate failures per username and alert past the configured count.
    self.searchEventsAggregated('details.username', samplesLimit=10)
    self.walkAggregations(threshold=int(self.config.threshold_count))
def main(self):
    """Alert on cloudtrail StopLogging events that were not access-denied."""
    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('source', 'cloudtrail'),
        TermMatch('details.eventname', 'StopLogging'),
    ])
    # A denied attempt did not actually stop logging; skip it.
    query.add_must_not(TermMatch('errorcode', 'AccessDenied'))
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on cloudtrail StopLogging events that were not access-denied."""
    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('source', 'cloudtrail'),
        TermMatch('eventName', 'StopLogging'),
    ])
    # A denied attempt did not actually stop logging; skip it.
    query.add_must_not(TermMatch('errorCode', 'AccessDenied'))
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on strace invocations, aggregated per originating user."""
    self.parse_config('trace_audit.conf', ['hostfilter'])
    query = SearchQuery(minutes=5)
    query.add_must([
        TermMatch('details.processname', 'strace'),
    ])
    # hostfilter is a whitespace-separated list of hostnames to ignore.
    for excluded_host in self.config.hostfilter.split():
        query.add_must_not(PhraseMatch('hostname', excluded_host))
    self.filtersManual(query)
    self.searchEventsAggregated('details.originaluser', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    """Alert on geomodel notices, honoring configured user exclusions."""
    self.parse_config('geomodel.conf', ['exclusions', 'url'])
    query = SearchQuery(minutes=30)
    query.add_must([TermMatch('category', 'geomodelnotice')])
    # exclusions is a comma-separated list of summary terms to ignore
    # (allows skipping known/expected users).
    for excluded in self.config.exclusions.split(','):
        query.add_must_not(TermMatch('summary', excluded))
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on audited write events, aggregated per originating user."""
    self.parse_config('write_audit.conf', ['skipprocess', 'expectedusers'])
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'write'),
        TermMatch('details.auditkey', 'audit'),
    ])
    # skipprocess is a whitespace-separated list of process names to ignore.
    for skipped in self.config.skipprocess.split():
        query.add_must_not(PhraseMatch('details.processname', skipped))
    self.filtersManual(query)
    self.searchEventsAggregated('details.originaluser', samplesLimit=10)
    self.walkAggregations(threshold=2)
def main(self):
    """Alert on audited write events, aggregated per originating user."""
    self.parse_config('write_audit.conf', ['skipprocess'])
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'write'),
        TermMatch('details.auditkey', 'audit'),
    ])
    # skipprocess is a whitespace-separated list of process names to ignore.
    for skipped in self.config.skipprocess.split():
        query.add_must_not(PhraseMatch('details.processname', skipped))
    self.filtersManual(query)
    self.searchEventsAggregated('details.originaluser', samplesLimit=10)
    self.walkAggregations(threshold=2)
def main(self):
    """Alert on bro address-scan notices from matching source addresses."""
    query = SearchQuery(minutes=1)
    query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('source', 'notice'),
        PhraseMatch('details.note', 'Scan::Address_Scan'),
        QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustmatch'])),
    ])
    # Drop sources matching the configured negative pattern.
    query.add_must_not([
        QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustnotmatch']))
    ])
    self.filtersManual(query)
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    """Alert on bro address-scan notices from matching source addresses."""
    query = SearchQuery(minutes=1)
    query.add_must([
        TermMatch('category', 'bro'),
        TermMatch('details.source', 'notice'),
        PhraseMatch('details.note', 'Scan::Address_Scan'),
        QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustmatch'])),
    ])
    # Drop sources matching the configured negative pattern.
    query.add_must_not([
        QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustnotmatch']))
    ])
    self.filtersManual(query)
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    """Alert on repeated LDAP invalid-credential failures per user."""
    self.parse_config(
        'ldap_bruteforce.conf',
        ['threshold_count', 'search_depth_min', 'host_exclusions'])
    query = SearchQuery(minutes=int(self.config.search_depth_min))
    # Ignore events with an empty user field.
    query.add_must_not(TermMatch('details.user', ''))
    query.add_must([
        TermMatch('category', 'ldap'),
        TermMatch('details.response.error', 'LDAP_INVALID_CREDENTIALS'),
    ])
    # host_exclusions is a comma-separated list of servers to skip.
    for excluded_server in self.config.host_exclusions.split(","):
        query.add_must_not([TermMatch("details.server", excluded_server)])
    self.filtersManual(query)
    self.searchEventsAggregated('details.user', samplesLimit=10)
    self.walkAggregations(threshold=int(self.config.threshold_count))
def main(self):
    """Alert on interfaces entering promiscuous mode (non-veth devices)."""
    query = SearchQuery(minutes=2)
    query.add_must([
        TermMatch('category', 'promiscuous'),
        PhraseMatch('summary', 'promiscuous'),
        PhraseMatch('summary', 'on'),
    ])
    # veth* devices are expected (container networking); skip them.
    query.add_must_not([
        QueryStringMatch('details.dev: veth*'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on syslog messages about interfaces entering promiscuous mode."""
    query = SearchQuery(minutes=2)
    query.add_must([
        TermMatch('category', 'syslog'),
        PhraseMatch('summary', 'promiscuous'),
        PhraseMatch('summary', 'entered'),
    ])
    # veth* devices are expected (container networking); skip them.
    query.add_must_not([
        QueryStringMatch('summary: veth*'),
    ])
    self.filtersManual(query)
    self.searchEventsAggregated('hostname', samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    """Alert on LDAP group-membership modifications, aggregated by email."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'modify'),
        PhraseMatch("summary", "groups"),
    ])
    # Skip bind-account actors and member-delete changepairs.
    query.add_must_not([
        WildcardMatch('details.actor', '*bind*'),
        WildcardMatch('details.changepairs', 'delete:*member*'),
    ])
    self.filtersManual(query)
    self.searchEventsAggregated('details.email', samplesLimit=50)
    self.walkAggregations(threshold=1, config={})
def main(self):
    """Alert on LDAP account additions, excluding bind-account actors."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'add'),
    ])
    # Skip changes performed by bind accounts.
    query.add_must_not([
        WildcardMatch('details.actor', '*bind*'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert when one IP creates many Firefox accounts in a short window."""
    query = SearchQuery(minutes=10)
    query.add_must([
        TermMatch('tags', 'firefoxaccounts'),
        PhraseMatch('details.action', 'accountCreate'),
    ])
    # restmail.net addresses are test accounts; ignore them.
    query.add_must_not([
        WildcardMatch('details.email', '*restmail.net'),
    ])
    self.filtersManual(query)
    # Aggregate creations per IP (keep up to 10 samples) and alert
    # once an IP has created 10 or more accounts.
    self.searchEventsAggregated('details.ip', samplesLimit=10)
    self.walkAggregations(threshold=10)
def main(self):
    """Alert on LDAP group-membership modifications (per-event)."""
    query = SearchQuery(minutes=15)
    query.add_must([
        TermMatch('category', 'ldapChange'),
        TermMatch('details.changetype', 'modify'),
        PhraseMatch("summary", "groups"),
    ])
    # Skip bind-account actors and member-delete changepairs.
    query.add_must_not([
        WildcardMatch('details.actor', '*bind*'),
        WildcardMatch('details.changepairs', '*delete:member*'),
    ])
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on accepted publickey logins for the configured user on
    filtered hosts, minus known-good hosts."""
    self.config_file = './unauth_ssh.conf'
    self.config = None
    self.initConfiguration()
    query = SearchQuery(minutes=30)
    query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('hostname: /{}/'.format(self.config.hostfilter)),
        PhraseMatch('summary', 'Accepted publickey for {}'.format(self.config.user)),
    ])
    # skiphosts entries are matched as phrases against the summary.
    for skipped in self.config.skiphosts:
        query.add_must_not(PhraseMatch('summary', skipped))
    self.filtersManual(query)
    self.searchEventsSimple()
    self.walkEvents()
def main(self):
    """Alert on non-root execve activity by the confluence user on
    mana/confluence hosts, aggregated per hostname."""
    # Look back over the last 5 minutes.
    query = SearchQuery(minutes=5)
    query.add_must([
        TermMatch('category', 'execve'),
        TermMatch('details.user', 'confluence'),
        QueryStringMatch('hostname: /.*(mana|confluence).*/'),
    ])
    # Activity driven by root (e.g. service management) is expected.
    query.add_must_not(TermMatch('details.originaluser', 'root'))
    self.filtersManual(query)
    # Aggregate per hostname, keep at most 10 sample events, and alert
    # on every aggregation bucket.
    self.searchEventsAggregated('hostname', samplesLimit=10)
    self.walkAggregations(threshold=1)
def test_query_class(self):
    """For each query/events pair, verify the query matches its events
    under must and excludes them under must_not.

    Fix: dict.iteritems() was removed in Python 3; use items().
    """
    for query, events in self.query_tests().items():  # was .iteritems()
        for event in events:
            if pytest.config.option.delete_indexes:
                self.reset_elasticsearch()
                self.setup_elasticsearch()
            self.populate_test_object(event)
            self.refresh(self.event_index_name)

            # must: the query should match the stored event.
            search_query = SearchQuery()
            search_query.add_must(query)
            query_result = search_query.execute(self.es_client)
            self.verify_test(query_result, self.positive_test)

            # must_not: the same query should exclude it.
            search_query = SearchQuery()
            search_query.add_must_not(query)
            query_result = search_query.execute(self.es_client)
            self.verify_test(query_result, self.positive_test is False)
def main(self):
    """Alert on repeated sshd login failures per source IP."""
    self.parse_config('bruteforce_ssh.conf', ['skiphosts'])
    query = SearchQuery(minutes=2)
    query.add_must([
        PhraseMatch('summary', 'failed'),
        TermMatch('details.program', 'sshd'),
        TermsMatch('summary', ['login', 'invalid', 'ldap_count_entries', 'publickey', 'keyboard']),
    ])
    # skiphosts is a whitespace-separated list of IPs to ignore.
    for skipped_ip in self.config.skiphosts.split():
        query.add_must_not(PhraseMatch('summary', skipped_ip))
    self.filtersManual(query)
    # Aggregate failures per source IP (keep up to 10 samples) and alert
    # once a source reaches 10 matching events.
    self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
    self.walkAggregations(threshold=10)
def test_query_class(self):
    """For each query/events pair, verify the query matches its events
    under must and excludes them under must_not.

    Fix: dict.iteritems() was removed in Python 3; use items().
    """
    for query, events in self.query_tests().items():  # was .iteritems()
        for event in events:
            if pytest.config.option.delete_indexes:
                self.reset_elasticsearch()
                self.setup_elasticsearch()
            self.populate_test_object(event)
            self.flush(self.event_index_name)

            # must: the query should match the stored event.
            search_query = SearchQuery()
            search_query.add_must(query)
            query_result = search_query.execute(self.es_client)
            self.verify_test(query_result, self.positive_test)

            # must_not: the same query should exclude it.
            search_query = SearchQuery()
            search_query.add_must_not(query)
            query_result = search_query.execute(self.es_client)
            self.verify_test(query_result, self.positive_test is False)
def main(self):
    """Alert on proxy-denied requests to IP-literal hosts, minus whitelist."""
    self.parse_config("proxy_drop_ip.conf", ["ip_whitelist"])
    query = SearchQuery(minutes=20)
    query.add_must([
        TermMatch("category", "proxy"),
        TermMatch("details.proxyaction", "TCP_DENIED"),
    ])
    # Match anything that looks like the first octet/hextet of an IPv4 or
    # IPv6 address. This over-matches; the whitelist below weeds out
    # known-good destinations.
    ip_regex = "/[0-9a-fA-F]{1,4}.*/"
    query.add_must(
        [QueryStringMatch("details.host: {}".format(ip_regex))])
    # ip_whitelist is a comma-separated list of hosts to skip.
    for allowed_ip in self.config.ip_whitelist.split(","):
        query.add_must_not([TermMatch("details.host", allowed_ip)])
    self.filtersManual(query)
    self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
    self.walkAggregations(threshold=1)
def test_query_class(self):
    """For each query/events pair, verify the query matches its events
    under must and excludes them under must_not."""
    for case in self.query_tests():
        query = case[0]
        events = case[1]
        for event in events:
            if self.config_delete_indexes:
                self.reset_elasticsearch()
                self.setup_elasticsearch()
            self.populate_test_object(event)
            self.refresh(self.event_index_name)

            # must: the query should match the stored event.
            must_query = SearchQuery()
            must_query.add_must(query)
            result = must_query.execute(self.es_client)
            self.verify_test(result, self.positive_test)

            # must_not: the same query should exclude it.
            must_not_query = SearchQuery()
            must_not_query.add_must_not(query)
            result = must_not_query.execute(self.es_client)
            self.verify_test(result, self.positive_test is False)
def main(self):
    """Alert on proxy-denied CONNECTs to ports outside the excluded list."""
    self.parse_config("proxy_drop_non_standard_port.conf", ["excludedports"])
    query = SearchQuery(minutes=20)
    query.add_must([
        TermMatch("category", "proxy"),
        TermMatch("details.proxyaction", "TCP_DENIED"),
        TermMatch("details.method", "CONNECT"),
    ])
    # excludedports is a comma-separated list of destination ports to skip.
    for excluded_port in self.config.excludedports.split(","):
        query.add_must_not(
            [TermMatch("details.destinationport", excluded_port)])
    self.filtersManual(query)
    # Aggregate by source IP (keep up to 10 samples) and alert on every
    # aggregation bucket.
    self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
    self.walkAggregations(threshold=1)
def main(self):
    """Alert on proxy-denied CONNECTs to ports outside the excluded list."""
    self.parse_config("proxy_drop_non_standard_port.conf", ["excludedports"])
    query = SearchQuery(minutes=20)
    query.add_must([
        TermMatch("category", "proxy"),
        TermMatch("details.proxyaction", "TCP_DENIED"),
        TermMatch("details.method", "CONNECT"),
    ])
    # excludedports is a comma-separated list of destination ports to skip.
    for excluded_port in self.config.excludedports.split(","):
        query.add_must_not([TermMatch("details.destinationport", excluded_port)])
    self.filtersManual(query)
    # Aggregate by source IP (keep up to 10 samples) and alert on every
    # aggregation bucket.
    self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
    self.walkAggregations(threshold=1)
def verify_events(options):
    """Report counts of events (per _type) missing each required field."""
    es_client = ElasticsearchClient(options.esservers)
    for required_field in options.required_fields:
        logger.debug('Looking for events without ' + required_field)
        query = SearchQuery(hours=12)
        query.add_must_not(ExistsMatch(required_field))

        # MozDef's own health/stats events don't carry these fields.
        query.add_must_not(TermMatch('_type', 'mozdefstats'))
        query.add_must_not(TermMatch('_type', 'mozdefhealth'))

        query.add_aggregation(Aggregation('_type'))
        # Only the aggregation counts matter; fetch a single hit.
        results = query.execute(es_client, size=1)
        for bucket in results['aggregations']['_type']['terms']:
            logger.error(
                "Found {0} bad events of _type '{1}' missing '{2}' field".
                format(bucket['count'], bucket['key'], required_field))
def verify_events(options):
    """Report counts of events (per _type) missing each required field."""
    es_client = ElasticsearchClient(options.esservers)
    for required_field in options.required_fields:
        logger.debug('Looking for events without ' + required_field)
        query = SearchQuery(hours=12)
        query.add_must_not(ExistsMatch(required_field))

        # MozDef's own health/stats events don't carry these fields.
        query.add_must_not(TermMatch('_type', 'mozdefstats'))
        query.add_must_not(TermMatch('_type', 'mozdefhealth'))

        query.add_aggregation(Aggregation('_type'))
        # Only the aggregation counts matter; fetch a single hit.
        results = query.execute(es_client, size=1)
        for bucket in results['aggregations']['_type']['terms']:
            logger.error("Found {0} bad events of _type '{1}' missing '{2}' field".format(
                bucket['count'], bucket['key'], required_field
            ))
def main(self):
    """Run the locality/factor alert over events since the last execution.

    Builds the 'localities' index on first run, searches events in the
    window [last execution time (or now - configured search_window), now],
    walks the per-username aggregations, then persists the new execution
    time so the next run resumes where this one ended.
    """
    cfg = self._load_config()
    self.factor_pipeline = self._prepare_factor_pipeline(cfg)

    # First run on a fresh cluster: create the localities index, mapping
    # every string field as a non-analyzed keyword.
    if not self.es.index_exists('localities'):
        settings = {
            'mappings': {
                '_doc': {
                    'dynamic_templates': [
                        {
                            'string_fields': {
                                'mapping': {
                                    'type': 'keyword'
                                },
                                'match': '*',
                                'match_mapping_type': 'string'
                            }
                        },
                    ]
                }
            }
        }
        self.es.create_index('localities', settings)

    # Resume from the last recorded execution time; if none exists, fall
    # back to a window of cfg.events.search_window before now.
    last_execution_record = execution.load(self.es)(_EXEC_INDEX)

    if last_execution_record is None:
        cfg_offset = timedelta(**cfg.events.search_window)
        range_start = toUTC(datetime.now()) - cfg_offset
    else:
        range_start = last_execution_record.state.execution_time

    range_end = toUTC(datetime.now())

    query = SearchQuery()
    query.add_must(RangeMatch('receivedtimestamp', range_start, range_end))
    query.add_must(QSMatch(cfg.events.lucene_query))

    # Ignore empty usernames
    query.add_must_not(TermMatch(USERNAME_PATH, ''))

    # Ignore whitelisted usernames
    for whitelisted_username in cfg.whitelist.users:
        query.add_must_not(TermMatch(USERNAME_PATH, whitelisted_username))

    # Ignore whitelisted subnets
    for whitelisted_subnet in cfg.whitelist.cidrs:
        query.add_must_not(
            SubnetMatch('details.sourceipaddress', whitelisted_subnet))

    self.filtersManual(query)
    self.searchEventsAggregated(USERNAME_PATH, samplesLimit=1000)
    self.walkAggregations(threshold=1, config=cfg)

    # Record range_end as the new execution time, preserving the existing
    # record's identifier when one exists.
    if last_execution_record is None:
        updated_exec = execution.Record.new(
            execution.ExecutionState.new(range_end))
    else:
        updated_exec = execution.Record(
            last_execution_record.identifier,
            execution.ExecutionState.new(range_end))

    execution.store(self.es)(updated_exec, _EXEC_INDEX)