def main(self):
    # OR together a PhraseMatch on the summary for every configured user
    superquery = None
    run = 0
    for user in self._config['users'].values():
        if run == 0:
            superquery = PhraseMatch('summary', user)
        else:
            superquery |= PhraseMatch('summary', user)
        run += 1

    search_query = SearchQuery(minutes=10)
    search_query.add_must([
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('summary:"session opened"')
    ])

    # exclude sessions that fall inside an expected scan window
    for expectedtime in self._config['scan_expected'].values():
        r1 = datetime.datetime.now().replace(
            hour=int(expectedtime['start_hour']),
            minute=int(expectedtime['start_minute']),
            second=int(expectedtime['start_second'])).isoformat()
        r2 = datetime.datetime.now().replace(
            hour=int(expectedtime['end_hour']),
            minute=int(expectedtime['end_minute']),
            second=int(expectedtime['end_second'])).isoformat()
        search_query.add_must_not([
            RangeMatch('utctimestamp', r1, r2)
        ])

    search_query.add_must(superquery)

    self.filtersManual(search_query)
    self.searchEventsAggregated('details.program', samplesLimit=10)
    self.walkAggregations(threshold=1)
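# A minimal sketch of the config shape main() above appears to expect: the section
# names ('users', 'scan_expected') and the '*_hour'/'*_minute'/'*_second' keys come
# from the lookups in the code, but the concrete values here are illustrative
# assumptions, not a real deployment config.
_EXAMPLE_CONFIG = {
    'users': {
        # value is matched against the event summary via PhraseMatch
        'user1': 'scanuser@example.com',
    },
    'scan_expected': {
        # window excluded from alerting via RangeMatch in add_must_not
        'nightly': {
            'start_hour': '2', 'start_minute': '0', 'start_second': '0',
            'end_hour': '3', 'end_minute': '30', 'end_second': '0',
        },
    },
}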
def esLdapResults(begindateUTC=None, enddateUTC=None):
    '''an ES query/facet to count successful/failed LDAP logins'''
    resultsList = list()
    if begindateUTC is None:
        begindateUTC = datetime.now() - timedelta(hours=1)
        begindateUTC = toUTC(begindateUTC)
    if enddateUTC is None:
        enddateUTC = datetime.now()
        enddateUTC = toUTC(enddateUTC)
    try:
        es_client = ElasticsearchClient(list('{0}'.format(s) for s in options.esservers))

        # tally the DNs that produced invalid-credential failures in the window
        search_query = SearchQuery()
        range_match = RangeMatch('utctimestamp', begindateUTC, enddateUTC)
        search_query.add_must(range_match)
        search_query.add_must(TermMatch('tags', 'ldap'))
        search_query.add_must(TermMatch('details.result', 'LDAP_INVALID_CREDENTIALS'))
        search_query.add_aggregation(Aggregation('details.result'))
        search_query.add_aggregation(Aggregation('details.dn'))

        results = search_query.execute(es_client, indices=['events'])

        # DN components and domains to ignore in the aggregation
        stoplist = ('o', 'mozilla', 'dc', 'com', 'mozilla.com',
                    'mozillafoundation.org', 'org', 'mozillafoundation')

        for t in results['aggregations']['details.dn']['terms']:
            if t['key'] in stoplist:
                continue

            failures = 0
            success = 0
            dn = t['key']

            # per-DN breakdown of success vs. failure results
            details_query = SearchQuery()
            details_query.add_must(range_match)
            details_query.add_must(TermMatch('tags', 'ldap'))
            details_query.add_must(TermMatch('details.dn', dn))
            details_query.add_aggregation(Aggregation('details.result'))

            details_results = details_query.execute(es_client)

            for details_term in details_results['aggregations']['details.result']['terms']:
                if details_term['key'].upper() == 'LDAP_SUCCESS':
                    success = details_term['count']
                if details_term['key'].upper() == 'LDAP_INVALID_CREDENTIALS':
                    failures = details_term['count']

            resultsList.append(
                dict(
                    dn=dn,
                    failures=failures,
                    success=success,
                    begin=begindateUTC.isoformat(),
                    end=enddateUTC.isoformat()
                )
            )

        return json.dumps(resultsList)
    except Exception as e:
        sys.stderr.write('Error trying to get ldap results: {0}\n'.format(e))
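# A minimal usage sketch (a hypothetical helper, not part of the original module),
# assuming options.esservers is already configured; it simply prints the fields
# packed into resultsList above.
def print_ldap_summary():
    raw = esLdapResults()      # defaults to the last hour
    if raw is not None:        # esLdapResults() returns None when the query fails
        for entry in json.loads(raw):
            print('{dn}: {failures} failed / {success} ok ({begin} - {end})'.format(**entry))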
def query_tests(self):
    begin_date = "2016-08-12T21:07:12.316450+00:00"
    end_date = "2016-08-13T21:07:12.316450+00:00"
    tests = {
        RangeMatch('utctimestamp', begin_date, end_date): [
            # both events fall one second outside the [begin_date, end_date] window
            {'utctimestamp': '2016-08-12T21:07:11.316450+00:00'},
            {'utctimestamp': '2016-08-13T21:07:13.316450+00:00'},
        ],
    }
    return tests
def main(self):
    cfg = self._load_config()
    self.factor_pipeline = self._prepare_factor_pipeline(cfg)

    if not self.es.index_exists('localities'):
        settings = {
            'mappings': {
                '_doc': {
                    'dynamic_templates': [
                        {
                            'string_fields': {
                                'mapping': {
                                    'type': 'keyword'
                                },
                                'match': '*',
                                'match_mapping_type': 'string'
                            }
                        },
                    ]
                }
            }
        }
        self.es.create_index('localities', settings)

    last_execution_record = execution.load(self.es)(_EXEC_INDEX)

    if last_execution_record is None:
        cfg_offset = timedelta(**cfg.events.search_window)
        range_start = toUTC(datetime.now()) - cfg_offset
    else:
        range_start = last_execution_record.state.execution_time

    range_end = toUTC(datetime.now())

    query = SearchQuery()
    query.add_must(RangeMatch('receivedtimestamp', range_start, range_end))
    query.add_must(QSMatch(cfg.events.lucene_query))

    # Ignore empty usernames
    query.add_must_not(TermMatch(USERNAME_PATH, ''))

    # Ignore whitelisted usernames
    for whitelisted_username in cfg.whitelist.users:
        query.add_must_not(TermMatch(USERNAME_PATH, whitelisted_username))

    # Ignore whitelisted subnets
    for whitelisted_subnet in cfg.whitelist.cidrs:
        query.add_must_not(
            SubnetMatch('details.sourceipaddress', whitelisted_subnet))

    self.filtersManual(query)
    self.searchEventsAggregated(USERNAME_PATH, samplesLimit=1000)
    self.walkAggregations(threshold=1, config=cfg)

    if last_execution_record is None:
        updated_exec = execution.Record.new(
            execution.ExecutionState.new(range_end))
    else:
        updated_exec = execution.Record(
            last_execution_record.identifier,
            execution.ExecutionState.new(range_end))

    execution.store(self.es)(updated_exec, _EXEC_INDEX)
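# A minimal sketch of the configuration object main() above reads, assuming an
# attribute-style config; the field names follow the accesses in the code
# (cfg.events.search_window, cfg.events.lucene_query, cfg.whitelist.users,
# cfg.whitelist.cidrs), while the concrete values are illustrative assumptions.
from types import SimpleNamespace

_EXAMPLE_CFG = SimpleNamespace(
    events=SimpleNamespace(
        search_window={'minutes': 30},            # expanded via timedelta(**...)
        lucene_query='category:authentication',   # fed to QSMatch
    ),
    whitelist=SimpleNamespace(
        users=['serviceaccount'],                 # excluded via TermMatch must_not
        cidrs=['10.0.0.0/8'],                     # excluded via SubnetMatch must_not
    ),
)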
def onMessage(self, request, response):
    '''
    request: http://bottlepy.org/docs/dev/api.html#the-request-object
    response: http://bottlepy.org/docs/dev/api.html#the-response-object
    '''
    # an ES query/facet to count success/failed logins
    # oriented to the data having
    #   category: authentication
    #   details.success marked true/false for success/failed auth
    #   details.username as the user
    begindateUTC = None
    enddateUTC = None
    resultsList = list()
    if begindateUTC is None:
        begindateUTC = datetime.now() - timedelta(hours=12)
        begindateUTC = toUTC(begindateUTC)
    if enddateUTC is None:
        enddateUTC = datetime.now()
        enddateUTC = toUTC(enddateUTC)

    es_client = ElasticsearchClient(
        list('{0}'.format(s) for s in self.restoptions['esservers']))
    search_query = SearchQuery()

    # a query to tally users with failed logins
    date_range_match = RangeMatch('utctimestamp', begindateUTC, enddateUTC)
    search_query.add_must(date_range_match)
    search_query.add_must(PhraseMatch('category', 'authentication'))
    search_query.add_must(PhraseMatch('details.success', 'false'))
    search_query.add_must(ExistsMatch('details.username'))
    search_query.add_aggregation(Aggregation('details.success'))
    search_query.add_aggregation(Aggregation('details.username'))

    results = search_query.execute(es_client, indices=['events', 'events-previous'])

    # any usernames or words to ignore
    # especially useful if ES is analyzing the username field and breaking
    # user@somewhere.com apart into 'user', 'somewhere' and '.com'
    stoplist = self.options.ignoreusernames.split(',')

    # walk the aggregated failed usernames and look up their successes/failures
    for t in results['aggregations']['details.username']['terms']:
        if t['key'] in stoplist:
            continue

        failures = 0
        success = 0
        username = t['key']

        details_query = SearchQuery()
        details_query.add_must(date_range_match)
        details_query.add_must(PhraseMatch('category', 'authentication'))
        details_query.add_must(PhraseMatch('details.username', username))
        details_query.add_aggregation(Aggregation('details.success'))

        details_results = details_query.execute(es_client)

        # details.success is boolean; as an aggregation key it appears as an int (0/1)
        for details_term in details_results['aggregations']['details.success']['terms']:
            if details_term['key'] == 1:
                success = details_term['count']
            if details_term['key'] == 0:
                failures = details_term['count']

        resultsList.append(
            dict(
                username=username,
                failures=failures,
                success=success,
                begin=begindateUTC.isoformat(),
                end=enddateUTC.isoformat()
            )
        )

    response.body = json.dumps(resultsList)
    response.status = 200
    return (request, response)
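# An illustrative event document of the shape the aggregation above assumes
# (category 'authentication', boolean details.success, details.username); the
# values are hypothetical and only show the fields this handler reads.
_EXAMPLE_AUTH_EVENT = {
    'category': 'authentication',
    'utctimestamp': '2016-08-12T21:07:12.316450+00:00',
    'details': {
        'success': False,                    # aggregated as key 0 (failure) / 1 (success)
        'username': 'someuser@example.com',  # the field tallied per user
    },
}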