def esLdapResults(begindateUTC=None, enddateUTC=None):
    '''an ES query/facet to count success/failed logins'''
    resultsList = list()
    if begindateUTC is None:
        begindateUTC = datetime.now() - timedelta(hours=1)
    begindateUTC = toUTC(begindateUTC)
    if enddateUTC is None:
        enddateUTC = datetime.now()
    enddateUTC = toUTC(enddateUTC)
    try:
        es_client = ElasticsearchClient(
            list('{0}'.format(s) for s in options.esservers))

        # Failed LDAP binds in the time window, aggregated by result and dn.
        search_query = SearchQuery()
        range_match = RangeMatch('utctimestamp', begindateUTC, enddateUTC)
        search_query.add_must(range_match)
        search_query.add_must(TermMatch('tags', 'ldap'))
        search_query.add_must(
            TermMatch('details.result', 'LDAP_INVALID_CREDENTIALS'))
        search_query.add_aggregation(Aggregation('details.result'))
        search_query.add_aggregation(Aggregation('details.dn'))

        results = search_query.execute(es_client, indices=['events'])

        stoplist = ('o', 'mozilla', 'dc', 'com', 'mozilla.com',
                    'mozillafoundation.org', 'org', 'mozillafoundation')

        for t in results['aggregations']['details.dn']['terms']:
            if t['key'] in stoplist:
                continue

            failures = 0
            success = 0
            dn = t['key']

            # Per-dn breakdown of successful vs. failed bind attempts.
            details_query = SearchQuery()
            details_query.add_must(range_match)
            details_query.add_must(TermMatch('tags', 'ldap'))
            details_query.add_must(TermMatch('details.dn', dn))
            details_query.add_aggregation(Aggregation('details.result'))

            details_results = details_query.execute(es_client)

            for result_term in details_results['aggregations']['details.result']['terms']:
                if result_term['key'].upper() == 'LDAP_SUCCESS':
                    success = result_term['count']
                if result_term['key'].upper() == 'LDAP_INVALID_CREDENTIALS':
                    failures = result_term['count']

            resultsList.append(
                dict(dn=dn,
                     failures=failures,
                     success=success,
                     begin=begindateUTC.isoformat(),
                     end=enddateUTC.isoformat()))

        return json.dumps(resultsList)
    except Exception as e:
        sys.stderr.write('Error trying to get ldap results: {0}\n'.format(e))
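# A minimal usage sketch, assuming esLdapResults() and its module-level helpers
# (options.esservers, the query classes, json, sys) are already set up as above.
# It only illustrates the shape of the returned JSON: a list of per-dn dicts
# with failures, success, begin, and end keys. The function returns None if the
# Elasticsearch query raises, so the caller guards for that. The caller itself
# is hypothetical and not part of the original code.
def print_ldap_summary():
    raw = esLdapResults()
    if raw is None:
        return
    for entry in json.loads(raw):
        print('{dn}: {failures} failed / {success} successful binds '
              'between {begin} and {end}'.format(**entry))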
def main(self):
    superquery = None
    run = 0

    # OR together a PhraseMatch for every configured user.
    for user in self._config['users']:
        if run == 0:
            superquery = PhraseMatch('summary', user)
        else:
            superquery |= PhraseMatch('summary', user)
        run += 1

    # Expected scan window (05:50-06:00 today); sessions inside it are excluded.
    r1 = datetime.datetime.now().replace(hour=5, minute=50, second=0).isoformat()
    r2 = datetime.datetime.now().replace(hour=6, minute=0, second=0).isoformat()

    search_query = SearchQuery(minutes=5)

    search_query.add_must([
        TermMatch('_type', 'event'),
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('summary:"session opened"'),
    ])

    search_query.add_must_not([RangeMatch('utctimestamp', r1, r2)])

    search_query.add_must(superquery)

    self.filtersManual(search_query)
    self.searchEventsAggregated('details.hostname', samplesLimit=10)
    self.walkAggregations(threshold=1)
def query_tests(self):
    begin_date = "2016-08-12T21:07:12.316450+00:00"
    end_date = "2016-08-13T21:07:12.316450+00:00"
    tests = {
        RangeMatch('utctimestamp', begin_date, end_date): [
            {'utctimestamp': '2016-08-12T21:07:11.316450+00:00'},
            {'utctimestamp': '2016-08-13T21:07:13.316450+00:00'},
        ],
    }
    return tests
def main(self):
    superquery = None
    run = 0

    # OR together a PhraseMatch for every configured user.
    for user in self._config['users'].values():
        if run == 0:
            superquery = PhraseMatch('summary', user)
        else:
            superquery |= PhraseMatch('summary', user)
        run += 1

    search_query = SearchQuery(minutes=10)

    search_query.add_must([
        TermMatch('_type', 'event'),
        TermMatch('category', 'syslog'),
        TermMatch('details.program', 'sshd'),
        QueryStringMatch('summary:"session opened"'),
    ])

    # Exclude every configured expected-scan window (built on today's date).
    for expectedtime in self._config['scan_expected'].values():
        r1 = datetime.datetime.now().replace(
            hour=int(expectedtime['start_hour']),
            minute=int(expectedtime['start_minute']),
            second=int(expectedtime['start_second'])).isoformat()
        r2 = datetime.datetime.now().replace(
            hour=int(expectedtime['end_hour']),
            minute=int(expectedtime['end_minute']),
            second=int(expectedtime['end_second'])).isoformat()
        search_query.add_must_not([RangeMatch('utctimestamp', r1, r2)])

    search_query.add_must(superquery)

    self.filtersManual(search_query)
    self.searchEventsAggregated('details.program', samplesLimit=10)
    self.walkAggregations(threshold=1)
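# A sketch of the configuration shape the alert above appears to expect,
# inferred only from the keys main() reads from self._config: 'users' as a
# mapping whose values are summary substrings to match, and 'scan_expected' as
# a mapping of named windows with hour/minute/second bounds. The labels and
# values below are illustrative, not taken from any real deployment.
example_config = {
    'users': {
        'user1': 'serviceaccount@example.com',
        'user2': 'scanner@example.com',
    },
    'scan_expected': {
        'nightly': {
            'start_hour': '5', 'start_minute': '50', 'start_second': '0',
            'end_hour': '6', 'end_minute': '0', 'end_second': '0',
        },
    },
}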