def test_get_results(self):
    parser = elastic.Elastic()
    parser.search.scan = lambda: None
    parser.add_filters({'pif': 'paf'})
    filters = parser.get_filters()
    parser.get_results()
    # Fetching results must not alter the active filters.
    self.assertEqual(parser.get_filters(), filters)
def main():
    args = __create_argparser()

    logging.getLogger("elasticsearch").setLevel(logging.ERROR)
    logging.getLogger("urllib3").setLevel(logging.ERROR)
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG, format='%(message)s')
    else:
        logging.basicConfig(level=logging.INFO, format='[+] %(message)s')

    if args.elastic is True:
        if elastic_imported is False:
            print('You asked for an elastic source, but you do not have the required dependencies.')
            return
        source = elastic.Elastic()
    elif args.flat_file:
        source = flat_file.FlatFile(args.flat_file)
    elif args.stdin is True:
        source = flat_file.FlatFile()
    else:
        print('Please give me a valid source (or try to relaunch me with `-h` if you are lost).')
        return 1

    # Filtering can be used for any operation
    __filter(source, args.filter, regexp=False, hostname=args.hostname)
    if args.filter_regexp:
        __filter(source, args.filter_regexp, regexp=True, hostname=args.hostname)

    if args.stats:
        printers.print_statistics(source.get_statistics())
    elif args.whitelist:
        whitelist = list()
        for module in WL_MODULES:
            rules = module.generate_whitelist(source, whitelist)
            whitelist.extend(rules)
            __whitelist_from_rules(source, rules)
        if whitelist:
            print('\n\033[1mGenerated whitelists:\033[0m')
            print('\t' + ';\n\t'.join(map(nxapi_whitelist.dict_to_str, whitelist)) + ';')
        else:
            print('\n\033[1mnxtool was not able to generate a meaningful whitelist\033[0m')
    elif args.typing:
        printers.print_typed_rules(typing.typification(source))
    else:
        print(printers.print_generic(source.get_results()))
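# A minimal sketch (not part of the repository) of driving the same
# source -> filter -> printer flow as main() above, without the argparse layer.
# The import paths (nxtool.log_providers.flat_file, nxtool.printers) and the
# add_filters() call on the flat-file provider are assumptions inferred from
# the tests in this file, not verified against the package layout.
from nxtool.log_providers import flat_file
from nxtool import printers

# Build a flat-file source from the example log, narrow it to one URI, and
# print the matching results, mirroring the last branch of main().
source = flat_file.FlatFile('./tests/data/exlog.txt')
source.add_filters({'uri': '/'})
print(printers.print_generic(source.get_results()))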
def test_elastic_import(self):
    dest = elastic.Elastic()
    source = flat_file.FlatFile('./tests/data/exlog.txt')
    # Index every log entry from the flat file into Elasticsearch.
    for log in source.logs:
        dest.insert([log])
    dest.stop()
    dest.initialize_search()
    dest.minimum_occurences = 0
    dest.percentage = 0
    # Give Elasticsearch a few seconds to refresh the index before querying it.
    time.sleep(5)
    self.assertEqual(dest.get_relevant_ids(['id']), {u'1302', u'42000227'})
    self.assertEqual(dest.get_top('id'), {1302: 3, 42000227: 1})
    self.assertEqual(dest.get_top('uri'), {
        u'/': 3,
        u'/phpMyAdmin-2.8.2/scripts/setup.php': 1
    })
    # Clean up the test index, ignoring "not found" errors.
    dest.client.indices.delete(index=dest.index, ignore=[400, 404])
def test_export(self):
    parser = elastic.Elastic()
    search = parser.export_search()
    parser.import_search(parser.export_search())
    self.assertEqual(search, parser.export_search())
def test_add_filters(self):
    # A single scalar value becomes a plain multi_match query.
    parser = elastic.Elastic()
    parser.add_filters({'pif': 'paf'})
    self.assertEqual(parser.get_filters(), {
        'query': {'multi_match': {'query': 'paf', 'fields': ['pif']}},
        'size': 10000
    })

    # A list of values becomes a bool/must wrapping a bool/should of multi_match clauses.
    parser = elastic.Elastic()
    parser.add_filters({'pif': ['paf', 'pouf']})
    self.assertEqual(parser.get_filters(), {
        'query': {'bool': {'must': [{'bool': {'should': [
            {'multi_match': {'fields': ['pif'], 'query': 'paf'}},
            {'multi_match': {'fields': ['pif'], 'query': 'pouf'}},
        ]}}]}},
        'size': 10000
    })

    # Empty collections are ignored and leave the default match_all query,
    # whether the filter is positive or negative.
    parser = elastic.Elastic()
    parser.add_filters({'pif': []})
    self.assertEqual(parser.get_filters(),
                     {'query': {'match_all': {}}, 'size': 10000})

    parser = elastic.Elastic()
    parser.add_filters({'pif': []}, negative=True)
    self.assertEqual(parser.get_filters(),
                     {'query': {'match_all': {}}, 'size': 10000})

    parser = elastic.Elastic()
    parser.add_filters({'pif': set()}, negative=True)
    self.assertEqual(parser.get_filters(),
                     {'query': {'match_all': {}}, 'size': 10000})

    # Negative filters end up in a bool/must_not clause.
    parser = elastic.Elastic()
    parser.add_filters({'pif': [1, ]}, negative=True)
    self.assertEqual(parser.get_filters(), {
        'query': {'bool': {'must_not': [
            {'multi_match': {'fields': ['pif'], 'query': 1}}
        ]}},
        'size': 10000
    })

    parser = elastic.Elastic()
    parser.add_filters({'pif': 'paf'}, negative=True)
    self.assertEqual(parser.get_filters(), {
        'query': {'bool': {'must_not': [
            {'multi_match': {'fields': ['pif'], 'query': 'paf'}}
        ]}},
        'size': 10000
    })
def test_reset_filters(self):
    parser = elastic.Elastic()
    search = parser.get_filters()
    parser.add_filters({'pif': 'paf'})
    parser.reset_filters()
    self.assertEqual(parser.get_filters(), search)