def hunt(urls, threads, exclude_flags, interesting_extensions, interesting_files, stdout_flags,
         progress_enabled):
    """
    :param int threads:
    :type exclude_flags: list
    """
    # Expand status-code ranges such as "400-404" into individual codes.
    for code in tuple(exclude_flags):
        match = re.match(r'^(\d{3})-(\d{3})$', code)
        if match:
            exclude_flags.remove(code)
            exclude_flags += list(map(str, status_code_range(*map(int, match.groups()))))
    # Only enable the progress bar on a terminal, unless explicitly overridden.
    progress_enabled = (sys.stdout.isatty() or sys.stderr.isatty()) \
        if progress_enabled is None else progress_enabled
    crawler = Crawler(max_workers=threads, interesting_extensions=interesting_extensions,
                      interesting_files=interesting_files,
                      echo=print if sys.stdout.isatty() else eprint,
                      progress_enabled=progress_enabled)
    crawler.add_init_urls(*urls)
    try:
        catch_keyboard_interrupt(crawler.print_results, crawler.restart)(set(exclude_flags))
    except SystemExit:
        crawler.close()
    # When stdout is piped, emit the matching urls so other tools can consume them.
    if not sys.stdout.isatty():
        output_urls(crawler, stdout_flags)
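# The expansion above relies on a status_code_range() helper to turn a flag such as
# "400-404" into individual status codes. A minimal sketch of that helper, assuming an
# inclusive range of integers (the real dirhunt implementation may differ):
def status_code_range(start, end):
    """Return every status code between start and end, both included (assumed behaviour)."""
    return list(range(start, end + 1))

# Hypothetical example mirroring the expansion in hunt():
# status_code_range(301, 303) -> [301, 302, 303]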
def test_keyboard_interrupt(self):
    m = Mock(side_effect=KeyboardInterrupt)
    with patch('dirhunt.utils.confirm_close', side_effect=KeyboardInterrupt) as mock_confirm_close:
        with self.assertRaises(KeyboardInterrupt):
            catch_keyboard_interrupt(m)()
        mock_confirm_close.assert_called_once()
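# A hedged sketch of the behaviour this test exercises: catch_keyboard_interrupt() wraps a
# callable so that Ctrl+C triggers confirm_close() instead of aborting outright, optionally
# calling a restart callback before retrying. The control flow below is an assumption inferred
# from the test and from the usage in hunt(); the shipped dirhunt code may differ.
from dirhunt.utils import confirm_close  # the same helper the test patches


def catch_keyboard_interrupt_sketch(fn, restart=None):
    def wrapper(*args, **kwargs):
        while True:
            try:
                return fn(*args, **kwargs)
            except KeyboardInterrupt:
                # confirm_close() is expected to raise when the user really wants to quit;
                # whatever it raises propagates, which is what test_keyboard_interrupt asserts.
                confirm_close()
                if restart is not None:
                    restart()
    return wrapper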
def hunt(urls, threads, exclude_flags, include_flags, interesting_extensions, interesting_files,
         stdout_flags, progress_enabled, timeout, max_depth, not_follow_subdomains, exclude_sources,
         proxies, delay, not_allow_redirects):
    """Find web directories without bruteforce."""
    if exclude_flags and include_flags:
        raise BadOptionUsage('--exclude-flags and --include-flags are mutually exclusive.')
    welcome()
    if not urls:
        click.echo('•_•) OOPS! Add urls to analyze.\nFor example: dirhunt http://domain/path\n\n'
                   'Need help? Then use dirhunt --help', err=True)
        return
    exclude_flags, include_flags = flags_range(exclude_flags), flags_range(include_flags)
    progress_enabled = (sys.stdout.isatty() or sys.stderr.isatty()) \
        if progress_enabled is None else progress_enabled
    crawler = Crawler(max_workers=threads, interesting_extensions=interesting_extensions,
                      interesting_files=interesting_files,
                      std=sys.stdout if sys.stdout.isatty() else sys.stderr,
                      progress_enabled=progress_enabled, timeout=timeout, depth=max_depth,
                      not_follow_subdomains=not_follow_subdomains, exclude_sources=exclude_sources,
                      not_allow_redirects=not_allow_redirects, proxies=proxies, delay=delay)
    crawler.add_init_urls(*urls)
    try:
        catch_keyboard_interrupt(crawler.print_results, crawler.restart)(set(exclude_flags),
                                                                         set(include_flags))
    except SystemExit:
        crawler.close()
    crawler.print_urls_info()
    if not sys.stdout.isatty():
        output_urls(crawler, stdout_flags)
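# flags_range() replaces the inline expansion the earlier hunt() performed itself: any flag
# written as "NNN-NNN" is swapped for the individual status codes it covers, as strings.
# A minimal sketch under that assumption, reusing the regex from the earlier version (the
# real helper may differ):
import re


def flags_range_sketch(flags):
    flags = list(flags)
    for code in tuple(flags):
        match = re.match(r'^(\d{3})-(\d{3})$', code)
        if match:
            flags.remove(code)
            start, end = map(int, match.groups())
            flags += [str(c) for c in range(start, end + 1)]
    return flags

# Hypothetical example: flags_range_sketch(['200', '301-303']) -> ['200', '301', '302', '303']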