@click.option('--stdout-flags', callback=comma_separated_files, default=','.join(STDOUT_FLAGS),
              help='Return only in stdout the urls of these flags')
@click.option('--progress-enabled/--progress-disabled', default=None)
@click.option('--timeout', default=10)
@click.option('--max-depth', default=3,
              help='Maximum links to follow without increasing directories depth')
@click.option('--not-follow-subdomains', is_flag=True, help='The subdomains will be ignored')
@click.option('--exclude-sources', callback=comma_separated_files,
              help='Exclude source engines. Possible options: {}'.format(
                  ', '.join([get_source_name(src) for src in SOURCE_CLASSES])))
@click.option('--not-allow-redirects', is_flag=True, help='Redirectors will not be followed')
@click.option('--version', is_flag=True, callback=print_version, expose_value=False, is_eager=True)
def hunt(urls, threads, exclude_flags, include_flags, interesting_extensions, interesting_files,
         stdout_flags, progress_enabled, timeout, max_depth, not_follow_subdomains, exclude_sources,
         not_allow_redirects):
    """Find web directories without bruteforce"""
    if exclude_flags and include_flags:
        raise BadOptionUsage(
def get_crawler(self, **kwargs):
    return Crawler(
        interesting_extensions=['php'],
        interesting_files=['error_log'],
        exclude_sources=[get_source_name(src) for src in SOURCE_CLASSES],
        **kwargs)
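
# For context, a hedged sketch of how a test case could exercise the helper
# above. The dirhunt import paths and the assumption that Crawler keeps the
# keyword arguments it receives as attributes are NOT confirmed by this
# excerpt; treat them as illustrative only.
import unittest

from dirhunt.crawler import Crawler  # assumed import path
from dirhunt.sources import SOURCE_CLASSES, get_source_name  # assumed import path


class CrawlerHelperExample(unittest.TestCase):
    def get_crawler(self, **kwargs):
        # Same helper as above: fixed defaults that individual tests can override.
        return Crawler(
            interesting_extensions=['php'],
            interesting_files=['error_log'],
            exclude_sources=[get_source_name(src) for src in SOURCE_CLASSES],
            **kwargs)

    def test_override_interesting_files(self):
        # Assumes Crawler stores its keyword arguments as attributes.
        crawler = self.get_crawler(interesting_files=['access_log'])
        self.assertEqual(crawler.interesting_files, ['access_log'])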
              help='Exclude results with these flags. See documentation.')
@click.option('-i', '--include-flags', callback=comma_separated_files,
              help='Only include results with these flags. See documentation.')
@click.option('-e', '--interesting-extensions', callback=comma_separated_files, default=','.join(INTERESTING_EXTS),
              help='The files found with these extensions are interesting')
@click.option('-f', '--interesting-files', callback=comma_separated_files, default=','.join(INTERESTING_FILES),
              help='The files with these names are interesting')
@click.option('--stdout-flags', callback=comma_separated_files, default=','.join(STDOUT_FLAGS),
              help='Return only in stdout the urls of these flags')
@click.option('--progress-enabled/--progress-disabled', default=None)
@click.option('--timeout', default=10)
@click.option('--max-depth', default=3,
              help='Maximum links to follow without increasing directories depth')
@click.option('--not-follow-subdomains', is_flag=True, help='The subdomains will be ignored')
@click.option('--exclude-sources', callback=comma_separated_files,
              help='Exclude source engines. Possible options: {}'.format(
                  ', '.join([get_source_name(src) for src in SOURCE_CLASSES])))
@click.option('-p', '--proxies', callback=comma_separated_files,
              help='Set one or more proxies to alternate between them')
@click.option('-d', '--delay', default=0, type=float,
              help='Delay between requests to avoid bans by the server')
@click.option('--not-allow-redirects', is_flag=True, help='Redirectors will not be followed')
@click.option('--limit', type=int, default=1000,
              help='Max number of pages processed to search for directories.')
@click.option('--to-file', type=Path(writable=True), default=None,
              help='Create a report file in JSON.')
@click.option('--version', is_flag=True, callback=print_version, expose_value=False, is_eager=True)
def hunt(urls, threads, exclude_flags, include_flags, interesting_extensions, interesting_files,
         stdout_flags, progress_enabled, timeout, max_depth, not_follow_subdomains, exclude_sources,
         proxies, delay, not_allow_redirects, limit, to_file):
    """Find web directories without bruteforce"""
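
# comma_separated_files is used as a Click callback throughout the options
# above but is not defined in this excerpt. Below is a minimal, hypothetical
# stand-in that only splits a comma-separated value into a list; the real
# callback may do more (e.g. read values from files, as its name suggests),
# so treat this purely as an illustration of the Click callback signature
# (ctx, param, value) and of how the list reaches the command function.
import click


def comma_separated(ctx, param, value):
    # Hypothetical helper: turn "php,zip,sh" into ['php', 'zip', 'sh'].
    if not value:
        return []
    return [item.strip() for item in value.split(',') if item.strip()]


@click.command()
@click.option('-e', '--interesting-extensions', callback=comma_separated,
              default='php,zip,sh', help='Comma separated list of extensions')
def demo(interesting_extensions):
    # By the time the function runs, the callback has already converted the
    # raw option string into a Python list.
    click.echo(', '.join(interesting_extensions))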