def setup_reports(self):
    """Resolve where the report is written and build the report manager.

    A user-supplied ``output_file`` option wins outright. Otherwise the
    name is derived from the target queue: multiple targets share one
    BATCH report, a single target gets a per-host, timestamped file under
    the reports directory. An existing file is never clobbered — a free
    numeric ``_<n>`` suffix is appended instead.
    """
    if self.options["output_file"]:
        report_file = FileUtils.get_abs_path(self.options["output_file"])
        self.output.output_file(report_file)
    else:
        if self.targets.qsize() > 1:
            # Several targets: one combined batch report.
            self.setup_batch_reports()
            base_name = "BATCH" + self.get_output_extension()
            report_dir = self.batch_directory_path
        else:
            # Single target: "<path>_<timestamp><ext>" inside a folder
            # named after the host.
            parsed = urlparse(self.targets.queue[0])
            base_name = "{}_".format(parsed.path)
            base_name += time.strftime("%y-%m-%d_%H-%M-%S")
            base_name += self.get_output_extension()
            report_dir = FileUtils.build_path(
                self.report_path, get_valid_filename(parsed.netloc)
            )

        report_file = FileUtils.build_path(
            report_dir, get_valid_filename(base_name)
        )

        # De-duplicate: probe "_2", "_3", ... until a free name is found.
        if FileUtils.exists(report_file):
            suffix = 2
            while FileUtils.exists(report_file + "_" + str(suffix)):
                suffix += 1
            report_file += "_" + str(suffix)

        if not FileUtils.exists(report_dir):
            FileUtils.create_directory(report_dir)
            # Creation can still fail (permissions); bail out loudly.
            if not FileUtils.exists(report_dir):
                self.output.error(
                    "Couldn't create the reports folder at {}".format(
                        report_dir))
                exit(1)

        self.output.output_file(report_file)

    # An explicit output format overrides the default plain-text report;
    # an explicit output file path overrides the derived one.
    if self.options["output_format"]:
        self.report_manager = ReportManager(
            self.options["output_format"],
            self.options["output_file"] or report_file)
    else:
        self.report_manager = ReportManager("plain", report_file)
def __init__(self, script_path, arguments, output):
    """Build the scan controller from parsed CLI arguments and run it.

    Sets up headers, dictionaries, log/report paths, then iterates over
    every target URL, fuzzing each one; a failing target is skipped via
    SkipTargetInterrupt, Ctrl-C aborts the whole run.
    """
    global VERSION
    # Render the ASCII banner with the version placeholders filled in.
    program_banner = (
        open(FileUtils.build_path(script_path, "banner.txt"))
        .read()
        .format(**VERSION)
    )

    self.directories = Queue()
    self.script_path = script_path
    self.arguments = arguments
    self.output = output
    self.pass_dirs = ["/"]

    if arguments.raw_file:
        # A raw HTTP request file supplies URL, method, body and headers.
        raw = Raw(arguments.raw_file, arguments.scheme)
        self.url_list = [raw.url]
        self.httpmethod = raw.method
        self.data = raw.body
        self.headers = raw.headers
    else:
        default_headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
            "Accept-Language": "*",
            "Accept-Encoding": "*",
            "Keep-Alive": "timeout=15, max=1000",
            "Cache-Control": "max-age=0",
        }
        self.url_list = arguments.url_list
        self.httpmethod = arguments.httpmethod.lower()
        self.data = arguments.data
        # User-provided headers take precedence over the defaults.
        self.headers = {**default_headers, **arguments.headers}
        if arguments.cookie:
            self.headers["Cookie"] = arguments.cookie
        if arguments.useragent:
            self.headers["User-Agent"] = arguments.useragent

    self.recursion_depth = arguments.recursion_depth

    # Logs directory: user-supplied location if valid, else "<script>/logs"
    # (created on demand).
    if arguments.logs_location and self.validate_path(arguments.logs_location):
        self.logs_path = FileUtils.build_path(arguments.logs_location)
    elif self.validate_path(self.script_path):
        self.logs_path = FileUtils.build_path(self.script_path, "logs")
        if not FileUtils.exists(self.logs_path):
            FileUtils.create_directory(self.logs_path)

    # Reports directory: same scheme as logs, default "<script>/reports".
    if arguments.output_location and self.validate_path(arguments.output_location):
        self.report_path = FileUtils.build_path(arguments.output_location)
    elif self.validate_path(self.script_path):
        self.report_path = FileUtils.build_path(self.script_path, "reports")
        if not FileUtils.exists(self.report_path):
            FileUtils.create_directory(self.report_path)

    self.blacklists = Dictionary.generate_blacklists(arguments.extensions, self.script_path)
    self.extensions = arguments.extensions
    self.prefixes = arguments.prefixes
    self.suffixes = arguments.suffixes
    self.threads_count = arguments.threads_count
    self.output_file = arguments.output_file
    self.output_format = arguments.output_format
    self.include_status_codes = arguments.include_status_codes
    self.exclude_status_codes = arguments.exclude_status_codes
    self.exclude_sizes = arguments.exclude_sizes
    self.exclude_texts = arguments.exclude_texts
    self.exclude_regexps = arguments.exclude_regexps
    self.exclude_redirects = arguments.exclude_redirects
    self.replay_proxy = arguments.replay_proxy
    # NOTE(review): reads self.arguments while siblings use the local
    # `arguments` — same object (assigned above), harmless inconsistency.
    self.recursive = self.arguments.recursive
    self.deep_recursive = arguments.deep_recursive
    self.force_recursive = arguments.force_recursive
    self.recursion_status_codes = arguments.recursion_status_codes
    self.minimum_response_size = arguments.minimum_response_size
    self.maximum_response_size = arguments.maximum_response_size
    self.scan_subdirs = arguments.scan_subdirs
    self.exclude_subdirs = arguments.exclude_subdirs
    self.full_url = arguments.full_url
    self.skip_on_status = arguments.skip_on_status
    self.exit_on_error = arguments.exit_on_error
    self.maxtime = arguments.maxtime

    # Wordlist with all mangling options applied.
    self.dictionary = Dictionary(
        paths=arguments.wordlist,
        extensions=arguments.extensions,
        suffixes=arguments.suffixes,
        prefixes=arguments.prefixes,
        lowercase=arguments.lowercase,
        uppercase=arguments.uppercase,
        capitalization=arguments.capitalization,
        force_extensions=arguments.force_extensions,
        exclude_extensions=arguments.exclude_extensions,
        no_extension=arguments.no_extension,
        only_selected=arguments.only_selected
    )

    # One job per (target URL, scan subdirectory) pair.
    self.jobs_count = len(self.url_list) * (
        len(self.scan_subdirs) if self.scan_subdirs else 1
    )
    self.current_job = 0
    self.error_log = None
    self.error_log_path = None
    self.threads_lock = threading.Lock()
    self.batch = False
    self.batch_session = None
    # Null-object placeholders until real report/timer objects exist.
    self.report_manager = EmptyReportManager()
    self.report = EmptyReport()
    self.timer = EmptyTimer()

    self.output.header(program_banner)
    self.print_config()

    if arguments.use_random_agents:
        self.random_agents = FileUtils.get_lines(
            FileUtils.build_path(script_path, "db", "user-agents.txt")
        )

    if arguments.autosave_report or arguments.output_file:
        self.setup_reports()

    self.setup_error_logs()
    self.output.error_log_file(self.error_log_path)

    # Optional wall-clock limit enforced by a background watchdog thread.
    if self.maxtime:
        threading.Thread(target=self.time_monitor, daemon=True).start()

    try:
        for url in self.url_list:
            try:
                gc.collect()
                url = url if url.endswith("/") else url + "/"
                self.output.set_target(url, arguments.scheme)

                try:
                    self.requester = Requester(
                        url,
                        max_pool=arguments.threads_count,
                        max_retries=arguments.max_retries,
                        timeout=arguments.timeout,
                        ip=arguments.ip,
                        proxy=arguments.proxy,
                        proxylist=arguments.proxylist,
                        redirect=arguments.redirect,
                        request_by_hostname=arguments.request_by_hostname,
                        httpmethod=self.httpmethod,
                        data=self.data,
                        scheme=arguments.scheme,
                    )

                    for key, value in self.headers.items():
                        self.requester.set_header(key, value)

                    if arguments.auth:
                        self.requester.set_auth(arguments.auth_type, arguments.auth)

                    # Test request to see if server is up
                    self.requester.request("")

                    if arguments.autosave_report or arguments.output_file:
                        self.report = Report(self.requester.host, self.requester.port, self.requester.protocol, self.requester.base_path)

                except RequestException as e:
                    # Target unreachable: report the error and move on.
                    self.output.error(e.args[0]["message"])
                    raise SkipTargetInterrupt

                if arguments.use_random_agents:
                    self.requester.set_random_agents(self.random_agents)

                # Initialize directories Queue with start Path
                self.base_path = self.requester.base_path
                self.status_skip = None

                if not self.scan_subdirs:
                    self.directories.put("")

                for subdir in self.scan_subdirs:
                    self.directories.put(subdir)
                    self.pass_dirs.append(subdir)

                match_callbacks = [self.match_callback]
                not_found_callbacks = [self.not_found_callback]
                error_callbacks = [self.error_callback, self.append_error_log]

                self.fuzzer = Fuzzer(
                    self.requester,
                    self.dictionary,
                    suffixes=arguments.suffixes,
                    prefixes=arguments.prefixes,
                    exclude_content=arguments.exclude_content,
                    threads=arguments.threads_count,
                    delay=arguments.delay,
                    maxrate=arguments.maxrate,
                    match_callbacks=match_callbacks,
                    not_found_callbacks=not_found_callbacks,
                    error_callbacks=error_callbacks,
                )

                try:
                    self.prepare()
                except RequestException as e:
                    self.output.error(e.args[0]["message"])
                    raise SkipTargetInterrupt

            except SkipTargetInterrupt:
                # Mark this target done and continue with the next one.
                self.report.completed = True
                continue

    except KeyboardInterrupt:
        self.output.error("\nCanceled by the user")
        exit(0)

    finally:
        # Close the error-log file opened by setup_error_logs().
        self.error_log.close()

    self.output.warning("\nTask Completed")
def __init__(self, options, output):
    """Build the scan controller from an options object and run the scan.

    Prepares headers, dictionary and report/log locations, then iterates
    over every target URL, fuzzing each one. A target that fails its test
    request is skipped via SkipTargetInterrupt; Ctrl-C closes the run
    through self.close().
    """
    self.directories = Queue()
    self.output = output
    self.options = options
    self.pass_dirs = ["/"]

    if options.raw_file:
        # A raw HTTP request file supplies URL, method, body and headers.
        raw = Raw(options.raw_file)
        self.url_list = [raw.url]
        self.httpmethod = raw.method
        self.data = raw.body
        self.headers = raw.headers
    else:
        self.url_list = options.url_list
        self.httpmethod = options.httpmethod
        # FIX: was `options.httpmethod` — a copy-paste bug that sent the
        # HTTP method string as the request body. The raw-file branch uses
        # raw.body; this branch must use the user-supplied body data.
        self.data = options.data
        # User-provided headers take precedence over the defaults.
        self.headers = {**DEFAULT_HEADERS, **options.headers}

        if options.cookie:
            self.headers["Cookie"] = options.cookie
        if options.useragent:
            self.headers["User-Agent"] = options.useragent

    self.random_agents = None
    if options.use_random_agents:
        self.random_agents = FileUtils.get_lines(
            FileUtils.build_path(SCRIPT_PATH, "db", "user-agents.txt"))

    self.blacklists = Dictionary.generate_blacklists(options.extensions)
    # Wordlist with all mangling options applied.
    self.dictionary = Dictionary(
        paths=options.wordlist,
        extensions=options.extensions,
        suffixes=options.suffixes,
        prefixes=options.prefixes,
        lowercase=options.lowercase,
        uppercase=options.uppercase,
        capitalization=options.capitalization,
        force_extensions=options.force_extensions,
        exclude_extensions=options.exclude_extensions,
        no_extension=options.no_extension,
        only_selected=options.only_selected)

    # One job per (target URL, scan subdirectory) pair.
    self.jobs_count = len(self.url_list) * (
        len(options.scan_subdirs) if options.scan_subdirs else 1)
    self.current_job = 0
    self.batch = False
    self.batch_session = None
    self.exit = None
    self.threads_lock = threading.Lock()
    # Null-object placeholders until real report objects exist.
    self.report_manager = EmptyReportManager()
    self.report = EmptyReport()
    self.start_time = time.time()

    self.output.header(BANNER)
    self.print_config()

    if options.autosave_report or options.output_file:
        if options.autosave_report:
            self.report_path = options.output_location or FileUtils.build_path(
                SCRIPT_PATH, "reports")
            self.validate_dir(self.report_path)
        self.setup_reports()

    if options.log_file:
        self.validate_dir(FileUtils.parent(options.log_file))
        FileUtils.create_directory(FileUtils.parent(options.log_file))
        self.output.log_file(FileUtils.get_abs_path(options.log_file))

    try:
        for url in self.url_list:
            try:
                gc.collect()

                try:
                    self.requester = Requester(
                        url if url.endswith("/") else url + "/",
                        max_pool=options.threads_count,
                        max_retries=options.max_retries,
                        timeout=options.timeout,
                        ip=options.ip,
                        proxy=options.proxy,
                        proxylist=options.proxylist,
                        redirect=options.follow_redirects,
                        request_by_hostname=options.request_by_hostname,
                        httpmethod=self.httpmethod,
                        data=self.data,
                        scheme=options.scheme,
                        random_agents=self.random_agents,
                    )
                    self.output.set_target(self.requester.base_url + self.requester.base_path)
                    self.requester.setup()

                    for key, value in self.headers.items():
                        self.requester.set_header(key, value)

                    if options.auth:
                        self.requester.set_auth(options.auth_type, options.auth)

                    # Test request to check if server is up
                    self.requester.request("")
                    self.write_log("Test request sent for: {}".format(
                        self.requester.base_url))

                    if options.autosave_report or options.output_file:
                        self.report = Report(self.requester.host, self.requester.port,
                                             self.requester.scheme, self.requester.base_path)

                except RequestException as e:
                    # Target unreachable: report the error and move on.
                    self.output.error(e.args[0])
                    raise SkipTargetInterrupt

                self.skip = None

                # Seed the directories queue with the scan start paths.
                if not options.scan_subdirs:
                    self.directories.put("")

                for subdir in options.scan_subdirs:
                    self.directories.put(subdir)
                    self.pass_dirs.append(subdir)

                match_callbacks = [self.match_callback, self.append_log]
                not_found_callbacks = [
                    self.not_found_callback, self.append_log
                ]
                error_callbacks = [
                    self.error_callback, self.append_error_log
                ]

                self.fuzzer = Fuzzer(
                    self.requester,
                    self.dictionary,
                    suffixes=options.suffixes,
                    prefixes=options.prefixes,
                    exclude_response=options.exclude_response,
                    threads=options.threads_count,
                    delay=options.delay,
                    maxrate=options.maxrate,
                    match_callbacks=match_callbacks,
                    not_found_callbacks=not_found_callbacks,
                    error_callbacks=error_callbacks,
                )

                try:
                    self.prepare()
                except RequestException as e:
                    self.output.error(e.args[0])
                    raise SkipTargetInterrupt

            except SkipTargetInterrupt:
                # Mark this target done and continue with the next one.
                self.report.completed = True
                continue

    except KeyboardInterrupt:
        self.close("Canceled by the user")

    self.output.warning("\nTask Completed")