def __init__(self, script_path, arguments, output):
    global VERSION
    program_banner = (
        open(FileUtils.build_path(script_path, "lib", "controller", "banner.txt"))
        .read()
        .format(**VERSION)
    )

    self.directories = Queue()
    self.script_path = script_path
    self.exit = False
    self.arguments = arguments
    self.output = output
    self.savePath = self.script_path
    self.doneDirs = []

    if arguments.raw_file:
        # Overwrite python-requests default headers
        default_headers = {
            "User-Agent": None,
            "Accept-Encoding": None,
            "Accept": None,
        }
        _raw = Raw(arguments.raw_file, arguments.scheme)
        self.urlList = [_raw.url()]
        self.httpmethod = _raw.method()
        self.data = _raw.data()
        self.headers = {**default_headers, **_raw.headers()}
        self.cookie = _raw.cookie()
        self.useragent = _raw.user_agent()
    else:
        default_headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
            "Accept-Language": "*",
            "Accept-Encoding": "*",
            "Keep-Alive": "300",
            "Cache-Control": "max-age=0",
        }
        self.urlList = list(filter(None, dict.fromkeys(arguments.urlList)))
        self.httpmethod = arguments.httpmethod.lower()
        self.data = arguments.data
        self.headers = {**default_headers, **arguments.headers}
        self.cookie = arguments.cookie
        self.useragent = arguments.useragent

    self.recursion_depth = arguments.recursion_depth

    if arguments.saveHome:
        savePath = self.getSavePath()

        if not FileUtils.exists(savePath):
            FileUtils.create_directory(savePath)

        if FileUtils.exists(savePath) and not FileUtils.is_dir(savePath):
            self.output.error(
                "Cannot use {} because it's a file. Should be a directory".format(savePath)
            )
            exit(1)

        if not FileUtils.can_write(savePath):
            self.output.error("Directory {} is not writable".format(savePath))
            exit(1)

        logs = FileUtils.build_path(savePath, "logs")
        if not FileUtils.exists(logs):
            FileUtils.create_directory(logs)

        reports = FileUtils.build_path(savePath, "reports")
        if not FileUtils.exists(reports):
            FileUtils.create_directory(reports)

        self.savePath = savePath

    self.reportsPath = FileUtils.build_path(self.savePath, "logs")
    self.blacklists = self.getBlacklists()
    self.includeStatusCodes = arguments.includeStatusCodes
    self.excludeStatusCodes = arguments.excludeStatusCodes
    self.excludeSizes = arguments.excludeSizes
    self.excludeTexts = arguments.excludeTexts
    self.excludeRegexps = arguments.excludeRegexps
    self.excludeRedirects = arguments.excludeRedirects
    self.recursive = arguments.recursive
    self.minimumResponseSize = arguments.minimumResponseSize
    self.maximumResponseSize = arguments.maximumResponseSize
    self.scanSubdirs = arguments.scanSubdirs
    self.excludeSubdirs = arguments.excludeSubdirs if arguments.excludeSubdirs else []
    self.dictionary = Dictionary(
        arguments.wordlist,
        arguments.extensions,
        arguments.suffixes,
        arguments.prefixes,
        arguments.lowercase,
        arguments.uppercase,
        arguments.capitalization,
        arguments.forceExtensions,
        arguments.excludeExtensions,
        arguments.noExtension,
        arguments.onlySelected,
    )
    self.allJobs = len(self.scanSubdirs) if self.scanSubdirs else 1
    self.currentJob = 0
    self.errorLog = None
    self.errorLogPath = None
    self.threadsLock = Lock()
    self.batch = False
    self.batchSession = None
    self.skip429 = False

    self.output.header(program_banner)
    self.printConfig()

    self.setupErrorLogs()
    self.output.errorLogFile(self.errorLogPath)

    if arguments.autoSave and len(self.urlList) > 1:
        self.setupBatchReports()
        self.output.newLine("\nAutoSave path: {0}".format(self.batchDirectoryPath))

    if arguments.useRandomAgents:
        self.randomAgents = FileUtils.get_lines(
            FileUtils.build_path(script_path, "db", "user-agents.txt")
        )

    try:
        for url in self.urlList:
            try:
                gc.collect()
                self.reportManager = ReportManager()
                self.currentUrl = url if url.endswith("/") else url + "/"
                self.output.setTarget(self.currentUrl, self.arguments.scheme)

                try:
                    self.requester = Requester(
                        url,
                        cookie=self.cookie,
                        useragent=self.useragent,
                        maxPool=arguments.threadsCount,
                        maxRetries=arguments.maxRetries,
                        timeout=arguments.timeout,
                        ip=arguments.ip,
                        proxy=arguments.proxy,
                        proxylist=arguments.proxylist,
                        redirect=arguments.redirect,
                        requestByHostname=arguments.requestByHostname,
                        httpmethod=self.httpmethod,
                        data=self.data,
                        scheme=arguments.scheme,
                    )

                    for key, value in self.headers.items():
                        self.requester.setHeader(key, value)

                    self.requester.request("")

                except RequestException as e:
                    self.output.error(e.args[0]["message"])
                    raise SkipTargetInterrupt

                if arguments.useRandomAgents:
                    self.requester.setRandomAgents(self.randomAgents)

                # Initialize directories Queue with start Path
                self.basePath = self.requester.basePath

                if self.scanSubdirs:
                    for subdir in self.scanSubdirs:
                        self.directories.put(subdir)
                else:
                    self.directories.put("")

                self.setupReports(self.requester)

                matchCallbacks = [self.matchCallback]
                notFoundCallbacks = [self.notFoundCallback]
                errorCallbacks = [self.errorCallback, self.appendErrorLog]

                self.fuzzer = Fuzzer(
                    self.requester,
                    self.dictionary,
                    testFailPath=arguments.testFailPath,
                    threads=arguments.threadsCount,
                    delay=arguments.delay,
                    matchCallbacks=matchCallbacks,
                    notFoundCallbacks=notFoundCallbacks,
                    errorCallbacks=errorCallbacks,
                )

                try:
                    self.prepare()
                except RequestException as e:
                    self.output.error("Fatal error during scanning: " + e.args[0]["message"])
                    raise SkipTargetInterrupt

            except SkipTargetInterrupt:
                continue

    except KeyboardInterrupt:
        self.output.error("\nCanceled by the user")
        exit(0)

    finally:
        if not self.errorLog.closed:
            self.errorLog.close()

        self.reportManager.close()

    self.output.warning("\nTask Completed")
def __init__(self, script_path, arguments, output):
    global VERSION
    program_banner = (
        open(FileUtils.build_path(script_path, "lib", "controller", "banner.txt"))
        .read()
        .format(**VERSION)
    )

    self.directories = Queue()
    self.script_path = script_path
    self.exit = False
    self.arguments = arguments
    self.output = output
    self.done_dirs = []

    if arguments.raw_file:
        _raw = Raw(arguments.raw_file, arguments.scheme)
        self.url_list = [_raw.url()]
        self.httpmethod = _raw.method()
        self.data = _raw.data()
        self.headers = _raw.headers()
    else:
        default_headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
            "Accept-Language": "*",
            "Accept-Encoding": "*",
            "Keep-Alive": "300",
            "Cache-Control": "max-age=0",
        }
        self.url_list = list(filter(None, dict.fromkeys(arguments.url_list)))
        self.httpmethod = arguments.httpmethod.lower()
        self.data = arguments.data
        self.headers = {**default_headers, **arguments.headers}

        if arguments.cookie:
            self.headers["Cookie"] = arguments.cookie

        if arguments.useragent:
            self.headers["User-Agent"] = arguments.useragent

    self.recursion_depth = arguments.recursion_depth

    if arguments.logs_location and self.validate_path(arguments.logs_location):
        self.logs_path = FileUtils.build_path(arguments.logs_location)
    elif self.validate_path(self.script_path):
        self.logs_path = FileUtils.build_path(self.script_path, "logs")

        if not FileUtils.exists(self.logs_path):
            FileUtils.create_directory(self.logs_path)

    if arguments.output_location and self.validate_path(arguments.output_location):
        self.save_path = FileUtils.build_path(arguments.output_location)
    elif self.validate_path(self.script_path):
        self.save_path = FileUtils.build_path(self.script_path, "reports")

        if not FileUtils.exists(self.save_path):
            FileUtils.create_directory(self.save_path)

    self.blacklists = self.get_blacklists()
    self.include_status_codes = arguments.include_status_codes
    self.exclude_status_codes = arguments.exclude_status_codes
    self.exclude_sizes = arguments.exclude_sizes
    self.exclude_texts = arguments.exclude_texts
    self.exclude_regexps = arguments.exclude_regexps
    self.exclude_redirects = arguments.exclude_redirects
    self.recursive = arguments.recursive
    self.deep_recursive = arguments.deep_recursive
    self.force_recursive = arguments.force_recursive
    self.recursion_status_codes = arguments.recursion_status_codes
    self.minimum_response_size = arguments.minimum_response_size
    self.maximum_response_size = arguments.maximum_response_size
    self.maxtime = arguments.maxtime
    self.scan_subdirs = arguments.scan_subdirs
    self.exclude_subdirs = arguments.exclude_subdirs
    self.dictionary = Dictionary(
        paths=arguments.wordlist,
        extensions=arguments.extensions,
        suffixes=arguments.suffixes,
        prefixes=arguments.prefixes,
        lowercase=arguments.lowercase,
        uppercase=arguments.uppercase,
        capitalization=arguments.capitalization,
        forced_extensions=arguments.force_extensions,
        exclude_extensions=arguments.exclude_extensions,
        no_extension=arguments.no_extension,
        only_selected=arguments.only_selected,
    )
    self.all_jobs = len(self.scan_subdirs) if self.scan_subdirs else 1
    self.current_job = 0
    self.start_time = time.time()
    self.error_log = None
    self.error_log_path = None
    self.threads_lock = Lock()
    self.batch = False
    self.batch_session = None

    self.output.header(program_banner)
    self.print_config()

    if arguments.use_random_agents:
        self.random_agents = FileUtils.get_lines(
            FileUtils.build_path(script_path, "db", "user-agents.txt")
        )

    self.report_manager = EmptyReportManager()
    self.report = EmptyReport()

    if arguments.autosave_report or arguments.output_file:
        if len(self.url_list) > 1:
            self.setup_batch_reports()

        self.setup_reports()

    self.setup_error_logs()
    self.output.error_log_file(self.error_log_path)

    try:
        for url in self.url_list:
            try:
                gc.collect()
                url = url if url.endswith("/") else url + "/"
                self.output.set_target(url, self.arguments.scheme)

                try:
                    self.requester = Requester(
                        url,
                        max_pool=arguments.threads_count,
                        max_retries=arguments.max_retries,
                        timeout=arguments.timeout,
                        ip=arguments.ip,
                        proxy=arguments.proxy,
                        proxylist=arguments.proxylist,
                        redirect=arguments.redirect,
                        request_by_hostname=arguments.request_by_hostname,
                        httpmethod=self.httpmethod,
                        data=self.data,
                        scheme=arguments.scheme,
                    )

                    for key, value in self.headers.items():
                        self.requester.set_header(key, value)

                    if arguments.auth:
                        self.requester.set_auth(arguments.auth_type, arguments.auth)

                    self.requester.request("")

                    if arguments.autosave_report or arguments.output_file:
                        self.report = Report(self.requester.host, self.requester.port,
                                             self.requester.protocol, self.requester.base_path)

                except RequestException as e:
                    self.output.error(e.args[0]["message"])
                    raise SkipTargetInterrupt

                if arguments.use_random_agents:
                    self.requester.set_random_agents(self.random_agents)

                # Initialize directories Queue with start Path
                self.base_path = self.requester.base_path
                self.status_skip = None

                if self.scan_subdirs:
                    for subdir in self.scan_subdirs:
                        self.directories.put(subdir)
                else:
                    self.directories.put("")

                match_callbacks = [self.match_callback]
                not_found_callbacks = [self.not_found_callback]
                error_callbacks = [self.error_callback, self.append_error_log]

                self.fuzzer = Fuzzer(
                    self.requester,
                    self.dictionary,
                    suffixes=arguments.suffixes,
                    prefixes=arguments.prefixes,
                    exclude_content=arguments.exclude_content,
                    threads=arguments.threads_count,
                    delay=arguments.delay,
                    maxrate=arguments.maxrate,
                    match_callbacks=match_callbacks,
                    not_found_callbacks=not_found_callbacks,
                    error_callbacks=error_callbacks,
                )

                try:
                    self.prepare()
                except RequestException as e:
                    self.output.error(e.args[0]["message"])
                    raise SkipTargetInterrupt

            except SkipTargetInterrupt:
                self.report.completed = True
                continue

    except KeyboardInterrupt:
        self.output.error("\nCanceled by the user")
        exit(0)

    finally:
        if not self.error_log.closed:
            self.error_log.close()

    self.output.warning("\nTask Completed")
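

# ---------------------------------------------------------------------------
# Hedged sketch: the control-flow pattern both constructors rely on. A custom
# exception (SkipTargetInterrupt in the code above) is raised from inner error
# handlers so the per-URL loop can simply `continue` to the next target, while
# KeyboardInterrupt aborts the whole run and `finally` still performs cleanup.
# The targets, scan_one_target() helper, and messages below are hypothetical
# stand-ins, not part of the scanner itself.
# ---------------------------------------------------------------------------
class SkipTargetSketch(Exception):
    """Raised to abandon the current target and move on to the next one."""


def scan_one_target(target):
    # Placeholder for the real work: enqueue directories, run the fuzzer, ...
    if target.startswith("bad://"):
        raise ConnectionError("unreachable")
    print("scanned {0}".format(target))


def scan_all_targets(targets):
    try:
        for target in targets:
            try:
                try:
                    scan_one_target(target)
                except ConnectionError as exc:
                    # Report the failure, then bail out of this target only
                    print("error on {0}: {1}".format(target, exc))
                    raise SkipTargetSketch
            except SkipTargetSketch:
                continue
    except KeyboardInterrupt:
        print("Canceled by the user")
        raise SystemExit(0)
    finally:
        # Runs on normal completion, skip, or cancellation (close logs, reports)
        print("cleanup that must always run")


if __name__ == "__main__":
    scan_all_targets(["http://example.com/", "bad://host/", "http://example.org/"])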