Example #1
0
    def __init__(self, options, output):
        """Set up scan state, print the banner and config, then run the scan.

        options: dict of CLI/session options; output: the output/report interface.
        """
        self.targets = Queue()
        self.directories = Queue()
        self.threads_lock = threading.Lock()
        # Placeholders; replaced with real report objects in setup_reports()
        self.report_manager = EmptyReportManager()
        self.report = EmptyReport()
        self.output = output

        # Resume a saved session if one was given, otherwise do a fresh setup
        if options["session_file"]:
            self._import(FileUtils.read(options["session_file"]))
            self.from_export = True
        else:
            self.setup(options)
            self.from_export = False

        self.output.header(BANNER)
        self.output.config(
            ', '.join(self.options["extensions"]),
            ', '.join(self.options["prefixes"]),
            ', '.join(self.options["suffixes"]),
            str(self.options["threads_count"]),
            str(len(self.dictionary)),
            str(self.options["httpmethod"]),
        )

        self.setup_reports()
        self.output.log_file(options["log_file"])

        try:
            self.run()
        except KeyboardInterrupt:
            self.close("Canceled by the user")
Example #2
0
class Controller(object):
    """Orchestrates a dirsearch scan: configuration, fuzzing and reporting."""
    def __init__(self, options, output):
        """Set up scan state, print the banner and config, then run the scan.

        options: dict of CLI/session options; output: the output/report interface.
        """
        self.targets = Queue()
        self.directories = Queue()
        self.threads_lock = threading.Lock()
        # Placeholders; replaced with real report objects in setup_reports()
        self.report_manager = EmptyReportManager()
        self.report = EmptyReport()
        self.output = output

        # Resume a saved session if one was given, otherwise do a fresh setup
        if options["session_file"]:
            self._import(FileUtils.read(options["session_file"]))
            self.from_export = True
        else:
            self.setup(options)
            self.from_export = False

        self.output.header(BANNER)
        self.output.config(
            ', '.join(self.options["extensions"]),
            ', '.join(self.options["prefixes"]),
            ', '.join(self.options["suffixes"]),
            str(self.options["threads_count"]),
            str(len(self.dictionary)),
            str(self.options["httpmethod"]),
        )

        self.setup_reports()
        self.output.log_file(options["log_file"])

        try:
            self.run()
        except KeyboardInterrupt:
            self.close("Canceled by the user")

    def setup(self, options):
        """Initialize scan configuration for a fresh (non-session) run.

        Builds headers, loads the wordlist dictionary, queues the target
        URLs and prepares report/log directories. Note: mutates *options*
        in place (it is also stored as self.options).
        """
        self.options = options
        self.pass_dirs = ['']

        # A raw HTTP request file overrides URL/method/headers/body options
        if options["raw_file"]:
            self.options.update(
                zip(["urls", "httpmethod", "headers", "data"],
                    parse_raw(options["raw_file"])))
        else:
            # User-supplied headers take precedence over the defaults
            self.options["headers"] = {**DEFAULT_HEADERS, **options["headers"]}
            if options["cookie"]:
                self.options["headers"]["Cookie"] = options["cookie"]
            if options["useragent"]:
                self.options["headers"]["User-Agent"] = options["useragent"]

        self.random_agents = None
        if options["use_random_agents"]:
            self.random_agents = FileUtils.get_lines(
                FileUtils.build_path(SCRIPT_PATH, "db", "user-agents.txt"))

        self.targets.queue = deque(options["urls"])
        self.blacklists = Dictionary.generate_blacklists(options["extensions"])
        self.dictionary = Dictionary(
            paths=options["wordlist"],
            extensions=options["extensions"],
            suffixes=options["suffixes"],
            prefixes=options["prefixes"],
            lowercase=options["lowercase"],
            uppercase=options["uppercase"],
            capitalization=options["capitalization"],
            force_extensions=options["force_extensions"],
            exclude_extensions=options["exclude_extensions"],
            no_extension=options["no_extension"],
            only_selected=options["only_selected"])

        self.current_job = 0
        self.batch = False
        self.batch_session = None
        self.exit = None
        self.start_time = time.time()
        # One job per target per scanned subdirectory (at least one each)
        self.jobs_count = self.targets.qsize() * (len(
            options["scan_subdirs"]) if options["scan_subdirs"] else 1)

        # Fixed: the old outer check `autosave_report or output_file` was
        # redundant — its body only ever ran when autosave_report was set
        if options["autosave_report"]:
            self.report_path = options[
                "output_location"] or FileUtils.build_path(
                    SCRIPT_PATH, "reports")
            self.create_dir(self.report_path)

        if options["log_file"]:
            options["log_file"] = FileUtils.get_abs_path(options["log_file"])
            self.create_dir(FileUtils.parent(options["log_file"]))

    def _import(self, data):
        """Restore scan state from a saved session file.

        *data* is the raw session text written by _export(); it is parsed
        with ast.literal_eval, so only Python-literal content is accepted.
        """
        export = ast.literal_eval(data)
        self.targets.queue = deque(export["targets"])
        self.directories.queue = deque(export["directories"])
        self.dictionary = Dictionary()
        self.dictionary.entries = export["dictionary"]
        self.dictionary.index = export["dictionary_index"]
        # Merge exported attributes in; attributes already set on self win
        self.__dict__ = {**export, **self.__dict__}

    def _export(self, session_file):
        """Serialize the current scan state into *session_file*.

        NOTE: destructive — targets/directories are rewritten as plain
        lists, so the controller cannot keep scanning after this call.
        """
        # Re-queue the in-progress target/directory so they resume first
        self.targets.queue.insert(0, self.url)
        self.directories.queue.insert(0, self.current_directory)

        # Queue() objects, convert them to list
        for item in ("targets", "directories"):
            self.__dict__[item] = list(self.__dict__[item].queue)

        self.dictionary, self.dictionary_index = self.dictionary.export()
        self.last_output = self.output.export()
        # The interrupted job restarts on resume, so don't count it as done
        self.current_job -= 1

        data = {
            k: v
            for k, v in self.__dict__.items()
            if k not in EXCLUDED_EXPORT_VARIABLES
        }

        FileUtils.write_lines(session_file, str(data), overwrite=True)

    def run(self):
        """Scan every queued target URL in turn until the queue is empty.

        For each target: build a Requester, probe the server with a test
        request, seed the directory queue, then fuzz via start(). A failing
        target raises SkipTargetInterrupt and the loop moves on.
        """
        while not self.targets.empty():
            try:
                self.skip = None
                url = self.targets.get()

                try:
                    # Normalize the URL to always end with a slash
                    self.requester = Requester(
                        url + ('' if url.endswith('/') else '/'),
                        max_pool=self.options["threads_count"],
                        max_retries=self.options["max_retries"],
                        timeout=self.options["timeout"],
                        ip=self.options["ip"],
                        proxy=self.options["proxy"],
                        proxylist=self.options["proxylist"],
                        redirect=self.options["follow_redirects"],
                        request_by_hostname=self.
                        options["request_by_hostname"],
                        httpmethod=self.options["httpmethod"],
                        data=self.options["data"],
                        scheme=self.options["scheme"],
                        random_agents=self.random_agents,
                    )
                    self.url = self.requester.base_url + self.requester.base_path

                    for key, value in self.options["headers"].items():
                        self.requester.set_header(key, value)

                    if self.options["auth"]:
                        self.requester.set_auth(self.options["auth_type"],
                                                self.options["auth"])

                    if self.from_export:
                        # Rewrite the output from the last scan
                        print(self.last_output)
                    else:
                        self.output.set_target(self.url)

                    self.requester.setup()

                    # Test request to check if server is up
                    self.requester.request('')
                    self.write_log("Test request sent for: {}".format(
                        self.requester.base_url))

                    self.output.url = self.requester.base_url[:-1]

                    if self.options["autosave_report"] or self.options[
                            "output_file"]:
                        self.report = Report(self.requester.host,
                                             self.requester.port,
                                             self.requester.scheme,
                                             self.requester.base_path)

                except RequestException as e:
                    self.output.error(e.args[0])
                    raise SkipTargetInterrupt

                # Seed the recursion queue with the user-selected subdirs
                if self.directories.empty():
                    self.directories.queue = deque(
                        self.options["scan_subdirs"])
                    self.pass_dirs.extend(self.options["scan_subdirs"])

                match_callbacks = (self.match_callback, self.append_log)
                not_found_callbacks = (self.not_found_callback,
                                       self.append_log)
                error_callbacks = (self.error_callback, self.append_error_log)
                self.fuzzer = Fuzzer(
                    self.requester,
                    self.dictionary,
                    suffixes=self.options["suffixes"],
                    prefixes=self.options["prefixes"],
                    exclude_response=self.options["exclude_response"],
                    threads=self.options["threads_count"],
                    delay=self.options["delay"],
                    maxrate=self.options["maxrate"],
                    match_callbacks=match_callbacks,
                    not_found_callbacks=not_found_callbacks,
                    error_callbacks=error_callbacks,
                )

                try:
                    self.start()
                except RequestException as e:
                    self.output.error(e.args[0])
                    raise SkipTargetInterrupt

            except SkipTargetInterrupt:
                # Drop the remaining jobs of this target and reset state
                self.jobs_count -= self.directories.qsize()
                self.directories = Queue()
                self.report.completed = True
                self.dictionary.reset()
                continue

        self.output.warning("\nTask Completed")

    def start(self):
        """Fuzz every queued (sub)directory of the current target."""
        first = True

        while not self.directories.empty():
            gc.collect()

            self.current_directory = self.directories.get()
            self.current_job += 1

            # When resuming a session, the first "Starting" line was already
            # replayed from the previous run's saved output
            if not self.from_export or not first:
                msg = '\n' if first else ''
                msg += "[{0}] Starting: {1}".format(time.strftime("%H:%M:%S"),
                                                    self.current_directory)

                self.output.warning(msg)

            self.fuzzer.requester.base_path = self.requester.base_path + self.current_directory
            self.fuzzer.start()
            self.process()
            self.dictionary.reset()

            first = False

        self.report.completed = True

    # Create batch report folder
    def setup_batch_reports(self):
        """Create a timestamped folder grouping reports of a multi-target scan."""
        self.batch = True
        if not self.options["output_file"]:
            self.batch_session = "BATCH-{0}".format(
                time.strftime("%y-%m-%d_%H-%M-%S"))
            self.batch_directory_path = FileUtils.build_path(
                self.report_path, self.batch_session)

            if not FileUtils.exists(self.batch_directory_path):
                FileUtils.create_directory(self.batch_directory_path)

                # Creation silently failed (permissions?) -> abort the run
                if not FileUtils.exists(self.batch_directory_path):
                    self.output.error(
                        "Couldn't create batch folder at {}".format(
                            self.batch_directory_path))
                    exit(1)

    # Get file extension for report format
    def get_output_extension(self):
        if self.options["output_format"] not in ("plain", "simple"):
            return ".{0}".format(self.options["output_format"])
        else:
            return ".txt"

    # Create report file
    def setup_reports(self):
        """Decide the report output path and initialize the report manager."""
        if self.options["output_file"]:
            output_file = FileUtils.get_abs_path(self.options["output_file"])
            self.output.output_file(output_file)
        else:
            if self.targets.qsize() > 1:
                # Multiple targets share one BATCH folder
                self.setup_batch_reports()
                filename = "BATCH"
                filename += self.get_output_extension()
                directory_path = self.batch_directory_path
            else:
                # Single target: <path>_<timestamp>.<ext> under the host dir
                parsed = urlparse(self.targets.queue[0])
                filename = ("{}_".format(parsed.path))
                filename += time.strftime("%y-%m-%d_%H-%M-%S")
                filename += self.get_output_extension()
                directory_path = FileUtils.build_path(
                    self.report_path, get_valid_filename(parsed.netloc))

            filename = get_valid_filename(filename)
            output_file = FileUtils.build_path(directory_path, filename)

            # Never overwrite an existing report: append _2, _3, ...
            if FileUtils.exists(output_file):
                i = 2
                while FileUtils.exists(output_file + "_" + str(i)):
                    i += 1

                output_file += "_" + str(i)

            if not FileUtils.exists(directory_path):
                FileUtils.create_directory(directory_path)

                if not FileUtils.exists(directory_path):
                    self.output.error(
                        "Couldn't create the reports folder at {}".format(
                            directory_path))
                    exit(1)

            self.output.output_file(output_file)

        if self.options["output_format"]:
            self.report_manager = ReportManager(
                self.options["output_format"], self.options["output_file"]
                or output_file)
        else:
            self.report_manager = ReportManager("plain", output_file)

    # Create and check if output directory is writable
    def create_dir(self, path):
        """Recursively create *path* and verify it is a writable directory.

        Exits the program when *path* exists as a file or is not writable.
        """
        if path == '/':
            return

        if not FileUtils.exists(path):
            # Build missing parents first, then create *path* itself so the
            # checks below always run against an existing entry. (The old
            # code ran is_dir/can_write before the directory was created,
            # so a brand-new path failed the is_dir check spuriously.)
            self.create_dir(FileUtils.parent(path))
            FileUtils.create_directory(path)

        if not FileUtils.is_dir(path):
            self.output.error(
                "{0} is a file, should be a directory".format(path))
            exit(1)
        if not FileUtils.can_write(path):
            self.output.error("Directory {0} is not writable".format(path))
            exit(1)

    # Validate the response by different filters
    def is_valid(self, path, res):
        """Return True if *res* passes every user-configured response filter."""
        if res.status in self.options["exclude_status_codes"]:
            return False

        # An empty include list means "all status codes are allowed"
        if res.status not in (self.options["include_status_codes"]
                              or range(100, 1000)):
            return False

        # Per-status blacklist of known wildcard/noise paths
        if self.blacklists.get(res.status) and path in self.blacklists.get(
                res.status):
            return False

        if human_size(res.length) in self.options["exclude_sizes"]:
            return False

        if res.length < self.options["minimum_response_size"]:
            return False

        # Chained comparison: filter only when a non-zero maximum size is set
        if res.length > self.options["maximum_response_size"] != 0:
            return False

        if any(ex_text in res.content
               for ex_text in self.options["exclude_texts"]):
            return False

        if self.options["exclude_regex"] and re.search(
                self.options["exclude_regex"], res.content) is not None:
            return False

        # Drop responses redirecting to an excluded location (substring or regex)
        if self.options["exclude_redirect"] and (
                self.options["exclude_redirect"] in res.redirect
                or re.search(self.options["exclude_redirect"],
                             res.redirect) is not None):
            return False

        return True

    # Callback for found paths
    def match_callback(self, path, response):
        """Handle a hit: maybe skip the target, recurse, replay and report it."""
        if response.status in self.options["skip_on_status"]:
            self.skip = "Skipped the target due to {} status code".format(
                response.status)
            return

        if not self.is_valid(path, response):
            return

        added_to_queue = False

        # Queue the hit for recursion when any recursion mode is enabled
        if response.status in self.options["recursion_status_codes"] and any(
            (self.options["recursive"], self.options["deep_recursive"],
             self.options["force_recursive"])):
            if response.redirect:
                added_to_queue = self.add_redirect_directory(path, response)
            else:
                added_to_queue = self.add_directory(path)

        # Re-send the matched request through the replay proxy, if configured
        if self.options["replay_proxy"]:
            self.requester.request(path, proxy=self.options["replay_proxy"])

        self.output.status_report(response, self.options["full_url"],
                                  added_to_queue)
        self.report.add_result(self.current_directory + path, response)
        self.report_manager.update_report(self.report)

    # Callback for invalid paths
    def not_found_callback(self, *args):
        """Refresh the progress line (fuzzer-supplied args are ignored)."""
        self.output.last_path(
            self.dictionary.index,
            len(self.dictionary),
            self.current_job,
            self.jobs_count,
            self.fuzzer.stand_rate,
        )

    # Callback for errors while fuzzing
    def error_callback(self, path, error_msg):
        """Count a connection error; request shutdown when exit_on_error is set."""
        if self.options["exit_on_error"]:
            # process() polls self.exit and closes the whole scan
            self.exit = "Canceled due to an error"

        self.output.add_connection_error()

    def write_log(self, msg):
        if not self.options["log_file"]:
            return

        line = time.strftime("[%y-%m-%d %H:%M:%S] ")
        line += msg + NEW_LINE
        FileUtils.write_lines(self.options["log_file"], line)

    # Write request to log file
    def append_log(self, path, response):
        """Log one request/response line, guarded by the shared thread lock."""
        msg = "{} {} {} {}".format(
            self.requester.ip or "0", response.status,
            self.options["httpmethod"],
            self.requester.base_url[:-1] + response.path)

        if response.redirect:
            msg += " - REDIRECT TO: {}".format(response.redirect)
        msg += " (LENGTH: {})".format(response.length)

        # Fuzzer worker threads share the log file
        with self.threads_lock:
            self.write_log(msg)

    # Write error to log file
    def append_error_log(self, path, error_msg):
        """Log a request error for *path*, guarded by the shared thread lock."""
        url = self.url + self.current_directory + path
        msg = "ERROR: {} {}".format(self.options["httpmethod"], url)
        msg += NEW_LINE + ' ' * 4 + error_msg
        with self.threads_lock:
            self.write_log(msg)

    # Handle CTRL+C
    def handle_pause(self):
        """Pause the fuzzer and show the interactive quit/continue/skip menu."""
        self.output.warning("CTRL+C detected: Pausing threads, please wait...",
                            save=False)
        self.fuzzer.pause()

        # Give worker threads up to ~7 seconds to come to a stop
        _ = 0
        while _ < 7:
            if self.fuzzer.is_stopped():
                break

            time.sleep(0.35)
            _ += 0.35

        # Loop until the user picks an action that leaves the menu
        while True:
            msg = "[q]uit / [c]ontinue"

            if not self.directories.empty():
                msg += " / [n]ext"

            if not self.targets.empty():
                msg += " / [s]kip target"

            self.output.in_line(msg + ": ")

            option = input()

            if option.lower() == 'q':
                self.output.in_line("[s]ave / [q]uit without saving: ")

                option = input()

                if option.lower() == 'q':
                    self.close("Canceled by the user")
                elif option.lower() == 's':
                    # Save a resumable session file before exiting
                    msg = "Save to file [{}]: ".format(DEFAULT_SESSION_FILE)

                    self.output.in_line(msg)

                    session_file = input() or DEFAULT_SESSION_FILE

                    self._export(session_file)
                    self.close("Session saved to: {}".format(session_file))
            elif option.lower() == 'c':
                self.fuzzer.resume()
                return
            elif option.lower() == 'n' and not self.directories.empty():
                # Abandon the current directory, move on to the next one
                self.fuzzer.stop()
                return
            elif option.lower() == 's' and not self.targets.empty():
                raise SkipTargetInterrupt

    # Monitor the fuzzing process
    def process(self):
        """Block until the fuzzer finishes, honoring pause/skip/exit requests."""
        while 1:
            try:
                while not self.fuzzer.wait(0.3):
                    # Chained comparison: a maxtime of 0 disables the limit
                    if time.time(
                    ) - self.start_time > self.options["maxtime"] != 0:
                        self.skip = "Canceled because the runtime exceeded the maximum set by user"

                    if self.skip:
                        self.close(self.skip, skip=True)
                    elif self.exit:
                        self.close(self.exit)
                        break
                break

            except KeyboardInterrupt:
                # CTRL+C during fuzzing opens the pause menu
                self.handle_pause()

    # Add directory to the recursion queue
    def add_directory(self, path):
        dirs = []
        added = False
        # Remove parameters and fragment from the URL
        path = path.split('?')[0].split('#')[0]
        full_path = self.current_directory + path

        if any(
                path.startswith(directory)
                for directory in self.options["exclude_subdirs"]):
            return False

        if self.options["force_recursive"] and not full_path.endswith('/'):
            full_path += '/'

        if self.options["deep_recursive"]:
            i = 0
            for _ in range(path.count('/')):
                i = path.index('/', i) + 1
                dirs.append(self.current_directory + path[:i])
        elif self.options["recursive"] and full_path.endswith('/'):
            dirs.append(full_path)

        for dir in dirs:
            if dir in self.pass_dirs:
                continue
            elif dir.count('/') > self.options["recursion_depth"] != 0:
                continue

            self.directories.put(dir)
            self.pass_dirs.append(dir)

            self.jobs_count += 1
            added = True

        return added

    # Resolve the redirect and add the path to the recursion queue
    # if it's a subdirectory of the current URL
    def add_redirect_directory(self, path, response):
        redirect_url = urljoin(self.requester.base_url, response.redirect)
        redirect_path = urlparse(redirect_url).path

        if redirect_path == response.path + '/':
            path = redirect_path[len(self.requester.base_path +
                                     self.current_directory) + 1:]
            return self.add_directory(path)

        return False

    def close(self, msg=None, skip=False):
        """Stop fuzzing; skip to the next target or terminate the program.

        skip=True raises SkipTargetInterrupt, otherwise the report is
        flushed and the process exits.
        """
        self.fuzzer.stop()
        self.output.error(msg)
        if skip:
            raise SkipTargetInterrupt

        # Flush the report before exiting the whole program
        self.report_manager.update_report(self.report)
        exit(0)
Example #3
0
    def __init__(self, script_path, arguments, output):
        """Configure and immediately run a scan over every URL in url_list.

        script_path: installation directory (for banner/db/logs/reports);
        arguments: parsed CLI options; output: the output interface.
        """
        # VERSION is only read here, so `global` is not strictly required
        global VERSION
        program_banner = (
            open(FileUtils.build_path(script_path, "banner.txt"))
            .read()
            .format(**VERSION)
        )

        self.directories = Queue()
        self.script_path = script_path
        self.arguments = arguments
        self.output = output
        self.pass_dirs = ["/"]

        # A raw HTTP request file overrides URL/method/headers/body arguments
        if arguments.raw_file:
            raw = Raw(arguments.raw_file, arguments.scheme)
            self.url_list = [raw.url]
            self.httpmethod = raw.method
            self.data = raw.body
            self.headers = raw.headers
        else:
            default_headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
                "Accept-Language": "*",
                "Accept-Encoding": "*",
                "Keep-Alive": "timeout=15, max=1000",
                "Cache-Control": "max-age=0",
            }

            self.url_list = arguments.url_list
            self.httpmethod = arguments.httpmethod.lower()
            self.data = arguments.data
            # User-supplied headers take precedence over the defaults
            self.headers = {**default_headers, **arguments.headers}
            if arguments.cookie:
                self.headers["Cookie"] = arguments.cookie
            if arguments.useragent:
                self.headers["User-Agent"] = arguments.useragent

        self.recursion_depth = arguments.recursion_depth

        # Prefer the user-given logs location, fall back to <script>/logs
        if arguments.logs_location and self.validate_path(arguments.logs_location):
            self.logs_path = FileUtils.build_path(arguments.logs_location)
        elif self.validate_path(self.script_path):
            self.logs_path = FileUtils.build_path(self.script_path, "logs")
            if not FileUtils.exists(self.logs_path):
                FileUtils.create_directory(self.logs_path)

        # Same fallback scheme for the reports directory
        if arguments.output_location and self.validate_path(arguments.output_location):
            self.report_path = FileUtils.build_path(arguments.output_location)
        elif self.validate_path(self.script_path):
            self.report_path = FileUtils.build_path(self.script_path, "reports")
            if not FileUtils.exists(self.report_path):
                FileUtils.create_directory(self.report_path)

        # Copy scan settings from the parsed arguments onto the controller
        self.blacklists = Dictionary.generate_blacklists(arguments.extensions, self.script_path)
        self.extensions = arguments.extensions
        self.prefixes = arguments.prefixes
        self.suffixes = arguments.suffixes
        self.threads_count = arguments.threads_count
        self.output_file = arguments.output_file
        self.output_format = arguments.output_format
        self.include_status_codes = arguments.include_status_codes
        self.exclude_status_codes = arguments.exclude_status_codes
        self.exclude_sizes = arguments.exclude_sizes
        self.exclude_texts = arguments.exclude_texts
        self.exclude_regexps = arguments.exclude_regexps
        self.exclude_redirects = arguments.exclude_redirects
        self.replay_proxy = arguments.replay_proxy
        self.recursive = self.arguments.recursive
        self.deep_recursive = arguments.deep_recursive
        self.force_recursive = arguments.force_recursive
        self.recursion_status_codes = arguments.recursion_status_codes
        self.minimum_response_size = arguments.minimum_response_size
        self.maximum_response_size = arguments.maximum_response_size
        self.scan_subdirs = arguments.scan_subdirs
        self.exclude_subdirs = arguments.exclude_subdirs
        self.full_url = arguments.full_url
        self.skip_on_status = arguments.skip_on_status
        self.exit_on_error = arguments.exit_on_error
        self.maxtime = arguments.maxtime

        self.dictionary = Dictionary(
            paths=arguments.wordlist,
            extensions=arguments.extensions,
            suffixes=arguments.suffixes,
            prefixes=arguments.prefixes,
            lowercase=arguments.lowercase,
            uppercase=arguments.uppercase,
            capitalization=arguments.capitalization,
            force_extensions=arguments.force_extensions,
            exclude_extensions=arguments.exclude_extensions,
            no_extension=arguments.no_extension,
            only_selected=arguments.only_selected
        )

        # One job per URL per scanned subdirectory (at least one each)
        self.jobs_count = len(self.url_list) * (
            len(self.scan_subdirs) if self.scan_subdirs else 1
        )
        self.current_job = 0
        self.error_log = None
        self.error_log_path = None
        self.threads_lock = threading.Lock()
        self.batch = False
        self.batch_session = None

        # Placeholders; replaced once real reporting/timing is configured
        self.report_manager = EmptyReportManager()
        self.report = EmptyReport()
        self.timer = EmptyTimer()

        self.output.header(program_banner)
        self.print_config()

        if arguments.use_random_agents:
            self.random_agents = FileUtils.get_lines(
                FileUtils.build_path(script_path, "db", "user-agents.txt")
            )

        if arguments.autosave_report or arguments.output_file:
            self.setup_reports()

        self.setup_error_logs()
        self.output.error_log_file(self.error_log_path)

        # Background watchdog enforcing the overall time limit
        if self.maxtime:
            threading.Thread(target=self.time_monitor, daemon=True).start()

        try:
            for url in self.url_list:
                try:
                    gc.collect()
                    url = url if url.endswith("/") else url + "/"
                    self.output.set_target(url, arguments.scheme)

                    try:
                        self.requester = Requester(
                            url,
                            max_pool=arguments.threads_count,
                            max_retries=arguments.max_retries,
                            timeout=arguments.timeout,
                            ip=arguments.ip,
                            proxy=arguments.proxy,
                            proxylist=arguments.proxylist,
                            redirect=arguments.redirect,
                            request_by_hostname=arguments.request_by_hostname,
                            httpmethod=self.httpmethod,
                            data=self.data,
                            scheme=arguments.scheme,
                        )

                        for key, value in self.headers.items():
                            self.requester.set_header(key, value)

                        if arguments.auth:
                            self.requester.set_auth(arguments.auth_type, arguments.auth)

                        # Test request to see if server is up
                        self.requester.request("")

                        if arguments.autosave_report or arguments.output_file:
                            self.report = Report(self.requester.host, self.requester.port, self.requester.protocol, self.requester.base_path)

                    except RequestException as e:
                        self.output.error(e.args[0]["message"])
                        raise SkipTargetInterrupt

                    if arguments.use_random_agents:
                        self.requester.set_random_agents(self.random_agents)

                    # Initialize directories Queue with start Path
                    self.base_path = self.requester.base_path
                    self.status_skip = None

                    if not self.scan_subdirs:
                        self.directories.put("")

                    for subdir in self.scan_subdirs:
                        self.directories.put(subdir)
                        self.pass_dirs.append(subdir)

                    match_callbacks = [self.match_callback]
                    not_found_callbacks = [self.not_found_callback]
                    error_callbacks = [self.error_callback, self.append_error_log]

                    self.fuzzer = Fuzzer(
                        self.requester,
                        self.dictionary,
                        suffixes=arguments.suffixes,
                        prefixes=arguments.prefixes,
                        exclude_content=arguments.exclude_content,
                        threads=arguments.threads_count,
                        delay=arguments.delay,
                        maxrate=arguments.maxrate,
                        match_callbacks=match_callbacks,
                        not_found_callbacks=not_found_callbacks,
                        error_callbacks=error_callbacks,
                    )
                    try:
                        self.prepare()
                    except RequestException as e:
                        self.output.error(e.args[0]["message"])
                        raise SkipTargetInterrupt

                except SkipTargetInterrupt:
                    self.report.completed = True
                    continue

        except KeyboardInterrupt:
            self.output.error("\nCanceled by the user")
            exit(0)

        finally:
            # NOTE(review): error_log starts as None; this assumes
            # setup_error_logs() always opened it — confirm it cannot fail
            # before this point, else close() raises AttributeError
            self.error_log.close()

        self.output.warning("\nTask Completed")
Example #4
0
    def run(self):
        """Scan every queued target URL in turn until the queue is empty.

        For each target: build a Requester, probe the server with a test
        request, seed the directory queue, then fuzz via start(). A failing
        target raises SkipTargetInterrupt and the loop moves on.
        """
        while not self.targets.empty():
            try:
                self.skip = None
                url = self.targets.get()

                try:
                    # Normalize the URL to always end with a slash
                    self.requester = Requester(
                        url + ('' if url.endswith('/') else '/'),
                        max_pool=self.options["threads_count"],
                        max_retries=self.options["max_retries"],
                        timeout=self.options["timeout"],
                        ip=self.options["ip"],
                        proxy=self.options["proxy"],
                        proxylist=self.options["proxylist"],
                        redirect=self.options["follow_redirects"],
                        request_by_hostname=self.
                        options["request_by_hostname"],
                        httpmethod=self.options["httpmethod"],
                        data=self.options["data"],
                        scheme=self.options["scheme"],
                        random_agents=self.random_agents,
                    )
                    self.url = self.requester.base_url + self.requester.base_path

                    for key, value in self.options["headers"].items():
                        self.requester.set_header(key, value)

                    if self.options["auth"]:
                        self.requester.set_auth(self.options["auth_type"],
                                                self.options["auth"])

                    if self.from_export:
                        # Rewrite the output from the last scan
                        print(self.last_output)
                    else:
                        self.output.set_target(self.url)

                    self.requester.setup()

                    # Test request to check if server is up
                    self.requester.request('')
                    self.write_log("Test request sent for: {}".format(
                        self.requester.base_url))

                    self.output.url = self.requester.base_url[:-1]

                    if self.options["autosave_report"] or self.options[
                            "output_file"]:
                        self.report = Report(self.requester.host,
                                             self.requester.port,
                                             self.requester.scheme,
                                             self.requester.base_path)

                except RequestException as e:
                    self.output.error(e.args[0])
                    raise SkipTargetInterrupt

                # Seed the recursion queue with the user-selected subdirs
                if self.directories.empty():
                    self.directories.queue = deque(
                        self.options["scan_subdirs"])
                    self.pass_dirs.extend(self.options["scan_subdirs"])

                match_callbacks = (self.match_callback, self.append_log)
                not_found_callbacks = (self.not_found_callback,
                                       self.append_log)
                error_callbacks = (self.error_callback, self.append_error_log)
                self.fuzzer = Fuzzer(
                    self.requester,
                    self.dictionary,
                    suffixes=self.options["suffixes"],
                    prefixes=self.options["prefixes"],
                    exclude_response=self.options["exclude_response"],
                    threads=self.options["threads_count"],
                    delay=self.options["delay"],
                    maxrate=self.options["maxrate"],
                    match_callbacks=match_callbacks,
                    not_found_callbacks=not_found_callbacks,
                    error_callbacks=error_callbacks,
                )

                try:
                    self.start()
                except RequestException as e:
                    self.output.error(e.args[0])
                    raise SkipTargetInterrupt

            except SkipTargetInterrupt:
                # Drop the remaining jobs of this target and reset state
                self.jobs_count -= self.directories.qsize()
                self.directories = Queue()
                self.report.completed = True
                self.dictionary.reset()
                continue

        self.output.warning("\nTask Completed")
Example #5
0
class Controller(object):
    def __init__(self, script_path, arguments, output):
        """Configure the scanner from CLI *arguments* and scan every target URL.

        script_path -- dirsearch install directory (db, logs, reports live here)
        arguments   -- parsed command-line options object
        output      -- console output handler
        """
        # Read the banner inside a context manager so the file handle is closed
        # (the original chained open().read() and leaked the handle). The
        # needless `global VERSION` was dropped: reading a global needs no
        # declaration.
        with open(FileUtils.build_path(script_path, "banner.txt")) as banner_file:
            program_banner = banner_file.read().format(**VERSION)

        self.directories = Queue()
        self.script_path = script_path
        self.arguments = arguments
        self.output = output
        self.pass_dirs = ["/"]

        # A raw HTTP request file overrides URL/method/body/header options.
        if arguments.raw_file:
            raw = Raw(arguments.raw_file, arguments.scheme)
            self.url_list = [raw.url]
            self.httpmethod = raw.method
            self.data = raw.body
            self.headers = raw.headers
        else:
            default_headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
                "Accept-Language": "*",
                "Accept-Encoding": "*",
                "Keep-Alive": "timeout=15, max=1000",
                "Cache-Control": "max-age=0",
            }

            self.url_list = arguments.url_list
            self.httpmethod = arguments.httpmethod.lower()
            self.data = arguments.data
            # User-supplied headers override the defaults above.
            self.headers = {**default_headers, **arguments.headers}
            if arguments.cookie:
                self.headers["Cookie"] = arguments.cookie
            if arguments.useragent:
                self.headers["User-Agent"] = arguments.useragent

        self.recursion_depth = arguments.recursion_depth

        # validate_path() exits the process on failure, so logs_path and
        # report_path are always bound when execution continues past here.
        if arguments.logs_location and self.validate_path(arguments.logs_location):
            self.logs_path = FileUtils.build_path(arguments.logs_location)
        elif self.validate_path(self.script_path):
            self.logs_path = FileUtils.build_path(self.script_path, "logs")
            if not FileUtils.exists(self.logs_path):
                FileUtils.create_directory(self.logs_path)

        if arguments.output_location and self.validate_path(arguments.output_location):
            self.report_path = FileUtils.build_path(arguments.output_location)
        elif self.validate_path(self.script_path):
            self.report_path = FileUtils.build_path(self.script_path, "reports")
            if not FileUtils.exists(self.report_path):
                FileUtils.create_directory(self.report_path)

        self.blacklists = Dictionary.generate_blacklists(arguments.extensions, self.script_path)
        self.extensions = arguments.extensions
        self.prefixes = arguments.prefixes
        self.suffixes = arguments.suffixes
        self.threads_count = arguments.threads_count
        self.output_file = arguments.output_file
        self.output_format = arguments.output_format
        self.include_status_codes = arguments.include_status_codes
        self.exclude_status_codes = arguments.exclude_status_codes
        self.exclude_sizes = arguments.exclude_sizes
        self.exclude_texts = arguments.exclude_texts
        self.exclude_regexps = arguments.exclude_regexps
        self.exclude_redirects = arguments.exclude_redirects
        self.replay_proxy = arguments.replay_proxy
        self.recursive = self.arguments.recursive
        self.deep_recursive = arguments.deep_recursive
        self.force_recursive = arguments.force_recursive
        self.recursion_status_codes = arguments.recursion_status_codes
        self.minimum_response_size = arguments.minimum_response_size
        self.maximum_response_size = arguments.maximum_response_size
        self.scan_subdirs = arguments.scan_subdirs
        self.exclude_subdirs = arguments.exclude_subdirs
        self.full_url = arguments.full_url
        self.skip_on_status = arguments.skip_on_status
        self.exit_on_error = arguments.exit_on_error
        self.maxtime = arguments.maxtime

        self.dictionary = Dictionary(
            paths=arguments.wordlist,
            extensions=arguments.extensions,
            suffixes=arguments.suffixes,
            prefixes=arguments.prefixes,
            lowercase=arguments.lowercase,
            uppercase=arguments.uppercase,
            capitalization=arguments.capitalization,
            force_extensions=arguments.force_extensions,
            exclude_extensions=arguments.exclude_extensions,
            no_extension=arguments.no_extension,
            only_selected=arguments.only_selected
        )

        # One job per (target URL, scanned subdirectory) pair.
        self.jobs_count = len(self.url_list) * (
            len(self.scan_subdirs) if self.scan_subdirs else 1
        )
        self.current_job = 0
        self.error_log = None
        self.error_log_path = None
        self.threads_lock = threading.Lock()
        self.batch = False
        self.batch_session = None

        # Placeholders that are swapped for real objects once reporting starts.
        self.report_manager = EmptyReportManager()
        self.report = EmptyReport()
        self.timer = EmptyTimer()

        self.output.header(program_banner)
        self.print_config()

        if arguments.use_random_agents:
            self.random_agents = FileUtils.get_lines(
                FileUtils.build_path(script_path, "db", "user-agents.txt")
            )

        if arguments.autosave_report or arguments.output_file:
            self.setup_reports()

        self.setup_error_logs()
        self.output.error_log_file(self.error_log_path)

        # Background watchdog aborts the scan when the time limit elapses.
        if self.maxtime:
            threading.Thread(target=self.time_monitor, daemon=True).start()

        try:
            for url in self.url_list:
                try:
                    gc.collect()
                    url = url if url.endswith("/") else url + "/"
                    self.output.set_target(url, arguments.scheme)

                    try:
                        self.requester = Requester(
                            url,
                            max_pool=arguments.threads_count,
                            max_retries=arguments.max_retries,
                            timeout=arguments.timeout,
                            ip=arguments.ip,
                            proxy=arguments.proxy,
                            proxylist=arguments.proxylist,
                            redirect=arguments.redirect,
                            request_by_hostname=arguments.request_by_hostname,
                            httpmethod=self.httpmethod,
                            data=self.data,
                            scheme=arguments.scheme,
                        )

                        for key, value in self.headers.items():
                            self.requester.set_header(key, value)

                        if arguments.auth:
                            self.requester.set_auth(arguments.auth_type, arguments.auth)

                        # Test request to see if server is up
                        self.requester.request("")

                        if arguments.autosave_report or arguments.output_file:
                            self.report = Report(self.requester.host, self.requester.port, self.requester.protocol, self.requester.base_path)

                    except RequestException as e:
                        self.output.error(e.args[0]["message"])
                        raise SkipTargetInterrupt

                    if arguments.use_random_agents:
                        self.requester.set_random_agents(self.random_agents)

                    # Initialize directories Queue with start Path
                    self.base_path = self.requester.base_path
                    self.status_skip = None

                    if not self.scan_subdirs:
                        self.directories.put("")

                    for subdir in self.scan_subdirs:
                        self.directories.put(subdir)
                        self.pass_dirs.append(subdir)

                    match_callbacks = [self.match_callback]
                    not_found_callbacks = [self.not_found_callback]
                    error_callbacks = [self.error_callback, self.append_error_log]

                    self.fuzzer = Fuzzer(
                        self.requester,
                        self.dictionary,
                        suffixes=arguments.suffixes,
                        prefixes=arguments.prefixes,
                        exclude_content=arguments.exclude_content,
                        threads=arguments.threads_count,
                        delay=arguments.delay,
                        maxrate=arguments.maxrate,
                        match_callbacks=match_callbacks,
                        not_found_callbacks=not_found_callbacks,
                        error_callbacks=error_callbacks,
                    )
                    try:
                        self.prepare()
                    except RequestException as e:
                        self.output.error(e.args[0]["message"])
                        raise SkipTargetInterrupt

                except SkipTargetInterrupt:
                    self.report.completed = True
                    continue

        except KeyboardInterrupt:
            self.output.error("\nCanceled by the user")
            exit(0)

        finally:
            self.error_log.close()

        self.output.warning("\nTask Completed")

    # Print dirsearch metadata (threads, HTTP method, ...)
    def print_config(self):
        self.output.config(
            ', '.join(self.extensions),
            ', '.join(self.prefixes),
            ', '.join(self.suffixes),
            str(self.threads_count),
            str(len(self.dictionary)),
            str(self.httpmethod),
        )

    def time_monitor(self):
        """Watchdog thread body: abort the whole scan once self.maxtime elapses."""
        watchdog = Timer()
        self.timer = watchdog
        # Timer.count presumably blocks for maxtime seconds — confirm semantics.
        watchdog.count(self.maxtime)
        self.close("\nCanceled because the runtime exceeded the maximal set by user")

    # Create error log file
    def setup_error_logs(self):
        """Open a timestamped error log under logs_path; exit on permission failure."""
        timestamp = time.strftime("%y-%m-%d_%H-%M-%S")
        self.error_log_path = FileUtils.build_path(
            self.logs_path, "errors-{0}.log".format(timestamp)
        )

        try:
            self.error_log = open(self.error_log_path, "w")
        except PermissionError:
            self.output.error(
                "Couldn't create the error log. Try running again with highest permission"
            )
            sys.exit(1)

    # Create batch report folder
    def setup_batch_reports(self):
        """Prepare a per-session folder for batch (multi-target) reports."""
        self.batch = True

        # An explicit output file takes precedence — no batch folder needed.
        if self.output_file:
            return

        self.batch_session = "BATCH-{0}".format(time.strftime("%y-%m-%d_%H-%M-%S"))
        self.batch_directory_path = FileUtils.build_path(
            self.report_path, self.batch_session
        )

        if not FileUtils.exists(self.batch_directory_path):
            FileUtils.create_directory(self.batch_directory_path)

            # If creation silently failed (e.g. permissions), bail out.
            if not FileUtils.exists(self.batch_directory_path):
                self.output.error(
                    "Couldn't create batch folder at {}".format(self.batch_directory_path)
                )
                sys.exit(1)

    # Get file extension for report format
    def get_output_extension(self):
        if self.output_format and self.output_format not in ["plain", "simple"]:
            return ".{0}".format(self.output_format)
        else:
            return ".txt"

    # Create report file
    def setup_reports(self):
        """Decide where the report is written and build the ReportManager.

        With an explicit output file, reports go there; otherwise a filename
        is derived from the target (or a shared BATCH folder for multiple URLs).
        """
        if self.output_file:
            output_file = FileUtils.get_abs_path(self.output_file)
            self.output.output_file(output_file)
        else:
            if len(self.url_list) > 1:
                # Multiple targets share one BATCH report folder.
                self.setup_batch_reports()
                filename = "BATCH"
                filename += self.get_output_extension()
                directory_path = self.batch_directory_path
            else:
                # Single target: <path>_<timestamp><ext> under reports/<host>.
                parsed = urlparse(self.url_list[0])
                filename = (
                    "{}_".format(parsed.path)
                )
                filename += time.strftime("%y-%m-%d_%H-%M-%S")
                filename += self.get_output_extension()
                directory_path = FileUtils.build_path(
                    self.report_path, clean_filename(parsed.netloc)
                )

            filename = clean_filename(filename)
            output_file = FileUtils.build_path(directory_path, filename)

            # Never overwrite an existing report; append _2, _3, ...
            if FileUtils.exists(output_file):
                i = 2
                while FileUtils.exists(output_file + "_" + str(i)):
                    i += 1

                output_file += "_" + str(i)

            if not FileUtils.exists(directory_path):
                FileUtils.create_directory(directory_path)

                if not FileUtils.exists(directory_path):
                    self.output.error(
                        "Couldn't create the reports folder at {}".format(directory_path)
                    )
                    sys.exit(1)

            self.output.output_file(output_file)

        # NOTE(review): this branch passes the raw self.output_file rather than
        # the absolute path computed above — confirm the asymmetry is intended.
        if self.output_file and self.output_format:
            self.report_manager = ReportManager(self.output_format, self.output_file)
        elif self.output_format:
            self.report_manager = ReportManager(self.output_format, output_file)
        else:
            self.report_manager = ReportManager("plain", output_file)

    # Check if given path is valid (can read/write)
    def validate_path(self, path):
        """Exit with an error unless *path* exists, is a directory, and is writable.

        Returns True when every check passes (callers use it in boolean context).
        """
        if not FileUtils.exists(path):
            self.output.error("{0} does not exist".format(path))
            sys.exit(1)

        # Existence is guaranteed by the guard above; the original pointlessly
        # re-checked FileUtils.exists() here. Also sys.exit() for consistency
        # with the rest of the class.
        if not FileUtils.is_dir(path):
            self.output.error("{0} is a file, should be a directory".format(path))
            sys.exit(1)

        if not FileUtils.can_write(path):
            self.output.error("Directory {0} is not writable".format(path))
            sys.exit(1)

        return True

    # Validate the response by different filters
    def valid(self, path):
        if not path:
            return False

        if path.status in self.exclude_status_codes:
            return False

        if self.include_status_codes and path.status not in self.include_status_codes:
            return False

        if self.blacklists.get(path.status) and path.path in self.blacklists.get(path.status):
            return False

        if self.exclude_sizes and human_size(path.length).strip() in self.exclude_sizes:
            return False

        if self.minimum_response_size and self.minimum_response_size > path.length:
            return False

        if self.maximum_response_size and self.maximum_response_size < path.length:
            return False

        for exclude_text in self.exclude_texts:
            if exclude_text in path.body:
                return False

        for exclude_regexp in self.exclude_regexps:
            if (
                re.search(exclude_regexp, path.body)
                is not None
            ):
                return False

        for exclude_redirect in self.exclude_redirects:
            if path.redirect and (
                (
                    re.match(exclude_redirect, path.redirect) is not None
                ) or (
                    exclude_redirect in path.redirect
                )
            ):
                return False

        return True

    # Callback for found paths
    def match_callback(self, path):
        """Handle a wordlist hit: filter it, maybe queue recursion, print and record it.

        *path* is a response wrapper exposing .status/.path/.response/.redirect.
        """
        self.index += 1

        # A status listed in --skip-on-status aborts the rest of this target.
        for status in self.skip_on_status:
            if path.status == status:
                self.status_skip = status
                return

        if not self.valid(path):
            del path
            return

        added_to_queue = False

        # Recurse when any recursion mode is enabled and the status qualifies.
        if (
                any([self.recursive, self.deep_recursive, self.force_recursive])
        ) and (
                not self.recursion_status_codes or path.status in self.recursion_status_codes
        ):
            if path.redirect:
                added_to_queue = self.add_redirect_directory(path)
            else:
                added_to_queue = self.add_directory(path.path)

        self.output.status_report(
            path.path, path.response, self.full_url, added_to_queue
        )

        # Replay the hit through the configured proxy as well.
        if self.replay_proxy:
            self.requester.request(path.path, proxy=self.replay_proxy)

        new_path = self.current_directory + path.path

        self.report.add_result(new_path, path.status, path.response)
        self.report_manager.update_report(self.report)

        # Explicit del — presumably to release the response object sooner.
        del path

    # Callback for invalid paths
    def not_found_callback(self, path):
        self.index += 1
        self.output.last_path(
            self.index,
            len(self.dictionary),
            self.current_job,
            self.jobs_count,
            self.fuzzer.stand_rate,
        )
        del path

    # Callback for errors while fuzzing
    def error_callback(self, path, error_msg):
        if self.exit_on_error:
            self.close("\nCanceled due to an error")

        else:
            self.output.add_connection_error()

    # Write error to log file
    def append_error_log(self, path, error_msg):
        with self.threads_lock:
            line = time.strftime("[%y-%m-%d %H:%M:%S] - ")
            line += self.requester.base_url + self.base_path + self.current_directory + path + " - " + error_msg
            self.error_log.write(os.linesep + line)
            self.error_log.flush()

    # Handle CTRL+C
    def handle_pause(self, message):
        """Pause fuzzing and interactively ask the user what to do next.

        Options: [q]uit, [c]ontinue, [n]ext directory (if queued),
        [s]kip target (if scanning multiple URLs).
        """
        self.output.warning(message)
        self.timer.pause()
        self.fuzzer.pause()

        # Timer.wait presumably blocks until the fuzzer's is_stopped condition
        # holds — confirm Timer.wait semantics against its definition.
        Timer.wait(self.fuzzer.is_stopped)

        while True:
            msg = "[q]uit / [c]ontinue"

            # Extra options only when they make sense in the current state.
            if not self.directories.empty():
                msg += " / [n]ext"

            if len(self.url_list) > 1:
                msg += " / [s]kip target"

            self.output.in_line(msg + ": ")

            option = input()

            if option.lower() == "q":
                self.close("\nCanceled by the user")

            elif option.lower() == "c":
                self.timer.resume()
                self.fuzzer.resume()
                return

            # "next": stop the current directory and move on to the queued one.
            elif option.lower() == "n" and not self.directories.empty():
                self.timer.resume()
                self.fuzzer.stop()
                return

            elif option.lower() == "s" and len(self.url_list) > 1:
                self.timer.resume()
                raise SkipTargetInterrupt

    # Monitor the fuzzing process
    def process_paths(self):
        """Poll the running fuzzer until it finishes the current directory.

        KeyboardInterrupt is caught here so CTRL+C pauses the scan instead of
        killing the process; the skip-status check runs on every poll tick.
        """
        while True:
            try:
                # fuzzer.wait(0.25) presumably returns True once all threads
                # are done — confirm against Fuzzer.wait.
                while not self.fuzzer.wait(0.25):
                    # Check if the "skip status code" was returned
                    if self.status_skip:
                        self.close(
                            "\nSkipped the target due to {0} status code".format(self.status_skip),
                            skip=True
                        )
                break

            except KeyboardInterrupt:
                self.handle_pause("CTRL+C detected: Pausing threads, please wait...")

    # Preparation between subdirectory scans
    def prepare(self):
        while not self.directories.empty():
            gc.collect()
            self.current_job += 1
            self.index = 0
            self.current_directory = self.directories.get()
            self.output.warning(
                "[{1}] Starting: {0}".format(
                    self.current_directory, time.strftime("%H:%M:%S")
                )
            )
            self.fuzzer.requester.base_path = self.output.base_path = self.base_path + self.current_directory
            self.fuzzer.start()
            self.process_paths()

        self.report.completed = True
        self.report_manager.update_report(self.report)
        self.report = None

        return

    # Add directory to the recursion queue
    def add_directory(self, path):
        dirs = []
        added = False
        path = path.split("?")[0].split("#")[0]
        full_path = self.current_directory + path

        if any([path.startswith(directory) for directory in self.exclude_subdirs]):
            return False

        # Avoid paths contain consecutive slashes, we haven't had good handler for it yet
        if self.deep_recursive and "//" not in path:
            for i in range(1, path.count("/")):
                dir = self.current_directory + "/".join(path.split("/")[:i]) + "/"
                dirs.append(dir)

        if self.force_recursive:
            if not full_path.endswith("/"):
                full_path += "/"
            dirs.append(full_path)
        elif self.recursive and full_path.endswith("/"):
            dirs.append(full_path)

        for dir in list(set(dirs)):
            if dir in self.pass_dirs:
                continue
            elif self.recursion_depth and dir.count("/") > self.recursion_depth:
                continue

            self.directories.put(dir)
            self.pass_dirs.append(dir)

            self.jobs_count += 1
            added = True

        return added

    # Resolve the redirect and add the path to the recursion queue
    # if it's a subdirectory of the current URL
    def add_redirect_directory(self, path):
        base_path = "/" + self.base_path + self.current_directory + path.path

        redirect_url = urljoin(self.requester.base_url, path.redirect)
        redirect_path = urlparse(redirect_url).path

        if redirect_path == base_path + "/":
            path = redirect_path[len(self.base_path + self.current_directory) + 1:]

            return self.add_directory(path)

        return False

    def close(self, msg=None, skip=False):
        self.fuzzer.stop()
        self.output.error(msg)
        if skip:
            raise SkipTargetInterrupt

        self.report_manager.update_report(self.report)
        exit(0)
# Example #6
# 0
    def __init__(self, options, output):
        """Configure the scanner from parsed *options* and scan every target URL.

        options -- parsed command-line options object
        output  -- console output handler
        """
        self.directories = Queue()
        self.output = output
        self.options = options
        self.pass_dirs = ["/"]

        # A raw HTTP request file overrides URL/method/body/header options.
        if options.raw_file:
            raw = Raw(options.raw_file)
            self.url_list = [raw.url]
            self.httpmethod = raw.method
            self.data = raw.body
            self.headers = raw.headers
        else:
            self.url_list = options.url_list
            self.httpmethod = options.httpmethod
            # BUG FIX: was `options.httpmethod` — the request body must come
            # from options.data (cf. the raw branch, which uses raw.body).
            self.data = options.data
            self.headers = {**DEFAULT_HEADERS, **options.headers}
            if options.cookie:
                self.headers["Cookie"] = options.cookie
            if options.useragent:
                self.headers["User-Agent"] = options.useragent

        self.random_agents = None
        if options.use_random_agents:
            self.random_agents = FileUtils.get_lines(
                FileUtils.build_path(SCRIPT_PATH, "db", "user-agents.txt"))

        self.blacklists = Dictionary.generate_blacklists(options.extensions)
        self.dictionary = Dictionary(
            paths=options.wordlist,
            extensions=options.extensions,
            suffixes=options.suffixes,
            prefixes=options.prefixes,
            lowercase=options.lowercase,
            uppercase=options.uppercase,
            capitalization=options.capitalization,
            force_extensions=options.force_extensions,
            exclude_extensions=options.exclude_extensions,
            no_extension=options.no_extension,
            only_selected=options.only_selected)

        # One job per (target URL, scanned subdirectory) pair.
        self.jobs_count = len(self.url_list) * (len(options.scan_subdirs)
                                                if options.scan_subdirs else 1)
        self.current_job = 0
        self.batch = False
        self.batch_session = None
        self.exit = None

        self.threads_lock = threading.Lock()
        # Placeholders swapped for real objects once reporting starts.
        self.report_manager = EmptyReportManager()
        self.report = EmptyReport()
        self.start_time = time.time()

        self.output.header(BANNER)
        self.print_config()

        if options.autosave_report or options.output_file:
            if options.autosave_report:
                self.report_path = options.output_location or FileUtils.build_path(
                    SCRIPT_PATH, "reports")
                self.validate_dir(self.report_path)

            self.setup_reports()

        if options.log_file:
            self.validate_dir(FileUtils.parent(options.log_file))
            FileUtils.create_directory(FileUtils.parent(options.log_file))
            self.output.log_file(FileUtils.get_abs_path(options.log_file))

        try:
            for url in self.url_list:
                try:
                    gc.collect()

                    try:
                        self.requester = Requester(
                            url if url.endswith("/") else url + "/",
                            max_pool=options.threads_count,
                            max_retries=options.max_retries,
                            timeout=options.timeout,
                            ip=options.ip,
                            proxy=options.proxy,
                            proxylist=options.proxylist,
                            redirect=options.follow_redirects,
                            request_by_hostname=options.request_by_hostname,
                            httpmethod=self.httpmethod,
                            data=self.data,
                            scheme=options.scheme,
                            random_agents=self.random_agents,
                        )

                        self.output.set_target(self.requester.base_url +
                                               self.requester.base_path)
                        self.requester.setup()

                        for key, value in self.headers.items():
                            self.requester.set_header(key, value)

                        if options.auth:
                            self.requester.set_auth(options.auth_type,
                                                    options.auth)

                        # Test request to check if server is up
                        self.requester.request("")
                        self.write_log("Test request sent for: {}".format(
                            self.requester.base_url))

                        if options.autosave_report or options.output_file:
                            self.report = Report(self.requester.host,
                                                 self.requester.port,
                                                 self.requester.scheme,
                                                 self.requester.base_path)

                    except RequestException as e:
                        self.output.error(e.args[0])
                        raise SkipTargetInterrupt

                    self.skip = None

                    # An empty string means "scan the base path itself".
                    if not options.scan_subdirs:
                        self.directories.put("")

                    for subdir in options.scan_subdirs:
                        self.directories.put(subdir)
                        self.pass_dirs.append(subdir)

                    match_callbacks = [self.match_callback, self.append_log]
                    not_found_callbacks = [
                        self.not_found_callback, self.append_log
                    ]
                    error_callbacks = [
                        self.error_callback, self.append_error_log
                    ]
                    self.fuzzer = Fuzzer(
                        self.requester,
                        self.dictionary,
                        suffixes=options.suffixes,
                        prefixes=options.prefixes,
                        exclude_response=options.exclude_response,
                        threads=options.threads_count,
                        delay=options.delay,
                        maxrate=options.maxrate,
                        match_callbacks=match_callbacks,
                        not_found_callbacks=not_found_callbacks,
                        error_callbacks=error_callbacks,
                    )

                    try:
                        self.prepare()
                    except RequestException as e:
                        self.output.error(e.args[0])
                        raise SkipTargetInterrupt

                except SkipTargetInterrupt:
                    self.report.completed = True
                    continue

        except KeyboardInterrupt:
            self.close("Canceled by the user")

        self.output.warning("\nTask Completed")
# Example #7
# 0
class Controller(object):
    """Drives a full scan: builds the wordlist, probes each target URL and
    brute-forces its directories, reporting and logging every result.
    """

    def __init__(self, options, output):
        self.directories = Queue()
        self.output = output
        self.options = options
        # Directories already queued, used as a recursion guard
        self.pass_dirs = ["/"]

        if options.raw_file:
            # A raw HTTP request file supplies URL, method, body and headers
            raw = Raw(options.raw_file)
            self.url_list = [raw.url]
            self.httpmethod = raw.method
            self.data = raw.body
            self.headers = raw.headers
        else:
            self.url_list = options.url_list
            self.httpmethod = options.httpmethod
            # Bugfix: the request body comes from options.data
            # (previously assigned options.httpmethod by mistake)
            self.data = options.data
            self.headers = {**DEFAULT_HEADERS, **options.headers}
            if options.cookie:
                self.headers["Cookie"] = options.cookie
            if options.useragent:
                self.headers["User-Agent"] = options.useragent

        self.random_agents = None
        if options.use_random_agents:
            self.random_agents = FileUtils.get_lines(
                FileUtils.build_path(SCRIPT_PATH, "db", "user-agents.txt"))

        self.blacklists = Dictionary.generate_blacklists(options.extensions)
        self.dictionary = Dictionary(
            paths=options.wordlist,
            extensions=options.extensions,
            suffixes=options.suffixes,
            prefixes=options.prefixes,
            lowercase=options.lowercase,
            uppercase=options.uppercase,
            capitalization=options.capitalization,
            force_extensions=options.force_extensions,
            exclude_extensions=options.exclude_extensions,
            no_extension=options.no_extension,
            only_selected=options.only_selected)

        # One job per (URL, subdirectory) pair
        self.jobs_count = len(self.url_list) * (len(options.scan_subdirs)
                                                if options.scan_subdirs else 1)
        self.current_job = 0
        self.batch = False
        self.batch_session = None
        self.exit = None

        self.threads_lock = threading.Lock()
        self.report_manager = EmptyReportManager()
        self.report = EmptyReport()
        self.start_time = time.time()

        self.output.header(BANNER)
        self.print_config()

        if options.autosave_report or options.output_file:
            if options.autosave_report:
                self.report_path = options.output_location or FileUtils.build_path(
                    SCRIPT_PATH, "reports")
                self.validate_dir(self.report_path)

            self.setup_reports()

        if options.log_file:
            self.validate_dir(FileUtils.parent(options.log_file))
            FileUtils.create_directory(FileUtils.parent(options.log_file))
            self.output.log_file(FileUtils.get_abs_path(options.log_file))

        try:
            for url in self.url_list:
                try:
                    gc.collect()

                    try:
                        self.requester = Requester(
                            url if url.endswith("/") else url + "/",
                            max_pool=options.threads_count,
                            max_retries=options.max_retries,
                            timeout=options.timeout,
                            ip=options.ip,
                            proxy=options.proxy,
                            proxylist=options.proxylist,
                            redirect=options.follow_redirects,
                            request_by_hostname=options.request_by_hostname,
                            httpmethod=self.httpmethod,
                            data=self.data,
                            scheme=options.scheme,
                            random_agents=self.random_agents,
                        )

                        self.output.set_target(self.requester.base_url +
                                               self.requester.base_path)
                        self.requester.setup()

                        for key, value in self.headers.items():
                            self.requester.set_header(key, value)

                        if options.auth:
                            self.requester.set_auth(options.auth_type,
                                                    options.auth)

                        # Test request to check if server is up
                        self.requester.request("")
                        self.write_log("Test request sent for: {}".format(
                            self.requester.base_url))

                        if options.autosave_report or options.output_file:
                            self.report = Report(self.requester.host,
                                                 self.requester.port,
                                                 self.requester.scheme,
                                                 self.requester.base_path)

                    except RequestException as e:
                        self.output.error(e.args[0])
                        raise SkipTargetInterrupt

                    self.skip = None

                    # An empty entry scans the target root itself
                    if not options.scan_subdirs:
                        self.directories.put("")

                    for subdir in options.scan_subdirs:
                        self.directories.put(subdir)
                        self.pass_dirs.append(subdir)

                    match_callbacks = [self.match_callback, self.append_log]
                    not_found_callbacks = [
                        self.not_found_callback, self.append_log
                    ]
                    error_callbacks = [
                        self.error_callback, self.append_error_log
                    ]
                    self.fuzzer = Fuzzer(
                        self.requester,
                        self.dictionary,
                        suffixes=options.suffixes,
                        prefixes=options.prefixes,
                        exclude_response=options.exclude_response,
                        threads=options.threads_count,
                        delay=options.delay,
                        maxrate=options.maxrate,
                        match_callbacks=match_callbacks,
                        not_found_callbacks=not_found_callbacks,
                        error_callbacks=error_callbacks,
                    )

                    try:
                        self.prepare()
                    except RequestException as e:
                        self.output.error(e.args[0])
                        raise SkipTargetInterrupt

                except SkipTargetInterrupt:
                    self.report.completed = True
                    continue

        except KeyboardInterrupt:
            self.close("Canceled by the user")

        self.output.warning("\nTask Completed")

    # Print dirsearch metadata (threads, HTTP method, ...)
    def print_config(self):
        self.output.config(
            ', '.join(self.options.extensions),
            ', '.join(self.options.prefixes),
            ', '.join(self.options.suffixes),
            str(self.options.threads_count),
            str(len(self.dictionary)),
            str(self.httpmethod),
        )

    # Create batch report folder (only used when scanning multiple URLs
    # without an explicit output file)
    def setup_batch_reports(self):
        self.batch = True
        if not self.options.output_file:
            self.batch_session = "BATCH-{0}".format(
                time.strftime("%y-%m-%d_%H-%M-%S"))
            self.batch_directory_path = FileUtils.build_path(
                self.report_path, self.batch_session)

            if not FileUtils.exists(self.batch_directory_path):
                FileUtils.create_directory(self.batch_directory_path)

                if not FileUtils.exists(self.batch_directory_path):
                    self.output.error(
                        "Couldn't create batch folder at {}".format(
                            self.batch_directory_path))
                    exit(1)

    # Get file extension for report format ("plain"/"simple" map to .txt)
    def get_output_extension(self):
        if self.options.output_format not in ("plain", "simple"):
            return ".{0}".format(self.options.output_format)
        else:
            return ".txt"

    # Create report file
    def setup_reports(self):
        if self.options.output_file:
            output_file = FileUtils.get_abs_path(self.options.output_file)
            self.output.output_file(output_file)
        else:
            if len(self.url_list) > 1:
                self.setup_batch_reports()
                filename = "BATCH"
                filename += self.get_output_extension()
                directory_path = self.batch_directory_path
            else:
                parsed = urlparse(self.url_list[0])
                filename = ("{}_".format(parsed.path))
                filename += time.strftime("%y-%m-%d_%H-%M-%S")
                filename += self.get_output_extension()
                directory_path = FileUtils.build_path(
                    self.report_path, clean_filename(parsed.netloc))

            filename = clean_filename(filename)
            output_file = FileUtils.build_path(directory_path, filename)

            # Never overwrite an existing report: append _2, _3, ...
            if FileUtils.exists(output_file):
                i = 2
                while FileUtils.exists(output_file + "_" + str(i)):
                    i += 1

                output_file += "_" + str(i)

            if not FileUtils.exists(directory_path):
                FileUtils.create_directory(directory_path)

                if not FileUtils.exists(directory_path):
                    self.output.error(
                        "Couldn't create the reports folder at {}".format(
                            directory_path))
                    exit(1)

            self.output.output_file(output_file)

        if self.options.output_format:
            self.report_manager = ReportManager(
                self.options.output_format, self.options.output_file
                or output_file)
        else:
            self.report_manager = ReportManager("plain", output_file)

    # Check if output directory is writable
    def validate_dir(self, path):
        if not FileUtils.exists(path):
            # Bugfix: recurse with the correct name (was
            # `validate_output_loc`, which doesn't exist) and stop here --
            # the missing directory is created later by the caller, so only
            # its nearest existing ancestor needs to be checked
            self.validate_dir(FileUtils.parent(path))
            return
        if not FileUtils.is_dir(path):
            self.output.error(
                "{0} is a file, should be a directory".format(path))
            exit(1)
        if not FileUtils.can_write(path):
            self.output.error("Directory {0} is not writable".format(path))
            exit(1)

    # Validate the response by different filters
    def is_valid(self, path, res):
        if res.status in self.options.exclude_status_codes:
            return False

        if res.status not in (self.options.include_status_codes
                              or range(100, 1000)):
            return False

        if self.blacklists.get(res.status) and path in self.blacklists.get(
                res.status):
            return False

        if human_size(res.length) in self.options.exclude_sizes:
            return False

        if res.length < self.options.minimum_response_size:
            return False

        # Chained comparison: a maximum of 0 disables the filter
        if res.length > self.options.maximum_response_size != 0:
            return False

        for exclude_text in self.options.exclude_texts:
            if exclude_text in res.content:
                return False

        for exclude_regexp in self.options.exclude_regexps:
            if re.search(exclude_regexp, res.content) is not None:
                return False

        for exclude_redirect in self.options.exclude_redirects:
            # Bugfix: require a redirect before inspecting it, and group the
            # substring/regex checks -- the previous `A and B or C` form
            # evaluated re.match() with a missing redirect, raising TypeError
            if res.redirect and (exclude_redirect in res.redirect
                                 or re.match(exclude_redirect, res.redirect)):
                return False

        return True

    # Callback for found paths
    def match_callback(self, path, response):
        self.index += 1

        if response.status in self.options.skip_on_status:
            self.skip = "Skipped the target due to {} status code".format(
                response.status)
            return

        if not self.is_valid(path, response):
            return

        added_to_queue = False

        # Queue the path for recursive scanning when any recursion mode is on
        if response.status in self.options.recursion_status_codes and any(
            (self.options.recursive, self.options.deep_recursive,
             self.options.force_recursive)):
            if response.redirect:
                added_to_queue = self.add_redirect_directory(path, response)
            else:
                added_to_queue = self.add_directory(path)

        self.output.status_report(response, self.options.full_url,
                                  added_to_queue)

        if self.options.replay_proxy:
            self.requester.request(path, proxy=self.options.replay_proxy)

        self.report.add_result(self.current_directory + path, response)
        self.report_manager.update_report(self.report)

    # Callback for invalid paths: refresh the progress line
    def not_found_callback(self, *args):
        self.index += 1
        self.output.last_path(
            self.index,
            len(self.dictionary),
            self.current_job,
            self.jobs_count,
            self.fuzzer.stand_rate,
        )

    # Callback for errors while fuzzing
    def error_callback(self, path, error_msg):
        if self.options.exit_on_error:
            self.exit = "Canceled due to an error"

        self.output.add_connection_error()

    # Append a timestamped line to the log file (no-op without --log)
    def write_log(self, msg):
        if not self.options.log_file:
            return

        line = time.strftime("[%y-%m-%d %H:%M:%S] ")
        line += msg + NEW_LINE
        FileUtils.write_lines(self.options.log_file, line)

    # Write request to log file
    def append_log(self, path, response):
        msg = "{} {} {} {}".format(self.requester.ip or "0", response.status,
                                   self.httpmethod, response.url)

        if response.redirect:
            msg += " - REDIRECT TO: {}".format(response.redirect)
        msg += " (LENGTH: {})".format(response.length)

        # Serialize writes coming from multiple fuzzer threads
        with self.threads_lock:
            self.write_log(msg)

    # Write error to log file
    def append_error_log(self, path, error_msg):
        url = self.requester.base_url + self.requester.base_path + self.current_directory + path
        msg = "ERROR: {} {}".format(self.httpmethod, url)
        msg += NEW_LINE + " " * 4 + error_msg
        with self.threads_lock:
            self.write_log(msg)

    # Handle CTRL+C
    def handle_pause(self):
        self.output.warning("CTRL+C detected: Pausing threads, please wait...")
        self.fuzzer.pause()

        # Give the worker threads up to ~7 seconds to come to a stop
        waited = 0
        while waited < 7:
            if self.fuzzer.is_stopped():
                break

            time.sleep(0.35)
            waited += 0.35

        while True:
            msg = "[q]uit / [c]ontinue"

            if not self.directories.empty():
                msg += " / [n]ext"

            if len(self.url_list) > 1:
                msg += " / [s]kip target"

            self.output.in_line(msg + ": ")

            option = input()

            if option.lower() == "q":
                self.close("Canceled by the user")

            elif option.lower() == "c":
                self.fuzzer.resume()
                return

            elif option.lower() == "n" and not self.directories.empty():
                self.fuzzer.stop()
                return

            elif option.lower() == "s" and len(self.url_list) > 1:
                raise SkipTargetInterrupt

    # Monitor the fuzzing process
    def process(self):
        while True:
            try:
                while not self.fuzzer.wait(0.3):
                    # Chained comparison: a maxtime of 0 disables the limit
                    if time.time() - self.start_time > self.options.maxtime != 0:
                        self.skip = "Canceled because the runtime exceeded the maximum set by user"

                    if self.skip:
                        self.close(self.skip, skip=True)
                    elif self.exit:
                        self.close(self.exit)
                        break
                break

            except KeyboardInterrupt:
                self.handle_pause()

    # Preparation between subdirectory scans
    def prepare(self):
        while not self.directories.empty():
            gc.collect()
            self.current_job += 1
            self.index = 0
            self.current_directory = self.directories.get()
            self.output.warning("\n[{1}] Starting: {0}".format(
                self.current_directory, time.strftime("%H:%M:%S")))
            self.fuzzer.requester.base_path = self.requester.base_path + self.current_directory
            self.fuzzer.start()
            self.process()

        self.report.completed = True

    # Add directory to the recursion queue; returns True if anything was added
    def add_directory(self, path):
        dirs = []
        added = False
        # Remove parameters and fragment from the URL
        path = path.split("?")[0].split("#")[0]
        full_path = self.current_directory + path

        if any((path.startswith(directory)
                for directory in self.options.exclude_subdirs)):
            return False

        if self.options.force_recursive and not full_path.endswith("/"):
            full_path += "/"

        if self.options.deep_recursive:
            # Queue every ancestor directory of the path (a/b/c -> a/, a/b/)
            i = 0
            for _ in range(path.count("/")):
                i = path.index("/", i) + 1
                dirs.append(self.current_directory + path[:i])
        elif self.options.recursive and full_path.endswith("/"):
            dirs.append(full_path)

        for directory in dirs:
            if directory in self.pass_dirs:
                continue
            # Chained comparison: a recursion_depth of 0 means unlimited
            elif directory.count("/") > self.options.recursion_depth != 0:
                continue

            self.directories.put(directory)
            self.pass_dirs.append(directory)

            self.jobs_count += 1
            added = True

        return added

    # Resolve the redirect and add the path to the recursion queue
    # if it's a subdirectory of the current URL
    def add_redirect_directory(self, path, response):
        redirect_url = urljoin(self.requester.base_url, response.redirect)
        redirect_path = urlparse(redirect_url).path

        if redirect_path == response.path + "/":
            path = redirect_path[len(self.requester.base_path +
                                     self.current_directory) + 1:]
            return self.add_directory(path)

        return False

    # Stop the fuzzer and exit (or skip to the next target when skip=True)
    def close(self, msg=None, skip=False):
        self.fuzzer.stop()
        self.output.error(msg)
        if skip:
            raise SkipTargetInterrupt

        self.report_manager.update_report(self.report)
        exit(0)
Example #8
0
class Controller:
    def __init__(self, options, output):
        """Restore a saved session when one is given, otherwise set up a
        fresh scan, then run it."""
        session = options.session_file
        if session:
            self._import(session)
            self.from_export = True
        else:
            self.setup(options, output)
            self.from_export = False

        self.run()

    def _import(self, session_file):
        """Restore the controller state written by _export().

        NOTE(review): this unpickles the file -- loading an untrusted
        session file can execute arbitrary code; only open your own exports.
        """
        with open(session_file, "rb") as fd:
            indict, last_output = unpickle(fd)

        # Attributes already set on self take precedence over restored ones
        self.__dict__ = {**indict, **vars(self)}
        print(last_output)

    def _export(self, session_file):
        """Pickle the controller state plus the captured screen output."""
        # Roll back the counter so the interrupted job re-runs after restore
        self.current_job -= 1
        # Save written output
        last_output = self.output.buffer.rstrip()

        # This attribute doesn't need to be saved
        del self.fuzzer

        with open(session_file, "wb") as fd:
            pickle((vars(self), last_output), fd)

    def setup(self, options, output):
        """One-time initialization of a fresh (non-restored) scan.

        Builds the requester and the wordlist, prepares log/report
        locations (exiting on unwritable paths), prints the banner and
        configuration, and initializes all scan counters.
        """
        self.options, self.output = options, output

        if self.options.raw_file:
            # A raw HTTP request file overrides URL, method, headers and body
            self.options.update(
                zip(
                    ["urls", "httpmethod", "headers", "data"],
                    parse_raw(self.options.raw_file),
                )
            )
        else:
            # User headers win over the defaults
            self.options.headers = {**DEFAULT_HEADERS, **self.options.headers}

            if self.options.cookie:
                self.options.headers["Cookie"] = self.options.cookie
            if self.options.useragent:
                self.options.headers["User-Agent"] = self.options.useragent

        self.random_agents = None
        if self.options.use_random_agents:
            self.random_agents = FileUtils.get_lines(
                FileUtils.build_path(SCRIPT_PATH, "db", "user-agents.txt")
            )

        self.requester = Requester(
            max_pool=self.options.threads_count,
            max_retries=self.options.max_retries,
            timeout=self.options.timeout,
            ip=self.options.ip,
            proxy=self.options.proxy,
            follow_redirects=self.options.follow_redirects,
            httpmethod=self.options.httpmethod,
            headers=self.options.headers,
            data=self.options.data,
            scheme=self.options.scheme,
            random_agents=self.random_agents,
        )
        self.dictionary = Dictionary(
            paths=self.options.wordlist,
            extensions=self.options.extensions,
            suffixes=self.options.suffixes,
            prefixes=self.options.prefixes,
            lowercase=self.options.lowercase,
            uppercase=self.options.uppercase,
            capitalization=self.options.capitalization,
            force_extensions=self.options.force_extensions,
            exclude_extensions=self.options.exclude_extensions,
            no_extension=self.options.no_extension,
            only_selected=self.options.only_selected,
        )
        self.blacklists = Dictionary.generate_blacklists(self.options.extensions)
        self.targets = options.urls
        self.start_time = time.time()
        self.passed_urls = set()
        self.directories = []
        self.report = None
        self.batch = False
        self.current_job = 0
        self.jobs_count = 0
        self.errors = 0
        self.consecutive_errors = 0

        if self.options.auth:
            self.requester.set_auth(self.options.auth_type, self.options.auth)

        if self.options.proxy_auth:
            self.requester.set_proxy_auth(self.options.proxy_auth)

        if self.options.log_file:
            self.options.log_file = FileUtils.get_abs_path(self.options.log_file)

            try:
                FileUtils.create_dir(FileUtils.parent(self.options.log_file))
                if not FileUtils.can_write(self.options.log_file):
                    raise Exception

            except Exception:
                self.output.error(
                    f"Couldn't create log file at {self.options.log_file}"
                )
                exit(1)

        if self.options.autosave_report:
            self.report_path = self.options.output_path or FileUtils.build_path(
                SCRIPT_PATH, "reports"
            )

            try:
                FileUtils.create_dir(self.report_path)
                if not FileUtils.can_write(self.report_path):
                    raise Exception

            except Exception:
                self.output.error(
                    f"Couldn't create report folder at {self.report_path}"
                )
                exit(1)

        self.output.header(BANNER)
        # NOTE(review): options is read both as attributes and as a mapping
        # here -- presumably an attribute-dict type; confirm in its definition
        self.output.config(
            ", ".join(self.options["extensions"]),
            ", ".join(self.options["prefixes"]),
            ", ".join(self.options["suffixes"]),
            str(self.options["threads_count"]),
            str(len(self.dictionary)),
            str(self.options["httpmethod"]),
        )

        self.setup_reports()

        if self.options.log_file:
            self.output.log_file(self.options.log_file)

    def run(self):
        """Scan every queued target in order.

        Each target gets a fresh Fuzzer and (unless restored from an export)
        a fresh Report. Recoverable errors and CTRL+C skip to the next
        target; QuitInterrupt flushes the report and exits the program.
        """
        match_callbacks = (self.match_callback, self.append_traffic_log)
        not_found_callbacks = (self.not_found_callback, self.append_traffic_log)
        error_callbacks = (self.error_callback, self.append_error_log)

        while self.targets:
            url = self.targets[0]
            self.current_directory = None

            try:
                self.requester.set_target(url if url.endswith("/") else url + "/")
                self.url = self.requester.url + self.requester.base_path

                # Queue the starting subdirectories unless some are left
                # over from a restored session
                if not self.directories:
                    for subdir in self.options.scan_subdirs:
                        self.add_directory(subdir)

                if not self.from_export:
                    self.output.set_target(self.url)

                # Test request to check if server is up
                self.requester.request("")
                log(
                    self.options.log_file,
                    "info",
                    f"Test request sent for: {self.url}",
                )

                self.output.url = self.requester.url
                self.fuzzer = Fuzzer(
                    self.requester,
                    self.dictionary,
                    suffixes=self.options.suffixes,
                    prefixes=self.options.prefixes,
                    exclude_response=self.options.exclude_response,
                    threads=self.options.threads_count,
                    delay=self.options.delay,
                    maxrate=self.options.maxrate,
                    match_callbacks=match_callbacks,
                    not_found_callbacks=not_found_callbacks,
                    error_callbacks=error_callbacks,
                )

                if not self.from_export:
                    self.report = Report(
                        self.requester.host,
                        self.requester.port,
                        self.requester.scheme,
                        self.requester.base_path,
                    )

                self.start()

            except (
                InvalidURLException,
                RequestException,
                SkipTargetInterrupt,
                KeyboardInterrupt,
            ) as e:
                # Abandon this target: drop its pending jobs and rewind
                # the wordlist for the next one
                self.jobs_count -= len(self.directories)
                self.directories.clear()
                self.dictionary.reset()

                if e.args:
                    self.output.error(e.args[0])
                    self.append_error_log("", e.args[1] if len(e.args) > 1 else e.args[0])

            except QuitInterrupt as e:
                self.output.error(e.args[0])
                self.report_manager.write_report()
                exit(0)

            finally:
                # Persist whatever was found and move on to the next target
                self.report_manager.write_report()
                self.targets.pop(0)

        self.output.warning("\nTask Completed")

    def start(self):
        """Brute-force every queued directory of the current target."""
        first = True

        while self.directories:
            try:
                gc.collect()

                self.current_directory = self.directories[0]
                self.current_job += 1

                if not self.from_export:
                    if first:
                        self.output.new_line()

                    current_time = time.strftime("%H:%M:%S")
                    self.output.warning(
                        f"[{current_time}] Starting: {self.current_directory}"
                    )

                self.fuzzer.set_base_path(self.requester.base_path + self.current_directory)
                self.fuzzer.start()
                self.process()

            except KeyboardInterrupt:
                # CTRL+C during a job is handled inside process(); reaching
                # here just moves on to the next queued directory
                pass

            finally:
                # Always consume the finished directory and rewind the
                # wordlist, even when the job was interrupted
                self.dictionary.reset()
                self.directories.pop(0)

                self.from_export = first = False

    def setup_batch_reports(self):
        """Create the folder that will hold the reports of a batch scan.

        Returns the created directory path; exits the program on failure.
        """
        self.batch = True
        folder = FileUtils.build_path(
            self.report_path,
            "BATCH-" + time.strftime("%y-%m-%d_%H-%M-%S"),
        )

        try:
            FileUtils.create_dir(folder)
        except Exception:
            self.output.error(f"Couldn't create batch folder at {folder}")
            exit(1)

        return folder

    def get_output_extension(self):
        if self.options.output_format in ("plain", "simple"):
            return ".txt"

        return f".{self.options.output_format}"

    def setup_reports(self):
        """Create report file"""

        output_file = None

        if self.options.output_file:
            output_file = FileUtils.get_abs_path(self.options.output_file)
        elif self.options.autosave_report:
            if len(self.targets) > 1:
                # Batch scan: one folder with a single BATCH report
                directory_path = self.setup_batch_reports()
                filename = "BATCH" + self.get_output_extension()
            else:
                # Single target: <path>_<timestamp>.<ext> under a
                # per-host folder
                parsed = urlparse(self.options.urls[0])
                filename = get_valid_filename(f"{parsed.path}_")
                filename += time.strftime("%y-%m-%d_%H-%M-%S")
                filename += self.get_output_extension()
                directory_path = FileUtils.build_path(
                    self.report_path, get_valid_filename(parsed.netloc)
                )

            output_file = FileUtils.build_path(directory_path, filename)

            # Never overwrite an existing report: append _2, _3, ...
            if FileUtils.exists(output_file):
                i = 2
                while FileUtils.exists(f"{output_file}_{i}"):
                    i += 1

                output_file += f"_{i}"

            try:
                FileUtils.create_dir(directory_path)
            except Exception:
                self.output.error(
                    f"Couldn't create the reports folder at {directory_path}"
                )
                exit(1)

        # output_file stays None when no report was requested
        self.report_manager = ReportManager(self.options.output_format, output_file)

        if output_file:
            self.output.output_file(output_file)

    def is_valid(self, path, res):
        """Validate the response by different filters"""

        if res.status in self.options.exclude_status_codes:
            return False

        # An empty include list means every status code is allowed
        if res.status not in (self.options.include_status_codes or range(100, 1000)):
            return False

        # Per-status blacklisted paths (e.g. known false positives)
        if self.blacklists.get(res.status) and path in self.blacklists.get(res.status):
            return False

        if human_size(res.length).lstrip() in self.options.exclude_sizes:
            return False

        if res.length < self.options.minimum_response_size:
            return False

        # Chained comparison: a maximum of 0 disables the filter
        if res.length > self.options.maximum_response_size > 0:
            return False

        if any(ex_text in res.content for ex_text in self.options.exclude_texts):
            return False

        if self.options.exclude_regex and re.search(
            self.options.exclude_regex, res.content
        ):
            return False

        # NOTE(review): assumes res.redirect is a string (possibly empty)
        # when exclude_redirect is set -- a None redirect would raise here;
        # confirm against the Response implementation
        if self.options.exclude_redirect and (
            self.options.exclude_redirect in res.redirect
            or re.search(self.options.exclude_redirect, res.redirect)
        ):
            return False

        return True

    def reset_consecutive_errors(self):
        # Any successful callback breaks the error streak that would
        # otherwise trigger SkipTargetInterrupt in error_callback()
        self.consecutive_errors = 0

    def match_callback(self, path, response):
        """Handle a hit: filter it, report it, and queue it for recursion.

        Raises SkipTargetInterrupt when the status code is configured as a
        skip trigger.
        """
        if response.status in self.options.skip_on_status:
            raise SkipTargetInterrupt(
                f"Skipped the target due to {response.status} status code"
            )

        if not self.is_valid(path, response):
            return

        self.output.status_report(response, self.options.full_url)

        # Queue the path for recursive scanning when any recursion mode is on
        if response.status in self.options.recursion_status_codes and any(
            (
                self.options.recursive,
                self.options.deep_recursive,
                self.options.force_recursive,
            )
        ):
            if response.redirect:
                added_to_queue = self.recur_for_redirect(path, response)
            else:
                added_to_queue = self.recur(path)

            if added_to_queue:
                self.output.new_directories(added_to_queue)

        # Optionally resend the matched request through a replay proxy
        if self.options.replay_proxy:
            self.requester.request(path, proxy=self.options.replay_proxy)

        self.report.add_result(self.current_directory + path, response)
        self.report_manager.update_report(self.report)
        self.reset_consecutive_errors()

    def not_found_callback(self, *args):
        """Refresh the progress line after a miss."""
        progress = (
            self.dictionary.index,
            len(self.dictionary),
            self.current_job,
            self.jobs_count,
            self.fuzzer.rate,
            self.errors,
        )
        self.output.last_path(*progress)
        self.reset_consecutive_errors()
        self.reset_consecutive_errors()

    def error_callback(self, *args):
        """Register a request error; quit or skip the target when limits are hit."""
        # The user asked to abort the whole scan on the first error.
        if self.options.exit_on_error:
            raise QuitInterrupt("Canceled due to an error")

        self.errors, self.consecutive_errors = (
            self.errors + 1,
            self.consecutive_errors + 1,
        )

        # Give up on a target that keeps failing back-to-back.
        if self.consecutive_errors > MAX_CONSECUTIVE_REQUEST_ERRORS:
            raise SkipTargetInterrupt("Too many request errors")

    def append_traffic_log(self, path, response):
        """Write request to log file"""

        url = join_path(self.requester.url, response.path)
        msg = f"{response.status} {self.options.httpmethod} {url}"

        if response.redirect:
            msg += f" - REDIRECT TO: {response.redirect}"

        msg += f" (LENGTH: {response.length})"

        log(self.options.log_file, "traffic", msg)

    def append_error_log(self, path, error_msg):
        """Write error to log file"""

        url = join_path(self.url, self.current_directory, path)
        # Request line, then the error message indented on the next line.
        entry = NEW_LINE.join(
            (f"{self.options.httpmethod} {url}", " " * 4 + error_msg)
        )
        log(self.options.log_file, "error", entry)

    def handle_pause(self):
        """Pause the fuzzer on CTRL+C and prompt the user for what to do next.

        Options offered: quit (with an optional session save), continue,
        skip to the next directory (only when more are queued), or skip the
        current target (only when more targets remain).
        """
        self.output.warning(
            "CTRL+C detected: Pausing threads, please wait...", do_save=False
        )
        self.fuzzer.pause()

        # Wait for worker threads to stop, but no longer than PAUSING_WAIT_TIMEOUT.
        start_time = time.time()
        while True:
            is_timed_out = time.time() - start_time > PAUSING_WAIT_TIMEOUT
            if self.fuzzer.is_stopped() or is_timed_out:
                break

            time.sleep(0.2)

        # Prompt loop: repeats until the user picks a recognized option.
        while True:
            msg = "[q]uit / [c]ontinue"

            # Extra choices only shown when there is actually something to skip to.
            if len(self.directories) > 1:
                msg += " / [n]ext"

            if len(self.targets) > 1:
                msg += " / [s]kip target"

            self.output.in_line(msg + ": ")

            option = input()

            if option.lower() == "q":
                self.output.in_line("[s]ave / [q]uit without saving: ")

                option = input()

                if option.lower() == "s":
                    msg = f"Save to file [{self.options.session_file or DEFAULT_SESSION_FILE}]: "

                    self.output.in_line(msg)

                    # Empty input falls back to the session file option, then the default.
                    session_file = (
                        input() or self.options.session_file or DEFAULT_SESSION_FILE
                    )

                    self._export(session_file)
                    raise QuitInterrupt(f"Session saved to: {session_file}")
                elif option.lower() == "q":
                    raise QuitInterrupt("Canceled by the user")

            elif option.lower() == "c":
                self.fuzzer.resume()
                return

            elif option.lower() == "n" and len(self.directories) > 1:
                # Stop the current fuzzing job; control returns to the scan loop.
                self.fuzzer.stop()
                return

            elif option.lower() == "s" and len(self.targets) > 1:
                raise SkipTargetInterrupt("Target skipped by the user")

    def is_timed_out(self):
        """Return True when a positive ``maxtime`` limit has been exceeded."""
        limit = self.options.maxtime
        return limit > 0 and time.time() - self.start_time > limit

    def process(self):
        """Drive the fuzzer to completion, handling pauses and the runtime limit."""
        while True:
            try:
                # Poll every 0.25s; each tick also checks the time budget.
                while not self.fuzzer.wait(0.25):
                    if self.is_timed_out():
                        raise SkipTargetInterrupt(
                            "Runtime exceeded the maximum set by the user"
                        )

                break

            except KeyboardInterrupt:
                # CTRL+C pauses instead of quitting; handle_pause may resume,
                # stop the fuzzer, or raise to quit/skip.
                self.handle_pause()

    def add_directory(self, path):
        """Add directory to the recursion queue.

        Skips paths under excluded sub-directories, URLs that were already
        queued, and paths deeper than a positive ``recursion_depth``.
        """

        # Pass if path is in excluded directories
        if any(
            path.startswith(directory) for directory in self.options.exclude_subdirs
        ):
            return

        # Renamed from `dir` to avoid shadowing the builtin.
        full_dir = join_path(self.current_directory, path)
        url = join_path(self.url, full_dir)

        # The chained comparison is False whenever recursion_depth <= 0,
        # i.e. depth limiting is only applied when the option is positive.
        if url in self.passed_urls or full_dir.count("/") > self.options.recursion_depth > 0:
            return

        self.directories.append(full_dir)
        self.passed_urls.add(url)
        self.jobs_count += 1

    @locked
    def recur(self, path):
        """Queue recursion directories derived from *path*; return the newly added ones."""
        known_count = len(self.directories)
        path = clean_path(path)

        # force_recursive: also try the path as a directory.
        if self.options.force_recursive and not path.endswith("/"):
            path += "/"

        if self.options.deep_recursive:
            # Queue every intermediate level: a/, a/b/, a/b/c/, ...
            for pos, char in enumerate(path):
                if char == "/":
                    self.add_directory(path[: pos + 1])
        elif (
            self.options.recursive
            and path.endswith("/")
            and re.search(EXTENSION_REGEX, path[:-1]) is None
        ):
            self.add_directory(path)

        # Everything appended past the starting count is new.
        return self.directories[known_count:]

    def recur_for_redirect(self, path, response):
        """Recurse into the redirect location when it is the request path plus '/'."""
        redirect_path = parse_path(response.redirect)

        # Only follow the common "added trailing slash" redirect; anything
        # else returns None (nothing queued).
        if redirect_path != response.path + "/":
            return None

        prefix_len = len(self.requester.base_path + self.current_directory) + 1
        return self.recur(redirect_path[prefix_len:])
    def run(self):
        """Main scan loop: iterate over the queued targets and fuzz each one.

        For every target: set up the requester, seed the directory queue,
        send a test request, build the fuzzer, and start scanning. Target-level
        failures skip to the next target; ``QuitInterrupt`` ends the program.
        """
        # Each event fans out to its behavior handler plus the log writer.
        match_callbacks = (self.match_callback, self.append_traffic_log)
        not_found_callbacks = (self.not_found_callback, self.append_traffic_log)
        error_callbacks = (self.error_callback, self.append_error_log)

        while self.targets:
            url = self.targets[0]
            self.current_directory = None

            try:
                # Normalize the target URL to always end with a slash.
                self.requester.set_target(url if url.endswith("/") else url + "/")
                self.url = self.requester.url + self.requester.base_path

                # Seed the queue unless a restored session already filled it.
                if not self.directories:
                    for subdir in self.options.scan_subdirs:
                        self.add_directory(subdir)

                if not self.from_export:
                    self.output.set_target(self.url)

                # Test request to check if server is up
                self.requester.request("")
                log(
                    self.options.log_file,
                    "info",
                    f"Test request sent for: {self.url}",
                )

                self.output.url = self.requester.url
                self.fuzzer = Fuzzer(
                    self.requester,
                    self.dictionary,
                    suffixes=self.options.suffixes,
                    prefixes=self.options.prefixes,
                    exclude_response=self.options.exclude_response,
                    threads=self.options.threads_count,
                    delay=self.options.delay,
                    maxrate=self.options.maxrate,
                    match_callbacks=match_callbacks,
                    not_found_callbacks=not_found_callbacks,
                    error_callbacks=error_callbacks,
                )

                # A restored session keeps its existing report object.
                if not self.from_export:
                    self.report = Report(
                        self.requester.host,
                        self.requester.port,
                        self.requester.scheme,
                        self.requester.base_path,
                    )

                self.start()

            except (
                InvalidURLException,
                RequestException,
                SkipTargetInterrupt,
                KeyboardInterrupt,
            ) as e:
                # Abandon this target: drop its pending jobs and rewind the wordlist.
                self.jobs_count -= len(self.directories)
                self.directories.clear()
                self.dictionary.reset()

                if e.args:
                    self.output.error(e.args[0])
                    self.append_error_log("", e.args[1] if len(e.args) > 1 else e.args[0])

            except QuitInterrupt as e:
                # Hard quit: flush the report and exit the process.
                self.output.error(e.args[0])
                self.report_manager.write_report()
                exit(0)

            finally:
                # Persist results and move on to the next target either way.
                self.report_manager.write_report()
                self.targets.pop(0)

        self.output.warning("\nTask Completed")