Code Example #1
    def setup(self, options):
        self.options = options
        self.pass_dirs = ['']

        if options["raw_file"]:
            self.options.update(
                zip(["urls", "httpmethod", "headers", "data"],
                    parse_raw(options["raw_file"])))
        else:
            self.options["headers"] = {**DEFAULT_HEADERS, **options["headers"]}
            if options["cookie"]:
                self.options["headers"]["Cookie"] = options["cookie"]
            if options["useragent"]:
                self.options["headers"]["User-Agent"] = options["useragent"]

        self.random_agents = None
        if options["use_random_agents"]:
            self.random_agents = FileUtils.get_lines(
                FileUtils.build_path(SCRIPT_PATH, "db", "user-agents.txt"))

        self.targets.queue = deque(options["urls"])
        self.blacklists = Dictionary.generate_blacklists(options["extensions"])
        self.dictionary = Dictionary(
            paths=options["wordlist"],
            extensions=options["extensions"],
            suffixes=options["suffixes"],
            prefixes=options["prefixes"],
            lowercase=options["lowercase"],
            uppercase=options["uppercase"],
            capitalization=options["capitalization"],
            force_extensions=options["force_extensions"],
            exclude_extensions=options["exclude_extensions"],
            no_extension=options["no_extension"],
            only_selected=options["only_selected"])

        self.current_job = 0
        self.batch = False
        self.batch_session = None
        self.exit = None
        self.start_time = time.time()
        self.jobs_count = self.targets.qsize() * (
            len(options["scan_subdirs"]) if options["scan_subdirs"] else 1)

        if options["autosave_report"] or options["output_file"]:
            if options["autosave_report"]:
                self.report_path = options["output_location"] or FileUtils.build_path(
                    SCRIPT_PATH, "reports")
                self.create_dir(self.report_path)

        if options["log_file"]:
            options["log_file"] = FileUtils.get_abs_path(options["log_file"])
            self.create_dir(FileUtils.parent(options["log_file"]))
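
The raw-file branch above maps the tuple returned by parse_raw() onto four option keys via dict.update(zip(...)), which works because update() accepts any iterable of key/value pairs. Here is a minimal standalone sketch of that pattern; fake_parse_raw() is a hypothetical stand-in for dirsearch's parse_raw(), which is not shown in the snippet:

def fake_parse_raw(path):
    # Hypothetical stand-in: a real parser would read the raw HTTP request
    # from `path` and return (urls, method, headers, data).
    return (["http://example.com/"], "GET", {"Host": "example.com"}, None)

options = {"timeout": 10}
options.update(zip(["urls", "httpmethod", "headers", "data"],
                   fake_parse_raw("request.txt")))
print(options["httpmethod"])  # -> GET
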
Code Example #2
    def generate_blacklists(extensions, script_path):
        reext = re.compile(r"%ext%", re.IGNORECASE)
        blacklists = {}

        for status in [400, 403, 500]:
            blacklist_file_name = FileUtils.build_path(script_path, "db")
            blacklist_file_name = FileUtils.build_path(
                blacklist_file_name, "{}_blacklist.txt".format(status)
            )

            if not FileUtils.can_read(blacklist_file_name):
                # Skip if cannot read file
                continue

            blacklists[status] = []

            for line in FileUtils.get_lines(blacklist_file_name):
                # Skip comments
                if line.lstrip().startswith("#"):
                    continue

                if line.startswith("/"):
                    line = line[1:]

                # Classic dirsearch blacklist processing (with %EXT% keyword)
                if "%ext%" in line.lower():
                    for extension in extensions:
                        entry = reext.sub(extension, line)
                        blacklists[status].append(entry)

                # Forced extensions are not used here because -r is only meant for the wordlist;
                # applying them to the blacklist may create false negatives

                else:
                    blacklists[status].append(line)

        return blacklists
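
To see what the %EXT% handling above produces, here is a short, self-contained illustration of the same substitution; the extension list and blacklist line are invented for the example:

import re

# Illustration only: expand a %EXT%-style blacklist line for a made-up
# extension list, mirroring the substitution done in generate_blacklists().
reext = re.compile(r"%ext%", re.IGNORECASE)
extensions = ["php", "asp"]
line = "admin/index.%EXT%"

entries = [reext.sub(ext, line) for ext in extensions]
print(entries)  # -> ['admin/index.php', 'admin/index.asp']
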
Code Example #3
    def __init__(self, script_path, arguments, output):
        global VERSION
        program_banner = (
            open(FileUtils.build_path(script_path, "banner.txt"))
            .read()
            .format(**VERSION)
        )

        self.directories = Queue()
        self.script_path = script_path
        self.arguments = arguments
        self.output = output
        self.pass_dirs = ["/"]

        if arguments.raw_file:
            raw = Raw(arguments.raw_file, arguments.scheme)
            self.url_list = [raw.url]
            self.httpmethod = raw.method
            self.data = raw.body
            self.headers = raw.headers
        else:
            default_headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
                "Accept-Language": "*",
                "Accept-Encoding": "*",
                "Keep-Alive": "timeout=15, max=1000",
                "Cache-Control": "max-age=0",
            }

            self.url_list = arguments.url_list
            self.httpmethod = arguments.httpmethod.lower()
            self.data = arguments.data
            self.headers = {**default_headers, **arguments.headers}
            if arguments.cookie:
                self.headers["Cookie"] = arguments.cookie
            if arguments.useragent:
                self.headers["User-Agent"] = arguments.useragent

        self.recursion_depth = arguments.recursion_depth

        if arguments.logs_location and self.validate_path(arguments.logs_location):
            self.logs_path = FileUtils.build_path(arguments.logs_location)
        elif self.validate_path(self.script_path):
            self.logs_path = FileUtils.build_path(self.script_path, "logs")
            if not FileUtils.exists(self.logs_path):
                FileUtils.create_directory(self.logs_path)

        if arguments.output_location and self.validate_path(arguments.output_location):
            self.report_path = FileUtils.build_path(arguments.output_location)
        elif self.validate_path(self.script_path):
            self.report_path = FileUtils.build_path(self.script_path, "reports")
            if not FileUtils.exists(self.report_path):
                FileUtils.create_directory(self.report_path)

        self.blacklists = Dictionary.generate_blacklists(arguments.extensions, self.script_path)
        self.extensions = arguments.extensions
        self.prefixes = arguments.prefixes
        self.suffixes = arguments.suffixes
        self.threads_count = arguments.threads_count
        self.output_file = arguments.output_file
        self.output_format = arguments.output_format
        self.include_status_codes = arguments.include_status_codes
        self.exclude_status_codes = arguments.exclude_status_codes
        self.exclude_sizes = arguments.exclude_sizes
        self.exclude_texts = arguments.exclude_texts
        self.exclude_regexps = arguments.exclude_regexps
        self.exclude_redirects = arguments.exclude_redirects
        self.replay_proxy = arguments.replay_proxy
        self.recursive = self.arguments.recursive
        self.deep_recursive = arguments.deep_recursive
        self.force_recursive = arguments.force_recursive
        self.recursion_status_codes = arguments.recursion_status_codes
        self.minimum_response_size = arguments.minimum_response_size
        self.maximum_response_size = arguments.maximum_response_size
        self.scan_subdirs = arguments.scan_subdirs
        self.exclude_subdirs = arguments.exclude_subdirs
        self.full_url = arguments.full_url
        self.skip_on_status = arguments.skip_on_status
        self.exit_on_error = arguments.exit_on_error
        self.maxtime = arguments.maxtime

        self.dictionary = Dictionary(
            paths=arguments.wordlist,
            extensions=arguments.extensions,
            suffixes=arguments.suffixes,
            prefixes=arguments.prefixes,
            lowercase=arguments.lowercase,
            uppercase=arguments.uppercase,
            capitalization=arguments.capitalization,
            force_extensions=arguments.force_extensions,
            exclude_extensions=arguments.exclude_extensions,
            no_extension=arguments.no_extension,
            only_selected=arguments.only_selected
        )

        self.jobs_count = len(self.url_list) * (
            len(self.scan_subdirs) if self.scan_subdirs else 1
        )
        self.current_job = 0
        self.error_log = None
        self.error_log_path = None
        self.threads_lock = threading.Lock()
        self.batch = False
        self.batch_session = None

        self.report_manager = EmptyReportManager()
        self.report = EmptyReport()
        self.timer = EmptyTimer()

        self.output.header(program_banner)
        self.print_config()

        if arguments.use_random_agents:
            self.random_agents = FileUtils.get_lines(
                FileUtils.build_path(script_path, "db", "user-agents.txt")
            )

        if arguments.autosave_report or arguments.output_file:
            self.setup_reports()

        self.setup_error_logs()
        self.output.error_log_file(self.error_log_path)

        if self.maxtime:
            threading.Thread(target=self.time_monitor, daemon=True).start()

        try:
            for url in self.url_list:
                try:
                    gc.collect()
                    url = url if url.endswith("/") else url + "/"
                    self.output.set_target(url, arguments.scheme)

                    try:
                        self.requester = Requester(
                            url,
                            max_pool=arguments.threads_count,
                            max_retries=arguments.max_retries,
                            timeout=arguments.timeout,
                            ip=arguments.ip,
                            proxy=arguments.proxy,
                            proxylist=arguments.proxylist,
                            redirect=arguments.redirect,
                            request_by_hostname=arguments.request_by_hostname,
                            httpmethod=self.httpmethod,
                            data=self.data,
                            scheme=arguments.scheme,
                        )

                        for key, value in self.headers.items():
                            self.requester.set_header(key, value)

                        if arguments.auth:
                            self.requester.set_auth(arguments.auth_type, arguments.auth)

                        # Test request to see if server is up
                        self.requester.request("")

                        if arguments.autosave_report or arguments.output_file:
                            self.report = Report(self.requester.host, self.requester.port, self.requester.protocol, self.requester.base_path)

                    except RequestException as e:
                        self.output.error(e.args[0]["message"])
                        raise SkipTargetInterrupt

                    if arguments.use_random_agents:
                        self.requester.set_random_agents(self.random_agents)

                    # Initialize directories Queue with start Path
                    self.base_path = self.requester.base_path
                    self.status_skip = None

                    if not self.scan_subdirs:
                        self.directories.put("")

                    for subdir in self.scan_subdirs:
                        self.directories.put(subdir)
                        self.pass_dirs.append(subdir)

                    match_callbacks = [self.match_callback]
                    not_found_callbacks = [self.not_found_callback]
                    error_callbacks = [self.error_callback, self.append_error_log]

                    self.fuzzer = Fuzzer(
                        self.requester,
                        self.dictionary,
                        suffixes=arguments.suffixes,
                        prefixes=arguments.prefixes,
                        exclude_content=arguments.exclude_content,
                        threads=arguments.threads_count,
                        delay=arguments.delay,
                        maxrate=arguments.maxrate,
                        match_callbacks=match_callbacks,
                        not_found_callbacks=not_found_callbacks,
                        error_callbacks=error_callbacks,
                    )
                    try:
                        self.prepare()
                    except RequestException as e:
                        self.output.error(e.args[0]["message"])
                        raise SkipTargetInterrupt

                except SkipTargetInterrupt:
                    self.report.completed = True
                    continue

        except KeyboardInterrupt:
            self.output.error("\nCanceled by the user")
            exit(0)

        finally:
            self.error_log.close()

        self.output.warning("\nTask Completed")
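
The per-target loop above relies on a small custom exception, SkipTargetInterrupt, to abandon a target from deep inside the setup code and continue with the next URL. A minimal sketch of that control-flow pattern, with probe() as a made-up stand-in for the initial test request:

class SkipTargetInterrupt(Exception):
    """Raised to abandon the current target and move on to the next one."""

def probe(url):
    # Hypothetical stand-in for the initial test request; pretend one host is down.
    if "down" in url:
        raise ConnectionError("connection refused")

for url in ["http://ok.example/", "http://down.example/", "http://ok2.example/"]:
    try:
        try:
            probe(url)
        except ConnectionError as e:
            print(f"skipping {url}: {e}")
            raise SkipTargetInterrupt
        print(f"scanning {url}")
    except SkipTargetInterrupt:
        continue
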
Code Example #4
    def generate(self):
        """
        Dictionary.generate() behaviour

        Classic dirsearch wordlist:
          1. If %EXT% keyword is present, append one with each extension REPLACED.
          2. If the special word is not present, append the line unmodified.

        Forced extensions wordlist (NEW):
          This type of wordlist processing is a mix between classic processing
          and DirBuster processing.
              1. If %EXT% keyword is present in the line, immediately process as "classic dirsearch" (1).
              2. If the line does not include the special word AND is NOT terminated by a slash,
                append one with each extension APPENDED (line.ext) and ONLY ONE with a slash.
              3. If the line does not include the special word and IS ALREADY terminated by slash,
                append line unmodified.
        """

        reext = re.compile(EXTENSION_TAG, re.IGNORECASE)
        result = []

        # Allow using multiple dictionaries at once
        for dict_file in self._dictionary_files:
            for line in FileUtils.get_lines(dict_file):
                if line.startswith("/"):
                    line = line[1:]

                if self.no_extension:
                    line = line.split(".")[0]

                # Skip comments
                if line.lstrip().startswith("#"):
                    continue

                # Skip if the path contains excluded extensions
                if any("." + extension in line
                       for extension in self.exclude_extensions):
                    continue

                # Classic dirsearch wordlist processing (with %EXT% keyword)
                if EXTENSION_TAG in line.lower():
                    for extension in self.extensions:
                        newline = reext.sub(extension, line)
                        result.append(newline)
                # If "forced extensions" is used and the path is not a directory (terminated by /) or has
                # had an extension already, append extensions to the path
                elif (self.force_extensions and not line.endswith("/")
                      and not re.search(EXTENSION_REGEX, line)):
                    for extension in self.extensions:
                        result.append(line + f".{extension}")

                    result.append(line)
                    result.append(line + "/")
                # Append line unmodified.
                elif not self.only_selected or any(
                        line.endswith(f".{extension}")
                        for extension in self.extensions):
                    result.append(line)

        # Re-add dictionary entries with prefixes (build the new entries first,
        # so the list is not extended while it is being iterated)
        result.extend([pref + path for path in result for pref in self.prefixes
                       if not path.startswith(pref)])
        # Re-add dictionary entries with suffixes
        result.extend([path + suff for path in result for suff in self.suffixes
                       if not path.endswith(("/", suff))])

        if self.lowercase:
            self._entries = tuple(entry.lower() for entry in uniq(result))
        elif self.uppercase:
            self._entries = tuple(entry.upper() for entry in uniq(result))
        elif self.capitalization:
            self._entries = tuple(entry.capitalize() for entry in uniq(result))
        else:
            self._entries = tuple(uniq(result))

        del result
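
To make the docstring's forced-extensions rule concrete, the following standalone sketch reproduces only that branch for a single wordlist line; the %EXT% and only_selected cases are left out, and the extension list and the simplified stand-in for EXTENSION_REGEX are assumptions for the example:

import re

# Illustration only: "forced extensions" expansion of one wordlist line.
extensions = ["php", "html"]
extension_regex = r"\.[a-zA-Z0-9]+$"  # rough "already ends with an extension" check

def expand(line):
    result = []
    if not line.endswith("/") and not re.search(extension_regex, line):
        for ext in extensions:
            result.append(f"{line}.{ext}")
        result.append(line)
        result.append(line + "/")
    else:
        result.append(line)
    return result

print(expand("backup"))   # -> ['backup.php', 'backup.html', 'backup', 'backup/']
print(expand("assets/"))  # -> ['assets/']
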
Code Example #5
File: controller.py  Project: gprime31/dirsearch
    def __init__(self, options, output):
        self.directories = Queue()
        self.output = output
        self.options = options
        self.pass_dirs = ["/"]

        if options.raw_file:
            raw = Raw(options.raw_file)
            self.url_list = [raw.url]
            self.httpmethod = raw.method
            self.data = raw.body
            self.headers = raw.headers
        else:
            self.url_list = options.url_list
            self.httpmethod = options.httpmethod
            self.data = options.data
            self.headers = {**DEFAULT_HEADERS, **options.headers}
            if options.cookie:
                self.headers["Cookie"] = options.cookie
            if options.useragent:
                self.headers["User-Agent"] = options.useragent

        self.random_agents = None
        if options.use_random_agents:
            self.random_agents = FileUtils.get_lines(
                FileUtils.build_path(SCRIPT_PATH, "db", "user-agents.txt"))

        self.blacklists = Dictionary.generate_blacklists(options.extensions)
        self.dictionary = Dictionary(
            paths=options.wordlist,
            extensions=options.extensions,
            suffixes=options.suffixes,
            prefixes=options.prefixes,
            lowercase=options.lowercase,
            uppercase=options.uppercase,
            capitalization=options.capitalization,
            force_extensions=options.force_extensions,
            exclude_extensions=options.exclude_extensions,
            no_extension=options.no_extension,
            only_selected=options.only_selected)

        self.jobs_count = len(self.url_list) * (len(options.scan_subdirs)
                                                if options.scan_subdirs else 1)
        self.current_job = 0
        self.batch = False
        self.batch_session = None
        self.exit = None

        self.threads_lock = threading.Lock()
        self.report_manager = EmptyReportManager()
        self.report = EmptyReport()
        self.start_time = time.time()

        self.output.header(BANNER)
        self.print_config()

        if options.autosave_report or options.output_file:
            if options.autosave_report:
                self.report_path = options.output_location or FileUtils.build_path(
                    SCRIPT_PATH, "reports")
                self.validate_dir(self.report_path)

            self.setup_reports()

        if options.log_file:
            self.validate_dir(FileUtils.parent(options.log_file))
            FileUtils.create_directory(FileUtils.parent(options.log_file))
            self.output.log_file(FileUtils.get_abs_path(options.log_file))

        try:
            for url in self.url_list:
                try:
                    gc.collect()

                    try:
                        self.requester = Requester(
                            url if url.endswith("/") else url + "/",
                            max_pool=options.threads_count,
                            max_retries=options.max_retries,
                            timeout=options.timeout,
                            ip=options.ip,
                            proxy=options.proxy,
                            proxylist=options.proxylist,
                            redirect=options.follow_redirects,
                            request_by_hostname=options.request_by_hostname,
                            httpmethod=self.httpmethod,
                            data=self.data,
                            scheme=options.scheme,
                            random_agents=self.random_agents,
                        )

                        self.output.set_target(self.requester.base_url +
                                               self.requester.base_path)
                        self.requester.setup()

                        for key, value in self.headers.items():
                            self.requester.set_header(key, value)

                        if options.auth:
                            self.requester.set_auth(options.auth_type,
                                                    options.auth)

                        # Test request to check if server is up
                        self.requester.request("")
                        self.write_log("Test request sent for: {}".format(
                            self.requester.base_url))

                        if options.autosave_report or options.output_file:
                            self.report = Report(self.requester.host,
                                                 self.requester.port,
                                                 self.requester.scheme,
                                                 self.requester.base_path)

                    except RequestException as e:
                        self.output.error(e.args[0])
                        raise SkipTargetInterrupt

                    self.skip = None

                    if not options.scan_subdirs:
                        self.directories.put("")

                    for subdir in options.scan_subdirs:
                        self.directories.put(subdir)
                        self.pass_dirs.append(subdir)

                    match_callbacks = [self.match_callback, self.append_log]
                    not_found_callbacks = [
                        self.not_found_callback, self.append_log
                    ]
                    error_callbacks = [
                        self.error_callback, self.append_error_log
                    ]
                    self.fuzzer = Fuzzer(
                        self.requester,
                        self.dictionary,
                        suffixes=options.suffixes,
                        prefixes=options.prefixes,
                        exclude_response=options.exclude_response,
                        threads=options.threads_count,
                        delay=options.delay,
                        maxrate=options.maxrate,
                        match_callbacks=match_callbacks,
                        not_found_callbacks=not_found_callbacks,
                        error_callbacks=error_callbacks,
                    )

                    try:
                        self.prepare()
                    except RequestException as e:
                        self.output.error(e.args[0])
                        raise SkipTargetInterrupt

                except SkipTargetInterrupt:
                    self.report.completed = True
                    continue

        except KeyboardInterrupt:
            self.close("Canceled by the user")

        self.output.warning("\nTask Completed")
Code Example #6
File: controller.py  Project: O2F/dirsearch
    def setup(self, options, output):
        self.options, self.output = options, output

        if self.options.raw_file:
            self.options.update(
                zip(
                    ["urls", "httpmethod", "headers", "data"],
                    parse_raw(self.options.raw_file),
                )
            )
        else:
            self.options.headers = {**DEFAULT_HEADERS, **self.options.headers}

            if self.options.cookie:
                self.options.headers["Cookie"] = self.options.cookie
            if self.options.useragent:
                self.options.headers["User-Agent"] = self.options.useragent

        self.random_agents = None
        if self.options.use_random_agents:
            self.random_agents = FileUtils.get_lines(
                FileUtils.build_path(SCRIPT_PATH, "db", "user-agents.txt")
            )

        self.requester = Requester(
            max_pool=self.options.threads_count,
            max_retries=self.options.max_retries,
            timeout=self.options.timeout,
            ip=self.options.ip,
            proxy=self.options.proxy,
            follow_redirects=self.options.follow_redirects,
            httpmethod=self.options.httpmethod,
            headers=self.options.headers,
            data=self.options.data,
            scheme=self.options.scheme,
            random_agents=self.random_agents,
        )
        self.dictionary = Dictionary(
            paths=self.options.wordlist,
            extensions=self.options.extensions,
            suffixes=self.options.suffixes,
            prefixes=self.options.prefixes,
            lowercase=self.options.lowercase,
            uppercase=self.options.uppercase,
            capitalization=self.options.capitalization,
            force_extensions=self.options.force_extensions,
            exclude_extensions=self.options.exclude_extensions,
            no_extension=self.options.no_extension,
            only_selected=self.options.only_selected,
        )
        self.blacklists = Dictionary.generate_blacklists(self.options.extensions)
        self.targets = options.urls
        self.start_time = time.time()
        self.passed_urls = set()
        self.directories = []
        self.report = None
        self.batch = False
        self.current_job = 0
        self.jobs_count = 0
        self.errors = 0
        self.consecutive_errors = 0

        if self.options.auth:
            self.requester.set_auth(self.options.auth_type, self.options.auth)

        if self.options.proxy_auth:
            self.requester.set_proxy_auth(self.options.proxy_auth)

        if self.options.log_file:
            self.options.log_file = FileUtils.get_abs_path(self.options.log_file)

            try:
                FileUtils.create_dir(FileUtils.parent(self.options.log_file))
                if not FileUtils.can_write(self.options.log_file):
                    raise Exception

            except Exception:
                self.output.error(
                    f"Couldn't create log file at {self.options.log_file}"
                )
                exit(1)

        if self.options.autosave_report:
            self.report_path = self.options.output_path or FileUtils.build_path(
                SCRIPT_PATH, "reports"
            )

            try:
                FileUtils.create_dir(self.report_path)
                if not FileUtils.can_write(self.report_path):
                    raise Exception

            except Exception:
                self.output.error(
                    f"Couldn't create report folder at {self.report_path}"
                )
                exit(1)

        self.output.header(BANNER)
        self.output.config(
            ", ".join(self.options["extensions"]),
            ", ".join(self.options["prefixes"]),
            ", ".join(self.options["suffixes"]),
            str(self.options["threads_count"]),
            str(len(self.dictionary)),
            str(self.options["httpmethod"]),
        )

        self.setup_reports()

        if self.options.log_file:
            self.output.log_file(self.options.log_file)
Code Example #7
File: installation.py  Project: O2F/dirsearch
def get_dependencies():
    try:
        return FileUtils.get_lines(REQUIREMENTS_FILE)
    except FileNotFoundError:
        print("Can't find requirements.txt")
        exit(1)