Example 1
def _check_charset(url: str, res: Response) -> List[Result]:
    results: List[Result] = []

    # if the body is empty, we really don't care about this
    if len(res.content) == 0:
        return results

    try:
        if "Content-Type" in res.headers:
            content_type = str(res.headers["Content-Type"]).lower()

            if "charset" not in content_type and "text/html" in content_type:
                # no charset specified
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res,
                                               {"content-type": content_type}),
                        f"Charset Not Defined in '{res.headers['Content-Type']}' at {url}",
                        Vulnerabilities.HTTP_HEADER_CONTENT_TYPE_NO_CHARSET,
                    ))
        else:
            # content-type missing
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Content-Type Missing: {url} ({res.request.method} - {res.status_code})",
                    Vulnerabilities.HTTP_HEADER_CONTENT_TYPE_MISSING,
                ))
    except Exception:
        output.debug_exception()

    return results
Example 2
def check_json_user_enum(url: str) -> List[Result]:
    results = []
    target = urljoin(url, "wp-json/wp/v2/users")

    res = network.http_get(target, False)
    body = res.text

    if res.status_code < 300 and "slug" in body:
        data = res.json()

        # log the enum finding
        results.append(
            Result.from_evidence(
                Evidence.from_response(res),
                f"WordPress WP-JSON User Enumeration at {target}",
                Vulnerabilities.APP_WORDPRESS_USER_ENUM_API,
            ))

        # log the individual users
        for user in data:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(
                        res,
                        {
                            "user_id": user["id"],
                            "user_slug": user["slug"],
                            "user_name": user["name"],
                        },
                    ),
                    f"ID: {user['id']}\tUser Slug: '{user['slug']}'\t\tUser Name: '{user['name']}'",
                    Vulnerabilities.APP_WORDPRESS_USER_FOUND,
                ))

    return results
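Note on Example 2: the loop assumes each entry in the WP-JSON users payload carries "id", "slug", and "name" keys, which is the shape the WordPress REST API returns for /wp/v2/users. A minimal illustration (the values are made up):

data = [
    {"id": 1, "slug": "admin", "name": "Site Admin"},
    {"id": 2, "slug": "editor", "name": "Jane Editor"},
]

for user in data:
    print(f"ID: {user['id']}\tUser Slug: '{user['slug']}'\t\tUser Name: '{user['name']}'")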
Example 3
def check_options(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_options(url)

    if "Allow" in res.headers:
        results.append(
            Result.from_evidence(
                Evidence.from_response(res),
                f"Allow HTTP Verbs (OPTIONS): {res.headers['Allow']}",
                Vln.HTTP_OPTIONS_ALLOW,
            )
        )

    if "Public" in res.headers:
        results.append(
            Result.from_evidence(
                Evidence.from_response(res),
                f"Public HTTP Verbs (OPTIONS): {res.headers['Public']}",
                Vln.HTTP_OPTIONS_PUBLIC,
            )
        )

    results += response_scanner.check_response(url, res)

    return results
Example 4
def identify(url: str) -> Tuple[Union[str, None], List[Result]]:
    results = []

    # find WordPress
    res, path = _identify_by_path(url, "")

    if path is None:
        res, path = _identify_by_path(url, "blog/")

    # check to see if we have a valid hit
    if path is not None:
        # we have a WordPress install, let's see if we can get a version
        body = res.text

        ver = "Unknown"
        # this works for modern versions
        m = re.search(r"login.min.css\?ver=\d+\.\d+\.?\d*", body)
        if m:
            ver = m.group(0).split("=")[1]
        else:
            # the current method doesn't work, fall back to an older method
            m = re.search(r"load-styles.php\?[\w,;=&%]+;ver=\d+\.\d+\.?\d*", body)
            if m:
                ver = m.group(0).split("=")[-1]

        # report that we found WordPress
        results.append(
            Result.from_evidence(
                Evidence.from_response(res, {"version": ver}),
                f"Found WordPress v{ver} at {path}",
                Vulnerabilities.APP_WORDPRESS_VERSION,
            )
        )

        # is this a current version?
        ver = cast(version.Version, version.parse(ver))
        curr_version = version_checker.get_latest_version("wordpress", ver)

        if curr_version is not None and curr_version > ver:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(
                        res,
                        {
                            "installed_version": str(ver),
                            "current_verison": str(curr_version),
                        },
                    ),
                    f"WordPress Outdated: {ver} - Current: {curr_version}",
                    Vulnerabilities.APP_WORDPRESS_OUTDATED,
                )
            )

        return path, results
    else:
        return None, []
Example 5
def check_aspnet_handlers(url: str) -> List[Result]:
    results = []

    file_name = secrets.token_hex(12)

    exts = ["ashx", "aspx", "asmx", "soap", "rem"]

    for ext in exts:
        target = urljoin(url, f"{file_name}.{ext}")
        vuln = False

        res = network.http_get(target, False)
        body = res.text

        if "Location" in res.headers and "aspxerrorpath" in res.headers["Location"]:
            vuln = True
        elif res.status_code >= 400 and (
            "Remoting.RemotingException" in body
            or "HttpException" in body
            or "FileNotFoundException" in body
        ):
            vuln = True

        if vuln:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res, {"handler": ext}),
                    f"ASP.NET Handler Enumeration: {ext}",
                    Vulnerabilities.SERVER_ASPNET_HANDLER_ENUM,
                )
            )

    return results
Example 6
def check_path_disclosure(wp_url: str) -> List[Result]:
    # this is a list of files that are known to throw a fatal error when accessed directly
    # this is from a manual review of all plugins with at least 1M installs
    urls = [
        "wp-content/plugins/hello.php",
        "wp-content/plugins/akismet/akismet.php",
        "wp-content/plugins/contact-form-7/includes/capabilities.php",
        "wp-content/plugins/wordpress-seo/admin/views/partial-alerts-errors.php",
        "wp-content/plugins/jetpack/load-jetpack.php",
        "wp-content/plugins/jetpack/uninstall.php",
        "wp-content/plugins/duplicate-post/duplicate-post-admin.php",
        "wp-content/plugins/wpforms-lite/includes/admin/class-welcome.php",
        "wp-content/plugins/wp-google-maps/base/includes/welcome.php",
        "wp-content/plugins/wp-super-cache/wp-cache.php",
        "wp-content/plugins/mailchimp-for-wp/integrations/wpforms/bootstrap.php",
        "wp-content/plugins/mailchimp-for-wp/integrations/bootstrap.php",
        "wp-content/plugins/regenerate-thumbnails/regenerate-thumbnails.php",
        "wp-content/plugins/advanced-custom-fields/includes/deprecated.php",
        "wp-content/plugins/redirection/redirection.php",
        "wp-content/plugins/wpforms-lite/includes/admin/importers/class-ninja-forms.php",
        "wp-content/plugins/ninja-forms/includes/deprecated.php",
        "wp-content/plugins/so-widgets-bundle/so-widgets-bundle.php",
        "wp-content/plugins/wp-fastest-cache/templates/preload.php",
        "wp-content/plugins/duplicate-page/duplicatepage.php",
        "wp-content/plugins/better-wp-security/better-wp-security.php",
        "wp-content/plugins/all-in-one-wp-security-and-firewall/other-includes/wp-security-unlock-request.php",
        "wp-content/plugins/related-posts/views/settings.php",
        "wp-content/plugins/wpcontentguard/views/settings.php",
        "wp-content/plugins/simple-social-icons/simple-social-icons.php",
    ]
    results: List[Result] = []

    for url in urls:
        target = urljoin(wp_url, url)

        head = network.http_head(target, False)
        if head.status_code != 404:
            resp = network.http_get(target, False)
            if resp.status_code < 300 or resp.status_code >= 500:
                # we have some kind of response that could be useful
                if "<b>Fatal error</b>:" in resp.text:
                    # we have an error
                    pattern = r"<b>((\/|[A-Z]:\\).*.php)<\/b>"
                    if re.search(pattern, resp.text):
                        try:
                            path = re.findall(pattern, resp.text)[0][0]
                            results.append(
                                Result.from_evidence(
                                    Evidence.from_response(resp, {"path": path}),
                                    f"WordPress File Path Disclosure: {target} ({path})",
                                    Vulnerabilities.APP_WORDPRESS_PATH_DISCLOSURE,
                                ))
                        except Exception:
                            output.debug_exception()

            results += response_scanner.check_response(target, resp)

    return results
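As a quick sanity check of the disclosure pattern in Example 6, here is what it extracts from a typical PHP fatal error (the sample HTML is invented for illustration):

import re

sample = (
    "<b>Fatal error</b>: Uncaught Error: Call to undefined function foo() "
    "in <b>/var/www/html/wp-content/plugins/hello.php</b> on line 12"
)
pattern = r"<b>((\/|[A-Z]:\\).*\.php)<\/b>"

# findall returns one tuple per match; the first group holds the full path
print(re.findall(pattern, sample)[0][0])  # /var/www/html/wp-content/plugins/hello.php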
Example 7
File: php.py Project: rahmiy/yawast
    def _process(url: str, res: Response):
        nonlocal results

        if res.status_code == 200 and '<h1 class="p">PHP Version' in res.text:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"PHP Info Found: {url}",
                    Vulnerabilities.SERVER_PHP_PHPINFO,
                ))
Example 8
    def _process(url: str, res: Response):
        nonlocal results

        if res.status_code == 200 and res.content.startswith(
                b"\0\0\0\1Bud1\0"):
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f".DS_Store File Found: {url}",
                    Vulnerabilities.HTTP_DS_STORE_FILE,
                ))
Example 9
File: jira.py Project: sgnls/yawast
def check_for_jira(session: Session) -> Tuple[List[Result], Union[str, None]]:
    # this checks for an instance of Jira relative to the session URL
    results: List[Result] = []
    jira_url = None

    try:
        targets = [
            f"{session.url}secure/Dashboard.jspa",
            f"{session.url}jira/secure/Dashboard.jspa",
        ]

        for target in targets:
            res = network.http_get(target, False)

            if (
                res.status_code == 200
                and 'name="application-name" content="JIRA"' in res.text
            ):
                # we have a Jira instance
                jira_url = target

                # try to get the version
                ver_str = "unknown"
                try:
                    ver_pattern = (
                        r"<meta name=\"ajs-version-number\" content=\"([\d\.]+)\">"
                    )
                    version = re.search(ver_pattern, res.text).group(1)

                    build_pattern = (
                        r"<meta name=\"ajs-build-number\" content=\"(\d+)\">"
                    )
                    build = re.search(build_pattern, res.text).group(1)

                    ver_str = f"v{version}-{build}"
                except:
                    output.debug_exception()

                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Jira Installation Found ({ver_str}): {target}",
                        Vulnerabilities.APP_JIRA_FOUND,
                    )
                )

            results += response_scanner.check_response(target, res)

            if jira_url is not None:
                # found an instance; no need to check the remaining targets
                break
    except Exception:
        output.debug_exception()

    return results, jira_url
Example 10
def check_cve_2019_11043(session: Session, links: List[str]) -> List[Result]:
    min_qsl = 1500
    max_qsl = 1950
    qsl_step = 5
    results = []
    targets = []

    if session.args.php_page is not None and len(session.args.php_page) > 0:
        php_page = str(session.args.php_page)

        if php_page.startswith("http://") or php_page.startswith("https://"):
            targets.append(urljoin(session.url, php_page))
        elif php_page.startswith(session.url):
            targets.append(php_page)

    for link in links:
        if link.endswith(".php"):
            targets.append(link)
        elif link.endswith("/"):
            targets.append(f"{link}index.php")

    def _get_resp(url: str, q_count: int) -> Response:
        path_info = "/PHP\nindex.php"
        u = urlparse(url)
        orig_path = quote(u.path)
        new_path = quote(u.path + path_info)
        delta = len(new_path) - len(path_info) - len(orig_path)
        prime = q_count - delta / 2
        req_url = urljoin(url, new_path + "?" + "Q" * int(prime))

        return network.http_get(req_url, False)

    for target in targets:
        # start by making sure that we have a valid target
        if network.http_head(target, False).status_code < 400:
            # get our baseline status code
            res = _get_resp(target, 1500)
            base_status_code = res.status_code

            for qsl in range(min_qsl + qsl_step, max_qsl, qsl_step):
                res = _get_resp(target, qsl)
                if res.status_code != base_status_code:
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res, {"qsl": qsl}),
                            f"Detected susceptibility to PHP Remote Code Execution (CVE-2019-11043) (QSL {qsl})",
                            Vulnerabilities.SERVER_PHP_CVE_2019_11043,
                        )
                    )
                    break

    return results
Example 11
    def _process(url: str, result: Tuple[bool, Response]):
        nonlocal results

        found, res = result

        if found and '<h1 class="p">PHP Version' in res.text:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"PHP Info Found: {url}",
                    Vulnerabilities.SERVER_PHP_PHPINFO,
                )
            )
Example 12
def check_response(url: str,
                   res: Response,
                   body: Union[str, None] = None) -> List[Result]:
    global _data, _reports
    results = []

    try:
        # make sure we actually have something
        if res is None:
            return []

        if _data is None or len(_data) == 0:
            _get_data()

        if body is None:
            body = res.text

        for rule in _data:
            rule = cast(_MatchRule, rule)

            mtch = re.search(rule.pattern, body)

            if mtch:
                val = mtch.group(int(rule.match_group))

                err_start = body.find(val)

                # get the error, plus up to 25 characters on each side
                err = body[max(0, err_start - 25):err_start + len(val) + 25]
                msg = (f"Found error message (confidence: {rule.confidence}) "
                       f"on {url} ({res.request.method}): ...{err}...")

                if msg not in _reports:
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res),
                            msg,
                            Vulnerabilities.HTTP_ERROR_MESSAGE,
                        ))

                    _reports.append(msg)

                    break
                else:
                    output.debug(f"Ignored duplicate error message: {msg}")
    except Exception:
        output.debug_exception()

    return results
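The context-window extraction in Example 12 is easy to exercise in isolation. A small sketch follows; MatchRule is an illustrative stand-in for the module's _MatchRule, using only the pattern/match_group/confidence fields the loop actually reads:

import re
from dataclasses import dataclass

@dataclass
class MatchRule:
    pattern: str
    match_group: int
    confidence: str

rule = MatchRule(pattern=r"(Fatal error): \w+", match_group=1, confidence="high")
body = "Fatal error: oops" + "x" * 40

m = re.search(rule.pattern, body)
if m:
    val = m.group(rule.match_group)
    err_start = body.find(val)
    # clamp the left edge so a match near the start of the body doesn't wrap around
    err = body[max(0, err_start - 25):err_start + len(val) + 25]
    print(f"...{err}...")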
Example 13
    def _process(url: str, res: Response):
        nonlocal results, new_links

        if res.status_code == 200:
            # we found something!
            new_links.append(url)

            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Found backup file: {url}",
                    Vulnerabilities.HTTP_BACKUP_FILE,
                ))

        results += response_scanner.check_response(url, res)
Example 14
def check_trace(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_custom("TRACE", url)
    body = res.text

    if res.status_code == 200 and "TRACE / HTTP/1.1" in body:
        results.append(
            Result.from_evidence(
                Evidence.from_response(res),
                "HTTP TRACE Enabled",
                Vln.HTTP_TRACE_ENABLED,
            ))

    results += response_scanner.check_response(url, res)

    return results
Example 15
def get_results(soup: BeautifulSoup, url: str, res: Response) -> List[Result]:
    global _reports

    results: List[Result] = []

    try:
        parsed = urlparse(url)
        domain = utils.get_domain(parsed.netloc)

        issues, r = _get_retirejs_results(soup, url, domain, res)
        results += r
        for js_url, issue in issues:
            comp = issue["component"]
            ver = issue["version"]

            if "vulnerabilities" in issue:
                for vuln in issue["vulnerabilities"]:
                    info = (
                        f'Vulnerable JavaScript: {comp}-{ver} ({js_url}): Severity: {vuln["severity"]} - '
                        f'Info: {" ".join(vuln["info"])}'
                    )

                    # make sure we haven't reported this issue before
                    if info not in _reports:
                        _reports.append(info)

                        results.append(
                            Result.from_evidence(
                                Evidence.from_response(
                                    res,
                                    {
                                        "js_file": js_url,
                                        "js_lib": comp,
                                        "js_lib_ver": ver,
                                        "vuln_info": list(vuln["info"]),
                                        "vuln_sev": vuln["severity"],
                                    },
                                ),
                                info,
                                Vulnerabilities.JS_VULNERABLE_VERSION,
                            )
                        )
    except Exception:
        output.debug_exception()

    return results
Example 16
def _get_retirejs_results(
    soup: BeautifulSoup, url: str, domain: str, res: Response
) -> Tuple[List[Tuple[str, Dict]], List[Result]]:
    global _data, _checked
    issues = []
    results: List[Result] = []

    if _data is None or len(_data) == 0:
        _get_data()

    if _data is not None:
        # get all the JS files
        files = [i.get("src") for i in soup.find_all("script") if i.get("src")]

        for file in files:
            # fix relative URLs
            if str(file).startswith("//"):
                file = f"https:{file}"
            if str(file).startswith("/") or (not str(file).startswith("http")):
                file = urljoin(url, file)

            if file not in _checked:
                findings = retirejs.scan_endpoint(file, _data)

                _checked.append(file)

                if domain not in file:
                    # external JS file
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res, {"js_file": file}),
                            f"External JavaScript File: {file}",
                            Vulnerabilities.JS_EXTERNAL_FILE,
                        )
                    )

                for find in findings:
                    issues.append((file, find))

        return issues, results
    else:
        # this means we couldn't get the data, so bail
        return [], []
Example 17
def check_telerik_rau_enabled(soup: BeautifulSoup, url: str) -> List[Result]:
    results: List[Result] = []

    parsed = urlparse(url)
    domain = utils.get_domain(parsed.netloc)

    try:
        # get all the scripts
        files = [i.get("src") for i in soup.find_all("script") if i.get("src")]

        for file in files:
            if "Telerik.Web.UI.WebResource.axd" in file:
                # ok, they are using Telerik UI for ASP.NET AJAX
                # fix-up the URL
                if str(file).startswith("//"):
                    file = f"https:{file}"
                if str(file).startswith("/") or (not str(file).startswith("http")):
                    if parsed.scheme == "https":
                        file = urljoin(f"https://{domain}", file)
                    else:
                        file = urljoin(f"http://{domain}", file)

                target = urlparse(file)
                target = target._replace(query="type=rau")

                if domain in target.netloc:
                    res = network.http_get(urlunparse(target), False)
                    # NOTE: Typo in "succesfully" is intentional - do not fix
                    if "RadAsyncUpload handler is registered succesfully" in res.text:
                        results.append(
                            Result.from_evidence(
                                Evidence.from_response(res, {"original_url": url}),
                                f"Telerik UI for ASP.NET AJAX RadAsyncUpload Enabled "
                                f"(Check for CVE-2019-18935) at {target}",
                                Vulnerabilities.APP_TELERIK_UI_RAD_ASYNC_UPLOAD_ENABLED,
                            )
                        )

                        break
    except Exception:
        output.debug_exception()

    return results
Example 18
def check_propfind(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_custom("PROPFIND", url)
    body = res.text

    if res.status_code <= 400 and len(body) > 0:
        if "Content-Type" in res.headers and "text/xml" in res.headers["Content-Type"]:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    "Possible Info Disclosure: PROPFIND Enabled",
                    Vln.HTTP_PROPFIND_ENABLED,
                )
            )

    results += response_scanner.check_response(url, res)

    return results
Example 19
def check_jira_user_registration(jira_url: str) -> List[Result]:
    results: List[Result] = []

    try:
        target = f"{jira_url.rsplit('/', 1)[0]}/Signup!default.jspa"
        res = network.http_get(target, False)

        if res.status_code == 200 and "<title>Sign up for Jira" in res.text:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Jira User Registration Enabled: {target}",
                    Vulnerabilities.APP_JIRA_USER_REG_ENABLED,
                ))

        results += response_scanner.check_response(target, res)
    except Exception:
        output.debug_exception()

    return results
Example 20
def _check_url(url: str, targets: List[str]) -> Tuple[List[str], List[Result]]:
    files: List[str] = []
    results: List[Result] = []

    for target in targets:
        target_url = urljoin(url, target)

        res = network.http_get(target_url, False)

        results += response_scanner.check_response(target_url, res)

        if res.status_code < 300:
            files.append(target_url)
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"File found: {target_url}",
                    Vulnerabilities.SERVER_SPECIAL_FILE_EXPOSED,
                ))

    return files, results
Example 21
def _get_retirejs_results(
        soup: BeautifulSoup, url: str, domain: str,
        res: Response) -> Tuple[List[Tuple[str, Dict]], List[Result]]:
    global _data, _checked
    issues = []
    results: List[Result] = []

    if _data is None or len(_data) == 0:
        _get_data()

    if _data is not None:
        # get all the JS files
        elements = [i for i in soup.find_all("script") if i.get("src")]

        for element in elements:
            file = element.get("src")
            # fix relative URLs
            if str(file).startswith("//"):
                file = f"https:{file}"
            if str(file).startswith("/") or (not str(file).startswith("http")):
                if urlparse(url).scheme == "https":
                    file = urljoin(f"https://{domain}", file)
                else:
                    file = urljoin(f"http://{domain}", file)

            if file not in _checked:
                findings = retirejs.scan_endpoint(file, _data)

                _checked.append(file)

                if domain not in file:
                    # external JS file
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res, {
                                "js_file": file,
                                "element": str(element)
                            }),
                            f"External JavaScript File: {file}",
                            Vulnerabilities.JS_EXTERNAL_FILE,
                        ))

                    # we have an external script; check for SRI
                    if not element.get("integrity"):
                        results.append(
                            Result.from_evidence(
                                Evidence.from_response(res, {
                                    "js_file": file,
                                    "element": str(element)
                                }),
                                f"External JavaScript Without SRI: {file}",
                                Vulnerabilities.JS_EXTERNAL_NO_SRI,
                            ))

                for find in findings:
                    issues.append((file, find))

        return issues, results
    else:
        # this means we couldn't get the data, so bail
        return [], []
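Since Example 21 flags external scripts that lack an integrity attribute, it helps to recall how an SRI value is produced: per the Subresource Integrity spec it is "<algorithm>-<base64 digest>" computed over the exact bytes of the script. A minimal sketch (fetching is omitted; script_bytes is assumed to hold the file contents):

import base64
import hashlib

def sri_hash(script_bytes: bytes) -> str:
    # sha384 is the commonly recommended digest for integrity attributes
    digest = hashlib.sha384(script_bytes).digest()
    return f"sha384-{base64.b64encode(digest).decode('ascii')}"

# compare against the element's attribute, e.g.:
# expected = element.get("integrity")
# matches = expected == sri_hash(script_bytes)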
Example 22
def _check_cache_headers(url: str, res: Response) -> List[Result]:
    results = []

    try:
        if "Cache-Control" in res.headers:
            # we have the header, check the content
            if "public" in str(res.headers["Cache-Control"]).lower():
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Cache-Control: Public: {url}",
                        Vulnerabilities.HTTP_HEADER_CACHE_CONTROL_PUBLIC,
                    ))

            if "no-cache" not in str(res.headers["Cache-Control"]).lower():
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Cache-Control: no-cache Not Found: {url}",
                        Vulnerabilities.HTTP_HEADER_CACHE_CONTROL_NO_CACHE_MISSING,
                    ))

            if "no-store" not in str(res.headers["Cache-Control"]).lower():
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Cache-Control: no-store Not Found: {url}",
                        Vulnerabilities.HTTP_HEADER_CACHE_CONTROL_NO_STORE_MISSING,
                    ))

            if "private" not in str(res.headers["Cache-Control"]).lower():
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Cache-Control: private Not Found: {url}",
                        Vulnerabilities.HTTP_HEADER_CACHE_CONTROL_PRIVATE_MISSING,
                    ))
        else:
            # header missing
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Cache-Control Header Not Found: {url}",
                    Vulnerabilities.HTTP_HEADER_CACHE_CONTROL_MISSING,
                ))

        if "Expires" not in res.headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Expires Header Not Found: {url}",
                    Vulnerabilities.HTTP_HEADER_EXPIRES_MISSING,
                ))

        else:
            # TODO: parse the value and see if it's less than now
            pass

        if "Pragma" not in res.headers or "no-cache" not in str(
                res.headers["Pragma"]):
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Pragma: no-cache Not Found: {url}",
                    Vulnerabilities.HTTP_HEADER_PRAGMA_NO_CACHE_MISSING,
                ))
    except Exception:
        output.debug_exception()

    return results
Example 23
def _get_links(base_url: str, urls: List[str], queue, pool):
    global _links, _insecure, _tasks, _lock

    max_length = 1024 * 1024 * 3  # 3MB

    results: List[Result] = []

    # fail-safe to make sure we don't go too crazy
    if len(_links) > 10000:
        # if we have more than 10,000 URLs in our list, just stop
        output.debug(
            "Spider: Link list contains > 10,000 items. Stopped gathering more links."
        )

        return

    for url in urls:
        try:
            # list of pages found that will need to be processed
            to_process: List[str] = []

            res = network.http_get(url, False)

            # get the length, so that we don't parse huge documents
            if "Content-Length" in res.headers:
                length = int(res.headers["Content-Length"])
            else:
                length = len(res.content)

            if http_utils.is_text(res) and length < max_length:
                soup = BeautifulSoup(res.text, "html.parser")
            else:
                # no clue what this is
                soup = None

            results += response_scanner.check_response(url, res, soup)

            if soup is not None:
                for link in soup.find_all("a"):
                    href = link.get("href")

                    if str(href).startswith("/") and not str(href).startswith("//"):
                        href = urljoin(base_url, href)

                    if href is not None:
                        # check to see if this link is in scope
                        if base_url in href and href not in _links:
                            if "." in href.split("/")[-1]:
                                file_ext = href.split("/")[-1].split(".")[-1]
                            else:
                                file_ext = None

                            with _lock:
                                _links.append(href)

                            # filter out some of the obvious binary files
                            if file_ext is None or file_ext not in [
                                    "gzip",
                                    "jpg",
                                    "jpeg",
                                    "gif",
                                    "woff",
                                    "zip",
                                    "exe",
                                    "gz",
                                    "pdf",
                            ]:
                                if not _is_unsafe_link(href, link.string):
                                    to_process.append(href)
                                else:
                                    output.debug(
                                        f"Skipping unsafe URL: {link.string} - {href}"
                                    )
                            else:
                                output.debug(
                                    f'Skipping URL "{href}" due to file extension "{file_ext}"'
                                )
                        else:
                            if (base_url.startswith("https://")
                                    and str(href).startswith("http://")
                                    and str(href) not in _insecure):
                                # link from secure to insecure
                                with _lock:
                                    _insecure.append(str(href))

                                results.append(
                                    Result.from_evidence(
                                        Evidence.from_response(
                                            res, {"link": href}),
                                        f"Insecure Link: {url} links to {href}",
                                        Vulnerabilities.HTTP_INSECURE_LINK,
                                    ))

            # handle redirects
            if "Location" in res.headers:
                redirect = res.headers["Location"]

                # check for relative link
                if str(redirect).startswith("/"):
                    redirect = urljoin(base_url, redirect)

                # make sure that we aren't redirected out of scope
                if base_url in redirect:
                    to_process.append(redirect)

            if len(to_process) > 0:
                asy = pool.apply_async(_get_links,
                                       (base_url, to_process, queue, pool))

                with _lock:
                    _tasks.append(asy)
        except Exception:
            output.debug_exception()

    output.debug(f"GetLinks Task Completed - {len(results)} issues found.")
    queue.put(results)
Example 24
def get_header_issues(res: Response, raw: str, url: str) -> List[Result]:
    results: List[Result] = []

    try:
        headers = res.headers

        if "X-Powered-By" in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f'X-Powered-By Header Present: {headers["X-Powered-By"]} ({url})',
                    Vln.HTTP_HEADER_X_POWERED_BY,
                ))

            # check to see if this is a php version
            results += php.check_version(headers["X-Powered-By"], raw, url)

        if "X-XSS-Protection" in headers:
            # header is present, check the value
            if "0" in headers["X-XSS-Protection"]:
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"X-XSS-Protection Disabled Header Present ({url})",
                        Vln.HTTP_HEADER_X_XSS_PROTECTION_DISABLED,
                    ))
        else:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"X-XSS-Protection Header Not Present ({url})",
                    Vln.HTTP_HEADER_X_XSS_PROTECTION_MISSING,
                ))

        if "X-Runtime" in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"X-Runtime Header Present; likely indicates a RoR application ({url})",
                    Vln.HTTP_HEADER_X_RUNTIME,
                ))

        if "X-Backend-Server" in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f'X-Backend-Server Header Present: {headers["X-Backend-Server"]} ({url})',
                    Vln.HTTP_HEADER_X_BACKEND_SERVER,
                ))

        if "Via" in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f'Via Header Present: {headers["Via"]} ({url})',
                    Vln.HTTP_HEADER_VIA,
                ))

        if "X-Frame-Options" in headers:
            if "allow" in str(headers["X-Frame-Options"]).lower():
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f'X-Frame-Options Header: {headers["X-Frame-Options"]} ({url})',
                        Vln.HTTP_HEADER_X_FRAME_OPTIONS_ALLOW,
                    ))
        else:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"X-Frame-Options Header Not Present ({url})",
                    Vln.HTTP_HEADER_X_FRAME_OPTIONS_MISSING,
                ))

        if "X-Content-Type-Options" not in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"X-Content-Type-Options Header Not Present ({url})",
                    Vln.HTTP_HEADER_X_CONTENT_TYPE_OPTIONS_MISSING,
                ))

        if "Content-Security-Policy" not in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Content-Security-Policy Header Not Present ({url})",
                    Vln.HTTP_HEADER_CONTENT_SECURITY_POLICY_MISSING,
                ))

        if "Referrer-Policy" not in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Referrer-Policy Header Not Present ({url})",
                    Vln.HTTP_HEADER_REFERRER_POLICY_MISSING,
                ))

        if "Feature-Policy" not in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Feature-Policy Header Not Present ({url})",
                    Vln.HTTP_HEADER_FEATURE_POLICY_MISSING,
                ))

        if "Access-Control-Allow-Origin" in headers:
            if headers["Access-Control-Allow-Origin"] == "*":
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Access-Control-Allow-Origin: Unrestricted ({url})",
                        Vln.HTTP_HEADER_CORS_ACAO_UNRESTRICTED,
                    ))

        if "Strict-Transport-Security" not in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Strict-Transport-Security Header Not Present ({url})",
                    Vln.HTTP_HEADER_HSTS_MISSING,
                ))

        if "Server" in headers:
            results += get_server_banner_issues(headers["Server"], raw, url,
                                                headers)
    except Exception:
        output.debug_exception()

    return results
Example 25
def get_header_issues(res: Response, raw: str, url: str) -> List[Result]:
    results: List[Result] = []

    try:
        headers = res.headers

        if "X-Powered-By" in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f'X-Powered-By Header Present: {headers["X-Powered-By"]} ({url})',
                    Vln.HTTP_HEADER_X_POWERED_BY,
                )
            )

            # check to see if this is a php version
            results += php.check_version(headers["X-Powered-By"], raw, url)

        if "X-XSS-Protection" in headers:
            # header is present, check the value
            if "0" in headers["X-XSS-Protection"]:
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"X-XSS-Protection Disabled Header Present ({url})",
                        Vln.HTTP_HEADER_X_XSS_PROTECTION_DISABLED,
                    )
                )
        else:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"X-XSS-Protection Header Not Present ({url})",
                    Vln.HTTP_HEADER_X_XSS_PROTECTION_MISSING,
                )
            )

        if "X-Runtime" in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"X-Runtime Header Present; likely indicates a RoR application ({url})",
                    Vln.HTTP_HEADER_X_RUNTIME,
                )
            )

        if "X-Backend-Server" in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f'X-Backend-Server Header Present: {headers["X-Backend-Server"]} ({url})',
                    Vln.HTTP_HEADER_X_BACKEND_SERVER,
                )
            )

        if "Via" in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f'Via Header Present: {headers["Via"]} ({url})',
                    Vln.HTTP_HEADER_VIA,
                )
            )

        if "X-Frame-Options" in headers:
            if "allow" in str(headers["X-Frame-Options"]).lower():
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f'X-Frame-Options Header: {headers["X-Frame-Options"]} ({url})',
                        Vln.HTTP_HEADER_X_FRAME_OPTIONS_ALLOW,
                    )
                )
        else:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"X-Frame-Options Header Not Present ({url})",
                    Vln.HTTP_HEADER_X_FRAME_OPTIONS_MISSING,
                )
            )

        if "X-Content-Type-Options" not in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"X-Content-Type-Options Header Not Present ({url})",
                    Vln.HTTP_HEADER_X_CONTENT_TYPE_OPTIONS_MISSING,
                )
            )

        if "Content-Security-Policy" not in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Content-Security-Policy Header Not Present ({url})",
                    Vln.HTTP_HEADER_CONTENT_SECURITY_POLICY_MISSING,
                )
            )

        if "Referrer-Policy" not in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Referrer-Policy Header Not Present ({url})",
                    Vln.HTTP_HEADER_REFERRER_POLICY_MISSING,
                )
            )

        if "Feature-Policy" not in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Feature-Policy Header Not Present ({url})",
                    Vln.HTTP_HEADER_FEATURE_POLICY_MISSING,
                )
            )

        if "Access-Control-Allow-Origin" in headers:
            if headers["Access-Control-Allow-Origin"] == "*":
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Access-Control-Allow-Origin: Unrestricted ({url})",
                        Vln.HTTP_HEADER_CORS_ACAO_UNRESTRICTED,
                    )
                )

        if "Strict-Transport-Security" not in headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Strict-Transport-Security Header Not Present ({url})",
                    Vln.HTTP_HEADER_HSTS_MISSING,
                )
            )

        if "Server" in headers:
            results += get_server_banner_issues(headers["Server"], raw, url, headers)

        # check to see if any headers are duplicated.
        # we have to access a private member, as it's the only access to the raw headers
        if res.raw._original_response is not None:
            raw_headers = str(res.raw._original_response.headers).splitlines(False)
            raw_headers_checked: List[str] = []

            for raw_header in raw_headers:
                header_name = raw_header.split(":")[0]

                if header_name not in raw_headers_checked:
                    raw_headers_checked.append(header_name)

                    for dup in raw_headers:
                        dup_name = dup.split(":")[0]

                        if dup_name == header_name and dup != raw_header:
                            # we have a second header, with a different value
                            results.append(
                                Result.from_evidence(
                                    Evidence.from_response(res),
                                    f"Header {header_name} set multiple times with different values at {url}",
                                    Vln.HTTP_HEADER_DUPLICATE,
                                )
                            )

                            break
    except Exception:
        output.debug_exception()

    return results
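One detail worth noting in the duplicate-header check above: requests folds repeated response headers into a single comma-joined value in res.headers, so the individual header lines are only visible on the underlying http.client response, which is why the code reaches into the private res.raw._original_response. A minimal illustration (the target URL is hypothetical):

import requests

res = requests.get("https://example.com/")
# res.headers["Set-Cookie"] may read "a=1, b=2" even if the server sent two lines
if res.raw._original_response is not None:
    # str() of the http.client.HTTPMessage yields the raw header block
    raw_lines = str(res.raw._original_response.headers).splitlines(False)
    print(raw_lines)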
Example 26
def _get_cookie_issues(cookies: List[str], url: str, res: Response) -> List[Result]:
    global _checked_cookies

    # setup the checked list
    if Vulnerabilities.COOKIE_MISSING_SECURE_FLAG not in _checked_cookies:
        _checked_cookies[Vulnerabilities.COOKIE_MISSING_SECURE_FLAG] = []
    if Vulnerabilities.COOKIE_INVALID_SECURE_FLAG not in _checked_cookies:
        _checked_cookies[Vulnerabilities.COOKIE_INVALID_SECURE_FLAG] = []
    if Vulnerabilities.COOKIE_MISSING_HTTPONLY_FLAG not in _checked_cookies:
        _checked_cookies[Vulnerabilities.COOKIE_MISSING_HTTPONLY_FLAG] = []
    if Vulnerabilities.COOKIE_MISSING_SAMESITE_FLAG not in _checked_cookies:
        _checked_cookies[Vulnerabilities.COOKIE_MISSING_SAMESITE_FLAG] = []
    if Vulnerabilities.COOKIE_WITH_SAMESITE_NONE_FLAG not in _checked_cookies:
        _checked_cookies[Vulnerabilities.COOKIE_WITH_SAMESITE_NONE_FLAG] = []
    if Vulnerabilities.COOKIE_INVALID_SAMESITE_NONE_FLAG not in _checked_cookies:
        _checked_cookies[Vulnerabilities.COOKIE_INVALID_SAMESITE_NONE_FLAG] = []

    results: List[Result] = []

    try:
        parsed = urlparse(url)

        for cookie in cookies:
            comp = cookie.split(";")

            # get the name
            name = comp[0].split("=")[0]

            # normalize the components
            comp = list(map(str.strip, comp))
            comp = list(map(str.lower, comp))

            # check Secure flag
            if "secure" not in comp and parsed.scheme == "https":
                if name not in _checked_cookies[Vulnerabilities.COOKIE_MISSING_SECURE_FLAG]:
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res, {"cookie": name}),
                            f"Cookie Missing Secure Flag: {cookie}",
                            Vulnerabilities.COOKIE_MISSING_SECURE_FLAG,
                        )
                    )

                    _checked_cookies[Vulnerabilities.COOKIE_MISSING_SECURE_FLAG].append(name)
            elif "secure" in comp and parsed.scheme == "http":
                # secure flag over HTTP is invalid
                if name not in _checked_cookies[Vulnerabilities.COOKIE_INVALID_SECURE_FLAG]:
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res, {"cookie": name}),
                            f"Cookie Secure Flag Invalid (over HTTP): {cookie}",
                            Vulnerabilities.COOKIE_INVALID_SECURE_FLAG,
                        )
                    )

                    _checked_cookies[Vulnerabilities.COOKIE_INVALID_SECURE_FLAG].append(name)

            # check HttpOnly flag
            if "httponly" not in comp:
                if name not in _checked_cookies[Vulnerabilities.COOKIE_MISSING_HTTPONLY_FLAG]:
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res, {"cookie": name}),
                            f"Cookie Missing HttpOnly Flag: {cookie}",
                            Vulnerabilities.COOKIE_MISSING_HTTPONLY_FLAG,
                        )
                    )

                    _checked_cookies[Vulnerabilities.COOKIE_MISSING_HTTPONLY_FLAG].append(name)

            # check SameSite flag
            if (
                "samesite=lax" not in comp
                and "samesite=strict" not in comp
                and "samesite=none" not in comp
            ):
                if name not in _checked_cookies[Vulnerabilities.COOKIE_MISSING_SAMESITE_FLAG]:
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res, {"cookie": name}),
                            f"Cookie Missing SameSite Flag: {cookie}",
                            Vulnerabilities.COOKIE_MISSING_SAMESITE_FLAG,
                        )
                    )

                    _checked_cookies[Vulnerabilities.COOKIE_MISSING_SAMESITE_FLAG].append(name)

            # check SameSite=None flag
            if "samesite=none" in comp:
                if "secure" in comp:
                    if name not in _checked_cookies[Vulnerabilities.COOKIE_WITH_SAMESITE_NONE_FLAG]:
                        results.append(
                            Result.from_evidence(
                                Evidence.from_response(res, {"cookie": name}),
                                f"Cookie With SameSite=None Flag: {cookie}",
                                Vulnerabilities.COOKIE_WITH_SAMESITE_NONE_FLAG,
                            )
                        )

                        _checked_cookies[Vulnerabilities.COOKIE_WITH_SAMESITE_NONE_FLAG].append(name)
                else:
                    if name not in _checked_cookies[Vulnerabilities.COOKIE_INVALID_SAMESITE_NONE_FLAG]:
                        results.append(
                            Result.from_evidence(
                                Evidence.from_response(res, {"cookie": name}),
                                f"Cookie SameSite=None Flag Invalid (without Secure flag): {cookie}",
                                Vulnerabilities.COOKIE_INVALID_SAMESITE_NONE_FLAG,
                            )
                        )

                        _checked_cookies[Vulnerabilities.COOKIE_INVALID_SAMESITE_NONE_FLAG].append(name)
    except Exception:
        output.debug_exception()

    return results
Example 27
def _file_search(session: Session, orig_links: List[str]) -> List[str]:
    new_files: List[str] = []
    file_good, file_res, path_good, path_res = network.check_404_response(session.url)

    # these are here for data typing
    results: Union[List[Result], None]
    links: Union[List[str], None]

    if not file_good:
        reporter.display(
            "Web server does not respond properly to file 404 errors.",
            Issue(
                Vulnerabilities.SERVER_INVALID_404_FILE,
                session.url,
                Evidence.from_response(file_res),
            ),
        )
    if not path_good:
        reporter.display(
            "Web server does not respond properly to path 404 errors.",
            Issue(
                Vulnerabilities.SERVER_INVALID_404_PATH,
                session.url,
                Evidence.from_response(path_res),
            ),
        )

    if not (file_good or path_good):
        output.norm(
            "Site does not respond properly to non-existent file/path requests; skipping some checks."
        )

    if file_good:
        links, results = special_files.check_special_files(session.url)
        if results:
            reporter.display_results(results, "\t")

        new_files += links

        if session.args.files:
            output.empty()
            output.norm("Searching for common files (this will take a few minutes)...")

            with Spinner():
                try:
                    links, results = file_search.find_files(session.url)
                except Exception as error:
                    output.debug_exception()
                    output.error(f"Error running scan: {str(error)}")
                    results = None
                    links = None

            if results is not None and results:
                reporter.display_results(results, "\t")

            if links is not None and links:
                new_files += links

                for l in links:
                    if l not in orig_links:
                        output.norm(f"\tNew file found: {l}")

                output.empty()

        # check for common backup files
        all_links = orig_links + new_files
        with Spinner():
            backups, res = file_search.find_backups(all_links)
        if res:
            reporter.display_results(res, "\t")
        if backups:
            new_files += backups

    if path_good:
        links, results = special_files.check_special_paths(session.url)

        if results:
            reporter.display_results(results, "\t")

        new_files += links

        if session.args.dir:
            output.empty()
            output.norm(
                "Searching for common directories (this will take a few minutes)..."
            )

            with Spinner():
                try:
                    links, results = file_search.find_directories(
                        session.url,
                        session.args.dirlistredir,
                        session.args.dirrecursive,
                    )
                except Exception as error:
                    output.debug_exception()
                    output.error(f"Error running scan: {str(error)}")
                    results = None
                    links = None

            if results is not None and results:
                reporter.display_results(results, "\t")

            if links is not None and links:
                new_files += links

                for l in links:
                    if l not in orig_links:
                        output.norm(f"\tNew directory found: {l}")

                output.empty()

    # check for .DS_Store files
    if file_good:
        res = file_search.find_ds_store(new_files)

        if res:
            reporter.display_results(res, "\t")

    return new_files
Example 28
def _get_cookie_issues(cookies: List[str], url: str, res: Response) -> List[Result]:
    global _checked_cookies

    # setup the checked list
    if Vln.COOKIE_MISSING_SECURE_FLAG not in _checked_cookies:
        _checked_cookies[Vln.COOKIE_MISSING_SECURE_FLAG] = []
    if Vln.COOKIE_INVALID_SECURE_FLAG not in _checked_cookies:
        _checked_cookies[Vln.COOKIE_INVALID_SECURE_FLAG] = []
    if Vln.COOKIE_MISSING_HTTPONLY_FLAG not in _checked_cookies:
        _checked_cookies[Vln.COOKIE_MISSING_HTTPONLY_FLAG] = []
    if Vln.COOKIE_MISSING_SAMESITE_FLAG not in _checked_cookies:
        _checked_cookies[Vln.COOKIE_MISSING_SAMESITE_FLAG] = []
    if Vln.COOKIE_WITH_SAMESITE_NONE_FLAG not in _checked_cookies:
        _checked_cookies[Vln.COOKIE_WITH_SAMESITE_NONE_FLAG] = []
    if Vln.COOKIE_INVALID_SAMESITE_NONE_FLAG not in _checked_cookies:
        _checked_cookies[Vln.COOKIE_INVALID_SAMESITE_NONE_FLAG] = []
    if Vln.COOKIE_BIGIP_IP_DISCLOSURE not in _checked_cookies:
        _checked_cookies[Vln.COOKIE_BIGIP_IP_DISCLOSURE] = []

    results: List[Result] = []

    try:
        parsed = urlparse(url)

        for cookie in cookies:
            comp = cookie.split(";")

            # get the name
            name = comp[0].split("=")[0]

            # get the value (split only on the first "=", as values may contain "=")
            value = comp[0].split("=", 1)[1]

            # normalize the components
            comp = list(map(str.strip, comp))  # trim the string to clear the spaces
            comp = list(
                map(str.lower, comp)  # make it all lowercase, to simplify checks
            )

            # check for BigIP IP Disclosure
            if "BIGip" in name:
                if name not in _checked_cookies[Vln.COOKIE_BIGIP_IP_DISCLOSURE]:
                    _checked_cookies[Vln.COOKIE_BIGIP_IP_DISCLOSURE].append(name)
                    decoded = _decode_big_ip_cookie(value)

                    if decoded is not None:
                        results.append(
                            Result.from_evidence(
                                Evidence.from_response(res, {"cookie": name}),
                                f"Big-IP Internal IP Address Disclosure: {name}: {decoded}",
                                Vln.COOKIE_BIGIP_IP_DISCLOSURE,
                            )
                        )

            # check Secure flag
            if "secure" not in comp and parsed.scheme == "https":
                if name not in _checked_cookies[Vln.COOKIE_MISSING_SECURE_FLAG]:
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res, {"cookie": name}),
                            f"Cookie Missing Secure Flag: {cookie}",
                            Vln.COOKIE_MISSING_SECURE_FLAG,
                        )
                    )

                    _checked_cookies[Vln.COOKIE_MISSING_SECURE_FLAG].append(name)
            elif "secure" in comp and parsed.scheme == "http":
                # secure flag over HTTP is invalid
                if name not in _checked_cookies[Vln.COOKIE_INVALID_SECURE_FLAG]:
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res, {"cookie": name}),
                            f"Cookie Secure Flag Invalid (over HTTP): {cookie}",
                            Vln.COOKIE_INVALID_SECURE_FLAG,
                        )
                    )

                    _checked_cookies[Vln.COOKIE_INVALID_SECURE_FLAG].append(name)

            # check HttpOnly flag
            if "httponly" not in comp:
                if name not in _checked_cookies[Vln.COOKIE_MISSING_HTTPONLY_FLAG]:
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res, {"cookie": name}),
                            f"Cookie Missing HttpOnly Flag: {cookie}",
                            Vln.COOKIE_MISSING_HTTPONLY_FLAG,
                        )
                    )

                    _checked_cookies[Vln.COOKIE_MISSING_HTTPONLY_FLAG].append(name)

            # check SameSite flag
            if (
                "samesite=lax" not in comp
                and "samesite=strict" not in comp
                and "samesite=none" not in comp
            ):
                if name not in _checked_cookies[Vln.COOKIE_MISSING_SAMESITE_FLAG]:
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res, {"cookie": name}),
                            f"Cookie Missing SameSite Flag: {cookie}",
                            Vln.COOKIE_MISSING_SAMESITE_FLAG,
                        )
                    )

                    _checked_cookies[Vln.COOKIE_MISSING_SAMESITE_FLAG].append(name)

            # check SameSite=None flag
            if "samesite=none" in comp:
                if "secure" in comp:
                    if name not in _checked_cookies[Vln.COOKIE_WITH_SAMESITE_NONE_FLAG]:
                        results.append(
                            Result.from_evidence(
                                Evidence.from_response(res, {"cookie": name}),
                                f"Cookie With SameSite=None Flag: {cookie}",
                                Vln.COOKIE_WITH_SAMESITE_NONE_FLAG,
                            )
                        )

                        _checked_cookies[Vln.COOKIE_WITH_SAMESITE_NONE_FLAG].append(
                            name
                        )
                else:
                    if (
                        name
                        not in _checked_cookies[Vln.COOKIE_INVALID_SAMESITE_NONE_FLAG]
                    ):
                        results.append(
                            Result.from_evidence(
                                Evidence.from_response(res, {"cookie": name}),
                                f"Cookie SameSite=None Flag Invalid (without Secure flag): {cookie}",
                                Vln.COOKIE_INVALID_SAMESITE_NONE_FLAG,
                            )
                        )

                        _checked_cookies[Vln.COOKIE_INVALID_SAMESITE_NONE_FLAG].append(
                            name
                        )
    except Exception:
        output.debug_exception()

    return results
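The _decode_big_ip_cookie helper is not shown in this excerpt. The classic (unencrypted, IPv4) BIG-IP persistence cookie encodes the pool member as two little-endian integers in the form ip.port.0000. A minimal sketch of a decoder for that format, offered as an assumption based on the documented encoding rather than the project's actual code:

import struct
from typing import Optional


def _decode_big_ip_cookie_sketch(value: str) -> Optional[str]:
    # classic format: "<ip as little-endian uint32>.<byte-swapped uint16 port>.0000"
    try:
        ip_enc, port_enc = value.split(".")[:2]

        # the four octets are the little-endian bytes of the first integer
        ip = ".".join(str(octet) for octet in struct.pack("<I", int(ip_enc)))

        # the port is the second integer with its two bytes swapped
        port = struct.unpack("<H", struct.pack(">H", int(port_enc)))[0]

        return f"{ip}:{port}"
    except (ValueError, struct.error):
        # IPv6 and encrypted variants use different encodings; skip them here
        return None

For example, _decode_big_ip_cookie_sketch("1677787402.36895.0000") returns "10.1.1.100:8080".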
Example No. 29
0
def _check_cache_headers(url: str, res: Response) -> List[Result]:
    results = []

    try:
        if "Cache-Control" in res.headers:
            # we have the header, check the content
            if "public" in str(res.headers["Cache-Control"]).lower():
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Cache-Control: Public: {url}",
                        Vulnerabilities.HTTP_HEADER_CACHE_CONTROL_PUBLIC,
                    )
                )

            if "no-cache" not in str(res.headers["Cache-Control"]).lower():
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Cache-Control: no-cache Not Found: {url}",
                        Vulnerabilities.HTTP_HEADER_CACHE_CONTROL_NO_CACHE_MISSING,
                    )
                )

            if "no-store" not in str(res.headers["Cache-Control"]).lower():
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Cache-Control: no-store Not Found: {url}",
                        Vulnerabilities.HTTP_HEADER_CACHE_CONTROL_NO_STORE_MISSING,
                    )
                )

            if "private" not in str(res.headers["Cache-Control"]).lower():
                results.append(
                    Result.from_evidence(
                        Evidence.from_response(res),
                        f"Cache-Control: private Not Found: {url}",
                        Vulnerabilities.HTTP_HEADER_CACHE_CONTROL_PRIVATE_MISSING,
                    )
                )
        else:
            # header missing
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Cache-Control Header Not Found: {url}",
                    Vulnerabilities.HTTP_HEADER_CACHE_CONTROL_MISSING,
                )
            )

        if "Expires" not in res.headers:
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Expires Header Not Found: {url}",
                    Vulnerabilities.HTTP_HEADER_EXPIRES_MISSING,
                )
            )
        else:
            # parse the date, and check to see if it's in the past
            try:
                # fuzzy=True can produce false positives, since the parser does
                # whatever it can to extract a valid date - but it is the most
                # forgiving option, so oddly formatted servers don't cause errors
                dt = parse(res.headers["Expires"], fuzzy=True)
                if dt > datetime.now(tz.UTC):
                    # Expires is in the future - it's an issue
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res),
                            f"Expires Header - Future Dated ({res.headers['Expires']}): {url}",
                            Vulnerabilities.HTTP_HEADER_EXPIRES_FUTURE,
                        )
                    )
            except Exception:
                output.debug_exception()

        if "Pragma" not in res.headers or "no-cache" not in str(res.headers["Pragma"]):
            results.append(
                Result.from_evidence(
                    Evidence.from_response(res),
                    f"Pragma: no-cache Not Found: {url}",
                    Vulnerabilities.HTTP_HEADER_PRAGMA_NO_CACHE_MISSING,
                )
            )
    except Exception:
        output.debug_exception()

    return results
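Reading the branches together, a response only avoids every cache-related finding above when all three headers are present and restrictive. A small sketch of a header set that satisfies each check, derived directly from the conditions in the code:

# headers that produce no findings from _check_cache_headers: every token the
# checks look for is present, "public" is absent, and Expires is in the past
cache_safe_headers = {
    "Cache-Control": "no-cache, no-store, private",
    "Expires": "Thu, 01 Jan 1970 00:00:00 GMT",
    "Pragma": "no-cache",
}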