Code example #1
File: http_basic.py Project: sasqwatch/yawast
def check_options(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_options(url)

    if "Allow" in res.headers:
        results.append(
            Result(
                f"Allow HTTP Verbs (OPTIONS): {res.headers['Allow']}",
                Vulnerabilities.HTTP_OPTIONS_ALLOW,
                url,
                [
                    network.http_build_raw_request(res.request),
                    network.http_build_raw_response(res),
                ],
            )
        )

    if "Public" in res.headers:
        results.append(
            Result(
                f"Public HTTP Verbs (OPTIONS): {res.headers['Allow']}",
                Vulnerabilities.HTTP_OPTIONS_PUBLIC,
                url,
                [
                    network.http_build_raw_request(res.request),
                    network.http_build_raw_response(res),
                ],
            )
        )

    results += response_scanner.check_response(url, res)

    return results
Code example #2
File: nginx.py Project: rurbin3/yawast
def check_status(url: str) -> List[Result]:
    results: List[Result] = []
    search = ["status/", "stats/"]

    for path in search:
        target = urljoin(url, path)

        res = network.http_get(target, False)
        body = res.text

        if res.status_code == 200 and "Active connections:" in body:
            results.append(
                Result(
                    f"Nginx status page found: {target}",
                    Vulnerabilities.SERVER_NGINX_STATUS_EXPOSED,
                    target,
                    [
                        network.http_build_raw_request(res.request),
                        network.http_build_raw_response(res),
                    ],
                ))

        results += response_scanner.check_response(target, res)

    return results
Code example #3
    def test_get_header_issues_powered_by(self):
        url = "http://example.com"

        with requests_mock.Mocker(real_http=True) as m:
            m.get(
                url,
                text="body",
                headers={
                    "X-XSS-Protection": "1",
                    "X-Frame-Options": "blah",
                    "X-Content-Type-Options": "nosniff",
                    "Content-Security-Policy": "blah",
                    "Referrer-Policy": "blah",
                    "Feature-Policy": "blah",
                    "Strict-Transport-Security": "blah",
                    "X-Powered-By": "blah",
                },
            )

            resp = requests.get(url)

        res = http_basic.get_header_issues(
            resp, network.http_build_raw_response(resp), url
        )

        self.assertEqual(1, len(res))
        self.assertIn("X-Powered-By Header Present", res[0].message)
Code example #4
    def test_get_header_issues_dup_header(self):
        network.init("", "", "")
        output.setup(False, False, False)

        # we are using www.google.com as they return multiple Set-Cookie headers
        url = "https://www.google.com"

        output.setup(False, True, True)
        with utils.capture_sys_output() as (stdout, stderr):
            resp = requests.get(url)
            results = http_basic.get_header_issues(
                resp, network.http_build_raw_response(resp), url
            )

        self.assertIsNotNone(results)
        self.assertTrue(len(results) > 0)
        self.assertNotIn("Exception", stderr.getvalue())
        self.assertNotIn("Error", stdout.getvalue())
        self.assertTrue(
            any(
                "Header Set-Cookie set multiple times with different values"
                in r.message
                for r in results
            )
        )
Code example #5
def check_asp_net_debug(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_custom(
        "DEBUG", url, additional_headers={"Command": "stop-debug", "Accept": "*/*"}
    )

    if res.status_code == 200 and "OK" in res.text:
        # we've got a hit, but could be a false positive
        # try this again, with a different verb
        xres = network.http_custom(
            "XDEBUG", url, additional_headers={"Command": "stop-debug", "Accept": "*/*"}
        )

        # if we get a 200 when using an invalid verb, it's a false positive
        # if we get something else, then the DEBUG actually did something
        if xres.status_code != 200:
            results.append(
                Result(
                    "ASP.NET Debugging Enabled",
                    Vulnerabilities.SERVER_ASPNET_DEBUG_ENABLED,
                    url,
                    [
                        network.http_build_raw_request(res.request),
                        network.http_build_raw_response(res),
                    ],
                )
            )
        else:
            output.debug("Server responds to invalid HTTP verbs with status 200")

    results += response_scanner.check_response(url, res)

    return results
Code example #6
def check_cve_2019_0232(links: List[str]) -> List[Result]:
    results: List[Result] = []
    targets: List[str] = []

    for link in links:
        if "cgi-bin" in link:
            if "?" in link:
                targets.append(f"{link}&dir")
            else:
                targets.append(f"{link}?dir")

    for target in targets:
        res = network.http_get(target, False)
        body = res.text

        if "<DIR>" in body:
            # we have a hit
            results.append(
                Result(
                    f"Apache Tomcat RCE (CVE-2019-0232): {target}",
                    Vulnerabilities.SERVER_TOMCAT_CVE_2019_0232,
                    target,
                    [
                        network.http_build_raw_request(res.request),
                        network.http_build_raw_response(res),
                    ],
                ))

        results += response_scanner.check_response(target, res)

    return results
Code example #7
def check_response(url: str,
                   res: Response,
                   soup: Union[BeautifulSoup, None] = None) -> List[Result]:
    # make sure we actually have something
    if res is None:
        return []

    results: List[Result] = []

    raw_full = network.http_build_raw_response(res)

    if http_utils.is_text(res):
        body = res.text

        if soup is None:
            soup = BeautifulSoup(body, "html.parser")

        # check for things that require parsed HTML
        results += retirejs.get_results(soup, url, res)
        results += apache_tomcat.get_version(url, res)
        results += error_checker.check_response(url, res, body)

        results += _check_cache_headers(url, res)

    results += http_basic.get_header_issues(res, raw_full, url)
    results += http_basic.get_cookie_issues(res, url)

    # check_cve_2019_5418 calls back into this function, which would cause infinite recursion.
    # to avoid that, we check the caller to make sure it's not the function we're about to call.
    if "check_cve_2019_5418" not in inspect.stack()[1].function:
        results += rails.check_cve_2019_5418(url)

    results += _check_charset(url, res)

    return results
Code example #8
def get_version(url: str,
                res: Response,
                method: Optional[str] = None) -> List[Result]:
    """Check a server response to see if it contains a Tomcat version.

    :param method:
    :param url:
    :param res:
    :return:
    """

    results: List[Result] = []

    body = res.text
    ver = _get_version_from_body(body, res.status_code)
    if ver is not None:
        msg = f"Apache Tomcat version exposed: {ver}"
        if method is not None:
            msg += f" (Via {method})"

        results.append(
            Result(
                msg,
                Vulnerabilities.SERVER_TOMCAT_VERSION,
                url,
                [
                    ver,
                    network.http_build_raw_request(res.request),
                    network.http_build_raw_response(res),
                ],
            ))

        results += _check_version_outdated(ver, url, body)

    return results
Code example #9
def check_cve_2017_12615(url: str) -> List[Result]:
    results = []

    try:
        file_name = secrets.token_hex(12)
        check_value = secrets.token_hex(12)

        target = urljoin(url, f"{file_name}.jsp/")
        # the check value must be a quoted string literal so the JSP compiles and echoes it
        res_put = network.http_put(target, f'<% out.println("{check_value}");%>',
                                   False)

        if res_put.status_code < 300:
            # code should be 2xx for this to work
            # now we need to check to see if it worked
            created_file = urljoin(url, f"{file_name}.jsp")

            res_get = network.http_get(created_file, False)

            if check_value in res_get.text:
                # we have RCE
                results.append(
                    Result(
                        f"Apache Tomcat PUT RCE (CVE-2017-12615): {created_file}",
                        Vulnerabilities.SERVER_TOMCAT_CVE_2017_12615,
                        url,
                        [
                            network.http_build_raw_request(res_put.request),
                            network.http_build_raw_response(res_put),
                            network.http_build_raw_request(res_get.request),
                            network.http_build_raw_response(res_get),
                        ],
                    ))
            else:
                results += response_scanner.check_response(
                    created_file, res_get)
        else:
            # if we didn't get a hit, go ahead and scan it to see if there's
            #  anything else that we should be picking up.
            results += response_scanner.check_response(target, res_put)
    except Exception:
        output.debug_exception()

    return results
Code example #10
    @classmethod
    def from_response(cls,
                      response: Response,
                      custom: Optional[Dict[str, Any]] = None):
        ev = cls(
            response.request.url,
            network.http_build_raw_request(response.request),
            network.http_build_raw_response(response),
            custom,
        )

        return ev
Code example #11
File: test_http.py Project: marciopocebon/yawast
    def test_get_header_issues_no_sec_headers(self):
        url = "http://example.com"

        with requests_mock.Mocker(real_http=True) as m:
            m.get(url, text="body")

            resp = requests.get(url)

        res = http_basic.get_header_issues(
            resp, network.http_build_raw_response(resp), url)

        self.assertEqual(7, len(res))
Code example #12
File: apache_tomcat.py Project: sgnls/yawast
def check_struts_sample(url: str) -> List[Result]:
    results: List[Result] = []

    try:
        # make sure we have real 404s
        file_good, _, _, _ = network.check_404_response(url)
        if not file_good:
            return results

        search = [
            "Struts2XMLHelloWorld/User/home.action",
            "struts2-showcase/showcase.action",
            "struts2-showcase/titles/index.action",
            "struts2-bootstrap-showcase/",
            "struts2-showcase/index.action",
            "struts2-bootstrap-showcase/index.action",
            "struts2-rest-showcase/",
        ]

        for path in search:
            target = urljoin(url, path)

            res = network.http_get(target, False)

            # check for other issues
            results += response_scanner.check_response(target, res)

            if res.status_code == 200:
                results.append(
                    Result(
                        f"Struts Sample Found: {target}",
                        Vulnerabilities.SERVER_TOMCAT_STRUTS_SAMPLE,
                        target,
                        [
                            network.http_build_raw_request(res.request),
                            network.http_build_raw_response(res),
                        ],
                    )
                )
    except Exception:
        output.debug_exception()

    return results
Code example #13
File: apache_tomcat.py Project: sgnls/yawast
def check_manager_password(url: str) -> List[Result]:
    results = []

    try:
        creds = [
            b"tomcat:tomcat",
            b"tomcat:password",
            b"tomcat:",
            b"admin:admin",
            b"admin:password",
            b"admin:",
        ]

        for cred in creds:
            ce = base64.b64encode(cred)

            res = network.http_get(url, False, {"Authorization": f"Basic {ce.decode()}"})
            body = res.text

            if (
                '<font size="+2">Tomcat Web Application Manager</font>' in body
                or '<font size="+2">Tomcat Virtual Host Manager</font>' in body
            ):
                # we got in
                results.append(
                    Result(
                        f"Apache Tomcat Weak Manager Password: '******' - {url}",
                        Vulnerabilities.SERVER_TOMCAT_MANAGER_WEAK_PASSWORD,
                        url,
                        [
                            network.http_build_raw_request(res.request),
                            network.http_build_raw_response(res),
                        ],
                    )
                )
            else:
                # if we didn't get a hit, go ahead and scan it to see if there's
                #  anything else that we should be picking up.
                results += response_scanner.check_response(url, res)
    except Exception:
        output.debug_exception()

    return results
Code example #14
File: apache_tomcat.py Project: sgnls/yawast
def check_manager(url: str) -> List[Result]:
    results = []

    try:
        for p in [urljoin(url, "manager/"), urljoin(url, "host-manager/")]:
            # check for both Tomcat 6 and 7+
            for path in [urljoin(p, "html/"), p]:

                res = network.http_get(path, False)

                body = res.text

                if "<tt>conf/tomcat-users.xml</tt>" in body:
                    # we have a finding
                    vuln = Vulnerabilities.SERVER_TOMCAT_MANAGER_EXPOSED
                    if "host-manager" in path:
                        vuln = Vulnerabilities.SERVER_TOMCAT_HOST_MANAGER_EXPOSED

                    results.append(
                        Result(
                            f"Apache Tomcat Manager found: {path}",
                            vuln,
                            path,
                            [
                                network.http_build_raw_request(res.request),
                                network.http_build_raw_response(res),
                            ],
                        )
                    )

                    # check to see if we can get in with a default password
                    results += check_manager_password(url)
                else:
                    # if we didn't get a hit, go ahead and scan it to see if there's
                    #  anything else that we should be picking up.
                    results += response_scanner.check_response(path, res)
    except Exception:
        output.debug_exception()

    return results
Code example #15
File: http_basic.py Project: sasqwatch/yawast
def check_trace(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_custom("TRACE", url)
    body = res.text

    if res.status_code == 200 and "TRACE / HTTP/1.1" in body:
        results.append(
            Result(
                "HTTP TRACE Enabled",
                Vulnerabilities.HTTP_TRACE_ENABLED,
                url,
                [
                    network.http_build_raw_request(res.request),
                    network.http_build_raw_response(res),
                ],
            )
        )

    results += response_scanner.check_response(url, res)

    return results
Code example #16
File: http_basic.py Project: sasqwatch/yawast
def check_propfind(url: str) -> List[Result]:
    results: List[Result] = []

    res = network.http_custom("PROPFIND", url)
    body = res.text

    if res.status_code <= 400 and len(body) > 0:
        if "Content-Type" in res.headers and "text/xml" in res.headers["Content-Type"]:
            results.append(
                Result(
                    "Possible Info Disclosure: PROPFIND Enabled",
                    Vulnerabilities.HTTP_PROPFIND_ENABLED,
                    url,
                    [
                        network.http_build_raw_request(res.request),
                        network.http_build_raw_response(res),
                    ],
                )
            )

    results += response_scanner.check_response(url, res)

    return results
Code example #17
def check_response(
    url: str, res: Response, soup: Union[BeautifulSoup, None] = None
) -> List[Result]:
    # make sure we actually have something
    if res is None:
        return []

    results: List[Result] = []

    raw_full = network.http_build_raw_response(res)

    if soup or network.response_body_is_text(res):
        body = res.text

        if soup is None:
            soup = BeautifulSoup(body, "html.parser")

        # check for things that require parsed HTML
        results += retirejs.get_results(soup, url, res)
        results += apache_tomcat.get_version(url, res)
        results += error_checker.check_response(url, res, body)
        results += iis.check_telerik_rau_enabled(soup, url)

        results += _check_cache_headers(url, res)

    results += http_basic.get_header_issues(res, raw_full, url)
    results += http_basic.get_cookie_issues(res, url)

    # only check for this if we have a good response - no point in doing this for errors
    if res.status_code < 400:
        results += rails.check_cve_2019_5418(url)

    # we perform this check even if the response isn't text as this also covers missing content-type
    results += _check_charset(url, res)

    return results
Code example #18
File: http.py Project: sasqwatch/yawast
def scan(args: Namespace, url: str, domain: str):
    reporter.register_data("url", url)
    reporter.register_data("domain", domain)

    output.empty()
    output.norm("HEAD:")
    head = network.http_head(url)

    raw = network.http_build_raw_response(head)
    for line in raw.splitlines():
        output.norm(f"\t{line}")

    output.empty()

    res = http_basic.get_header_issues(head, raw, url)
    if len(res) > 0:
        output.norm("Header Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    res = http_basic.get_cookie_issues(head, raw, url)
    if len(res) > 0:
        output.norm("Cookie Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    # check for WAF signatures
    res = waf.get_waf(head.headers, raw, url)
    if len(res) > 0:
        output.norm("WAF Detection:")

        reporter.display_results(res, "\t")
        output.empty()

    output.norm("Performing vulnerability scan (this will take a while)...")

    links: List[str] = []
    with Spinner():
        try:
            links, res = spider.spider(url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Error running scan: {str(error)}")

    output.norm(f"Identified {len(links) + 1} pages.")
    output.empty()

    if len(res) > 0:
        output.norm("Issues Detected:")

        reporter.display_results(res, "\t")
        output.empty()

    # get files, and add those to the link list
    links += _file_search(args, url, links)

    res = apache_httpd.check_all(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = apache_tomcat.check_all(url, links)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = nginx.check_all(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = iis.check_all(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = http_basic.check_propfind(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = http_basic.check_trace(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = http_basic.check_options(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    wp_path, res = wordpress.identify(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    if wp_path is not None:
        res = wordpress.check_json_user_enum(wp_path)
        if len(res) > 0:
            reporter.display_results(res, "\t")
Code example #19
File: http.py Project: rurbin3/yawast
def scan(session: Session):
    reporter.register_data("url", session.url)
    reporter.register_data("domain", session.domain)

    output.empty()
    output.norm("HEAD:")
    head = network.http_head(session.url)

    raw = network.http_build_raw_response(head)
    for line in raw.splitlines():
        output.norm(f"\t{line}")

    output.empty()

    res = http_basic.get_header_issues(head, raw, session.url)
    if res:
        output.norm("Header Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    res = http_basic.get_cookie_issues(head, session.url)
    if res:
        output.norm("Cookie Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    # check for WAF signatures
    res = waf.get_waf(head.headers, raw, session.url)
    if res:
        output.norm("WAF Detection:")

        reporter.display_results(res, "\t")
        output.empty()

    # check the HSTS preload status
    results = http_basic.check_hsts_preload(session.url)
    if len(results) > 0:
        reporter.register_data("hsts_preload_status", results)

        output.norm("HSTS Preload Status:")
        for result in results:
            chrome = result["chrome"] is not None
            firefox = result["firefox"] is not None
            tor = result["tor"] is not None

            output.norm(
                f"\t({result['domain']}) Chrome: {chrome}\tFirefox: {firefox}\t\tTor: {tor}"
            )
        output.empty()

    methods, res = http_basic.check_http_methods(session.url)
    if len(methods) == 0:
        output.norm("Server responds to invalid HTTP methods - check skipped.")
    else:
        reporter.register_data({"http_methods_supported": methods})

        output.norm("Supported HTTP methods:")

        for method in methods:
            output.norm(f"\t{method}")

    output.empty()

    if res:
        reporter.display_results(res, "\t")
        output.empty()

    output.norm("Performing vulnerability scan (this will take a while)...")

    links: List[str] = []
    with Spinner():
        try:
            links, res = spider.spider(session.url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Error running scan: {str(error)}")

    output.norm(f"Identified {len(links) + 1} pages.")
    output.empty()

    if res:
        output.norm("Issues Detected:")

        reporter.display_results(res, "\t")
        output.empty()

    # get files, and add those to the link list
    links += _file_search(session, links)

    if (
        session.args.pass_reset_page is not None
        and len(session.args.pass_reset_page) > 0
    ):
        _check_password_reset(session)

    with Spinner():
        res = http_basic.check_local_ip_disclosure(session)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_httpd.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_tomcat.check_all(session.url, links)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = nginx.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = iis.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_propfind(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_trace(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_options(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = php.find_phpinfo(links)
    if res:
        reporter.display_results(res, "\t")

    if session.args.php_page is not None and len(session.args.php_page) > 0:
        with Spinner():
            res = php.check_cve_2019_11043(session, links)
        if res:
            reporter.display_results(res, "\t")

    with Spinner():
        res, jira_path = jira.check_for_jira(session)
    if res:
        reporter.display_results(res, "\t")

    if jira_path is not None:
        with Spinner():
            res = jira.check_jira_user_registration(jira_path)
        if res:
            reporter.display_results(res, "\t")

    with Spinner():
        wp_path, res = wordpress.identify(session.url)
    if res:
        reporter.display_results(res, "\t")

    if wp_path is not None:
        with Spinner():
            res = wordpress.check_json_user_enum(wp_path)
            res += wordpress.check_path_disclosure(wp_path)
        if res:
            reporter.display_results(res, "\t")
Code example #20
def scan(session: Session):
    reporter.register_data("url", session.url)
    reporter.register_data("domain", session.domain)

    output.empty()
    output.norm("HEAD:")
    head = network.http_head(session.url)

    raw = network.http_build_raw_response(head)
    for line in raw.splitlines():
        output.norm(f"\t{line}")

    output.empty()

    res = http_basic.get_header_issues(head, raw, session.url)
    if res:
        output.norm("Header Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    res = http_basic.get_cookie_issues(head, session.url)
    if res:
        output.norm("Cookie Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    # check for WAF signatures
    res = waf.get_waf(head.headers, raw, session.url)
    if res:
        output.norm("WAF Detection:")

        reporter.display_results(res, "\t")
        output.empty()

    output.norm("Performing vulnerability scan (this will take a while)...")

    links: List[str] = []
    with Spinner():
        try:
            links, res = spider.spider(session.url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Error running scan: {str(error)}")

    output.norm(f"Identified {len(links) + 1} pages.")
    output.empty()

    if res:
        output.norm("Issues Detected:")

        reporter.display_results(res, "\t")
        output.empty()

    # get files, and add those to the link list
    links += _file_search(session, links)

    if (
        session.args.pass_reset_page is not None
        and len(session.args.pass_reset_page) > 0
    ):
        _check_password_reset(session)

    with Spinner():
        res = http_basic.check_local_ip_disclosure(session)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_httpd.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_tomcat.check_all(session.url, links)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = nginx.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = iis.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_propfind(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_trace(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_options(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = php.find_phpinfo(links)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res, jira_path = jira.check_for_jira(session)
    if res:
        reporter.display_results(res, "\t")

    if jira_path is not None:
        with Spinner():
            res = jira.check_jira_user_registration(jira_path)
        if res:
            reporter.display_results(res, "\t")

    with Spinner():
        wp_path, res = wordpress.identify(session.url)
    if res:
        reporter.display_results(res, "\t")

    if wp_path is not None:
        with Spinner():
            res = wordpress.check_json_user_enum(wp_path)
            res += wordpress.check_path_disclosure(wp_path)
        if res:
            reporter.display_results(res, "\t")