Example #1
def _check_password_reset(session: Session, element_name: Optional[str] = None):
    user = session.args.user
    if user is None:
        user = utils.prompt("What is a valid user? ")

    try:
        with Spinner():
            res = password_reset.check_resp_user_enum(session, user, element_name)

        if res:
            reporter.display_results(res, "\t")
    except WebDriverException as e:
        output.error("Selenium error encountered: " + e.msg)
    except PasswordResetElementNotFound as e:
        if element_name is not None:
            # we failed to find the element, and we had one specified - this isn't going to work
            output.error(
                "Unable to find a matching element to perform the User Enumeration via Password Reset: "
                + str(e)
            )
        else:
            # we failed, because we don't have the element - so we prompt for it.
            print(
                "Unable to find a known element to enter the user name. Please identify the proper element."
            )
            print(
                "If this element seems to be common, please request that it be added: https://github.com/adamcaudill/yawast/issues"
            )
            name = utils.prompt("What is the user/email entry element name? ")

            _check_password_reset(session, name)
    except Exception as e:
        output.error(
            "Failed to execute Password Reset Page User Enumeration: " + str(e)
        )
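_check_password_reset is invoked from scan() (see Examples #3 and #5) without an element name; if none of the known input elements are found, it prompts the operator for the element name and calls itself again. A minimal sketch of the two call shapes, assuming a prepared Session object; the "email" element name below is invented for illustration and is not taken from yawast:

# Hypothetical call sites, mirroring how scan() uses this helper.
_check_password_reset(session)           # try the built-in element names first
_check_password_reset(session, "email")  # retry with an operator-supplied element name ("email" is made up)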
Example #2
File: network.py Project: sgnls/yawast
def _check_open_ports(domain: str, url: str, file: Optional[str] = None):
    try:
        output.empty()
        output.norm("Open Ports:")

        ips = basic.get_ips(domain)

        for ip in ips:
            with Spinner():
                res = port_scan.check_open_ports(url, ip, file)

            if len(res) > 0:
                reporter.display_results(res, "\t")
    except Exception as error:
        output.error(f"Error checking for open ports: {str(error)}")
Example #3
def scan(session: Session):
    reporter.register_data("url", session.url)
    reporter.register_data("domain", session.domain)

    output.empty()
    output.norm("HEAD:")
    head = network.http_head(session.url)

    raw = network.http_build_raw_response(head)
    for line in raw.splitlines():
        output.norm(f"\t{line}")

    output.empty()

    res = http_basic.get_header_issues(head, raw, session.url)
    if res:
        output.norm("Header Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    res = http_basic.get_cookie_issues(head, session.url)
    if res:
        output.norm("Cookie Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    # check for WAF signatures
    res = waf.get_waf(head.headers, raw, session.url)
    if res:
        output.norm("WAF Detection:")

        reporter.display_results(res, "\t")
        output.empty()

    output.norm("Performing vulnerability scan (this will take a while)...")

    links: List[str] = []
    with Spinner():
        try:
            links, res = spider.spider(session.url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Error running scan: {str(error)}")

    output.norm(f"Identified {len(links) + 1} pages.")
    output.empty()

    if res:
        output.norm("Issues Detected:")

        reporter.display_results(res, "\t")
        output.empty()

    # get files, and add those to the link list
    links += _file_search(session, links)

    if (
        session.args.pass_reset_page is not None
        and len(session.args.pass_reset_page) > 0
    ):
        _check_password_reset(session)

    with Spinner():
        res = http_basic.check_local_ip_disclosure(session)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_httpd.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_tomcat.check_all(session.url, links)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = nginx.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = iis.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_propfind(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_trace(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_options(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = php.find_phpinfo(links)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res, jira_path = jira.check_for_jira(session)
    if res:
        reporter.display_results(res, "\t")

    if jira_path is not None:
        with Spinner():
            res = jira.check_jira_user_registration(jira_path)
        if res:
            reporter.display_results(res, "\t")

    with Spinner():
        wp_path, res = wordpress.identify(session.url)
    if res:
        reporter.display_results(res, "\t")

    if wp_path is not None:
        with Spinner():
            res = wordpress.check_json_user_enum(wp_path)
            res += wordpress.check_path_disclosure(wp_path)
        if res:
            reporter.display_results(res, "\t")
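The scan() routine above repeats the same shape for every plugin: run the check behind a Spinner, then display whatever results it returned. A sketch of a hypothetical helper that captures that pattern (not part of yawast; Spinner and reporter.display_results are the objects used in the examples):

from typing import Callable, List

def _run_check(check: Callable[[], List]) -> List:
    # Hypothetical helper, not in yawast: run a plugin check behind the spinner,
    # then print any results it produced.
    with Spinner():
        res = check()
    if res:
        reporter.display_results(res, "\t")
    return res

# e.g. _run_check(lambda: nginx.check_all(session.url))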
Example #4
def _file_search(session: Session, orig_links: List[str]) -> List[str]:
    new_files: List[str] = []
    file_good, file_res, path_good, path_res = network.check_404_response(session.url)

    # these are here for data typing
    results: Union[List[Result], None]
    links: Union[List[str], None]

    if not file_good:
        reporter.display(
            "Web server does not respond properly to file 404 errors.",
            Issue(
                Vulnerabilities.SERVER_INVALID_404_FILE,
                session.url,
                Evidence.from_response(file_res),
            ),
        )
    if not path_good:
        reporter.display(
            "Web server does not respond properly to path 404 errors.",
            Issue(
                Vulnerabilities.SERVER_INVALID_404_PATH,
                session.url,
                Evidence.from_response(path_res),
            ),
        )

    if not (file_good or path_good):
        output.norm(
            "Site does not respond properly to non-existent file/path requests; skipping some checks."
        )

    if file_good:
        links, results = special_files.check_special_files(session.url)
        if results:
            reporter.display_results(results, "\t")

        new_files += links

        if session.args.files:
            output.empty()
            output.norm("Searching for common files (this will take a few minutes)...")

            with Spinner():
                try:
                    links, results = file_search.find_files(session.url)
                except Exception as error:
                    output.debug_exception()
                    output.error(f"Error running scan: {str(error)}")
                    results = None
                    links = None

            if results is not None and results:
                reporter.display_results(results, "\t")

            if links is not None and links:
                new_files += links

                for l in links:
                    if l not in orig_links:
                        output.norm(f"\tNew file found: {l}")

                output.empty()

        # check for common backup files
        all_links = orig_links + new_files
        with Spinner():
            backups, res = file_search.find_backups(all_links)
        if res:
            reporter.display_results(res, "\t")
        if backups:
            new_files += backups

    if path_good:
        links, results = special_files.check_special_paths(session.url)

        if results:
            reporter.display_results(results, "\t")

        new_files += links

        if session.args.dir:
            output.empty()
            output.norm(
                "Searching for common directories (this will take a few minutes)..."
            )

            with Spinner():
                try:
                    links, results = file_search.find_directories(
                        session.url,
                        session.args.dirlistredir,
                        session.args.dirrecursive,
                    )
                except Exception as error:
                    output.debug_exception()
                    output.error(f"Error running scan: {str(error)}")
                    results = None
                    links = None

            if results is not None and results:
                reporter.display_results(results, "\t")

            if links is not None and links:
                new_files += links

                for l in links:
                    if l not in orig_links:
                        output.norm(f"\tNew directory found: {l}")

                output.empty()

    # check for .DS_Store files
    if file_good:
        res = file_search.find_ds_store(new_files)

        if res:
            reporter.display_results(res, "\t")

    return new_files
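_file_search only runs the file and directory searches when network.check_404_response reports that the server answers missing resources with a genuine 404. A minimal sketch of that kind of sanity check, assuming the requests library; yawast's real implementation lives in its network module and differs from this:

import secrets

import requests

def _responds_with_404(base_url: str) -> bool:
    # Illustration only: request a path that almost certainly does not exist and
    # confirm the server answers 404 rather than 200 or a redirect.
    bogus = f"{base_url.rstrip('/')}/{secrets.token_hex(8)}"
    resp = requests.get(bogus, allow_redirects=False, timeout=10)
    return resp.status_code == 404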
Example #5
File: http.py Project: rurbin3/yawast
def scan(session: Session):
    reporter.register_data("url", session.url)
    reporter.register_data("domain", session.domain)

    output.empty()
    output.norm("HEAD:")
    head = network.http_head(session.url)

    raw = network.http_build_raw_response(head)
    for line in raw.splitlines():
        output.norm(f"\t{line}")

    output.empty()

    res = http_basic.get_header_issues(head, raw, session.url)
    if res:
        output.norm("Header Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    res = http_basic.get_cookie_issues(head, session.url)
    if res:
        output.norm("Cookie Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    # check for WAF signatures
    res = waf.get_waf(head.headers, raw, session.url)
    if res:
        output.norm("WAF Detection:")

        reporter.display_results(res, "\t")
        output.empty()

    # check the HSTS preload status
    results = http_basic.check_hsts_preload(session.url)
    if len(results) > 0:
        reporter.register_data("hsts_preload_status", results)

        output.norm("HSTS Preload Status:")
        for result in results:
            chrome = result["chrome"] is not None
            firefox = result["firefox"] is not None
            tor = result["tor"] is not None

            output.norm(
                f"\t({result['domain']}) Chrome: {chrome}\tFirefox: {firefox}\t\tTor: {tor}"
            )
        output.empty()

    methods, res = http_basic.check_http_methods(session.url)
    if len(methods) == 0:
        output.norm("Server responds to invalid HTTP methods - check skipped.")
    else:
        reporter.register_data({"http_methods_supported": methods})

        output.norm("Supported HTTP methods:")

        for method in methods:
            output.norm(f"\t{method}")

    output.empty()

    if res:
        reporter.display_results(res, "\t")
        output.empty()

    output.norm("Performing vulnerability scan (this will take a while)...")

    links: List[str] = []
    with Spinner():
        try:
            links, res = spider.spider(session.url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Error running scan: {str(error)}")

    output.norm(f"Identified {len(links) + 1} pages.")
    output.empty()

    if res:
        output.norm("Issues Detected:")

        reporter.display_results(res, "\t")
        output.empty()

    # get files, and add those to the link list
    links += _file_search(session, links)

    if (
        session.args.pass_reset_page is not None
        and len(session.args.pass_reset_page) > 0
    ):
        _check_password_reset(session)

    with Spinner():
        res = http_basic.check_local_ip_disclosure(session)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_httpd.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_tomcat.check_all(session.url, links)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = nginx.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = iis.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_propfind(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_trace(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_options(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = php.find_phpinfo(links)
    if res:
        reporter.display_results(res, "\t")

    if session.args.php_page is not None and len(session.args.php_page) > 0:
        with Spinner():
            res = php.check_cve_2019_11043(session, links)
        if res:
            reporter.display_results(res, "\t")

    with Spinner():
        res, jira_path = jira.check_for_jira(session)
    if res:
        reporter.display_results(res, "\t")

    if jira_path is not None:
        with Spinner():
            res = jira.check_jira_user_registration(jira_path)
        if res:
            reporter.display_results(res, "\t")

    with Spinner():
        wp_path, res = wordpress.identify(session.url)
    if res:
        reporter.display_results(res, "\t")

    if wp_path is not None:
        with Spinner():
            res = wordpress.check_json_user_enum(wp_path)
            res += wordpress.check_path_disclosure(wp_path)
        if res:
            reporter.display_results(res, "\t")
Example #6
File: http.py Project: sasqwatch/yawast
def scan(args: Namespace, url: str, domain: str):
    reporter.register_data("url", url)
    reporter.register_data("domain", domain)

    output.empty()
    output.norm("HEAD:")
    head = network.http_head(url)

    raw = network.http_build_raw_response(head)
    for line in raw.splitlines():
        output.norm(f"\t{line}")

    output.empty()

    res = http_basic.get_header_issues(head, raw, url)
    if len(res) > 0:
        output.norm("Header Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    res = http_basic.get_cookie_issues(head, raw, url)
    if len(res) > 0:
        output.norm("Cookie Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    # check for WAF signatures
    res = waf.get_waf(head.headers, raw, url)
    if len(res) > 0:
        output.norm("WAF Detection:")

        reporter.display_results(res, "\t")
        output.empty()

    output.norm("Performing vulnerability scan (this will take a while)...")

    links: List[str] = []
    with Spinner():
        try:
            links, res = spider.spider(url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Error running scan: {str(error)}")

    output.norm(f"Identified {len(links) + 1} pages.")
    output.empty()

    if len(res) > 0:
        output.norm("Issues Detected:")

        reporter.display_results(res, "\t")
        output.empty()

    # get files, and add those to the link list
    links += _file_search(args, url, links)

    res = apache_httpd.check_all(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = apache_tomcat.check_all(url, links)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = nginx.check_all(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = iis.check_all(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = http_basic.check_propfind(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = http_basic.check_trace(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = http_basic.check_options(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    wp_path, res = wordpress.identify(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    if wp_path is not None:
        res = wordpress.check_json_user_enum(wp_path)
        if len(res) > 0:
            reporter.display_results(res, "\t")