def test_check_opts_public(self):
    url = "http://example.com"

    with requests_mock.Mocker() as m:
        m.register_uri(
            "OPTIONS", url, status_code=200, headers={"Public": "GET"}
        )

        res = http_basic.check_options(url)

        self.assertTrue(
            any("Public HTTP Verbs (OPTIONS)" in r.message for r in res)
        )
def test_check_opts_none_ok(self):
    url = "http://example.com"

    with requests_mock.Mocker() as m:
        m.register_uri("OPTIONS", url, status_code=200)

        res = http_basic.check_options(url)

        for r in res:
            self.assertNotIn("HTTP Verbs (OPTIONS)", r.message)
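# Both tests above rely on requests_mock to answer the OPTIONS probe locally.
# Below is a self-contained sketch of that mocking pattern outside any test
# class; it assumes only the `requests` and `requests-mock` packages and the
# same placeholder URL the tests use, nothing from the scanner itself.
import requests
import requests_mock

with requests_mock.Mocker() as m:
    m.register_uri(
        "OPTIONS", "http://example.com", status_code=200, headers={"Public": "GET"}
    )

    # any OPTIONS request issued inside the Mocker context never hits the network
    resp = requests.options("http://example.com")
    print(resp.status_code)        # 200
    print(resp.headers["Public"])  # GET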
def scan(session: Session):
    reporter.register_data("url", session.url)
    reporter.register_data("domain", session.domain)

    output.empty()
    output.norm("HEAD:")

    head = network.http_head(session.url)
    raw = network.http_build_raw_response(head)
    for line in raw.splitlines():
        output.norm(f"\t{line}")

    output.empty()

    res = http_basic.get_header_issues(head, raw, session.url)
    if res:
        output.norm("Header Issues:")
        reporter.display_results(res, "\t")
        output.empty()

    res = http_basic.get_cookie_issues(head, session.url)
    if res:
        output.norm("Cookie Issues:")
        reporter.display_results(res, "\t")
        output.empty()

    # check for WAF signatures
    res = waf.get_waf(head.headers, raw, session.url)
    if res:
        output.norm("WAF Detection:")
        reporter.display_results(res, "\t")
        output.empty()

    output.norm("Performing vulnerability scan (this will take a while)...")

    links: List[str] = []
    with Spinner():
        try:
            links, res = spider.spider(session.url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Error running scan: {str(error)}")

    output.norm(f"Identified {len(links) + 1} pages.")
    output.empty()

    if res:
        output.norm("Issues Detected:")
        reporter.display_results(res, "\t")
        output.empty()

    # get files, and add those to the link list
    links += _file_search(session, links)

    if (
        session.args.pass_reset_page is not None
        and len(session.args.pass_reset_page) > 0
    ):
        _check_password_reset(session)

    with Spinner():
        res = http_basic.check_local_ip_disclosure(session)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_httpd.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_tomcat.check_all(session.url, links)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = nginx.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = iis.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_propfind(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_trace(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_options(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = php.find_phpinfo(links)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res, jira_path = jira.check_for_jira(session)
    if res:
        reporter.display_results(res, "\t")

    if jira_path is not None:
        with Spinner():
            res = jira.check_jira_user_registration(jira_path)
        if res:
            reporter.display_results(res, "\t")

    with Spinner():
        wp_path, res = wordpress.identify(session.url)
    if res:
        reporter.display_results(res, "\t")

    if wp_path is not None:
        with Spinner():
            res = wordpress.check_json_user_enum(wp_path)
            res += wordpress.check_path_disclosure(wp_path)
        if res:
            reporter.display_results(res, "\t")
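# scan() repeatedly wraps slow checks in `with Spinner():` and only prints the
# results after the block exits, so the animation never interleaves with real
# output. The real Spinner is defined elsewhere in this codebase; the class
# below is a sketch of what such a context-manager spinner can look like, an
# assumption rather than the project's actual implementation. It animates on a
# background thread and clears itself when the `with` block exits.
import itertools
import sys
import threading
import time


class SpinnerSketch:
    def __enter__(self):
        self._done = threading.Event()
        self._thread = threading.Thread(target=self._spin, daemon=True)
        self._thread.start()
        return self

    def _spin(self):
        # rotate through the classic four-frame spinner until told to stop
        for char in itertools.cycle("|/-\\"):
            if self._done.is_set():
                break
            sys.stdout.write(char)
            sys.stdout.flush()
            sys.stdout.write("\b")
            time.sleep(0.1)

    def __exit__(self, exc_type, exc_value, exc_tb):
        self._done.set()
        self._thread.join()
        sys.stdout.write(" \b")  # erase the last frame
        return False  # never swallow exceptions raised by the wrapped check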
def scan(session: Session):
    reporter.register_data("url", session.url)
    reporter.register_data("domain", session.domain)

    output.empty()
    output.norm("HEAD:")

    head = network.http_head(session.url)
    raw = network.http_build_raw_response(head)
    for line in raw.splitlines():
        output.norm(f"\t{line}")

    output.empty()

    res = http_basic.get_header_issues(head, raw, session.url)
    if res:
        output.norm("Header Issues:")
        reporter.display_results(res, "\t")
        output.empty()

    res = http_basic.get_cookie_issues(head, session.url)
    if res:
        output.norm("Cookie Issues:")
        reporter.display_results(res, "\t")
        output.empty()

    # check for WAF signatures
    res = waf.get_waf(head.headers, raw, session.url)
    if res:
        output.norm("WAF Detection:")
        reporter.display_results(res, "\t")
        output.empty()

    # check the HSTS preload status
    results = http_basic.check_hsts_preload(session.url)
    if len(results) > 0:
        reporter.register_data("hsts_preload_status", results)

        output.norm("HSTS Preload Status:")
        for result in results:
            chrome = result["chrome"] is not None
            firefox = result["firefox"] is not None
            tor = result["tor"] is not None

            output.norm(
                f"\t({result['domain']}) Chrome: {chrome}\tFirefox: {firefox}\t\tTor: {tor}"
            )
        output.empty()

    methods, res = http_basic.check_http_methods(session.url)
    if len(methods) == 0:
        output.norm("Server responds to invalid HTTP methods - check skipped.")
    else:
        reporter.register_data({"http_methods_supported": methods})

        output.norm("Supported HTTP methods:")
        for method in methods:
            output.norm(f"\t{method}")

    output.empty()

    if res:
        reporter.display_results(res, "\t")
        output.empty()

    output.norm("Performing vulnerability scan (this will take a while)...")

    links: List[str] = []
    with Spinner():
        try:
            links, res = spider.spider(session.url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Error running scan: {str(error)}")

    output.norm(f"Identified {len(links) + 1} pages.")
    output.empty()

    if res:
        output.norm("Issues Detected:")
        reporter.display_results(res, "\t")
        output.empty()

    # get files, and add those to the link list
    links += _file_search(session, links)

    if (
        session.args.pass_reset_page is not None
        and len(session.args.pass_reset_page) > 0
    ):
        _check_password_reset(session)

    with Spinner():
        res = http_basic.check_local_ip_disclosure(session)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_httpd.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_tomcat.check_all(session.url, links)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = nginx.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = iis.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_propfind(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_trace(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_options(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = php.find_phpinfo(links)
    if res:
        reporter.display_results(res, "\t")

    if session.args.php_page is not None and len(session.args.php_page) > 0:
        with Spinner():
            res = php.check_cve_2019_11043(session, links)
        if res:
            reporter.display_results(res, "\t")

    with Spinner():
        res, jira_path = jira.check_for_jira(session)
    if res:
        reporter.display_results(res, "\t")

    if jira_path is not None:
        with Spinner():
            res = jira.check_jira_user_registration(jira_path)
        if res:
            reporter.display_results(res, "\t")

    with Spinner():
        wp_path, res = wordpress.identify(session.url)
    if res:
        reporter.display_results(res, "\t")

    if wp_path is not None:
        with Spinner():
            res = wordpress.check_json_user_enum(wp_path)
            res += wordpress.check_path_disclosure(wp_path)
        if res:
            reporter.display_results(res, "\t")
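# The HSTS preload block in the revision above reads result["domain"],
# result["chrome"], result["firefox"], and result["tor"], and only ever tests
# each browser entry against None, so check_hsts_preload() presumably returns
# a list of dicts of that shape. The sample below is hypothetical, inferred
# from those field accesses purely for illustration.
sample_results = [
    {"domain": "example.com", "chrome": {"policy": "bulk-18-weeks"}, "firefox": None, "tor": None}
]
for result in sample_results:
    chrome = result["chrome"] is not None  # any non-None entry means preloaded
    tor = result["tor"] is not None        # None means absent from that list
    print(f"({result['domain']}) Chrome: {chrome}\tTor: {tor}")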
def scan(args: Namespace, url: str, domain: str):
    reporter.register_data("url", url)
    reporter.register_data("domain", domain)

    output.empty()
    output.norm("HEAD:")

    head = network.http_head(url)
    raw = network.http_build_raw_response(head)
    for line in raw.splitlines():
        output.norm(f"\t{line}")

    output.empty()

    res = http_basic.get_header_issues(head, raw, url)
    if len(res) > 0:
        output.norm("Header Issues:")
        reporter.display_results(res, "\t")
        output.empty()

    res = http_basic.get_cookie_issues(head, raw, url)
    if len(res) > 0:
        output.norm("Cookie Issues:")
        reporter.display_results(res, "\t")
        output.empty()

    # check for WAF signatures
    res = waf.get_waf(head.headers, raw, url)
    if len(res) > 0:
        output.norm("WAF Detection:")
        reporter.display_results(res, "\t")
        output.empty()

    output.norm("Performing vulnerability scan (this will take a while)...")

    links: List[str] = []
    with Spinner():
        try:
            links, res = spider.spider(url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Error running scan: {str(error)}")

    output.norm(f"Identified {len(links) + 1} pages.")
    output.empty()

    if len(res) > 0:
        output.norm("Issues Detected:")
        reporter.display_results(res, "\t")
        output.empty()

    # get files, and add those to the link list
    links += _file_search(args, url, links)

    res = apache_httpd.check_all(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = apache_tomcat.check_all(url, links)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = nginx.check_all(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = iis.check_all(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = http_basic.check_propfind(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = http_basic.check_trace(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = http_basic.check_options(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    wp_path, res = wordpress.identify(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    if wp_path is not None:
        res = wordpress.check_json_user_enum(wp_path)
        if len(res) > 0:
            reporter.display_results(res, "\t")
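# Every scan() revision above repeats the same two-step pattern per plugin:
# run a check (in the newer revisions, inside a Spinner) and display whatever
# results come back. One possible factoring is sketched below; `Spinner` and
# `reporter` are the codebase's own names, while `_run_check` is a
# hypothetical helper that does not exist in the original code.
from typing import Callable, List


def _run_check(check: Callable[[], List]) -> None:
    # run the (potentially slow) check with a progress spinner, then report
    with Spinner():
        res = check()
    if res:
        reporter.display_results(res, "\t")


# usage sketch, with names taken from the code above:
# _run_check(lambda: nginx.check_all(session.url))
# _run_check(lambda: http_basic.check_trace(session.url))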