def check_redirect(session: Session):
    # perform some connection testing
    if session.url_parsed.scheme == "http":
        session.supports_http = True

        try:
            # check for TLS redirect
            tls_redirect = network.check_ssl_redirect(session.url)
            if tls_redirect is not None and tls_redirect != session.url:
                print(f"Server redirects to TLS: Scanning: {tls_redirect}")

                session.update_url(tls_redirect)
                session.redirects_https = True
        except Exception:
            output.debug_exception()

            # we tried to connect to port 80, and it failed - this could mean a
            # couple of things; first, we need to see if it answers on 443
            session.update_scheme("https")

            print("Server does not respond to HTTP, switching to HTTPS")
            print()
            print(f"Scanning: {session.url}")

            # grab the head, to see if we get anything
            try:
                network.http_head(session.url, timeout=5)

                session.supports_https = True
                session.supports_http = False

                print()
            except Exception as err:
                output.debug_exception()

                raise ValueError(
                    f"Fatal Error: Cannot connect to {session.url} ({str(err)})"
                )
    else:
        session.supports_https = True

        # if we are scanning HTTPS, try HTTP to see what it does
        try:
            network.http_head(session.get_http_url(), timeout=5)

            session.supports_http = True

            print("Server responds to HTTP requests")
            print()
        except Exception:
            output.debug_exception()

            print("Server does not respond to HTTP requests")
            print()

    # check for www redirect
    www_redirect = network.check_www_redirect(session.url)
    if www_redirect is not None and www_redirect != session.url:
        print(f"Server performs WWW redirect: Scanning: {www_redirect}")
        session.update_url(www_redirect)
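# Usage sketch (hypothetical wiring - the Session constructor shown below is assumed,
# not defined here): check_redirect mutates the session in place, so callers read the
# possibly-updated URL off the session afterwards rather than from a return value.
#
#   session = Session(args, "http://example.com")   # assumed constructor signature
#   check_redirect(session)                         # may raise ValueError if unreachable
#   print(session.url)                              # now the https:// and/or www. variant
#   print(session.supports_http, session.supports_https, session.redirects_https)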
def check_path_disclosure(wp_url: str) -> List[Result]:
    # this is a list of files that are known to throw a fatal error when accessed directly
    # this is from a manual review of all plugins with at least 1M installs
    urls = [
        "wp-content/plugins/hello.php",
        "wp-content/plugins/akismet/akismet.php",
        "wp-content/plugins/contact-form-7/includes/capabilities.php",
        "wp-content/plugins/wordpress-seo/admin/views/partial-alerts-errors.php",
        "wp-content/plugins/jetpack/load-jetpack.php",
        "wp-content/plugins/jetpack/uninstall.php",
        "wp-content/plugins/duplicate-post/duplicate-post-admin.php",
        "wp-content/plugins/wpforms-lite/includes/admin/class-welcome.php",
        "wp-content/plugins/wp-google-maps/base/includes/welcome.php",
        "wp-content/plugins/wp-super-cache/wp-cache.php",
        "wp-content/plugins/mailchimp-for-wp/integrations/wpforms/bootstrap.php",
        "wp-content/plugins/mailchimp-for-wp/integrations/bootstrap.php",
        "wp-content/plugins/regenerate-thumbnails/regenerate-thumbnails.php",
        "wp-content/plugins/advanced-custom-fields/includes/deprecated.php",
        "wp-content/plugins/redirection/redirection.php",
        "wp-content/plugins/wpforms-lite/includes/admin/importers/class-ninja-forms.php",
        "wp-content/plugins/ninja-forms/includes/deprecated.php",
        "wp-content/plugins/so-widgets-bundle/so-widgets-bundle.php",
        "wp-content/plugins/wp-fastest-cache/templates/preload.php",
        "wp-content/plugins/duplicate-page/duplicatepage.php",
        "wp-content/plugins/better-wp-security/better-wp-security.php",
        "wp-content/plugins/all-in-one-wp-security-and-firewall/other-includes/wp-security-unlock-request.php",
        "wp-content/plugins/related-posts/views/settings.php",
        "wp-content/plugins/wpcontentguard/views/settings.php",
        "wp-content/plugins/simple-social-icons/simple-social-icons.php",
    ]
    results: List[Result] = []

    for url in urls:
        target = urljoin(wp_url, url)

        head = network.http_head(target, False)
        if head.status_code != 404:
            resp = network.http_get(target, False)
            if resp.status_code < 300 or resp.status_code >= 500:
                # we have some kind of response that could be useful
                if "<b>Fatal error</b>:" in resp.text:
                    # we have an error - try to extract the disclosed filesystem path
                    # (non-greedy match, with the dot before "php" escaped, so multiple
                    # paths on one page don't get merged into a single bogus match)
                    pattern = r"<b>((\/|[A-Z]:\\).*?\.php)<\/b>"
                    if re.search(pattern, resp.text):
                        try:
                            path = re.findall(pattern, resp.text)[0][0]
                            results.append(
                                Result.from_evidence(
                                    Evidence.from_response(resp, {"path": path}),
                                    f"WordPress File Path Disclosure: {target} ({path})",
                                    Vulnerabilities.APP_WORDPRESS_PATH_DISCLOSURE,
                                )
                            )
                        except Exception:
                            output.debug_exception()

            results += response_scanner.check_response(target, resp)

    return results
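# Illustration only (not called by the scanner): how the disclosure pattern above behaves
# against a typical PHP fatal-error page. The sample HTML is fabricated for this example.
def _demo_path_disclosure_pattern() -> None:
    import re

    sample = (
        "<br /><b>Fatal error</b>: Uncaught Error: Call to undefined function "
        "add_action() in <b>/var/www/html/wp-content/plugins/hello.php</b> on line <b>3</b>"
    )
    pattern = r"<b>((\/|[A-Z]:\\).*?\.php)<\/b>"

    # findall returns one tuple per match, with the full path in the first group
    found = re.findall(pattern, sample)
    assert found[0][0] == "/var/www/html/wp-content/plugins/hello.php"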
def _check_url(urls: List[str], queue, follow_redirections, recursive) -> None:
    files: List[str] = []
    results: List[Result] = []

    for url in urls:
        try:
            # get the HEAD first, we only really care about actual files
            res = network.http_head(url, False)

            if res.status_code < 300:
                # run a scan on the full result, so we can ensure that we get any issues
                results += response_scanner.check_response(
                    url, network.http_get(url, False)
                )

                files.append(url)

                if recursive:
                    found_files, found_results = find_directories(
                        url, follow_redirections, recursive
                    )
                    files.extend(found_files)
                    results.extend(found_results)
            elif res.status_code < 400 and follow_redirections:
                if "Location" in res.headers:
                    _check_url(
                        [res.headers["Location"]], queue, follow_redirections, recursive
                    )
        except Exception as error:
            output.debug(f"Error checking URL ({url}): {str(error)}")

    queue.put((files, results))
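# Minimal driver sketch (assumptions flagged): _check_url reports through the queue
# instead of returning, which implies it is meant to run as a worker. The target URL
# and single-thread setup below are illustrative only, not how the scanner wires it up.
def _demo_check_url_worker() -> None:
    from queue import Queue
    from threading import Thread

    q: Queue = Queue()
    worker = Thread(
        target=_check_url,
        args=(["https://example.com/backup/"], q, True, False),  # hypothetical target
    )
    worker.start()
    worker.join()

    files, results = q.get()  # one (files, results) tuple per worker
    print(f"{len(files)} files, {len(results)} findings")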
def check_cve_2019_11043(session: Session, links: List[str]) -> List[Result]:
    min_qsl = 1500
    max_qsl = 1950
    qsl_step = 5
    results: List[Result] = []
    targets = []

    if session.args.php_page is not None and len(session.args.php_page) > 0:
        php_page = str(session.args.php_page)

        if php_page.startswith("http://") or php_page.startswith("https://"):
            targets.append(urljoin(session.url, php_page))
        elif php_page.startswith(session.url):
            targets.append(php_page)

    for link in links:
        if link.endswith(".php"):
            targets.append(link)
        elif link.endswith("/"):
            targets.append(f"{link}index.php")

    def _get_resp(url: str, q_count: int) -> Response:
        path_info = "/PHP\nindex.php"
        u = urlparse(url)
        orig_path = quote(u.path)
        new_path = quote(u.path + path_info)
        delta = len(new_path) - len(path_info) - len(orig_path)
        prime = q_count - delta / 2

        req_url = urljoin(url, new_path + "?" + "Q" * int(prime))

        return network.http_get(req_url, False)

    for target in targets:
        # start by making sure that we have a valid target
        if network.http_head(target, False).status_code < 400:
            # get our baseline status code
            res = _get_resp(target, min_qsl)
            base_status_code = res.status_code

            for qsl in range(min_qsl + qsl_step, max_qsl, qsl_step):
                res = _get_resp(target, qsl)

                if res.status_code != base_status_code:
                    results.append(
                        Result.from_evidence(
                            Evidence.from_response(res, {"qsl": qsl}),
                            f"Detected susceptibility to PHP Remote Code Execution "
                            f"(CVE-2019-11043) (QSL {qsl})",
                            Vulnerabilities.SERVER_PHP_CVE_2019_11043,
                        )
                    )

                    break

    return results
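# Worked example of the arithmetic in _get_resp above (values computed, not measured;
# the target URL is hypothetical). For url = "https://example.com/index.php":
#
#   orig_path = quote("/index.php")                -> "/index.php"                 (len 10)
#   new_path  = quote("/index.php/PHP\nindex.php") -> "/index.php/PHP%0Aindex.php" (len 26)
#   delta     = 26 - len("/PHP\nindex.php") - 10   =  26 - 14 - 10 = 2
#   prime     = q_count - 2 / 2                    =  q_count - 1
#
# delta is the growth from percent-encoding ("\n" becomes "%0A", +2 chars), so the
# number of "Q" characters is trimmed by one per encoded character, keeping the
# effective query-string length (QSL) at the intended value as the loop sweeps
# 1505..1945 in steps of 5, looking for a status-code change against the baseline.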
def scan(session: Session):
    reporter.register_data("url", session.url)
    reporter.register_data("domain", session.domain)

    output.empty()
    output.norm("HEAD:")

    head = network.http_head(session.url)

    raw = network.http_build_raw_response(head)
    for line in raw.splitlines():
        output.norm(f"\t{line}")

    output.empty()

    res = http_basic.get_header_issues(head, raw, session.url)
    if res:
        output.norm("Header Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    res = http_basic.get_cookie_issues(head, session.url)
    if res:
        output.norm("Cookie Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    # check for WAF signatures
    res = waf.get_waf(head.headers, raw, session.url)
    if res:
        output.norm("WAF Detection:")

        reporter.display_results(res, "\t")
        output.empty()

    output.norm("Performing vulnerability scan (this will take a while)...")

    links: List[str] = []
    with Spinner():
        try:
            links, res = spider.spider(session.url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Error running scan: {str(error)}")

    output.norm(f"Identified {len(links) + 1} pages.")
    output.empty()

    if res:
        output.norm("Issues Detected:")

        reporter.display_results(res, "\t")
        output.empty()

    # get files, and add those to the link list
    links += _file_search(session, links)

    if (
        session.args.pass_reset_page is not None
        and len(session.args.pass_reset_page) > 0
    ):
        _check_password_reset(session)

    with Spinner():
        res = http_basic.check_local_ip_disclosure(session)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_httpd.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_tomcat.check_all(session.url, links)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = nginx.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = iis.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_propfind(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_trace(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_options(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = php.find_phpinfo(links)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res, jira_path = jira.check_for_jira(session)
    if res:
        reporter.display_results(res, "\t")

    if jira_path is not None:
        with Spinner():
            res = jira.check_jira_user_registration(jira_path)
        if res:
            reporter.display_results(res, "\t")

    with Spinner():
        wp_path, res = wordpress.identify(session.url)
    if res:
        reporter.display_results(res, "\t")

    if wp_path is not None:
        with Spinner():
            res = wordpress.check_json_user_enum(wp_path)
            res += wordpress.check_path_disclosure(wp_path)
        if res:
            reporter.display_results(res, "\t")
def scan(session: Session):
    reporter.register_data("url", session.url)
    reporter.register_data("domain", session.domain)

    output.empty()
    output.norm("HEAD:")

    head = network.http_head(session.url)

    raw = network.http_build_raw_response(head)
    for line in raw.splitlines():
        output.norm(f"\t{line}")

    output.empty()

    res = http_basic.get_header_issues(head, raw, session.url)
    if res:
        output.norm("Header Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    res = http_basic.get_cookie_issues(head, session.url)
    if res:
        output.norm("Cookie Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    # check for WAF signatures
    res = waf.get_waf(head.headers, raw, session.url)
    if res:
        output.norm("WAF Detection:")

        reporter.display_results(res, "\t")
        output.empty()

    # check the HSTS preload status
    results = http_basic.check_hsts_preload(session.url)
    if len(results) > 0:
        reporter.register_data("hsts_preload_status", results)

        output.norm("HSTS Preload Status:")
        for result in results:
            chrome = result["chrome"] is not None
            firefox = result["firefox"] is not None
            tor = result["tor"] is not None

            output.norm(
                f"\t({result['domain']}) Chrome: {chrome}\tFirefox: {firefox}\t\tTor: {tor}"
            )
        output.empty()

    methods, res = http_basic.check_http_methods(session.url)
    if len(methods) == 0:
        output.norm("Server responds to invalid HTTP methods - check skipped.")
    else:
        # register_data takes a key and a value, matching its use elsewhere
        reporter.register_data("http_methods_supported", methods)

        output.norm("Supported HTTP methods:")
        for method in methods:
            output.norm(f"\t{method}")
    output.empty()

    if res:
        reporter.display_results(res, "\t")
        output.empty()

    output.norm("Performing vulnerability scan (this will take a while)...")

    links: List[str] = []
    with Spinner():
        try:
            links, res = spider.spider(session.url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Error running scan: {str(error)}")

    output.norm(f"Identified {len(links) + 1} pages.")
    output.empty()

    if res:
        output.norm("Issues Detected:")

        reporter.display_results(res, "\t")
        output.empty()

    # get files, and add those to the link list
    links += _file_search(session, links)

    if (
        session.args.pass_reset_page is not None
        and len(session.args.pass_reset_page) > 0
    ):
        _check_password_reset(session)

    with Spinner():
        res = http_basic.check_local_ip_disclosure(session)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_httpd.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = apache_tomcat.check_all(session.url, links)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = nginx.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = iis.check_all(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_propfind(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_trace(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = http_basic.check_options(session.url)
    if res:
        reporter.display_results(res, "\t")

    with Spinner():
        res = php.find_phpinfo(links)
    if res:
        reporter.display_results(res, "\t")

    if session.args.php_page is not None and len(session.args.php_page) > 0:
        with Spinner():
            res = php.check_cve_2019_11043(session, links)
        if res:
            reporter.display_results(res, "\t")

    with Spinner():
        res, jira_path = jira.check_for_jira(session)
    if res:
        reporter.display_results(res, "\t")

    if jira_path is not None:
        with Spinner():
            res = jira.check_jira_user_registration(jira_path)
        if res:
            reporter.display_results(res, "\t")

    with Spinner():
        wp_path, res = wordpress.identify(session.url)
    if res:
        reporter.display_results(res, "\t")

    if wp_path is not None:
        with Spinner():
            res = wordpress.check_json_user_enum(wp_path)
            res += wordpress.check_path_disclosure(wp_path)
        if res:
            reporter.display_results(res, "\t")
def _get_resp(url: str) -> Response:
    return network.http_head(url, False)
def start(args, url):
    print(f"Scanning: {url}")

    # parse the URL, we'll need this
    parsed = urlparse(url)

    # get rid of any port number & credentials that may exist
    domain = utils.get_domain(parsed.netloc)

    # make sure it resolves
    try:
        socket.gethostbyname(domain)
    except socket.gaierror as error:
        print(f"Fatal Error: Unable to resolve {domain} ({str(error)})")

        return

    # perform some connection testing
    if parsed.scheme == "http":
        try:
            # check for TLS redirect
            tls_redirect = network.check_ssl_redirect(url)
            if tls_redirect is not None and tls_redirect != url:
                print(f"Server redirects to TLS: Scanning: {tls_redirect}")

                url = tls_redirect
                parsed = urlparse(url)
        except Exception:
            output.debug_exception()

            # we tried to connect to port 80, and it failed - this could mean a
            # couple of things; first, we need to see if it answers on 443
            parsed = parsed._replace(scheme="https")
            url = urlunparse(parsed)

            print("Server does not respond to HTTP, switching to HTTPS")
            print()
            print(f"Scanning: {url}")

            # grab the head, to see if we get anything
            try:
                network.http_head(url, timeout=5)

                print()
            except Exception as err:
                output.debug_exception()

                print(f"Fatal Error: Cannot connect to {url} ({str(err)})")

                return
    else:
        # if we are scanning HTTPS, try HTTP to see what it does
        try:
            http_parsed = parsed._replace(scheme="http")
            http_url = urlunparse(http_parsed)

            network.http_head(http_url, timeout=5)

            print("Server responds to HTTP requests")
            print()
        except Exception:
            output.debug_exception()

            print("Server does not respond to HTTP requests")
            print()

    # check for www redirect
    www_redirect = network.check_www_redirect(url)
    if www_redirect is not None and www_redirect != url:
        print(f"Server performs WWW redirect: Scanning: {www_redirect}")

        url = www_redirect

    if not args.nodns:
        dns.scan(args, url, domain)

    # check to see if we are looking at an HTTPS server
    if parsed.scheme == "https" and not args.nossl:
        if args.internalssl or utils.is_ip(domain) or utils.get_port(url) != 443:
            # use internal scanner
            ssl_internal.scan(args, url, domain)
        else:
            try:
                ssl_labs.scan(args, url, domain)
            except Exception as error:
                output.debug_exception()

                output.error(f"Error running scan with SSL Labs: {str(error)}")

        if args.tdessessioncount:
            ssl_sweet32.scan(args, url, domain)

    http.scan(args, url, domain)

    # reset any stored data
    http.reset()

    return
def scan(args: Namespace, url: str, domain: str):
    reporter.register_data("url", url)
    reporter.register_data("domain", domain)

    output.empty()
    output.norm("HEAD:")

    head = network.http_head(url)

    raw = network.http_build_raw_response(head)
    for line in raw.splitlines():
        output.norm(f"\t{line}")

    output.empty()

    res = http_basic.get_header_issues(head, raw, url)
    if len(res) > 0:
        output.norm("Header Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    res = http_basic.get_cookie_issues(head, raw, url)
    if len(res) > 0:
        output.norm("Cookie Issues:")

        reporter.display_results(res, "\t")
        output.empty()

    # check for WAF signatures
    res = waf.get_waf(head.headers, raw, url)
    if len(res) > 0:
        output.norm("WAF Detection:")

        reporter.display_results(res, "\t")
        output.empty()

    output.norm("Performing vulnerability scan (this will take a while)...")

    links: List[str] = []
    with Spinner():
        try:
            links, res = spider.spider(url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Error running scan: {str(error)}")

    output.norm(f"Identified {len(links) + 1} pages.")
    output.empty()

    if len(res) > 0:
        output.norm("Issues Detected:")

        reporter.display_results(res, "\t")
        output.empty()

    # get files, and add those to the link list
    links += _file_search(args, url, links)

    res = apache_httpd.check_all(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = apache_tomcat.check_all(url, links)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = nginx.check_all(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = iis.check_all(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = http_basic.check_propfind(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = http_basic.check_trace(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    res = http_basic.check_options(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    wp_path, res = wordpress.identify(url)
    if len(res) > 0:
        reporter.display_results(res, "\t")

    if wp_path is not None:
        res = wordpress.check_json_user_enum(wp_path)
        if len(res) > 0:
            reporter.display_results(res, "\t")