def check_www_redirect(url):
    parsed = urlparse(url)
    req = http_head(url, False)

    # make sure we received a redirect response
    if req.status_code >= 300 and req.status_code < 400:
        location = req.headers.get("location")

        if location is None:
            return url

        try:
            parsed_location = urlparse(location)
            location_domain = utils.get_domain(parsed_location.netloc)
            domain = utils.get_domain(parsed.netloc)

            if (
                domain.startswith("www")
                and (not location_domain.startswith("www"))
                and location_domain in domain
            ):
                parsed_location = parsed._replace(netloc=parsed_location.netloc)

                return urlunparse(parsed_location)
            elif (
                (not domain.startswith("www"))
                and location_domain.startswith("www")
                and domain in location_domain
            ):
                parsed_location = parsed._replace(netloc=parsed_location.netloc)

                return urlunparse(parsed_location)
        except ValueError:
            return url
    else:
        return url

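# Minimal stdlib sketch of the netloc swap in check_www_redirect: _replace()
# keeps the original scheme, path, and query, and exchanges only the host.
# "www.example.com" is an illustrative name, not taken from the scanner.
from urllib.parse import urlparse, urlunparse

_parsed = urlparse("https://www.example.com/path?q=1")
_swapped = _parsed._replace(netloc="example.com")
assert urlunparse(_swapped) == "https://example.com/path?q=1"
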
def command_ssl(args, urls):
    for raw_url in urls:
        url = utils.extract_url(raw_url)

        reporter.setup(utils.get_domain(url))

        ssl.start(args, url)

def command_dns(args, urls):
    for raw_url in urls:
        url = utils.extract_url(raw_url)

        reporter.setup(utils.get_domain(url))

        session = Session(args, url)
        dns.start(session)

def check_404_response(url: str) -> Tuple[bool, Response, bool, Response]:
    domain = utils.get_domain(url)
    _get_404_handling(domain, url)

    return (
        _file_not_found_handling[domain]["file"],
        _file_not_found_handling[domain]["file_res"],
        _file_not_found_handling[domain]["path"],
        _file_not_found_handling[domain]["path_res"],
    )

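# Hedged sketch (an assumption, not the real helper): _get_404_handling above
# is treated here as a memoizing probe - it fills _file_not_found_handling for
# a domain on first call and becomes a no-op afterwards. The dict shape below
# is illustrative only.
_probe_cache: dict = {}

def _probe_404_handling_sketch(domain: str) -> dict:
    if domain not in _probe_cache:
        # first sighting of this host: record how it answers a missing file
        _probe_cache[domain] = {
            "file": True,
            "file_res": None,
            "path": True,
            "path_res": None,
        }

    return _probe_cache[domain]
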
def start(args, url):
    print(f"Scanning: {url}")

    # parse the URL, we'll need this
    parsed = urlparse(url)
    # get rid of any port number & credentials that may exist
    domain = utils.get_domain(parsed.netloc)

    # make sure it resolves
    try:
        socket.gethostbyname(domain)
    except socket.gaierror as error:
        print(f"Fatal Error: Unable to resolve {domain} ({str(error)})")

        return

    if parsed.scheme == "http":
        try:
            # check for TLS redirect
            tls_redirect = network.check_ssl_redirect(url)
            if tls_redirect != url:
                print(f"Server redirects to TLS: Scanning: {tls_redirect}")

                url = tls_redirect
                parsed = urlparse(url)
        except Exception as error:
            output.debug_exception()
            output.error(f"Failed to connect to {url} ({str(error)})")

            return

    www_redirect = network.check_www_redirect(url)
    if www_redirect is not None and www_redirect != url:
        print(f"Server performs WWW redirect: Scanning: {www_redirect}")

        url = www_redirect
        parsed = urlparse(url)

    # check to see if we are looking at an HTTPS server
    if parsed.scheme == "https":
        if args.internalssl or utils.is_ip(domain) or utils.get_port(url) != 443:
            # use internal scanner
            ssl_internal.scan(args, url, domain)
        else:
            try:
                ssl_labs.scan(args, url, domain)
            except Exception as error:
                output.debug_exception()
                output.error(f"Error running scan with SSL Labs: {str(error)}")

        if args.tdessessioncount:
            ssl_sweet32.scan(args, url, domain)

def _get_ip(res: HTTPResponse) -> Union[str, None]:
    loc = res.getheader("Location")

    if loc is not None:
        # it's a redirect, check to see if there's an IP in it
        parsed = urlparse(loc)
        domain = utils.get_domain(parsed.netloc)

        if utils.is_ip(domain):
            # it's an IP, now, is it private?
            if utils.is_private_ip(domain):
                return domain
            else:
                return None

    return None

def check_hsts_preload(url: str) -> List[dict]:
    hsts_service = "https://hstspreload.com/api/v1/status/"

    results: List[dict] = []
    domain = utils.get_domain(url)

    if not checkers.is_ip_address(domain):
        while domain.count(".") > 0:
            # get the HSTS preload status for the domain
            res, _ = network.http_json(f"{hsts_service}{domain}")
            results.append(res)

            domain = domain.split(".", 1)[-1]
            if PublicSuffixList().is_public(domain):
                break

    return results

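# Stdlib-only sketch of the label walk above: each pass drops the leftmost
# label, so a deep hostname is checked at every level up to the public suffix.
# "a.b.example.com" is illustrative, and the PublicSuffixList break is elided
# here - for this name both versions visit the same three domains.
_name = "a.b.example.com"
_checked = []
while _name.count(".") > 0:
    _checked.append(_name)
    _name = _name.split(".", 1)[-1]
assert _checked == ["a.b.example.com", "b.example.com", "example.com"]
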
def get_results(soup: BeautifulSoup, url: str, res: Response) -> List[Result]:
    global _reports

    results: List[Result] = []

    try:
        parsed = urlparse(url)
        domain = utils.get_domain(parsed.netloc)

        issues, r = _get_retirejs_results(soup, url, domain, res)
        results += r

        for js_url, issue in issues:
            comp = issue["component"]
            ver = issue["version"]

            if "vulnerabilities" in issue:
                for vuln in issue["vulnerabilities"]:
                    info = (
                        f'Vulnerable JavaScript: {comp}-{ver} ({js_url}): Severity: {vuln["severity"]} - '
                        f'Info: {" ".join(vuln["info"])}'
                    )

                    # make sure we haven't reported this issue before
                    if info not in _reports:
                        _reports.append(info)

                        results.append(
                            Result.from_evidence(
                                Evidence.from_response(
                                    res,
                                    {
                                        "js_file": js_url,
                                        "js_lib": comp,
                                        "js_lib_ver": ver,
                                        "vuln_info": list(vuln["info"]),
                                        "vuln_sev": vuln["severity"],
                                    },
                                ),
                                info,
                                Vulnerabilities.JS_VULNERABLE_VERSION,
                            )
                        )
    except Exception:
        output.debug_exception()

    return results

def start(args, url):
    print(f"Scanning: {url}")

    # parse the URL, we'll need this
    parsed = urlparse(url)
    # get rid of any port number & credentials that may exist
    domain = utils.get_domain(parsed.netloc)

    # make sure it resolves
    try:
        socket.gethostbyname(domain)
    except socket.gaierror as error:
        print(f"Fatal Error: Unable to resolve {domain} ({str(error)})")

        return

    dns.scan(args, url, domain)

def check_telerik_rau_enabled(soup: BeautifulSoup, url: str) -> List[Result]:
    results: List[Result] = []

    parsed = urlparse(url)
    domain = utils.get_domain(parsed.netloc)

    try:
        # get all the scripts
        files = [i.get("src") for i in soup.find_all("script") if i.get("src")]

        for file in files:
            if "Telerik.Web.UI.WebResource.axd" in file:
                # ok, they are using Telerik UI for ASP.NET AJAX
                # fix-up the URL
                if str(file).startswith("//"):
                    file = f"https:{file}"
                if str(file).startswith("/") or (not str(file).startswith("http")):
                    if parsed.scheme == "https":
                        file = urljoin(f"https://{domain}", file)
                    else:
                        file = urljoin(f"http://{domain}", file)

                target = urlparse(file)
                target = target._replace(query="type=rau")

                if domain in target.netloc:
                    res = network.http_get(urlunparse(target), False)

                    # NOTE: Typo in "succesfully" is intentional - do not fix
                    if (
                        "RadAsyncUpload handler is registered succesfully"
                        in res.text
                    ):
                        results.append(
                            Result.from_evidence(
                                Evidence.from_response(res, {"original_url": url}),
                                f"Telerik UI for ASP.NET AJAX RadAsyncUpload Enabled "
                                f"(Check for CVE-2019-18935) at {urlunparse(target)}",
                                Vulnerabilities.APP_TELERIK_UI_RAD_ASYNC_UPLOAD_ENABLED,
                            )
                        )

                        break
    except Exception:
        output.debug_exception()

    return results

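# Minimal sketch of the src fix-ups above using stdlib urljoin; the hosts are
# illustrative. Root-relative script paths gain a scheme and domain, while
# protocol-relative values ("//host/...") only need a scheme prefix.
from urllib.parse import urljoin

assert (
    urljoin("https://example.com", "/Telerik.Web.UI.WebResource.axd")
    == "https://example.com/Telerik.Web.UI.WebResource.axd"
)
assert "https:" + "//cdn.example.com/app.js" == "https://cdn.example.com/app.js"
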
def update_url(self, url: str):
    self.url = url
    self.url_parsed = urlparse(url)
    self.domain = utils.get_domain(self.url_parsed.netloc)

def __init__(self, args: Namespace, url: str):
    self.args = args
    self.url = url
    self.url_parsed = urlparse(url)
    self.domain = utils.get_domain(self.url_parsed.netloc)

def test_get_domain_ipv6_port(self):
    self.assertEqual(
        "[3ffe:2a00:100:7031::1]", get_domain("[3ffe:2a00:100:7031::1]:80")
    )

def test_get_domain_ipv4_port(self):
    self.assertEqual("127.0.0.1", get_domain("127.0.0.1:80"))

def test_get_domain_ipv4_creds_port(self):
    self.assertEqual("127.0.0.1", get_domain("user:pass@127.0.0.1:80"))

def test_get_domain_creds_port(self):
    self.assertEqual("adamcaudill.com", get_domain("user:pass@adamcaudill.com:80"))

def http_file_exists(
    url: str, allow_redirects=True, timeout: Optional[int] = 30
) -> Tuple[bool, Response]:
    # first, check our 404 handling
    domain = utils.get_domain(url)
    _get_404_handling(domain, url)

    if _file_not_found_handling[domain]["file"]:
        if _file_not_found_handling[domain]["head"]:
            # we have good HEAD handling - start with HEAD, as it's more efficient for us
            head = http_head(url, allow_redirects=allow_redirects, timeout=timeout)

            # check for OK, and for server-side errors
            if head.status_code == 200 or head.status_code >= 500:
                # file exists, grab it
                get = http_get(url, allow_redirects=allow_redirects, timeout=timeout)

                return True, get
            else:
                return False, head
        else:
            # HEAD isn't handled properly, default to GET
            get = http_get(url, allow_redirects=allow_redirects, timeout=timeout)

            return get.status_code == 200, get
    else:
        # the server doesn't handle 404s properly - there are a few different flavors of
        # this issue, and each version requires a different approach
        file_res = cast(Response, _file_not_found_handling[domain]["file_res"])

        if file_res.status_code == 200:
            # in this case, everything gets a 200, even if it doesn't exist.
            # to handle this, we need to look at the response, and see if we can work out
            # whether it's a file-not-found error, or something else.
            get = http_get(url, allow_redirects=allow_redirects, timeout=timeout)

            if response_body_is_text(file_res):
                if response_body_is_text(get):
                    # if the responses are identical, check that first, then move on to
                    # comparing - this would be caught by the code below, but this is faster
                    if file_res.content == get.content:
                        return False, get

                    # both are text, so we need to compare to see how similar they are
                    with ExecutionTimer() as tm:
                        ratio = SequenceMatcher(None, file_res.text, get.text).ratio()
                    output.debug(
                        f"Fuzzy Matching used. Text from known 404 and '{get.url}' compared in {tm.to_ms()}ms"
                    )

                    # check for a similarity of less than 90% between the known 404 and this
                    # response; if it's less than 90%, we assume the response is different,
                    # and we have a hit. this is somewhat error prone, as it depends on
                    # details of how the application works, though most errors should be
                    # very similar, so the false positive rate should be low.
                    if ratio < 0.9:
                        output.debug(
                            f"Fuzzy Matching used. Text from known 404 and '{get.url}' have a "
                            f"similarity of {ratio} - assuming valid file."
                        )

                        return True, get
                    else:
                        return False, get
                else:
                    # if file_res is text, and this isn't, it's safe to call this a valid hit
                    return True, get
            else:
                # this is a case that makes no sense. who knows what's going on here.
                return file_res.content == get.content, get
        elif file_res.status_code in range(300, 400):
            # they are sending a redirect on file-not-found.
            # we can't honor the allow_redirects flag, as we can't tell if it's a legit
            # redirect or an error - we should, though, get a 200 for valid hits
            get = http_get(url, allow_redirects=False, timeout=timeout)

            return get.status_code == 200, get
        elif file_res.status_code >= 400:
            # they are sending an error code that isn't 404 - we should still get a 200
            # on a valid hit
            get = http_get(url, allow_redirects=allow_redirects, timeout=timeout)

            return get.status_code == 200, get
        else:
            # shrug
            get = http_get(url, allow_redirects=allow_redirects, timeout=timeout)

            return get.status_code == 200, get

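# Self-contained demo of the fuzzy comparison above: difflib's SequenceMatcher
# returns a 0..1 similarity ratio, and responses scoring under the 0.9
# threshold are treated as "not the known 404 page". The sample bodies below
# are made up for illustration.
from difflib import SequenceMatcher

_known_404 = "<html><body><h1>404 - Page Not Found</h1></body></html>"
_candidate = "<html><body><h1>Admin Console Login</h1></body></html>"
_ratio = SequenceMatcher(None, _known_404, _candidate).ratio()
print(f"similarity: {_ratio:.2f}")  # well below 0.9 -> treated as a real file
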
def test_get_domain_port(self):
    self.assertEqual("adamcaudill.com", get_domain("adamcaudill.com:80"))

def test_get_domain_http(self):
    self.assertEqual("adamcaudill.com", get_domain("http://adamcaudill.com"))

def test_get_domain_clean(self):
    self.assertEqual("adamcaudill.com", get_domain("adamcaudill.com"))

def check_local_ip_disclosure(session: Session) -> List[Result]:
    def _send_http_10_get(
        con: Union[SslConnection, socket.socket]
    ) -> Tuple[str, HTTPResponse]:
        req = (
            "HEAD / HTTP/1.0\r\n"
            "User-Agent: {user_agent}\r\n"
            "Accept: */*\r\n\r\n".format(user_agent=network.YAWAST_UA)
        )

        if type(con) is SslConnection:
            con.ssl_client.write(req.encode("utf_8"))

            res = http_response_parser.HttpResponseParser.parse_from_ssl_connection(
                con.ssl_client
            )
        else:
            con.sendall(req.encode("utf_8"))

            res = http_response_parser.HttpResponseParser.parse_from_socket(con)

        return req, res

    def _resp_to_str(res: HTTPResponse) -> str:
        ver = "1.1" if res.version == 11 else "1.0"
        body = f"HTTP/{ver} {res.code} {res.reason}\r\n"
        for k, val in res.headers.items():
            body += f"{k}: {val}\r\n"

        return body

    def _get_ip(res: HTTPResponse) -> Union[str, None]:
        loc = res.getheader("Location")

        if loc is not None:
            # it's a redirect, check to see if there's an IP in it
            parsed = urlparse(loc)
            domain = utils.get_domain(parsed.netloc)

            if utils.is_ip(domain):
                # it's an IP, now, is it private?
                if utils.is_private_ip(domain):
                    return domain
                else:
                    return None

        return None

    def _get_result(client, prt):
        req, resp = _send_http_10_get(client)
        ip = _get_ip(resp)

        if ip is not None:
            results.append(
                Result(
                    f"Private IP Found: {ip} via HTTP 1.0 Redirect",
                    Vln.SERVER_INT_IP_EXP_HTTP10,
                    session.url,
                    {
                        "request": req,
                        "response": _resp_to_str(resp),
                        "ip": ip,
                        "port": prt,
                    },
                )
            )

    results: List[Result] = []

    if session.url_parsed.scheme == "https":
        conn_tester = server_connectivity_tester.ServerConnectivityTester(
            hostname=session.domain, port=utils.get_port(session.url)
        )
        server_info = conn_tester.perform()

        conn = ssl_connection_configurator.SslConnectionConfigurator.get_connection(
            ssl_version=OpenSslVersionEnum.SSLV23,
            server_info=server_info,
            should_ignore_client_auth=True,
            ssl_verify_locations=None,
            should_use_legacy_openssl=False,
        )

        try:
            conn.connect()
            _get_result(conn, utils.get_port(session.url))
        except Exception:
            output.debug_exception()

    if session.supports_http:
        url = session.get_http_url()
        port = utils.get_port(url)
        conn = socket.socket()
        conn.connect((utils.get_domain(url), port))

        try:
            _get_result(conn, port)
        except Exception:
            output.debug_exception()

    return results

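# Stdlib sketch of the private-IP test that _get_ip depends on; this assumes
# utils.is_private_ip matches the RFC 1918 / loopback semantics of
# ipaddress.ip_address(...).is_private. The addresses are illustrative.
import ipaddress

assert ipaddress.ip_address("10.0.0.5").is_private
assert ipaddress.ip_address("127.0.0.1").is_private
assert not ipaddress.ip_address("8.8.8.8").is_private
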
def test_get_domain_ipv6_clean(self):
    self.assertEqual(
        "[3ffe:2a00:100:7031::1]", get_domain("[3ffe:2a00:100:7031::1]")
    )

def test_get_domain_ipv4_clean(self):
    self.assertEqual("127.0.0.1", get_domain("127.0.0.1"))

def start(args, url):
    print(f"Scanning: {url}")

    # parse the URL, we'll need this
    parsed = urlparse(url)
    # get rid of any port number & credentials that may exist
    domain = utils.get_domain(parsed.netloc)

    # make sure it resolves
    try:
        socket.gethostbyname(domain)
    except socket.gaierror as error:
        print(f"Fatal Error: Unable to resolve {domain} ({str(error)})")

        return

    # perform some connection testing
    if parsed.scheme == "http":
        try:
            # check for TLS redirect
            tls_redirect = network.check_ssl_redirect(url)
            if tls_redirect != url:
                print(f"Server redirects to TLS: Scanning: {tls_redirect}")

                url = tls_redirect
                parsed = urlparse(url)
        except Exception:
            output.debug_exception()

            # we tried to connect to port 80, and it failed
            # this could mean a couple things, first, we need to
            # see if it answers to 443
            parsed = parsed._replace(scheme="https")
            url = urlunparse(parsed)
            print("Server does not respond to HTTP, switching to HTTPS")
            print()
            print(f"Scanning: {url}")

            # grab the head, to see if we get anything
            try:
                network.http_head(url, timeout=5)

                print()
            except Exception as err:
                output.debug_exception()
                print(f"Fatal Error: Can not connect to {url} ({str(err)})")

                return
    else:
        # if we are scanning HTTPS, try HTTP to see what it does
        try:
            http_parsed = parsed._replace(scheme="http")
            http_url = urlunparse(http_parsed)
            network.http_head(http_url, timeout=5)

            print("Server responds to HTTP requests")
            print()
        except Exception:
            output.debug_exception()
            print("Server does not respond to HTTP requests")
            print()

    # check for www redirect
    www_redirect = network.check_www_redirect(url)
    if www_redirect is not None and www_redirect != url:
        print(f"Server performs WWW redirect: Scanning: {www_redirect}")

        url = www_redirect

    if not args.nodns:
        dns.scan(args, url, domain)

    # check to see if we are looking at an HTTPS server
    if parsed.scheme == "https" and not args.nossl:
        if args.internalssl or utils.is_ip(domain) or utils.get_port(url) != 443:
            # use internal scanner
            ssl_internal.scan(args, url, domain)
        else:
            try:
                ssl_labs.scan(args, url, domain)
            except Exception as error:
                output.debug_exception()
                output.error(f"Error running scan with SSL Labs: {str(error)}")

        if args.tdessessioncount:
            ssl_sweet32.scan(args, url, domain)

    http.scan(args, url, domain)

    # reset any stored data
    http.reset()

    return

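# Stdlib sketch of the scheme fallback above: when port 80 never answers, the
# same URL is retried over TLS by swapping only the scheme; everything else in
# the parsed URL is preserved. "example.com" is illustrative.
from urllib.parse import urlparse, urlunparse

_p = urlparse("http://example.com/")._replace(scheme="https")
assert urlunparse(_p) == "https://example.com/"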