def test_check_404(self):
    network.init("", "", "X-Test=123")
    url = "https://adamcaudill.com/"
    output.setup(False, False, False)

    with utils.capture_sys_output() as (stdout, stderr):
        with requests_mock.Mocker() as m:
            # answer every request with a 200, so the server appears to lack
            # proper 404 handling; the check should cope without raising
            m.get(requests_mock.ANY, text="body", status_code=200)

            try:
                file_good, _, _, _ = network.check_404_response(url)
            except Exception as error:
                self.assertIsNone(error)

    self.assertNotIn("Exception", stderr.getvalue())
    self.assertNotIn("Error", stderr.getvalue())
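# A hedged companion sketch, not an existing test: the inverse case, where the
# mocked server does return a genuine 404 for unknown URLs. The tuple unpacking
# follows the (file_good, file_res, path_good, path_res) shape that
# _file_search() below relies on; the method name is an assumption.
def test_check_404_valid(self):
    network.init("", "", "X-Test=123")
    url = "https://adamcaudill.com/"
    output.setup(False, False, False)

    with utils.capture_sys_output() as (stdout, stderr):
        with requests_mock.Mocker() as m:
            m.get(requests_mock.ANY, text="not found", status_code=404)

            file_good, _, path_good, _ = network.check_404_response(url)

    # a real 404 for a missing file/path should set both flags
    self.assertTrue(file_good)
    self.assertTrue(path_good)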
def check_struts_sample(url: str) -> List[Result]:
    results: List[Result] = []

    try:
        # make sure we have real 404s
        file_good, _, _, _ = network.check_404_response(url)

        if not file_good:
            return results

        search = [
            "Struts2XMLHelloWorld/User/home.action",
            "struts2-showcase/showcase.action",
            "struts2-showcase/titles/index.action",
            "struts2-bootstrap-showcase/",
            "struts2-showcase/index.action",
            "struts2-bootstrap-showcase/index.action",
            "struts2-rest-showcase/",
        ]

        for path in search:
            target = urljoin(url, path)

            res = network.http_get(target, False)

            # check for other issues
            results += response_scanner.check_response(target, res)

            if res.status_code == 200:
                results.append(
                    Result(
                        f"Struts Sample Found: {target}",
                        Vulnerabilities.SERVER_TOMCAT_STRUTS_SAMPLE,
                        target,
                        [
                            network.http_build_raw_request(res.request),
                            network.http_build_raw_response(res),
                        ],
                    )
                )
    except Exception:
        output.debug_exception()

    return results
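# A minimal test sketch for check_struts_sample(), an assumption modeled on
# test_check_404 above rather than an existing test. With every request mocked
# as a 404, check_404_response() should report usable 404 handling, and none
# of the probed sample paths should be flagged as present.
def test_check_struts_sample(self):
    network.init("", "", "X-Test=123")
    url = "https://adamcaudill.com/"
    output.setup(False, False, False)

    with utils.capture_sys_output() as (stdout, stderr):
        with requests_mock.Mocker() as m:
            m.get(requests_mock.ANY, text="not found", status_code=404)

            try:
                results = check_struts_sample(url)
                self.assertIsInstance(results, list)
            except Exception as error:
                self.assertIsNone(error)

    self.assertNotIn("Exception", stderr.getvalue())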
def _file_search(session: Session, orig_links: List[str]) -> List[str]:
    new_files: List[str] = []
    file_good, file_res, path_good, path_res = network.check_404_response(session.url)

    # these are here for data typing
    results: Union[List[Result], None]
    links: Union[List[str], None]

    if not file_good:
        reporter.display(
            "Web server does not respond properly to file 404 errors.",
            Issue(
                Vulnerabilities.SERVER_INVALID_404_FILE,
                session.url,
                Evidence.from_response(file_res),
            ),
        )

    if not path_good:
        reporter.display(
            "Web server does not respond properly to path 404 errors.",
            Issue(
                Vulnerabilities.SERVER_INVALID_404_PATH,
                session.url,
                Evidence.from_response(path_res),
            ),
        )

    if not (file_good or path_good):
        output.norm(
            "Site does not respond properly to non-existent file/path requests; skipping some checks."
        )

    if file_good:
        links, results = special_files.check_special_files(session.url)
        if results:
            reporter.display_results(results, "\t")

        new_files += links

        if session.args.files:
            output.empty()
            output.norm("Searching for common files (this will take a few minutes)...")

            with Spinner():
                try:
                    links, results = file_search.find_files(session.url)
                except Exception as error:
                    output.debug_exception()
                    output.error(f"Error running scan: {str(error)}")
                    results = None
                    links = None

            if results is not None and results:
                reporter.display_results(results, "\t")

            if links is not None and links:
                new_files += links

                for l in links:
                    if l not in orig_links:
                        output.norm(f"\tNew file found: {l}")

            output.empty()

            # check for common backup files
            all_links = orig_links + new_files

            with Spinner():
                backups, res = file_search.find_backups(all_links)

            if res:
                reporter.display_results(res, "\t")

            if backups:
                new_files += backups

    if path_good:
        links, results = special_files.check_special_paths(session.url)
        if results:
            reporter.display_results(results, "\t")

        new_files += links

        if session.args.dir:
            output.empty()
            output.norm(
                "Searching for common directories (this will take a few minutes)..."
            )

            with Spinner():
                try:
                    links, results = file_search.find_directories(
                        session.url,
                        session.args.dirlistredir,
                        session.args.dirrecursive,
                    )
                except Exception as error:
                    output.debug_exception()
                    output.error(f"Error running scan: {str(error)}")
                    results = None
                    links = None

            if results is not None and results:
                reporter.display_results(results, "\t")

            if links is not None and links:
                new_files += links

                for l in links:
                    if l not in orig_links:
                        output.norm(f"\tNew directory found: {l}")

            output.empty()

    # check for .DS_Store files
    if file_good:
        res = file_search.find_ds_store(new_files)

        if res:
            reporter.display_results(res, "\t")

    return new_files
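# Illustrative driver, not part of the module: how _file_search() might be
# invoked after spidering. Only .url and .args are assumed on Session, since
# those are the attributes the function above actually touches; the helper
# name and the empty seed list are assumptions.
def _example_run_file_search(session: Session) -> None:
    # seed with links found earlier (e.g., by a spider); empty for brevity
    found = _file_search(session, orig_links=[])

    for path in found:
        output.norm(f"Discovered: {path}")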