async def test_ssl_scanner():
    """Run the SSL module against a local self-signed HTTPS target and check the
    findings that must be reported regardless of the local python/openssl build."""
    persister = AsyncMock()
    request = Request("https://127.0.0.1:4443/")
    request.path_id = 42
    crawler = AsyncCrawler("https://127.0.0.1:4443/")
    options = {"timeout": 10, "level": 2}
    module = ModuleSsl(crawler, persister, options, Event())
    await module.attack(request)

    # Depending on installed python/openssl version different vulnerabilities may be present but the following
    # vulnerabilities and information should be there everytime
    persister.add_payload.assert_any_call(
        request_id=-1,
        payload_type="additional",
        module="ssl",
        category=NAME,
        level=INFO_LEVEL,
        request=request,
        parameter='',
        wstg=["WSTG-CRYP-01"],
        info="Certificate subject: yolo.com"
    )
    persister.add_payload.assert_any_call(
        request_id=-1,
        payload_type="vulnerability",
        module="ssl",
        category=NAME,
        level=CRITICAL_LEVEL,
        request=request,
        parameter='',
        wstg=["WSTG-CRYP-01"],
        info="Requested hostname doesn't match those in the certificate"
    )
    persister.add_payload.assert_any_call(
        request_id=-1,
        payload_type="vulnerability",
        module="ssl",
        category=NAME,
        level=CRITICAL_LEVEL,
        request=request,
        parameter='',
        wstg=["WSTG-CRYP-01"],
        info="Certificate is invalid for Mozilla trust store: self signed certificate"
    )
    persister.add_payload.assert_any_call(
        request_id=-1,
        payload_type="vulnerability",
        module="ssl",
        category=NAME,
        level=HIGH_LEVEL,
        request=request,
        parameter='',
        wstg=["WSTG-CRYP-01"],
        info="Strict Transport Security (HSTS) is not set"
    )
    await crawler.close()
async def test_must_attack():
    """ModuleHtp.must_attack should accept GET requests and reject POST requests.

    NOTE(review): another test named ``test_must_attack`` exists later in this
    SOURCE (Log4Shell variant) — if both live in the same module, the later
    definition shadows this one; verify they belong to separate test files.
    """
    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncCrawler(Request("http://perdu.com/"))
    options = {"timeout": 10, "level": 2}
    module_htp = ModuleHtp(crawler, persister, options, Event())

    # POST requests are out of scope for this module, GET requests are attacked
    assert await module_htp.must_attack(Request("http://perdu.com", method="POST")) is False
    assert await module_htp.must_attack(Request("http://perdu.com", method="GET")) is True
    # Fix: release the crawler's HTTP session, as the other tests in this file do
    await crawler.close()
async def test_csrf_cases():
    """Exercise the CSRF module over several requests: a skipped GET, a weakly
    checked token, a predictable token and a form with no token at all."""
    persister = AsyncMock()
    all_requests = []

    request = Request("http://127.0.0.1:65086/")
    request.path_id = 1
    all_requests.append(request)

    request = Request(
        "http://127.0.0.1:65086/",
        method="POST",
        post_params=[["email", "*****@*****.**"], ["xsrf_token", "weak"]],
    )
    request.path_id = 2
    all_requests.append(request)

    request = Request(
        "http://127.0.0.1:65086/?check=true",
        method="POST",
        post_params=[["email", "*****@*****.**"], ["xsrf_token", "weak"]],
    )
    request.path_id = 3
    all_requests.append(request)

    request = Request(
        "http://127.0.0.1:65086/?check=true",
        method="POST",
        post_params=[["name", "Obiwan"]],
    )
    request.path_id = 4
    all_requests.append(request)

    crawler = AsyncCrawler("http://127.0.0.1:65086/", timeout=1)
    options = {"timeout": 10, "level": 1}
    module = ModuleCsrf(crawler, persister, options, Event())
    module.do_post = True
    for request in all_requests:
        if await module.must_attack(request):
            await module.attack(request)
        else:
            # Not attacked because of GET verb
            assert request.path_id == 1

    # Collect (request_id, info) pairs from every recorded payload
    vulnerabilities = set()
    for call in persister.add_payload.call_args_list:
        vulnerabilities.add((call[1]["request_id"], call[1]["info"]))

    assert vulnerabilities == {
        (2, _("CSRF token '{}' is not properly checked in backend").format("xsrf_token")),
        (3, _("CSRF token '{}' might be easy to predict").format("xsrf_token")),
        (4, _("Lack of anti CSRF token"))
    }
    await crawler.close()
async def test_analyze_file_none_content():
    """_analyze_file must record no technology when the mocked response has no content."""
    respx.get("http://perdu.com/").mock(return_value=None)
    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncCrawler(Request("http://perdu.com/"))
    options = {"timeout": 10, "level": 2}
    module_htp = ModuleHtp(crawler, persister, options, Event())

    await module_htp._analyze_file(Request("http://perdu.com"))
    # Nothing detectable in an empty response
    assert len(module_htp.tech_versions) == 0
    # Fix: close the crawler session, as the other tests in this file do
    await crawler.close()
async def test_wp_version():
    """WordPress enumeration should read the version from the RSS <generator> tag
    and trigger exactly one plugin scan and one theme scan."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va wordpress vous aider</h2> \
            Wordpress wordpress WordPress\
            <strong><pre> *    <----- vous êtes ici</pre></strong></body></html>"
        )
    )
    respx.get("http://perdu.com/feed/").mock(
        return_value=httpx.Response(
            200,
            text='<?xml version="1.0" encoding="UTF-8"?>\
                <rss version="2.0">\
                    <channel>\
                        <generator>https://wordpress.org/?v=5.8.2</generator>\
                    </channel>\
                </rss>'
        )
    )
    persister = AsyncMock()
    request = Request("http://perdu.com")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}

    with mock.patch.object(ModuleWpEnum, "detect_plugin", AsyncMock()) as mock_detect_plugin, \
            mock.patch.object(ModuleWpEnum, "detect_theme", AsyncMock()) as mock_detect_theme:
        module = ModuleWpEnum(crawler, persister, options, Event())
        await module.attack(request)

        mock_detect_plugin.assert_called_once()
        mock_detect_theme.assert_called_once()
        assert persister.add_payload.call_count == 1
        assert persister.add_payload.call_args_list[0][1]["info"] == (
            '{"name": "WordPress", "versions": ["5.8.2"], "categories": ["CMS", "Blogs"]}'
        )
    # Fix: close the crawler session (was leaked)
    await crawler.close()
async def test_vulnerabilities():
    """Fingerprinting should report vulnerable technologies seen in response headers
    (Backdrop CMS via X-Generator, Cherokee via Server)."""
    # Test for vulnerabilities detected
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    *    <----- vous êtes ici</pre></strong> \
            </body></html>",
        headers={"X-Generator": "Backdrop CMS 4.5", "Server": "Cherokee/1.3.4"}))

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.add_payload.call_count == 5
    # First one is an additional
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}')
    assert persister.add_payload.call_args_list[0][1]["category"] == _("Fingerprint web technology")
    assert persister.add_payload.call_args_list[3][1]["info"] == (
        '{"versions": ["1.3.4"], "name": "Cherokee", "categories": ["Web servers"]}'
    )
    assert persister.add_payload.call_args_list[3][1]["category"] == _('Fingerprint web server')
    await crawler.close()
async def test_finish_two_ranges():
    """finish() must merge several overlapping version ranges into one contiguous list."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(200, text="foobar"))
    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    persister.get_root_url.return_value = "http://perdu.com/"
    request = Request("http://perdu.com/")
    request.path_id = 1
    techno = "techno"
    versions = ["1.0", "1.1", "1.2", "1.2.1", "1.3", "1.4", "1.5", "1.6"]

    async def async_magic():
        pass

    # Make every MagicMock awaitable for the duration of this test.
    # Fix: restore the class afterwards — the original assignment permanently
    # patched MagicMock and leaked into every later test of the session.
    MagicMock.__await__ = lambda x: async_magic().__await__()
    try:
        with mock.patch("wapitiCore.attack.mod_htp.ModuleHtp.add_vuln_info", autospec=True) as mock_add_vuln_info, \
                mock.patch.object(ModuleHtp, "_db", new_callable=PropertyMock), \
                mock.patch.object(ModuleHtp, "_get_versions", return_value=versions):
            crawler = AsyncCrawler(Request("http://perdu.com/"))
            options = {"timeout": 10, "level": 2}
            module_htp = ModuleHtp(crawler, persister, options, Event())
            module_htp._root_url = "http://perdu.com/"
            # Four overlapping ranges covering 1.0 .. 1.5
            module_htp.tech_versions[techno] = [["1.2", "1.2.1", "1.3"], ["1.3", "1.4"], ["1.5", "1.5"], ["1.0", "1.2"]]

            await module_htp.finish()

            mock_add_vuln_info.assert_called_once_with(
                module_htp,
                category=_("Fingerprint web server"),
                request=Request("http://perdu.com/"),
                info='{"name": "techno", "versions": ["1.0", "1.1", "1.2", "1.2.1", "1.3", "1.4", "1.5"]}'
            )
            # Fix: close the crawler session (was leaked)
            await crawler.close()
    finally:
        del MagicMock.__await__
async def test_analyze_file_request_error():
    """A network error during _analyze_file should be counted, with no detection recorded."""
    respx.get("http://perdu.com/").mock(side_effect=httpx.RequestError("error"))
    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncCrawler(Request("http://perdu.com/"))
    options = {"timeout": 10, "level": 2}
    module_htp = ModuleHtp(crawler, persister, options, Event())

    await module_htp._analyze_file(Request("http://perdu.com"))

    assert len(module_htp.tech_versions) == 0
    # The RequestError raised by respx must be tallied as a network error
    assert module_htp.network_errors == 1
    # Fix: close the crawler session, as the other tests in this file do
    await crawler.close()
async def test_analyze_file_no_detection():
    """When _find_technology yields nothing, _analyze_file should record no versions."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(200, text="foobar"))
    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1

    with mock.patch.object(ModuleHtp, "_find_technology", return_value=None):
        crawler = AsyncCrawler(Request("http://perdu.com/"))
        options = {"timeout": 10, "level": 2}
        module_htp = ModuleHtp(crawler, persister, options, Event())

        await module_htp._analyze_file(Request("http://perdu.com"))

        assert len(module_htp.tech_versions) == 0
        # Fix: close the crawler session, as the other tests in this file do
        await crawler.close()
async def test_whole_stuff():
    """Run the Nikto module end-to-end: only the planted vulnerable CGI should be reported."""
    # Test attacking all kind of parameter without crashing
    respx.route(host="raw.githubusercontent.com").pass_through()
    respx.get("http://perdu.com/cgi-bin/a1disp3.cgi?../../../../../../../../../../etc/passwd").mock(
        return_value=httpx.Response(200, text="root:0:0:")
    )
    # Everything else on the target host answers 404
    respx.route(host="perdu.com").mock(
        return_value=httpx.Response(404, text="Not found")
    )

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com/")
    request.path_id = 1
    request.status = 200
    request.set_headers({"content-type": "text/html"})
    persister.get_links.return_value = chain([request])

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    options = {"timeout": 10, "level": 2}

    module = mod_nikto(crawler, persister, options, Event())
    module.verbose = 2
    module.do_get = True
    await module.attack(request)

    assert persister.add_payload.call_count == 1
    assert persister.add_payload.call_args_list[0][1]["module"] == "nikto"
    assert persister.add_payload.call_args_list[0][1]["category"] == _("Potentially dangerous file")
    assert persister.add_payload.call_args_list[0][1]["request"].url == (
        "http://perdu.com/cgi-bin/a1disp3.cgi?..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2Fetc%2Fpasswd"
    )
    assert (
        "This CGI allows attackers read arbitrary files on the host"
    ) in persister.add_payload.call_args_list[0][1]["info"]
    await crawler.close()
async def test_verify_headers_vuln_found():
    """_verify_headers_vulnerability must add one payload when the DNS callback confirms the hit."""
    async def mock_verify_dns(_header_uuid: str):
        return True

    # When a vuln has been found
    with patch.object(Request, "http_repr", autospec=True) as mock_http_repr:
        persister = AsyncMock()
        home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
        base_dir = os.path.join(home_dir, ".wapiti")
        persister.CONFIG_DIR = os.path.join(base_dir, "config")
        request = Request("http://perdu.com/")
        request.path_id = 1

        crawler = AsyncCrawler(Request("http://perdu.com/"))
        options = {"timeout": 10, "level": 2}
        module = ModuleLog4Shell(crawler, persister, options, Event())
        module._verify_dns = mock_verify_dns

        modified_request = Request("http://perdu.com/")
        malicious_headers = {"Header": "payload"}
        headers_uuid_record = {"Header": "unique_id"}

        await module._verify_headers_vulnerability(modified_request, malicious_headers, headers_uuid_record)
        mock_http_repr.assert_called_once()
        persister.add_payload.assert_called_once_with(
            request_id=-1,
            payload_type=VULN,
            module="log4shell",
            category=NAME,
            level=CRITICAL_LEVEL,
            request=request,
            parameter="Header: payload",
            info=_("URL {0} seems vulnerable to Log4Shell attack by using the header {1}")
            .format(modified_request.url, "Header"),
            wstg=["WSTG-INPV-11"]
        )
        # Fix: close the crawler session, as the other tests in this file do
        await crawler.close()
async def test_must_attack():
    """ModuleLog4Shell.must_attack returns True only while the module is not finished.

    NOTE(review): this duplicates the name of an earlier ``test_must_attack``
    (ModuleHtp variant) — verify the two live in different test files.
    """
    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = ModuleLog4Shell(crawler, persister, options, Event())

    module.finished = False
    assert await module.must_attack(Request("foobar"))
    module.finished = True
    assert not await module.must_attack(Request("foobar"))
    # Fix: close the crawler session, as the other tests in this file do
    await crawler.close()
async def test_blind_detection():
    """Detect boolean-blind SQL injection against a genuinely vulnerable
    sqlite-backed fake endpoint served through respx."""
    with NamedTemporaryFile() as database_fd:
        # Build a tiny user table inside a throw-away sqlite database
        conn = sqlite3.connect(database_fd.name)
        cursor = conn.cursor()
        cursor.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, username TEXT, password TEXT)")
        conn.commit()
        cursor.execute("INSERT INTO users (id, username, password) VALUES (1, \"admin\", \"123456\")")
        conn.commit()
        cursor.close()
        conn.close()

        def process(http_request):
            # Fake backend: deliberately injects user_id straight into the SQL string
            try:
                user_id = parse_qs(urlparse(str(http_request.url)).query)["user_id"][0]
            except (IndexError, KeyError):
                return httpx.Response(200, text="Unknown user")
            else:
                conn = sqlite3.connect(database_fd.name)
                cursor = conn.cursor()
                try:
                    # Will you spot the SQLi vulnerability? :D
                    cursor.execute("SELECT username FROM users WHERE id = {}".format(user_id))
                    row = cursor.fetchone()
                except sqlite3.OperationalError:
                    cursor.close()
                    conn.close()
                    return httpx.Response(200, text="Unknown user")
                else:
                    cursor.close()
                    conn.close()
                    if row:
                        return httpx.Response(200, text="Welcome {}".format(row[0]))
                    else:
                        return httpx.Response(200, text="Unknown user")

        respx.get(url__regex=r"http://perdu\.com/\?user_id=.*").mock(side_effect=process)

        persister = AsyncMock()

        request = Request("http://perdu.com/?user_id=1")
        request.path_id = 1

        crawler = AsyncCrawler(Request("http://perdu.com/"), timeout=1)
        options = {"timeout": 10, "level": 1}

        module = ModuleSql(crawler, persister, options, Event())
        module.do_post = True
        await module.attack(request)

        assert persister.add_payload.call_count
        # One request for error-based, one to get normal response, four to test boolean-based attack
        assert respx.calls.call_count == 6
        await crawler.close()
    def test_get_file_special_payload(self, loop, mocker, file_svc):
        """get_file should invoke the registered special-payload hook and return
        its renamed payload alongside the file content."""
        payload = 'unittestpayload'
        new_payload_name = 'utp'
        payload_content = b'content'
        payload_func = AsyncMock(return_value=(payload, new_payload_name))

        # patch out read_file and special payload for testing
        mocker.patch.object(file_svc, 'read_file', new_callable=AsyncMock, return_value=(payload, payload_content))
        mocker.patch.dict(file_svc.special_payloads, {payload: payload_func})
        fp, rcontent, display_name = loop.run_until_complete(
            file_svc.get_file(headers=dict(file=payload, name=new_payload_name)))

        payload_func.assert_called_once()
        assert display_name == new_payload_name
        assert rcontent == payload_content
        assert payload in fp
async def test_direct_upload():
    """XXE via direct file upload: the endpoint callback data should be attributed
    to the vulnerable 'calendar' file parameter."""
    respx.route(host="127.0.0.1").pass_through()
    persister = AsyncMock()
    request = Request(
        "http://127.0.0.1:65084/xxe/outofband/upload.php",
        file_params=[
            ["foo", ("bar.xml", "<xml>test</xml>", "application/xml")],
            ["calendar", ("calendar.xml", "<xml>test</xml>", "application/xml")]
        ]
    )
    request.path_id = 8
    persister.get_path_by_id.return_value = request
    crawler = AsyncCrawler("http://127.0.0.1:65084/")
    options = {
        "timeout": 10,
        "level": 1,
        "external_endpoint": "http://wapiti3.ovh/",
        "internal_endpoint": "http://wapiti3.ovh/"
    }
    logger = Mock()

    module = mod_xxe(crawler, persister, logger, options, Event())

    await module.attack(request)

    # Fake endpoint answer: hex key "63616c656e646172" decodes to "calendar"
    respx.get(
        "http://wapiti3.ovh/get_xxe.php?session_id=" + module._session_id
    ).mock(return_value=httpx.Response(
        200,
        json={
            "8": {
                "63616c656e646172": [
                    {
                        "date": "2019-08-17T16:52:41+00:00",
                        "url": "https://wapiti3.ovh/xxe_data/yolo/8/63616c656e646172/31337-0-192.168.2.1.txt",
                        "ip": "192.168.2.1",
                        "size": 999,
                        "payload": "linux2"
                    }
                ]
            }
        }))

    assert not persister.add_payload.call_count
    await module.finish()

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "calendar"
    await crawler.close()
async def test_finish_no_technologies():
    """finish() must consult the database but report nothing when no technology was seen."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(200, text="foobar"))
    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1

    with mock.patch("wapitiCore.attack.mod_htp.ModuleHtp.add_vuln_info", autospec=True) as mock_add_vuln_info, \
            mock.patch.object(ModuleHtp, "_db", new_callable=PropertyMock) as mock_db:
        crawler = AsyncCrawler(Request("http://perdu.com/"))
        options = {"timeout": 10, "level": 2}
        module_htp = ModuleHtp(crawler, persister, options, Event())

        await module_htp.finish()

        mock_db.assert_called()
        mock_add_vuln_info.assert_not_called()
        # Fix: close the crawler session, as the other tests in this file do
        await crawler.close()
async def test_out_of_band_body():
    """XXE injected in the raw POST body: finish() should attribute the endpoint
    callback to the 'raw body' parameter."""
    respx.route(host="127.0.0.1").pass_through()
    persister = AsyncMock()
    request = Request(
        "http://127.0.0.1:65084/xxe/outofband/body.php",
        method="POST",
        post_params=[["placeholder", "yolo"]]
    )
    request.path_id = 42
    persister.get_path_by_id.return_value = request
    persister.requests.append(request)
    crawler = AsyncCrawler("http://127.0.0.1:65084/")
    options = {
        "timeout": 10,
        "level": 1,
        "external_endpoint": "http://wapiti3.ovh/",
        "internal_endpoint": "http://wapiti3.ovh/"
    }
    logger = Mock()

    module = mod_xxe(crawler, persister, logger, options, Event())

    # Fake endpoint answer: hex key "72617720626f6479" decodes to "raw body"
    respx.get(
        "http://wapiti3.ovh/get_xxe.php?session_id=" + module._session_id
    ).mock(return_value=httpx.Response(
        200,
        json={
            "42": {
                "72617720626f6479": [
                    {
                        "date": "2019-08-17T16:52:41+00:00",
                        "url": "https://wapiti3.ovh/xxe_data/yolo/3/72617720626f6479/31337-0-192.168.2.1.txt",
                        "ip": "192.168.2.1",
                        "size": 999,
                        "payload": "linux2"
                    }
                ]
            }
        }))

    module.do_post = False
    await module.attack(request)

    assert not persister.add_payload.call_count
    await module.finish()

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "raw body"
    assert "linux2" in persister.add_payload.call_args_list[0][1]["request"].post_params
    await crawler.close()
async def test_warning_false_positive():
    """File-inclusion module should report only the real 'f' parameter, not the
    warning-triggering decoy parameter."""
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65085/inclusion.php?yolo=warn&f=toto")
    request.path_id = 42
    crawler = AsyncCrawler("http://127.0.0.1:65085/")
    options = {"timeout": 10, "level": 2}

    module = ModuleFile(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)

    assert persister.add_payload.call_count == 1
    # The payload must target the vulnerable "f" parameter
    assert ["f", "/etc/services"] in persister.add_payload.call_args_list[0][1]["request"].get_params
    await crawler.close()
def _test_download_file_with_encoding(loop, file_svc, data_svc, encoding, original_content, encoded_content):
    """Shared helper: download a file with the given 'x-file-encoding' header and
    check the returned content matches the expected encoded form."""
    filename = 'testencodedpayload.txt'
    file_svc.read_file = AsyncMock(return_value=(filename, original_content))
    file_svc.data_svc = data_svc

    file_path, content, display_name = loop.run_until_complete(
        file_svc.get_file(headers={'file': filename, 'x-file-encoding': encoding}))

    assert file_path == filename
    assert content == encoded_content
    assert display_name == filename
async def test_timesql_false_positive():
    """Time-based SQLi module must not report the endpoint that is slow but not injectable."""
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65082/blind_sql.php?vuln2=hello%20there")
    request.path_id = 42
    crawler = AsyncCrawler("http://127.0.0.1:65082/", timeout=1)
    options = {"timeout": 1, "level": 1}

    module = mod_timesql(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)

    # No payload recorded: the time difference is not attacker controlled
    assert not persister.add_payload.call_count
    await crawler.close()
async def test_implies_detection():
    """Detecting Backdrop via X-Generator should also report the technologies it implies (PHP)."""
    # Test for implied applications
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    *    <----- vous êtes ici</pre></strong> \
            </body></html>",
            headers={"X-Generator": "Backdrop CMS 4.5"}
        )
    )

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}

    module = mod_wapp(crawler, persister, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.add_payload.call_count == 3
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}'
    )
    # Last finding is the implied technology, with no version information
    assert persister.add_payload.call_args_list[-1][1]["info"] == (
        '{"versions": [], "name": "PHP", "categories": ["Programming languages"]}'
    )
    await crawler.close()
async def test_multi_detection():
    """WordPress should be detected through several clues at once (meta generator,
    script path and Link header) and reported with its version."""
    # Test if application is detected using several ways
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title> \
            <meta name=\"generator\" content=\"WordPress 5.6.1\">    \
            </head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    *    <----- vous êtes ici</pre></strong> \
            <script type=\"text/javascript\" src=\"https://perdu.com/wp-includes/js/wp-embed.min.js\" ></script> \
            </body></html>",
            headers={"link": "<http://perdu.com/wp-json/>; rel=\"https://api.w.org/\""}
        )
    )

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = mod_wapp(crawler, persister, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[-1][1]["info"] == (
        '{"versions": ["5.6.1"], "name": "WordPress", "categories": ["CMS", "Blogs"]}'
    )
    await crawler.close()
async def test_url_detection():
    """Outlook Web App should be recognized purely from its characteristic URL pattern."""
    # Test if application is detected using its url regex
    respx.get("http://perdu.com/owa/auth/logon.aspx").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre>    *    <----- vous êtes ici</pre></strong></body></html>"
    ))

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com/owa/auth/logon.aspx")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["module"] == "wapp"
    assert persister.add_payload.call_args_list[0][1]["category"] == _("Fingerprint web technology")
    assert persister.add_payload.call_args_list[2][1]["info"] == (
        '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"]}'
    )
    await crawler.close()
async def test_direct_query_string():
    """XXE injected directly in the query string should be reported against QUERY_STRING."""
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65084/xxe/direct/qs.php")
    request.path_id = 42
    crawler = AsyncCrawler("http://127.0.0.1:65084/")
    options = {"timeout": 10, "level": 2}

    module = mod_xxe(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "QUERY_STRING"
    await crawler.close()
def test_find_file_path_no_plugin(self, loop, mocker, file_svc): data_svc = mocker.Mock() data_svc.locate = AsyncMock(return_value=[]) mocker.patch.object(file_svc, 'data_svc', new=data_svc) filename = 'unittest-file-path-test' path = 'data' with open('./%s/%s' % (path, filename), 'w') as f: f.write('test') _, file_path = loop.run_until_complete( file_svc.find_file_path(filename)) assert file_path == '%s/%s' % (path, filename) # delete file os.remove('./%s/%s' % (path, filename))
async def test_attack():
    """Log4Shell attack should probe headers in batches plus the VSphere, URL,
    Druid and Solr specific cases, verifying DNS callbacks for each payload."""
    files = {
        "headers.txt": '\n'.join([str(nbr) for nbr in random.sample(range(0, 100), 100)]),
    }
    persister = AsyncMock()
    persister.get_root_url.return_value = "http://perdu.com/"
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1
    # Crawler is fully mocked: we only count outgoing calls
    crawler = AsyncMock()
    options = {"timeout": 10, "level": 2}

    request_to_attack = Request("http://perdu.com/", "GET")

    future_verify_dns = asyncio.Future()
    future_verify_dns.set_result(True)

    with mock.patch("builtins.open", get_mock_open(files)) as mock_open_headers, \
            patch.object(ModuleLog4Shell, "_verify_dns", return_value=future_verify_dns) as mock_verify_dns:
        module = ModuleLog4Shell(crawler, persister, options, Event())
        module.DATA_DIR = ""
        module.HEADERS_FILE = "headers.txt"
        await module.attack(request_to_attack)
        mock_open_headers.assert_called_once()
        # vsphere case (2) + each header batch (10) + url case (1) + druid case (1) + solr case (1)
        assert crawler.async_send.call_count == 15
        assert mock_verify_dns.call_count == 105
async def test_title_false_positive():
    """XSS module must not report a payload that stays trapped inside the <title> tag."""
    # We should fail at escaping the title tag and we should be aware of it
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/title_false_positive.php?title=yolo&fixed=yes")
    request.path_id = 42
    crawler = AsyncCrawler("http://127.0.0.1:65081/")
    options = {"timeout": 10, "level": 2}

    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)

    assert not persister.add_payload.call_count
    await crawler.close()
async def test_attack():
    """ModuleHtp.attack on a non-root URL must analyze the file without listing static files.

    NOTE(review): this duplicates the name of an earlier ``test_attack``
    (Log4Shell variant) — verify the two live in different test files.
    """
    target_url = "http://perdu.com/"
    respx.get(target_url).mock(return_value=httpx.Response(200, text="foobar"))
    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    persister.get_root_url.return_value = "http://perdu.com/"
    request = Request(target_url)
    request.path_id = 1

    static_files = ["README.md", "index.html"]

    future_init_db = asyncio.Future()
    future_init_db.set_result(None)

    with mock.patch.object(
            ModuleHtp, "_get_static_files", return_value=static_files, autospec=True
    ) as mock_get_static_files, \
            mock.patch.object(ModuleHtp, "_analyze_file", autospec=True) as mock_analyze_file, \
            mock.patch.object(ModuleHtp, "_init_db", return_value=future_init_db):
        crawler = AsyncCrawler(Request(target_url))
        options = {"timeout": 10, "level": 2}
        module_htp = ModuleHtp(crawler, persister, options, Event())
        module_htp._root_url = target_url

        target_request = Request(target_url + "index.html")
        await module_htp.attack(target_request)

        # Static-file enumeration only happens on the root URL, not on sub-paths
        mock_get_static_files.assert_not_called()
        assert mock_analyze_file.call_count == 1
        # Fix: close the crawler session, as the other tests in this file do
        await crawler.close()
async def test_xss_with_strong_csp():
    """An XSS finding behind a strong Content-Security-Policy should carry a CSP warning."""
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/strong_csp.php?content=Hello%20there")
    request.path_id = 42
    crawler = AsyncCrawler("http://127.0.0.1:65081/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_xss(crawler, persister, logger, options, Event())
    module.do_post = False
    await module.attack(request)

    assert persister.add_payload.call_count
    assert _("Warning: Content-Security-Policy is present!") in persister.add_payload.call_args_list[0][1]["info"]
    await crawler.close()
async def test_bad_separator_used():
    """XSS payloads must use the attribute separator actually honored by the page
    ('">' here), not a confused alternative."""
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/confuse_separator.php?number=42")
    request.path_id = 42
    crawler = AsyncCrawler("http://127.0.0.1:65081/")
    options = {"timeout": 10, "level": 2}

    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)

    assert persister.add_payload.call_count
    used_payload = persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower()
    # The successful payload must break out of the attribute with "> first
    assert used_payload.startswith("\">")
    await crawler.close()