async def test_cookies_detection():
    """The Wappalyzer module must fingerprint CodeIgniter through its cookie regex."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> * <----- vous êtes ici</pre></strong> \
        </body></html>",
        headers={"Set-Cookie": "ci_csrf_token=4.1"}))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["2+"], "name": "CodeIgniter", "categories": ["Web frameworks"], "groups": ["Web development"]}'
    )
    await crawler.close()
async def test_html_detection():
    """The Wappalyzer module must fingerprint Atlassian FishEye from the page HTML."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>FishEye 2.8.4</title> \
        </head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        </body></html>"))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["2.8.4"], "name": "Atlassian FishEye", "categories": ["Development"], "groups": ["Web development"]}'
    )
    await crawler.close()
async def test_analyze_file_detection():
    """_analyze_file must record the (techno, versions) pair returned by _find_technology."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(200, text="foobar"))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    techno = "techno"
    # Doubly-encoded JSON payload, i.e. '{"versions": ["1.2", "1.2.1"]}'
    techno_versions = '"{\\"versions\\": [\\"1.2\\", \\"1.2.1\\"]}"'

    with mock.patch.object(ModuleHtp, "_find_technology", return_value=(techno, techno_versions)):
        crawler = AsyncCrawler(Request("http://perdu.com/"))
        options = {"timeout": 10, "level": 2}
        module_htp = ModuleHtp(crawler, persister, options, Event())

        await module_htp._analyze_file(Request("http://perdu.com/"))

        assert len(module_htp.tech_versions) == 1
        assert module_htp.tech_versions.get(techno) is not None
        assert module_htp.tech_versions.get(techno) == [["1.2", "1.2.1"]]
async def test_attack_apache_druid():
    """_attack_apache_druid_url must send one request and verify the URL once.

    Bug fix: the original used ``assert mock.assert_called_once`` which only
    checks the truthiness of the bound method object — it always passes and
    never verifies anything.  The mock assertions are now actually invoked.
    """
    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncMock()
    options = {"timeout": 10, "level": 2, "dns_endpoint": None}

    # Pre-resolved future so the patched coroutine can be awaited.
    future_url_vulnerability = asyncio.Future()
    future_url_vulnerability.set_result(None)

    with patch.object(
            ModuleLog4Shell, "_verify_url_vulnerability",
            return_value=future_url_vulnerability) as mock_verify_url:
        module = ModuleLog4Shell(crawler, persister, options, Event())
        await module._attack_apache_druid_url("http://perdu.com/")

        crawler.async_send.assert_called_once()
        mock_verify_url.assert_called_once()
async def test_multi_detection():
    """WordPress must be detected when several signatures (meta, script, header) agree."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title> \
        <meta name=\"generator\" content=\"WordPress 5.6.1\"> \
        </head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> * <----- vous êtes ici</pre></strong> \
        <script type=\"text/javascript\" src=\"https://perdu.com/wp-includes/js/wp-embed.min.js\" ></script> \
        </body></html>",
        headers={
            "link": "<http://perdu.com/wp-json/>; rel=\"https://api.w.org/\""
        }))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)

    assert persister.add_payload.call_count
    # The last payload carries the merged WordPress fingerprint.
    assert persister.add_payload.call_args_list[-1][1]["info"] == (
        '{"versions": ["5.6.1"], "name": "WordPress", "categories": ["CMS", "Blogs"], "groups": ["Content"]}'
    )
    await crawler.close()
async def test_implies_detection():
    """Detecting Backdrop CMS must also report the technologies it implies (PHP)."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> * <----- vous êtes ici</pre></strong> \
        </body></html>",
        headers={"X-Generator": "Backdrop CMS 4.5"}))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}

    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)

    assert persister.add_payload.call_count == 3
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"], "groups": ["Content"]}'
    )
    # PHP is reported last, as an implication of Backdrop.
    assert persister.add_payload.call_args_list[-1][1]["info"] == (
        '{"versions": [], "name": "PHP", "categories": ["Programming languages"], "groups": ["Web development"]}'
    )
    await crawler.close()
async def test_script_detection():
    """The Wappalyzer module must fingerprint Chart.js from a <script> src URL.

    Fix: the module was instantiated through the legacy ``mod_wapp`` name while
    every other test here uses the ``ModuleWapp`` class; the stray ``verbose``
    attribute (a leftover of the old module API) is dropped as well.
    """
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre> * <----- vous êtes ici</pre></strong> \
            <script src=\"http://chartjs.org/dist/1.4.2/Chart.js\"></script>\
            </body></html>"
        )
    )

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["1.4.2"], "name": "Chart.js", "categories": ["JavaScript graphics"]}'
    )
    await crawler.close()
async def test_false_positive():
    """No technology must be reported on a page that matches nothing."""
    # Let the Wappalyzer database download go through for real.
    respx.route(host="raw.githubusercontent.com").pass_through()

    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
    ))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)

    assert not persister.add_payload.call_count
    await crawler.close()
async def test_is_valid_dns():
    """_is_valid_dns must accept a resolvable hostname and reject a failing one."""
    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = ModuleLog4Shell(crawler, persister, options, Event())

    good_dns = "foobar"
    bad_dns = "wrongdns"

    # Resolvable hostname -> valid
    with patch("socket.gethostbyname", autospec=True) as mock_gethostbyname:
        assert module._is_valid_dns(good_dns)
        mock_gethostbyname.assert_called_once_with(good_dns)

    # Resolution failure -> invalid
    with patch("socket.gethostbyname", side_effect=OSError("error")):
        assert not module._is_valid_dns(bad_dns)
def test_get_batch_malicious_headers():
    """Headers must be split into 10 batches, each payload embedding its header's UUID."""
    persister = AsyncMock()
    persister.get_root_url.return_value = "http://perdu.com"
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = ModuleLog4Shell(crawler, persister, options, Event())

    # 100 unique fake header names (a shuffled permutation of 0..99).
    headers = random.sample(range(0, 100), 100)

    malicious_headers, headers_uuid_record = module._get_batch_malicious_headers(headers)

    assert len(malicious_headers) == 10
    for batch_headers in malicious_headers:
        for header, payload in batch_headers.items():
            # Every payload points at our DNS endpoint and carries the header's UUID.
            assert "${jndi:dns://" + module.dns_endpoint in payload
            assert header in headers
            assert header in headers_uuid_record
            assert str(headers_uuid_record.get(header)) in payload
async def test_verify_dns():
    """_verify_dns must reflect the boolean carried by the TXT answer strings."""

    class MockAnswer:
        # Mimics a dnspython answer: TXT strings are lowercase bytes ("true"/"false").
        def __init__(self, response: bool) -> None:
            self.strings = [str(response).lower().encode("utf-8")]

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = ModuleLog4Shell(crawler, persister, options, Event())
    module._dns_host = ""

    with mock.patch.object(Resolver, "resolve", return_value=(MockAnswer(True),)):
        assert await module._verify_dns("foobar") is True

    with mock.patch.object(Resolver, "resolve", return_value=(MockAnswer(False),)):
        assert await module._verify_dns("foobar") is False
def test_init():
    """ModuleLog4Shell.__init__ must resolve the DNS endpoint and set ``finished``.

    Bug fix: the original wrote ``assert mock_gethostbyname.assert_called_once``
    which only tests the truthiness of the bound method object and therefore
    always passes; the mock assertion is now actually called.
    """
    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncMock()
    options = {"timeout": 10, "level": 2, "dns_endpoint": None}

    # When the dns_endpoint is valid: the endpoint is resolved and the module runs.
    with patch.object(ModuleLog4Shell, "_is_valid_dns", return_value=True), \
            patch("socket.gethostbyname", autospec=True) as mock_gethostbyname:
        module = ModuleLog4Shell(crawler, persister, options, Event())
        mock_gethostbyname.assert_called_once()
        assert not module.finished

    # When the dns_endpoint is not valid: the module marks itself finished.
    with patch.object(ModuleLog4Shell, "_is_valid_dns", return_value=False):
        module = ModuleLog4Shell(crawler, persister, options, Event())
        assert module.finished

    # When the dns_endpoint is None: the module cannot run either.
    with patch("socket.gethostbyname", autospec=True):
        module = ModuleLog4Shell(crawler, persister, options, Event())
        assert module.finished
async def test_read_headers():
    """read_headers must split the headers file on newlines (an empty file yields [''])."""
    files = {
        "headers.txt": "Header1\nHeader2\n",
        "empty.txt": ""
    }

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = ModuleLog4Shell(crawler, persister, options, Event())
    module.DATA_DIR = ""

    with mock.patch("builtins.open", get_mock_open(files)) as mock_open_headers:
        module.HEADERS_FILE = "headers.txt"
        headers = await module.read_headers()
        mock_open_headers.assert_called_once()

        assert len(headers) == 2
        assert headers[0] == "Header1"
        assert headers[1] == "Header2"

        module.HEADERS_FILE = "empty.txt"
        headers = await module.read_headers()
        assert len(headers) == 1
async def test_attack():
    """attack() must probe the VSphere case plus one request per header batch."""
    files = {
        "headers.txt": '\n'.join(str(nbr) for nbr in random.sample(range(0, 100), 100)),
    }

    persister = AsyncMock()
    persister.get_root_url.return_value = "http://perdu.com"
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncMock()
    options = {"timeout": 10, "level": 2}

    request_to_attack = Request("http://foobar/")

    # Pre-resolved future standing in for the _verify_dns coroutine.
    future_verify_dns = asyncio.Future()
    future_verify_dns.set_result(True)

    with mock.patch("builtins.open", get_mock_open(files)) as mock_open_headers, \
            patch.object(ModuleLog4Shell, "_verify_dns", return_value=future_verify_dns) as mock_verify_dns:
        module = ModuleLog4Shell(crawler, persister, options, Event())
        module.DATA_DIR = ""
        module.HEADERS_FILE = "headers.txt"

        await module.attack(request_to_attack)

        mock_open_headers.assert_called_once()
        # vsphere case + each header batch
        assert crawler.async_send.call_count == 11
        assert mock_verify_dns.call_count == 101
async def test_url_detection():
    """Outlook Web App must be fingerprinted from its characteristic URL path."""
    respx.get(
        "http://perdu.com/owa/auth/logon.aspx"
    ).mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
    ))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/owa/auth/logon.aspx")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["module"] == "wapp"
    assert persister.add_payload.call_args_list[0][1]["category"] == _("Fingerprint web technology")
    assert persister.add_payload.call_args_list[2][1]["info"] == (
        '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"], "groups": ["Communication"]}'
    )
    await crawler.close()
async def test_verify_headers_vulnerability_not_found():
    """Nothing must be reported when the DNS callback never confirms a payload."""

    async def mock_verify_dns(_header_uuid: str):
        return False

    # No vulnerability found: neither http_repr nor add_payload may be used.
    with patch.object(Request, "http_repr", autospec=True) as mock_http_repr:
        persister = AsyncMock()
        persister.CONFIG_DIR = os.path.join(
            os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
        )

        request = Request("http://perdu.com/")
        request.path_id = 1

        crawler = AsyncCrawler("http://perdu.com/")
        options = {"timeout": 10, "level": 2}
        module = ModuleLog4Shell(crawler, persister, options, Event())
        module._verify_dns = mock_verify_dns

        modified_request = Request("http://perdu.com/")
        malicious_headers = {"Header": "payload"}
        headers_uuid_record = {"Header": "unique_id"}

        await module._verify_headers_vulnerability(modified_request, malicious_headers, headers_uuid_record)

        mock_http_repr.assert_not_called()
        persister.add_payload.assert_not_called()
async def test_meta_detection():
    """The Wappalyzer module must fingerprint Planet from its generator meta tag."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title> \
        <meta name=\"generator\" content=\"Planet/1.6.2\"> \
        </head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> * <----- vous êtes ici</pre></strong> \
        </body></html>"))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["1.6.2"], "name": "Planet", "categories": ["Feed readers"], "groups": ["Content"]}'
    )
    await crawler.close()
async def test_false_positives():
    """Only the genuine content match must be reported; bogus 200 responses are filtered out."""
    respx.route(host="raw.githubusercontent.com").pass_through()

    # This one triggers a match based on content.
    respx.get("http://perdu.com/opendir.php?/etc/passwd").mock(
        return_value=httpx.Response(200, text="root:0:0:")
    )

    # Many entries would fire because HTTP 200 comes back instead of 404,
    # but the false-positive check should block them.
    respx.route(host="perdu.com").mock(
        return_value=httpx.Response(200, text="Hello there")
    )

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    temp_nikto_db = os.path.join(persister.CONFIG_DIR, "temp_nikto_db")
    with open(temp_nikto_db, "w") as fd:
        fd.writelines(
            [
                "003270,539,d,/catinfo,GET,200,,,,,May be vulnerable to a buffer overflow. Request '/catinfo?',,\n",
                "003271,5407,a,/soap/servlet/soaprouter,GET,200,,,,,Oracle 9iAS SOAP components allow anonymous,,\n",
                "003272,543,7,/opendir.php?/etc/passwd,GET,root:,,,,,This PHP-Nuke CGI allows attackers to read,,\n"
            ]
        )

    request = Request("http://perdu.com/")
    request.path_id = 1
    request.status = 200
    request.set_headers({"content-type": "text/html"})
    persister.get_links.return_value = chain([request])

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    options = {"timeout": 10, "level": 2, "tasks": 20}

    module = ModuleNikto(crawler, persister, options, Event())
    module.do_get = True
    module.NIKTO_DB = "temp_nikto_db"

    await module.attack(request)

    os.unlink(temp_nikto_db)

    assert persister.add_payload.call_count == 1
    assert persister.add_payload.call_args_list[0][1]["module"] == "nikto"
    assert persister.add_payload.call_args_list[0][1]["category"] == _("Potentially dangerous file")
    assert persister.add_payload.call_args_list[0][1]["request"].url == (
        "http://perdu.com/opendir.php?%2Fetc%2Fpasswd"
    )
    assert (
        "This PHP-Nuke CGI allows attackers to read"
    ) in persister.add_payload.call_args_list[0][1]["info"]
    await crawler.close()
async def test_merge_with_and_without_redirection():
    """Versions seen before and after a redirection must be merged in one report."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        301,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> * <----- vous êtes ici</pre></strong> \
        </body></html>",
        headers={
            "X-OWA-Version": "15.0.1497.26",
            "Location": "http://perdu.com/auth/login"
        }))
    respx.get("http://perdu.com/auth/login").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <link rel='shortcut icon' href='/owa/auth/15.0.1497/themes/resources/favicon.ico' type='image/x-icon'> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> * <----- vous êtes ici</pre></strong> \
        </body></html>",
        headers={}))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)

    assert persister.add_payload.call_count == 5
    # Both version strings (favicon path + header) end up in a single entry.
    assert persister.add_payload.call_args_list[3][1]["info"] == (
        '{"versions": ["15.0.1497", "15.0.1497.26"], "name": "Outlook Web App", "categories": ["Webmail"], "groups": ["Communication"]}'
    )
    assert persister.add_payload.call_args_list[3][1]["category"] == _("Fingerprint web application framework")
    await crawler.close()
async def test_must_attack():
    """ModuleHtp must only attack GET requests, never POST."""
    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler(Request("http://perdu.com/"))
    options = {"timeout": 10, "level": 2}
    module_htp = ModuleHtp(crawler, persister, options, Event())

    assert await module_htp.must_attack(Request("http://perdu.com", method="POST")) is False
    assert await module_htp.must_attack(Request("http://perdu.com", method="GET")) is True
async def test_analyze_file_none_content():
    """_analyze_file must record nothing when the response carries no content."""
    respx.get("http://perdu.com/").mock(return_value=None)

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler(Request("http://perdu.com/"))
    options = {"timeout": 10, "level": 2}
    module_htp = ModuleHtp(crawler, persister, options, Event())

    await module_htp._analyze_file(Request("http://perdu.com"))

    assert len(module_htp.tech_versions) == 0
async def test_vulnerabilities():
    """Vulnerable server software (Backdrop, Cherokee) must be fingerprinted.

    Fix: the module was built through the legacy ``mod_wapp`` callable with an
    extra ``logger`` positional argument; ``ModuleWapp`` takes
    ``(crawler, persister, options, event)`` like everywhere else in this file.
    The unused ``logger`` mock and the legacy ``verbose`` attribute are removed.
    """
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> * <----- vous êtes ici</pre></strong> \
        </body></html>",
        headers={
            "X-Generator": "Backdrop CMS 4.5",
            "Server": "Cherokee/1.3.4"
        }))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}

    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)

    assert persister.add_payload.call_count == 5
    # First one is an additional technology entry.
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}')
    assert persister.add_payload.call_args_list[0][1]["category"] == _("Fingerprint web technology")
    assert persister.add_payload.call_args_list[3][1]["info"] == (
        '{"versions": ["1.3.4"], "name": "Cherokee", "categories": ["Web servers"]}'
    )
    assert persister.add_payload.call_args_list[3][1]["category"] == _('Fingerprint web server')
    await crawler.close()
async def test_finish_two_ranges():
    """finish() must merge overlapping detected version ranges into one report."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(200, text="foobar"))
    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    persister.get_root_url.return_value = "http://perdu.com/"
    request = Request("http://perdu.com/")
    request.path_id = 1
    techno = "techno"
    # Full ordered catalogue of known versions returned by the patched _get_versions.
    versions = ["1.0", "1.1", "1.2", "1.2.1", "1.3", "1.4", "1.5", "1.6"]

    async def async_magic():
        pass

    # Make every MagicMock awaitable so awaited mock calls inside finish() don't raise.
    MagicMock.__await__ = lambda x: async_magic().__await__()
    with mock.patch("wapitiCore.attack.mod_htp.ModuleHtp.add_vuln_info", autospec=True) as mock_add_vuln_info, \
            mock.patch.object(ModuleHtp, "_db", new_callable=PropertyMock) as mock_db, \
            mock.patch.object(ModuleHtp, "_get_versions", return_value=versions):
        crawler = AsyncCrawler(Request("http://perdu.com/"))
        options = {"timeout": 10, "level": 2}
        module_htp = ModuleHtp(crawler, persister, options, Event())
        module_htp._root_url = "http://perdu.com/"
        # Four detected ranges whose union spans 1.0 -> 1.5 ("1.6" is never seen).
        module_htp.tech_versions[techno] = [["1.2", "1.2.1", "1.3"], ["1.3", "1.4"], ["1.5", "1.5"], ["1.0", "1.2"]]

        await module_htp.finish()

        # A single merged vulnerability covering the union of all ranges.
        mock_add_vuln_info.assert_called_once_with(
            module_htp,
            category=_("Fingerprint web server"),
            request=Request("http://perdu.com/"),
            info='{"name": "techno", "versions": ["1.0", "1.1", "1.2", "1.2.1", "1.3", "1.4", "1.5"]}'
        )
async def test_analyze_file_request_error():
    """A network failure in _analyze_file must be counted, not stored as a detection."""
    respx.get("http://perdu.com/").mock(side_effect=httpx.RequestError("error"))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler(Request("http://perdu.com/"))
    options = {"timeout": 10, "level": 2}
    module_htp = ModuleHtp(crawler, persister, options, Event())

    await module_htp._analyze_file(Request("http://perdu.com"))

    assert len(module_htp.tech_versions) == 0
    assert module_htp.network_errors == 1
async def test_analyze_file_no_detection():
    """_analyze_file must store nothing when _find_technology yields no match."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(200, text="foobar"))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    with mock.patch.object(ModuleHtp, "_find_technology", return_value=None):
        crawler = AsyncCrawler(Request("http://perdu.com/"))
        options = {"timeout": 10, "level": 2}
        module_htp = ModuleHtp(crawler, persister, options, Event())

        await module_htp._analyze_file(Request("http://perdu.com"))

        assert len(module_htp.tech_versions) == 0
async def test_verify_headers_vulnerability_found():
    """A Log4Shell vulnerability must be reported when the DNS callback confirms the payload."""

    async def mock_verify_dns(_header_uuid: str):
        # Stand-in for the DNS exfiltration check: the payload always "fired".
        return True

    # When a vuln has been found
    with patch.object(Request, "http_repr", autospec=True) as mock_http_repr:
        persister = AsyncMock()
        home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
        base_dir = os.path.join(home_dir, ".wapiti")
        persister.CONFIG_DIR = os.path.join(base_dir, "config")
        request = Request("http://perdu.com/")
        request.path_id = 1
        crawler = AsyncCrawler(Request("http://perdu.com/"))
        options = {"timeout": 10, "level": 2}
        module = ModuleLog4Shell(crawler, persister, options, Event())

        module._verify_dns = mock_verify_dns

        modified_request = Request("http://perdu.com/")
        malicious_headers = {"Header": "payload"}
        headers_uuid_record = {"Header": "unique_id"}

        await module._verify_headers_vulnerability(modified_request, malicious_headers, headers_uuid_record)
        # The request is rendered for the report and exactly one payload is stored.
        mock_http_repr.assert_called_once()
        persister.add_payload.assert_called_once_with(
            request_id=-1,
            payload_type=VULN,
            module="log4shell",
            category=NAME,
            level=CRITICAL_LEVEL,
            request=request,
            parameter="Header: payload",
            info=_("URL {0} seems vulnerable to Log4Shell attack by using the header {1}") \
                .format(modified_request.url, "Header"),
            wstg=["WSTG-INPV-11"]
        )
async def test_whole_stuff():
    """Attack every kind of parameter without crashing; one real match is reported.

    Fix: the module was instantiated through the legacy ``mod_nikto`` name while
    the sibling nikto test uses the ``ModuleNikto`` class; the stray ``verbose``
    attribute (a leftover of the old module API) is dropped as well.
    """
    respx.route(host="raw.githubusercontent.com").pass_through()

    # Only this nikto database entry genuinely matches (content check on root:0:0:).
    respx.get("http://perdu.com/cgi-bin/a1disp3.cgi?../../../../../../../../../../etc/passwd").mock(
        return_value=httpx.Response(200, text="root:0:0:")
    )
    respx.route(host="perdu.com").mock(
        return_value=httpx.Response(404, text="Not found")
    )

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1
    request.status = 200
    request.set_headers({"content-type": "text/html"})
    persister.get_links.return_value = chain([request])

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    options = {"timeout": 10, "level": 2}

    module = ModuleNikto(crawler, persister, options, Event())
    module.do_get = True
    await module.attack(request)

    assert persister.add_payload.call_count == 1
    assert persister.add_payload.call_args_list[0][1]["module"] == "nikto"
    assert persister.add_payload.call_args_list[0][1]["category"] == _("Potentially dangerous file")
    assert persister.add_payload.call_args_list[0][1]["request"].url == (
        "http://perdu.com/cgi-bin/a1disp3.cgi?..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2Fetc%2Fpasswd"
    )
    assert (
        "This CGI allows attackers read arbitrary files on the host"
    ) in persister.add_payload.call_args_list[0][1]["info"]
    await crawler.close()
async def test_must_attack():
    """ModuleLog4Shell.must_attack must be driven solely by the ``finished`` flag."""
    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = ModuleLog4Shell(crawler, persister, options, Event())

    module.finished = False
    assert await module.must_attack(Request("foobar"))

    module.finished = True
    assert not await module.must_attack(Request("foobar"))
async def test_finish_no_technologies():
    """finish() must not raise any vulnerability when nothing was detected."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(200, text="foobar"))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )

    request = Request("http://perdu.com/")
    request.path_id = 1

    with mock.patch("wapitiCore.attack.mod_htp.ModuleHtp.add_vuln_info", autospec=True) as mock_add_vuln_info, \
            mock.patch.object(ModuleHtp, "_db", new_callable=PropertyMock) as mock_db:
        crawler = AsyncCrawler(Request("http://perdu.com/"))
        options = {"timeout": 10, "level": 2}
        module_htp = ModuleHtp(crawler, persister, options, Event())

        await module_htp.finish()

        # The database property is consulted but no vulnerability is reported.
        mock_db.assert_called()
        mock_add_vuln_info.assert_not_called()
async def test_attack():
    """attack() must analyze only the attacked file, not enumerate static files."""
    target_url = "http://perdu.com/"
    respx.get(target_url).mock(return_value=httpx.Response(200, text="foobar"))

    persister = AsyncMock()
    persister.CONFIG_DIR = os.path.join(
        os.getenv("HOME") or os.getenv("USERPROFILE"), ".wapiti", "config"
    )
    persister.get_root_url.return_value = "http://perdu.com/"

    request = Request(target_url)
    request.path_id = 1

    static_files = ["README.md", "index.html"]

    # Pre-resolved future standing in for the _init_db coroutine.
    future_init_db = asyncio.Future()
    future_init_db.set_result(None)

    with mock.patch.object(
            ModuleHtp, "_get_static_files", return_value=static_files, autospec=True
    ) as mock_get_static_files, \
            mock.patch.object(ModuleHtp, "_analyze_file", autospec=True) as mock_analyze_file, \
            mock.patch.object(ModuleHtp, "_init_db", return_value=future_init_db):
        crawler = AsyncCrawler(Request(target_url))
        options = {"timeout": 10, "level": 2}
        module_htp = ModuleHtp(crawler, persister, options, Event())
        module_htp._root_url = target_url

        target_request = Request(target_url + "index.html")
        await module_htp.attack(target_request)

        mock_get_static_files.assert_not_called()
        assert mock_analyze_file.call_count == 1