async def test_extract_disconnect_urls():
    """Ensure the crawler extracts every logout-like URL found on a page.

    The mocked page mixes one regular link with six "disconnect" variants
    (logout/logoff/signout/signoff/disconnect/déconnexion), one of them
    relative, so both detection and URL resolution are exercised.
    """
    target_url = "http://perdu.com/"
    respx.get(target_url).mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> *    <----- vous êtes ici</pre></strong><a href='http://perdu.com/foobar/'></a> \
        <a href='http://perdu.com/foobar/logout'></a> \
        <a href='http://perdu.com/foobar/logoff'></a> \
        <a href='http://perdu.com/foobar/signout'></a> \
        <a href='http://perdu.com/foobar/signoff'></a> \
        <a href='http://perdu.com/foobar/disconnect'></a> \
        <a href='../../foobar/déconnexion'></a> \
        </div></body></html>"))
    crawler = AsyncCrawler(Request(target_url), timeout=1)
    page = await crawler.async_get(Request(target_url))
    disconnect_urls = crawler._extract_disconnect_urls(page)
    # Expected absolute URLs: the relative '../../foobar/déconnexion' must be
    # resolved against the page URL.
    test_disconnect_urls = [
        "http://perdu.com/foobar/logout",
        "http://perdu.com/foobar/logoff",
        "http://perdu.com/foobar/signout",
        "http://perdu.com/foobar/signoff",
        "http://perdu.com/foobar/disconnect",
        "http://perdu.com/foobar/déconnexion"
    ]
    assert len(disconnect_urls) == len(test_disconnect_urls)
    assert all(url in disconnect_urls for url in test_disconnect_urls) is True
async def test_async_try_login_basic_digest_ntlm_wrong_credentials():
    """Basic/digest/NTLM login with bad credentials must report failure.

    The auth endpoints answer 401, 403 and 404 respectively; in every case
    no login, no form and no disconnect URLs should be returned.
    """
    target_url = "http://perdu.com/"
    respx.get(target_url).mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> *    <----- vous êtes ici</pre></strong><a href='http://perdu.com/foobar/'></a> \
        <a href='http://perdu.com/foobar/signout'></a> \
        <div><a href='http://perdu.com/a/b/signout'></a></div></body></html>"
    ))
    # Pairs of (auth URL, HTTP status the mocked server answers with)
    auth_urls = [["http://perdu.com/login1", 401], ["http://perdu.com/login2", 403], ["http://perdu.com/login3", 404]]
    crawler = AsyncCrawler(Request(target_url), timeout=1)
    crawler._auth_credentials = ["username", "password"]
    for auth_url, status_code in auth_urls:
        respx.get(auth_url).mock(
            return_value=httpx.Response(status_code, text="KO"))
        is_logged_in, form, disconnect_urls = await crawler._async_try_login_basic_digest_ntlm(
            auth_url)
        assert is_logged_in is False
        assert len(form) == 0
        assert len(disconnect_urls) == 0
async def test_html_detection():
    """Wapp module: detect an application from its HTML (title) regex."""
    # Test if application is detected using its html regex
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>FishEye 2.8.4</title> \
        </head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        </body></html>"))
    persister = AsyncMock()
    # The module stores its technology database below the user's home directory
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)
    assert persister.add_payload.call_count
    # The FishEye version must be extracted from the <title> content
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["2.8.4"], "name": "Atlassian FishEye", "categories": ["Development"], "groups": ["Web development"]}'
    )
    await crawler.close()
async def test_implies_detection():
    """Detection through an HTTP header plus implied technologies.

    Backdrop is detected from the X-Generator header and its implied
    technologies (e.g. PHP) must be reported too — three findings overall.
    """
    # Test for implied applications
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> *    <----- vous êtes ici</pre></strong> \
        </body></html>",
        headers={"X-Generator": "Backdrop CMS 4.5"}))
    persister = AsyncMock()
    # Wapp technology database lives under ~/.wapiti/config
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}
    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)
    assert persister.add_payload.call_count == 3
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"], "groups": ["Content"]}'
    )
    # Last finding is the implied technology: Backdrop is written in PHP
    assert persister.add_payload.call_args_list[-1][1]["info"] == (
        '{"versions": [], "name": "PHP", "categories": ["Programming languages"], "groups": ["Web development"]}'
    )
    await crawler.close()
async def test_meta_detection():
    """Detect an application from its <meta name="generator"> regex."""
    # Test if application is detected using its meta regex
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title> \
        <meta name=\"generator\" content=\"Planet/1.6.2\"> \
        </head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> *    <----- vous êtes ici</pre></strong> \
        </body></html>"))
    persister = AsyncMock()
    # Wapp technology database lives under ~/.wapiti/config
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)
    assert persister.add_payload.call_count
    # Version must be parsed out of the generator meta tag
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["1.6.2"], "name": "Planet", "categories": ["Feed readers"], "groups": ["Content"]}'
    )
    await crawler.close()
async def test_false_positive():
    """A plain page matching no signature must produce no finding."""
    # Test for false positive
    # The technology database may be fetched live; let that host through
    respx.route(host="raw.githubusercontent.com").pass_through()
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> *    <----- vous êtes ici</pre></strong></body></html>"
    ))
    persister = AsyncMock()
    # Wapp technology database lives under ~/.wapiti/config
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)
    # Nothing should have been reported for this signature-less page
    assert not persister.add_payload.call_count
    await crawler.close()
async def test_whole_stuff():
    """Attack a PHP page and make sure the leaked .bak copy is reported."""
    # The backup copy exists and leaks credentials; everything else is a 404.
    respx.get("http://perdu.com/config.php.bak").mock(return_value=httpx.Response(200, text="password = 123456"))
    respx.get("http://perdu.com/config.php").mock(return_value=httpx.Response(200, text="Hello there"))
    respx.get(url__startswith="http://perdu.com/").mock(return_value=httpx.Response(404))

    target = Request("http://perdu.com/config.php")
    target.path_id = 1
    target.set_headers({"content-type": "text/html"})

    fake_persister = AsyncMock()
    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_backup(crawler, fake_persister, Mock(), {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    module.do_get = True

    await module.attack(target)

    first_report = fake_persister.add_payload.call_args_list[0][1]
    assert first_report["module"] == "backup"
    assert first_report["payload_type"] == "vulnerability"
    assert first_report["request"].url == 'http://perdu.com/config.php.bak'
    await crawler.close()
async def test_no_wordpress():
    """WpEnum must stay silent on a page with no WordPress marker."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> *    <----- vous êtes ici</pre></strong></body></html>"
    ))
    persister = AsyncMock()
    request = Request("http://perdu.com/")
    request.path_id = 1
    # persister.requests.append(request)
    crawler = AsyncCrawler(Request("http://perdu.com/"))
    options = {"timeout": 10, "level": 2}
    module = ModuleWpEnum(crawler, persister, options, Event())
    await module.attack(request)
    # No WordPress fingerprint on the page -> nothing persisted
    assert not persister.add_payload.call_count
    await crawler.close()
async def test_multi_versions_detected():
    """Drupal enumeration must report every release matching MAINTAINERS.txt."""
    base_dir = os.path.dirname(sys.modules["wapitiCore"].__file__)
    test_directory = os.path.join(base_dir, "..", "tests/data/drupal/")
    # Fixture file shared by several Drupal 8 beta releases
    with open(path_join(test_directory, "MAINTAINERS.txt"), errors="ignore") as fd:
        maintainers_content = fd.read()

    # /sites/ answering 200 marks the target as a Drupal site
    respx.get("http://perdu.com/sites/").mock(return_value=httpx.Response(200))
    # Serve the fixture as the site's maintainers file
    respx.get("http://perdu.com/core/MAINTAINERS.txt").mock(
        return_value=httpx.Response(200, text=maintainers_content))
    respx.get(url__regex=r"http://perdu.com/.*?").mock(return_value=httpx.Response(404))

    persister = AsyncMock()
    target = Request("http://perdu.com/")
    target.path_id = 1
    crawler = AsyncCrawler(Request("http://perdu.com/"))
    module = ModuleDrupalEnum(crawler, persister, {"timeout": 10, "level": 2, "tasks": 20}, Event())
    await module.attack(target)

    assert persister.add_payload.call_count == 1
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"name": "Drupal", "versions": ["8.0.0-beta4", "8.0.0-beta5", "8.0.0-beta6"], "categories": ["CMS Drupal"]}'
    )
    await crawler.close()
async def test_version_not_detected():
    """A tampered CHANGELOG.txt yields a Drupal detection with an empty version."""
    base_dir = os.path.dirname(sys.modules["wapitiCore"].__file__)
    test_directory = os.path.join(base_dir, "..", "tests/data/drupal/")
    # Changelog edited so that no known release fingerprint matches
    with open(path_join(test_directory, "CHANGELOG_EDITED.txt"), errors="ignore") as fd:
        edited_changelog = fd.read()

    # /sites/ answering 200 marks the target as a Drupal site
    respx.get("http://perdu.com/sites/").mock(return_value=httpx.Response(200))
    respx.get("http://perdu.com/CHANGELOG.txt").mock(
        return_value=httpx.Response(200, text=edited_changelog))
    respx.get(url__regex=r"http://perdu.com/.*?").mock(return_value=httpx.Response(404))

    persister = AsyncMock()
    target = Request("http://perdu.com/")
    target.path_id = 1
    crawler = AsyncCrawler(Request("http://perdu.com/"))
    module = ModuleDrupalEnum(crawler, persister, {"timeout": 10, "level": 2, "tasks": 20}, Event())
    await module.attack(target)

    assert persister.add_payload.call_count == 1
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"name": "Drupal", "versions": [""], "categories": ["CMS Drupal"]}'
    )
    await crawler.close()
async def test_explorer_extract_links():
    """Explorer.extract_links must keep only in-scope resources.

    Out-of-scope links (other domain, other port, external form action) are
    dropped; protocol-relative and relative links are resolved against the
    page URL.
    """
    crawler = AsyncCrawler(Request("http://perdu.com/"))
    explorer = Explorer(crawler, Event())
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(200, text="""<html><body>
        <a href="http://perdu.com/index.html"></a>
        <a href="https://perdu.com/secure_index.html"></a>
        <a href="//perdu.com/protocol_relative.html"></a>
        <a href="//lol.com/protocol_relative.html"></a>
        <a href="http://perdu.com:8000/other_port.html"></a>
        <a href="http://microsoft.com/other_domain.html"></a>
        <a href="welcome.html"></a>
        <a href="/about.html"></a>
        <form method="POST" action="http://perdu.com/valid_form.html">
        <input name="field" type="hidden" value="hello"/></form>
        <form method="POST" action="http://external.com/external_form.html">
        <input name="field" type="hidden" value="hello"/></form>
        """))
    request = Request("http://perdu.com/")
    page = await crawler.async_send(request)
    results = list(explorer.extract_links(page, request))
    # We should get 6 resources as the path from the form will also be used as url
    assert len(results) == 6
    await crawler.close()
async def test_unregistered_cname():
    """Takeover module: a CNAME pointing at an unregistered domain is reported."""
    # Any HTTP traffic to perdu.com just answers 200
    respx.route(host="perdu.com").mock(return_value=httpx.Response(200, text="Hello there"))

    async def fake_resolve(qname, rdtype, raise_on_no_answer: bool = False):
        # No wildcard DNS responses for the canary name
        if qname.startswith("supercalifragilisticexpialidocious."):
            return []
        # Only admin.perdu.com has a CNAME, and it targets a free domain
        if qname.startswith("admin.") and rdtype == "CNAME":
            return make_cname_answer("perdu.com", "unregistered.com")
        raise dns.resolver.NXDOMAIN("Yolo")

    with patch("wapitiCore.attack.mod_takeover.dns.asyncresolver.resolve") as mocked_resolve_:
        with patch("wapitiCore.attack.mod_takeover.dns.asyncresolver.Resolver.resolve") as mocked_resolve:
            mocked_resolve.side_effect = fake_resolve
            mocked_resolve_.side_effect = fake_resolve

            persister = AsyncMock()
            targets = []
            target = Request("http://perdu.com/")
            target.path_id = 1
            targets.append(target)

            crawler = AsyncCrawler("http://perdu.com/", timeout=1)
            module = ModuleTakeover(crawler, persister, {"timeout": 10, "level": 2}, Event())
            for target in targets:
                await module.attack(target)

            first_report = persister.add_payload.call_args_list[0][1]
            assert first_report["request"].hostname == "admin.perdu.com"
            assert "unregistered.com" in first_report["info"]
            await crawler.close()
async def test_whole_stuff():
    """Run the SQL module over GET/POST/file parameters and ensure it never crashes."""
    respx.get(url__regex=r"http://perdu\.com/.*").mock(return_value=httpx.Response(200, text="Hello there"))
    respx.post(url__regex=r"http://perdu\.com/.*").mock(return_value=httpx.Response(200, text="Hello there"))

    persister = AsyncMock()
    # One bare URL, one with a query string, one with POST and file parameters
    targets = [
        Request("http://perdu.com/"),
        Request("http://perdu.com/?foo=bar"),
        Request(
            "http://perdu.com/?foo=bar",
            post_params=[["a", "b"]],
            file_params=[["file", ("calendar.xml", b"<xml>Hello there</xml", "application/xml")]]
        ),
    ]
    for position, target in enumerate(targets, start=1):
        target.path_id = position

    crawler = AsyncCrawler(Request("http://perdu.com/"), timeout=1)
    module = ModuleSql(crawler, persister, {"timeout": 10, "level": 2}, Event())
    module.do_post = True
    for target in targets:
        await module.attack(target)

    # Reaching this point without an exception is the whole test
    assert True
    await crawler.close()
async def test_url_detection():
    """Legacy mod_wapp: detect an application from its URL regex (OWA path)."""
    # Test if application is detected using its url regex
    respx.get(
        "http://perdu.com/owa/auth/logon.aspx"
    ).mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> *    <----- vous êtes ici</pre></strong></body></html>"
    ))
    persister = FakePersister()
    request = Request("http://perdu.com/owa/auth/logon.aspx")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()
    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2
    await module.attack(request)
    assert persister.additionals
    # Third recorded finding is the URL-based Outlook Web App detection
    assert persister.additionals[
        2] == '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"]}'
    await crawler.close()
async def test_false_positive():
    """Legacy mod_wapp: a signature-less page must produce no finding."""
    # Test for false positive
    # The technology database may be fetched live; let that host through
    respx.route(host="raw.githubusercontent.com").pass_through()
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> *    <----- vous êtes ici</pre></strong></body></html>"
    ))
    persister = FakePersister()
    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()
    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2
    await module.attack(request)
    # Nothing should have been recorded
    assert not persister.additionals
    await crawler.close()
async def test_whole_stuff():
    """mod_shellshock must flag only the vulnerable endpoint."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(200))
    # The callback decides per-URL whether the shellshock payload "executes"
    respx.get(url__regex=r"http://perdu.com/.*").mock(side_effect=shellshock_callback)

    persister = AsyncMock()
    targets = []
    for position, url in enumerate(("http://perdu.com/", "http://perdu.com/vuln/"), start=1):
        target = Request(url)
        target.path_id = position
        target.status = 200
        target.set_headers({"content-type": "text/html"})
        targets.append(target)

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_shellshock(crawler, persister, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    module.do_get = True
    for target in targets:
        await module.attack(target)

    assert persister.add_payload.call_count == 1
    report = persister.add_payload.call_args_list[0][1]
    assert report["module"] == "shellshock"
    assert report["category"] == _("Command execution")
    assert report["request"].url == "http://perdu.com/vuln/"
    await crawler.close()
async def test_whole_stuff():
    """ModuleMethods must only report the path where PUT is allowed."""
    # Root allows the usual safe methods; /dav/ additionally allows PUT
    respx.options("http://perdu.com/").mock(return_value=httpx.Response(
        200, text="Default page", headers={"Allow": "GET,POST,HEAD"}))
    respx.options("http://perdu.com/dav/").mock(return_value=httpx.Response(
        200, text="Private section", headers={"Allow": "GET,POST,HEAD,PUT"}))

    persister = AsyncMock()
    targets = []
    for position, url in enumerate(("http://perdu.com/", "http://perdu.com/dav/"), start=1):
        target = Request(url)
        target.path_id = position
        target.status = 200
        target.set_headers({"content-type": "text/html"})
        targets.append(target)

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = ModuleMethods(crawler, persister, {"timeout": 10, "level": 2}, Event())
    module.do_get = True
    for target in targets:
        await module.attack(target)

    assert persister.add_payload.call_count == 1
    assert "http://perdu.com/dav/" in persister.add_payload.call_args_list[0][1]["info"]
    await crawler.close()
async def test_true_positive():
    """A MySQL syntax error in the response must be flagged as SQL injection."""
    # The untouched parameter answers normally...
    respx.get("http://perdu.com/?foo=bar").mock(
        return_value=httpx.Response(200, text="Hi there"))
    # ...while any injected value triggers a typical MySQL error message
    mysql_error = (
        "You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version "
        "for the right syntax to use near '\\\"\\'' at line 1"
    )
    respx.get(url__regex=r"http://perdu\.com/\?foo=.*").mock(
        return_value=httpx.Response(200, text=mysql_error))

    persister = AsyncMock()
    target = Request("http://perdu.com/?foo=bar")
    target.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_sql(crawler, persister, {"timeout": 10, "level": 1}, Event())
    module.verbose = 2
    module.do_post = True
    await module.attack(target)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["module"] == "sql"
    assert persister.add_payload.call_args_list[0][1]["category"] == _("SQL Injection")
    await crawler.close()
async def test_wp_version_no_file():
    """WordPress is detected from the page body but no version file exists.

    Plugin and theme detection must still each run exactly once, and since no
    version could be fingerprinted, nothing must be reported to the persister.
    """
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va wordpress vous aider</h2> \
        Wordpress wordpress WordPress\
        <strong><pre> *    <----- vous êtes ici</pre></strong></body></html>"
    ))
    # Every other resource (readme, version files, ...) is missing
    respx.get(url__regex=r"http://perdu.com/.*?").mock(
        return_value=httpx.Response(404))
    persister = AsyncMock()
    request = Request("http://perdu.com")
    request.path_id = 1
    crawler = AsyncCrawler(Request("http://perdu.com"))
    options = {"timeout": 10, "level": 2}
    with mock.patch.object(ModuleWpEnum, "detect_plugin", AsyncMock()) as mock_detect_plugin, \
            mock.patch.object(ModuleWpEnum, "detect_theme", AsyncMock()) as mock_detect_theme:
        module = ModuleWpEnum(crawler, persister, options, Event())
        await module.attack(request)
        mock_detect_plugin.assert_called_once()
        mock_detect_theme.assert_called_once()
    assert persister.add_payload.call_count == 0
    # Fix: release the crawler's HTTP session like every sibling test does
    await crawler.close()
async def test_analyze_file_detection():
    """_analyze_file must store the versions reported by _find_technology."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(200, text="foobar"))
    persister = AsyncMock()
    # Htp module keeps its database under ~/.wapiti/config
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1
    techno = "techno"
    # Doubly-encoded JSON string, i.e. '{"versions": ["1.2", "1.2.1"]}'
    techno_versions = '"{\\"versions\\": [\\"1.2\\", \\"1.2.1\\"]}"'
    with mock.patch.object(ModuleHtp, "_find_technology", return_value=(techno, techno_versions)):
        crawler = AsyncCrawler(Request("http://perdu.com/"))
        options = {"timeout": 10, "level": 2}
        module_htp = ModuleHtp(crawler, persister, options, Event())
        await module_htp._analyze_file(Request("http://perdu.com/"))
        assert len(module_htp.tech_versions) == 1
        assert module_htp.tech_versions.get(techno) is not None
        assert module_htp.tech_versions.get(techno) == [["1.2", "1.2.1"]]
        # Fix: the crawler was previously leaked; close its HTTP session
        await crawler.close()
async def test_no_crash():
    """mod_file must survive GET/POST/file parameters without raising."""
    persister = FakePersister()

    plain = Request("http://127.0.0.1:65085/empty.html")
    plain.path_id = 1
    persister.requests.append(plain)

    with_params = Request(
        "http://127.0.0.1:65085/empty.html?foo=bar",
        post_params=[["x", "y"]],
        file_params=[["file", ("fname", "content", "text/plain")]])
    with_params.path_id = 2
    persister.requests.append(with_params)

    crawler = AsyncCrawler("http://127.0.0.1:65085/")
    module = mod_file(crawler, persister, Mock(), {"timeout": 10, "level": 2}, Event())
    module.do_post = False
    for stored_request in persister.requests:
        await module.attack(stored_request)

    # Surviving every attack without an exception is the whole point
    assert True
    await crawler.close()
async def test_script_detection():
    """Detect an application (and its version) from a <script src> regex."""
    # Test if application is detected using its script regex
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre> *    <----- vous êtes ici</pre></strong> \
            <script src=\"http://chartjs.org/dist/1.4.2/Chart.js\"></script>\
            </body></html>"
        )
    )
    persister = AsyncMock()
    # Wapp technology database lives under ~/.wapiti/config
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = mod_wapp(crawler, persister, options, Event())
    module.verbose = 2
    await module.attack(request)
    assert persister.add_payload.call_count
    # Version must be parsed out of the script's URL path
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["1.4.2"], "name": "Chart.js", "categories": ["JavaScript graphics"]}'
    )
    await crawler.close()
async def test_cookies_detection():
    """Detect an application from its Set-Cookie regex (CodeIgniter token)."""
    # Test if application is detected using its cookies regex
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> *    <----- vous êtes ici</pre></strong> \
        </body></html>",
        headers={"Set-Cookie": "ci_csrf_token=4.1"}))
    persister = AsyncMock()
    # Wapp technology database lives under ~/.wapiti/config
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)
    assert persister.add_payload.call_count
    # The ci_csrf_token cookie implies CodeIgniter 2+
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["2+"], "name": "CodeIgniter", "categories": ["Web frameworks"], "groups": ["Web development"]}'
    )
    await crawler.close()
async def test_whole_stuff():
    """mod_crlf must spot a header injected through the 'foo' parameter."""
    # Untouched parameters get a plain response...
    respx.get(url__regex=r"http://perdu\.com/\?a=.*&foo=bar").mock(
        return_value=httpx.Response(200, text="Hello there"))
    # ...while a payload in foo is reflected into a response header.
    respx.get(url__regex=r"http://perdu.com/\?a=b*&foo=.*wapiti.*").mock(
        return_value=httpx.Response(200, text="Hello there", headers={"wapiti": "3.0.5 version"}))

    persister = AsyncMock()
    target = Request("http://perdu.com/?a=b&foo=bar")
    target.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_crlf(crawler, persister, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    module.do_get = True
    await module.attack(target)

    assert persister.add_payload.call_count == 1
    report = persister.add_payload.call_args_list[0][1]
    assert report["module"] == "crlf"
    assert report["category"] == _("CRLF Injection")
    assert report["parameter"] == "foo"
    await crawler.close()
async def test_multi_detection():
    """WordPress must be detected through several signals at once.

    Meta generator tag, wp-embed script path and the wp-json Link header all
    point at WordPress; the last reported finding carries the version.
    """
    # Test if application is detected using several ways
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title> \
        <meta name=\"generator\" content=\"WordPress 5.6.1\"> \
        </head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> *    <----- vous êtes ici</pre></strong> \
        <script type=\"text/javascript\" src=\"https://perdu.com/wp-includes/js/wp-embed.min.js\" ></script> \
        </body></html>",
        headers={
            "link": "<http://perdu.com/wp-json/>; rel=\"https://api.w.org/\""
        }))
    persister = AsyncMock()
    # Wapp technology database lives under ~/.wapiti/config
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)
    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[-1][1]["info"] == (
        '{"versions": ["5.6.1"], "name": "WordPress", "categories": ["CMS", "Blogs"], "groups": ["Content"]}'
    )
    await crawler.close()
async def test_detection():
    """ModuleExec must detect command execution through the ';env;' payload."""
    # A payload containing "env" leaks environment variables...
    respx.get(url__regex=r"http://perdu\.com/\?vuln=.*env.*").mock(
        return_value=httpx.Response(200, text="PATH=/bin:/usr/bin;PWD=/"))
    # ...while any other value answers normally.
    respx.get(url__regex=r"http://perdu\.com/\?vuln=.*").mock(
        return_value=httpx.Response(200, text="Hello there"))

    persister = AsyncMock()
    target = Request("http://perdu.com/?vuln=hello")
    target.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = ModuleExec(crawler, persister, {"timeout": 10, "level": 1}, Event())
    await module.attack(target)

    assert persister.add_payload.call_count == 1
    report = persister.add_payload.call_args_list[0][1]
    assert report["module"] == "exec"
    assert report["category"] == _("Command execution")
    assert report["request"].get_params == [["vuln", ";env;"]]
    await crawler.close()
async def test_url_detection():
    """ModuleWapp: detect an application from its URL regex (OWA login path)."""
    # Test if application is detected using its url regex
    respx.get(
        "http://perdu.com/owa/auth/logon.aspx"
    ).mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> *    <----- vous êtes ici</pre></strong></body></html>"
    ))
    persister = AsyncMock()
    # Wapp technology database lives under ~/.wapiti/config
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    request = Request("http://perdu.com/owa/auth/logon.aspx")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    module = ModuleWapp(crawler, persister, options, Event())
    await module.attack(request)
    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["module"] == "wapp"
    assert persister.add_payload.call_args_list[0][1]["category"] == _(
        "Fingerprint web technology")
    # Third finding is the URL-based Outlook Web App detection
    assert persister.add_payload.call_args_list[2][1]["info"] == (
        '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"], "groups": ["Communication"]}'
    )
    await crawler.close()
async def test_cookieflags():
    """ModuleCookieflags must report each missing HttpOnly/Secure flag per cookie."""
    # Three cookies: one missing HttpOnly, one fully flagged, one missing both
    respx.get("https://github.com/").mock(return_value=httpx.Response(
        200,
        headers=[
            ("set-cookie", "_octo=31337; Path=/; Domain=github.com; Secure; SameSite=Lax"),
            ("set-cookie", "logged_in=no; Path=/; Domain=github.com; HttpOnly; Secure; SameSite=Lax"),
            ("set-cookie", "foo=bar; Path=/; Domain=github.com;"),
        ]))

    persister = AsyncMock()
    target = Request("https://github.com/")
    target.path_id = 1
    crawler = AsyncCrawler(Request("https://github.com/"), timeout=1)
    # Fetch once so the crawler's cookie jar is populated
    await crawler.async_send(target)

    module = ModuleCookieflags(crawler, persister, {"timeout": 10, "level": 2}, asyncio.Event())
    await module.attack(target)

    assert persister.add_payload.call_count == 3
    assert persister.add_payload.call_args_list[0][1]["module"] == "cookieflags"
    reported_flags = []
    for call in persister.add_payload.call_args_list:
        description, cookie_name = call[1]["info"].split(":")
        reported_flags.append(
            (cookie_name.strip(), re.search(r"(HttpOnly|Secure)", description).group()))
    assert reported_flags == [('_octo', 'HttpOnly'), ('foo', 'HttpOnly'), ('foo', 'Secure')]
    await crawler.close()
async def test_true_positive():
    """Legacy mod_sql: a response carrying a MySQL syntax error must be flagged."""
    # The untouched parameter answers normally...
    respx.get("http://perdu.com/?foo=bar").mock(
        return_value=httpx.Response(200, text="Hi there"))
    # ...while any injected value triggers a typical MySQL error message
    mysql_error = (
        "You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version "
        "for the right syntax to use near '\\\"\\'' at line 1"
    )
    respx.get(url__regex=r"http://perdu\.com/\?foo=.*").mock(
        return_value=httpx.Response(200, text=mysql_error))

    persister = FakePersister()
    target = Request("http://perdu.com/?foo=bar")
    target.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_sql(crawler, persister, Mock(), {"timeout": 10, "level": 1}, Event())
    module.verbose = 2
    module.do_post = True
    await module.attack(target)

    assert persister.vulnerabilities
    await crawler.close()
async def test_no_drupal():
    """Drupal enumeration must stay silent when no Drupal marker is found."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre> *    <----- vous êtes ici</pre></strong></body></html>"
    ))
    # Every probed Drupal resource is missing
    respx.get(url__regex=r"http://perdu.com/.*?").mock(
        return_value=httpx.Response(404))
    persister = AsyncMock()
    request = Request("http://perdu.com/")
    request.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()
    module = mod_drupal_enum(crawler, persister, logger, options, Event())
    module.verbose = 2
    await module.attack(request)
    # No Drupal -> nothing persisted
    assert not persister.add_payload.call_count
    await crawler.close()