def test_whole_stuff():
    """Attack every kind of parameter without crashing (sync backup module)."""
    # The backup file exists and leaks credentials
    responses.add(responses.GET, url="http://perdu.com/config.php.bak", body="password = 123456")
    responses.add(
        responses.GET,
        url="http://perdu.com/config.php",
        body="Hello there",
    )
    # Any other config.php* candidate is missing
    responses.add(responses.GET, url=re.compile(r"http://perdu.com/config.php.+"), status=404)

    fake_persister = FakePersister()
    target = Request("http://perdu.com/config.php")
    target.path_id = 1
    target.set_headers({"content-type": "text/html"})

    http_crawler = Crawler("http://perdu.com/", timeout=1)
    module = mod_backup(http_crawler, fake_persister, Mock(), {"timeout": 10, "level": 2})
    module.verbose = 2
    module.do_get = True
    module.attack(target)

    assert fake_persister.vulnerabilities
    assert fake_persister.vulnerabilities[0].url == "http://perdu.com/config.php.bak"
async def test_whole_stuff():
    """Attack every kind of parameter without crashing (async backup module)."""
    respx.get("http://perdu.com/config.php.bak").mock(
        return_value=httpx.Response(200, text="password = 123456")
    )
    respx.get("http://perdu.com/config.php").mock(
        return_value=httpx.Response(200, text="Hello there")
    )
    # Everything else on the host 404s
    respx.get(url__startswith="http://perdu.com/").mock(return_value=httpx.Response(404))

    mock_persister = AsyncMock()
    target = Request("http://perdu.com/config.php")
    target.path_id = 1
    target.set_headers({"content-type": "text/html"})

    http_crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = ModuleBackup(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
    module.do_get = True
    await module.attack(target)

    first_call_kwargs = mock_persister.add_payload.call_args_list[0][1]
    assert first_call_kwargs["module"] == "backup"
    assert first_call_kwargs["payload_type"] == "vulnerability"
    assert first_call_kwargs["request"].url == "http://perdu.com/config.php.bak"
    await http_crawler.close()
def test_html_detection():
    """mod_wapp should identify Atlassian FishEye from the page HTML."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=(
            "<html><head><title>FishEye 2.8.4</title> "
            "</head><body><h1>Perdu sur l'Internet ?</h1> "
            "<h2>Pas de panique, on va vous aider</h2> "
            "</body></html>"
        ),
    )

    fake_persister = FakePersister()
    root_request = Request("http://perdu.com/")
    root_request.path_id = 1
    fake_persister.requests.append(root_request)

    module = mod_wapp(Crawler("http://perdu.com/"), fake_persister, Mock(), {"timeout": 10, "level": 2})
    module.verbose = 2
    for __ in module.attack():
        pass

    assert fake_persister.additionals
    assert fake_persister.additionals[0] == (
        '{"versions": ["2.8.4"], "name": "Atlassian FishEye", "categories": ["Development"]}'
    )
def test_implies_detection():
    """Implied applications (Backdrop implies PHP) should both be reported."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=(
            "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> "
            "<h2>Pas de panique, on va vous aider</h2> "
            "<strong><pre> * <----- vous êtes ici</pre></strong> "
            "</body></html>"
        ),
        headers={"X-Generator": "Backdrop CMS 4.5"},
    )

    fake_persister = FakePersister()
    root_request = Request("http://perdu.com")
    root_request.path_id = 1
    fake_persister.requests.append(root_request)

    module = mod_wapp(Crawler("http://perdu.com"), fake_persister, Mock(), {"timeout": 10, "level": 2})
    module.verbose = 2
    for __ in module.attack():
        pass

    assert fake_persister.additionals
    assert fake_persister.additionals[0] == '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}'
    assert fake_persister.additionals[1] == '{"versions": [], "name": "PHP", "categories": ["Programming languages"]}'
def test_url_detection():
    """mod_wapp should identify Outlook Web App from the request URL alone."""
    responses.add(
        responses.GET,
        url="http://perdu.com/owa/auth/logon.aspx",
        body=(
            "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> "
            "<h2>Pas de panique, on va vous aider</h2> "
            "<strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
        ),
    )

    fake_persister = FakePersister()
    owa_request = Request("http://perdu.com/owa/auth/logon.aspx")
    owa_request.path_id = 1
    fake_persister.requests.append(owa_request)

    module = mod_wapp(Crawler("http://perdu.com/"), fake_persister, Mock(), {"timeout": 10, "level": 2})
    module.verbose = 2
    for __ in module.attack():
        pass

    assert fake_persister.additionals
    assert fake_persister.additionals[2] == '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"]}'
async def test_whole_stuff():
    """Attack every parameter with CRLF payloads without crashing (async)."""
    respx.get(url__regex=r"http://perdu\.com/\?a=.*&foo=bar").mock(
        return_value=httpx.Response(200, text="Hello there")
    )
    # The reflected header mod_crlf is looking for
    respx.get(url__regex=r"http://perdu.com/\?a=b*&foo=.*wapiti.*").mock(
        return_value=httpx.Response(200, text="Hello there", headers={"wapiti": "3.0.5 version"})
    )

    mock_persister = AsyncMock()
    target = Request("http://perdu.com/?a=b&foo=bar")
    target.path_id = 1

    http_crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_crlf(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    module.do_get = True
    await module.attack(target)

    assert mock_persister.add_payload.call_count == 1
    first_call_kwargs = mock_persister.add_payload.call_args_list[0][1]
    assert first_call_kwargs["module"] == "crlf"
    assert first_call_kwargs["category"] == _("CRLF Injection")
    assert first_call_kwargs["parameter"] == "foo"
    await http_crawler.close()
def test_cookies_detection():
    """mod_wapp should identify CodeIgniter from its CSRF cookie name."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=(
            "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> "
            "<h2>Pas de panique, on va vous aider</h2> "
            "<strong><pre> * <----- vous êtes ici</pre></strong> "
            "</body></html>"
        ),
        headers={"Set-Cookie": "ci_csrf_token=4.1"},
    )

    fake_persister = FakePersister()
    root_request = Request("http://perdu.com/")
    root_request.path_id = 1
    fake_persister.requests.append(root_request)

    module = mod_wapp(Crawler("http://perdu.com/"), fake_persister, Mock(), {"timeout": 10, "level": 2})
    module.verbose = 2
    for __ in module.attack():
        pass

    assert fake_persister.additionals
    assert fake_persister.additionals[0] == (
        '{"versions": ["2+"], "name": "CodeIgniter", "categories": ["Web frameworks"]}'
    )
async def test_analyze_file_detection():
    """_analyze_file should record the version list reported for a technology."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(200, text="foobar"))

    mock_persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    mock_persister.CONFIG_DIR = os.path.join(home_dir, ".wapiti", "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    techno = "techno"
    # Double-encoded JSON payload, as returned by the HTP database
    techno_versions = '"{\\"versions\\": [\\"1.2\\", \\"1.2.1\\"]}"'

    with mock.patch.object(ModuleHtp, "_find_technology", return_value=(techno, techno_versions)):
        http_crawler = AsyncCrawler(Request("http://perdu.com/"))
        module_htp = ModuleHtp(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
        await module_htp._analyze_file(Request("http://perdu.com/"))

    assert len(module_htp.tech_versions) == 1
    assert module_htp.tech_versions.get(techno) is not None
    assert module_htp.tech_versions.get(techno) == [["1.2", "1.2.1"]]
async def test_script_detection():
    """ModuleWapp should identify Chart.js from a script src URL."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text=(
                "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> "
                "<h2>Pas de panique, on va vous aider</h2> "
                "<strong><pre> * <----- vous êtes ici</pre></strong> "
                '<script src="http://chartjs.org/dist/1.4.2/Chart.js"></script> '
                "</body></html>"
            ),
        )
    )

    mock_persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    mock_persister.CONFIG_DIR = os.path.join(home_dir, ".wapiti", "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    http_crawler = AsyncCrawler(Request("http://perdu.com/"))
    module = ModuleWapp(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
    await module.attack(target)

    assert mock_persister.add_payload.call_count
    assert mock_persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["1.4.2"], "name": "Chart.js", "categories": ["JavaScript graphics"], "groups": ["Web development"]}'
    )
    await http_crawler.close()
async def test_true_positive():
    """A MySQL error message in the response must be reported as SQL injection."""
    respx.get("http://perdu.com/?foo=bar").mock(
        return_value=httpx.Response(200, text="Hi there")
    )
    sql_error_text = (
        "You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version "
        "for the right syntax to use near '\\\"\\'' at line 1"
    )
    respx.get(url__regex=r"http://perdu\.com/\?foo=.*").mock(
        return_value=httpx.Response(200, text=sql_error_text)
    )

    mock_persister = AsyncMock()
    target = Request("http://perdu.com/?foo=bar")
    target.path_id = 1

    http_crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_sql(http_crawler, mock_persister, {"timeout": 10, "level": 1}, Event())
    module.verbose = 2
    module.do_post = True
    await module.attack(target)

    assert mock_persister.add_payload.call_count
    first_call_kwargs = mock_persister.add_payload.call_args_list[0][1]
    assert first_call_kwargs["module"] == "sql"
    assert first_call_kwargs["category"] == _("SQL Injection")
    await http_crawler.close()
def test_no_wordpress():
    """A page with no WordPress marker must produce no findings."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=(
            "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> "
            "<h2>Pas de panique, on va vous aider</h2> "
            "<strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
        ),
    )

    fake_persister = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1

    module = mod_wp_enum(Crawler("http://perdu.com/"), fake_persister, Mock(), {"timeout": 10, "level": 2})
    module.verbose = 2
    module.attack(target)

    assert not fake_persister.additionals
async def test_multi_versions_detected():
    """Every Drupal version matching MAINTAINERS.txt must be reported."""
    base_dir = os.path.dirname(sys.modules["wapitiCore"].__file__)
    test_directory = os.path.join(base_dir, "..", "tests/data/drupal/")

    with open(path_join(test_directory, "MAINTAINERS.txt"), errors="ignore") as maintainers:
        maintainers_data = maintainers.read()

    # /sites/ answering 200 tells the module Drupal is in use
    respx.get("http://perdu.com/sites/").mock(return_value=httpx.Response(200))
    # Serve the real maintainers file content
    respx.get("http://perdu.com/core/MAINTAINERS.txt").mock(
        return_value=httpx.Response(200, text=maintainers_data)
    )
    respx.get(url__regex=r"http://perdu.com/.*?").mock(return_value=httpx.Response(404))

    mock_persister = AsyncMock()
    target = Request("http://perdu.com/")
    target.path_id = 1

    http_crawler = AsyncCrawler(Request("http://perdu.com/"))
    module = ModuleDrupalEnum(http_crawler, mock_persister, {"timeout": 10, "level": 2, "tasks": 20}, Event())
    await module.attack(target)

    assert mock_persister.add_payload.call_count == 1
    assert mock_persister.add_payload.call_args_list[0][1]["info"] == (
        '{"name": "Drupal", "versions": ["8.0.0-beta4", "8.0.0-beta5", "8.0.0-beta6"], "categories": ["CMS Drupal"]}'
    )
    await http_crawler.close()
async def test_version_not_detected():
    """An edited CHANGELOG.txt must yield an empty version string."""
    base_dir = os.path.dirname(sys.modules["wapitiCore"].__file__)
    test_directory = os.path.join(base_dir, "..", "tests/data/drupal/")

    with open(path_join(test_directory, "CHANGELOG_EDITED.txt"), errors="ignore") as changelog:
        changelog_data = changelog.read()

    # /sites/ answering 200 tells the module Drupal is in use
    respx.get("http://perdu.com/sites/").mock(return_value=httpx.Response(200))
    # Serve the tampered changelog so no version can be extracted
    respx.get("http://perdu.com/CHANGELOG.txt").mock(
        return_value=httpx.Response(200, text=changelog_data)
    )
    respx.get(url__regex=r"http://perdu.com/.*?").mock(return_value=httpx.Response(404))

    mock_persister = AsyncMock()
    target = Request("http://perdu.com/")
    target.path_id = 1

    http_crawler = AsyncCrawler(Request("http://perdu.com/"))
    module = ModuleDrupalEnum(http_crawler, mock_persister, {"timeout": 10, "level": 2, "tasks": 20}, Event())
    await module.attack(target)

    assert mock_persister.add_payload.call_count == 1
    assert mock_persister.add_payload.call_args_list[0][1]["info"] == (
        '{"name": "Drupal", "versions": [""], "categories": ["CMS Drupal"]}'
    )
    await http_crawler.close()
async def test_whole_stuff():
    """Attack every kind of parameter without crashing (FakePersister variant)."""
    respx.get("http://perdu.com/config.php.bak").mock(
        return_value=httpx.Response(200, text="password = 123456")
    )
    respx.get("http://perdu.com/config.php").mock(
        return_value=httpx.Response(200, text="Hello there")
    )
    # Everything else on the host 404s
    respx.get(url__startswith="http://perdu.com/").mock(return_value=httpx.Response(404))

    fake_persister = FakePersister()
    target = Request("http://perdu.com/config.php")
    target.path_id = 1
    target.set_headers({"content-type": "text/html"})

    http_crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_backup(http_crawler, fake_persister, Mock(), {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    module.do_get = True
    await module.attack(target)

    assert fake_persister.vulnerabilities
    assert fake_persister.vulnerabilities[0].url == "http://perdu.com/config.php.bak"
    await http_crawler.close()
async def test_url_detection():
    """mod_wapp should identify Outlook Web App from the request URL (async)."""
    respx.get("http://perdu.com/owa/auth/logon.aspx").mock(
        return_value=httpx.Response(
            200,
            text=(
                "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> "
                "<h2>Pas de panique, on va vous aider</h2> "
                "<strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
            ),
        )
    )

    mock_persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    mock_persister.CONFIG_DIR = os.path.join(home_dir, ".wapiti", "config")

    target = Request("http://perdu.com/owa/auth/logon.aspx")
    target.path_id = 1

    http_crawler = AsyncCrawler("http://perdu.com/")
    module = mod_wapp(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    await module.attack(target)

    assert mock_persister.add_payload.call_count
    assert mock_persister.add_payload.call_args_list[0][1]["module"] == "wapp"
    assert mock_persister.add_payload.call_args_list[0][1]["category"] == _("Fingerprint web technology")
    assert mock_persister.add_payload.call_args_list[2][1]["info"] == (
        '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"]}'
    )
    await http_crawler.close()
async def test_cookies_detection():
    """ModuleWapp should identify CodeIgniter from its CSRF cookie name."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text=(
                "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> "
                "<h2>Pas de panique, on va vous aider</h2> "
                "<strong><pre> * <----- vous êtes ici</pre></strong> "
                "</body></html>"
            ),
            headers={"Set-Cookie": "ci_csrf_token=4.1"},
        )
    )

    mock_persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    mock_persister.CONFIG_DIR = os.path.join(home_dir, ".wapiti", "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    http_crawler = AsyncCrawler(Request("http://perdu.com/"))
    module = ModuleWapp(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
    await module.attack(target)

    assert mock_persister.add_payload.call_count
    assert mock_persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["2+"], "name": "CodeIgniter", "categories": ["Web frameworks"], "groups": ["Web development"]}'
    )
    await http_crawler.close()
async def test_html_detection():
    """mod_wapp should identify Atlassian FishEye from the page HTML (async)."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text=(
                "<html><head><title>FishEye 2.8.4</title> "
                "</head><body><h1>Perdu sur l'Internet ?</h1> "
                "<h2>Pas de panique, on va vous aider</h2> "
                "</body></html>"
            ),
        )
    )

    mock_persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    mock_persister.CONFIG_DIR = os.path.join(home_dir, ".wapiti", "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    http_crawler = AsyncCrawler("http://perdu.com/")
    module = mod_wapp(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    await module.attack(target)

    assert mock_persister.add_payload.call_count
    assert mock_persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["2.8.4"], "name": "Atlassian FishEye", "categories": ["Development"]}'
    )
    await http_crawler.close()
async def test_multi_detection():
    """WordPress should be detected via meta tag, script URL and link header."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text=(
                "<html><head><title>Vous Etes Perdu ?</title> "
                '<meta name="generator" content="WordPress 5.6.1"> '
                "</head><body><h1>Perdu sur l'Internet ?</h1> "
                "<h2>Pas de panique, on va vous aider</h2> "
                "<strong><pre> * <----- vous êtes ici</pre></strong> "
                '<script type="text/javascript" src="https://perdu.com/wp-includes/js/wp-embed.min.js" ></script> '
                "</body></html>"
            ),
            headers={"link": '<http://perdu.com/wp-json/>; rel="https://api.w.org/"'},
        )
    )

    mock_persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    mock_persister.CONFIG_DIR = os.path.join(home_dir, ".wapiti", "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    http_crawler = AsyncCrawler(Request("http://perdu.com/"))
    module = ModuleWapp(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
    await module.attack(target)

    assert mock_persister.add_payload.call_count
    assert mock_persister.add_payload.call_args_list[-1][1]["info"] == (
        '{"versions": ["5.6.1"], "name": "WordPress", "categories": ["CMS", "Blogs"], "groups": ["Content"]}'
    )
    await http_crawler.close()
async def test_no_drupal():
    """No Drupal marker at all: the enumeration module must report nothing."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text=(
                "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> "
                "<h2>Pas de panique, on va vous aider</h2> "
                "<strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
            ),
        )
    )
    respx.get(url__regex=r"http://perdu.com/.*?").mock(return_value=httpx.Response(404))

    mock_persister = AsyncMock()
    target = Request("http://perdu.com/")
    target.path_id = 1

    http_crawler = AsyncCrawler("http://perdu.com/")
    module = mod_drupal_enum(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    await module.attack(target)

    assert not mock_persister.add_payload.call_count
    await http_crawler.close()
async def test_implies_detection():
    """Implied applications (Backdrop implies PHP) should be reported (async)."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text=(
                "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> "
                "<h2>Pas de panique, on va vous aider</h2> "
                "<strong><pre> * <----- vous êtes ici</pre></strong> "
                "</body></html>"
            ),
            headers={"X-Generator": "Backdrop CMS 4.5"},
        )
    )

    mock_persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    mock_persister.CONFIG_DIR = os.path.join(home_dir, ".wapiti", "config")

    target = Request("http://perdu.com")
    target.path_id = 1

    http_crawler = AsyncCrawler(Request("http://perdu.com"))
    module = ModuleWapp(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
    await module.attack(target)

    assert mock_persister.add_payload.call_count == 3
    assert mock_persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"], "groups": ["Content"]}'
    )
    assert mock_persister.add_payload.call_args_list[-1][1]["info"] == (
        '{"versions": [], "name": "PHP", "categories": ["Programming languages"], "groups": ["Web development"]}'
    )
    await http_crawler.close()
def test_meta_detection():
    """mod_wapp should identify Planet from the generator meta tag."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=(
            "<html><head><title>Vous Etes Perdu ?</title> "
            '<meta name="generator" content="Planet/1.6.2"> '
            "</head><body><h1>Perdu sur l'Internet ?</h1> "
            "<h2>Pas de panique, on va vous aider</h2> "
            "<strong><pre> * <----- vous êtes ici</pre></strong> "
            "</body></html>"
        ),
    )

    fake_persister = FakePersister()
    root_request = Request("http://perdu.com/")
    root_request.path_id = 1
    fake_persister.requests.append(root_request)

    module = mod_wapp(Crawler("http://perdu.com/"), fake_persister, Mock(), {"timeout": 10, "level": 2})
    module.verbose = 2
    for __ in module.attack():
        pass

    assert fake_persister.additionals
    assert fake_persister.additionals[0] == '{"versions": ["1.6.2"], "name": "Planet", "categories": ["Feed readers"]}'
async def test_cookieflags():
    """Missing HttpOnly/Secure flags must be reported once per cookie."""
    respx.get("https://github.com/").mock(
        return_value=httpx.Response(
            200,
            headers=[
                ("set-cookie", "_octo=31337; Path=/; Domain=github.com; Secure; SameSite=Lax"),
                ("set-cookie", "logged_in=no; Path=/; Domain=github.com; HttpOnly; Secure; SameSite=Lax"),
                ("set-cookie", "foo=bar; Path=/; Domain=github.com;"),
            ],
        )
    )

    mock_persister = AsyncMock()
    target = Request("https://github.com/")
    target.path_id = 1

    http_crawler = AsyncCrawler("https://github.com/", timeout=1)
    # Fill the crawler's cookie jar before running the module
    await http_crawler.async_send(target)

    module = ModuleCookieflags(http_crawler, mock_persister, {"timeout": 10, "level": 2}, asyncio.Event())
    await module.attack(target)

    assert mock_persister.add_payload.call_count == 3
    assert mock_persister.add_payload.call_args_list[0][1]["module"] == "cookieflags"

    reported_flags = []
    for call in mock_persister.add_payload.call_args_list:
        description, cookie_name = call[1]["info"].split(":")
        reported_flags.append((cookie_name.strip(), re.search(r"(HttpOnly|Secure)", description).group()))

    assert reported_flags == [('_octo', 'HttpOnly'), ('foo', 'HttpOnly'), ('foo', 'Secure')]
    await http_crawler.close()
def test_false_positive():
    """A page with no technology markers must produce no findings."""
    # Let the real Wappalyzer database be fetched from GitHub
    responses.add_passthru(
        "https://raw.githubusercontent.com/AliasIO/wappalyzer/master/src/technologies.json"
    )
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=(
            "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> "
            "<h2>Pas de panique, on va vous aider</h2> "
            "<strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
        ),
    )

    fake_persister = FakePersister()
    root_request = Request("http://perdu.com/")
    root_request.path_id = 1
    fake_persister.requests.append(root_request)

    module = mod_wapp(Crawler("http://perdu.com/"), fake_persister, Mock(), {"timeout": 10, "level": 2})
    module.verbose = 2
    for __ in module.attack():
        pass

    assert not fake_persister.additionals
async def test_detection():
    """Command execution should be detected through the env payload output."""
    # The env payload leaks environment variables in the response
    respx.get(url__regex=r"http://perdu\.com/\?vuln=.*env.*").mock(
        return_value=httpx.Response(200, text="PATH=/bin:/usr/bin;PWD=/")
    )
    respx.get(url__regex=r"http://perdu\.com/\?vuln=.*").mock(
        return_value=httpx.Response(200, text="Hello there")
    )

    mock_persister = AsyncMock()
    target = Request("http://perdu.com/?vuln=hello")
    target.path_id = 1

    http_crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_exec(http_crawler, mock_persister, Mock(), {"timeout": 10, "level": 1}, Event())
    module.verbose = 2
    await module.attack(target)

    assert mock_persister.add_payload.call_count == 1
    first_call_kwargs = mock_persister.add_payload.call_args_list[0][1]
    assert first_call_kwargs["module"] == "exec"
    assert first_call_kwargs["category"] == _("Command execution")
    assert first_call_kwargs["request"].get_params == [["vuln", ";env;"]]
    await http_crawler.close()
def test_whole_stuff():
    """Attack every parameter with CRLF payloads without crashing (sync)."""
    responses.add(
        responses.GET,
        re.compile(r"http://perdu.com/\?a=.*&foo=bar"),
        body="Hello there",
    )
    # The reflected header mod_crlf is looking for
    responses.add(
        responses.GET,
        re.compile(r"http://perdu.com/\?a=b*&foo=.*wapiti.*"),
        body="Hello there",
        headers={"wapiti": "3.0.4 version"},
    )

    fake_persister = FakePersister()
    target = Request("http://perdu.com/?a=b&foo=bar")
    target.path_id = 1
    fake_persister.requests.append(target)

    http_crawler = Crawler("http://perdu.com/", timeout=1)
    module = mod_crlf(http_crawler, fake_persister, Mock(), {"timeout": 10, "level": 2})
    module.verbose = 2
    module.do_get = True
    for __ in module.attack():
        pass

    assert fake_persister.vulnerabilities
    assert fake_persister.vulnerabilities[0][0] == "foo"
async def test_false_positive():
    """A page with no technology markers must produce no findings (async)."""
    # Let the real Wappalyzer database be fetched from GitHub
    respx.route(host="raw.githubusercontent.com").pass_through()
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text=(
                "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> "
                "<h2>Pas de panique, on va vous aider</h2> "
                "<strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
            ),
        )
    )

    mock_persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    mock_persister.CONFIG_DIR = os.path.join(home_dir, ".wapiti", "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    http_crawler = AsyncCrawler("http://perdu.com/")
    module = mod_wapp(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    await module.attack(target)

    assert not mock_persister.add_payload.call_count
    await http_crawler.close()
def test_detection():
    """Command execution must be detected and reported (sync exec module)."""
    # The env payload leaks environment variables in the response
    responses.add(
        responses.GET,
        re.compile(r"http://perdu.com/\?vuln=.*env.*"),
        body="PATH=/bin:/usr/bin;PWD=/",
    )
    responses.add(
        responses.GET,
        re.compile(r"http://perdu.com/\?vuln=.*"),
        body="Hello there",
    )

    fake_persister = FakePersister()
    target = Request("http://perdu.com/?vuln=hello")
    target.path_id = 1
    fake_persister.requests.append(target)

    http_crawler = Crawler("http://perdu.com/", timeout=1)
    module = mod_exec(http_crawler, fake_persister, Mock(), {"timeout": 10, "level": 1})
    module.verbose = 2
    for __ in module.attack():
        pass

    assert fake_persister.vulnerabilities
    assert fake_persister.vulnerabilities[0][0] == "vuln"
    assert "env" in fake_persister.vulnerabilities[0][1]
async def test_meta_detection():
    """mod_wapp should identify Planet from the generator meta tag (async)."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text=(
                "<html><head><title>Vous Etes Perdu ?</title> "
                '<meta name="generator" content="Planet/1.6.2"> '
                "</head><body><h1>Perdu sur l'Internet ?</h1> "
                "<h2>Pas de panique, on va vous aider</h2> "
                "<strong><pre> * <----- vous êtes ici</pre></strong> "
                "</body></html>"
            ),
        )
    )

    mock_persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    mock_persister.CONFIG_DIR = os.path.join(home_dir, ".wapiti", "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    http_crawler = AsyncCrawler("http://perdu.com/")
    module = mod_wapp(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    await module.attack(target)

    assert mock_persister.add_payload.call_count
    assert mock_persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["1.6.2"], "name": "Planet", "categories": ["Feed readers"]}'
    )
    await http_crawler.close()
def test_script_detection():
    """mod_wapp should identify Chart.js from a script src URL (sync)."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body=(
            "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> "
            "<h2>Pas de panique, on va vous aider</h2> "
            "<strong><pre> * <----- vous êtes ici</pre></strong> "
            '<script src="http://chartjs.org/dist/1.4.2/Chart.js"></script> '
            "</body></html>"
        ),
    )

    fake_persister = FakePersister()
    root_request = Request("http://perdu.com/")
    root_request.path_id = 1
    fake_persister.requests.append(root_request)

    module = mod_wapp(Crawler("http://perdu.com/"), fake_persister, Mock(), {"timeout": 10, "level": 2})
    module.verbose = 2
    for __ in module.attack():
        pass

    assert fake_persister.additionals
    assert fake_persister.additionals[0] == (
        '{"versions": ["1.4.2"], "name": "Chart.js", "categories": ["JavaScript graphics"]}'
    )
async def test_unregistered_cname():
    """A CNAME pointing at an unregistered domain must be flagged as takeover."""
    respx.route(host="perdu.com").mock(return_value=httpx.Response(200, text="Hello there"))

    async def fake_resolve(qname, rdtype, raise_on_no_answer: bool = False):
        # Canary name: no wildcard responses
        if qname.startswith("supercalifragilisticexpialidocious."):
            return []
        # Only admin.* has a CNAME, pointing at a domain nobody owns
        if qname.startswith("admin.") and rdtype == "CNAME":
            return make_cname_answer("perdu.com", "unregistered.com")
        raise dns.resolver.NXDOMAIN("Yolo")

    with patch("wapitiCore.attack.mod_takeover.dns.asyncresolver.resolve") as mocked_resolve_:
        with patch("wapitiCore.attack.mod_takeover.dns.asyncresolver.Resolver.resolve") as mocked_resolve:
            mocked_resolve.side_effect = fake_resolve
            mocked_resolve_.side_effect = fake_resolve

            mock_persister = AsyncMock()
            all_requests = []
            target = Request("http://perdu.com/")
            target.path_id = 1
            all_requests.append(target)

            http_crawler = AsyncCrawler("http://perdu.com/", timeout=1)
            module = ModuleTakeover(http_crawler, mock_persister, {"timeout": 10, "level": 2}, Event())
            for current_request in all_requests:
                await module.attack(current_request)

            first_call_kwargs = mock_persister.add_payload.call_args_list[0][1]
            assert first_call_kwargs["request"].hostname == "admin.perdu.com"
            assert "unregistered.com" in first_call_kwargs["info"]
            await http_crawler.close()