async def test_whole_stuff():
    """Run the HTTP-methods module over every target without crashing.

    Only the WebDAV endpoint, which advertises the extra PUT method,
    must be reported.
    """
    respx.options("http://perdu.com/").mock(
        return_value=httpx.Response(200, text="Default page", headers={"Allow": "GET,POST,HEAD"})
    )
    respx.options("http://perdu.com/dav/").mock(
        return_value=httpx.Response(200, text="Private section", headers={"Allow": "GET,POST,HEAD,PUT"})
    )

    persister = AsyncMock()
    all_requests = []
    for path_id, url in enumerate(("http://perdu.com/", "http://perdu.com/dav/"), start=1):
        crawled = Request(url)
        crawled.path_id = path_id
        crawled.status = 200
        crawled.set_headers({"content-type": "text/html"})
        all_requests.append(crawled)

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = ModuleMethods(crawler, persister, {"timeout": 10, "level": 2}, Event())
    module.do_get = True

    for crawled in all_requests:
        await module.attack(crawled)

    # Exactly one finding, and it must point at the /dav/ endpoint.
    assert persister.add_payload.call_count == 1
    assert "http://perdu.com/dav/" in persister.add_payload.call_args_list[0][1]["info"]
    await crawler.close()
async def test_whole_stuff():
    """The shellshock module must flag exactly one URL: the /vuln/ endpoint."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(200))
    respx.get(url__regex=r"http://perdu.com/.*").mock(side_effect=shellshock_callback)

    persister = FakePersister()
    for path_id, url in enumerate(("http://perdu.com/", "http://perdu.com/vuln/"), start=1):
        entry = Request(url)
        entry.path_id = path_id
        entry.status = 200
        entry.set_headers({"content-type": "text/html"})
        persister.requests.append(entry)

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    logger = Mock()
    module = mod_shellshock(crawler, persister, logger, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    module.do_get = True

    for entry in persister.requests:
        await module.attack(entry)

    # One vulnerability recorded, attached to the vulnerable endpoint only.
    assert len(persister.vulnerabilities) == 1
    assert persister.vulnerabilities[0][0].url == "http://perdu.com/vuln/"
    await crawler.close()
async def test_whole_stuff():
    """mod_shellshock must record a single payload for the /vuln/ URL."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(200))
    respx.get(url__regex=r"http://perdu.com/.*").mock(side_effect=shellshock_callback)

    persister = AsyncMock()
    all_requests = []
    for path_id, url in enumerate(("http://perdu.com/", "http://perdu.com/vuln/"), start=1):
        target = Request(url)
        target.path_id = path_id
        target.status = 200
        target.set_headers({"content-type": "text/html"})
        all_requests.append(target)

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_shellshock(crawler, persister, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    module.do_get = True

    for target in all_requests:
        await module.attack(target)

    # A single payload, correctly categorised and bound to /vuln/.
    assert persister.add_payload.call_count == 1
    reported = persister.add_payload.call_args_list[0][1]
    assert reported["module"] == "shellshock"
    assert reported["category"] == _("Command execution")
    assert reported["request"].url == "http://perdu.com/vuln/"
    await crawler.close()
def test_whole_stuff():
    """Synchronous shellshock run: only /vuln/ must be reported."""
    responses.add(responses.GET, url="http://perdu.com/", status=200)
    responses.add_callback(
        responses.GET,
        re.compile(r"http://perdu.com/.*"),
        callback=shellshock_callback
    )

    persister = FakePersister()
    for path_id, url in enumerate(("http://perdu.com/", "http://perdu.com/vuln/"), start=1):
        found = Request(url)
        found.path_id = path_id
        found.status = 200
        found.set_headers({"content-type": "text/html"})
        persister.requests.append(found)

    crawler = Crawler("http://perdu.com/", timeout=1)
    logger = Mock()
    module = mod_shellshock(crawler, persister, logger, {"timeout": 10, "level": 2})
    module.verbose = 2
    module.do_get = True

    # attack() is a generator in the synchronous API: drain it completely.
    for __ in module.attack():
        pass

    assert len(persister.vulnerabilities) == 1
    assert persister.vulnerabilities[0][0].url == "http://perdu.com/vuln/"
def test_whole_stuff():
    """Attack every kind of parameter without crashing (sync htaccess).

    The 401-protected /admin/ endpoint answers 200 to the custom "ABC"
    verb, which is exactly the bypass the module should report.
    """
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="Default page"
    )
    responses.add(
        responses.GET,
        url="http://perdu.com/admin/",
        body="Private section",
        status=401
    )
    responses.add(
        "ABC",
        url="http://perdu.com/admin/",
        body="Hello there"
    )

    persister = FakePersister()
    for path_id, (url, status) in enumerate(
            (("http://perdu.com/", 200), ("http://perdu.com/admin/", 401)), start=1):
        crawled = Request(url)
        crawled.path_id = path_id
        crawled.status = status
        crawled.set_headers({"content-type": "text/html"})
        persister.requests.append(crawled)

    crawler = Crawler("http://perdu.com/", timeout=1)
    logger = Mock()
    module = mod_htaccess(crawler, persister, logger, {"timeout": 10, "level": 2})
    module.verbose = 2
    module.do_get = True

    # Drain the synchronous attack generator.
    for __ in module.attack():
        pass

    assert persister.vulnerabilities
    assert persister.vulnerabilities[0].url == "http://perdu.com/admin/"
async def test_whole_stuff():
    """Attack every kind of parameter without crashing (async htaccess).

    Only the 401 /admin/ endpoint should be eligible for attack; the
    custom "ABC" verb answering 200 is the reported bypass.
    """
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(200, text="Default page")
    )
    respx.get("http://perdu.com/admin/").mock(
        return_value=httpx.Response(401, text="Private section")
    )
    respx.route(method="ABC", host="perdu.com", path="/admin/").mock(
        return_value=httpx.Response(200, text="Hello there")
    )

    persister = AsyncMock()
    all_requests = []
    for path_id, (url, status) in enumerate(
            (("http://perdu.com/", 200), ("http://perdu.com/admin/", 401)), start=1):
        crawled = Request(url)
        crawled.path_id = path_id
        crawled.status = status
        crawled.set_headers({"content-type": "text/html"})
        all_requests.append(crawled)

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    logger = Mock()
    module = mod_htaccess(crawler, persister, logger, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    module.do_get = True

    for crawled in all_requests:
        if await module.must_attack(crawled):
            await module.attack(crawled)
        else:
            # The plain 200 page must be the only skipped target.
            assert crawled.path_id == 1

    assert persister.add_payload.call_count == 1
    reported = persister.add_payload.call_args_list[0][1]
    assert reported["module"] == "htaccess"
    assert reported["category"] == _("Htaccess Bypass")
    assert reported["request"].url == "http://perdu.com/admin/"
    await crawler.close()
async def test_whole_stuff():
    """Attack every kind of parameter without crashing (async htaccess,
    FakePersister variant): only the 401 /admin/ endpoint is attackable
    and its "ABC"-verb bypass must be recorded."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(200, text="Default page"))
    respx.get("http://perdu.com/admin/").mock(
        return_value=httpx.Response(401, text="Private section"))
    respx.route(method="ABC", host="perdu.com", path="/admin/").mock(
        return_value=httpx.Response(200, text="Hello there"))

    persister = FakePersister()
    request = Request("http://perdu.com/")
    request.path_id = 1
    request.status = 200
    request.set_headers({"content-type": "text/html"})
    persister.requests.append(request)
    request = Request("http://perdu.com/admin/")
    request.path_id = 2
    request.status = 401
    request.set_headers({"content-type": "text/html"})
    persister.requests.append(request)

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    options = {"timeout": 10, "level": 2}
    logger = Mock()
    module = mod_htaccess(crawler, persister, logger, options, Event())
    module.verbose = 2
    module.do_get = True

    for request in persister.requests:
        # FIX: must_attack() is a coroutine on the async module API (the
        # AsyncMock variant of this test awaits it).  Without "await" the
        # un-awaited coroutine object is always truthy, so the else branch
        # (and its path_id assertion) could never run and the coroutine
        # leaked a RuntimeWarning.
        if await module.must_attack(request):
            await module.attack(request)
        else:
            assert request.path_id == 1

    assert persister.vulnerabilities
    assert persister.vulnerabilities[0].url == "http://perdu.com/admin/"
    await crawler.close()
def test_whole_stuff():
    """Attack every kind of parameter without crashing (sync methods module).

    The /dav/ endpoint advertising PUT must end up in the logger output.
    """
    responses.add(
        responses.OPTIONS,
        url="http://perdu.com/",
        body="Default page",
        headers={"Allow": "GET,POST,HEAD"}
    )
    responses.add(
        responses.OPTIONS,
        url="http://perdu.com/dav/",
        body="Private section",
        headers={"Allow": "GET,POST,HEAD,PUT"}
    )

    persister = FakePersister()
    for path_id, url in enumerate(("http://perdu.com/", "http://perdu.com/dav/"), start=1):
        crawled = Request(url)
        crawled.path_id = path_id
        crawled.status = 200
        crawled.set_headers({"content-type": "text/html"})
        persister.requests.append(crawled)

    crawler = Crawler("http://perdu.com/", timeout=1)
    logger = FakeLogger()
    module = mod_methods(crawler, persister, logger, {"timeout": 10, "level": 2})
    module.verbose = 2
    module.do_get = True

    # Drain the synchronous attack generator.
    for __ in module.attack():
        pass

    assert "http://perdu.com/dav/" in logger.message
async def test_false_positives():
    """Nikto module false-positive handling.

    A single entry matches on response content ("root:0:0:"); every other
    probe gets a generic HTTP 200 and must be rejected by the module's
    false-positive check, so exactly one payload is recorded.
    """
    respx.route(host="raw.githubusercontent.com").pass_through()
    # This one triggers a match based on content
    respx.get("http://perdu.com/opendir.php?/etc/passwd").mock(
        return_value=httpx.Response(200, text="root:0:0:")
    )
    # A lot of cases will trigger because HTTP 200 is returned instead of 404
    # but false positive check should block them
    respx.route(host="perdu.com").mock(
        return_value=httpx.Response(200, text="Hello there")
    )

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    # FIX: the config directory may not exist on a fresh environment; without
    # this the open() below raises FileNotFoundError before the test starts.
    os.makedirs(persister.CONFIG_DIR, exist_ok=True)
    temp_nikto_db = os.path.join(persister.CONFIG_DIR, "temp_nikto_db")
    with open(temp_nikto_db, "w") as fd:
        fd.writelines(
            [
                "003270,539,d,/catinfo,GET,200,,,,,May be vulnerable to a buffer overflow. Request '/catinfo?',,\n",
                "003271,5407,a,/soap/servlet/soaprouter,GET,200,,,,,Oracle 9iAS SOAP components allow anonymous,,\n",
                "003272,543,7,/opendir.php?/etc/passwd,GET,root:,,,,,This PHP-Nuke CGI allows attackers to read,,\n"
            ]
        )

    request = Request("http://perdu.com/")
    request.path_id = 1
    request.status = 200
    request.set_headers({"content-type": "text/html"})
    persister.get_links.return_value = chain([request])

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    options = {"timeout": 10, "level": 2, "tasks": 20}
    module = ModuleNikto(crawler, persister, options, Event())
    module.do_get = True
    module.NIKTO_DB = "temp_nikto_db"

    try:
        await module.attack(request)
    finally:
        # FIX: always remove the temporary database, even when attack()
        # raises, so a failing run cannot pollute later tests with stale
        # entries.
        os.unlink(temp_nikto_db)

    assert persister.add_payload.call_count == 1
    assert persister.add_payload.call_args_list[0][1]["module"] == "nikto"
    assert persister.add_payload.call_args_list[0][1]["category"] == _("Potentially dangerous file")
    assert persister.add_payload.call_args_list[0][1]["request"].url == (
        "http://perdu.com/opendir.php?%2Fetc%2Fpasswd"
    )
    assert (
        "This PHP-Nuke CGI allows attackers to read"
    ) in persister.add_payload.call_args_list[0][1]["info"]
    await crawler.close()
async def test_whole_stuff():
    """Attack every kind of parameter without crashing (async methods module).

    The /dav/ endpoint advertising PUT must show up in the logger output.
    """
    respx.options("http://perdu.com/").mock(
        return_value=httpx.Response(200, text="Default page", headers={"Allow": "GET,POST,HEAD"})
    )
    respx.options("http://perdu.com/dav/").mock(
        return_value=httpx.Response(200, text="Private section", headers={"Allow": "GET,POST,HEAD,PUT"})
    )

    persister = FakePersister()
    for path_id, url in enumerate(("http://perdu.com/", "http://perdu.com/dav/"), start=1):
        crawled = Request(url)
        crawled.path_id = path_id
        crawled.status = 200
        crawled.set_headers({"content-type": "text/html"})
        persister.requests.append(crawled)

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    logger = FakeLogger()
    module = mod_methods(crawler, persister, logger, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    module.do_get = True

    for crawled in persister.requests:
        await module.attack(crawled)

    assert "http://perdu.com/dav/" in logger.message
    await crawler.close()
def test_whole_stuff():
    """Attack every kind of parameter without crashing (sync nikto module).

    Only the CGI whose response contains "root:0:0:" must be reported;
    everything else answers 404.
    """
    responses.add_passthru(
        "https://raw.githubusercontent.com/wapiti-scanner/nikto/master/program/databases/db_tests"
    )
    responses.add(
        responses.GET,
        url="http://perdu.com/cgi-bin/a1disp3.cgi?../../../../../../../../../../etc/passwd",
        body="root:0:0:",
    )
    responses.add(
        responses.GET,
        url=re.compile(r"http://perdu.com/*"),
        body="Not found",
        status=404
    )

    persister = FakePersister()
    entry_point = Request("http://perdu.com/")
    entry_point.path_id = 1
    entry_point.status = 200
    entry_point.set_headers({"content-type": "text/html"})
    persister.requests.append(entry_point)

    crawler = Crawler("http://perdu.com/", timeout=1)
    logger = Mock()
    module = mod_nikto(crawler, persister, logger, {"timeout": 10, "level": 2})
    module.verbose = 2
    module.do_get = True

    # Drain the synchronous attack generator.
    for __ in module.attack():
        pass

    assert len(persister.vulnerabilities) == 1
    vuln_request, vuln_info = persister.vulnerabilities[0][0], persister.vulnerabilities[0][1]
    assert vuln_request.url == (
        "http://perdu.com/cgi-bin/a1disp3.cgi?..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2Fetc%2Fpasswd"
    )
    assert "This CGI allows attackers read arbitrary files on the host" in vuln_info
async def test_whole_stuff():
    """Attack every kind of parameter without crashing (async nikto module).

    A single probe answers with "root:0:0:" and must be the only payload
    recorded; all other probes get a 404.
    """
    respx.route(host="raw.githubusercontent.com").pass_through()
    respx.get("http://perdu.com/cgi-bin/a1disp3.cgi?../../../../../../../../../../etc/passwd").mock(
        return_value=httpx.Response(200, text="root:0:0:")
    )
    respx.route(host="perdu.com").mock(
        return_value=httpx.Response(404, text="Not found")
    )

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    entry_point = Request("http://perdu.com/")
    entry_point.path_id = 1
    entry_point.status = 200
    entry_point.set_headers({"content-type": "text/html"})
    persister.get_links.return_value = chain([entry_point])

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_nikto(crawler, persister, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    module.do_get = True

    await module.attack(entry_point)

    assert persister.add_payload.call_count == 1
    reported = persister.add_payload.call_args_list[0][1]
    assert reported["module"] == "nikto"
    assert reported["category"] == _("Potentially dangerous file")
    assert reported["request"].url == (
        "http://perdu.com/cgi-bin/a1disp3.cgi?..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2F..%2Fetc%2Fpasswd"
    )
    assert "This CGI allows attackers read arbitrary files on the host" in reported["info"]
    await crawler.close()
async def async_send(self, resource: web.Request, headers: dict = None, follow_redirects: bool = False) -> Page:
    """Send *resource* using the HTTP verb it carries, then mirror the
    response status and headers back onto the request object.

    Args:
        resource: the request to send; its ``method`` selects the transport call.
        headers: extra headers forwarded to the underlying call (may be None).
        follow_redirects: whether the underlying call may follow redirects.

    Returns:
        The Page built from the HTTP response.
    """
    call_kwargs = {"headers": headers, "follow_redirects": follow_redirects}
    verb = resource.method
    if verb == "GET":
        page = await self.async_get(resource, **call_kwargs)
    elif verb == "POST":
        page = await self.async_post(resource, **call_kwargs)
    else:
        # Any other verb goes through the generic request path.
        page = await self.async_request(verb, resource, **call_kwargs)
    # Keep the request object in sync with what the server actually returned.
    resource.status = page.status
    resource.set_headers(page.headers)
    return page