async def test_implies_detection():
    """A detected application (Backdrop) must also report its implied technologies (PHP)."""
    # Only the X-Generator header carries a fingerprint in this response.
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ </body></html>",
            headers={"X-Generator": "Backdrop CMS 4.5"},
        )
    )

    store = AsyncMock()
    profile_root = os.getenv("HOME") or os.getenv("USERPROFILE")
    store.CONFIG_DIR = os.path.join(profile_root, ".wapiti", "config")

    target = Request("http://perdu.com")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.add_payload.call_count == 3
    assert store.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}'
    )
    assert store.add_payload.call_args_list[-1][1]["info"] == (
        '{"versions": [], "name": "PHP", "categories": ["Programming languages"]}'
    )
    await http_client.close()
async def test_cookies_detection():
    """CodeIgniter should be identified from its characteristic cookie name."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ </body></html>",
            headers={"Set-Cookie": "ci_csrf_token=4.1"},
        )
    )

    store = AsyncMock()
    profile_root = os.getenv("HOME") or os.getenv("USERPROFILE")
    store.CONFIG_DIR = os.path.join(profile_root, ".wapiti", "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.add_payload.call_count
    assert store.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["2+"], "name": "CodeIgniter", "categories": ["Web frameworks"]}'
    )
    await http_client.close()
async def test_url_detection():
    """Outlook Web App should be identified from its URL pattern alone."""
    # The body carries no fingerprint: detection must come from the /owa/ path.
    respx.get("http://perdu.com/owa/auth/logon.aspx").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong></body></html>",
        )
    )

    store = FakePersister()
    target = Request("http://perdu.com/owa/auth/logon.aspx")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.additionals
    assert store.additionals[2] == '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"]}'
    await http_client.close()
def test_url_detection():
    """Outlook Web App should be identified from its URL pattern alone (sync variant)."""
    responses.add(
        responses.GET,
        url="http://perdu.com/owa/auth/logon.aspx",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong></body></html>",
    )

    store = FakePersister()
    target = Request("http://perdu.com/owa/auth/logon.aspx")
    target.path_id = 1
    store.requests.append(target)

    http_client = Crawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2})
    wapp.verbose = 2

    # Drain the attack generator so every payload is processed.
    for _ in wapp.attack():
        pass

    assert store.additionals
    assert store.additionals[2] == '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"]}'
async def test_multi_detection():
    """WordPress should be detected when several independent clues (meta, script, link header) agree."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title> \ <meta name=\"generator\" content=\"WordPress 5.6.1\"> \ </head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ <script type=\"text/javascript\" src=\"https://perdu.com/wp-includes/js/wp-embed.min.js\" ></script> \ </body></html>",
            headers={"link": "<http://perdu.com/wp-json/>; rel=\"https://api.w.org/\""},
        )
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.additionals
    assert store.additionals[-1] == '{"versions": ["5.6.1"], "name": "WordPress", "categories": ["CMS", "Blogs"]}'
    await http_client.close()
async def test_meta_detection():
    """Planet should be identified from its generator meta tag."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title> \ <meta name=\"generator\" content=\"Planet/1.6.2\"> \ </head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ </body></html>",
        )
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.additionals
    assert store.additionals[0] == '{"versions": ["1.6.2"], "name": "Planet", "categories": ["Feed readers"]}'
    await http_client.close()
async def test_cookies_detection():
    """CodeIgniter should be identified from its characteristic cookie name."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ </body></html>",
            headers={"Set-Cookie": "ci_csrf_token=4.1"},
        )
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.additionals
    assert store.additionals[0] == '{"versions": ["2+"], "name": "CodeIgniter", "categories": ["Web frameworks"]}'
    await http_client.close()
async def test_script_detection():
    """Chart.js should be identified (with version) from the script src URL."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ <script src=\"http://chartjs.org/dist/1.4.2/Chart.js\"></script>\ </body></html>",
        )
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.additionals
    assert store.additionals[0] == '{"versions": ["1.4.2"], "name": "Chart.js", "categories": ["JavaScript graphics"]}'
    await http_client.close()
async def test_implies_detection():
    """A detected application (Backdrop) must also report its implied technologies (PHP)."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ </body></html>",
            headers={"X-Generator": "Backdrop CMS 4.5"},
        )
    )

    store = FakePersister()
    target = Request("http://perdu.com")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.additionals
    assert store.additionals[0] == '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}'
    assert store.additionals[1] == '{"versions": [], "name": "PHP", "categories": ["Programming languages"]}'
    await http_client.close()
async def test_meta_detection():
    """Planet should be identified from its generator meta tag (AsyncMock persister variant)."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title> \ <meta name=\"generator\" content=\"Planet/1.6.2\"> \ </head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ </body></html>",
        )
    )

    store = AsyncMock()
    profile_root = os.getenv("HOME") or os.getenv("USERPROFILE")
    store.CONFIG_DIR = os.path.join(profile_root, ".wapiti", "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.add_payload.call_count
    assert store.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["1.6.2"], "name": "Planet", "categories": ["Feed readers"]}'
    )
    await http_client.close()
def test_html_detection():
    """Atlassian Confluence should be identified from its HTML footer pattern."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ <title>Powered by <a href=\"http://atlassian.com/software/confluence\">Atlassian Confluence</a> 2.8.4</p> \ </body></html>",
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1
    store.requests.append(target)

    http_client = Crawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2})
    wapp.verbose = 2

    # Drain the attack generator so every payload is processed.
    for _ in wapp.attack():
        pass

    assert store.additionals
    assert store.additionals[0] == "Atlassian Confluence 2.8.4"
def test_false_positive():
    """A fingerprint-free page must not trigger any detection."""
    # Let the technologies database download reach the real network.
    responses.add_passthru(
        "https://raw.githubusercontent.com/wapiti-scanner/wappalyzer/master/src/technologies.json"
    )
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong></body></html>",
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1
    store.requests.append(target)

    http_client = Crawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2})
    wapp.verbose = 2

    # Drain the attack generator so every payload is processed.
    for _ in wapp.attack():
        pass

    assert not store.additionals
def test_meta_detection():
    """Planet should be identified from its generator meta tag (sync variant)."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="<html><head><title>Vous Etes Perdu ?</title> \ <meta name=\"generator\" content=\"Planet/1.6.2\"> \ </head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ </body></html>",
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1
    store.requests.append(target)

    http_client = Crawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2})
    wapp.verbose = 2

    # Drain the attack generator so every payload is processed.
    for _ in wapp.attack():
        pass

    assert store.additionals
    assert store.additionals[0] == '{"versions": ["1.6.2"], "name": "Planet", "categories": ["Feed readers"]}'
def test_implies_detection():
    """A detected application (Backdrop) must also report its implied technologies (PHP)."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ </body></html>",
        headers={"X-Generator": "Backdrop CMS 4.5"},
    )

    store = FakePersister()
    target = Request("http://perdu.com")
    target.path_id = 1
    store.requests.append(target)

    http_client = Crawler("http://perdu.com")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2})
    wapp.verbose = 2

    # Drain the attack generator so every payload is processed.
    for _ in wapp.attack():
        pass

    assert store.additionals
    assert store.additionals[0] == '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}'
    assert store.additionals[1] == '{"versions": [], "name": "PHP", "categories": ["Programming languages"]}'
def test_script_detection():
    """Chart.js should be identified (with version) from the script src URL (sync variant)."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ <script src=\"http://chartjs.org/dist/1.4.2/Chart.js\"></script>\ </body></html>",
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1
    store.requests.append(target)

    http_client = Crawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2})
    wapp.verbose = 2

    # Drain the attack generator so every payload is processed.
    for _ in wapp.attack():
        pass

    assert store.additionals
    assert store.additionals[0] == '{"versions": ["1.4.2"], "name": "Chart.js", "categories": ["JavaScript graphics"]}'
def test_cookies_detection():
    """CodeIgniter should be identified from its characteristic cookie name (sync variant)."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ </body></html>",
        headers={"Set-Cookie": "ci_csrf_token=4.1"},
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1
    store.requests.append(target)

    http_client = Crawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2})
    wapp.verbose = 2

    # Drain the attack generator so every payload is processed.
    for _ in wapp.attack():
        pass

    assert store.additionals
    assert store.additionals[0] == '{"versions": ["2+"], "name": "CodeIgniter", "categories": ["Web frameworks"]}'
async def test_html_detection():
    """Atlassian FishEye should be identified from its page title pattern."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>FishEye 2.8.4</title> \ </head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ </body></html>",
        )
    )

    store = AsyncMock()
    profile_root = os.getenv("HOME") or os.getenv("USERPROFILE")
    store.CONFIG_DIR = os.path.join(profile_root, ".wapiti", "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.add_payload.call_count
    assert store.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["2.8.4"], "name": "Atlassian FishEye", "categories": ["Development"]}'
    )
    await http_client.close()
def test_html_detection():
    """Atlassian FishEye should be identified from its page title pattern (sync variant)."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="<html><head><title>FishEye 2.8.4</title> \ </head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ </body></html>",
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1
    store.requests.append(target)

    http_client = Crawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2})
    wapp.verbose = 2

    # Drain the attack generator so every payload is processed.
    for _ in wapp.attack():
        pass

    assert store.additionals
    assert store.additionals[0] == '{"versions": ["2.8.4"], "name": "Atlassian FishEye", "categories": ["Development"]}'
async def test_html_detection():
    """Atlassian FishEye should be identified from its page title pattern."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>FishEye 2.8.4</title> \ </head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ </body></html>",
        )
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.additionals
    assert store.additionals[0] == '{"versions": ["2.8.4"], "name": "Atlassian FishEye", "categories": ["Development"]}'
    await http_client.close()
async def test_url_detection():
    """Outlook Web App should be identified from its URL pattern; payload metadata is checked too."""
    respx.get("http://perdu.com/owa/auth/logon.aspx").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong></body></html>",
        )
    )

    store = AsyncMock()
    profile_root = os.getenv("HOME") or os.getenv("USERPROFILE")
    store.CONFIG_DIR = os.path.join(profile_root, ".wapiti", "config")

    target = Request("http://perdu.com/owa/auth/logon.aspx")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.add_payload.call_count
    first_call_kwargs = store.add_payload.call_args_list[0][1]
    assert first_call_kwargs["module"] == "wapp"
    assert first_call_kwargs["category"] == _("Fingerprint web technology")
    assert store.add_payload.call_args_list[2][1]["info"] == (
        '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"]}'
    )
    await http_client.close()
async def test_false_positive():
    """A fingerprint-free page must not trigger any detection."""
    # Let the technologies database download reach the real network.
    respx.route(host="raw.githubusercontent.com").pass_through()
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong></body></html>",
        )
    )

    store = AsyncMock()
    profile_root = os.getenv("HOME") or os.getenv("USERPROFILE")
    store.CONFIG_DIR = os.path.join(profile_root, ".wapiti", "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert not store.add_payload.call_count
    await http_client.close()
def test_headers_detection():
    """Cherokee should be identified from the Server response header."""
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ </body></html>",
        headers={"Server": "Cherokee/1.3.4"},
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1
    store.requests.append(target)

    http_client = Crawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2})
    wapp.verbose = 2

    # Drain the attack generator so every payload is processed.
    for _ in wapp.attack():
        pass

    assert store.additionals
    assert store.additionals[0] == "Cherokee 1.3.4"
async def test_false_positive():
    """A fingerprint-free page must not trigger any detection."""
    # Let the technologies database download reach the real network.
    respx.route(host="raw.githubusercontent.com").pass_through()
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong></body></html>",
        )
    )

    store = FakePersister()
    target = Request("http://perdu.com/")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert not store.additionals
    await http_client.close()
async def test_vulnerabilities():
    """Detections with known versions should be logged with the proper payload categories."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ </body></html>",
            headers={
                "X-Generator": "Backdrop CMS 4.5",
                "Server": "Cherokee/1.3.4",
            },
        )
    )

    store = AsyncMock()
    profile_root = os.getenv("HOME") or os.getenv("USERPROFILE")
    store.CONFIG_DIR = os.path.join(profile_root, ".wapiti", "config")

    target = Request("http://perdu.com")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    # First recorded payload is an additional (technology fingerprint).
    assert store.add_payload.call_count == 5
    assert store.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}'
    )
    assert store.add_payload.call_args_list[0][1]["category"] == _("Fingerprint web technology")
    assert store.add_payload.call_args_list[3][1]["info"] == (
        '{"versions": ["1.3.4"], "name": "Cherokee", "categories": ["Web servers"]}'
    )
    assert store.add_payload.call_args_list[3][1]["category"] == _('Fingerprint web server')
    await http_client.close()
async def test_multi_detection():
    """WordPress should be detected when several independent clues (meta, script, link header) agree."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title> \ <meta name=\"generator\" content=\"WordPress 5.6.1\"> \ </head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ <script type=\"text/javascript\" src=\"https://perdu.com/wp-includes/js/wp-embed.min.js\" ></script> \ </body></html>",
            headers={"link": "<http://perdu.com/wp-json/>; rel=\"https://api.w.org/\""},
        )
    )

    store = AsyncMock()
    profile_root = os.getenv("HOME") or os.getenv("USERPROFILE")
    store.CONFIG_DIR = os.path.join(profile_root, ".wapiti", "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com/")
    wapp = mod_wapp(http_client, store, {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.add_payload.call_count
    assert store.add_payload.call_args_list[-1][1]["info"] == (
        '{"versions": ["5.6.1"], "name": "WordPress", "categories": ["CMS", "Blogs"]}'
    )
    await http_client.close()
async def test_vulnerabilities():
    """Versioned detections should be stored as vulnerabilities with the matching category."""
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \ <h2>Pas de panique, on va vous aider</h2> \ <strong><pre> * <----- vous êtes ici</pre></strong> \ </body></html>",
            headers={
                "X-Generator": "Backdrop CMS 4.5",
                "Server": "Cherokee/1.3.4",
            },
        )
    )

    store = FakePersister()
    target = Request("http://perdu.com")
    target.path_id = 1

    http_client = AsyncCrawler("http://perdu.com")
    wapp = mod_wapp(http_client, store, Mock(), {"timeout": 10, "level": 2}, Event())
    wapp.verbose = 2

    await wapp.attack(target)

    assert store.vulnerabilities
    backdrop_info, backdrop_category = store.vulnerabilities[0][0], store.vulnerabilities[0][1]
    assert backdrop_info == '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}'
    assert backdrop_category == 'Fingerprint web application framework'
    cherokee_info, cherokee_category = store.vulnerabilities[1][0], store.vulnerabilities[1][1]
    assert cherokee_info == '{"versions": ["1.3.4"], "name": "Cherokee", "categories": ["Web servers"]}'
    assert cherokee_category == 'Fingerprint web server'
    await http_client.close()