def _init_report(self):
    """Instantiate the report generator matching the requested type and
    register every known vulnerability and anomaly type on it.

    Reads self.report_generator_type and leaves the ready-to-use generator
    in self.report_gen.
    """
    # Pick the generator whose key matches the requested report type.
    for rep_gen_info in self.xml_rep_gen_parser.get_report_generators():
        if self.report_generator_type.lower() == rep_gen_info.get_key():
            self.report_gen = rep_gen_info.create_instance()
            self.report_gen.set_report_info(
                self.target_url,
                self.target_scope,
                gmtime(),
                WAPITI_VERSION
            )
            break

    # Register every vulnerability type described in the XML catalog.
    vuln_xml_parser = VulnerabilityXMLParser()
    vuln_xml_parser.parse(os.path.join(CONF_DIR, "config", "vulnerabilities", "vulnerabilities.xml"))
    for vul in vuln_xml_parser.get_vulnerabilities():
        self.report_gen.add_vulnerability_type(
            _(vul.get_name()),
            _(vul.get_description()),
            _(vul.get_solution()),
            vul.get_references())

    # Register every anomaly type described in the XML catalog.
    anom_xml_parser = AnomalyXMLParser()
    anom_xml_parser.parse(os.path.join(CONF_DIR, "config", "vulnerabilities", "anomalies.xml"))
    for anomaly in anom_xml_parser.get_anomalies():
        self.report_gen.add_anomaly_type(
            _(anomaly.get_name()),
            # Fix: the description was the only localized string here not
            # passed through the translation helper _().
            _(anomaly.get_description()),
            _(anomaly.get_solution()),
            anomaly.get_references()
        )
class Anomaly(Notice):
    """Constant names and localized message templates describing scan
    anomalies (request timeouts and HTTP 500 responses)."""

    # Anomaly category names
    ERROR_500 = _("Internal Server Error")
    RES_CONSUMPTION = _("Resource consumption")

    # Generic messages; {0} is the location where the anomaly occurred.
    MSG_500 = _("Received a HTTP 500 error in {0}")
    MSG_TIMEOUT = _("Timeout occurred in {0}")

    # Timeout messages, one per injection point.
    MSG_QS_TIMEOUT = _(
        "The request timed out while attempting to inject a payload in the query string"
    )
    MSG_PATH_TIMEOUT = _(
        "The request timed out while attempting to inject a payload in the resource path"
    )
    MSG_PARAM_TIMEOUT = _(
        "The request timed out while attempting to inject a payload in the parameter {0}"
    )

    # HTTP 500 messages, one per injection point.
    MSG_QS_500 = _("The server responded with a 500 HTTP error code "
                   "while attempting to inject a payload in the query string")
    MSG_PATH_500 = _(
        "The server responded with a 500 HTTP error code "
        "while attempting to inject a payload in the resource path")
    MSG_PARAM_500 = _(
        "The server responded with a 500 HTTP error code "
        "while attempting to inject a payload in the parameter {0}")
def _init_passive(self):
    """Prepare the report generator, choose the report output file name and
    instantiate every passive analysis module."""
    self._init_report()
    print(_("[*] Loading modules:"))
    # Module names carry a 4-character prefix (presumably "mod_") which is
    # stripped for display — TODO confirm against the passive package.
    modules_list = sorted(module_name[4:] for module_name in passive.modules)
    print("\t {0}".format(", ".join(modules_list)))

    # Init output file for report (can be found in the attack() function too)
    if not self.output_file:
        if self.report_generator_type == "html":
            self.output_file = self.COPY_REPORT_DIR
        else:
            # e.g. "host_port_MMDDYYYY_HHMM" — ":" is not valid in file names.
            filename = "{}_{}".format(
                self.server.replace(":", "_"),
                strftime("%m%d%Y_%H%M", self.report_gen.scan_date))

            if self.report_generator_type == "txt":
                extension = ".txt"
            elif self.report_generator_type == "json":
                extension = ".json"
            else:
                extension = ".xml"

            self.output_file = filename + extension

    logger = ConsoleLogger()
    if self.color:
        logger.color = True

    # Instantiate every passive module and keep it for later processing.
    for mod_name in passive.modules:
        passive_module = import_module("wapitiCore.passive." + mod_name)
        instance = getattr(passive_module, mod_name)(self.persister, logger)
        self.passive.append(instance)
        instance.log_green(_("[*] Loading passive module {0}"), instance.name)
def save_scan_state(self):
    """Persist the current crawl state so the scan can be resumed later."""
    print(_("[*] Saving scan state, please wait..."))
    # URLs still waiting to be crawled are written in one shot
    # (bulk insert + final commit).
    self.persister.set_to_browse(self._start_urls)

    closing_note = (
        '',
        _(" Note"),
        "========",
        _("This scan has been saved in the file {0}").format(self.persister.output_file),
    )
    for line in closing_note:
        print(line)
def browse(self):
    """Extract hyperlinks and forms from the webpages found on the website"""
    # Restore the crawl state of a previous session: URLs still to visit,
    # plus already-seen links and forms (excluded from re-exploration).
    for resource in self.persister.get_to_browse():
        self._start_urls.append(resource)
    for resource in self.persister.get_links():
        self._excluded_urls.append(resource)
    for resource in self.persister.get_forms():
        self._excluded_urls.append(resource)

    stopped = False

    # Configure the explorer with the user-chosen crawl limits.
    explorer = crawler.Explorer(self.crawler)
    explorer.max_depth = self._max_depth
    explorer.max_files_per_dir = self._max_files_per_dir
    explorer.max_requests_per_depth = self._max_links_per_page
    explorer.forbidden_parameters = self._bad_params
    explorer.qs_limit = SCAN_FORCE_VALUES[self._scan_force]
    explorer.verbose = (self.verbose > 0)
    # Explorer limits are pickled next to the session file; [:-2] + "pkl"
    # presumably swaps a ".db" suffix for ".pkl" — TODO confirm.
    explorer.load_saved_state(self.persister.output_file[:-2] + "pkl")

    self.persister.set_root_url(self.target_url)
    start = datetime.utcnow()

    try:
        for resource in explorer.explore(self._start_urls, self._excluded_urls):
            # Browsed URLs are saved one at a time
            self.persister.add_request(resource)
            # Stop once a max scan time (>= 1 second) is configured and exceeded.
            if (datetime.utcnow() - start).total_seconds() > self._max_scan_time >= 1:
                print(_("Max scan time was reached, stopping."))
                break
    except KeyboardInterrupt:
        stopped = True

    print(_("[*] Saving scan state, please wait..."))
    # Not yet scanned URLs are all saved in one single time (bulk insert + final commit)
    self.persister.set_to_browse(self._start_urls)
    # Let's save explorer values (limits)
    explorer.save_state(self.persister.output_file[:-2] + "pkl")

    print('')
    print(_(" Note"))
    print("========")
    print(
        _("This scan has been saved in the file {0}").format(
            self.persister.output_file))
    if stopped:
        print(
            _("The scan will be resumed next time unless you pass the --skip-crawl option."
              ))
class Vulnerability(Notice):
    """Localized names of the vulnerability categories reported by Wapiti."""

    # Constants
    SQL_INJECTION = _("SQL Injection")
    BLIND_SQL_INJECTION = _("Blind SQL Injection")
    FILE_HANDLING = _("File Handling")
    XSS = _("Cross Site Scripting")
    CRLF = _("CRLF Injection")
    EXEC = _("Commands execution")
    HTACCESS = _("Htaccess Bypass")
    BACKUP = _("Backup file")
    NIKTO = _("Potentially dangerous file")
    SSRF = _("Server Side Request Forgery")
async def async_try_login(self, auth_url: str):
    """Try to authenticate with the provided url and credentials."""
    # Credentials must already have been split into (username, password).
    if len(self._auth_credentials) != 2:
        logging.error(
            _("Login failed") + " : " + _("Invalid credentials format"))
        return

    username, password = self._auth_credentials

    # Fetch the login page and try to extract the login form
    try:
        page = await self.async_get(web.Request(auth_url), follow_redirects=True)

        login_form, username_field_idx, password_field_idx = page.find_login_form()
        if login_form:
            post_params = login_form.post_params
            get_params = login_form.get_params

            # Inject the credentials into the right parameter list
            # depending on the form's HTTP method.
            if login_form.method == "POST":
                post_params[username_field_idx][1] = username
                post_params[password_field_idx][1] = password
            else:
                get_params[username_field_idx][1] = username
                get_params[password_field_idx][1] = password

            login_request = web.Request(path=login_form.url,
                                        method=login_form.method,
                                        post_params=post_params,
                                        get_params=get_params,
                                        referer=login_form.referer,
                                        link_depth=login_form.link_depth)
            login_response = await self.async_send(login_request,
                                                   follow_redirects=True)

            # ensure logged in: a logout/disconnect link in the response
            # is taken as proof of a successful authentication.
            if login_response.soup.find_all(text=re.compile(
                    r'(?i)((log|sign)\s?out|disconnect|déconnexion)')):
                self.is_logged_in = True
                logging.success(_("Login success"))
            else:
                logging.warning(
                    _("Login failed") + " : " +
                    _("Credentials might be invalid"))
        else:
            logging.warning(
                _("Login failed") + " : " + _("No login form detected"))

    except ConnectionError:
        logging.error(_("[!] Connection error with URL"), auth_url)
    except httpx.RequestError as error:
        logging.error(
            _("[!] {} with url {}").format(error.__class__.__name__, auth_url))
def update(self):
    """Run the update() hook of every attack module that defines one."""
    console = ConsoleLogger()
    if self.color:
        console.color = True

    for module_name in attack.modules:
        module = import_module("wapitiCore.attack." + module_name)
        instance = getattr(module, module_name)(
            self.crawler, self.persister, console, self.attack_options
        )
        # Modules without an update() hook are simply skipped.
        if not hasattr(instance, "update"):
            continue
        print(_("Updating module {0}").format(module_name[4:]))
        instance.update()

    print(_("Update done."))
def is_valid_endpoint(url_type, url):
    """Verify if the url provided has the right format"""
    try:
        parsed = urlparse(url)
    except ValueError:
        print('ValueError')
        return False

    # An endpoint must be a bare URL: no params, query string or fragment.
    if parsed.params or parsed.query or parsed.fragment:
        print(_("Error: {} must not contain params, query or fragment!").format(url_type))
        return False

    if parsed.netloc and parsed.scheme in ("http", "https"):
        return True

    print(_("Error: {} must contain scheme and host").format(url_type))
    return False
async def test_url_detection():
    # Test if application is detected using its url regex
    respx.get(
        "http://perdu.com/owa/auth/logon.aspx"
    ).mock(return_value=httpx.Response(
        200,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre>    * <----- vous êtes ici</pre></strong></body></html>"
    ))

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com/owa/auth/logon.aspx")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = ModuleWapp(crawler, persister, options, Event())

    await module.attack(request)

    # The /owa/ path alone must be enough to fingerprint Outlook Web App.
    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["module"] == "wapp"
    assert persister.add_payload.call_args_list[0][1]["category"] == _(
        "Fingerprint web technology")
    assert persister.add_payload.call_args_list[2][1]["info"] == (
        '{"versions": [], "name": "Outlook Web App", "categories": ["Webmail"], "groups": ["Communication"]}'
    )
    await crawler.close()
async def async_try_login(self, auth_url: str, auth_type: str) -> Tuple[bool, dict, List[str]]:
    """
    Try to authenticate with the provided url and credentials.
    Returns if the the authentication has been successful, the used form variables and the disconnect urls.
    """
    credentials = self._auth_credentials
    if len(credentials) != 2:
        message = _("Login failed") + " : " + _("Invalid credentials format")
        logging.error(message)
        return False, {}, []

    username, password = credentials

    # Form-based login has its own flow; everything else goes through the
    # basic/digest/ntlm handler.
    if auth_type != "post":
        return await self._async_try_login_basic_digest_ntlm(auth_url)
    return await self._async_try_login_post(username, password, auth_url)
async def test_vulnerabilities():
    # Test for vulnerabilities detected
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        200,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre>    * <----- vous êtes ici</pre></strong> \
        </body></html>",
        headers={
            "X-Generator": "Backdrop CMS 4.5",
            "Server": "Cherokee/1.3.4"
        }))

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options, Event())
    module.verbose = 2

    await module.attack(request)

    assert persister.add_payload.call_count == 5
    # First one is an additional
    assert persister.add_payload.call_args_list[0][1]["info"] == (
        '{"versions": ["4.5"], "name": "Backdrop", "categories": ["CMS"]}')
    assert persister.add_payload.call_args_list[0][1]["category"] == _(
        "Fingerprint web technology")
    # The Server header must yield a separate web-server fingerprint.
    assert persister.add_payload.call_args_list[3][1]["info"] == (
        '{"versions": ["1.3.4"], "name": "Cherokee", "categories": ["Web servers"]}'
    )
    assert persister.add_payload.call_args_list[3][1]["category"] == _(
        'Fingerprint web server')
    await crawler.close()
async def _async_try_login_post(
        self, username: str, password: str,
        auth_url: str) -> Tuple[bool, dict, List[str]]:
    """Try to authenticate on *auth_url* by locating its login form and
    submitting it with the given credentials.

    Returns a tuple (logged_in, form_fields, disconnect_urls) where
    form_fields maps "login_field"/"password_field" to the form input names.
    """
    form = {}
    disconnect_urls = []

    # Fetch the login page and try to extract the login form
    try:
        page = await self.async_get(web.Request(auth_url), follow_redirects=True)

        login_form, username_field_idx, password_field_idx = page.find_login_form()
        if login_form:
            post_params = login_form.post_params
            get_params = login_form.get_params

            # Fill the credentials into the parameter list matching the
            # form's HTTP method, remembering the field names used.
            if login_form.method == "POST":
                post_params[username_field_idx][1] = username
                post_params[password_field_idx][1] = password
                form["login_field"] = post_params[username_field_idx][0]
                form["password_field"] = post_params[password_field_idx][0]
            else:
                get_params[username_field_idx][1] = username
                get_params[password_field_idx][1] = password
                form["login_field"] = get_params[username_field_idx][0]
                form["password_field"] = get_params[password_field_idx][0]

            login_request = web.Request(path=login_form.url,
                                        method=login_form.method,
                                        post_params=post_params,
                                        get_params=get_params,
                                        referer=login_form.referer,
                                        link_depth=login_form.link_depth)
            login_response = await self.async_send(login_request,
                                                   follow_redirects=True)

            # ensure logged in: a logout/disconnect link in the response
            # is taken as proof of a successful authentication.
            if login_response.soup.find_all(text=re.compile(DISCONNECT_REGEX)):
                self.is_logged_in = True
                logging.success(_("Login success"))
                disconnect_urls = self._extract_disconnect_urls(login_response)
            else:
                logging.warning(
                    _("Login failed") + " : " +
                    _("Credentials might be invalid"))
        else:
            logging.warning(
                _("Login failed") + " : " + _("No login form detected"))

        return self.is_logged_in, form, disconnect_urls

    except ConnectionError:
        logging.error(_("[!] Connection error with URL"), auth_url)
    except httpx.RequestError as error:
        logging.error(
            _("[!] {} with url {}").format(error.__class__.__name__, auth_url))

    # Fix: the original fell through and implicitly returned None on network
    # errors, breaking callers that unpack the advertised 3-tuple.
    return self.is_logged_in, form, disconnect_urls
async def test_merge_with_and_without_redirection():
    # Test for vulnerabilities detected
    # The first response redirects and carries an OWA version header; the
    # landing page exposes another version via its favicon path. Both hints
    # must be merged into a single fingerprint entry.
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(
        301,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre>    * <----- vous êtes ici</pre></strong> \
        </body></html>",
        headers={
            "X-OWA-Version": "15.0.1497.26",
            "Location": "http://perdu.com/auth/login"
        }))
    respx.get("http://perdu.com/auth/login").mock(return_value=httpx.Response(
        200,
        text=
        "<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
        <link rel='shortcut icon' href='/owa/auth/15.0.1497/themes/resources/favicon.ico' type='image/x-icon'> \
        <h2>Pas de panique, on va vous aider</h2> \
        <strong><pre>    * <----- vous êtes ici</pre></strong> \
        </body></html>",
        headers={}))

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")

    request = Request("http://perdu.com/")
    request.path_id = 1

    crawler = AsyncCrawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}

    module = ModuleWapp(crawler, persister, options, Event())

    await module.attack(request)

    assert persister.add_payload.call_count == 5
    # Both detected versions must appear in one merged OWA entry.
    assert persister.add_payload.call_args_list[3][1]["info"] == (
        '{"versions": ["15.0.1497", "15.0.1497.26"], "name": "Outlook Web App", "categories": ["Webmail"], "groups": ["Communication"]}'
    )
    assert persister.add_payload.call_args_list[3][1]["category"] == _(
        "Fingerprint web application framework")
    await crawler.close()
async def test_finish_two_ranges():
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(200, text="foobar"))

    persister = AsyncMock()
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    persister.CONFIG_DIR = os.path.join(base_dir, "config")
    persister.get_root_url.return_value = "http://perdu.com/"

    request = Request("http://perdu.com/")
    request.path_id = 1

    techno = "techno"
    # Full ordered list of known versions for the fake technology.
    versions = ["1.0", "1.1", "1.2", "1.2.1", "1.3", "1.4", "1.5", "1.6"]

    async def async_magic():
        pass

    # NOTE(review): this patches MagicMock globally, making every MagicMock
    # awaitable for the remainder of the test session.
    MagicMock.__await__ = lambda x: async_magic().__await__()

    with mock.patch("wapitiCore.attack.mod_htp.ModuleHtp.add_vuln_info", autospec=True) as mock_add_vuln_info, \
            mock.patch.object(ModuleHtp, "_db", new_callable=PropertyMock) as mock_db, \
            mock.patch.object(ModuleHtp, "_get_versions", return_value=versions):
        crawler = AsyncCrawler(Request("http://perdu.com/"))
        options = {"timeout": 10, "level": 2}

        module_htp = ModuleHtp(crawler, persister, options, Event())
        module_htp._root_url = "http://perdu.com/"
        # Overlapping detected ranges; the merged span must run from 1.0 to
        # 1.5 (1.6 was never observed).
        module_htp.tech_versions[techno] = [["1.2", "1.2.1", "1.3"],
                                            ["1.3", "1.4"], ["1.5", "1.5"],
                                            ["1.0", "1.2"]]

        await module_htp.finish()

        mock_add_vuln_info.assert_called_once_with(
            module_htp,
            category=_("Fingerprint web server"),
            request=Request("http://perdu.com/"),
            info=
            '{"name": "techno", "versions": ["1.0", "1.1", "1.2", "1.2.1", "1.3", "1.4", "1.5"]}'
        )
class GenericObservation:
    """Base holder for an observation type reported by the scanner: a name,
    description, solution and reference links, plus shared message templates
    and severity level constants."""

    # Constants: message templates shared by observation subclasses.
    MSG_EVIL_URL = _(" Evil url: {0}")
    MSG_PARAM_INJECT = _("{0} in {1} via injection in the parameter {2}")
    MSG_FROM = _(" coming from {0}")
    MSG_QS_INJECT = _("{0} in {1} via injection in the query string")
    MSG_PATH_INJECT = _("{0} in {1} via injection in the resource path")
    MSG_EVIL_PARAM = _("Involved parameter: {0}")
    MSG_EVIL_REQUEST = _("Evil request:")

    # Severity levels, stored as strings (1 = highest).
    HIGH_LEVEL = "1"
    MEDIUM_LEVEL = "2"
    LOW_LEVEL = "3"

    def __init__(self):
        # references maps a reference title to its URL — TODO confirm
        # against callers of set_references.
        self.name = ""
        self.description = ""
        self.solution = ""
        self.references = {}

    def get_name(self):
        """Return the observation name."""
        return self.name

    def get_description(self):
        """Return the observation description."""
        return self.description

    def get_solution(self):
        """Return the recommended remediation."""
        return self.solution

    def get_references(self):
        """Return the reference links."""
        return self.references

    def set_name(self, name):
        """Set the observation name."""
        self.name = name

    def set_description(self, description):
        """Set the observation description."""
        self.description = description

    def set_solution(self, solution):
        """Set the recommended remediation."""
        self.solution = solution

    def set_references(self, references):
        """Set the reference links."""
        self.references = references
async def browse(self, stop_event: asyncio.Event, parallelism: int = 8):
    """Extract hyperlinks and forms from the webpages found on the website"""
    explorer = crawler.Explorer(self.crawler, stop_event, parallelism=parallelism)
    # Apply the user-configured crawl limits.
    explorer.max_depth = self._max_depth
    explorer.max_files_per_dir = self._max_files_per_dir
    explorer.max_requests_per_depth = self._max_links_per_page
    explorer.forbidden_parameters = self._bad_params
    explorer.qs_limit = SCAN_FORCE_VALUES[self._scan_force]
    explorer.verbose = (self.verbose > 0)
    # Explorer limits are pickled next to the session file; [:-2] + "pkl"
    # presumably swaps a ".db" suffix for ".pkl" — TODO confirm.
    explorer.load_saved_state(self.persister.output_file[:-2] + "pkl")

    start = datetime.utcnow()

    async for resource in explorer.async_explore(self._start_urls, self._excluded_urls):
        # Browsed URLs are saved one at a time
        self.persister.add_request(resource)

        # Trigger the stop event once a max scan time (>= 1 second) is
        # configured and exceeded.
        if not stop_event.is_set() and (datetime.utcnow() - start).total_seconds() > self._max_scan_time >= 1:
            print(_("Max scan time was reached, stopping."))
            stop_event.set()

    # Let's save explorer values (limits)
    explorer.save_state(self.persister.output_file[:-2] + "pkl")
def _write_auth_info(self, txt_report_file: codecs.StreamReaderWriter):
    """
    Write the authentication section of the report: method, URL and login
    status, plus the form field names when form authentication was used.
    Does nothing when no authentication information was recorded.
    """
    auth_dict = self._infos.get("auth")
    if auth_dict is None:
        return

    txt_report_file.write(title(_("Authentication :")))
    for label, key in (("Method", "method"), ("Url", "url"), ("Logged in", "logged_in")):
        txt_report_file.write("{0}: {1}\n".format(label, auth_dict[key]))

    form_dict = auth_dict.get("form")
    if not form_dict:
        return

    txt_report_file.write("Login field: {0}\n".format(form_dict["login_field"]))
    txt_report_file.write("Password field: {0}\n".format(form_dict["password_field"]))
    txt_report_file.write("\n")
    txt_report_file.write(separator)
# the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from wapitiCore.language.language import _ TYPE = "vulnerability" NAME = _("Blind SQL Injection") SHORT_NAME = _("Blind SQLI") WSTG_CODE = ["WSTG-INPV-05"] DESCRIPTION = _( "Blind SQL injection is a technique that exploits a vulnerability occurring in the database of an application." ) + " " + _( "This kind of vulnerability is harder to detect than basic SQL injections because no error message will be " "displayed on the webpage.") SOLUTION = _( "To protect against SQL injection, user input must not directly be embedded in SQL statements." ) + " " + _( "Instead, user input must be escaped or filtered or parameterized statements must be used." )
# the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from wapitiCore.language.language import _ TYPE = "vulnerability" NAME = _("Potentially dangerous file") SHORT_NAME = NAME WSTG_CODE = ["WSTG-CONF-04", "WSTG-CONF-01"] DESCRIPTION = _( "A file with potential vulnerabilities has been found on the website.") SOLUTION = _( "Make sure the script is up-to-date and restrict access to it if possible." ) REFERENCES = [{ "title": "Mitre: Search details of a CVE", "url": "https://cve.mitre.org/cve/search_cve_list.html" }, {
# the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from wapitiCore.language.language import _ TYPE = "anomaly" NAME = _("Internal Server Error") SHORT_NAME = NAME WSTG_CODE = ["WSTG-ERRH-01"] DESCRIPTION = _( "An error occurred on the server's side, preventing it to process the request." ) + " " + _("It may be the sign of a vulnerability.") SOLUTION = _( "More information about the error should be found in the server logs.") REFERENCES = [{ "title": "Wikipedia: List of 5xx HTTP status codes", "url":
# the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from wapitiCore.language.language import _ TYPE = "additional" NAME = _("Secure Flag cookie") SHORT_NAME = NAME DESCRIPTION = _( "The secure flag is an option that can be set by the application server when sending a new cookie to the user " "within an HTTP Response." ) + " " + _( "The purpose of the secure flag is to prevent cookies from being observed by unauthorized parties due " "to the transmission of a the cookie in clear text." ) SOLUTION = _( "When generating the cookie, make sure to set the Secure Flag to True." ) REFERENCES = [
def __str__(self):
    """Human-readable description of the rejected option and its value."""
    template = _("Invalid argument for option {0} : {1}")
    return template.format(self.opt_name, self.opt_value)
def wapiti_main():
    """Command-line entry point: print the banner, parse arguments,
    configure the Wapiti scanner, then run the crawl and the attacks."""
    # NOTE(review): banner art spacing reconstructed best-effort; the
    # original file's exact whitespace was not preserved by extraction.
    banners = [
        """
     __      __               .__  __  .__________
    /  \\    /  \\_____  ______ |__|/  |_|__\\_____  \\
    \\   \\/\\/   /\\__  \\ \\____ \\|  \\   __\\  |  _(__  <
     \\        /  / __ \\|  |_> >  ||  |  |  |/       \\
      \\__/\\  /  (____  /   __/|__||__|  |__/______  /
           \\/        \\/|__|                       \\/""",
        """
 __    __            _ _   _ _____
/ / /\\ \\ \\__ _ _ __ (_) |_(_)___ /
\\ \\/  \\/ / _` | '_ \\| | __| | |_ \\
 \\  /\\  / (_| | |_) | | |_| |___) |
  \\/  \\/ \\__,_| .__/|_|\\__|_|____/
              |_|                  """,
        """
██╗    ██╗ █████╗ ██████╗ ██╗████████╗██╗██████╗
██║    ██║██╔══██╗██╔══██╗██║╚══██╔══╝██║╚════██╗
██║ █╗ ██║███████║██████╔╝██║   ██║   ██║ █████╔╝
██║███╗██║██╔══██║██╔═══╝ ██║   ██║   ██║ ╚═══██╗
╚███╔███╔╝██║  ██║██║     ██║   ██║   ██║██████╔╝
 ╚══╝╚══╝  ╚═╝  ╚═╝╚═╝     ╚═╝   ╚═╝   ╚═╝╚═════╝ """
    ]

    print(choice(banners))
    print("Wapiti-3.0.3 (wapiti.sourceforge.io)")

    # Easter eggs: moon phase and special dates.
    moon_phase = phase()
    if moon_phase == "full":
        print(_("[*] You are lucky! Full moon tonight."))
    elif moon_phase == "new":
        print(_("[*] Be careful! New moon tonight."))

    if datetime.now().weekday() == 4:
        if datetime.now().day == 13:
            print(_("[*] Watch out! Bad things can happen on Friday the 13th."))
        elif datetime.now().month == 8 and datetime.now().day < 8:
            # First Friday of August
            print(_("[*] Today is International Beer Day!"))

    if datetime.now().month == 5 and datetime.now().day == 4:
        print(_("[*] May the force be with you!"))
    elif datetime.now().month == datetime.now().day == 1:
        print(_("[*] Happy new year!"))
    elif datetime.now().month == 12 and datetime.now().day == 25:
        print(_("[*] Merry christmas!"))
    elif datetime.now().month == 3 and datetime.now().day == 31:
        print(_("[*] Today is world backup day! \nIs your data safe?"))

    # Command-line interface definition.
    parser = argparse.ArgumentParser(
        description="Wapiti-3.0.3: Web application vulnerability scanner")

    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument(
        "-u", "--url",
        help=_("The base URL used to define the scan scope (default scope is folder)"),
        metavar="URL",
        dest="base_url",
        default="http://example.com/"
        # required=True
    )
    parser.add_argument("--scope",
                        help=_("Set scan scope"),
                        default="folder",
                        choices=["page", "folder", "domain", "url", "punk"])
    parser.add_argument("-m", "--module",
                        dest="modules",
                        default=None,
                        help=_("List of modules to load"),
                        metavar="MODULES_LIST")
    group.add_argument("--list-modules",
                       action="store_true",
                       help=_("List Wapiti attack modules and exit"))
    group.add_argument("--update",
                       action="store_true",
                       help=_("Update Wapiti attack modules and exit"))
    parser.add_argument("-l", "--level",
                        metavar="LEVEL",
                        dest="level",
                        help=_("Set attack level"),
                        default=1,
                        type=int,
                        choices=[1, 2])
    parser.add_argument(
        "-p", "--proxy",
        action="append",
        default=[],
        help=_("Set the HTTP(S) proxy to use. \nSupported: http(s) and socks proxies"),
        metavar="PROXY_URL",
        dest="proxies")
    parser.add_argument("--tor",
                        action="store_true",
                        help=_("Use Tor listener (127.0.0.1:9050)"))
    parser.add_argument("-a", "--auth-cred",
                        dest="credentials",
                        default=argparse.SUPPRESS,
                        help=_("Set HTTP authentication credentials"),
                        metavar="CREDENTIALS")
    parser.add_argument(
        "--auth-type",
        default=argparse.SUPPRESS,
        help=_("Set the authentication type to use"),
        choices=["basic", "digest", "kerberos", "ntlm", "post"])
    parser.add_argument("-c", "--cookie",
                        help=_("Set a JSON cookie file to use"),
                        default=argparse.SUPPRESS,
                        metavar="COOKIE_FILE")
    parser.add_argument(
        "--skip-crawl",
        action="store_true",
        help=_("Don't resume the scanning process, attack URLs scanned during a previous session"))
    parser.add_argument(
        "--resume-crawl",
        action="store_true",
        help=_("Resume the scanning process (if stopped) even if some attacks were previously performed"))
    parser.add_argument(
        "--flush-attacks",
        action="store_true",
        help=_("Flush attack history and vulnerabilities for the current session"))
    parser.add_argument(
        "--flush-session",
        action="store_true",
        help=_("Flush everything that was previously found for this target (crawled URLs, vulns, etc)"))
    parser.add_argument(
        "--store-session",
        help=_("Directory where to store attack history and session data."),
        default=None,
        metavar="PATH",
    )
    parser.add_argument(
        "--store-config",
        help=_("Directory where to store configuration databases."),
        default=None,
        metavar="PATH",
    )
    parser.add_argument("-s", "--start",
                        action="append",
                        default=[],
                        help=_("Adds an url to start scan with"),
                        metavar="URL",
                        dest="starting_urls")
    parser.add_argument("-x", "--exclude",
                        action="append",
                        default=[],
                        help=_("Adds an url to exclude from the scan"),
                        metavar="URL",
                        dest="excluded_urls")
    parser.add_argument("-r", "--remove",
                        action="append",
                        default=[],
                        help=_("Remove this parameter from urls"),
                        metavar="PARAMETER",
                        dest="excluded_parameters")
    parser.add_argument("--skip",
                        action="append",
                        default=[],
                        help=_("Skip attacking given parameter(s)"),
                        metavar="PARAMETER",
                        dest="skipped_parameters")
    parser.add_argument(
        "-d", "--depth",
        help=_("Set how deep the scanner should explore the website"),
        type=int,
        default=40)
    parser.add_argument(
        "--max-links-per-page",
        metavar="MAX",
        help=_("Set how many (in-scope) links the scanner should extract for each page"),
        type=int,
        default=100)
    parser.add_argument(
        "--max-files-per-dir",
        metavar="MAX",
        help=_("Set how many pages the scanner should explore per directory"),
        type=int,
        default=0)
    parser.add_argument(
        "--max-scan-time",
        metavar="SECONDS",
        help=_("Set how many seconds you want the scan to last (floats accepted)"),
        type=float,
        default=0)
    parser.add_argument(
        "--max-attack-time",
        metavar="SECONDS",
        help=_("Set how many seconds you want each attack module to last (floats accepted)"),
        type=float,
        default=0)
    parser.add_argument(
        "--max-parameters",
        metavar="MAX",
        help=_("URLs and forms having more than MAX input parameters will be erased before attack."),
        type=int,
        default=0)
    parser.add_argument(
        "-S", "--scan-force",
        metavar="FORCE",
        help=_(
            "Easy way to reduce the number of scanned and attacked URLs.\n"
            "Possible values: paranoid, sneaky, polite, normal, aggressive, insane"
        ),
        choices=[
            "paranoid", "sneaky", "polite", "normal", "aggressive", "insane"
        ],
        default="normal")
    parser.add_argument("-t", "--timeout",
                        type=float,
                        default=6.0,
                        help=_("Set timeout for requests"),
                        metavar="SECONDS")
    parser.add_argument(
        "-H", "--header",
        action="append",
        default=[],
        help=_("Set a custom header to use for every requests"),
        metavar="HEADER",
        dest="headers")
    parser.add_argument(
        "-A", "--user-agent",
        default=argparse.SUPPRESS,
        help=_("Set a custom user-agent to use for every requests"),
        metavar="AGENT",
        dest="user_agent")
    parser.add_argument("--verify-ssl",
                        default=0,
                        dest="check_ssl",
                        help=_("Set SSL check (default is no check)"),
                        type=int,
                        choices=[0, 1])
    parser.add_argument("--color",
                        action="store_true",
                        help=_("Colorize output"))
    parser.add_argument(
        "-v", "--verbose",
        metavar="LEVEL",
        dest="verbosity",
        help=_("Set verbosity level (0: quiet, 1: normal, 2: verbose)"),
        default=0,
        type=int,
        choices=range(0, 3))
    parser.add_argument(
        "-f", "--format",
        metavar="FORMAT",
        help=_("Set output format. \nSupported: json, html (default), txt, xml"),
        default="html",
        choices=["json", "html", "txt", "xml"])
    parser.add_argument("-o", "--output",
                        metavar="OUPUT_PATH",
                        default=argparse.SUPPRESS,
                        help=_("Output file or folder"))
    parser.add_argument("--external-endpoint",
                        metavar="EXTERNAL_ENDPOINT_URL",
                        default=argparse.SUPPRESS,
                        help=_("Url serving as endpoint for target"))
    parser.add_argument("--internal-endpoint",
                        metavar="INTERNAL_ENDPOINT_URL",
                        default=argparse.SUPPRESS,
                        help=_("Url serving as endpoint for attacker"))
    parser.add_argument(
        "--endpoint",
        metavar="ENDPOINT_URL",
        default="https://wapiti3.ovh/",
        help=_("Url serving as endpoint for both attacker and target"))
    parser.add_argument(
        "--no-bugreport",
        action="store_true",
        help=_("Don't send automatic bug report when an attack module fails"))
    parser.add_argument("--version",
                        action="version",
                        help=_("Show program's version number and exit"),
                        version=WAPITI_VERSION)

    args = parser.parse_args()

    if args.scope == "punk":
        print(_("[*] Do you feel lucky punk?"))

    # --list-modules: print the available attack modules and exit.
    if args.list_modules:
        print(_("[*] Available modules:"))
        modules_list = sorted(module_name[4:] for module_name in attack.modules)
        for module_name in modules_list:
            mod = import_module("wapitiCore.attack.mod_" + module_name)
            is_common = " (used by default)" if module_name in attack.commons else ""
            print("\t{}{}".format(module_name, is_common))
            print("\t\t" + getdoc(getattr(mod, "mod_" + module_name)))
        print('')
        sys.exit()

    url = fix_url_path(args.base_url)
    parts = urlparse(url)
    if not parts.scheme or not parts.netloc:
        print(_("Invalid base URL was specified, please give a complete URL with protocol scheme."))
        sys.exit()

    wap = Wapiti(url, scope=args.scope, session_dir=args.store_session, config_dir=args.store_config)

    # --update: refresh module data and exit.
    if args.update:
        print(_("[*] Updating modules"))
        attack_options = {"level": args.level, "timeout": args.timeout}
        wap.set_attack_options(attack_options)
        wap.update()
        sys.exit()

    # Apply every command-line option to the scanner; invalid values raise
    # InvalidOptionValue which exits with status 2.
    try:
        for start_url in args.starting_urls:
            if start_url.startswith(("http://", "https://")):
                wap.add_start_url(start_url)
            elif os.path.isfile(start_url):
                # -s may also point to a UTF-8 file with one URL per line.
                try:
                    urlfd = codecs.open(start_url, encoding="UTF-8")
                    for urlline in urlfd:
                        urlline = urlline.strip()
                        if urlline.startswith(("http://", "https://")):
                            wap.add_start_url(urlline)
                    urlfd.close()
                except UnicodeDecodeError:
                    print(_("Error: File given with the -s option must be UTF-8 encoded !"))
                    raise InvalidOptionValue("-s", start_url)
            else:
                raise InvalidOptionValue('-s', start_url)

        for exclude_url in args.excluded_urls:
            if exclude_url.startswith(("http://", "https://")):
                wap.add_excluded_url(exclude_url)
            else:
                raise InvalidOptionValue("-x", exclude_url)

        for proxy_url in args.proxies:
            if proxy_url.startswith(("http://", "https://", "socks://")):
                wap.set_proxy(proxy_url)
            else:
                raise InvalidOptionValue("-p", proxy_url)

        if args.tor:
            wap.set_proxy("socks://127.0.0.1:9050/")

        if "cookie" in args:
            if os.path.isfile(args.cookie):
                wap.set_cookie_file(args.cookie)
            else:
                raise InvalidOptionValue("-c", args.cookie)

        if "credentials" in args:
            # Credentials use the "username%password" format.
            if "%" in args.credentials:
                wap.set_auth_credentials(args.credentials.split("%", 1))
            else:
                raise InvalidOptionValue("-a", args.credentials)

        if "auth_type" in args:
            # Form-based ("post") auth uses the first -s URL as login page.
            if args.auth_type == "post" and args.starting_urls != []:
                wap.crawler.auth_url = args.starting_urls[0]
            else:
                wap.set_auth_type(args.auth_type)

        for bad_param in args.excluded_parameters:
            wap.add_bad_param(bad_param)

        wap.set_max_depth(args.depth)
        wap.set_max_files_per_dir(args.max_files_per_dir)
        wap.set_max_links_per_page(args.max_links_per_page)
        wap.set_scan_force(args.scan_force)
        wap.set_max_scan_time(args.max_scan_time)
        wap.set_max_attack_time(args.max_attack_time)

        # should be a setter
        wap.verbosity(args.verbosity)
        if args.color:
            wap.set_color()
        wap.set_timeout(args.timeout)
        wap.set_modules(args.modules)

        if args.no_bugreport:
            wap.set_bug_reporting(False)

        if "user_agent" in args:
            wap.add_custom_header("user-agent", args.user_agent)

        for custom_header in args.headers:
            if ":" in custom_header:
                hdr_name, hdr_value = custom_header.split(":", 1)
                wap.add_custom_header(hdr_name.strip(), hdr_value.strip())

        if "output" in args:
            wap.set_output_file(args.output)

        # NOTE(review): unused leftover variable.
        found_generator = False
        if args.format not in GENERATORS:
            raise InvalidOptionValue("-f", args.format)

        wap.set_report_generator_type(args.format)

        wap.set_verify_ssl(bool(args.check_ssl))

        attack_options = {"level": args.level, "timeout": args.timeout}

        # Endpoint options: --endpoint sets both sides at once; the
        # dedicated flags can then override one side.
        if "endpoint" in args:
            endpoint = fix_url_path(args.endpoint)
            if is_valid_endpoint('ENDPOINT', endpoint):
                attack_options["external_endpoint"] = endpoint
                attack_options["internal_endpoint"] = endpoint
            else:
                raise InvalidOptionValue("--endpoint", args.endpoint)

        if "external_endpoint" in args:
            external_endpoint = fix_url_path(args.external_endpoint)
            if is_valid_endpoint('EXTERNAL ENDPOINT', external_endpoint):
                attack_options["external_endpoint"] = external_endpoint
            else:
                raise InvalidOptionValue("--external-endpoint", external_endpoint)

        if "internal_endpoint" in args:
            internal_endpoint = fix_url_path(args.internal_endpoint)
            if is_valid_endpoint('INTERNAL ENDPOINT', internal_endpoint):
                # The internal endpoint must be reachable from this host.
                if ping(internal_endpoint):
                    attack_options["internal_endpoint"] = internal_endpoint
                else:
                    print(_("Error: Internal endpoint URL must be accessible from Wapiti!"))
                    raise InvalidOptionValue("--internal-endpoint", internal_endpoint)
            else:
                raise InvalidOptionValue("--internal-endpoint", internal_endpoint)

        if args.skipped_parameters:
            attack_options["skipped_parameters"] = set(args.skipped_parameters)

        wap.set_attack_options(attack_options)

        if args.flush_attacks:
            wap.flush_attacks()

        if args.flush_session:
            wap.flush_session()

    except InvalidOptionValue as msg:
        print(msg)
        sys.exit(2)

    # Run the crawl (unless skipped) then the attack phase.
    try:
        if not args.skip_crawl:
            if wap.have_attacks_started() and not args.resume_crawl:
                pass
            else:
                if wap.has_scan_started():
                    print(_("[*] Resuming scan from previous session, please wait"))

                if "auth_type" in args and args.auth_type == "post":
                    wap.crawler.try_login(wap.crawler.auth_url)
                wap.browse()

        if args.max_parameters:
            count = wap.persister.remove_big_requests(args.max_parameters)
            # NOTE(review): .format() is applied before translation here,
            # so the formatted string is what gets looked up by _().
            print(
                _("[*] {0} URLs and forms having more than {1} parameters were removed."
                  .format(count, args.max_parameters)))

        print(
            _("[*] Wapiti found {0} URLs and forms during the scan").format(
                wap.count_resources()))

        try:
            wap.attack()
        except KeyboardInterrupt:
            print('')
            print(
                _("Attack process interrupted. Scan will be resumed next time "
                  "unless you specify \"--flush-attacks\" or \"--flush-session\"."))
            print('')
            pass

    except OperationalError:
        print(
            _("[!] Can't store informations in persister. SQLite database must have been locked by another process"))
        print(_("[!] You should unlock and launch Wapiti again."))
    except SystemExit:
        pass
def attack(self):
    """Launch the attacks based on the preferences set by the command line.

    Runs every activated module (self.attacks, sorted by PRIORITY in
    _init_attacks) over the crawled resources, handling:
      * module dependencies (``require``) — a module is skipped when one of
        its dependencies is not activated;
      * Ctrl+C — the user is offered r/n/q/c (report / next module / quit /
        continue);
      * transient network errors — the request is skipped after a 1s pause;
      * unexpected exceptions — a crash report is optionally uploaded.
    Finally writes the report with the configured generator.
    """
    self._init_attacks()

    for attack_module in self.attacks:
        start = datetime.utcnow()
        if attack_module.do_get is False and attack_module.do_post is False:
            continue

        print('')
        if attack_module.require:
            # A dependency counts as satisfied only if that module is itself activated
            attack_name_list = [
                attack.name for attack in self.attacks
                if attack.name in attack_module.require and (attack.do_get or attack.do_post)
            ]
            if attack_module.require != attack_name_list:
                print(_("[!] Missing dependencies for module {0}:").format(attack_module.name))
                print("  {0}".format(",".join([
                    attack for attack in attack_module.require if attack not in attack_name_list
                ])))
                continue

            attack_module.load_require([
                attack for attack in self.attacks if attack.name in attack_module.require
            ])

        attack_module.log_green(_("[*] Launching module {0}"), attack_module.name)

        already_attacked = self.persister.count_attacked(attack_module.name)
        if already_attacked:
            attack_module.log_green(
                _("[*] {0} pages were previously attacked and will be skipped"),
                already_attacked
            )

        generator = attack_module.attack()

        answer = "0"  # becomes r/n/q/c if the user hits Ctrl+C during this module
        skipped = 0   # count of requests dropped because of network errors
        while True:
            try:
                original_request_or_exception = next(generator)
                if isinstance(original_request_or_exception, BaseException):
                    raise original_request_or_exception
                if (datetime.utcnow() - start).total_seconds() > self._max_attack_time >= 1:
                    # A _max_attack_time below 1 means "no limit"
                    print(_("Max attack time was reached for module {0}, stopping.".format(attack_module.name)))
                    break
            except KeyboardInterrupt as exception:
                # Ask the user what to do with the rest of the scan
                print('')
                print(_("Attack process was interrupted. Do you want to:"))
                print(_("\tr) stop everything here and generate the (R)eport"))
                print(_("\tn) move to the (N)ext attack module (if any)"))
                print(_("\tq) (Q)uit without generating the report"))
                print(_("\tc) (C)ontinue the current attack"))

                while True:
                    try:
                        answer = input("? ").strip().lower()
                    except UnicodeDecodeError:
                        pass

                    if answer not in ("r", "n", "q", "c"):
                        print(_("Invalid choice. Valid choices are r, n, q and c."))
                    else:
                        break

                if answer in ("r", "n"):
                    break

                if answer == "c":
                    continue

                # if answer is q, raise KeyboardInterrupt and it will stop cleanly
                raise exception
            except (ConnectionError, Timeout, ChunkedEncodingError, ContentDecodingError):
                # Transient network issue: wait a little and move on to the next request
                sleep(1)
                skipped += 1
                continue
            except StopIteration:
                break
            except Exception as exception:
                # Catch every possible exceptions and print it
                exception_traceback = sys.exc_info()[2]
                print(exception.__class__.__name__, exception)
                print_tb(exception_traceback)

                if self._bug_report:
                    traceback_file = str(uuid1())
                    with open(traceback_file, "w") as traceback_fd:
                        print_tb(exception_traceback, file=traceback_fd)
                        print(
                            "{}: {}".format(exception.__class__.__name__, exception),
                            file=traceback_fd
                        )
                        print(
                            "Occurred in {} on {}".format(attack_module.name, self.target_url),
                            file=traceback_fd
                        )
                        # BUGFIX: this line was missing file=traceback_fd and went to
                        # stdout, so uploaded crash reports lacked version/OS info
                        print(
                            "{}. Requests {}. OS {}".format(WAPITI_VERSION, requests.__version__, sys.platform),
                            file=traceback_fd
                        )

                    try:
                        upload_request = Request(
                            "https://wapiti3.ovh/upload.php",
                            file_params=[
                                ["crash_report", [traceback_file, open(traceback_file, "rb").read()]]
                            ]
                        )
                        page = self.crawler.send(upload_request)
                        print(_("Sending crash report {} ... {}").format(traceback_file, page.content))
                    except RequestException:
                        print(_("Error sending crash report"))
                    os.unlink(traceback_file)
            else:
                # Mark the resource as attacked so a resumed session can skip it
                if original_request_or_exception and original_request_or_exception.path_id is not None:
                    self.persister.set_attacked(
                        original_request_or_exception.path_id,
                        attack_module.name
                    )

        if hasattr(attack_module, "finish"):
            attack_module.finish()

        if skipped:
            print(_("{} requests were skipped due to network issues").format(skipped))

        # BUGFIX: was `answer == "1"`, a leftover from the old numeric menu that
        # made "r" (stop everything and generate the report) behave like "n".
        # The input loop above only lets r/n/q/c through, so "1" was unreachable.
        if answer == "r":
            break

    # if self.crawler.get_uploads():
    #     print('')
    #     print(_("Upload scripts found:"))
    #     print("----------------------")
    #     for upload_form in self.crawler.get_uploads():
    #         print(upload_form)

    # Pick a default output file name when none was given on the command line
    # (same scheme as in _init_passive)
    if not self.output_file:
        if self.report_generator_type == "html":
            self.output_file = self.COPY_REPORT_DIR
        else:
            filename = "{}_{}".format(
                self.server.replace(":", "_"),
                strftime("%m%d%Y_%H%M", self.report_gen.scan_date)
            )
            if self.report_generator_type == "txt":
                extension = ".txt"
            elif self.report_generator_type == "json":
                extension = ".json"
            else:
                extension = ".xml"
            self.output_file = filename + extension

    # Feed every stored finding to the report generator
    for payload in self.persister.get_payloads():
        if payload.type == "vulnerability":
            self.report_gen.add_vulnerability(
                category=payload.category,
                level=payload.level,
                request=payload.evil_request,
                parameter=payload.parameter,
                info=payload.info
            )
        elif payload.type == "anomaly":
            self.report_gen.add_anomaly(
                category=payload.category,
                level=payload.level,
                request=payload.evil_request,
                parameter=payload.parameter,
                info=payload.info
            )
        elif payload.type == "additional":
            self.report_gen.add_additional(
                category=payload.category,
                level=payload.level,
                request=payload.evil_request,
                parameter=payload.parameter,
                info=payload.info
            )

    self.report_gen.generate_report(self.output_file)

    print('')
    print(_("Report"))
    print("------")
    print(_("A report has been generated in the file {0}").format(self.output_file))
    if self.report_generator_type == "html":
        print(_("Open {0} with a browser to see this report.").format(self.report_gen.final_path))
def _init_attacks(self):
    """Instantiate every attack module and apply the -m/--module selection.

    Loads each wapitiCore.attack.mod_* module, creates its class with the
    shared crawler/persister/logger/options, sorts modules by PRIORITY, then
    interprets self.module_options (comma-separated list such as
    "common,-exec:post,+xss:get"): a leading "-" deactivates, an optional
    leading "+" activates, an optional ":get"/":post" suffix restricts the
    toggle to one HTTP method, and "all"/"common" address module groups.
    """
    self._init_report()

    logger = ConsoleLogger()
    if self.color:
        logger.color = True

    print(_("[*] Loading modules:"))
    modules_list = sorted(module_name[4:] for module_name in attack.modules)
    print("\t {0}".format(", ".join(modules_list)))
    for mod_name in attack.modules:
        try:
            mod = import_module("wapitiCore.attack." + mod_name)
        except ImportError:
            print(_("[!] Could not find module {0}").format(mod_name))
            continue

        mod_instance = getattr(mod, mod_name)(self.crawler, self.persister, logger, self.attack_options)
        if hasattr(mod_instance, "set_timeout"):
            mod_instance.set_timeout(self.crawler.timeout)
        self.attacks.append(mod_instance)

    self.attacks.sort(key=attrgetter("PRIORITY"))

    for attack_module in self.attacks:
        attack_module.set_verbose(self.verbose)
        # Only the "common" modules are active by default
        if attack_module.name not in attack.commons:
            attack_module.do_get = False
            attack_module.do_post = False
        if self.color == 1:
            attack_module.set_color()

    # Custom list of modules was specified
    if self.module_options is not None:
        # First deactivate all modules
        for attack_module in self.attacks:
            attack_module.do_get = False
            attack_module.do_post = False

        opts = self.module_options.split(",")

        for module_opt in opts:
            if module_opt.strip() == "":
                continue

            method = ""
            if module_opt.find(":") > 0:
                module_name, method = module_opt.split(":", 1)
            else:
                module_name = module_opt

            if module_name.startswith("-"):
                # Deactivate the module (possibly only for one HTTP method)
                self._toggle_module_activation(module_name[1:], method, False)
            else:
                # Activate; the "+" prefix is optional
                if module_name.startswith("+"):
                    module_name = module_name[1:]
                self._toggle_module_activation(module_name, method, True)

def _toggle_module_activation(self, module_name, method, activate):
    """Set do_get/do_post flags for one module (or the "all"/"common" groups).

    method is "" (both HTTP methods), "get" or "post"; activate is the boolean
    assigned to the selected flag(s). Factors out the block that was duplicated
    for the activation and deactivation paths.
    """
    if module_name in ("all", "common"):
        for attack_module in self.attacks:
            if module_name == "all" or attack_module.name in attack.commons:
                if not method:
                    attack_module.do_get = attack_module.do_post = activate
                elif method == "get":
                    attack_module.do_get = activate
                elif method == "post":
                    attack_module.do_post = activate
    else:
        found = False
        for attack_module in self.attacks:
            if attack_module.name == module_name:
                found = True
                if not method:
                    attack_module.do_get = attack_module.do_post = activate
                elif method == "get":
                    attack_module.do_get = activate
                elif method == "post":
                    attack_module.do_post = activate
        if not found:
            print(_("[!] Unable to find a module named {0}").format(module_name))
class Messages:
    """Translatable message templates shared by the attack modules.

    Every value goes through gettext's _() once, at class-creation time, so
    the locale active at import decides the translation. The templates carry
    str.format() placeholders ({0}, {1}, ...) that callers fill in with the
    vulnerability/anomaly name, the page and the parameter involved.
    """

    # Report lines describing where/how an injection succeeded
    MSG_EVIL_URL = _(" Evil url: {0}")
    MSG_PARAM_INJECT = _("{0} in {1} via injection in the parameter {2}")
    MSG_FROM = _(" coming from {0}")
    MSG_QS_INJECT = _("{0} in {1} via injection in the query string")
    MSG_PATH_INJECT = _("{0} in {1} via injection in the resource path")
    MSG_EVIL_PARAM = _("Involved parameter: {0}")
    MSG_EVIL_REQUEST = _("Evil request:")

    # Anomaly categories (server errors / resource exhaustion)
    ERROR_500 = _("Internal Server Error")
    RES_CONSUMPTION = _("Resource consumption")

    # Timeout / HTTP 500 anomaly details, one variant per injection point
    MSG_500 = _("Received a HTTP 500 error in {0}")
    MSG_TIMEOUT = _("Timeout occurred in {0}")
    MSG_QS_TIMEOUT = _(
        "The request timed out while attempting to inject a payload in the query string"
    )
    MSG_PATH_TIMEOUT = _(
        "The request timed out while attempting to inject a payload in the resource path"
    )
    MSG_PARAM_TIMEOUT = _(
        "The request timed out while attempting to inject a payload in the parameter {0}"
    )
    MSG_QS_500 = _("The server responded with a 500 HTTP error code "
                   "while attempting to inject a payload in the query string")
    MSG_PATH_500 = _(
        "The server responded with a 500 HTTP error code "
        "while attempting to inject a payload in the resource path")
    MSG_PARAM_500 = _(
        "The server responded with a 500 HTTP error code "
        "while attempting to inject a payload in the parameter {0}")
def inner_ctrl_c_signal_handler():  # pylint: disable=unused-argument
    """Handle Ctrl+C by asking running crawler tasks to stop.

    Sets the shared ``stop_event`` (closed over from the enclosing scope) so
    cooperating tasks can notice and finish cleanly.

    NOTE(review): the function takes no parameters, so it is presumably
    registered through asyncio's ``loop.add_signal_handler`` (which calls the
    callback without arguments) rather than ``signal.signal`` (which passes
    signum/frame) -- confirm at the registration site. The pylint
    ``unused-argument`` disable looks like a leftover from an earlier
    signature that did take arguments.
    """
    print(_("Waiting for running crawler tasks to finish, please wait."))
    stop_event.set()
# NOTE(review): the MSG_* assignments below duplicate the tail of the Messages
# class; their enclosing scope is not visible in this chunk -- confirm against
# the full file whether they are class attributes or module-level leftovers.
MSG_PARAM_TIMEOUT = _(
    "The request timed out while attempting to inject a payload in the parameter {0}"
)
MSG_QS_500 = _("The server responded with a 500 HTTP error code "
               "while attempting to inject a payload in the query string")
MSG_PATH_500 = _(
    "The server responded with a 500 HTTP error code "
    "while attempting to inject a payload in the resource path")
MSG_PARAM_500 = _(
    "The server responded with a 500 HTTP error code "
    "while attempting to inject a payload in the parameter {0}")

# The only reason those lines are here is to allow the translation script to find them.
# The strings themselves come from the vulnerability/anomaly XML definitions; calling
# _() on them here makes xgettext-style extraction pick them up for the .po catalogs.
_("Backup file description")
_("Backup file solution")
_("Blind SQL Injection description")
_("Blind SQL Injection solution")
_("Commands execution description")
_("Commands execution solution")
_("CRLF description")
_("CRLF solution")
_("Cross Site Scripting description")
_("Cross Site Scripting solution")
_("File Handling description")
class Notice(GenericObservation):
    """Base observation type for informational findings.

    Extends GenericObservation and is itself subclassed (e.g. by Anomaly,
    which adds the HTTP 500 / timeout message constants). Only the members
    visible in this chunk are documented here; the class may define more
    beyond this view.
    """

    # Constants
    ERROR_404 = _("File not found message")