def process_cipher_suites(results, version: str):
    """Log the cipher suites accepted for a TLS/SSL version and yield report entries.

    Yields (wapiti_level, message) tuples, one per security level, grouping the
    accepted ciphers so the report stays compact.

    results: scan results exposing `accepted_cipher_suites` (sslyze-style).
    version: protocol version string (e.g. "TLS 1.2") the ciphers were accepted for.
    """
    accepted_ciphers = results.accepted_cipher_suites
    if not accepted_ciphers:
        return

    with open(path_join(Attack.DATA_DIR, "cipher_suites.json"), encoding="utf-8") as fd:
        # Renamed from `ciphers`: the same name was previously reused below for the
        # per-severity list of openssl names, shadowing this database dict.
        cipher_database = json.load(fd)

    group_by_severity = defaultdict(list)
    log_blue(f"\nAccepted cipher suites for {version}:")
    for accepted_cipher_suite in accepted_ciphers:
        try:
            security_level = cipher_database[accepted_cipher_suite.cipher_suite.name]["security"].title()
        except KeyError:
            # Cipher that isn't in our database... certainly fresh and secure but let's ignore it
            continue

        logging.log(
            cipher_level_to_color(security_level),
            f"* {accepted_cipher_suite.cipher_suite.name} "
            f"{accepted_cipher_suite.cipher_suite.openssl_name} "
            f"{security_level}")

        # Group ciphers using severity to reduce entries in the report
        group_by_severity[security_level].append(accepted_cipher_suite.cipher_suite.openssl_name)

    for security_level, openssl_names in group_by_severity.items():
        message = _("The following ciphers are {0} for {1}: {2}").format(
            _(security_level).lower(), version, ", ".join(sorted(openssl_names)))
        yield cipher_level_to_wapiti_level(security_level), message
async def worker(self, queue: asyncio.Queue, resolvers: Iterator[str], root_domain: str, bad_responses: Set[str]):
    """Consume domains from the queue and check their CNAME records for subdomain takeover.

    Runs until the "__exit__" sentinel is read from the queue. CNAMEs pointing back
    to root_domain or listed in bad_responses are ignored; candidate takeovers are
    confirmed via self.takeover.check() and reported as high-severity findings.
    """
    while True:
        try:
            domain = queue.get_nowait().strip()
        except asyncio.QueueEmpty:
            # Nothing to do right now; back off briefly instead of busy-looping
            await asyncio.sleep(.05)
        else:
            queue.task_done()
            if domain == "__exit__":
                break
            try:
                resolver = dns.asyncresolver.Resolver()
                resolver.timeout = 10.
                # Take the next 10 nameservers from the iterator — presumably a cycling
                # iterator; StopIteration is not handled here (TODO confirm with caller)
                resolver.nameservers = [next(resolvers) for __ in range(10)]
                answers = await resolver.resolve(domain, 'CNAME', raise_on_no_answer=False)
            except (socket.gaierror, UnicodeError):
                continue
            # NOTE(review): NXDOMAIN is defined in dns.resolver — confirm that
            # dns.asyncresolver re-exports it, otherwise this would AttributeError.
            except (dns.asyncresolver.NXDOMAIN, dns.exception.Timeout):
                continue
            except (dns.name.EmptyLabel, dns.resolver.NoNameservers) as exception:
                logging.warning(f"{domain}: {exception}")
                continue
            for answer in answers:
                cname = answer.to_text().strip(".")
                if cname in bad_responses:
                    continue
                log_verbose(_(f"Record {domain} points to {cname}"))
                try:
                    if get_root_domain(cname) == root_domain:
                        # If it is an internal CNAME (like www.target.tld to target.tld) just ignore
                        continue
                except (TldDomainNotFound, TldBadUrl):
                    logging.warning(f"{cname} is not a valid domain name")
                    continue
                if await self.takeover.check(domain, cname):
                    log_red("---")
                    log_red(_(f"CNAME {domain} to {cname} seems vulnerable to takeover"))
                    log_red("---")
                    await self.add_vuln_high(
                        category=NAME,
                        info=_(f"CNAME {domain} to {cname} seems vulnerable to takeover"),
                        request=Request(f"https://{domain}/"),
                        wstg=WSTG_CODE)
async def attack(self, request: Request):
    """Probe a page with an OPTIONS request and report uncommon allowed HTTP methods."""
    page = request.path
    self.excluded_path.add(page)
    option_request = Request(
        page,
        "OPTIONS",
        referer=request.referer,
        link_depth=request.link_depth)

    if self.verbose == 2:
        logging.info("[+] {}".format(option_request))

    try:
        response = await self.crawler.async_send(option_request)
    except RequestError:
        self.network_errors += 1
        return

    # Only 2xx/3xx answers carry a meaningful Allow header for us
    if not 200 <= response.status < 400:
        return

    raw_verbs = response.headers.get("allow", '').upper().split(',')
    allowed = {verb.strip() for verb in raw_verbs if verb.strip()}
    uncommon = sorted(allowed - self.KNOWN_METHODS)
    if uncommon:
        finding = _("Interesting methods allowed on {}: {}").format(page, ", ".join(uncommon))
        self.log_orange("---")
        self.log_orange(finding)
        await self.add_addition(
            category=NAME,
            request=option_request,
            info=finding)
        self.log_orange("---")
def attack(self):
    """Generator attack loop for the Shellshock module (legacy sync API).

    Sends each GET resource once with the Shellshock payload placed in the request
    headers (self.hdrs) and reports the URL when the random canary string
    (self.rand_string) is reflected in the response body. Yields caught exceptions
    and then the original request, per the legacy module protocol.
    """
    http_resources = self.persister.get_links(attack_module=self.name) if self.do_get else []
    for original_request in http_resources:
        try:
            url = original_request.path
            if self.verbose == 2:
                print("[¨] {0}".format(url))
            # Attack each URL only once per scan
            if url not in self.attacked_get:
                self.attacked_get.append(url)
                evil_req = web.Request(url)
                resp = self.crawler.send(evil_req, headers=self.hdrs)
                if resp:
                    data = resp.content
                    # The payload echoes rand_string when the server executed it
                    if self.rand_string in data:
                        self.log_red(_("URL {0} seems vulnerable to Shellshock attack!").format(url))
                        self.add_vuln(
                            request_id=original_request.path_id,
                            category=Vulnerability.EXEC,
                            level=Vulnerability.HIGH_LEVEL,
                            request=evil_req,
                            info=_("URL {0} seems vulnerable to Shellshock attack").format(url)
                        )
        except (RequestException, KeyboardInterrupt) as exception:
            # Legacy protocol: hand exceptions to the caller instead of raising
            yield exception
        yield original_request
def __init__(self, crawler, xml_report_generator, logger, attack_options):
    """Load the local Nikto database, downloading it from cirt.net when missing or unreadable."""
    Attack.__init__(self, crawler, xml_report_generator, logger, attack_options)
    user_config_dir = os.getenv('HOME') or os.getenv('USERPROFILE')
    # os.path.join keeps the path valid on every platform (was: string concat of "/config")
    user_config_dir = os.path.join(user_config_dir, "config")
    if not os.path.isdir(user_config_dir):
        os.makedirs(user_config_dir)
    try:
        with open(os.path.join(user_config_dir, self.NIKTO_DB)) as fd:
            reader = csv.reader(fd)
            # Keep only data rows: non-empty and starting with a numeric test id
            self.nikto_db = [line for line in reader if line != [] and line[0].isdigit()]
    except IOError:
        try:
            print(_("Problem with local nikto database."))
            print(_("Downloading from the web..."))
            nikto_req = web.Request("http://cirt.net/nikto/UPDATES/2.1.5/db_tests")
            response = self.crawler.send(nikto_req)
            # Nikto's CSV uses backslash escapes instead of doubled quotes
            csv.register_dialect("nikto", quoting=csv.QUOTE_ALL, doublequote=False, escapechar="\\")
            reader = csv.reader(response.content.split("\n"), "nikto")
            self.nikto_db = [line for line in reader if line != [] and line[0].isdigit()]
            # Cache the freshly downloaded database for the next runs
            with open(os.path.join(user_config_dir, self.NIKTO_DB), "w") as fd:
                writer = csv.writer(fd)
                writer.writerows(self.nikto_db)
        except socket.timeout:
            print(_("Error downloading Nikto database"))
def _find_pattern_in_response(data, warned: bool): vuln_info = "" executed = 0 if "eval()'d code</b> on line <b>" in data and not warned: vuln_info = _("Warning eval()") warned = True if "PATH=" in data and "PWD=" in data: vuln_info = _("Command execution") executed = True if "w4p1t1_eval" in data: vuln_info = _("PHP evaluation") executed = True if "Cannot execute a blank command in" in data and not warned: vuln_info = _("Warning exec") warned = True if "sh: command substitution:" in data and not warned: vuln_info = _("Warning exec") warned = True if "Fatal error</b>: preg_replace" in data and not warned: vuln_info = _("preg_replace injection") warned = True if "Warning: usort()" in data and not warned: vuln_info = _("Warning usort()") warned = True if "Warning: preg_replace():" in data and not warned: vuln_info = _("preg_replace injection") warned = True if "Warning: assert():" in data and not warned: vuln_info = _("Warning assert") warned = True if "Failure evaluating code:" in data and not warned: vuln_info = _("Evaluation warning") warned = True return vuln_info, executed, warned
async def attack(self, request: Request):
    """Send an OPTIONS request for the page and log/report any non-standard allowed methods."""
    page = request.path
    self.excluded_path.add(page)
    option_request = Request(
        page,
        "OPTIONS",
        referer=request.referer,
        link_depth=request.link_depth)

    log_verbose(f"[+] {option_request}")

    try:
        response = await self.crawler.async_send(option_request)
    except RequestError:
        self.network_errors += 1
        return

    # Ignore error responses: only successful or redirected answers matter here
    if not (response.is_success or response.is_redirect):
        return

    declared = response.headers.get("allow", '').upper().split(',')
    methods = {verb.strip() for verb in declared if verb.strip()}
    interesting_methods = sorted(methods - self.KNOWN_METHODS)
    if not interesting_methods:
        return

    finding = _("Interesting methods allowed on {}: {}").format(page, ", ".join(interesting_methods))
    log_orange("---")
    log_orange(finding)
    await self.add_addition(
        category=NAME,
        request=option_request,
        info=finding,
        wstg=WSTG_CODE)
    log_orange("---")
async def attack(self, request: Request):
    """Inspect a form request for missing, unchecked, or predictable anti-CSRF tokens."""
    token = self.is_csrf_present(request)

    # check if token is present
    if not token:
        finding = _("Lack of anti CSRF token")
    else:
        if not await self.is_csrf_verified(request):
            finding = _("CSRF token '{}' is not properly checked in backend").format(self.csrf_string)
        elif self.is_csrf_robust(token):
            # Token is present, verified by the backend and strong enough: nothing to report
            return
        else:
            finding = _("CSRF token '{}' might be easy to predict").format(self.csrf_string)

    self.already_vulnerable.append((request.url, request.post_keys))
    for line in ("---", finding, request.http_repr(), "---"):
        self.log_red(line)
    await self.add_vuln_medium(
        request_id=request.path_id,
        category=NAME,
        request=request,
        info=finding)
async def attack(self, request: Request):
    """Send the Shellshock payload (carried in self.hdrs headers) and flag the URL
    when the random canary string is reflected in the response body."""
    url = request.path
    self.attacked_get.append(url)
    # We can't see anything by printing requests because payload is in headers so let's print nothing :)
    evil_req = Request(url)
    try:
        resp = await self.crawler.async_send(evil_req, headers=self.hdrs)
    except RequestError:
        self.network_errors += 1
        return
    if resp:
        data = resp.content
        if self.rand_string in data:
            self.log_red(
                _("URL {0} seems vulnerable to Shellshock attack!").format(
                    url))
            # NOTE(review): self.add_vuln is called without await here, while sibling
            # async modules await their add_vuln_* helpers — confirm add_vuln is
            # synchronous, otherwise this coroutine is never executed.
            self.add_vuln(
                request_id=request.path_id,
                category=NAME,
                level=HIGH_LEVEL,
                request=evil_req,
                info=_("URL {0} seems vulnerable to Shellshock attack").format(url))
async def test_csrf_cases():
    """End-to-end check of the CSRF module verdicts on four crafted requests."""
    persister = AsyncMock()

    # (url, post_params or None for a plain GET) — path_id is the 1-based position
    definitions = [
        ("http://127.0.0.1:65086/", None),
        ("http://127.0.0.1:65086/", [["email", "*****@*****.**"], ["xsrf_token", "weak"]]),
        ("http://127.0.0.1:65086/?check=true", [["email", "*****@*****.**"], ["xsrf_token", "weak"]]),
        ("http://127.0.0.1:65086/?check=true", [["name", "Obiwan"]]),
    ]
    all_requests = []
    for path_id, (url, post_params) in enumerate(definitions, start=1):
        if post_params is None:
            crafted = Request(url)
        else:
            crafted = Request(url, method="POST", post_params=post_params)
        crafted.path_id = path_id
        all_requests.append(crafted)

    crawler = AsyncCrawler("http://127.0.0.1:65086/", timeout=1)
    options = {"timeout": 10, "level": 1}
    module = ModuleCsrf(crawler, persister, options, Event())
    module.do_post = True

    for crafted in all_requests:
        if await module.must_attack(crafted):
            await module.attack(crafted)
        else:
            # Not attacked because of GET verb
            assert crafted.path_id == 1

    vulnerabilities = {
        (call[1]["request_id"], call[1]["info"])
        for call in persister.add_payload.call_args_list
    }
    assert vulnerabilities == {
        (2, _("CSRF token '{}' is not properly checked in backend").format("xsrf_token")),
        (3, _("CSRF token '{}' might be easy to predict").format("xsrf_token")),
        (4, _("Lack of anti CSRF token"))
    }
    await crawler.close()
def attack(self):
    """Generator attack loop probing for leftover backup copies of each crawled file.

    For every text resource, derives candidate backup names from the payload
    templates ([FILE_NAME]/[FILE_NOEXT] placeholders) and reports any that answer
    with HTTP 200. Yields caught exceptions then the original request (legacy protocol).
    """
    http_resources = self.persister.get_links(
        attack_module=self.name) if self.do_get else []
    for original_request in http_resources:
        if original_request.file_name == "":
            yield original_request
            continue

        page = original_request.path
        headers = original_request.headers

        # Do not attack application-type files
        if "content-type" not in headers:
            # Sometimes there's no content-type... so we rely on the document extension
            if (page.split(".")[-1] not in self.allowed) and page[-1] != "/":
                yield original_request
                continue
        elif "text" not in headers["content-type"]:
            yield original_request
            continue

        for payload, flags in self.payloads:
            try:
                payload = payload.replace("[FILE_NAME]", original_request.file_name)
                payload = payload.replace("[FILE_NOEXT]", splitext(original_request.file_name)[0])
                url = page.replace(original_request.file_name, payload)

                if self.verbose == 2:
                    print("[¨] {0}".format(url))

                if url not in self.attacked_get:
                    self.attacked_get.append(url)
                    evil_req = web.Request(url)
                    response = self.crawler.send(evil_req)
                    if response and response.status == 200:
                        # Bug fix: format AFTER the gettext lookup so the literal string
                        # is the translation key (was _("...".format(...)))
                        self.log_red(_("Found backup file {}").format(evil_req.url))
                        self.add_vuln(
                            request_id=original_request.path_id,
                            category=NAME,
                            level=LOW_LEVEL,
                            request=evil_req,
                            info=_("Backup file {0} found for {1}").format(url, page))
            except (KeyboardInterrupt, RequestException) as exception:
                yield exception
        yield original_request
def attack(self, request: Request):
    """Fetch the root URL and report each missing HTTP security header.

    The four checks (X-Frame-Options, X-XSS-Protection, X-Content-Type-Options,
    Strict-Transport-Security) previously lived in four copy-pasted blocks; they
    are now driven by one table, preserving order, messages and translation keys.
    """
    request_to_root = Request(request.url)
    self.finished = True
    try:
        response = self.crawler.get(request_to_root, follow_redirects=True)
    except RequestException:
        self.network_errors += 1
        return

    # (log message, header name, accepted values, vulnerability description)
    # _() is applied to literals so translation keys stay extractable.
    header_checks = [
        (_("Checking X-Frame-Options :"), "X-Frame-Options", self.check_list_xframe, INFO_XFRAME_OPTIONS),
        (_("Checking X-XSS-Protection :"), "X-XSS-Protection", self.check_list_xss, INFO_XSS_PROTECTION),
        (_("Checking X-Content-Type-Options :"), "X-Content-Type-Options", self.check_list_xcontent, INFO_XCONTENT_TYPE),
        (_("Checking Strict-Transport-Security :"), "Strict-Transport-Security", self.check_list_hsts, INFO_HSTS),
    ]
    for check_message, header, accepted_values, info in header_checks:
        self.log_blue(check_message)
        if not self.is_set(response, header, accepted_values):
            self.log_red(info)
            self.add_vuln(
                category=NAME,
                level=LOW_LEVEL,
                request=request_to_root,
                info=info
            )
        else:
            self.log_green("OK")
def attack(self, request: Request):
    """Mutate the request's parameters and report injections.

    A ReadTimeout is reported as a resource-consumption anomaly; a response whose
    headers contain the injected "wapiti" marker is reported as a vulnerability.
    """
    page = request.path
    for mutated_request, parameter, payload, flags in self.mutator.mutate(request):
        if self.verbose == 2:
            print("[¨] {0}".format(mutated_request.url))
        try:
            response = self.crawler.send(mutated_request)
        except ReadTimeout:
            self.network_errors += 1
            # The payload may have frozen server-side processing: flag as anomaly
            self.add_anom(
                request_id=request.path_id,
                category=Messages.RES_CONSUMPTION,
                level=MEDIUM_LEVEL,
                request=mutated_request,
                parameter=parameter,
                info="Timeout (" + parameter + ")"
            )
            self.log_orange("---")
            self.log_orange(Messages.MSG_TIMEOUT, page)
            self.log_orange(Messages.MSG_EVIL_REQUEST)
            self.log_orange(mutated_request.http_repr())
            self.log_orange("---")
        except HTTPError:
            self.network_errors += 1
            self.log(_("Error: The server did not understand this request"))
        except RequestException:
            self.network_errors += 1
        else:
            # The injected header shows up in the response: injection succeeded
            if "wapiti" in response.headers:
                self.add_vuln(
                    request_id=request.path_id,
                    category=NAME,
                    level=LOW_LEVEL,
                    request=mutated_request,
                    parameter=parameter,
                    info=_("{0} via injection in the parameter {1}").format(self.MSG_VULN, parameter)
                )
                if parameter == "QUERY_STRING":
                    injection_msg = Messages.MSG_QS_INJECT
                else:
                    injection_msg = Messages.MSG_PARAM_INJECT
                self.log_red("---")
                self.log_red(
                    injection_msg,
                    self.MSG_VULN,
                    page,
                    parameter
                )
                self.log_red(Messages.MSG_EVIL_REQUEST)
                self.log_red(mutated_request.http_repr())
                self.log_red("---")
def attack(self, urls, forms):
    """Report the ten slowest crawled resources along with their estimated download speed."""
    ranked = sorted(list(urls) + list(forms), key=get_speed)
    self.log_cyan(_("Slowest resources found on the web server:"))
    for resource in ranked[:10]:
        self.log_cyan("---")
        # +1 avoids a zero-byte numerator; duration is assumed non-zero here
        bytes_per_second = (resource.size + 1) / resource.duration
        self.log_cyan(_("With a download speed of {0} bps:").format(bytes_per_second))
        self.log_cyan(resource)
    yield
def test_csrf_cases():
    """Check the CSRF module verdicts on four crafted requests (legacy sync API)."""
    persister = FakePersister()

    # (url, post_params or None for a plain GET) — path_id is the 1-based position
    cases = [
        ("http://127.0.0.1:65086/", None),
        ("http://127.0.0.1:65086/", [["email", "*****@*****.**"], ["xsrf_token", "weak"]]),
        ("http://127.0.0.1:65086/?check=true", [["email", "*****@*****.**"], ["xsrf_token", "weak"]]),
        ("http://127.0.0.1:65086/?check=true", [["name", "Obiwan"]]),
    ]
    for path_id, (url, post_params) in enumerate(cases, start=1):
        if post_params is None:
            crafted = Request(url)
        else:
            crafted = Request(url, method="POST", post_params=post_params)
        crafted.path_id = path_id
        persister.requests.append(crafted)

    crawler = Crawler("http://127.0.0.1:65086/", timeout=1)
    options = {"timeout": 10, "level": 1}
    logger = Mock()
    module = mod_csrf(crawler, persister, logger, options)
    module.do_post = True
    module.verbose = 2

    for crafted in persister.requests:
        if module.must_attack(crafted):
            module.attack(crafted)
        else:
            # Not attacked because of GET verb
            assert crafted.path_id == 1

    expected = {
        (2, _("CSRF token '{}' is not properly checked in backend").format("xsrf_token")),
        (3, _("CSRF token '{}' might be easy to predict").format("xsrf_token")),
        (4, _("Lack of anti CSRF token"))
    }
    assert set(persister.vulnerabilities) == expected
def attack(self):
    """Generator checking the root URL for missing security headers (legacy API).

    The four checks (X-Frame-Options, X-XSS-Protection, X-Content-Type-Options,
    Strict-Transport-Security) previously lived in four copy-pasted blocks; they
    are now table-driven, preserving order, messages and translation keys.
    """
    url = self.persister.get_root_url()
    request = Request(url)
    response = self.crawler.get(request, follow_redirects=True)

    # (log message, header name, accepted values, finding description)
    # _() is applied to literals so translation keys stay extractable.
    header_checks = [
        (_("Checking X-Frame-Options :"), "X-Frame-Options", self.check_list_xframe, INFO_XFRAME_OPTIONS),
        (_("Checking X-XSS-Protection :"), "X-XSS-Protection", self.check_list_xss, INFO_XSS_PROTECTION),
        (_("Checking X-Content-Type-Options :"), "X-Content-Type-Options", self.check_list_xcontent, INFO_XCONTENT_TYPE),
        (_("Checking Strict-Transport-Security :"), "Strict-Transport-Security", self.check_list_hsts, INFO_HSTS),
    ]
    for check_message, header, accepted_values, info in header_checks:
        self.log_blue(check_message)
        if not self.is_set(response, header, accepted_values):
            self.log_red(info)
            self.add_addition(
                category=NAME,
                level=LOW_LEVEL,
                request=request,
                info=info
            )
        else:
            self.log_green("OK")
    yield
async def attack(self, request: Request):
    """Run the Nikto signature tests against the target, bounded by the task limit.

    Loads the local database (refreshing it via self.update() when unreadable),
    then streams its lines into asyncio tasks, keeping at most options["tasks"]
    in flight, and cancels the remainder when the stop event fires.
    """
    try:
        with open(os.path.join(self.user_config_dir, self.NIKTO_DB), encoding='utf-8') as nikto_db_file:
            reader = csv.reader(nikto_db_file)
            # Skip the header line
            next(reader)
            self.nikto_db = [line for line in reader if line != [] and line[0].isdigit()]
    except IOError:
        logging.warning(_("Problem with local nikto database."))
        logging.info(_("Downloading from the web..."))
        await self.update()

    self.finished = True
    root_url = request.url
    self.parts = urlparse(root_url)

    tasks = set()
    pending_count = 0
    with open(os.path.join(self.user_config_dir, self.NIKTO_DB), encoding='utf-8') as nikto_db_file:
        reader = csv.reader(nikto_db_file)
        while True:
            # Spawn a new task only while under the concurrency cap and not stopping
            if pending_count < self.options["tasks"] and not self._stop_event.is_set():
                try:
                    line = next(reader)
                except StopIteration:
                    # Database exhausted: just drain the remaining tasks below
                    pass
                else:
                    if line == [] or not line[0].isdigit():
                        continue
                    task = asyncio.create_task(self.process_line(line))
                    tasks.add(task)

            # No line spawned and nothing in flight: we are done
            if not tasks:
                break

            # Short timeout so the loop keeps feeding new tasks while others run
            done_tasks, pending_tasks = await asyncio.wait(
                tasks,
                timeout=0.01,
                return_when=asyncio.FIRST_COMPLETED)
            pending_count = len(pending_tasks)
            for task in done_tasks:
                # Propagate any exception raised inside the task
                await task
                tasks.remove(task)

            if self._stop_event.is_set():
                for task in pending_tasks:
                    task.cancel()
                    tasks.remove(task)
async def attack(self, request: Request):
    """Mutate the request and report timeouts (anomaly) or reflected 'wapiti' headers (vulnerability)."""
    page = request.path
    for mutated_request, parameter, _payload, _flags in self.mutator.mutate(request):
        log_verbose(f"[¨] {mutated_request.url}")

        try:
            response = await self.crawler.async_send(mutated_request)
        except ReadTimeout:
            # The payload may have frozen server-side processing
            self.network_errors += 1
            await self.add_anom_medium(
                request_id=request.path_id,
                category=Messages.RES_CONSUMPTION,
                request=mutated_request,
                parameter=parameter,
                info="Timeout (" + parameter + ")",
                wstg=RESOURCE_CONSUMPTION_WSTG_CODE)
            log_orange("---")
            log_orange(Messages.MSG_TIMEOUT, page)
            log_orange(Messages.MSG_EVIL_REQUEST)
            log_orange(mutated_request.http_repr())
            log_orange("---")
            continue
        except HTTPStatusError:
            self.network_errors += 1
            logging.error(_("Error: The server did not understand this request"))
            continue
        except RequestError:
            self.network_errors += 1
            continue

        # The injected header must show up in the response for the attack to count
        if "wapiti" not in response.headers:
            continue

        await self.add_vuln_low(
            request_id=request.path_id,
            category=NAME,
            request=mutated_request,
            parameter=parameter,
            info=_("{0} via injection in the parameter {1}").format(self.MSG_VULN, parameter),
            wstg=WSTG_CODE)

        if parameter == "QUERY_STRING":
            injection_msg = Messages.MSG_QS_INJECT
        else:
            injection_msg = Messages.MSG_PARAM_INJECT
        log_red("---")
        log_red(injection_msg, self.MSG_VULN, page, parameter)
        log_red(Messages.MSG_EVIL_REQUEST)
        log_red(mutated_request.http_repr())
        log_red("---")
def attack(self, request: Request):
    """Detect WordPress on the root URL, then enumerate its plugins and themes."""
    self.finished = True
    root_request = Request(request.url)
    response = self.crawler.send(root_request, follow_redirects=True)

    if not self.check_wordpress(response):
        self.log_blue(MSG_NO_WP)
        return

    self.log_blue(_("Enumeration of WordPress Plugins :"))
    self.detect_plugin(root_request.url)
    self.log_blue("----")
    self.log_blue(_("Enumeration of WordPress Themes :"))
    self.detect_theme(root_request.url)
async def _verify_wapp_database(self, categories_file_path: str, technologies_base_path: str, groups_file_path: str):
    """Make sure the three local Wappalyzer JSON files can be opened; refresh them otherwise.

    All three files are opened before any is parsed, so a single missing file
    triggers the download path regardless of the others' content.
    """
    try:
        with open(categories_file_path, encoding='utf-8') as categories_file, \
                open(technologies_base_path, encoding='utf-8') as technologies_file, \
                open(groups_file_path, encoding='utf-8') as groups_file:
            for wapp_file in (categories_file, technologies_file, groups_file):
                json.load(wapp_file)
    except IOError:
        logging.warning(_("Problem with local wapp database."))
        logging.info(_("Downloading from the web..."))
        await self.update()
def attack(self):
    """Generator attack loop trying to bypass access restrictions with a bogus HTTP verb.

    For each protected resource (401/402/403/407), sends the same URL with the
    invalid method "ABC"; a response status outside the 4xx range means the
    restriction was bypassed. Yields caught exceptions then the original request
    (legacy module protocol).
    """
    http_resources = self.persister.get_links(
        attack_module=self.name) if self.do_get else []
    for original_request in http_resources:
        url = original_request.path
        referer = original_request.referer
        headers = {}
        if referer:
            headers["referer"] = referer

        if url not in self.attacked_get:
            if original_request.status in (401, 402, 403, 407):
                # The resource is forbidden
                try:
                    # "ABC" is not a valid HTTP method: misconfigured servers may
                    # skip their access-control rules for unknown verbs
                    evil_req = web.Request(url, method="ABC")
                    response = self.crawler.send(evil_req, headers=headers)
                    unblocked_content = response.content

                    if response.status == 404 or response.status < 400 or response.status >= 500:
                        # Every 4xx status should be uninteresting (specially bad request in our case)
                        self.log_red("---")
                        self.add_vuln(
                            request_id=original_request.path_id,
                            category=Vulnerability.HTACCESS,
                            level=Vulnerability.HIGH_LEVEL,
                            request=evil_req,
                            info=_("{0} bypassable weak restriction").format(evil_req.url))
                        self.log_red(
                            _("Weak restriction bypass vulnerability: {0}"),
                            evil_req.url)
                        self.log_red(
                            _("HTTP status code changed from {0} to {1}").format(
                                original_request.status,
                                response.status))
                        if self.verbose == 2:
                            self.log_red(_("Source code:"))
                            self.log_red(unblocked_content)
                        self.log_red("---")

                    self.attacked_get.append(url)
                except (RequestException, KeyboardInterrupt) as exception:
                    # Legacy protocol: hand exceptions to the caller instead of raising
                    yield exception
        yield original_request
def attack(self):
    """Generator reporting CSRF weaknesses (missing, unchecked or predictable tokens) on forms."""
    forms = self.persister.get_forms(
        attack_module=self.name) if self.do_post else []

    # list to ensure only one occurrence per (vulnerable url/post_keys) tuple
    already_vulnerable = []

    for original_request in forms:
        fingerprint = (original_request.url, original_request.post_keys)
        if fingerprint in already_vulnerable:
            yield original_request
            continue

        if self.verbose >= 1:
            print("[+] {}".format(original_request))

        # check if token is present
        csrf_value = self.is_csrf_present(original_request)
        if not csrf_value:
            vuln_message = _("Lack of anti CSRF token")
        elif not self.is_csrf_verified(original_request):
            vuln_message = _("CSRF token '{}' is not properly checked in backend").format(self.csrf_string)
        elif not self.is_csrf_robust(csrf_value):
            vuln_message = _("CSRF token '{}' might be easy to predict").format(self.csrf_string)
        else:
            # Token present, verified and robust: nothing to report
            yield original_request
            continue

        already_vulnerable.append(fingerprint)
        for line in ("---", vuln_message, original_request.http_repr(), "---"):
            self.log_red(line)
        self.add_vuln(
            request_id=original_request.path_id,
            category=Vulnerability.CSRF,
            level=Vulnerability.HIGH_LEVEL,
            request=original_request,
            info=vuln_message,
        )
        yield original_request
def __init__(self, crawler, persister, logger, attack_options):
    """Validate the local Wappalyzer database, downloading it when missing or unreadable."""
    Attack.__init__(self, crawler, persister, logger, attack_options)
    user_config_dir = self.persister.CRAWLER_DATA_DIR

    if not os.path.isdir(user_config_dir):
        os.makedirs(user_config_dir)
    try:
        # The database is JSON, hence UTF-8: don't rely on the locale's default
        # encoding (the async sibling modules already open it with utf-8)
        with open(os.path.join(user_config_dir, self.WAPP_DB), encoding="utf-8") as wapp_db_file:
            json.load(wapp_db_file)
    except IOError:
        print(_("Problem with local wapp database."))
        print(_("Downloading from the web..."))
        self.update()
def attack(self):
    """Report the ten slowest crawled resources with their estimated download speed."""
    urls = self.persister.get_links(attack_module=self.name) if self.do_get else []
    forms = self.persister.get_forms(attack_module=self.name) if self.do_post else []

    ranked = sorted(list(urls) + list(forms), key=get_speed)
    self.log_cyan(_("Slowest resources found on the web server:"))
    for resource in ranked[:10]:
        self.log_cyan("---")
        # +1 avoids a zero-byte numerator; duration is assumed non-zero here
        bytes_per_second = (resource.size + 1) / resource.duration
        self.log_cyan(_("With a download speed of {0} bps:").format(bytes_per_second))
        self.log_cyan(resource)
    yield
def __init__(self, crawler, persister, logger, attack_options):
    """Load the Nikto signature database from the user configuration directory.

    When the local file is missing or unreadable the module runs with an empty
    database: downloading the official db_tests file is forbidden by its license.
    """
    Attack.__init__(self, crawler, persister, logger, attack_options)
    # Nikto's CSV uses backslash escapes instead of doubled quotes
    csv.register_dialect("nikto", quoting=csv.QUOTE_ALL, doublequote=False, escapechar="\\")

    user_config_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    # os.path.join keeps the path valid on every platform (was: string concat of "/config")
    user_config_dir = os.path.join(user_config_dir, "config")
    if not os.path.isdir(user_config_dir):
        os.makedirs(user_config_dir)
    try:
        with open(os.path.join(user_config_dir, self.NIKTO_DB)) as fd:
            reader = csv.reader(fd, "nikto")
            # Keep only data rows: non-empty and starting with a numeric test id
            self.nikto_db = [line for line in reader if line != [] and line[0].isdigit()]
    except IOError:
        # Downloading of the Nikto database is disabled because the license of the
        # file forbids it. The dead fallback code that fetched it from GitHub
        # (unreachable after this return) has been removed.
        self.nikto_db = []
async def attack(self, request: Request):
    """Mutate the request and flag parameters whose injection triggers a redirect
    chain ending up on openbugbounty.org (marker of a reported open redirect/XSS)."""
    page = request.path
    for mutated_request, parameter, __, __ in self.mutator.mutate(request):
        if self.verbose == 2:
            print("[¨] {0}".format(mutated_request.url))
        try:
            response = await self.crawler.async_send(mutated_request)
        except RequestError:
            self.network_errors += 1
            continue

        # Generator expression instead of a throwaway list inside any()
        if any(url.startswith("https://openbugbounty.org/") for url in response.all_redirections):
            await self.add_vuln_low(
                request_id=request.path_id,
                category=NAME,
                request=mutated_request,
                parameter=parameter,
                info=_("{0} via injection in the parameter {1}").format(self.MSG_VULN, parameter))

            if parameter == "QUERY_STRING":
                injection_msg = Messages.MSG_QS_INJECT
            else:
                injection_msg = Messages.MSG_PARAM_INJECT

            self.log_red("---")
            self.log_red(injection_msg, self.MSG_VULN, page, parameter)
            self.log_red(Messages.MSG_EVIL_REQUEST)
            self.log_red(mutated_request.http_repr())
            self.log_red("---")
async def test_detection():
    """The exec module must report command execution when env output is reflected."""
    # Payloads embedding "env" get a shell-environment-looking answer, others don't
    respx.get(url__regex=r"http://perdu\.com/\?vuln=.*env.*").mock(
        return_value=httpx.Response(200, text="PATH=/bin:/usr/bin;PWD=/"))
    respx.get(url__regex=r"http://perdu\.com/\?vuln=.*").mock(
        return_value=httpx.Response(200, text="Hello there"))

    persister = AsyncMock()
    target = Request("http://perdu.com/?vuln=hello")
    target.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = ModuleExec(crawler, persister, {"timeout": 10, "level": 1}, Event())

    await module.attack(target)

    assert persister.add_payload.call_count == 1
    reported = persister.add_payload.call_args_list[0][1]
    assert reported["module"] == "exec"
    assert reported["category"] == _("Command execution")
    assert reported["request"].get_params == [["vuln", ";env;"]]
    await crawler.close()
async def test_whole_stuff():
    """The shellshock module must flag only the vulnerable path among the crawled pages."""
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(200))
    respx.get(url__regex=r"http://perdu.com/.*").mock(side_effect=shellshock_callback)

    persister = AsyncMock()
    all_requests = []
    for path_id, url in enumerate(("http://perdu.com/", "http://perdu.com/vuln/"), start=1):
        crafted = Request(url)
        crafted.path_id = path_id
        crafted.status = 200
        crafted.set_headers({"content-type": "text/html"})
        all_requests.append(crafted)

    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    module = mod_shellshock(crawler, persister, {"timeout": 10, "level": 2}, Event())
    module.verbose = 2
    module.do_get = True

    for crafted in all_requests:
        await module.attack(crafted)

    assert persister.add_payload.call_count == 1
    reported = persister.add_payload.call_args_list[0][1]
    assert reported["module"] == "shellshock"
    assert reported["category"] == _("Command execution")
    assert reported["request"].url == "http://perdu.com/vuln/"
    await crawler.close()
async def test_true_positive():
    """The sql module must report an injection when a MySQL syntax error is returned."""
    respx.get("http://perdu.com/?foo=bar").mock(
        return_value=httpx.Response(200, text="Hi there"))
    mysql_error = (
        "You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version "
        "for the right syntax to use near '\\\"\\'' at line 1")
    respx.get(url__regex=r"http://perdu\.com/\?foo=.*").mock(
        return_value=httpx.Response(200, text=mysql_error))

    persister = AsyncMock()
    target = Request("http://perdu.com/?foo=bar")
    target.path_id = 1
    crawler = AsyncCrawler("http://perdu.com/", timeout=1)
    logger = Mock()
    module = mod_sql(crawler, persister, logger, {"timeout": 10, "level": 1}, Event())
    module.verbose = 2
    module.do_post = True

    await module.attack(target)

    assert persister.add_payload.call_count
    reported = persister.add_payload.call_args_list[0][1]
    assert reported["module"] == "sql"
    assert reported["category"] == _("SQL Injection")
    await crawler.close()
def payloads(self):
    """Load the payloads from the specified file.

    Returns a list of (payload, flags) tuples. As a side effect fills
    self.payload_to_rules and self.rules_to_messages from the per-section
    "rules" and "messages" entries. Returns an empty list when the module
    declares no PAYLOADS_FILE.
    """
    if not self.PAYLOADS_FILE:
        return []

    payloads = []
    config_reader = ConfigParser(interpolation=None)
    # "with" ensures the handle is closed — it was previously leaked by passing
    # a bare open() result to read_file()
    with open(path_join(self.CONFIG_DIR, self.PAYLOADS_FILE)) as payload_file:
        config_reader.read_file(payload_file)

    # No time based payloads here so we don't care yet
    reader = PayloadReader(self.options)

    for section in config_reader.sections():
        clean_payload, original_flags = reader.process_line(config_reader[section]["payload"])
        flags = original_flags.with_section(section)
        rules = config_reader[section]["rules"].splitlines()
        messages = [_(message) for message in config_reader[section]["messages"].splitlines()]
        self.payload_to_rules[section] = rules
        self.rules_to_messages.update(dict(zip(rules, messages)))
        payloads.append((clean_payload, flags))

    return payloads