def api_is_open_port(self, target, ports):
    """
    Scan the given target(s) for open TCP ports using the shell's
    is_open_port() primitive (e.g. PHP's include() error messages).

    :param target: An IP address / hostname to scan, or 'auto' to scan
                   every private IP address the compromised host is
                   connected to (extracted from the tcp/udp payloads).
    :param ports: 'default' to scan self.DEFAULT_PORTS, or an iterable /
                  comma separated string with the port numbers.
    :return: A dict mapping each scanned IP address to the list of open
             ports found on it.
    :raise ValueError: When a user-supplied port is not an integer.
    """
    ip_address_list = []
    if target != 'auto':
        ip_address_list = [target]
    else:
        tcp_result = self.exec_payload('tcp')
        udp_result = self.exec_payload('udp')

        #
        # Load the private IP addresses as targets
        #
        for key in tcp_result:
            connected_to = tcp_result[key]['rem_address']
            if is_private_site(connected_to):
                ip_address_list.append(connected_to)

        for key in udp_result:
            # Bugfix: this loop was reading from tcp_result, so the UDP
            # connections were never added as targets
            connected_to = udp_result[key]['rem_address']
            if is_private_site(connected_to):
                ip_address_list.append(connected_to)

    if ports == 'default':
        port_list = self.DEFAULT_PORTS
    else:
        port_list = ''.join(ports)
        port_list = port_list.split(',')
        port_list = [port.strip() for port in port_list]
        if not all(port.isdigit() for port in port_list):
            # Bugfix: the ValueError was instantiated but never raised,
            # which let non-numeric ports through to the scan loop
            raise ValueError('Target ports need to be integers')

    # Init: one empty open-port list per target
    result = dict((ip_address, []) for ip_address in ip_address_list)

    #
    # Portscan
    #
    for ip_address in ip_address_list:
        for port in port_list:
            is_open = self.shell.is_open_port(ip_address, port)
            if is_open:
                result[ip_address].append(port)

    return result
def api_is_open_port(self, target, ports):
    """
    Scan the given target(s) for open TCP ports using the shell's
    is_open_port() primitive (e.g. PHP's include() error messages).

    :param target: An IP address / hostname to scan, or "auto" to scan
                   every private IP address the compromised host is
                   connected to (extracted from the tcp/udp payloads).
    :param ports: "default" to scan self.DEFAULT_PORTS, or an iterable /
                  comma separated string with the port numbers.
    :return: A dict mapping each scanned IP address to the list of open
             ports found on it.
    :raise ValueError: When a user-supplied port is not an integer.
    """
    ip_address_list = []
    if target != "auto":
        ip_address_list = [target]
    else:
        tcp_result = self.exec_payload("tcp")
        udp_result = self.exec_payload("udp")

        #
        # Load the private IP addresses as targets
        #
        for key in tcp_result:
            connected_to = tcp_result[key]["rem_address"]
            if is_private_site(connected_to):
                ip_address_list.append(connected_to)

        for key in udp_result:
            # Bugfix: this loop was reading from tcp_result, so the UDP
            # connections were never added as targets
            connected_to = udp_result[key]["rem_address"]
            if is_private_site(connected_to):
                ip_address_list.append(connected_to)

    if ports == "default":
        port_list = self.DEFAULT_PORTS
    else:
        port_list = "".join(ports)
        port_list = port_list.split(",")
        port_list = [port.strip() for port in port_list]
        if not all(port.isdigit() for port in port_list):
            # Bugfix: the ValueError was instantiated but never raised,
            # which let non-numeric ports through to the scan loop
            raise ValueError("Target ports need to be integers")

    # Init: one empty open-port list per target
    result = dict((ip_address, []) for ip_address in ip_address_list)

    #
    # Portscan
    #
    for ip_address in ip_address_list:
        for port in port_list:
            is_open = self.shell.is_open_port(ip_address, port)
            if is_open:
                result[ip_address].append(port)

    return result
def _local_test_inclusion(self, freq, orig_response): """ Check for RFI using a local web server :param freq: A FuzzableRequest object :return: None, everything is saved to the kb """ # # The listen address is an empty string when I have no default route # # Only work if: # - The listen address is private and the target address is private # - The listen address is public and the target address is public # if not self._listen_address: return is_listen_priv = is_private_site(self._listen_address) is_target_priv = is_private_site(freq.get_url().get_domain()) if (is_listen_priv and is_target_priv) or \ not (is_listen_priv or is_target_priv): msg = 'RFI using local web server for URL: %s' % freq.get_url() om.out.debug(msg) try: # Create file for remote inclusion php_jsp_code, rfi_data = self._create_file() # Setup the web server handler to return always the same # response body. This is important for the test, since it might # be the case that the web application prepends/appends # something to the URL being included, and we don't want to fail # there! # # Also, this allows us to remove the payloads we sent with \0 # which tried to achieve the same result. RFIWebHandler.RESPONSE_BODY = php_jsp_code # Start web server # # No real webroot is required since the custom handler returns # always the same HTTP response body webroot = '.' webserver.start_webserver(self._listen_address, self._listen_port, webroot, RFIWebHandler) # Perform the real work self._test_inclusion(freq, rfi_data, orig_response) except Exception, e: msg = 'An error occurred while running local web server for' \ ' the remote file inclusion (rfi) plugin: "%s"' om.out.error(msg % e)
def _local_test_inclusion(self, freq, orig_response, debugging_id): """ Check for RFI using a local web server :param freq: A FuzzableRequest object :return: None, everything is saved to the kb """ # # The listen address is an empty string when I have no default route # # Only work if: # - The listen address is private and the target address is private # - The listen address is public and the target address is public # if not self._listen_address: return is_listen_priv = is_private_site(self._listen_address) is_target_priv = is_private_site(freq.get_url().get_domain()) if (is_listen_priv and is_target_priv) or \ not (is_listen_priv or is_target_priv): msg = 'RFI using local web server for URL: %s' % freq.get_url() om.out.debug(msg) try: # Create file for remote inclusion php_jsp_code, rfi_data = self._create_file() # Setup the web server handler to return always the same # response body. This is important for the test, since it might # be the case that the web application prepends/appends # something to the URL being included, and we don't want to fail # there! # # Also, this allows us to remove the payloads we sent with \0 # which tried to achieve the same result. RFIWebHandler.RESPONSE_BODY = php_jsp_code # Start web server # # No real webroot is required since the custom handler returns # always the same HTTP response body webroot = '.' webserver.start_webserver(self._listen_address, self._listen_port, webroot, RFIWebHandler) # Perform the real work self._test_inclusion(freq, rfi_data, orig_response, debugging_id) except Exception, e: msg = 'An error occurred while running local web server for' \ ' the remote file inclusion (rfi) plugin: "%s"' om.out.error(msg % e)
def crawl(self, fuzzable_request):
    """
    Query archive.org for the target URL, extract the links from the
    returned HTML and then test those URLs against the live site. This
    is a time machine!

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    """
    target_url = fuzzable_request.get_url()
    domain = target_url.get_domain()

    if is_private_site(domain):
        msg = 'There is no point in searching archive.org for "%s"' \
              ' because it is a private site that will never be indexed.'
        om.out.information(msg % domain)
        raise RunOnce(msg)

    # Initial check to verify if the domain is in the archive
    start_url = URL(self.ARCHIVE_START_URL % target_url)
    http_response = self._uri_opener.GET(start_url, cache=True)

    if self.NOT_IN_ARCHIVE in http_response.body:
        msg = 'There is no point in searching archive.org for "%s"' \
              ' because they are not indexing this site.'
        om.out.information(msg % domain)
        raise RunOnce(msg)

    references = self._spider_archive([start_url],
                                      self._max_depth,
                                      domain)
    self._analyze_urls(references)
def crawl(self, fuzzable_request):
    """
    Search archive.org for historic copies of the target, collect the
    links found in the archived HTML, and replay them against the live
    site. This is a time machine!

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    """
    domain = fuzzable_request.get_url().get_domain()

    if is_private_site(domain):
        msg = ('There is no point in searching archive.org for "%s"'
               ' because it is a private site that will never be indexed.')
        om.out.information(msg % domain)
        raise RunOnce(msg)

    # First request: verify that archive.org actually indexes this domain
    start_url = URL(self.ARCHIVE_START_URL % fuzzable_request.get_url())
    http_response = self._uri_opener.GET(start_url, cache=True)

    if self.NOT_IN_ARCHIVE in http_response.body:
        msg = ('There is no point in searching archive.org for "%s"'
               ' because they are not indexing this site.')
        om.out.information(msg % domain)
        raise RunOnce(msg)

    spidered = self._spider_archive([start_url], self._max_depth, domain)
    self._analyze_urls(spidered)
def _is_public(self, domain):
    """
    :param domain: The domain to check
    :return: True when the domain is not a private address and can
             therefore be checked for subdomains.
    """
    if not is_private_site(domain):
        return True

    msg = 'shared_hosting plugin is not checking for subdomains for' \
          ' domain: "%s" because it is a private address.' % domain
    om.out.debug(msg)
    return False
def _is_public(self, domain):
    """
    :param domain: The domain to check
    :return: False (after logging) for private addresses, True otherwise.
    """
    is_private = is_private_site(domain)

    if is_private:
        msg = ("shared_hosting plugin is not checking for subdomains for"
               ' domain: "%s" because it is a private address.' % domain)
        om.out.debug(msg)

    return not is_private
def _get_dead_domains(self, fuzzable_request):
    """
    Find every link on a HTML document and verify if the domain can be
    resolved.

    :param fuzzable_request: The fuzzable request whose URI is fetched
                             and parsed for links
    :return: Yield domains that can not be resolved or resolve to a
             private IP address
    """
    original_response = self._uri_opener.GET(fuzzable_request.get_uri(),
                                             cache=True)

    try:
        dp = parser_cache.dpc.get_document_parser_for(original_response)
    except BaseFrameworkException:
        # Failed to find a suitable parser for the document
        return

    # Note:
    #
    # - With parsed_references I'm 100% that it's really something in the
    #   HTML that the developer intended to add.
    #
    # - The re_references are the result of regular expressions, which in
    #   some cases are just false positives.
    #
    # In this case, and because I'm only going to use the domain name of the
    # URL I'm going to trust the re_references also.
    parsed_references, re_references = dp.get_references()
    parsed_references.extend(re_references)

    for link in parsed_references:
        domain = link.get_domain()

        # Avoid duplicate DNS queries / duplicate findings per domain
        if domain in self._already_queried_dns:
            continue

        self._already_queried_dns.add(domain)

        # NOTE(review): is_private_site() appears to return True both for
        # private addresses and for domains that do not resolve at all —
        # that is what makes the "non existent domain" wording in the
        # description valid. Confirm against is_private_site's docs.
        if not is_private_site(domain):
            continue

        desc = (
            u'The content of "%s" references a non existent domain: "%s".'
            u' This can be a broken link, or an internal domain name.')
        desc %= (fuzzable_request.get_url(), domain)

        i = Info(u'Internal hostname in HTML link', desc,
                 original_response.id, self.get_name())
        i.set_url(fuzzable_request.get_url())

        kb.kb.append(self, 'find_vhosts', i)
        om.out.information(i.get_desc())

        yield domain
def crawl(self, fuzzable_request):
    """
    Run the classic Google Hacking DataBase queries against the target
    domain, unless it is a private site that Google can not index.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    """
    # Get the domain and set some parameters
    domain = fuzzable_request.get_url().get_domain()

    if not is_private_site(domain):
        self._do_clasic_GHDB(domain)
        return

    msg = 'There is no point in searching google for "site:%s".' \
          ' Google doesn\'t index private pages.'
    om.out.information(msg % domain)
def crawl(self, fuzzable_request):
    """
    Launch the classic GHDB searches for the target domain; skip private
    domains since Google will never index them.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    """
    target_domain = fuzzable_request.get_url().get_domain()

    if is_private_site(target_domain):
        msg = ('There is no point in searching google for "site:%s".'
               ' Google doesn\'t index private pages.')
        om.out.information(msg % target_domain)
        return

    self._do_clasic_GHDB(target_domain)
def discover(self, fuzzable_request):
    """
    Search Bing for email accounts under the target's root domain and
    check each result with _find_accounts().

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    """
    target_url = fuzzable_request.get_url()

    # Private sites are never indexed by Bing, nothing to search for
    if is_private_site(target_url.get_domain()):
        return

    self._domain = target_url.get_domain()
    self._domain_root = target_url.get_root_domain()

    search_engine = bing(self._uri_opener)
    results = search_engine.get_n_results("@" + self._domain_root,
                                          self._result_limit)

    # Send the requests using threads:
    self.worker_pool.map(self._find_accounts, results)
def discover(self, fuzzable_request):
    """
    Use Google to discover email accounts for the target domain, running
    either the fast or the complete search depending on configuration.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    """
    domain = fuzzable_request.get_url().get_domain()

    # Google does not index private sites, so there is nothing to search
    if is_private_site(domain):
        return

    self._google = google(self._uri_opener)
    self._domain = domain
    self._domain_root = fuzzable_request.get_url().get_root_domain()

    if self._fast_search:
        self._do_fast_search(domain)
    else:
        self._do_complete_search(domain)
def discover(self, fuzzable_request):
    """
    Search Google for email accounts on the target domain; picks the
    fast or the complete search strategy based on self._fast_search.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    """
    target_url = fuzzable_request.get_url()
    domain = target_url.get_domain()

    if not is_private_site(domain):
        self._google = google(self._uri_opener)
        self._domain = domain
        self._domain_root = target_url.get_root_domain()

        # Dispatch to the configured search strategy
        search = (self._do_fast_search if self._fast_search
                  else self._do_complete_search)
        search(domain)
def discover(self, fuzzable_request):
    """
    Query Bing for "@root-domain" to find email accounts, then analyze
    each search result with _find_accounts() using the worker pool.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    """
    domain = fuzzable_request.get_url().get_domain()

    if not is_private_site(domain):
        self._domain = domain
        self._domain_root = fuzzable_request.get_url().get_root_domain()

        query = '@' + self._domain_root
        results = bing(self._uri_opener).get_n_results(query,
                                                       self._result_limit)

        # Send the requests using threads:
        self.worker_pool.map(self._find_accounts, results)
def crawl(self, fuzzable_request, debugging_id):
    """
    Run the classic GHDB queries against the target domain; private
    domains are skipped since Google does not index them.

    :param debugging_id: A unique identifier for this call to discover()
    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    """
    # Get the domain and set some parameters
    domain = fuzzable_request.get_url().get_domain()

    if not is_private_site(domain):
        self._do_clasic_GHDB(domain)
        return

    msg = ('There is no point in searching google for "site:%s".'
           ' Google does not index private pages.')
    om.out.information(msg % domain)
def crawl(self, fuzzable_request):
    """
    Search Bing for "site:domain" and feed the crawler with every URL
    found in the results.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    :raise BaseFrameworkException: When the target domain is a private
                                   address that Bing will never index.
    """
    bing_se = bing(self._uri_opener)
    domain = fuzzable_request.get_url().get_domain()

    if is_private_site(domain):
        msg = ('There is no point in searching Bing for "site:%s".'
               " Bing doesn't index private pages.")
        raise BaseFrameworkException(msg % domain)

    try:
        results = bing_se.get_n_results("site:" + domain, self._result_limit)
    except Exception:
        # Bugfix: the previous bare "except:" also swallowed
        # KeyboardInterrupt / SystemExit. Keep the deliberate best-effort
        # behavior (search engine errors abort silently) but only for
        # regular exceptions.
        pass
    else:
        self.worker_pool.map(self.http_get_and_parse,
                             [r.URL for r in results])
def discover(self, fuzzable_request, debugging_id):
    """
    Use Google to find email accounts for the target domain, using the
    fast or the complete strategy depending on self._fast_search.

    :param debugging_id: A unique identifier for this call to discover()
    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    """
    target_url = fuzzable_request.get_url()

    if is_private_site(target_url.get_domain()):
        return

    # There are no race conditions here with these attributes because of
    # @runonce
    self._domain = target_url.get_domain()
    self._domain_root = target_url.get_root_domain()
    self._google = google(self._uri_opener)

    # Dispatch to the configured search strategy
    search = (self._do_fast_search if self._fast_search
              else self._do_complete_search)
    search()
def crawl(self, fuzzable_request):
    """
    Search Bing for "site:domain" and feed the crawler with every URL
    found in the results.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    :raise BaseFrameworkException: When the target domain is a private
                                   address that Bing will never index.
    """
    bing_se = bing(self._uri_opener)
    domain = fuzzable_request.get_url().get_domain()

    if is_private_site(domain):
        msg = 'There is no point in searching Bing for "site:%s".'\
              ' Bing doesn\'t index private pages.'
        raise BaseFrameworkException(msg % domain)

    try:
        results = bing_se.get_n_results('site:' + domain,
                                        self._result_limit)
    except Exception:
        # Bugfix: the previous bare "except:" also swallowed
        # KeyboardInterrupt / SystemExit. Keep the deliberate best-effort
        # behavior (search engine errors abort silently) but only for
        # regular exceptions.
        pass
    else:
        self.worker_pool.map(self.http_get_and_parse,
                             [r.URL for r in results])
def crawl(self, fuzzable_request, debugging_id):
    """
    Search Google for "site:domain" and feed the crawler with every URL
    found in the results.

    :param debugging_id: A unique identifier for this call to discover()
    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    :raise BaseFrameworkException: When the target domain is a private
                                   address that Google will never index.
    """
    google_se = google(self._uri_opener)
    domain = fuzzable_request.get_url().get_domain()

    if is_private_site(domain):
        msg = 'There is no point in searching google for "site:%s".'\
              ' Google doesn\'t index private pages.'
        raise BaseFrameworkException(msg % domain)

    try:
        g_results = google_se.get_n_results('site:' + domain,
                                            self._result_limit)
    except Exception:
        # Bugfix: the previous bare "except:" also swallowed
        # KeyboardInterrupt / SystemExit. Keep the deliberate best-effort
        # behavior (search engine errors abort silently) but only for
        # regular exceptions.
        pass
    else:
        self.worker_pool.map(self.http_get_and_parse,
                             [r.URL for r in g_results])
def test_is_private_site_false_case02(self):
    # A public, resolvable domain is not considered private
    public_domain = 'www.w3af.org'
    self.assertFalse(is_private_site(public_domain))
def test_is_private_site_true_case02(self):
    # 192.168.0.0/16 belongs to the RFC 1918 private ranges
    rfc1918_address = "192.168.0.1"
    self.assertTrue(is_private_site(rfc1918_address))
def test_is_private_site_true_case01(self):
    # The loopback address must be reported as private
    loopback = "127.0.0.1"
    self.assertTrue(is_private_site(loopback))
def test_is_private_site_false_case01(self):
    # 192.1.0.0/16 is public space; only 192.168.0.0/16 is private
    public_address = "192.1.0.1"
    self.assertFalse(is_private_site(public_address))
def test_is_private_site_true_case03(self):
    # A hostname that (presumably) does not resolve is treated as private
    unresolvable_domain = "www.w3af-scanner.org"
    self.assertTrue(is_private_site(unresolvable_domain))
def test_is_private_site_false_case02(self):
    # Publicly resolvable domains are not private
    result = is_private_site("www.w3af.org")
    self.assertFalse(result)
def test_is_private_site_true_case01(self):
    # Loopback counts as private
    result = is_private_site('127.0.0.1')
    self.assertTrue(result)
def test_is_private_site_true_case02(self):
    # RFC 1918 address from the 192.168.0.0/16 range
    result = is_private_site('192.168.0.1')
    self.assertTrue(result)
def test_is_private_site_true_case03(self):
    # A hostname that (presumably) fails to resolve is treated as private
    result = is_private_site('www.w3af-scanner.org')
    self.assertTrue(result)
def test_is_private_site_false_case01(self):
    # A public IP outside every private range
    result = is_private_site('192.1.0.1')
    self.assertFalse(result)