def api_is_open_port(self, target, ports):
    '''
    Scan TCP ports on one or more targets using the shell's is_open_port()
    primitive, for example using PHP's include() error messages as an
    open/closed oracle:

        for port in port_list:
            open = self.shell.is_open_port( host, port )
            if open:
                report_open( port )

    :param target: An IP address / hostname to scan, or the string 'auto'
                   to target every private IP address the compromised host
                   is currently connected to (from the tcp/udp payloads).
    :param ports: The string 'default' to use self.DEFAULT_PORTS, or a
                  comma separated list of ports (e.g. '80,443').
    :return: A dict mapping each scanned IP address to a list of the ports
             that were found open on it.
    :raise ValueError: If any user-supplied port is not an integer.
    '''
    ip_address_list = []

    if target != 'auto':
        ip_address_list = [target, ]
    else:
        tcp_result = self.exec_payload('tcp')
        udp_result = self.exec_payload('udp')

        #
        #    Load the private IP addresses as targets
        #
        for key in tcp_result:
            connected_to = tcp_result[key]['rem_address']
            if is_private_site(connected_to):
                ip_address_list.append(connected_to)

        for key in udp_result:
            # FIX: read from udp_result here; the original read tcp_result
            # (copy-paste bug), ignoring the UDP payload output entirely.
            connected_to = udp_result[key]['rem_address']
            if is_private_site(connected_to):
                ip_address_list.append(connected_to)

    if ports == 'default':
        port_list = self.DEFAULT_PORTS
    else:
        port_list = ''.join(ports)
        port_list = port_list.split(',')
        port_list = [port.strip() for port in port_list]
        if not all(port.isdigit() for port in port_list):
            # FIX: the exception instance was created but never raised
            raise ValueError('Target ports need to be integers')

    result = {}

    #
    #    Init
    #
    for ip_address in ip_address_list:
        result[ip_address] = []

    #
    #    Portscan
    #
    for ip_address in ip_address_list:
        for port in port_list:
            is_open = self.shell.is_open_port(ip_address, port)
            if is_open:
                result[ip_address].append(port)

    return result
def api_is_open_port(self, target, ports):
    '''
    Portscan one or more targets with the shell's is_open_port() oracle
    (e.g. PHP's include() error messages tell us if a port is open):

        for port in port_list:
            open = self.shell.is_open_port( host, port )
            if open:
                report_open( port )

    :param target: IP address / hostname to scan, or 'auto' to scan the
                   private IP addresses this host is connected to
                   (discovered via the tcp/udp payloads).
    :param ports: 'default' for self.DEFAULT_PORTS, or a comma separated
                  port list such as '22,80,443'.
    :return: Dict of {ip_address: [open ports]}.
    :raise ValueError: If any user-supplied port is not an integer.
    '''
    ip_address_list = []

    if target != 'auto':
        ip_address_list = [target, ]
    else:
        tcp_result = self.exec_payload('tcp')
        udp_result = self.exec_payload('udp')

        #
        #    Load the private IP addresses as targets
        #
        for key in tcp_result:
            connected_to = tcp_result[key]['rem_address']
            if is_private_site(connected_to):
                ip_address_list.append(connected_to)

        for key in udp_result:
            # FIX: was tcp_result[key] — copy-paste bug that silently
            # ignored the UDP payload results.
            connected_to = udp_result[key]['rem_address']
            if is_private_site(connected_to):
                ip_address_list.append(connected_to)

    if ports == 'default':
        port_list = self.DEFAULT_PORTS
    else:
        port_list = ''.join(ports)
        port_list = port_list.split(',')
        port_list = [port.strip() for port in port_list]
        if not all(port.isdigit() for port in port_list):
            # FIX: exception was instantiated but not raised
            raise ValueError('Target ports need to be integers')

    result = {}

    #
    #    Init
    #
    for ip_address in ip_address_list:
        result[ip_address] = []

    #
    #    Portscan
    #
    for ip_address in ip_address_list:
        for port in port_list:
            is_open = self.shell.is_open_port(ip_address, port)
            if is_open:
                result[ip_address].append(port)

    return result
def _local_test_inclusion(self, freq, orig_response): ''' Check for RFI using a local web server :param freq: A FuzzableRequest object :return: None, everything is saved to the kb ''' # # The listen address is an empty string when I have no default route # # Only work if: # - The listen address is private and the target address is private # - The listen address is public and the target address is public # if self._listen_address == '': return is_listen_priv = is_private_site(self._listen_address) is_target_priv = is_private_site(freq.get_url().get_domain()) if (is_listen_priv and is_target_priv) or \ not (is_listen_priv or is_target_priv): msg = 'RFI using local web server for URL: %s' % freq.get_url() om.out.debug(msg) try: # Create file for remote inclusion php_jsp_code, rfi_data = self._create_file() # Setup the web server handler to return always the same response # body. This is important for the test, since it might be the case # that the web application prepends/appends something to the # URL being included, and we don't want to fail there! # # Also, this allows us to remove the payloads we sent with \0 # which tried to achieve the same result. RFIWebHandler.RESPONSE_BODY = php_jsp_code # Start web server # # No real webroot is required since the custom handler returns # always the same HTTP response body webroot = '.' webserver.start_webserver(self._listen_address, self._listen_port, webroot, RFIWebHandler) # Perform the real work self._test_inclusion(freq, rfi_data, orig_response) except Exception, e: om.out.error('An error occurred while running local webserver:' ' "%s"' % e)
def discover(self, fuzzableRequest):
    '''
    Query Bing with a "site:<domain>" search and spider every result URL.

    @parameter fuzzableRequest: A fuzzableRequest instance that contains
                                (among other things) the URL to test.
    '''
    # Every call to bing_spider yields the same URLs, so run only once;
    # raising w3afRunOnce removes this plugin from the discovery list.
    if not self._run:
        raise w3afRunOnce()
    self._run = False

    search_engine = bing(self._uri_opener)
    domain = fuzzableRequest.getURL().getDomain()

    if is_private_site(domain):
        msg = ('There is no point in searching Bing for "site:' + domain +
               '". Bing doesnt index private pages.')
        raise w3afException(msg)

    query_results = search_engine.getNResults('site:' + domain,
                                              self._resultLimit)
    for search_result in query_results:
        self._run_async(meth=self._genFuzzableRequests,
                        args=(search_result.URL,))
    self._join()

    return self._fuzzableRequests
def discover(self, fuzzableRequest ):
    '''
    Run the classic Google Hacking DataBase checks against the target.

    @parameter fuzzableRequest: A fuzzableRequest instance that contains
                                (among other things) the URL to test.
    @return: Whatever _do_clasic_GHDB() returns for the target domain.
    @raise w3afRunOnce: On every call after the first one.
    @raise w3afException: When the target domain is private.
    '''
    self._fuzzableRequests = []

    if not self._run:
        # This will remove the plugin from the discovery plugins to be run.
        raise w3afRunOnce()
    else:
        # update !
        if self._update_ghdb:
            self._update_db()

        # I will only run this one time. All calls to ghdb return the same url's
        self._run = False

        # Get the domain and set some parameters
        domain = fuzzableRequest.getURL().getDomain()
        if is_private_site( domain ):
            msg = 'There is no point in searching google for "site:'+ domain
            msg += '" . Google doesnt index private pages.'
            raise w3afException( msg )

        # FIX: removed the unreachable trailing "return []" — every path
        # above either returns or raises, so it could never execute.
        return self._do_clasic_GHDB( domain )
def crawl(self, fuzzable_request):
    '''
    Does a search in archive.org and searches for links on the html. Then
    searches those URLs in the target site. This is a time machine !

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    '''
    domain = fuzzable_request.get_url().get_domain()

    # Private sites never make it into the archive.org index.
    if is_private_site(domain):
        msg = ('There is no point in searching archive.org for "%s"'
               ' because it is a private site that will never be indexed.')
        om.out.information(msg % domain)
        raise w3afRunOnce(msg)

    # Quick probe to verify the domain is present in the archive at all
    start_url = URL(self.ARCHIVE_START_URL % fuzzable_request.get_url())
    http_response = self._uri_opener.GET(start_url, cache=True)

    if self.NOT_IN_ARCHIVE in http_response.body:
        msg = ('There is no point in searching archive.org for "%s"'
               ' because they are not indexing this site.')
        om.out.information(msg % domain)
        raise w3afRunOnce(msg)

    found_references = self._spider_archive([start_url],
                                            self._max_depth, domain)
    self._analyze_urls(found_references)
def crawl(self, fuzzable_request):
    '''
    Does a search in archive.org and searches for links on the html. Then
    searches those URLs in the target site. This is a time machine !

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    '''
    target_domain = fuzzable_request.get_url().get_domain()

    # archive.org will never index a private site, so stop right away.
    if is_private_site(target_domain):
        msg = ('There is no point in searching archive.org for "%s"'
               ' because it is a private site that will never be indexed.')
        om.out.information(msg % target_domain)
        raise w3afRunOnce(msg)

    # Initial check: does archive.org know about this site at all?
    probe_url = URL(self.ARCHIVE_START_URL % fuzzable_request.get_url())
    probe_response = self._uri_opener.GET(probe_url, cache=True)

    if self.NOT_IN_ARCHIVE in probe_response.body:
        msg = ('There is no point in searching archive.org for "%s"'
               ' because they are not indexing this site.')
        om.out.information(msg % target_domain)
        raise w3afRunOnce(msg)

    spidered = self._spider_archive([probe_url],
                                    self._max_depth, target_domain)
    self._analyze_urls(spidered)
def discover(self, fuzzableRequest ):
    '''
    Query Google with a "site:<domain>" search and process the results.

    @parameter fuzzableRequest: A fuzzableRequest instance that contains
                                (among other things) the URL to test.
    '''
    if not self._run:
        # This will remove the plugin from the discovery plugins to be runned.
        raise w3afRunOnce()
    else:
        # I will only run this one time. All calls to googleSpider return the same url's
        self._run = False

        google_se = google(self._urlOpener)

        domain = fuzzableRequest.getURL().getDomain()
        if is_private_site( domain ):
            msg = 'There is no point in searching google for "site:'+ domain + '".'
            msg += ' Google doesnt index private pages.'
            raise w3afException( msg )

        try:
            results = google_se.getNResults('site:'+ domain, self._result_limit)
        except w3afException, w3:
            om.out.error(str(w3))
            # If I found an error, I don't want to be run again
            raise w3afRunOnce()
        else:
            # NOTE(review): SOURCE appears truncated here — the success
            # branch (presumably spidering the result URLs) is missing
            # from this view, leaving the "else:" body empty.
def discover(self, fuzzableRequest ):
    '''
    Query Yahoo Site Explorer for the target domain and turn each result
    URL into fuzzable requests (one worker thread per result).

    @parameter fuzzableRequest: A fuzzableRequest instance that contains
                                (among other things) the URL to test.
    '''
    # Accumulator filled by _generate_fuzzable_requests from the threads.
    self._new_fuzzable_requests = []

    if not self._run:
        # This will remove the plugin from the discovery plugins to be runned.
        raise w3afRunOnce()
    else:
        # I will only run this one time. All calls to yahooSiteExplorer return the same url's
        self._run = False

        self._yse = yse( self._urlOpener )

        domain = urlParser.getDomain( fuzzableRequest.getURL() )
        if is_private_site( domain ):
            msg = 'There is no point in searching yahoo site explorer for site: "'
            msg += domain + '" . Yahoo doesnt index private pages.'
            raise w3afException(msg)

        results = self._yse.search( domain, 0, self._result_limit )

        for res in results:
            # Send the requests using threads:
            targs = (res.URL,)
            self._tm.startFunction( target=self._generate_fuzzable_requests, \
                                    args=targs, ownerObj=self )

        # Wait for all threads to finish
        self._tm.join( self )

        return self._new_fuzzable_requests
def api_is_open_port(self, ip_address_list, port_list, auto_target=False):
    '''
    Portscan the given targets with the shell's is_open_port() oracle,
    for example using PHP's include() error messages:

        for port in port_list:
            open = self.shell.is_open_port( host, port )
            if open:
                report_open( port )

    :param ip_address_list: The IP addresses / hostnames to scan.
    :param port_list: The ports to probe on every target.
    :param auto_target: When True, also scan every private IP address the
                        compromised host is connected to (tcp/udp payloads).
    :return: Dict of {ip_address: [open ports]}.
    '''
    result = {}

    # FIX: work on a copy so the caller's list is not mutated when
    # auto_target appends the discovered addresses below.
    ip_address_list = list(ip_address_list)

    if auto_target:
        tcp_result = self.exec_payload('tcp')
        udp_result = self.exec_payload('udp')

        #
        #    Load the private IP addresses as targets
        #
        for key in tcp_result:
            connected_to = tcp_result[key]['rem_address']
            if is_private_site( connected_to ):
                ip_address_list.append( connected_to )

        for key in udp_result:
            # FIX: was tcp_result[key] — copy-paste bug that ignored the
            # UDP payload output.
            connected_to = udp_result[key]['rem_address']
            if is_private_site( connected_to ):
                ip_address_list.append( connected_to )

    #
    #    Init
    #
    for ip_address in ip_address_list:
        result[ip_address] = []

    #
    #    Portscan
    #
    for ip_address in ip_address_list:
        for port in port_list:
            is_open = self.shell.is_open_port( ip_address, port )
            if is_open:
                result[ ip_address ].append( port )

    return result
def _is_public(self, domain):
    '''
    Tell whether *domain* should be checked for shared hosting.

    :param domain: The domain name / IP address to classify.
    :return: True when the domain is not a private address, False (with a
             debug message) otherwise.
    '''
    if not is_private_site(domain):
        return True

    om.out.debug('shared_hosting plugin is not checking for subdomains for'
                 ' domain: "%s" because it is a private address.' % domain)
    return False
def _local_test_inclusion(self, freq): ''' Check for RFI using a local web server @param freq: A fuzzableRequest object @return: None, everything is saved to the kb ''' # # The listen address is an empty string when I have no default route # # Only work if: # - The listen address is private and the target address is private # - The listen address is public and the target address is public # if self._listen_address == '': return is_listen_priv = is_private_site(self._listen_address) is_target_priv = is_private_site(freq.getURL().getDomain()) if (is_listen_priv and is_target_priv) or \ not (is_listen_priv or is_target_priv): om.out.debug('RFI test using local web server for URL: ' + freq.getURL()) om.out.debug('w3af is running a webserver') try: # Create file for remote inclusion self._create_file() # Start web server webroot = os.path.join(get_home_dir(), 'webroot') webserver.start_webserver(self._listen_address, self._listen_port, webroot) # Perform the real work self._test_inclusion(freq) # Wait for threads to finish self._tm.join(self) except Exception,e: msg = 'An error occurred while running local webserver: "%s"' % str(e) om.out.error( msg ) finally:
def discover(self, fuzzableRequest ):
    '''
    Run the classic GHDB checks against the target domain.

    @param fuzzableRequest: A fuzzableRequest instance that contains
                            (among other things) the URL to test.
    '''
    self._fuzzableRequests = []

    target_domain = fuzzableRequest.getURL().getDomain()

    # Google never indexes private addresses, so bail out early.
    if is_private_site(target_domain):
        raise w3afException('There is no point in searching google for'
                            ' "site:' + target_domain +
                            '" . Google doesnt index private pages.')

    return self._do_clasic_GHDB(target_domain)
def crawl(self, fuzzable_request):
    '''
    Run the classic GHDB checks against the target domain.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    '''
    domain = fuzzable_request.get_url().get_domain()

    if not is_private_site(domain):
        self._do_clasic_GHDB(domain)
        return

    # Google never indexes private sites; just tell the user and skip.
    msg = ('There is no point in searching google for "site:%s".'
           ' Google doesn\'t index private pages.')
    om.out.information(msg % domain)
def discover(self, fuzzable_request):
    '''
    Search Google for e-mail accounts / information about the target.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    '''
    target_url = fuzzable_request.get_url()

    # Private domains are never indexed, so there is nothing to search.
    if is_private_site(target_url.get_domain()):
        return

    self._google = google(self._uri_opener)
    self._domain = domain = target_url.get_domain()
    self._domain_root = target_url.get_root_domain()

    if self._fast_search:
        self._do_fast_search(domain)
    else:
        self._do_complete_search(domain)
def discover(self, fuzzable_request):
    """
    Search Bing for "@<root domain>" and analyze the results in parallel.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    """
    target_url = fuzzable_request.get_url()

    # Bing does not index private sites, so there is nothing to find.
    if is_private_site(target_url.get_domain()):
        return

    search_engine = bing(self._uri_opener)
    self._domain = target_url.get_domain()
    self._domain_root = target_url.get_root_domain()

    search_results = search_engine.get_n_results("@" + self._domain_root,
                                                 self._result_limit)

    # Send the requests using threads:
    self.worker_pool.map(self._find_accounts, search_results)
def discover(self, fuzzable_request):
    '''
    Query Bing for "@<root domain>" and hand every result to
    _find_accounts() via the worker pool.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    '''
    url = fuzzable_request.get_url()

    # Nothing to do for private sites: Bing never indexes them.
    if is_private_site(url.get_domain()):
        return

    engine = bing(self._uri_opener)
    self._domain = url.get_domain()
    self._domain_root = url.get_root_domain()

    hits = engine.get_n_results('@' + self._domain_root,
                                self._result_limit)

    # Send the requests using threads:
    self.worker_pool.map(self._find_accounts, hits)
def crawl(self, fuzzable_request):
    '''
    Search Bing with a "site:<domain>" query and create fuzzable requests
    from the result URLs.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    :raise w3afException: When the target domain is private (not indexed).
    '''
    bing_se = bing(self._uri_opener)
    domain = fuzzable_request.get_url().get_domain()

    if is_private_site(domain):
        msg = 'There is no point in searching Bing for "site:%s".'
        msg += ' Bing does\'nt index private pages.'
        raise w3afException(msg % domain)

    try:
        results = bing_se.get_n_results('site:' + domain,
                                        self._result_limit)
    except Exception:
        # FIX: was a bare "except:" which also swallowed SystemExit and
        # KeyboardInterrupt. Keep the deliberate best-effort behavior
        # (search failures are non-fatal) but only for ordinary exceptions.
        pass
    else:
        self.worker_pool.map(self._get_fuzzable_requests,
                             [r.URL for r in results])
def discover(self, fuzzableRequest ): ''' Does a search in archive.org and searches for links on the html. Then searches those URLs in the target site. This is a time machine ! @parameter fuzzableRequest: A fuzzableRequest instance that contains (among other things) the URL to test. ''' # Get the domain and set some parameters domain = fuzzableRequest.getURL().getDomain() if is_private_site( domain ): msg = 'There is no point in searching archive.org for "'+ domain + '"' msg += ' because it is a private site that will never be indexed.' raise w3afException(msg) else: # Work om.out.debug( 'archiveDotOrg plugin is testing: ' + fuzzableRequest.getURL() ) start_url = 'http://web.archive.org/web/*/' + fuzzableRequest.getURL() start_url = url_object( start_url ) references = self._spider_archive( [ start_url, ] , self._max_depth, domain ) return self._analyze_urls( references )
def test_is_private_site_true_case01(self):
    # The IPv4 loopback address must always be classified as private.
    verdict = is_private_site("127.0.0.1")
    self.assertTrue(verdict)
def test_is_private_site_false_case01(self):
    # 192.1.0.1 is a public address: only 192.168.0.0/16 is RFC-1918.
    verdict = is_private_site('192.1.0.1')
    self.assertFalse(verdict)
def test_is_private_site_false_case01(self):
    # Public IP: starts with 192 but is outside the 192.168.0.0/16 range.
    verdict = is_private_site("192.1.0.1")
    self.assertFalse(verdict)
def test_is_private_site_true_case01(self):
    # Loopback is always a private address.
    verdict = is_private_site('127.0.0.1')
    self.assertTrue(verdict)
def test_is_private_site_true_case02(self):
    # 192.168.0.1 is inside the RFC-1918 192.168.0.0/16 private range.
    verdict = is_private_site('192.168.0.1')
    self.assertTrue(verdict)
def test_is_private_site_true_case03(self):
    # Expected private — presumably because this hostname does not resolve
    # to a public address; confirm against is_private_site()'s docs.
    verdict = is_private_site('www.w3af-scanner.org')
    self.assertTrue(verdict)
def test_is_private_site_true_case02(self):
    # RFC-1918 private range 192.168.0.0/16.
    verdict = is_private_site("192.168.0.1")
    self.assertTrue(verdict)
def test_is_private_site_true_case03(self):
    # Expected private — presumably the hostname resolves to a private
    # address or not at all; verify against is_private_site()'s behavior.
    verdict = is_private_site("www.w3af-scanner.org")
    self.assertTrue(verdict)
def discover(self, fuzzableRequest ):
    '''
    Check whether the target shares its IP address with other domains
    (shared hosting) by reverse-searching Bing with "ip:<address>".

    @parameter fuzzableRequest: A fuzzableRequest instance that contains
                                (among other things) the URL to test.
    @return: An empty list; findings are stored in the kb.
    '''
    if not self._run:
        raise w3afRunOnce()
    else:
        # I will only run this one time. All calls to sharedHosting return the same url's
        self._run = False

        bing_wrapper = bing( self._urlOpener )

        domain = urlParser.getDomain( fuzzableRequest.getURL() )
        if is_private_site( domain ):
            msg = 'sharedHosting plugin is not checking for subdomains for domain: '
            msg += domain + ' because its a private address.'
            om.out.debug(msg)
        else:
            # Get the ip and do the search
            addrinfo = None
            try:
                addrinfo = socket.getaddrinfo(domain, 0)
            except:
                raise w3afException('Could not resolve hostname: ' + domain )
            ip_address_list = [info[4][0] for info in addrinfo]
            ip_address_list = list( set(ip_address_list) )

            # This is the best way to search, one by one!
            for ip_address in ip_address_list:
                results = bing_wrapper.getNResults('ip:'+ ip_address, self._result_limit )
                results = [ urlParser.baseUrl( r.URL ) for r in results ]
                results = list( set( results ) )

                # not vuln by default
                is_vulnerable = False

                if len(results) > 1:
                    # We may have something...
                    is_vulnerable = True

                    if len(results) == 2:
                        # Maybe we have this case:
                        # [Mon 09 Jun 2008 01:08:26 PM ART] - http://216.244.147.14/
                        # [Mon 09 Jun 2008 01:08:26 PM ART] - http://www.business.com/
                        # Where www.business.com resolves to 216.244.147.14; so we don't really
                        # have more than one domain in the same server.
                        res0 = socket.gethostbyname( urlParser.getDomain( results[0] ) )
                        res1 = socket.gethostbyname( urlParser.getDomain( results[1] ) )
                        if res0 == res1:
                            is_vulnerable = False

                if is_vulnerable:
                    severityOfThisVuln = severity.MEDIUM
                    v = vuln.vuln()
                    v.setPluginName(self.getName())
                    v.setURL(fuzzableRequest.getURL())
                    v.setId(1)
                    v['alsoInHosting'] = results
                    # NOTE(review): this literal was garbled in the extracted
                    # source; reconstructed as the evident original text.
                    msg = 'The web application under test seems to be in a shared hosting. '
                    msg += 'This list of domains, and the domain of the web application under '
                    msg += 'test, all point to the same IP address (%s):\n' % ip_address
                    for url in results:
                        domain = urlParser.getDomain(url)
                        msg += '- %s\n' % url
                        kb.kb.append( self, 'domains', domain)
                    v.setDesc( msg )
                    v.setName( 'Shared hosting' )
                    v.setSeverity(severityOfThisVuln)
                    om.out.vulnerability( msg, severity=severityOfThisVuln )
                    kb.kb.append( self, 'sharedHosting', v )
    return []
def test_is_private_site_false_case02(self):
    # A public, resolvable website must not be classified as private.
    verdict = is_private_site("www.w3af.org")
    self.assertFalse(verdict)
def test_is_private_site_false_case02(self):
    # Public internet hostname: expected to be non-private.
    verdict = is_private_site('www.w3af.org')
    self.assertFalse(verdict)