def end(self, name):
    """SAX end-element callback for the phishtank XML feed.

    Flips the inside_* parser-state flags as their tags close and, when an
    </entry> closes, checks the collected phishing URL against every host
    in the to_check list, recording a PhishTankMatch on a hit.
    """
    if name == 'phish_detail_url':
        self.inside_detail = False

    if name == 'url':
        self.inside_URL = False
        self.url_count += 1

    if name == 'entry':
        self.inside_entry = False

        #
        # Now I try to match the entry with an element in the
        # to_check_list
        #
        for candidate_host in self._to_check:
            if candidate_host not in self.url:
                continue

            phish_url = URL(self.url)
            candidate_host_url = URL(candidate_host)

            # Match either the exact domain or any of its sub-domains
            exact_match = (candidate_host_url.get_domain() ==
                           phish_url.get_domain())
            sub_domain_match = phish_url.get_domain().endswith(
                '.' + candidate_host_url.get_domain())

            if exact_match or sub_domain_match:
                phish_detail_url = URL(self.phish_detail_url)
                ptm = PhishTankMatch(phish_url, phish_detail_url)
                self.matches.append(ptm)
def test_set_domain(self):
    """set_domain() replaces the host and get_domain() reflects it."""
    url = URL("http://w3af.com/def/jkl/")
    self.assertEqual(url.get_domain(), "w3af.com")

    # Same round-trip for a TLD host, a bare label and a trailing-dot FQDN
    for new_domain in ("host.tld", "foobar", "foobar."):
        url.set_domain(new_domain)
        self.assertEqual(url.get_domain(), new_domain)
def test_set_domain(self):
    """Changing the domain via set_domain() updates get_domain()."""
    url = URL('http://w3af.com/def/jkl/')
    self.assertEqual(url.get_domain(), 'w3af.com')

    url.set_domain('host.tld')
    self.assertEqual(url.get_domain(), 'host.tld')

    # A bare label (no dot) is accepted as-is
    url.set_domain('foobar')
    self.assertEqual(url.get_domain(), 'foobar')

    # A trailing dot (FQDN root notation) is preserved verbatim
    url.set_domain('foobar.')
    self.assertEqual(url.get_domain(), 'foobar.')
def test_default_proto(self):
    """
    http is the default protocol, we can provide URLs with no proto
    """
    url = URL("w3af.com")

    self.assertEqual(url.get_domain(), "w3af.com")
    self.assertEqual(url.get_protocol(), "http")
def do_ALL(self):
    """Handle any HTTP verb proxied through spider_man.

    Parses the request path, detects the special TERMINATE_URL, hands the
    resulting fuzzable request to the spider_man plugin and forwards the
    request to the real server (grep'ing the response only when the request
    targets the configured domain).
    """
    global global_first_request
    if global_first_request:
        global_first_request = False
        om.out.information(
            'The user is navigating through the spider_man proxy.')

    # Convert to url_object
    path = URL(self.path)

    if path == TERMINATE_URL:
        om.out.information('The user terminated the spider_man session.')
        self._send_end()
        self._spider_man.stop_proxy()
        return

    om.out.debug("[spider_man] Handling request: %s %s" % (self.command,
                                                           path))

    # Send this information to the plugin so it can send it to the core
    freq = self._create_fuzzable_request()
    self._spider_man.append_fuzzable_request(freq)

    # Only grep responses that belong to the configured target domain
    grep = path.get_domain() == self.server.w3afLayer.target_domain

    try:
        response = self._send_to_server(grep=grep)
    # FIX: `except Exception, e` is Python-2-only syntax; `as e` is valid
    # from 2.6 onwards and required by Python 3.
    except Exception as e:
        self._send_error(e)
def test_default_proto(self):
    """
    http is the default protocol, we can provide URLs with no proto
    """
    url = URL('w3af.com')

    # No scheme in the input: domain is kept, protocol defaults to http
    self.assertEqual(url.get_domain(), 'w3af.com')
    self.assertEqual(url.get_protocol(), 'http')
def search(self, query, start, count=10):
    """
    Search the web with Bing.

    This method is based from the msn.py file from the massive enumeration
    toolset, coded by pdp and released under GPL v2.
    """
    base_url = 'http://www.bing.com/search?'
    encoded_query = urllib.urlencode({
        'q': query,
        'first': start + 1,
        'FORM': 'PERE'
    })

    url_instance = URL(base_url + encoded_query)
    response = self._uri_opener.GET(url_instance,
                                    headers=self._headers,
                                    cache=True,
                                    grep=False)

    # This regex might become outdated, but the good thing is that we have
    # test_bing.py which is going to fail and tell us that it's outdated
    href_matches = re.findall('<a href="((http|https)(.*?))" h="ID=SERP,',
                              response.get_body())

    results = set()

    for raw_url, _, _ in href_matches:
        try:
            parsed_url = URL(raw_url)
        except ValueError:
            continue

        # Test for full match.
        if parsed_url.get_domain() in self.BLACKLISTED_DOMAINS:
            continue

        # Now test for partial match
        for blacklisted_domain in self.BLACKLISTED_DOMAINS:
            if blacklisted_domain in parsed_url.get_domain():
                # ignore this domain.
                break
        else:
            results.add(BingResult(parsed_url))

    return results
def _url_matches(self, phishing_url, phishtank_detail_url):
    """
    :param phishing_url: The url (as string) from the phishtank database
    :param phishtank_detail_url: The phishtank detail page for that entry
    :return: A PhishTankMatch if url matches what we're looking for,
             None if there is no match
    """
    query_result = self._to_check_esm.query(phishing_url)
    if not query_result:
        return None

    phish_url = URL(phishing_url)
    target_host_url = URL(query_result[0])

    # Accept the exact target domain or any sub-domain of it
    exact_match = target_host_url.get_domain() == phish_url.get_domain()
    sub_domain_match = phish_url.get_domain().endswith(
        '.' + target_host_url.get_domain())

    if exact_match or sub_domain_match:
        phish_detail_url = URL(phishtank_detail_url)
        return PhishTankMatch(phish_url, phish_detail_url)

    return None
def search(self, query, start, count=10):
    """
    Search the web with Bing.

    This method is based from the msn.py file from the massive enumeration
    toolset, coded by pdp and released under GPL v2.
    """
    params = urllib.urlencode({'q': query,
                               'first': start + 1,
                               'FORM': 'PERE'})
    url_instance = URL('http://www.bing.com/search?' + params)

    response = self._uri_opener.GET(url_instance, headers=self._headers,
                                    cache=True, grep=False)

    # This regex might become outdated, but the good thing is that we have
    # test_bing.py which is going to fail and tell us that it's outdated
    re_match = re.findall('<a href="((http|https)(.*?))" h="ID=SERP,',
                          response.get_body())

    results = set()

    for candidate, _, _ in re_match:
        try:
            candidate_url = URL(candidate)
        except ValueError:
            # Not a parseable URL, skip it
            continue

        domain = candidate_url.get_domain()

        # Test for full match.
        if domain in self.BLACKLISTED_DOMAINS:
            continue

        # Now test for partial match
        for blacklisted_domain in self.BLACKLISTED_DOMAINS:
            if blacklisted_domain in domain:
                # ignore this domain.
                break
        else:
            results.add(BingResult(candidate_url))

    return results
def setUp(self):
    """Reset the knowledge base and create a fresh w3af core; when
    MOCK_RESPONSES is set, register the target URL with httpretty.
    """
    self.kb.cleanup()
    self.w3afcore = w3afCore()

    if self.MOCK_RESPONSES:
        httpretty.enable()

        # FIX: fail with a clear message on a bad target_url instead of an
        # opaque traceback — consistent with the sibling setUp() that
        # already guards this call.
        try:
            url = URL(self.target_url)
        except ValueError as ve:
            msg = 'When using MOCK_RESPONSES you need to set the'\
                  ' target_url attribute to a valid URL, exception was:'\
                  ' "%s".'
            raise Exception(msg % ve)

        domain = url.get_domain()
        proto = url.get_protocol()
        port = url.get_port()

        self._register_httpretty_uri(proto, domain, port)
def test_phishtank_match_last_url(self):
    """Crawling the last vulnerable URL yields exactly one phishing vuln."""
    phishtank_inst = self.w3afcore.plugins.get_plugin_inst('crawl',
                                                           'phishtank')

    vuln_url = URL(self.get_last_vulnerable_url())
    phishtank_inst.crawl(FuzzableRequest(vuln_url))

    vulns = self.kb.get('phishtank', 'phishtank')
    self.assertEqual(len(vulns), 1, vulns)

    vuln = vulns[0]
    self.assertEqual(vuln.get_name(), 'Phishing scam')
    self.assertEqual(vuln.get_severity(), MEDIUM)
    self.assertEqual(vuln.get_url().get_domain(), vuln_url.get_domain())
def test_phishtank_match(self):
    """A known-vulnerable URL is reported as a medium-severity phishing scam."""
    phishtank_inst = self.w3afcore.plugins.get_plugin_inst('crawl',
                                                           'phishtank')

    vuln_url = URL(self.get_vulnerable_url(phishtank_inst))
    phishtank_inst.crawl(FuzzableRequest(vuln_url))

    vulns = self.kb.get('phishtank', 'phishtank')
    self.assertEqual(len(vulns), 1, vulns)

    vuln = vulns[0]
    self.assertEqual(vuln.get_name(), 'Phishing scam')
    self.assertEqual(vuln.get_severity(), MEDIUM)
    self.assertEqual(vuln.get_url().get_domain(), vuln_url.get_domain())
def do_ALL(self):
    """Handle any HTTP verb proxied through spider_man.

    Rebuilds an absolute URL for CONNECT-chained requests, short-circuits
    the favicon and TERMINATE control URLs, feeds the fuzzable request to
    the spider_man plugin and forwards the request to the real server.
    """
    global global_first_request
    if global_first_request:
        global_first_request = False
        msg = 'The user is navigating through the spider_man proxy.'
        om.out.information(msg)

    # convert relative URL to absolute if request came from CONNECT
    if hasattr(self.server, 'chainedHandler'):
        base_path = "https://" + self.server.chainedHandler.path
        path = base_path + self.path
    else:
        path = self.path

    # Convert to url_object
    path = URL(path)

    # Ignore favicon.ico requests
    # https://github.com/andresriancho/w3af/issues/9135
    if path == TERMINATE_FAVICON_URL:
        return

    if path == TERMINATE_URL:
        om.out.information('The user terminated the spider_man session.')
        self._send_end()
        self._spider_man.stop_proxy()
        return

    msg = '[spider_man] Handling request: %s %s'
    om.out.debug(msg % (self.command, path))

    # Send this information to the plugin so it can send it to the core
    freq = self._create_fuzzable_request()
    self._spider_man.append_fuzzable_request(freq)

    # Only grep responses that belong to the configured target domain
    grep = path.get_domain() == self.server.w3afLayer.target_domain

    try:
        response = self._send_to_server(grep=grep)
    # FIX: `except Exception, e` is Python-2-only syntax; `as e` is valid
    # from 2.6 onwards and required by Python 3.
    except Exception as e:
        self._send_error(e)
def setUp(self):
    """Reset the knowledge base and create a fresh w3af core; when
    MOCK_RESPONSES is set, validate target_url and register it with
    httpretty.
    """
    self.kb.cleanup()
    self.w3afcore = w3afCore()

    if self.MOCK_RESPONSES:
        httpretty.enable()

        try:
            url = URL(self.target_url)
        # FIX: `except ValueError, ve` is Python-2-only syntax; `as ve`
        # is valid from 2.6 onwards and required by Python 3.
        except ValueError as ve:
            msg = 'When using MOCK_RESPONSES you need to set the'\
                  ' target_url attribute to a valid URL, exception was:'\
                  ' "%s".'
            raise Exception(msg % ve)

        domain = url.get_domain()
        proto = url.get_protocol()
        port = url.get_port()

        self._register_httpretty_uri(proto, domain, port)
def test_set_domain_with_port(self):
    """set_domain() must keep the explicit port in the net location."""
    url = URL('http://w3af.com:443/def/jkl/')
    self.assertEqual(url.get_domain(), 'w3af.com')

    # Only the host changes; the :443 port survives the update
    url.set_domain('host.tld')
    self.assertEqual(url.get_net_location(), 'host.tld:443')
def test_set_domain_with_port(self):
    """Replacing the domain preserves the explicit port."""
    url = URL("http://w3af.com:443/def/jkl/")

    self.assertEqual(url.get_domain(), "w3af.com")

    url.set_domain("host.tld")

    # Net location keeps the original :443 after the domain swap
    self.assertEqual(url.get_net_location(), "host.tld:443")