def discover(self, fuzzable_request):
    '''
    Kick off the google hacking search against the target domain, unless
    the target is a private site (google cannot have indexed it).

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    '''
    target_url = fuzzable_request.get_url()

    # Private sites (RFC1918 addresses, localhost, etc.) are never indexed
    # by google, so there is nothing to search for.
    if is_private_site(target_url.get_domain()):
        return

    self._google = google(self._uri_opener)
    self._domain = domain = target_url.get_domain()
    self._domain_root = target_url.get_root_domain()

    if self._fast_search:
        self._do_fast_search(domain)
    else:
        self._do_complete_search(domain)
def _do_clasic_GHDB(self, domain): ''' In classic GHDB, i search google for every term in the ghdb. ''' self._google_se = google(self._uri_opener) google_hack_list = self._read_ghdb() # Don't get discovered by google [at least try...] and avoid dups random.shuffle(google_hack_list) google_hack_set = set(google_hack_list) for gh in google_hack_set: search_term = 'site:%s %s' % (domain, gh.search) try: self._classic_worker(gh, search_term) except w3afException, w3: # Google is saying: "no more automated tests". om.out.error('GHDB exception: "' + str(w3) + '".') break
def crawl(self, fuzzable_request):
    '''
    Search google for "site:domain" and feed every result URL to the
    worker pool so fuzzable requests get extracted from them.

    :param fuzzable_request: A fuzzable_request instance that contains
                             (among other things) the URL to test.
    :raise w3afException: If the target is a private site, which google
                          can not have indexed.
    '''
    google_se = google(self._uri_opener)
    domain = fuzzable_request.get_url().get_domain()

    if is_private_site(domain):
        msg = 'There is no point in searching google for "site:%s".'
        # Fixed typo in the user-facing message: "does'nt" -> "doesn't".
        msg += ' Google doesn\'t index private pages.'
        raise w3afException(msg % domain)

    try:
        g_results = google_se.get_n_results('site:' + domain,
                                            self._result_limit)
    except Exception:
        # Best effort: any failure while talking to google (network error,
        # captcha/block, parsing problem) simply means no URLs to process.
        # The previous bare "except:" also swallowed KeyboardInterrupt and
        # SystemExit, which must be allowed to propagate.
        pass
    else:
        self.worker_pool.map(self._get_fuzzable_requests,
                             [r.URL for r in g_results])
def setUp(self):
    # Pick a random, well-known TV show as the search query so repeated
    # runs don't always hit google with the same term.
    shows = [('big bang theory', 20),
             ('two and half man', 20),
             ('doctor house', 20)]
    self.query, self.limit = random.choice(shows)
    self.gse = google(ExtendedUrllib())
def setUp(self):
    # Randomly choose one (query, result limit) pair so that consecutive
    # runs vary the google search term.
    candidates = (("big bang theory", 20),
                  ("two and half man", 20),
                  ("doctor house", 20))
    chosen = random.choice(candidates)
    self.query = chosen[0]
    self.limit = chosen[1]
    opener = ExtendedUrllib()
    self.gse = google(opener)