def optimize(self, query):
    """Analyze the first Shodan summary page and build per-city sub-queries.

    Shodan caps how many results one query can return, so splitting a broad
    query by country and city lets the caller harvest more total results.

    Args:
        query: URL-encoded Shodan search query string.

    Returns:
        List of ``{'url', 'city', 'country'}`` dicts on success, or
        ``False`` when the summary page could not be fetched.
    """
    url = 'https://www.shodan.io/search/_summary?query={0}'.format(query)
    utils.print_good("Analyze first page for more result")
    r = sender.send_get(self.options, url, self.cookies)
    if r.status_code != 200:
        return False
    soup = utils.soup(r.text)

    query_by_cities = []
    # if the query already filters by country, only expand by city
    if 'country' in query:
        country = utils.get_country_code(utils.url_decode(query))
        for link in soup.find_all("a"):
            # Tag.get returns None when the anchor has no href; guard so
            # the `in` test cannot raise TypeError
            href = link.get('href') or ''
            if 'city' in href:
                item = {
                    'url': href,
                    'city': link.text,
                    'country': country,
                }
                utils.print_debug(self.options, item)
                query_by_cities.append(item)
    else:
        # first collect every country listed on the summary page
        countries = []
        for link in soup.find_all("a"):
            href = link.get('href') or ''
            if 'country' in href:
                countries.append(
                    utils.get_country_code(utils.url_decode(href)))
        utils.print_debug(self.options, countries)

        # then fetch each country's summary page to enumerate its cities
        for country in countries:
            country_query = utils.url_encode(' country:"{0}"'.format(country))
            url = 'https://www.shodan.io/search/_summary?query={0}{1}'.format(
                query, country_query)
            r1 = sender.send_get(self.options, url, self.cookies)
            utils.random_sleep(5, 8)
            utils.print_info(
                "Sleep for couple seconds because Shodan server is really strict")
            if r1.status_code == 200:
                soup1 = utils.soup(r1.text)
                for link in soup1.find_all("a"):
                    href = link.get('href') or ''
                    if 'city' in href:
                        item = {
                            'url': href,
                            'city': link.text,
                            'country': country,
                        }
                        utils.print_debug(self.options, item)
                        query_by_cities.append(item)

    utils.print_debug(self.options, query_by_cities)
    return query_by_cities
def pages(self, page_num):
    """Walk Censys result pages 2..page_num and feed each one to analyze().

    Stops early when Censys shows its error banner (free-tier page limit).
    """
    for page in range(2, int(page_num) + 1):
        utils.print_info("Get more result from page: {0}".format(str(page)))
        utils.random_sleep(1, 2)

        encoded_query = utils.url_encode(self.options['censys_query'])
        url = 'https://censys.io/ipv4/_search?q={1}&page={0}'.format(
            str(page), encoded_query)
        r = sender.send_get(self.options, url, self.cookies)

        if r.status_code != 200:
            continue
        response = r.text
        # Censys renders an alert banner once the page limit is reached
        if 'class="alert alert-danger"' in response:
            utils.print_bad("Reach to the limit at page {0}".format(
                str(page)))
            return
        self.analyze(utils.soup(response))
def pages(self, page_num):
    """Walk ZoomEye host-search pages 2..page_num, analyzing each response.

    Stops early when ZoomEye replies with its "forbidden" quota message.
    """
    for page in range(2, int(page_num) + 1):
        utils.random_sleep(1, 2)
        utils.print_info("Get more result from page: {0}".format(str(page)))

        encoded_query = utils.url_encode(self.options['zoomeye_query'])
        url = 'https://www.zoomeye.org/search?q={0}&t=host&p={1}'.format(
            encoded_query, str(page))
        r = sender.send_get(self.options, url, headers=self.headers)

        if r.status_code != 200:
            continue
        response = r.text
        # ZoomEye returns a JSON "forbidden" body once the quota is hit
        if '"msg": "forbidden"' in response:
            utils.print_bad("Reach to the limit at page {0}".format(
                str(page)))
            return
        json_response = utils.get_json(response)
        self.analyze(json_response)
        self.optimize(json_response)
def pages(self, page_num):
    """Walk Shodan result pages 2..page_num and feed each one to analyze().

    Sleeps between requests because Shodan rate-limits aggressively; stops
    early once Shodan shows its limit-reached error banner.
    """
    for page in range(2, int(page_num) + 1):
        utils.print_info("Sleep for couple seconds because Shodan server is really strict")
        utils.random_sleep(3, 6)
        utils.print_info("Get more result from page: {0}".format(str(page)))

        encoded_query = utils.url_encode(self.options['shodan_query'])
        url = 'https://www.shodan.io/search?query={1}&page={0}'.format(
            str(page), encoded_query)
        r = sender.send_get(self.options, url, self.cookies)

        if r.status_code != 200:
            continue
        response = r.text
        # Shodan renders a centered error alert once the page limit is hit
        if 'class="alert alert-error text-center"' in response:
            utils.print_bad(
                "Reach to the limit at page {0}".format(str(page)))
            return
        self.analyze(utils.soup(response))
def pages(self, page_num):
    """Fetch result pages 1..page_num and accumulate analyzed output.

    Args:
        page_num: number of pages to request.

    Returns:
        List of accumulated ``analyze()`` results, or ``False`` as soon as
        a fetched page yields nothing (no more results available).
    """
    more_output = []
    for i in range(1, int(page_num) + 1):
        utils.print_debug(self.options, "Sleep for couple seconds")
        utils.random_sleep(1, 3)
        utils.print_info("Get more result from page: {0}".format(str(i)))

        data = {
            "type": "exploits",
            "sort": "default",
            "query": self.query,
            "title": not self.options.get('relatively'),
            # each page holds 10 entries
            "offset": i * 10,
        }
        r = sender.send_post(
            self.options, self.base_url, data, is_json=True)
        if r.status_code != 200:
            continue
        response = json.loads(r.text)
        # call analyze() exactly once per page — the previous version
        # invoked it twice (once in the condition, once for the +=),
        # doubling the parsing work and any side effects
        page_output = self.analyze(response)
        if not page_output:
            return False
        more_output += page_output
    return more_output