    def optimize(self, query):
        url = 'https://www.shodan.io/search/_summary?query={0}'.format(query)
        utils.print_good("Analyze first page for more results")
        r = sender.send_get(self.options, url, self.cookies)

        if r.status_code == 200:
            soup = utils.soup(r.text)
        else:
            return False

        query_by_cities = []
        # check if the query already has a country filter or not
        if 'country' in query:
            links = soup.find_all("a")
            country = utils.get_country_code(utils.url_decode(query))
            for link in links:
                if 'city' in link.get('href'):
                    item = {
                        'url': link.get('href'),
                        'city': link.text,
                        'country': country
                    }
                    utils.print_debug(self.options, item)
                    query_by_cities.append(item)
        else:
            links = soup.find_all("a")
            countries = []
            for link in links:
                if 'country' in link.get('href'):
                    countries.append(utils.get_country_code(
                        utils.url_decode(link.get('href'))))
            utils.print_debug(self.options, countries)

            for country in countries:
                # send the request again to get the cities of each country
                country_query = utils.url_encode(' country:"{0}"'.format(country))
                url = 'https://www.shodan.io/search/_summary?query={0}{1}'.format(
                    query, country_query)
                r1 = sender.send_get(self.options, url, self.cookies)
                utils.random_sleep(5, 8)
                utils.print_info(
                    "Sleep for a couple of seconds because the Shodan server is really strict")
                if r1.status_code == 200:
                    soup1 = utils.soup(r1.text)
                    links = soup1.find_all("a")
                    for link in links:
                        if 'city' in link.get('href'):
                            item = {
                                'url': link.get('href'),
                                'city': link.text,
                                'country': country
                            }
                            utils.print_debug(self.options, item)
                            query_by_cities.append(item)

        utils.print_debug(self.options, query_by_cities)
        return query_by_cities
    def brute_country_code(self, query):
        utils.print_info("Brute force the query with country codes")
        if 'country' in query:
            # replace the existing country filter with a placeholder
            raw_query = query.replace(
                utils.get_country_code(utils.url_decode(query)), '[replace]')
        else:
            # no country filter yet, so append one
            raw_query = query + utils.url_encode(' country:"[replace]"')

        for country_code in utils.full_country_code:
            query = raw_query.replace('[replace]', country_code)
            url = 'https://www.shodan.io/search?query={1}&page={0}'.format(
                str(1), query)
            self.sending(url)
    def brute_country_code(self, query):
        utils.print_info("Brute force the query with country codes")
        # strip any existing country filter first
        raw_query = utils.url_decode(query)
        if 'location.country' in raw_query:
            country = utils.get_country_code(raw_query, source='censys')
            raw_query = raw_query.replace('AND ' + country, '').replace(
                'and ' + country, '').replace(country, '')

        raw_query += ' and location.country_code:"[replace]"'
        for country_code in utils.full_country_code:
            query = utils.url_encode(
                raw_query.replace('[replace]', country_code))
            url = 'https://censys.io/ipv4/_search?q={1}&page={0}'.format(
                str(1), query)
            self.sending(url)
    def optimize(self, json_response):
        analytics = json_response.get('aggs')
        if not analytics:
            return False

        # get the analytics response
        url = 'https://www.zoomeye.org/aggs/{0}'.format(analytics)
        r = sender.send_get(self.options, url, headers=self.headers)
        if r.status_code == 200:
            analytics_json = utils.get_json(r.text)
        else:
            return False

        analytics_countries = analytics_json.get('country')
        raw_query = self.options['zoomeye_query']
        clean_query = self.options['zoomeye_query']
        if 'country' in raw_query:
            country_code = utils.get_country_code(utils.url_decode(raw_query))
            # strip the country and subdivisions filters if they exist
            clean_query = raw_query.replace(' +country:', '').replace(
                '"{0}"'.format(str(country_code)), '')

        for country_item in analytics_countries:
            utils.print_info(
                "Optimize query by filtering with country: {0}".format(
                    country_item.get('name')))
            # loop through the cities of each country
            for city in country_item.get('subdivisions'):
                if 'country' in raw_query:
                    real_query = raw_query + ' +subdivisions:"{0}"'.format(
                        city.get('name'))
                else:
                    real_query = clean_query + \
                        ' +country:"{0}"'.format(country_item.get('name')) + \
                        ' +subdivisions:"{0}"'.format(city.get('name'))

                query = utils.url_encode(real_query)
                url = 'https://www.zoomeye.org/search?q={0}&t=host'.format(
                    query)
                r = sender.send_get(self.options, url, headers=self.headers)
                if r and r.status_code == 200:
                    json_response = utils.get_json(r.text)
                    self.analyze(json_response)
    def brute_country_code(self, query):
        utils.print_info("Brute force the query with country codes")
        # e.g. && country=US
        raw_query = utils.just_b64_decode(utils.url_decode(query))
        if 'country' in raw_query:
            # replace the existing country filter with a placeholder
            raw_query = raw_query.replace(
                utils.get_country_code(raw_query, source='fofa'), '[replace]')
        else:
            raw_query += ' && country:"[replace]"'

        for country_code in utils.full_country_code:
            query = raw_query.replace('[replace]', country_code)
            query = utils.url_encode(utils.just_b64_encode(query))
            url = 'https://fofa.so/result?page={0}&qbase64={1}'.format(
                str(1), query)
            self.sending(url)
    def optimize(self, query):
        utils.print_good("Analyze metadata page for more results")
        raw_query = utils.url_decode(query)

        # strip any existing country filter from the query
        if 'location.country' in raw_query:
            country = utils.get_country_code(raw_query, source='censys')
            query = utils.url_encode(raw_query.replace(
                'AND ' + country, '').replace(
                'and ' + country, '').replace(country, ''))

        url = 'https://censys.io/ipv4/metadata?q={0}'.format(query)
        r = sender.send_get(self.options, url, self.cookies)
        if r.status_code == 200:
            soup = utils.soup(r.text)
        else:
            return False

        query_by_countries = []
        # parse the Country Breakdown table from the metadata page
        divs = soup.find_all("div", 'left-table')
        country_tables = []
        for div in divs:
            if 'Country Breakdown' in div.h6.text:
                country_tables = div.find_all('tr')

        for row in country_tables:
            item = {
                'url': 'N/A',
                'country': 'N/A'
            }
            tds = row.find_all('td')
            for td in tds:
                if td.findChildren('a'):
                    item['url'] = self.base_url + td.a.get('href')
                    item['country'] = td.a.text
            query_by_countries.append(item)

        utils.print_debug(self.options, query_by_countries)
        return query_by_countries