def source_parsing(options):
    # search on a specific search engine, which is the default routine
    if 'custom' in options.get('module'):
        if options.get('query_list'):
            queries = utils.just_read(options.get('query_list')).splitlines()
            for query in queries:
                options['query'] = query
                single_query(options)
        # query by multiple sources
        elif options.get('source_list'):
            query_by_source = utils.get_json(
                utils.just_read(options.get('source_list')))
            if isinstance(query_by_source, dict):
                for key, value in query_by_source.items():
                    options['source'] = key
                    options['query'] = value
                    single_query(options)
            else:
                utils.print_bad(
                    "Looks like your source file does not match the expected pattern")
        else:
            single_query(options)

    # search for exploits
    if 'exploit' in options.get('module'):
        if options.get('target_list'):
            targets = utils.just_read(options.get('target_list')).splitlines()
            for query in targets:
                options['query'] = query
                single_query(options)
        else:
            module_query(options)
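# Usage sketch for source_parsing (illustrative only; the file names below are
# hypothetical, the option keys are the ones read above). 'query_list' points
# at a plain-text file with one query per line; 'source_list' points at a JSON
# file mapping a source name to a query string, e.g.
# {"fofa": "port=\"9200\"", "shodan": "product:nginx"}.
example_options = {
    'module': 'custom',             # selects the custom-search branch
    'query_list': None,
    'source_list': 'sources.json',  # hypothetical path
    'target_list': None,
}
# source_parsing(example_options)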
def get_defualt_user_settings():
    # build a {property name: default value} map from the settings schema
    jsettings = get_json('user_settings.json')
    default = {}
    for prop, value in jsettings['properties'].items():
        default.update({prop: value['default']})
    return default
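# Hedged sketch of the input shape get_defualt_user_settings expects, inferred
# from the loop above (the property names here are made up, not the real
# user_settings.json):
_example_settings = {
    'properties': {
        'language': {'default': 'en'},
        'notifications_enabled': {'default': True},
    }
}
# With this input the function would return:
# {'language': 'en', 'notifications_enabled': True}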
def optimize(self, json_response):
    analytics = json_response.get('aggs')
    if not analytics:
        return False

    # get the analytics response
    url = 'https://www.zoomeye.org/aggs/{0}'.format(analytics)
    r = sender.send_get(self.options, url, headers=self.headers)
    if r.status_code == 200:
        analytics_json = utils.get_json(r.text)
    else:
        return False

    analytics_countries = analytics_json.get('country')

    raw_query = self.options['zoomeye_query']
    clean_query = self.options['zoomeye_query']
    if 'country' in raw_query:
        country_code = utils.get_country_code(utils.url_decode(raw_query))
        # strip the country filter (and its value) if it is already present
        clean_query = raw_query.replace(' +country:', '').replace(
            '"{0}"'.format(str(country_code)), '')

    for country_item in analytics_countries:
        utils.print_info(
            "Optimize query by filtering with country: {0}".format(
                country_item.get('name')))
        # loop through the cities of this country
        for city in country_item.get('subdivisions'):
            if 'country' in raw_query:
                real_query = raw_query + ' +subdivisions:"{0}"'.format(
                    city.get('name'))
            else:
                real_query = clean_query + \
                    ' +country:"{0}"'.format(country_item.get('name')) + \
                    ' +subdivisions:"{0}"'.format(city.get('name'))

            query = utils.url_encode(real_query)
            url = 'https://www.zoomeye.org/search?q={0}&t=host'.format(query)
            r = sender.send_get(self.options, url, headers=self.headers)
            if r and r.status_code == 200:
                json_response = utils.get_json(r.text)
                self.analyze(json_response)
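# Worked example of the query rewriting in optimize() above (queries invented):
#   raw_query = 'app:"nginx" +country:"DE"'
#       country is already present, so each city only appends a subdivision:
#       'app:"nginx" +country:"DE" +subdivisions:"Bavaria"'
#   raw_query = 'app:"nginx"'
#       no country filter, so both facets from the aggregation are appended:
#       'app:"nginx" +country:"Germany" +subdivisions:"Bavaria"'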
def parse_domains(line):
    if utils.is_json(line.strip()):
        jsonl = utils.get_json(line)
    elif ';;' in line.strip():
        jsonl = parse_special_line(line)
    else:
        jsonl = {'domain': line.strip()}
    return jsonl
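# Illustrative inputs for parse_domains. The ';;' branch is handled by
# parse_special_line, whose field layout is not shown here, so that case is
# only hinted at:
#   parse_domains('{"domain": "example.com", "ip": "93.184.216.34"}')
#       -> parsed as JSON via utils.get_json
#   parse_domains('example.com;;extra-field')
#       -> delegated to parse_special_line
#   parse_domains('example.com')
#       -> {'domain': 'example.com'}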
def update_fsid(self):
    if not self.city or not self.lat or not self.lng:
        return False

    url = 'https://api.foursquare.com/v2/venues/search'
    params = {
        # 'query': self.place.name.encode('utf-8'),
        # 'near': self.city.name.encode('utf-8'),
        'll': '%s,%s' % (self.lat, self.lng),
        'radius': 100,
        'intent': 'browse',
    }
    params.update(FS_AUTH)
    data = get_json(url, params)

    if len(data['response']['venues']):
        venues = data['response']['venues']
        nearest = {
            'distance': None,
            'ids': [],
            'names': [],
        }
        place_name = self.place.name
        print('---> %s <---' % place_name)
        # keep the venue(s) with the smallest Levenshtein distance to the place name
        for v in venues:
            d = Levenshtein.distance(place_name, v['name'])
            if nearest['distance'] is None or d < nearest['distance']:
                nearest['distance'] = d
                nearest['ids'] = [v['id']]
                nearest['names'] = [v['name']]
            elif d == nearest['distance']:
                nearest['ids'].append(v['id'])
                nearest['names'].append(v['name'])

        for n in nearest['names']:
            print(n)

        max_distance = 5
        if len(place_name) < 5:
            max_distance = len(place_name) / 2

        if len(nearest['ids']) > 1:
            # raise Exception('More ids for addr = %s (%s) in place = %s'
            #                 % (self.address, self.id, self.place.id))
            print('--- More ids. EXIT!!!')
            return
        if nearest['distance'] > max_distance:
            print('--- More distance. EXIT!!!')
            return

        self.fsid = nearest['ids'][0]
        self.save()
        print('+++ Set or update FSID')
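# Quick illustration of the name-matching threshold used in update_fsid.
# This assumes the python-Levenshtein package, which provides
# Levenshtein.distance(a, b); the venue names are made up.
import Levenshtein

print(Levenshtein.distance('Coffee House', 'Coffee Hause'))  # 1 -> close enough
print(Levenshtein.distance('Coffee House', 'Pizza Palace'))  # large -> rejected by max_distance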
def get_fs_photos(self, fsid):
    url = 'https://api.foursquare.com/v2/venues/%s/photos' % fsid
    data = get_json(url, FS_AUTH)
    present_photos = self.foursquare_photo.all().values_list('photo_id', flat=True)
    for k in data['response']['photos']['items']:
        if k['id'] not in present_photos:
            param = {
                'place': self,
                'photo_id': k['id'],
                'prefix': k['prefix'],
                'suffix': k['suffix'],
            }
            FoursquarePhoto(**param).save()
def update_geo(self):
    if not self.city:
        self.city = self.place.sites.all()[0].city
    url = 'http://geocode-maps.yandex.ru/1.x/'
    params = {
        'geocode': self.city.name.encode('utf-8') + ', ' + self.address.encode('utf-8'),
        'format': 'json',
    }
    data = get_json(url, params)
    # the Yandex geocoder returns "longitude latitude" separated by a space
    pos = data['response']['GeoObjectCollection']['featureMember'][0] \
        ['GeoObject']['Point']['pos'].split(' ')
    self.lng = pos[0]
    self.lat = pos[1]
    self.save()
def validate_user_settings(self, value):
    if not value:
        return value
    # validate the data against this schema:
    # https://jsoneditoronline.org/?id=9a132ea5fc5d45a69335fa0a1775d80c
    _schema = utils.get_json('user_settings.json')
    if value:
        try:
            jsonschema.validate(instance=value, schema=_schema)
        except jsonschema.exceptions.ValidationError as ve:
            raise serializers.ValidationError(ve)
    return value
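# Minimal standalone sketch of the jsonschema check performed above, with a toy
# schema instead of the real user_settings.json:
import jsonschema

_toy_schema = {
    'type': 'object',
    'properties': {'language': {'type': 'string'}},
    'required': ['language'],
}
jsonschema.validate(instance={'language': 'en'}, schema=_toy_schema)  # passes
# jsonschema.validate(instance={}, schema=_toy_schema)  # raises ValidationError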
def pages(self, page_num):
    for i in range(2, int(page_num) + 1):
        utils.random_sleep(1, 2)
        utils.print_info("Get more results from page: {0}".format(str(i)))
        query = utils.url_encode(self.options['zoomeye_query'])
        url = 'https://www.zoomeye.org/search?q={0}&t=host&p={1}'.format(
            query, str(i))
        r = sender.send_get(self.options, url, headers=self.headers)

        if r.status_code == 200:
            response = r.text
            if '"msg": "forbidden"' in response:
                utils.print_bad("Reached the limit at page {0}".format(str(i)))
                return
            else:
                json_response = utils.get_json(response)
                self.analyze(json_response)
                self.optimize(json_response)
def testRetrieveFormatDataAtMeEndpoint(self):
    _schema = utils.get_json('me_endpoint_responce.json')
    users = DomofonUser.objects.all()
    for user in users:
        # print(user.user_info)
        self.login_as(username=user.username, password="******")
        me_endpoint = self.app.get(url='/api/me/', user=self.user, headers={
            'X-CSRFToken': self.csrf_token
        }).json
        # if no exception is raised the test passes, i.e. the response
        # is valid according to _schema
        try:
            jsonschema.validate(instance=me_endpoint, schema=_schema)
        except jsonschema.exceptions.ValidationError as ve:
            raise ValueError(
                f'wrong data for user_id={user.pk} at api/me endpoint')
        self.logout()
def get_result(self):
    result = {}
    if self.data['slug'] == self.PREDEFINED[0]:
        user_settings_data = utils.get_json('user_settings.json')
        result.update({"properties": user_settings_data['properties']})
        result.update({"ui_strings": user_settings_data['ui_strings']})
    if self.data['slug'] == self.PREDEFINED[1]:
        if settings.DEBUG:
            result = self.get_user_clients()
        else:
            result = {}
    if self.data['slug'] == self.PREDEFINED[2]:
        result = {
            'PHONE_CACHE_TTL': PHONE_CACHE_TTL,
            'VERIFY_CNT_LIMIT': VERIFY_CNT_LIMIT
        }
    return result
def sending(self, url):
    # send the request and process the response
    r1 = sender.send_get(self.options, url, headers=self.headers)
    if r1:
        response = r1.text
        if self.options['store_content']:
            ts = str(int(time.time()))
            raw_file = self.options['raw'] + \
                "/zoomeye/{0}_{1}".format(utils.url_encode(
                    url.replace(self.base_url, '')).replace('/', '_'), ts)
            utils.just_write(raw_file, response)

        json_response = utils.get_json(response)
        self.analyze(json_response)

        # loop through pages if you're logged in
        page_num = self.get_num_pages(json_response)
        if self.logged_in and page_num and int(page_num) > 1:
            self.pages(page_num)

        # get aggs and find more results
        self.optimize(json_response)
def get_secrets_json(self):
    print(os.path.abspath(self.secrets_path))
    data = get_json(self.secrets_path)
    return data
def get_secrets_json(self):
    data = get_json(self.api_key_path)
    return data