def get_dir_list(self, get_dir_from_web):
    log.info('Get list for {}'.format(self.url))
    if get_dir_from_web or not self.status.get('web'):
        loc = self.status['loc']
        log.info('get list from website')
        headers = {'origin': self.urlparsed.netloc}
        # parameters
        params = {
            'd': self.urlparsed.path.split('/')[-1],
            'folder_id': self.urlparsed.query,
            'passcode': '',
            'r': str(random.random()),
            'ref': '',
        }
        r = self.s.get(GET_DIR_URL, params=params, headers=headers)
        # strip the UTF-8 BOM the API prepends before parsing
        j = json.loads(r.text.encode().decode('utf-8-sig'))
        requests_debug(r)
        if j.get('code') in (404, 503):
            log.error('dl_dir_list error: {}, {}'.format(
                self.url, j.get('message')))
            # the error payload carries no folder info, so bail out here
            # instead of raising a KeyError below
            return
        loc['name'] = j['folder_name']
        loc['url'] = j['url']  # real url
        log.info('folder name: {}'.format(loc['name']))
        log.info('folder url: {}'.format(loc['url']))
        r = self.s.get(WEBAPI_HOST + loc['url'])
        self.status['web'] = json.loads(r.text)
        self.save_status()
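# Why get_dir_list() round-trips the response through .encode().decode('utf-8-sig'):
# the directory API prefixes its JSON body with a UTF-8 BOM, which json.loads()
# rejects. A minimal self-contained demonstration (the payload is made up):
import json


def strip_bom_and_parse(text):
    # 'utf-8-sig' consumes a leading BOM if present, and is a no-op otherwise
    return json.loads(text.encode().decode('utf-8-sig'))


assert strip_bom_and_parse('\ufeff{"code": 200}') == {"code": 200}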
def run(self):
    while True:
        # step 2: fetch the download url, retrying with a fresh user-agent
        # while the API keeps demanding a verify code
        while True:
            self._params['rd'] = str(random.random())
            r = self._s.get(GET_FILE_URL2,
                            params=self._params,
                            headers=self._headers)
            j = json.loads(r.text)
            log.debug('dl-{:03d} step 2'.format(self._index + 1))
            requests_debug(r, 'dl-{:03d} '.format(self._index + 1))
            if j['code'] == 503 and j['message'] == 'require for verifycode':
                log.debug('dl-{:03d} retry'.format(self._index + 1))
                self._UA = random_ua(self._index)
                self._headers['user-agent'] = self._UA
            else:
                break
        with open(self._filename + '.ctdown', 'r+b') as f:
            f.seek(self._start)
            self._headers['Range'] = 'bytes={}-{}'.format(
                self._start, self._end)
            r = self._s.get(j['downurl'].replace(r'\/', r'/'),
                            headers=self._headers,
                            stream=True)
            log.debug('dl-{:03d} download file request: {}'.format(
                self._index + 1, r.status_code))
            if r.status_code == 503:
                log.warning(
                    'dl-{:03d} download fail, retry'.format(self._index + 1))
                self._headers['user-agent'] = random_ua(self._index)
                time.sleep(1)
            elif r.status_code == 404:
                log.error(
                    'dl-{:03d} download fail, 404'.format(self._index + 1))
                self._status = DL_Thread_status.E404
                break
            else:
                for chunk in r.iter_content(chunk_size=128):
                    f.write(chunk)
                    self._downloaded_bytes += len(chunk)
                log.debug('dl-{:03d} exit'.format(self._index + 1))
                break
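# A minimal standalone sketch of the ranged-download technique run() uses:
# ask the server for one byte slice via an HTTP Range header and write it at
# the matching offset of a preallocated file. url/path here are placeholders;
# dl() below preallocates the .ctdown file that makes 'r+b' valid.
import requests


def download_slice(url, path, start, end):
    headers = {'Range': 'bytes={}-{}'.format(start, end)}  # inclusive range
    with requests.get(url, headers=headers, stream=True) as r:
        r.raise_for_status()  # expect 206 Partial Content
        with open(path, 'r+b') as f:
            f.seek(start)  # land this slice at its own offset
            for chunk in r.iter_content(chunk_size=128):
                f.write(chunk)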
def get_osm_data(bbox, date, osm_file_name):
    got_data = False
    while not got_data:
        free_slot = False
        while not free_slot:
            r = requests.get("http://overpass-api.de/api/status").text
            if "available now" in r:
                free_slot = True
            elif "Slot available after" in r:
                rate_seconds = int(
                    r.split(", in ")[1].split(" seconds.")[0]) + 15
                log.warning(
                    f"Overpass is rate-limiting you. Gonna have to wait {rate_seconds} seconds before continuing"
                )
                time.sleep(rate_seconds)
            else:
                log.warning(
                    "Had trouble finding out about your overpass status. Waiting 1 minute before trying again"
                )
                time.sleep(60)
        log.info(
            "Getting OSM data. This will take ages if this is your first run.")
        osm_time_start = timeit.default_timer()
        nest_json = fetch_data(bbox, date)
        osm_time_stop = timeit.default_timer()
        seconds = round(osm_time_stop - osm_time_start, 1)
        if len(nest_json.get("elements", [])) == 0:
            log.error(
                f"Did not get any data from overpass in {seconds} seconds. This probably means that you were rate-limited by overpass. Sleeping 5 minutes and trying again.\nIf you want, you can share the below log entry in Discord"
            )
            log.error(nest_json.get("remark"))
            time.sleep(60 * 5)
        else:
            got_data = True

    with open(osm_file_name, mode='w', encoding="utf-8") as osm_file:
        osm_file.write(json.dumps(nest_json, indent=4))
    log.success(f"Done. Got all OSM data in {seconds} seconds and saved it.")
    return nest_json
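# The status-polling loop in get_osm_data() is duplicated in analyze_nests()
# below; this is a minimal sketch of it as a reusable helper. It assumes the
# plain-text format of http://overpass-api.de/api/status that the code above
# already relies on ("available now" / "Slot available after ..., in N seconds.").
import time

import requests


def wait_for_overpass_slot(padding=15):
    # block until the Overpass API reports a free query slot
    while True:
        status = requests.get("http://overpass-api.de/api/status").text
        if "available now" in status:
            return
        if "Slot available after" in status:
            seconds = int(status.split(", in ")[1].split(" seconds.")[0])
            time.sleep(seconds + padding)
        else:
            time.sleep(60)  # status page unreadable; back off a minute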
def get_nest_text(self, config, emote_refs=None):
    with open(f"data/mon_names/{config.language}.json", "r") as f:
        mon_names = json.load(f)
    with open("config/discord.json", "r") as f:
        template = json.load(f)
    try:
        with open("data/custom_emotes.json", "r") as f:
            emote_data = json.load(f)
    except (FileNotFoundError, json.JSONDecodeError):
        # no usable custom emote file yet: write a default set the user can edit
        emote_data = {
            "Shiny": "✨",
            "Grass": "🌿",
            "Poison": "☠",
            "Fire": "🔥",
            "Flying": "🐦",
            "Water": "💧",
            "Bug": "🐛",
            "Normal": "⭕",
            "Dark": "🌑",
            "Electric": "⚡",
            "Rock": "🗿",
            "Ground": "🌍",
            "Fairy": "🦋",
            "Fighting": "👊",
            "Psychic": "☯",
            "Steel": "🔩",
            "Ice": "❄",
            "Ghost": "👻",
            "Dragon": "🐲"
        }
        with open("data/custom_emotes.json", "w+") as f:
            f.write(json.dumps(emote_data, indent=4))
    shiny_data = requests.get(
        "https://pogoapi.net/api/v1/shiny_pokemon.json").json()
    type_data_raw = requests.get(
        "https://pogoapi.net/api/v1/pokemon_types.json").json()
    type_data = {}
    for data in type_data_raw:
        if data.get("form", "").lower() == "normal":
            type_data[int(data.get("pokemon_id", 0))] = data.get("type", [])
    filters = template[1]
    entries = ""
    entry_list = []

    # Sorting
    def sort_avg(nest):
        return nest.mon_avg

    def sort_count(nest):
        return nest.mon_count

    def sort_ratio(nest):
        return nest.mon_ratio

    def sort_mid(nest):
        return nest.mon_id

    def sort_name(nest):
        return nest.name

    sorts = {
        "mon_avg": [sort_avg, True],
        "mon_count": [sort_count, True],
        "mon_id": [sort_mid, False],
        "mon_ratio": [sort_ratio, True],
        "park_name": [sort_name, False]
    }
    sort_ = sorts[filters["sort_by"]]
    self.nests = sorted(self.nests, key=sort_[0], reverse=sort_[1])

    # staticmap gen
    #polygons = []
    markers = []
    static_map = ""
    if len(config.static_url) > 0:
        maxlat = max([n.max_lat for n in self.nests])
        minlat = min([n.min_lat for n in self.nests])
        maxlon = max([n.max_lon for n in self.nests])
        minlon = min([n.min_lon for n in self.nests])
        zoom = get_zoom([maxlat, maxlon], [minlat, minlon], 1000, 600, 256)

        def add_to_points(points, monid, lat, lon):
            points.append(
                [str(monid).zfill(3), round(lat, 6), round(lon, 6)])
            return points

        for nest in self.nests:
            points = []
            if self.settings["max_markers"] == 1:
                points = add_to_points(points, nest.mon_id, nest.lat,
                                       nest.lon)
            else:
                if nest.mon_avg > self.settings["max_markers"]:
                    avg = self.settings["max_markers"]
                else:
                    avg = round(nest.mon_avg)
                # scatter up to `avg` random markers inside the nest polygon
                while len(points) <= avg - 1:
                    pnt = geometry.Point(
                        random.uniform(nest.min_lon, nest.max_lon),
                        random.uniform(nest.min_lat, nest.max_lat))
                    if nest.polygon.contains(pnt):
                        points = add_to_points(points, nest.mon_id, pnt.y,
                                               pnt.x)
            markers += points
        center_lat = minlat + ((maxlat - minlat) / 2)
        center_lon = minlon + ((maxlon - minlon) / 2)

        def parse(var):
            return quote_plus(json.dumps(var)).replace('+', '')

        static_map_data = {
            "lat": center_lat,
            "lon": center_lon,
            "zoom": zoom,
            "nestjson": markers
        }
        static_map_raw = (config.static_url +
                          "staticmap/nests?pregenerate=true&regeneratable=true")
        result = requests.post(static_map_raw, json=static_map_data)
        if "error" in result.text:
            log.error(
                f"Error while generating Static Map:\n\n{static_map_raw}\n{result.text}\n"
            )
        static_map = config.static_url + f"staticmap/pregenerated/{result.text}"
        # fetch once so the pregenerated map exists before Discord requests it
        requests.get(static_map)

    # Text gen + filtering
    def replace(dic):
        # Formats all strings in a dict (recursing into nested dicts)
        for k, v in dic.items():
            if isinstance(v, str):
                dic[k] = v.format(nest_entry=entries,
                                  areaname=self.name,
                                  staticmap=static_map,
                                  current_time=datetime.utcnow())
            elif isinstance(v, dict):
                dic[k] = replace(v)
        return dic

    for nest in self.nests:
        if nest.mon_avg < filters["min_avg"]:
            continue
        if nest.name == nest._default_name and filters["ignore_unnamed"]:
            continue
        shiny_emote = ""
        if shiny_data.get(str(nest.mon_id), {}).get("found_wild", False):
            shiny_emote = emote_data.get("Shiny", "")
        type_emotes = []
        # default to an empty list for mons without a "normal" form entry
        types = type_data.get(nest.mon_id, [])
        for t in types:
            type_emotes.append(emote_data.get(t, ""))
        type_emote = "/".join(type_emotes)
        mon_emote = ""
        if emote_refs is not None:
            emote_id = emote_refs.get(nest.mon_id, "")
            if emote_id != "":
                mon_emote = f"<:m{nest.mon_id}:{emote_id}>"
        entry = filters["nest_entry"].format(
            park_name=nest.name,
            lat=nest.lat,
            lon=nest.lon,
            mon_id=nest.mon_id,
            mon_avg=nest.mon_avg,
            mon_count=nest.mon_count,
            mon_ratio=nest.mon_ratio * 100,
            mon_name=mon_names.get(str(nest.mon_id), ""),
            mon_emoji=mon_emote,
            type_emoji=type_emote,
            shiny=shiny_emote)
        # Discord embed descriptions cap at 2048 characters
        if len(entries) + len(entry) <= 2048:
            entries += entry
        entry_list.append(entry)

    return replace(template[0]), entry_list
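# A minimal standalone sketch of the recursive template-filling pattern used
# by replace() in get_nest_text(): walk a nested dict (the Discord embed
# template) and str.format() every string leaf. The function name and the
# example template are hypothetical.
def fill_template(template, **values):
    filled = {}
    for key, val in template.items():
        if isinstance(val, str):
            filled[key] = val.format(**values)
        elif isinstance(val, dict):
            filled[key] = fill_template(val, **values)
        else:
            filled[key] = val
    return filled


# Usage:
#   embed = {"title": "Nests in {areaname}", "image": {"url": "{staticmap}"}}
#   fill_template(embed, areaname="Central Park",
#                 staticmap="https://example.com/map.png")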
def analyze_nests(config, area, nest_mons, queries, reset_time, nodelete):

    # Getting OSM/overpass data
    osm_file_name = f"data/osm_data/{area.name} {OSM_DATE.replace(':', '')}.json"
    try:
        with open(osm_file_name, mode="r", encoding="utf-8") as osm_file:
            nest_json = json.load(osm_file)
    except (IOError, OSError):
        free_slot = False
        while not free_slot:
            r = requests.get("http://overpass-api.de/api/status").text
            if "available now" in r:
                free_slot = True
            elif "Slot available after" in r:
                rate_seconds = int(
                    r.split(", in ")[1].split(" seconds.")[0]) + 5
                log.warning(
                    f"Overpass is rate-limiting you. Gonna have to wait {rate_seconds} seconds before continuing"
                )
                time.sleep(rate_seconds)
            else:
                log.warning(
                    "Had trouble finding out about your overpass status. Waiting 1 minute before trying again"
                )
                time.sleep(60)
        log.info(
            "Getting OSM data. This will take ages if this is your first run.")
        osm_time_start = timeit.default_timer()
        # pass the cache file name too, matching get_osm_data()'s signature
        nest_json = get_osm_data(area.bbox, OSM_DATE, osm_file_name)
        osm_time_stop = timeit.default_timer()
        seconds = round(osm_time_stop - osm_time_start, 1)
        if not nest_json["elements"]:
            log.error(
                f"Did not get any data from overpass in {seconds} seconds. Because of that, the script will now error out. Please try again in a few hours, since you were rate-limited by overpass. If this still doesn't help, try splitting up your area."
            )
            log.error(nest_json.get("remark"))
            return
        with open(osm_file_name, mode='w', encoding="utf-8") as osm_file:
            osm_file.write(json.dumps(nest_json, indent=4))
        log.success(
            f"Done. Got all OSM data in {seconds} seconds and saved it.")

    # Getting area data
    area_file_name = f"data/area_data/{area.name}.json"
    area_file_data = {}
    try:
        with open(area_file_name, mode="r", encoding="utf-8") as area_file:
            log.info(
                "Found area data file. Reading and using data from it now")
            area_file_data_raw = json.load(area_file)
        for k, v in area_file_data_raw.items():
            area_file_data[int(k)] = v
    except FileNotFoundError:
        pass

    """db_file_name = f"data/db_data/{area.name}.json"
    try:
        with open(db_file_name, mode="r", encoding="utf-8") as db_file:
            db_data = json.load(db_file)
    except FileNotFoundError:
        db_data = {}"""

    if not nodelete:
        queries.nest_delete(area.sql_fence)

    log.info(
        f"Got all relevant information. Searching for nests in {area.name} now"
    )

    nodes = {}
    ways = []
    relations = []
    for element in nest_json['elements']:
        if "type" not in element:
            continue
        if element["type"] == "node":
            nodes[element["id"]] = {
                "lat": element["lat"],
                "lon": element["lon"]
            }
        elif element["type"] == "way":
            # skip ways without a usable node list ("or", not "and": the
            # original "and" raised a KeyError on ways missing the key)
            if "nodes" not in element or not element["nodes"]:
                continue
            ways.append(WayPark(element, config))
        elif element["type"] == "relation":
            # same fix as above, for relations without members
            if "members" not in element or not element["members"]:
                continue
            relations.append(RelPark(element, config))
    parks = ways + relations

    # Check Relations
    failed_nests = defaultdict(int)
    failed_nests["Total Nests found"] = 0
    double_ways = []
    start = timeit.default_timer()

    if config.less_queries:
        log.info("Getting DB data")
        all_spawns = [(str(_id), geometry.Point(lon, lat))
                      for _id, lat, lon in queries.spawns(area.sql_fence)]
        all_mons = queries.all_mons(str(tuple(nest_mons)), str(reset_time),
                                    area.sql_fence)
        all_mons = [(_id, geometry.Point(lon, lat))
                    for _id, lat, lon in all_mons]

    with Progress() as progress:
        #check_rels_task = progress.add_task("Generating Polygons", total=len(parks))
        for park in relations:
            double_ways = park.get_polygon(nodes, ways, double_ways)
            #progress.update(check_rels_task, advance=1)
        for park in ways:
            park.get_polygon(nodes)
            #progress.update(check_rels_task, advance=1)

        # merge parks that the area data file marks as connected
        for osm_id, data in area_file_data.items():
            for connect_id in data["connect"]:
                for i, park in enumerate(parks):
                    if park.id == osm_id:
                        big_park = park
                        big_park_i = i
                    if park.id == connect_id:
                        small_park = park
                        small_park_i = i
                parks[big_park_i].connect.append(connect_id)
                parks[big_park_i].polygon = cascaded_union(
                    [big_park.polygon, small_park.polygon])
                parks.pop(small_park_i)

        # NOW CHECK ALL AREAS ONE AFTER ANOTHER
        check_nest_task = progress.add_task("Nests found: 0",
                                            total=len(parks))
        nests = []
        for park in parks:
            progress.update(
                check_nest_task,
                advance=1,
                description=f"Nests found: {failed_nests['Total Nests found']}"
            )
            if not park.is_valid:
                failed_nests["Geometry is not valid"] += 1
                continue
            if not area.polygon.contains(park.polygon):
                failed_nests["Not in Geofence"] += 1
                continue
            pokestop_in = None
            stops = []
            if config.scanner == "rdm" and config.pokestop_pokemon:
                # Get all Pokestops with id, lat and lon
                for pkstp in queries.stops(park.sql_fence):
                    stops.append(str(pkstp[0]))
                pokestop_in = "'{}'".format("','".join(stops))
            if config.less_queries:
                spawns = [
                    s[0] for s in all_spawns if park.polygon.contains(s[1])
                ]
            else:
                spawns = [str(s[0]) for s in queries.spawns(park.sql_fence)]
            if not stops and not spawns:
                failed_nests["No Stops or Spawnpoints"] += 1
                continue
            if (len(stops) < 1) and (len(spawns) <
                                     area.settings['min_spawnpoints']):
                failed_nests["Not enough Spawnpoints"] += 1
                continue
            spawnpoint_in = "'{}'".format("','".join(spawns))
            if spawnpoint_in == "''":
                # a blank string shouldn't be compared against a number column
                spawnpoint_in = "NULL"
            if config.less_queries:
                mons = [s[0] for s in all_mons if park.polygon.contains(s[1])]
                if len(mons) == 0:
                    failed_nests["No Pokemon"] += 1
                    continue
                most_id = max(set(mons), key=mons.count)
                poke_data = [most_id, mons.count(most_id)]
            else:
                poke_data = queries.mons(spawnpoint_in, str(tuple(nest_mons)),
                                         str(reset_time), pokestop_in)
                if poke_data is None:
                    failed_nests["No Pokemon"] += 1
                    continue
            park.mon_data(poke_data[0], poke_data[1],
                          area.settings['scan_hours_per_day'],
                          len(spawns) + len(stops))
            if park.mon_count < area.settings['min_pokemon']:
                failed_nests["Not enough Pokemon"] += 1
                continue
            if park.mon_avg < area.settings['min_average']:
                failed_nests["Average spawnrate too low"] += 1
                continue
            if park.mon_ratio < area.settings['min_ratio']:
                failed_nests["Average spawn ratio too low"] += 1
                continue
            if park.id in double_ways:
                failed_nests["Avoiding double nests"] += 1
                continue

            park.generate_details(area_file_data,
                                  failed_nests["Total Nests found"])
            # Insert Nest data to db
            insert_args = {
                "nest_id": park.id,
                "name": park.name,
                "form": park.mon_form,
                "lat": park.lat,
                "lon": park.lon,
                "pokemon_id": park.mon_id,
                "type": 0,
                "pokemon_count": park.mon_count,
                "pokemon_avg": park.mon_avg,
                "pokemon_ratio": park.mon_ratio,
                "poly_path": json.dumps(park.path),
                "poly_type": 1 if isinstance(park, RelPark) else 0,
                "current_time": int(time.time())
            }
            failed_nests["Total Nests found"] += 1
            nests.append(park)
            queries.nest_insert(insert_args)

    stop = timeit.default_timer()
    log.success(
        f"Done finding nests in {area.name} ({round(stop - start, 1)} seconds)"
    )
    for k, v in failed_nests.items():
        log.info(f" - {k}: {v}")

    def sort_avg(nest):
        return nest.mon_avg

    new_area_data = {}
    for nest in sorted(nests, key=sort_avg, reverse=True):
        new_area_data[nest.id] = {
            "name": nest.name,
            "center": [nest.lat, nest.lon],
            "connect": nest.connect
        }
    for oid, data in area_file_data.items():
        if oid not in [n.id for n in nests]:
            new_area_data[oid] = {
                "name": data["name"],
                "center": data["center"],
                "connect": data["connect"]
            }
    with open(area_file_name, mode="w+") as area_file:
        area_file.write(json.dumps(new_area_data, indent=4))
    log.info("Saved area data")
    log.success(f"All done with {area.name}\n")

    return nests
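# The dominant nest species above is picked with max(set(mons), key=mons.count),
# which rescans the list once per unique id. A minimal equivalent sketch using
# collections.Counter, which does the same tally in a single pass (the helper
# name is hypothetical):
from collections import Counter


def most_common_mon(mons):
    """Return (mon_id, count) for the most frequent id, or None if empty."""
    if not mons:
        return None
    return Counter(mons).most_common(1)[0]


# Usage: most_common_mon([16, 16, 19]) -> (16, 2)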
def dl(self):
    log.info('download {}'.format(self.url))
    # step 1
    headers = {
        'origin': self.urlparsed.netloc,
    }
    # parameters
    params = {
        'f': self.url.split('/')[-1],
        'passcode': '',
        'r': str(random.random()),
        'ref': '',
    }
    r = self.s.get(GET_FILE_URL1, params=params, headers=headers)
    j = json.loads(r.text)
    log.debug('step 1')
    requests_debug(r)
    # link error handler
    if j.get('code') == 404:
        log.error('dl_file error: {}'.format(j.get('message')))
        if j.get('message') == DL_ERROR_FILELINKTIMEOUT:
            log.error('need to get dir list again')
        return False, j.get('message')
    if not self.filename:
        self.filename = j['file_name']
    # step 2
    params = {
        'uid': j['userid'],
        'fid': j['file_id'],
        'folder_id': 0,
        'file_chk': j['file_chk'],
        'mb': 0,
        'app': 0,
        'acheck': 1,
        'verifycode': '',
        'rd': str(random.random())
    }
    while True:
        r = self.s.get(GET_FILE_URL2, params=params, headers=headers)
        j = json.loads(r.text)
        log.debug('step 2')
        requests_debug(r)
        if j.get('code') == 503:
            params['rd'] = str(random.random())
        else:
            break
    # create an empty file of the final size so each thread can write its slice
    filename = os.path.join(self.parent_dir, self.filename)
    filesize = int(j['file_size'])
    temp_filename = filename + '.ctdown'
    log.debug('create empty file {} size {}'.format(temp_filename, filesize))
    with open(temp_filename, 'wb') as fd:
        fd.truncate(filesize)
    # download with threads
    threads = []
    for i in range(self.args.split):
        start = i * filesize // self.args.split
        end = ((i + 1) * filesize // self.args.split - 1
               if i != self.args.split - 1 else filesize)
        t = SplitThread(i, j['downurl'].replace(r'\/', r'/'), params, headers,
                        filename, start, end)
        log.debug('dl-{:03d} download range start={} end={}'.format(
            i + 1, start, end))
        threads.append(t)
        t.start()
        # time.sleep(1)
    progressbar = tqdm.tqdm(total=filesize,
                            desc=filename,
                            ascii=' #',
                            unit="B",
                            unit_scale=True,
                            unit_divisor=1024)
    downloaded_bytes = 0
    last_downloaded_bytes = 0
    download_success = True
    while downloaded_bytes < filesize:
        downloaded_bytes = 0
        for t in threads:
            if t._status == DL_Thread_status.E404:
                log.error('dl-{:03d} download {} fail'.format(
                    t._index + 1, filename))
                download_success = False
                break
            downloaded_bytes += t.downloaded_bytes()
        if not download_success:
            log.error('exit')
            break
        progressbar.update(downloaded_bytes - last_downloaded_bytes)
        last_downloaded_bytes = downloaded_bytes
        log.debug("{} {}".format(downloaded_bytes, filesize))
        time.sleep(1)
    log.debug('quit')
    for i in range(self.args.split):
        threads[i].join()
    if not download_success:
        # keep the partial .ctdown file and report failure instead of
        # renaming an incomplete download
        return False, 'download failed'
    os.rename(temp_filename, filename)
    return True, None
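# A minimal sketch of the byte-range math dl() uses to cut the file into
# `split` contiguous slices for the worker threads. Note dl() hands the last
# thread end=filesize; for a strictly inclusive Range header the last byte is
# filesize - 1 (servers clamp the overshoot, so both work in practice).
def split_ranges(filesize, split):
    ranges = []
    for i in range(split):
        start = i * filesize // split
        end = (i + 1) * filesize // split - 1 if i != split - 1 else filesize - 1
        ranges.append((start, end))
    return ranges


# Usage: split_ranges(10, 3) -> [(0, 2), (3, 5), (6, 9)]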
config.hours_since_change = int(args.hours)
log.info(
    f"Overwriting hours since change with {config.hours_since_change}")

if args.noevents:
    config.use_events = False

with open("config/areas.json", "r") as area_file:
    areas = json.load(area_file)
with open("config/settings.json", "r") as settings_file:
    settings = json.load(settings_file)

if args.area is not None:
    args.nodelete = True
    areas = [a for a in areas if a["name"] == args.area]
    if len(areas) == 0:
        log.error("Couldn't find that area. Maybe check capitalization")
        sys.exit()

reset_time = int(time.time()) - (config.hours_since_change * 3600)

defaults = {
    "min_pokemon": 9,
    "min_spawnpoints": 2,
    "min_average": 0.5,
    "min_ratio": 0,
    "scan_hours_per_day": 24,
    "max_markers": 30,
    "discord": ""
}
settings_defaults = [s for s in settings if s.get("area") == "DEFAULT"]
if len(settings_defaults) > 0: