def gen_csv_counted(filename, dbfile):
    """Export per-spawn encounter counts from *dbfile* to the CSV *filename*.

    Aggregates encounters by (spawn_id, pokemon_id), decodes each spawn id
    (an S2 cell token) to lat/lng, and writes one row per pair, ordered by
    pokemon id ascending and count descending.

    filename -- path of the CSV file to (over)write
    dbfile   -- path of the sqlite DB containing the `encounters` table
    """
    pname = get_pokenames('pokes.txt')
    dbin = sqlite3.connect(dbfile)
    try:
        # One GROUP BY replaces the original per-pokemon-id query loop and
        # the scratch in-memory count table. Gen-1 ids are 1..151 (the old
        # range(151) silently skipped id 151).
        rows = dbin.execute(
            "SELECT spawn_id, pokemon_id, COUNT(*) FROM encounters "
            "WHERE spawn_id IS NOT NULL AND pokemon_id BETWEEN 1 AND 151 "
            "GROUP BY spawn_id, pokemon_id "
            "ORDER BY pokemon_id ASC, COUNT(*) DESC").fetchall()
    finally:
        dbin.close()
    with open(filename, 'w') as f:
        f.write('spawn_id, latitude, longitude, count, pokemon_id, pokemon_name\n')
        for s, p, c in rows:
            # spawn_id is an S2 cell token; decode it to coordinates.
            _ll = CellId.from_token(s).to_lat_lng()
            lat, lng = _ll.lat().degrees, _ll.lng().degrees
            f.write("{},{},{},{},{},{}\n".format(s, lat, lng, c, p, pname[p]))
    print('Done!')
def writeplans():
    """Write one .plan JSON file per (token, subplan index) pair.

    Snapshots `list_plans` under `lock_plans`, expands every token into
    `subplans` sub-plan dicts centred on the token's S2 cell, then dumps
    each dict to `<plandir>/<token>_<index>_<subplans>.plan`.
    """
    subplans = request.args.get('subplans', type=int)
    lock_plans.acquire()
    plans = []
    for token in list_plans:
        cell_center = LatLng.from_point(
            Cell(CellId.from_token(token)).get_center())
        lat_deg = cell_center.lat().degrees
        lng_deg = cell_center.lng().degrees
        plans.extend({
            'type': 'seikur0_s2',
            'token': token,
            'location': [lat_deg, lng_deg],
            'subplans': subplans,
            'subplan_index': sub_idx
        } for sub_idx in range(1, subplans + 1))
    lock_plans.release()
    for plan in plans:
        filename = '{}_{}_{}.plan'.format(plan['token'],
                                          plan['subplan_index'],
                                          plan['subplans'])
        try:
            f = open(plandir + '/' + filename, 'w', 0)
            json.dump(plan, f, indent=1, separators=(',', ': '))
            print('[+] Plan file {} was written.'.format(filename))
        except Exception as e:
            print(
                '[+] Error while writing plan file, error : {}'.format(e))
        finally:
            # Close the handle if the open() above succeeded.
            if 'f' in vars() and not f.closed:
                f.close()
    return jsonify("")
def load(cell_id):
    """Load the portal shard containing *cell_id* into the in-memory indexes.

    Shards live at data/<level-5 token>/<level-10 token>.npy. On success the
    shard's records are added to `pos`, bucketed into `lv16`/`lv15`/`lv14`
    by parent token, and appended to the global `portals` array. Each shard
    is loaded at most once (tracked in `lv10`); a missing shard is ignored.
    """
    global portals
    lv10_token = cell_id.parent(10).to_token()  # hoisted: used three times below
    if lv10_token in lv10:
        return  # shard already loaded
    try:
        data = np.load(
            f"data/{cell_id.parent(5).to_token()}/{lv10_token}.npy")
    except Exception:
        # Best effort: shard file absent (area never exported) -> nothing to do.
        return
    lv10.append(lv10_token)
    for rec in data:
        pos[rec['cellid']] = (rec['lat'], rec['lng'])
        rec_cell = CellId.from_token(rec['cellid'])
        # Bucket the record under its level-16/15/14 parents for fast lookup.
        lv16.setdefault(rec_cell.parent(16).to_token(), []).append(rec)
        lv15.setdefault(rec_cell.parent(15).to_token(), []).append(rec)
        lv14.setdefault(rec_cell.parent(14).to_token(), []).append(rec)
    portals = np.concatenate((portals, data))
def gen_que(ndbfile, fdbfile):
    """Rebuild the `_queue` table in *ndbfile* from the spawns in *fdbfile*.

    Tallies spawns per level-14 S2 cell, then refills `_queue` with the
    sub-cells of every level-14 cell, busiest cells first.

    Returns True on success, False when the fastmap DB is missing.
    """
    if not os.path.isfile(fdbfile):
        log.critical('Fastmap DB missing!!')
        log.info('Run bootstrap.py!')
        return False
    fdb = sqlite3.connect(fdbfile)
    try:
        spawns = [row[0] for row in fdb.cursor().execute(
            "SELECT spawn_id FROM 'spawns' ORDER BY spawn_id").fetchall()]
    finally:
        fdb.close()  # throwaway read-only connection; close it explicitly
    # Tally spawns per level-14 parent cell in a scratch in-memory table.
    dbtmp = sqlite3.connect(':memory:')
    tmp_cur = dbtmp.cursor()
    tmp_cur.execute("CREATE TABLE queque (\
            cell VARCHAR PRIMARY KEY,\
            count INT DEFAULT (0) )")
    for spawn in spawns:
        cellid = CellId.from_token(spawn).parent(14).to_token()
        # Parameterized statements instead of str.format-built SQL.
        tmp_cur.execute(
            "INSERT OR IGNORE INTO queque (cell) VALUES (?)", (cellid,))
        tmp_cur.execute(
            "UPDATE queque SET count = count + 1 WHERE cell = ?", (cellid,))
    cells = [row[0] for row in tmp_cur.execute(
        "SELECT cell FROM 'queque' ORDER BY count DESC").fetchall()]
    dbtmp.close()
    # Expand each level-14 cell into its sub-cells and queue them.
    db = sqlite3.connect(ndbfile)
    cur = db.cursor()
    cur.execute("DELETE FROM _queue")
    for cell in cells:
        for subcell in sub_cells(CellId.from_token(cell)):
            cur.execute(
                "INSERT OR IGNORE INTO _queue (cell_id) VALUES (?)",
                (subcell.to_token(),))
    cur.execute("VACUUM")
    db.commit()
    log.info('Scan queue generated.')
    return True
def getPortals(cell):
    """Return the portal records from *cell*'s level-10 shard that lie
    inside *cell*; an empty list when the shard file does not exist."""
    try:
        shard = np.load(
            f"data/{cell.parent(5).to_token()}/{cell.parent(10).to_token()}.npy"
        )
    except Exception:
        # Best effort: no shard on disk means no portals in this area.
        return []
    return [p for p in shard if cell.contains(CellId.from_token(p['cellid']))]
def gen_csv(filename, dbfile):
    """Export every encounter with a known spawn point from *dbfile* to CSV.

    filename -- path of the CSV file to (over)write
    dbfile   -- path of the sqlite DB containing the `encounters` table
    """
    pname = get_pokenames('pokes.txt')
    db = sqlite3.connect(dbfile)
    try:
        spwns = db.execute(
            "SELECT spawn_id, pokemon_id FROM encounters "
            "WHERE spawn_id IS NOT NULL").fetchall()
    finally:
        db.close()
    with open(filename, 'w') as f:
        f.write('spawn_id, latitude, longitude, pokemon_id, pokemon_name\n')
        for s, p in spwns:
            # spawn_id is an S2 cell token; decode it to coordinates.
            _ll = CellId.from_token(s).to_lat_lng()
            lat, lng = _ll.lat().degrees, _ll.lng().degrees
            f.write("{},{},{},{},{}\n".format(s, lat, lng, p, pname[p]))
    print('Done!')
def main():
    """Seed db2.sqlite's `queque` table with level-15 cells derived from the
    spawn points in db.sqlite, tallying spawns per cell in `spawn_count`."""
    if not os.path.isfile('db.sqlite'):
        print('Fastmap DB missing!')
        # Bail out: connecting would just create an empty DB and the
        # SELECT below would fail. (The original fell through here.)
        return
    if not os.path.isfile('db2.sqlite'):
        # Bootstrap the output DB from the shipped template.
        if os.name == 'nt':
            os.system('copy db2.temp db2.sqlite')
        elif os.name == 'posix':
            os.system('cp db2.temp db2.sqlite')
    fdb = sqlite3.connect('db.sqlite')
    spawns = [row[0] for row in fdb.cursor().execute(
        "SELECT spawn_id FROM 'spawns' ORDER BY spawn_id").fetchall()]
    fdb.close()
    db = sqlite3.connect('db2.sqlite')
    cur = db.cursor()
    for spawn in spawns:
        cellid = CellId.from_token(spawn).parent(15).to_token()
        # Parameterized statements instead of str.format-built SQL.
        cur.execute(
            "INSERT OR IGNORE INTO queque (cell_id) VALUES (?)", (cellid,))
        cur.execute(
            "UPDATE queque SET spawn_count = spawn_count + 1 WHERE cell_id = ?",
            (cellid,))
    db.commit()
    # print() call (the original used the Python-2 print statement).
    print('Done!')
# Expand every token in the shared `list_plans` (snapshotted under
# `lock_plans`) into `subplans` sub-plan dicts centred on the token's S2
# cell, then dump each as JSON to `<plandir>/<token>_<index>_<subplans>.plan`.
# NOTE(review): functionally identical to the other writeplans() in this
# file -- presumably two copies of the same endpoint; consider deduplicating.
# NOTE(review): open(..., 'w', 0) (unbuffered text mode) is valid only on
# Python 2; Python 3 raises ValueError -- confirm the target interpreter.
def writeplans(): subplans = request.args.get('subplans', type=int) plans = [] lock_plans.acquire() for token in list_plans: center = LatLng.from_point(Cell(CellId.from_token(token)).get_center()) center = (center.lat().degrees, center.lng().degrees) for ind_sub in range(1,subplans+1): plans.append({'type': 'seikur0_s2', 'token': token, 'location': [center[0],center[1]], 'subplans': subplans, 'subplan_index': ind_sub}) lock_plans.release() for plan in plans: filename = '{}_{}_{}.plan'.format(plan['token'],plan['subplan_index'],plan['subplans']) try: f = open(plandir+'/'+filename, 'w', 0) json.dump(plan, f, indent=1, separators=(',', ': ')) print('[+] Plan file {} was written.'.format(filename)) except Exception as e: print('[+] Error while writing plan file, error : {}'.format(e)) finally: if 'f' in vars() and not f.closed: f.close() return jsonify("")
# Scanner main loop: page through `_queue` (offset = (step-1)*limit), hit the
# API once per queued cell, and record nearby/catchable pokemon into the
# `encounters` table. When a watch-listed pokemon is seen nearby, it
# "traverses": re-scans the cell's sub-cells with 100 m cover circles to pin
# the spawn down, until all targets are found. Re-login on
# NotLoggedInException; regenerates the queue via gen_que() when empty or
# when config.regen is set.
# NOTE(review): the INSERT OR REPLACE string literal is split across the two
# original physical lines below -- extraction artifact of this view; in the
# real file it must be one contiguous literal. Verify against the repository.
# NOTE(review): hex(...).strip('L') targets Python 2 long literals; on
# Python 3 hex() never appends 'L'.
def main(): config = init_config() if not config: return watchlist = get_watchlist('watch.txt') pokenames = get_pokenames('pokes.txt') db = sqlite3.connect(config.ndbfile) dbc = db.cursor() run = 1 while run: _ccnt, y, z = 1, config.limit, (config.step - 1) * config.limit dbc.execute("SELECT cell_id FROM _queue ORDER BY cell_id LIMIT %d,%d" % (z, y)) del y # http://stackoverflow.com/questions/3614277/how-to-strip-from-python-pyodbc-sql-returns scan_queque = [x[0] for x in dbc.fetchall()] if config.regen or len(scan_queque) == 0: log.info('Generating scan queue...') if gen_que(config.ndbfile, config.fdbfile): continue else: return api = api_init(config) if api == None: log.error('Login failed!') return else: log.info('API online! starting Scan...') time.sleep(5) for queq in scan_queque: try: _ecnt = [0, 0] traverse = 0 targets = [] cell = CellId.from_token(queq) lat = CellId.to_lat_lng(cell).lat().degrees lng = CellId.to_lat_lng(cell).lng().degrees cell_ids = [cell.id()] response_dict = get_response(api, cell_ids, lat, lng) log.info('Scanning cell {} of {}.'.format( _ccnt + z, z + (len(scan_queque)))) for _map_cell in response_dict['responses']['GET_MAP_OBJECTS'][ 'map_cells']: if 'nearby_pokemons' in _map_cell: for _poke in _map_cell['nearby_pokemons']: _ecnt[0] += 1 _s = hex(_poke['encounter_id']) _c = CellId(_map_cell['s2_cell_id']).to_token() dbc.execute( "INSERT OR IGNORE INTO encounters (encounter_id, cell_id, pokemon_id, encounter_time) VALUES ('{}','{}',{},{})" "".format( _s.strip('L'), _c, _poke['pokemon_id'], int(_map_cell['current_timestamp_ms'] / 1000))) if _poke['pokemon_id'] in watchlist: traverse = 1 targets.append(_poke['encounter_id']) log.info('{} nearby!'.format( pokenames[_poke['pokemon_id']])) if 'catchable_pokemons' in _map_cell: for _poke in _map_cell['catchable_pokemons']: _ecnt[1] += 1 _s = hex(_poke['encounter_id']) dbc.execute( "INSERT OR REPLACE INTO encounters (spawn_id, encounter_id, pokemon_id, encounter_time, expire_time) VALUES 
('{}','{}',{},{},{})" "".format( _poke['spawn_point_id'], _s.strip('L'), _poke['pokemon_id'], int(_map_cell['current_timestamp_ms'] / 1000), int(_poke['expiration_timestamp_ms'] / 1000))) db.commit() if traverse: _remaining = len(targets) log.info('Narrow search for %d Pokemon...' % len(targets)) time.sleep(config.delay) _scnt = 1 subcells = susub_cells(cell) for _sub in subcells: log.debug('Scanning subcell {} of up to 16.'.format( _scnt, (len(scan_queque)))) lat = CellId.to_lat_lng(_sub).lat().degrees lng = CellId.to_lat_lng(_sub).lng().degrees cell_ids = get_cell_ids(cover_circle(lat, lng, 100)) try: response_dict = get_response( api, cell_ids, lat, lng) except NotLoggedInException: api = None api = api_init(config) response_dict = get_response( api, cell_ids, lat, lng) for _map_cell in response_dict['responses'][ 'GET_MAP_OBJECTS']['map_cells']: if 'catchable_pokemons' in _map_cell: for _poke in _map_cell['catchable_pokemons']: _ecnt[1] += 1 _s = hex(_poke['encounter_id']) dbc.execute( "INSERT OR REPLACE INTO encounters (spawn_id, encounter_id, pokemon_id, encounter_time, expire_time) VALUES ('{}','{}',{},{},{})" "".format( _poke['spawn_point_id'], _s.strip('L'), _poke['pokemon_id'], int(_map_cell[ 'current_timestamp_ms'] / 1000), int(_poke[ 'expiration_timestamp_ms'] / 1000))) if _poke['encounter_id'] in targets: log.info('Tracked down {}!'.format( pokenames[_poke['pokemon_id']])) _remaining -= 1 log.info('%d Pokemon remaining...' % _remaining) if _remaining <= 0: break time.sleep(config.delay) _scnt += 1 db.commit() log.info("Encounters: {} coarse, {} fine...".format(*_ecnt)) time.sleep(config.delay) _ccnt += 1 except NotLoggedInException: api = None break log.info("Rinsing 'n' Repeating...")
# Script fragment (the enclosing loop over the raw portal dump starts before
# this view): finishes assigning 'cellid'/'id' to each portal record and
# grouping records into `areas` keyed by a parent token, saves each group to
# data_fc/<key>.npy, then re-reads every data_fc shard and regroups its
# records by level-10 parent token into data/<shard-stem>/<lv10-token>.npy.
# `p` appears to be a progress bar (p.start/p.update/p.finish) -- TODO confirm.
# NOTE(review): np.load(f'data_fc/(unknown)') looks like a garbled
# f'data_fc/{filename}' interpolation (the loop variable is `filename`);
# verify against the original repository.
areas[parent] = [] data[i]['cellid'] = cell_id.to_token() data[i]['id'] = total areas[parent].append(data[i]) total += 1 p.update(total) for key, value in areas.items(): np.save(f"data_fc/{key}.npy", value) p.finish() # data_fc -> data p.start(PORTAL_NUM) total = 0 for filename in os.listdir('data_fc/'): data = np.load(f'data_fc/(unknown)') areas = {} for i in range(len(data)): cell_id = CellId.from_token(data[i]['cellid']) parent = cell_id.parent(10).to_token() if parent not in areas.keys(): areas[parent] = [] areas[parent].append(data[i]) total += 1 p.update(total) os.makedirs(f"data/{filename[:-4]}") for key, value in areas.items(): np.save(f"data/{filename[:-4]}/{key}.npy", value) p.finish()
def _token_to_cell(s2_address):
    """Build an S2 ``Cell`` object from its token string."""
    cell_id = CellId.from_token(s2_address)
    return Cell(cell_id)
# BFS over the portal graph starting at (start_lat, start_lng): two portals
# are connected when one lies inside a 500 m covering rectangle of the other
# (shards are lazily pulled in via load()). Tracks, per visited token, its
# BFS layer and predecessor in `jump`; records the portal farthest (haversine)
# from the start, then walks predecessors back from that portal and writes an
# IITC-style drawing (one polyline plus a 500 m circle per hop) to
# path_<lat>_<lng>.json. Relies on the module-level `pos`/`lv14`/`lv15`/`lv16`
# indexes that load() fills.
def path(start_lat, start_lng): cell_id = CellId.from_lat_lng( LatLng.from_degrees(float(start_lat), float(start_lng))) token = cell_id.to_token() jump = {} jump[token] = {'layer': 0, 'pre': ''} queue = [token] total = 0 start = token mxdis = 0 mxpor = None prelayer = -1 while len(queue) > 0: total += 1 token = queue[0] queue = queue[1:] cell_id = CellId.from_token(token) load(cell_id) if jump[token]['layer'] != prelayer: prelayer = jump[token]['layer'] print(jump[token]['layer'], total, mxdis) if geo.haversine(pos[start], pos[token]) > mxdis: mxdis = geo.haversine(pos[start], pos[token]) mxpor = token cells = getCoveringRect(pos[token][0], pos[token][1], 500) for cell in cells: load(cell) if cell.level() == 16: if cell.to_token() in lv16.keys(): for pp in lv16[cell.to_token()]: if pp['cellid'] not in jump.keys(): jump[pp['cellid']] = { 'layer': jump[token]['layer'] + 1, 'pre': token } queue.append(pp['cellid']) elif cell.level() == 15: if cell.to_token() in lv15.keys(): for pp in lv15[cell.to_token()]: if pp['cellid'] not in jump.keys(): jump[pp['cellid']] = { 'layer': jump[token]['layer'] + 1, 'pre': token } queue.append(pp['cellid']) elif cell.level() == 14: if cell.to_token() in lv14.keys(): for pp in lv14[cell.to_token()]: if pp['cellid'] not in jump.keys(): jump[pp['cellid']] = { 'layer': jump[token]['layer'] + 1, 'pre': token } queue.append(pp['cellid']) print(mxdis) token = mxpor draw = [{ "type": "polyline", "latLngs": [{ "lat": pos[token][0], "lng": pos[token][1] }], "color": "#a24ac3" }] while jump[token]['layer'] > 0: token = jump[token]['pre'] draw.append({ "type": "circle", "latLng": { "lat": pos[token][0], "lng": pos[token][1] }, "radius": 500, "color": "#a24ac3" }) draw[0]['latLngs'].append({"lat": pos[token][0], "lng": pos[token][1]}) with open(f'path_{start_lat}_{start_lng}.json', 'w') as f: json.dump(draw, f)
def subtask(filename, t, h):
    """Union-find pass over one portal shard (data_fc/<filename>).

    Links portals that fall inside each other's 500 m covering rectangles.
    Links inside the shard are merged locally via union-find on portal ids;
    links crossing the shard boundary are emitted to queue *h* as
    (other_id, this_id) pairs for a later global merge. Finally the whole
    local head mapping is flushed to *h* and the shared progress counter in
    queue *t* is incremented.

    filename -- shard file name, '<cell token>.npy'
    t        -- queue holding a single shared progress counter
    h        -- queue receiving (id, id) union pairs
    """
    parent = CellId.from_token(filename[:-4])
    # Source showed f'data_fc/(unknown)' -- a garbled {filename}
    # interpolation; '{filename}' is the only in-scope name that fits.
    data = np.load(f'data_fc/{filename}')
    lv16 = {}
    lv15 = {}
    lv14 = {}
    # head: portal id -> union-find parent; every id starts as its own root.
    head = {}
    for rec in data:
        head[rec['id']] = rec['id']

    def find(x):
        # Union-find root lookup with path compression.
        if head[head[x]] != head[x]:
            head[x] = find(head[x])
        return head[x]

    # Bucket every portal under its level-16/15/14 parent tokens.
    for rec in data:
        rec_cell = CellId.from_token(rec['cellid'])
        lv16.setdefault(rec_cell.parent(16).to_token(), []).append(rec)
        lv15.setdefault(rec_cell.parent(15).to_token(), []).append(rec)
        lv14.setdefault(rec_cell.parent(14).to_token(), []).append(rec)

    for rec in data:
        cells = getCoveringRect(rec['lat'], rec['lng'], 500)
        root = find(rec['id'])
        for cell in cells:
            if parent.contains(cell) == False:
                # Neighbour cell lies outside this shard: defer the union
                # to the global merge step via the result queue.
                for pp in getPortals(cell):
                    if pp['id'] != rec['id']:
                        h.put((pp['id'], rec['id']))
            elif cell.level() == 16:
                for pp in lv16.get(cell.to_token(), []):
                    if pp['id'] != rec['id']:
                        head[find(pp['id'])] = root
            elif cell.level() == 15:
                for pp in lv15.get(cell.to_token(), []):
                    if pp['id'] != rec['id']:
                        head[find(pp['id'])] = root
            elif cell.level() == 14:
                for pp in lv14.get(cell.to_token(), []):
                    if pp['id'] != rec['id']:
                        head[find(pp['id'])] = root

    total = t.get(True)
    t.put(total + 1)
    for key, value in head.items():
        h.put((key, value))
def s2_to_parent(s2_address):
    """Return the token of the immediate parent cell of the given S2 token."""
    parent_cell = CellId.from_token(s2_address).parent()
    return parent_cell.to_token()
def s2_to_children(s2_address):
    """Return the tokens of the child cells of the given S2 cell token."""
    cell_id = CellId.from_token(s2_address)
    tokens = []
    for child in cell_id.children():
        tokens.append(child.to_token())
    return tokens
# Fort/spawn mapper: drain the `_queue` rows at config.level, scan each cell
# (either a 1500 m circle in test mode or the cell's sub-cells), and record
# gyms/pokestops into `forts`, spawn points into `spawns`, and a per-cell
# content bitmask (bit 0 spawns, bit 1 pokestop, bit 2 gym) into `cells`.
# Scanned cells are deleted from `_queue`; per-cell and total stats logged.
# Re-login on NotLoggedInException.
# NOTE(review): the second INSERT INTO forts string literal is split across
# the two original physical lines below -- extraction artifact of this view;
# in the real file it must be one contiguous literal. Verify against the
# repository.
def main(): config = init_config() if not config: log.error('Configuration Error!'); return db = sqlite3.connect(config.dbfile) db_cur = db.cursor() db_cur.execute("SELECT cell_id FROM '_queue' WHERE cell_level = %d ORDER BY cell_id" % config.level) _tstats = [0, 0, 0, 0] scan_queque = [x[0] for x in db_cur.fetchall()] # http://stackoverflow.com/questions/3614277/how-to-strip-from-python-pyodbc-sql-returns if len(scan_queque) == 0: log.info('Nothing to scan!'); return api = api_init(config) if api == None: log.error('Login failed!'); return else: log.info('API online! Scan starts in 5sec...') time.sleep(5) for que in scan_queque: cell_ids = [] _content = 0 _tstats[0] += 1 _cstats = [0, 0, 0] log.info('Scan {} of {}.'.format(_tstats[0],(len(scan_queque)))) cell = CellId.from_token(que) _ll = CellId.to_lat_lng(cell) lat, lng, alt = _ll.lat().degrees, _ll.lng().degrees, 0 if config.test: cell_ids = get_cell_ids(lat, lng, 1500) else: cells = susub_cells(cell) cell_ids = sorted([x.id() for x in cells]) try: response_dict = get_response(cell_ids, lat, lng, alt, api,config) except NotLoggedInException: del api; api = api_init(config) response_dict = get_response(cell_ids, lat, lng, alt, api,config) for _map_cell in response_dict['responses']['GET_MAP_OBJECTS']['map_cells']: _cell = CellId(_map_cell['s2_cell_id']).to_token() if 'forts' in _map_cell: for _frt in _map_cell['forts']: if 'gym_points' in _frt: _cstats[0]+=1 _type = 0 _content = set_bit(_content, 2) db_cur.execute("INSERT OR IGNORE INTO forts (fort_id, cell_id, pos_lat, pos_lng, fort_enabled, fort_type, last_scan) " "VALUES ('{}','{}',{},{},{},{},{})".format(_frt['id'],_cell,_frt['latitude'],_frt['longitude'], \ int(_frt['enabled']),0,int(_map_cell['current_timestamp_ms']/1000))) else: _type = 1; _cstats[1]+=1 _content = set_bit(_content, 1) db_cur.execute("INSERT OR IGNORE INTO forts (fort_id, cell_id, pos_lat, pos_lng, fort_enabled, fort_type, last_scan) " "VALUES 
('{}','{}',{},{},{},{},{})".format(_frt['id'],_cell,_frt['latitude'],_frt['longitude'], \ int(_frt['enabled']),1,int(_map_cell['current_timestamp_ms']/1000))) if 'spawn_points' in _map_cell: _content = set_bit(_content, 0) for _spwn in _map_cell['spawn_points']: _cstats[2]+=1; spwn_id = CellId.from_lat_lng(LatLng.from_degrees(_spwn['latitude'],_spwn['longitude'])).parent(20).to_token() db_cur.execute("INSERT OR IGNORE INTO spawns (spawn_id, cell_id, pos_lat, pos_lng, last_scan) " "VALUES ('{}','{}',{},{},{})".format(spwn_id,_cell,_spwn['latitude'],_spwn['longitude'],int(_map_cell['current_timestamp_ms']/1000))) if 'decimated_spawn_points' in _map_cell: _content = set_bit(_content, 0) for _spwn in _map_cell['decimated_spawn_points']: _cstats[2]+=1; spwn_id = CellId.from_lat_lng(LatLng.from_degrees(_spwn['latitude'],_spwn['longitude'])).parent(20).to_token() db_cur.execute("INSERT OR IGNORE INTO spawns (spawn_id, cell_id, pos_lat, pos_lng, last_scan) " "VALUES ('{}','{}',{},{},{})".format(spwn_id,_cell,_spwn['latitude'],_spwn['longitude'],int(_map_cell['current_timestamp_ms']/1000))) db_cur.execute("INSERT OR IGNORE INTO cells (cell_id, content, last_scan) " "VALUES ('{}', {}, {})".format(_cell,_content,int(_map_cell['current_timestamp_ms']/1000))) _tstats[1] += _cstats[0]; _tstats[2] += _cstats[1]; _tstats[3] += _cstats[2] db_cur.execute("DELETE FROM _queue WHERE cell_id='{}'".format(cell.to_token())) log.info("Found {} Gyms, {} Pokestops, {} Spawns. Sleeping...".format(*_cstats)) db.commit() time.sleep(int(config.delay)) log.info('Scanned {} cells; got {} Gyms, {} Pokestops, {} Spawns.'.format(*_tstats))
# Nest scanner main loop: repeatedly pull the least-scanned cells from
# `queque` (ordered by spawn_count), scan each, and log nearby/catchable
# pokemon into `encounters`. Seeing a watch-listed pokemon nearby triggers a
# narrow search of the cell's sub-cells (100 m circles) until all targets
# are tracked down. Re-login on NotLoggedInException.
# NOTE(review): the first `db.commit` (after the catchable_pokemons block)
# is missing parentheses, so that commit is a no-op; data is only persisted
# by the later db.commit() call. Bug -- should be db.commit().
# NOTE(review): the first INSERT OR REPLACE string literal is split across
# the two original physical lines below -- extraction artifact of this view;
# in the real file it must be one contiguous literal.
def main(): config = init_config() if not config: return if not os.path.isfile('db2.sqlite'): log.error('DB not found - please run nestgen.py!') return watchlist = get_watchlist('watch.txt') pokenames = get_pokenames('pokes.txt') log.info("DB ok. Loggin' in...") api = api_init(config) if api == None: log.error('Login failed!') return else: log.info('API online! Scan starts in 5sec...') time.sleep(5) db = sqlite3.connect('db2.sqlite') db_cur = db.cursor() run = 1 while run: _ccnt = 1 db_cur.execute( "SELECT cell_id FROM queque ORDER BY spawn_count LIMIT %d" % config.limit) # http://stackoverflow.com/questions/3614277/how-to-strip-from-python-pyodbc-sql-returns scan_queque = [x[0] for x in db_cur.fetchall()] if len(scan_queque) == 0: log.info('Nothing to scan!') return for queq in scan_queque: _ecnt = [0, 0] traverse = 0 targets = [] cell = CellId.from_token(queq) _ll = CellId.to_lat_lng(cell) lat, lng, alt = _ll.lat().degrees, _ll.lng().degrees, 0 cell_ids = [cell.id()] try: response_dict = get_response(cell_ids, lat, lng, alt, api, config) except NotLoggedInException: del api api = api_init(config) response_dict = get_response(cell_ids, lat, lng, alt, api, config) log.info('Scanning macrocell {} of {}.'.format( _ccnt, (len(scan_queque)))) for _map_cell in response_dict['responses']['GET_MAP_OBJECTS'][ 'map_cells']: if 'nearby_pokemons' in _map_cell: for _poke in _map_cell['nearby_pokemons']: _ecnt[0] += 1 _s = hex(_poke['encounter_id']) db_cur.execute( "INSERT OR IGNORE INTO encounters (encounter_id, pokemon_id, encounter_time) VALUES ('{}',{},{})" "".format( _s.strip('L'), _poke['pokemon_id'], int(_map_cell['current_timestamp_ms'] / 1000))) if _poke['pokemon_id'] in watchlist: traverse = 1 targets.append(_poke['encounter_id']) log.info('{} nearby!'.format( pokenames[_poke['pokemon_id']])) if 'catchable_pokemons' in _map_cell: for _poke in _map_cell['catchable_pokemons']: _ecnt[1] += 1 _s = hex(_poke['encounter_id']) db_cur.execute( "INSERT OR REPLACE INTO 
encounters (spawn_id, encounter_id, pokemon_id, encounter_time, expire_time) VALUES ('{}','{}',{},{},{})" "".format( _poke['spawn_point_id'], _s.strip('L'), _poke['pokemon_id'], int(_map_cell['current_timestamp_ms'] / 1000), int(_poke['expiration_timestamp_ms'] / 1000))) db.commit if traverse: _remaining = len(targets) log.info('Narrow search for %d Pokemon...' % len(targets)) time.sleep(config.delay) _scnt = 1 subcells = susub_cells(cell) for _sub in subcells: log.info('Scanning subcell {} of up to 16.'.format( _scnt, (len(scan_queque)))) _ll = CellId.to_lat_lng(_sub) lat, lng, alt = _ll.lat().degrees, _ll.lng().degrees, 0 cell_ids = get_cell_ids(lat, lng, 100) try: response_dict = get_response(cell_ids, lat, lng, alt, api, config) except NotLoggedInException: del api api = api_init(config) response_dict = get_response(cell_ids, lat, lng, alt, api, config) for _map_cell in response_dict['responses'][ 'GET_MAP_OBJECTS']['map_cells']: if 'catchable_pokemons' in _map_cell: for _poke in _map_cell['catchable_pokemons']: _ecnt[1] += 1 _s = hex(_poke['encounter_id']) db_cur.execute( "INSERT OR REPLACE INTO encounters (spawn_id, encounter_id, pokemon_id, encounter_time, expire_time) VALUES ('{}','{}',{},{},{})" "".format( _poke['spawn_point_id'], _s.strip('L'), _poke['pokemon_id'], int(_map_cell['current_timestamp_ms'] / 1000), int(_poke['expiration_timestamp_ms'] / 1000))) if _poke['encounter_id'] in targets: log.info('Tracked down {}!'.format( pokenames[_poke['pokemon_id']])) _remaining -= 1 log.info('%d Pokemon remaining...' % _remaining) if _remaining <= 0: break time.sleep(config.delay) _scnt += 1 db.commit() log.info("Encounters: {} coarse, {} fine...".format(*_ecnt)) time.sleep(config.delay) _ccnt += 1 log.info("Rinsing 'n' Repeating...")