def automatic_captcha_solve(args, status, api, captcha_url, account, wh_queue):
    """Solve a captcha challenge for an account via the 2captcha service.

    Requests a token from 2captcha for `captcha_url`, then submits it to the
    game API with verify_challenge. Progress is mirrored into `status` (shared
    status dict) and, when webhooks are enabled, pushed onto `wh_queue` as
    ('captcha', message) tuples with status encounter/error/success/failure.

    Returns True when the challenge was verified, False when the token could
    not be obtained or verification failed.
    """
    status['message'] = (
        'Account {} is encountering a captcha, starting 2captcha ' +
        'sequence.').format(account['username'])
    log.warning(status['message'])

    wh_message = {}

    if args.webhooks:
        # Initial "encounter" notification; the same dict is mutated and
        # re-queued below with the final outcome and elapsed time.
        wh_message = {
            'status_name': args.status_name,
            'status': 'encounter',
            'mode': '2captcha',
            'account': account['username'],
            'captcha': status['captcha'],
            'time': 0
        }
        wh_queue.put(('captcha', wh_message))

    # Time the token retrieval so webhooks can report how long solving took.
    time_start = now()
    captcha_token = token_request(args, status, captcha_url)
    time_elapsed = now() - time_start

    # NOTE(review): token_request presumably returns a string containing
    # 'ERROR' on failure (2captcha error codes) — confirm against its
    # implementation.
    if 'ERROR' in captcha_token:
        log.warning('Unable to resolve captcha, please check your ' +
                    '2captcha API key and/or wallet balance.')
        if args.webhooks:
            wh_message['status'] = 'error'
            wh_message['time'] = time_elapsed
            wh_queue.put(('captcha', wh_message))

        return False
    else:
        status['message'] = (
            'Retrieved captcha token, attempting to verify challenge ' +
            'for {}.').format(account['username'])
        log.info(status['message'])

        response = api.verify_challenge(token=captcha_token)
        # Re-measure: include the verification round-trip in the elapsed time.
        time_elapsed = now() - time_start
        # Checks for the presence of the 'success' key, not its value.
        if 'success' in response['responses']['VERIFY_CHALLENGE']:
            status['message'] = "Account {} successfully uncaptcha'd.".format(
                account['username'])
            log.info(status['message'])
            if args.webhooks:
                wh_message['status'] = 'success'
                wh_message['time'] = time_elapsed
                wh_queue.put(('captcha', wh_message))

            return True
        else:
            status['message'] = (
                'Account {} failed verifyChallenge, putting away ' +
                'account for now.').format(account['username'])
            log.info(status['message'])
            if args.webhooks:
                wh_message['status'] = 'failure'
                wh_message['time'] = time_elapsed
                wh_queue.put(('captcha', wh_message))

            return False
def main():
    """Program entry point: parse args, initialize services, start scanning.

    Side effects: spawns daemon worker threads (db updater, db cleaner,
    webhook updater, search overseer), may start a Flask web server, and
    calls sys.exit() on configuration errors.

    Fixes applied in review:
    - db_player_locale['language'] previously copied the 'country' value.
    - Altitude-failure log message concatenated without a separator
      ("...APIsetting to 0").
    - Status-name regex made a raw string (invalid escape sequences).
    """
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()

    set_log_and_verbosity(log)

    # Abort if only-server and no-server are used together.
    if args.only_server and args.no_server:
        log.critical(
            "You can't use no-server and only-server at the same time, silly.")
        sys.exit(1)

    # Abort if status name is not valid.
    regexp = re.compile(r'^([\w\s\-.]+)$')
    if not regexp.match(args.status_name):
        log.critical('Status name contains illegal characters.')
        sys.exit(1)

    # Stop if we're just looking for a debug dump.
    if args.dump:
        log.info('Retrieving environment info...')
        hastebin_id = get_debug_dump_link()
        log.info('Done! Your debug link: https://hastebin.com/%s.txt',
                 hastebin_id)
        sys.exit(1)

    args.root_path = os.path.dirname(os.path.abspath(__file__))

    if args.ex_gyms:
        if args.geofence_file == '':
            log.critical('A geofence is required to find EX-gyms')
            sys.exit(1)
        else:
            exgyms(args.geofence_file)
            log.info('Finished checking gyms against OSM parks, exiting')
            sys.exit(1)

    init_args(args)

    # Initialize Mr. Mime library.
    mrmime_cfg = {
        # We don't want exceptions on captchas because we handle them
        # differently.
        'exception_on_captcha': False,
        # MrMime shouldn't jitter.
        'jitter_gmo': False,
        'pgpool_system_id': args.status_name
    }
    # Don't overwrite PGPool URL if it's only set in MrMime config JSON.
    if args.pgpool_url:
        mrmime_cfg['pgpool_url'] = args.pgpool_url
    mrmime_config_file = os.path.join(os.path.dirname(__file__),
                                      'config/mrmime_config.json')
    init_mr_mime(config_file=mrmime_config_file, user_cfg=mrmime_cfg)

    # Let's not forget to run Grunt / Only needed when running with webserver.
    if not args.no_server and not validate_assets(args):
        sys.exit(1)

    position = extract_coordinates(args.location)

    # Use the latitude and longitude to get the local altitude from Google.
    (altitude, status) = get_gmaps_altitude(position[0], position[1],
                                            args.gmaps_key)
    if altitude is not None:
        log.debug('Local altitude is: %sm.', altitude)
        position = (position[0], position[1], altitude)
    else:
        if status == 'REQUEST_DENIED':
            log.error(
                'Google API Elevation request was denied. You probably ' +
                'forgot to enable elevation api in https://console.' +
                'developers.google.com/apis/api/elevation_backend/')
            sys.exit()
        else:
            # BUGFIX: the two fragments were previously joined without a
            # separator, logging "...APIsetting to 0".
            log.error('Unable to retrieve altitude from Google APIs; ' +
                      'setting to 0.')

    # NOTE(review): assumes extract_coordinates() returns a 3-tuple with a
    # default altitude when the Google lookup fails — confirm.
    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)',
             position[0], position[1], position[2])

    # Scanning toggles.
    log.info('Parsing of Pokemon %s.',
             'disabled' if args.no_pokemon else 'enabled')
    log.info('Parsing of Pokestops %s.',
             'disabled' if args.no_pokestops else 'enabled')
    log.info('Parsing of Gyms %s.',
             'disabled' if args.no_gyms else 'enabled')
    log.info('Pokemon encounters %s.',
             'enabled' if args.encounter else 'disabled')

    app = None
    if not args.no_server and not args.clear_db:
        # NOTE: str.decode() implies this code base targets Python 2.
        app = Pogom(__name__, root_path=os.path.dirname(
            os.path.abspath(__file__)).decode('utf8'))
        app.before_request(app.validate_request)
        app.set_current_location(position)

    db = startup_db(app, args.clear_db)

    # Control the search status (running or not) across threads.
    control_flags = {
        'on_demand': Event(),
        'api_watchdog': Event(),
        'search_control': Event()
    }
    for flag in control_flags.values():
        flag.clear()
    if args.on_demand_timeout > 0:
        control_flags['on_demand'].set()

    heartbeat = [now()]

    # Setup the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB Updates.
    db_updates_queue = Queue()
    if app:
        app.set_db_updates_queue(db_updates_queue)

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i),
                   args=(db_updates_queue, db))
        t.daemon = True
        t.start()

    # Database cleaner; really only need one ever.
    if args.enable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    # WH updates queue & WH unique key LFU caches.
    # The LFU caches will stop the server from resending the same data an
    # infinite number of times. The caches will be instantiated in the
    # webhook's startup code.
    wh_updates_queue = Queue()
    wh_key_cache = {}

    if not args.wh_types:
        log.info('Webhook disabled.')
    else:
        log.info('Webhook enabled for events: sending %s to %s.',
                 args.wh_types, args.webhooks)
        # Thread to process webhook updates.
        for i in range(args.wh_threads):
            log.debug('Starting wh-updater worker thread %d', i)
            t = Thread(target=wh_updater, name='wh-updater-{}'.format(i),
                       args=(args, wh_updates_queue, wh_key_cache))
            t.daemon = True
            t.start()

    if not args.only_server:
        # Speed limit.
        log.info('Scanning speed limit %s.',
                 'set to {} km/h'.format(args.kph)
                 if args.kph > 0 else 'disabled')
        log.info('High-level speed limit %s.',
                 'set to {} km/h'.format(args.hlvl_kph)
                 if args.hlvl_kph > 0 else 'disabled')

        # Check if we are able to scan.
        if not can_start_scanning(args):
            sys.exit(1)

        initialize_proxies(args)

        # Update player locale if not set correctly, yet.
        args.player_locale = PlayerLocale.get_locale(args.location)
        if not args.player_locale:
            args.player_locale = gmaps_reverse_geolocate(
                args.gmaps_key,
                args.locale,
                str(position[0]) + ', ' + str(position[1]))
            db_player_locale = {
                'location': args.location,
                'country': args.player_locale['country'],
                # BUGFIX: previously stored the 'country' value here.
                'language': args.player_locale['language'],
                'timezone': args.player_locale['timezone'],
            }
            db_updates_queue.put((PlayerLocale, {0: db_player_locale}))
        else:
            log.debug(
                'Existing player locale has been retrieved from the DB.')

        # Set To True For Fake Spawn Test Mode #
        fake_pokemon_mode = False
        ########################################

        if fake_pokemon_mode:
            log.info('** Starting a fake search **')
            search_thread = Thread(target=fake_search_thread,
                                   name='search-overseer',
                                   args=(args, position, db_updates_queue,
                                         wh_updates_queue))
        else:
            argset = (args, new_location_queue, control_flags, heartbeat,
                      db_updates_queue, wh_updates_queue)
            log.debug('Starting a %s search thread', args.scheduler)
            search_thread = Thread(target=search_overseer_thread,
                                   name='search-overseer', args=argset)

        search_thread.daemon = True
        search_thread.start()

    if args.no_server:
        # This loop allows for ctrl-c interupts to work since flask won't be
        # holding the program open. (search_thread exists here because
        # no-server and only-server are mutually exclusive, checked above.)
        while search_thread.is_alive():
            time.sleep(60)
    else:
        if args.cors:
            CORS(app)

        # No more stale JS.
        init_cache_busting(app)

        app.set_control_flags(control_flags)
        app.set_heartbeat_control(heartbeat)
        app.set_location_queue(new_location_queue)

        ssl_context = None
        if (args.ssl_certificate and args.ssl_privatekey and
                os.path.exists(args.ssl_certificate) and
                os.path.exists(args.ssl_privatekey)):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(
                args.ssl_certificate, args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose:
            app.run(threaded=True, use_reloader=False, debug=True,
                    host=args.host, port=args.port, ssl_context=ssl_context)
        else:
            app.run(threaded=True, use_reloader=False, debug=False,
                    host=args.host, port=args.port, ssl_context=ssl_context)
def main():
    """Program entry point (pause_bit/LFUCache variant): parse args,
    initialize logging, database and worker threads, then run the scanner
    and/or Flask web server.

    Fixes applied in review:
    - Altitude-failure log message concatenated without a separator
      ("...APIsetting to 0").
    - Coordinate regex made a raw string (invalid escape sequences).
    """
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()

    # Add file logging if enabled.
    if args.verbose and args.verbose != 'nofile':
        filelog = logging.FileHandler(args.verbose)
        filelog.setFormatter(logging.Formatter(
            '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] ' +
            '%(message)s'))
        logging.getLogger('').addHandler(filelog)

    if args.very_verbose and args.very_verbose != 'nofile':
        filelog = logging.FileHandler(args.very_verbose)
        filelog.setFormatter(logging.Formatter(
            '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] ' +
            '%(message)s'))
        logging.getLogger('').addHandler(filelog)

    if args.verbose or args.very_verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)

    # Let's not forget to run Grunt / Only needed when running with webserver.
    if not args.no_server:
        if not os.path.exists(
                os.path.join(os.path.dirname(__file__), 'static/dist')):
            log.critical(
                'Missing front-end assets (static/dist) -- please run ' +
                '"npm install && npm run build" before starting the server.')
            sys.exit()

        # You need custom image files now.
        if not os.path.isfile(
                os.path.join(os.path.dirname(__file__),
                             'static/icons-sprite.png')):
            log.critical('Missing sprite files.')
            sys.exit()

    # These are very noisy, let's shush them up a bit.
    logging.getLogger('peewee').setLevel(logging.INFO)
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.pgoapi').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.rpc_api').setLevel(logging.INFO)
    logging.getLogger('werkzeug').setLevel(logging.ERROR)

    config['parse_pokemon'] = not args.no_pokemon
    config['parse_pokestops'] = not args.no_pokestops
    config['parse_gyms'] = not args.no_gyms

    # Turn these back up if debugging.
    if args.verbose or args.very_verbose:
        logging.getLogger('pgoapi').setLevel(logging.DEBUG)
    if args.very_verbose:
        logging.getLogger('peewee').setLevel(logging.DEBUG)
        logging.getLogger('requests').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.pgoapi').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('werkzeug').setLevel(logging.DEBUG)

    # Use lat/lng directly if matches such a pattern.
    prog = re.compile(r"^(\-?\d+\.\d+),?\s?(\-?\d+\.\d+)$")
    res = prog.match(args.location)
    if res:
        log.debug('Using coordinates from CLI directly')
        position = (float(res.group(1)), float(res.group(2)), 0)
    else:
        log.debug('Looking up coordinates in API')
        position = util.get_pos_by_name(args.location)

    if position is None or not any(position):
        log.error("Location not found: '{}'".format(args.location))
        sys.exit()

    # Use the latitude and longitude to get the local altitude from Google.
    (altitude, status) = get_gmaps_altitude(position[0], position[1],
                                            args.gmaps_key)
    if altitude is not None:
        log.debug('Local altitude is: %sm', altitude)
        position = (position[0], position[1], altitude)
    else:
        if status == 'REQUEST_DENIED':
            log.error(
                'Google API Elevation request was denied. You probably ' +
                'forgot to enable elevation api in https://console.' +
                'developers.google.com/apis/api/elevation_backend/')
            sys.exit()
        else:
            # BUGFIX: the two fragments were previously joined without a
            # separator, logging "...APIsetting to 0".
            log.error('Unable to retrieve altitude from Google APIs; ' +
                      'setting to 0.')

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)',
             position[0], position[1], position[2])

    if args.no_pokemon:
        log.info('Parsing of Pokemon disabled.')
    if args.no_pokestops:
        log.info('Parsing of Pokestops disabled.')
    if args.no_gyms:
        log.info('Parsing of Gyms disabled.')
    if args.encounter:
        log.info('Encountering pokemon enabled.')

    config['LOCALE'] = args.locale
    config['CHINA'] = args.china

    app = Pogom(__name__)
    app.before_request(app.validate_request)

    db = init_database(app)
    if args.clear_db:
        log.info('Clearing database')
        if args.db_type == 'mysql':
            drop_tables(db)
        elif os.path.isfile(args.db):
            os.remove(args.db)
    create_tables(db)

    app.set_current_location(position)

    # Control the search status (running or not) across threads.
    pause_bit = Event()
    pause_bit.clear()
    if args.on_demand_timeout > 0:
        pause_bit.set()

    heartbeat = [now()]

    # Setup the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB Updates.
    db_updates_queue = Queue()

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i),
                   args=(args, db_updates_queue, db))
        t.daemon = True
        t.start()

    # db cleaner; really only need one ever.
    if not args.disable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    # WH updates queue & WH gym/pokéstop unique key LFU cache.
    # The LFU cache will stop the server from resending the same data an
    # infinite number of times.
    # TODO: Rework webhooks entirely so a LFU cache isn't necessary.
    wh_updates_queue = Queue()
    wh_key_cache = LFUCache(maxsize=args.wh_lfu_size)

    # Thread to process webhook updates.
    for i in range(args.wh_threads):
        log.debug('Starting wh-updater worker thread %d', i)
        t = Thread(target=wh_updater, name='wh-updater-{}'.format(i),
                   args=(args, wh_updates_queue, wh_key_cache))
        t.daemon = True
        t.start()

    if not args.only_server:
        # Processing proxies if set (load from file, check and overwrite old
        # args.proxy with new working list).
        args.proxy = check_proxies(args)

        # Run periodical proxy refresh thread.
        if (args.proxy_file is not None) and (args.proxy_refresh > 0):
            t = Thread(target=proxies_refresher, name='proxy-refresh',
                       args=(args,))
            t.daemon = True
            t.start()
        else:
            log.info('Periodical proxies refresh disabled.')

        # Gather the Pokemon!

        # Attempt to dump the spawn points (do this before starting threads of
        # endure the woe).
        if (args.spawnpoint_scanning and
                args.spawnpoint_scanning != 'nofile' and
                args.dump_spawnpoints):
            with open(args.spawnpoint_scanning, 'w+') as file:
                log.info('Saving spawn points to %s',
                         args.spawnpoint_scanning)
                spawns = Pokemon.get_spawnpoints_in_hex(
                    position, args.step_limit)
                file.write(json.dumps(spawns))
                log.info('Finished exporting spawn points')

        argset = (args, new_location_queue, pause_bit, heartbeat,
                  db_updates_queue, wh_updates_queue)

        log.debug('Starting a %s search thread', args.scheduler)
        search_thread = Thread(target=search_overseer_thread,
                               name='search-overseer', args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.cors:
        CORS(app)

    # No more stale JS.
    init_cache_busting(app)

    app.set_search_control(pause_bit)
    app.set_heartbeat_control(heartbeat)
    app.set_location_queue(new_location_queue)

    config['ROOT_PATH'] = app.root_path
    config['GMAPS_KEY'] = args.gmaps_key

    if args.no_server:
        # This loop allows for ctrl-c interupts to work since flask won't be
        # holding the program open.
        while search_thread.is_alive():
            time.sleep(60)
    else:
        ssl_context = None
        if (args.ssl_certificate and args.ssl_privatekey and
                os.path.exists(args.ssl_certificate) and
                os.path.exists(args.ssl_privatekey)):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(args.ssl_certificate,
                                        args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose or args.very_verbose:
            app.run(threaded=True, use_reloader=False, debug=True,
                    host=args.host, port=args.port, ssl_context=ssl_context)
        else:
            app.run(threaded=True, use_reloader=False, debug=False,
                    host=args.host, port=args.port, ssl_context=ssl_context)
def main():
    """Program entry point (schema-verification variant): parse args, set up
    database (with optional drop/recreate), start worker threads and the
    scanner and/or Flask web server.

    Fixes applied in review:
    - db_player_locale['language'] previously copied the 'country' value.
    - Altitude-failure log message concatenated without a separator
      ("...APIsetting to 0").
    - Coordinate regex made a raw string (invalid escape sequences).
    """
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()

    # Abort if status name is not alphanumeric.
    if not str(args.status_name).isalnum():
        log.critical('Status name must be alphanumeric.')
        sys.exit(1)

    set_log_and_verbosity(log)

    config['parse_pokemon'] = not args.no_pokemon
    config['parse_pokestops'] = not args.no_pokestops
    config['parse_gyms'] = not args.no_gyms
    config['parse_raids'] = not args.no_raids

    # Let's not forget to run Grunt / Only needed when running with webserver.
    if not args.no_server and not validate_assets(args):
        sys.exit(1)

    # Use lat/lng directly if matches such a pattern.
    prog = re.compile(r"^(\-?\d+\.\d+),?\s?(\-?\d+\.\d+)$")
    res = prog.match(args.location)
    if res:
        log.debug('Using coordinates from CLI directly')
        position = (float(res.group(1)), float(res.group(2)), 0)
    else:
        log.debug('Looking up coordinates in API')
        position = util.get_pos_by_name(args.location)

    if position is None or not any(position):
        log.error("Location not found: '{}'".format(args.location))
        sys.exit()

    # Use the latitude and longitude to get the local altitude from Google.
    (altitude, status) = get_gmaps_altitude(position[0], position[1],
                                            args.gmaps_key)
    if altitude is not None:
        log.debug('Local altitude is: %sm', altitude)
        position = (position[0], position[1], altitude)
    else:
        if status == 'REQUEST_DENIED':
            log.error(
                'Google API Elevation request was denied. You probably ' +
                'forgot to enable elevation api in https://console.' +
                'developers.google.com/apis/api/elevation_backend/')
            sys.exit()
        else:
            # BUGFIX: the two fragments were previously joined without a
            # separator, logging "...APIsetting to 0".
            log.error('Unable to retrieve altitude from Google APIs; ' +
                      'setting to 0.')

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)',
             position[0], position[1], position[2])

    if args.no_pokemon:
        log.info('Parsing of Pokemon disabled.')
    if args.no_pokestops:
        log.info('Parsing of Pokestops disabled.')
    if args.no_gyms:
        log.info('Parsing of Gyms disabled.')
    if args.encounter:
        log.info('Encountering pokemon enabled.')

    config['LOCALE'] = args.locale
    config['CHINA'] = args.china

    # If we're clearing the db, do not bother with the blacklist.
    if args.clear_db:
        args.disable_blacklist = True

    app = Pogom(__name__)
    app.before_request(app.validate_request)

    db = init_database(app)
    if args.clear_db:
        log.info('Clearing database')
        if args.db_type == 'mysql':
            drop_tables(db)
        elif os.path.isfile(args.db):
            os.remove(args.db)
    verify_database_schema(db)
    create_tables(db)

    # Fixing encoding on present and future tables.
    verify_table_encoding(db)

    if args.clear_db:
        log.info(
            'Drop and recreate is complete. Now remove -cd and restart.')
        sys.exit()

    app.set_current_location(position)

    # Control the search status (running or not) across threads.
    control_flags = {
        'on_demand': Event(),
        'api_watchdog': Event(),
        'search_control': Event()
    }

    for flag in control_flags.values():
        flag.clear()

    if args.on_demand_timeout > 0:
        control_flags['on_demand'].set()

    heartbeat = [now()]

    # Setup the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB Updates.
    db_updates_queue = Queue()

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i),
                   args=(args, db_updates_queue, db))
        t.daemon = True
        t.start()

    # db cleaner; really only need one ever.
    if not args.disable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    # WH updates queue & WH unique key LFU caches.
    # The LFU caches will stop the server from resending the same data an
    # infinite number of times. The caches will be instantiated in the
    # webhook's startup code.
    wh_updates_queue = Queue()
    wh_key_cache = {}

    # Thread to process webhook updates.
    for i in range(args.wh_threads):
        log.debug('Starting wh-updater worker thread %d', i)
        t = Thread(target=wh_updater, name='wh-updater-{}'.format(i),
                   args=(args, wh_updates_queue, wh_key_cache))
        t.daemon = True
        t.start()

    config['ROOT_PATH'] = app.root_path
    config['GMAPS_KEY'] = args.gmaps_key

    if not args.only_server:
        # Check if we are able to scan.
        if not can_start_scanning(args):
            sys.exit(1)

        # Processing proxies if set (load from file, check and overwrite old
        # args.proxy with new working list).
        args.proxy = load_proxies(args)

        if args.proxy and not args.proxy_skip_check:
            args.proxy = check_proxies(args, args.proxy)

        # Run periodical proxy refresh thread.
        if (args.proxy_file is not None) and (args.proxy_refresh > 0):
            t = Thread(target=proxies_refresher, name='proxy-refresh',
                       args=(args,))
            t.daemon = True
            t.start()
        else:
            log.info('Periodical proxies refresh disabled.')

        # Update player locale if not set correctly, yet.
        args.player_locale = PlayerLocale.get_locale(args.location)
        if not args.player_locale:
            args.player_locale = gmaps_reverse_geolocate(
                args.gmaps_key,
                args.locale,
                str(position[0]) + ', ' + str(position[1]))
            db_player_locale = {
                'location': args.location,
                'country': args.player_locale['country'],
                # BUGFIX: previously stored the 'country' value here.
                'language': args.player_locale['language'],
                'timezone': args.player_locale['timezone'],
            }
            db_updates_queue.put((PlayerLocale, {0: db_player_locale}))
        else:
            log.debug(
                'Existing player locale has been retrieved from the DB.')

        # Gather the Pokemon!

        # Attempt to dump the spawn points (do this before starting threads of
        # endure the woe).
        if (args.spawnpoint_scanning and
                args.spawnpoint_scanning != 'nofile' and
                args.dump_spawnpoints):
            with open(args.spawnpoint_scanning, 'w+') as file:
                log.info('Saving spawn points to %s',
                         args.spawnpoint_scanning)
                spawns = SpawnPoint.get_spawnpoints_in_hex(
                    position, args.step_limit)
                file.write(json.dumps(spawns))
                log.info('Finished exporting spawn points')

        argset = (args, new_location_queue, control_flags, heartbeat,
                  db_updates_queue, wh_updates_queue)

        log.debug('Starting a %s search thread', args.scheduler)
        search_thread = Thread(target=search_overseer_thread,
                               name='search-overseer', args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.cors:
        CORS(app)

    # No more stale JS.
    init_cache_busting(app)

    app.set_search_control(control_flags['search_control'])
    app.set_heartbeat_control(heartbeat)
    app.set_location_queue(new_location_queue)

    if args.no_server:
        # This loop allows for ctrl-c interupts to work since flask won't be
        # holding the program open.
        while search_thread.is_alive():
            time.sleep(60)
    else:
        ssl_context = None
        if (args.ssl_certificate and args.ssl_privatekey and
                os.path.exists(args.ssl_certificate) and
                os.path.exists(args.ssl_privatekey)):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(
                args.ssl_certificate, args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose:
            app.run(threaded=True, use_reloader=False, debug=True,
                    host=args.host, port=args.port, ssl_context=ssl_context)
        else:
            app.run(threaded=True, use_reloader=False, debug=False,
                    host=args.host, port=args.port, ssl_context=ssl_context)
def main():
    """Program entry point (dynamic-list variant): parse args, initialize
    database and worker threads, optionally monitor whitelist/blacklist
    files, then run the scanner and/or Flask web server.

    Fixes applied in review:
    - db_player_locale['language'] previously copied the 'country' value.
    - Altitude-failure log message concatenated without a separator
      ("...APIsetting to 0").
    - Status-name regex made a raw string (invalid escape sequences).
    """
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()

    set_log_and_verbosity(log)

    # Abort if only-server and no-server are used together.
    if args.only_server and args.no_server:
        log.critical(
            "You can't use no-server and only-server at the same time, silly.")
        sys.exit(1)

    # Abort if status name is not valid.
    regexp = re.compile(r'^([\w\s\-.]+)$')
    if not regexp.match(args.status_name):
        log.critical('Status name contains illegal characters.')
        sys.exit(1)

    # Stop if we're just looking for a debug dump.
    if args.dump:
        log.info('Retrieving environment info...')
        hastebin_id = get_debug_dump_link()
        log.info('Done! Your debug link: https://hastebin.com/%s.txt',
                 hastebin_id)
        sys.exit(1)

    # Let's not forget to run Grunt / Only needed when running with webserver.
    if not args.no_server and not validate_assets(args):
        sys.exit(1)

    if args.no_version_check and not args.only_server:
        log.warning('You are running RocketMap in No Version Check mode. '
                    "If you don't know what you're doing, this mode "
                    'can have negative consequences, and you will not '
                    'receive support running in NoVC mode. '
                    'You have been warned.')

    position = extract_coordinates(args.location)

    # Use the latitude and longitude to get the local altitude from Google.
    (altitude, status) = get_gmaps_altitude(position[0], position[1],
                                            args.gmaps_key)
    if altitude is not None:
        log.debug('Local altitude is: %sm.', altitude)
        position = (position[0], position[1], altitude)
    else:
        if status == 'REQUEST_DENIED':
            log.error(
                'Google API Elevation request was denied. You probably ' +
                'forgot to enable elevation api in https://console.' +
                'developers.google.com/apis/api/elevation_backend/')
            sys.exit()
        else:
            # BUGFIX: the two fragments were previously joined without a
            # separator, logging "...APIsetting to 0".
            log.error('Unable to retrieve altitude from Google APIs; ' +
                      'setting to 0.')

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt).',
             position[0], position[1], position[2])

    # Scanning toggles.
    log.info('Parsing of Pokemon %s.',
             'disabled' if args.no_pokemon else 'enabled')
    log.info('Parsing of Pokestops %s.',
             'disabled' if args.no_pokestops else 'enabled')
    log.info('Parsing of Gyms %s.',
             'disabled' if args.no_gyms else 'enabled')
    log.info('Pokemon encounters %s.',
             'enabled' if args.encounter else 'disabled')

    app = None
    if not args.no_server and not args.clear_db:
        # NOTE: str.decode() implies this code base targets Python 2.
        app = Pogom(__name__, root_path=os.path.dirname(
            os.path.abspath(__file__)).decode('utf8'))
        app.before_request(app.validate_request)
        app.set_current_location(position)

    db = startup_db(app, args.clear_db)

    args.root_path = os.path.dirname(os.path.abspath(__file__))

    # Control the search status (running or not) across threads.
    control_flags = {
        'on_demand': Event(),
        'api_watchdog': Event(),
        'search_control': Event()
    }
    for flag in control_flags.values():
        flag.clear()
    if args.on_demand_timeout > 0:
        control_flags['on_demand'].set()

    heartbeat = [now()]

    # Setup the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB Updates.
    db_updates_queue = Queue()

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i),
                   args=(db_updates_queue, db))
        t.daemon = True
        t.start()

    # Database cleaner; really only need one ever.
    if args.enable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    # WH updates queue & WH unique key LFU caches.
    # The LFU caches will stop the server from resending the same data an
    # infinite number of times. The caches will be instantiated in the
    # webhook's startup code.
    wh_updates_queue = Queue()
    wh_key_cache = {}

    if not args.wh_types:
        log.info('Webhook disabled.')
    else:
        log.info('Webhook enabled for events: sending %s to %s.',
                 args.wh_types, args.webhooks)
        # Thread to process webhook updates.
        for i in range(args.wh_threads):
            log.debug('Starting wh-updater worker thread %d', i)
            t = Thread(target=wh_updater, name='wh-updater-{}'.format(i),
                       args=(args, wh_updates_queue, wh_key_cache))
            t.daemon = True
            t.start()

    if not args.only_server:
        # Speed limit.
        log.info(
            'Scanning speed limit %s.',
            'set to {} km/h'.format(args.kph) if args.kph > 0 else 'disabled')
        log.info(
            'High-level speed limit %s.',
            'set to {} km/h'.format(
                args.hlvl_kph) if args.hlvl_kph > 0 else 'disabled')

        # Check if we are able to scan.
        if not can_start_scanning(args):
            sys.exit(1)

        initialize_proxies(args)

        # Monitor files, update data if they've changed recently.
        # Keys are 'args' object keys, values are filenames to load.
        files_to_monitor = {}

        if args.encounter:
            files_to_monitor['enc_whitelist'] = args.enc_whitelist_file
            log.info('Encounters are enabled.')
        else:
            log.info('Encounters are disabled.')

        if args.webhook_blacklist_file:
            files_to_monitor['webhook_blacklist'] = \
                args.webhook_blacklist_file
            log.info('Webhook blacklist is enabled.')
        elif args.webhook_whitelist_file:
            files_to_monitor['webhook_whitelist'] = \
                args.webhook_whitelist_file
            log.info('Webhook whitelist is enabled.')
        else:
            log.info('Webhook whitelist/blacklist is disabled.')

        if files_to_monitor:
            t = Thread(target=dynamic_loading_refresher,
                       name='dynamic-enclist',
                       args=(files_to_monitor,))
            t.daemon = True
            t.start()
            log.info('Dynamic list refresher is enabled.')
        else:
            log.info('Dynamic list refresher is disabled.')

        # Update player locale if not set correctly yet.
        args.player_locale = PlayerLocale.get_locale(args.location)
        if not args.player_locale:
            args.player_locale = gmaps_reverse_geolocate(
                args.gmaps_key,
                args.locale,
                str(position[0]) + ', ' + str(position[1]))
            db_player_locale = {
                'location': args.location,
                'country': args.player_locale['country'],
                # BUGFIX: previously stored the 'country' value here.
                'language': args.player_locale['language'],
                'timezone': args.player_locale['timezone'],
            }
            db_updates_queue.put((PlayerLocale, {0: db_player_locale}))
        else:
            log.debug('Existing player locale has been retrieved from the DB.')

        # Gather the Pokemon!
        argset = (args, new_location_queue, control_flags, heartbeat,
                  db_updates_queue, wh_updates_queue)

        log.debug('Starting a %s search thread', args.scheduler)
        search_thread = Thread(target=search_overseer_thread,
                               name='search-overseer', args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.no_server:
        # This loop allows for ctrl-c interupts to work since flask won't be
        # holding the program open. (search_thread exists here because
        # no-server and only-server are mutually exclusive, checked above.)
        while search_thread.is_alive():
            time.sleep(60)
    else:
        if args.cors:
            CORS(app)

        # No more stale JS.
        init_cache_busting(app)

        app.set_control_flags(control_flags)
        app.set_heartbeat_control(heartbeat)
        app.set_location_queue(new_location_queue)

        ssl_context = None
        if (args.ssl_certificate and args.ssl_privatekey and
                os.path.exists(args.ssl_certificate) and
                os.path.exists(args.ssl_privatekey)):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(args.ssl_certificate,
                                        args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose:
            app.run(threaded=True, use_reloader=False, debug=True,
                    host=args.host, port=args.port, ssl_context=ssl_context)
        else:
            app.run(threaded=True, use_reloader=False, debug=False,
                    host=args.host, port=args.port, ssl_context=ssl_context)
def main():
    """Application entry point (legacy server version).

    Parses CLI arguments, configures logging, resolves the scan location
    (with altitude from the Google Elevation API), initializes the database
    and worker threads (db updaters, db cleaner, webhook updaters, search
    overseer), then serves the Flask web frontend.
    """
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()

    # Add file logging if enabled.
    if args.verbose and args.verbose != 'nofile':
        filelog = logging.FileHandler(args.verbose)
        filelog.setFormatter(logging.Formatter(
            '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] %(message)s'))
        logging.getLogger('').addHandler(filelog)
    if args.very_verbose and args.very_verbose != 'nofile':
        filelog = logging.FileHandler(args.very_verbose)
        filelog.setFormatter(logging.Formatter(
            '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] %(message)s'))
        logging.getLogger('').addHandler(filelog)

    if args.verbose or args.very_verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)

    # Let's not forget to run Grunt / Only needed when running with webserver.
    if not args.no_server:
        if not os.path.exists(
                os.path.join(os.path.dirname(__file__), 'static/dist')):
            log.critical(
                'Missing front-end assets (static/dist) -- please run "npm install && npm run build" before starting the server.')
            sys.exit()

    # These are very noisy, let's shush them up a bit.
    logging.getLogger('peewee').setLevel(logging.INFO)
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.pgoapi').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.rpc_api').setLevel(logging.INFO)
    logging.getLogger('werkzeug').setLevel(logging.ERROR)

    config['parse_pokemon'] = not args.no_pokemon
    config['parse_pokestops'] = not args.no_pokestops
    config['parse_gyms'] = not args.no_gyms

    # Turn these back up if debugging.
    if args.verbose or args.very_verbose:
        logging.getLogger('pgoapi').setLevel(logging.DEBUG)
    if args.very_verbose:
        logging.getLogger('peewee').setLevel(logging.DEBUG)
        logging.getLogger('requests').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.pgoapi').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('werkzeug').setLevel(logging.DEBUG)

    # Use lat/lng directly if matches such a pattern.
    prog = re.compile(r"^(\-?\d+\.\d+),?\s?(\-?\d+\.\d+)$")
    res = prog.match(args.location)
    if res:
        log.debug('Using coordinates from CLI directly')
        position = (float(res.group(1)), float(res.group(2)), 0)
    else:
        log.debug('Looking up coordinates in API')
        position = util.get_pos_by_name(args.location)
        # Bugfix: geocoding may fail and return None (or an empty result).
        # Bail out *before* the altitude lookup below indexes position[0],
        # which would otherwise raise an uncaught TypeError.
        if position is None or not any(position):
            log.error('Could not get a position by name, aborting!')
            sys.exit()

    # Use the latitude and longitude to get the local altitude from Google.
    try:
        url = 'https://maps.googleapis.com/maps/api/elevation/json?locations={},{}'.format(
            str(position[0]), str(position[1]))
        altitude = requests.get(url).json()[u'results'][0][u'elevation']
        log.debug('Local altitude is: %sm', altitude)
        position = (position[0], position[1], altitude)
    except (requests.exceptions.RequestException, IndexError, KeyError):
        # Best-effort: position keeps its previous altitude on failure.
        log.error('Unable to retrieve altitude from Google APIs; setting to 0')

    if not any(position):
        log.error('Could not get a position by name, aborting!')
        sys.exit()

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)',
             position[0], position[1], position[2])

    if args.no_pokemon:
        log.info('Parsing of Pokemon disabled.')
    if args.no_pokestops:
        log.info('Parsing of Pokestops disabled.')
    if args.no_gyms:
        log.info('Parsing of Gyms disabled.')
    if args.encounter:
        log.info('Encountering pokemon enabled.')

    config['LOCALE'] = args.locale
    config['CHINA'] = args.china

    app = Pogom(__name__)
    db = init_database(app)
    if args.clear_db:
        log.info('Clearing database')
        if args.db_type == 'mysql':
            drop_tables(db)
        elif os.path.isfile(args.db):
            os.remove(args.db)
    create_tables(db)

    app.set_current_location(position)

    # Control the search status (running or not) across threads.
    pause_bit = Event()
    pause_bit.clear()
    if args.on_demand_timeout > 0:
        pause_bit.set()

    heartbeat = [now()]

    # Setup the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB Updates
    db_updates_queue = Queue()

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i),
                   args=(args, db_updates_queue))
        t.daemon = True
        t.start()

    # db cleaner; really only need one ever.
    if not args.disable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    # WH Updates.
    wh_updates_queue = Queue()

    # Thread to process webhook updates.
    for i in range(args.wh_threads):
        log.debug('Starting wh-updater worker thread %d', i)
        t = Thread(target=wh_updater, name='wh-updater-{}'.format(i),
                   args=(args, wh_updates_queue))
        t.daemon = True
        t.start()

    if not args.only_server:
        # Check all proxies before continue so we know they are good.
        if args.proxy and not args.proxy_skip_check:
            # Overwrite old args.proxy with new working list.
            args.proxy = check_proxies(args)

        # Gather the Pokemon!

        # Attempt to dump the spawn points (do this before starting threads
        # or endure the woe).
        if args.spawnpoint_scanning and args.spawnpoint_scanning != 'nofile' and args.dump_spawnpoints:
            with open(args.spawnpoint_scanning, 'w+') as file:
                log.info('Saving spawn points to %s', args.spawnpoint_scanning)
                spawns = Pokemon.get_spawnpoints_in_hex(
                    position, args.step_limit)
                file.write(json.dumps(spawns))
                log.info('Finished exporting spawn points')

        argset = (args, new_location_queue, pause_bit, heartbeat,
                  db_updates_queue, wh_updates_queue)
        log.debug('Starting a %s search thread', args.scheduler)
        search_thread = Thread(target=search_overseer_thread,
                               name='search-overseer', args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.cors:
        CORS(app)

    # No more stale JS.
    init_cache_busting(app)

    app.set_search_control(pause_bit)
    app.set_heartbeat_control(heartbeat)
    app.set_location_queue(new_location_queue)
    config['ROOT_PATH'] = app.root_path
    config['GMAPS_KEY'] = args.gmaps_key

    if args.no_server:
        # This loop allows for ctrl-c interupts to work since flask won't be
        # holding the program open.
        while search_thread.is_alive():
            time.sleep(60)
    else:
        ssl_context = None
        if args.ssl_certificate and args.ssl_privatekey \
                and os.path.exists(args.ssl_certificate) \
                and os.path.exists(args.ssl_privatekey):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(
                args.ssl_certificate, args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose or args.very_verbose:
            app.run(threaded=True, use_reloader=False, debug=True,
                    host=args.host, port=args.port, ssl_context=ssl_context)
        else:
            app.run(threaded=True, use_reloader=False, debug=False,
                    host=args.host, port=args.port, ssl_context=ssl_context)
def main():
    """Application entry point.

    Parses CLI arguments, validates the status name, resolves the scan
    location and altitude, sets up the database, control flags and worker
    threads, then serves the Flask web frontend.
    """
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()
    set_log_and_verbosity(log)

    # Module-level queue shared with Pogom and the db-updater threads.
    global db_updates_queue

    # Abort if status name is not valid.
    regexp = re.compile(r'^([\w\s\-.]+)$')
    if not regexp.match(args.status_name):
        log.critical('Status name contains illegal characters.')
        sys.exit(1)

    # Stop if we're just looking for a debug dump.
    if args.dump:
        log.info('Retrieving environment info...')
        hastebin_id = get_debug_dump_link()
        log.info('Done! Your debug link: https://hastebin.com/%s.txt',
                 hastebin_id)
        sys.exit(1)

    # Let's not forget to run Grunt / Only needed when running with webserver.
    if not validate_assets(args):
        sys.exit(1)

    position = extract_coordinates(args.location)

    # Use the latitude and longitude to get the local altitude from Google.
    (altitude, status) = get_gmaps_altitude(position[0], position[1],
                                            args.gmaps_key)
    if altitude is not None:
        log.debug('Local altitude is: %sm.', altitude)
        position = (position[0], position[1], altitude)
    else:
        if status == 'REQUEST_DENIED':
            log.error(
                'Google API Elevation request was denied. You probably ' +
                'forgot to enable elevation api in https://console.' +
                'developers.google.com/apis/api/elevation_backend/')
            sys.exit()
        else:
            # Bugfix: the two fragments used to concatenate into
            # "...Google APIssetting to 0" -- add the missing separator.
            log.error('Unable to retrieve altitude from Google APIs; ' +
                      'setting to 0')

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt).',
             position[0], position[1], position[2])

    # Scanning toggles.
    log.info('Parsing of Pokemon %s.',
             'disabled' if args.no_pokemon else 'enabled')
    log.info('Parsing of Pokestops %s.',
             'disabled' if args.no_pokestops else 'enabled')
    log.info('Parsing of Gyms %s.',
             'disabled' if args.no_gyms else 'enabled')
    log.info('Pokemon encounters %s.',
             'enabled' if args.encounter else 'disabled')

    app = None
    if not args.clear_db:
        app = Pogom(__name__, root_path=os.path.dirname(
            os.path.abspath(__file__)).decode('utf8'),
            db_update_queue=db_updates_queue, spawn_delay=args.spawn_delay,
            stepsize=args.stepsize, maxradius=args.maxradius,
            lure_duration=args.lure_duration)
        app.before_request(app.validate_request)
        app.set_current_location(position)

    # NOTE(review): when --clear-db is set, app stays None; this assumes
    # startup_db() exits the process in that case, otherwise the app calls
    # further down would fail on None -- confirm against startup_db().
    db = startup_db(app, args.clear_db)

    args.root_path = os.path.dirname(os.path.abspath(__file__))

    if args.ex_gyms:
        # Geofence is required.
        if not args.geofence_file:
            log.critical('A geofence is required to find EX-gyms.')
            sys.exit(1)
        update_ex_gyms(args.geofence_file)
        log.info('Finished checking gyms against OSM parks, exiting.')
        sys.exit(1)

    # Control the search status (running or not) across threads.
    control_flags = {
        'on_demand': Event(),
        'api_watchdog': Event(),
        'search_control': Event()
    }
    for flag in control_flags.values():
        flag.clear()
    if args.on_demand_timeout > 0:
        control_flags['on_demand'].set()

    heartbeat = [now()]

    # Setup the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i),
                   args=(db_updates_queue, db))
        t.daemon = True
        t.start()

    # Database cleaner; really only need one ever.
    if args.db_cleanup:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    if args.rarity_update_frequency:
        t = Thread(target=dynamic_rarity_refresher, name='dynamic-rarity')
        t.daemon = True
        t.start()
        log.info('Dynamic rarity is enabled.')
    else:
        log.info('Dynamic rarity is disabled.')

    if args.cors:
        CORS(app)

    # No more stale JS.
    init_cache_busting(app)

    app.set_control_flags(control_flags)
    app.set_heartbeat_control(heartbeat)
    app.set_location_queue(new_location_queue)

    ssl_context = None
    if (args.ssl_certificate and args.ssl_privatekey
            and os.path.exists(args.ssl_certificate)
            and os.path.exists(args.ssl_privatekey)):
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
        ssl_context.load_cert_chain(
            args.ssl_certificate, args.ssl_privatekey)
        log.info('Web server in SSL mode.')
    if args.verbose:
        app.run(threaded=True, use_reloader=False, debug=True,
                host=args.host, port=args.port, ssl_context=ssl_context)
    else:
        app.run(threaded=True, use_reloader=False, debug=False,
                host=args.host, port=args.port, ssl_context=ssl_context)
async def handle_captcha_url(args, status, api, account, account_failures,
                             account_captchas, whq, captcha_url,
                             step_location):
    """Decide what to do with an account that hit a captcha.

    Returns True when the captcha was solved automatically, False when the
    account was put away (solving disabled, auto-solve failed, or queued to
    wait for a manual token), and None when no captcha was present or the
    account/status data was missing an expected key.
    """
    try:
        if len(captcha_url) > 1:
            status['captcha'] += 1
            if not args.captcha_solving:
                status['message'] = ('Account {} has encountered a captcha. ' +
                                     'Putting account away.').format(
                                         account['username'])
                log.warning(status['message'])
                account_failures.append({
                    'account': account,
                    'last_fail_time': now(),
                    'reason': 'captcha found'
                })
                if args.webhooks:
                    wh_message = {
                        'status_name': args.status_name,
                        'status': 'encounter',
                        'mode': 'disabled',
                        'account': account['username'],
                        'captcha': status['captcha'],
                        'time': 0
                    }
                    whq.put(('captcha', wh_message))
                return False

            if args.captcha_key and args.manual_captcha_timeout == 0:
                # Bugfix: automatic_captcha_solve() is a plain function that
                # returns a bool; awaiting its result raised a TypeError at
                # runtime, so call it directly.
                if automatic_captcha_solve(args, status, api, captcha_url,
                                           account, whq):
                    return True
                else:
                    account_failures.append({
                        'account': account,
                        'last_fail_time': now(),
                        'reason': 'captcha failed to verify'
                    })
                    return False
            else:
                # Manual solving: park the account until a token arrives.
                status['message'] = ('Account {} has encountered a captcha. ' +
                                     'Waiting for token.').format(
                                         account['username'])
                log.warning(status['message'])
                account['last_active'] = datetime.utcnow()
                account['last_location'] = step_location
                account_captchas.append((status, account, captcha_url))
                if args.webhooks:
                    wh_message = {
                        'status_name': args.status_name,
                        'status': 'encounter',
                        'mode': 'manual',
                        'account': account['username'],
                        'captcha': status['captcha'],
                        'time': args.manual_captcha_timeout
                    }
                    whq.put(('captcha', wh_message))
                return False
    except KeyError as e:
        log.error('Unable to check captcha: {}'.format(e))

    return None
def main():
    """Application entry point (beehive-capable version).

    Parses CLI arguments, configures logging, computes the beehive worker
    distribution, resolves the scan location and altitude, initializes the
    database and worker threads, then serves the Flask web frontend.
    """
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()

    # Add file logging if enabled.
    if args.verbose and args.verbose != 'nofile':
        filelog = logging.FileHandler(args.verbose)
        filelog.setFormatter(logging.Formatter(
            '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] ' +
            '%(message)s'))
        logging.getLogger('').addHandler(filelog)
    if args.very_verbose and args.very_verbose != 'nofile':
        filelog = logging.FileHandler(args.very_verbose)
        filelog.setFormatter(logging.Formatter(
            '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] ' +
            '%(message)s'))
        logging.getLogger('').addHandler(filelog)

    if args.verbose or args.very_verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)

    # Let's not forget to run Grunt / Only needed when running with webserver.
    if not args.no_server:
        if not os.path.exists(
                os.path.join(os.path.dirname(__file__), 'static/dist')):
            log.critical(
                'Missing front-end assets (static/dist) -- please run ' +
                '"npm install && npm run build" before starting the server.')
            sys.exit()

        # You need custom image files now.
        if not os.path.isfile(
                os.path.join(os.path.dirname(__file__),
                             'static/icons-sprite.png')):
            log.info('Sprite files not present, extracting bundled ones...')
            extract_sprites()
            log.info('Done!')

    # Beehive configuration
    beehive_workers = [args.workers]
    if args.beehive > 0:
        beehive_size = 1
        # Calculate number of hives required ( -bh 2 => i:1, i:2 )
        for i in range(1, args.beehive + 1):
            beehive_size += i * 6
        # Initialize worker distribution list
        beehive_workers = [0] * beehive_size
        skip_indexes = []
        hives_ignored = 0
        workers_forced = 0
        log.debug('-bhw --beehive-workers: %s', args.beehive_workers)
        # Parse beehive configuration ("index:workers" entries).
        for bhw_entry in args.beehive_workers:
            bhw = bhw_entry.split(':')
            bhw_index = int(bhw[0])
            bhw_workers = int(bhw[1])
            if (bhw_index >= 0) and (bhw_index < beehive_size):
                if bhw_index in skip_indexes:
                    log.warning(
                        'Duplicate hive index found in -bhw ' +
                        '--beehive-workers: %d', bhw_index)
                    continue
                if bhw_workers <= 0:
                    # Zero (or negative) workers: ignore this hive entirely.
                    skip_indexes.append(bhw_index)
                    beehive_workers[bhw_index] = 0
                    hives_ignored += 1
                else:
                    # Force a fixed number of workers on this hive.
                    skip_indexes.append(bhw_index)
                    beehive_workers[bhw_index] = bhw_workers
                    workers_forced += bhw_workers
            else:
                log.warning(
                    'Invalid hive index found in -bhw ' +
                    '--beehive-workers: %d', bhw_index)

        # Check if we have enough workers for beehive setup.
        workers_required = workers_forced
        if args.workers_per_hive > 0:
            count = beehive_size - len(skip_indexes)
            workers_required += count * args.workers_per_hive
        log.info(
            'Beehive size: %d (%d hives ignored). Workers forced: ' +
            '%d. Workers required: %d', beehive_size, hives_ignored,
            workers_forced, workers_required)
        if args.workers < workers_required:
            log.critical('Not enough workers to fill the beehive. ' +
                         'Increase -w --workers, decrease -bh --beehive ' +
                         'or decrease -wph --workers-per-hive')
            sys.exit()

        # Assign remaining workers to available hives, round-robin.
        remaining_workers = args.workers - workers_forced
        populate_index = 0
        while remaining_workers > 0:
            beehive_index = populate_index % beehive_size
            if beehive_index in skip_indexes:
                populate_index += 1
                continue
            beehive_workers[beehive_index] += 1
            populate_index += 1
            remaining_workers -= 1
        log.debug('Beehive worker distribution: %s', beehive_workers)

    # These are very noisy, let's shush them up a bit.
    logging.getLogger('peewee').setLevel(logging.INFO)
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.pgoapi').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.rpc_api').setLevel(logging.INFO)
    logging.getLogger('werkzeug').setLevel(logging.ERROR)

    config['parse_pokemon'] = not args.no_pokemon
    config['parse_pokestops'] = not args.no_pokestops
    config['parse_gyms'] = not args.no_gyms

    # Turn these back up if debugging.
    if args.verbose or args.very_verbose:
        logging.getLogger('pgoapi').setLevel(logging.DEBUG)
    if args.very_verbose:
        logging.getLogger('peewee').setLevel(logging.DEBUG)
        logging.getLogger('requests').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.pgoapi').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('werkzeug').setLevel(logging.DEBUG)

    # Web access logs.
    if args.access_logs:
        logger = logging.getLogger('werkzeug')
        handler = logging.FileHandler('access.log')
        logger.setLevel(logging.INFO)
        logger.addHandler(handler)

    # Use lat/lng directly if matches such a pattern.
    prog = re.compile(r"^(\-?\d+\.\d+),?\s?(\-?\d+\.\d+)$")
    res = prog.match(args.location)
    if res:
        log.debug('Using coordinates from CLI directly')
        position = (float(res.group(1)), float(res.group(2)), 0)
    else:
        log.debug('Looking up coordinates in API')
        position = util.get_pos_by_name(args.location)
        if position is None or not any(position):
            log.error("Location not found: '{}'".format(args.location))
            sys.exit()

    # Use the latitude and longitude to get the local altitude from Google.
    (altitude, status) = get_gmaps_altitude(position[0], position[1],
                                            args.gmaps_key)
    if altitude is not None:
        log.debug('Local altitude is: %sm', altitude)
        position = (position[0], position[1], altitude)
    else:
        if status == 'REQUEST_DENIED':
            log.error(
                'Google API Elevation request was denied. You probably ' +
                'forgot to enable elevation api in https://console.' +
                'developers.google.com/apis/api/elevation_backend/')
            sys.exit()
        else:
            # Bugfix: missing separator used to render as "APIssetting".
            log.error('Unable to retrieve altitude from Google APIs; ' +
                      'setting to 0')

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)',
             position[0], position[1], position[2])

    if args.no_pokemon:
        log.info('Parsing of Pokemon disabled.')
    if args.no_pokestops:
        log.info('Parsing of Pokestops disabled.')
    if args.no_gyms:
        log.info('Parsing of Gyms disabled.')
    if args.encounter:
        log.info('Encountering pokemon enabled.')

    config['LOCALE'] = args.locale
    config['CHINA'] = args.china

    # if we're clearing the db, do not bother with the blacklist
    if args.clear_db:
        args.disable_blacklist = True

    app = Pogom(__name__)
    app.before_request(app.validate_request)
    db = init_database(app)
    if args.clear_db:
        log.info('Clearing database')
        if args.db_type == 'mysql':
            drop_tables(db)
        elif os.path.isfile(args.db):
            os.remove(args.db)
    create_tables(db)
    if args.clear_db:
        log.info("Drop and recreate is complete. Now remove -cd and restart.")
        sys.exit()

    app.set_current_location(position)

    # Control the search status (running or not) across threads.
    pause_bit = Event()
    pause_bit.clear()
    if args.on_demand_timeout > 0:
        pause_bit.set()

    heartbeat = [now()]

    # Setup the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB Updates
    db_updates_queue = Queue()
    app.set_db_updates_queue(db_updates_queue)

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i),
                   args=(args, db_updates_queue, db))
        t.daemon = True
        t.start()

    # db cleaner; really only need one ever.
    if not args.disable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    # WH updates queue & WH unique key LFU caches.
    # The LFU caches will stop the server from resending the same data an
    # infinite number of times. The caches will be instantiated in the
    # webhook's startup code.
    wh_updates_queue = Queue()
    wh_key_cache = {}
    app.set_wh_updates_queue(wh_updates_queue)

    # Thread to process webhook updates.
    for i in range(args.wh_threads):
        log.debug('Starting wh-updater worker thread %d', i)
        t = Thread(target=wh_updater, name='wh-updater-{}'.format(i),
                   args=(args, wh_updates_queue, wh_key_cache))
        t.daemon = True
        t.start()

    if not args.only_server:
        # Abort if we don't have a hash key set
        if not args.hash_key:
            log.critical('Hash key is required for scanning. Exiting.')
            sys.exit()

        # Processing proxies if set (load from file, check and overwrite old
        # args.proxy with new working list)
        args.proxy = check_proxies(args)

        # Run periodical proxy refresh thread
        if (args.proxy_file is not None) and (args.proxy_refresh > 0):
            t = Thread(target=proxies_refresher, name='proxy-refresh',
                       args=(args,))
            t.daemon = True
            t.start()
        else:
            log.info('Periodical proxies refresh disabled.')

        # Gather the Pokemon!

        # Attempt to dump the spawn points (do this before starting threads
        # or endure the woe).
        if (args.spawnpoint_scanning and
                args.spawnpoint_scanning != 'nofile' and
                args.dump_spawnpoints):
            with open(args.spawnpoint_scanning, 'w+') as file:
                log.info('Saving spawn points to %s', args.spawnpoint_scanning)
                spawns = Pokemon.get_spawnpoints_in_hex(
                    position, args.step_limit)
                file.write(json.dumps(spawns))
                log.info('Finished exporting spawn points')

        argset = (args, beehive_workers, new_location_queue, pause_bit,
                  heartbeat, db_updates_queue, wh_updates_queue)
        log.debug('Starting a %s search thread', args.scheduler)
        search_thread = Thread(target=search_overseer_thread,
                               name='search-overseer', args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.cors:
        CORS(app)

    # No more stale JS.
    init_cache_busting(app)

    app.set_search_control(pause_bit)
    app.set_heartbeat_control(heartbeat)
    app.set_location_queue(new_location_queue)
    config['ROOT_PATH'] = app.root_path
    config['GMAPS_KEY'] = args.gmaps_key

    if args.no_server:
        # NOTE(review): search_thread is only created when only_server is
        # false; combining --only-server with --no-server would raise a
        # NameError here -- confirm the arg parser forbids that combination.
        # This loop allows for ctrl-c interupts to work since flask won't be
        # holding the program open.
        while search_thread.is_alive():
            time.sleep(60)
    else:
        ssl_context = None
        if (args.ssl_certificate and args.ssl_privatekey
                and os.path.exists(args.ssl_certificate)
                and os.path.exists(args.ssl_privatekey)):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(
                args.ssl_certificate, args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose or args.very_verbose:
            app.run(threaded=True, use_reloader=False, debug=True,
                    host=args.host, port=args.port, ssl_context=ssl_context)
        else:
            app.run(threaded=True, use_reloader=False, debug=False,
                    host=args.host, port=args.port, ssl_context=ssl_context)