def validate_assets(args):
    assets_error_log = (
        'Missing front-end assets (static/dist) -- please run ' +
        '"npm install && npm run build" before starting the server.')
    root_path = os.path.dirname(__file__)

    if not os.path.exists(os.path.join(root_path, 'static/dist')):
        log.critical(assets_error_log)
        return False

    static_path = os.path.join(root_path, 'static/js')
    for file in os.listdir(static_path):
        if file.endswith(".js"):
            generated_path = os.path.join(
                static_path, '../dist/js/', file.replace(".js", ".min.js"))
            source_path = os.path.join(static_path, file)
            if not os.path.exists(generated_path) or (
                    os.path.getmtime(source_path) >
                    os.path.getmtime(generated_path)):
                log.critical(assets_error_log)
                return False

    # You need custom image files now.
    if not os.path.isfile(
            os.path.join(root_path, 'static/icons-sprite.png')):
        log.info('Sprite files not present, extracting bundled ones...')
        extract_sprites(root_path)
        log.info('Done!')

    # Check if custom.css is used, otherwise fall back to default.
    if os.path.exists(os.path.join(root_path, 'static/css/custom.css')):
        args.custom_css = True
        log.info('File "custom.css" found, applying user-defined settings.')
    else:
        args.custom_css = False
        log.info('No file "custom.css" found, using default settings.')

    # Check if custom.js is used, otherwise fall back to default.
    if os.path.exists(os.path.join(root_path, 'static/js/custom.js')):
        args.custom_js = True
        log.info('File "custom.js" found, applying user-defined settings.')
    else:
        args.custom_js = False
        log.info('No file "custom.js" found, using default settings.')

    return True
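
# --- Illustrative example (not part of the original file) ---
# A minimal sketch of the staleness check validate_assets() performs above:
# a built asset is considered stale when it is missing or when its source
# .js file has a newer modification time than the built .min.js. The
# function name and paths below are hypothetical, for demonstration only.
def _example_asset_is_stale(source_path, generated_path):
    # Missing build output always counts as stale.
    if not os.path.exists(generated_path):
        return True
    # Source edited after the last build means the build is out of date.
    return os.path.getmtime(source_path) > os.path.getmtime(generated_path)

# Example usage (hypothetical paths):
#   _example_asset_is_stale('static/js/map.js', 'static/dist/js/map.min.js')
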

def main():
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()

    init_mr_mime(config_file='config/mrmime_config.json')

    # Add file logging if enabled.
    if args.verbose and args.verbose != 'nofile':
        filelog = logging.FileHandler(args.verbose)
        filelog.setFormatter(logging.Formatter(
            '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] ' +
            '%(message)s'))
        logging.getLogger('').addHandler(filelog)
    if args.very_verbose and args.very_verbose != 'nofile':
        filelog = logging.FileHandler(args.very_verbose)
        filelog.setFormatter(logging.Formatter(
            '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] ' +
            '%(message)s'))
        logging.getLogger('').addHandler(filelog)

    if args.verbose or args.very_verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)

    # Let's not forget to run Grunt / only needed when running the web server.
    if not args.no_server:
        root_path = os.path.dirname(__file__)
        if not os.path.exists(os.path.join(root_path, 'static/dist')):
            log.critical(
                'Missing front-end assets (static/dist) -- please run ' +
                '"npm install && npm run build" before starting the server.')
            sys.exit()

        # You need custom image files now.
        if not os.path.isfile(
                os.path.join(root_path, 'static/icons-sprite.png')):
            log.info('Sprite files not present, extracting bundled ones...')
            extract_sprites(root_path)
            log.info('Done!')

        # Check if custom.css is used, otherwise fall back to default.
        if os.path.exists(os.path.join(root_path, 'static/css/custom.css')):
            args.custom_css = True
            log.info(
                'File "custom.css" found, applying user-defined settings.')
        else:
            args.custom_css = False
            log.info('No file "custom.css" found, using default settings.')

    # These are very noisy, let's shush them up a bit.
    logging.getLogger('peewee').setLevel(logging.INFO)
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.pgoapi').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.rpc_api').setLevel(logging.INFO)
    logging.getLogger('werkzeug').setLevel(logging.ERROR)

    config['parse_pokemon'] = not args.no_pokemon
    config['parse_pokestops'] = not args.no_pokestops
    config['parse_gyms'] = not args.no_gyms

    # Turn these back up if debugging.
    if args.verbose or args.very_verbose:
        logging.getLogger('pgoapi').setLevel(logging.DEBUG)
    if args.very_verbose:
        logging.getLogger('peewee').setLevel(logging.DEBUG)
        logging.getLogger('requests').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.pgoapi').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('werkzeug').setLevel(logging.DEBUG)

    # Web access logs.
    if args.access_logs:
        logger = logging.getLogger('werkzeug')
        handler = logging.FileHandler('access.log')
        logger.setLevel(logging.INFO)
        logger.addHandler(handler)

    # Use lat/lng directly if it matches such a pattern.
    prog = re.compile(r"^(\-?\d+\.\d+),?\s?(\-?\d+\.\d+)$")
    res = prog.match(args.location)
    if res:
        log.debug('Using coordinates from CLI directly')
        position = (float(res.group(1)), float(res.group(2)), 0)
    else:
        log.debug('Looking up coordinates in API')
        position = util.get_pos_by_name(args.location)

    if position is None or not any(position):
        log.error("Location not found: '{}'".format(args.location))
        sys.exit()

    # Use the latitude and longitude to get the local altitude from Google.
    (altitude, status) = get_gmaps_altitude(position[0], position[1],
                                            args.gmaps_key)
    if altitude is not None:
        log.debug('Local altitude is: %sm', altitude)
        position = (position[0], position[1], altitude)
    else:
        if status == 'REQUEST_DENIED':
            log.error(
                'Google API Elevation request was denied. You probably ' +
                'forgot to enable elevation api in https://console.' +
                'developers.google.com/apis/api/elevation_backend/')
            sys.exit()
        else:
            log.error('Unable to retrieve altitude from Google APIs, ' +
                      'setting to 0.')

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)',
             position[0], position[1], position[2])

    if args.no_pokemon:
        log.info('Parsing of Pokemon disabled.')
    if args.no_pokestops:
        log.info('Parsing of Pokestops disabled.')
    if args.no_gyms:
        log.info('Parsing of Gyms disabled.')
    if args.encounter:
        log.info('Encountering pokemon enabled.')

    config['LOCALE'] = args.locale
    config['CHINA'] = args.china

    # If we're clearing the db, do not bother with the blacklist.
    if args.clear_db:
        args.disable_blacklist = True

    app = Pogom(__name__)
    app.before_request(app.validate_request)

    db = init_database(app)
    if args.clear_db:
        log.info('Clearing database')
        if args.db_type == 'mysql':
            drop_tables(db)
        elif os.path.isfile(args.db):
            os.remove(args.db)
    verify_database_schema(db)

    create_tables(db)

    # Fix encoding on present and future tables.
    verify_table_encoding(db)

    if args.clear_db:
        log.info('Drop and recreate is complete. Now remove -cd and restart.')
        sys.exit()

    app.set_current_location(position)

    # Control the search status (running or not) across threads.
    pause_bit = Event()
    pause_bit.clear()
    if args.on_demand_timeout > 0:
        pause_bit.set()

    heartbeat = [now()]

    # Set up the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB updates.
    db_updates_queue = Queue()
    app.set_db_updates_queue(db_updates_queue)

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i),
                   args=(args, db_updates_queue, db))
        t.daemon = True
        t.start()

    # DB cleaner; really only need one ever.
    if not args.disable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    # WH updates queue & WH unique key LFU caches.
    # The LFU caches will stop the server from resending the same data an
    # infinite number of times. The caches will be instantiated in the
    # webhook's startup code.
    wh_updates_queue = Queue()
    wh_key_cache = {}
    app.set_wh_updates_queue(wh_updates_queue)

    # Thread to process webhook updates.
    for i in range(args.wh_threads):
        log.debug('Starting wh-updater worker thread %d', i)
        t = Thread(target=wh_updater, name='wh-updater-{}'.format(i),
                   args=(args, wh_updates_queue, wh_key_cache))
        t.daemon = True
        t.start()

    config['ROOT_PATH'] = app.root_path
    config['GMAPS_KEY'] = args.gmaps_key

    if not args.only_server:
        # Abort if we don't have a hash key set.
        if not args.hash_key:
            log.critical('Hash key is required for scanning. Exiting.')
            sys.exit()

        # Processing proxies if set (load from file, check and overwrite old
        # args.proxy with new working list).
        args.proxy = check_proxies(args)

        # Run periodical proxy refresh thread.
        if (args.proxy_file is not None) and (args.proxy_refresh > 0):
            t = Thread(target=proxies_refresher,
                       name='proxy-refresh', args=(args,))
            t.daemon = True
            t.start()
        else:
            log.info('Periodical proxies refresh disabled.')

        # Find the reverse geolocation.
        geolocator = GoogleV3(api_key=args.gmaps_key)
        args.player_locale = {
            'country': 'US',
            'language': args.locale,
            'timezone': 'America/Denver'
        }
        try:
            location = geolocator.reverse(args.location)
            country = (
                location[-1].raw['address_components'][-1]['short_name'])
            try:
                timezone = geolocator.timezone(args.location)
                args.player_locale.update({
                    'country': country,
                    'timezone': str(timezone)
                })
            except Exception as e:
                log.warning(
                    'Exception while obtaining Google Timezone. ' +
                    'Key probably not enabled: %s.', repr(e))
        except Exception as e:
            log.warning('Exception while obtaining player locale: %s.',
                        repr(e))

        # Gather the Pokemon!

        # Attempt to dump the spawn points (do this before starting threads
        # or endure the woe).
        if (args.spawnpoint_scanning and
                args.spawnpoint_scanning != 'nofile' and
                args.dump_spawnpoints):
            with open(args.spawnpoint_scanning, 'w+') as file:
                log.info('Saving spawn points to %s',
                         args.spawnpoint_scanning)
                spawns = Pokemon.get_spawnpoints_in_hex(
                    position, args.step_limit)
                file.write(json.dumps(spawns))
                log.info('Finished exporting spawn points')

        argset = (args, new_location_queue, pause_bit, heartbeat,
                  db_updates_queue, wh_updates_queue)

        log.debug('Starting a %s search thread', args.scheduler)
        search_thread = Thread(target=search_overseer_thread,
                               name='search-overseer', args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.cors:
        CORS(app)

    # No more stale JS.
    init_cache_busting(app)

    app.set_search_control(pause_bit)
    app.set_heartbeat_control(heartbeat)
    app.set_location_queue(new_location_queue)

    if args.no_server:
        # This loop allows for ctrl-c interrupts to work since flask won't
        # be holding the program open.
        while search_thread.is_alive():
            time.sleep(60)
    else:
        ssl_context = None
        if (args.ssl_certificate and args.ssl_privatekey and
                os.path.exists(args.ssl_certificate) and
                os.path.exists(args.ssl_privatekey)):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(
                args.ssl_certificate, args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose or args.very_verbose:
            app.run(threaded=True, use_reloader=False, debug=True,
                    host=args.host, port=args.port, ssl_context=ssl_context)
        else:
            app.run(threaded=True, use_reloader=False, debug=False,
                    host=args.host, port=args.port, ssl_context=ssl_context)
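
# --- Illustrative example (not part of the original file) ---
# A minimal sketch of the queue + daemon-thread worker pattern main() uses
# for its db-updater and wh-updater threads: producers put work on a Queue,
# daemon threads drain it, and the process can exit without joining them.
# The worker body is a hypothetical stand-in for db_updater()/wh_updater();
# Queue, Thread and log are the same names this module already imports.
def _example_worker_pool(num_threads=2):
    work_queue = Queue()

    def worker(q):
        while True:
            item = q.get()       # Blocks until a producer puts work on.
            log.debug('Processed item: %s', item)
            q.task_done()        # Lets q.join() track completion.

    for i in range(num_threads):
        t = Thread(target=worker, name='example-worker-{}'.format(i),
                   args=(work_queue,))
        # Daemon threads don't keep the process alive on shutdown,
        # mirroring how main() starts its updater threads.
        t.daemon = True
        t.start()
    return work_queue

# Example usage:
#   q = _example_worker_pool()
#   q.put('some-update')
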

def main():
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()

    # Add file logging if enabled.
    if args.verbose and args.verbose != 'nofile':
        filelog = logging.FileHandler(args.verbose)
        filelog.setFormatter(logging.Formatter(
            '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] ' +
            '%(message)s'))
        logging.getLogger('').addHandler(filelog)
    if args.very_verbose and args.very_verbose != 'nofile':
        filelog = logging.FileHandler(args.very_verbose)
        filelog.setFormatter(logging.Formatter(
            '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] ' +
            '%(message)s'))
        logging.getLogger('').addHandler(filelog)

    if args.verbose or args.very_verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)

    # Let's not forget to run Grunt / only needed when running the web server.
    if not args.no_server:
        if not os.path.exists(
                os.path.join(os.path.dirname(__file__), 'static/dist')):
            log.critical(
                'Missing front-end assets (static/dist) -- please run ' +
                '"npm install && npm run build" before starting the server.')
            sys.exit()

        # You need custom image files now.
        if not os.path.isfile(
                os.path.join(os.path.dirname(__file__),
                             'static/icons-sprite.png')):
            log.info('Sprite files not present, extracting bundled ones...')
            extract_sprites()
            log.info('Done!')

    # These are very noisy, let's shush them up a bit.
    logging.getLogger('peewee').setLevel(logging.INFO)
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.pgoapi').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.rpc_api').setLevel(logging.INFO)
    logging.getLogger('werkzeug').setLevel(logging.ERROR)

    config['parse_pokemon'] = not args.no_pokemon
    config['parse_pokestops'] = not args.no_pokestops
    config['parse_gyms'] = not args.no_gyms

    # Turn these back up if debugging.
    if args.verbose or args.very_verbose:
        logging.getLogger('pgoapi').setLevel(logging.DEBUG)
    if args.very_verbose:
        logging.getLogger('peewee').setLevel(logging.DEBUG)
        logging.getLogger('requests').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.pgoapi').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('werkzeug').setLevel(logging.DEBUG)

    # Web access logs.
    if args.access_logs:
        logger = logging.getLogger('werkzeug')
        handler = logging.FileHandler('access.log')
        logger.setLevel(logging.INFO)
        logger.addHandler(handler)

    # Use lat/lng directly if it matches such a pattern.
    prog = re.compile(r"^(\-?\d+\.\d+),?\s?(\-?\d+\.\d+)$")
    res = prog.match(args.location)
    if res:
        log.debug('Using coordinates from CLI directly')
        position = (float(res.group(1)), float(res.group(2)), 0)
    else:
        log.debug('Looking up coordinates in API')
        position = util.get_pos_by_name(args.location)

    if position is None or not any(position):
        log.error("Location not found: '{}'".format(args.location))
        sys.exit()

    # Use the latitude and longitude to get the local altitude from Google.
    (altitude, status) = get_gmaps_altitude(position[0], position[1],
                                            args.gmaps_key)
    if altitude is not None:
        log.debug('Local altitude is: %sm', altitude)
        position = (position[0], position[1], altitude)
    else:
        if status == 'REQUEST_DENIED':
            log.error(
                'Google API Elevation request was denied. You probably ' +
                'forgot to enable elevation api in https://console.' +
                'developers.google.com/apis/api/elevation_backend/')
            sys.exit()
        else:
            log.error('Unable to retrieve altitude from Google APIs, ' +
                      'setting to 0.')

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)',
             position[0], position[1], position[2])

    if args.no_pokemon:
        log.info('Parsing of Pokemon disabled.')
    if args.no_pokestops:
        log.info('Parsing of Pokestops disabled.')
    if args.no_gyms:
        log.info('Parsing of Gyms disabled.')
    if args.encounter:
        log.info('Encountering pokemon enabled.')

    config['LOCALE'] = args.locale
    config['CHINA'] = args.china

    app = Pogom(__name__)
    app.before_request(app.validate_request)

    db = init_database(app)

    if args.accountdb:
        accounts = Account.get_all()
        args.accounts = []
        for account in accounts:
            if account['enabled']:
                args.accounts.append({
                    'username': account['name'],
                    'password': account['password'],
                    'auth_service': account['login_type']
                })

    connections = args.db_max_connections
    if len(args.accounts) > 0:
        connections *= len(args.accounts)
    db.max_connections = connections

    if args.clear_db:
        log.info('Clearing database')
        if args.db_type == 'mysql':
            drop_tables(db)
        elif os.path.isfile(args.db):
            os.remove(args.db)
    create_tables(db)

    app.set_current_location(position)

    # Control the search status (running or not) across threads.
    pause_bit = Event()
    pause_bit.clear()
    if args.on_demand_timeout > 0:
        pause_bit.set()

    heartbeat = [now()]

    # Set up the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB updates.
    db_updates_queue = Queue()

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i),
                   args=(args, db_updates_queue, db))
        t.daemon = True
        t.start()

    # DB cleaner; really only need one ever.
    if not args.disable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    # WH updates queue & WH gym/pokéstop unique key LFU cache.
    # The LFU cache will stop the server from resending the same data an
    # infinite number of times.
    # TODO: Rework webhooks entirely so an LFU cache isn't necessary.
    wh_updates_queue = Queue()
    wh_key_cache = LFUCache(maxsize=args.wh_lfu_size)

    # Thread to process webhook updates.
    for i in range(args.wh_threads):
        log.debug('Starting wh-updater worker thread %d', i)
        t = Thread(target=wh_updater, name='wh-updater-{}'.format(i),
                   args=(args, wh_updates_queue, wh_key_cache))
        t.daemon = True
        t.start()

    if not args.only_server:
        # Processing proxies if set (load from file, check and overwrite old
        # args.proxy with new working list).
        args.proxy = check_proxies(args)

        # Run periodical proxy refresh thread.
        if (args.proxy_file is not None) and (args.proxy_refresh > 0):
            t = Thread(target=proxies_refresher,
                       name='proxy-refresh', args=(args,))
            t.daemon = True
            t.start()
        else:
            log.info('Periodical proxies refresh disabled.')

        # Gather the Pokemon!

        # Attempt to dump the spawn points (do this before starting threads
        # or endure the woe).
        if (args.spawnpoint_scanning and
                args.spawnpoint_scanning != 'nofile' and
                args.dump_spawnpoints):
            with open(args.spawnpoint_scanning, 'w+') as file:
                log.info('Saving spawn points to %s',
                         args.spawnpoint_scanning)
                spawns = Pokemon.get_spawnpoints_in_hex(
                    position, args.step_limit)
                file.write(json.dumps(spawns))
                log.info('Finished exporting spawn points')

        argset = (args, new_location_queue, pause_bit, heartbeat,
                  db_updates_queue, wh_updates_queue)

        log.debug('Starting a %s search thread', args.scheduler)
        search_thread = Thread(target=search_overseer_thread,
                               name='search-overseer', args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.cors:
        CORS(app)

    # No more stale JS.
    init_cache_busting(app)

    app.set_search_control(pause_bit)
    app.set_heartbeat_control(heartbeat)
    app.set_location_queue(new_location_queue)

    config['ROOT_PATH'] = app.root_path
    config['GMAPS_KEY'] = args.gmaps_key

    if args.no_server:
        # This loop allows for ctrl-c interrupts to work since flask won't
        # be holding the program open.
        while search_thread.is_alive():
            time.sleep(60)
    else:
        ssl_context = None
        if (args.ssl_certificate and args.ssl_privatekey and
                os.path.exists(args.ssl_certificate) and
                os.path.exists(args.ssl_privatekey)):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(
                args.ssl_certificate, args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose or args.very_verbose:
            app.run(threaded=True, use_reloader=False, debug=True,
                    host=args.host, port=args.port, ssl_context=ssl_context)
        else:
            app.run(threaded=True, use_reloader=False, debug=False,
                    host=args.host, port=args.port, ssl_context=ssl_context)
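
# --- Illustrative example (not part of the original file) ---
# A minimal sketch of how an LFU cache can deduplicate webhook payloads,
# which is what the wh_key_cache above is for: an update is only forwarded
# when its key is absent or its value changed, and LFU eviction keeps the
# cache bounded. Assumes the cachetools package (which provides an LFUCache
# like the one used above); the key/payload shapes are hypothetical.
def _example_should_send(cache, key, payload):
    from cachetools import LFUCache  # noqa: F401 (assumption, see above)
    if cache.get(key) == payload:
        return False         # Identical data already sent; skip it.
    cache[key] = payload     # Store/overwrite; LFU evicts cold keys.
    return True

# Example usage (hypothetical data):
#   cache = LFUCache(maxsize=1000)
#   _example_should_send(cache, 'gym-123', {'team': 2})  # True (new)
#   _example_should_send(cache, 'gym-123', {'team': 2})  # False (duplicate)
#   _example_should_send(cache, 'gym-123', {'team': 3})  # True (changed)
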

def main():
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()

    # Add file logging if enabled.
    if args.verbose and args.verbose != 'nofile':
        filelog = logging.FileHandler(args.verbose)
        filelog.setFormatter(logging.Formatter(
            '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] ' +
            '%(message)s'))
        logging.getLogger('').addHandler(filelog)
    if args.very_verbose and args.very_verbose != 'nofile':
        filelog = logging.FileHandler(args.very_verbose)
        filelog.setFormatter(logging.Formatter(
            '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] ' +
            '%(message)s'))
        logging.getLogger('').addHandler(filelog)

    if args.verbose or args.very_verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)

    # Let's not forget to run Grunt / only needed when running the web server.
    if not args.no_server:
        if not os.path.exists(
                os.path.join(os.path.dirname(__file__), 'static/dist')):
            log.critical(
                'Missing front-end assets (static/dist) -- please run ' +
                '"npm install && npm run build" before starting the server.')
            sys.exit()

        # You need custom image files now.
        if not os.path.isfile(
                os.path.join(os.path.dirname(__file__),
                             'static/icons-sprite.png')):
            log.info('Sprite files not present, extracting bundled ones...')
            extract_sprites()
            log.info('Done!')

    # Beehive configuration.
    beehive_workers = [args.workers]
    if args.beehive > 0:
        beehive_size = 1
        # Calculate number of hives required ( -bh 2 => i:1, i:2 ).
        for i in range(1, args.beehive + 1):
            beehive_size += i * 6
        # Initialize worker distribution list.
        beehive_workers = [0 for x in range(beehive_size)]
        skip_indexes = []
        hives_ignored = 0
        workers_forced = 0
        log.debug('-bhw --beehive-workers: %s', args.beehive_workers)
        # Parse beehive configuration.
        for i in range(0, len(args.beehive_workers)):
            bhw = args.beehive_workers[i].split(':')
            bhw_index = int(bhw[0])
            bhw_workers = int(bhw[1])
            if (bhw_index >= 0) and (bhw_index < beehive_size):
                if bhw_index in skip_indexes:
                    log.warning(
                        'Duplicate hive index found in -bhw ' +
                        '--beehive-workers: %d', bhw_index)
                    continue
                if bhw_workers <= 0:
                    skip_indexes.append(bhw_index)
                    beehive_workers[bhw_index] = 0
                    hives_ignored += 1
                else:
                    skip_indexes.append(bhw_index)
                    beehive_workers[bhw_index] = bhw_workers
                    workers_forced += bhw_workers
            else:
                log.warning(
                    'Invalid hive index found in -bhw ' +
                    '--beehive-workers: %d', bhw_index)

        # Check if we have enough workers for beehive setup.
        workers_required = workers_forced
        if args.workers_per_hive > 0:
            count = beehive_size - len(skip_indexes)
            workers_required += count * args.workers_per_hive

        log.info(
            'Beehive size: %d (%d hives ignored). Workers forced: ' +
            '%d. Workers required: %d',
            beehive_size, hives_ignored, workers_forced, workers_required)

        if args.workers < workers_required:
            log.critical('Not enough workers to fill the beehive. ' +
                         'Increase -w --workers, decrease -bh --beehive ' +
                         'or decrease -wph --workers-per-hive')
            sys.exit()

        # Assign remaining workers to available hives.
        remaining_workers = args.workers - workers_forced
        populate_index = 0
        while remaining_workers > 0:
            beehive_index = populate_index % beehive_size
            if beehive_index in skip_indexes:
                populate_index += 1
                continue
            beehive_workers[beehive_index] += 1
            populate_index += 1
            remaining_workers -= 1

        log.debug('Beehive worker distribution: %s', beehive_workers)

    # These are very noisy, let's shush them up a bit.
    logging.getLogger('peewee').setLevel(logging.INFO)
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.pgoapi').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.rpc_api').setLevel(logging.INFO)
    logging.getLogger('werkzeug').setLevel(logging.ERROR)

    config['parse_pokemon'] = not args.no_pokemon
    config['parse_pokestops'] = not args.no_pokestops
    config['parse_gyms'] = not args.no_gyms

    # Turn these back up if debugging.
    if args.verbose or args.very_verbose:
        logging.getLogger('pgoapi').setLevel(logging.DEBUG)
    if args.very_verbose:
        logging.getLogger('peewee').setLevel(logging.DEBUG)
        logging.getLogger('requests').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.pgoapi').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('werkzeug').setLevel(logging.DEBUG)

    # Web access logs.
    if args.access_logs:
        logger = logging.getLogger('werkzeug')
        handler = logging.FileHandler('access.log')
        logger.setLevel(logging.INFO)
        logger.addHandler(handler)

    # Use lat/lng directly if it matches such a pattern.
    prog = re.compile(r"^(\-?\d+\.\d+),?\s?(\-?\d+\.\d+)$")
    res = prog.match(args.location)
    if res:
        log.debug('Using coordinates from CLI directly')
        position = (float(res.group(1)), float(res.group(2)), 0)
    else:
        log.debug('Looking up coordinates in API')
        position = util.get_pos_by_name(args.location)

    if position is None or not any(position):
        log.error("Location not found: '{}'".format(args.location))
        sys.exit()

    # Use the latitude and longitude to get the local altitude from Google.
    (altitude, status) = get_gmaps_altitude(position[0], position[1],
                                            args.gmaps_key)
    if altitude is not None:
        log.debug('Local altitude is: %sm', altitude)
        position = (position[0], position[1], altitude)
    else:
        if status == 'REQUEST_DENIED':
            log.error(
                'Google API Elevation request was denied. You probably ' +
                'forgot to enable elevation api in https://console.' +
                'developers.google.com/apis/api/elevation_backend/')
            sys.exit()
        else:
            log.error('Unable to retrieve altitude from Google APIs, ' +
                      'setting to 0.')

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)',
             position[0], position[1], position[2])

    if args.no_pokemon:
        log.info('Parsing of Pokemon disabled.')
    if args.no_pokestops:
        log.info('Parsing of Pokestops disabled.')
    if args.no_gyms:
        log.info('Parsing of Gyms disabled.')
    if args.encounter:
        log.info('Encountering pokemon enabled.')

    config['LOCALE'] = args.locale
    config['CHINA'] = args.china

    # If we're clearing the db, do not bother with the blacklist.
    if args.clear_db:
        args.disable_blacklist = True

    app = Pogom(__name__)
    app.before_request(app.validate_request)

    db = init_database(app)
    if args.clear_db:
        log.info('Clearing database')
        if args.db_type == 'mysql':
            drop_tables(db)
        elif os.path.isfile(args.db):
            os.remove(args.db)
    create_tables(db)

    if args.clear_db:
        log.info('Drop and recreate is complete. Now remove -cd and restart.')
        sys.exit()

    app.set_current_location(position)

    # Control the search status (running or not) across threads.
    pause_bit = Event()
    pause_bit.clear()
    if args.on_demand_timeout > 0:
        pause_bit.set()

    heartbeat = [now()]

    # Set up the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB updates.
    db_updates_queue = Queue()
    app.set_db_updates_queue(db_updates_queue)

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i),
                   args=(args, db_updates_queue, db))
        t.daemon = True
        t.start()

    # DB cleaner; really only need one ever.
    if not args.disable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    # WH updates queue & WH unique key LFU caches.
    # The LFU caches will stop the server from resending the same data an
    # infinite number of times. The caches will be instantiated in the
    # webhook's startup code.
    wh_updates_queue = Queue()
    wh_key_cache = {}
    app.set_wh_updates_queue(wh_updates_queue)

    # Thread to process webhook updates.
    for i in range(args.wh_threads):
        log.debug('Starting wh-updater worker thread %d', i)
        t = Thread(target=wh_updater, name='wh-updater-{}'.format(i),
                   args=(args, wh_updates_queue, wh_key_cache))
        t.daemon = True
        t.start()

    if not args.only_server:
        # Abort if we don't have a hash key set.
        if not args.hash_key:
            log.critical('Hash key is required for scanning. Exiting.')
            sys.exit()

        # Processing proxies if set (load from file, check and overwrite old
        # args.proxy with new working list).
        args.proxy = check_proxies(args)

        # Run periodical proxy refresh thread.
        if (args.proxy_file is not None) and (args.proxy_refresh > 0):
            t = Thread(target=proxies_refresher,
                       name='proxy-refresh', args=(args,))
            t.daemon = True
            t.start()
        else:
            log.info('Periodical proxies refresh disabled.')

        # Gather the Pokemon!

        # Attempt to dump the spawn points (do this before starting threads
        # or endure the woe).
        if (args.spawnpoint_scanning and
                args.spawnpoint_scanning != 'nofile' and
                args.dump_spawnpoints):
            with open(args.spawnpoint_scanning, 'w+') as file:
                log.info('Saving spawn points to %s',
                         args.spawnpoint_scanning)
                spawns = Pokemon.get_spawnpoints_in_hex(
                    position, args.step_limit)
                file.write(json.dumps(spawns))
                log.info('Finished exporting spawn points')

        argset = (args, beehive_workers, new_location_queue, pause_bit,
                  heartbeat, db_updates_queue, wh_updates_queue)

        log.debug('Starting a %s search thread', args.scheduler)
        search_thread = Thread(target=search_overseer_thread,
                               name='search-overseer', args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.cors:
        CORS(app)

    # No more stale JS.
    init_cache_busting(app)

    app.set_search_control(pause_bit)
    app.set_heartbeat_control(heartbeat)
    app.set_location_queue(new_location_queue)

    config['ROOT_PATH'] = app.root_path
    config['GMAPS_KEY'] = args.gmaps_key

    if args.no_server:
        # This loop allows for ctrl-c interrupts to work since flask won't
        # be holding the program open.
        while search_thread.is_alive():
            time.sleep(60)
    else:
        ssl_context = None
        if (args.ssl_certificate and args.ssl_privatekey and
                os.path.exists(args.ssl_certificate) and
                os.path.exists(args.ssl_privatekey)):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(
                args.ssl_certificate, args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose or args.very_verbose:
            app.run(threaded=True, use_reloader=False, debug=True,
                    host=args.host, port=args.port, ssl_context=ssl_context)
        else:
            app.run(threaded=True, use_reloader=False, debug=False,
                    host=args.host, port=args.port, ssl_context=ssl_context)
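
# --- Illustrative example (not part of the original file) ---
# A minimal sketch of the beehive sizing and distribution used above: a
# beehive with n rings around a center hive contains 1 + 6 * (1 + ... + n)
# hives, and workers not forced onto a specific hive are dealt out
# round-robin. This standalone helper mirrors that logic for
# experimentation only; its name and parameters are hypothetical, and it
# ignores the -bhw forced/skipped hive handling.
def _example_beehive_distribution(rings, total_workers):
    # 1 center hive plus i * 6 hives on the i-th hexagonal ring.
    size = 1 + sum(i * 6 for i in range(1, rings + 1))
    workers = [0] * size
    for n in range(total_workers):
        workers[n % size] += 1   # Round-robin assignment.
    return workers

# Example usage:
#   _example_beehive_distribution(1, 10) -> [2, 2, 2, 1, 1, 1, 1]
#   (-bh 1 => 1 + 1*6 = 7 hives, 10 workers spread across them)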