# Example #1
def cfg_init(shadowcheck=False):
    """Parse arguments and initialize the PGNumbra / MrMime configuration.

    Args:
        shadowcheck: When True, force nearby-Pokemon scanning (the shadowban
            test needs it) and disable PGPool auto-updates.
    """
    global args

    log.info("Loading PGNumbra configuration.")

    parse_args()

    # Hash keys are handed out round-robin via a cyclic provider.
    provider = CyclicResourceProvider()
    for key in args.hash_key:
        provider.add_resource(key)
    args.hash_key_provider = provider

    mrmime_cfg = {'pgpool_system_id': get_pgpool_system_id()}

    if args.pgpool_url:
        log.info("Attaching to PGPool at {}".format(args.pgpool_url))
        mrmime_cfg['pgpool_url'] = args.pgpool_url

    if shadowcheck:
        # This test must include nearby Pokemon to work properly.
        args.include_nearby = True
        mrmime_cfg['pgpool_auto_update'] = False

    min_level = cfg_get('pgpool_min_level')
    max_level = cfg_get('pgpool_max_level')
    if min_level > 1 or max_level < 40:
        log.info("Only checking accounts with trainer level {} to {}.".format(
            min_level, max_level))

    max_good = cfg_get('max_good')
    if max_good:
        log.info("Stopping after {} GOOD accounts.".format(max_good))

    init_mr_mime(user_cfg=mrmime_cfg)
# Example #2
def cfg_init():
    """Parse arguments and set up MrMime, hash keys and proxies for PGScout."""
    log.info("Loading PGScout configuration...")

    parse_args()

    # MrMime config: raise on captchas so the scout logic can react to them.
    mrmime_cfg = {
        'pgpool_system_id': args.pgpool_system_id,
        'exception_on_captcha': True
    }
    if args.pgpool_url:
        log.info("Attaching to PGPool at {}".format(args.pgpool_url))
        mrmime_cfg['pgpool_url'] = args.pgpool_url
    init_mr_mime(mrmime_cfg)

    # Hash keys rotate round-robin through a cyclic provider.
    key_provider = CyclicResourceProvider()
    for key in args.hash_key:
        key_provider.add_resource(key)
    args.hash_key_provider = key_provider

    # Proxies: validate the configured list, then rotate through it.
    args.proxies = check_proxies(cfg_get('proxies_file'))
    proxy_provider = CyclicResourceProvider()
    for proxy in args.proxies:
        proxy_provider.add_resource(proxy)
    args.proxy_provider = proxy_provider
# Example #3
def cfg_init():
    """Parse arguments and set up MrMime, hash keys, proxies and the
    low-priority Pokemon list for PGScout."""
    log.info("Loading PGScout configuration...")

    parse_args()

    # MrMime config: raise on captchas so the scout logic can react to them.
    mrmime_cfg = {
        'pgpool_system_id': args.pgpool_system_id,
        'exception_on_captcha': True
    }
    if args.pgpool_url:
        log.info("Attaching to PGPool at {}".format(args.pgpool_url))
        mrmime_cfg['pgpool_url'] = args.pgpool_url
    init_mr_mime(mrmime_cfg)

    # Hash keys rotate round-robin through a cyclic provider.
    key_provider = CyclicResourceProvider()
    for key in args.hash_key:
        key_provider.add_resource(key)
    args.hash_key_provider = key_provider

    # Proxies: validate the configured list, then rotate through it.
    args.proxies = check_proxies(cfg_get('proxies_file'))
    proxy_provider = CyclicResourceProvider()
    for proxy in args.proxies:
        proxy_provider.add_resource(proxy)
    args.proxy_provider = proxy_provider

    # Optional list of Pokemon IDs that get scouted with low priority.
    args.low_prio_pokemon = []
    if args.low_prio_file:
        with open(args.low_prio_file) as f:
            args.low_prio_pokemon = read_pokemon_ids_from_file(f)
        if args.low_prio_pokemon:
            log.info("{} low priority Pokemon loaded from {}".format(len(args.low_prio_pokemon), args.low_prio_file))
            # Watch the file so edits take effect without a restart.
            watcher = Thread(target=watch_low_prio_file,
                             args=(args.low_prio_file,))
            watcher.daemon = True
            watcher.start()
# Example #4
def cfg_init(shadowcheck=False):
    """Parse arguments and initialize the PGNumbra / MrMime configuration.

    Args:
        shadowcheck: When True, force nearby-Pokemon scanning (the shadowban
            test needs it) and disable PGPool auto-updates.
    """
    log.info("Loading PGNumbra configuration.")

    parse_args()

    # Hash keys are handed out round-robin via a cyclic provider.
    provider = CyclicResourceProvider()
    for key in args.hash_key:
        provider.add_resource(key)
    args.hash_key_provider = provider

    mrmime_cfg = {'pgpool_system_id': get_pgpool_system_id()}

    if args.pgpool_url:
        log.info("Attaching to PGPool at {}".format(args.pgpool_url))
        mrmime_cfg['pgpool_url'] = args.pgpool_url

    if shadowcheck:
        # This test must include nearby Pokemon to work properly.
        args.include_nearby = True
        mrmime_cfg['pgpool_auto_update'] = False

    init_mr_mime(user_cfg=mrmime_cfg)
# Example #5
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
        return

    log.error("Uncaught exception", exc_info=(
        exc_type, exc_value, exc_traceback))


# ===========================================================================

log.info("PGNumbra ShadowCheck starting up.")

# Route uncaught exceptions (including those raised in threads) through the
# logging excepthook defined above.
install_thread_excepthook()
sys.excepthook = handle_exception

# PGPool must not auto-update accounts while we are only checking them.
init_mr_mime(user_cfg={
    'pgpool_auto_update': False
})

lat = cfg_get('latitude')
lng = cfg_get('longitude')

# Start with a clean slate: drop result files from any previous run.
for result in ('good', 'blind', 'captcha', 'banned', 'error'):
    remove_account_file(result)

if os.path.isfile(ACC_INFO_FILE):
    os.remove(ACC_INFO_FILE)
# Example #6
def main():
    """RocketMap entry point.

    Parses arguments, initializes the MrMime library and the database,
    spins up the db/webhook/search worker threads and finally runs the
    Flask web server (unless --no-server is set).
    """
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()

    set_log_and_verbosity(log)

    # Abort if only-server and no-server are used together
    if args.only_server and args.no_server:
        log.critical(
            "You can't use no-server and only-server at the same time, silly.")
        sys.exit(1)

    # Abort if status name is not valid.
    # Raw string: avoids invalid-escape warnings for \w and \s.
    regexp = re.compile(r'^([\w\s\-.]+)$')
    if not regexp.match(args.status_name):
        log.critical('Status name contains illegal characters.')
        sys.exit(1)

    # Stop if we're just looking for a debug dump.
    if args.dump:
        log.info('Retrieving environment info...')
        hastebin_id = get_debug_dump_link()
        log.info('Done! Your debug link: https://hastebin.com/%s.txt',
                 hastebin_id)
        sys.exit(1)

    args.root_path = os.path.dirname(os.path.abspath(__file__))

    if args.ex_gyms:
        if args.geofence_file == '':
            log.critical('A geofence is required to find EX-gyms')
            sys.exit(1)
        else:
            exgyms(args.geofence_file)
            log.info('Finished checking gyms against OSM parks, exiting')
            sys.exit(1)

    init_args(args)

    # Initialize Mr. Mime library
    mrmime_cfg = {
        # We don't want exceptions on captchas because we handle them differently.
        'exception_on_captcha': False,
        # MrMime shouldn't jitter
        'jitter_gmo': False,
        'pgpool_system_id': args.status_name
    }
    # Don't overwrite PGPool URL if it's only set in MrMime config JSON
    if args.pgpool_url:
        mrmime_cfg['pgpool_url'] = args.pgpool_url
    mrmime_config_file = os.path.join(os.path.dirname(__file__), 'config/mrmime_config.json')
    init_mr_mime(config_file=mrmime_config_file, user_cfg=mrmime_cfg)

    # Let's not forget to run Grunt / Only needed when running with webserver.
    if not args.no_server and not validate_assets(args):
        sys.exit(1)

    position = extract_coordinates(args.location)

    # Use the latitude and longitude to get the local altitude from Google.
    (altitude, status) = get_gmaps_altitude(position[0], position[1],
                                            args.gmaps_key)
    if altitude is not None:
        log.debug('Local altitude is: %sm.', altitude)
        position = (position[0], position[1], altitude)
    else:
        if status == 'REQUEST_DENIED':
            log.error(
                'Google API Elevation request was denied. You probably ' +
                'forgot to enable elevation api in https://console.' +
                'developers.google.com/apis/api/elevation_backend/')
            sys.exit()
        else:
            # BUGFIX: the two literals previously concatenated without a
            # separator ("...Google APIssetting to 0").
            log.error('Unable to retrieve altitude from Google APIs, ' +
                      'setting to 0.')

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)',
             position[0], position[1], position[2])

    # Scanning toggles.
    log.info('Parsing of Pokemon %s.',
             'disabled' if args.no_pokemon else 'enabled')
    log.info('Parsing of Pokestops %s.',
             'disabled' if args.no_pokestops else 'enabled')
    log.info('Parsing of Gyms %s.',
             'disabled' if args.no_gyms else 'enabled')
    log.info('Pokemon encounters %s.',
             'enabled' if args.encounter else 'disabled')

    app = None
    if not args.no_server and not args.clear_db:
        app = Pogom(__name__,
                    root_path=os.path.dirname(
                              os.path.abspath(__file__)).decode('utf8'))
        app.before_request(app.validate_request)
        app.set_current_location(position)

    db = startup_db(app, args.clear_db)

    # Control the search status (running or not) across threads.
    control_flags = {
      'on_demand': Event(),
      'api_watchdog': Event(),
      'search_control': Event()
    }

    for flag in control_flags.values():
        flag.clear()

    if args.on_demand_timeout > 0:
        control_flags['on_demand'].set()

    heartbeat = [now()]

    # Setup the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB Updates
    db_updates_queue = Queue()
    if app:
        app.set_db_updates_queue(db_updates_queue)

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i),
                   args=(db_updates_queue, db))
        t.daemon = True
        t.start()

    # Database cleaner; really only need one ever.
    if args.enable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    # WH updates queue & WH unique key LFU caches.
    # The LFU caches will stop the server from resending the same data an
    # infinite number of times. The caches will be instantiated in the
    # webhook's startup code.
    wh_updates_queue = Queue()
    wh_key_cache = {}

    if not args.wh_types:
        log.info('Webhook disabled.')
    else:
        log.info('Webhook enabled for events: sending %s to %s.',
                 args.wh_types,
                 args.webhooks)

        # Thread to process webhook updates.
        for i in range(args.wh_threads):
            log.debug('Starting wh-updater worker thread %d', i)
            t = Thread(target=wh_updater, name='wh-updater-{}'.format(i),
                       args=(args, wh_updates_queue, wh_key_cache))
            t.daemon = True
            t.start()

    if not args.only_server:
        # Speed limit.
        log.info('Scanning speed limit %s.',
                 'set to {} km/h'.format(args.kph)
                 if args.kph > 0 else 'disabled')
        log.info('High-level speed limit %s.',
                 'set to {} km/h'.format(args.hlvl_kph)
                 if args.hlvl_kph > 0 else 'disabled')

        # Check if we are able to scan.
        if not can_start_scanning(args):
            sys.exit(1)

        initialize_proxies(args)

        # Update player locale if not set correctly, yet.
        args.player_locale = PlayerLocale.get_locale(args.location)
        if not args.player_locale:
            args.player_locale = gmaps_reverse_geolocate(
                args.gmaps_key,
                args.locale,
                str(position[0]) + ', ' + str(position[1]))
            db_player_locale = {
                'location': args.location,
                'country': args.player_locale['country'],
                # BUGFIX: was args.player_locale['country'] — stored the
                # country code as the language too. Assumes the reverse
                # geolocate result carries a 'language' key like the other
                # locale dicts in this project — verify against
                # gmaps_reverse_geolocate.
                'language': args.player_locale['language'],
                'timezone': args.player_locale['timezone'],
            }
            db_updates_queue.put((PlayerLocale, {0: db_player_locale}))
        else:
            log.debug(
                'Existing player locale has been retrieved from the DB.')

        # Set To True For Fake Spawn Test Mode #
        fake_pokemon_mode = False
        ########################################
        if fake_pokemon_mode:
            log.info('** Starting a fake search **')
            search_thread = Thread(target=fake_search_thread,
                                   name='search-overseer',
                                   args=(args, position, db_updates_queue,
                                         wh_updates_queue))
        else:
            argset = (args, new_location_queue, control_flags,
                      heartbeat, db_updates_queue, wh_updates_queue)

            log.debug('Starting a %s search thread', args.scheduler)
            search_thread = Thread(target=search_overseer_thread,
                                   name='search-overseer', args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.no_server:
        # This loop allows for ctrl-c interupts to work since flask won't be
        # holding the program open.
        while search_thread.is_alive():
            time.sleep(60)
    else:

        if args.cors:
            CORS(app)

        # No more stale JS.
        init_cache_busting(app)

        app.set_control_flags(control_flags)
        app.set_heartbeat_control(heartbeat)
        app.set_location_queue(new_location_queue)
        ssl_context = None
        if (args.ssl_certificate and args.ssl_privatekey and
                os.path.exists(args.ssl_certificate) and
                os.path.exists(args.ssl_privatekey)):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(
                args.ssl_certificate, args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose:
            app.run(threaded=True, use_reloader=False, debug=True,
                    host=args.host, port=args.port, ssl_context=ssl_context)
        else:
            app.run(threaded=True, use_reloader=False, debug=False,
                    host=args.host, port=args.port, ssl_context=ssl_context)
# Example #7
                    # Ignore blank lines and comment lines.
                    if len(line.strip()) == 0 or line.startswith('#'):
                        continue
                    resource = line.strip()
                    resources.append(resource)
        except IOError:
            log.exception('Could not load {} from {}.'.format(resource_file))
            exit(1)
    return resources


# ===========================================================================

log.info("PGScout starting up.")

init_mr_mime()

init_proxies()

# Hash keys rotate round-robin through this provider.
hash_key_provider = CyclicResourceProvider()

# Single-key config option.
hash_key = cfg_get('hash_key')
if hash_key:
    hash_key_provider.add_resource(hash_key)
    log.info("Loaded single hash key from config.")

# Multiple-keys config option; both options may be combined.
hash_keys = cfg_get('hash_keys')
if hash_keys:
    for hk in hash_keys:
        hash_key_provider.add_resource(hk)
    log.info("Loaded {} hash keys from config.".format(len(hash_keys)))
# Example #8
def main():
    """PokemonGo-Map entry point.

    Parses arguments, configures logging, initializes MrMime and the
    database, resolves the scan location, starts the db/webhook/search
    worker threads and finally runs the Flask web server (unless
    --no-server is set).
    """
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()

    init_mr_mime(config_file='config/mrmime_config.json')

    # Add file logging if enabled.
    if args.verbose and args.verbose != 'nofile':
        filelog = logging.FileHandler(args.verbose)
        filelog.setFormatter(
            logging.Formatter(
                '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] '
                + '%(message)s'))
        logging.getLogger('').addHandler(filelog)
    if args.very_verbose and args.very_verbose != 'nofile':
        filelog = logging.FileHandler(args.very_verbose)
        filelog.setFormatter(
            logging.Formatter(
                '%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] '
                + '%(message)s'))
        logging.getLogger('').addHandler(filelog)

    if args.verbose or args.very_verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)

    # Let's not forget to run Grunt / Only needed when running with webserver.
    if not args.no_server:
        root_path = os.path.dirname(__file__)
        if not os.path.exists(os.path.join(root_path, 'static/dist')):
            log.critical(
                'Missing front-end assets (static/dist) -- please run ' +
                '"npm install && npm run build" before starting the server.')
            sys.exit()

        # You need custom image files now.
        if not os.path.isfile(
                os.path.join(root_path, 'static/icons-sprite.png')):
            log.info('Sprite files not present, extracting bundled ones...')
            extract_sprites(root_path)
            log.info('Done!')

        # Check if custom.css is used otherwise fall back to default.
        if os.path.exists(os.path.join(root_path, 'static/css/custom.css')):
            args.custom_css = True
            log.info(
                'File \"custom.css\" found, applying user-defined settings.')
        else:
            args.custom_css = False
            log.info('No file \"custom.css\" found, using default settings.')

    # These are very noisy, let's shush them up a bit.
    logging.getLogger('peewee').setLevel(logging.INFO)
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.pgoapi').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.rpc_api').setLevel(logging.INFO)
    logging.getLogger('werkzeug').setLevel(logging.ERROR)

    config['parse_pokemon'] = not args.no_pokemon
    config['parse_pokestops'] = not args.no_pokestops
    config['parse_gyms'] = not args.no_gyms

    # Turn these back up if debugging.
    if args.verbose or args.very_verbose:
        logging.getLogger('pgoapi').setLevel(logging.DEBUG)
    if args.very_verbose:
        logging.getLogger('peewee').setLevel(logging.DEBUG)
        logging.getLogger('requests').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.pgoapi').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('werkzeug').setLevel(logging.DEBUG)

    # Web access logs.
    if args.access_logs:
        logger = logging.getLogger('werkzeug')
        handler = logging.FileHandler('access.log')
        logger.setLevel(logging.INFO)
        logger.addHandler(handler)

    # Use lat/lng directly if matches such a pattern.
    # Raw string: avoids invalid-escape warnings for \- \d \. \s.
    prog = re.compile(r"^(\-?\d+\.\d+),?\s?(\-?\d+\.\d+)$")
    res = prog.match(args.location)
    if res:
        log.debug('Using coordinates from CLI directly')
        position = (float(res.group(1)), float(res.group(2)), 0)
    else:
        log.debug('Looking up coordinates in API')
        position = util.get_pos_by_name(args.location)

    if position is None or not any(position):
        log.error("Location not found: '{}'".format(args.location))
        sys.exit()

    # Use the latitude and longitude to get the local altitude from Google.
    (altitude, status) = get_gmaps_altitude(position[0], position[1],
                                            args.gmaps_key)
    if altitude is not None:
        log.debug('Local altitude is: %sm', altitude)
        position = (position[0], position[1], altitude)
    else:
        if status == 'REQUEST_DENIED':
            log.error(
                'Google API Elevation request was denied. You probably ' +
                'forgot to enable elevation api in https://console.' +
                'developers.google.com/apis/api/elevation_backend/')
            sys.exit()
        else:
            # BUGFIX: the two literals previously concatenated without a
            # separator ("...Google APIssetting to 0").
            log.error('Unable to retrieve altitude from Google APIs, ' +
                      'setting to 0.')

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)', position[0],
             position[1], position[2])

    if args.no_pokemon:
        log.info('Parsing of Pokemon disabled.')
    if args.no_pokestops:
        log.info('Parsing of Pokestops disabled.')
    if args.no_gyms:
        log.info('Parsing of Gyms disabled.')
    if args.encounter:
        log.info('Encountering pokemon enabled.')

    config['LOCALE'] = args.locale
    config['CHINA'] = args.china

    # if we're clearing the db, do not bother with the blacklist
    if args.clear_db:
        args.disable_blacklist = True
    app = Pogom(__name__)
    app.before_request(app.validate_request)

    db = init_database(app)
    if args.clear_db:
        log.info('Clearing database')
        if args.db_type == 'mysql':
            drop_tables(db)
        elif os.path.isfile(args.db):
            os.remove(args.db)

    verify_database_schema(db)

    create_tables(db)

    # fixing encoding on present and future tables
    verify_table_encoding(db)

    if args.clear_db:
        log.info("Drop and recreate is complete. Now remove -cd and restart.")
        sys.exit()

    app.set_current_location(position)

    # Control the search status (running or not) across threads.
    pause_bit = Event()
    pause_bit.clear()
    if args.on_demand_timeout > 0:
        pause_bit.set()

    heartbeat = [now()]

    # Setup the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB Updates
    db_updates_queue = Queue()
    app.set_db_updates_queue(db_updates_queue)

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater,
                   name='db-updater-{}'.format(i),
                   args=(args, db_updates_queue, db))
        t.daemon = True
        t.start()

    # db cleaner; really only need one ever.
    if not args.disable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args, ))
        t.daemon = True
        t.start()

    # WH updates queue & WH unique key LFU caches.
    # The LFU caches will stop the server from resending the same data an
    # infinite number of times. The caches will be instantiated in the
    # webhook's startup code.
    wh_updates_queue = Queue()
    wh_key_cache = {}
    app.set_wh_updates_queue(wh_updates_queue)

    # Thread to process webhook updates.
    for i in range(args.wh_threads):
        log.debug('Starting wh-updater worker thread %d', i)
        t = Thread(target=wh_updater,
                   name='wh-updater-{}'.format(i),
                   args=(args, wh_updates_queue, wh_key_cache))
        t.daemon = True
        t.start()

    config['ROOT_PATH'] = app.root_path
    config['GMAPS_KEY'] = args.gmaps_key

    if not args.only_server:

        # Abort if we don't have a hash key set
        if not args.hash_key:
            log.critical('Hash key is required for scanning. Exiting.')
            sys.exit()

        # Processing proxies if set (load from file, check and overwrite old
        # args.proxy with new working list)
        args.proxy = check_proxies(args)

        # Run periodical proxy refresh thread
        if (args.proxy_file is not None) and (args.proxy_refresh > 0):
            t = Thread(target=proxies_refresher,
                       name='proxy-refresh',
                       args=(args, ))
            t.daemon = True
            t.start()
        else:
            log.info('Periodical proxies refresh disabled.')

        # Find the reverse geolocation
        geolocator = GoogleV3(api_key=args.gmaps_key)
        args.player_locale = {
            'country': 'US',
            'language': args.locale,
            'timezone': 'America/Denver'
        }
        try:
            location = geolocator.reverse(args.location)
            country = location[-1].raw['address_components'][-1]['short_name']
            try:
                timezone = geolocator.timezone(args.location)
                args.player_locale.update({
                    'country': country,
                    'timezone': str(timezone)
                })
            except Exception as e:
                log.warning(
                    'Exception while obtaining Google Timezone. ' +
                    'Key probably not enabled: %s.', repr(e))
                pass
        except Exception as e:
            log.warning('Exception while obtaining player locale: %s.',
                        repr(e))
            pass

        # Gather the Pokemon!

        # Attempt to dump the spawn points (do this before starting threads of
        # endure the woe).
        if (args.spawnpoint_scanning and args.spawnpoint_scanning != 'nofile'
                and args.dump_spawnpoints):
            with open(args.spawnpoint_scanning, 'w+') as file:
                log.info('Saving spawn points to %s', args.spawnpoint_scanning)
                spawns = Pokemon.get_spawnpoints_in_hex(
                    position, args.step_limit)
                file.write(json.dumps(spawns))
                log.info('Finished exporting spawn points')

        argset = (args, new_location_queue, pause_bit, heartbeat,
                  db_updates_queue, wh_updates_queue)

        log.debug('Starting a %s search thread', args.scheduler)
        search_thread = Thread(target=search_overseer_thread,
                               name='search-overseer',
                               args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.cors:
        CORS(app)

    # No more stale JS.
    init_cache_busting(app)

    app.set_search_control(pause_bit)
    app.set_heartbeat_control(heartbeat)
    app.set_location_queue(new_location_queue)

    if args.no_server:
        # This loop allows for ctrl-c interupts to work since flask won't be
        # holding the program open.
        while search_thread.is_alive():
            time.sleep(60)
    else:
        ssl_context = None
        if (args.ssl_certificate and args.ssl_privatekey
                and os.path.exists(args.ssl_certificate)
                and os.path.exists(args.ssl_privatekey)):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(args.ssl_certificate,
                                        args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose or args.very_verbose:
            app.run(threaded=True,
                    use_reloader=False,
                    debug=True,
                    host=args.host,
                    port=args.port,
                    ssl_context=ssl_context)
        else:
            app.run(threaded=True,
                    use_reloader=False,
                    debug=False,
                    host=args.host,
                    port=args.port,
                    ssl_context=ssl_context)
# Example #9
    'host': '127.0.0.1',
    'port': 4242,
    'proxies_file': '',
    'require_min_trainer_level': 30,
    # API related values, usually don't need to change them
    'login_retries': 3,
    'login_delay': 6
}


def cfg_get(key, default=None):
    """Return the configuration value for *key*, or *default* if unset."""
    try:
        return cfg[key]
    except KeyError:
        return default


# The command line only carries the path to the JSON config file;
# every other setting comes from that file.
parser = configargparse.ArgParser()
parser.add_argument(
    '-c',
    '--config',
    default='config.json',
    help=('Specify different config file. Default: config.json'))
args = parser.parse_args()

# Overlay user settings on top of the built-in defaults.
with open(args.config, 'r') as f:
    user_cfg = json.load(f)
    cfg.update(user_cfg)

init_mr_mime({
    'login_delay': cfg['login_delay'],
    'login_retries': cfg['login_retries']
})
# Example #10
def main():
    """Program entry point.

    Parses configuration, resolves the scan location (direct lat/lng or
    geocoded), initializes the database and the various worker threads
    (db updaters, cleaner, webhook updaters, search overseer), then either
    idles (-ns/--no-server) or serves the Flask web frontend.
    """
    # Patch threading to make exceptions catchable.
    install_thread_excepthook()

    # Make sure exceptions get logged.
    sys.excepthook = handle_exception

    args = get_args()

    # Abort if status name is not valid.
    # Raw string so \w / \s / \- are regex escapes, not string escapes.
    regexp = re.compile(r'^([\w\s\-.]+)$')
    if not regexp.match(args.status_name):
        log.critical('Status name contains illegal characters.')
        sys.exit(1)

    set_log_and_verbosity(log)

    args.root_path = os.path.dirname(os.path.abspath(__file__))
    init_args(args)

    # Initialize Mr. Mime library.
    mrmime_cfg = {
        # We don't want exceptions on captchas because we handle them
        # differently.
        'exception_on_captcha': False,
        'pgpool_system_id': args.status_name
    }
    # Don't clear PGPool URL if it's not given in config but set in MrMime
    # config JSON.
    if args.pgpool_url:
        mrmime_cfg['pgpool_url'] = args.pgpool_url
    mrmime_config_file = os.path.join(os.path.dirname(__file__),
                                      'config/mrmime_config.json')
    init_mr_mime(config_file=mrmime_config_file, user_cfg=mrmime_cfg)

    # Abort if only-server and no-server are used together.
    if args.only_server and args.no_server:
        log.critical(
            "You can't use no-server and only-server at the same time, silly.")
        sys.exit(1)

    # Stop if we're just looking for a debug dump.
    if args.dump:
        log.info('Retrieving environment info...')
        hastebin = get_debug_dump_link()
        log.info('Done! Your debug link: https://hastebin.com/%s.txt',
                 hastebin)
        sys.exit(1)

    # Let's not forget to run Grunt / Only needed when running with webserver.
    if not args.no_server and not validate_assets(args):
        sys.exit(1)

    # Use lat/lng directly if matches such a pattern.
    prog = re.compile(r"^(\-?\d+\.\d+),?\s?(\-?\d+\.\d+)$")
    res = prog.match(args.location)
    if res:
        log.debug('Using coordinates from CLI directly')
        position = (float(res.group(1)), float(res.group(2)), 0)
    else:
        log.debug('Looking up coordinates in API')
        position = util.get_pos_by_name(args.location)

    if position is None or not any(position):
        log.error("Location not found: '{}'".format(args.location))
        # Exit non-zero so wrapper scripts can detect the failure.
        sys.exit(1)

    # Use the latitude and longitude to get the local altitude from Google.
    (altitude, status) = get_gmaps_altitude(position[0], position[1],
                                            args.gmaps_key)
    if altitude is not None:
        log.debug('Local altitude is: %sm', altitude)
        position = (position[0], position[1], altitude)
    else:
        if status == 'REQUEST_DENIED':
            log.error(
                'Google API Elevation request was denied. You probably ' +
                'forgot to enable elevation api in https://console.' +
                'developers.google.com/apis/api/elevation_backend/')
            # Exit non-zero: the API key is misconfigured.
            sys.exit(1)
        else:
            # Non-fatal: fall back to altitude 0 and keep going.
            log.error('Unable to retrieve altitude from Google APIs, ' +
                      'setting to 0')

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)', position[0],
             position[1], position[2])

    if args.no_pokemon:
        log.info('Parsing of Pokemon disabled.')
    if args.no_pokestops:
        log.info('Parsing of Pokestops disabled.')
    if args.no_gyms:
        log.info('Parsing of Gyms disabled.')
    if args.encounter:
        log.info('Encountering pokemon enabled.')

    app = None
    if not args.no_server and not args.clear_db:
        app = Pogom(__name__,
                    root_path=os.path.dirname(
                        os.path.abspath(__file__)).decode('utf8'))
        app.before_request(app.validate_request)
        app.set_current_location(position)

    db = init_database(app)
    if args.clear_db:
        log.info('Clearing database')
        if args.db_type == 'mysql':
            drop_tables(db)
        elif os.path.isfile(args.db):
            os.remove(args.db)

    verify_database_schema(db)

    create_tables(db)

    # Fix encoding on present and future tables.
    verify_table_encoding(db)

    if args.clear_db:
        log.info('Drop and recreate is complete. Now remove -cd and restart.')
        sys.exit()

    # Control the search status (running or not) across threads.
    control_flags = {
        'on_demand': Event(),
        'api_watchdog': Event(),
        'search_control': Event()
    }

    for flag in control_flags.values():
        flag.clear()

    if args.on_demand_timeout > 0:
        control_flags['on_demand'].set()

    heartbeat = [now()]

    # Setup the location tracking queue and push the first location on.
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB Updates
    db_updates_queue = Queue()
    if app:
        app.set_db_updates_queue(db_updates_queue)

    # Thread(s) to process database updates.
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater,
                   name='db-updater-{}'.format(i),
                   args=(db_updates_queue, db))
        t.daemon = True
        t.start()

    # Database cleaner; really only need one ever.
    if args.enable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args, ))
        t.daemon = True
        t.start()

    # WH updates queue & WH unique key LFU caches.
    # The LFU caches will stop the server from resending the same data an
    # infinite number of times. The caches will be instantiated in the
    # webhook's startup code.
    wh_updates_queue = Queue()
    wh_key_cache = {}

    if not args.wh_types:
        log.info('Webhook disabled.')
    else:
        log.info('Webhook enabled for events: sending %s to %s.',
                 args.wh_types, args.webhooks)

        # Thread to process webhook updates.
        for i in range(args.wh_threads):
            log.debug('Starting wh-updater worker thread %d', i)
            t = Thread(target=wh_updater,
                       name='wh-updater-{}'.format(i),
                       args=(args, wh_updates_queue, wh_key_cache))
            t.daemon = True
            t.start()

    if not args.only_server:
        # Check if we are able to scan.
        if not can_start_scanning(args):
            sys.exit(1)

        # Processing proxies if set (load from file, check and overwrite old
        # args.proxy with new working list).
        args.proxy = load_proxies(args)

        if args.proxy and not args.proxy_skip_check:
            args.proxy = check_proxies(args, args.proxy)

        # Run periodical proxy refresh thread.
        if (args.proxy_file is not None) and (args.proxy_refresh > 0):
            t = Thread(target=proxies_refresher,
                       name='proxy-refresh',
                       args=(args, ))
            t.daemon = True
            t.start()
        else:
            log.info('Periodical proxies refresh disabled.')

        # Update player locale if not set correctly, yet.
        args.player_locale = PlayerLocale.get_locale(args.location)
        if not args.player_locale:
            args.player_locale = gmaps_reverse_geolocate(
                args.gmaps_key, args.locale,
                str(position[0]) + ', ' + str(position[1]))
            db_player_locale = {
                'location': args.location,
                'country': args.player_locale['country'],
                # Bugfix: 'language' was previously copied from 'country'.
                'language': args.player_locale['language'],
                'timezone': args.player_locale['timezone'],
            }
            db_updates_queue.put((PlayerLocale, {0: db_player_locale}))
        else:
            log.debug('Existing player locale has been retrieved from the DB.')

        # Gather the Pokemon!

        # Attempt to dump the spawn points (do this before starting threads of
        # endure the woe).
        if (args.spawnpoint_scanning and args.spawnpoint_scanning != 'nofile'
                and args.dump_spawnpoints):
            with open(args.spawnpoint_scanning, 'w+') as file:
                log.info('Saving spawn points to %s', args.spawnpoint_scanning)
                spawns = SpawnPoint.get_spawnpoints_in_hex(
                    position, args.step_limit)
                file.write(json.dumps(spawns))
                log.info('Finished exporting spawn points')

        argset = (args, new_location_queue, control_flags, heartbeat,
                  db_updates_queue, wh_updates_queue)

        log.debug('Starting a %s search thread', args.scheduler)
        search_thread = Thread(target=search_overseer_thread,
                               name='search-overseer',
                               args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.no_server:
        # This loop allows for ctrl-c interupts to work since flask won't be
        # holding the program open.
        while search_thread.is_alive():
            time.sleep(60)
    else:

        if args.cors:
            CORS(app)

        # No more stale JS.
        init_cache_busting(app)

        app.set_control_flags(control_flags)
        app.set_heartbeat_control(heartbeat)
        app.set_location_queue(new_location_queue)
        ssl_context = None
        if (args.ssl_certificate and args.ssl_privatekey
                and os.path.exists(args.ssl_certificate)
                and os.path.exists(args.ssl_privatekey)):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(args.ssl_certificate,
                                        args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose:
            app.run(threaded=True,
                    use_reloader=False,
                    debug=True,
                    host=args.host,
                    port=args.port,
                    ssl_context=ssl_context)
        else:
            app.run(threaded=True,
                    use_reloader=False,
                    debug=False,
                    host=args.host,
                    port=args.port,
                    ssl_context=ssl_context)
Пример #11
0
# Default PGScout configuration; entries may be overridden by the user's
# JSON config file when it is loaded further down.
cfg = dict(
    host='127.0.0.1',
    port=4242,
    proxies_file='',
    require_min_trainer_level=30,
    # API related values, usually don't need to change them
    login_retries=3,
    login_delay=6,
)


def cfg_get(key, default=None):
    """Look up *key* in the global ``cfg`` dict, returning *default* if absent."""
    try:
        return cfg[key]
    except KeyError:
        return default


# Command-line parsing: the only option accepted here is the path to the
# JSON configuration file.
parser = configargparse.ArgParser()
parser.add_argument(
    '-c',
    '--config',
    default='config.json',
    help=('Specify different config file. Default: config.json'))
args = parser.parse_args()

# Overlay user-provided settings from the JSON config file onto the defaults.
with open(args.config, 'r') as f:
    user_cfg = json.load(f)
    cfg.update(user_cfg)

# Hand the login tuning values over to the MrMime library.
init_mr_mime({
    'login_delay': cfg['login_delay'],
    'login_retries': cfg['login_retries']
})

Пример #12
0
def scan():
    """Configure and launch the scanner.

    Parses CLI options, initializes MrMime, loads account/proxy/webhook
    files, builds the scheduler(s), starts the DB-inserter and stats
    threads, and finally spawns one search-worker thread per account.
    """
    log.info('Initializing.')
    # All scanner options are defined ad hoc here instead of a config file.
    parser = configargparse.ArgumentParser()
    parser.add_argument('-st', '--step-limit', default=6)
    parser.add_argument('-l', '--scan-location')
    parser.add_argument('-ac', '--accounts-file', default='accounts.csv')
    parser.add_argument('-pf', '--proxy-file', default='proxies.txt')
    parser.add_argument('-lpf', '--login-proxy-file', default='proxies.txt')
    parser.add_argument('-ld', '--login-delay', default=6)
    parser.add_argument('-hk', '--hash-key')
    parser.add_argument('-sd', '--scan-delay', default=10)
    parser.add_argument('-lt', '--login-timeout', default=15)
    parser.add_argument('-kph', '--kph', default=35)
    parser.add_argument('-bh', '--beehiving', action='store_true')
    parser.add_argument('-bhlf', '--beehive-locations-file', default=None)
    parser.add_argument('-lf', '--locations-file', default='coords.txt')
    parser.add_argument('-dd', '--ditto-detection', action='store_true')
    parser.add_argument('-spin', '--spin-pokestops', action='store_true')
    parser.add_argument('-nff', '--no-full-flow', action='store_true')
    parser.add_argument('-lp', '--lure-party', action='store_true')
    parser.add_argument('-psu', '--pgscout-url', default=None)
    parser.add_argument('-enc', '--encounter', action='store_true')
    parser.add_argument('-ef', '--encounter-file', default=None)
    parser.add_argument('-ss', '--spawn-scan', action='store_true')
    #parser.add_argument('-dbn', '--db-name')
    #parser.add_argument('-dbu', '--db-user')
    #parser.add_argument('-dbp', '--db-pass')
    #parser.add_argument('-dbh', '--db-host', default='localhost')
    #parser.add_argument('-dbpr', '--db-port', default=3306)
    parser.add_argument('-wh', '--webhook', action='store_true')
    parser.add_argument('-whf', '--webhook-file', default='webhooks.txt')
    args = parser.parse_args()
    #init_database(args)
    # Pass scan tuning on to the MrMime library. scan_delay arrives as a
    # string from argparse, hence the int() conversion.
    init_mr_mime({
        'full_login_flow': not args.no_full_flow,
        'scan_delay': int(args.scan_delay)
    })
    create_tables()
    # Scanning proxies and (possibly separate) login proxies, one per line.
    proxies = read_file_content(args.proxy_file)
    login_proxies = read_file_content(args.login_proxy_file)

    if args.webhook:
        webhooks = read_file_content(args.webhook_file)
    else:
        webhooks = []

    if args.beehiving:
        # Endless cycle over beehive center locations, one per worker.
        bh_loc_cycle = cycle(read_file_content(args.beehive_locations_file))

    accounts = read_file_content(args.accounts_file)

    # Proxies are distributed round-robin across the account queue.
    populate_accounts_queue(accounts, cycle(proxies), cycle(login_proxies))

    if not args.beehiving:
        # Single-location mode: one shared scheduler for all workers.
        loc = [float(i.strip()) for i in args.scan_location.split(',')]
        if args.spawn_scan:
            log.info('Using Spawn scheduler.')
            scheduler = SpawnpointScheduler(args, loc)
            scheduler.schedule()
        else:
            log.info('Using Classic scheduler.')
            scheduler = Scheduler(args, loc)
            scheduler.schedule()

    # Single thread draining the DB queue (also fires webhooks, if any).
    t = Thread(target=db_queue_inserter, name='db-inserter', args=(webhooks, ))
    t.start()

    if not args.beehiving:
        ss = Thread(target=spawn_stats, name='spawn-stats', args=(scheduler, ))
        ss.start()

    # NOTE(review): prefer 'is not None' over '!= None'.
    if args.encounter and args.pgscout_url != None:
        try:
            # Pokemon IDs to encounter, one per line.
            # NOTE(review): bare except silently swallows all errors
            # (missing file, bad int) — consider narrowing.
            list_enc = [int(i) for i in read_file_content(args.encounter_file)]
        except:
            list_enc = []
    else:
        list_enc = []

    # Spawn one search worker per account, throttled by login_delay.
    i = 0
    while i < len(accounts):
        if args.beehiving:
            # Next beehive location; falls back to --scan-location when the
            # cycle is exhausted.
            # NOTE(review): on Python 2 the comprehension variable 'i' leaks
            # into this scope and would clobber the loop counter — verify.
            loc = next(
                bh_loc_cycle,
                [float(i.strip()) for i in args.scan_location.split(',')])
            if args.spawn_scan:
                scheduler = SpawnpointScheduler(args, loc)
                scheduler.schedule()
            else:
                scheduler = Scheduler(args, loc)
                scheduler.schedule()
        t = Thread(target=search_worker,
                   name='search-worker-{}'.format(i),
                   args=(
                       args,
                       scheduler,
                       list_enc,
                   ))
        #t.daemon = True
        t.start()
        sleep(int(args.login_delay))
        i += 1