async def convert_tank_names(tanklist: list, tank_strs: dict) -> tuple:
    """Convert tank names for Tankopedia.

    Resolves each tank's 'userStr' key to a human-readable 'name' via
    tank_strs, and builds a filtered, key-sorted string table.

    Args:
        tanklist: list of tank dicts; each must have 'userStr' and 'tank_id'.
        tank_strs: mapping of WG user-string ids ('prefix:key') to display names.

    Returns:
        (tankopedia_sorted, userStrs_sorted):
            tankopedia_sorted: OrderedDict keyed by str(tank_id) (numeric order),
                each value an OrderedDict of the tank's fields in key order.
            userStrs_sorted: OrderedDict of user-string key -> display name,
                in key order.
        NOTE(review): the original annotated the return as `dict`, but the code
        clearly returns a 2-tuple; annotation corrected here.

    Exits the process (sys.exit(1)) on any unexpected error.
    """
    tankopedia = {}
    userStrs = {}
    try:
        for tank in tanklist:
            # Replace the raw WG user-string id with the resolved display name.
            tank['name'] = tank_strs[tank['userStr']]
            #userStrs[tank['userStr'].split(':')[1]] = tank['name']
            tank.pop('userStr', None)
            # Re-emit the tank dict with keys in sorted order for stable output.
            tank_tmp = collections.OrderedDict()
            for key in sorted(tank.keys()):
                tank_tmp[key] = tank[key]
            tankopedia[str(tank['tank_id'])] = tank_tmp
        for tank_str in tank_strs:
            skip = False
            # User-string ids look like 'prefix:key'; only the key part is kept.
            key = tank_str.split(':')[1]
            bu.debug('Tank string: ' + key + ' = ' + tank_strs[tank_str])
            # Filter out module strings (chassis/turret), internal strings and
            # short-name variants -- only full tank names are wanted.
            re_strs = [r'^Chassis_', r'^Turret_', r'^_', r'_short$']
            for re_str in re_strs:
                p = re.compile(re_str)
                if p.match(key):
                    skip = True
                    break
            if skip:
                continue
            userStrs[key] = tank_strs[tank_str]
        # sorting
        tankopedia_sorted = collections.OrderedDict()
        # Sort tank ids numerically (key=int), but keep string keys in the output.
        for tank_id in sorted(tankopedia.keys(), key=int):
            tankopedia_sorted[str(tank_id)] = tankopedia[str(tank_id)]
        userStrs_sorted = collections.OrderedDict()
        for userStr in sorted(userStrs.keys()):
            userStrs_sorted[userStr] = userStrs[userStr]
        # bu.debug('Tank strings: ' + str(len(userStrs_sorted)))
    except Exception as err:
        bu.error(err)
        sys.exit(1)
    return tankopedia_sorted, userStrs_sorted
async def replayWorker(queue: asyncio.Queue, workerID: int, account_id: int, priv: bool = False):
    """Async Worker to process the replay queue.

    Pulls (filename, N, title) items from the queue, skips replays whose
    valid '<filename>.json' result already exists on disk, otherwise posts
    the replay file to WoTinspector and saves the JSON response next to it.

    Args:
        queue: queue of (replay filename, sequence number, title) tuples.
        workerID: numeric id of this worker, used in debug output.
        account_id: WG account_id of the uploader.
        priv: if True, the replay is posted as private on WoTinspector.

    Updates the module-level SKIPPED_N / ERROR_N counters. Runs until
    cancelled; every fetched item is acknowledged with queue.task_done().
    """
    global SKIPPED_N
    global ERROR_N
    while True:
        item = await queue.get()
        filename = item[0]
        N = item[1]
        title = item[2]
        # A previously-posted replay leaves its JSON response beside the file.
        replay_json_fn = filename + '.json'
        msg_str = 'Replay[' + str(N) + ']: '
        #bu.debug(msg_str + replay_json_fn)
        try:
            if os.path.isfile(replay_json_fn):
                async with aiofiles.open(replay_json_fn) as fp:
                    replay_json = json.loads(await fp.read())
                    #if replay_json['status'] == 'ok':
                    if wi.chk_JSON_replay(replay_json):
                        bu.verbose_std(msg_str + title + ' has already been posted. Skipping.' )
                    else:
                        # Stale/invalid result file: remove it. NOTE(review): the
                        # replay is still counted as skipped and NOT re-posted on
                        # this pass -- confirm that is the intended behavior.
                        os.remove(replay_json_fn)
                        bu.debug(msg_str + "Replay JSON not valid: Deleting " + replay_json_fn, id=workerID)
                SKIPPED_N += 1
                queue.task_done()
                continue
        except asyncio.CancelledError as err:
            # Propagate cancellation so the task actually stops.
            raise err
        except Exception as err:
            bu.error(msg_str + 'Unexpected error: ' + str(type(err)) + ' : ' + str(err))
        try:
            #bu.debug('Opening file [' + str(N) +']: ' + filename)
            async with aiofiles.open(filename, 'rb') as fp:
                # Only the basename is sent to WoTinspector (and used in logs).
                filename = os.path.basename(filename)
                bu.debug(msg_str + 'File: ' + filename)
                json_resp = await wi.post_replay(await fp.read(), filename, account_id, title, priv, N)
                if json_resp != None:
                    if (await bu.save_JSON(replay_json_fn, json_resp)):
                        if wi.chk_JSON_replay(json_resp):
                            # bu.debug() returns falsy outside debug mode, so the
                            # verbose line is printed only when NOT debugging.
                            if not bu.debug(msg_str + 'Replay saved OK: ' + filename):
                                bu.verbose_std(msg_str + title + ' posted')
                        else:
                            bu.warning(msg_str + 'Replay file is not valid: ' + filename)
                            ERROR_N += 1
                    else:
                        bu.error(msg_str + 'Error saving replay: ' + filename)
                        ERROR_N += 1
                else:
                    bu.error(msg_str + 'Replay file is not valid: ' + filename)
                    ERROR_N += 1
        except Exception as err:
            bu.error(msg_str + 'Unexpected Exception: ' + str(type(err)) + ' : ' + str(err) )
        bu.debug(msg_str + 'Marking task done')
        queue.task_done()
    # Unreachable: the loop only exits via cancellation.
    return None
async def replayReader(queue: asyncio.Queue, readerID: int, args: argparse.Namespace): """Async Worker to process the replay queue""" #global SKIPPED_N account_id = args.accountID results = [] playerstanks = set() try: while True: item = await queue.get() filename = item[0] replayID = item[1] try: replay_json = await bu.readJSON(filename, wi.chkJSONreplay) if replay_json == None: bu.verbose('Replay[' + str(replayID) + ']: ' + filename + ' is invalid. Skipping.') #SKIPPED_N += 1 queue.task_done() continue ## Read the replay JSON bu.debug('[' + str(readerID) + '] reading replay: ' + filename) result = await readReplayJSON(replay_json, args) if result == None: bu.error('[' + str(readerID) + '] Error reading replay: ' + filename) queue.task_done() continue if (account_id != None): playerstanks.update(set(result['allies'])) playerstanks.update(set(result['enemies'])) results.append(result) bu.debug('Marking task ' + str(replayID) + ' done') except Exception as err: bu.error(str(err)) queue.task_done() except asyncio.CancelledError: return results, playerstanks return None
async def processTankStats(playerstanks, N_workers: int) -> dict:
    """Fetch stats for a set of player/tank pairs using worker tasks.

    Queues every player/tank id, runs N_workers statWorker() tasks until the
    queue is drained, cancels them, and merges their partial stat dicts.

    Args:
        playerstanks: iterable of player/tank stat ids to look up.
        N_workers: number of concurrent statWorker tasks to run.

    Returns:
        dict mapping each player/tank id to its fetched stats.
    """
    ## Create queue of player-tank pairs to find stats for
    statsQ = asyncio.Queue()
    bu.debug('Create player/tank stats queue')
    for pair in playerstanks:
        await statsQ.put(pair)
    # Process player WR / battle stats
    bu.debug('Starting player/tank stats queue workers')
    stats_tasks = [
        asyncio.create_task(statWorker(statsQ, worker_id))
        for worker_id in range(N_workers)
    ]
    ## let the workers to process
    await statsQ.join()
    for worker_task in stats_tasks:
        worker_task.cancel()
    # Each cancelled worker returns its partial stats dict; merge them all.
    player_stats = {}
    for partial_stats in await asyncio.gather(*stats_tasks):
        player_stats.update(partial_stats)
    return player_stats
def getTitle(replayfile: str, title: str, i: int) -> str:
    """Derive a replay title.

    If title is None, the title is parsed from the replay filename using the
    global WG tank/map user-string tables ('<Tank> @ <Map>', falling back to
    the filename without its '.wotbreplay' suffix). Otherwise the caller's
    title is used with any 'NN' placeholder replaced by the sequence number.

    Args:
        replayfile: path to the .wotbreplay file.
        title: caller-supplied title template, or None for automatic naming.
        i: sequence number substituted for the 'NN' placeholder.

    Returns:
        The resolved title string.
    """
    global wg
    if title == None:
        try:
            filename = os.path.basename(replayfile)
            bu.debug(filename)
            map_usrStrs = wg.get_map_user_strs()
            tank_userStrs = wg.get_tank_user_strs()
            #p = re.compile('\\d{8}_\\d{4}_(.+)_(' + '|'.join(map_usrStrs) + ')(?:-\\d)?\\.wotbreplay$')
            # update 6.2 changed the file name format. Bug fixed 2019-09-09 Jylpah
            p = re.compile('\\d{8}_\\d{4}_.*?(' + '|'.join(tank_userStrs) + ')_(' + '|'.join(map_usrStrs) + ')(?:-\\d)?\\.wotbreplay$')
            m = p.match(filename)
            if (m != None):
                if wg.tanks != None:
                    tank = m.group(1)
                    # Resolve the tank code to a display name from the Tankopedia.
                    if tank in wg.tanks['userStr']:
                        tank = wg.tanks['userStr'][tank]
                    else:
                        bu.error('Tank code: "' + tank + '" not found from Tankopedia (tanks.json)')
                else:
                    tank = m.group(1)
                blitz_map = m.group(2)
                if blitz_map in wg.maps:
                    blitz_map = wg.maps[blitz_map]
                else:
                    bu.error('Mapcode: "' + blitz_map + '" not found from map database (maps.json)')
                title = tank + ' @ ' + blitz_map
            else:
                # Filename does not match the expected pattern: use it verbatim
                # minus the extension.
                title = re.sub('\\.wotbreplay$', '', filename)
        except Exception as err:
            bu.error(err)
    else:
        # BUG FIX: str.replace() returns a new string; the original discarded
        # the result, so 'NN' was never substituted. Assign it back.
        title = title.replace('NN', str(i))
    return title
def getTitle(replayfile: str, title: str = None, i: int = 0) -> str:
    """Derive a replay title.

    If title is None, the title is read from the replay archive's embedded
    'meta.json' ('<Tank> @ <Map>', falling back to the filename without its
    '.wotbreplay' suffix). Otherwise the caller's title is used with any
    'NN' placeholder replaced by the sequence number.

    Args:
        replayfile: path to the .wotbreplay file (a zip archive).
        title: caller-supplied title template, or None for automatic naming.
        i: sequence number substituted for the 'NN' placeholder.

    Returns:
        The resolved title string.
    """
    global wg
    if title == None:
        try:
            filename = os.path.basename(replayfile)
            bu.debug(filename)
            tank = None
            map_name = None
            # .wotbreplay files are zip archives containing 'meta.json'.
            with zipfile.ZipFile(replayfile, 'r') as archive:
                # bu.debug('Replay file: ' + replayfile + ' opened')
                with io.TextIOWrapper(archive.open('meta.json')) as meta:
                    # bu.debug('Replay file\'s metadata: opened')
                    try:
                        metadata_json = json.load(meta)
                        #player = metadata_json['playerName']
                        tank = wg.tank_str2name(metadata_json['playerVehicleName'])
                        map_name = wg.get_map(metadata_json['mapName'])
                        bu.debug('Tank: ' + tank + ' Map: ' + map_name)
                    except Exception as err:
                        bu.error(exception = err)
            if (tank != None) and (map_name != None):
                title = tank + ' @ ' + map_name
            else:
                # Metadata unavailable: use the filename minus the extension.
                title = re.sub('\\.wotbreplay$', '', filename)
        except Exception as err:
            bu.error(err)
    else:
        # BUG FIX: str.replace() returns a new string; the original discarded
        # the result, so 'NN' was never substituted. Assign it back.
        title = title.replace('NN', str(i))
    bu.debug('Returning: ' + title)
    return title
async def replay_reader(queue: asyncio.Queue, readerID: int, args : argparse.Namespace): """Async Worker to process the replay queue""" #global SKIPPED_N results = [] playerstanks = set() try: while True: item = await queue.get() replay_json = item[0] replayID = item[1] replay_file = item[2] try: msg_str = 'Replay[' + str(replayID) + ']: ' if replay_json == None: bu.warning(msg_str + 'Invalid replay. Skipping: ' + (replay_file if replay_file != None else '') ) #SKIPPED_N += 1 queue.task_done() continue ## Read the replay JSON bu.debug('reading replay', readerID) result = await read_replay_JSON(replay_json, args) bu.print_progress() if result == None: bu.warning(msg_str + 'Invalid replay' + (replay_file if replay_file != None else '') ) queue.task_done() continue # if (account_id != None): playerstanks.update(set(result['allies'])) playerstanks.update(set(result['enemies'])) playerstanks.update(set([ result['player'] ])) results.append(result) bu.debug('Marking task ' + str(replayID) + ' done') except Exception as err: bu.error(exception=err) queue.task_done() except (asyncio.CancelledError, concurrent.futures.CancelledError): bu.debug( str(len(results)) + ' replays, ' + str(len(playerstanks)) + ' player/tanks', readerID) return results, playerstanks return None
def calc_team_stats(results: list, player_stats: dict, stat_id_map: dict, args: argparse.Namespace) -> list:
    """Calculate team stats.

    For each battle result, averages the fetched per-player stats over the
    allied and enemy teams and attaches them to the result as
    'allies_<stat>' / 'enemies_<stat>' / 'player_<stat>' fields, plus the
    'team_result' survivor score and bookkeeping counters.

    Args:
        results: list of per-battle result dicts from read_replay_JSON().
        player_stats: stat dicts keyed by (remapped) player/tank stat id.
        stat_id_map: maps raw stat ids to the keys used in player_stats.
        args: parsed command-line arguments (unused here).

    Returns:
        The list of enriched result dicts; battles that raise are logged
        and left out.
    """
    return_list = []
    ## Bug here??
    #stat_types = player_stats[list(player_stats.keys())[0]].keys()
    stat_types = list()
    stat_types = BattleRecord.get_team_fields()
    for result in results:
        try:
            missing_stats = 0
            n_players = len(result['allies']) + len(result['enemies'])
            # Per-stat counters: a player only contributes to a stat's average
            # if that stat is present (non-None) for them.
            n_allies = collections.defaultdict(def_value_zero)
            allies_stats = collections.defaultdict(def_value_zero)
            #bu.debug('Processing Allies')
            for ally in result['allies']:
                # Player itself is not in 'allies': see read_replay_JSON()
                ally_mapped = stat_id_map[ally]
                if ally_mapped not in player_stats.keys():
                    missing_stats += 1
                    continue
                for stat in stat_types:
                    if player_stats[ally_mapped][stat] != None:
                        allies_stats[stat] += player_stats[ally_mapped][stat]
                        n_allies[stat] += 1
            #bu.debug('Processing Enemies')
            n_enemies = collections.defaultdict(def_value_zero)
            enemies_stats = collections.defaultdict(def_value_zero)
            for enemy in result['enemies']:
                enemy_mapped = stat_id_map[enemy]
                if enemy_mapped not in player_stats:
                    missing_stats += 1
                    continue
                for stat in stat_types:
                    if player_stats[enemy_mapped][stat] != None:
                        enemies_stats[stat] += player_stats[enemy_mapped][stat]
                        n_enemies[stat] += 1
            #bu.debug('Processing avg stats')
            # The protagonist's own stats are attached as 'player_<stat>'.
            player_mapped = stat_id_map[result['player']]
            if player_mapped not in player_stats:
                missing_stats += 1
            else:
                for stat in stat_types:
                    if player_stats[player_mapped][stat] != None:
                        result['player_' + str(stat)] = player_stats[player_mapped][stat]
            for stat in stat_types:
                if n_allies[stat] > 0:
                    result['allies_' + str(stat)] = allies_stats[stat] / n_allies[stat]
                else:
                    bu.debug('No allies stats for: ' + str(result))
                if n_enemies[stat] > 0:
                    result['enemies_' + str(stat)] = enemies_stats[stat] / n_enemies[stat]
                else:
                    bu.debug('No enemies stats for: ' + str(result))
            # Steamroller stats
            result['team_result'] = str(result['allies_survived']) + '-' + str(result['enemies_survived'])
            result[N_PLAYERS] = n_players
            result[MISSING_STATS] = missing_stats
            return_list.append(result)
        except KeyError as err:
            bu.error('Key not found', err)
        except Exception as err:
            bu.error(exception=err)
    return return_list
async def process_player_stats(players, N_workers: int, args : argparse.Namespace, db : motor.motor_asyncio.AsyncIOMotorDatabase) -> dict: """Start stat_workers to retrieve and store players' stats""" ## Create queue of player-tank pairs to find stats for try: statsQ = asyncio.Queue() bu.debug('Create player stats queue: ' + str(len(players)) + ' players') stat_id_map = {} stat_ids = set() bu.set_progress_bar('Fetching player stats', len(players), 25, True) stat_id_map_func = globals()[STAT_FUNC[args.stat_func][0]] for player in players: stat_id_map[player] = stat_id_map_func(player) stat_ids.add(stat_id_map[player]) # create statsQ for stat_id in stat_ids: await statsQ.put(stat_id) # Process player WR / battle stats stats_tasks = [] bu.debug('Starting player stats workers') for i in range(N_workers): bu.debug("Starting worker " + str(i)) stats_tasks.append(asyncio.create_task(stat_worker(statsQ, i, args, db))) bu.debug('Waiting stats workers to finish') await statsQ.join() bu.debug('Cancelling stats workers') for task in stats_tasks: task.cancel() player_stats = {} stat_id_remap = {} bu.debug('Gathering stats worker outputs') for (stats, id_remap) in await asyncio.gather(*stats_tasks): player_stats = {**player_stats, **stats} stat_id_remap = {**stat_id_remap, **id_remap} bu.finish_progress_bar() ## DO REMAPPING stat_id_map = remap_stat_id(stat_id_map, stat_id_remap) bu.debug('Returning player_stats') return (player_stats, stat_id_map) except Exception as err: bu.error(exception=err) return None
async def main(argv):
    """Analyzer entry point.

    Reads configuration, parses command-line arguments, optionally connects
    to MongoDB, then scans replays into a queue, runs replay_reader workers,
    fetches player stats and prints battle/team analyses.
    """
    global wg, wi, WG_APP_ID
    # set the directory for the script
    current_dir = os.getcwd()
    os.chdir(os.path.dirname(sys.argv[0]))
    ## Default options:
    OPT_DB = False
    OPT_EXTENDED = False
    OPT_HIST = False
    OPT_STAT_FUNC = 'player'
    OPT_WORKERS_N = 10
    WG_ACCOUNT = None  # format: nick@server, where server is either 'eu', 'ru', 'na', 'asia' or 'china'.
                       # China is not supported since WG API stats are not available there
    WG_ID = None       # WG account_id in integer format.
                       # WG_ACCOUNT will be used to retrieve the account_id, but it can be set directly too
    # WG_APP_ID = WG_APP_ID
    WG_RATE_LIMIT = 10  ## WG standard. Do not edit unless you have your
                        ## own server app ID, it will REDUCE the performance
    ## VERY unlikely you have a DB set up
    DB_SERVER = 'localhost'
    DB_PORT = 27017
    DB_SSL = False
    DB_CERT_REQ = ssl.CERT_NONE
    DB_AUTH = 'admin'
    DB_NAME = 'BlitzStats'
    DB_USER = '******'
    DB_PASSWD = 'PASSWORD'
    DB_CERT = None
    DB_CA = None
    try:
        ## Read config
        if os.path.isfile(FILE_CONFIG):
            config = configparser.ConfigParser()
            config.read(FILE_CONFIG)
            try:
                if 'OPTIONS' in config.sections():
                    configOptions = config['OPTIONS']
                    # WG account id of the uploader:
                    # Find it here: https://developers.wargaming.net/reference/all/wotb/account/list/
                    OPT_DB = configOptions.getboolean('opt_DB', OPT_DB)
                    OPT_EXTENDED = configOptions.getboolean('opt_analyzer_extended', OPT_EXTENDED)
                    OPT_HIST = configOptions.getboolean('opt_analyzer_hist', OPT_HIST)
                    OPT_STAT_FUNC = configOptions.get('opt_analyzer_stat_func', fallback=OPT_STAT_FUNC)
                    OPT_WORKERS_N = configOptions.getint('opt_analyzer_workers', OPT_WORKERS_N)
            except (KeyError, configparser.NoSectionError) as err:
                bu.error(exception=err)
            try:
                if 'ANALYZER' in config.sections():
                    configAnalyzer = config['ANALYZER']
                    histogram_fields_str = configAnalyzer.get('histogram_buckets', None)
                    if histogram_fields_str != None:
                        set_histogram_buckets(json.loads(histogram_fields_str))
            except (KeyError, configparser.NoSectionError) as err:
                bu.error(exception=err)
            try:
                if 'WG' in config.sections():
                    configWG = config['WG']
                    WG_ID = configWG.getint('wg_id', WG_ID)
                    WG_ACCOUNT = configWG.get('wg_account', WG_ACCOUNT)
                    WG_APP_ID = configWG.get('wg_app_id', WG_APP_ID)
                    WG_RATE_LIMIT = configWG.getint('wg_rate_limit', WG_RATE_LIMIT)
            except (KeyError, configparser.NoSectionError) as err:
                bu.error(exception=err)
            try:
                if 'DATABASE' in config.sections():
                    configDB = config['DATABASE']
                    DB_SERVER = configDB.get('db_server', DB_SERVER)
                    DB_PORT = configDB.getint('db_port', DB_PORT)
                    DB_SSL = configDB.getboolean('db_ssl', DB_SSL)
                    DB_CERT_REQ = configDB.getint('db_ssl_req', DB_CERT_REQ)
                    DB_AUTH = configDB.get('db_auth', DB_AUTH)
                    DB_NAME = configDB.get('db_name', DB_NAME)
                    DB_USER = configDB.get('db_user', DB_USER)
                    DB_PASSWD = configDB.get('db_password', DB_PASSWD)
                    DB_CERT = configDB.get('db_ssl_cert_file', DB_CERT)
                    DB_CA = configDB.get('db_ssl_ca_file', DB_CA)
            except (KeyError, configparser.NoSectionError) as err:
                bu.error(exception=err)
        # Command-line arguments (config values serve as defaults).
        parser = ErrorCatchingArgumentParser(description='Analyze Blitz replay JSON files from WoTinspector.com. Use \'upload_wotb_replays.py\' to upload the replay files first.')
        parser.add_argument('--output', default='plain', choices=['plain', 'db'], help='Select output mode: plain text or database')
        parser.add_argument('-id', dest='account_id', type=int, default=WG_ID, help='WG account_id to analyze')
        parser.add_argument('-a', '--account', type=str, default=WG_ACCOUNT, help='WG account nameto analyze. Format: ACCOUNT_NAME@SERVER')
        parser.add_argument('-x', '--extended', action='store_true', default=OPT_EXTENDED, help='Print Extended stats')
        parser.add_argument('-X', '--extra_categories', choices=BattleRecordCategory.get_extra_categories(), default=None, nargs='*', help='Print Extended categories')
        parser.add_argument('--hist', action='store_true', default=OPT_HIST, help='Print player histograms (WR/battles)')
        parser.add_argument('--stat_func', default=OPT_STAT_FUNC, choices=STAT_FUNC.keys(), help='Select how to calculate for ally/enemy performance: tank-tier stats, global player stats')
        parser.add_argument('-u', '--url', action='store_true', default=False, help='Print replay URLs')
        parser.add_argument('--tankfile', type=str, default='tanks.json', help='JSON file to read Tankopedia from. Default is "tanks.json"')
        parser.add_argument('--mapfile', type=str, default='maps.json', help='JSON file to read Blitz map names from. Default is "maps.json"')
        parser.add_argument('-o', '--outfile', type=str, default='-', metavar="OUTPUT", help='File to write results. Default STDOUT')
        parser.add_argument('--db', action='store_true', default=OPT_DB, help='Use DB - You are unlikely to have it')
        parser.add_argument('--filters', type=str, default=None, help='Filters for DB based analyses. MongoDB find() filter JSON format.')
        parser.add_argument('-d', '--debug', action='store_true', default=False, help='Debug mode')
        parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Verbose mode')
        parser.add_argument('-s', '--silent', action='store_true', default=False, help='Silent mode')
        parser.add_argument('files', metavar='FILE1 [FILE2 ...]', type=str, nargs='+', help='Files/dirs to read. Use \'-\' for STDIN, "db:" for database')
        try:
            args = parser.parse_args()
        except Exception as err:
            raise
        bu.set_log_level(args.silent, args.verbose, args.debug)
        bu.set_progress_step(250)  # Set the frequency of the progress dots.
        wg = WG(WG_APP_ID, args.tankfile, args.mapfile, stats_cache=True, rate_limit=WG_RATE_LIMIT)
        wi = WoTinspector(rate_limit=10)
        if args.account != None:
            args.account_id = await wg.get_account_id(args.account)
            bu.debug('WG account_id: ' + str(args.account_id))
        BattleRecord.set_fields(args.extended)
        #### Connect to MongoDB (TBD)
        bu.debug('DB_SERVER: ' + DB_SERVER)
        bu.debug('DB_PORT: ' + str(DB_PORT))
        # NOTE(review): operator precedence -- this parses as
        # ('DB_SSL: ' + "True") if DB_SSL else "False", so with DB_SSL False
        # it logs just "False" without the prefix. Likely unintended.
        bu.debug('DB_SSL: ' + "True" if DB_SSL else "False")
        bu.debug('DB_AUTH: ' + DB_AUTH)
        bu.debug('DB_NAME: ' + DB_NAME)
        client = None
        db = None
        if args.db:
            try:
                client = motor.motor_asyncio.AsyncIOMotorClient(DB_SERVER, DB_PORT, authSource=DB_AUTH, username=DB_USER, password=DB_PASSWD, ssl=DB_SSL, ssl_cert_reqs=DB_CERT_REQ, ssl_certfile=DB_CERT, tlsCAFile=DB_CA)
                db = client[DB_NAME]
                # With a DB the replays define the accounts; no single protagonist.
                args.account_id = None
                bu.debug('Database connection initiated')
            except Exception as err:
                bu.error("Could no initiate DB connection: Disabling DB", err)
                args.db = False
                pass
        else:
            bu.debug('No DB in use')
        # rebase file arguments due to moving the working directory to the script location
        args.files = bu.rebase_file_args(current_dir, args.files)
        try:
            replayQ = asyncio.Queue(maxsize=1000)
            reader_tasks = []
            # Make replay Queue
            scanner_task = asyncio.create_task(mk_replayQ(replayQ, args, db))
            bu.debug('Replay scanner started')
            # Start tasks to process the Queue
            for i in range(OPT_WORKERS_N):
                reader_tasks.append(asyncio.create_task(replay_reader(replayQ, i, args)))
                bu.debug('ReplayReader ' + str(i) + ' started')
            bu.debug('Waiting for the replay scanner to finish')
            await asyncio.wait([scanner_task])
            # bu.debug('Scanner finished. Waiting for replay readers to finish the queue')
            await replayQ.join()
            await asyncio.sleep(0.1)
            bu.debug('Replays read. Cancelling Readers and analyzing results')
            # Readers return their accumulated (results, playerstanks) on cancel.
            for task in reader_tasks:
                task.cancel()
            await asyncio.sleep(0.1)
            results = []
            players = set()
            for res in await asyncio.gather(*reader_tasks):
                results.extend(res[0])
                players.update(res[1])
            if len(players) == 0:
                raise Exception("No players found to fetch stats for. No replays found?")
            (player_stats, stat_id_map) = await process_player_stats(players, OPT_WORKERS_N, args, db)
            bu.verbose('')
            bu.debug('Number of player stats: ' + str(len(player_stats)))
            teamresults = calc_team_stats(results, player_stats, stat_id_map, args)
            process_battle_results(teamresults, args)
            if args.hist:
                print('\nPlayer Histograms______', end='', flush=True)
                process_player_dist(results, player_stats, stat_id_map)
            bu.debug('Finished. Cleaning up..................')
        except Exception as err:
            bu.error(exception=err)
    except UserWarning as err:
        bu.verbose(str(err))
        pass
    except Exception as err:
        bu.error(exception=err)
    finally:
        ## Need to close the aiohttp.session since Python destructors do not support async methods...
        if wg != None:
            await wg.close()
        if wi != None:
            await wi.close()
    return None
async def read_replay_JSON(replay_json: dict, args: argparse.Namespace) -> dict:
    """Parse replay JSON dict.

    Extracts the battle summary into a flat result dict from the
    protagonist's perspective: if args.account_id is on the enemy team,
    teams and battle_result are swapped; platoon buddies are removed from
    ally stats; survivor counts, battle tier and derived flags are added.

    Args:
        replay_json: full replay JSON as returned by WoTinspector.
        args: parsed command-line arguments (account_id, url used here).

    Returns:
        The flat result dict, or None when the replay is invalid, the
        requested account is not in the battle, or parsing fails.

    Side effects: increments the global replay counter REPLAY_I and may
    mutate replay_json in place (team swap).
    """
    global REPLAY_I
    account_id = args.account_id
    url = args.url
    #db = args.db
    result = {}
    try:
        # bu.debug(str(replay_json))
        if not wi.chk_JSON_replay(replay_json):
            bu.debug('Invalid replay')
            return None
        result['battle_start_timestamp'] = int(replay_json['data']['summary']['battle_start_timestamp'])
        # TBD...
        protagonist = int(replay_json['data']['summary']['protagonist'])
        if account_id == None:
            # No account requested: analyze from the replay owner's perspective.
            account_id = replay_json['data']['summary']['protagonist']
        elif replay_json['data']['summary']['protagonist'] != account_id:
            if account_id in replay_json['data']['summary']['enemies']:
                # switch the teams...
                # Flip win/loss too (2 appears to denote a draw and is kept --
                # NOTE(review): confirm battle_result semantics: 0=loss, 1=win, 2=draw).
                if replay_json['data']['summary']['battle_result'] != 2:
                    if replay_json['data']['summary']['battle_result'] == 0:
                        replay_json['data']['summary']['battle_result'] = 1
                    else:
                        replay_json['data']['summary']['battle_result'] = 0
                tmp = replay_json['data']['summary']['enemies']
                replay_json['data']['summary']['enemies'] = replay_json['data']['summary']['allies']
                replay_json['data']['summary']['allies'] = tmp
            elif account_id not in replay_json['data']['summary']['allies']:
                # account_id looked for but not found in teams
                bu.debug('Replay ' + replay_json['data']['summary']['title'] + ' does not have account_id ' + str(account_id) + '. Skipping.')
                return None
        if url:
            result['url'] = replay_json['data']['view_url']
        for key in replay_summary_flds:
            result[key] = replay_json['data']['summary'][key]
    except Exception as err:
        bu.error(exception=err)
        return None
    try:
        bu.debug('Part 2')
        result['allies'] = set()
        result['enemies'] = set()
        result['allies_survived'] = 0   # for steamroller stats
        result['enemies_survived'] = 0  # for steamroller stats
        btl_duration = 0
        btl_tier = 0
        protagonist_tank = None
        for player in replay_json['data']['summary']['details']:
            # Battle duration := the longest any player stayed alive.
            btl_duration = max(btl_duration, player['time_alive'])
            player_tank_tier = wg.get_tank_data(player['vehicle_descr'], 'tier')
            # Battle tier := the highest tank tier present.
            btl_tier = max(btl_tier, player_tank_tier)
            if (protagonist != None) and (player['dbid'] == protagonist):
                protagonist_tank = player['vehicle_descr']
            if player['dbid'] == account_id:
                # player itself is not put in results['allies']
                tmp = {}
                tmp['account_id'] = account_id
                tmp['tank_id'] = player['vehicle_descr']
                tmp['tank_tier'] = player_tank_tier
                tmp['tank_name'] = wg.get_tank_data(tmp['tank_id'], 'name')
                tmp['squad_index'] = player['squad_index']
                for key in replay_details_flds:
                    tmp[key] = player[key]
                if player['hitpoints_left'] == 0:
                    tmp['survived'] = 0
                    tmp['destroyed'] = 1
                else:
                    tmp['survived'] = 1
                    tmp['destroyed'] = 0
                    result['allies_survived'] += 1
                for key in tmp.keys():
                    result[key] = tmp[key]
            else:
                tmp_account_id = player['dbid']
                tmp_tank_id = player['vehicle_descr']
                tmp_battletime = result['battle_start_timestamp']
                # death_reason == -1 means the player was not destroyed.
                if player['death_reason'] == -1:
                    survived = True
                else:
                    survived = False
                if player['dbid'] in replay_json['data']['summary']['allies']:
                    result['allies'].add(get_stat_id(tmp_account_id, tmp_tank_id, tmp_battletime))
                    if survived:
                        result['allies_survived'] += 1
                else:
                    result['enemies'].add(get_stat_id(tmp_account_id, tmp_tank_id, tmp_battletime))
                    if survived:
                        result['enemies_survived'] += 1
        ## Rather use 'player' than incomprehensible 'protagonist'...
        result['player'] = get_stat_id(protagonist, protagonist_tank, result['battle_start_timestamp'])
        bu.debug('Player stats_id: ' + result['player'])
        # remove platoon buddy from stats
        if result['squad_index'] != None:
            for player in replay_json['data']['summary']['details']:
                bu.debug(str(player))
                if (player['squad_index'] == result['squad_index']) and (player['dbid'] in replay_json['data']['summary']['allies']) and (player['dbid'] != account_id):
                    # platoon buddy found
                    tmp_account_id = player['dbid']
                    tmp_tank_id = player['vehicle_descr']
                    #tmp_tank_tier = str(wg.get_tank_tier(tmp_tank_id))
                    tmp_battletime = result['battle_start_timestamp']
                    # platoon buddy removed from stats
                    result['allies'].remove(get_stat_id(tmp_account_id, tmp_tank_id, tmp_battletime))
                    break
        result['time_alive%'] = result['time_alive'] / btl_duration
        result['battle_tier'] = btl_tier
        result['top_tier'] = 1 if (result['tank_tier'] == btl_tier) else 0
        result['win'] = 1 if result['battle_result'] == 1 else 0
        REPLAY_I += 1
        result['battle_i'] = REPLAY_I
        bu.debug(str(result))
        return result
    except KeyError as err:
        bu.error('Key not found', err)
    except Exception as err:
        bu.error(exception=err)
    return None
async def main(argv):
    """Uploader entry point.

    Parses command-line arguments, scans the given replay files into a
    queue and runs replayWorker tasks to post them to WoTinspector.com.
    """
    global wg, wi
    parser = argparse.ArgumentParser(description='Post replays(s) to WoTinspector.com and retrieve battle data')
    parser.add_argument('--output', default='single', choices=['file', 'files', 'db'] , help='Select output mode: single/multiple files or database')
    parser.add_argument('-id', dest='accountID', type=int, default=WG_ID, help='WG account_id')
    parser.add_argument('-a', '--account', dest='account', type=str, default=None, help='Uploader\'s WG account name. Format: ACCOUNT_NAME@SERVER')
    parser.add_argument('-t', '--title', type=str, default=None, help='Title for replays. Use NN for continous numbering. Default is filename-based numbering')
    parser.add_argument('-p', '--private', dest="private", action='store_true', default=False, help='Set replays private on WoTinspector.com')
    parser.add_argument('--tasks', dest='N_tasks', type=int, default=10, help='Number of worker threads')
    parser.add_argument('--tankopedia', type=str, default='tanks.json', help='JSON file to read Tankopedia from. Default: "tanks.json"')
    parser.add_argument('--mapfile', type=str, default='maps.json', help='JSON file to read Blitz map names from. Default: "maps.json"')
    parser.add_argument('-d', '--debug', action='store_true', default=False, help='Debug mode')
    parser.add_argument('-v', '--verbose', action='store_true', default=True, help='Verbose mode')
    parser.add_argument('-s', '--silent', action='store_true', default=False, help='Silent mode')
    parser.add_argument('files', metavar='FILE1 [FILE2 ...]', type=str, nargs='+', help='Files to read. Use \'-\' for STDIN"')
    args = parser.parse_args()
    bu.setVerbose(args.verbose)
    bu.setDebug(args.debug)
    if args.silent:
        bu.setVerbose(False)
    wg = WG(WG_appID, args.tankopedia, args.mapfile)
    wi = WoTinspector()
    if args.account != None:
        # Resolve the account name to a numeric WG account_id.
        args.accountID = await wg.getAccountID(args.account)
        bu.debug('WG account_id: ' + str(args.accountID))
    if args.accountID == None:
        args.accountID = 0
    try:
        queue = asyncio.Queue()
        tasks = []
        # Make replay Queue
        tasks.append(asyncio.create_task(mkReplayQ(queue, args.files, args.title)))
        # Start tasks to process the Queue
        for i in range(args.N_tasks):
            tasks.append(asyncio.create_task(replayWorker(queue, i, args.accountID, args.private)))
            bu.debug('Task ' + str(i) + ' started')
        bu.debug('Waiting for the replay scanner to finish')
        # tasks[0] is the scanner created above.
        await asyncio.wait([tasks[0]])
        bu.debug('Scanner finished. Waiting for workers to finish queue')
        await queue.join()
        bu.debug('Cancelling workers')
        for task in tasks:
            task.cancel()
        bu.debug('Waiting for workers to cancel')
        await asyncio.gather(*tasks, return_exceptions=True)
        bu.verbose(str(REPLAY_N) + ' replays: ' + str(REPLAY_N - SKIPPED_N) + ' uploaded, ' + str(SKIPPED_N) + ' skipped')
    except KeyboardInterrupt:
        print('Ctrl-c pressed ...')
        sys.exit(1)
    finally:
        ## Need to close the aiohttp.session since Python destructors do not support async methods...
        await wg.session.close()
        await wi.close()
    return None
async def main(argv):
    """Uploader entry point (config-file variant).

    Reads worker/WG settings from the config file, parses command-line
    arguments, scans the given replay files into a queue and runs
    replayWorker tasks to post them to WoTinspector.com.
    """
    global wg, wi
    # set the directory for the script
    os.chdir(os.path.dirname(sys.argv[0]))
    ## Read config
    # NOTE(review): unlike the analyzer variant, this reads the config without
    # checking the file/sections exist first -- a missing FILE_CONFIG or
    # section raises KeyError here.
    config = configparser.ConfigParser()
    config.read(FILE_CONFIG)
    configOptions = config['OPTIONS']
    OPT_WORKERS_N = configOptions.getint('opt_uploader_workers', 5)
    configWG = config['WG']
    # WG account id of the uploader:
    # Find it here: https://developers.wargaming.net/reference/all/wotb/account/list/
    WG_ID = configWG.getint('wg_id', None)
    ## WG API Rules limit 10 request / sec. Higher rate of requests will return errors ==> extra delay
    WG_RATE_LIMIT = configWG.getint('wg_rate_limit', 10)
    parser = argparse.ArgumentParser(
        description=
        'Post replays(s) to WoTinspector.com and retrieve battle data')
    parser.add_argument('-id', dest='accountID', type=int, default=WG_ID, help='WG account_id')
    parser.add_argument(
        '-a', '--account', dest='account', type=str, default=None,
        help='Uploader\'s WG account name. Format: ACCOUNT_NAME@SERVER')
    parser.add_argument(
        '-t', '--title', type=str, default=None,
        help=
        'Title for replays. Use NN for continous numbering. Default is filename-based numbering'
    )
    parser.add_argument('-p', '--private', dest="private", action='store_true', default=False, help='Set replays private on WoTinspector.com')
    parser.add_argument(
        '--tankopedia', type=str, default='tanks.json',
        help='JSON file to read Tankopedia from. Default: "tanks.json"')
    parser.add_argument(
        '--mapfile', type=str, default='maps.json',
        help='JSON file to read Blitz map names from. Default: "maps.json"')
    parser.add_argument('-d', '--debug', action='store_true', default=False, help='Debug mode')
    parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Verbose mode')
    parser.add_argument('-s', '--silent', action='store_true', default=False, help='Silent mode')
    parser.add_argument('files', metavar='FILE1 [FILE2 ...]', type=str, nargs='+', help='Files to read. Use \'-\' for STDIN"')
    args = parser.parse_args()
    bu.set_verbose(args.verbose)
    bu.set_log_level(args.silent, args.verbose, args.debug)
    wg = WG(WG_appID, args.tankopedia, args.mapfile)
    wi = WoTinspector(rate_limit=WG_RATE_LIMIT)
    if args.account != None:
        # Resolve the account name to a numeric WG account_id.
        args.accountID = await wg.get_account_id(args.account)
        bu.debug('WG account_id: ' + str(args.accountID))
    if args.accountID == None:
        args.accountID = 0
    try:
        queue = asyncio.Queue()
        tasks = []
        # Make replay Queue
        tasks.append(
            asyncio.create_task(mkReplayQ(queue, args.files, args.title)))
        # Start tasks to process the Queue
        for i in range(OPT_WORKERS_N):
            tasks.append(
                asyncio.create_task(
                    replayWorker(queue, i, args.accountID, args.private)))
            bu.debug('Task ' + str(i) + ' started')
        bu.debug('Waiting for the replay scanner to finish')
        # tasks[0] is the scanner created above.
        await asyncio.wait([tasks[0]])
        bu.debug('Scanner finished. Waiting for workers to finish queue')
        await queue.join()
        bu.debug('Cancelling workers')
        for task in tasks:
            task.cancel()
        bu.debug('Waiting for workers to cancel')
        await asyncio.gather(*tasks, return_exceptions=True)
        bu.verbose(
            str(REPLAY_N) + ' replays: ' + str(REPLAY_N - SKIPPED_N - ERROR_N) +
            ' uploaded, ' + str(SKIPPED_N) + ' skipped, ' + str(ERROR_N) + ' errors')
    except KeyboardInterrupt:
        print('Ctrl-c pressed ...')
        sys.exit(1)
    finally:
        ## Need to close the aiohttp.session since Python destructors do not support async methods...
        await wg.session.close()
        await wi.close()
    return None
async def statWorker(queue: asyncio.Queue, workerID: int) -> list: """Worker thread to find stats for player / tank pairs""" # global wg stats = {} try: i = 0 while True: # item = await queue.get_nowait() item = await queue.get() try: i = (i + 1) % 10 if i == 0: bu.printWaiter() acc, tank = item.split(':') account_id = int(acc) tank_id = int(tank) # bu.debug('[' +str(workerID) + '] AccountID: ' + acc + ' TankID: ' + tank) playerTankStat = await wg.getPlayerTankStats( account_id, tank_id, ['all.battles', 'all.wins']) # bu.debug('[' +str(workerID) + '] ' + str(playerTankStat)) playerStat = await wg.getPlayerStats( account_id, ['statistics.all.battles', 'statistics.all.wins']) # bu.debug('[' +str(workerID) + '] ' + str(playerStat)) stats[item] = {} if (playerTankStat == None) or (playerStat == None): stats[item]['win_rate'] = None stats[item]['battles'] = None else: playerTankStat = playerTankStat['all'] playerStat = playerStat['statistics']['all'] battles = playerStat['battles'] stats[item]['battles'] = battles battles_in_tank = playerTankStat['battles'] if battles_in_tank >= STAT_TANK_BATTLE_MIN: stats[item]['win_rate'] = min( playerTankStat['wins'] / battles_in_tank, 1) # To cope with broken stats in WG DB else: stats[item]['win_rate'] = min( playerStat['wins'] / battles, 1) # To cope with broken stats in WG DB bu.debug('[' + str(workerID) + '] Player[' + str(account_id) + '], Tank[' + str(tank_id) + '] : WR : ' + str(stats[item]['win_rate']) + ' Battles: ' + str(battles)) except KeyError as err: bu.error('[' + str(workerID) + '] Key :' + str(err) + ' not found') except Exception as err: bu.error('[' + str(workerID) + '] ' + str(type(err)) + ' : ' + str(err)) queue.task_done() except asyncio.CancelledError: bu.debug('Stats queue[' + str(workerID) + '] is empty') except Exception as err: bu.error(str(err)) return stats
async def main(argv):
    """Entry point of the uploader: read config, parse CLI args and post replay
    files to WoTinspector.com via a queue of concurrent replayWorker tasks."""
    global wg, wi
    current_dir = os.getcwd()
    # set the directory for the script
    os.chdir(os.path.dirname(sys.argv[0]))
    # options defaults; may be overridden by the config file and CLI below
    OPT_WORKERS_N = 5       # number of concurrent replayWorker tasks
    WG_ID = None            # uploader's WG account id
    # format: nick@server, where server is either 'eu', 'ru', 'na', 'asia' or 'china'.
    # China is not supported since WG API stats are not available there
    WG_ACCOUNT = None
    WG_RATE_LIMIT = 10      # requests / sec towards the WG API
    try:
        ## Read config
        if os.path.isfile(FILE_CONFIG):
            bu.debug('Reading config file: ' + FILE_CONFIG)
            config = configparser.ConfigParser()
            config.read(FILE_CONFIG)
            try:
                if 'UPLOADER' in config.sections():
                    configUploader = config['UPLOADER']
                    OPT_WORKERS_N = configUploader.getint('opt_workers', OPT_WORKERS_N)
            except (KeyError, configparser.NoSectionError) as err:
                bu.error(exception=err)
            try:
                if 'WG' in config.sections():
                    configWG = config['WG']
                    # WG account id of the uploader:
                    # Find it here: https://developers.wargaming.net/reference/all/wotb/account/list/
                    WG_ID = configWG.getint('wg_id', WG_ID)
                    WG_ACCOUNT = configWG.get('wg_account', WG_ACCOUNT)
                    ## WG API Rules limit 10 request / sec. Higher rate of requests will return errors ==> extra delay
                    WG_RATE_LIMIT = configWG.getint('wg_rate_limit', WG_RATE_LIMIT)
            except (KeyError, configparser.NoSectionError) as err:
                bu.error(exception=err)
    except Exception as err:
        bu.error(exception=err)
    # Command-line arguments (CLI overrides config-file defaults)
    parser = argparse.ArgumentParser(description='Post replays(s) to WoTinspector.com and retrieve replay data as JSON')
    parser.add_argument('-id', dest='accountID', type=int, default=WG_ID, help='WG account_id')
    parser.add_argument('-a', '--account', dest='account', type=str, default=WG_ACCOUNT, help='Uploader\'s WG account name. Format: ACCOUNT_NAME@SERVER')
    parser.add_argument('-t', '--title', type=str, default=None, help='Title for replays. Use "NN" for continous numbering. Default is automatic naming')
    parser.add_argument('-p', '--private', dest="private", action='store_true', default=False, help='Set replays private on WoTinspector.com')
    parser.add_argument('--tankopedia', type=str, default='tanks.json', help='JSON file to read Tankopedia from. Default: "tanks.json"')
    parser.add_argument('--mapfile', type=str, default='maps.json', help='JSON file to read Blitz map names from. Default: "maps.json"')
    parser.add_argument('-d', '--debug', action='store_true', default=False, help='Debug mode')
    parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Verbose mode')
    parser.add_argument('-s', '--silent', action='store_true', default=False, help='Silent mode')
    parser.add_argument('files', metavar='FILE1 [FILE2 ...]', type=str, nargs='+', help='Files to read. Use \'-\' for STDIN"')
    args = parser.parse_args()
    bu.set_verbose(args.verbose)
    bu.set_log_level(args.silent, args.verbose, args.debug)
    # Module-level API clients used by the worker tasks
    wg = WG(WG_appID, args.tankopedia, args.mapfile)
    wi = WoTinspector(rate_limit=WG_RATE_LIMIT)
    # Resolve account name to a numeric id when given; fall back to 0 (anonymous)
    if args.account != None:
        args.accountID = await wg.get_account_id(args.account)
        bu.debug('WG account_id: ' + str(args.accountID))
    if args.accountID == None:
        args.accountID = 0
    try:
        queue = asyncio.Queue()
        # rebase file arguments due to moving the working directory to the script location
        args.files = bu.rebase_file_args(current_dir, args.files)
        tasks = []
        # Make replay Queue
        tasks.append(asyncio.create_task(mkReplayQ(queue, args.files, args.title)))
        # Start tasks to process the Queue
        for i in range(OPT_WORKERS_N):
            tasks.append(asyncio.create_task(replayWorker(queue, i, args.accountID, args.private)))
            bu.debug('Task ' + str(i) + ' started')
        bu.debug('Waiting for the replay scanner to finish')
        # tasks[0] is the scanner (mkReplayQ); wait for it before joining the queue
        await asyncio.wait([tasks[0]])
        bu.debug('Scanner finished. Waiting for workers to finish queue')
        await queue.join()
        bu.debug('Cancelling workers')
        for task in tasks:
            task.cancel()
        bu.debug('Waiting for workers to cancel')
        await asyncio.gather(*tasks, return_exceptions=True)
        # Summary counters maintained by replayWorker (globals REPLAY_N/SKIPPED_N/ERROR_N)
        bu.verbose_std(str(REPLAY_N) + ' replays: ' + str(REPLAY_N - SKIPPED_N - ERROR_N) + ' uploaded, ' + str(SKIPPED_N) + ' skipped, ' + str(ERROR_N) + ' errors')
    except KeyboardInterrupt:
        print('Ctrl-c pressed ...')
        sys.exit(1)
    finally:
        ## Need to close the aiohttp.session since Python destructors do not support async methods...
        await wg.session.close()
        await wi.close()
    return None
async def stat_worker(queue : asyncio.Queue, workerID: int, args : argparse.Namespace, db : motor.motor_asyncio.AsyncIOMotorDatabase) -> list: """Worker thread to find stats for player / tank pairs""" # global wg stats = {} stat_id_remap = {} stat_db_func = globals()[STAT_FUNC[args.stat_func][1]] stat_wg_func = globals()[STAT_FUNC[args.stat_func][2]] bu.debug("workedID: " + str(workerID) + ' started') try: while True: stat_id = await queue.get() try: bu.debug('Stat_id: ' + stat_id) bu.print_progress() # Analysing player performance based on their stats on the tier tanks they are playing # Try cache first if (stat_id not in stats): stats_tmp = None pruned_stat_id = prune_stat_id(stat_id) if (pruned_stat_id not in stats): stats_tmp = await stat_wg_func(pruned_stat_id, cache_only = True) else: stat_id_remap[stat_id] = pruned_stat_id queue.task_done() continue if (stats_tmp != None): stats[pruned_stat_id] = stats_tmp stat_id_remap[stat_id] = pruned_stat_id queue.task_done() continue # try DB stats[stat_id] = await stat_db_func(db, stat_id) bu.debug('get_db_' + args.stat_func + '_stats returned: ' + str(stats[stat_id]), workerID) # no DB stats found, trying WG AP if (stats[stat_id] == None): stats[pruned_stat_id] = await stat_wg_func(pruned_stat_id) stat_id_remap[stat_id] = pruned_stat_id del stats[stat_id] except KeyError as err: bu.error('Key not found', err, workerID) except Exception as err: bu.error('Unexpected error', err, workerID) queue.task_done() except (asyncio.CancelledError, concurrent.futures._base.CancelledError): bu.debug('Stats queue is empty', workerID) except Exception as err: bu.error(exception=err) finally: # remove empty stats try: keys_2_del = [] for key in stats: if stats[key] == None: keys_2_del.append(key) for key in keys_2_del: del stats[key] except KeyError as err: bu.error('Error in pruning empty stats', err) # bu.debug('Returning stats & exiting') return (stats, stat_id_remap)
async def main(argv):
    """Entry point of the analyzer: parse CLI args, read replay JSONs through a
    queue of replayReader tasks, fetch player stats and print team statistics."""
    global wg, wi
    TASK_N = 7  # number of concurrent replayReader tasks (and stat workers)
    parser = argparse.ArgumentParser(description='ANalyze Blitz replay JSONs from WoTinspector.com')
    parser.add_argument('--output', default='plain', choices=['json', 'plain', 'db'], help='Select output mode: JSON, plain text or database')
    parser.add_argument('-id', dest='accountID', type=int, default=None, help='WG account_id to analyze')
    parser.add_argument('-a', '--account', dest='account', type=str, default=None, help='WG account nameto analyze. Format: ACCOUNT_NAME@SERVER')
    parser.add_argument('-u', '--url', dest= 'url', action='store_true', default=False, help='Print replay URLs')
    parser.add_argument('--tankfile', type=str, default='tanks.json', help='JSON file to read Tankopedia from. Default is "tanks.json"')
    parser.add_argument('--mapfile', type=str, default='maps.json', help='JSON file to read Blitz map names from. Default is "maps.json"')
    parser.add_argument('-o','--outfile', type=str, default='-', metavar="OUTPUT", help='File to write results. Default STDOUT')
    parser.add_argument('-d', '--debug', action='store_true', default=False, help='Debug mode')
    parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Verbose mode')
    parser.add_argument('files', metavar='FILE1 [FILE2 ...]', type=str, nargs='+', help='Files to read. Use \'-\' for STDIN"')
    args = parser.parse_args()
    bu.setVerbose(args.verbose)
    bu.setDebug(args.debug)
    # Module-level API clients used by the reader tasks
    wg = WG(WG_appID, args.tankfile, args.mapfile)
    wi = WoTinspector()
    # Resolve account name to a numeric id when given
    if args.account != None:
        args.accountID = await wg.getAccountID(args.account)
        bu.debug('WG account_id: ' + str(args.accountID))
    try:
        replayQ = asyncio.Queue()
        reader_tasks = []
        # Make replay Queue
        scanner_task = asyncio.create_task(mkReplayQ(replayQ, args.files))
        # Start tasks to process the Queue
        for i in range(TASK_N):
            reader_tasks.append(asyncio.create_task(replayReader(replayQ, i, args)))
            bu.debug('Task ' + str(i) + ' started')
        bu.debug('Waiting for the replay scanner to finish')
        await asyncio.wait([scanner_task])
        bu.debug('Scanner finished. Waiting for replay readers to finish the queue')
        await replayQ.join()
        bu.debug('Replays read. Cancelling Readers and analyzing results')
        for task in reader_tasks:
            task.cancel()
        # Gather per-reader results: each task returns (parsed_results, player:tank pairs)
        results = []
        playerstanks = set()
        for res in await asyncio.gather(*reader_tasks):
            results.extend(res[0])
            playerstanks.update(res[1])
        # Fetch stats for all player/tank pairs seen in the replays, then report
        player_stats = await processTankStats(playerstanks, TASK_N)
        bu.verbose('')
        results = calcTeamStats(results, player_stats, args.accountID)
        processStats(results, args)
    finally:
        ## Need to close the aiohttp.session since Python destructors do not support async methods...
        await wg.close()
        await wi.close()
    return None
async def mkReplayQ(queue : asyncio.Queue, files : list, title : str):
    """Create queue of replays to post.

    Accepts either '-' as the first entry (read a list of replay filenames
    from STDIN) or a list of files/directories to scan for .wotbreplay files.
    Each match is wrapped via mkQueueItem() and put on the queue.
    """
    replay_re = re.compile('.*\\.wotbreplay$')
    if files[0] == '-':
        # Filenames arrive one per line on STDIN; an empty line terminates input
        bu.debug('reading replay file list from STDIN')
        reader, _ = await aioconsole.get_standard_streams()
        while True:
            line = (await reader.readline()).decode('utf-8').rstrip()
            if not line:
                break
            if replay_re.match(line) is not None:
                await queue.put(await mkQueueItem(line, title))
    else:
        for fn in files:
            # Strip a trailing quote left over from shell argument handling
            fn = fn[:-1] if fn.endswith('"') else fn
            if os.path.isfile(fn) and replay_re.match(fn) is not None:
                await queue.put(await mkQueueItem(fn, title))
                bu.debug('File added to queue: ' + fn)
            elif os.path.isdir(fn):
                # Scan the directory (non-recursive) for replay files
                with os.scandir(fn) as dir_entries:
                    for entry in dir_entries:
                        try:
                            bu.debug('Found: ' + entry.name)
                            if entry.is_file() and replay_re.match(entry.name) is not None:
                                bu.debug(entry.name)
                                await queue.put(await mkQueueItem(entry.path, title))
                                bu.debug('File added to queue: ' + entry.path)
                        except Exception as err:
                            bu.error(str(err))
            else:
                bu.error('File not found: ' + fn)
    bu.debug('Finished')
    return None
async def readReplayJSON(replay_json: dict, args : argparse.Namespace) -> dict:
    """Parse a replay JSON dict into a flat result record.

    Normalizes the replay to the perspective of args.accountID (swapping
    teams and inverting the battle result when the account is on the enemy
    team), extracts summary and per-player fields, and derives battle tier,
    survival and win flags.

    Args:
        replay_json: replay JSON as returned by WoTinspector.
        args: parsed CLI namespace; uses accountID and url.

    Returns:
        dict with the parsed fields, or None when the account is not in the
        replay or parsing fails.
    """
    account_id = args.accountID
    url = args.url
    result = {}
    try:
        if account_id == None:
            # No account given: analyze from the replay protagonist's perspective
            account_id = replay_json['data']['summary']['protagonist']
        elif replay_json['data']['summary']['protagonist'] != account_id:
            if account_id in replay_json['data']['summary']['enemies']:
                # switch the teams...
                # battle_result: 0 = loss, 1 = win, 2 = draw; invert unless draw
                if replay_json['data']['summary']['battle_result'] != 2:
                    if replay_json['data']['summary']['battle_result'] == 0:
                        replay_json['data']['summary']['battle_result'] = 1
                    else:
                        replay_json['data']['summary']['battle_result'] = 0
                tmp = replay_json['data']['summary']['enemies']
                replay_json['data']['summary']['enemies'] = replay_json['data']['summary']['allies']
                replay_json['data']['summary']['allies'] = tmp
            elif account_id not in replay_json['data']['summary']['allies']:
                # account_id looked for but not found in teams
                bu.debug('Replay ' + replay_json['data']['summary']['title'] + ' does not have account_id ' + str(account_id) + '. Skipping.')
                return None
        if url:
            result['url'] = replay_json['data']['view_url']
        for key in replay_summary_flds:
            result[key] = replay_json['data']['summary'][key]
        result['battle_start_timestamp'] = int(replay_json['data']['summary']['battle_start_timestamp'])
        result['allies'] = set()
        result['enemies'] = set()
        btl_duration = 0
        btl_tier = 0
        for player in replay_json['data']['summary']['details']:
            # Battle duration = longest time any player stayed alive
            btl_duration = max(btl_duration, player['time_alive'])
            player_tank_tier = wg.getTankData(player['vehicle_descr'], 'tier')
            btl_tier = max(btl_tier, player_tank_tier)
            if player['dbid'] == account_id:
                # The analyzed account: copy detail fields + derived flags
                tmp = {}
                for key in replay_details_flds:
                    tmp[key] = player[key]
                if player['hitpoints_left'] == 0:
                    tmp['survived'] = 0
                    tmp['destroyed'] = 1
                else:
                    tmp['survived'] = 1
                    tmp['destroyed'] = 0
                tmp['account_id'] = account_id
                tmp['tank_id'] = player['vehicle_descr']
                tmp['tank_tier'] = player_tank_tier
                tmp['tank_name'] = wg.getTankData(tmp['tank_id'], 'name')
                for key in tmp.keys():
                    result[key] = tmp[key]
            elif player['dbid'] in replay_json['data']['summary']['allies']:
                # Record teammates as 'account_id:tank_id' keys for stats lookup
                result['allies'].add(':'.join([str(player['dbid']), str(player['vehicle_descr'])]))
            else:
                result['enemies'].add(':'.join([str(player['dbid']), str(player['vehicle_descr'])]))
        # FIX: guard against btl_duration == 0 (e.g. empty/degenerate details),
        # which previously raised ZeroDivisionError and dropped the whole replay
        result['time_alive%'] = result['time_alive'] / btl_duration if btl_duration > 0 else 0
        result['battle_tier'] = btl_tier
        result['top_tier'] = 1 if (result['tank_tier'] == btl_tier) else 0
        result['win'] = 1 if result['battle_result'] == 1 else 0
        bu.debug(str(result))
        return result
    except KeyError as err:
        bu.error('Key :' + str(err) + ' not found')
    except Exception as err:
        bu.error(str(type(err)) + ' : ' + str(err))
    return None
async def mk_replayQ(queue : asyncio.Queue, args : argparse.Namespace, db : motor.motor_asyncio.AsyncIOMotorDatabase = None):
    """Create queue of replays to post.

    Three input modes based on args.files[0]:
    - 'db:' : read replays from the MongoDB replays collection (optionally
      filtered by args.filters as a JSON query),
    - '-'   : read replay JSON filenames from STDIN (blank line ends input),
    - else  : treat args.files as files/directories to scan for
      *.wotbreplay.json files.

    Args:
        queue: reader queue to fill with mk_readerQ_item() items.
        args: parsed CLI namespace; uses files and filters.
        db: Motor database handle; required for 'db:' mode.

    Returns:
        Number of replays queued.
    """
    p_replayfile = re.compile(r'.*\.wotbreplay\.json$')
    files = args.files
    Nreplays = 0
    if files[0] == 'db:':
        if db == None:
            bu.error('No database connection opened')
            sys.exit(1)
        try:
            dbc = db[DB_C_REPLAYS]
            cursor = None
            if args.filters != None:
                bu.debug(str(args.filters))
                filters = json.loads(args.filters)
                bu.debug(json.dumps(filters, indent=2))
                cursor = dbc.find(filters)
            else:
                # select all
                cursor = dbc.find({})
            bu.debug('Reading replays...')
            async for replay_json in cursor:
                # Drop the Mongo _id before queuing; keep it for the item label
                _id = replay_json['_id']
                del replay_json['_id']
                await queue.put(await mk_readerQ_item(replay_json, 'DB: _id = ' + _id))
                Nreplays += 1
            bu.debug('All the matching replays have been read from the DB')
        except Exception as err:
            bu.error(exception=err)
    elif files[0] == '-':
        bu.debug('reading replay file list from STDIN')
        stdin, _ = await aioconsole.get_standard_streams()
        while True:
            try:
                line = (await stdin.readline()).decode('utf-8').rstrip()
                if not line:
                    break
                if (p_replayfile.match(line) != None):
                    replay_json = await bu.open_JSON(line, wi.chk_JSON_replay)
                    await queue.put(await mk_readerQ_item(replay_json, line))
                    # FIX: STDIN-fed replays were not counted, so the returned
                    # total and the 'Finished scanning' message undercounted
                    Nreplays += 1
            except Exception as err:
                bu.error(exception=err)
    else:
        for fn in files:
            try:
                # bu.debug('Filename: ' + fn)
                # Strip a trailing quote left over from shell argument handling
                if fn.endswith('"'):
                    fn = fn[:-1]
                if os.path.isfile(fn) and (p_replayfile.match(fn) != None):
                    replay_json = await bu.open_JSON(fn, wi.chk_JSON_replay)
                    await queue.put(await mk_readerQ_item(replay_json, fn))
                    bu.debug('File added to queue: ' + fn)
                    Nreplays += 1
                elif os.path.isdir(fn):
                    # Scan the directory (non-recursive) for replay JSON files
                    with os.scandir(fn) as dirEntry:
                        for entry in dirEntry:
                            if entry.is_file() and (p_replayfile.match(entry.name) != None):
                                bu.debug(entry.name)
                                replay_json = await bu.open_JSON(entry.path, wi.chk_JSON_replay)
                                await queue.put(await mk_readerQ_item(replay_json, entry.name))
                                bu.debug('File added to queue: ' + entry.path)
                                Nreplays += 1
            except Exception as err:
                bu.error(exception=err)
    bu.verbose('Finished scanning replays: ' + str(Nreplays) + ' replays to process')
    return Nreplays