def export_all_to_folder(self, out_dir='./out', ext='.json'):
    """Export one formatted conf file per playable adventurer into '<out_dir>/adv'.

    Non-playable and element-99 (placeholder) entries are skipped. On failure the
    offending id and the partially built conf are printed before re-raising.
    """
    all_res = self.get_all(exclude_falsy=True, where='_ElementalType != 99')
    out_dir = os.path.join(out_dir, 'adv')
    check_target_path(out_dir)
    for res in tqdm(all_res, desc=os.path.basename(out_dir)):
        if not res.get('_IsPlayable'):
            continue
        # pre-bind so the except handler can't hit NameError when
        # process_result itself raises (which would mask the real error)
        outconf = None
        try:
            outconf = self.process_result(res, exclude_falsy=True)
            out_name = self.outfile_name(outconf, ext)
            output = os.path.join(out_dir, out_name)
            with open(output, 'w', newline='', encoding='utf-8') as fp:
                fmt_conf(outconf, f=fp)
        except Exception:
            # identify the failing entry, then propagate with original traceback
            print(res['_Id'])
            pprint(outconf)
            raise
    print('Missing endlag for:', AdvConf.MISSING_ENDLAG)
def export_all_to_folder(self, out_dir='./out', ext='.json', exclude_falsy=True):
    """Export player actions grouped by action-name prefix into '<out_dir>/_actions'."""
    # super().export_all_to_folder(out_dir, ext, fn_mode='a', exclude_falsy=exclude_falsy, full_actions=False)
    out_dir = os.path.join(out_dir, '_actions')
    all_res = self.get_all(exclude_falsy=exclude_falsy)
    check_target_path(out_dir)
    sorted_res = defaultdict(list)
    for res in tqdm(all_res, desc='_actions'):
        res = self.process_result(res, exclude_falsy=exclude_falsy)
        try:
            k1, _ = res['_ActionName'].split('_', 1)
            # all dragon actions (except the DAG group) share one file
            if k1[0] == 'D' and k1 != 'DAG':
                k1 = 'DRAGON'
        except (KeyError, AttributeError, ValueError, IndexError):
            # narrowed from a bare except:, whose handler itself raised
            # KeyError when _ActionName was missing entirely
            k1 = res.get('_ActionName')
        sorted_res[k1].append(res)
    # if res['_Id'] not in PlayerAction.REF:
    #     sorted_res['UNUSED'].append(res)
    for group_name, res_list in sorted_res.items():
        out_name = get_valid_filename(f'{group_name}{ext}')
        output = os.path.join(out_dir, out_name)
        with open(output, 'w', newline='', encoding='utf-8') as fp:
            json.dump(res_list, fp, indent=2, ensure_ascii=False)
def export_all_to_folder(self, out_dir="./out", ext=".json"):
    """Export wyrmprints to '<out_dir>/wyrmprints', merging records that share a filename.

    When several ids map to the same output file, fields whose values differ
    between them are rewritten as {id: value} sub-dicts on the first record.
    """
    out_dir = os.path.join(out_dir, "wyrmprints")
    all_res = self.get_all()
    check_target_path(out_dir)
    # bucket raw rows by the filename they would be written to
    duplicates = collections.defaultdict(list)
    for res in all_res:
        duplicates[self.outfile_name(res, ext)].append(res)
    for out_name, res_list in tqdm(duplicates.items(), desc=os.path.basename(out_dir)):
        res_list = [self.process_result(res) for res in res_list]
        # the first processed record becomes the merged output
        main_res = res_list[0]
        main_res_id = main_res["_Id"]
        if len(res_list) > 1:
            keys_that_differ = set()
            id_to_sub_res = {}
            for sub_res in res_list[1:]:
                id_to_sub_res[sub_res["_Id"]] = sub_res
                # NOTE(review): assumes every duplicate shares main_res's key
                # set — a key present here but absent from main_res would
                # raise KeyError. Confirm process_result guarantees this.
                for key in sub_res:
                    if sub_res[key] != main_res[key]:
                        keys_that_differ.add(key)
            for key in keys_that_differ:
                # lift the differing field into a per-id mapping
                main_res[key] = {main_res_id: main_res[key]}
                for sub_res_id, sub_res in id_to_sub_res.items():
                    main_res[key][sub_res_id] = sub_res[key]
        output = os.path.join(out_dir, out_name)
        with open(output, "w", newline="", encoding="utf-8") as fp:
            json.dump(main_res, fp, indent=2, ensure_ascii=False, default=str)
def outfile_name_with_subdir(res, ext=".json", aiscript_dir="./out/_aiscript", enemies_dir="./out/enemies"):
    """Return (subdir, filename) for an enemy record and link its aiscript beside it.

    If the enemy's _Ai script was previously converted to a .py file, hardlink
    it (falling back to remove+relink, then plain copy) into the enemy's
    subdirectory and name the enemy json after the script; otherwise name it
    from the id and _Name.
    """
    subdir = EnemyParam.general_param_group(res)
    try:
        name = res["_Name"]
    except KeyError:
        name = "UNNAMED"
    # NOTE(review): the caller treats subdir=None as a misc bucket, but this
    # join would raise on None — confirm general_param_group's contract.
    check_target_path(os.path.join(enemies_dir, subdir))
    try:
        _, ai_file = res["_Ai"].split("/")
        ai_path = os.path.join(aiscript_dir, ai_file)
        if os.path.exists(ai_path + ".py"):
            # BUGFIX: `filename` was computed but never used — a stray
            # "(unknown)" placeholder had replaced it in the link target
            # and the return value.
            filename = snakey(f"{ai_file}_{name}")
            link_target = os.path.join(enemies_dir, subdir, f"{filename}.py")
            try:
                os.link(ai_path + ".py", link_target)
            except FileExistsError:
                # refresh a stale link from a previous run
                os.remove(link_target)
                os.link(ai_path + ".py", link_target)
            except OSError:
                # hardlink unsupported (e.g. cross-device): copy instead
                shutil.copy(ai_path + ".py", link_target)
            return subdir, snakey(f"{filename}{ext}")
    except KeyError:
        # no _Ai reference: fall through to the id-based name
        pass
    return subdir, snakey(f'{res["_Id"]:02}_{name}{ext}')
def export_all_to_folder(self, out_dir="./out", ext=".json"):
    """Export enemy records into per-group subfolders of '<out_dir>/enemies'.

    An __init__.py is linked (or copied) into the enemies folder so the linked
    aiscript modules are importable. Records without a subdir are pooled into
    grouped misc files instead.
    """
    aiscript_dir = os.path.join(out_dir, "_aiscript")
    enemies_dir = os.path.join(out_dir, "enemies")
    init_link = os.path.join(enemies_dir, "__init__.py")
    all_res = self.get_all()
    check_target_path(enemies_dir)
    # hardlink the package init; refresh it if present, copy if linking fails
    try:
        os.link(AISCRIPT_INIT_PATH, init_link)
    except FileExistsError:
        os.remove(init_link)
        os.link(AISCRIPT_INIT_PATH, init_link)
    except OSError:
        shutil.copy(AISCRIPT_INIT_PATH, init_link)
    misc_data = defaultdict(list)
    for res in tqdm(all_res, desc="enemies"):
        res = self.process_result(res)
        subdir, out_file = self.outfile_name_with_subdir(
            res, ext=ext, aiscript_dir=aiscript_dir, enemies_dir=enemies_dir)
        if subdir is None:
            # no subdir: defer to the grouped misc output below
            misc_data[out_file].append(res)
            continue
        target = os.path.join(enemies_dir, subdir, out_file)
        with open(target, "w", newline="", encoding="utf-8") as fp:
            json.dump(res, fp, indent=2, ensure_ascii=False, default=str)
    for group_name, res_list in misc_data.items():
        grouped_path = os.path.join(enemies_dir, snakey(f"{group_name}{ext}"))
        with open(grouped_path, "w", newline="", encoding="utf-8") as fp:
            json.dump(res_list, fp, indent=2, ensure_ascii=False, default=str)
def export_all_to_folder(self, out_dir="./out", ext=".json"):
    """Export player actions grouped by action-name prefix into '<out_dir>/_actions'."""
    # super().export_all_to_folder(out_dir, ext, fn_mode='a', full_actions=False)
    out_dir = os.path.join(out_dir, "_actions")
    all_res = self.get_all()
    check_target_path(out_dir)
    sorted_res = defaultdict(list)
    for res in tqdm(all_res, desc="_actions"):
        res = self.process_result(res)
        try:
            k1, _ = res["_ActionName"].split("_", 1)
            # dragon actions (except the DAG group) share one file
            if k1[0] == "D" and k1 != "DAG":
                k1 = "DRAGON"
        except (KeyError, AttributeError, ValueError, IndexError):
            # narrowed from a bare except:, whose handler itself raised
            # KeyError when _ActionName was missing entirely
            k1 = res.get("_ActionName")
        sorted_res[k1].append(res)
    # if res['_Id'] not in PlayerAction.REF:
    #     sorted_res['UNUSED'].append(res)
    for group_name, res_list in sorted_res.items():
        out_name = snakey(f"{group_name}{ext}")
        output = os.path.join(out_dir, out_name)
        with open(output, "w", newline="", encoding="utf-8") as fp:
            json.dump(res_list, fp, indent=2, ensure_ascii=False, default=str)
def export_all_to_folder(self, out_dir="./out", ext=".json"):
    """Export hit attributes grouped by id prefix into '<out_dir>/_hit_attr'."""
    # super().export_all_to_folder(out_dir, ext, fn_mode='a', full_actions=False)
    out_dir = os.path.join(out_dir, "_hit_attr")
    all_res = self.get_all()
    check_target_path(out_dir)
    sorted_res = defaultdict(list)
    for res in tqdm(all_res, desc="_hit_attr"):
        res = self.process_result(res)
        try:
            k1, _ = res["_Id"].split("_", 1)
        except (KeyError, AttributeError, ValueError):
            # narrowed from a bare except:, whose handler itself raised
            # KeyError when _Id was missing; id without '_' groups under itself
            sorted_res[res.get("_Id")].append(res)
        else:
            # skill-style prefixes all collapse into the 'S' group
            if PlayerActionHitAttribute.S_PATTERN.match(k1):
                sorted_res["S"].append(res)
            else:
                sorted_res[k1].append(res)
    for group_name, res_list in sorted_res.items():
        out_name = snakey(f"{group_name}{ext}")
        output = os.path.join(out_dir, out_name)
        with open(output, "w", newline="", encoding="utf-8") as fp:
            json.dump(res_list, fp, indent=2, ensure_ascii=False, default=str)
def export_all_to_folder(self, out_dir='./out', ext='.json'):
    """Write one formatted conf file per record into '<out_dir>/base'."""
    out_dir = os.path.join(out_dir, 'base')
    all_res = self.get_all(exclude_falsy=True)
    check_target_path(out_dir)
    progress_label = os.path.basename(out_dir)
    for res in tqdm(all_res, desc=progress_label):
        # filename comes from the raw row; contents from the processed one
        out_name = self.outfile_name(res, ext)
        processed = self.process_result(res, exclude_falsy=True)
        target = os.path.join(out_dir, out_name)
        with open(target, 'w', newline='', encoding='utf-8') as fp:
            fmt_conf(processed, f=fp)
def export_all_to_folder(self, out_dir='./out/enemies', ext='.json', exclude_falsy=True):
    """Group enemy records by param-group name prefix.

    NOTE(review): sorted_res is built but nothing is ever written to disk and
    records without _ParamGroupName are dropped — the serialization step
    appears to be missing or truncated in this chunk; confirm against the
    full file.
    """
    # super().export_all_to_folder(out_dir, ext, fn_mode='a', exclude_falsy=exclude_falsy, full_actions=False)
    all_res = self.get_all(exclude_falsy=exclude_falsy)
    check_target_path(out_dir)
    sorted_res = defaultdict(lambda: [])
    for res in all_res:
        if '_ParamGroupName' in res:
            # prefer the regex capture; fall back to the text before '_'
            if (match := self.PARAM_GROUP.match(res['_ParamGroupName'])):
                sorted_res[match.group(1)].append(self.process_result(res, exclude_falsy=exclude_falsy))
            else:
                sorted_res[res['_ParamGroupName'].split('_', 1)[0]].append(self.process_result(res, exclude_falsy=exclude_falsy))
def load_aiscript(path, reformat=True):
    """Convert every aiscript file under *path*, then optionally run black on OUTPUT.

    Args:
        path: directory tree containing raw aiscript files.
        reformat: when True, format the generated files with black.
    """
    check_target_path(OUTPUT)
    # enemy_actions = EnemyAction(DBViewIndex())
    for root, _, files in os.walk(path):
        for file_name in tqdm(files, desc="aiscript"):
            load_aiscript_file(os.path.join(root, file_name))
    if reformat:
        print("\nReformatting...", flush=True)
        try:
            subprocess.call(
                ["black", "--quiet", "--line-length", "200", OUTPUT])
        except FileNotFoundError:
            # BUGFIX: subprocess.call never raises CalledProcessError — the
            # old handler was dead code, while a missing black binary raised
            # an uncaught FileNotFoundError (the very case this message is for)
            print("Python black not installed", flush=True)
        print("Done", flush=True)
def make_json(out, outfile, view, id_fn, data_fn, avail_fn=None, where=None, order='_Id ASC', name_key='_Name'):
    """Build {id: data} from a DB view and dump it to '<out>/<outfile>'.

    Rows with a falsy *name_key* are skipped. When *avail_fn* is given it
    annotates the data in place and any entry it leaves without an
    'Availability' key is pruned.
    """
    all_res = view.get_all(exclude_falsy=False, where=where, order=order)
    data = {}
    for res in all_res:
        if not res[name_key]:
            continue
        data[id_fn(res, view)] = data_fn(res)
    if avail_fn:
        avail_fn(data)
        # BUGFIX: pruning now only runs when avail_fn was applied — previously
        # it ran unconditionally and would delete every entry whenever data_fn
        # did not set 'Availability' itself
        for key in list(data):
            if 'Availability' not in data[key]:
                del data[key]
    check_target_path(out)
    # json.dump defaults to ensure_ascii=True, but be explicit about encoding
    with open(os.path.join(out, outfile), 'w', encoding='utf-8') as f:
        json.dump(data, f)
def export_all_to_folder(self, out_dir='./out', ext='.json', exclude_falsy=True):
    """Export action conditions into '<out_dir>/_act_cond', one file per 1e8 id range."""
    # super().export_all_to_folder(out_dir, ext, fn_mode='a', exclude_falsy=exclude_falsy, full_actions=False)
    out_dir = os.path.join(out_dir, '_act_cond')
    all_res = self.get_all(exclude_falsy=exclude_falsy)
    check_target_path(out_dir)
    sorted_res = defaultdict(list)
    for res in all_res:
        res = self.process_result(res, exclude_falsy=exclude_falsy)
        try:
            # floor division avoids the float round-trip of int(id / 1e8)
            sorted_res[res['_Id'] // 100000000].append(res)
        except (KeyError, TypeError):
            # missing or non-numeric id: lump into bucket 0
            sorted_res[0].append(res)
    for group_name, res_list in sorted_res.items():
        out_name = get_valid_filename(f'{group_name}00000000{ext}')
        output = os.path.join(out_dir, out_name)
        with open(output, 'w', newline='', encoding='utf-8') as fp:
            json.dump(res_list, fp, indent=2, ensure_ascii=False)
def export_all_to_folder(self, out_dir="./out", ext=".json"):
    """Export records with a special skill into a single file under '<out_dir>/_br'."""
    where = "_SpecialSkillId != 0"
    out_dir = os.path.join(out_dir, "_br")
    all_res = self.get_all(where=where)
    check_target_path(out_dir)
    sorted_res = {}
    for res in tqdm(all_res, desc="_br"):
        res = self.process_result(res)
        sorted_res[res["_Id"]] = res
    # BUGFIX: honor the ext parameter — the extension was hardcoded to
    # '.json' (default behavior is unchanged)
    out_name = snakey(f"_chara_skin{ext}")
    output = os.path.join(out_dir, out_name)
    with open(output, "w", newline="", encoding="utf-8") as fp:
        json.dump(sorted_res, fp, indent=2, ensure_ascii=False, default=str)
def export_all_to_folder(self, out_dir='./out', ext='.json'):
    """Export all wyrmprint confs into one 'wyrmprints.json' file.

    Records whose conf comes back empty are collected and reported as skipped.
    """
    all_res = self.get_all(exclude_falsy=True)
    check_target_path(out_dir)
    outdata = {}
    skipped = []
    for res in tqdm(all_res, desc=os.path.basename(out_dir)):
        conf = self.process_result(res, exclude_falsy=True)
        if not conf:
            skipped.append((res['_BaseId'], res['_Name']))
            continue
        outdata[snakey(res['_Name'])] = conf
    # manually appended special entry
    outdata['High_Dragon_Print'] = WpConf.HDT_PRINT
    output = os.path.join(out_dir, 'wyrmprints.json')
    with open(output, 'w', newline='', encoding='utf-8') as fp:
        fmt_conf(outdata, f=fp)
    print('Skipped:', skipped)
def export_all_to_folder(self, out_dir='./out', ext='.json'):
    """Export series-4 weapon confs into 'weapons.json', keyed element → weapon type."""
    all_res = self.get_all(exclude_falsy=True, where='_WeaponSeriesId = 4')
    check_target_path(out_dir)
    # one bucket per element, in fixed display order
    outdata = {ele: {} for ele in ('flame', 'water', 'wind', 'light', 'shadow')}
    for res in tqdm(all_res, desc=os.path.basename(out_dir)):
        conf = self.process_result(res, exclude_falsy=True)
        if conf:
            outdata[conf['ele']][conf['wt']] = conf
    output = os.path.join(out_dir, 'weapons.json')
    with open(output, 'w', newline='', encoding='utf-8') as fp:
        fmt_conf(outdata, f=fp)
def export_all_to_folder(self, out_dir="./out", ext=".json"):
    """Export action conditions into '<out_dir>/_act_cond', one file per 1e8 id range."""
    # super().export_all_to_folder(out_dir, ext, fn_mode='a', full_actions=False)
    out_dir = os.path.join(out_dir, "_act_cond")
    all_res = self.get_all()
    check_target_path(out_dir)
    sorted_res = defaultdict(list)
    for res in tqdm(all_res, desc="_act_cond"):
        res = self.process_result(res)
        try:
            # floor division avoids the float round-trip of int(id / 1e8)
            sorted_res[res["_Id"] // 100000000].append(res)
        except (KeyError, TypeError):
            # missing or non-numeric id: lump into bucket 0
            sorted_res[0].append(res)
    for group_name, res_list in sorted_res.items():
        out_name = snakey(f"{group_name}00000000{ext}")
        output = os.path.join(out_dir, out_name)
        with open(output, "w", newline="", encoding="utf-8") as fp:
            json.dump(res_list, fp, indent=2, ensure_ascii=False, default=str)
def export_all_to_folder(self, out_dir='./out', ext='.json'):
    """Export dragon confs into '<out_dir>/drg', one file per element."""
    extra_ids = ','.join(map(str, DrgConf.EXTRA_DRAGONS))
    where_str = f'_Rarity = 5 AND (_SellDewPoint = 8500 OR _Id in ({extra_ids}))'
    all_res = self.get_all(exclude_falsy=True, where=where_str)
    out_dir = os.path.join(out_dir, 'drg')
    check_target_path(out_dir)
    # one bucket per element, in fixed display order
    outdata = {ele: {} for ele in ('flame', 'water', 'wind', 'light', 'shadow')}
    for res in tqdm(all_res, desc=os.path.basename(out_dir)):
        conf = self.process_result(res, exclude_falsy=True)
        if conf:
            outdata[conf['d']['ele']][snakey(conf['d']['name'])] = conf
    for ele, data in outdata.items():
        target = os.path.join(out_dir, f'{ele}.json')
        with open(target, 'w', newline='', encoding='utf-8') as fp:
            fmt_conf(data, f=fp, lim=3)
def export_all_to_folder(self, out_dir='./out', ext='.json', exclude_falsy=True):
    """Export hit attributes grouped by id prefix into '<out_dir>/_hit_attr'."""
    # super().export_all_to_folder(out_dir, ext, fn_mode='a', exclude_falsy=exclude_falsy, full_actions=False)
    out_dir = os.path.join(out_dir, '_hit_attr')
    all_res = self.get_all(exclude_falsy=exclude_falsy)
    check_target_path(out_dir)
    sorted_res = defaultdict(list)
    for res in tqdm(all_res, desc='_hit_attr'):
        res = self.process_result(res, exclude_falsy=exclude_falsy)
        try:
            k1, _ = res['_Id'].split('_', 1)
        except (KeyError, AttributeError, ValueError):
            # narrowed from a bare except:, whose handler itself raised
            # KeyError when _Id was missing; id without '_' groups under itself
            sorted_res[res.get('_Id')].append(res)
        else:
            # skill-style prefixes all collapse into the 'S' group
            if PlayerActionHitAttribute.S_PATTERN.match(k1):
                sorted_res['S'].append(res)
            else:
                sorted_res[k1].append(res)
    for group_name, res_list in sorted_res.items():
        out_name = get_valid_filename(f'{group_name}{ext}')
        output = os.path.join(out_dir, out_name)
        with open(output, 'w', newline='', encoding='utf-8') as fp:
            json.dump(res_list, fp, indent=2, ensure_ascii=False)
if __name__ == "__main__": all_avail = { "Chara": set(), "Dragon": {"Gacha", "Essence"}, "Amulet": set(), "Weapon": set() } outdir = os.path.join( pathlib.Path(__file__).parent.absolute(), "..", "..", "dl-collection") imgdir = os.path.join(outdir, "public") datadir = os.path.join(outdir, "src", "data") index = DBViewIndex() check_target_path(outdir) playable = "_ElementalType != 99 AND _IsPlayable = 1" make_json( datadir, "chara.json", CharaData(index), make_bv_id, make_chara_json, chara_availability_data, where=playable, ) make_json( datadir, "dragon.json", DragonData(index),
def load_aiscript(path):
    """Convert every aiscript file found anywhere under *path*."""
    check_target_path(OUTPUT)
    for root, _, names in os.walk(path):
        for entry in tqdm(names, desc='aiscript'):
            full_path = os.path.join(root, entry)
            load_aiscript_file(full_path)
def make_weapon_jsons(out, index):
    """Dump 'weaponbuild.json' and 'weapon.json' for the collection front-end.

    First pass: WeaponBodyBuildupGroup rows grouped by buildup-group id and
    piece type. Second pass: one entry per named WeaponBody.
    """
    view = WeaponBodyBuildupGroup(index)
    all_res = view.get_all(exclude_falsy=True)
    processed = defaultdict(lambda: defaultdict(lambda: []))
    for res in all_res:
        # material id -> quantity; falsy columns were excluded, so missing
        # slots surface as KeyError and are skipped
        mats = {}
        for i in range(1, 11):
            k1 = f'_BuildupMaterialId{i}'
            k2 = f'_BuildupMaterialQuantity{i}'
            try:
                mats[res[k1]] = res[k2]
            except KeyError:
                continue
        processed[res['_WeaponBodyBuildupGroupId']][
            res['_BuildupPieceType']].append({
                'Step': res['_Step'],
                'UnbindReq': res.get('_UnlockConditionLimitBreakCount', 0),
                'SkinId': res.get('_RewardWeaponSkinNo', 0),
                'Cost': res['_BuildupCoin'],
                'Mats': mats
            })
    # plain dict so json.dump sees no defaultdict factories
    processed = dict(processed)
    outfile = 'weaponbuild.json'
    check_target_path(out)
    with open(os.path.join(out, outfile), 'w') as f:
        json.dump(processed, f, indent=2)
    view = WeaponBody(index)
    all_res = view.get_all(exclude_falsy=True)
    processed = {}
    for res in all_res:
        # unnamed rows are placeholder entries
        if not res.get('_Name'):
            continue
        # skin index -> wpn id, for each known skin column
        skins = {}
        for i, sid in enumerate(WeaponBody.WEAPON_SKINS):
            try:
                skin = index['WeaponSkin'].get(res[sid], exclude_falsy=True)
                skins[i] = make_wpn_id(skin)
            except (KeyError, TypeError):
                continue
        prereqcreate = [
            res.get(need) for need in ('_NeedCreateWeaponBodyId1',
                                       '_NeedCreateWeaponBodyId2')
            if res.get(need)
        ]
        prereqfull = res.get('_NeedAllUnlockWeaponBodyId1')
        # crafting materials: id -> quantity
        mats = {}
        for i in range(1, 6):
            k1 = f'_CreateEntityId{i}'
            k2 = f'_CreateEntityQuantity{i}'
            try:
                mats[res[k1]] = res[k2]
            except KeyError:
                continue
        processed[res['_Id']] = {
            'NameEN': res['_Name'],
            'NameJP': res['_NameJP'],
            'NameCN': res['_NameCN'],
            'Series': res['_WeaponSeriesId'],
            'Build': res.get('_WeaponBodyBuildupGroupId'),
            'Element': res['_ElementalType'],
            'Weapon': res['_WeaponType'],
            'Rarity': res['_Rarity'],
            'Unbind': (res.get('_MaxLimitOverCount', -1) + 1) * 4,  # check WeaponBodyRarity
            'Prereq': {
                'Create': prereqcreate,
                'FullUp': prereqfull
            },
            'Cost': res.get('_CreateCoin', 0),
            'Mats': mats,
            'Skins': skins,
            'Bonus': any([
                res.get('_WeaponPassiveEffHp'),
                res.get('_WeaponPassiveEffAtk')
            ]),
        }
    outfile = 'weapon.json'
    check_target_path(out)
    with open(os.path.join(out, outfile), 'w') as f:
        json.dump(processed, f, indent=2)
"wep": to_jsonable(adv_wep_passives), "drg": to_jsonable(drg_passives), } out_path = os.path.join(out_dir, "fort.json") with open(out_path, "w") as fn: fmt_conf(fort_passives, f=fn) fn.write("\n") if __name__ == "__main__": write_fort_passives("./out/gen") exit() adv_ele_passives, adv_wep_passives, drg_passives = count_fort_passives( include_album=True) print("===Adventurer Bonus===") for ele, bonus in adv_ele_passives.items(): hp, atk = bonus print(f"{ele[1]}:\t{hp:.1f}% {atk:.1f}%") print() for wep, bonus in adv_wep_passives.items(): hp, atk = bonus print(f"{wep[1]}:\t{hp:.1f}% {atk:.1f}%") print("") print("===Dragon Bonus===") for ele, bonus in drg_passives.items(): hp, atk = bonus print(f"{ele[1]}:\t{hp:.1f}% {atk:.1f}%") check_target_path("./out")
def make_weapon_jsons(out, index):
    """Dump 'weaponbuild.json' and 'weapon.json' (passive-ability aware variant).

    Also records every referenced material/ability icon into the module-level
    material_icons / ability_icons sets as a side effect.
    """
    view = WeaponBodyBuildupGroup(index)
    all_res = view.get_all(exclude_falsy=True)
    processed = defaultdict(lambda: defaultdict(lambda: []))
    for res in all_res:
        # material id -> quantity; missing slots surface as KeyError
        mats = {}
        for i in range(1, 11):
            k1 = f'_BuildupMaterialId{i}'
            k2 = f'_BuildupMaterialQuantity{i}'
            try:
                mats[res[k1]] = res[k2]
                material_icons.add(res[k1])
            except KeyError:
                continue
        processed[res['_WeaponBodyBuildupGroupId']][res['_BuildupPieceType']].append({
            'Step': res['_Step'],
            'UnbindReq': res.get('_UnlockConditionLimitBreakCount', 0),
            'SkinId': res.get('_RewardWeaponSkinNo', 0),
            'Cost': res['_BuildupCoin'],
            'Mats': mats
        })
    # plain dict so json.dump sees no defaultdict factories
    processed = dict(processed)
    outfile = 'weaponbuild.json'
    check_target_path(out)
    with open(os.path.join(out, outfile), 'w') as f:
        json.dump(processed, f)
    view = WeaponBody(index)
    all_res = view.get_all(exclude_falsy=True)
    processed = {}
    for res in all_res:
        # skip unnamed rows and rows with neither a buildup nor a passive group
        if not res.get('_Name') or (not res.get('_WeaponBodyBuildupGroupId') and not res.get('_WeaponPassiveAbilityGroupId')):
            continue
        skins = {}
        for i, sid in enumerate(WeaponBody.WEAPON_SKINS):
            try:
                skin = index['WeaponSkin'].get(res[sid], exclude_falsy=True)
                skins[i] = make_wpn_id(skin)
            except (KeyError, TypeError):
                continue
        prereqcreate = [res.get(need) for need in ('_NeedCreateWeaponBodyId1', '_NeedCreateWeaponBodyId2') if res.get(need)]
        prereqfull = res.get('_NeedAllUnlockWeaponBodyId1')
        mats = {}
        for i in range(1, 6):
            k1 = f'_CreateEntityId{i}'
            k2 = f'_CreateEntityQuantity{i}'
            try:
                mats[res[k1]] = res[k2]
                material_icons.add(res[k1])
            except KeyError:
                continue
        passive = None
        if res.get('_WeaponPassiveAbilityGroupId'):
            passive_ab_group = index['WeaponPassiveAbility'].get(res['_WeaponPassiveAbilityGroupId'], by='_WeaponPassiveAbilityGroupId', exclude_falsy=True)
            passive = {}
            for p in passive_ab_group:
                ab = index['AbilityData'].get(p['_AbilityId'], full_query=False)
                ability_icons.add(ab['_AbilityIconName'].lower())
                # skins unlocked by this passive
                ab_skins = {}
                for i in (1, 2):
                    sid = f'_RewardWeaponSkinId{i}'
                    try:
                        skin = index['WeaponSkin'].get(p[sid], exclude_falsy=True)
                        ab_skins[i] = make_wpn_id(skin)
                    except (KeyError, TypeError):
                        continue
                # materials needed to unlock this passive
                ab_mats = {}
                for i in range(1, 6):
                    k1 = f'_UnlockMaterialId{i}'
                    k2 = f'_UnlockMaterialQuantity{i}'
                    try:
                        ab_mats[p[k1]] = p[k2]
                        material_icons.add(p[k1])
                    except KeyError:
                        continue
                # ability names carry a {ability_val0} placeholder
                ability_val0 = int(ab.get('_AbilityType1UpValue', 0))
                ability_info = {
                    'Icon': ab['_AbilityIconName'],
                    'NameEN': ab['_Name'].format(ability_val0=ability_val0).strip(),
                    'NameJP': ab['_NameJP'].format(ability_val0=ability_val0).strip(),
                    'NameCN': ab['_NameCN'].format(ability_val0=ability_val0).strip(),
                }
                passive[p['_WeaponPassiveAbilityNo']] = {
                    'UnbindReq': p.get('_UnlockConditionLimitBreakCount', 0),
                    'Ability': ability_info,
                    'Cost': p.get('_UnlockCoin', 0),
                    'Mats': ab_mats,
                    'Skins': ab_skins
                }
        processed[res['_Id']] = {
            'NameEN': res['_Name'],
            'NameJP': res['_NameJP'],
            'NameCN': res['_NameCN'],
            'Series': res['_WeaponSeriesId'],
            'Build': res.get('_WeaponBodyBuildupGroupId'),
            'Passive': passive,
            'Element': res['_ElementalType'],
            'Weapon': res['_WeaponType'],
            'Rarity': res['_Rarity'],
            'Prereq': {
                'Create': prereqcreate,
                'FullUp': prereqfull
            },
            'Cost': res.get('_CreateCoin', 0),
            'Mats': mats,
            'Skins': skins,
            # 'Bonus': any([res.get('_WeaponPassiveEffHp'), res.get('_WeaponPassiveEffAtk')]),
        }
    outfile = 'weapon.json'
    check_target_path(out)
    with open(os.path.join(out, outfile), 'w') as f:
        json.dump(processed, f)