def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export mod type data from ModType.dat to mod_types.json.

    Each entry maps a mod type name to the ids of its sell price types.
    """
    mod_types = {}
    for row in relational_reader["ModType.dat"]:
        sell_price_ids = [key["Id"] for key in row["ModSellPriceTypesKeys"]]
        mod_types[row["Name"]] = {"sell_price_types": sell_price_ids}
    write_json(mod_types, data_path, "mod_types")
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export crafting bench options from CraftingBenchOptions.dat.

    Disabled options and options above level 100 are skipped.
    """
    options = []
    for option in relational_reader['CraftingBenchOptions.dat']:
        # Skip disabled benches and the implausibly high-level filler rows.
        if option['IsDisabled'] or option['RequiredLevel'] > 100:
            continue
        # Flatten the per-category item class lists into a single id list.
        class_rows = itertools.chain.from_iterable(
            category['ItemClassesKeys']
            for category in option['CraftingItemClassCategoriesKeys'])
        options.append({
            'master': option['HideoutNPCsKey']['NPCMasterKey']['Id'],
            'bench_group': option['ModFamily'],
            'bench_tier': option['Tier'],
            'item_classes': [item_class['Id'] for item_class in class_rows],
            'cost': {base_item['Id']: amount for base_item, amount in option['Cost']},
            'actions': crafting_bench_options._get_actions(option, relational_reader),
        })
    write_json(options, data_path, 'crafting_bench_options')
def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export essence data from Essences.dat, keyed by base item id."""
    essences = {}
    for row in relational_reader["Essences.dat"]:
        base_item = row["BaseItemTypesKey"]
        essence_type = row["EssenceTypeKey"]
        # An item level restriction of 0 means "no restriction"; export null.
        level_cap = row["ItemLevelRestriction"]
        essences[base_item["Id"]] = {
            "name": base_item["Name"],
            "spawn_level_min": row["DropLevelMinimum"],
            "spawn_level_max": row["DropLevelMaximum"],
            "level": row["Level"],
            "item_level_restriction": level_cap if level_cap > 0 else None,
            "type": {
                "tier": essence_type["EssenceType"],
                "is_corruption_only": essence_type["IsCorruptedEssence"],
            },
            "mods": _convert_mods(row),
        }
    write_json(essences, data_path, "essences")
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export modifier data from Mods.dat to mods.json, keyed by mod id."""
    root = {}
    for mod in relational_reader["Mods.dat"]:
        # Some mods carry a manually corrected domain; default to the dat value.
        domain = MOD_DOMAIN_FIX.get(mod["Id"], mod["Domain"])
        if ignore_mod_domain(domain):
            continue
        entry = {
            "required_level": mod["Level"],
            "stats": _convert_stats(mod["Stats"]),
            "domain": domain.name.lower(),
            "name": mod["Name"],
            "type": mod["ModTypeKey"]["Name"],
            "generation_type": mod["GenerationType"].name.lower(),
            "group": mod["CorrectGroup"],
            "spawn_weights": _convert_spawn_weights(mod["SpawnWeight"]),
            "generation_weights": _convert_generation_weights(mod["GenerationWeight"]),
            "grants_buff": _convert_buff(mod["BuffDefinitionsKey"], mod["BuffValue"]),
            "grants_effects": _convert_granted_effects(mod["GrantedEffectsPerLevelKeys"]),
            "is_essence_only": mod["IsEssenceOnlyModifier"] > 0,
            "adds_tags": _convert_tags_keys(mod["TagsKeys"]),
        }
        # First occurrence wins; later duplicates are only reported.
        if mod["Id"] not in root:
            root[mod["Id"]] = entry
        else:
            print("Duplicate mod id:", mod["Id"])
    write_json(root, data_path, "mods")
def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export crafting bench options from CraftingBenchOptions.dat.

    Disabled options and options above level 100 are skipped.
    """
    options = []
    for option in relational_reader["CraftingBenchOptions.dat"]:
        # Skip disabled benches and the implausibly high-level filler rows.
        if option["IsDisabled"] or option["RequiredLevel"] > 100:
            continue
        # Flatten the per-category item class lists into a single id list.
        class_rows = itertools.chain.from_iterable(
            category["ItemClasses"]
            for category in option["CraftingItemClassCategories"])
        options.append({
            "master": option["HideoutNPCsKey"]["Hideout_NPCsKey"]["Name"],
            "bench_tier": option["Tier"],
            "item_classes": [item_class["Id"] for item_class in class_rows],
            "cost": {base_item["Id"]: amount for base_item, amount in option["Cost"]},
            "actions": crafting_bench_options._get_actions(option),
        })
    write_json(options, data_path, "crafting_bench_options")
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export gem tags from GemTags.dat; empty display names become null."""
    root = {
        tag['Id']: tag['Tag'] if tag['Tag'] != '' else None
        for tag in relational_reader['GemTags.dat']
    }
    write_json(root, data_path, 'gem_tags')
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export modifier data from Mods.dat to mods.json, keyed by mod id."""
    root = {}
    for mod in relational_reader['Mods.dat']:
        # Some mods carry a manually corrected domain; default to the dat value.
        domain = MOD_DOMAIN_FIX.get(mod['Id'], mod['Domain'])
        if ignore_mod_domain(domain):
            continue
        entry = {
            'required_level': mod['Level'],
            'stats': _convert_stats(mod['Stats']),
            'domain': domain.name.lower(),
            'name': mod['Name'],
            'type': mod['ModTypeKey']['Name'],
            'generation_type': mod['GenerationType'].name.lower(),
            'group': mod['CorrectGroup'],
            'spawn_weights': _convert_spawn_weights(mod['SpawnWeight']),
            'generation_weights': _convert_generation_weights(mod['GenerationWeight']),
            'grants_buff': _convert_buff(mod['BuffDefinitionsKey'], mod['BuffValue']),
            'grants_effects': _convert_granted_effects(mod['GrantedEffectsPerLevelKeys']),
            'is_essence_only': mod['IsEssenceOnlyModifier'] > 0,
            'adds_tags': _convert_tags_keys(mod['TagsKeys'])
        }
        # First occurrence wins; later duplicates are only reported.
        if mod['Id'] not in root:
            root[mod['Id']] = entry
        else:
            print("Duplicate mod id:", mod['Id'])
    write_json(root, data_path, 'mods')
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export quest rewards from QuestRewards.dat, grouped by quest id.

    Each reward records which character classes can receive it; a missing
    CharactersKey means the reward is available to all classes.
    """
    root = {}
    all_classes = ["Duelist", "Marauder", "Ranger", "Scion", "Shadow", "Templar", "Witch"]
    for reward_row in relational_reader['QuestRewards.dat']:
        base_item = reward_row["BaseItemTypesKey"]
        if base_item is None:
            continue
        quest = reward_row["QuestRewardOffersKey"]["QuestKey"]
        quest_entry = root.setdefault(quest["Id"], {
            "name": quest["Name"],
            "act": quest["Act"],
            "rewards": {}
        })
        reward = quest_entry["rewards"].setdefault(base_item["Id"], {
            "classes": [],
            "name": base_item["Name"],
            "type": base_item["ItemClassesKey"]["Id"]
        })
        character = reward_row["CharactersKey"]
        if character is None:
            # No class restriction: every class receives the reward.
            reward["classes"] = all_classes
        elif character["Name"] not in reward["classes"]:
            reward["classes"].append(character["Name"])
    write_json(root, data_path, 'quest_rewards')
def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export cluster jewel data, keyed by jewel base item id.

    Passive skills are grouped per jewel size first, then attached to
    every jewel of that size.
    """
    # Group the expansion passives by jewel size name.
    skills_by_size = {}
    for row in relational_reader["PassiveTreeExpansionSkills.dat"]:
        size_name = row["PassiveTreeExpansionJewelSizesKey"]["Name"]
        passive = row["PassiveSkillsKey"]
        skills_by_size.setdefault(size_name, []).append({
            "id": passive["Id"],
            "name": passive["Name"],
            "stats": {stat["Id"]: value for stat, value in passive["Stats"]},
            "tag": row["TagsKey"]["Id"],
        })
    data = {}
    for row in relational_reader["PassiveTreeExpansionJewels.dat"]:
        size_name = row["PassiveTreeExpansionJewelSizesKey"]["Name"]
        base_item = row["BaseItemTypesKey"]
        data[base_item["Id"]] = {
            "name": base_item["Name"],
            "size": size_name,
            "min_skills": row["MinNodes"],
            "max_skills": row["MaxNodes"],
            "small_indices": row["SmallIndices"],
            "notable_indices": row["NotableIndices"],
            "socket_indices": row["SocketIndices"],
            "total_indices": row["TotalIndices"],
            "passive_skills": skills_by_size[size_name],
        }
    write_json(data, data_path, "cluster_jewels")
def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export gem tags from GemTags.dat; empty display names become null."""
    root = {
        tag["Id"]: tag["Tag"] if tag["Tag"] != "" else None
        for tag in relational_reader["GemTags.dat"]
    }
    write_json(root, data_path, "gem_tags")
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export cluster jewel data, keyed by jewel base item id.

    Passive skills are grouped per jewel size first, then attached to
    every jewel of that size.
    """
    # Group the expansion passives by jewel size name.
    skills_by_size = {}
    for row in relational_reader['PassiveTreeExpansionSkills.dat']:
        size_name = row['PassiveTreeExpansionJewelSizesKey']['Name']
        passive = row['PassiveSkillsKey']
        skills_by_size.setdefault(size_name, []).append({
            'id': passive['Id'],
            'name': passive['Name'],
            'stats': {stat['Id']: value for stat, value in passive['Stats']},
            'tag': row['TagsKey']['Id']
        })
    data = {}
    for row in relational_reader['PassiveTreeExpansionJewels.dat']:
        size_name = row['PassiveTreeExpansionJewelSizesKey']['Name']
        base_item = row['BaseItemTypesKey']
        data[base_item['Id']] = {
            'name': base_item['Name'],
            'size': size_name,
            'min_skills': row['MinNodes'],
            'max_skills': row['MaxNodes'],
            'small_indices': row['SmallIndices'],
            'notable_indices': row['NotableIndices'],
            'socket_indices': row['SocketIndices'],
            'total_indices': row['TotalIndices'],
            'passive_skills': skills_by_size[size_name]
        }
    write_json(data, data_path, 'cluster_jewels')
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export quest vendor rewards, grouped by NPC id.

    Each NPC entry lists its rewards with the classes that may buy them
    and, where known, the quest that unlocks them.
    """
    all_classes = [
        "Duelist", "Marauder", "Ranger", "Scion", "Shadow", "Templar", "Witch"
    ]
    # Map each quest-state number to the id of the quest it belongs to.
    quest_by_state = {}
    for state_row in relational_reader['QuestStates.dat']:
        for state in state_row["QuestStates"]:
            quest_by_state[state] = state_row["QuestKey"]["Id"]
    root = {}
    for reward_row in relational_reader['QuestVendorRewards.dat']:
        npc = reward_row["NPCKey"]
        npc_id = npc["Id"]
        # We skip Act10 and Epilogue Lilly - They are identical to Act6 Lilly
        # and they make the json file much larger.
        if npc_id in ("Metadata/NPC/Epilogue/Lilly", "Metadata/NPC/Act10/Lilly"):
            continue
        characters = reward_row["CharactersKeys"]
        char_class = characters[0]["Name"] if len(characters) > 0 else "All"
        npc_name = npc["Name"]
        # Unknown1 appears to hold the act number here — TODO confirm.
        npc_entry = root.setdefault(
            npc_id, {"name": npc_name, "act": npc["Unknown1"], "rewards": {}})
        for base_item in reward_row["BaseItemTypesKeys"]:
            reward = npc_entry["rewards"].setdefault(base_item["Id"], {
                "name": base_item["Name"],
                "classes": [],
                "quest_id": ""
            })
            if char_class == "All":
                reward["classes"] = all_classes
            elif char_class not in reward["classes"]:
                reward["classes"].append(char_class)
            if npc_name != "Lilly Roth" and npc_name != "Siosa":
                quest_state = reward_row["QuestState"]
                if quest_state in quest_by_state:
                    reward["quest_id"] = quest_by_state[quest_state]
                elif quest_state == 244:
                    # BLATANT KLUDGE: Quest state 244 = a2q6, but isn't in QuestStates.dat
                    reward["quest_id"] = "a2q6"
    write_json(root, data_path, 'vendor_rewards')
def write(ggpk, data_path, relational_reader, translation_file_cache, **kwargs):
    """Export gem data and tooltips to gems.json and gem_tooltips.json.

    Granted effects are collected from four sources, in order: skill gems,
    secondary effects of skill gems, mod-granted effects, and the default
    attack (PlayerMelee). Earlier sources take precedence for duplicate ids.
    """
    gems = {}
    tooltips = {}
    converter = GemConverter(ggpk, relational_reader, translation_file_cache)

    # Skills from gems
    for gem in relational_reader['SkillGems.dat']:
        granted_effect = gem['GrantedEffectsKey']
        ge_id = granted_effect['Id']
        if ge_id in gems:
            # Warn only; the later row still overwrites the earlier one.
            print("Duplicate GrantedEffectsKey.Id '%s'" % ge_id)
        # Attribute multipliers used for the gem's stat requirements.
        multipliers = {
            'str': gem['Str'],
            'dex': gem['Dex'],
            'int': gem['Int']
        }
        gems[ge_id], tooltips[ge_id] = converter.convert(
            gem['BaseItemTypesKey'], granted_effect,
            gem['GrantedEffectsKey2'], gem['GemTagsKeys'], multipliers)

    # Secondary skills from gems. This adds the support skill implicitly provided by Bane
    for gem in relational_reader['SkillGems.dat']:
        granted_effect = gem['GrantedEffectsKey2']
        if not granted_effect:
            continue
        ge_id = granted_effect['Id']
        if ge_id in gems:
            continue
        gems[ge_id], tooltips[ge_id] = converter.convert(
            None, granted_effect, None, None, None)

    # Skills from mods
    for mod in relational_reader['Mods.dat']:
        if mod['GrantedEffectsPerLevelKeys'] is None:
            continue
        if ignore_mod_domain(mod['Domain']):
            continue
        for granted_effect_per_level in mod['GrantedEffectsPerLevelKeys']:
            granted_effect = granted_effect_per_level['GrantedEffectsKey']
            ge_id = granted_effect['Id']
            if ge_id in gems:
                # mod effects may exist as gems, those are handled above
                continue
            gems[ge_id], tooltips[ge_id] = converter.convert(
                None, granted_effect, None, None, None)

    # Default Attack/PlayerMelee is neither gem nor mod effect
    for granted_effect in relational_reader['GrantedEffects.dat']:
        ge_id = granted_effect['Id']
        if ge_id != 'PlayerMelee':
            continue
        gems[ge_id], tooltips[ge_id] = converter.convert(
            None, granted_effect, None, None, None)

    write_json(gems, data_path, 'gems')
    write_json(tooltips, data_path, 'gem_tooltips')
def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export flavour text from FlavourText.dat, keyed by flavour id."""
    root = {}
    for flavour in relational_reader["FlavourText.dat"]:
        flavour_id = flavour["Id"]
        # First occurrence wins; later duplicates are only reported.
        if flavour_id not in root:
            root[flavour_id] = flavour["Text"]
        else:
            print("Duplicate flavour id:", flavour_id)
    write_json(root, data_path, "flavour")
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export item class data with elder/shaper influence tags."""
    item_classes = {}
    for row in relational_reader['ItemClasses.dat']:
        item_classes[row['Id']] = {
            'name': row['Name'],
            'elder_tag': get_id_or_none(row['Elder_TagsKey']),
            'shaper_tag': get_id_or_none(row['Shaper_TagsKey']),
        }
    write_json(item_classes, data_path, 'item_classes')
def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export skill cost types from CostTypes.dat, keyed by cost type id."""
    root = {
        row["Id"]: {
            # Some cost types have no associated stat; export null then.
            "stat": row["StatsKey"]["Id"] if row["StatsKey"] else None,
            "format_text": row["FormatText"],
        }
        for row in relational_reader["CostTypes.dat"]
    }
    write_json(root, data_path, "cost_types")
def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export item class names from ItemClasses.dat, keyed by class id."""
    item_classes = {}
    for row in relational_reader["ItemClasses.dat"]:
        item_classes[row["Id"]] = {"name": row["Name"]}
    write_json(item_classes, data_path, "item_classes")
def write(file_system, data_path, relational_reader, translation_file_cache, **kwargs):
    """Export gem data to gems.json.

    Granted effects are collected from four sources, in order: skill gems,
    secondary effects of skill gems, mod-granted effects, and the default
    attack (PlayerMelee). Earlier sources take precedence for duplicate ids.
    """
    gems = {}
    converter = GemConverter(file_system, relational_reader)

    # Skills from gems
    for gem in relational_reader["SkillGems.dat"]:
        granted_effect = gem["GrantedEffectsKey"]
        ge_id = granted_effect["Id"]
        if ge_id in gems:
            # Warn only; the later row still overwrites the earlier one.
            print("Duplicate GrantedEffectsKey.Id '%s'" % ge_id)
        # Attribute multipliers used for the gem's stat requirements.
        multipliers = {
            "str": gem["Str"],
            "dex": gem["Dex"],
            "int": gem["Int"]
        }
        gems[ge_id] = converter.convert(gem["BaseItemTypesKey"], granted_effect,
                                        gem["GrantedEffectsKey2"],
                                        gem["GemTagsKeys"], multipliers)

    # Secondary skills from gems. This adds the support skill implicitly provided by Bane
    for gem in relational_reader["SkillGems.dat"]:
        granted_effect = gem["GrantedEffectsKey2"]
        if not granted_effect:
            continue
        ge_id = granted_effect["Id"]
        if ge_id in gems:
            continue
        gems[ge_id] = converter.convert(None, granted_effect, None, None, None)

    # Skills from mods
    for mod in relational_reader["Mods.dat"]:
        if mod["GrantedEffectsPerLevelKeys"] is None:
            continue
        if ignore_mod_domain(mod["Domain"]):
            continue
        for granted_effect_per_level in mod["GrantedEffectsPerLevelKeys"]:
            granted_effect = granted_effect_per_level["GrantedEffectsKey"]
            ge_id = granted_effect["Id"]
            if ge_id in gems:
                # mod effects may exist as gems, those are handled above
                continue
            gems[ge_id] = converter.convert(None, granted_effect, None, None, None)

    # Default Attack/PlayerMelee is neither gem nor mod effect
    for granted_effect in relational_reader["GrantedEffects.dat"]:
        ge_id = granted_effect["Id"]
        if ge_id != "PlayerMelee":
            continue
        gems[ge_id] = converter.convert(None, granted_effect, None, None, None)

    write_json(gems, data_path, "gems")
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export fossil crafting data from DelveCraftingModifiers.dat.

    Keyed by the fossil's base item id; records the mods it adds/forces,
    tag weight adjustments, tag restrictions and its special crafting flags.
    """
    root = {}
    for row in relational_reader['DelveCraftingModifiers.dat']:
        base_item_key = row["BaseItemTypesKey"]["Id"]
        name_from_base_item = row["BaseItemTypesKey"]["Name"]
        root[base_item_key] = {
            "name": name_from_base_item,
            "added_mods": [mod['Id'] for mod in row["AddedModKeys"]],
            "forced_mods": [mod['Id'] for mod in row["ForcedAddModKeys"]],
            # Tag/weight pairs are stored as two parallel columns; zip them.
            "negative_mod_weights": [{
                "tag": tag["Id"],
                "weight": value
            } for tag, value in zip(row["NegativeWeight_TagsKeys"],
                                    row["NegativeWeight_Values"])],
            "positive_mod_weights": [{
                "tag": tag["Id"],
                "weight": value
            } for tag, value in zip(row["Weight_TagsKeys"],
                                    row["Weight_Values"])],
            "forbidden_tags": [
                tag["TagsKey"]["Id"]
                for tag in row["ForbiddenDelveCraftingTagsKeys"]
            ],
            "allowed_tags": [
                tag["TagsKey"]["Id"]
                for tag in row["AllowedDelveCraftingTagsKeys"]
            ],
            "corrupted_essence_chance": row["CorruptedEssenceChance"],
            # Special crafting behaviour flags.
            "mirrors": row["CanMirrorItem"],
            "changes_quality": row["CanImproveQuality"],
            "rolls_lucky": row["HasLuckyRolls"],
            "enchants": row["CanRollEnchant"],
            "rolls_white_sockets": row["CanRollWhiteSockets"],
            "sell_price_mods": [mod['Id'] for mod in row["SellPrice_ModsKeys"]],
            "descriptions": [
                description["Description"]
                for description in row["DelveCraftingModifierDescriptionsKeys"]
            ],
            "blocked_descriptions": [
                description["Id"]
                for description in row["BlockedDelveCraftingModifierDescriptionsKeys"]
            ]
        }
    write_json(root, data_path, 'fossils')
def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export cluster jewel notable passives with their granting jewel stat."""
    data = [
        {
            "id": row["PassiveSkillsKey"]["Id"],
            "name": row["PassiveSkillsKey"]["Name"],
            "jewel_stat": row["StatsKey"]["Id"],
        }
        for row in relational_reader["PassiveTreeExpansionSpecialSkills.dat"]
    ]
    write_json(data, data_path, "cluster_jewel_notables")
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export stat translations, one json file per translation source file."""
    # Collects every format tag seen across all translation files.
    format_tags = set()
    for in_file, out_file in STAT_TRANSLATION_DICT.items():
        result = _get_stat_translations(
            format_tags,
            translation_file_cache[in_file].translations,
            get_custom_translation_file().translations)
        write_json(result, data_path, out_file)
    print("Possible format tags: {}".format(format_tags))
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export cluster jewel notable passives with their granting jewel stat."""
    data = [
        {
            'id': row['PassiveSkillsKey']['Id'],
            'name': row['PassiveSkillsKey']['Name'],
            'jewel_stat': row['StatsKey']['Id'],
        }
        for row in relational_reader['PassiveTreeExpansionSpecialSkills.dat']
    ]
    write_json(data, data_path, 'cluster_jewel_notables')
def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export stat translations, one json file per translation source file."""
    # Collects every format tag seen across all translation files.
    format_tags = set()
    for in_file, out_file in _build_stat_translation_file_map(file_system):
        result = _get_stat_translations(
            format_tags,
            translation_file_cache[in_file].translations,
            get_custom_translation_file().translations)
        write_json(result, data_path, out_file)
    print("Possible format tags: {}".format(format_tags))
def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export default monster stats per display level."""
    root = {
        row["DisplayLevel"]: {
            "physical_damage": row["Damage"],
            "evasion": row["Evasion"],
            "accuracy": row["Accuracy"],
            "life": row["Life"],
            "ally_life": row["AllyLife"],
            "armour": row["Armour"],
        }
        for row in relational_reader["DefaultMonsterStats.dat"]
    }
    write_json(root, data_path, "default_monster_stats")
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export default monster stats per display level."""
    root = {
        row['DisplayLevel']: {
            'physical_damage': row['Damage'],
            'evasion': row['Evasion'],
            'accuracy': row['Accuracy'],
            'life': row['Life'],
            'ally_life': row['AllyLife']
        }
        for row in relational_reader['DefaultMonsterStats.dat']
    }
    write_json(root, data_path, 'default_monster_stats')
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export mod type data from ModType.dat to mod_types.json.

    Each entry maps a mod type name to its sell price type ids and tag ids.
    """
    mod_types = {}
    for row in relational_reader['ModType.dat']:
        mod_types[row['Name']] = {
            "sell_price_types": [key["Id"] for key in row["ModSellPriceTypesKeys"]],
            "tags": [key["Id"] for key in row["TagsKeys"]]
        }
    write_json(mod_types, data_path, 'mod_types')
def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export base item data to base_items.json, keyed by base item id.

    Combines BaseItemTypes.dat with per-component tables (armour, shields,
    flasks, charges, weapons, currency) and tags inherited from .ot files.
    Raises ValueError for any item class missing from both the whitelist
    and the blacklist.
    """
    # Per-component lookup tables, defaulting for items without that component.
    attribute_requirements = _create_default_dict(relational_reader["ComponentAttributeRequirements.dat"])
    armour_types = _create_default_dict(relational_reader["ComponentArmour.dat"])
    shield_types = _create_default_dict(relational_reader["ShieldTypes.dat"])
    flask_types = _create_default_dict(relational_reader["Flasks.dat"])
    flask_charges = _create_default_dict(relational_reader["ComponentCharges.dat"])
    weapon_types = _create_default_dict(relational_reader["WeaponTypes.dat"])
    currency_type = _create_default_dict(relational_reader["CurrencyItems.dat"])
    # Not covered here: SkillGems.dat (see gems.py), Essences.dat (see essences.py)
    root = {}
    skipped_item_classes = set()
    for item in relational_reader["BaseItemTypes.dat"]:
        # Item classes must be explicitly white- or blacklisted so new
        # classes in a patch are noticed instead of silently exported.
        if item["ItemClassesKey"]["Id"] in ITEM_CLASS_BLACKLIST:
            skipped_item_classes.add(item["ItemClassesKey"]["Id"])
            continue
        elif item["ItemClassesKey"]["Id"] in ITEM_CLASS_WHITELIST:
            pass
        else:
            raise ValueError(f"Unknown item class, not in whitelist or blacklist: {item['ItemClassesKey']['Id']}")
        # Tags inherited via the item's .ot inheritance chain.
        inherited_tags = list(ot_file_cache[item["InheritsFrom"] + ".ot"]["Base"]["tag"])
        item_id = item["Id"]
        # Each converter adds its component's fields to `properties` in place.
        properties = {}
        _convert_armour_properties(armour_types[item_id], properties)
        _convert_shield_properties(shield_types[item_id], properties)
        _convert_flask_properties(flask_types[item_id], properties)
        _convert_flask_charge_properties(flask_charges[item_id], properties)
        _convert_weapon_properties(weapon_types[item_id], properties)
        _convert_currency_properties(currency_type[item_id], properties)
        root[item_id] = {
            "name": item["Name"],
            "item_class": item["ItemClassesKey"]["Id"],
            "inventory_width": item["Width"],
            "inventory_height": item["Height"],
            "drop_level": item["DropLevel"],
            "implicits": [mod["Id"] for mod in item["Implicit_ModsKeys"]],
            "tags": [tag["Id"] for tag in item["TagsKeys"]] + inherited_tags,
            "visual_identity": {
                "id": item["ItemVisualIdentityKey"]["Id"],
                "dds_file": item["ItemVisualIdentityKey"]["DDSFile"],
            },
            "requirements": _convert_requirements(attribute_requirements[item_id], item["DropLevel"]),
            "properties": properties,
            "release_state": get_release_state(item_id).name,
            "domain": item["ModDomainsKey"].name.lower(),
        }
        # Flask buffs live on the finished entry, not in `properties`.
        _convert_flask_buff(flask_types[item_id], root[item_id])
    print(f"Skipped the following item classes for base_items {skipped_item_classes}")
    write_json(root, data_path, "base_items")
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export item class data with all influence (conqueror) tags."""
    item_classes = {}
    for row in relational_reader["ItemClasses.dat"]:
        item_classes[row["Id"]] = {
            "name": row["Name"],
            "elder_tag": get_id_or_none(row["Elder_TagsKey"]),
            "shaper_tag": get_id_or_none(row["Shaper_TagsKey"]),
            # The dat file uses internal conqueror names for the last four.
            "crusader_tag": get_id_or_none(row["Crusader_TagsKey"]),
            "redeemer_tag": get_id_or_none(row["Eyrie_TagsKey"]),
            "hunter_tag": get_id_or_none(row["Basilisk_TagsKey"]),
            "warlord_tag": get_id_or_none(row["Adjudicator_TagsKey"]),
        }
    write_json(item_classes, data_path, "item_classes")
def write(ggpk, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export item class data with all influence (conqueror) tags."""
    item_classes = {}
    for row in relational_reader['ItemClasses.dat']:
        item_classes[row['Id']] = {
            'name': row['Name'],
            'elder_tag': get_id_or_none(row['Elder_TagsKey']),
            'shaper_tag': get_id_or_none(row['Shaper_TagsKey']),
            # The dat file uses internal conqueror names for the last four.
            'crusader_tag': get_id_or_none(row['Crusader_TagsKey']),
            'redeemer_tag': get_id_or_none(row['Eyrie_TagsKey']),
            'hunter_tag': get_id_or_none(row['Basilisk_TagsKey']),
            'warlord_tag': get_id_or_none(row['Adjudicator_TagsKey']),
        }
    write_json(item_classes, data_path, 'item_classes')
def write(file_system, data_path, relational_reader, translation_file_cache, ot_file_cache):
    """Export stat translations, one json file per translation source file.

    Raises ValueError if any stat description file is not listed in
    WRITTEN_FILES, so new game files cannot be silently missed.
    """
    missing_stat_descriptions = find_missing_stat_descriptions(file_system)
    if missing_stat_descriptions:
        raise ValueError(
            f"The following stat descriptions are currently not accounted for: {missing_stat_descriptions},"
            " please add to WRITTEN_FILES in constants.py")
    # Collects every format tag seen across all translation files.
    format_tags = set()
    for in_file, out_file in STAT_TRANSLATION_DICT.items():
        result = _get_stat_translations(
            format_tags,
            translation_file_cache[in_file].translations,
            get_custom_translation_file().translations)
        write_json(result, data_path, out_file)
    print("Possible format tags: {}".format(format_tags))