def repair_armor_data():
    """Rewrites armors/armor_base.csv so armor rows follow armorset ordering.

    Armor entries are moved into a fresh DataMap in the order their sets
    appear in armorset_map (piece order given by cfg.armor_parts); any armor
    not referenced by a set is appended afterwards, then the result is dumped
    through ArmorBaseSchema and saved.
    """
    data = load_data()
    armor_map = data.armor_map
    armorset_map = data.armorset_map

    new_armor_map = DataMap()

    # Copy all items in armorset order
    for set_entry in armorset_map.values():
        # All armor pieces in the set; parts with no armor are None and dropped
        armor_names = [set_entry[part] for part in cfg.armor_parts]
        armor_names = list(filter(None, armor_names))

        # Language the set's armor names are written in (used for lookups)
        armor_lang = set_entry['armor_lang']

        for armor_name in armor_names:
            # pop() removes the entry from the old map so the leftover pass
            # below only sees armor that belongs to no set
            armor_id = armor_map.id_of(armor_lang, armor_name)
            armor = armor_map.pop(armor_id)
            new_armor_map.insert(armor)

    # Copy over remaining items (armor not referenced by any set)
    # NOTE(review): iterates armor_map directly rather than .values() like the
    # loop above — assumes DataMap.__iter__ yields entries; confirm.
    for remaining_item in armor_map:
        new_armor_map.insert(remaining_item)

    # Save results (todo: refactor, move to writer)
    # dump() here returns a (data, errors) pair (marshmallow 2.x style);
    # errors are intentionally ignored.
    armor_schema = schema.ArmorBaseSchema()
    result_list = new_armor_map.to_list()
    result, errors = armor_schema.dump(result_list, many=True)
    writer.save_csv("armors/armor_base.csv", result)
def repair_skill_data():
    """Reorganizes skill data ordering to match base map."""
    skill_map = load_data().skill_map

    # Re-saving the levels sub-file re-serializes it in base-map order
    writer.save_data_csv(
        "skills/skill_levels.csv",
        skill_map,
        key="levels",
        groups=['description'])
def repair_decoration_colors():
    """Syncs every decoration's icon_color with the icon_color of its skill,
    then re-saves decorations/decoration_base.csv."""
    data = load_data()

    # Pull the color from the skill each decoration is linked to (by english name)
    for deco_entry in data.decoration_map.values():
        skill = data.skill_map.entry_of("en", deco_entry['skill_en'])
        deco_entry['icon_color'] = skill['icon_color']

    # Serialize and persist; dump() returns a (data, errors) pair and
    # errors are intentionally ignored.
    deco_schema = schema.DecorationBaseSchema()
    dumped, _errors = deco_schema.dump(data.decoration_map.to_list(), many=True)
    writer.save_csv("decorations/decoration_base.csv", dumped)
def repair_rewards():
    """Normalizes the ordering of monster rewards, then re-saves the CSV.

    Per monster, rewards are grouped by rank (cfg.supported_ranks order),
    then by condition (monster_reward_conditions_map order), and within a
    condition sorted by percentage descending. Conditions that do not exist
    in the conditions map are reported and appended last.
    """
    data = load_data()

    for monster_id, monster_entry in data.monster_map.items():
        # If there are no rewards, skip
        if 'rewards' not in monster_entry:
            continue

        # One bucket per supported rank, in rank order
        rewards_per_rank = [[] for x in range(len(cfg.supported_ranks))]

        # split into ranks first (case-insensitive rank match)
        for idx, rank in enumerate(cfg.supported_ranks):
            for reward in monster_entry['rewards']:
                if reward['rank'].lower() == rank.lower():
                    rewards_per_rank[idx].append(reward)

        # Sanity check: every reward must have landed in exactly one rank bucket
        if sum(len(r) for r in rewards_per_rank) != len(monster_entry['rewards']):
            raise Exception(
                "Not all rewards successfully split, some may not belong to the right rank"
            )

        for idx, rewards in enumerate(rewards_per_rank):
            # Group this rank's rewards by their condition (english name)
            grouped_conditions = {}
            for r in rewards:
                grouped_conditions.setdefault(r['condition_en'], []).append(r)

            # Emit conditions in the canonical order defined by the conditions map,
            # each group sorted by drop percentage, highest first (None -> 0)
            new_rewards = []
            for condition_entry in data.monster_reward_conditions_map.values():
                condition = condition_entry['name_en']
                if condition not in grouped_conditions:
                    continue
                grouped_conditions[condition].sort(
                    key=lambda r: r['percentage'] or 0, reverse=True)
                new_rewards.extend(grouped_conditions[condition])
                del grouped_conditions[condition]

            # Anything left over has a condition missing from the conditions map;
            # report it but keep the data rather than dropping it
            for condition, entries in grouped_conditions.items():
                print(f"ERROR: condition {condition} should not exist")
                new_rewards.extend(entries)

            rewards_per_rank[idx] = new_rewards

        # Flatten the per-rank buckets back into a single ordered list
        monster_entry['rewards'] = sum(rewards_per_rank, [])

    # Now save the output. The actual monsters will be reordered by this operation
    writer.save_data_csv(
        "monsters/monster_rewards.csv",
        data.monster_map,
        key="rewards",
        schema=schema.MonsterReward())

    print("Repair complete")
def update_all():
    """Updates all supported entity types using merged chunk data from ingame binaries.

    Loads the existing CSV data as a base, validates the binary area map
    against known locations, then runs each entity updater in dependency
    order, finalizing items last (item updates depend on what the other
    updaters encountered).
    """
    # Imports are function-local: the binary-parsing modules are only needed
    # when this merge entry point actually runs.
    from mhdata.binary import metadata
    from mhdata.binary import ItemCollection, ArmorCollection, MonsterCollection
    from mhdata.load import load_data
    from .armor import update_armor, update_charms
    from .weapons import update_weapons, update_weapon_songs, update_kinsects
    from .monsters import update_monsters
    from .quests import update_quests
    from .items import update_items, update_decorations, register_combinations, ItemUpdater
    from . import simple_translate

    mhdata = load_data()
    print("Existing Data loaded. Using it as a base to merge new data")

    area_map = metadata.load_area_map()
    print("Area Map Loaded")

    # validate area map: every binary area name must already exist in the
    # english location map; abort the whole update on any mismatch
    error = False
    for name in area_map.values():
        if name not in mhdata.location_map.names('en'):
            print(f"Error: Area map has invalid location name {name}.")
            error = True
    if error:
        return
    print("Area Map validated")

    item_data = ItemCollection()
    armor_data = ArmorCollection()
    monster_data = MonsterCollection()
    item_updater = ItemUpdater(item_data)

    print()  # newline

    simple_translate.translate_skills(mhdata)
    update_monsters(mhdata, item_data, monster_data)
    update_armor(mhdata, item_updater, armor_data)
    update_charms(mhdata, item_updater, armor_data)
    update_weapons(mhdata, item_updater)
    update_decorations(mhdata, item_data)
    # Currently disabled updaters (imports kept so re-enabling is trivial):
    #update_weapon_songs(mhdata)
    #update_kinsects(mhdata, item_updater)
    #update_quests(mhdata, item_updater, monster_meta, area_map)

    # Now finalize the item updates from parsing the rest of the data
    register_combinations(mhdata, item_updater)
    update_items(item_updater)
def update_all():
    """Updates all supported entity types using merged chunk data from ingame binaries.

    Variant that reads the area map from an area_map.csv file shipped next to
    this module, validates it against known locations, then runs each entity
    updater, finalizing items last.
    """
    # Function-local imports: merge machinery is only needed when this runs
    from .items import ItemUpdater
    from mhdata.load import load_data
    from mhdata.merge.binary.load import MonsterMetadata
    from .armor import update_armor
    from .weapons import update_weapons, update_weapon_songs, update_kinsects
    from .monsters import update_monsters
    from .quests import update_quests
    from mhdata.io.csv import read_csv
    from os.path import dirname, abspath

    mhdata = load_data()
    print("Existing Data loaded. Using it as a base to merge new data")

    # Area map is a CSV checked in alongside this module: id -> area name
    this_dir = dirname(abspath(__file__))
    area_map = {
        int(r['id']): r['name']
        for r in read_csv(this_dir + '/area_map.csv')
    }
    print("Area Map Loaded")

    # validate area map: every name must exist in the english location map;
    # abort the entire update on any mismatch
    error = False
    for name in area_map.values():
        if name not in mhdata.location_map.names('en'):
            print(f"Error: Area map has invalid location name {name}.")
            error = True
    if error:
        return
    print("Area Map validated")

    item_updater = ItemUpdater()
    monster_meta = MonsterMetadata()
    print()  # newline

    update_armor(mhdata, item_updater)
    update_weapons(mhdata, item_updater)
    update_weapon_songs(mhdata)
    update_kinsects(mhdata, item_updater)
    update_monsters(mhdata, item_updater, monster_meta)
    update_quests(mhdata, item_updater, monster_meta, area_map)

    # Now finalize the item updates from parsing the rest of the data
    item_updater.update_items()
def update_all():
    """Updates all supported entity types using merged chunk data from ingame binaries.

    Variant that loads the area map via the local metadata module, validates
    it against known locations, then runs each entity updater, finalizing
    items last.
    """
    # Function-local imports: merge machinery is only needed when this runs
    from .items import ItemUpdater
    from mhdata.load import load_data
    from . import metadata
    from .armor import update_armor
    from .weapons import update_weapons, update_weapon_songs, update_kinsects
    from .monsters import update_monsters
    from .quests import update_quests

    mhdata = load_data()
    print("Existing Data loaded. Using it as a base to merge new data")

    area_map = metadata.load_area_map()
    print("Area Map Loaded")

    # validate area map: every name must exist in the english location map;
    # abort the entire update on any mismatch
    error = False
    for name in area_map.values():
        if name not in mhdata.location_map.names('en'):
            print(f"Error: Area map has invalid location name {name}.")
            error = True
    if error:
        return
    print("Area Map validated")

    item_updater = ItemUpdater()
    monster_meta = metadata.MonsterMetadata()
    print()  # newline

    update_armor(mhdata, item_updater)
    update_weapons(mhdata, item_updater)
    update_weapon_songs(mhdata)
    update_kinsects(mhdata, item_updater)
    update_monsters(mhdata, item_updater, monster_meta)
    update_quests(mhdata, item_updater, monster_meta, area_map)

    # Now finalize the item updates from parsing the rest of the data
    item_updater.update_items()
def update_weapons():
    """Merges in-game weapon binary data over the existing weapon CSVs.

    Walks every weapon tree (in spreadsheet order), binds stats, elements,
    skills, type-specific extras (phials/shelling/notes/sharpness/ammo/bow
    coatings) and crafting recipes onto new or existing entries, then writes
    the weapon CSV family and registers any newly encountered items.

    Fixes vs. previous revision: removed a duplicated weapon_type assignment,
    use `is not None` for the parent check, corrected a typo'd log message,
    and dropped the unused ammo-data local.
    """
    mhdata = load_data()
    print("Existing Data loaded. Using to update weapon info")

    # Binary readers/handlers for the various weapon-related chunk files
    weapon_loader = WeaponDataLoader()
    item_text_handler = ItemTextHandler()
    skill_text_handler = SkillTextHandler()
    notes_data = load_schema(wep_wsl.WepWsl, "common/equip/wep_whistle.wep_wsl")
    sharpness_reader = SharpnessDataReader()
    ammo_reader = WeaponAmmoLoader()
    coating_data = load_schema(bbtbl.Bbtbl, "common/equip/bottle_table.bbtbl")
    print("Loaded initial weapon binary data")

    def bind_weapon_blade_ext(weapon_type: str, existing_entry, binary: wp_dat.WpDatEntry):
        """Binds melee-only extra fields; resets all of them first so stale
        values from a previous merge never survive."""
        for key in [
                'kinsect_bonus', 'phial', 'phial_power', 'shelling',
                'shelling_level', 'notes'
        ]:
            existing_entry[key] = None
        if weapon_type == cfg.CHARGE_BLADE:
            existing_entry['phial'] = cb_phials[binary.wep1_id]
        if weapon_type == cfg.SWITCH_AXE:
            (phial, power) = s_axe_phials[binary.wep1_id]
            existing_entry['phial'] = phial
            existing_entry['phial_power'] = power
        if weapon_type == cfg.GUNLANCE:
            # first 5 are normals, second 5 are wide, third 5 are long
            shelling = ['normal', 'wide', 'long'][binary.wep1_id // 5]
            level = (binary.wep1_id % 5) + 1
            existing_entry['shelling'] = shelling
            existing_entry['shelling_level'] = level
        if weapon_type == cfg.INSECT_GLAIVE:
            existing_entry['kinsect_bonus'] = glaive_boosts[binary.wep1_id]
        if weapon_type == cfg.HUNTING_HORN:
            # Notes are encoded as three color ids joined into a string
            note_entry = notes_data[binary.wep1_id]
            notes = [note_entry.note1, note_entry.note2, note_entry.note3]
            notes = [str(note_colors[n]) for n in notes]
            existing_entry['notes'] = "".join(notes)

    # Store new weapon entries; new ids continue after the current max
    new_weapon_map = DataMap(languages="en", start_id=mhdata.weapon_map.max_id + 1)

    # Iterate over weapon types
    for weapon_type in cfg.weapon_types:
        print(f"Processing {weapon_type}")

        # Note: weapon data ordering is unknown. order field and tree_id asc are sometimes wrong
        # Therefore its unsorted, we have to work off the spreadsheet order
        weapon_tree = weapon_loader.load_tree(weapon_type)
        print(f"Loaded {weapon_type} weapon tree binary data")

        # Raw damage in the binary is stored pre-multiplier
        multiplier = cfg.weapon_multiplier[weapon_type]

        # Iterate over nodes in the weapon tree (does depth first search)
        for weapon_node in weapon_tree:
            binary = weapon_node.binary
            name = weapon_node.name
            existing_entry = mhdata.weapon_map.entry_of('en', name['en'])

            # Start from the existing entry (if any) so untouched fields survive
            new_entry = {}
            if existing_entry:
                new_entry = {**existing_entry}

            # Bind name and parent
            new_entry['name'] = name
            new_entry['weapon_type'] = weapon_type
            new_entry['previous_en'] = None
            if weapon_node.parent is not None:
                new_entry['previous_en'] = weapon_node.parent.name['en']

            # Bind info
            new_entry['rarity'] = binary.rarity + 1
            new_entry['attack'] = binary.raw_damage * multiplier
            new_entry['affinity'] = binary.affinity
            new_entry['defense'] = binary.defense or None
            new_entry['slot_1'] = binary.gem_slot1_lvl
            new_entry['slot_2'] = binary.gem_slot2_lvl
            new_entry['slot_3'] = binary.gem_slot3_lvl
            new_entry['elderseal'] = elderseal[binary.elderseal]

            # Bind Elements. A couple of dual-element weapons are special-cased
            # and keep their existing element data.
            if name['en'] in ["Twin Nails", "Fire and Ice"]:
                print(f"Skipping {name['en']} element data")
            else:
                # A non-zero hidden element id means the true element is hidden
                hidden = binary.hidden_element_id != 0
                element_id = binary.hidden_element_id if hidden else binary.element_id
                element_atk = binary.hidden_element_damage if hidden else binary.element_damage

                new_entry['element_hidden'] = hidden
                new_entry['element1'] = elements[element_id]
                new_entry['element1_attack'] = element_atk * 10 if element_atk else None
                new_entry['element2'] = None
                new_entry['element2_attack'] = None

            # Bind skill (skill id 0 means no skill)
            skill = skill_text_handler.get_skilltree_name(binary.skill_id)
            new_entry['skill'] = skill['en'] if binary.skill_id != 0 else None

            # Bind Extras (Blade/Gun/Bow data)
            if weapon_type in cfg.weapon_types_melee:
                bind_weapon_blade_ext(weapon_type, new_entry, binary)
                new_entry['sharpness'] = sharpness_reader.sharpness_for(binary)
            elif weapon_type in cfg.weapon_types_gun:
                ammo_name, _ = ammo_reader.create_data_for(
                    wtype=weapon_type,
                    tree=weapon_node.tree,
                    binary=weapon_node.binary)
                new_entry['ammo_config'] = ammo_name
            else:
                # TODO: Bows have an Enabled+ flag. Find out what it means
                # 1 = enabled, 2 = enabled+
                coating_binary = coating_data[binary.special_ammo_type]
                new_entry['bow'] = {
                    'close': coating_binary.close_range > 0,
                    'power': coating_binary.power > 0,
                    'paralysis': coating_binary.paralysis > 0,
                    'poison': coating_binary.poison > 0,
                    'sleep': coating_binary.sleep > 0,
                    'blast': coating_binary.blast > 0
                }

            # crafting data: a weapon may have a create recipe, an upgrade
            # recipe, or both
            new_entry['craft'] = []
            if weapon_node.craft:
                new_entry['craft'].append({
                    'type': 'Create',
                    **convert_recipe(item_text_handler, weapon_node.craft)
                })
            if weapon_node.upgrade:
                new_entry['craft'].append({
                    'type': 'Upgrade',
                    **convert_recipe(item_text_handler, weapon_node.upgrade)
                })

            new_weapon_map.insert(new_entry)

    # Write new data
    writer = create_writer()

    writer.save_base_map_csv(
        "weapons/weapon_base.csv",
        new_weapon_map,
        schema=schema.WeaponBaseSchema(),
        translation_filename="weapons/weapon_base_translations.csv")

    writer.save_data_csv(
        "weapons/weapon_sharpness.csv",
        new_weapon_map,
        key="sharpness",
        schema=schema.WeaponSharpnessSchema())

    writer.save_data_csv(
        "weapons/weapon_bow_ext.csv",
        new_weapon_map,
        key="bow",
        schema=schema.WeaponBowSchema())

    writer.save_data_csv(
        "weapons/weapon_craft.csv",
        new_weapon_map,
        key="craft",
        schema=schema.WeaponCraftSchema())

    writer.save_keymap_csv(
        "weapons/weapon_ammo.csv",
        ammo_reader.data,
        schema=schema.WeaponAmmoSchema())

    print("Weapon files updated\n")

    # Register any items encountered in recipes that our data doesn't know yet
    add_missing_items(item_text_handler.encountered, mhdata=mhdata)
def merge_weapons():
    """Merges weapon data from the external mhw-db.com API into our weapon map.

    Downloads the full weapon list, matches entries by english name (mapping
    their 1/2/3 suffixes to our I/II/III), reports mismatches between our
    stats and theirs, fills in fields we are missing, and writes the merged
    result to *_NEW.csv files for manual review.
    """
    inc_data = requests.get("https://mhw-db.com/weapons").json()
    data = load_data().weapon_map

    # Collected report lines, printed in batches at the end
    not_exist = []
    mismatches_atk = []
    mismatches_def = []
    mismatches_other = []

    def print_all(items):
        # Print a batch of report lines followed by a blank separator line
        for item in items:
            print(item)
        print()

    for weapon_inc in inc_data:
        inc_id = weapon_inc['id']
        inc_type = weapon_inc['type']
        name = weapon_inc['name']
        inc_label = f"{name} ({inc_type})"

        # Our system uses I/II/III, their's uses 1/2/3
        if name not in data.names('en'):
            name = name.replace(" 3", " III")
            name = name.replace(" 2", " II")
            name = name.replace(" 1", " I")

        if name not in data.names('en'):
            not_exist.append(f"{name} does not exist ({inc_type} {inc_id}).")
            continue  # todo: add to our database

        existing = data.entry_of('en', name)

        # Incoming basic data for the weapon entry
        inc_attack = weapon_inc['attack']['display']
        inc_defense = weapon_inc['attributes'].get('defense', 0)
        inc_phial = weapon_inc['attributes'].get('phialType', None)
        inc_phial_power = None
        inc_kinsect = weapon_inc['attributes'].get('boostType', None)
        inc_affinity = weapon_inc['attributes'].get('affinity', 0)

        # Ensure minimum of 3 slots (avoid out of bounds)
        weapon_inc['slots'] += [{'rank':0}] * 3
        inc_slot1 = weapon_inc['slots'][0]['rank']
        inc_slot2 = weapon_inc['slots'][1]['rank']
        inc_slot3 = weapon_inc['slots'][2]['rank']

        # If there are two values and the second is a number, populate the phial power
        # (their phialType can be e.g. "power 300")
        if inc_phial and ' ' in inc_phial:
            values = inc_phial.split(' ')
            if len(values) == 2 and values[1].isdigit():
                inc_phial = values[0]
                inc_phial_power = int(values[1])

        # Their shellingType is e.g. "normal Lv3"; split into type + level
        inc_shelling_type = None
        inc_shelling_level = None
        if 'shellingType' in weapon_inc['attributes']:
            (left, right) = weapon_inc['attributes']['shellingType'].split(' ')
            inc_shelling_type = left.lower()
            inc_shelling_level = int(right.lower().replace('lv', ''))

        # Simple validation comparisons; only warn, never overwrite here
        if existing['attack'] != inc_attack:
            mismatches_atk.append(
                f"WARNING: {inc_label} has mismatching attack " +
                f"(internal {existing['attack']} | external {inc_attack} | ext id {inc_id})")
        if (existing['defense'] or 0) != inc_defense:
            mismatches_def.append(
                f"WARNING: {inc_label} has mismatching defense " +
                f"(internal {existing['defense']} | external {inc_defense} | ext id {inc_id})")
        if existing['kinsect_bonus'] and existing['kinsect_bonus'] != inc_kinsect:
            mismatches_other.append(f"Warning: {inc_label} has mismatching kinsect bonus")
        if existing['phial'] and existing['phial'] != inc_phial:
            mismatches_other.append(f"WARNING: {inc_label} has mismatching phial")
        if existing['phial_power'] and existing['phial_power'] != inc_phial_power:
            mismatches_other.append(f"WARNING: {inc_label} has mismatching phial power")
        if existing['shelling'] and existing['shelling'] != inc_shelling_type:
            mismatches_other.append(f"Warning: {inc_label} has mismatching shell type")
        if existing['shelling_level'] and existing['shelling_level'] != inc_shelling_level:
            mismatches_other.append(f"Warning: {inc_label} has mismatching shell level")

        def copy_maybe(field_name, value):
            "Inner function to copy a value if no value exists and there is a new val"
            if not existing[field_name] and value:
                existing[field_name] = value

        def copy_with_warning(field_name, value):
            # Unconditional overwrite, but logged so changes are auditable
            if existing[field_name] != value:
                print(f"OVERRIDING: {inc_label} will get new {field_name}")
                existing[field_name] = value

        # Copy over new base data if there are new fields
        copy_maybe('kinsect_bonus', inc_kinsect)
        copy_maybe('phial', inc_phial)
        copy_maybe('phial_power', inc_phial_power)
        copy_maybe('shelling', inc_shelling_type)
        copy_maybe('shelling_level', inc_shelling_level)
        copy_maybe('affinity', inc_affinity)

        # Copy over with warning. TODO: Add arg to require opt in to overwrite slots
        copy_with_warning('slot_1', inc_slot1)
        copy_with_warning('slot_2', inc_slot2)
        copy_with_warning('slot_3', inc_slot3)

        # Add sharpness data for anything that's missing sharpness data.
        # Index 5 is presumably the fully-handicraft-leveled row and index 0
        # the base row — maxed when they're equal. TODO confirm against API docs.
        if 'durability' in weapon_inc and not existing.get('sharpness', None):
            inc_sharpness = weapon_inc['durability'][5]
            maxed = weapon_inc['durability'][0] == inc_sharpness
            existing['sharpness'] = {
                'maxed': 'TRUE' if maxed else 'FALSE',
                'red': inc_sharpness['red'],
                'orange': inc_sharpness['orange'],
                'yellow': inc_sharpness['yellow'],
                'green': inc_sharpness['green'],
                'blue': inc_sharpness['blue'],
                'white': inc_sharpness['white'],
                'purple': 0
            }

    # print errors and warnings
    print_all(not_exist)
    print_all(mismatches_atk)
    print_all(mismatches_def)
    print_all(mismatches_other)

    # Write to *_NEW.csv files so results can be reviewed before replacing originals
    weapon_base_schema = schema.WeaponBaseSchema()
    writer.save_base_map_csv('weapons/weapon_base_NEW.csv', data, schema=weapon_base_schema)
    writer.save_data_csv('weapons/weapon_sharpness_NEW.csv', data, key='sharpness')
def update_items(item_updater: ItemUpdater, *, mhdata=None):
    """Rebuilds the item CSVs from in-game item data merged over existing data.

    Three passes: (1) merge every in-game item that was encountered or already
    known, categorizing it; (2) re-add old entries missing from the new map;
    (3) reorder everything by category. Also writes artifact files listing
    unlinked items and all item ids.

    Args:
        item_updater: source of binary item data and encountered-item tracking.
        mhdata: preloaded data bundle; loaded on demand when omitted.
    """
    if not mhdata:
        mhdata = load_data()
        print("Existing Data loaded. Using to expand item list")

    new_item_map = DataMap(languages='en', start_id=mhdata.item_map.max_id + 1)

    # In-game items that were neither encountered nor already in our data
    unlinked_item_names = OrderedSet()

    # used to track dupes to throw proper errors
    updated_names = set()

    # First pass. Iterate over existing ingame items and merge with existing data
    for entry in item_updater.item_data:
        name_dict, description_dict = item_updater.name_and_description_for(
            entry.id, track=False)
        existing_item = mhdata.item_map.entry_of('en', name_dict['en'])

        # Skip (but record) items nothing references and we don't already know
        is_encountered = entry.id in item_updater.encountered_item_ids
        if not is_encountered and not existing_item:
            unlinked_item_names.add(name_dict['en'])
            continue

        if name_dict['en'] in updated_names:
            raise Exception(f"Duplicate item {name_dict['en']}")
        updated_names.add(name_dict['en'])

        # note: we omit buy price as items may have a buy price even if not sold.
        # We only care about the buy price of BUYABLE items
        new_data = {
            'name': name_dict,
            'description': description_dict,
            'rarity': entry.rarity + 1,
            'sell_price': None,
            'points': None
        }

        is_ez = entry.flags.ez
        is_account = entry.type == 'endemic'
        is_tradein = "(Trade-in Item)" in description_dict['en']
        is_appraisal = entry.flags.appraisal

        # Account (endemic) items are worth points, everything else zenny
        sell_value = entry.sell_price if entry.sell_price != 0 else None
        if is_account:
            new_data['points'] = sell_value
        else:
            new_data['sell_price'] = sell_value

        # Categorize; order of these branches matters (first match wins)
        if name_dict['en'] == 'Normal Ammo 1':
            new_data['category'] = 'hidden'
        elif is_ez:
            new_data['category'] = 'misc'
            new_data['subcategory'] = 'trade' if is_tradein else 'supply'
        elif is_account:
            new_data['category'] = 'misc'
            new_data['subcategory'] = 'trade' if is_tradein else 'account'
        elif is_appraisal or ('Appraised after investigation' in description_dict['en']):
            new_data['category'] = 'misc'
            new_data['subcategory'] = 'appraisal'
            new_data['sell_price'] = None  # why does this have values?
        else:
            new_data['category'] = entry.type
            new_data['subcategory'] = 'trade' if is_tradein else None

        # Whether we show carry limit at all is based on item type.
        # Materials are basically infinite carry
        infinite_carry = new_data['category'] == 'material'
        new_data['carry_limit'] = None if infinite_carry else entry.carry_limit

        if existing_item:
            new_item_map.insert({**existing_item, **new_data})
        else:
            new_item_map.insert(new_data)

    # Second pass, add old entries that are not in the new one
    for old_entry in mhdata.item_map.values():
        if old_entry.name('en') not in new_item_map.names('en'):
            new_item_map.insert(old_entry)

    # Third pass. Items need to be reordered based on type
    unsorted_item_map = new_item_map  # store reference to former map

    def filter_category(category, subcategory=None):
        "helper that returns items and then removes from unsorted item map"
        results = []
        for item in unsorted_item_map.values():
            if item['category'] == category and item['subcategory'] == subcategory:
                results.append(item)
        for result in results:
            del unsorted_item_map[result.id]
        return results

    # Pulled out separately so it can be slotted in before the ammo group
    normal_ammo_1 = unsorted_item_map.entry_of("en", "Normal Ammo 1")

    # start the before-mentioned third pass by creating a new map based off the old one
    new_item_map = DataMap(languages="en")
    new_item_map.extend(filter_category('item'))
    new_item_map.extend(filter_category('material'))
    new_item_map.extend(filter_category('material', 'trade'))
    if normal_ammo_1:
        new_item_map.insert(normal_ammo_1)
    new_item_map.extend(filter_category('ammo'))
    new_item_map.extend(filter_category('misc', 'appraisal'))
    new_item_map.extend(filter_category('misc', 'account'))
    new_item_map.extend(filter_category('misc', 'supply'))

    # Write out data
    writer = create_writer()

    writer.save_base_map_csv(
        "items/item_base.csv",
        new_item_map,
        schema=schema.ItemSchema(),
        translation_filename="items/item_base_translations.csv",
        translation_extra=['description'])

    # Write out artifact data
    print("Writing unlinked item names to artifacts")
    artifacts.write_names_artifact('items_unlinked.txt', unlinked_item_names)

    print("Writing all items and ids")
    # NOTE(review): uses item_updater.data here but item_updater.item_data
    # above — confirm both attributes exist / are intentional on ItemUpdater.
    artifact_data = [{
        'id': i.id,
        'name': i.name['en']
    } for i in item_updater.data]
    artifacts.write_dicts_artifact('items_ids.csv', artifact_data)

    print("Item files updated")
def update_armor():
    """Populates and updates armor information using the armorset_base as a source of truth.

    Loads armor series binary data, refreshes armorset entries from it (set
    name, rank, piece names, set bonus), then rebuilds every armor piece's
    stats, skills and recipe from the binaries, and finally re-saves the
    armorset/armor/bonus CSV family.
    """
    armor_series = load_armor_series()

    # Get number of times armor can be upgraded by rarity level.
    # Unk7 is max level pre-augment, Unk8 is max post-augment
    # Thanks to the MHWorld Modders for the above info
    rarity_upgrades = {}
    for entry in load_schema(arm_up.ArmUp, "common/equip/arm_upgrade.arm_up").entries:
        rarity_upgrades[entry.index + 1] = (entry.unk7 - 1, entry.unk8 - 1)

    print("Binary armor data loaded")

    mhdata = load_data()
    print(
        "Existing Data loaded. Using existing armorset data to drive new armor data."
    )

    print("Writing list of armorset names (in order) to artifacts")
    artifacts.write_names_artifact(
        'setnames.txt', [s.name['en'] for s in armor_series.values()])

    # Will store results. Language lookup and validation will be in english
    new_armorset_map = DataMap(languages="en", start_id=mhdata.armorset_map.max_id + 1)
    new_armor_map = DataMap(languages="en", start_id=mhdata.armor_map.max_id + 1)
    new_armorset_bonus_map = DataMap(languages="en")

    # Temporary storage for later processes
    all_set_skill_ids = OrderedSet()
    item_text_handler = ItemTextHandler()
    skill_text_handler = SkillTextHandler()
    # english armor name -> binary armor data, filled during the set pass
    armor_data_by_name = {}

    print("Updating set data, keyed by the existing names in armorset_base.csv")
    for armorset_entry in mhdata.armorset_map.values():
        armorseries_data = armor_series.get(armorset_entry.name('en'))
        if not armorseries_data:
            # No binary counterpart; keep the existing entry untouched
            print(
                f"Armor series {armorset_entry.name('en')} doesn't exist in binary, skipping"
            )
            new_armorset_map.insert(armorset_entry)
            continue

        new_entry = {
            **armorset_entry,
            'name': armorseries_data.name,
            'rank': armorseries_data.rank
        }

        # Set bonus is read off the first piece (set skills are shared per set)
        first_armor = armorseries_data.armors[0].binary
        if first_armor.set_skill1_lvl > 0:
            skill_id = first_armor.set_skill1
            all_set_skill_ids.add(skill_id)
            new_entry['bonus'] = skill_text_handler.get_skilltree_name(skill_id)['en']

        # Record piece names per slot; also index binaries by name for the armor pass
        for part in cfg.armor_parts:
            armor = armorseries_data.get_part(part)
            if armor:
                armor_data_by_name[armor.name['en']] = armor
                new_entry[part] = armor.name['en']
            else:
                new_entry[part] = None

        new_armorset_map.insert(new_entry)

    print("Armorset entries updated")

    print("Updating armor")
    for armorset_entry in new_armorset_map.values():
        # Handle armor pieces
        for part, armor_name in datafn.iter_armorset_pieces(armorset_entry):
            existing_armor = mhdata.armor_map.entry_of('en', armor_name)
            armor_data = armor_data_by_name.get(armor_name, None)

            if not armor_data:
                # Keep whatever we already had rather than dropping the piece
                print(
                    f"Failed to find binary armor data for {armor_name}, maintaining existing data"
                )
                new_armor_map.insert(existing_armor)
                continue

            armor_binary = armor_data.binary
            rarity = armor_binary.rarity + 1

            # Initial new armor data; defense max values derive from the
            # per-rarity upgrade counts (+2 defense per upgrade level)
            new_data = {
                'name': armor_data.name,
                'rarity': rarity,
                'type': part,
                'gender': gender_list[armor_binary.gender],
                'slot_1': armor_binary.gem_slot1_lvl,
                'slot_2': armor_binary.gem_slot2_lvl,
                'slot_3': armor_binary.gem_slot3_lvl,
                'defense_base': armor_binary.defense,
                'defense_max': armor_binary.defense + rarity_upgrades[rarity][0] * 2,
                'defense_augment_max': armor_binary.defense + rarity_upgrades[rarity][1] * 2,
                'defense_fire': armor_binary.fire_res,
                'defense_water': armor_binary.water_res,
                'defense_thunder': armor_binary.thunder_res,
                'defense_ice': armor_binary.ice_res,
                'defense_dragon': armor_binary.dragon_res,
                'skills': {},
                'craft': {}
            }

            # Preserve the existing id so references stay stable
            if existing_armor:
                new_data['id'] = existing_armor.id

            # Add skills to new armor data (binaries hold up to 2 skill slots)
            for i in range(1, 2 + 1):
                skill_lvl = getattr(armor_binary, f"skill{i}_lvl")
                if skill_lvl != 0:
                    skill_id = getattr(armor_binary, f"skill{i}")
                    name_en = skill_text_handler.get_skilltree_name(skill_id)['en']
                    new_data['skills'][f'skill{i}_name'] = name_en
                    new_data['skills'][f'skill{i}_pts'] = skill_lvl
                else:
                    new_data['skills'][f'skill{i}_name'] = None
                    new_data['skills'][f'skill{i}_pts'] = None

            # Add recipe to new armor data. Also track the encounter.
            recipe_binary = armor_data.recipe
            new_data['craft'] = convert_recipe(item_text_handler, recipe_binary)

            # Add new data to new armor map
            new_armor_map.insert(new_data)

    # Process set skills. As we don't currently understand the set -> skill map, we only translate
    # We pull the already established set skill name from existing CSV
    for bonus_entry in mhdata.armorset_bonus_map.values():
        skilltree = skill_text_handler.get_skilltree(bonus_entry.name('en'))
        name_dict = skill_text_handler.get_skilltree_name(skilltree.index)
        new_armorset_bonus_map.insert({**bonus_entry, 'name': name_dict})

    # Write new data
    writer = create_writer()

    writer.save_base_map_csv(
        "armors/armorset_base.csv",
        new_armorset_map,
        schema=schema.ArmorSetSchema(),
        translation_filename="armors/armorset_base_translations.csv")

    writer.save_base_map_csv(
        "armors/armor_base.csv",
        new_armor_map,
        schema=schema.ArmorBaseSchema(),
        translation_filename="armors/armor_base_translations.csv")

    writer.save_data_csv("armors/armor_skills_ext.csv", new_armor_map, key="skills")
    writer.save_data_csv("armors/armor_craft_ext.csv", new_armor_map, key="craft")

    writer.save_base_map_csv(
        "armors/armorset_bonus_base.csv",
        new_armorset_bonus_map,
        schema=schema.ArmorSetBonus(),
        translation_filename="armors/armorset_bonus_base_translations.csv")

    print("Armor files updated\n")

    # Register any recipe items our item data doesn't know yet
    add_missing_items(item_text_handler.encountered, mhdata=mhdata)
def update_armor():
    """Populates and updates armor information using the armorset_base as a source of truth.

    Older variant that reads armor stats, craft recipes and upgrade counts
    directly from the am_dat/eq_crt/arm_up binaries, rebuilds every armor
    piece referenced by the armorset map, and re-saves the armor CSV family.
    """
    armor_text = load_text("common/text/steam/armor")
    # NOTE(review): armorset_text is loaded but never used below.
    armorset_text = load_text("common/text/steam/armor_series")

    # Parses binary armor data, mapped by the english name.
    # gender == 0 / order == 0 rows are skipped (presumably non-player or
    # placeholder entries — TODO confirm against the am_dat format docs).
    armor_data = {}
    for armor_entry in load_schema(am_dat.AmDat, "common/equip/armor.am_dat").entries:
        if armor_entry.gender == 0:
            continue
        if armor_entry.order == 0:
            continue
        name_en = armor_text[armor_entry.gmd_name_index]['en']
        armor_data[name_en] = armor_entry

    # Parses craft data, mapped by the binary armor id
    armor_craft_data = {}
    for craft_entry in load_schema(eq_crt.EqCrt, "common/equip/armor.eq_crt").entries:
        armor_craft_data[craft_entry.equip_id] = craft_entry

    # Get number of times armor can be upgraded by rarity level.
    # Unk7 is max level pre-augment, Unk8 is max post-augment
    # Thanks to the MHWorld Modders for the above info
    rarity_upgrades = {}
    for entry in load_schema(arm_up.ArmUp, "common/equip/arm_upgrade.arm_up").entries:
        rarity_upgrades[entry.index + 1] = (entry.unk7 - 1, entry.unk8 - 1)

    print("Binary data loaded")

    mhdata = load_data()
    print(
        "Existing Data loaded. Using existing armorset data to drive new armor data."
    )

    # Will store results. Language lookup and validation will be in english
    new_armor_map = DataMap(languages="en")
    new_armorset_bonus_map = DataMap(languages="en")

    # Temporary storage for later processes
    all_set_skill_ids = OrderedSet()
    item_text_handler = ItemTextHandler()
    skill_text_handler = SkillTextHandler()

    print("Populating armor data, keyed by the armorset data")
    # Brand-new pieces get ids continuing after the current max
    next_armor_id = mhdata.armor_map.max_id + 1
    for armorset in mhdata.armorset_map.values():
        # Handle armor pieces
        for part, armor_name in datafn.iter_armorset_pieces(armorset):
            existing_armor = mhdata.armor_map.entry_of('en', armor_name)
            armor_binary = armor_data.get(armor_name)

            if not armor_binary:
                raise Exception(
                    f"Failed to find binary armor data for {armor_name}")

            if armor_binary.set_skill1_lvl > 0:
                all_set_skill_ids.add(armor_binary.set_skill1)

            rarity = armor_binary.rarity + 1
            name_dict = armor_text[armor_binary.gmd_name_index]

            # Initial new armor data; defense max values derive from the
            # per-rarity upgrade counts (+2 defense per upgrade level)
            new_data = {
                'name': name_dict,  # Override for translation support!
                'rarity': rarity,
                'type': part,
                'gender': gender_list[armor_binary.gender],
                'slot_1': armor_binary.gem_slot1_lvl,
                'slot_2': armor_binary.gem_slot2_lvl,
                'slot_3': armor_binary.gem_slot3_lvl,
                'defense_base': armor_binary.defense,
                'defense_max': armor_binary.defense + rarity_upgrades[rarity][0] * 2,
                'defense_augment_max': armor_binary.defense + rarity_upgrades[rarity][1] * 2,
                'defense_fire': armor_binary.fire_res,
                'defense_water': armor_binary.water_res,
                'defense_thunder': armor_binary.thunder_res,
                'defense_ice': armor_binary.ice_res,
                'defense_dragon': armor_binary.dragon_res,
                'skills': {},
                'craft': {}
            }

            # Add skills to new armor data (binaries hold up to 2 skill slots)
            for i in range(1, 2 + 1):
                skill_lvl = getattr(armor_binary, f"skill{i}_lvl")
                if skill_lvl != 0:
                    skill_id = getattr(armor_binary, f"skill{i}")
                    name_en = skill_text_handler.get_skilltree_name(skill_id)['en']
                    new_data['skills'][f'skill{i}_name'] = name_en
                    new_data['skills'][f'skill{i}_pts'] = skill_lvl
                else:
                    new_data['skills'][f'skill{i}_name'] = None
                    new_data['skills'][f'skill{i}_pts'] = None

            # Add recipe to new armor data. Also track the encounter.
            recipe_binary = armor_craft_data[armor_binary.id]
            new_data['craft'] = convert_recipe(item_text_handler, recipe_binary)

            # NOTE(review): armor_entry is assigned but never read afterwards
            armor_entry = None
            if not existing_armor:
                print(
                    f"Entry for {armor_name} not in armor map, creating new entry"
                )
                armor_entry = new_armor_map.add_entry(next_armor_id, new_data)
                next_armor_id += 1
            else:
                # Merge over the existing entry, keeping its id
                armor_entry = new_armor_map.add_entry(existing_armor.id, {
                    **existing_armor,
                    **new_data
                })

    # Process set skills. As we don't currently understand the set -> skill map, we only translate
    # We pull the already established set skill name from existing CSV
    for bonus_entry in mhdata.armorset_bonus_map.values():
        skilltree = skill_text_handler.get_skilltree(bonus_entry.name('en'))
        name_dict = skill_text_handler.get_skilltree_name(skilltree.index)
        new_armorset_bonus_map.insert({**bonus_entry, 'name': name_dict})

    # Write new data
    writer = create_writer()

    writer.save_base_map_csv(
        "armors/armor_base.csv",
        new_armor_map,
        schema=schema.ArmorBaseSchema(),
        translation_filename="armors/armor_base_translations.csv")

    writer.save_data_csv("armors/armor_skills_ext.csv", new_armor_map, key="skills")
    writer.save_data_csv("armors/armor_craft_ext.csv", new_armor_map, key="craft")

    writer.save_base_map_csv(
        "armors/armorset_bonus_base.csv",
        new_armorset_bonus_map,
        schema=schema.ArmorSetBonus(),
        translation_filename="armors/armorset_bonus_base_translations.csv")

    print("Armor files updated\n")

    # Register any recipe items our item data doesn't know yet
    add_missing_items(item_text_handler.encountered, mhdata=mhdata)
def add_missing_items(encountered_item_ids: Iterable[int], *, mhdata=None):
    """Expands the item CSVs with any in-game items we encountered but lack.

    Reads itemData.itm, merges encountered/known items over existing data,
    categorizes them, reorders everything by category, and re-saves
    items/item_base.csv.

    Args:
        encountered_item_ids: binary item ids referenced by other updaters.
        mhdata: preloaded data bundle; loaded on demand when omitted.
    """
    if not mhdata:
        mhdata = load_data()
        print("Existing Data loaded. Using to expand item list")

    # Binary items sorted by their in-game display order
    item_data = sorted(
        load_schema(itm.Itm, "common/item/itemData.itm").entries,
        key=lambda i: i.order)
    item_text_manager = ItemTextHandler()

    new_item_map = DataMap(languages='en')

    # First pass. Iterate over existing ingame items and merge with existing data
    for entry in item_data:
        name_dict, description_dict = item_text_manager.text_for(entry.id)
        existing_item = mhdata.item_map.entry_of('en', name_dict['en'])

        # Skip items nothing references and we don't already know about
        is_encountered = entry.id in encountered_item_ids
        if not is_encountered and not existing_item:
            continue

        # note: we omit buy price as items may have a buy price even if not sold.
        # We only care about the buy price of BUYABLE items
        new_data = {
            'name': name_dict,
            'description': description_dict,
            'rarity': entry.rarity + 1,
            'sell_price': entry.sell_price if entry.sell_price != 0 else None
        }

        is_ez = (entry.flags & itm.ItmFlag.IsQuestOnly.value) != 0
        is_account = item_type_list[entry.type] == 'endemic'
        is_tradein = "(Trade-in Item)" in description_dict['en']
        is_appraisal = (entry.flags & itm.ItmFlag.IsAppraisal.value) != 0

        # Categorize; order of these branches matters (first match wins)
        if name_dict['en'] == 'Normal Ammo 1':
            new_data['category'] = 'hidden'
        elif is_ez:
            new_data['category'] = 'misc'
            new_data['subcategory'] = 'trade' if is_tradein else 'supply'
        elif is_account:
            new_data['category'] = 'misc'
            new_data['subcategory'] = 'trade' if is_tradein else 'account'
        elif is_appraisal:
            new_data['category'] = 'misc'
            new_data['subcategory'] = 'appraisal'
            new_data['sell_price'] = None  # why does this have values?
        else:
            new_data['category'] = item_type_list[entry.type]
            new_data['subcategory'] = 'trade' if is_tradein else None

        # Whether we show carry limit at all is based on item type.
        # Materials are basically infinite carry
        infinite_carry = new_data['category'] == 'material'
        new_data['carry_limit'] = None if infinite_carry else entry.carry_limit

        if existing_item:
            new_item_map.insert({**existing_item, **new_data})
        else:
            new_item_map.insert(new_data)

    # Second pass, add old entries that are not in the new one
    for old_entry in mhdata.item_map.values():
        if old_entry.name('en') not in new_item_map.names('en'):
            new_item_map.insert(old_entry)

    # Third pass. Items need to be reordered based on type
    unsorted_item_map = new_item_map  # store reference to former map

    def filter_category(category, subcategory=None):
        "helper that returns items and then removes from unsorted item map"
        results = []
        for item in unsorted_item_map.values():
            if item['category'] == category and item['subcategory'] == subcategory:
                results.append(item)
        for result in results:
            del unsorted_item_map[result.id]
        return results

    # Pulled out separately so it can be slotted in before the ammo group
    normal_ammo_1 = unsorted_item_map.entry_of("en", "Normal Ammo 1")

    # start the before-mentioned third pass by creating a new map based off the old one
    new_item_map = DataMap(languages="en")
    new_item_map.extend(filter_category('item'))
    new_item_map.extend(filter_category('material'))
    new_item_map.extend(filter_category('material', 'trade'))
    if normal_ammo_1:
        new_item_map.insert(normal_ammo_1)
    new_item_map.extend(filter_category('ammo'))
    new_item_map.extend(filter_category('misc', 'appraisal'))
    new_item_map.extend(filter_category('misc', 'account'))
    new_item_map.extend(filter_category('misc', 'supply'))

    # Write out data
    writer = create_writer()

    writer.save_base_map_csv(
        "items/item_base.csv",
        new_item_map,
        schema=schema.ItemSchema(),
        translation_filename="items/item_base_translations.csv",
        translation_extra=['description'])

    print("Item files updated")
def mhdata():
    """Returns the loaded data bundle via load_data()."""
    loaded = load_data()
    return loaded
def mhdata_raw():
    """Returns the loaded data bundle via load_data()."""
    loaded = load_data()
    return loaded