def repair_armor_data():
    """Rewrite armor_base.csv so armor rows follow armorset order.

    Loads current data, re-inserts every armor entry in the order its set
    appears in the armorset map, appends any armor not referenced by a set,
    and saves the result back to CSV.
    """
    data = load_data()
    armor_map = data.armor_map
    armorset_map = data.armorset_map

    new_armor_map = DataMap()

    # Copy all items in armorset order
    for set_entry in armorset_map.values():
        # All armor pieces in the set; drop empty part slots
        armor_names = [set_entry[part] for part in cfg.armor_parts]
        armor_names = list(filter(None, armor_names))
        armor_lang = set_entry['armor_lang']
        for armor_name in armor_names:
            armor_id = armor_map.id_of(armor_lang, armor_name)
            # pop() removes the entry from the source map so that only
            # set-less leftovers remain for the loop below
            armor = armor_map.pop(armor_id)
            new_armor_map.insert(armor)

    # Copy over remaining items
    # NOTE(review): iterates the map object directly, while other code here
    # uses .values(); confirm DataMap iteration yields entries, not keys.
    for remaining_item in armor_map:
        new_armor_map.insert(remaining_item)

    # Save results (todo: refactor, move to writer)
    armor_schema = schema.ArmorBaseSchema()
    result_list = new_armor_map.to_list()
    # (result, errors) tuple return — presumably marshmallow 2.x; in
    # marshmallow 3 dump() returns the data directly. Verify version.
    result, errors = armor_schema.dump(result_list, many=True)
    # NOTE(review): `writer` is not defined in this function; presumably a
    # module-level writer — sibling updaters call create_writer(). Confirm.
    writer.save_csv("armors/armor_base.csv", result)
def update_tools(mhdata):
    """Rebuild the tool data map from binary tool data and save it to CSV.

    Existing rows (matched by the upgraded English name) are merged so
    hand-maintained fields survive; missing fields receive defaults.
    """
    tool_data = ToolCollection()
    new_tools = DataMap(start_id=mhdata.tool_map.max_id + 1)

    for tool in tool_data.tools:
        upgraded_name_en = tool.name_upgraded['en']
        existing_entry = mhdata.tool_map.entry_of('en', upgraded_name_en)

        # Seed from the existing row (if any) so extra fields carry over
        entry = dict(existing_entry) if existing_entry else {}

        entry['name'] = tool.name_upgraded
        entry['name_base'] = tool.name
        entry['description'] = tool.description
        entry['tool_type'] = ('booster'
                              if 'booster' in tool.name['en'].lower()
                              else 'mantle')

        # Preserve hand-maintained values when present; otherwise default
        entry.setdefault('duration', 0)
        entry.setdefault('duration_upgraded', None)
        entry.setdefault('recharge', 0)

        for slot_idx in range(3):
            entry[f'slot_{slot_idx + 1}'] = tool.slots[slot_idx]

        entry.setdefault('icon_color', None)
        new_tools.insert(entry)

    writer = create_writer()
    writer.save_base_map_csv(
        "tools/tool_base.csv",
        new_tools,
        schema=schema.ToolSchema(),
        translation_filename="tools/tool_base_translations.csv",
        translation_extra=['name_base', 'description'])
def test_can_lookup_entry_by_name():
    """entry_of should locate an entry by its English name."""
    datamap = DataMap()
    for label in ("test1", "test2", "test3"):
        datamap.insert(create_test_entry_en(label))
    found = datamap.entry_of("en", "test2")
    assert found.name('en') == 'test2', "expected entry name to match"
def test_save_base_csv_symmetric(writer: DataReaderWriter):
    """Saving a base map to CSV and loading it back yields equal data."""
    source = DataMap()
    for label in ('test1', 'test2'):
        source.insert(create_entry_en(label))
    groups = ['name', 'description']
    writer.save_base_map_csv('testbase.csv', source, groups=groups)
    reloaded = writer.load_base_csv('testbase.csv', groups=groups)
    assert source.to_list() == reloaded.to_list(), "saved data didn't match"
def test_save_base_csv_symmetric(writer: DataReaderWriter):
    """Round-trip test for typed entries saved to CSV.

    Note: CSVs do not save typing info, so everything is strings.
    """
    source = DataMap()
    source.insert(create_entry_en('test1', {'id': '1'}))
    source.insert(create_entry_en('test2', {'id': '2'}))
    groups = ['name', 'description']
    writer.save_base_map_csv('testbase.csv', source, groups=groups)
    # NOTE(review): `languages` is not defined in this function; presumably
    # a module-level constant — confirm it exists at module scope.
    reloaded = writer.load_base_csv('testbase.csv', languages, groups=groups)
    assert source.to_list() == reloaded.to_list(), "saved data didn't match"
def test_to_dict_correct_data():
    """to_dict should serialize back to the exact id-keyed source dict."""
    expected = {
        25: create_test_entry_en('test1', {'id': 25, 'somedata': {'nested': 5}}),
        28: create_test_entry_en('test2', {'id': 28, 'somedata': {'alsonested': 'hey'}}),
    }
    datamap = DataMap()
    for entry in expected.values():
        datamap.insert(entry)
    assert datamap.to_dict() == expected, "expected serialized data to match original data"
def test_manual_id_resets_sequence():
    """Auto ids after a manual add_entry must continue past the manual id."""
    datamap = DataMap()
    datamap.add_entry(25, create_test_entry_en('test1'))
    inserted = datamap.insert(create_test_entry_en('test2'))
    assert inserted.id > 25, "new id should have been higher"
def update_kinsects(mhdata, item_updater: ItemUpdater):
    """Update kinsect CSV data from the binary kinsect tree.

    Writes debugging artifacts (all crafted kinsects, tree roots), rebuilds
    the kinsect data map from the binary entries, then saves the base and
    craft CSV files.
    """
    print('Loading kinsect info')
    kinsect_tree = load_kinsect_tree()

    def resolve_parent_name(entry):
        # English name of the parent node, or '' for tree roots
        if entry.parent:
            return entry.parent.name['en']
        return ''

    # Artifact: every craftable kinsect with id, name, and parent
    items = [
        f"{r.id},{r.name['en']},{resolve_parent_name(r)}"
        for r in kinsect_tree.crafted()
    ]
    artifacts.write_artifact('kinsect_all.txt', *items)
    # Artifact: roots of the upgrade tree only
    items = [f"{r.id},{r.name['en']}" for r in kinsect_tree.roots]
    artifacts.write_artifact('kinsect_roots.txt', *items)

    kinsect_map = DataMap(languages=['en'])
    for kinsect_node in kinsect_tree.crafted():
        binary = kinsect_node.binary
        new_entry = kinsect_map.insert({
            # shift binary id/rarity up by one to match the stored convention
            'id': binary.id + 1,
            'name': kinsect_node.name,
            'previous_en': resolve_parent_name(kinsect_node),
            'rarity': binary.rarity + 1,
            'attack_type': kinsect_attack_types[binary.attack_type],
            'dust_effect': kinsect_dusts[binary.dust_type],
            'power': binary.power,
            'speed': binary.speed,
            'heal': binary.heal
        })
        # Only upgraded (non-root) kinsects carry a craft recipe
        if kinsect_node.upgrade:
            new_entry['craft'] = convert_recipe(item_updater, kinsect_node.upgrade)

    # Write new data
    writer = create_writer()
    writer.save_base_map_csv(
        "weapons/kinsect_base.csv",
        kinsect_map,
        schema=schema.KinsectBaseSchema(),
        translation_filename="weapons/kinsect_base_translations.csv")
    writer.save_data_csv(
        "weapons/kinsect_craft_ext.csv",
        kinsect_map,
        key="craft",
        schema=schema.RecipeSchema())
    print("Kinsect files updated\n")
def test_row_add_value_in_middle():
    """set_value(after=...) should place the new key right after the anchor."""
    keys = ['id', 'test1', 'test2', 'test3']
    fields = dict.fromkeys(keys, 1)
    fields['name'] = {'en': 'a test'}  # required field
    datamap = DataMap()
    row = datamap.insert(fields)
    row.set_value('NEW', 1, after='test2')
    # note: name exists because it was manually added to the fields dict
    expected = ['id', 'test1', 'test2', 'NEW', 'test3', 'name']
    assert list(row.keys()) == expected, "Expected new to be after test2"
def update_weapons():
    """Rebuild weapon CSV data from the binary weapon trees.

    Loads the existing spreadsheet data, walks every weapon tree in binary
    order, merges binary stats into the existing entries (matched by English
    name), and writes the base/sharpness/bow/craft/ammo CSV files.

    Fixes over the previous revision: `is not None` instead of `!= None`,
    and the duplicated `weapon_type` assignment removed.
    """
    mhdata = load_data()
    print("Existing Data loaded. Using to update weapon info")

    weapon_loader = WeaponDataLoader()
    item_text_handler = ItemTextHandler()
    skill_text_handler = SkillTextHandler()
    notes_data = load_schema(wep_wsl.WepWsl, "common/equip/wep_whistle.wep_wsl")
    sharpness_reader = SharpnessDataReader()
    ammo_reader = WeaponAmmoLoader()
    coating_data = load_schema(bbtbl.Bbtbl, "common/equip/bottle_table.bbtbl")
    print("Loaded initial weapon binary data data")

    def bind_weapon_blade_ext(weapon_type: str, existing_entry, binary: wp_dat.WpDatEntry):
        """Populate melee-only fields (phial/shelling/kinsect/notes) on the entry."""
        for key in ['kinsect_bonus', 'phial', 'phial_power', 'shelling',
                    'shelling_level', 'notes']:
            existing_entry[key] = None
        if weapon_type == cfg.CHARGE_BLADE:
            existing_entry['phial'] = cb_phials[binary.wep1_id]
        if weapon_type == cfg.SWITCH_AXE:
            (phial, power) = s_axe_phials[binary.wep1_id]
            existing_entry['phial'] = phial
            existing_entry['phial_power'] = power
        if weapon_type == cfg.GUNLANCE:
            # first 5 are normals, second 5 are wide, third 5 are long
            shelling = ['normal', 'wide', 'long'][binary.wep1_id // 5]
            level = (binary.wep1_id % 5) + 1
            existing_entry['shelling'] = shelling
            existing_entry['shelling_level'] = level
        if weapon_type == cfg.INSECT_GLAIVE:
            existing_entry['kinsect_bonus'] = glaive_boosts[binary.wep1_id]
        if weapon_type == cfg.HUNTING_HORN:
            note_entry = notes_data[binary.wep1_id]
            notes = [note_entry.note1, note_entry.note2, note_entry.note3]
            notes = [str(note_colors[n]) for n in notes]
            existing_entry['notes'] = "".join(notes)

    # Store new weapon entries
    new_weapon_map = DataMap(languages="en", start_id=mhdata.weapon_map.max_id + 1)

    # Iterate over weapon types
    for weapon_type in cfg.weapon_types:
        print(f"Processing {weapon_type}")

        # Note: weapon data ordering is unknown. order field and tree_id asc
        # are sometimes wrong. Therefore its unsorted, we have to work off
        # the spreadsheet order.
        weapon_tree = weapon_loader.load_tree(weapon_type)
        print(f"Loaded {weapon_type} weapon tree binary data")

        multiplier = cfg.weapon_multiplier[weapon_type]

        # Iterate over nodes in the weapon tree (does depth first search)
        for weapon_node in weapon_tree:
            binary = weapon_node.binary
            name = weapon_node.name
            existing_entry = mhdata.weapon_map.entry_of('en', name['en'])

            new_entry = {}
            if existing_entry:
                new_entry = {**existing_entry}

            # Bind name and parent
            new_entry['name'] = name
            new_entry['weapon_type'] = weapon_type
            new_entry['previous_en'] = None
            if weapon_node.parent is not None:
                new_entry['previous_en'] = weapon_node.parent.name['en']

            # Bind info (binary rarity is stored one less than displayed)
            new_entry['rarity'] = binary.rarity + 1
            new_entry['attack'] = binary.raw_damage * multiplier
            new_entry['affinity'] = binary.affinity
            new_entry['defense'] = binary.defense or None
            new_entry['slot_1'] = binary.gem_slot1_lvl
            new_entry['slot_2'] = binary.gem_slot2_lvl
            new_entry['slot_3'] = binary.gem_slot3_lvl
            new_entry['elderseal'] = elderseal[binary.elderseal]

            # Bind Elements (dual-element weapons are maintained by hand)
            if name['en'] in ["Twin Nails", "Fire and Ice"]:
                print(f"Skipping {name['en']} element data")
            else:
                hidden = binary.hidden_element_id != 0
                element_id = binary.hidden_element_id if hidden else binary.element_id
                element_atk = binary.hidden_element_damage if hidden else binary.element_damage
                new_entry['element_hidden'] = hidden
                new_entry['element1'] = elements[element_id]
                new_entry['element1_attack'] = element_atk * 10 if element_atk else None
                new_entry['element2'] = None
                new_entry['element2_attack'] = None

            # Bind skill
            skill = skill_text_handler.get_skilltree_name(binary.skill_id)
            new_entry['skill'] = skill['en'] if binary.skill_id != 0 else None

            # Bind Extras (Blade/Gun/Bow data)
            if weapon_type in cfg.weapon_types_melee:
                bind_weapon_blade_ext(weapon_type, new_entry, binary)
                new_entry['sharpness'] = sharpness_reader.sharpness_for(binary)
            elif weapon_type in cfg.weapon_types_gun:
                (ammo_name, ammo_data) = ammo_reader.create_data_for(
                    wtype=weapon_type,
                    tree=weapon_node.tree,
                    binary=weapon_node.binary)
                new_entry['ammo_config'] = ammo_name
            else:
                # TODO: Bows have an Enabled+ flag. Find out what it means
                # 1 = enabled, 2 = enabled+
                coating_binary = coating_data[binary.special_ammo_type]
                new_entry['bow'] = {
                    'close': coating_binary.close_range > 0,
                    'power': coating_binary.power > 0,
                    'paralysis': coating_binary.paralysis > 0,
                    'poison': coating_binary.poison > 0,
                    'sleep': coating_binary.sleep > 0,
                    'blast': coating_binary.blast > 0
                }

            # crafting data
            new_entry['craft'] = []
            if weapon_node.craft:
                new_entry['craft'].append({
                    'type': 'Create',
                    **convert_recipe(item_text_handler, weapon_node.craft)
                })
            if weapon_node.upgrade:
                new_entry['craft'].append({
                    'type': 'Upgrade',
                    **convert_recipe(item_text_handler, weapon_node.upgrade)
                })

            new_weapon_map.insert(new_entry)

    # Write new data
    writer = create_writer()
    writer.save_base_map_csv(
        "weapons/weapon_base.csv",
        new_weapon_map,
        schema=schema.WeaponBaseSchema(),
        translation_filename="weapons/weapon_base_translations.csv")
    writer.save_data_csv(
        "weapons/weapon_sharpness.csv",
        new_weapon_map,
        key="sharpness",
        schema=schema.WeaponSharpnessSchema())
    writer.save_data_csv(
        "weapons/weapon_bow_ext.csv",
        new_weapon_map,
        key="bow",
        schema=schema.WeaponBowSchema())
    writer.save_data_csv(
        "weapons/weapon_craft.csv",
        new_weapon_map,
        key="craft",
        schema=schema.WeaponCraftSchema())
    writer.save_keymap_csv(
        "weapons/weapon_ammo.csv",
        ammo_reader.data,
        schema=schema.WeaponAmmoSchema())
    print("Weapon files updated\n")

    add_missing_items(item_text_handler.encountered, mhdata=mhdata)
def update_items(item_updater: ItemUpdater, *, mhdata=None):
    """Merge in-game item binary data with the existing item CSVs.

    Three passes: (1) merge binary entries into existing rows by English
    name, categorizing each item; (2) re-add old rows with no binary match;
    (3) rebuild the map in category order. Writes item CSVs plus artifacts
    listing unlinked items and all item ids.
    """
    if not mhdata:
        mhdata = load_data()
        print("Existing Data loaded. Using to expand item list")

    new_item_map = DataMap(languages='en', start_id=mhdata.item_map.max_id + 1)
    unlinked_item_names = OrderedSet()

    # used to track dupes to throw proper errors
    updated_names = set()

    # First pass. Iterate over existing ingame items and merge with existing data
    for entry in item_updater.item_data:
        name_dict, description_dict = item_updater.name_and_description_for(
            entry.id, track=False)
        existing_item = mhdata.item_map.entry_of('en', name_dict['en'])

        # Items neither encountered in-game nor already in the CSV are only
        # recorded in the "unlinked" artifact, not added to the data.
        is_encountered = entry.id in item_updater.encountered_item_ids
        if not is_encountered and not existing_item:
            unlinked_item_names.add(name_dict['en'])
            continue

        if name_dict['en'] in updated_names:
            raise Exception(f"Duplicate item {name_dict['en']}")
        updated_names.add(name_dict['en'])

        # note: we omit buy price as items may have a buy price even if not sold.
        # We only care about the buy price of BUYABLE items
        new_data = {
            'name': name_dict,
            'description': description_dict,
            'rarity': entry.rarity + 1,
            'sell_price': None,
            'points': None
        }

        is_ez = entry.flags.ez
        is_account = entry.type == 'endemic'
        is_tradein = "(Trade-in Item)" in description_dict['en']
        is_appraisal = entry.flags.appraisal

        # Account (endemic) items are worth points instead of zenny
        sell_value = entry.sell_price if entry.sell_price != 0 else None
        if is_account:
            new_data['points'] = sell_value
        else:
            new_data['sell_price'] = sell_value

        # Categorize the item. Order matters: the special-cased ammo row
        # first, then flag-based categories, then the binary type.
        if name_dict['en'] == 'Normal Ammo 1':
            new_data['category'] = 'hidden'
        elif is_ez:
            new_data['category'] = 'misc'
            new_data['subcategory'] = 'trade' if is_tradein else 'supply'
        elif is_account:
            new_data['category'] = 'misc'
            new_data['subcategory'] = 'trade' if is_tradein else 'account'
        elif is_appraisal or ('Appraised after investigation' in description_dict['en']):
            new_data['category'] = 'misc'
            new_data['subcategory'] = 'appraisal'
            new_data['sell_price'] = None  # why does this have values?
        else:
            new_data['category'] = entry.type
            new_data['subcategory'] = 'trade' if is_tradein else None

        # Whether we show carry limit at all is based on item type.
        # Materials are basically infinite carry
        infinite_carry = new_data['category'] == 'material'
        new_data['carry_limit'] = None if infinite_carry else entry.carry_limit

        if existing_item:
            new_item_map.insert({**existing_item, **new_data})
        else:
            new_item_map.insert(new_data)

    # Second pass, add old entries that are not in the new one
    for old_entry in mhdata.item_map.values():
        if old_entry.name('en') not in new_item_map.names('en'):
            new_item_map.insert(old_entry)

    # Third pass. Items need to be reordered based on type
    unsorted_item_map = new_item_map  # store reference to former map

    def filter_category(category, subcategory=None):
        "helper that returns items and then removes from unsorted item map"
        results = []
        for item in unsorted_item_map.values():
            if item['category'] == category and item['subcategory'] == subcategory:
                results.append(item)
        # delete after collecting to avoid mutating while iterating
        for result in results:
            del unsorted_item_map[result.id]
        return results

    normal_ammo_1 = unsorted_item_map.entry_of("en", "Normal Ammo 1")

    # start the before-mentioned third pass by creating a new map based off the old one
    new_item_map = DataMap(languages="en")
    new_item_map.extend(filter_category('item'))
    new_item_map.extend(filter_category('material'))
    new_item_map.extend(filter_category('material', 'trade'))
    if normal_ammo_1:
        # Pin Normal Ammo 1 (category 'hidden') at the head of the ammo group
        new_item_map.insert(normal_ammo_1)
    new_item_map.extend(filter_category('ammo'))
    new_item_map.extend(filter_category('misc', 'appraisal'))
    new_item_map.extend(filter_category('misc', 'account'))
    new_item_map.extend(filter_category('misc', 'supply'))

    # Write out data
    writer = create_writer()
    writer.save_base_map_csv(
        "items/item_base.csv",
        new_item_map,
        schema=schema.ItemSchema(),
        translation_filename="items/item_base_translations.csv",
        translation_extra=['description'])

    # Write out artifact data
    print("Writing unlinked item names to artifacts")
    artifacts.write_names_artifact('items_unlinked.txt', unlinked_item_names)

    print("Writing all items and ids")
    artifact_data = [{
        'id': i.id,
        'name': i.name['en']
    } for i in item_updater.data]
    artifacts.write_dicts_artifact('items_ids.csv', artifact_data)

    print("Item files updated")
def test_uses_provided_id():
    """insert should honor an explicit 'id' key instead of generating one."""
    datamap = DataMap()
    payload = {'id': 3}
    payload.update(create_test_entry_en("test1"))
    datamap.insert(payload)
    assert 3 in datamap.keys(), "entry should have used id 3"
def update_armor(mhdata, item_updater: ItemUpdater):
    """Populates and updates armor information using the armorset_base as a
    source of truth.

    Builds new armorset / armor / set-bonus maps from binary armor series
    data, keyed by the existing English names, then writes the armor CSVs.
    """
    armor_series = load_armor_series()

    # Get number of times armor can be upgraded by rarity level.
    # Unk7 is max level pre-augment, Unk8 is max post-augment
    # Thanks to the MHWorld Modders for the above info
    rarity_upgrades = {}
    for entry in load_schema(arm_up.ArmUp, "common/equip/arm_upgrade.arm_up").entries:
        rarity_upgrades[entry.index + 1] = (entry.unk7 - 1, entry.unk8 - 1)

    print("Binary armor data loaded")

    print("Writing list of armorset names (in order) to artifacts")
    artifacts.write_names_artifact(
        'setnames.txt', [s.name['en'] for s in armor_series.values()])

    # Will store results. Language lookup and validation will be in english
    new_armorset_map = DataMap(languages="en",
                               start_id=mhdata.armorset_map.max_id + 1)
    new_armor_map = DataMap(languages="en",
                            start_id=mhdata.armor_map.max_id + 1)
    new_armorset_bonus_map = DataMap(languages="en")

    # Temporary storage for later processes
    all_set_skill_ids = OrderedSet()

    item_text_handler = ItemTextHandler()
    skill_text_handler = SkillTextHandler()

    # Maps English armor piece name -> binary armor data; filled during the
    # set pass, consumed by the armor pass below.
    armor_data_by_name = {}

    print("Updating set data, keyed by the existing names in armorset_base.csv")
    for armorset_entry in mhdata.armorset_map.values():
        armorseries_data = armor_series.get(armorset_entry.name('en'))
        if not armorseries_data:
            # No binary match: keep the hand-maintained row untouched
            print(
                f"Armor series {armorset_entry.name('en')} doesn't exist in binary, skipping"
            )
            new_armorset_map.insert(armorset_entry)
            continue

        new_entry = {
            **armorset_entry,
            'name': armorseries_data.name,
            'rank': armorseries_data.rank
        }

        # Set bonus is read off the first piece in the series
        first_armor = armorseries_data.armors[0].binary
        if first_armor.set_skill1_lvl > 0:
            skill_id = first_armor.set_skill1
            all_set_skill_ids.add(skill_id)
            new_entry['bonus'] = skill_text_handler.get_skilltree_name(
                skill_id)['en']

        for part in cfg.armor_parts:
            armor = armorseries_data.get_part(part)
            if armor:
                armor_data_by_name[armor.name['en']] = armor
                new_entry[part] = armor.name['en']
            else:
                new_entry[part] = None

        new_armorset_map.insert(new_entry)
    print("Armorset entries updated")

    print("Updating armor")
    for armorset_entry in new_armorset_map.values():
        # Handle armor pieces
        for part, armor_name in datafn.iter_armorset_pieces(armorset_entry):
            existing_armor = mhdata.armor_map.entry_of('en', armor_name)
            armor_data = armor_data_by_name.get(armor_name, None)

            if not armor_data:
                print(
                    f"Failed to find binary armor data for {armor_name}, maintaining existing data"
                )
                new_armor_map.insert(existing_armor)
                continue

            armor_binary = armor_data.binary
            rarity = armor_binary.rarity + 1

            # Initial new armor data
            new_data = {
                'name': armor_data.name,
                'rarity': rarity,
                'type': part,
                'gender': gender_list[armor_binary.gender],
                'slot_1': armor_binary.gem_slot1_lvl,
                'slot_2': armor_binary.gem_slot2_lvl,
                'slot_3': armor_binary.gem_slot3_lvl,
                'defense_base': armor_binary.defense,
                # each upgrade level grants +2 defense (see rarity_upgrades)
                'defense_max': armor_binary.defense + rarity_upgrades[rarity][0] * 2,
                'defense_augment_max': armor_binary.defense + rarity_upgrades[rarity][1] * 2,
                'defense_fire': armor_binary.fire_res,
                'defense_water': armor_binary.water_res,
                'defense_thunder': armor_binary.thunder_res,
                'defense_ice': armor_binary.ice_res,
                'defense_dragon': armor_binary.dragon_res,
                'skills': {},
                'craft': {}
            }
            if existing_armor:
                new_data['id'] = existing_armor.id

            # Add skills to new armor data
            for i in range(1, 2 + 1):
                skill_lvl = getattr(armor_binary, f"skill{i}_lvl")
                if skill_lvl != 0:
                    skill_id = getattr(armor_binary, f"skill{i}")
                    name_en = skill_text_handler.get_skilltree_name(
                        skill_id)['en']
                    new_data['skills'][f'skill{i}_name'] = name_en
                    new_data['skills'][f'skill{i}_pts'] = skill_lvl
                else:
                    new_data['skills'][f'skill{i}_name'] = None
                    new_data['skills'][f'skill{i}_pts'] = None

            # Add recipe to new armor data. Also track the encounter.
            recipe_binary = armor_data.recipe
            new_data['craft'] = convert_recipe(item_text_handler, recipe_binary)

            # Add new data to new armor map
            new_armor_map.insert(new_data)

    # Process set skills. As we don't currently understand the set -> skill
    # map, we only translate. We pull the already established set skill name
    # from the existing CSV.
    for bonus_entry in mhdata.armorset_bonus_map.values():
        skilltree = skill_text_handler.get_skilltree(bonus_entry.name('en'))
        name_dict = skill_text_handler.get_skilltree_name(skilltree.index)
        new_armorset_bonus_map.insert({**bonus_entry, 'name': name_dict})

    # Write new data
    writer = create_writer()
    writer.save_base_map_csv(
        "armors/armorset_base.csv",
        new_armorset_map,
        schema=schema.ArmorSetSchema(),
        translation_filename="armors/armorset_base_translations.csv")
    writer.save_base_map_csv(
        "armors/armor_base.csv",
        new_armor_map,
        schema=schema.ArmorBaseSchema(),
        translation_filename="armors/armor_base_translations.csv")
    writer.save_data_csv("armors/armor_skills_ext.csv", new_armor_map, key="skills")
    writer.save_data_csv("armors/armor_craft_ext.csv", new_armor_map, key="craft")
    writer.save_base_map_csv(
        "armors/armorset_bonus_base.csv",
        new_armorset_bonus_map,
        schema=schema.ArmorSetBonus(),
        translation_filename="armors/armorset_bonus_base_translations.csv")

    print("Armor files updated\n")
    item_updater.add_missing_items(item_text_handler.encountered)
def update_weapons(mhdata, item_updater: ItemUpdater):
    """Rebuild weapon CSV data by merging binary weapon trees into mhdata.

    Iterates the existing weapon rows, looks up the binary counterpart by
    English name, applies Kulve augments where applicable, and writes the
    base/sharpness/bow/craft/ammo CSV files plus artifacts.

    Fix over the previous revision: the two bare ``except:`` clauses in the
    melee binder are narrowed to ``except LookupError`` (KeyError/IndexError)
    with explicit exception chaining — a bare except also swallowed
    KeyboardInterrupt/SystemExit and masked unrelated bugs as KeyError.
    """
    skill_text_handler = SkillTextHandler()

    print("Beginning load of binary weapon data")
    weapon_loader = WeaponDataLoader()
    notes_data = load_schema(wep_wsl.WepWsl, "common/equip/wep_whistle.wep_wsl")
    sharpness_reader = SharpnessDataReader()
    ammo_reader = WeaponAmmoLoader()
    coating_data = load_schema(bbtbl.Bbtbl, "common/equip/bottle_table.bbtbl")
    print("Loaded weapon binary data")

    def bind_weapon_blade_ext(weapon_type: str, existing_entry, weapon):
        """Populate melee-only fields (phial/shelling/kinsect/notes) on the entry."""
        binary: wp_dat.WpDatEntry = weapon.binary
        for key in ['kinsect_bonus', 'phial', 'phial_power', 'shelling',
                    'shelling_level', 'notes']:
            existing_entry[key] = None
        if weapon_type == cfg.CHARGE_BLADE:
            existing_entry['phial'] = cb_phials[binary.wep1_id]
        if weapon_type == cfg.SWITCH_AXE:
            try:
                (phial, power) = s_axe_phials[binary.wep1_id]
                existing_entry['phial'] = phial
                existing_entry['phial_power'] = power
            except LookupError as err:
                raise KeyError(f"Failed to load saxe phials for {weapon.name['en']} (SAXE ID: {binary.wep1_id})") from err
        if weapon_type == cfg.GUNLANCE:
            # first 5 are normals, second 5 are wide, third 5 are long
            if binary.wep1_id >= 15:
                # ids 15+ interleave types per level, starting at level 6
                value = binary.wep1_id - 15
                shelling = ['normal', 'wide', 'long'][value % 3]
                level = value // 3 + 6
            else:
                shelling = ['normal', 'wide', 'long'][binary.wep1_id // 5]
                level = (binary.wep1_id % 5) + 1
            existing_entry['shelling'] = shelling
            existing_entry['shelling_level'] = level
        if weapon_type == cfg.INSECT_GLAIVE:
            try:
                existing_entry['kinsect_bonus'] = glaive_boosts[binary.wep1_id]
            except LookupError as err:
                raise KeyError(f"Failed to load kinsect bonus for {weapon.name['en']} (BOOST ID: {binary.wep1_id})") from err
        if weapon_type == cfg.HUNTING_HORN:
            note_entry = notes_data[binary.wep1_id]
            notes = [note_entry.note1, note_entry.note2, note_entry.note3]
            notes = [str(note_colors[n]) for n in notes]
            existing_entry['notes'] = "".join(notes)

    # Load weapon tree binary data
    weapon_trees = {}
    for weapon_type in cfg.weapon_types:
        weapon_tree = weapon_loader.load_tree(weapon_type)
        print(f"Loaded {weapon_type} weapon tree binary data")
        weapon_trees[weapon_type] = weapon_tree

    # Load Kulve Augment Data
    kulve_augments = weapon_loader.load_kulve_augments()
    artifacts.write_dicts_artifact("kulve_augments.csv", kulve_augments.flattened())

    # Write artifact lines
    print("Writing artifact files for weapons (use it to add new weapons)")
    write_weapon_artifacts(mhdata, weapon_trees, ammo_reader)

    # Store new weapon entries
    new_weapon_map = DataMap(
        languages=["en"],
        start_id=mhdata.weapon_map.max_id + 1,
        keys_ex=["weapon_type"])

    # Iterate over existing weapons, merge new data in
    for existing_entry in mhdata.weapon_map.values():
        weapon_type = existing_entry['weapon_type']
        weapon_tree = weapon_trees[weapon_type]

        # Note: weapon data ordering is unknown. order field and tree_id asc
        # are sometimes wrong. Therefore its unsorted, we have to work off
        # the spreadsheet order.
        multiplier = cfg.weapon_multiplier[weapon_type]

        weapon = weapon_tree.by_name(existing_entry.name('en'))
        if not weapon:
            # Keep the hand-maintained row untouched when binary is missing
            print(f"Could not find binary entry for {existing_entry.name('en')}")
            new_weapon_map.insert(existing_entry)
            continue

        is_kulve = existing_entry['category'] == 'Kulve'
        is_special = existing_entry['category'] in ('Kulve', 'Safi')

        binary = weapon.binary
        name = weapon.name

        new_entry = {**existing_entry}

        # Bind name and parent
        new_entry['name'] = name
        new_entry['weapon_type'] = weapon_type
        new_entry['previous_en'] = None
        if weapon.parent is not None:
            new_entry['previous_en'] = weapon.parent.name['en']

        # Apply augmentation if its a kulve weapon that can get augmented
        if is_kulve:
            augment_params = kulve_augments.get(weapon_type, weapon.rarity)
            if augment_params:
                weapon = AugmentedWeapon(weapon, augment_params, 4)

        # Bind info
        new_entry['weapon_type'] = weapon_type
        new_entry['rarity'] = weapon.rarity
        # Round half-up to the displayed integer attack value
        new_entry['attack'] = (weapon.attack * multiplier).quantize(
            Decimal('1.'), rounding=ROUND_HALF_UP)
        new_entry['affinity'] = weapon.affinity
        new_entry['defense'] = weapon.defense or None
        new_entry['slot_1'] = binary.gem_slot1_lvl
        new_entry['slot_2'] = binary.gem_slot2_lvl
        new_entry['slot_3'] = binary.gem_slot3_lvl
        new_entry['elderseal'] = elderseal[binary.elderseal]

        # Bind Elements (dual-element weapons are maintained by hand)
        if name['en'] in ["Twin Nails", "Fire and Ice", "Blizzard and Blaze"]:
            print(f"Skipping {name['en']} element data")
        else:
            hidden = binary.hidden_element_id != 0
            element_atk = weapon.element_value
            new_entry['element_hidden'] = hidden
            new_entry['element1'] = weapon.element_type
            new_entry['element1_attack'] = element_atk * 10 if element_atk else None
            new_entry['element2'] = None
            new_entry['element2_attack'] = None

        # Bind skill
        skill = skill_text_handler.get_skilltree_name(binary.skill_id)
        new_entry['skill'] = skill['en'] if binary.skill_id != 0 else None

        # Bind Extras (Blade/Gun/Bow data)
        if weapon_type in cfg.weapon_types_melee:
            bind_weapon_blade_ext(weapon_type, new_entry, weapon)
            new_entry['sharpness'] = sharpness_reader.sharpness_for(binary)
        elif weapon_type in cfg.weapon_types_gun:
            tree = weapon.tree
            if is_special:
                tree = existing_entry['category']
            (ammo_name, ammo_data) = ammo_reader.create_data_for(
                wtype=weapon_type, tree=tree, binary=weapon.binary)
            new_entry['ammo_config'] = ammo_name
        else:
            # TODO: Bows have an Enabled+ flag. Find out what it means
            # 1 = enabled, 2 = enabled+
            coating_binary = coating_data[binary.special_ammo_type]
            new_entry['bow'] = {
                'close': coating_binary.close_range > 0,
                'power': coating_binary.power > 0,
                'paralysis': coating_binary.paralysis > 0,
                'poison': coating_binary.poison > 0,
                'sleep': coating_binary.sleep > 0,
                'blast': coating_binary.blast > 0
            }

        # crafting data
        new_entry['craft'] = []
        if weapon.craft:
            new_entry['craft'].append({
                'type': 'Create',
                **convert_recipe(item_updater, weapon.craft)
            })
        if weapon.upgrade:
            new_entry['craft'].append({
                'type': 'Upgrade',
                **convert_recipe(item_updater, weapon.upgrade)
            })

        new_weapon_map.insert(new_entry)

    # Write new data
    writer = create_writer()
    writer.save_base_map_csv(
        "weapons/weapon_base.csv",
        new_weapon_map,
        schema=schema.WeaponBaseSchema(),
        translation_filename="weapons/weapon_base_translations.csv"
    )
    writer.save_data_csv(
        "weapons/weapon_sharpness.csv",
        new_weapon_map,
        key="sharpness",
        schema=schema.WeaponSharpnessSchema()
    )
    writer.save_data_csv(
        "weapons/weapon_bow_ext.csv",
        new_weapon_map,
        key="bow",
        schema=schema.WeaponBowSchema()
    )
    writer.save_data_csv(
        "weapons/weapon_craft.csv",
        new_weapon_map,
        key="craft",
        schema=schema.WeaponRecipeSchema()
    )
    writer.save_keymap_csv(
        "weapons/weapon_ammo.csv",
        ammo_reader.data,
        schema=schema.WeaponAmmoSchema()
    )
    print("Weapon files updated\n")
def update_armor():
    """Populates and updates armor information using the armorset_base as a
    source of truth.

    Parses binary armor/craft/upgrade data, merges it into the existing armor
    map keyed by English name, and writes the armor CSV files.
    """
    armor_text = load_text("common/text/steam/armor")
    # NOTE(review): armorset_text is loaded but never used in this function.
    armorset_text = load_text("common/text/steam/armor_series")

    # Parses binary armor data, mapped by the english name
    armor_data = {}
    for armor_entry in load_schema(am_dat.AmDat, "common/equip/armor.am_dat").entries:
        # skip rows flagged with gender 0 / order 0 (filtered in binary data)
        if armor_entry.gender == 0:
            continue
        if armor_entry.order == 0:
            continue
        name_en = armor_text[armor_entry.gmd_name_index]['en']
        armor_data[name_en] = armor_entry

    # Parses craft data, mapped by the binary armor id
    armor_craft_data = {}
    for craft_entry in load_schema(eq_crt.EqCrt, "common/equip/armor.eq_crt").entries:
        armor_craft_data[craft_entry.equip_id] = craft_entry

    # Get number of times armor can be upgraded by rarity level.
    # Unk7 is max level pre-augment, Unk8 is max post-augment
    # Thanks to the MHWorld Modders for the above info
    rarity_upgrades = {}
    for entry in load_schema(arm_up.ArmUp, "common/equip/arm_upgrade.arm_up").entries:
        rarity_upgrades[entry.index + 1] = (entry.unk7 - 1, entry.unk8 - 1)

    print("Binary data loaded")

    mhdata = load_data()
    print(
        "Existing Data loaded. Using existing armorset data to drive new armor data."
    )

    # Will store results. Language lookup and validation will be in english
    new_armor_map = DataMap(languages="en")
    new_armorset_bonus_map = DataMap(languages="en")

    # Temporary storage for later processes
    all_set_skill_ids = OrderedSet()

    item_text_handler = ItemTextHandler()
    skill_text_handler = SkillTextHandler()

    print("Populating armor data, keyed by the armorset data")
    next_armor_id = mhdata.armor_map.max_id + 1
    for armorset in mhdata.armorset_map.values():
        # Handle armor pieces
        for part, armor_name in datafn.iter_armorset_pieces(armorset):
            existing_armor = mhdata.armor_map.entry_of('en', armor_name)
            armor_binary = armor_data.get(armor_name)

            if not armor_binary:
                raise Exception(
                    f"Failed to find binary armor data for {armor_name}")
            if armor_binary.set_skill1_lvl > 0:
                all_set_skill_ids.add(armor_binary.set_skill1)

            rarity = armor_binary.rarity + 1
            name_dict = armor_text[armor_binary.gmd_name_index]

            # Initial new armor data
            new_data = {
                'name': name_dict,  # Override for translation support!
                'rarity': rarity,
                'type': part,
                'gender': gender_list[armor_binary.gender],
                'slot_1': armor_binary.gem_slot1_lvl,
                'slot_2': armor_binary.gem_slot2_lvl,
                'slot_3': armor_binary.gem_slot3_lvl,
                'defense_base': armor_binary.defense,
                # each upgrade level grants +2 defense (see rarity_upgrades)
                'defense_max': armor_binary.defense + rarity_upgrades[rarity][0] * 2,
                'defense_augment_max': armor_binary.defense + rarity_upgrades[rarity][1] * 2,
                'defense_fire': armor_binary.fire_res,
                'defense_water': armor_binary.water_res,
                'defense_thunder': armor_binary.thunder_res,
                'defense_ice': armor_binary.ice_res,
                'defense_dragon': armor_binary.dragon_res,
                'skills': {},
                'craft': {}
            }

            # Add skills to new armor data
            for i in range(1, 2 + 1):
                skill_lvl = getattr(armor_binary, f"skill{i}_lvl")
                if skill_lvl != 0:
                    skill_id = getattr(armor_binary, f"skill{i}")
                    name_en = skill_text_handler.get_skilltree_name(
                        skill_id)['en']
                    new_data['skills'][f'skill{i}_name'] = name_en
                    new_data['skills'][f'skill{i}_pts'] = skill_lvl
                else:
                    new_data['skills'][f'skill{i}_name'] = None
                    new_data['skills'][f'skill{i}_pts'] = None

            # Add recipe to new armor data. Also track the encounter.
            recipe_binary = armor_craft_data[armor_binary.id]
            new_data['craft'] = convert_recipe(item_text_handler, recipe_binary)

            # NOTE(review): armor_entry is assigned but never read afterwards.
            armor_entry = None
            if not existing_armor:
                print(
                    f"Entry for {armor_name} not in armor map, creating new entry"
                )
                armor_entry = new_armor_map.add_entry(next_armor_id, new_data)
                next_armor_id += 1
            else:
                armor_entry = new_armor_map.add_entry(existing_armor.id, {
                    **existing_armor,
                    **new_data
                })

    # Process set skills. As we don't currently understand the set -> skill
    # map, we only translate. We pull the already established set skill name
    # from the existing CSV.
    for bonus_entry in mhdata.armorset_bonus_map.values():
        skilltree = skill_text_handler.get_skilltree(bonus_entry.name('en'))
        name_dict = skill_text_handler.get_skilltree_name(skilltree.index)
        new_armorset_bonus_map.insert({**bonus_entry, 'name': name_dict})

    # Write new data
    writer = create_writer()
    writer.save_base_map_csv(
        "armors/armor_base.csv",
        new_armor_map,
        schema=schema.ArmorBaseSchema(),
        translation_filename="armors/armor_base_translations.csv")
    writer.save_data_csv("armors/armor_skills_ext.csv", new_armor_map, key="skills")
    writer.save_data_csv("armors/armor_craft_ext.csv", new_armor_map, key="craft")
    writer.save_base_map_csv(
        "armors/armorset_bonus_base.csv",
        new_armorset_bonus_map,
        schema=schema.ArmorSetBonus(),
        translation_filename="armors/armorset_bonus_base_translations.csv")

    print("Armor files updated\n")
    add_missing_items(item_text_handler.encountered, mhdata=mhdata)
def update_quests(mhdata, item_updater: ItemUpdater, monster_data: MonsterCollection, area_map):
    """Merge binary quest data into the quest CSVs.

    Warns on exact-duplicate quests, writes raw quest artifacts, merges new
    binary data into existing rows by id, writes a quests_new.csv artifact
    for unmatched quests, and saves the quest CSV files.
    """
    print('Beginning load of quest binary data')
    quests = load_quests()
    print('Loaded quest binary data')

    quest_data = [
        get_quest_data(q, item_updater, monster_data, area_map)
        for q in quests
    ]
    quest_by_id = {q.id: q for q in quests}
    quest_data_by_id = {q['id']: q for q in quest_data}

    # test for duplicates first.
    duplicate_candidates = get_quests_with_duplicate_names(quest_by_id)
    for (q1, q2) in duplicate_candidates:
        quest2_data = quest_data_by_id[q2.id]
        # Only warn when the quests are identical beyond sharing a name
        if compare_quest_data(quest_data_by_id[q1.id], quest2_data):
            quest_name = q1.name['en']
            print(f'Warning: Quest {quest_name} has exact duplicates.')

    write_quest_raw_data(quests, item_updater.data, monster_data)
    print(
        'Quest artifacts written. Copy ids and names to quest_base.csv to add to build'
    )

    # Merge the quest data. Rows matched by id are updated in place;
    # unmatched rows (by id AND name) are collected into quest_new.
    existing_quest_names = set(q['name_en'] for q in mhdata.quest_map.values())
    quest_new = DataMap(languages=[])
    for raw in quest_data:
        existing_entry = mhdata.quest_map.get(raw['id'])
        if existing_entry:
            existing_entry.update(raw)
        elif raw['name']['en'] not in existing_quest_names:
            quest_new.insert(raw)
    print('Quests merged')

    artifact_writer = create_artifact_writer()
    artifact_writer.save_base_map_csv(
        "quests_new.csv",
        quest_new,
        translation_filename="quest_new_translations.csv",
        translation_extra=['objective', 'description'],
        schema=schema.QuestBaseSchema())
    print(
        'Quest artifact quest_new.csv added. Add any new entries to quest_base.csv'
    )

    writer = create_writer()
    writer.save_base_map_csv(
        "quests/quest_base.csv",
        mhdata.quest_map,
        translation_filename="quests/quest_base_translations.csv",
        translation_extra=['objective', 'description'],
        schema=schema.QuestBaseSchema(),
        key_join='id')
    writer.save_data_csv('quests/quest_monsters.csv', mhdata.quest_map,
                         key='monsters', key_join='id')
    writer.save_data_csv('quests/quest_rewards.csv', mhdata.quest_map,
                         key='rewards', key_join='id')
    print('Quest files updated\n')
def test_add_entries_adds_length():
    """Inserting entries should grow the map length accordingly."""
    data_map = DataMap()
    for label in ("test1", "test2"):
        data_map.insert(create_test_entry_en(label))
    assert len(data_map) == 2, "expected 2 entries to exist"
def update_charms(mhdata, item_updater: ItemUpdater, armor_collection: ArmorCollection):
    "Rebuilds charm data, treating charm_base as the authoritative entry list"
    print("Writing list of charm names (in order) to artifacts")

    # Dump the raw charm ordering (name + parent name) for manual inspection.
    artifacts.write_dicts_artifact(
        'charms_raw.csv',
        [{'name_en': c.name['en'], 'parent': c.parent and c.parent.name['en']}
         for c in armor_collection.charms])

    skill_text_handler = SkillTextHandler()
    binary_by_name = {c.name['en']: c for c in armor_collection.charms}

    merged_map = DataMap(languages=["en"])
    for base_entry in mhdata.charm_map.values():
        merged = {**base_entry}
        binary = binary_by_name.get(base_entry['name_en'])

        # No binary counterpart: keep the existing entry untouched.
        if not binary:
            print(
                f"Warning: Charm {base_entry['name_en']} has no associated binary data"
            )
            merged_map.insert(merged)
            continue

        merged['name'] = binary.name
        merged['previous_en'] = binary.parent and binary.parent.name['en']
        merged['rarity'] = binary.rarity

        # Always emit exactly two skill slots, padding absent ones with None.
        padded_skills = binary.skills + [(None, None)] * (2 - len(binary.skills))
        for slot, (skill_id, skill_lvl) in enumerate(padded_skills, start=1):
            has_skill = skill_id is not None
            merged[f'skill{slot}_name'] = (
                skill_text_handler.get_skilltree_name(skill_id)['en']
                if has_skill else None)
            merged[f'skill{slot}_level'] = skill_lvl if has_skill else None

        # Only recipes that actually exist make it into the craft list.
        merged['craft'] = [
            {'type': recipe_type, **convert_recipe(item_updater, recipe)}
            for recipe_type, recipe in (('Create', binary.craft),
                                        ('Upgrade', binary.upgrade))
            if recipe
        ]

        merged_map.insert(merged)

    # Write new data
    writer = create_writer()
    writer.save_base_map_csv(
        'charms/charm_base.csv',
        merged_map,
        translation_filename="charms/charm_base_translations.csv",
        schema=schema.CharmBaseSchema())
    writer.save_data_csv("charms/charm_craft.csv", merged_map, key="craft")
    print("Charm files updated\n")
def add_missing_items(encountered_item_ids: Iterable[int], *, mhdata=None):
    """Expand item_base.csv with in-game items that were encountered but missing.

    Merges the binary item list with the existing item map, categorizes each
    item, reorders everything by category, and rewrites the item CSVs.

    :param encountered_item_ids: ids of items referenced elsewhere in the
        build. Materialized into a set once, so any iterable (including a
        one-shot generator) is accepted.
    :param mhdata: preloaded data; loaded on demand when omitted.
    """
    if not mhdata:
        mhdata = load_data()
        print("Existing Data loaded. Using to expand item list")

    # BUGFIX: the old code tested membership against the raw iterable for
    # every item — O(n) per probe, and silently wrong for one-shot generators.
    encountered_ids = set(encountered_item_ids)

    item_data = sorted(load_schema(itm.Itm, "common/item/itemData.itm").entries,
                       key=lambda i: i.order)
    item_text_manager = ItemTextHandler()

    new_item_map = DataMap(languages='en')

    # First pass. Iterate over existing ingame items and merge with existing data
    for entry in item_data:
        name_dict, description_dict = item_text_manager.text_for(entry.id)
        existing_item = mhdata.item_map.entry_of('en', name_dict['en'])
        is_encountered = entry.id in encountered_ids

        # Unreferenced items we don't already track stay out of the build.
        if not is_encountered and not existing_item:
            continue

        # note: we omit buy price as items may have a buy price even if not sold.
        # We only care about the buy price of BUYABLE items
        new_data = {
            'name': name_dict,
            'description': description_dict,
            'rarity': entry.rarity + 1,
            'sell_price': entry.sell_price if entry.sell_price != 0 else None
        }

        is_ez = (entry.flags & itm.ItmFlag.IsQuestOnly.value) != 0
        is_account = item_type_list[entry.type] == 'endemic'
        is_tradein = "(Trade-in Item)" in description_dict['en']
        is_appraisal = (entry.flags & itm.ItmFlag.IsAppraisal.value) != 0

        if name_dict['en'] == 'Normal Ammo 1':
            new_data['category'] = 'hidden'
        elif is_ez:
            new_data['category'] = 'misc'
            new_data['subcategory'] = 'trade' if is_tradein else 'supply'
        elif is_account:
            new_data['category'] = 'misc'
            new_data['subcategory'] = 'trade' if is_tradein else 'account'
        elif is_appraisal:
            new_data['category'] = 'misc'
            new_data['subcategory'] = 'appraisal'
            new_data['sell_price'] = None  # why does this have values?
        else:
            new_data['category'] = item_type_list[entry.type]
            new_data['subcategory'] = 'trade' if is_tradein else None

        # Whether we show carry limit at all is based on item type.
        # Materials are basically infinite carry
        infinite_carry = new_data['category'] == 'material'
        new_data['carry_limit'] = None if infinite_carry else entry.carry_limit

        if existing_item:
            new_item_map.insert({**existing_item, **new_data})
        else:
            new_item_map.insert(new_data)

    # Second pass, add old entries that are not in the new one
    for old_entry in mhdata.item_map.values():
        if old_entry.name('en') not in new_item_map.names('en'):
            new_item_map.insert(old_entry)

    # Third pass. Items need to be reordered based on type
    unsorted_item_map = new_item_map  # store reference to former map

    def filter_category(category, subcategory=None):
        "helper that returns items and then removes from unsorted item map"
        results = []
        for item in unsorted_item_map.values():
            # .get(): entries carried over from the old csv may lack the key
            if item['category'] == category and item.get('subcategory') == subcategory:
                results.append(item)
        for result in results:
            del unsorted_item_map[result.id]
        return results

    # 'hidden' keeps Normal Ammo 1 out of the category filters; it is
    # re-inserted explicitly at the right position below.
    normal_ammo_1 = unsorted_item_map.entry_of("en", "Normal Ammo 1")

    # start the before-mentioned third pass by creating a new map based off the old one
    new_item_map = DataMap(languages="en")
    new_item_map.extend(filter_category('item'))
    new_item_map.extend(filter_category('material'))
    new_item_map.extend(filter_category('material', 'trade'))
    if normal_ammo_1:
        new_item_map.insert(normal_ammo_1)
    new_item_map.extend(filter_category('ammo'))
    new_item_map.extend(filter_category('misc', 'appraisal'))
    new_item_map.extend(filter_category('misc', 'account'))
    new_item_map.extend(filter_category('misc', 'supply'))
    # BUGFIX: misc/trade is assigned in the first pass (trade-in supply and
    # account items) but was never copied over, silently dropping those items.
    new_item_map.extend(filter_category('misc', 'trade'))

    # Write out data
    writer = create_writer()
    writer.save_base_map_csv(
        "items/item_base.csv",
        new_item_map,
        schema=schema.ItemSchema(),
        translation_filename="items/item_base_translations.csv",
        translation_extra=['description'])
    print("Item files updated")