def get_aamp_diff(file: Union[Path, str], tmp_dir: Path):
    """
    Diffs a modded AAMP file from the stock game version

    :param file: The modded AAMP file to diff
    :type file: Union[:class:`pathlib.Path`, str]
    :param tmp_dir: The temp directory containing the mod
    :type tmp_dir: :class:`pathlib.Path`
    :return: Returns a string representation of the AAMP file diff
    """
    if isinstance(file, str):
        nests = file.split('//')
        mod_bytes = util.get_nested_file_bytes(file)
        ref_path = str(util.get_game_file(
            Path(nests[0]).relative_to(tmp_dir))) + '//' + '//'.join(nests[1:])
        ref_bytes = util.get_nested_file_bytes(ref_path)
    else:
        with file.open('rb') as m_file:
            mod_bytes = m_file.read()
        mod_bytes = util.unyaz_if_needed(mod_bytes)
        with util.get_game_file(file.relative_to(tmp_dir)).open('rb') as r_file:
            ref_bytes = r_file.read()
        ref_bytes = util.unyaz_if_needed(ref_bytes)
    ref_aamp = aamp.Reader(ref_bytes).parse()
    mod_aamp = aamp.Reader(mod_bytes).parse()
    return _aamp_diff(ref_aamp, mod_aamp)

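# A minimal usage sketch for get_aamp_diff (not part of the original source). The mod
# directory and the nested-file notation ("<sarc path>//<file inside>") below are
# hypothetical, and the call assumes BCML's game dump is configured so that
# util.get_game_file() can resolve the stock counterpart.
def _example_get_aamp_diff() -> None:
    tmp_dir = Path("/tmp/mod_extract")  # hypothetical extracted mod root
    nested = (
        str(tmp_dir / "content/Actor/Pack/Enemy_Bokoblin_Junior.sbactorpack")
        + "//Actor/ActorLink/Enemy_Bokoblin_Junior.bxml"
    )
    diff = get_aamp_diff(nested, tmp_dir)
    print(diff)
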
def _clean_sarc_file(file: Path, hashes: dict, tmp_dir: Path):
    canon = util.get_canon_name(file.relative_to(tmp_dir))
    try:
        stock_file = util.get_game_file(file.relative_to(tmp_dir))
    except FileNotFoundError:
        return
    try:
        old_sarc = oead.Sarc(util.unyaz_if_needed(stock_file.read_bytes()))
    except (RuntimeError, ValueError, oead.InvalidDataError):
        return
    if canon not in hashes:
        return
    try:
        base_sarc = oead.Sarc(util.unyaz_if_needed(file.read_bytes()))
    except (RuntimeError, ValueError, oead.InvalidDataError):
        return
    new_sarc = _clean_sarc(old_sarc, base_sarc)
    if not new_sarc:
        file.unlink()
    else:
        write_bytes = new_sarc.write()[1]
        file.write_bytes(
            write_bytes
            if not (file.suffix.startswith(".s") and file.suffix != ".ssarc")
            else util.compress(write_bytes)
        )

def log_drop_file(file: str, mod_dir: Path):
    if "Bootup.pack" in file:
        return {}
    drop = ParameterIO.from_binary(
        util.get_nested_file_bytes(str(mod_dir) + "/" + file))
    drop_table = _drop_to_dict(drop)
    del drop
    try:
        base_file = file[:file.index("//")]
        sub_file = file[file.index("//"):]
        ref_drop = ParameterIO.from_binary(
            util.get_nested_file_bytes(
                str(util.get_game_file(base_file)) + sub_file))
        ref_table = _drop_to_dict(ref_drop)
        del ref_drop
        for table, contents in drop_table.items():
            if table not in ref_table:
                continue
            for item, prob in {(i, p) for i, p in contents["items"].items()
                               if i in ref_table[table]["items"]}:
                if prob == ref_table[table]["items"][item]:
                    drop_table[table]["items"][item] = util.UNDERRIDE
        del ref_table
    except (
        FileNotFoundError,
        oead.InvalidDataError,
        AttributeError,
        RuntimeError,
        ValueError,
    ):
        util.vprint(f"Could not load stock {file}")
    return {file: drop_table}

def generate_diff(self, mod_dir: Path, modded_files: List[Union[Path, str]]) -> ParameterIO:
    print("Logging changes to shop files...")
    diffs = ParameterIO()
    file_names = ParameterObject()
    for file in [
        file for file in modded_files if Path(file).suffix in EXT_FOLDERS
    ]:
        try:
            mod_bytes = util.get_nested_file_bytes(
                str(mod_dir) + "/" + str(file))
            nests = str(file).split("//", 1)
            try:
                ref_path = str(util.get_game_file(Path(nests[0]))) + "//" + nests[1]
            except FileNotFoundError:
                continue
            try:
                ref_bytes = util.get_nested_file_bytes(ref_path)
            except AttributeError:
                continue
            shop_type = str(file).split(".")[-1]

            mod_pio = get_named_pio(ParameterIO.from_binary(mod_bytes), shop_type)
            ref_pio = get_named_pio(ParameterIO.from_binary(ref_bytes), shop_type)

            file_names.params[oead.aamp.Name(file).hash] = Parameter(file)
            diffs.lists[file] = gen_diffs(ref_pio, mod_pio)
        except (KeyError, AttributeError) as err:
            raise err
    diffs.objects["Filenames"] = file_names
    return diffs

def main() -> None:
    parser = argparse.ArgumentParser(
        description="Tool for managing flags in LoZ:BotW")
    subparsers = parser.add_subparsers(dest="command", help="Command")
    subparsers.required = True

    f_parser = subparsers.add_parser(
        "find", description="Search for flags in Bootup.pack", aliases=["f"])
    f_parser.add_argument("directory", help="The root folder of your mod")
    f_parser.add_argument(
        "flag_name",
        help="The name (or part of the name) of the flag to search for")
    f_parser.set_defaults(func=lambda a: find(a))

    g_parser = subparsers.add_parser(
        "generate",
        description="Builds GameData and SaveGameData flags",
        aliases=["g"])
    g_parser.add_argument("directory", help="The root folder of your mod")
    g_parser.add_argument("-a",
                          "--actor",
                          help="Generate IsGet_/compendium flags for actors",
                          action="store_true")
    g_parser.add_argument(
        "-r",
        "--revival",
        action="store",
        nargs=2,
        default=[-1, -1],
        type=int,
        help="Generate revival flags for actor instances",
        metavar=("MainFieldResetType", "ShrineResetType"),
    )
    g_parser.set_defaults(func=lambda a: generate(a))

    for p in [f_parser, g_parser]:
        p.add_argument("-b",
                       "--bigendian",
                       help="Use big endian mode (for Wii U)",
                       action="store_true")
        p.add_argument("-v",
                       "--verbose",
                       help="Give verbose after-action report",
                       action="store_true")

    args = parser.parse_args()

    directory: Path = Path(args.directory)
    if not (directory / "content").exists():
        print(
            f"{directory} is not the root folder of your mod. Please try again and enter the root directory of your mod."
        )
        return
    bootup_path: Path = directory / "content" / "Pack" / "Bootup.pack"
    if not bootup_path.exists():
        bootup_path.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy(bcmlutil.get_game_file("Pack/Bootup.pack"), bootup_path)
    bootup_dir = str(bootup_path).replace("\\", "/")

    args.func(args)

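# Hedged example of driving the CLI above. The console-script name is made up and the
# mod path is hypothetical; the flags mirror the argparse definitions in main():
#
#     $ flag_tool generate "C:/mods/MyMod" --actor --revival 1 2 --bigendian
#     $ flag_tool find "C:/mods/MyMod" IsGet_Weapon_Sword_001
#
# The same thing can be done programmatically by populating sys.argv before calling
# main(), e.g.:
#
#     import sys
#     sys.argv = ["flag_tool", "generate", "C:/mods/MyMod", "-r", "1", "2", "-b"]
#     main()
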
def perform_merge(self):
    # pylint: disable=unsupported-assignment-operation
    user_langs = (
        {util.get_settings("lang")}
        if not self._options["all_langs"]
        else util.get_user_languages()
    )
    print("Loading text mods...")
    diffs = self.consolidate_diffs(self.get_all_diffs())
    if not diffs:
        print("No text merge necessary")
        for bootup in util.get_master_modpack_dir().rglob("**/Bootup_????.pack"):
            bootup.unlink()
        return
    # find a mod lang for each user lang
    lang_map = map_languages(user_langs, set(diffs.keys()))
    for user_lang, mod_lang in lang_map.items():
        print(f"Merging modded texts for {mod_lang} into {user_lang}...")
        rsext.mergers.texts.merge_language(
            json.dumps(diffs[mod_lang]),
            str(util.get_game_file(f"Pack/Bootup_{user_lang}.pack")),
            str(
                util.get_master_modpack_dir()
                / util.get_content_path()
                / "Pack"
                / f"Bootup_{user_lang}.pack"
            ),
            util.get_settings("wiiu"),
        )
        print(f"{user_lang} texts merged successfully")

def perform_merge(self):
    print("Loading modded SARC list...")
    sarcs = {
        s: ss
        for s, ss in self.consolidate_diffs(self.get_all_diffs()).items()
        if ss
    }
    for file in [
        file
        for file in util.get_master_modpack_dir().rglob("**/*")
        if file.suffix in util.SARC_EXTS - EXCLUDE_EXTS
        and not any(ex in file.name for ex in SPECIAL)
    ]:
        file.unlink()
    for sarc_file in sarcs:
        try:
            sarcs[sarc_file].insert(0, util.get_game_file(sarc_file))
        except FileNotFoundError:
            continue
    if not sarcs:
        print("No SARC merging necessary")
        return
    print(f"Merging {len(sarcs)} SARC files...")
    from bcml import bcml as rsext

    rsext.mergers.packs.merge_sarcs(sarcs)
    # pool = self._pool or Pool(maxtasksperchild=500)
    # results = pool.starmap(merge_sarcs, sarcs.items())
    # pool.starmap(write_sarc, results)
    # if not self._pool:
    #     pool.close()
    #     pool.join()
    print("Finished merging SARCs")

def merge_drop_file(file: str, drop_table: dict):
    base_path = file[:file.index("//")]
    sub_path = file[file.index("//"):]
    try:
        ref_drop = _drop_to_dict(
            ParameterIO.from_binary(
                util.get_nested_file_bytes(
                    str(util.get_game_file(base_path)) + sub_path)))
        for table in set(ref_drop.keys()):
            if table not in drop_table:
                del ref_drop[table]
            else:
                for item in set(ref_drop[table]["items"].keys()):
                    if item not in drop_table[table]["items"]:
                        del ref_drop[table]["items"][item]
        util.dict_merge(ref_drop, drop_table)
        drop_table = ref_drop
    except (FileNotFoundError, AttributeError, RuntimeError):
        pass
    actor_name_matches = re.search(r"Pack\/(.+)\.sbactorpack", file)
    if actor_name_matches:
        actor_name = actor_name_matches.groups()[0]
    else:
        raise ValueError(f"No actor name found in {file}")
    pio = _dict_to_drop(drop_table)
    util.inject_files_into_actor(actor_name, {file.split("//")[-1]: pio.to_binary()})

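# Hedged illustration of the drop-table dict shape that merge_drop_file() expects,
# inferred from how it indexes drop_table[table]["items"][item]. The table name, actor
# names, and probabilities are invented, and any per-table fields other than "items"
# are omitted here:
#
#     example_drop_table = {
#         "Normal": {
#             "items": {"Item_Enemy_00": 50.0, "Item_Enemy_01": 50.0},
#         }
#     }
#     merge_drop_file(
#         "Actor/Pack/Enemy_Bokoblin_Junior.sbactorpack"
#         "//Actor/DropTable/Enemy_Bokoblin_Junior.bdrop",
#         example_drop_table,
#     )
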
def perform_merge(self):
    merged_events = util.get_master_modpack_dir() / "logs" / "eventinfo.byml"
    event_merge_log = util.get_master_modpack_dir() / "logs" / "eventinfo.log"
    print("Loading event info mods...")
    modded_events = self.consolidate_diffs(self.get_all_diffs())
    event_mod_hash = hash(str(modded_events))
    if not modded_events:
        print("No event info merging necessary")
        if merged_events.exists():
            merged_events.unlink()
            event_merge_log.unlink()
            try:
                stock_eventinfo = util.get_nested_file_bytes(
                    (
                        str(util.get_game_file("Pack/Bootup.pack"))
                        + "//Event/EventInfo.product.sbyml"
                    ),
                    unyaz=False,
                )
                util.inject_file_into_sarc(
                    "Event/EventInfo.product.sbyml",
                    stock_eventinfo,
                    "Pack/Bootup.pack",
                )
            except FileNotFoundError:
                pass
        return
    # the log stores str(event_mod_hash), so compare strings to strings
    if event_merge_log.exists() and event_merge_log.read_text() == str(event_mod_hash):
        print("No event info merging necessary")
        return

    new_events = get_stock_eventinfo()
    for event, data in modded_events.items():
        new_events[event] = data
    del modded_events

    print("Writing new event info...")
    event_bytes = oead.byml.to_binary(
        new_events, big_endian=util.get_settings("wiiu")
    )
    del new_events
    util.inject_file_into_sarc(
        "Event/EventInfo.product.sbyml",
        util.compress(event_bytes),
        "Pack/Bootup.pack",
        create_sarc=True,
    )
    print("Saving event info merge log...")
    event_merge_log.parent.mkdir(parents=True, exist_ok=True)
    event_merge_log.write_text(str(event_mod_hash))
    merged_events.write_bytes(event_bytes)

    print("Updating RSTB...")
    rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(
        bytes(event_bytes), True, ".byml"
    )
    del event_bytes
    rstable.set_size("Event/EventInfo.product.byml", rstb_size)

def get_stock_rstb() -> rstb.ResourceSizeTable:
    """ Gets the unmodified RSTB """
    if not hasattr(get_stock_rstb, 'table'):
        get_stock_rstb.table = read_rstb(
            str(util.get_game_file('System/Resource/ResourceSizeTable.product.srsizetable')),
            True
        )
    return deepcopy(get_stock_rstb.table)

def get_stock_rstb() -> rstb.ResourceSizeTable:
    if not hasattr(get_stock_rstb, "table"):
        get_stock_rstb.table = read_rstb(
            str(
                util.get_game_file(
                    "System/Resource/ResourceSizeTable.product.srsizetable")),
            util.get_settings("wiiu"),
        )
    return deepcopy(get_stock_rstb.table)

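# Short usage sketch (not from the original module): because get_stock_rstb() returns
# a deep copy, callers can query or mutate the table without touching the cached stock
# copy. The resource name below is a real BOTW entry, but the call is only illustrative.
def _example_query_stock_rstb() -> None:
    table = get_stock_rstb()
    name = "Actor/ActorInfo.product.byml"
    if table.is_in_table(name):
        print(name, table.get_size(name))
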
def get_stock_eventinfo() -> dict:
    """ Gets the contents of the stock `EventInfo.product.sbyml` """
    if not hasattr(get_stock_eventinfo, 'event_info'):
        get_stock_eventinfo.event_info = byml.Byml(
            util.get_nested_file_bytes(
                str(util.get_game_file('Pack/Bootup.pack')) +
                '//Event/EventInfo.product.sbyml',
                unyaz=True
            )
        ).parse()
    return deepcopy(get_stock_eventinfo.event_info)

def get_stock_eventinfo() -> oead.byml.Hash:
    if not hasattr(get_stock_eventinfo, "event_info"):
        get_stock_eventinfo.event_info = oead.byml.to_text(
            oead.byml.from_binary(
                util.get_nested_file_bytes(
                    str(util.get_game_file("Pack/Bootup.pack"))
                    + "//Event/EventInfo.product.sbyml",
                    unyaz=True,
                )
            )
        )
    return oead.byml.from_text(get_stock_eventinfo.event_info)

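# The to_text/from_text round trip above caches the stock event info as text and
# re-parses it on every call, so each caller gets an independent, mutable copy; that is
# why the event merger can assign into the result directly. A hedged usage sketch with
# a made-up event name:
#
#     events = get_stock_eventinfo()
#     events["MyCustomEvent"] = oead.byml.Hash({"name": "MyCustomEvent"})
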
def threaded_merge(item, verbose: bool) -> (str, dict):
    """Deep merges an individual file, suitable for multiprocessing"""
    file, stuff = item
    failures = {}

    base_file = util.get_game_file(file, file.startswith('aoc'))
    if (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    file_ext = os.path.splitext(file)[1]
    if file_ext in util.SARC_EXTS and (util.get_master_modpack_dir() / file).exists():
        base_file = (util.get_master_modpack_dir() / file)
    file_bytes = base_file.read_bytes()
    yazd = file_bytes[0:4] == b'Yaz0'
    file_bytes = file_bytes if not yazd else util.decompress(file_bytes)
    magic = file_bytes[0:4]

    if magic == b'SARC':
        new_sarc, sub_failures = nested_patch(sarc.SARC(file_bytes), stuff)
        del file_bytes
        new_bytes = new_sarc.get_bytes()
        for failure, contents in sub_failures.items():
            print(f'Some patches to {failure} failed to apply.')
            failures[failure] = contents
    else:
        try:
            if magic == b'AAMP':
                aamp_contents = aamp.Reader(file_bytes).parse()
                for change in stuff:
                    aamp_contents = _aamp_merge(aamp_contents, change)
                aamp_bytes = aamp.Writer(aamp_contents).get_bytes()
                del aamp_contents
                new_bytes = aamp_bytes if not yazd else util.compress(aamp_bytes)
            else:
                raise ValueError(f'{file} is not a SARC or AAMP file.')
        except ValueError:
            new_bytes = file_bytes
            del file_bytes
            print(f'Deep merging file {file} failed. No changes were made.')

    new_bytes = new_bytes if not yazd else util.compress(new_bytes)
    output_file = (util.get_master_modpack_dir() / file)
    if base_file == output_file:
        output_file.unlink()
    output_file.parent.mkdir(parents=True, exist_ok=True)
    output_file.write_bytes(new_bytes)
    del new_bytes
    if magic == b'SARC' and verbose:
        print(f'Finished patching files inside {file}')
    elif verbose:
        print(f'Finished patching {file}')
    return util.get_canon_name(file), failures

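# Hedged sketch of driving threaded_merge from a multiprocessing pool (the diffs
# mapping and its shape are hypothetical; each item is expected to be a
# (file, changes) pair, matching the unpacking at the top of threaded_merge):
def _example_run_threaded_merge(diffs: dict, verbose: bool = False) -> dict:
    from multiprocessing import Pool

    failures = {}
    with Pool() as pool:
        for canon, fails in pool.starmap(
            threaded_merge, [(item, verbose) for item in diffs.items()]
        ):
            if fails:
                failures[canon] = fails
    return failures
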
def _convert_actorpack(actor_pack: Path, to_wiiu: bool) -> Union[None, str]:
    error = None
    sarc = oead.Sarc(util.unyaz_if_needed(actor_pack.read_bytes()))
    new_sarc = oead.SarcWriter.from_sarc(sarc)
    new_sarc.set_endianness(oead.Endianness.Big if to_wiiu else oead.Endianness.Little)
    for file in sarc.get_files():
        if "Physics/" in file.name and "Actor/" not in file.name:
            ext = file.name[file.name.rindex(".") :]
            if ext in NO_CONVERT_EXTS:
                if not util.is_file_modded(
                    util.get_canon_name(file.name, allow_no_source=True),
                    file.data,
                    count_new=True,
                ):
                    actor_name = file.name[
                        file.name.rindex("/") : file.name.rindex(".")
                    ]
                    try:
                        pack_path = util.get_game_file(
                            f"Actor/Pack/{actor_name}.sbactorpack"
                        )
                        stock_data = util.get_nested_file_bytes(
                            f"{str(pack_path)}//{file.name}"
                        )
                        if stock_data:
                            new_sarc.files[file.name] = stock_data
                        else:
                            raise FileNotFoundError(file.name)
                    except (FileNotFoundError, AttributeError):
                        error = (
                            "This mod contains a Havok file not supported by the "
                            f"converter: {file.name}"
                        )
                else:
                    error = (
                        "This mod contains a Havok file not supported by the"
                        f" converter: {file.name}"
                    )
            else:
                if file.data[0:4] == b"AAMP":
                    continue
                try:
                    hk = Havok.from_bytes(bytes(file.data))
                except:  # pylint: disable=bare-except
                    return f"Could not parse Havok file {file.name}"
                if to_wiiu:
                    hk.to_wiiu()
                else:
                    hk.to_switch()
                hk.serialize()
                new_sarc.files[file.name] = hk.to_bytes()
    actor_pack.write_bytes(util.compress(new_sarc.write()[1]))
    return error

def perform_merge(self):
    merged_effects = util.get_master_modpack_dir() / "logs" / "effects.byml"
    print("Loading status effect mods...")
    diffs = self.consolidate_diffs(self.get_all_diffs())
    if not diffs:
        print("No status effect merging necessary...")
        if merged_effects.exists():
            merged_effects.unlink()
            try:
                stock_effects = util.get_nested_file_bytes(
                    (
                        str(util.get_game_file("Pack/Bootup.pack"))
                        + "//Ecosystem/StatusEffectList.sbyml"
                    ),
                    unyaz=False,
                )
                util.inject_file_into_sarc(
                    "Ecosystem/StatusEffectList.sbyml",
                    stock_effects,
                    "Pack/Bootup.pack",
                )
                del stock_effects
            except FileNotFoundError:
                pass
        return
    util.vprint("All status effect diffs:")
    util.vprint(diffs)

    effects = get_stock_effects()
    util.dict_merge(effects, diffs, overwrite_lists=True)
    del diffs

    print("Writing new effects list...")
    effect_bytes = oead.byml.to_binary(
        oead.byml.Array([effects]), big_endian=util.get_settings("wiiu")
    )
    del effects
    util.inject_file_into_sarc(
        "Ecosystem/StatusEffectList.sbyml",
        util.compress(effect_bytes),
        "Pack/Bootup.pack",
        create_sarc=True,
    )
    print("Saving status effect merge log...")
    merged_effects.parent.mkdir(parents=True, exist_ok=True)
    merged_effects.write_bytes(effect_bytes)

    print("Updating RSTB...")
    rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(
        effect_bytes, True, ".byml"
    )
    del effect_bytes
    rstable.set_size("Ecosystem/StatusEffectList.byml", rstb_size)

def merge_events():
    """ Merges all installed event info mods """
    event_mods = [mod for mod in util.get_installed_mods()
                  if (mod.path / 'logs' / 'eventinfo.yml').exists()]
    merged_events = util.get_master_modpack_dir() / 'logs' / 'eventinfo.byml'
    event_merge_log = util.get_master_modpack_dir() / 'logs' / 'eventinfo.log'
    event_mod_hash = str(hash(tuple(event_mods)))
    if not event_mods:
        print('No event info merging necessary')
        if merged_events.exists():
            merged_events.unlink()
            event_merge_log.unlink()
            try:
                stock_eventinfo = util.get_nested_file_bytes(
                    str(util.get_game_file('Pack/Bootup.pack')) +
                    '//Event/EventInfo.product.sbyml',
                    unyaz=False
                )
                util.inject_file_into_bootup(
                    'Event/EventInfo.product.sbyml',
                    stock_eventinfo
                )
            except FileNotFoundError:
                pass
        return
    if event_merge_log.exists() and event_merge_log.read_text() == event_mod_hash:
        print('No event info merging necessary')
        return

    print('Loading event info mods...')
    modded_events = {}
    for mod in event_mods:
        modded_events.update(get_events_for_mod(mod))
    new_events = get_stock_eventinfo()
    for event, data in modded_events.items():
        new_events[event] = data

    print('Writing new event info...')
    event_bytes = byml.Writer(new_events, be=True).get_bytes()
    util.inject_file_into_bootup(
        'Event/EventInfo.product.sbyml',
        util.compress(event_bytes),
        create_bootup=True
    )
    print('Saving event info merge log...')
    event_merge_log.write_text(event_mod_hash)
    merged_events.write_bytes(event_bytes)

    print('Updating RSTB...')
    rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(event_bytes, True, '.byml')
    rstable.set_size('Event/EventInfo.product.byml', rstb_size)

def get_shop_diffs(file: str, tree: dict, tmp_dir: Path) -> Optional[dict]:
    try:
        ref_sarc = Sarc(util.unyaz_if_needed(util.get_game_file(file).read_bytes()))
    except (FileNotFoundError, InvalidDataError, ValueError, RuntimeError) as err:
        util.vprint(f"{file} ignored on stock side, cuz {err}")
        return None
    try:
        sarc = Sarc(util.unyaz_if_needed((tmp_dir / file).read_bytes()))
    except (FileNotFoundError, InvalidDataError, ValueError, RuntimeError):
        util.vprint(f"{file} corrupt, ignored")
        return None
    diffs = _get_diffs_from_sarc(sarc, ref_sarc, tree, file)
    del sarc
    del ref_sarc
    return diffs

def merge_aamp_files(file: str, tree: dict):
    try:
        base_file = util.get_game_file(file)
    except FileNotFoundError:
        util.vprint(f"Skipping {file}, not found in dump")
        return
    if (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    sarc = Sarc(util.unyaz_if_needed(base_file.read_bytes()))
    new_data = _merge_in_sarc(sarc, tree)
    if base_file.suffix.startswith(".s") and base_file.suffix != ".ssarc":
        new_data = util.compress(new_data)
    (util.get_master_modpack_dir() / file).parent.mkdir(parents=True, exist_ok=True)
    (util.get_master_modpack_dir() / file).write_bytes(new_data)

def rem_underride(data: dict):
    for file, tables in data.items():
        stock: Optional[dict] = None
        for name, table in tables.items():
            for actor, prob in table["items"].items():
                if prob == util.UNDERRIDE:
                    if stock is None:
                        base_file = file[:file.index("//")]
                        sub_file = file[file.index("//"):]
                        ref_drop = ParameterIO.from_binary(
                            util.get_nested_file_bytes(
                                str(util.get_game_file(base_file)) + sub_file))
                        stock = _drop_to_dict(ref_drop)
                    data[file][name]["items"][actor] = stock[name]["items"][actor]

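# Hedged illustration of the UNDERRIDE round trip (file path and values invented):
# log_drop_file() replaces probabilities that match the stock table with the
# util.UNDERRIDE sentinel, and rem_underride() swaps the sentinel back for the current
# stock value at merge time, so updated game-dump numbers win over stale logged ones.
#
#     logged = {
#         "Actor/Pack/Enemy_X.sbactorpack//Actor/DropTable/Enemy_X.bdrop": {
#             "Normal": {"items": {"Item_Enemy_00": util.UNDERRIDE, "Item_Enemy_01": 75.0}}
#         }
#     }
#     rem_underride(logged)  # Item_Enemy_00 gets its probability re-read from the dump
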
def get_stock_areadata() -> oead.byml.Hash:
    if not hasattr(get_stock_areadata, "areadata"):
        get_stock_areadata.areadata = oead.byml.to_text(
            oead.byml.Hash(
                {
                    str(area["AreaNumber"].v): area
                    for area in oead.byml.from_binary(
                        util.get_nested_file_bytes(
                            str(util.get_game_file("Pack/Bootup.pack"))
                            + "//Ecosystem/AreaData.sbyml",
                            unyaz=True,
                        )
                    )
                }
            )
        )
    return oead.byml.from_text(get_stock_areadata.areadata)

def _pack_sarc(folder: Path, tmp_dir: Path, hashes: dict):
    packed = oead.SarcWriter(
        endian=oead.Endianness.Big
        if util.get_settings("wiiu")
        else oead.Endianness.Little
    )
    try:
        canon = util.get_canon_name(
            folder.relative_to(tmp_dir).as_posix(), allow_no_source=True
        )
        if canon not in hashes:
            raise FileNotFoundError("File not in game dump")
        stock_file = util.get_game_file(folder.relative_to(tmp_dir))
        try:
            old_sarc = oead.Sarc(util.unyaz_if_needed(stock_file.read_bytes()))
        except (RuntimeError, ValueError, oead.InvalidDataError):
            raise ValueError("Cannot open file from game dump")
        old_files = {f.name for f in old_sarc.get_files()}
    except (FileNotFoundError, ValueError):
        for file in {f for f in folder.rglob("**/*") if f.is_file()}:
            packed.files[file.relative_to(folder).as_posix()] = file.read_bytes()
    else:
        for file in {
            f
            for f in folder.rglob("**/*")
            if f.is_file() and not f.suffix in EXCLUDE_EXTS
        }:
            file_data = file.read_bytes()
            xhash = xxhash.xxh64_intdigest(util.unyaz_if_needed(file_data))
            file_name = file.relative_to(folder).as_posix()
            if file_name in old_files:
                old_hash = xxhash.xxh64_intdigest(
                    util.unyaz_if_needed(old_sarc.get_file(file_name).data)
                )
            if file_name not in old_files or (xhash != old_hash):
                packed.files[file_name] = file_data
    finally:
        shutil.rmtree(folder)
        if not packed.files:
            return  # pylint: disable=lost-exception
        sarc_bytes = packed.write()[1]
        folder.write_bytes(
            util.compress(sarc_bytes)
            if (folder.suffix.startswith(".s") and not folder.suffix == ".sarc")
            else sarc_bytes
        )

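# _pack_sarc() deliberately removes the unpacked folder in its finally block and then
# writes the repacked archive to the same path, replacing the directory that held the
# SARC's contents with a single file of the same name (unless nothing was packed, in
# which case it simply returns after the rmtree).
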
def perform_merge(self):
    print("Loading modded SARC list...")
    sarcs = {
        s: ss
        for s, ss in self.consolidate_diffs(self.get_all_diffs()).items()
        if ss
    }
    if "only_these" in self._options:
        for sarc_file in self._options["only_these"]:
            master_path = util.get_master_modpack_dir() / sarc_file
            if master_path.exists():
                master_path.unlink()
        for sarc_file in [
            file for file in sarcs if file not in self._options["only_these"]
        ]:
            del sarcs[sarc_file]
    else:
        for file in [
            file
            for file in util.get_master_modpack_dir().rglob("**/*")
            if file.suffix in util.SARC_EXTS
        ]:
            file.unlink()
    for sarc_file in sarcs:
        try:
            sarcs[sarc_file].insert(0, util.get_game_file(sarc_file))
        except FileNotFoundError:
            continue
    if not sarcs:
        print("No SARC merging necessary")
        return
    print(f"Merging {len(sarcs)} SARC files...")
    pool = self._pool or Pool(maxtasksperchild=500)
    results = pool.starmap(merge_sarcs, sarcs.items())
    for result in results:
        file, file_data = result
        output_path = util.get_master_modpack_dir() / file
        output_path.parent.mkdir(parents=True, exist_ok=True)
        if output_path.suffix.startswith(".s"):
            file_data = util.compress(file_data)
        output_path.write_bytes(file_data)
    if not self._pool:
        pool.close()
        pool.join()
    print("Finished merging SARCs")

def _clean_sarc(file: Path, hashes: dict, tmp_dir: Path):
    canon = util.get_canon_name(file.relative_to(tmp_dir))
    try:
        stock_file = util.get_game_file(file.relative_to(tmp_dir))
    except FileNotFoundError:
        return
    with stock_file.open('rb') as old_file:
        old_sarc = sarc.read_file_and_make_sarc(old_file)
    if not old_sarc:
        return
    old_files = set(old_sarc.list_files())
    if canon not in hashes:
        return
    with file.open('rb') as s_file:
        base_sarc = sarc.read_file_and_make_sarc(s_file)
    if not base_sarc:
        return
    new_sarc = sarc.SARCWriter(True)
    can_delete = True
    for nest_file in base_sarc.list_files():
        canon = nest_file.replace('.s', '.')
        ext = Path(canon).suffix
        if ext in {'.yml', '.bak'}:
            continue
        file_data = base_sarc.get_file_data(nest_file).tobytes()
        xhash = xxhash.xxh32(util.unyaz_if_needed(file_data)).hexdigest()
        if nest_file in old_files:
            old_hash = xxhash.xxh32(
                util.unyaz_if_needed(
                    old_sarc.get_file_data(nest_file).tobytes())).hexdigest()
        if nest_file not in old_files or (xhash != old_hash and ext not in util.AAMP_EXTS):
            can_delete = False
            new_sarc.add_file(nest_file, file_data)
    del old_sarc
    if can_delete:
        del new_sarc
        file.unlink()
    else:
        with file.open('wb') as s_file:
            if file.suffix.startswith('.s') and file.suffix != '.ssarc':
                s_file.write(util.compress(new_sarc.get_bytes()))
            else:
                new_sarc.write(s_file)

def perform_merge(self):
    print('Loading modded SARC list...')
    sarcs = self.consolidate_diffs(self.get_all_diffs())
    if 'only_these' in self._options:
        for sarc_file in self._options['only_these']:
            master_path = (util.get_master_modpack_dir() / sarc_file)
            if master_path.exists():
                master_path.unlink()
        for sarc_file in [
            file for file in sarcs if file not in self._options['only_these']
        ]:
            del sarcs[sarc_file]
    else:
        for file in [file for file in util.get_master_modpack_dir().rglob('**/*')
                     if file.suffix in util.SARC_EXTS]:
            file.unlink()
    for sarc_file in sarcs:
        try:
            sarcs[sarc_file].insert(0, util.get_game_file(sarc_file))
        except FileNotFoundError:
            continue
    if not sarcs:
        print('No SARC merging necessary')
        return
    num_threads = min(cpu_count(), len(sarcs))
    pool = self._pool or Pool(processes=num_threads)
    print(f'Merging {len(sarcs)} SARC files...')
    results = pool.starmap(merge_sarcs, sarcs.items())
    for result in results:
        file, data = result
        output_path = util.get_master_modpack_dir() / file
        output_path.parent.mkdir(parents=True, exist_ok=True)
        if output_path.suffix.startswith('.s'):
            data = util.compress(data)
        output_path.write_bytes(data)
    if not self._pool:
        pool.close()
        pool.join()
    print('Finished merging SARCs')

def generate_diff(self, mod_dir: Path, modded_files: List[Union[str, Path]]):
    print("Checking for modified languages...")
    mod_langs = {
        util.get_file_language(file)
        for file in modded_files
        if (
            isinstance(file, Path)
            and "Bootup_" in file.name
            and "Graphic" not in file.name
        )
    }
    if not mod_langs:
        return None
    util.vprint(f'Languages: {",".join(mod_langs)}')

    # find a user lang for each mod lang
    language_map = map_languages(mod_langs, util.get_user_languages())
    util.vprint("Language map:")
    util.vprint(language_map)

    language_diffs = {}
    for mod_lang, user_lang in language_map.items():
        print(f"Logging text changes for {user_lang}...")
        mod_pack = (
            mod_dir / util.get_content_path() / "Pack" / f"Bootup_{mod_lang}.pack"
        )
        if not user_lang == mod_lang:
            mod_pack = swap_region(mod_pack, user_lang)
        ref_pack = util.get_game_file(f"Pack/Bootup_{user_lang}.pack")
        language_diffs[user_lang] = rsext.mergers.texts.diff_language(
            str(mod_pack), str(ref_pack), user_lang[2:4] != mod_lang[2:4]
        )
        if not user_lang == mod_lang:
            mod_pack.unlink()

    return language_diffs

def get_stock_savedata() -> oead.Sarc:
    bootup = oead.Sarc(util.get_game_file("Pack/Bootup.pack").read_bytes())
    return oead.Sarc(
        util.decompress(bootup.get_file("GameData/savedataformat.ssarc").data))

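# Brief usage sketch (illustrative only): the returned oead.Sarc exposes the individual
# savedataformat files, e.g. for listing the stock save-data archives and their sizes.
def _example_list_savedata_files() -> None:
    savedata = get_stock_savedata()
    for f in savedata.get_files():
        print(f.name, len(f.data))
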
def get_stock_quests() -> oead.byml.Array:
    title_sarc = oead.Sarc(
        util.get_game_file("Pack/TitleBG.pack").read_bytes())
    return oead.byml.from_binary(
        util.decompress(
            title_sarc.get_file("Quest/QuestProduct.sbquestpack").data))

def generate_revival_flags(resettypes: list) -> None:
    moddir: Path = util.root_dir()
    if not resettypes[0] == -1:
        for map_unit in moddir.rglob("*_*.smubin"):
            map_start = time.time()
            map_data = oead.byml.from_binary(
                oead.yaz0.decompress(map_unit.read_bytes()))
            map_section = map_unit.stem.split("_")
            stock_map = mubin.get_stock_map((map_section[0], map_section[1]))
            generate_revival_flags_for_map(map_data, stock_map, "MainField",
                                           resettypes[0])
            print(
                f"Finished processing {map_unit.name} in {time.time() - map_start} seconds..."
            )
        for static_unit in moddir.rglob("MainField/Static.smubin"):
            map_start = time.time()
            static_data = oead.byml.from_binary(
                oead.yaz0.decompress(static_unit.read_bytes()))
            for marker in static_data["LocationMarker"]:
                if not "Icon" in marker:
                    continue
                if not marker["Icon"] == "Dungeon":
                    continue
                if "MessageID" in marker:
                    if not marker["MessageID"] in vanilla_shrine_locs:
                        misc_s32_flag(marker["SaveFlag"])
                        misc_bool_flag(f"Enter_{marker['MessageID']}")
                        misc_bool_flag(
                            f"CompleteTreasure_{marker['MessageID']}")
            print(
                f"Finished processing MainField/Static.smubin in {time.time() - map_start} seconds..."
            )
    if not resettypes[1] == -1:
        global current_map
        for map_pack in moddir.rglob("Pack/Dungeon*.pack"):
            current_map = map_pack.stem
            pack_data = oead.Sarc(map_pack.read_bytes())
            try:
                stock_pack = oead.Sarc(
                    bcmlutil.get_game_file(map_pack).read_bytes())
            except FileNotFoundError:
                stock_pack = None
            map_types = ("_Static", "_Dynamic")
            for map_type in map_types:
                map_start = time.time()
                map_name = f"{map_pack.stem}{map_type}.smubin"
                map_data = oead.byml.from_binary(
                    oead.yaz0.decompress(
                        pack_data.get_file(
                            f"Map/CDungeon/{map_pack.stem}/{map_name}").data))
                if stock_pack:
                    stock_map = oead.byml.from_binary(
                        oead.yaz0.decompress(
                            stock_pack.get_file(
                                f"Map/CDungeon/{map_pack.stem}/{map_name}").data))
                else:
                    stock_map = oead.byml.Hash()
                    stock_map["Objs"] = oead.byml.Array()
                generate_revival_flags_for_map(map_data, stock_map, "CDungeon",
                                               resettypes[1])
                print(
                    f"Finished processing {map_name} in {time.time() - map_start} seconds..."
                )
        current_map = ""
        # ^ unnecessary given that it's updated every loop, but we rely on it possibly being
        # empty for determining whether LinkTags are valid, so it's better to be safe
    for hash in orphaned_flag_hashes:
        if hash not in added_flag_hashes:
            bgdata.remove("bool_data", hash)

def get_stock_effects() -> oead.byml.Hash:
    bootup_sarc = oead.Sarc(util.get_game_file("Pack/Bootup.pack").read_bytes())
    return oead.byml.from_binary(
        util.decompress(bootup_sarc.get_file("Ecosystem/StatusEffectList.sbyml").data)
    )[0]

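# Minimal usage sketch (illustrative only): get_stock_effects() returns the first (and
# only) entry of the StatusEffectList array as an oead.byml.Hash, so the stock effects
# behave like a plain mapping for merging or inspection.
def _example_count_stock_effects() -> None:
    effects = get_stock_effects()
    print(f"{len(effects)} stock status effect entries")
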