def inject_savedata_into_bootup(bgsvdata: sarc.SARCWriter,
                                bootup_path: Union[Path, None] = None) -> int:
    """
    Packs a savedata SARC into Bootup.pack and returns the RSTB size of the new
    savedataformat.sarc

    :param bgsvdata: A SARCWriter for the new savedata
    :type bgsvdata: class:`sarc.SARCWriter`
    :param bootup_path: Path to the Bootup.pack to update, defaults to a master BCML copy
    :type bootup_path: class:`pathlib.Path`, optional
    :returns: Returns the RSTB size of the new savedataformat.sarc
    :rtype: int
    """
    if not bootup_path:
        # Prefer a previously merged master-modpack Bootup.pack; otherwise
        # fall back to the stock game copy
        master_boot = util.get_master_modpack_dir() / 'content' / 'Pack' / 'Bootup.pack'
        bootup_path = master_boot if master_boot.exists() \
            else util.get_game_file('Pack/Bootup.pack')
    with bootup_path.open('rb') as b_file:
        bootup_pack = sarc.read_file_and_make_sarc(b_file)
    new_pack = sarc.make_writer_from_sarc(bootup_pack)
    # Replace the existing savedata archive with the freshly built one
    new_pack.delete_file('GameData/savedataformat.ssarc')
    savedata_bytes = bgsvdata.get_bytes()
    new_pack.add_file('GameData/savedataformat.ssarc', util.compress(savedata_bytes))
    (util.get_master_modpack_dir() / 'content' / 'Pack').mkdir(parents=True, exist_ok=True)
    # Output always lands in the master modpack, regardless of which
    # Bootup.pack was used as the source
    with (util.get_master_modpack_dir() / 'content' / 'Pack' / 'Bootup.pack').open('wb') as b_file:
        new_pack.write(b_file)
    # RSTB entries use the *uncompressed* size, hence the raw bytes here
    return rstb.SizeCalculator().calculate_file_size_with_ext(savedata_bytes, True, '.sarc')
def _get_sizes_in_sarc(file: Union[Path, sarc.SARC]) -> dict:
    """
    Recursively computes RSTB size values for every modified file in a SARC.

    :param file: The SARC to scan, either as a parsed object or a path to one
    :type file: :class:`pathlib.Path` or :class:`sarc.SARC`
    :returns: A map of canonical resource paths to their RSTB size values
    :rtype: dict
    """
    calc = rstb.SizeCalculator()
    sizes = {}
    guess = util.get_settings_bool('guess_merge')
    if isinstance(file, Path):
        with file.open('rb') as s_file:
            file = sarc.read_file_and_make_sarc(s_file)
        if not file:
            return {}
    for nest_file in file.list_files():
        # NOTE(review): replaces only the *first* '.s' anywhere in the path to
        # decompress the canonical name — presumed safe for BotW paths; verify
        canon = nest_file.replace('.s', '.')
        data = util.unyaz_if_needed(file.get_file_data(nest_file).tobytes())
        ext = Path(canon).suffix
        if util.is_file_modded(canon, data) and ext not in RSTB_EXCLUDE_EXTS and canon not in RSTB_EXCLUDE_NAMES:
            size = calc.calculate_file_size_with_ext(
                data,
                wiiu=True,
                ext=ext
            )
            if ext == '.bdmgparam':
                # .bdmgparam sizes are never calculated directly
                size = 0
            if size == 0 and guess:
                if ext in util.AAMP_EXTS:
                    size = guess_aamp_size(data, ext)
                elif ext in ['.bfres', '.sbfres']:
                    size = guess_bfres_size(data, canon)
            sizes[canon] = size
        # Recurse into nested SARCs, except .ssarc archives
        if util.is_file_sarc(nest_file) and not nest_file.endswith('.ssarc'):
            try:
                nest_sarc = sarc.SARC(data)
            except ValueError:
                continue
            sizes.update(_get_sizes_in_sarc(nest_sarc))
    return sizes
def bootup_from_msbts(
        lang: str = 'USen',
        msbt_dir: Path = None
) -> tuple:
    """
    Generates a new Bootup_XXxx.pack from a directory of MSBT files

    :param lang: The game language to use, defaults to USen.
    :type lang: str, optional
    :param msbt_dir: The directory to pull MSBTs from, defaults to
        "tmp_text/merged" in BCML's working directory.
    :type msbt_dir: class:`pathlib.Path`, optional
    :returns: A tuple with the path to the new Bootup_XXxx.pack and the RSTB
        size of the new Msg_XXxx.product.sarc
    :rtype: (class:`pathlib.Path`, int)
    """
    # Resolve the default lazily: the original evaluated util.get_work_dir()
    # at import time, freezing the path before settings could change
    if msbt_dir is None:
        msbt_dir = util.get_work_dir() / 'tmp_text' / 'merged'
    new_boot_path = msbt_dir.parent / f'Bootup_{lang}.pack'
    with new_boot_path.open('wb') as new_boot:
        s_msg = sarc.SARCWriter(True)
        for new_msbt in msbt_dir.rglob('**/*.msbt'):
            with new_msbt.open('rb') as f_new:
                s_msg.add_file(
                    str(new_msbt.relative_to(msbt_dir)).replace('\\', '/'),
                    f_new.read())
        new_msg_stream = io.BytesIO()
        s_msg.write(new_msg_stream)
        unyaz_bytes = new_msg_stream.getvalue()
        # RSTB size is computed from the *uncompressed* message SARC
        rsize = rstb.SizeCalculator().calculate_file_size_with_ext(
            unyaz_bytes, True, '.sarc')
        new_msg_bytes = util.compress(unyaz_bytes)
        s_boot = sarc.SARCWriter(True)
        s_boot.add_file(f'Message/Msg_{lang}.product.ssarc', new_msg_bytes)
        s_boot.write(new_boot)
    return new_boot_path, rsize
def perform_merge(self):
    """Merges all installed event info mods into EventInfo.product.sbyml."""
    merged_events = util.get_master_modpack_dir() / "logs" / "eventinfo.byml"
    event_merge_log = util.get_master_modpack_dir() / "logs" / "eventinfo.log"
    print("Loading event info mods...")
    modded_events = self.consolidate_diffs(self.get_all_diffs())
    # Store the hash as a string: read_text() returns str, so comparing it
    # against an int hash would never match and the merge would always re-run
    event_mod_hash = str(hash(str(modded_events)))
    if not modded_events:
        print("No event info merging necessary")
        if merged_events.exists():
            merged_events.unlink()
        # Guard the log separately — it may be missing even when the merged
        # BYML exists, and unlink() on a missing file raises
        if event_merge_log.exists():
            event_merge_log.unlink()
        try:
            # Restore the stock event info into the merged Bootup.pack
            stock_eventinfo = util.get_nested_file_bytes(
                (
                    str(util.get_game_file("Pack/Bootup.pack"))
                    + "//Event/EventInfo.product.sbyml"
                ),
                unyaz=False,
            )
            util.inject_file_into_sarc(
                "Event/EventInfo.product.sbyml",
                stock_eventinfo,
                "Pack/Bootup.pack",
            )
        except FileNotFoundError:
            pass
        return
    if event_merge_log.exists() and event_merge_log.read_text() == event_mod_hash:
        print("No event info merging necessary")
        return
    new_events = get_stock_eventinfo()
    for event, data in modded_events.items():
        new_events[event] = data
    del modded_events
    print("Writing new event info...")
    event_bytes = oead.byml.to_binary(
        new_events, big_endian=util.get_settings("wiiu")
    )
    del new_events
    util.inject_file_into_sarc(
        "Event/EventInfo.product.sbyml",
        util.compress(event_bytes),
        "Pack/Bootup.pack",
        create_sarc=True,
    )
    print("Saving event info merge log...")
    event_merge_log.parent.mkdir(parents=True, exist_ok=True)
    event_merge_log.write_text(event_mod_hash)
    merged_events.write_bytes(event_bytes)
    print("Updating RSTB...")
    # Cast the oead byte container to bytes before size calculation
    rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(
        bytes(event_bytes), True, ".byml"
    )
    del event_bytes
    rstable.set_size("Event/EventInfo.product.byml", rstb_size)
def perform_merge(self):
    # Merges all installed mainfield map mods: wipes previously merged map
    # units, empties AocMainField.pack, merges every modded unit in a pool,
    # and logs the resulting RSTB values.
    no_del = self._options.get("no_del", False)
    # Clear map units merged by a previous run (DLC side, then base game)
    shutil.rmtree(
        str(util.get_master_modpack_dir() / util.get_dlc_path() /
            ("0010" if util.get_settings("wiiu") else "") / "Map" /
            "MainField"),
        ignore_errors=True,
    )
    shutil.rmtree(
        str(util.get_master_modpack_dir() / util.get_content_path() / "Map" /
            "MainField"),
        ignore_errors=True,
    )
    log_path = util.get_master_modpack_dir() / "logs" / "map.log"
    if log_path.exists():
        log_path.unlink()
    print("Loading map mods...")
    map_diffs = self.consolidate_diffs(self.get_all_diffs())
    util.vprint("All map diffs:")
    util.vprint(map_diffs)
    if not map_diffs:
        print("No map merge necessary")
        return
    aoc_pack = (util.get_master_modpack_dir() / util.get_dlc_path() /
                ("0010" if util.get_settings("wiiu") else "") / "Pack" /
                "AocMainField.pack")
    # NOTE(review): an empty AocMainField.pack presumably makes the game load
    # map units loose from disk instead of from the pack — confirm
    if not aoc_pack.exists() or aoc_pack.stat().st_size > 0:
        print("Emptying AocMainField.pack...")
        aoc_pack.parent.mkdir(parents=True, exist_ok=True)
        aoc_pack.write_bytes(b"")
    rstb_vals = {}
    rstb_calc = rstb.SizeCalculator()
    print("Merging modded map units...")
    # Reuse the shared pool if the merger was given one
    pool = self._pool or Pool(maxtasksperchild=500)
    rstb_results = pool.map(
        partial(merge_map, rstb_calc=rstb_calc, no_del=no_del),
        map_diffs.items(),
    )
    # Each result maps both the DLC path key and "main" to (canon, size) pairs
    for result in rstb_results:
        rstb_vals[result[util.get_dlc_path()][0]] = result[
            util.get_dlc_path()][1]
        rstb_vals[result["main"][0]] = result["main"][1]
    if not self._pool:
        # Only dispose of the pool if this method created it
        pool.close()
        pool.join()
    print("Adjusting RSTB...")
    log_path.parent.mkdir(parents=True, exist_ok=True)
    with log_path.open("w", encoding="utf-8") as l_file:
        for canon, val in rstb_vals.items():
            l_file.write(f"{canon},{val}\n")
    print("Map merge complete")
def perform_merge(self):
    """Merges all installed status effect mods into StatusEffectList.sbyml."""
    merged_effects = util.get_master_modpack_dir() / "logs" / "effects.byml"
    print("Loading status effect mods...")
    diffs = self.consolidate_diffs(self.get_all_diffs())
    if not diffs:
        print("No status effect merging necessary...")
        if merged_effects.exists():
            merged_effects.unlink()
        try:
            # Restore the stock status effect list into the merged Bootup.pack
            stock_effects = util.get_nested_file_bytes(
                (
                    str(util.get_game_file("Pack/Bootup.pack"))
                    + "//Ecosystem/StatusEffectList.sbyml"
                ),
                unyaz=False,
            )
            util.inject_file_into_sarc(
                "Ecosystem/StatusEffectList.sbyml",
                stock_effects,
                "Pack/Bootup.pack",
            )
            del stock_effects
        except FileNotFoundError:
            pass
        return
    util.vprint("All status effect diffs:")
    util.vprint(diffs)
    effects = get_stock_effects()
    util.dict_merge(effects, diffs, overwrite_lists=True)
    del diffs
    print("Writing new effects list...")
    # The status effect list BYML is a single-element array at the root
    effect_bytes = oead.byml.to_binary(
        oead.byml.Array([effects]), big_endian=util.get_settings("wiiu")
    )
    del effects
    util.inject_file_into_sarc(
        "Ecosystem/StatusEffectList.sbyml",
        util.compress(effect_bytes),
        "Pack/Bootup.pack",
        create_sarc=True,
    )
    print("Saving status effect merge log...")
    merged_effects.parent.mkdir(parents=True, exist_ok=True)
    merged_effects.write_bytes(effect_bytes)
    print("Updating RSTB...")
    # Cast to bytes before size calculation, matching the other mergers —
    # oead.byml.to_binary does not return a plain bytes object
    rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(
        bytes(effect_bytes), True, ".byml"
    )
    del effect_bytes
    rstable.set_size("Ecosystem/StatusEffectList.byml", rstb_size)
def merge_events():
    """ Merges all installed event info mods """
    event_mods = [mod for mod in util.get_installed_mods() \
                  if (mod.path / 'logs' / 'eventinfo.yml').exists()]
    merged_events = util.get_master_modpack_dir() / 'logs' / 'eventinfo.byml'
    event_merge_log = util.get_master_modpack_dir() / 'logs' / 'eventinfo.log'
    event_mod_hash = str(hash(tuple(event_mods)))
    if not event_mods:
        print('No event info merging necessary')
        if merged_events.exists():
            merged_events.unlink()
        # Guard the log unlink separately: the log can be missing even when
        # the merged BYML exists, and unlink() on a missing file raises
        if event_merge_log.exists():
            event_merge_log.unlink()
        try:
            # Restore stock event info into the merged Bootup.pack
            stock_eventinfo = util.get_nested_file_bytes(
                str(util.get_game_file('Pack/Bootup.pack')) +
                '//Event/EventInfo.product.sbyml',
                unyaz=False
            )
            util.inject_file_into_bootup(
                'Event/EventInfo.product.sbyml',
                stock_eventinfo
            )
        except FileNotFoundError:
            pass
        return
    if event_merge_log.exists() and event_merge_log.read_text() == event_mod_hash:
        print('No event info merging necessary')
        return
    print('Loading event info mods...')
    modded_events = {}
    for mod in event_mods:
        modded_events.update(get_events_for_mod(mod))
    new_events = get_stock_eventinfo()
    for event, data in modded_events.items():
        new_events[event] = data
    print('Writing new event info...')
    event_bytes = byml.Writer(new_events, be=True).get_bytes()
    util.inject_file_into_bootup(
        'Event/EventInfo.product.sbyml',
        util.compress(event_bytes),
        create_bootup=True
    )
    print('Saving event info merge log...')
    # Ensure the logs directory exists before writing the merge log
    event_merge_log.parent.mkdir(parents=True, exist_ok=True)
    event_merge_log.write_text(event_mod_hash)
    merged_events.write_bytes(event_bytes)
    print('Updating RSTB...')
    rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(event_bytes, True, '.byml')
    rstable.set_size('Event/EventInfo.product.byml', rstb_size)
def make_loadable_layer(content_dir: Path, patch_dir: Path, target_dir: Path, wiiu: bool, table: rstb.ResourceSizeTable, is_aoc: bool): """Converts an extracted content patch view into a loadable content layer. Directories that have an SARC extension in their name will be recursively repacked as archives. """ # Copy files to the target directory so that we don't trash the original files. shutil.copytree(str(patch_dir), str(target_dir)) # Build a list of files and directories that need to be patched. files_by_depth: typing.DefaultDict[int, typing.List[Path]] = defaultdict(list) for root, dirs, files in os.walk(target_dir, topdown=False): for file_name in files: full_path = os.path.join(root, file_name) files_by_depth[full_path.count(os.path.sep)].append( Path(full_path)) for file_name in dirs: full_path = os.path.join(root, file_name) files_by_depth[full_path.count(os.path.sep)].append( Path(full_path)) size_calculator = rstb.SizeCalculator() for depth in sorted(files_by_depth.keys(), reverse=True): for file in files_by_depth[depth]: rel_path = file.relative_to(target_dir) # Repack any extracted archive. if _is_dir(file) and _is_archive_filename(file): sys.stderr.write( f'repacking {Fore.CYAN}%s{Style.RESET_ALL}...\n' % rel_path) repack_archive(content_dir=content_dir, archive_path=file, rel_archive_dir=rel_path, wiiu=wiiu) if not file.is_file(): continue # TODO: support arbitrary file conversions (contentfs needs to be modified too), # for example yaml -> byml, bxml -> xml. # Fix the size in the RSTB *before* compression. _fix_rstb_resource_size(path=file, rel_path=rel_path, table=table, wiiu=wiiu, is_aoc=is_aoc)
def generate_diff(self, mod_dir: Path, modded_files: List[Path]):
    """
    Computes RSTB size values for every modded file in a mod.

    :param mod_dir: The root directory of the mod being processed
    :param modded_files: The modded files, either as loose `Path`s or as
        "pack//nested//file" style strings for SARC-nested files
    :returns: A map of modded files to their computed RSTB values
    """
    rstb_diff = {}
    open_sarcs = {}
    for file in modded_files:
        if isinstance(file, Path):
            canon = util.get_canon_name(file.relative_to(mod_dir).as_posix())
            if Path(canon).suffix not in RSTB_EXCLUDE_EXTS and\
               Path(canon).name not in RSTB_EXCLUDE_NAMES:
                size = calculate_size(file)
                if file.suffix == '.bdmgparam':
                    # .bdmgparam sizes are never calculated directly
                    size = 0
                if size == 0 and self._options['guess']:
                    if file.suffix in util.AAMP_EXTS:
                        size = guess_aamp_size(file)
                    elif file.suffix in ['.bfres', '.sbfres']:
                        size = guess_bfres_size(file)
                rstb_diff[file] = size
        elif isinstance(file, str):
            # Nested file specifier: open each SARC level at most once
            parts = file.split('//')
            name = parts[-1]
            if parts[0] not in open_sarcs:
                with (mod_dir / parts[0]).open('rb') as s_file:
                    open_sarcs[parts[0]] = sarc.read_file_and_make_sarc(s_file)
            for part in parts[1:-1]:
                if part not in open_sarcs:
                    open_sarcs[part] = sarc.SARC(
                        util.unyaz_if_needed(
                            open_sarcs[parts[parts.index(part) - 1]]\
                                .get_file_data(part).tobytes()
                        )
                    )
            ext = Path(name).suffix
            data = util.unyaz_if_needed(open_sarcs[parts[-2]].get_file_data(name).tobytes())
            rstb_val = rstb.SizeCalculator().calculate_file_size_with_ext(
                data,
                wiiu=True,
                ext=ext
            )
            if ext == '.bdmgparam':
                rstb_val = 0
            if rstb_val == 0 and self._options['guess']:
                if ext in util.AAMP_EXTS:
                    rstb_val = guess_aamp_size(data, ext)
                elif ext in ['.bfres', '.sbfres']:
                    rstb_val = guess_bfres_size(data, name)
            rstb_diff[file] = rstb_val
    # The original looped `for open_sarc in open_sarcs: del open_sarc`, which
    # only unbound the loop variable and released nothing; clear() actually
    # drops the references to the opened SARCs
    open_sarcs.clear()
    return rstb_diff
def merge_maps(no_del: bool = False, link_del: bool = False, verbose: bool = False,
               original_pool: Pool = None):
    """Merges all installed modifications to mainfield maps"""
    aoc_pack = util.get_master_modpack_dir() / 'aoc' / '0010' / \
        'Pack' / 'AocMainField.pack'
    # An empty AocMainField.pack is required so merged units load loose
    if not aoc_pack.exists() or aoc_pack.stat().st_size > 0:
        print('Emptying AocMainField.pack...')
        aoc_pack.parent.mkdir(parents=True, exist_ok=True)
        aoc_pack.write_bytes(b'')
    # Clear map units merged by a previous run (DLC side, then base game)
    shutil.rmtree(str(util.get_master_modpack_dir() / 'aoc' /
                      '0010' / 'Map' / 'MainField'), ignore_errors=True)
    shutil.rmtree(str(util.get_master_modpack_dir() / 'content' /
                      'Map' / 'MainField'), ignore_errors=True)
    log_path = util.get_master_modpack_dir() / 'logs' / 'map.log'
    if log_path.exists():
        log_path.unlink()
    print('Loading map mods...')
    map_diffs = get_all_map_diffs()
    if not map_diffs:
        print('No map merge necessary')
        return
    rstb_vals = {}
    rstb_calc = rstb.SizeCalculator()
    print('Merging modded map units...')
    # Floor at 1: on a single-core machine cpu_count() - 1 is 0, and
    # Pool(processes=0) raises a ValueError
    num_threads = max(min(cpu_count() - 1, len(map_diffs)), 1)
    pool = original_pool or Pool(processes=num_threads)
    rstb_results = pool.map(
        partial(merge_map, rstb_calc=rstb_calc, no_del=no_del,
                link_del=link_del, verbose=verbose),
        list(map_diffs.items())
    )
    # Each result maps 'aoc' and 'main' to (canon, size) pairs
    for result in rstb_results:
        rstb_vals[result['aoc'][0]] = result['aoc'][1]
        rstb_vals[result['main'][0]] = result['main'][1]
    if not original_pool:
        # Only dispose of the pool if this function created it
        pool.close()
        pool.join()
    print('Adjusting RSTB...')
    # Ensure the logs directory exists before writing the merge log
    log_path.parent.mkdir(parents=True, exist_ok=True)
    with log_path.open('w', encoding='utf-8') as l_file:
        for canon, val in rstb_vals.items():
            l_file.write(f'{canon},{val}\n')
    print('Map merge complete')
def calculate_size(path: Path) -> int:
    """
    Calculates the resource size value for the given file

    :returns: The proper RSTB value for the file if it can be calculated,
    otherwise 0.
    :rtype: int
    """
    # Lazily create and cache a single SizeCalculator on the function object
    calc = getattr(calculate_size, 'rstb_calc', None)
    if calc is None:
        calc = rstb.SizeCalculator()
        calculate_size.rstb_calc = calc
    try:
        return calc.calculate_file_size(
            file_name=str(path),
            wiiu=True,
            force=False
        )
    except struct.error:
        # Files the calculator cannot parse get no computed size
        return 0
continue elif table.is_in_table(canon) and size < table.get_size(canon): del diffs[canon] continue else: table.set_size(canon, size) if table.is_in_table(f"Message/Msg_{util.get_settings('lang')}.product.sarc"): table.delete_entry(f"Message/Msg_{util.get_settings('lang')}.product.sarc") out = ( master / util.get_content_path() / "System" / "Resource" / "ResourceSizeTable.product.srsizetable" ) out.parent.mkdir(parents=True, exist_ok=True) with io.BytesIO() as buf: table.write(buf, util.get_settings("wiiu")) out.write_bytes(util.compress(buf.getvalue())) log = master / "logs" / "rstb.json" log.parent.mkdir(parents=True, exist_ok=True) log.write_text(json.dumps(diffs, ensure_ascii=False, indent=2, sort_keys=True)) def get_mod_edit_info(self, mod: util.BcmlMod) -> set: return set(self.get_mod_diff(mod).keys()) setattr(calculate_size, "calculator", rstb.SizeCalculator())
def _load_botw_resource_factory_info(
) -> typing.Dict[str, rstb.SizeCalculator.Factory]:
    """Return the resource factory info table bundled with the RSTB library."""
    calculator = rstb.SizeCalculator()
    return calculator.get_factory_info()
if archive_path.suffix.startswith('.s'): sys.stderr.write('compressing...\n') _compress_file(archive_path) shutil.rmtree(temp_archive_dir) return True _RSTB_PATH_IN_CONTENT = 'System/Resource/ResourceSizeTable.product.srsizetable' _RSTB_BLACKLIST = {'Actor/ActorInfo.product.byml'} _RSTB_BLACKLIST_ARCHIVE_EXT = { '.blarc', '.sblarc', '.genvb', '.sgenvb', '.bfarc', '.sbfarc' } _RSTB_BLACKLIST_SUFFIXES = {'.pack', '.yml', '.yaml', '.aamp', '.xml'} size_calculator = rstb.SizeCalculator() def _should_be_listed_in_rstb(resource_path: Path, rel_path: Path) -> bool: if str(resource_path) in _RSTB_BLACKLIST: return False for parent in rel_path.parents: if parent.suffix in _RSTB_BLACKLIST_ARCHIVE_EXT: return False return resource_path.suffix not in _RSTB_BLACKLIST_SUFFIXES def _fix_rstb_resource_size(path: Path, rel_path: Path, table: rstb.ResourceSizeTable, wiiu: bool, is_aoc: bool): resource_path = _get_resource_path_for_rstb(rel_path, is_aoc)
def perform_merge(self):
    # Merges all installed mainfield map mods: wipes previously merged map
    # units, merges every modded unit in a pool, removes superseded stock
    # Static units from TitleBG.pack, and logs the resulting RSTB values.
    # Clear map units merged by a previous run (DLC side, then base game)
    shutil.rmtree(
        str(util.get_master_modpack_dir() / util.get_dlc_path() /
            ("0010" if util.get_settings("wiiu") else "") / "Map" /
            "MainField"),
        ignore_errors=True,
    )
    shutil.rmtree(
        str(util.get_master_modpack_dir() / util.get_content_path() / "Map" /
            "MainField"),
        ignore_errors=True,
    )
    log_path = util.get_master_modpack_dir() / "logs" / "map.log"
    if log_path.exists():
        log_path.unlink()
    print("Loading map mods...")
    map_diffs = self.consolidate_diffs(self.get_all_diffs())
    util.vprint("All map diffs:")
    util.vprint(map_diffs)
    if not map_diffs:
        print("No map merge necessary")
        return
    aoc_pack = (util.get_master_modpack_dir() / util.get_dlc_path() /
                ("0010" if util.get_settings("wiiu") else "") / "Pack" /
                "AocMainField.pack")
    # NOTE(review): an empty AocMainField.pack presumably makes the game load
    # map units loose from disk instead of from the pack — confirm
    if not aoc_pack.exists() or aoc_pack.stat().st_size > 0:
        print("Emptying AocMainField.pack...")
        aoc_pack.parent.mkdir(parents=True, exist_ok=True)
        aoc_pack.write_bytes(b"")
    rstb_vals = {}
    rstb_calc = rstb.SizeCalculator()
    print("Merging modded map units...")
    # Reuse the shared pool if the merger was given one
    pool = self._pool or Pool(maxtasksperchild=500)
    rstb_results = pool.map(
        partial(merge_map, rstb_calc=rstb_calc),
        map_diffs.items(),
    )
    # Each result maps both the DLC path key and "main" to (canon, size) pairs
    for result in rstb_results:
        rstb_vals[result[util.get_dlc_path()][0]] = result[
            util.get_dlc_path()][1]
        rstb_vals[result["main"][0]] = result["main"][1]
    if not self._pool:
        # Only dispose of the pool if this method created it
        pool.close()
        pool.join()
    # NOTE(review): presumably the map_diffs keys are (unit, kind) tuples, so
    # m[1] == "Static" selects modded Static map units — confirm with callers
    stock_static = [m for m in map_diffs if m[1] == "Static"]
    if stock_static:
        title_path = (util.get_master_modpack_dir() / util.get_content_path() /
                      "Pack" / "TitleBG.pack")
        if not title_path.exists():
            # Seed TitleBG.pack from the stock game copy before editing it
            title_path.parent.mkdir(parents=True, exist_ok=True)
            shutil.copyfile(util.get_game_file("Pack/TitleBG.pack"), title_path)
        title_bg: oead.SarcWriter = oead.SarcWriter.from_sarc(
            oead.Sarc(title_path.read_bytes()))
        for static in stock_static:
            # NOTE(review): raises KeyError if the Static unit is not present
            # in TitleBG.pack — confirm that cannot happen here
            del title_bg.files[
                f"Map/MainField/{static[0]}/{static[0]}_Static.smubin"]
        title_path.write_bytes(title_bg.write()[1])
    print("Adjusting RSTB...")
    log_path.parent.mkdir(parents=True, exist_ok=True)
    with \
            log_path.open("w", encoding="utf-8") as l_file:
        for canon, val in rstb_vals.items():
            l_file.write(f"{canon},{val}\n")
    print("Map merge complete")
def perform_merge(self):
    """Merges all installed area data mods into AreaData.sbyml."""
    merged_areadata = util.get_master_modpack_dir() / "logs" / "areadata.byml"
    areadata_merge_log = util.get_master_modpack_dir() / "logs" / "areadata.log"
    print("Loading area data mods...")
    modded_areadata = self.consolidate_diffs(self.get_all_diffs())
    # Store the hash as a string: read_text() returns str, so comparing it
    # against an int hash would never match and the merge would always re-run
    areadata_mod_hash = str(hash(str(modded_areadata)))
    if not modded_areadata:
        print("No area data merging necessary")
        if merged_areadata.exists():
            merged_areadata.unlink()
        # Guard the log separately — it may be missing even when the merged
        # BYML exists, and unlink() on a missing file raises
        if areadata_merge_log.exists():
            areadata_merge_log.unlink()
        try:
            # Restore the stock area data into the merged Bootup.pack
            stock_areadata = util.get_nested_file_bytes(
                (
                    str(util.get_game_file("Pack/Bootup.pack"))
                    + "//Ecosystem/AreaData.sbyml"
                ),
                unyaz=False,
            )
            util.inject_file_into_sarc(
                "Ecosystem/AreaData.sbyml",
                stock_areadata,
                "Pack/Bootup.pack",
            )
        except FileNotFoundError:
            pass
        return
    if (
        areadata_merge_log.exists()
        and areadata_merge_log.read_text() == areadata_mod_hash
    ):
        print("No area data merging necessary")
        return
    new_areadata = get_stock_areadata()
    util.dict_merge(new_areadata, modded_areadata, overwrite_lists=True)
    print("Writing new area data...")
    # The area data BYML is an array sorted by numeric area ID
    areadata_bytes = oead.byml.to_binary(
        oead.byml.Array(
            [v for _, v in sorted(new_areadata.items(), key=lambda x: int(x[0]))]
        ),
        big_endian=util.get_settings("wiiu"),
    )
    del new_areadata
    util.inject_file_into_sarc(
        "Ecosystem/AreaData.sbyml",
        util.compress(areadata_bytes),
        "Pack/Bootup.pack",
        create_sarc=True,
    )
    print("Saving area data merge log...")
    areadata_merge_log.parent.mkdir(parents=True, exist_ok=True)
    areadata_merge_log.write_text(areadata_mod_hash)
    merged_areadata.write_bytes(areadata_bytes)
    print("Updating RSTB...")
    rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(
        bytes(areadata_bytes), True, ".byml"
    )
    del areadata_bytes
    rstable.set_size("Ecosystem/AreaData.byml", rstb_size)