def get_aamp_diff(file: Union[Path, str], tmp_dir: Path):
    """Diff a modded AAMP file against its stock game counterpart.

    :param file: The modded AAMP file, either a real path or a "//"-joined
        nested-file string (pack path followed by inner file names).
    :param tmp_dir: The temp directory containing the mod.
    :return: The AAMP diff between the stock and modded parameter trees.
    """
    if isinstance(file, str):
        # Nested file inside a SARC: "<pack>//<inner>//..."
        nests = file.split('//')
        mod_bytes = util.get_nested_file_bytes(file)
        stock_pack = util.get_game_file(Path(nests[0]).relative_to(tmp_dir))
        ref_path = '//'.join([str(stock_pack), *nests[1:]])
        ref_bytes = util.get_nested_file_bytes(ref_path)
    else:
        mod_bytes = util.unyaz_if_needed(file.read_bytes())
        stock_file = util.get_game_file(file.relative_to(tmp_dir))
        ref_bytes = util.unyaz_if_needed(stock_file.read_bytes())
    ref_aamp = aamp.Reader(ref_bytes).parse()
    mod_aamp = aamp.Reader(mod_bytes).parse()
    return _aamp_diff(ref_aamp, mod_aamp)
def _clean_sarc(old_sarc: oead.Sarc, base_sarc: oead.Sarc) -> Optional[oead.SarcWriter]:
    """Strip files from ``base_sarc`` that are unchanged from ``old_sarc``.

    Recurses into nested SARCs (except SPECIAL ones). Returns a writer
    containing only changed/new files, or ``None`` when nothing differs and
    the whole SARC can be deleted.
    """
    old_files = {f.name for f in old_sarc.get_files()}
    new_sarc = oead.SarcWriter(endian=oead.Endianness.Big if util.get_settings(
        "wiiu") else oead.Endianness.Little)
    can_delete = True
    for nest_file, file_data in [(f.name, f.data) for f in base_sarc.get_files()]:
        ext = Path(nest_file).suffix
        if ext in {".yml", ".bak"}:
            # BCML log/backup artifacts, never part of game data
            continue
        if nest_file in old_files:
            old_data = util.unyaz_if_needed(old_sarc.get_file(nest_file).data)
            file_data = util.unyaz_if_needed(file_data)
        # AAMP files are handled by their own merge logs, so content diffs
        # in them alone do not force keeping the file here
        if nest_file not in old_files or (file_data != old_data and ext not in util.AAMP_EXTS):
            if (ext in util.SARC_EXTS and nest_file in old_files
                    and nest_file not in SPECIAL):
                # nested pack that exists in stock: recurse and keep only changes
                nest_old_sarc = oead.Sarc(old_data)
                nest_base_sarc = oead.Sarc(file_data)
                nest_new_sarc = _clean_sarc(nest_old_sarc, nest_base_sarc)
                if nest_new_sarc:
                    new_bytes = nest_new_sarc.write()[1]
                    if ext.startswith(".s") and ext != ".sarc":
                        # ".s*" extensions (except plain ".sarc") are yaz0-compressed
                        new_bytes = util.compress(new_bytes)
                    new_sarc.files[nest_file] = oead.Bytes(new_bytes)
                    can_delete = False
                else:
                    continue
            else:
                if ext.startswith(".s") and ext != ".sarc":
                    file_data = util.compress(file_data)
                new_sarc.files[nest_file] = oead.Bytes(file_data)
                can_delete = False
    return None if can_delete else new_sarc
def _clean_sarc_file(file: Path, hashes: dict, tmp_dir: Path):
    """Remove unmodified entries from a modded SARC on disk, deleting the
    file entirely when nothing in it differs from the game dump.

    :param file: The modded SARC file in the mod's temp directory.
    :param hashes: Canonical name -> stock hash table.
    :param tmp_dir: The temp directory containing the mod.
    """
    canon = util.get_canon_name(file.relative_to(tmp_dir))
    try:
        stock_file = util.get_game_file(file.relative_to(tmp_dir))
    except FileNotFoundError:
        return
    try:
        old_sarc = oead.Sarc(util.unyaz_if_needed(stock_file.read_bytes()))
    except (RuntimeError, ValueError, oead.InvalidDataError):
        return
    if canon not in hashes:
        return
    try:
        base_sarc = oead.Sarc(util.unyaz_if_needed(file.read_bytes()))
    except (RuntimeError, ValueError, oead.InvalidDataError):
        return
    cleaned = _clean_sarc(old_sarc, base_sarc)
    if not cleaned:
        file.unlink()
        return
    data = cleaned.write()[1]
    # NOTE(review): this guard excludes ".ssarc" from compression while other
    # writers in this codebase exclude ".sarc" — confirm which is intended.
    if file.suffix.startswith(".s") and file.suffix != ".ssarc":
        data = util.compress(data)
    file.write_bytes(data)
def _get_diffs_from_sarc(sarc: Sarc, ref_sarc: Sarc, edits: dict, path: str) -> dict:
    """Recursively collect AAMP diffs for the edited files in a SARC.

    :param sarc: The modded SARC.
    :param ref_sarc: The stock SARC to diff against.
    :param edits: Tree of edited nested paths; a truthy value marks the
        entry as a nested SARC carrying further edits.
    :param path: The "//"-joined path to ``sarc``, used in messages and keys.
    :return: Mapping of full nested paths to AAMP diffs.
    :raises ValueError: If a nested file cannot be read or parsed.
    """
    diffs = {}
    # Fix: loop variable renamed so it no longer shadows the ``edits`` param
    for file, sub_edits in edits.items():
        if sub_edits:
            # entry is itself a nested SARC: recurse
            try:
                rsub_sarc = Sarc(
                    util.unyaz_if_needed(ref_sarc.get_file(file).data))
            except (AttributeError, InvalidDataError, ValueError, RuntimeError) as err:
                util.vprint(f'Skipping "{path}//{file}", {err}')
                continue
            sub_sarc = Sarc(util.unyaz_if_needed(sarc.get_file(file).data))
            diffs.update(
                _get_diffs_from_sarc(sub_sarc, rsub_sarc, sub_edits,
                                     path + "//" + file))
            del sub_sarc
            del rsub_sarc
        else:
            # leaf: diff the AAMP file itself
            full_path = f"{path}//{file}"
            try:
                ref_pio = ParameterIO.from_binary(ref_sarc.get_file(file).data)
            except AttributeError:
                # absent from the stock SARC; nothing to diff against
                continue
            try:
                pio = ParameterIO.from_binary(sarc.get_file(file).data)
            except AttributeError as err:
                raise ValueError(
                    f"Failed to read nested file:\n{path}//{file}") from err
            except (ValueError, RuntimeError, InvalidDataError) as err:
                # Fix: chain the original cause (was raised without ``from``)
                raise ValueError(
                    f"Failed to parse AAMP file:\n{path}//{file}") from err
            diffs.update({full_path: get_aamp_diff(pio, ref_pio)})
    return diffs
def _get_diffs_from_sarc(sarc: Sarc, ref_sarc: Sarc,
                         edits: Union[dict, list], path: str) -> dict:
    """Recursively collect AAMP diffs for edited files in a SARC.

    :param sarc: The modded SARC.
    :param ref_sarc: The stock SARC to diff against.
    :param edits: Either a dict (nested SARC name -> its own edits) or a
        flat list of edited leaf file names.
    :param path: The "//"-joined path to ``sarc``.
    :return: Mapping of full nested paths to AAMP diffs.
    """
    diffs = {}
    if isinstance(edits, dict):
        # dict form: each entry is a nested SARC with further edits
        for file, edits in edits.items():
            try:
                rsub_sarc = Sarc(
                    util.unyaz_if_needed(ref_sarc.get_file(file).data))
            except (AttributeError, InvalidDataError, ValueError, RuntimeError) as e:
                util.vprint(f'Skipping "{path}//{file}", {e}')
                continue
            sub_sarc = Sarc(util.unyaz_if_needed(sarc.get_file(file).data))
            diffs.update(
                _get_diffs_from_sarc(sub_sarc, rsub_sarc, edits,
                                     path + "//" + file))
            del sub_sarc
            del rsub_sarc
    else:
        # list form: leaf AAMP files to diff directly
        for file in edits:
            full_path = f"{path}//{file}"
            try:
                ref_pio = ParameterIO.from_binary(ref_sarc.get_file(file).data)
            except AttributeError:
                # absent from the stock SARC; nothing to diff against
                continue
            pio = ParameterIO.from_binary(sarc.get_file(file).data)
            diffs.update({full_path: get_aamp_diff(pio, ref_pio)})
    return diffs
def merge_sarcs(file_name: str, sarcs: List[Union[Path, bytes]]) -> (str, bytes):
    """Merge multiple versions of a SARC into one, recursing into nested SARCs.

    :param file_name: Name of the SARC being merged (used to detect
        Bootup.pack for bootup-injector mergers).
    :param sarcs: The versions to merge, as paths or raw bytes.
    :return: Tuple of the file name and the merged SARC bytes.
    """
    opened_sarcs: List[sarc.SARC] = []
    if isinstance(sarcs[0], Path):
        for i, sarc_path in enumerate(sarcs):
            sarcs[i] = sarc_path.read_bytes()
    for sarc_bytes in sarcs:
        sarc_bytes = util.unyaz_if_needed(sarc_bytes)
        try:
            opened_sarcs.append(sarc.SARC(sarc_bytes))
        except ValueError:
            continue
    all_files = {key for open_sarc in opened_sarcs for key in open_sarc.list_files()}
    nested_sarcs = {}
    new_sarc = sarc.SARCWriter(be=True)
    files_added = []
    # Iterate in reverse so the first version encountered for a file wins
    # (files_added prevents later overwrites)
    for opened_sarc in reversed(opened_sarcs):
        for file in [f for f in opened_sarc.list_files() if f not in files_added]:
            data = opened_sarc.get_file_data(file).tobytes()
            if util.is_file_modded(file.replace('.s', '.'), data, count_new=True):
                # Fix: was `not Path(file).suffix in ...`
                if Path(file).suffix not in util.SARC_EXTS:
                    new_sarc.add_file(file, data)
                    files_added.append(file)
                else:
                    # defer nested SARCs so all versions merge together below
                    if file not in nested_sarcs:
                        nested_sarcs[file] = []
                    nested_sarcs[file].append(util.unyaz_if_needed(data))
    # Fix: loop variable renamed so it no longer shadows the `sarcs` param
    for file, versions in nested_sarcs.items():
        merged_bytes = merge_sarcs(file, versions)[1]
        if Path(file).suffix.startswith('.s') and not file.endswith('.sarc'):
            merged_bytes = util.compress(merged_bytes)
        new_sarc.add_file(file, merged_bytes)
        files_added.append(file)
    # Unmodded files: copy from the first version that contains them
    for file in [file for file in all_files if file not in files_added]:
        for opened_sarc in [open_sarc for open_sarc in opened_sarcs
                            if file in open_sarc.list_files()]:
            new_sarc.add_file(file, opened_sarc.get_file_data(file).tobytes())
            break
    if 'Bootup.pack' in file_name:
        # Let bootup-injector mergers place their payloads into the pack
        for merger in [merger() for merger in mergers.get_mergers()
                       if merger.is_bootup_injector()]:
            inject = merger.get_bootup_injection()
            if not inject:
                continue
            file, data = inject
            try:
                new_sarc.delete_file(file)
            except KeyError:
                pass
            new_sarc.add_file(file, data)
    return (file_name, new_sarc.get_bytes())
def generate_diff(self, mod_dir: Path, modded_files: List[Path]):
    """Build the RSTB diff for a mod: file -> calculated resource size.

    :param mod_dir: Root of the extracted mod.
    :param modded_files: Modded files, as real paths or "//"-joined nested
        file strings.
    :return: Mapping of each file to its RSTB size value.
    """
    rstb_diff = {}
    open_sarcs = {}
    for file in modded_files:
        if isinstance(file, Path):
            canon = util.get_canon_name(file.relative_to(mod_dir).as_posix())
            if Path(canon).suffix not in RSTB_EXCLUDE_EXTS and\
               Path(canon).name not in RSTB_EXCLUDE_NAMES:
                size = calculate_size(file)
                if file.suffix == '.bdmgparam':
                    size = 0
                if size == 0 and self._options['guess']:
                    if file.suffix in util.AAMP_EXTS:
                        size = guess_aamp_size(file)
                    elif file.suffix in ['.bfres', '.sbfres']:
                        size = guess_bfres_size(file)
                rstb_diff[file] = size
        elif isinstance(file, str):
            # Nested file: "pack//subpack//...//name"
            parts = file.split('//')
            name = parts[-1]
            if parts[0] not in open_sarcs:
                with (mod_dir / parts[0]).open('rb') as s_file:
                    open_sarcs[parts[0]] = sarc.read_file_and_make_sarc(s_file)
            # Fix: use enumerate instead of parts.index(part); .index() finds
            # the FIRST occurrence and picked the wrong parent when the same
            # pack name appeared twice in the nested path
            for i, part in enumerate(parts[1:-1], start=1):
                if part not in open_sarcs:
                    open_sarcs[part] = sarc.SARC(
                        util.unyaz_if_needed(
                            open_sarcs[parts[i - 1]]
                            .get_file_data(part).tobytes()
                        )
                    )
            ext = Path(name).suffix
            data = util.unyaz_if_needed(
                open_sarcs[parts[-2]].get_file_data(name).tobytes())
            rstb_val = rstb.SizeCalculator().calculate_file_size_with_ext(
                data,
                wiiu=True,
                ext=ext
            )
            if ext == '.bdmgparam':
                rstb_val = 0
            if rstb_val == 0 and self._options['guess']:
                if ext in util.AAMP_EXTS:
                    rstb_val = guess_aamp_size(data, ext)
                elif ext in ['.bfres', '.sbfres']:
                    rstb_val = guess_bfres_size(data, name)
            rstb_diff[file] = rstb_val
    # Fix: `for open_sarc in open_sarcs: del open_sarc` only unbound the loop
    # variable; actually drop the references so the SARCs can be collected
    open_sarcs.clear()
    return rstb_diff
def get_shop_diffs(file: str, tree: dict, tmp_dir: Path) -> Optional[dict]:
    """Collect shop data diffs for one SARC between mod and stock game.

    :param file: The SARC's path relative to the game/mod root.
    :param tree: Tree of edited nested paths within the SARC.
    :param tmp_dir: The temp directory containing the mod.
    :return: The diff mapping, or ``None`` if either SARC cannot be opened.
    """
    try:
        stock_pack = Sarc(
            util.unyaz_if_needed(util.get_game_file(file).read_bytes())
        )
    except (FileNotFoundError, InvalidDataError, ValueError, RuntimeError) as err:
        util.vprint(f"{file} ignored on stock side, cuz {err}")
        return None
    try:
        mod_pack = Sarc(util.unyaz_if_needed((tmp_dir / file).read_bytes()))
    except (FileNotFoundError, InvalidDataError, ValueError, RuntimeError):
        util.vprint(f"{file} corrupt, ignored")
        return None
    diffs = _get_diffs_from_sarc(mod_pack, stock_pack, tree, file)
    del mod_pack
    del stock_pack
    return diffs
def _get_sizes_in_sarc(
    file: Union[Path, oead.Sarc], guess: bool, is_aoc: bool = False
) -> {}:
    """Compute RSTB sizes for every modded file inside a SARC, recursively.

    :param file: The SARC to scan, as a path or an open ``oead.Sarc``.
    :param guess: Whether size calculation may fall back to estimation.
    :param is_aoc: Whether the SARC belongs to DLC ("Aoc") content.
    :return: Mapping of canonical file names to calculated sizes.
    """
    sizes = {}
    if isinstance(file, Path):
        is_aoc = util.get_dlc_path() in file.as_posix()
        try:
            file = oead.Sarc(util.unyaz_if_needed(file.read_bytes()))
        except (RuntimeError, oead.InvalidDataError):
            print(f"{file} could not be opened")
            return {}
    # NOTE(review): is_aoc is threaded through recursion but not applied to
    # `canon` here — confirm whether an "Aoc/0010/" prefix is expected.
    for nest_file, data in [(file.name, file.data) for file in file.get_files()]:
        canon = nest_file.replace(".s", ".")
        if data[0:4] == b"Yaz0":
            # sizes are computed on the decompressed contents
            data = util.decompress(data)
        ext = Path(canon).suffix
        if (
            util.is_file_modded(canon, data)
            and ext not in EXCLUDE_EXTS
            and canon not in EXCLUDE_NAMES
        ):
            sizes[canon] = calculate_size(canon, data, guess=guess)
        if ext in util.SARC_EXTS - SARC_EXCLUDES:
            # recurse into nested packs
            try:
                nest_sarc = oead.Sarc(data)
            except (ValueError, RuntimeError, oead.InvalidDataError):
                continue
            sizes.update(_get_sizes_in_sarc(nest_sarc, guess, is_aoc=is_aoc))
            del nest_sarc
        del data
    del file
    return sizes
def calculate_size(
    path: Union[Path, str], data: ByteString = None, guess: bool = True
) -> int:
    """Calculate the RSTB resource size value for a file.

    :param path: Real file path, or a canonical name string when ``data``
        is provided.
    :param data: File contents; read from ``path`` when omitted.
    :param guess: Allow estimation for formats the calculator returns 0 for.
    :return: The computed size, or 0 if the data cannot be parsed.
    """
    ext = path.suffix if isinstance(path, Path) else path[path.rindex(".") :]
    data = util.unyaz_if_needed(path.read_bytes() if isinstance(path, Path) else data)
    try:
        be = util.get_settings("wiiu")  # pylint: disable=invalid-name
        # the "calculator" attribute is attached to this function elsewhere
        size = getattr(calculate_size, "calculator").calculate_file_size_with_ext(
            data, wiiu=be, ext=ext, force=False
        )
        if ext == ".bdmgparam":
            size = 0
        if ext == ".hkrb":
            # fixed correction for Havok rigid bodies — TODO confirm source
            size += 40
        if ext == ".baniminfo":
            # empirical formula: 32-byte alignment, size-dependent factor,
            # plus fixed overhead — presumably reverse-engineered; verify
            size = int(
                (((len(data) + 31) & -32) * (1.5 if len(data) > 36864 else 4))
                + 0xE4
                + 0x24C
            )
            if not be:
                size = int(size * 1.5)
        if size == 0 and guess:
            if ext in util.AAMP_EXTS:
                size = guess_aamp_size(data, be, ext)
            elif ext in {".bfres", ".sbfres"}:
                size = guess_bfres_size(
                    data,
                    be,
                    path if isinstance(path, str) else path.name,
                )
        return size
    except struct.error:
        return 0
def _get_sizes_in_sarc(file: Union[Path, sarc.SARC]) -> {}:
    """Compute RSTB sizes for modded files in a SARC (legacy ``sarc`` lib).

    :param file: The SARC to scan, as a path or an open ``sarc.SARC``.
    :return: Mapping of canonical file names to sizes.
    """
    calc = rstb.SizeCalculator()
    sizes = {}
    guess = util.get_settings_bool('guess_merge')
    if isinstance(file, Path):
        with file.open('rb') as s_file:
            file = sarc.read_file_and_make_sarc(s_file)
        if not file:
            return {}
    for nest_file in file.list_files():
        canon = nest_file.replace('.s', '.')
        data = util.unyaz_if_needed(file.get_file_data(nest_file).tobytes())
        ext = Path(canon).suffix
        if util.is_file_modded(canon, data) and ext not in RSTB_EXCLUDE_EXTS and canon not in RSTB_EXCLUDE_NAMES:
            size = calc.calculate_file_size_with_ext(
                data,
                wiiu=True,
                ext=ext
            )
            if ext == '.bdmgparam':
                size = 0
            if size == 0 and guess:
                # calculator returned 0: fall back to estimation
                if ext in util.AAMP_EXTS:
                    size = guess_aamp_size(data, ext)
                elif ext in ['.bfres', '.sbfres']:
                    size = guess_bfres_size(data, canon)
            sizes[canon] = size
        if util.is_file_sarc(nest_file) and not nest_file.endswith('.ssarc'):
            # recurse into nested (non-.ssarc) SARCs
            try:
                nest_sarc = sarc.SARC(data)
            except ValueError:
                continue
            sizes.update(_get_sizes_in_sarc(nest_sarc))
    return sizes
def get_sizes_in_sarc(
    sarc: oead.Sarc, contents: Contents, guess: bool, dlc: bool
) -> Dict[str, int]:
    """Compute RSTB sizes for the listed contents of a SARC.

    :param sarc: The SARC to read files from.
    :param contents: Either a flat list of leaf file names, or a mapping of
        nested SARC names to their own contents trees.
    :param guess: Whether size calculation may fall back to estimation.
    :param dlc: Whether this is DLC content (adds the "Aoc/0010/" prefix).
    :return: Mapping of canonical names to sizes.
    """
    prefix = "" if not dlc else "Aoc/0010/"
    vals = {}
    if isinstance(contents, list):
        # leaf files: size each one directly
        for file in contents:
            if file[file.rindex(".") :] in EXCLUDE_EXTS:
                continue
            canon = prefix + file.replace(".s", ".")
            vals[canon] = calculate_size(canon, sarc.get_file(file).data, guess)
    elif isinstance(contents, dict):
        # nested SARCs: size the pack itself, then recurse into its contents
        for subpath, subcontents in contents.items():
            ext = subpath[subpath.rindex(".") :]
            if ext in EXCLUDE_EXTS:
                continue
            data = util.unyaz_if_needed(sarc.get_file(subpath).data)
            canon = prefix + subpath.replace(".s", ".")
            vals[canon] = calculate_size(canon, data, guess)
            if ext not in SARC_EXCLUDES:
                try:
                    subsarc = oead.Sarc(data)
                except (ValueError, RuntimeError, oead.InvalidDataError):
                    continue
                vals.update(get_sizes_in_sarc(subsarc, subcontents, guess, dlc))
    return vals
def _pack_sarc(folder: Path, tmp_dir: Path, hashes: dict):
    """Repack an unpacked SARC folder, keeping only files that differ from
    the game dump, then replace the folder with the packed file.

    :param folder: The unpacked SARC directory; removed and replaced in
        place by a packed file of the same name (unless nothing remains).
    :param tmp_dir: The temp directory containing the mod.
    :param hashes: Canonical name -> stock hash table.
    """
    packed = oead.SarcWriter(
        endian=oead.Endianness.Big
        if util.get_settings("wiiu")
        else oead.Endianness.Little
    )
    try:
        canon = util.get_canon_name(
            folder.relative_to(tmp_dir).as_posix(), allow_no_source=True
        )
        if canon not in hashes:
            raise FileNotFoundError("File not in game dump")
        stock_file = util.get_game_file(folder.relative_to(tmp_dir))
        try:
            old_sarc = oead.Sarc(util.unyaz_if_needed(stock_file.read_bytes()))
        except (RuntimeError, ValueError, oead.InvalidDataError):
            raise ValueError("Cannot open file from game dump")
        old_files = {f.name for f in old_sarc.get_files()}
    except (FileNotFoundError, ValueError):
        # no usable stock version: keep every file in the folder
        for file in {f for f in folder.rglob("**/*") if f.is_file()}:
            packed.files[file.relative_to(folder).as_posix()] = file.read_bytes()
    else:
        # stock version exists: keep only new or changed files
        for file in {
            f
            for f in folder.rglob("**/*")
            if f.is_file() and not f.suffix in EXCLUDE_EXTS
        }:
            file_data = file.read_bytes()
            xhash = xxhash.xxh64_intdigest(util.unyaz_if_needed(file_data))
            file_name = file.relative_to(folder).as_posix()
            if file_name in old_files:
                old_hash = xxhash.xxh64_intdigest(
                    util.unyaz_if_needed(old_sarc.get_file(file_name).data)
                )
            # old_hash is only read when file_name is in old_files (the
            # first clause short-circuits otherwise)
            if file_name not in old_files or (xhash != old_hash):
                packed.files[file_name] = file_data
    finally:
        # The folder is always removed; the packed file (if any) takes its
        # name. The early return inside ``finally`` deliberately suppresses
        # any in-flight exception (hence the pylint disable below).
        shutil.rmtree(folder)
        if not packed.files:
            return  # pylint: disable=lost-exception
        sarc_bytes = packed.write()[1]
        folder.write_bytes(
            util.compress(sarc_bytes)
            if (folder.suffix.startswith(".s") and not folder.suffix == ".sarc")
            else sarc_bytes
        )
def _clean_sarc(file: Path, hashes: dict, tmp_dir: Path):
    """Strip unmodified entries from a modded SARC on disk (legacy ``sarc``
    lib), deleting the file entirely when everything matches the game dump.

    :param file: The modded SARC file in the mod's temp directory.
    :param hashes: Canonical name -> stock hash table.
    :param tmp_dir: The temp directory containing the mod.
    """
    canon = util.get_canon_name(file.relative_to(tmp_dir))
    try:
        stock_file = util.get_game_file(file.relative_to(tmp_dir))
    except FileNotFoundError:
        return
    with stock_file.open('rb') as old_file:
        old_sarc = sarc.read_file_and_make_sarc(old_file)
    if not old_sarc:
        return
    old_files = set(old_sarc.list_files())
    if canon not in hashes:
        return
    with file.open('rb') as s_file:
        base_sarc = sarc.read_file_and_make_sarc(s_file)
    if not base_sarc:
        return
    new_sarc = sarc.SARCWriter(True)
    can_delete = True
    for nest_file in base_sarc.list_files():
        canon = nest_file.replace('.s', '.')
        ext = Path(canon).suffix
        if ext in {'.yml', '.bak'}:
            # BCML log/backup artifacts, never game data
            continue
        file_data = base_sarc.get_file_data(nest_file).tobytes()
        xhash = xxhash.xxh32(util.unyaz_if_needed(file_data)).hexdigest()
        if nest_file in old_files:
            old_hash = xxhash.xxh32(
                util.unyaz_if_needed(
                    old_sarc.get_file_data(nest_file).tobytes())).hexdigest()
        # AAMP files are merged by their own logs, so differing hashes in
        # them alone do not force keeping the file
        if nest_file not in old_files or (xhash != old_hash
                                          and ext not in util.AAMP_EXTS):
            can_delete = False
            new_sarc.add_file(nest_file, file_data)
    del old_sarc
    if can_delete:
        del new_sarc
        file.unlink()
    else:
        with file.open('wb') as s_file:
            if file.suffix.startswith('.s') and file.suffix != '.ssarc':
                s_file.write(util.compress(new_sarc.get_bytes()))
            else:
                new_sarc.write(s_file)
def _convert_sarc_file(pack: Path, to_wiiu: bool) -> list:
    """Convert a SARC file on disk between Wii U and Switch formats.

    :param pack: The SARC file to convert in place.
    :param to_wiiu: Convert to Wii U (big endian) if True, else to Switch.
    :return: List of conversion error messages (empty for an empty file).
    """
    raw = pack.read_bytes()
    if not raw:
        return []
    converted, errors = _convert_sarc(oead.Sarc(util.unyaz_if_needed(raw)), to_wiiu)
    if pack.suffix.startswith(".s") and pack.suffix != ".sarc":
        # ".s*" extensions (except plain ".sarc") are stored yaz0-compressed
        pack.write_bytes(util.compress(converted))
    else:
        pack.write_bytes(converted)
    return errors
def _convert_actorpack(actor_pack: Path, to_wiiu: bool) -> Union[None, str]:
    """Convert an actor pack's Havok physics files between console formats.

    :param actor_pack: Path to the .sbactorpack to convert in place.
    :param to_wiiu: Convert to Wii U (big endian) if True, else to Switch.
    :return: An error message when an unsupported file is found, else None.
    """
    error = None
    sarc = oead.Sarc(util.unyaz_if_needed(actor_pack.read_bytes()))
    new_sarc = oead.SarcWriter.from_sarc(sarc)
    new_sarc.set_endianness(oead.Endianness.Big if to_wiiu else oead.Endianness.Little)
    for file in sarc.get_files():
        if "Physics/" in file.name and "Actor/" not in file.name:
            ext = file.name[file.name.rindex(".") :]
            if ext in NO_CONVERT_EXTS:
                # Unconvertible Havok format: substitute the stock copy when
                # the file is unmodified, otherwise report an error
                if not util.is_file_modded(
                    util.get_canon_name(file.name, allow_no_source=True),
                    file.data,
                    count_new=True,
                ):
                    actor_name = file.name[
                        file.name.rindex("/") : file.name.rindex(".")
                    ]
                    try:
                        pack_path = util.get_game_file(
                            f"Actor/Pack/{actor_name}.sbactorpack"
                        )
                        stock_data = util.get_nested_file_bytes(
                            f"{str(pack_path)}//{file.name}"
                        )
                        if stock_data:
                            new_sarc.files[file.name] = stock_data
                        else:
                            raise FileNotFoundError(file.name)
                    except (FileNotFoundError, AttributeError):
                        error = (
                            "This mod contains a Havok file not supported by the "
                            f"converter: {file.name}"
                        )
                else:
                    error = (
                        "This mod contains a Havok file not supported by the"
                        f" converter: {file.name}"
                    )
            else:
                if file.data[0:4] == b"AAMP":
                    # AAMP-based physics files need no binary conversion here
                    continue
                try:
                    hk = Havok.from_bytes(bytes(file.data))
                except:  # pylint: disable=bare-except
                    return f"Could not parse Havok file {file.name}"
                if to_wiiu:
                    hk.to_wiiu()
                else:
                    hk.to_switch()
                hk.serialize()
                new_sarc.files[file.name] = hk.to_bytes()
    actor_pack.write_bytes(util.compress(new_sarc.write()[1]))
    return error
def find_modded_sarc_files(mod_sarc: Union[Path, oead.Sarc],
                           tmp_dir: Path,
                           name: str = "",
                           aoc: bool = False) -> List[str]:
    """Find all modded files inside a SARC, recursing into nested SARCs.

    :param mod_sarc: The SARC to scan, as a path or an open ``oead.Sarc``.
    :param tmp_dir: The temp directory containing the mod.
    :param name: "//"-joined path to this SARC (set automatically when a
        Path is given; passed explicitly on recursion).
    :param aoc: Whether this SARC belongs to DLC content.
    :return: List of "//"-joined nested paths of modded files.
    """
    if isinstance(mod_sarc, Path):
        # Bootup_* (language) packs are excluded from this scan
        if any(mod_sarc.name.startswith(exclude) for exclude in ["Bootup_"]):
            return []
        name = str(mod_sarc.relative_to(tmp_dir))
        aoc = util.get_dlc_path() in mod_sarc.parts or "Aoc" in mod_sarc.parts
        try:
            mod_sarc = oead.Sarc(util.unyaz_if_needed(mod_sarc.read_bytes()))
        except (RuntimeError, ValueError, oead.InvalidDataError):
            return []
    modded_files = []
    for file, contents in [(f.name, bytes(f.data)) for f in mod_sarc.get_files()]:
        canon = file.replace(".s", ".")
        if aoc:
            canon = "Aoc/0010/" + canon
        contents = util.unyaz_if_needed(contents)
        nest_path = str(name).replace("\\", "/") + "//" + file
        if util.is_file_modded(canon, contents, True):
            modded_files.append(nest_path)
            util.vprint(
                f'Found modded file {canon} in {str(name).replace("//", "/")}')
            if util.is_file_sarc(canon) and ".ssarc" not in file:
                # recurse into nested (non-.ssarc) SARCs
                try:
                    nest_sarc = oead.Sarc(contents)
                except ValueError:
                    continue
                sub_mod_files = find_modded_sarc_files(nest_sarc,
                                                       name=nest_path,
                                                       tmp_dir=tmp_dir,
                                                       aoc=aoc)
                modded_files.extend(sub_mod_files)
        else:
            util.vprint(
                f'Ignored unmodded file {canon} in {str(name).replace("//", "/")}'
            )
    return modded_files
def _convert_sarc(sarc: oead.Sarc, to_wiiu: bool) -> Tuple[bytes, List[str]]:
    """Convert a SARC's BYML contents (and nested SARCs) between endiannesses.

    :param sarc: The SARC to convert.
    :param to_wiiu: Convert to Wii U (big endian) if True, else to Switch.
    :return: The converted SARC bytes and a list of error messages for any
        files the converter does not support.
    """
    error = []
    new_sarc = oead.SarcWriter.from_sarc(sarc)
    new_sarc.set_endianness(
        oead.Endianness.Big if to_wiiu else oead.Endianness.Little)
    for file in sarc.get_files():
        ext = file.name[file.name.rindex("."):]
        if ext in NO_CONVERT_EXTS:
            error.append(
                f"This mod contains a file not supported by the converter: {file.name}"
            )
        elif ext in BYML_EXTS:
            # BYML: re-serialize in the target endianness
            byml = oead.byml.from_binary(util.unyaz_if_needed(file.data))
            new_sarc.files[file.name] = oead.byml.to_binary(byml, big_endian=to_wiiu)
        elif ext in SARC_EXTS and ext not in NO_CONVERT_EXTS:
            # nested SARC: recurse, re-compressing ".s*" entries (not ".sarc")
            nest = oead.Sarc(util.unyaz_if_needed(file.data))
            new_bytes, errs = _convert_sarc(nest, to_wiiu)
            new_sarc.files[file.name] = (
                new_bytes if not (ext.startswith(".s") and ext != ".sarc")
                else util.compress(new_bytes))
            error.extend(errs)
    return new_sarc.write()[1], error
def merge_aamp_files(file: str, tree: dict):
    """Merge logged AAMP edits into a game SARC and write the result to the
    master modpack.

    :param file: The SARC's path relative to the game root.
    :param tree: Tree of AAMP edits to apply within the SARC.
    """
    try:
        base_file = util.get_game_file(file)
    except FileNotFoundError:
        util.vprint(f"Skipping {file}, not found in dump")
        return
    out = util.get_master_modpack_dir() / file
    if out.exists():
        # prefer the already-merged copy so edits accumulate
        base_file = out
    pack = Sarc(util.unyaz_if_needed(base_file.read_bytes()))
    merged = _merge_in_sarc(pack, tree)
    if base_file.suffix.startswith(".s") and base_file.suffix != ".ssarc":
        merged = util.compress(merged)
    out.parent.mkdir(parents=True, exist_ok=True)
    out.write_bytes(merged)
def generate_diff(self, mod_dir: Path, modded_files: List[Union[Path, str]]):
    """Log game data flag changes when the mod alters gamedata.ssarc.

    :param mod_dir: Root of the extracted mod.
    :param modded_files: All files the mod changes (paths or nested strings).
    :return: The modded game data entries, or an empty dict when the mod
        does not touch gamedata.ssarc.
    """
    target = (
        f"{util.get_content_path()}/Pack/Bootup.pack//GameData/gamedata.ssarc"
    )
    if target not in modded_files:
        return {}
    print("Logging changes to game data flags...")
    bootup_path = mod_dir / util.get_content_path() / "Pack" / "Bootup.pack"
    bootup_sarc = oead.Sarc(util.unyaz_if_needed(bootup_path.read_bytes()))
    data_sarc = oead.Sarc(
        util.decompress(bootup_sarc.get_file("GameData/gamedata.ssarc").data)
    )
    diff = get_modded_gamedata_entries(data_sarc, pool=self._pool)
    del bootup_sarc
    del data_sarc
    return diff
def _merge_in_sarc(sarc: Sarc, edits: dict) -> ByteString:
    """Apply a tree of shop data edits to a SARC, recursing into nested packs.

    :param sarc: The base SARC to merge into.
    :param edits: Mapping of nested file name to either a sub-tree (dict,
        meaning a nested SARC) or a ``ParameterList`` of shop edits.
    :return: The rebuilt SARC bytes.
    """
    new_sarc = SarcWriter.from_sarc(sarc)
    for file, stuff in edits.items():
        if isinstance(stuff, dict):
            # nested SARC: recurse with its sub-tree of edits
            try:
                ofile = sarc.get_file(file)
                if ofile is None:  # fix: was `== None`
                    raise FileNotFoundError(f"Could not find nested file {file} in SARC")
                sub_sarc = Sarc(util.unyaz_if_needed(ofile.data))
            except (
                InvalidDataError,
                ValueError,
                AttributeError,
                RuntimeError,
                FileNotFoundError,
            ):
                util.vprint(f"Couldn't merge into nested SARC {file}")
                continue
            nsub_bytes = _merge_in_sarc(sub_sarc, stuff)
            # restore yaz0 compression for ".s*" nested entries
            new_sarc.files[file] = (
                util.compress(nsub_bytes)
                if file[file.rindex(".") :].startswith(".s")
                else nsub_bytes
            )
        elif isinstance(stuff, ParameterList):
            try:
                ofile = sarc.get_file(file)
                if ofile is None:  # fix: was `== None`
                    raise FileNotFoundError(f"Could not find nested file {file} in SARC")
                pio = ParameterIO.from_binary(ofile.data)
            except (
                AttributeError,
                ValueError,
                InvalidDataError,
                FileNotFoundError,
            ) as err:
                util.vprint(f"Couldn't open {file}: {err}")
                continue
            new_pio = merge_shopdata(pio, stuff)
            new_sarc.files[file] = new_pio.to_binary()
    return new_sarc.write()[1]
def _get_nest_file_sizes(
    file: str,
    contents: Contents,
    mod_dir: Path,
    guess: bool,
) -> Dict[str, int]:
    """Compute RSTB sizes for the contents of a nested SARC in a mod.

    :param file: The SARC's path relative to the mod root.
    :param contents: Tree of nested contents (name -> subtree; falsy = leaf).
    :param mod_dir: Root of the extracted mod.
    :param guess: Whether size calculation may fall back to estimation.
    :return: Mapping of canonical names to sizes (empty if unreadable).
    """
    def get_sizes_in_sarc(
        sarc: oead.Sarc, contents: Contents, guess: bool, dlc: bool
    ) -> Dict[str, int]:
        # Recursive worker: leaf entries (falsy subtree) are sized directly;
        # truthy subtrees are treated as nested SARCs and recursed into.
        prefix = "" if not dlc else "Aoc/0010/"
        vals = {}
        for file, subs in contents.items():
            if not subs:
                if file[file.rindex(".") :] in EXCLUDE_EXTS:
                    continue
                canon = prefix + file.replace(".s", ".")
                vals[canon] = calculate_size(canon, sarc.get_file(file).data, guess)
            else:
                ext = file[file.rindex(".") :]
                if ext in EXCLUDE_EXTS:
                    continue
                data = util.unyaz_if_needed(sarc.get_file(file).data)
                canon = prefix + file.replace(".s", ".")
                vals[canon] = calculate_size(canon, data, guess)
                if ext not in SARC_EXCLUDES:
                    try:
                        subsarc = oead.Sarc(data)
                    except (ValueError, RuntimeError, oead.InvalidDataError):
                        continue
                    vals.update(get_sizes_in_sarc(subsarc, subs, guess, dlc))
        return vals

    dlc = util.get_dlc_path() in file
    vals = {}
    try:
        sarc = oead.Sarc(util.unyaz_if_needed((mod_dir / file).read_bytes()))
    except (ValueError, RuntimeError, oead.InvalidDataError):
        return {}
    vals.update(get_sizes_in_sarc(sarc, contents, guess, dlc))
    return vals
def _merge_in_sarc(sarc: Sarc, edits: dict) -> ByteString:
    """Apply a tree of AAMP parameter edits to a SARC, recursing into
    nested packs.

    :param sarc: The base SARC to merge into.
    :param edits: Mapping of nested file name to either a sub-tree (dict,
        meaning a nested SARC) or a ``ParameterList`` of edits.
    :return: The rebuilt SARC bytes.
    """
    writer = SarcWriter.from_sarc(sarc)
    for file, stuff in edits.items():
        if isinstance(stuff, dict):
            # nested SARC: recurse with its sub-tree of edits
            try:
                nested = Sarc(util.unyaz_if_needed(sarc.get_file(file).data))
            except (InvalidDataError, ValueError, AttributeError, RuntimeError):
                util.vprint(f"Couldn't merge into nested SARC {file}")
                continue
            merged = _merge_in_sarc(nested, stuff)
            # restore yaz0 compression for ".s*" nested entries
            if file[file.rindex("."):].startswith(".s"):
                merged = util.compress(merged)
            writer.files[file] = merged
        elif isinstance(stuff, ParameterList):
            try:
                pio = ParameterIO.from_binary(sarc.get_file(file).data)
            except (AttributeError, ValueError, InvalidDataError) as e:
                util.vprint(f"Couldn't open {file}: {e}")
                continue
            # merge_plists mutates pio in place
            merge_plists(pio, stuff)
            writer.files[file] = pio.to_binary()
    return writer.write()[1]
def install_mod(
    mod: Path,
    options: dict = None,
    selects: dict = None,
    pool: Optional[multiprocessing.pool.Pool] = None,
    insert_priority: int = 0,
    merge_now: bool = False,
    updated: bool = False,
):
    """Install a mod into the BCML modpack directory.

    :param mod: The mod to install (packaged file or extracted directory).
    :param options: Merger options and disabled-merger list.
    :param selects: Mod option-group selections to apply.
    :param pool: Optional process pool to reuse for log generation/merging.
    :param insert_priority: Load-order priority; 0 means "next available".
    :param merge_now: Run all mergers immediately after installing.
    :param updated: Skip re-prioritizing existing mods (update in place).
    :return: The installed ``BcmlMod``.
    :raises util.InstallError: If opening, processing, or moving the mod fails.
    :raises util.MergeError: If post-install merging fails.
    """
    if not insert_priority:
        insert_priority = get_next_priority()
    # Stage the mod contents into a temp directory
    try:
        if isinstance(mod, str):
            mod = Path(mod)
        if mod.is_file():
            print("Opening mod...")
            tmp_dir = open_mod(mod)
        elif mod.is_dir():
            if not ((mod / "rules.txt").exists() or (mod / "info.json").exists()):
                print(
                    f"Cannot open mod at {str(mod)}, no rules.txt or info.json found"
                )
                return
            print(f"Loading mod from {str(mod)}...")
            tmp_dir = Path(mkdtemp())
            if tmp_dir.exists():
                shutil.rmtree(tmp_dir)
            shutil.copytree(str(mod), str(tmp_dir))
            if (mod / "rules.txt").exists() and not (mod / "info.json").exists():
                print("Upgrading old mod format...")
                upgrade.convert_old_mod(mod, delete_old=True)
        else:
            print(f"Error: {str(mod)} is neither a valid file nor a directory")
            return
    except Exception as err:  # pylint: disable=broad-except
        raise util.InstallError(err) from err
    if not options:
        options = {"options": {}, "disable": []}
    this_pool: Optional[multiprocessing.pool.Pool] = None  # type: ignore
    # Read metadata, validate dependencies/platform, and generate logs
    try:
        rules = json.loads((tmp_dir / "info.json").read_text("utf-8"))
        mod_name = rules["name"].strip(" '\"").replace("_", "")
        print(f"Identified mod: {mod_name}")
        if rules["depends"]:
            try:
                installed_metas = {
                    v[0]: v[1]
                    for m in util.get_installed_mods()
                    for v in util.BcmlMod.meta_from_id(m.id)
                }
            except (IndexError, TypeError) as err:
                raise RuntimeError(
                    f"This BNP has invalid or corrupt dependency data.")
            for depend in rules["depends"]:
                depend_name, depend_version = util.BcmlMod.meta_from_id(depend)
                if (depend_name not in installed_metas) or (
                        depend_name in installed_metas
                        and depend_version > installed_metas[depend_name]):
                    raise RuntimeError(
                        f"{mod_name} requires {depend_name} version {depend_version}, "
                        f"but it is not installed. Please install {depend_name} and "
                        "try again.")
        friendly_plaform = lambda p: "Wii U" if p == "wiiu" else "Switch"
        user_platform = "wiiu" if util.get_settings("wiiu") else "switch"
        if rules["platform"] != user_platform:
            # NOTE(review): message contains a stray trailing quote — confirm
            raise ValueError(
                f'"{mod_name}" is for {friendly_plaform(rules["platform"])}, not '
                f" {friendly_plaform(user_platform)}.'")
        if "priority" in rules and rules["priority"] == "base":
            insert_priority = 100
        logs = tmp_dir / "logs"
        if logs.exists():
            # Pre-generated logs shipped with the mod: drop disabled mergers
            print("Loading mod logs...")
            for merger in [
                    merger()  # type: ignore
                    for merger in mergers.get_mergers()
                    if merger.NAME in options["disable"]
            ]:
                if merger.is_mod_logged(BcmlMod(tmp_dir)):
                    (tmp_dir / "logs" / merger.log_name).unlink()
        else:
            # No logs: pack loose SARCs and generate logs ourselves
            this_pool = pool or Pool(maxtasksperchild=500)
            dev._pack_sarcs(tmp_dir, util.get_hash_table(util.get_settings("wiiu")),
                            this_pool)
            generate_logs(tmp_dir=tmp_dir, options=options, pool=this_pool)
            if not util.get_settings("strip_gfx"):
                (tmp_dir / ".processed").touch()
    except Exception as err:  # pylint: disable=broad-except
        try:
            name = mod_name
        except NameError:
            name = "your mod, the name of which could not be detected"
        raise util.InstallError(err, name) from err
    # Apply option-group selections: remove unselected groups, hard-link the
    # selected ones over the base files (merging overlapping SARCs)
    if selects:
        for opt_dir in {d for d in (tmp_dir / "options").glob("*") if d.is_dir()}:
            if opt_dir.name not in selects:
                shutil.rmtree(opt_dir, ignore_errors=True)
            else:
                file: Path
                for file in {
                        f
                        for f in opt_dir.rglob("**/*")
                        if ("logs" not in f.parts and f.is_file())
                }:
                    out = tmp_dir / file.relative_to(opt_dir)
                    out.parent.mkdir(parents=True, exist_ok=True)
                    try:
                        os.link(file, out)
                    except FileExistsError:
                        if file.suffix in util.SARC_EXTS:
                            # merge option SARC over the existing one,
                            # option files taking precedence
                            try:
                                old_sarc = oead.Sarc(
                                    util.unyaz_if_needed(out.read_bytes()))
                            except (ValueError, oead.InvalidDataError, RuntimeError):
                                out.unlink()
                                os.link(file, out)
                            try:
                                link_sarc = oead.Sarc(
                                    util.unyaz_if_needed(file.read_bytes()))
                            except (ValueError, oead.InvalidDataError, RuntimeError):
                                del old_sarc
                                continue
                            new_sarc = oead.SarcWriter.from_sarc(link_sarc)
                            link_files = {f.name for f in link_sarc.get_files()}
                            for sarc_file in old_sarc.get_files():
                                if sarc_file.name not in link_files:
                                    new_sarc.files[sarc_file.name] = bytes(
                                        sarc_file.data)
                            del old_sarc
                            del link_sarc
                            out.write_bytes(new_sarc.write()[1])
                            del new_sarc
                        else:
                            out.unlink()
                            os.link(file, out)
    # The merged RSTB is rebuilt later; never ship a stale copy
    rstb_path = (tmp_dir / util.get_content_path() / "System" / "Resource" /
                 "ResourceSizeTable.product.srsizetable")
    if rstb_path.exists():
        rstb_path.unlink()
    priority = insert_priority
    print(f"Assigned mod priority of {priority}")
    mod_id = util.get_mod_id(mod_name, priority)
    mod_dir = util.get_modpack_dir() / mod_id
    # Move the staged mod into the modpack directory and write metadata
    try:
        if not updated:
            # shift existing mods up to make room at this priority
            for existing_mod in util.get_installed_mods(True):
                if existing_mod.priority >= priority:
                    existing_mod.change_priority(existing_mod.priority + 1)
        if (tmp_dir / "patches").exists() and not util.get_settings("no_cemu"):
            patch_dir = (util.get_cemu_dir() / "graphicPacks" / f"bcmlPatches" /
                         util.get_safe_pathname(rules["name"]))
            patch_dir.mkdir(parents=True, exist_ok=True)
            for file in {f for f in (tmp_dir / "patches").rglob("*") if f.is_file()}:
                out = patch_dir / file.relative_to(tmp_dir / "patches")
                out.parent.mkdir(parents=True, exist_ok=True)
                shutil.copyfile(file, out)
        mod_dir.parent.mkdir(parents=True, exist_ok=True)
        print(f"Moving mod to {str(mod_dir)}...")
        if mod.is_file():
            try:
                shutil.move(str(tmp_dir), str(mod_dir))
            except Exception:  # pylint: disable=broad-except
                # cross-device moves can fail: fall back to copy + delete
                try:
                    shutil.rmtree(str(mod_dir))
                    shutil.copytree(str(tmp_dir), str(mod_dir))
                    shutil.rmtree(str(tmp_dir), ignore_errors=True)
                except Exception:  # pylint: disable=broad-except
                    raise OSError(
                        "BCML could not transfer your mod from the temp directory to the"
                        " BCML directory.")
        elif mod.is_dir():
            shutil.copytree(str(tmp_dir), str(mod_dir))
            shutil.rmtree(tmp_dir, ignore_errors=True)
        rules["priority"] = priority
        (mod_dir / "info.json").write_text(json.dumps(rules,
                                                      ensure_ascii=False,
                                                      indent=2),
                                           encoding="utf-8")
        (mod_dir / "options.json").write_text(json.dumps(options,
                                                         ensure_ascii=False,
                                                         indent=2),
                                              encoding="utf-8")
        output_mod = BcmlMod(mod_dir)
        try:
            util.get_mod_link_meta(rules)
            util.get_mod_preview(output_mod)
        except Exception:  # pylint: disable=broad-except
            pass
    except Exception as err:  # pylint: disable=broad-except
        # roll back a partially-installed mod before re-raising
        if mod_dir.exists():
            try:
                uninstall_mod(mod_dir, wait_merge=True)
            except Exception:  # pylint: disable=broad-except
                shutil.rmtree(str(mod_dir))
        raise util.InstallError(err, mod_name) from err
    try:
        if merge_now:
            for merger in [m() for m in mergers.get_mergers()]:
                if this_pool or pool:
                    merger.set_pool(this_pool or pool)
                if merger.NAME in options["options"]:
                    merger.set_options(options["options"][merger.NAME])
                merger.perform_merge()
    except Exception as err:  # pylint: disable=broad-except
        raise util.MergeError(err) from err
    if this_pool and not pool:
        # only close a pool we created ourselves
        this_pool.close()
        this_pool.join()
    return output_mod
def merge_sarcs(file_name: str, sarcs: List[Union[Path, bytes]]) -> "Tuple[str, bytes]":
    """Merges multiple versions of a SARC into a single SARC.

    :param file_name: The name of the SARC being merged. Used to trigger
        bootup injection when merging a Bootup.pack.
    :param sarcs: The versions of the SARC to merge, as file paths or raw
        bytes, ordered from lowest to highest priority.
    :return: A tuple of the SARC's name and the merged SARC's bytes.
    """
    opened_sarcs: List[oead.Sarc] = []
    if isinstance(sarcs[0], Path):
        for i, sarc_path in enumerate(sarcs):
            sarcs[i] = sarc_path.read_bytes()
    for sarc_bytes in sarcs:
        sarc_bytes = util.unyaz_if_needed(sarc_bytes)
        try:
            opened_sarcs.append(oead.Sarc(sarc_bytes))
        except (ValueError, RuntimeError, oead.InvalidDataError):
            # Skip any version that is not a valid SARC
            continue
    all_files = {
        file.name for open_sarc in opened_sarcs for file in open_sarc.get_files()
    }
    nested_sarcs = {}
    new_sarc = oead.SarcWriter(
        endian=oead.Endianness.Big
        if util.get_settings("wiiu")
        else oead.Endianness.Little
    )
    files_added = set()
    # Iterate highest priority first: the first version of each file wins
    for opened_sarc in reversed(opened_sarcs):
        for file in [f for f in opened_sarc.get_files() if f.name not in files_added]:
            file_data = oead.Bytes(file.data)
            if (
                file.name[file.name.rindex("."):] in util.SARC_EXTS - EXCLUDE_EXTS
            ) and file.name not in SPECIAL:
                # Collect every version of a nested SARC for recursive merging
                if file.name not in nested_sarcs:
                    nested_sarcs[file.name] = []
                nested_sarcs[file.name].append(util.unyaz_if_needed(file_data))
            elif util.is_file_modded(
                file.name.replace(".s", "."), file_data, count_new=True
            ):
                new_sarc.files[file.name] = file_data
                files_added.add(file.name)
    util.vprint(set(nested_sarcs.keys()))
    # Recursively merge nested SARCs, restoring low-to-high priority order
    for nest_name, nest_versions in nested_sarcs.items():
        if not nest_versions:
            continue
        merged_bytes = merge_sarcs(nest_name, nest_versions[::-1])[1]
        if Path(nest_name).suffix.startswith(".s") and not nest_name.endswith(".sarc"):
            merged_bytes = util.compress(merged_bytes)
        new_sarc.files[nest_name] = merged_bytes
        files_added.add(nest_name)
    # Fill in unmodified files from the first version that contains them
    for file in [file for file in all_files if file not in files_added]:
        for opened_sarc in [
            open_sarc
            for open_sarc in opened_sarcs
            if file in [f.name for f in open_sarc.get_files()]
        ]:
            new_sarc.files[file] = oead.Bytes(opened_sarc.get_file(file).data)
            break
    if "Bootup.pack" in file_name:
        # Give mergers a chance to inject their merged data into Bootup.pack
        for merger in [
            merger() for merger in mergers.get_mergers() if merger.is_bootup_injector()
        ]:
            inject = merger.get_bootup_injection()
            if not inject:
                continue
            inject_name, inject_data = inject
            new_sarc.files[inject_name] = inject_data
    return (file_name, bytes(new_sarc.write()[1]))
def convert_mod(mod: Path, to_wiiu: bool, warn_only: bool = False) -> list:
    """Converts an installed mod folder between Wii U and Switch formats in place.

    Rewrites logs, BYML files, actor packs, and SARCs; renames the content/aoc
    folders; regenerates the RSTB log; and recurses into any mod options.

    :param mod: The root folder of the mod to convert.
    :param to_wiiu: True to convert to Wii U format, False to convert to Switch.
    :param warn_only: If True, collect conversion problems as warnings instead
        of raising, defaults to False.
    :return: A list of warning strings (empty unless warn_only is True).
    :raises ValueError: If warn_only is False and an unconvertible file or
        change is found.
    """
    warnings = []

    # Either raise immediately or accumulate the problem, per warn_only
    def handle_warning(warning: str) -> None:
        if not warn_only:
            raise ValueError(warning)
        else:
            warnings.append(warning)

    to_content: str
    from_content: str
    to_aoc: str
    from_aoc: str
    # Wii U uses content/aoc folder names; Switch uses title-ID romfs paths
    if to_wiiu:
        to_content = "content"
        from_content = "01007EF00011E000/romfs"
        to_aoc = "aoc/0010"
        from_aoc = "01007EF00011F001/romfs"
    else:
        to_content = "01007EF00011E000/romfs"
        from_content = "content"
        to_aoc = "01007EF00011F001/romfs"
        from_aoc = "aoc/0010"

    # Files handled specially elsewhere and excluded from bulk BYML conversion
    special_files = {"ActorInfo.product.sbyml"}
    # All real files in the mod, excluding anything under an options folder
    all_files = {
        f for f in mod.rglob("**/*.*") if f.is_file() and "options" not in f.parts
    }
    for file in all_files:
        if file.suffix in NO_CONVERT_EXTS:
            handle_warning(
                "This mod contains a file which the platform converter does not support:"
                f" {str(file.relative_to(mod))}")
    actorinfo_log = mod / "logs" / "actorinfo.yml"
    if actorinfo_log.exists():
        actorinfo = oead.byml.from_text(actorinfo_log.read_text("utf-8"))
        # instSize values are platform-specific and cannot be converted
        if any("instSize" in actor for _, actor in actorinfo.items()):
            handle_warning(
                "This mod contains changes to actor instSize values, "
                "which cannot be automatically converted.")
        del actorinfo
    # Swap platform path prefixes in the plain-text JSON logs
    for log in {"drops.json", "packs.json"}:
        log_path = mod / "logs" / log
        if log_path.exists():
            text = log_path.read_text("utf-8").replace("\\\\", "/").replace("\\", "/")
            log_path.write_text(
                text.replace(from_content, to_content).replace(from_aoc, to_aoc),
                "utf-8",
            )
    # Swap platform path prefixes in the binary AAMP logs via a text round-trip
    for log in {"deepmerge.aamp", "shop.aamp"}:
        log_path = mod / "logs" / log
        if log_path.exists():
            pio = oead.aamp.ParameterIO.from_binary(log_path.read_bytes())
            if "deepmerge" in log:
                # Register FileN hashes so the file table round-trips by name
                table = oead.aamp.get_default_name_table()
                for i in range(len(pio.objects["FileTable"].params)):
                    table.add_name(f"File{i}")
            text = pio.to_text().replace("\\\\", "/").replace("\\", "/")
            log_path.write_bytes(
                oead.aamp.ParameterIO.from_text(
                    text.replace(from_content, to_content).replace(from_aoc,
                                                                   to_aoc)).to_binary())
    # Rewrite loose BYML files with the target platform's endianness
    for file in {
        f for f in all_files if f.suffix in BYML_EXTS
        and f.name not in special_files
    }:
        byml = oead.byml.from_binary(util.unyaz_if_needed(file.read_bytes()))
        file.write_bytes(oead.byml.to_binary(byml, big_endian=to_wiiu))
    with Pool(maxtasksperchild=500) as pool:
        # Convert actor packs and other SARCs in parallel; workers return
        # error strings (or falsy) rather than raising
        errs = pool.map(
            partial(_convert_actorpack, to_wiiu=to_wiiu),
            {f for f in all_files if f.suffix == ".sbactorpack"},
        )
        for err in errs:
            if err:
                handle_warning(err)
        errs = pool.map(
            partial(_convert_sarc_file, to_wiiu=to_wiiu),  # type: ignore
            {
                f for f in all_files if f.suffix in SARC_EXTS
                if f.suffix != ".sbactorpack"
            },
        )
        for err in errs:
            if err:
                handle_warning(err)
        # Rename the platform-specific content and DLC folders
        if (mod / from_content).exists():
            shutil.move(mod / from_content, mod / to_content)  # type: ignore
        if (mod / from_aoc).exists():
            shutil.move(mod / from_aoc, mod / to_aoc)  # type: ignore
        # Regenerate the RSTB log under the target platform's settings
        with TempSettingsContext({"wiiu": to_wiiu}):
            rstb_log = mod / "logs" / "rstb.json"
            if rstb_log.exists():
                # pylint: disable=import-outside-toplevel
                rstb_log.unlink()
                from bcml.install import find_modded_files
                from bcml.mergers.rstable import RstbMerger
                files = find_modded_files(mod, pool)
                merger = RstbMerger()
                merger.set_pool(pool)
                merger.log_diff(mod, files)
    # Recursively convert any optional components of the mod
    if (mod / "options").exists():
        for folder in {d for d in (mod / "options").glob("*") if d.is_dir()}:
            convert_mod(folder, to_wiiu=to_wiiu, warn_only=warn_only)
    # Record the new platform in the mod metadata
    meta = loads((mod / "info.json").read_text("utf-8"))
    meta["platform"] = "wiiu" if to_wiiu else "switch"
    (mod / "info.json").write_text(dumps(meta, indent=2, ensure_ascii=False), "utf-8")
    return warnings
def find_modded_sarc_files(mod_sarc: Union[Path, sarc.SARC],
                           tmp_dir: Path,
                           name: str = '',
                           aoc: bool = False,
                           verbose: bool = False) -> List[str]:
    """
    Detects all of the modified files in a SARC

    :param mod_sarc: The SARC to scan for modded files, as a path or an
        already-opened SARC (used when recursing into nested SARCs).
    :type mod_sarc: class:`typing.Union[class:pathlib.Path, class:sarc.SARC]`
    :param tmp_dir: The path to the base directory of the mod.
    :type tmp_dir: class:`pathlib.Path`
    :param name: The name of the SARC which contains the current SARC.
    :type name: str
    :param aoc: Specifies whether the SARC is DLC content, defaults to False.
    :type aoc: bool, optional
    :param verbose: Specifies whether to print more detailed output.
    :type verbose: bool, optional
    :return: A list of nested paths ('parent//file') for each modded file found.
    :rtype: List[str]
    """
    if isinstance(mod_sarc, Path):
        # Bootup_*.pack files (language packs) are handled elsewhere
        if mod_sarc.name.startswith('Bootup_'):
            return []
        name = str(mod_sarc.relative_to(tmp_dir))
        aoc = 'aoc' in mod_sarc.parts or 'Aoc' in mod_sarc.parts
        with mod_sarc.open('rb') as s_file:
            mod_sarc = sarc.read_file_and_make_sarc(s_file)
        if not mod_sarc:
            return []
    modded_files = []
    for file in mod_sarc.list_files():
        # Canonical name: uncompressed extension, DLC prefix if applicable
        canon = file.replace('.s', '.')
        if aoc:
            canon = 'Aoc/0010/' + canon
        contents = mod_sarc.get_file_data(file).tobytes()
        contents = util.unyaz_if_needed(contents)
        nest_path = str(name).replace('\\', '/') + '//' + file
        if util.is_file_modded(canon, contents, True):
            modded_files.append(nest_path)
            if verbose:
                print(
                    f'Found modded file {canon} in {str(name).replace("//", "/")}'
                )
            # Recurse into nested SARCs (except stock-compressed .ssarc)
            if util.is_file_sarc(canon) and '.ssarc' not in file:
                try:
                    nest_sarc = sarc.SARC(contents)
                except ValueError:
                    continue
                sub_mod_files = find_modded_sarc_files(nest_sarc,
                                                       name=nest_path,
                                                       tmp_dir=tmp_dir,
                                                       aoc=aoc,
                                                       verbose=verbose)
                modded_files.extend(sub_mod_files)
        else:
            if verbose:
                print(
                    f'Ignored unmodded file {canon} in {str(name).replace("//", "/")}'
                )
    return modded_files