def _clean_sarc(old_sarc: oead.Sarc, base_sarc: oead.Sarc) -> Optional[oead.SarcWriter]:
    """Build a SARC containing only the files in ``base_sarc`` that differ from
    (or are absent in) ``old_sarc``, recursing into nested SARCs.

    Returns None when nothing differs (the caller may then delete the file).
    """
    old_files = {f.name for f in old_sarc.get_files()}
    # Endianness follows the current target console setting.
    new_sarc = oead.SarcWriter(endian=oead.Endianness.Big if util.get_settings(
        "wiiu") else oead.Endianness.Little)
    can_delete = True
    for nest_file, file_data in [(f.name, f.data) for f in base_sarc.get_files()]:
        ext = Path(nest_file).suffix
        if ext in {".yml", ".bak"}:
            continue
        if nest_file in old_files:
            # old_data is only bound here; the comparison below is short-circuit
            # guarded by "nest_file not in old_files", so it is never read unbound.
            old_data = util.unyaz_if_needed(old_sarc.get_file(nest_file).data)
            file_data = util.unyaz_if_needed(file_data)
        # Keep new files, and changed files that are not AAMP (AAMP diffs are
        # handled elsewhere as logs rather than whole-file copies).
        if nest_file not in old_files or (file_data != old_data and ext not in util.AAMP_EXTS):
            if (ext in util.SARC_EXTS and nest_file in old_files and nest_file not in SPECIAL):
                # Nested SARC present on both sides: recurse and keep only the diff.
                nest_old_sarc = oead.Sarc(old_data)
                nest_base_sarc = oead.Sarc(file_data)
                nest_new_sarc = _clean_sarc(nest_old_sarc, nest_base_sarc)
                if nest_new_sarc:
                    new_bytes = nest_new_sarc.write()[1]
                    # ".s*" extensions (except plain ".sarc") are yaz0-compressed.
                    if ext.startswith(".s") and ext != ".sarc":
                        new_bytes = util.compress(new_bytes)
                    new_sarc.files[nest_file] = oead.Bytes(new_bytes)
                    can_delete = False
                else:
                    continue
            else:
                # NOTE(review): when nest_file is NOT in old_files, file_data was
                # never unyaz'd above, so a ".s*" file may be compressed twice
                # here — confirm base_sarc stores such entries uncompressed.
                if ext.startswith(".s") and ext != ".sarc":
                    file_data = util.compress(file_data)
                new_sarc.files[nest_file] = oead.Bytes(file_data)
                can_delete = False
    return None if can_delete else new_sarc
def get_sizes_in_sarc(
    sarc: oead.Sarc, contents: Contents, guess: bool, dlc: bool
) -> Dict[str, int]:
    """Compute RSTB size values for the files named in *contents* inside *sarc*.

    *contents* is either a flat list of file names or a dict mapping nested
    SARC names to their own contents; DLC entries get the "Aoc/0010/" prefix.
    """
    prefix = "Aoc/0010/" if dlc else ""
    vals: Dict[str, int] = {}
    if isinstance(contents, list):
        for entry in contents:
            if entry[entry.rindex(".") :] in EXCLUDE_EXTS:
                continue
            canon = prefix + entry.replace(".s", ".")
            vals[canon] = calculate_size(canon, sarc.get_file(entry).data, guess)
    elif isinstance(contents, dict):
        for nested_name, nested_contents in contents.items():
            ext = nested_name[nested_name.rindex(".") :]
            if ext in EXCLUDE_EXTS:
                continue
            data = util.unyaz_if_needed(sarc.get_file(nested_name).data)
            canon = prefix + nested_name.replace(".s", ".")
            vals[canon] = calculate_size(canon, data, guess)
            if ext in SARC_EXCLUDES:
                continue
            # Recurse into the nested archive if it parses as a SARC.
            try:
                nested_sarc = oead.Sarc(data)
            except (ValueError, RuntimeError, oead.InvalidDataError):
                continue
            vals.update(get_sizes_in_sarc(nested_sarc, nested_contents, guess, dlc))
    return vals
def get_sarc_tree(parent_sarc: Sarc) -> Tuple[dict, list]:
    """Build a nested path tree of *parent_sarc*'s contents and collect the
    names of files whose data differs from the stock hash table."""
    tree: dict = {}
    modded: set = set()
    for entry in sorted(parent_sarc.get_files(), key=lambda f: f.name):
        parts = Path(entry.name).parts
        magic = entry.data[0:4]
        subtree: dict = {}
        yazd = magic == b"Yaz0"
        if magic == b"SARC" or (yazd and entry.data[0x11:0x15] == b"SARC"):
            # Nested (possibly compressed) SARC: recurse for its own tree.
            inner = Sarc(decompress(entry.data) if yazd else entry.data)
            subtree, inner_modded = get_sarc_tree(inner)
            modded.update(inner_modded)
            del inner
        _dict_merge(
            tree,
            reduce(lambda acc, part: {part: acc}, reversed(parts), subtree),
        )
        table = util.get_hashtable(parent_sarc.get_endianness() == Endianness.Big)
        if table.is_file_modded(entry.name.replace(".s", "."), bytes(entry.data)):
            modded.add(entry.name)
    return tree, modded
def _unbuild_sarc(
    s: oead.Sarc, output: Path, skip_actorpack: bool = False, skip_texts: bool = False
):
    """Extract *s* into the *output* directory, recursively unbuilding nested
    SARCs, converting AAMP/BYML files to YAML, and exporting text packs.

    Returns the set of canonical (de-".s") names of all files seen.
    """
    # These giant terrain resource packs are always kept as-is.
    SKIP_SARCS = {
        "tera_resource.Cafe_Cafe_GX2.release.ssarc",
        "tera_resource.Nin_NX_NVN.release.ssarc",
    }
    output.mkdir(parents=True, exist_ok=True)
    # Marker file recording that entries were stored with a leading slash.
    if any(f.name.startswith("/") for f in s.get_files()):
        (output / ".slash").write_bytes(b"")
    names = set()
    for sarc_file in s.get_files():
        sf = sarc_file.name
        osf = output / sf
        names.add(sf.replace(".s", "."))
        if sf.startswith("/"):
            osf = output / sf[1:]
        osf.parent.mkdir(parents=True, exist_ok=True)
        ext = osf.suffix
        if ext in SARC_EXTS:
            # Keep skip-listed packs (and text packs when skip_texts) verbatim.
            if osf.name in SKIP_SARCS or (osf.name.startswith("Msg_") and skip_texts):
                osf.write_bytes(sarc_file.data)
                continue
            try:
                ss = oead.Sarc(_if_unyaz(sarc_file.data))
                if (
                    "bactorpack" in ext
                    and output.stem == "TitleBG"
                    and not skip_actorpack
                ):
                    # TitleBG actor packs are unbuilt into the project root.
                    names.update(_unbuild_actorpack(ss, output.parent.parent))
                else:
                    names.update(_unbuild_sarc(ss, osf, skip_texts=skip_texts))
                del ss
            except ValueError:
                # NOTE(review): unparseable nested SARC is replaced with an
                # empty file — presumably a deliberate placeholder; confirm.
                osf.write_bytes(b"")
        elif ext in AAMP_EXTS:
            # Don't overwrite an existing YAML conversion.
            if osf.with_suffix(f"{osf.suffix}.yml").exists():
                continue
            osf.with_suffix(f"{osf.suffix}.yml").write_bytes(
                _aamp_to_yml(sarc_file.data)
            )
        elif ext in BYML_EXTS:
            osf.with_suffix(f"{osf.suffix}.yml").write_bytes(
                _byml_to_yml(sarc_file.data)
            )
        else:
            osf.write_bytes(sarc_file.data)
    if "Msg_" in output.name:
        # Export the text archive with pymsyt, then replace the raw dump
        # directory with the exported one (renamed back to the original name).
        pymsyt.export(output, output)
        rmtree(output)
        output.with_suffix("").rename(output)
    if output.suffix in {".ssarc", ".sarc"}:
        # Record the alignment so the archive can be rebuilt faithfully.
        (output / ".align").write_text(str(s.guess_min_alignment()))
    return names
def get_shop_diffs(file: str, tree: dict, tmp_dir: Path) -> Optional[dict]:
    """Diff the shop data in *file* under *tmp_dir* against the stock game copy.

    Returns None when either side cannot be opened as a SARC.
    """
    open_errors = (FileNotFoundError, InvalidDataError, ValueError, RuntimeError)
    try:
        ref_sarc = Sarc(util.unyaz_if_needed(util.get_game_file(file).read_bytes()))
    except open_errors as err:
        util.vprint(f"{file} ignored on stock side, cuz {err}")
        return None
    try:
        sarc = Sarc(util.unyaz_if_needed((tmp_dir / file).read_bytes()))
    except open_errors:
        util.vprint(f"{file} corrupt, ignored")
        return None
    diffs = _get_diffs_from_sarc(sarc, ref_sarc, tree, file)
    # Release the archives before returning.
    del sarc
    del ref_sarc
    return diffs
def nested_patch(pack: oead.Sarc, nest: dict) -> Tuple[oead.SarcWriter, dict]:
    """Apply the nested shop edits in *nest* to *pack*.

    Returns the patched SarcWriter and a dict of failures keyed by nested path.

    Fixes: sub-failures were being assigned into the loop variable (a str,
    which would raise TypeError) instead of the ``failures`` dict; the bare
    ``except`` discarded the original error instead of chaining it; and the
    failure message read "could be merged" instead of "could not be merged".
    """
    new_sarc: oead.SarcWriter = oead.SarcWriter.from_sarc(pack)
    failures: dict = {}
    for file, stuff in nest.items():
        file_bytes = pack.get_file(file).data
        yazd = file_bytes[0:4] == b"Yaz0"
        file_bytes = util.decompress(file_bytes) if yazd else file_bytes
        if isinstance(stuff, dict):
            # Nested SARC: recurse and record any sub-failures under their full path.
            sub_sarc = oead.Sarc(file_bytes)
            new_sub_sarc, sub_failures = nested_patch(sub_sarc, stuff)
            for failure, value in sub_failures.items():
                failures[file + "//" + failure] = value
            del sub_sarc
            new_bytes = bytes(new_sub_sarc.write()[1])
            new_sarc.files[file] = new_bytes if not yazd else util.compress(new_bytes)
        elif isinstance(stuff, ParameterList):
            try:
                if file_bytes[0:4] == b"AAMP":
                    aamp_contents = ParameterIO.from_binary(file_bytes)
                    try:
                        file_ext = os.path.splitext(file)[1]
                        aamp_contents = shop_merge(
                            aamp_contents,
                            file_ext.replace(".", ""),
                            stuff.lists["Additions"],
                            stuff.lists["Removals"],
                        )
                        aamp_bytes = ParameterIO.to_binary(aamp_contents)
                    except Exception as err:  # pylint: disable=broad-except
                        raise RuntimeError(
                            f"AAMP file {file} could not be merged."
                        ) from err
                    del aamp_contents
                    new_bytes = aamp_bytes if not yazd else util.compress(aamp_bytes)
                    cache_merged_shop(file, new_bytes)
                else:
                    raise ValueError(
                        "Wait, what the heck, this isn't an AAMP file?!")
            except ValueError:
                # Leave the original file untouched on failure.
                new_bytes = pack.get_file(file).data
                print(f"Deep merging {file} failed. No changes were made.")
            new_sarc.files[file] = oead.Bytes(new_bytes)
    return new_sarc, failures
def _unbuild_actorpack(s: oead.Sarc, output: Path):
    """Dump actor parameter files as YAML and physics files verbatim from an
    actor pack, returning the names of every file in the pack."""
    output.mkdir(parents=True, exist_ok=True)
    for entry in {f for f in s.get_files() if "Actor" in f.name}:
        dest = (output / entry.name).with_suffix(f"{Path(entry.name).suffix}.yml")
        dest.parent.mkdir(parents=True, exist_ok=True)
        magic = entry.data[0:4]
        if magic == b"AAMP":
            dest.write_bytes(_aamp_to_yml(entry.data))
        elif entry.data[0:2] in [b"BY", b"YB"]:
            dest.write_bytes(_byml_to_yml(entry.data))
    for entry in s.get_files():
        # Physics support files are copied as-is (skip actor params handled above).
        if "Physics" not in entry.name or "Actor" in entry.name:
            continue
        dest = output / entry.name
        dest.parent.mkdir(parents=True, exist_ok=True)
        dest.write_bytes(entry.data)
    return {f.name for f in s.get_files()}
def open_sarc(sarc: Union[Path, Sarc]) -> Tuple[Sarc, dict, list]:
    """Open *sarc* (a path or an already-parsed Sarc) and return the archive,
    its nested file tree, and the list of modified file names."""
    global sarcs
    sarcs.clear()
    if isinstance(sarc, Path):
        sarc = Sarc(util.unyaz_if_yazd(sarc.read_bytes()))

    def get_sarc_tree(parent_sarc: Sarc) -> Tuple[dict, list]:
        # Recursively map nested SARC contents and collect modified file names.
        tree: dict = {}
        modded: set = set()
        for entry in sorted(parent_sarc.get_files(), key=lambda f: f.name):
            parts = Path(entry.name).parts
            magic = entry.data[0:4]
            subtree: dict = {}
            yazd = magic == b"Yaz0"
            if magic == b"SARC" or (yazd and entry.data[0x11:0x15] == b"SARC"):
                inner = Sarc(decompress(entry.data) if yazd else entry.data)
                subtree, inner_modded = get_sarc_tree(inner)
                modded.update(inner_modded)
                del inner
            _dict_merge(
                tree,
                reduce(lambda acc, part: {part: acc}, reversed(parts), subtree),
            )
            table = util.get_hashtable(parent_sarc.get_endianness() == Endianness.Big)
            if table.is_file_modded(entry.name.replace(".s", "."), bytes(entry.data)):
                modded.add(entry.name)
        return tree, modded

    tree, modded = get_sarc_tree(sarc)
    return sarc, tree, list(modded)
def get_modded_savedata_entries(savedata: oead.Sarc) -> Hash:
    """Return the savedata flag entries added and removed relative to stock."""
    ref_savedata = get_stock_savedata().get_files()
    # Flag hashes present in the stock savedata (last two files skipped).
    ref_hashes = set()
    for file in sorted(ref_savedata, key=lambda f: f.name)[0:-2]:
        for item in oead.byml.from_binary(file.data)["file_list"][1]:
            ref_hashes.add(int(item["HashValue"]))
    new_entries = oead.byml.Array()
    mod_hashes: set = set()
    for file in savedata.get_files():
        data = oead.byml.from_binary(file.data)
        # Only the game_data.sav flag lists are of interest.
        if data["file_list"][0]["file_name"] != "game_data.sav":
            continue
        entries = data["file_list"][1]
        mod_hashes.update(int(item["HashValue"]) for item in entries)
        new_entries.extend(
            item for item in entries if int(item["HashValue"]) not in ref_hashes
        )
    del ref_savedata
    removed = oead.byml.Array(
        oead.S32(h) for h in ref_hashes if h not in mod_hashes
    )
    return Hash({"add": new_entries, "del": removed})
def get_modded_savedata_entries(savedata: oead.Sarc) -> oead.byml.Hash:
    """Return the savedata flag entries added and removed relative to stock.

    Fixes: the return annotation was the expression ``{}`` (an empty dict,
    not a type); new entries were collected via a set comprehension, which
    requires the byml items to be hashable and scrambles their order — a
    generator preserving file order is used instead (matching the sibling
    implementation of this function).
    """
    ref_savedata = get_stock_savedata().get_files()
    # Flag hashes present in the stock savedata; the final two files are
    # skipped, matching the slice used on the modded side below.
    ref_hashes = {
        int(item["HashValue"])
        for file in sorted(ref_savedata, key=lambda f: f.name)[0:-2]
        for item in oead.byml.from_binary(file.data)["file_list"][1]
    }
    new_entries = oead.byml.Array()
    mod_hashes = set()
    for file in sorted(
        savedata.get_files(),
        key=lambda f: f.name,
    )[0:-2]:
        entries = oead.byml.from_binary(file.data)["file_list"][1]
        mod_hashes |= {int(item["HashValue"]) for item in entries}
        new_entries.extend(
            item for item in entries if int(item["HashValue"]) not in ref_hashes
        )
    del ref_savedata
    return oead.byml.Hash({
        "add": new_entries,
        "del": oead.byml.Array(
            oead.S32(item) for item in ref_hashes if item not in mod_hashes
        ),
    })
def is_savedata_modded(savedata: oead.Sarc) -> bool:
    """Report whether any savedata flag file differs from its stock hash.

    Fixes: the return annotation was the expression ``{}`` instead of
    ``bool``; also returns as soon as a modified file is found rather than
    hashing every remaining file after the answer is known.
    """
    hashes = get_savedata_hashes()
    sv_files = sorted(savedata.get_files(), key=lambda file: file.name)
    # Stock hash keys apparently include a leading slash; add one when the
    # archive's entries lack it.
    fix_slash = "/" if not sv_files[0].name.startswith("/") else ""
    # The final two files are not flag data and are skipped.
    for svdata in sv_files[0:-2]:
        name = fix_slash + svdata.name
        if name not in hashes or xxhash.xxh64_intdigest(svdata.data) != hashes[name]:
            return True
    return False
def _merge_in_sarc(sarc: Sarc, edits: dict) -> ByteString:
    """Apply nested *edits* to *sarc* and return the rebuilt archive bytes.

    dict values are recursed into as nested SARCs; ParameterList values are
    merged into the corresponding AAMP shop file.

    Fixes: ``ofile == None`` replaced with the idiomatic ``ofile is None``
    (identity check; ``==`` may invoke arbitrary ``__eq__``).
    """
    new_sarc = SarcWriter.from_sarc(sarc)
    for file, stuff in edits.items():
        if isinstance(stuff, dict):
            try:
                ofile = sarc.get_file(file)
                if ofile is None:
                    raise FileNotFoundError(f"Could not find nested file {file} in SARC")
                sub_sarc = Sarc(util.unyaz_if_needed(ofile.data))
            except (
                InvalidDataError,
                ValueError,
                AttributeError,
                RuntimeError,
                FileNotFoundError,
            ):
                util.vprint(f"Couldn't merge into nested SARC {file}")
                continue
            nsub_bytes = _merge_in_sarc(sub_sarc, stuff)
            # ".s*" extensions mark yaz0-compressed files: recompress on the way out.
            new_sarc.files[file] = (
                util.compress(nsub_bytes)
                if file[file.rindex(".") :].startswith(".s")
                else nsub_bytes
            )
        elif isinstance(stuff, ParameterList):
            try:
                ofile = sarc.get_file(file)
                if ofile is None:
                    raise FileNotFoundError(f"Could not find nested file {file} in SARC")
                pio = ParameterIO.from_binary(ofile.data)
            except (
                AttributeError,
                ValueError,
                InvalidDataError,
                FileNotFoundError,
            ) as err:
                util.vprint(f"Couldn't open {file}: {err}")
                continue
            new_pio = merge_shopdata(pio, stuff)
            new_sarc.files[file] = new_pio.to_binary()
    return new_sarc.write()[1]
def _get_diffs_from_sarc(sarc: Sarc, ref_sarc: Sarc, edits: dict, path: str) -> dict:
    """Collect AAMP diffs for every file named in *edits* between *sarc* and
    *ref_sarc*, recursing when an edit entry has nested contents.

    Fixes: the loop variable shadowed the ``edits`` parameter (renamed to
    ``sub_edits``); the "Failed to parse AAMP" re-raise now chains the
    original exception with ``from err``.
    """
    diffs = {}
    for file, sub_edits in edits.items():
        if sub_edits:
            # Non-empty value means *file* is itself a nested SARC: recurse.
            try:
                rsub_sarc = Sarc(
                    util.unyaz_if_needed(ref_sarc.get_file(file).data))
            except (AttributeError, InvalidDataError, ValueError, RuntimeError) as err:
                util.vprint(f'Skipping "{path}//{file}", {err}')
                continue
            sub_sarc = Sarc(util.unyaz_if_needed(sarc.get_file(file).data))
            diffs.update(
                _get_diffs_from_sarc(sub_sarc, rsub_sarc, sub_edits, path + "//" + file))
            del sub_sarc
            del rsub_sarc
        else:
            full_path = f"{path}//{file}"
            try:
                ref_pio = ParameterIO.from_binary(ref_sarc.get_file(file).data)
            except AttributeError:
                # File absent on the reference side: nothing to diff against.
                continue
            try:
                pio = ParameterIO.from_binary(sarc.get_file(file).data)
            except AttributeError as err:
                raise ValueError(
                    f"Failed to read nested file:\n{path}//{file}") from err
            except (ValueError, RuntimeError, InvalidDataError) as err:
                raise ValueError(
                    f"Failed to parse AAMP file:\n{path}//{file}") from err
            diffs.update({full_path: get_aamp_diff(pio, ref_pio)})
    return diffs
def _get_diffs_from_sarc(sarc: Sarc, ref_sarc: Sarc, edits: Union[dict, list],
                         path: str) -> dict:
    """Collect AAMP diffs between *sarc* and *ref_sarc* for the files named in
    *edits*; a dict value recurses into a nested SARC, a list diffs leaf files."""
    diffs: dict = {}
    if isinstance(edits, dict):
        for name, nested_edits in edits.items():
            try:
                ref_sub = Sarc(util.unyaz_if_needed(ref_sarc.get_file(name).data))
            except (AttributeError, InvalidDataError, ValueError, RuntimeError) as e:
                util.vprint(f'Skipping "{path}//{name}", {e}')
                continue
            mod_sub = Sarc(util.unyaz_if_needed(sarc.get_file(name).data))
            diffs.update(
                _get_diffs_from_sarc(mod_sub, ref_sub, nested_edits, path + "//" + name)
            )
            del mod_sub
            del ref_sub
    else:
        for name in edits:
            full_path = f"{path}//{name}"
            try:
                ref_pio = ParameterIO.from_binary(ref_sarc.get_file(name).data)
            except AttributeError:
                # Not present on the reference side: nothing to diff.
                continue
            pio = ParameterIO.from_binary(sarc.get_file(name).data)
            diffs[full_path] = get_aamp_diff(pio, ref_pio)
    return diffs
def _merge_in_sarc(sarc: Sarc, edits: dict) -> ByteString:
    """Apply nested *edits* to *sarc* and return the rebuilt archive bytes."""
    writer = SarcWriter.from_sarc(sarc)
    for name, edit in edits.items():
        if isinstance(edit, dict):
            # Nested SARC edit: recurse, then recompress ".s*" files.
            try:
                nested = Sarc(util.unyaz_if_needed(sarc.get_file(name).data))
            except (InvalidDataError, ValueError, AttributeError, RuntimeError):
                util.vprint(f"Couldn't merge into nested SARC {name}")
                continue
            merged = _merge_in_sarc(nested, edit)
            ext = name[name.rindex(".") :]
            writer.files[name] = util.compress(merged) if ext.startswith(".s") else merged
        elif isinstance(edit, ParameterList):
            # Leaf AAMP edit: merge the parameter lists in place.
            try:
                pio = ParameterIO.from_binary(sarc.get_file(name).data)
            except (AttributeError, ValueError, InvalidDataError) as e:
                util.vprint(f"Couldn't open {name}: {e}")
                continue
            merge_plists(pio, edit)
            writer.files[name] = pio.to_binary()
    return writer.write()[1]
def merge_aamp_files(file: str, tree: dict):
    """Merge the AAMP edits in *tree* into *file* and write the result to the
    master modpack.

    Fixes the compression guard: everywhere else in this module a file is
    yaz0-compressed when its extension starts with ".s" EXCEPT plain ".sarc";
    the old test excluded ".ssarc" instead, leaving ".ssarc" output
    uncompressed and wrongly compressing ".sarc".
    """
    try:
        base_file = util.get_game_file(file)
    except FileNotFoundError:
        util.vprint(f"Skipping {file}, not found in dump")
        return
    # Prefer an already-merged copy in the master modpack when present.
    if (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    sarc = Sarc(util.unyaz_if_needed(base_file.read_bytes()))
    new_data = _merge_in_sarc(sarc, tree)
    if base_file.suffix.startswith(".s") and base_file.suffix != ".sarc":
        new_data = util.compress(new_data)
    (util.get_master_modpack_dir() / file).parent.mkdir(parents=True, exist_ok=True)
    (util.get_master_modpack_dir() / file).write_bytes(new_data)
def replace_file(root_sarc: Sarc, file: str, new_data: bytes) -> Sarc:
    """Replace *file* (a "//"-separated nested path) in *root_sarc* with
    *new_data*, rebuilding every ancestor archive, and return the new root.

    Fixes: when propagating through more than one level of nesting, the
    rebuilt child archive was keyed by the full "//" path instead of its
    name inside the immediate parent, adding a stray entry rather than
    replacing the nested SARC.
    """
    if file.endswith("/"):
        file = file[0:-1]
    parent = get_parent_sarc(root_sarc, file)
    filename = file.split("//")[-1]
    new_sarc: SarcWriter = SarcWriter.from_sarc(parent)
    new_sarc.files[filename] = new_data
    # Walk back up, rebuilding each ancestor until we reach the root.
    while root_sarc != parent:
        _, child = new_sarc.write()
        file = file[0:file.rindex("//")]
        if file.endswith("/"):
            file = file[:-1]
        parent = get_parent_sarc(root_sarc, file)
        new_sarc = SarcWriter.from_sarc(parent)
        ext = file[file.rindex("."):]
        # Key inside the parent is the last path component, not the full path.
        new_sarc.files[file.split("//")[-1]] = (
            child if not (ext.startswith(".s") and ext != ".sarc") else compress(child)
        )
    return Sarc(new_sarc.write()[1])
def get_parent_sarc(root_sarc: Sarc, file: str) -> Sarc:
    """Return the SARC that directly contains *file* (a "//"-separated path),
    descending from *root_sarc* and recording each opened archive in ``sarcs``.

    Fixes: the re-raised FileNotFoundError now chains the original
    AttributeError; the error message no longer names ``nests[-1]`` (the last
    element, via ``nests[i - 1]`` with ``i == 0``) when the first level is
    missing — it names the root SARC instead.
    """
    if file.endswith("/"):
        file = file[0:-1]
    nests = file.replace("SARC:", "").split("//")
    parent = root_sarc
    i = 0
    global sarcs
    while i < len(nests) - 1:
        try:
            nest_file = parent.get_file(nests[i])
            # get_file returns None when absent; .data then raises AttributeError.
            sarc_bytes = util.unyaz_if_yazd(nest_file.data)
        except AttributeError as err:
            container = nests[i - 1] if i else "root SARC"
            raise FileNotFoundError(
                f"Could not find file {nests[i]} in {container}"
            ) from err
        nest_sarc = Sarc(sarc_bytes)
        del parent
        parent = nest_sarc
        sarcs.append(parent)
        i += 1
    return parent
def _convert_sarc(sarc: oead.Sarc, to_wiiu: bool) -> Tuple[bytes, List[str]]:
    """Convert *sarc* (including nested SARCs and BYML files) between Wii U
    and Switch endianness, returning the new bytes and any error messages."""
    errors: List[str] = []
    writer = oead.SarcWriter.from_sarc(sarc)
    writer.set_endianness(oead.Endianness.Big if to_wiiu else oead.Endianness.Little)
    for file in sarc.get_files():
        ext = file.name[file.name.rindex(".") :]
        if ext in NO_CONVERT_EXTS:
            errors.append(
                f"This mod contains a file not supported by the converter: {file.name}"
            )
            continue
        if ext in BYML_EXTS:
            # NOTE(review): the converted BYML is stored without re-yaz0
            # compression even for ".s*" names — confirm this is intended.
            node = oead.byml.from_binary(util.unyaz_if_needed(file.data))
            writer.files[file.name] = oead.byml.to_binary(node, big_endian=to_wiiu)
        elif ext in SARC_EXTS:
            nested = oead.Sarc(util.unyaz_if_needed(file.data))
            nested_bytes, nested_errors = _convert_sarc(nested, to_wiiu)
            recompress = ext.startswith(".s") and ext != ".sarc"
            writer.files[file.name] = (
                util.compress(nested_bytes) if recompress else nested_bytes
            )
            errors.extend(nested_errors)
    return writer.write()[1], errors
def rename_file(root_sarc: Sarc, file: str, new_name: str) -> Sarc:
    """Rename *file* (a "//"-separated nested path) in *root_sarc* to
    *new_name*, rebuilding every ancestor archive, and return the new root.

    Raises ValueError when *new_name* contains filesystem-reserved characters.

    Fixes: when propagating through more than one level of nesting, the
    rebuilt child archive was keyed by the full "//" path instead of its
    name inside the immediate parent.
    """
    if file.endswith("/"):
        file = file[0:-1]
    if any(char in new_name for char in r"""\/:*?"'<>|"""):
        raise ValueError(f"{new_name} is not a valid file name.")
    parent = get_parent_sarc(root_sarc, file)
    filename = file.split("//")[-1]
    new_sarc: SarcWriter = SarcWriter.from_sarc(parent)
    # Copy the data under the new name, then drop the old entry.
    new_sarc.files[(Path(filename).parent / new_name).as_posix()] = Bytes(
        parent.get_file(filename).data)
    del new_sarc.files[filename]
    # Walk back up, rebuilding each ancestor until we reach the root.
    while root_sarc != parent:
        _, child = new_sarc.write()
        file = file[0:file.rindex("//")]
        if file.endswith("/"):
            file = file[:-1]
        parent = get_parent_sarc(root_sarc, file)
        new_sarc = SarcWriter.from_sarc(parent)
        ext = file[file.rindex("."):]
        # Key inside the parent is the last path component, not the full path.
        new_sarc.files[file.split("//")[-1]] = (
            child if not (ext.startswith(".s") and ext != ".sarc") else compress(child)
        )
    return Sarc(new_sarc.write()[1])
def consolidate_gamedata(gamedata: oead.Sarc) -> dict:
    """Merge every BYML file in *gamedata* into a single dict.

    Fixes: the return annotation was the expression ``{}`` (an empty dict
    literal), not a type.
    """
    data: dict = {}
    for file in gamedata.get_files():
        util.dict_merge(data, oead.byml.from_binary(file.data))
    del gamedata
    return data
def init(self):
    """Read the archive at ``self.file``, yaz0-decompress it, and return it
    as a parsed Sarc.

    Fixes: removed the pointless ``+ ''`` concatenation on the path, which
    also crashed for non-``str`` values such as ``pathlib.Path``.
    """
    with open(self.file, 'rb') as f:
        data = yaz0.decompress(f.read())
    return Sarc(data)
def update_from_folder(sarc: Sarc, folder: Path) -> Sarc:
    """Return a copy of *sarc* with every file under *folder* added or
    replaced, keyed by its path relative to *folder*.

    Fixes: ``rglob("**/*")`` double-applies the recursive pattern, since
    ``rglob`` already prefixes ``**/`` to its argument; ``rglob("*")`` is the
    correct recursive walk.
    """
    writer: SarcWriter = SarcWriter.from_sarc(sarc)
    for file in folder.rglob("*"):
        if file.is_file():
            writer.files[file.relative_to(folder).as_posix()] = file.read_bytes()
    return Sarc(writer.write()[1])