def get_archive(self):
    """Lazily parse and cache the SARC archive backing this object.

    On first access, the wrapped data is parsed into a SARC; if the
    archive's first listed file begins with "/", the leading-slash flag
    is recorded for later path handling.
    """
    if not self._archive:
        raw = self._data_wrapper.get_data_clear()
        self._archive = sarc.SARC(raw)
        first_entry = list(self._archive.list_files())[0]
        if first_entry.startswith("/"):
            self._leading_slash = True
    return self._archive
def get_msbt_hashes(lang: str = 'USen') -> dict:
    """ Gets the MSBT hash table for the given language, or US English by default

    :param lang: The game language to use, defaults to USen.
    :type lang: str, optional
    :returns: A dictionary of MSBT files and their vanilla hashes.
    :rtype: dict of str: str
    """
    # Tables are cached per language on the function object so each is
    # only built once per process.
    if not hasattr(get_msbt_hashes, 'texthashes'):
        get_msbt_hashes.texthashes = {}
    if lang not in get_msbt_hashes.texthashes:
        hash_table = util.get_exec_dir() / 'data' / 'msyt' / \
            f'Msg_{lang}_hashes.csv'
        if hash_table.exists():
            # Fast path: a pre-generated CSV of vanilla hashes ships with
            # the tool for this language.
            get_msbt_hashes.texthashes[lang] = {}
            with hash_table.open('r') as h_file:
                csv_loop = csv.reader(h_file)
                for row in csv_loop:
                    get_msbt_hashes.texthashes[lang][row[0]] = row[1]
        elif util.get_game_file(f'Pack/Bootup_{lang}.pack').exists():
            # Slow path: hash every MSBT in the vanilla bootup pack.
            get_msbt_hashes.texthashes[lang] = {}
            with util.get_game_file(f'Pack/Bootup_{lang}.pack').open(
                    'rb') as b_file:
                bootup_pack = sarc.read_file_and_make_sarc(b_file)
            msg_bytes = util.decompress(
                bootup_pack.get_file_data(
                    f'Message/Msg_{lang}.product.ssarc').tobytes())
            msg_pack = sarc.SARC(msg_bytes)
            for msbt in msg_pack.list_files():
                get_msbt_hashes.texthashes[lang][msbt] = xxhash.xxh32(
                    msg_pack.get_file_data(msbt)).hexdigest()
    return get_msbt_hashes.texthashes[lang]
def load_pbc(name):
    """Dump debug info for every .pbc collision file in the named archive.

    Opens ``pbc/<name>_pbc.Nin_NX_NVN.zs``, decompresses it, then for each
    contained entry prints the header fields and rasterises the per-cell
    bytes at +0x30..+0x33 into a 2x-scaled greyscale image (the image is
    built for inspection only; nothing is returned or saved here).
    """
    blob = open('pbc/' + name + '_pbc.Nin_NX_NVN.zs', 'rb').read()
    blob = zstandard.ZstdDecompressor().decompress(blob)
    arc = sarc.SARC(blob)
    # The loop variable previously shadowed the `name` parameter; renamed
    # (the parameter is not needed past this point).
    for entry_name in arc.list_files():
        blob = arc.get_file_data(entry_name)
        assert blob[0:4] == b'pbc\0'
        w, h, offset_x, offset_y = struct.unpack_from('<iiii', blob, 4)
        print('%40s | %5d %5d %5d %5d' % (entry_name, w, h, offset_x, offset_y))
        # Collect the distinct values of each of the 12 per-cell floats.
        sets = [set() for i in range(12)]
        img = Image.new('RGBA', (w*2, h*2))
        pix = img.load()
        offset = 0x14
        for y in range(h):
            for x in range(w):
                for i in range(12):
                    f = struct.unpack_from('<f', blob, offset + i * 4)[0]
                    sets[i].add(f)
                # Four bytes per cell map onto a 2x2 pixel block.
                a = blob[offset + 0x30]
                b = blob[offset + 0x31]
                c = blob[offset + 0x32]
                d = blob[offset + 0x33]
                pix[x*2, y*2] = (a, a, a, 255)
                pix[x*2, y*2+1] = (b, b, b, 255)
                pix[x*2+1, y*2+1] = (c, c, c, 255)
                pix[x*2+1, y*2] = (d, d, d, 255)
                offset += 0x34
def get_nested_file_bytes(file: str, unyaz: bool = True) -> bytes:
    """ Get the contents of a file nested inside one or more SARCs

    :param file: A string containing the nested SARC path to the file
    :type file: str
    :param unyaz: Whether to decompress the file if yaz0 compressed, defaults to True
    :type unyaz: bool, optional
    :return: Returns the bytes to the file
    :rtype: bytes
    """
    nests = file.split('//')
    sarcs = []
    with open(nests[0], 'rb') as s_file:
        sarcs.append(sarc.read_file_and_make_sarc(s_file))
    # Descend through each intermediate SARC level in the nested path.
    for nested_name in nests[1:-1]:
        sarc_bytes = unyaz_if_needed(
            sarcs[-1].get_file_data(nested_name).tobytes())
        sarcs.append(sarc.SARC(sarc_bytes))
    file_bytes = sarcs[-1].get_file_data(nests[-1]).tobytes()
    if unyaz and file_bytes[0:4] == b'Yaz0':
        file_bytes = decompress(file_bytes)
    del sarcs
    return file_bytes
def _get_sizes_in_sarc(file: Union[Path, sarc.SARC]) -> dict:
    """Calculate RSTB size values for every eligible file nested in a SARC.

    :param file: A SARC, or the path to one, to scan recursively.
    :return: A dict mapping canonical resource names to their RSTB sizes
        (0 where no size could be calculated).
    """
    calc = rstb.SizeCalculator()
    sizes = {}
    guess = util.get_settings_bool('guess_merge')
    if isinstance(file, Path):
        with file.open('rb') as s_file:
            file = sarc.read_file_and_make_sarc(s_file)
        if not file:
            return {}
    for nest_file in file.list_files():
        canon = nest_file.replace('.s', '.')
        data = util.unyaz_if_needed(file.get_file_data(nest_file).tobytes())
        ext = Path(canon).suffix
        if util.is_file_modded(canon, data) and ext not in RSTB_EXCLUDE_EXTS \
                and canon not in RSTB_EXCLUDE_NAMES:
            size = calc.calculate_file_size_with_ext(
                data,
                wiiu=True,
                ext=ext
            )
            # .bdmgparam sizes are not calculable; force the guess path.
            if ext == '.bdmgparam':
                size = 0
            if size == 0 and guess:
                if ext in util.AAMP_EXTS:
                    size = guess_aamp_size(data, ext)
                elif ext in ['.bfres', '.sbfres']:
                    size = guess_bfres_size(data, canon)
            sizes[canon] = size
        # Recurse into nested (non-yaz0 .ssarc) SARCs.
        if util.is_file_sarc(nest_file) and not nest_file.endswith('.ssarc'):
            try:
                nest_sarc = sarc.SARC(data)
            except ValueError:
                continue
            sizes.update(_get_sizes_in_sarc(nest_sarc))
    return sizes
def extract_ref_msyts(lang: str = 'USen', for_merge: bool = False,
                      tmp_dir: Path = util.get_work_dir() / 'tmp_text'):
    """ Extracts the reference MSYT texts for the given language to a temp dir

    :param lang: The game language to use, defaults to USen.
    :type lang: str, optional
    :param for_merge: Whether the output is to be merged (or as reference),
    defaults to False
    :type for_merge: bool
    :param tmp_dir: The temp directory to extract to, defaults to "tmp_text" in
    BCML's working directory.
    :type tmp_dir: class:`pathlib.Path`, optional
    """
    # Always start from a clean temp directory.
    if tmp_dir.exists():
        shutil.rmtree(tmp_dir, ignore_errors=True)
    with util.get_game_file(f'Pack/Bootup_{lang}.pack').open('rb') as b_file:
        bootup_pack = sarc.read_file_and_make_sarc(b_file)
    msg_bytes = util.decompress(
        bootup_pack.get_file_data(
            f'Message/Msg_{lang}.product.ssarc').tobytes())
    msg_pack = sarc.SARC(msg_bytes)
    # Reference texts go under 'ref'; texts destined for merging go under
    # 'merged'.
    merge_dir = tmp_dir / ('merged' if for_merge else 'ref')
    msg_pack.extract_to_dir(str(merge_dir))
    msbt_to_msyt(merge_dir)
def merge_sarcs(file_name: str, sarcs: List[Union[Path, bytes]]) -> tuple:
    """Merge multiple versions of the same SARC into one.

    Files are taken from the highest-priority (last listed) SARC that
    modifies them; nested SARCs are merged recursively; unmodified files
    are taken from the first SARC containing them. For Bootup.pack, any
    registered bootup-injecting mergers also get their payloads injected.

    :param file_name: The name of the SARC being merged.
    :param sarcs: The versions of the SARC, as paths or raw bytes.
    :return: A tuple of the SARC's name and the merged SARC bytes.
    """
    opened_sarcs: List[sarc.SARC] = []
    if isinstance(sarcs[0], Path):
        for i, sarc_path in enumerate(sarcs):
            sarcs[i] = sarc_path.read_bytes()
    for sarc_bytes in sarcs:
        sarc_bytes = util.unyaz_if_needed(sarc_bytes)
        try:
            opened_sarcs.append(sarc.SARC(sarc_bytes))
        except ValueError:
            continue

    all_files = {key for open_sarc in opened_sarcs
                 for key in open_sarc.list_files()}
    nested_sarcs = {}
    new_sarc = sarc.SARCWriter(be=True)
    files_added = []

    # Walk from highest to lowest priority, keeping the first modified
    # copy of each plain file; nested SARCs are collected per name and
    # merged recursively below.
    for opened_sarc in reversed(opened_sarcs):
        for file in [file for file in opened_sarc.list_files()
                     if file not in files_added]:
            data = opened_sarc.get_file_data(file).tobytes()
            if util.is_file_modded(file.replace('.s', '.'), data,
                                   count_new=True):
                if Path(file).suffix not in util.SARC_EXTS:
                    new_sarc.add_file(file, data)
                    files_added.append(file)
                else:
                    if file not in nested_sarcs:
                        nested_sarcs[file] = []
                    nested_sarcs[file].append(util.unyaz_if_needed(data))
    # Merge each nested SARC's collected versions. (The loop variable no
    # longer shadows the `sarcs` parameter.)
    for file, versions in nested_sarcs.items():
        merged_bytes = merge_sarcs(file, versions)[1]
        if Path(file).suffix.startswith('.s') and not file.endswith('.sarc'):
            merged_bytes = util.compress(merged_bytes)
        new_sarc.add_file(file, merged_bytes)
        files_added.append(file)
    # Files not modified anywhere: copy from the first SARC that has them.
    for file in [file for file in all_files if file not in files_added]:
        for opened_sarc in [open_sarc for open_sarc in opened_sarcs
                            if file in open_sarc.list_files()]:
            new_sarc.add_file(file, opened_sarc.get_file_data(file).tobytes())
            break

    if 'Bootup.pack' in file_name:
        for merger in [merger() for merger in mergers.get_mergers()
                       if merger.is_bootup_injector()]:
            inject = merger.get_bootup_injection()
            if not inject:
                continue
            file, data = inject
            try:
                new_sarc.delete_file(file)
            except KeyError:
                pass
            new_sarc.add_file(file, data)
    return (file_name, new_sarc.get_bytes())
def get_stock_gamedata() -> sarc.SARC:
    """ Gets the contents of the unmodded gamedata.sarc """
    # Cache the parsed SARC on the function object so the bootup pack is
    # only read and decompressed once per process.
    if not hasattr(get_stock_gamedata, 'gamedata'):
        with util.get_game_file('Pack/Bootup.pack').open('rb') as b_file:
            bootup = sarc.read_file_and_make_sarc(b_file)
        raw_gamedata = util.decompress(
            bootup.get_file_data('GameData/gamedata.ssarc'))
        get_stock_gamedata.gamedata = sarc.SARC(raw_gamedata)
    return get_stock_gamedata.gamedata
def get_text_mods_from_bootup(bootup_path: Union[Path, str],
                              tmp_dir: Path = util.get_work_dir() / 'tmp_text',
                              verbose: bool = False, lang: str = ''):
    """ Detects modifications to text files inside a given Bootup_XXxx.pack

    :param bootup_path: Path to the Bootup_XXxx.pack file.
    :type bootup_path: class:`pathlib.Path`
    :param tmp_dir: The temp directory to use, defaults to "tmp_text" in BCML's
    working directory.
    :type tmp_dir: class:`pathlib.Path`
    :param verbose: Whether to display more detailed output, defaults to False.
    :type verbose: bool, optional
    :param lang: The game language of the pack; detected from the file name
    when empty.
    :type lang: str, optional
    :returns: Return a tuple containing a dict of modded text entries, a SARC
    containing added text MSBTs, and the game language of the bootup pack.
    :rtype: (dict, class:`sarc.SARCWriter`, str)
    """
    if not lang:
        lang = util.get_file_language(bootup_path)
    print(f'Scanning text modifications for language {lang}...')
    spaces = '  '
    if verbose:
        print(f'{spaces}Identifying modified text files...')
    # Pull the message SARC for this language out of the bootup pack.
    with open(bootup_path, 'rb') as b_file:
        bootup_sarc = sarc.read_file_and_make_sarc(b_file)
    msg_bytes = util.decompress(
        bootup_sarc.get_file_data(f'Message/Msg_{lang}.product.ssarc'))
    msg_sarc = sarc.SARC(msg_bytes)
    if not msg_sarc:
        print(f'Failed to open Msg_{lang}.product.ssarc, could not analyze texts')
    modded_msyts, added_msbts = get_modded_msyts(msg_sarc, lang)
    added_text_store = None
    if added_msbts:
        added_text_store = store_added_texts(added_msbts)
    if verbose:
        for modded_text in modded_msyts:
            print(f'{spaces}{spaces}{modded_text} has been changed')
        for added_text in added_msbts:
            print(f'{spaces}{spaces}{added_text} has been added')
    # Files that failed MSBT->MSYT conversion are dropped from the modded
    # list rather than treated as modifications.
    problems = msbt_to_msyt()
    for problem in problems:
        msyt_name = problem.relative_to(tmp_dir).with_suffix('.msyt').as_posix()
        try:
            modded_msyts.remove(msyt_name)
        except ValueError:
            pass
    if verbose:
        print(f'{spaces}Scanning texts files for modified entries...')
    modded_texts = get_modded_texts(modded_msyts, lang=lang)
    # Pluralise the summary counts correctly.
    s_modded = 's' if len(modded_texts) != 1 else ''
    s_added = 's' if len(added_msbts) != 1 else ''
    print(f'Language {lang} has total {len(modded_texts)} modified text file{s_modded} and '
          f'{len(added_msbts)} new text file{s_added}')
    shutil.rmtree(tmp_dir)
    return modded_texts, added_text_store, lang
def threaded_merge(item, verbose: bool) -> tuple:
    """Deep merges an individual file, suitable for multiprocessing

    :param item: A pair of the file's path (relative name) and the patches
        (dict for SARCs, list for AAMPs) to apply to it.
    :param verbose: Whether to print per-file progress output.
    :return: A tuple of the file's canonical name and a dict of any
        patches that failed to apply.
    """
    file, stuff = item
    failures = {}
    # Prefer the copy already merged into the master mod pack (if any)
    # over the stock game file as the merge base. (A second, redundant
    # reassignment guarded by the same .exists() check was removed.)
    base_file = util.get_game_file(file, file.startswith('aoc'))
    if (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    file_bytes = base_file.read_bytes()
    yazd = file_bytes[0:4] == b'Yaz0'
    file_bytes = file_bytes if not yazd else util.decompress(file_bytes)
    magic = file_bytes[0:4]
    if magic == b'SARC':
        new_sarc, sub_failures = nested_patch(sarc.SARC(file_bytes), stuff)
        del file_bytes
        new_bytes = new_sarc.get_bytes()
        for failure, contents in sub_failures.items():
            print(f'Some patches to {failure} failed to apply.')
            failures[failure] = contents
    else:
        try:
            if magic == b'AAMP':
                aamp_contents = aamp.Reader(file_bytes).parse()
                for change in stuff:
                    aamp_contents = _aamp_merge(aamp_contents, change)
                aamp_bytes = aamp.Writer(aamp_contents).get_bytes()
                del aamp_contents
                # BUGFIX: compression is applied once below for all paths;
                # compressing here as well double-compressed yaz0 AAMPs.
                new_bytes = aamp_bytes
            else:
                raise ValueError(f'{file} is not a SARC or AAMP file.')
        except ValueError:
            new_bytes = file_bytes
            del file_bytes
            print(f'Deep merging file {file} failed. No changes were made.')
    new_bytes = new_bytes if not yazd else util.compress(new_bytes)
    output_file = (util.get_master_modpack_dir() / file)
    # If we merged on top of the master copy, remove it before rewriting.
    if base_file == output_file:
        output_file.unlink()
    output_file.parent.mkdir(parents=True, exist_ok=True)
    output_file.write_bytes(new_bytes)
    del new_bytes
    if magic == b'SARC' and verbose:
        print(f'Finished patching files inside {file}')
    elif verbose:
        print(f'Finished patching {file}')
    return util.get_canon_name(file), failures
def generate_diff(self, mod_dir: Path, modded_files: List[Union[Path, str]]):
    """Return modded savedata entries if the mod touches savedataformat.

    If the mod's Bootup.pack does not contain a modified
    savedataformat.ssarc, an empty list is returned.
    """
    if 'content/Pack/Bootup.pack//GameData/savedataformat.ssarc' \
            not in modded_files:
        return []
    bootup_path = mod_dir / 'content' / 'Pack' / 'Bootup.pack'
    with bootup_path.open('rb') as bootup_file:
        bootup_sarc = sarc.read_file_and_make_sarc(bootup_file)
    savedata_bytes = util.decompress(
        bootup_sarc.get_file_data('GameData/savedataformat.ssarc').tobytes()
    )
    return get_modded_savedata_entries(sarc.SARC(savedata_bytes))
def nested_patch(pack: sarc.SARC, nest: dict) -> tuple:
    """ Recursively patches deep merge files in a SARC

    :param pack: The SARC in which to recursively patch.
    :type pack: class:`sarc.SARC`
    :param nest: A dict of nested patches to apply.
    :type nest: dict
    :return: Returns a new SARC with patches applied and a dict of any
    failed patches.
    :rtype: (class:`sarc.SARCWriter`, dict)
    """
    new_sarc = sarc.make_writer_from_sarc(pack)
    failures = {}
    for file, stuff in nest.items():
        file_bytes = pack.get_file_data(file).tobytes()
        yazd = file_bytes[0:4] == b'Yaz0'
        file_bytes = util.decompress(file_bytes) if yazd else file_bytes
        if isinstance(stuff, dict):
            # A dict of patches means another SARC level: recurse into it.
            sub_sarc = sarc.SARC(file_bytes)
            new_sarc.delete_file(file)
            new_sub_sarc, sub_failures = nested_patch(sub_sarc, stuff)
            for failure in sub_failures:
                # BUGFIX: failed sub-patches were being item-assigned onto
                # the str loop variable (`failure[...] = ...`), which would
                # raise TypeError; they belong in the `failures` dict.
                failures[file + '//' + failure] = sub_failures[failure]
            del sub_sarc
            new_bytes = new_sub_sarc.get_bytes()
            new_sarc.add_file(
                file, new_bytes if not yazd else util.compress(new_bytes))
        elif isinstance(stuff, list):
            # A list of patches means AAMP changes to apply to this file.
            try:
                if file_bytes[0:4] == b'AAMP':
                    aamp_contents = aamp.Reader(file_bytes).parse()
                    for change in stuff:
                        aamp_contents = _aamp_merge(aamp_contents, change)
                    aamp_bytes = aamp.Writer(aamp_contents).get_bytes()
                    del aamp_contents
                    new_bytes = aamp_bytes if not yazd else util.compress(
                        aamp_bytes)
                    cache_merged_aamp(file, new_bytes)
                else:
                    raise ValueError(
                        'Wait, what the heck, this isn\'t an AAMP file?!')
            except ValueError:
                # Fall back to the unpatched file on failure.
                new_bytes = pack.get_file_data(file).tobytes()
                print(f'Deep merging {file} failed. No changes were made.')
            new_sarc.delete_file(file)
            new_sarc.add_file(file, new_bytes)
    return new_sarc, failures
def generate_diff(self, mod_dir: Path, modded_files: List[Path]):
    """Calculate RSTB size values for every modded file in a mod.

    :param mod_dir: The root directory of the mod.
    :param modded_files: Modified files, as paths (loose files) or
        "pack//nested//file" strings (files inside SARCs).
    :return: A dict mapping each modded file to its RSTB size value.
    """
    rstb_diff = {}
    open_sarcs = {}
    for file in modded_files:
        if isinstance(file, Path):
            canon = util.get_canon_name(file.relative_to(mod_dir).as_posix())
            if Path(canon).suffix not in RSTB_EXCLUDE_EXTS and\
               Path(canon).name not in RSTB_EXCLUDE_NAMES:
                size = calculate_size(file)
                # .bdmgparam sizes are not calculable; force the guess path.
                if file.suffix == '.bdmgparam':
                    size = 0
                if size == 0 and self._options['guess']:
                    if file.suffix in util.AAMP_EXTS:
                        size = guess_aamp_size(file)
                    elif file.suffix in ['.bfres', '.sbfres']:
                        size = guess_bfres_size(file)
                rstb_diff[file] = size
        elif isinstance(file, str):
            # Nested path: open (and cache) each SARC level on the way down.
            parts = file.split('//')
            name = parts[-1]
            if parts[0] not in open_sarcs:
                with (mod_dir / parts[0]).open('rb') as s_file:
                    open_sarcs[parts[0]] = sarc.read_file_and_make_sarc(s_file)
            for part in parts[1:-1]:
                if part not in open_sarcs:
                    open_sarcs[part] = sarc.SARC(
                        util.unyaz_if_needed(
                            open_sarcs[parts[parts.index(part) - 1]]\
                                .get_file_data(part).tobytes()
                        )
                    )
            ext = Path(name).suffix
            data = util.unyaz_if_needed(
                open_sarcs[parts[-2]].get_file_data(name).tobytes())
            rstb_val = rstb.SizeCalculator().calculate_file_size_with_ext(
                data,
                wiiu=True,
                ext=ext
            )
            if ext == '.bdmgparam':
                rstb_val = 0
            if rstb_val == 0 and self._options['guess']:
                if ext in util.AAMP_EXTS:
                    rstb_val = guess_aamp_size(data, ext)
                elif ext in ['.bfres', '.sbfres']:
                    rstb_val = guess_bfres_size(data, name)
            rstb_diff[file] = rstb_val
    # BUGFIX: the original `for open_sarc in open_sarcs: del open_sarc`
    # only deleted the loop variable and released nothing; clear the
    # cache dict instead.
    open_sarcs.clear()
    return rstb_diff
def process_resource(device: file.FileDevice, rel_path: Path) -> None:
    """Extract one resource from the device into its destination form.

    Resource packs are recursed into; other files get their extension
    normalised (decompression/binary prefix chars stripped, text extension
    appended when converted), then are written out — SARC payloads are
    extracted to a directory of the same name instead of written whole.
    """
    data = device.read_file_and_decomp(rel_path)
    if is_resource_pack_path(rel_path):
        # Recurse into every resource contained in the pack.
        pack = sarc.SARC(data)
        pack_device = file.FileDeviceArchive(pack)
        for sub_path in get_resources(pack_device):
            process_resource(pack_device, sub_path)
        return
    dest_rel_path = file.remove_extension_prefix_char_from_path(rel_path, 's')
    text_ext, text_data_get = convert_binary_to_text(dest_rel_path, data)
    if text_ext:
        # Converted to text: drop the binary prefix char and append the
        # text extension.
        dest_rel_path = file.remove_extension_prefix_char_from_path(
            dest_rel_path, 'b')
        dest_rel_path = dest_rel_path.with_suffix(
            dest_rel_path.suffix + text_ext)
    dest_rel_path = file.fix_weird_looking_extensions(dest_rel_path)
    if is_aoc:
        dest_rel_path = change_paths_for_aoc_map_units(dest_rel_path)
        dest_rel_path = dest_rel_path.with_suffix(
            ".aoc" + dest_rel_path.suffix)
    dest_path = dest_dir / dest_rel_path
    if dest_path.is_file():
        # Already extracted previously.
        return
    if data[0:4] == b'SARC':
        sarc.SARC(data).extract_to_dir(str(dest_path))
        return
    dest_path.parent.mkdir(parents=True, exist_ok=True)
    with dest_path.open("wb") as f:
        f.write(text_data_get() if text_ext else data)
# NOTE(review): the first three statements are the tail of a label-block
# parser whose `def` line falls outside this chunk; they record each
# string's index and return the accumulated mapping.
        offset += 4
        results[string] = index
        return results

    def _load_atr1(self, data):
        # Attribute block: a count and a record size, followed by `count`
        # fixed-size records starting at byte 8.
        count, size = struct.unpack_from('<II', data, 0)
        return [data[8 + i * size:8 + i * size + size] for i in range(count)]

    def _load_txt2(self, data):
        # Text block: a count, then `count` absolute offsets to UTF-16LE
        # strings; the 2-byte NUL terminator of each string is dropped.
        count = struct.unpack_from('<I', data, 0)[0]
        offsets = list(struct.unpack_from('<%dI' % count, data, 4))
        offsets.append(len(data))  # dummy end offset
        return [
            codecs.decode(data[offsets[i]:offsets[i + 1] - 2], 'utf-16le')
            for i in range(count)
        ]


if __name__ == '__main__':
    import sys, sarc, zstandard
    # Usage: with one argument, list the archive's files; with a second
    # argument, dump the labels and strings of that MSBT file.
    arc = sarc.SARC(zstandard.ZstdDecompressor().decompress(
        open(sys.argv[1], 'rb').read()))
    if len(sys.argv) == 2:
        for f in sorted(arc.list_files()):
            print(f)
    else:
        msbt = MSBT()
        msbt.load(arc.get_file_data(sys.argv[2]))
        for label, index in sorted(msbt.labels.items()):
            print('%s: %r' % (label, msbt.strings[index]))
# BUGFIX: `json` is used at the bottom of this script but was never
# imported.
import json
import sys

import msbt
import sarc
import zstandard


def fixup(name):
    """Strip MSBT control codes from an item-name string.

    Removes the 6-character control-code prefix (sequences starting with
    '\x0e2') and replaces the player-name placeholder with '<name>'.
    """
    if name.startswith('\x0e2'):
        name = name[6:]
    name = name.replace('\x0en\x1e\0', '<name>')
    return name


# Read the zstd-compressed message SARC given on the command line and
# collect item id -> display name for every non-plural item label.
data = open(sys.argv[1], 'rb').read()
data = zstandard.ZstdDecompressor().decompress(data)
data = sarc.SARC(data)

all_names = {}
for name in data.list_files():
    if 'STR_ItemName' in name:
        m = msbt.MSBT()
        m.load(data.get_file_data(name))
        for label, index in m.labels.items():
            if not label.endswith('_pl'):
                # The item id is the number after the last underscore.
                item_id = int(label[label.rfind('_') + 1:], 10)
                all_names[item_id] = fixup(m.strings[index])

with open('item_names.json', 'w') as f:
    json.dump(all_names, f, sort_keys=True, indent=4)
# Paths to the game's message archives and BCSV data, taken from the
# command line.
messages_path = sys.argv[1]
bcsv_path = sys.argv[2]


def fixup(name):
    # Strip the 6-character control-code prefix and replace the
    # player-name placeholder with '<name>'.
    if name.startswith('\x0e2'):
        name = name[6:]
    name = name.replace('\x0en\x1e\0', '<name>')
    return name


output = {}

# MESSAGES
data = open(messages_path + '/String_EUen.sarc.zs', 'rb').read()
data = zstandard.ZstdDecompressor().decompress(data)
msgArc = sarc.SARC(data)

output['items'] = {}
outfitGroup = {}
for name in sorted(msgArc.list_files()):
    if 'STR_ItemName' in name:
        m = msbt.MSBT()
        m.load(msgArc.get_file_data(name))
        for label, index in m.labels.items():
            if not label.endswith('_pl'):
                # The item id is the number after the last underscore.
                item_id = int(label[label.rfind('_') + 1:], 10)
                output['items'][item_id] = fixup(m.strings[index])
    if 'STR_OutfitGroupColor' in name:
        m = msbt.MSBT()
        m.load(msgArc.get_file_data(name))
        # NOTE(review): this chunk appears truncated here — the
        # outfit-group handling continues beyond this view.
def import_pbc_arc(filename):
    """Load every PBC collision entry from a model archive into tile_pbcs."""
    raw = open(romfs_path + '/Model/' + filename, 'rb').read()
    decompressed = zstandard.ZstdDecompressor().decompress(raw)
    archive = sarc.SARC(decompressed)
    for entry in archive.list_files():
        tile_pbcs[entry] = pbc.PBC(archive.get_file_data(entry))
def load_single_pbc(name):
    """Load and parse the PBC collision file for the named model."""
    path = romfs_path + '/Model/' + name + '_pbc.Nin_NX_NVN.zs'
    compressed = open(path, 'rb').read()
    archive = sarc.SARC(zstandard.ZstdDecompressor().decompress(compressed))
    return pbc.PBC(archive.get_file_data(name + '.pbc'))
import sarc
import zstandard
import os
import sys

# Usage: script.py <input dir> <output dir> [<file name prefix filter>]
file_path = sys.argv[1]
output_path = sys.argv[2]
file_filter = (sys.argv[3] if len(sys.argv) > 3 else '')

# get all files in Message
# (Replaces an immediately-invoked lambda with a plain comprehension.)
files = [
    os.path.join(file_path, x)
    for x in os.listdir(file_path)
    if x.endswith('sarc.zs') and x.startswith(file_filter)
]

for file in files:
    archive = sarc.SARC(zstandard.decompress(open(file, 'rb').read()))
    for file_name in archive.list_files():
        # Mirror the archive's internal folder layout under output_path.
        folders = file_name.split('/')
        folders, name = folders[:-1], folders[-1]
        path = os.path.join(
            file.partition('.sarc.zs')[0], *folders
        ).replace(file_path, output_path, 1)
        # extract the file
        os.makedirs(path, exist_ok=True)
        with open(os.path.join(path, name), 'wb') as output_file:
            output_file.write(archive.get_file_data(file_name))
    print(f'Finished extracting {file} successfully')
def find_modded_sarc_files(mod_sarc: Union[Path, sarc.SARC], tmp_dir: Path,
                           name: str = '', aoc: bool = False,
                           verbose: bool = False) -> List[str]:
    """ Detects all of the modified files in a SARC

    :param mod_sarc: The SARC to scan for modded files.
    :type mod_sarc: class:`sarc.SARC`
    :param tmp_dir: The path to the base directory of the mod.
    :type tmp_dir: class:`pathlib.Path`
    :param name: The name of the SARC which contains the current SARC.
    :type name: str
    :param aoc: Specifies whether the SARC is DLC content, defaults to False.
    :type aoc: bool, optional
    :param verbose: Specifies whether to return more detailed output
    :type verbose: bool, optional
    :returns: A list of nested "pack//file" paths for each modified file.
    :rtype: list of str
    """
    if isinstance(mod_sarc, Path):
        # Bootup_* packs (language packs) are not handled here.
        if any(mod_sarc.name.startswith(exclude) for exclude in ['Bootup_']):
            return []
        name = str(mod_sarc.relative_to(tmp_dir))
        aoc = 'aoc' in mod_sarc.parts or 'Aoc' in mod_sarc.parts
        with mod_sarc.open('rb') as s_file:
            mod_sarc = sarc.read_file_and_make_sarc(s_file)
        if not mod_sarc:
            return []
    modded_files = []
    for file in mod_sarc.list_files():
        canon = file.replace('.s', '.')
        if aoc:
            canon = 'Aoc/0010/' + canon
        contents = mod_sarc.get_file_data(file).tobytes()
        contents = util.unyaz_if_needed(contents)
        nest_path = str(name).replace('\\', '/') + '//' + file
        if util.is_file_modded(canon, contents, True):
            modded_files.append(nest_path)
            if verbose:
                print(
                    f'Found modded file {canon} in {str(name).replace("//", "/")}'
                )
            # Recurse into nested SARCs (yaz0-compressed .ssarc excluded).
            if util.is_file_sarc(canon) and '.ssarc' not in file:
                try:
                    nest_sarc = sarc.SARC(contents)
                except ValueError:
                    continue
                sub_mod_files = find_modded_sarc_files(nest_sarc,
                                                       name=nest_path,
                                                       tmp_dir=tmp_dir,
                                                       aoc=aoc,
                                                       verbose=verbose)
                modded_files.extend(sub_mod_files)
        else:
            if verbose:
                print(
                    f'Ignored unmodded file {canon} in {str(name).replace("//", "/")}'
                )
    return modded_files