def _find_sarc(path: Path) -> typing.Optional[sarc.SARC]:
    """Walks down *path*, descending into nested SARC archives as needed.

    :param path: A path whose leading components may be real directories and
        whose trailing components may be entries inside (nested) SARCs.
    :return: The deepest SARC opened along the path, or None if any
        component is missing or unreadable.
    """
    archive: typing.Optional[sarc.SARC] = None
    # Path of the file the first (outermost) archive was read from; only
    # meaningful once `archive` is set.
    archive_path: typing.Optional[Path] = None
    for p in _get_parents_and_path(path):
        if _exists(p) and _is_dir(p):
            # Still on the real filesystem; keep descending.
            continue
        if archive:
            path_in_archive = p.relative_to(archive_path).as_posix()
            if path_in_archive not in archive.list_files():
                continue
            archive = sarc.read_file_and_make_sarc(
                io.BytesIO(archive.get_file_data(path_in_archive).tobytes()))
            if not archive:
                return None
        else:
            try:
                with p.open('rb') as f:
                    archive = sarc.read_file_and_make_sarc(f)  # type: ignore
                archive_path = p
                if not archive:
                    return None
            except Exception:  # pylint: disable=broad-except
                # Best-effort: any unreadable/unparseable file means "not found".
                # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
                return None
    return archive
def get_msbt_hashes(lang: str = 'USen') -> dict:
    """ Gets the MSBT hash table for the given language, or US English by default

    :param lang: The game language to use, defaults to USen.
    :type lang: str, optional
    :returns: A dictionary of MSBT files and their vanilla hashes.
    :rtype: dict of str: str
    """
    # Fixed: the return annotation was `-> {}` (an empty dict literal, not a
    # type); it now reads `-> dict`.
    # Cache on the function object so each language table is built only once.
    if not hasattr(get_msbt_hashes, 'texthashes'):
        get_msbt_hashes.texthashes = {}
    if lang not in get_msbt_hashes.texthashes:
        hash_table = util.get_exec_dir() / 'data' / 'msyt' / \
            f'Msg_{lang}_hashes.csv'
        if hash_table.exists():
            # Preferred source: pre-generated CSV of "name,hash" rows.
            get_msbt_hashes.texthashes[lang] = {}
            with hash_table.open('r') as h_file:
                csv_loop = csv.reader(h_file)
                for row in csv_loop:
                    get_msbt_hashes.texthashes[lang][row[0]] = row[1]
        elif util.get_game_file(f'Pack/Bootup_{lang}.pack').exists():
            # Fallback: hash the MSBTs straight out of the game's bootup pack.
            get_msbt_hashes.texthashes[lang] = {}
            with util.get_game_file(f'Pack/Bootup_{lang}.pack').open(
                    'rb') as b_file:
                bootup_pack = sarc.read_file_and_make_sarc(b_file)
            msg_bytes = util.decompress(
                bootup_pack.get_file_data(
                    f'Message/Msg_{lang}.product.ssarc').tobytes())
            msg_pack = sarc.SARC(msg_bytes)
            for msbt in msg_pack.list_files():
                get_msbt_hashes.texthashes[lang][msbt] = xxhash.xxh32(
                    msg_pack.get_file_data(msbt)).hexdigest()
    return get_msbt_hashes.texthashes[lang]
def extract_ref_msyts(lang: str = 'USen', for_merge: bool = False,
                      tmp_dir: Path = util.get_work_dir() / 'tmp_text'):
    """ Extracts the reference MSYT texts for the given language to a temp dir

    :param lang: The game language to use, defaults to USen.
    :type lang: str, optional
    :param for_merge: Whether the output is to be merged (or as reference),
    defaults to False
    :type for_merge: bool
    :param tmp_dir: The temp directory to extract to, defaults to "tmp_text"
    in BCML's working directory.
    :type tmp_dir: class:`pathlib.Path`, optional
    """
    if tmp_dir.exists():
        shutil.rmtree(tmp_dir, ignore_errors=True)
    # Pull the compressed message archive out of the language bootup pack.
    with util.get_game_file(f'Pack/Bootup_{lang}.pack').open('rb') as b_file:
        bootup_pack = sarc.read_file_and_make_sarc(b_file)
    compressed = bootup_pack.get_file_data(
        f'Message/Msg_{lang}.product.ssarc').tobytes()
    msg_pack = sarc.SARC(util.decompress(compressed))
    out_dir = tmp_dir / ('merged' if for_merge else 'ref')
    msg_pack.extract_to_dir(str(out_dir))
    msbt_to_msyt(out_dir)
def inject_file_into_bootup(file: str, data: bytes, create_bootup: bool = False):
    """ Injects a file into the master BCML `Bootup.pack`

    :param file: The path of the file to inject
    :type file: str
    :param data: The bytes of the file to inject
    :type data: bytes
    :param create_bootup: Whether to create `Bootup.pack` if it does not exist,
    defaults to False
    :type create_bootup: bool, optional
    """
    bootup_path = get_master_modpack_dir() / 'content' / 'Pack' / 'Bootup.pack'
    if not (bootup_path.exists() or create_bootup):
        raise FileNotFoundError(
            'Bootup.pack is not present in the master BCML mod')
    if not bootup_path.exists():
        # Seed the master copy from the stock game pack first.
        bootup_path.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy(get_game_file('Pack/Bootup.pack'), bootup_path)
    with bootup_path.open('rb') as b_file:
        old_bootup = sarc.read_file_and_make_sarc(b_file)
    new_bootup = sarc.make_writer_from_sarc(old_bootup)
    # Replace the entry if it already exists, then write the pack back out.
    if file in old_bootup.list_files():
        new_bootup.delete_file(file)
    new_bootup.add_file(file, data)
    bootup_path.write_bytes(new_bootup.get_bytes())
def get_nested_file_bytes(file: str, unyaz: bool = True) -> bytes:
    """ Get the contents of a file nested inside one or more SARCs

    :param file: A string containing the nested SARC path to the file
    :type file: str
    :param unyaz: Whether to decompress the file if yaz0 compressed, defaults
    to True
    :type unyaz: bool, optional
    :return: Returns the bytes to the file
    :rtype: bytes
    """
    parts = file.split('//')
    # Open the outermost archive from disk, then walk inward one level at a
    # time; only the current archive needs to stay alive.
    with open(parts[0], 'rb') as outer_file:
        current = sarc.read_file_and_make_sarc(outer_file)
    for nested_name in parts[1:-1]:
        current = sarc.SARC(unyaz_if_needed(
            current.get_file_data(nested_name).tobytes()))
    file_bytes = current.get_file_data(parts[-1]).tobytes()
    if file_bytes[0:4] == b'Yaz0' and unyaz:
        file_bytes = decompress(file_bytes)
    del current
    return file_bytes
def inject_savedata_into_bootup(bgsvdata: sarc.SARCWriter, bootup_path: Path = None) -> int:
    """ Packs a savedata SARC into Bootup.pack and returns the RSTB size of
    the new savedataformat.sarc

    :param bgsvdata: A SARCWriter for the new savedata
    :type bgsvdata: class:`sarc.SARCWriter`
    :param bootup_path: Path to the Bootup.pack to update, defaults to a
    master BCML copy
    :type bootup_path: class:`pathlib.Path`, optional
    :returns: Returns the RSTB size of the new savedataformat.sarc
    :rtype: int
    """
    if not bootup_path:
        # Prefer the master modpack copy; fall back to the stock game pack.
        master_boot = util.get_master_modpack_dir() / 'content' / 'Pack' / 'Bootup.pack'
        bootup_path = master_boot if master_boot.exists() \
            else util.get_game_file('Pack/Bootup.pack')
    with bootup_path.open('rb') as b_file:
        source_pack = sarc.read_file_and_make_sarc(b_file)
    updated_pack = sarc.make_writer_from_sarc(source_pack)
    updated_pack.delete_file('GameData/savedataformat.ssarc')
    savedata_bytes = bgsvdata.get_bytes()
    updated_pack.add_file('GameData/savedataformat.ssarc',
                          util.compress(savedata_bytes))
    # The result is always written to the master modpack copy.
    dest_dir = util.get_master_modpack_dir() / 'content' / 'Pack'
    dest_dir.mkdir(parents=True, exist_ok=True)
    with (dest_dir / 'Bootup.pack').open('wb') as out_file:
        updated_pack.write(out_file)
    return rstb.SizeCalculator().calculate_file_size_with_ext(
        savedata_bytes, True, '.sarc')
def _get_sizes_in_sarc(file: Union[Path, sarc.SARC]) -> dict:
    """Recursively computes RSTB size values for modified files inside a SARC.

    :param file: The SARC to scan, or a path to one on disk.
    :return: A dict mapping canonical resource names to RSTB size values.
    """
    # Fixed: the return annotation was `-> {}` (an empty dict literal, not a
    # type); it now reads `-> dict`.
    calc = rstb.SizeCalculator()
    sizes = {}
    guess = util.get_settings_bool('guess_merge')
    if isinstance(file, Path):
        with file.open('rb') as s_file:
            file = sarc.read_file_and_make_sarc(s_file)
        if not file:
            return {}
    for nest_file in file.list_files():
        canon = nest_file.replace('.s', '.')
        data = util.unyaz_if_needed(file.get_file_data(nest_file).tobytes())
        ext = Path(canon).suffix
        if util.is_file_modded(canon, data) and ext not in RSTB_EXCLUDE_EXTS \
                and canon not in RSTB_EXCLUDE_NAMES:
            size = calc.calculate_file_size_with_ext(
                data,
                wiiu=True,
                ext=ext
            )
            if ext == '.bdmgparam':
                # NOTE(review): .bdmgparam is forced to 0 here and the guess
                # below may then fill it in — presumably the calculator cannot
                # size this format; confirm.
                size = 0
            if size == 0 and guess:
                if ext in util.AAMP_EXTS:
                    size = guess_aamp_size(data, ext)
                elif ext in ['.bfres', '.sbfres']:
                    size = guess_bfres_size(data, canon)
            sizes[canon] = size
        # Recurse into nested SARCs (but not yaz0-compressed .ssarc entries).
        if util.is_file_sarc(nest_file) and not nest_file.endswith('.ssarc'):
            try:
                nest_sarc = sarc.SARC(data)
            except ValueError:
                continue
            sizes.update(_get_sizes_in_sarc(nest_sarc))
    return sizes
def get_stock_gamedata() -> sarc.SARC:
    """ Gets the contents of the unmodded gamedata.sarc """
    # Cached on the function object: the game files never change at runtime.
    if hasattr(get_stock_gamedata, 'gamedata'):
        return get_stock_gamedata.gamedata
    with util.get_game_file('Pack/Bootup.pack').open('rb') as pack_file:
        bootup_pack = sarc.read_file_and_make_sarc(pack_file)
    raw_gamedata = util.decompress(
        bootup_pack.get_file_data('GameData/gamedata.ssarc'))
    get_stock_gamedata.gamedata = sarc.SARC(raw_gamedata)
    return get_stock_gamedata.gamedata
def _clean_sarc(file: Path, hashes: dict, tmp_dir: Path):
    # Strips unmodified entries out of a modded SARC, leaving a "partial
    # pack"; deletes the SARC entirely when nothing in it needs keeping.
    canon = util.get_canon_name(file.relative_to(tmp_dir))
    try:
        stock_file = util.get_game_file(file.relative_to(tmp_dir))
    except FileNotFoundError:
        # No stock counterpart to compare against: leave the file untouched.
        return
    with stock_file.open('rb') as old_file:
        old_sarc = sarc.read_file_and_make_sarc(old_file)
    if not old_sarc:
        return
    old_files = set(old_sarc.list_files())
    if canon not in hashes:
        # Not a known vanilla resource: nothing to clean.
        return
    with file.open('rb') as s_file:
        base_sarc = sarc.read_file_and_make_sarc(s_file)
    if not base_sarc:
        return
    new_sarc = sarc.SARCWriter(True)
    can_delete = True
    for nest_file in base_sarc.list_files():
        canon = nest_file.replace('.s', '.')
        ext = Path(canon).suffix
        if ext in {'.yml', '.bak'}:
            continue
        file_data = base_sarc.get_file_data(nest_file).tobytes()
        xhash = xxhash.xxh32(util.unyaz_if_needed(file_data)).hexdigest()
        if nest_file in old_files:
            old_hash = xxhash.xxh32(
                util.unyaz_if_needed(
                    old_sarc.get_file_data(nest_file).tobytes())).hexdigest()
        # Keep the entry if it is new, or changed and not an AAMP file.
        # NOTE(review): `old_hash` is only defined when `nest_file in
        # old_files`; the short-circuit on `not in old_files` is what keeps
        # this from raising — preserve the condition order.
        # NOTE(review): AAMP files appear to be dropped because a separate
        # merger handles them — confirm against the rest of the project.
        if nest_file not in old_files or (xhash != old_hash and ext not in util.AAMP_EXTS):
            can_delete = False
            new_sarc.add_file(nest_file, file_data)
    del old_sarc
    if can_delete:
        # Everything matched stock (or is merged elsewhere): remove the SARC.
        del new_sarc
        file.unlink()
    else:
        with file.open('wb') as s_file:
            # '.s*' extensions (other than '.ssarc') are stored yaz0-compressed.
            if file.suffix.startswith('.s') and file.suffix != '.ssarc':
                s_file.write(util.compress(new_sarc.get_bytes()))
            else:
                new_sarc.write(s_file)
def get_text_mods_from_bootup(bootup_path: Union[Path, str],
                              tmp_dir: Path = util.get_work_dir() / 'tmp_text',
                              verbose: bool = False, lang: str = ''):
    """ Detects modifications to text files inside a given Bootup_XXxx.pack

    :param bootup_path: Path to the Bootup_XXxx.pack file.
    :type bootup_path: class:`pathlib.Path`
    :param tmp_dir: The temp directory to use, defaults to "tmp_text" in
    BCML's working directory.
    :type tmp_dir: class:`pathlib.Path`
    :param verbose: Whether to display more detailed output, defaults to False.
    :type verbose: bool, optional
    :param lang: The game language of the pack; detected from the file name
    when empty.
    :type lang: str, optional
    :returns: Return a tuple containing a dict of modded text entries, a SARC
    containing added text MSBTs, and the game language of the bootup pack.
    :rtype: (dict, class:`sarc.SARCWriter`, str)
    """
    if not lang:
        lang = util.get_file_language(bootup_path)
    print(f'Scanning text modifications for language {lang}...')
    spaces = ' '
    if verbose:
        print(f'{spaces}Identifying modified text files...')
    with open(bootup_path, 'rb') as b_file:
        bootup_sarc = sarc.read_file_and_make_sarc(b_file)
    # NOTE(review): get_file_data() is passed to decompress() without
    # .tobytes() here, unlike elsewhere in this file — presumably decompress
    # accepts a memoryview; confirm.
    msg_bytes = util.decompress(
        bootup_sarc.get_file_data(f'Message/Msg_{lang}.product.ssarc'))
    msg_sarc = sarc.SARC(msg_bytes)
    if not msg_sarc:
        print(f'Failed to open Msg_{lang}.product.ssarc, could not analyze texts')
    modded_msyts, added_msbts = get_modded_msyts(msg_sarc, lang)
    added_text_store = None
    if added_msbts:
        added_text_store = store_added_texts(added_msbts)
    if verbose:
        for modded_text in modded_msyts:
            print(f'{spaces}{spaces}{modded_text} has been changed')
        for added_text in added_msbts:
            print(f'{spaces}{spaces}{added_text} has been added')
    # NOTE(review): msbt_to_msyt() is called with no arguments here —
    # presumably it defaults to the same tmp_text working directory; confirm.
    problems = msbt_to_msyt()
    for problem in problems:
        # Drop any files the MSBT->MSYT conversion failed on.
        msyt_name = problem.relative_to(tmp_dir).with_suffix('.msyt').as_posix()
        try:
            modded_msyts.remove(msyt_name)
        except ValueError:
            pass
    if verbose:
        print(f'{spaces}Scanning texts files for modified entries...')
    modded_texts = get_modded_texts(modded_msyts, lang=lang)
    s_modded = 's' if len(modded_texts) != 1 else ''
    s_added = 's' if len(added_msbts) != 1 else ''
    print(f'Language {lang} has total {len(modded_texts)} modified text file{s_modded} and '
          f'{len(added_msbts)} new text file{s_added}')
    shutil.rmtree(tmp_dir)
    return modded_texts, added_text_store, lang
def _get_sarc(self, base_path: PPPath, path: PPPath) -> typing.Tuple[Directory, sarc.SARC]:
    """Opens the SARC at *path* and returns it along with its parent directory.

    Raises FuseOSError(ENOENT) when the file is not a readable SARC.
    """
    parent = self._get_directory(base_path, path.parent)
    handle = parent.open_file(
        parent.get_path_relative_to_this(path), os.O_RDONLY)
    raw = handle.read(handle.get_size())
    archive = sarc.read_file_and_make_sarc(io.BytesIO(raw))
    if not archive:
        raise FuseOSError(errno.ENOENT)
    return (parent, archive)
def generate_diff(self, mod_dir: Path, modded_files: List[Union[Path, str]]):
    """Returns modded event diffs from EventInfo.product.sbyml inside the
    mod's Bootup.pack, or an empty dict if it was not modified."""
    if 'content/Pack/Bootup.pack//Event/EventInfo.product.sbyml' not in modded_files:
        return {}
    bootup_path = mod_dir / 'content' / 'Pack' / 'Bootup.pack'
    with bootup_path.open('rb') as bootup_file:
        bootup_sarc = sarc.read_file_and_make_sarc(bootup_file)
    raw_info = bootup_sarc.get_file_data(
        'Event/EventInfo.product.sbyml').tobytes()
    event_info = byml.Byml(util.decompress(raw_info)).parse()
    return get_modded_events(event_info)
def generate_diff(self, mod_dir: Path, modded_files: List[Union[Path, str]]):
    """Returns modded savedata entries from savedataformat.ssarc inside the
    mod's Bootup.pack, or an empty list if it was not modified."""
    if 'content/Pack/Bootup.pack//GameData/savedataformat.ssarc' not in modded_files:
        return []
    bootup_path = mod_dir / 'content' / 'Pack' / 'Bootup.pack'
    with bootup_path.open('rb') as bootup_file:
        bootup_sarc = sarc.read_file_and_make_sarc(bootup_file)
    raw_savedata = bootup_sarc.get_file_data(
        'GameData/savedataformat.ssarc').tobytes()
    return get_modded_savedata_entries(sarc.SARC(util.decompress(raw_savedata)))
def get_added_text_mods(lang: str = 'USen') -> List[sarc.SARC]:
    """ Gets a list containing all mod-original texts installed """
    added_texts = []
    merger = TextsMerger()
    for mod in util.get_installed_mods():
        if not merger.is_mod_logged(mod):
            continue
        # Each mod may log its texts under a different-but-compatible language.
        mod_lang = match_language(lang, mod.path / 'logs')
        log_path = mod.path / 'logs' / f'newtexts_{mod_lang}.sarc'
        try:
            with log_path.open('rb') as s_file:
                added_texts.append(sarc.read_file_and_make_sarc(s_file))
        except FileNotFoundError:
            pass
    return added_texts
def generate_diff(self, mod_dir: Path, modded_files: List[Path]):
    """Calculates RSTB size values for every modified file in a mod.

    :param mod_dir: The root directory of the extracted mod.
    :param modded_files: The modified files: Paths for loose files, nested
        SARC path strings ("pack//file") for packed files.
    :return: A dict mapping each modified file to its new RSTB value.
    """
    rstb_diff = {}
    # Cache of opened SARCs keyed by their path component, so each archive
    # in a nested chain is parsed only once.
    open_sarcs = {}
    for file in modded_files:
        if isinstance(file, Path):
            canon = util.get_canon_name(file.relative_to(mod_dir).as_posix())
            if Path(canon).suffix not in RSTB_EXCLUDE_EXTS and\
               Path(canon).name not in RSTB_EXCLUDE_NAMES:
                size = calculate_size(file)
                if file.suffix == '.bdmgparam':
                    size = 0
                if size == 0 and self._options['guess']:
                    if file.suffix in util.AAMP_EXTS:
                        size = guess_aamp_size(file)
                    elif file.suffix in ['.bfres', '.sbfres']:
                        size = guess_bfres_size(file)
                rstb_diff[file] = size
        elif isinstance(file, str):
            parts = file.split('//')
            name = parts[-1]
            if parts[0] not in open_sarcs:
                with (mod_dir / parts[0]).open('rb') as s_file:
                    open_sarcs[parts[0]] = sarc.read_file_and_make_sarc(s_file)
            for part in parts[1:-1]:
                if part not in open_sarcs:
                    open_sarcs[part] = sarc.SARC(
                        util.unyaz_if_needed(
                            open_sarcs[parts[parts.index(part) - 1]]
                            .get_file_data(part).tobytes()
                        )
                    )
            ext = Path(name).suffix
            data = util.unyaz_if_needed(
                open_sarcs[parts[-2]].get_file_data(name).tobytes())
            rstb_val = rstb.SizeCalculator().calculate_file_size_with_ext(
                data,
                wiiu=True,
                ext=ext
            )
            if ext == '.bdmgparam':
                rstb_val = 0
            if rstb_val == 0 and self._options['guess']:
                if ext in util.AAMP_EXTS:
                    rstb_val = guess_aamp_size(data, ext)
                elif ext in ['.bfres', '.sbfres']:
                    rstb_val = guess_bfres_size(data, name)
            rstb_diff[file] = rstb_val
    # Fixed: the original `for open_sarc in open_sarcs: del open_sarc` only
    # unbound the loop variable each iteration — the dict kept every SARC
    # alive. Clearing the dict actually releases the cached archives.
    open_sarcs.clear()
    return rstb_diff
def get_mod_diff(self, mod: BcmlMod):
    """Collects a mod's logged text diffs and added-text SARCs per language."""
    diff = {}
    log_dir = mod.path / 'logs'
    for log_file in log_dir.glob('texts_*.yml'):
        lang = util.get_file_language(log_file)
        with log_file.open('r', encoding='utf-8') as log:
            diff.setdefault(lang, {})['mod'] = yaml.safe_load(log)
    for text_pack in log_dir.glob('newtexts*.sarc'):
        lang = util.get_file_language(text_pack)
        with text_pack.open('rb') as t_sarc:
            diff.setdefault(lang, {})['add'] = sarc.read_file_and_make_sarc(t_sarc)
    return diff
def get_modded_map(map_unit: Union[Map, tuple], tmp_dir: Path) -> dict:
    """ Finds the most significant available map_unit unit in a mod
    for a given section and type and returns its contents as a dict.
    Checks `AocMainField.pack` first, then the unpacked aoc map_unit files,
    and then the base game map_unit files.

    :param map_unit: The map_unit section and type.
    :type map_unit: class:`bcml.mubin.Map`
    :param tmp_dir: The path to the base directory of the mod.
    :type tmp_dir: class:`pathlib.Path`
    :return: Returns a dict representation of the requested map_unit unit.
    :rtype: dict
    """
    if isinstance(map_unit, tuple):
        map_unit = Map(*map_unit)
    map_bytes = None
    # The DLC content can live under several directory layouts; probe each.
    aoc_dir = tmp_dir / 'aoc' / '0010' / 'content'
    if not aoc_dir.exists():
        aoc_dir = tmp_dir / 'aoc' / 'content' / '0010'
        if not aoc_dir.exists():
            aoc_dir = tmp_dir / 'aoc' / '0010'
    if (aoc_dir / 'Pack' / 'AocMainField.pack').exists():
        with (aoc_dir / 'Pack' / 'AocMainField.pack').open('rb') as s_file:
            map_pack = sarc.read_file_and_make_sarc(s_file)
        if map_pack:
            try:
                map_bytes = map_pack.get_file_data(
                    f'Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.smubin'
                ).tobytes()
            except KeyError:
                pass
    if not map_bytes:
        aoc_map = (aoc_dir / 'Map' / 'MainField' / map_unit.section /
                   f'{map_unit.section}_{map_unit.type}.smubin')
        base_map = (tmp_dir / 'content' / 'Map' / 'MainField' / map_unit.section /
                    f'{map_unit.section}_{map_unit.type}.smubin')
        if aoc_map.exists():
            # Fixed: the original tested `aoc_dir / ...` for existence but then
            # read from the hard-coded `tmp_dir / 'aoc' / '0010' / ...` path,
            # which only matches one of the three aoc_dir layouts probed above.
            map_bytes = aoc_map.read_bytes()
        elif base_map.exists():
            map_bytes = base_map.read_bytes()
    if not map_bytes:
        raise FileNotFoundError(
            f'Oddly, the modded map {map_unit.section}_{map_unit.type}.smubin '
            'could not be found.')
    map_bytes = util.decompress(map_bytes)
    return byml.Byml(map_bytes).parse()
def handle_file(res_name: str, full_name: str,
                stream: typing.Optional[typing.BinaryIO]) -> None:
    """Registers *res_name*, and when it is a SARC, recurses into its entries."""
    add_entry(res_name, full_name=full_name)
    res_stem, ext = get_name_and_extension(res_name)
    full_stem, ext = get_name_and_extension(full_name)
    if ext.startswith('s'):
        # Also register the name with the compression prefix stripped
        # (e.g. '.sbyml' -> '.byml').
        res_name = "%s.%s" % (res_stem, ext[1:])
        full_name = "%s.%s" % (full_stem, ext[1:])
        add_entry(res_name, full_name=full_name)
    if not stream:
        return
    arc = sarc.read_file_and_make_sarc(stream)
    if not arc:
        return
    for entry_name in arc.list_files():
        entry_full_name = "%s/%s" % (full_name, entry_name)
        sub_stream = (arc.get_file_data(entry_name)
                      if arc.is_archive(entry_name) else None)
        handle_file(entry_name, entry_full_name, sub_stream)
        if entry_name.startswith('/'):
            add_entry(entry_name[1:], full_name=entry_full_name)
def create_bnp_mod(mod: Path, output: Path, options: dict = None):
    """ Converts a graphic pack mod (file or directory) into a BNP mod file

    :param mod: Path to the mod archive or extracted mod directory to convert
    :type mod: Path
    :param output: Path where the finished BNP file will be written
    :type output: Path
    :param options: Options passed to the log generators, defaults to {}
    :type options: dict, optional
    """
    if isinstance(mod, str):
        mod = Path(mod)
    if mod.is_file():
        print('Extracting mod...')
        tmp_dir: Path = open_mod(mod)
    elif mod.is_dir():
        print(f'Loading mod from {str(mod)}...')
        tmp_dir: Path = util.get_work_dir() / \
            f'tmp_{xxhash.xxh32(str(mod)).hexdigest()}'
        shutil.copytree(str(mod), str(tmp_dir))
    else:
        print(f'Error: {str(mod)} is neither a valid file nor a directory')
        return
    print('Packing loose files...')
    # Deepest folders first so nested SARCs are packed before their parents.
    pack_folders = sorted(
        {
            d for d in tmp_dir.rglob('**/*')
            if d.is_dir() and d.suffix in util.SARC_EXTS
        },
        key=lambda d: len(d.parts), reverse=True)
    for folder in pack_folders:
        new_tmp: Path = folder.with_suffix(folder.suffix + '.tmp')
        shutil.move(folder, new_tmp)
        new_sarc = sarc.SARCWriter(be=True)
        for file in {f for f in new_tmp.rglob('**/*') if f.is_file()}:
            new_sarc.add_file(
                file.relative_to(new_tmp).as_posix(), file.read_bytes())
        sarc_bytes = new_sarc.get_bytes()
        # '.s*' extensions (other than plain '.sarc') are stored compressed.
        if str(folder.suffix).startswith('.s') and folder.suffix != '.sarc':
            sarc_bytes = util.compress(sarc_bytes)
        folder.write_bytes(sarc_bytes)
        shutil.rmtree(new_tmp)
    if not options:
        options = {}
    options['texts'] = {'user_only': False}
    pool = Pool(cpu_count())
    logged_files = generate_logs(tmp_dir, options=options, original_pool=pool)
    print('Removing unnecessary files...')
    # Anything captured in the logs can be regenerated on install, so the raw
    # copies are stripped from the package.
    if (tmp_dir / 'logs' / 'map.yml').exists():
        print('Removing map units...')
        for file in [file for file in logged_files if isinstance(file, Path) and \
                     fnmatch(file.name, '[A-Z]-[0-9]_*.smubin')]:
            file.unlink()
    if [file for file in (tmp_dir / 'logs').glob('*texts*')]:
        print('Removing language bootup packs...')
        for bootup_lang in (tmp_dir / 'content' / 'Pack').glob('Bootup_*.pack'):
            bootup_lang.unlink()
    if (tmp_dir / 'logs' / 'actorinfo.yml').exists() and \
       (tmp_dir / 'content' / 'Actor' / 'ActorInfo.product.sbyml').exists():
        print('Removing ActorInfo.product.sbyml...')
        (tmp_dir / 'content' / 'Actor' / 'ActorInfo.product.sbyml').unlink()
    if (tmp_dir / 'logs' / 'gamedata.yml').exists() or (
            tmp_dir / 'logs' / 'savedata.yml').exists():
        print('Removing gamedata sarcs...')
        with (tmp_dir / 'content' / 'Pack' / 'Bootup.pack').open('rb') as b_file:
            bsarc = sarc.read_file_and_make_sarc(b_file)
        csarc = sarc.make_writer_from_sarc(bsarc)
        bsarc_files = list(bsarc.list_files())
        if 'GameData/gamedata.ssarc' in bsarc_files:
            csarc.delete_file('GameData/gamedata.ssarc')
        if 'GameData/savedataformat.ssarc' in bsarc_files:
            csarc.delete_file('GameData/savedataformat.ssarc')
        with (tmp_dir / 'content' / 'Pack' / 'Bootup.pack').open('wb') as b_file:
            csarc.write(b_file)
    hashes = util.get_hash_table()
    print('Creating partial packs...')
    sarc_files = {
        file for file in tmp_dir.rglob('**/*')
        if file.suffix in util.SARC_EXTS
    }
    if sarc_files:
        # Strip unmodified entries from every SARC in parallel; _clean_sarc
        # may delete a SARC entirely, so the set is rebuilt afterwards.
        pool.map(partial(_clean_sarc, hashes=hashes, tmp_dir=tmp_dir),
                 sarc_files)
    pool.close()
    pool.join()
    sarc_files = {
        file for file in tmp_dir.rglob('**/*')
        if file.suffix in util.SARC_EXTS
    }
    if sarc_files:
        with (tmp_dir / 'logs' / 'packs.log').open(
                'w', encoding='utf-8') as p_file:
            final_packs = [
                file for file in list(tmp_dir.rglob('**/*'))
                if file.suffix in util.SARC_EXTS
            ]
            if final_packs:
                p_file.write('name,path\n')
                for file in final_packs:
                    p_file.write(
                        f'{util.get_canon_name(file.relative_to(tmp_dir))},'
                        f'{file.relative_to(tmp_dir)}\n')
    else:
        if (tmp_dir / 'logs' / 'packs.log').exists():
            (tmp_dir / 'logs' / 'packs.log').unlink()
    print('Cleaning any junk files...')
    for file in tmp_dir.rglob('**/*'):
        if file.parent.stem == 'logs':
            continue
        if file.suffix in ['.yml', '.bak', '.tmp', '.old']:
            file.unlink()
    print('Removing blank folders...')
    # Reverse order so children are removed before their parents.
    for folder in reversed(list(tmp_dir.rglob('**/*'))):
        if folder.is_dir() and not list(folder.glob('*')):
            shutil.rmtree(folder)
    print(f'Saving output file to {str(output)}...')
    # The BNP container is just a 7-Zip archive of the cleaned tree.
    x_args = [
        str(util.get_exec_dir() / 'helpers' / '7z.exe'),
        'a', str(output), f'{str(tmp_dir / "*")}'
    ]
    subprocess.run(x_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                   creationflags=util.CREATE_NO_WINDOW)
    print('Conversion complete.')
def find_modded_files(tmp_dir: Path, verbose: bool = False,
                      original_pool: Pool = None) -> List[Union[Path, str]]:
    """ Detects all of the modified files in an extracted mod

    :param tmp_dir: The path to the base directory of the mod.
    :type tmp_dir: class:`pathlib.Path`
    :param verbose: Specifies whether to return more detailed output
    :type verbose: bool, optional
    :param original_pool: An existing multiprocessing pool to reuse for
    scanning SARCs, defaults to None
    :returns: Returns a list of modified files: Paths for loose files and
    nested SARC path strings for packed files.
    :rtype: list of (class:`pathlib.Path` or str)
    """
    modded_files = []
    if isinstance(tmp_dir, str):
        tmp_dir = Path(tmp_dir)
    # Any bundled RSTB is discarded; it is regenerated on merge.
    rstb_path = tmp_dir / 'content' / 'System' / 'Resource' /\
        'ResourceSizeTable.product.srsizetable'
    if rstb_path.exists():
        rstb_path.unlink()
    # Fixed: the original tested `(tmp_dir / 'aoc').exists` — the bound method
    # object, which is always truthy — so the DLC check ran for every mod.
    if (tmp_dir / 'aoc').exists():
        try:
            util.get_aoc_dir()
        except FileNotFoundError as err:
            err.error_text = (
                'This mod uses DLC files, but you do not appear to have the DLC '
                'installed. If you still want to use this mod, unpack it and '
                'remove the "aoc" folder.')
            raise err
    aoc_field = tmp_dir / 'aoc' / '0010' / 'Pack' / 'AocMainField.pack'
    if aoc_field.exists() and aoc_field.stat().st_size > 0:
        # Unpack AocMainField.pack in place so its contents are scanned as
        # loose files, leaving an empty marker file behind.
        with aoc_field.open('rb') as a_file:
            sarc.read_file_and_make_sarc(a_file).extract_to_dir(
                str(tmp_dir / 'aoc' / '0010'))
        aoc_field.write_bytes(b'')
    for file in tmp_dir.rglob('**/*'):
        if file.is_file():
            canon = util.get_canon_name(file.relative_to(tmp_dir).as_posix())
            if canon is None:
                if verbose:
                    print(
                        f'Ignored unknown file {file.relative_to(tmp_dir).as_posix()}'
                    )
                continue
            if util.is_file_modded(canon, file, True):
                modded_files.append(file)
                if verbose:
                    print(f'Found modded file {canon}')
            else:
                if 'Aoc/0010/Map/MainField' in canon:
                    # Unmodded DLC map units extracted above are junk; drop them.
                    file.unlink()
                if verbose:
                    print(f'Ignored unmodded file {canon}')
                continue
    total = len(modded_files)
    print(f'Found {total} modified file{"s" if total > 1 else ""}')
    total = 0
    sarc_files = [
        file for file in modded_files if file.suffix in util.SARC_EXTS
    ]
    if sarc_files:
        print('Scanning files packed in SARCs...')
        num_threads = min(len(sarc_files), cpu_count() - 1)
        pool = original_pool or Pool(processes=num_threads)
        modded_sarc_files = pool.map(
            partial(find_modded_sarc_files, tmp_dir=tmp_dir, verbose=verbose),
            sarc_files)
        for files in modded_sarc_files:
            total += len(files)
            modded_files.extend(files)
        if not original_pool:
            pool.close()
            pool.join()
    print(f'Found {total} modified packed file{"s" if total > 1 else ""}')
    return modded_files
def get_stock_map(map_unit: Union[Map, tuple], force_vanilla: bool = False) -> dict:
    """ Finds the most significant available map unit from the unmodded game
    and returns its contents as a dict.

    :param map_unit: The map section and type.
    :type map_unit: class:`bcml.mubin.Map`
    :param force_vanilla: Skip DLC sources and use only the base game,
    defaults to False. Forced on when no DLC directory is found.
    :type force_vanilla: bool, optional
    :return: Returns a dict representation of the requested map unit.
    :rtype: dict
    """
    if isinstance(map_unit, tuple):
        map_unit = Map(*map_unit)
    try:
        aoc_dir = util.get_aoc_dir()
    except FileNotFoundError:
        # No DLC installed: only base game sources can be used.
        force_vanilla = True
    map_bytes = None
    if force_vanilla:
        try:
            # Prefer the update copy, then the base game copy.
            map_path = (
                util.get_update_dir() / 'Map/MainField/'
                f'{map_unit.section}/{map_unit.section}_{map_unit.type}.smubin'
            )
            if not map_path.exists():
                map_path = (
                    util.get_game_dir() / 'Map/MainField/'
                    f'{map_unit.section}/{map_unit.section}_{map_unit.type}.smubin'
                )
            map_bytes = map_path.read_bytes()
        except FileNotFoundError:
            # Not loose on disk: try the copy packed inside TitleBG.pack.
            with util.get_game_file('Pack/TitleBG.pack').open('rb') \
                    as s_file:
                title_pack = sarc.read_file_and_make_sarc(s_file)
            if title_pack:
                try:
                    map_bytes = title_pack.get_file_data(
                        f'Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}'
                        '.smubin').tobytes()
                except KeyError:
                    map_bytes = None
    else:
        # DLC sources first: AocMainField.pack takes priority when present.
        if (aoc_dir / 'Pack' / 'AocMainField.pack').exists():
            with (aoc_dir / 'Pack' / 'AocMainField.pack').open('rb') as s_file:
                map_pack = sarc.read_file_and_make_sarc(s_file)
            if map_pack:
                try:
                    map_bytes = map_pack.get_file_data(
                        f'Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}'
                        '.smubin').tobytes()
                except KeyError:
                    map_bytes = None
        if not map_bytes:
            # Fall back: loose DLC file, then loose base file, then TitleBG.pack.
            map_path = f'Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.smubin'
            try:
                map_bytes = util.get_game_file(map_path, aoc=True).read_bytes()
            except FileNotFoundError:
                try:
                    map_bytes = util.get_game_file(map_path).read_bytes()
                except FileNotFoundError:
                    with util.get_game_file('Pack/TitleBG.pack').open('rb') \
                            as s_file:
                        title_pack = sarc.read_file_and_make_sarc(s_file)
                    if title_pack:
                        try:
                            map_bytes = title_pack.get_file_data(
                                f'Map/MainField/{map_unit.section}/'
                                f'{map_unit.section}_{map_unit.type}.smubin'
                            ).tobytes()
                        except KeyError:
                            map_bytes = None
    if not map_bytes:
        raise FileNotFoundError(
            f'The stock map file {map_unit.section}_{map_unit.type}.smubin could not be found.'
        )
    map_bytes = util.decompress(map_bytes)
    return byml.Byml(map_bytes).parse()
def find_modded_sarc_files(mod_sarc: Union[Path, sarc.SARC], tmp_dir: Path,
                           name: str = '', aoc: bool = False,
                           verbose: bool = False) -> List[str]:
    """ Detects all of the modified files in a SARC

    :param mod_sarc: The SARC to scan for modded files, or a path to one.
    :type mod_sarc: class:`sarc.SARC`
    :param tmp_dir: The path to the base directory of the mod.
    :type tmp_dir: class:`pathlib.Path`
    :param name: The name of the SARC which contains the current SARC.
    :type name: str
    :param aoc: Specifies whether the SARC is DLC content, defaults to False.
    :type aoc: bool, optional
    :param verbose: Specifies whether to return more detailed output
    :type verbose: bool, optional
    :returns: A list of nested SARC path strings ("pack//file") for every
    modded file found.
    :rtype: list of str
    """
    if isinstance(mod_sarc, Path):
        # Language bootup packs are handled elsewhere; skip them here.
        if any(mod_sarc.name.startswith(exclude) for exclude in ['Bootup_']):
            return []
        name = str(mod_sarc.relative_to(tmp_dir))
        aoc = 'aoc' in mod_sarc.parts or 'Aoc' in mod_sarc.parts
        with mod_sarc.open('rb') as s_file:
            mod_sarc = sarc.read_file_and_make_sarc(s_file)
    if not mod_sarc:
        return []
    modded_files = []
    for file in mod_sarc.list_files():
        canon = file.replace('.s', '.')
        if aoc:
            canon = 'Aoc/0010/' + canon
        contents = mod_sarc.get_file_data(file).tobytes()
        contents = util.unyaz_if_needed(contents)
        nest_path = str(name).replace('\\', '/') + '//' + file
        if util.is_file_modded(canon, contents, True):
            modded_files.append(nest_path)
            if verbose:
                print(
                    f'Found modded file {canon} in {str(name).replace("//", "/")}'
                )
            # Recurse into nested SARCs (compressed .ssarc entries excluded).
            if util.is_file_sarc(canon) and '.ssarc' not in file:
                try:
                    nest_sarc = sarc.SARC(contents)
                except ValueError:
                    continue
                sub_mod_files = find_modded_sarc_files(nest_sarc,
                                                       name=nest_path,
                                                       tmp_dir=tmp_dir,
                                                       aoc=aoc,
                                                       verbose=verbose)
                modded_files.extend(sub_mod_files)
        else:
            if verbose:
                print(
                    f'Ignored unmodded file {canon} in {str(name).replace("//", "/")}'
                )
    return modded_files