def log_diff(self, mod_dir: Path, diff_material: Union[dict, List[Path]]):
    """Write this merger's RSTB log for a mod.

    Accepts either a ready-made diff dict or a list of modded files, in which
    case the diff is generated first. Also drops `.leave`/`.shrink` marker
    files when the corresponding options are set.
    """
    if isinstance(diff_material, dict):
        entries = diff_material
    elif isinstance(diff_material, List):
        entries = self.generate_diff(mod_dir, diff_material)
    else:
        entries = {}
    log_path = mod_dir / 'logs' / self._log_name
    with log_path.open('w', encoding='utf-8') as log:
        log.write('name,rstb,path\n')
        for entry, rstb_value in entries.items():
            ext = Path(entry).suffix
            if isinstance(entry, Path):
                # Loose file: derive canonical name and mod-relative path.
                canon = util.get_canon_name(str(entry.relative_to(mod_dir)))
                rel_path = entry.relative_to(mod_dir).as_posix()
            elif isinstance(entry, str):
                # Nested file ("sarc//inner" notation): last segment, de-prefixed.
                canon = entry.split('//')[-1].replace('.s', '.')
                rel_path = entry
            if ext not in RSTB_EXCLUDE_EXTS and canon not in RSTB_EXCLUDE_NAMES:
                log.write(f'{canon},{rstb_value},{rel_path}\n')
    if self._options.get('leave'):
        (mod_dir / 'logs' / '.leave').write_bytes(b'')
    if self._options.get('shrink'):
        (mod_dir / 'logs' / '.shrink').write_bytes(b'')
def log_merged_files_rstb():
    """ Generates an RSTB log for the master BCML modpack containing merged files """
    print('Updating RSTB for merged files...')
    diffs = {}
    files = [item for item in util.get_master_modpack_dir().rglob('**/*') if item.is_file()]
    guess = util.get_settings_bool('guess_merge')
    for file in files:
        # BUG FIX: was `file.parent == 'logs'`, comparing a Path to a str,
        # which is always False — log files were never skipped. Compare the
        # parent directory's name instead.
        if file.parent.name == 'logs':
            continue
        if file.suffix not in RSTB_EXCLUDE_EXTS and file.name not in RSTB_EXCLUDE_NAMES:
            size = calculate_size(file)
            if size == 0 and guess:
                # Exact calculation failed; fall back to estimated sizes.
                if file.suffix in util.AAMP_EXTS:
                    size = guess_aamp_size(file)
                elif file.suffix in ['.bfres', '.sbfres']:
                    size = guess_bfres_size(file)
            canon = util.get_canon_name(file.relative_to(util.get_master_modpack_dir()))
            if canon:
                diffs[canon] = size
    # Also collect sizes for files nested inside SARCs (skipping .ssarc),
    # fanned out over a process pool.
    sarc_files = [file for file in files if util.is_file_sarc(str(file))
                  and file.suffix != '.ssarc']
    if sarc_files:
        num_threads = min(multiprocessing.cpu_count(), len(sarc_files))
        pool = multiprocessing.Pool(processes=num_threads)
        results = pool.map(_get_sizes_in_sarc, sarc_files)
        pool.close()
        pool.join()
        for result in results:
            diffs.update(result)
    with (util.get_master_modpack_dir() / 'logs' / 'rstb.log').open('w', encoding='utf-8') as log:
        log.write('name,size,path\n')
        for canon, size in diffs.items():
            log.write(f'{canon},{size},//\n')
def _clean_sarc_file(file: Path, hashes: dict, tmp_dir: Path):
    """Strip unmodified contents from a mod SARC, deleting it if nothing remains."""
    rel = file.relative_to(tmp_dir)
    canon = util.get_canon_name(rel)
    try:
        stock_file = util.get_game_file(rel)
    except FileNotFoundError:
        return
    try:
        stock_sarc = oead.Sarc(util.unyaz_if_needed(stock_file.read_bytes()))
    except (RuntimeError, ValueError, oead.InvalidDataError):
        return
    if canon not in hashes:
        return
    try:
        mod_sarc = oead.Sarc(util.unyaz_if_needed(file.read_bytes()))
    except (RuntimeError, ValueError, oead.InvalidDataError):
        return
    cleaned = _clean_sarc(stock_sarc, mod_sarc)
    if not cleaned:
        # Nothing modified left inside — drop the SARC entirely.
        file.unlink()
        return
    data = cleaned.write()[1]
    # Recompress when the extension indicates a yaz0 container (but not .ssarc).
    needs_yaz = file.suffix.startswith(".s") and file.suffix != ".ssarc"
    file.write_bytes(util.compress(data) if needs_yaz else data)
def generate_diff(self, mod_dir: Path, modded_files: List[Union[str, Path]]):
    """Map canonical names of modded SARCs to their mod-relative paths."""
    packs = {}
    excluded = ('Dungeon', 'Bootup_', 'AocMainField')
    for item in modded_files:
        # Only loose SARC files on disk count; nested (str) entries are skipped.
        if not isinstance(item, Path) or item.suffix not in util.SARC_EXTS:
            continue
        rel_path = item.relative_to(mod_dir).as_posix()
        canon = util.get_canon_name(rel_path)
        if canon and all(marker not in item.name for marker in excluded):
            packs[canon] = rel_path
    return packs
def _get_modded_file_size(file: Path, mod_dir: Path, guess: bool) -> Dict[str, int]:
    """Return {canonical name: RSTB size} for one modded file, or {} if excluded."""
    try:
        canon = util.get_canon_name(file.relative_to(mod_dir).as_posix())
    except ValueError:
        # File has no canonical game path — nothing to report.
        return {}
    if file.suffix in EXCLUDE_EXTS or canon in EXCLUDE_NAMES:
        return {}
    # .bas/.baslist sizes are always estimated regardless of the guess setting.
    estimate = guess or file.suffix in {".bas", ".baslist"}
    return {canon: calculate_size(file, guess=estimate)}
def threaded_merge(item, verbose: bool) -> (str, dict):
    """Deep merges an individual file, suitable for multiprocessing"""
    # item is a (file, stuff) pair: the mod-relative file path and the
    # collection of diffs/patches to apply to it.
    file, stuff = item
    failures = {}
    # Start from the stock game file, preferring any copy already present in
    # the master modpack.
    base_file = util.get_game_file(file, file.startswith('aoc'))
    if (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    file_ext = os.path.splitext(file)[1]
    # NOTE(review): this repeats the master-modpack preference above for SARC
    # extensions — it looks redundant; confirm before removing.
    if file_ext in util.SARC_EXTS and (util.get_master_modpack_dir() / file).exists():
        base_file = (util.get_master_modpack_dir() / file)
    file_bytes = base_file.read_bytes()
    # Remember whether the source was yaz0-compressed so the output can be
    # recompressed; work on the decompressed bytes.
    yazd = file_bytes[0:4] == b'Yaz0'
    file_bytes = file_bytes if not yazd else util.decompress(file_bytes)
    magic = file_bytes[0:4]
    if magic == b'SARC':
        # Patch the files nested inside the SARC, collecting per-file failures.
        new_sarc, sub_failures = nested_patch(sarc.SARC(file_bytes), stuff)
        del file_bytes
        new_bytes = new_sarc.get_bytes()
        for failure, contents in sub_failures.items():
            print(f'Some patches to {failure} failed to apply.')
            failures[failure] = contents
    else:
        try:
            if magic == b'AAMP':
                aamp_contents = aamp.Reader(file_bytes).parse()
                for change in stuff:
                    aamp_contents = _aamp_merge(aamp_contents, change)
                aamp_bytes = aamp.Writer(aamp_contents).get_bytes()
                del aamp_contents
                # NOTE(review): this compresses when yazd, and the common line
                # after the if/else compresses again — looks like a possible
                # double yaz0 compression for compressed AAMP files; confirm.
                new_bytes = aamp_bytes if not yazd else util.compress(
                    aamp_bytes)
            else:
                raise ValueError(f'{file} is not a SARC or AAMP file.')
        except ValueError:
            # Merge failed: fall back to the (decompressed) original data.
            new_bytes = file_bytes
            del file_bytes
            print(f'Deep merging file {file} failed. No changes were made.')
    new_bytes = new_bytes if not yazd else util.compress(new_bytes)
    output_file = (util.get_master_modpack_dir() / file)
    # If we read from the master modpack itself, remove the old copy first.
    if base_file == output_file:
        output_file.unlink()
    output_file.parent.mkdir(parents=True, exist_ok=True)
    output_file.write_bytes(new_bytes)
    del new_bytes
    if magic == b'SARC' and verbose:
        print(f'Finished patching files inside {file}')
    elif verbose:
        print(f'Finished patching {file}')
    return util.get_canon_name(file), failures
def _convert_actorpack(actor_pack: Path, to_wiiu: bool) -> Union[None, str]:
    """Convert an actor pack between Wii U and Switch endianness in place,
    converting any contained Havok physics files. Returns an error message
    string if some file could not be converted, else None."""
    error = None
    sarc = oead.Sarc(util.unyaz_if_needed(actor_pack.read_bytes()))
    new_sarc = oead.SarcWriter.from_sarc(sarc)
    new_sarc.set_endianness(oead.Endianness.Big if to_wiiu else oead.Endianness.Little)
    for file in sarc.get_files():
        # Only nested physics files need conversion attention.
        if "Physics/" in file.name and "Actor/" not in file.name:
            ext = file.name[file.name.rindex(".") :]
            if ext in NO_CONVERT_EXTS:
                # Unconvertible Havok format: acceptable only if unmodified,
                # in which case the stock copy is substituted.
                if not util.is_file_modded(
                    util.get_canon_name(file.name, allow_no_source=True),
                    file.data,
                    count_new=True,
                ):
                    # NOTE(review): this slice keeps the leading "/" in
                    # actor_name, producing "Actor/Pack//Name.sbactorpack"
                    # below — presumably tolerated by get_game_file; confirm.
                    actor_name = file.name[
                        file.name.rindex("/") : file.name.rindex(".")
                    ]
                    try:
                        pack_path = util.get_game_file(
                            f"Actor/Pack/{actor_name}.sbactorpack"
                        )
                        stock_data = util.get_nested_file_bytes(
                            f"{str(pack_path)}//{file.name}"
                        )
                        if stock_data:
                            new_sarc.files[file.name] = stock_data
                        else:
                            raise FileNotFoundError(file.name)
                    except (FileNotFoundError, AttributeError):
                        error = (
                            "This mod contains a Havok file not supported by the "
                            f"converter: {file.name}"
                        )
                else:
                    error = (
                        "This mod contains a Havok file not supported by the"
                        f" converter: {file.name}"
                    )
            else:
                # AAMP-based physics files need no binary conversion here.
                if file.data[0:4] == b"AAMP":
                    continue
                try:
                    hk = Havok.from_bytes(bytes(file.data))
                except:  # pylint: disable=bare-except
                    return f"Could not parse Havok file {file.name}"
                if to_wiiu:
                    hk.to_wiiu()
                else:
                    hk.to_switch()
                hk.serialize()
                new_sarc.files[file.name] = hk.to_bytes()
    actor_pack.write_bytes(util.compress(new_sarc.write()[1]))
    return error
def generate_diff(self, mod_dir: Path, modded_files: List[Union[str, Path]]):
    """Collect modded SARCs, mapping canonical names to mod-relative paths."""
    print("Finding modified SARCs...")
    packs = {}
    skip_markers = ("Dungeon", "Bootup_", "AocMainField", "beventpack")
    for item in modded_files:
        # Only loose SARC files qualify; string (nested) entries are ignored.
        if not isinstance(item, Path) or item.suffix not in util.SARC_EXTS:
            continue
        rel_path = item.relative_to(mod_dir).as_posix()
        canon = util.get_canon_name(rel_path)
        if canon and all(marker not in item.name for marker in skip_markers):
            packs[canon] = rel_path
    return packs
def generate_diff(self, mod_dir: Path, modded_files: List[Union[str, Path]]):
    """Collect eligible modded SARCs, mapping canonical names to mod paths."""
    print("Finding modified SARCs...")
    packs = {}
    eligible_exts = util.SARC_EXTS - EXCLUDE_EXTS
    for item in modded_files:
        if not isinstance(item, Path) or item.suffix not in eligible_exts:
            continue
        # Skip SARCs handled by other mergers (dungeons, bootup, etc.).
        if any(marker in item.name for marker in SPECIAL):
            continue
        rel_path = item.relative_to(mod_dir).as_posix()
        canon = util.get_canon_name(rel_path)
        if canon:
            packs[canon] = rel_path
    return packs
def generate_diff(self, mod_dir: Path, modded_files: List[Path]):
    """Compute RSTB size values for every modded file, both loose files
    (Path entries) and files nested inside SARCs ("sarc//inner" str entries).
    Returns a dict mapping each entry to its calculated size."""
    rstb_diff = {}
    # Cache of opened SARCs keyed by their path segment, so nested lookups
    # do not reopen the same archive repeatedly.
    open_sarcs = {}
    for file in modded_files:
        if isinstance(file, Path):
            canon = util.get_canon_name(file.relative_to(mod_dir).as_posix())
            if Path(canon).suffix not in RSTB_EXCLUDE_EXTS and\
               Path(canon).name not in RSTB_EXCLUDE_NAMES:
                size = calculate_size(file)
                # .bdmgparam entries are forced to 0 (deleted from the RSTB).
                if file.suffix == '.bdmgparam':
                    size = 0
                if size == 0 and self._options['guess']:
                    # Fall back to estimated sizes when calculation fails.
                    if file.suffix in util.AAMP_EXTS:
                        size = guess_aamp_size(file)
                    elif file.suffix in ['.bfres', '.sbfres']:
                        size = guess_bfres_size(file)
                rstb_diff[file] = size
        elif isinstance(file, str):
            # Nested file notation: "outer.pack//inner.sarc//file.ext".
            parts = file.split('//')
            name = parts[-1]
            # Open the outermost SARC from disk if not already cached.
            if parts[0] not in open_sarcs:
                with (mod_dir / parts[0]).open('rb') as s_file:
                    open_sarcs[parts[0]] = sarc.read_file_and_make_sarc(s_file)
            # Walk down through intermediate nested SARCs.
            # NOTE(review): parts.index(part) finds the FIRST occurrence, so a
            # path with duplicate segment names could resolve the wrong parent.
            for part in parts[1:-1]:
                if part not in open_sarcs:
                    open_sarcs[part] = sarc.SARC(
                        util.unyaz_if_needed(
                            open_sarcs[parts[parts.index(part) - 1]]\
                            .get_file_data(part).tobytes()
                        )
                    )
            ext = Path(name).suffix
            data = util.unyaz_if_needed(open_sarcs[parts[-2]].get_file_data(name).tobytes())
            # NOTE(review): wiiu=True is hard-coded here — confirm Switch
            # handling if this merger supports both consoles.
            rstb_val = rstb.SizeCalculator().calculate_file_size_with_ext(
                data, wiiu=True, ext=ext
            )
            if ext == '.bdmgparam':
                rstb_val = 0
            if rstb_val == 0 and self._options['guess']:
                if ext in util.AAMP_EXTS:
                    rstb_val = guess_aamp_size(data, ext)
                elif ext in ['.bfres', '.sbfres']:
                    rstb_val = guess_bfres_size(data, name)
            rstb_diff[file] = rstb_val
    # NOTE(review): this only unbinds the loop variable each iteration; it
    # does not actually free or close the cached SARCs.
    for open_sarc in open_sarcs:
        del open_sarc
    return rstb_diff
def _check_modded(file: Path, tmp_dir: Path):
    """Return *file* if it differs from the stock game copy, else None."""
    rel = file.relative_to(tmp_dir).as_posix()
    try:
        canon = util.get_canon_name(rel)
    except ValueError:
        # Not a recognized game file path.
        util.vprint(f"Ignored unknown file {rel}")
        return None
    if not util.is_file_modded(canon, file, True):
        # Unmodded DLC MainField maps are deleted outright.
        if "Aoc/0010/Map/MainField" in canon:
            file.unlink()
        util.vprint(f"Ignored unmodded file {canon}")
        return None
    util.vprint(f"Found modded file {canon}")
    return file
def _pack_sarc(folder: Path, tmp_dir: Path, hashes: dict):
    """Repack an extracted SARC folder into a SARC file at the same path,
    keeping only files that are new or differ from the game dump. The folder
    is always deleted; if nothing remains to pack, no file is written."""
    packed = oead.SarcWriter(
        endian=oead.Endianness.Big if util.get_settings("wiiu") else oead.Endianness.Little
    )
    try:
        canon = util.get_canon_name(
            folder.relative_to(tmp_dir).as_posix(), allow_no_source=True
        )
        if canon not in hashes:
            raise FileNotFoundError("File not in game dump")
        stock_file = util.get_game_file(folder.relative_to(tmp_dir))
        try:
            old_sarc = oead.Sarc(util.unyaz_if_needed(stock_file.read_bytes()))
        except (RuntimeError, ValueError, oead.InvalidDataError):
            raise ValueError("Cannot open file from game dump")
        old_files = {f.name for f in old_sarc.get_files()}
    except (FileNotFoundError, ValueError):
        # No stock counterpart to diff against: pack everything as-is.
        for file in {f for f in folder.rglob("**/*") if f.is_file()}:
            packed.files[file.relative_to(folder).as_posix()] = file.read_bytes()
    else:
        # Stock SARC available: include only new or changed files.
        for file in {
            f
            for f in folder.rglob("**/*")
            if f.is_file() and not f.suffix in EXCLUDE_EXTS
        }:
            file_data = file.read_bytes()
            xhash = xxhash.xxh64_intdigest(util.unyaz_if_needed(file_data))
            file_name = file.relative_to(folder).as_posix()
            if file_name in old_files:
                old_hash = xxhash.xxh64_intdigest(
                    util.unyaz_if_needed(old_sarc.get_file(file_name).data)
                )
            if file_name not in old_files or (xhash != old_hash):
                packed.files[file_name] = file_data
    finally:
        # The extracted folder is removed unconditionally; the SARC file is
        # then written at the very same path that was the folder.
        shutil.rmtree(folder)
        if not packed.files:
            # Returning from finally deliberately swallows any in-flight
            # exception (hence the pylint disable).
            return  # pylint: disable=lost-exception
        sarc_bytes = packed.write()[1]
        # NOTE(review): this compresses every ".s*" suffix except ".sarc",
        # which would include ".ssarc" — other helpers in this file exclude
        # ".ssarc" instead; confirm which is intended.
        folder.write_bytes(
            util.compress(sarc_bytes)
            if (folder.suffix.startswith(".s") and not folder.suffix == ".sarc")
            else sarc_bytes
        )
def _clean_sarc(file: Path, hashes: dict, tmp_dir: Path):
    """Remove unmodified files from a mod SARC (legacy sarc-library version),
    deleting the SARC entirely if nothing modified remains."""
    canon = util.get_canon_name(file.relative_to(tmp_dir))
    try:
        stock_file = util.get_game_file(file.relative_to(tmp_dir))
    except FileNotFoundError:
        return
    with stock_file.open('rb') as old_file:
        old_sarc = sarc.read_file_and_make_sarc(old_file)
    if not old_sarc:
        return
    old_files = set(old_sarc.list_files())
    if canon not in hashes:
        return
    with file.open('rb') as s_file:
        base_sarc = sarc.read_file_and_make_sarc(s_file)
    if not base_sarc:
        return
    # NOTE(review): True presumably selects big-endian (Wii U) output — confirm.
    new_sarc = sarc.SARCWriter(True)
    can_delete = True
    for nest_file in base_sarc.list_files():
        # Canonical form of the nested file name (strip the ".s" prefix).
        canon = nest_file.replace('.s', '.')
        ext = Path(canon).suffix
        if ext in {'.yml', '.bak'}:
            continue
        file_data = base_sarc.get_file_data(nest_file).tobytes()
        xhash = xxhash.xxh32(util.unyaz_if_needed(file_data)).hexdigest()
        if nest_file in old_files:
            old_hash = xxhash.xxh32(
                util.unyaz_if_needed(
                    old_sarc.get_file_data(nest_file).tobytes())).hexdigest()
        # Keep files that are new, or whose non-AAMP contents differ from stock.
        if nest_file not in old_files or (xhash != old_hash and ext not in util.AAMP_EXTS):
            can_delete = False
            new_sarc.add_file(nest_file, file_data)
    del old_sarc
    if can_delete:
        # Nothing modified survived the filter: drop the whole SARC.
        del new_sarc
        file.unlink()
    else:
        with file.open('wb') as s_file:
            # Recompress for yaz0 extensions (but not .ssarc).
            if file.suffix.startswith('.s') and file.suffix != '.ssarc':
                s_file.write(util.compress(new_sarc.get_bytes()))
            else:
                new_sarc.write(s_file)
def _clean_sarcs(tmp_dir: Path, hashes: dict, pool: multiprocessing.pool.Pool):
    """Strip unmodified files out of each mod SARC, then refresh packs.json."""

    def locate_packs():
        # All SARC-type files in the mod, excluding anything under options/.
        return {
            item
            for item in tmp_dir.rglob("**/*")
            if item.suffix in CLEAN_EXTS
            and "options" not in item.relative_to(tmp_dir).parts
        }

    log_file = tmp_dir / "logs" / "packs.json"
    packs = locate_packs()
    if packs:
        print("Creating partial packs...")
        pool.map(partial(_clean_sarc_file, hashes=hashes, tmp_dir=tmp_dir), packs)
        # Cleaning may have deleted some SARCs entirely; re-scan.
        packs = locate_packs()
    if packs:
        print("Updating pack log...")
        log_file.parent.mkdir(parents=True, exist_ok=True)
        log_file.write_text(
            dumps(
                {
                    util.get_canon_name(item.relative_to(tmp_dir)): str(
                        item.relative_to(tmp_dir)
                    )
                    for item in packs
                },
                indent=2,
            )
        )
    else:
        try:
            log_file.unlink()
        except FileNotFoundError:
            pass
def find_modded_files(tmp_dir: Path, verbose: bool = False, original_pool: Pool = None) -> List[Union[Path, str]]:
    """
    Detects all of the modified files in an extracted mod

    :param tmp_dir: The path to the base directory of the mod.
    :type tmp_dir: class:`pathlib.Path`
    :param verbose: Specifies whether to print more detailed output
    :type verbose: bool, optional
    :param original_pool: An existing multiprocessing pool to reuse, defaults
        to None (a temporary pool is created and cleaned up)
    :type original_pool: class:`multiprocessing.pool.Pool`, optional
    :returns: Returns a list of modified files, loose files as Paths and
        SARC-nested files as "sarc//inner" strings
    :rtype: list of Union[class:`pathlib.Path`, str]
    """
    modded_files = []
    if isinstance(tmp_dir, str):
        tmp_dir = Path(tmp_dir)
    # A shipped RSTB is regenerated by BCML, so discard any bundled copy.
    rstb_path = tmp_dir / 'content' / 'System' / 'Resource' /\
                'ResourceSizeTable.product.srsizetable'
    if rstb_path.exists():
        rstb_path.unlink()
    # BUG FIX: was `(tmp_dir / 'aoc').exists` (a bound method, always truthy),
    # so this branch ran even for mods without DLC content.
    if (tmp_dir / 'aoc').exists():
        try:
            util.get_aoc_dir()
        except FileNotFoundError as err:
            err.error_text = (
                'This mod uses DLC files, but you do not appear to have the DLC '
                'installed. \nIf you still want to use this mod, unpack it and '
                'remove the "aoc" folder.')
            raise err
        # Pre-extract AocMainField.pack so its contents are scanned as loose files.
        aoc_field = tmp_dir / 'aoc' / '0010' / 'Pack' / 'AocMainField.pack'
        if aoc_field.exists() and aoc_field.stat().st_size > 0:
            with aoc_field.open('rb') as a_file:
                sarc.read_file_and_make_sarc(a_file).extract_to_dir(
                    str(tmp_dir / 'aoc' / '0010'))
            aoc_field.write_bytes(b'')
    for file in tmp_dir.rglob('**/*'):
        if file.is_file():
            canon = util.get_canon_name(file.relative_to(tmp_dir).as_posix())
            if canon is None:
                if verbose:
                    print(
                        f'Ignored unknown file {file.relative_to(tmp_dir).as_posix()}'
                    )
                continue
            if util.is_file_modded(canon, file, True):
                modded_files.append(file)
                if verbose:
                    print(f'Found modded file {canon}')
            else:
                # Unmodded DLC MainField maps are removed outright.
                if 'Aoc/0010/Map/MainField' in canon:
                    file.unlink()
                if verbose:
                    print(f'Ignored unmodded file {canon}')
                continue
    total = len(modded_files)
    print(f'Found {total} modified file{"s" if total != 1 else ""}')
    total = 0
    sarc_files = [
        file for file in modded_files if file.suffix in util.SARC_EXTS
    ]
    if sarc_files:
        print('Scanning files packed in SARCs...')
        # BUG FIX: clamp to at least one worker; cpu_count() - 1 is 0 on a
        # single-core machine and Pool(processes=0) raises ValueError.
        num_threads = max(1, min(len(sarc_files), cpu_count() - 1))
        pool = original_pool or Pool(processes=num_threads)
        modded_sarc_files = pool.map(
            partial(find_modded_sarc_files, tmp_dir=tmp_dir, verbose=verbose),
            sarc_files)
        for files in modded_sarc_files:
            total += len(files)
            modded_files.extend(files)
        if not original_pool:
            pool.close()
            pool.join()
        print(f'Found {total} modified packed file{"s" if total != 1 else ""}')
    return modded_files
def threaded_merge(item) -> Tuple[str, dict]:
    """Deep merges an individual file, suitable for multiprocessing

    :param item: A (file, stuff) pair: the mod-relative file path and the
        shop diff to merge into it.
    :returns: A tuple of the file's canonical name and a dict of any patches
        that failed to apply; ("", {}) if the base game file is missing.
    """
    file, stuff = item
    failures = {}
    try:
        base_file = util.get_game_file(file, file.startswith(util.get_dlc_path()))
    except FileNotFoundError:
        return "", {}
    # Prefer any copy already merged into the master modpack.
    if (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    file_ext = os.path.splitext(file)[1]
    if file_ext in util.SARC_EXTS and (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    file_bytes = base_file.read_bytes()
    # Work on decompressed data; recompression happens once at the end.
    yazd = file_bytes[0:4] == b"Yaz0"
    file_bytes = file_bytes if not yazd else util.decompress(file_bytes)
    magic = file_bytes[0:4]
    if magic == b"SARC":
        new_sarc, sub_failures = nested_patch(oead.Sarc(file_bytes), stuff)
        del file_bytes
        new_bytes = bytes(new_sarc.write()[1])
        for failure, contents in sub_failures.items():
            print(f"Some patches to {failure} failed to apply.")
            failures[failure] = contents
    elif magic == b"AAMP":
        try:
            aamp_contents = ParameterIO.from_binary(file_bytes)
            try:
                aamp_contents = shop_merge(
                    aamp_contents,
                    file_ext.replace(".", ""),
                    stuff.lists["Additions"],
                    stuff.lists["Removals"],
                )
                aamp_bytes = ParameterIO.to_binary(aamp_contents)
            except:  # pylint: disable=bare-except
                # BUG FIX: message previously read "could be merged",
                # inverting its meaning.
                raise RuntimeError(f"AAMP file {file} could not be merged.")
            del aamp_contents
            # BUG FIX: was `aamp_bytes if not yazd else util.compress(aamp_bytes)`,
            # which together with the shared compression step below would have
            # yaz0-compressed the data twice.
            new_bytes = aamp_bytes
        except ValueError:
            # Parsing failed: fall back to the (decompressed) original data.
            new_bytes = file_bytes
            del file_bytes
            print(f"Deep merging file {file} failed. \nNo changes were made.")
    else:
        raise ValueError(f"{file} is not a SARC or AAMP file.")
    new_bytes = new_bytes if not yazd else util.compress(new_bytes)
    output_file = util.get_master_modpack_dir() / file
    # If we read from the master modpack itself, remove the old copy first.
    if base_file == output_file:
        output_file.unlink()
    output_file.parent.mkdir(parents=True, exist_ok=True)
    output_file.write_bytes(new_bytes)
    del new_bytes
    if magic == b"SARC":
        util.vprint(f"Finished patching files inside {file}")
    else:
        util.vprint(f"Finished patching {file}")
    return util.get_canon_name(file), failures