def consolidate_diff_files(diffs: dict) -> dict:
    """
    Consolidates the files which need to be deep merged to avoid any need to
    repeatedly open the same files.
    """
    consolidated_diffs = {}
    for file, diff_list in diffs.items():
        nest = reduce(
            lambda res, cur: {cur: res}, reversed(file.split("//")), diff_list
        )
        util.dict_merge(consolidated_diffs, nest)
    return consolidated_diffs
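# A minimal standalone sketch of the nesting idiom used above: folding a
# "parent//child" style path right-to-left with functools.reduce, so the diff
# payload ends up at the innermost key. The path and payload below are made up
# for illustration.
from functools import reduce

def nest_path(path: str, payload) -> dict:
    # "A//B//C" with payload X becomes {"A": {"B": {"C": X}}}
    return reduce(lambda res, cur: {cur: res}, reversed(path.split("//")), payload)

assert nest_path("Pack/Bootup.pack//Ecosystem/StatusEffectList.sbyml", ["diff"]) == {
    "Pack/Bootup.pack": {"Ecosystem/StatusEffectList.sbyml": ["diff"]}
}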
def perform_merge(self):
    merged_effects = util.get_master_modpack_dir() / "logs" / "effects.byml"
    print("Loading status effect mods...")
    diffs = self.consolidate_diffs(self.get_all_diffs())
    if not diffs:
        print("No status effect merging necessary...")
        if merged_effects.exists():
            merged_effects.unlink()
            try:
                stock_effects = util.get_nested_file_bytes(
                    (
                        str(util.get_game_file("Pack/Bootup.pack"))
                        + "//Ecosystem/StatusEffectList.sbyml"
                    ),
                    unyaz=False,
                )
                util.inject_file_into_sarc(
                    "Ecosystem/StatusEffectList.sbyml",
                    stock_effects,
                    "Pack/Bootup.pack",
                )
                del stock_effects
            except FileNotFoundError:
                pass
        return
    util.vprint("All status effect diffs:")
    util.vprint(diffs)

    effects = get_stock_effects()
    util.dict_merge(effects, diffs, overwrite_lists=True)
    del diffs

    print("Writing new effects list...")
    effect_bytes = oead.byml.to_binary(
        oead.byml.Array([effects]), big_endian=util.get_settings("wiiu")
    )
    del effects
    util.inject_file_into_sarc(
        "Ecosystem/StatusEffectList.sbyml",
        util.compress(effect_bytes),
        "Pack/Bootup.pack",
        create_sarc=True,
    )

    print("Saving status effect merge log...")
    merged_effects.parent.mkdir(parents=True, exist_ok=True)
    merged_effects.write_bytes(effect_bytes)

    print("Updating RSTB...")
    rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(
        effect_bytes, True, ".byml"
    )
    del effect_bytes
    rstable.set_size("Ecosystem/StatusEffectList.byml", rstb_size)
def get_mod_diff(self, mod: util.BcmlMod):
    diff: Dict[str, dict] = {}
    if self.is_mod_logged(mod):
        util.dict_merge(
            diff,
            json.loads((mod.path / "logs" / self._log_name).read_text("utf-8")),
        )
    for opt in {d for d in (mod.path / "options").glob("*") if d.is_dir()}:
        if (opt / "logs" / self._log_name).exists():
            util.dict_merge(
                diff,
                json.loads((opt / "logs" / self._log_name).read_text("utf-8")),
            )
    return diff
def get_mod_diff(self, mod: util.BcmlMod):
    diff = oead.byml.Hash()
    if self.is_mod_logged(mod):
        diff = oead.byml.from_text(
            (mod.path / "logs" / self._log_name).read_text("utf-8")
        )
    for opt in {d for d in (mod.path / "options").glob("*") if d.is_dir()}:
        if (opt / "logs" / self._log_name).exists():
            util.dict_merge(
                diff,
                oead.byml.from_text(
                    (opt / "logs" / self._log_name).read_text("utf-8")
                ),
                overwrite_lists=True,
            )
    return diff
def consolidate_diffs(self, diffs: list) -> dict:
    if not diffs:
        return {}
    all_diffs_pio: ParameterIO = reduce(util.pio_merge, diffs)
    all_diffs: dict = {}
    for file_key, file_list in all_diffs_pio.lists.items():
        all_diffs[file_key] = file_list
    consolidated_diffs: dict = {}
    for file_key, diff_list in all_diffs.items():
        file_name = all_diffs_pio.objects["Filenames"].params[file_key].v
        nest = reduce(
            lambda res, cur: {cur: res}, reversed(file_name.split("//")), diff_list
        )
        util.dict_merge(consolidated_diffs, nest)
    return consolidated_diffs
def generate_diff(self, mod_dir: Path, modded_files: List[Union[str, Path]]):
    diff = {}
    nested_files = {}
    if not self._table:
        self._table = get_stock_rstb()
    pool = self._pool or Pool(maxtasksperchild=500)
    for nest in {n for n in modded_files if isinstance(n, str)}:
        util.dict_merge(
            nested_files,
            reduce(
                lambda res, cur: {cur: res} if res is not None else [cur],
                reversed(nest.split("//")),
                None,
            ),
        )
    diff.update(
        {
            k: v
            for r in pool.map(
                partial(
                    _get_modded_file_size,
                    mod_dir=mod_dir,
                    guess=not self._options.get("no_guess", False),
                ),
                {f for f in modded_files if isinstance(f, Path)},
            )
            if r is not None
            for k, v in r.items()
            if not self.should_exclude(k, v)
        }
    )
    diff.update(
        {
            k: v
            for r in pool.starmap(
                partial(
                    _get_nest_file_sizes,
                    guess=not self._options.get("no_guess", False),
                    mod_dir=mod_dir,
                ),
                nested_files.items(),
            )
            if r is not None
            for k, v in r.items()
            if not self.should_exclude(k, v)
        }
    )
    if not self._pool:
        pool.close()
        pool.join()
    return diff
def consolidate_diffs(self, diffs: ParameterIO):
    if not diffs:
        return None
    consolidated: Dict[str, Any] = {}
    for _, file in diffs.objects["Filenames"].params.items():
        try:
            util.dict_merge(
                consolidated,
                reduce(
                    lambda res, cur: {cur: res},
                    reversed(file.v.split("//")),
                    diffs.lists[file.v],
                ),
            )
        except KeyError:
            util.vprint(diffs)
            raise Exception(f"{_}: {file} in diff lists: {file.v in diffs.lists}")
    return consolidated
def generate_diff(self, mod_dir: Path, modded_files: List[Union[str, Path]]):
    print("Detecting general changes to AAMP files...")
    aamps = {
        m
        for m in modded_files
        if isinstance(m, str)
        and m[m.rindex(".") :] in (util.AAMP_EXTS - HANDLED)
        and "Dummy" not in m
        and "CDungeon" not in m
    }
    if not aamps:
        return None

    consolidated: Dict[str, Any] = {}
    for aamp in aamps:
        util.dict_merge(
            consolidated,
            reduce(
                lambda res, cur: {cur: res if res is not None else {}},  # type: ignore
                reversed(aamp.split("//")),
                None,
            ),
        )

    this_pool = self._pool or Pool(maxtasksperchild=500)
    results = this_pool.starmap(
        partial(get_aamp_diffs, tmp_dir=mod_dir), list(consolidated.items())
    )
    if not self._pool:
        this_pool.close()
        this_pool.join()
    del consolidated
    del aamps

    diffs = ParameterIO()
    diffs.objects["FileTable"] = ParameterObject()
    i: int = 0
    for file, diff in sorted(
        (k, v) for r in [r for r in results if r is not None] for k, v in r.items()
    ):
        diffs.objects["FileTable"].params[f"File{i}"] = Parameter(file)
        diffs.lists[file] = diff
        i += 1
    return diffs
def generate_diff(self, mod_dir: Path, modded_files: List[Union[str, Path]]):
    print("Detecting general changes to AAMP files...")
    aamps = {
        m
        for m in modded_files
        if isinstance(m, str) and m[m.rindex(".") :] in (util.AAMP_EXTS - HANDLED)
    }
    if not aamps:
        return None

    consolidated = {}
    for aamp in aamps:
        util.dict_merge(
            consolidated,
            reduce(
                lambda res, cur: {cur: res} if res is not None else [cur],
                reversed(aamp.split("//")),
                None,
            ),
        )

    this_pool = self._pool or Pool()
    results = this_pool.starmap(
        partial(get_aamp_diffs, tmp_dir=mod_dir), list(consolidated.items())
    )
    if not self._pool:
        this_pool.close()
        this_pool.join()
    del consolidated
    del aamps

    diffs = ParameterIO()
    diffs.objects["FileTable"] = ParameterObject()
    i: int = 0
    for result in results:
        if not result:
            continue
        for file, diff in result.items():
            diffs.objects["FileTable"].params[f"File{i}"] = Parameter(file)
            diffs.lists[file] = diff
            i += 1
    return diffs
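# The two None-seeded folds used in the generate_diff variants above differ only
# in what they leave at the innermost level of the nested path: a single-item
# list of the leaf name, or an empty dict. A small illustration with a made-up
# path string:
from functools import reduce

leaf_list = reduce(
    lambda res, cur: {cur: res} if res is not None else [cur],
    reversed("A.pack//B.sbactorpack//C.bxml".split("//")),
    None,
)
# {"A.pack": {"B.sbactorpack": ["C.bxml"]}}

leaf_dict = reduce(
    lambda res, cur: {cur: res if res is not None else {}},
    reversed("A.pack//B.sbactorpack//C.bxml".split("//")),
    None,
)
# {"A.pack": {"B.sbactorpack": {"C.bxml": {}}}}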
def merge_drop_file(file: str, drop_table: dict):
    base_path = file[: file.index("//")]
    sub_path = file[file.index("//") :]
    try:
        ref_drop = _drop_to_dict(
            ParameterIO.from_binary(
                util.get_nested_file_bytes(
                    str(util.get_game_file(base_path)) + sub_path
                )
            )
        )
        for table in set(ref_drop.keys()):
            if table not in drop_table:
                del ref_drop[table]
            else:
                for item in set(ref_drop[table]["items"].keys()):
                    if item not in drop_table[table]["items"]:
                        del ref_drop[table]["items"][item]
        util.dict_merge(ref_drop, drop_table)
        drop_table = ref_drop
    except (FileNotFoundError, AttributeError, RuntimeError):
        pass
    actor_name = re.search(r"Pack\/(.+)\.sbactorpack", file).groups()[0]
    pio = _dict_to_drop(drop_table)
    util.inject_files_into_actor(actor_name, {file.split("//")[-1]: pio.to_binary()})
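# Illustration of the pruning step above with plain dicts: tables and items
# present in the stock (reference) drop data but absent from the modded table
# are removed before the deep merge, so deletions made by the mod survive. The
# table layout here is an assumption mirroring what _drop_to_dict returns.
ref_drop = {
    "Normal": {"items": {"Item_A": 50.0, "Item_B": 50.0}},
    "Rare": {"items": {"Item_C": 100.0}},
}
modded = {"Normal": {"items": {"Item_A": 100.0}}}

for table in set(ref_drop):
    if table not in modded:
        del ref_drop[table]
    else:
        for item in set(ref_drop[table]["items"]):
            if item not in modded[table]["items"]:
                del ref_drop[table]["items"][item]
# ref_drop is now {"Normal": {"items": {"Item_A": 50.0}}}; deep-merging the
# modded table over it yields the final drop table.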
def consolidate_gamedata(gamedata: oead.Sarc) -> dict:
    data = {}
    for file in gamedata.get_files():
        util.dict_merge(data, oead.byml.from_binary(file.data))
    del gamedata
    return data
def perform_merge(self):
    force = self._options.get("force", False)
    glog_path = util.get_master_modpack_dir() / "logs" / "gamedata.log"

    modded_entries = self.consolidate_diffs(self.get_all_diffs())
    util.vprint("All gamedata diffs:")
    util.vprint(modded_entries)
    if not modded_entries:
        print("No gamedata merging necessary.")
        if glog_path.exists():
            glog_path.unlink()
        if (util.get_master_modpack_dir() / "logs" / "gamedata.sarc").exists():
            (util.get_master_modpack_dir() / "logs" / "gamedata.sarc").unlink()
        return
    if glog_path.exists() and not force:
        with glog_path.open("r") as l_file:
            if xxhash.xxh64_hexdigest(str(modded_entries)) == l_file.read():
                print("No gamedata merging necessary.")
                return

    print("Loading stock gamedata...")
    gamedata = consolidate_gamedata(get_stock_gamedata())
    merged_entries = {
        data_type: oead.byml.Hash({entry["DataName"]: entry for entry in entries})
        for data_type, entries in gamedata.items()
    }
    del gamedata

    print("Merging changes...")
    for data_type in {d for d in merged_entries if d in modded_entries}:
        util.dict_merge(
            merged_entries[data_type],
            modded_entries[data_type]["add"],
            shallow=True,
        )
        for entry in modded_entries[data_type]["del"]:
            try:
                del merged_entries[data_type][entry]
            except KeyError:
                continue

    merged_entries = oead.byml.Hash(
        {
            data_type: oead.byml.Array([value for _, value in entries.items()])
            for data_type, entries in merged_entries.items()
        }
    )

    print("Creating and injecting new gamedata.sarc...")
    new_gamedata = oead.SarcWriter(
        endian=oead.Endianness.Big
        if util.get_settings("wiiu")
        else oead.Endianness.Little
    )
    for data_type in merged_entries:
        num_files = ceil(len(merged_entries[data_type]) / 4096)
        for i in range(num_files):
            end_pos = (i + 1) * 4096
            if end_pos > len(merged_entries[data_type]):
                end_pos = len(merged_entries[data_type])
            new_gamedata.files[f"/{data_type}_{i}.bgdata"] = oead.byml.to_binary(
                oead.byml.Hash(
                    {data_type: merged_entries[data_type][i * 4096 : end_pos]}
                ),
                big_endian=util.get_settings("wiiu"),
            )
    new_gamedata_bytes = new_gamedata.write()[1]
    del new_gamedata
    util.inject_file_into_sarc(
        "GameData/gamedata.ssarc",
        util.compress(new_gamedata_bytes),
        "Pack/Bootup.pack",
        create_sarc=True,
    )
    (util.get_master_modpack_dir() / "logs").mkdir(parents=True, exist_ok=True)
    (util.get_master_modpack_dir() / "logs" / "gamedata.sarc").write_bytes(
        new_gamedata_bytes
    )

    print("Updating RSTB...")
    rstable.set_size(
        "GameData/gamedata.sarc",
        rstable.calculate_size("GameData/gamedata.sarc", new_gamedata_bytes),
    )
    del new_gamedata_bytes

    glog_path.parent.mkdir(parents=True, exist_ok=True)
    with glog_path.open("w", encoding="utf-8") as l_file:
        l_file.write(xxhash.xxh64_hexdigest(str(modded_entries)))
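# The bgdata split above packs at most 4096 entries per file. The same slicing
# logic on a plain list, with made-up flag names and file name, looks like:
from math import ceil

entries = [f"Flag_{n}" for n in range(10000)]
chunks = {
    f"/bool_data_{i}.bgdata": entries[i * 4096 : min((i + 1) * 4096, len(entries))]
    for i in range(ceil(len(entries) / 4096))
}
# -> three files holding 4096, 4096, and 1808 entries respectively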
def consolidate_diffs(self, diffs: list):
    all_diffs = oead.byml.Hash()
    for diff in diffs:
        util.dict_merge(all_diffs, diff, overwrite_lists=True)
    return all_diffs
def consolidate_diffs(self, diffs: list):
    all_diffs: Dict[str, oead.byml.Hash] = {}
    for diff in diffs:
        util.dict_merge(all_diffs, diff, overwrite_lists=True)
    return oead.byml.Hash(all_diffs)
def perform_merge(self):
    merged_areadata = util.get_master_modpack_dir() / "logs" / "areadata.byml"
    areadata_merge_log = util.get_master_modpack_dir() / "logs" / "areadata.log"

    print("Loading area data mods...")
    modded_areadata = self.consolidate_diffs(self.get_all_diffs())
    areadata_mod_hash = hash(str(modded_areadata))
    if not modded_areadata:
        print("No area data merging necessary")
        if merged_areadata.exists():
            merged_areadata.unlink()
            areadata_merge_log.unlink()
            try:
                stock_areadata = util.get_nested_file_bytes(
                    (
                        str(util.get_game_file("Pack/Bootup.pack"))
                        + "//Ecosystem/AreaData.sbyml"
                    ),
                    unyaz=False,
                )
                util.inject_file_into_sarc(
                    "Ecosystem/AreaData.sbyml",
                    stock_areadata,
                    "Pack/Bootup.pack",
                )
            except FileNotFoundError:
                pass
        return
    if (
        areadata_merge_log.exists()
        and areadata_merge_log.read_text() == str(areadata_mod_hash)
    ):
        print("No area data merging necessary")
        return

    new_areadata = get_stock_areadata()
    util.dict_merge(new_areadata, modded_areadata, overwrite_lists=True)

    print("Writing new area data...")
    areadata_bytes = oead.byml.to_binary(
        oead.byml.Array(
            [v for _, v in sorted(new_areadata.items(), key=lambda x: int(x[0]))]
        ),
        big_endian=util.get_settings("wiiu"),
    )
    del new_areadata
    util.inject_file_into_sarc(
        "Ecosystem/AreaData.sbyml",
        util.compress(areadata_bytes),
        "Pack/Bootup.pack",
        create_sarc=True,
    )

    print("Saving area data merge log...")
    areadata_merge_log.parent.mkdir(parents=True, exist_ok=True)
    areadata_merge_log.write_text(str(areadata_mod_hash))
    merged_areadata.write_bytes(areadata_bytes)

    print("Updating RSTB...")
    rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(
        bytes(areadata_bytes), True, ".byml"
    )
    del areadata_bytes
    rstable.set_size("Ecosystem/AreaData.byml", rstb_size)
def consolidate_diffs(self, diffs):
    consolidated = {}
    for diff in diffs:
        util.dict_merge(consolidated, diff)
    return consolidated
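# util.dict_merge is internal to the project; as a rough sketch of the deep
# merge behaviour the callers above rely on (recursive merge of nested dicts,
# with an assumed overwrite_lists flag controlling whether list values are
# replaced or concatenated), something like:
def deep_merge(dct: dict, other: dict, overwrite_lists: bool = False) -> None:
    # Hypothetical stand-in, not the actual util.dict_merge implementation.
    for key, value in other.items():
        if isinstance(value, dict) and isinstance(dct.get(key), dict):
            deep_merge(dct[key], value, overwrite_lists)
        elif (
            isinstance(value, list)
            and isinstance(dct.get(key), list)
            and not overwrite_lists
        ):
            dct[key] = dct[key] + value
        else:
            dct[key] = value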