def get_named_pio(shop: ParameterIO, shop_type: str) -> ParameterIO:
    named_pio = ParameterIO()
    shop_keys = EXT_PARAMS[shop_type]
    for table_key, table_obj in shop.objects.items():
        if table_key.hash == oead.aamp.Name("Header").hash:
            # Collect the table names declared in the header.
            tablenames = ParameterObject()
            tablenum = table_obj.params["TableNum"].v
            for idx in range(1, tablenum + 1):
                table_name = str(table_obj.params[f"Table{idx:02}"].v)
                tablenames.params[oead.aamp.Name(table_name)] = table_name
            named_pio.objects["TableNames"] = tablenames
            continue
        table_max = table_obj.params["ColumnNum"].v
        table_obj_new = ParameterList()
        for idx in range(1, table_max + 1):
            # brecipe entries use 2-digit suffixes, bshop entries 3-digit ones.
            if shop_type == "brecipe":
                entry_key = "%02d" % idx
            elif shop_type == "bshop":
                entry_key = "%03d" % idx
            else:
                raise KeyError(shop_type)
            entry_value = ParameterObject()
            try:
                for curr_shop_key in shop_keys:
                    no_shop_key = curr_shop_key + entry_key
                    entry_value.params[curr_shop_key] = table_obj.params[no_shop_key]
            except KeyError:
                continue
            # Re-key the entry by its item name instead of its slot number.
            table_obj_new.objects[str(entry_value.params["ItemName"].v)] = entry_value
        named_pio.lists[table_key] = table_obj_new
    return named_pio
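# Illustrative note (hypothetical values, not taken from game data): stock shop
# AAMPs key entries positionally, e.g.
#   objects["TableA"]: {ItemName001: "Item_Fruit_A", ItemPrice001: 12, ...}
# get_named_pio re-keys each entry by its item name,
#   lists["TableA"].objects["Item_Fruit_A"]: {ItemName: ..., ItemPrice: ...}
# so shops can be diffed and merged item-by-item regardless of slot order.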
def make_bshop(plist: ParameterList) -> ParameterIO:
    bshop = ParameterIO()
    bshop.type = "xml"
    # `shop_keys` was undefined here; assuming it should come from EXT_PARAMS,
    # matching get_named_pio.
    shop_keys = EXT_PARAMS["bshop"]
    tables: List[str] = [
        str(t.v) for _, t in plist.objects["TableNames"].params.items()
    ]
    bshop.objects["Header"] = ParameterObject()
    bshop.objects["Header"].params["TableNum"] = Parameter(len(tables))
    for i, table in enumerate(tables, 1):
        table_hash = crc32(table.encode())
        bshop.objects["Header"].params[f"Table{i:02d}"] = Parameter(
            FixedSafeString64(table)
        )
        table_pobj = ParameterObject()
        table_pobj.params["ColumnNum"] = Parameter(
            len(plist.lists[table_hash].objects)
        )
        for j, item in enumerate(
            [item for _, item in plist.lists[table_hash].objects.items()], 1
        ):
            table_pobj.params[f"ItemSort{j:03d}"] = Parameter(j - 1)
            for shop_key in shop_keys:
                table_pobj.params[f"{shop_key}{j:03d}"] = item.params[shop_key]
        if table_pobj.params:
            bshop.objects[table_hash] = table_pobj
    return bshop
def log_drop_file(file: str, mod_dir: Path):
    if "Bootup.pack" in file:
        return {}
    drop = ParameterIO.from_binary(
        util.get_nested_file_bytes(str(mod_dir) + "/" + file)
    )
    drop_table = _drop_to_dict(drop)
    del drop
    try:
        base_file = file[: file.index("//")]
        sub_file = file[file.index("//") :]
        ref_drop = ParameterIO.from_binary(
            util.get_nested_file_bytes(str(util.get_game_file(base_file)) + sub_file)
        )
        ref_table = _drop_to_dict(ref_drop)
        del ref_drop
        for table, contents in drop_table.items():
            if table not in ref_table:
                continue
            for item, prob in {
                (i, p)
                for i, p in contents["items"].items()
                if i in ref_table[table]["items"]
            }:
                if prob == ref_table[table]["items"][item]:
                    # Mark unmodified probabilities so rem_underride can
                    # restore the stock values on merge.
                    drop_table[table]["items"][item] = util.UNDERRIDE
        del ref_table
    except (
        FileNotFoundError,
        oead.InvalidDataError,
        AttributeError,
        RuntimeError,
        ValueError,
    ):
        util.vprint(f"Could not load stock {file}")
    return {file: drop_table}
def _get_diffs_from_sarc(
    sarc: Sarc, ref_sarc: Sarc, edits: Union[dict, list], path: str
) -> dict:
    diffs = {}
    if isinstance(edits, dict):
        # Dict values are edits nested inside a sub-SARC; recurse into it.
        for file, sub_edits in edits.items():
            try:
                rsub_sarc = Sarc(util.unyaz_if_needed(ref_sarc.get_file(file).data))
            except (AttributeError, InvalidDataError, ValueError, RuntimeError) as err:
                util.vprint(f'Skipping "{path}//{file}", {err}')
                continue
            sub_sarc = Sarc(util.unyaz_if_needed(sarc.get_file(file).data))
            diffs.update(
                _get_diffs_from_sarc(sub_sarc, rsub_sarc, sub_edits, path + "//" + file)
            )
            del sub_sarc
            del rsub_sarc
    else:
        # A list is a flat set of AAMP files to diff directly.
        for file in edits:
            full_path = f"{path}//{file}"
            try:
                ref_pio = ParameterIO.from_binary(ref_sarc.get_file(file).data)
            except AttributeError:
                continue
            pio = ParameterIO.from_binary(sarc.get_file(file).data)
            diffs.update({full_path: get_aamp_diff(pio, ref_pio)})
    return diffs
def _get_diffs_from_sarc(sarc: Sarc, ref_sarc: Sarc, edits: dict, path: str) -> dict:
    diffs = {}
    for file, sub_edits in edits.items():
        if sub_edits:
            # Non-empty edits mean a nested sub-SARC; recurse into it.
            try:
                rsub_sarc = Sarc(util.unyaz_if_needed(ref_sarc.get_file(file).data))
            except (AttributeError, InvalidDataError, ValueError, RuntimeError) as err:
                util.vprint(f'Skipping "{path}//{file}", {err}')
                continue
            sub_sarc = Sarc(util.unyaz_if_needed(sarc.get_file(file).data))
            diffs.update(
                _get_diffs_from_sarc(sub_sarc, rsub_sarc, sub_edits, path + "//" + file)
            )
            del sub_sarc
            del rsub_sarc
        else:
            full_path = f"{path}//{file}"
            try:
                ref_pio = ParameterIO.from_binary(ref_sarc.get_file(file).data)
            except AttributeError:
                continue
            try:
                pio = ParameterIO.from_binary(sarc.get_file(file).data)
            except AttributeError as err:
                raise ValueError(
                    f"Failed to read nested file:\n{path}//{file}"
                ) from err
            except (ValueError, RuntimeError, InvalidDataError) as err:
                raise ValueError(
                    f"Failed to parse AAMP file:\n{path}//{file}"
                ) from err
            diffs.update({full_path: get_aamp_diff(pio, ref_pio)})
    return diffs
def _dict_to_drop(drop_dict: dict) -> ParameterIO:
    pio = ParameterIO()
    pio.type = "xml"
    header = ParameterObject()
    header.params["TableNum"] = Parameter(len(drop_dict))
    for i, table in enumerate(drop_dict.keys()):
        header.params[f"Table{i + 1:02}"] = Parameter(oead.FixedSafeString64(table))
    pio.objects["Header"] = header
    for table, contents in drop_dict.items():
        table_obj = ParameterObject()
        table_obj.params["RepeatNumMin"] = Parameter(contents["repeat_num_min"])
        table_obj.params["RepeatNumMax"] = Parameter(contents["repeat_num_max"])
        table_obj.params["ApproachType"] = Parameter(contents["approach_type"])
        table_obj.params["OccurrenceSpeedType"] = Parameter(
            contents["occurrence_speed_type"]
        )
        table_obj.params["ColumnNum"] = Parameter(len(contents["items"]))
        for idx, item in enumerate(contents["items"]):
            table_obj.params[f"ItemName{idx + 1:02}"] = Parameter(
                oead.FixedSafeString64(item)
            )
            table_obj.params[f"ItemProbability{idx + 1:02}"] = Parameter(
                contents["items"][item]
            )
        pio.objects[table] = table_obj
    return pio
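# A minimal sketch of the dict shape _dict_to_drop consumes (hypothetical
# values; the keys mirror what the loops above read):
_example_drop_table = {
    "Normal": {
        "repeat_num_min": 1,
        "repeat_num_max": 1,
        "approach_type": 0,
        "occurrence_speed_type": 0,
        "items": {"Item_Enemy_00": 100.0},
    }
}
# _dict_to_drop(_example_drop_table) yields a ParameterIO with a "Header"
# object (TableNum=1, Table01="Normal") and one "Normal" table object.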
def shop_merge(
    base: ParameterIO, ext: str, adds: ParameterList, rems: ParameterList
) -> ParameterIO:
    base_sorted = get_named_pio(base, ext)
    base_sorted = util.pio_subtract(base_sorted, rems)
    base_sorted = util.pio_merge(base_sorted, adds)
    if ext == "bshop":
        fix_itemsorts(base_sorted)

    merged = ParameterIO()
    merged.objects["Header"] = ParameterObject()
    merged.objects["Header"].params["TableNum"] = Parameter(len(base_sorted.lists))
    for table_no, (table_key, table_list) in enumerate(base_sorted.lists.items(), 1):
        merged.objects["Header"].params[f"Table{table_no:02}"] = Parameter(
            base_sorted.objects["TableNames"].params[table_key].v
        )
        merged_table_obj = ParameterObject()
        merged_table_obj.params["ColumnNum"] = Parameter(len(table_list.objects))
        for _, item_obj in table_list.objects.items():
            # Rebuild the positional suffix from the entry's ItemSort value.
            if ext == "brecipe":
                entry_key = "%02d" % (item_obj.params["ItemSort"].v + 1)
            elif ext == "bshop":
                entry_key = "%03d" % (item_obj.params["ItemSort"].v + 1)
            else:
                raise KeyError(ext)
            for param_key in EXT_PARAMS[ext]:
                param_name = param_key + entry_key
                merged_table_obj.params[param_name] = item_obj.params[param_key]
        merged.objects[table_key] = merged_table_obj
    return merged
def main() -> None:
    parser = argparse.ArgumentParser(
        description="Tool for converting CookData in LoZ:BotW"
    )
    parser.add_argument(
        "file",
        help="Filename to be converted (accepts wildcards for converting multiple files)",
    )
    parser.add_argument("output", help="Output type: bas or yml")
    parser.add_argument(
        "-n", "--names", help="Output with human-readable names", action="store_true"
    )
    parser.add_argument(
        "-d", "--digits", help="Output with machine-readable values", action="store_true"
    )
    args = parser.parse_args()
    if not args.names and not args.digits:
        raise RuntimeError("-n or -d must be specified!")
    if args.names and args.digits:
        raise RuntimeError("Only one of -n and -d may be specified!")

    output_str: str = args.output.lower()
    output_yaml: bool = output_str in ("yml", "yaml")
    output_aamp: bool = output_str in ("bas", "aamp")
    if not output_yaml and not output_aamp:
        raise RuntimeError("Output type must be one of: 'yml', 'yaml', 'bas', or 'aamp'")

    folder = Path(args.file).parent.resolve()
    filename = Path(args.file).name
    for file in folder.glob(filename):
        ext: str = file.suffix
        if ext == ".bas":
            pio = ParameterIO.from_binary(file.read_bytes())
        elif ext == ".yml":
            pio = ParameterIO.from_text(file.read_text())
        else:
            continue  # ignore files that are neither AAMP binaries nor YAML
        try:
            if args.names:
                convert.to_names(pio)
            else:
                convert.to_numbers(pio)
        except Exception:
            output_type: str = "names" if args.names else "numbers"
            print(f"{file.name} already had {output_type}! Skipping...")
            continue
        if output_yaml:
            new_file = file.with_suffix(".yml")
            new_file.write_text(ParameterIO.to_text(pio))
        else:
            new_file = file.with_suffix(".bas")
            new_file.write_bytes(ParameterIO.to_binary(pio))
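# Hedged usage sketch (the script name is hypothetical):
#   python cookdata_convert.py "CookData*.bas" yml -n
# globs CookData*.bas in the given folder, converts each parameter tree to
# human-readable names, and writes each result alongside it as .yml text.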
def get_mod_diff(self, mod: util.BcmlMod) -> Optional[ParameterIO]:
    separate_diffs = []
    if self.is_mod_logged(mod):
        separate_diffs.append(
            ParameterIO.from_binary((mod.path / "logs" / self._log_name).read_bytes())
        )
    for opt in {d for d in (mod.path / "options").glob("*") if d.is_dir()}:
        if (opt / "logs" / self._log_name).exists():
            separate_diffs.append(
                ParameterIO.from_binary(
                    (opt / "logs" / self._log_name).read_bytes()
                )
            )
    return reduce(util.pio_merge, separate_diffs) if separate_diffs else None
def nested_patch(pack: oead.Sarc, nest: dict) -> Tuple[oead.SarcWriter, dict]:
    new_sarc: oead.SarcWriter = oead.SarcWriter.from_sarc(pack)
    failures: dict = {}
    for file, stuff in nest.items():
        file_bytes = pack.get_file(file).data
        yazd = file_bytes[0:4] == b"Yaz0"
        file_bytes = util.decompress(file_bytes) if yazd else file_bytes
        if isinstance(stuff, dict):
            sub_sarc = oead.Sarc(file_bytes)
            new_sub_sarc, sub_failures = nested_patch(sub_sarc, stuff)
            for failure in sub_failures:
                failures[file + "//" + failure] = sub_failures[failure]
            del sub_sarc
            new_bytes = bytes(new_sub_sarc.write()[1])
            new_sarc.files[file] = new_bytes if not yazd else util.compress(new_bytes)
        elif isinstance(stuff, ParameterList):
            try:
                if file_bytes[0:4] == b"AAMP":
                    aamp_contents = ParameterIO.from_binary(file_bytes)
                    try:
                        file_ext = os.path.splitext(file)[1]
                        aamp_contents = shop_merge(
                            aamp_contents,
                            file_ext.replace(".", ""),
                            stuff.lists["Additions"],
                            stuff.lists["Removals"],
                        )
                        aamp_bytes = ParameterIO.to_binary(aamp_contents)
                    except Exception as err:  # pylint: disable=broad-except
                        raise RuntimeError(
                            f"AAMP file {file} could not be merged."
                        ) from err
                    del aamp_contents
                    new_bytes = aamp_bytes if not yazd else util.compress(aamp_bytes)
                    cache_merged_shop(file, new_bytes)
                else:
                    raise ValueError("Wait, what the heck, this isn't an AAMP file?!")
            except ValueError:
                new_bytes = pack.get_file(file).data
                print(f"Deep merging {file} failed. No changes were made.")
            new_sarc.files[file] = oead.Bytes(new_bytes)
    return new_sarc, failures
def get_mod_diff(self, mod: util.BcmlMod):
    diff = None
    if self.is_mod_logged(mod):
        diff = ParameterIO.from_binary(
            (mod.path / "logs" / self._log_name).read_bytes()
        )
    for opt in {d for d in (mod.path / "options").glob("*") if d.is_dir()}:
        if (opt / "logs" / self._log_name).exists():
            if not diff:
                diff = ParameterIO()
            merge_plists(
                diff,
                ParameterIO.from_binary(
                    (opt / "logs" / self._log_name).read_bytes()
                ),
                True,
            )
    return diff
def merge_drop_file(file: str, drop_table: dict):
    base_path = file[: file.index("//")]
    sub_path = file[file.index("//") :]
    try:
        ref_drop = _drop_to_dict(
            ParameterIO.from_binary(
                util.get_nested_file_bytes(
                    str(util.get_game_file(base_path)) + sub_path
                )
            )
        )
        # Trim the stock table down to what the mod actually touches, then
        # overlay the modded values.
        for table in set(ref_drop.keys()):
            if table not in drop_table:
                del ref_drop[table]
            else:
                for item in set(ref_drop[table]["items"].keys()):
                    if item not in drop_table[table]["items"]:
                        del ref_drop[table]["items"][item]
        util.dict_merge(ref_drop, drop_table)
        drop_table = ref_drop
    except (FileNotFoundError, AttributeError, RuntimeError):
        pass
    actor_name_matches = re.search(r"Pack\/(.+)\.sbactorpack", file)
    if actor_name_matches:
        actor_name = actor_name_matches.groups()[0]
    else:
        raise ValueError(f"No actor name found in {file}")
    pio = _dict_to_drop(drop_table)
    util.inject_files_into_actor(actor_name, {file.split("//")[-1]: pio.to_binary()})
def generate_pio(self) -> ParameterIO:
    pio = ParameterIO.from_binary(self._aiprog.to_binary())
    # Rebuild each category list (AI_0, Action_0, ...) from its staged entries.
    for name, entries in (
        ("AI", self._ais),
        ("Action", self._actions),
        ("Behavior", self._behaviors),
        ("Query", self._queries),
    ):
        if entries:
            category_list = ParameterList()
            for idx, entry in enumerate(entries):
                category_list.lists[f"{name}_{idx}"] = entry
            pio.lists[name] = category_list
    self._aiprog = pio
    return pio
def get_all_diffs(self):
    diffs = ParameterIO()
    for mod in util.get_installed_mods():
        diff = self.get_mod_diff(mod)
        if diff:
            merge_plists(diffs, diff, True)
    return diffs if diffs.lists or diffs.objects else None
def get_all_diffs(self):
    diffs = None
    for mod in util.get_installed_mods():
        diff = self.get_mod_diff(mod)
        if diff:
            if not diffs:
                diffs = ParameterIO()
            merge_plists(diffs, diff, True)
    return diffs
def generate_diff(self, mod_dir: Path, modded_files: List[Union[str, Path]]): print("Detecting general changes to AAMP files...") aamps = { m for m in modded_files if isinstance(m, str) and m[m.rindex("."):] in ( util.AAMP_EXTS - HANDLED) and "Dummy" not in m and "CDungeon" not in m } if not aamps: return None consolidated: Dict[str, Any] = {} for aamp in aamps: util.dict_merge( consolidated, reduce( lambda res, cur: {cur: res if res is not None else {} }, # type: ignore reversed(aamp.split("//")), None, ), ) this_pool = self._pool or Pool(maxtasksperchild=500) results = this_pool.starmap(partial(get_aamp_diffs, tmp_dir=mod_dir), list(consolidated.items())) if not self._pool: this_pool.close() this_pool.join() del consolidated del aamps diffs = ParameterIO() diffs.objects["FileTable"] = ParameterObject() i: int = 0 for file, diff in sorted( (k, v) for r in [r for r in results if r is not None] for k, v in r.items()): diffs.objects["FileTable"].params[f"File{i}"] = Parameter(file) diffs.lists[file] = diff i += 1 return diffs
def generate_diff(self, mod_dir: Path, modded_files: List[Union[str, Path]]): print("Detecting general changes to AAMP files...") aamps = { m for m in modded_files if isinstance(m, str) and m[m.rindex("."):] in (util.AAMP_EXTS - HANDLED) } if not aamps: return None consolidated = {} for aamp in aamps: util.dict_merge( consolidated, reduce( lambda res, cur: {cur: res} if res is not None else [cur], reversed(aamp.split("//")), None, ), ) this_pool = self._pool or Pool() results = this_pool.starmap(partial(get_aamp_diffs, tmp_dir=mod_dir), list(consolidated.items())) if not self._pool: this_pool.close() this_pool.join() del consolidated del aamps diffs = ParameterIO() diffs.objects["FileTable"] = ParameterObject() i: int = 0 for result in results: if not result: continue for file, diff in result.items(): diffs.objects["FileTable"].params[f"File{i}"] = Parameter(file) diffs.lists[file] = diff i += 1 return diffs
def generate_diff(
    self, mod_dir: Path, modded_files: List[Union[Path, str]]
) -> ParameterIO:
    print("Logging changes to shop files...")
    diffs = ParameterIO()
    file_names = ParameterObject()
    for file in [file for file in modded_files if Path(file).suffix in EXT_FOLDERS]:
        try:
            mod_bytes = util.get_nested_file_bytes(str(mod_dir) + "/" + str(file))
            nests = str(file).split("//", 1)
            try:
                ref_path = str(util.get_game_file(Path(nests[0]))) + "//" + nests[1]
            except FileNotFoundError:
                continue
            try:
                ref_bytes = util.get_nested_file_bytes(ref_path)
            except AttributeError:
                continue
            shop_type = str(file).split(".")[-1]
            mod_pio = get_named_pio(ParameterIO.from_binary(mod_bytes), shop_type)
            ref_pio = get_named_pio(ParameterIO.from_binary(ref_bytes), shop_type)
            file_names.params[oead.aamp.Name(file).hash] = Parameter(file)
            diffs.lists[file] = gen_diffs(ref_pio, mod_pio)
        except (KeyError, AttributeError):
            raise
    diffs.objects["Filenames"] = file_names
    return diffs
def rem_underride(data: dict):
    for file, tables in data.items():
        stock: Optional[dict] = None
        for name, table in tables.items():
            for actor, prob in table["items"].items():
                if prob == util.UNDERRIDE:
                    if stock is None:
                        # Load the stock drop table lazily, only when needed.
                        base_file = file[: file.index("//")]
                        sub_file = file[file.index("//") :]
                        ref_drop = ParameterIO.from_binary(
                            util.get_nested_file_bytes(
                                str(util.get_game_file(base_file)) + sub_file
                            )
                        )
                        stock = _drop_to_dict(ref_drop)
                    data[file][name]["items"][actor] = stock[name]["items"][actor]
def _to_pio(self, obj) -> ParameterIO:
    pio = ParameterIO()
    pio.type = obj["type"]
    pio.version = obj["version"]
    obj = obj["lists"]["param_root"]
    if "lists" in obj and obj["lists"]:
        for name, content in obj["lists"].items():
            # Purely numeric keys are CRC32 name hashes; restore them as ints.
            if name.isnumeric():
                pio.lists[int(name)] = self._to_plist(content)
            else:
                pio.lists[name] = self._to_plist(content)
    if "objects" in obj and obj["objects"]:
        for name, content in obj["objects"].items():
            if "params" in content and content["params"]:
                if name.isnumeric():
                    pio.objects[int(name)] = self._to_pobj(content)
                else:
                    pio.objects[name] = self._to_pobj(content)
    return pio
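# Hedged sketch (hypothetical values) of the serialized form _to_pio consumes,
# as implied by the lookups above: the root ParameterList is keyed
# "param_root", and purely numeric keys stand in for hashed names that are
# restored via int(name).
_example_pio_dict = {
    "type": "xml",
    "version": 0,
    "lists": {
        "param_root": {
            "lists": {},
            "objects": {
                "Header": {"params": {"TableNum": 1}},
                "3201830458": {"params": {"ColumnNum": 2}},  # hashed name
            },
        }
    },
}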
def _merge_in_sarc(sarc: Sarc, edits: dict) -> ByteString:
    new_sarc = SarcWriter.from_sarc(sarc)
    for file, stuff in edits.items():
        if isinstance(stuff, dict):
            try:
                ofile = sarc.get_file(file)
                if ofile is None:
                    raise FileNotFoundError(
                        f"Could not find nested file {file} in SARC"
                    )
                sub_sarc = Sarc(util.unyaz_if_needed(ofile.data))
            except (
                InvalidDataError,
                ValueError,
                AttributeError,
                RuntimeError,
                FileNotFoundError,
            ):
                util.vprint(f"Couldn't merge into nested SARC {file}")
                continue
            nsub_bytes = _merge_in_sarc(sub_sarc, stuff)
            new_sarc.files[file] = (
                util.compress(nsub_bytes)
                if file[file.rindex(".") :].startswith(".s")
                else nsub_bytes
            )
        elif isinstance(stuff, ParameterList):
            try:
                ofile = sarc.get_file(file)
                if ofile is None:
                    raise FileNotFoundError(
                        f"Could not find nested file {file} in SARC"
                    )
                pio = ParameterIO.from_binary(ofile.data)
            except (
                AttributeError,
                ValueError,
                InvalidDataError,
                FileNotFoundError,
            ) as err:
                util.vprint(f"Couldn't open {file}: {err}")
                continue
            new_pio = merge_shopdata(pio, stuff)
            new_sarc.files[file] = new_pio.to_binary()
    return new_sarc.write()[1]
def _merge_in_sarc(sarc: Sarc, edits: dict) -> ByteString:
    new_sarc = SarcWriter.from_sarc(sarc)
    for file, stuff in edits.items():
        if isinstance(stuff, dict):
            try:
                sub_sarc = Sarc(util.unyaz_if_needed(sarc.get_file(file).data))
            except (InvalidDataError, ValueError, AttributeError, RuntimeError):
                util.vprint(f"Couldn't merge into nested SARC {file}")
                continue
            nsub_bytes = _merge_in_sarc(sub_sarc, stuff)
            new_sarc.files[file] = (
                util.compress(nsub_bytes)
                if file[file.rindex(".") :].startswith(".s")
                else nsub_bytes
            )
        elif isinstance(stuff, ParameterList):
            try:
                pio = ParameterIO.from_binary(sarc.get_file(file).data)
            except (AttributeError, ValueError, InvalidDataError) as err:
                util.vprint(f"Couldn't open {file}: {err}")
                continue
            merge_plists(pio, stuff)
            new_sarc.files[file] = pio.to_binary()
    return new_sarc.write()[1]
def threaded_merge(item) -> Tuple[str, dict]:
    """Deep merges an individual file, suitable for multiprocessing"""
    file, stuff = item
    failures = {}
    try:
        base_file = util.get_game_file(file, file.startswith(util.get_dlc_path()))
    except FileNotFoundError:
        return "", {}
    if (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    file_ext = os.path.splitext(file)[1]
    file_bytes = base_file.read_bytes()
    yazd = file_bytes[0:4] == b"Yaz0"
    file_bytes = file_bytes if not yazd else util.decompress(file_bytes)
    magic = file_bytes[0:4]

    if magic == b"SARC":
        new_sarc, sub_failures = nested_patch(oead.Sarc(file_bytes), stuff)
        del file_bytes
        new_bytes = bytes(new_sarc.write()[1])
        for failure, contents in sub_failures.items():
            print(f"Some patches to {failure} failed to apply.")
            failures[failure] = contents
    elif magic == b"AAMP":
        try:
            aamp_contents = ParameterIO.from_binary(file_bytes)
            try:
                aamp_contents = shop_merge(
                    aamp_contents,
                    file_ext.replace(".", ""),
                    stuff.lists["Additions"],
                    stuff.lists["Removals"],
                )
                aamp_bytes = ParameterIO.to_binary(aamp_contents)
            except Exception as err:  # pylint: disable=broad-except
                raise RuntimeError(f"AAMP file {file} could not be merged.") from err
            del aamp_contents
            new_bytes = aamp_bytes  # recompressed below if the source was yaz0
        except ValueError:
            new_bytes = file_bytes
            del file_bytes
            print(f"Deep merging file {file} failed. No changes were made.")
    else:
        raise ValueError(f"{file} is not a SARC or AAMP file.")

    new_bytes = new_bytes if not yazd else util.compress(new_bytes)
    output_file = util.get_master_modpack_dir() / file
    if base_file == output_file:
        output_file.unlink()
    output_file.parent.mkdir(parents=True, exist_ok=True)
    output_file.write_bytes(new_bytes)
    del new_bytes
    if magic == b"SARC":
        util.vprint(f"Finished patching files inside {file}")
    else:
        util.vprint(f"Finished patching {file}")
    return util.get_canon_name(file), failures