# Imports assumed from usage in this excerpt (bcml-internal import paths may
# differ); get_stock_map is expected to come from elsewhere in the same module.
import shutil
from functools import partial
from multiprocessing import Pool
from pathlib import Path
from typing import Dict, Tuple

import oead
import rstb

from bcml import util


def consolidate_diffs(self, diffs: list):
    all_diffs = {}
    for diff in diffs:
        all_diffs.update(diff)
    util.vprint("All shrine entry diffs:")
    util.vprint(all_diffs)
    return all_diffs
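
# Consolidation is a plain dict.update() per mod, so when two mods touch the
# same map unit the later mod's diff replaces the earlier one wholesale rather
# than being merged per actor. A minimal sketch with hypothetical diff values:
#
#     diffs = [
#         {("E-4", "Dynamic"): {"Objs": {"mod": {"123": "mod A's actor"}}}},
#         {("E-4", "Dynamic"): {"Objs": {"mod": {"123": "mod B's actor"}}}},
#     ]
#     consolidate_diffs(diffs)
#     # -> {("E-4", "Dynamic"): {"Objs": {"mod": {"123": "mod B's actor"}}}}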


def perform_merge(self):
    # "no_del" option: when set, deletions recorded in map diffs are not
    # applied (passed through to merge_map below).
    no_del = self._options.get("no_del", False)
    # Clear any previously merged map units from the master modpack. On Wii U
    # the DLC content sits under an extra "0010" title folder.
    shutil.rmtree(
        str(
            util.get_master_modpack_dir()
            / util.get_dlc_path()
            / ("0010" if util.get_settings("wiiu") else "")
            / "Map"
            / "MainField"
        ),
        ignore_errors=True,
    )
    shutil.rmtree(
        str(
            util.get_master_modpack_dir()
            / util.get_content_path()
            / "Map"
            / "MainField"
        ),
        ignore_errors=True,
    )
    log_path = util.get_master_modpack_dir() / "logs" / "map.log"
    if log_path.exists():
        log_path.unlink()

    print("Loading map mods...")
    map_diffs = self.consolidate_diffs(self.get_all_diffs())
    util.vprint("All map diffs:")
    util.vprint(map_diffs)
    if not map_diffs:
        print("No map merge necessary")
        return

    # Blank out AocMainField.pack so the merged loose map units are used
    # instead of the packed stock maps.
    aoc_pack = (
        util.get_master_modpack_dir()
        / util.get_dlc_path()
        / ("0010" if util.get_settings("wiiu") else "")
        / "Pack"
        / "AocMainField.pack"
    )
    if not aoc_pack.exists() or aoc_pack.stat().st_size > 0:
        print("Emptying AocMainField.pack...")
        aoc_pack.parent.mkdir(parents=True, exist_ok=True)
        aoc_pack.write_bytes(b"")

    rstb_vals = {}
    rstb_calc = rstb.SizeCalculator()
    print("Merging modded map units...")
    pool = self._pool or Pool(maxtasksperchild=500)
    rstb_results = pool.map(
        partial(merge_map, rstb_calc=rstb_calc, no_del=no_del),
        map_diffs.items(),
    )
    for result in rstb_results:
        rstb_vals[result[util.get_dlc_path()][0]] = result[util.get_dlc_path()][1]
        rstb_vals[result["main"][0]] = result["main"][1]
    if not self._pool:
        pool.close()
        pool.join()

    print("Adjusting RSTB...")
    log_path.parent.mkdir(parents=True, exist_ok=True)
    with log_path.open("w", encoding="utf-8") as l_file:
        for canon, val in rstb_vals.items():
            l_file.write(f"{canon},{val}\n")
    print("Map merge complete")
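
# The map.log written above is a simple comma-separated list of canonical RSTB
# resource paths and their new sizes, one entry per line, e.g. (sizes here are
# hypothetical):
#
#     Aoc/0010/Map/MainField/E-4/E-4_Dynamic.mubin,123456
#     Map/MainField/E-4/E-4_Dynamic.mubin,120832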


def perform_merge(self):
    # Clear any previously merged map units from the master modpack. On Wii U
    # the DLC content sits under an extra "0010" title folder.
    shutil.rmtree(
        str(
            util.get_master_modpack_dir()
            / util.get_dlc_path()
            / ("0010" if util.get_settings("wiiu") else "")
            / "Map"
            / "MainField"
        ),
        ignore_errors=True,
    )
    shutil.rmtree(
        str(
            util.get_master_modpack_dir()
            / util.get_content_path()
            / "Map"
            / "MainField"
        ),
        ignore_errors=True,
    )
    log_path = util.get_master_modpack_dir() / "logs" / "map.log"
    if log_path.exists():
        log_path.unlink()

    print("Loading map mods...")
    map_diffs = self.consolidate_diffs(self.get_all_diffs())
    util.vprint("All map diffs:")
    util.vprint(map_diffs)
    if not map_diffs:
        print("No map merge necessary")
        return

    # Blank out AocMainField.pack so the merged loose map units are used
    # instead of the packed stock maps.
    aoc_pack = (
        util.get_master_modpack_dir()
        / util.get_dlc_path()
        / ("0010" if util.get_settings("wiiu") else "")
        / "Pack"
        / "AocMainField.pack"
    )
    if not aoc_pack.exists() or aoc_pack.stat().st_size > 0:
        print("Emptying AocMainField.pack...")
        aoc_pack.parent.mkdir(parents=True, exist_ok=True)
        aoc_pack.write_bytes(b"")

    rstb_vals = {}
    rstb_calc = rstb.SizeCalculator()
    print("Merging modded map units...")
    pool = self._pool or Pool(maxtasksperchild=500)
    rstb_results = pool.map(
        partial(merge_map, rstb_calc=rstb_calc),
        map_diffs.items(),
    )
    for result in rstb_results:
        rstb_vals[result[util.get_dlc_path()][0]] = result[util.get_dlc_path()][1]
        rstb_vals[result["main"][0]] = result["main"][1]
    if not self._pool:
        pool.close()
        pool.join()

    # Stock *_Static map units also ship packed inside TitleBG.pack; remove
    # those copies so the merged loose files take effect.
    stock_static = [m for m in map_diffs if m[1] == "Static"]
    if stock_static:
        title_path = (
            util.get_master_modpack_dir()
            / util.get_content_path()
            / "Pack"
            / "TitleBG.pack"
        )
        if not title_path.exists():
            title_path.parent.mkdir(parents=True, exist_ok=True)
            shutil.copyfile(util.get_game_file("Pack/TitleBG.pack"), title_path)
        title_bg: oead.SarcWriter = oead.SarcWriter.from_sarc(
            oead.Sarc(title_path.read_bytes())
        )
        for static in stock_static:
            del title_bg.files[
                f"Map/MainField/{static[0]}/{static[0]}_Static.smubin"
            ]
        # SarcWriter.write() returns (alignment, data); keep only the data.
        title_path.write_bytes(title_bg.write()[1])

    print("Adjusting RSTB...")
    log_path.parent.mkdir(parents=True, exist_ok=True)
    with log_path.open("w", encoding="utf-8") as l_file:
        for canon, val in rstb_vals.items():
            l_file.write(f"{canon},{val}\n")
    print("Map merge complete")
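
# The TitleBG.pack edit above assumes every modded Static unit actually has a
# stock copy packed in TitleBG.pack; a defensive variant (sketch only, assuming
# oead's SarcWriter.files supports membership tests) would skip missing entries
# instead of letting del raise KeyError:
#
#     for static in stock_static:
#         entry = f"Map/MainField/{static[0]}/{static[0]}_Static.smubin"
#         if entry in title_bg.files:
#             del title_bg.files[entry]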


def merge_map(
    map_pair: tuple, rstb_calc: rstb.SizeCalculator, no_del: bool = False
) -> Dict[str, Tuple[str, int]]:
    # no_del is assumed here to skip deletion handling entirely; it is passed
    # by the perform_merge variant that exposes the "no_del" option.
    map_unit, changes = map_pair[0], map_pair[1]
    util.vprint(f'Merging {len(changes)} versions of {"_".join(map_unit)}...')
    new_map = get_stock_map(map_unit)

    # Replace stock actors in place; anything whose HashId is not in the stock
    # map gets queued as an addition instead.
    stock_obj_hashes = [int(obj["HashId"]) for obj in new_map["Objs"]]
    for hash_id, actor in changes["Objs"]["mod"].items():
        try:
            new_map["Objs"][stock_obj_hashes.index(int(hash_id))] = actor
        except ValueError:
            changes["Objs"]["add"].append(actor)
    if not no_del:
        # Delete from the highest stock index down so earlier pops do not
        # shift the indices of later ones.
        for map_del in sorted(
            changes["Objs"]["del"],
            key=lambda change: stock_obj_hashes.index(int(change))
            if int(change) in stock_obj_hashes
            else -1,
            reverse=True,
        ):
            if int(map_del) in stock_obj_hashes:
                try:
                    new_map["Objs"].pop(stock_obj_hashes.index(int(map_del)))
                except IndexError:
                    # If the index list has drifted, fall back to searching by HashId.
                    try:
                        obj_to_delete = next(
                            iter(
                                [
                                    actor
                                    for actor in new_map["Objs"]
                                    if actor["HashId"] == map_del
                                ]
                            )
                        )
                        new_map["Objs"].remove(obj_to_delete)
                    except (StopIteration, ValueError):
                        util.vprint(f"Could not delete actor with HashId {map_del}")
    new_map["Objs"].extend(
        [
            change
            for change in changes["Objs"]["add"]
            if int(change["HashId"]) not in stock_obj_hashes
        ]
    )
    new_map["Objs"] = sorted(new_map["Objs"], key=lambda actor: int(actor["HashId"]))

    # Rails get the same treatment when the stock map has any.
    if len(new_map["Rails"]):
        stock_rail_hashes = [int(rail["HashId"]) for rail in new_map["Rails"]]
        for hash_id, rail in changes["Rails"]["mod"].items():
            try:
                new_map["Rails"][stock_rail_hashes.index(int(hash_id))] = rail
            except ValueError:
                changes["Rails"]["add"].append(rail)
        if not no_del:
            for map_del in sorted(
                changes["Rails"]["del"],
                key=lambda change: stock_rail_hashes.index(int(change))
                if int(change) in stock_rail_hashes
                else -1,
                reverse=True,
            ):
                if int(map_del) in stock_rail_hashes:
                    try:
                        new_map["Rails"].pop(stock_rail_hashes.index(int(map_del)))
                    except IndexError:
                        try:
                            obj_to_delete = next(
                                iter(
                                    [
                                        rail
                                        for rail in new_map["Rails"]
                                        if rail["HashId"] == map_del
                                    ]
                                )
                            )
                            new_map["Rails"].remove(obj_to_delete)
                        except (StopIteration, ValueError):
                            util.vprint(
                                f"Could not delete rail with HashId {map_del}"
                            )
        new_map["Rails"].extend(
            [
                change
                for change in changes["Rails"]["add"]
                if int(change["HashId"]) not in stock_rail_hashes
            ]
        )
        new_map["Rails"] = sorted(
            new_map["Rails"], key=lambda rail: int(rail["HashId"])
        )

    # Write the merged unit to the DLC (aoc) side of the master modpack.
    aoc_out: Path = (
        util.get_master_modpack_dir()
        / util.get_dlc_path()
        / ("0010" if util.get_settings("wiiu") else "")
        / "Map"
        / "MainField"
        / map_unit.section
        / f"{map_unit.section}_{map_unit.type}.smubin"
    )
    aoc_out.parent.mkdir(parents=True, exist_ok=True)
    aoc_bytes = oead.byml.to_binary(new_map, big_endian=util.get_settings("wiiu"))
    aoc_out.write_bytes(util.compress(aoc_bytes))

    # The base-game copy must not contain DLC-only actors, so filter them out
    # before writing the content version.
    new_map["Objs"] = [
        obj
        for obj in new_map["Objs"]
        if not str(obj["UnitConfigName"]).startswith("DLC")
    ]
    base_out = (
        util.get_master_modpack_dir()
        / util.get_content_path()
        / "Map"
        / "MainField"
        / map_unit.section
        / f"{map_unit.section}_{map_unit.type}.smubin"
    )
    base_out.parent.mkdir(parents=True, exist_ok=True)
    base_bytes = oead.byml.to_binary(new_map, big_endian=util.get_settings("wiiu"))
    base_out.write_bytes(util.compress(base_bytes))

    # Canonical RSTB paths and recalculated sizes for both copies; perform_merge
    # writes these pairs to map.log.
    return {
        util.get_dlc_path(): (
            f"Aoc/0010/Map/MainField/{map_unit.section}/"
            f"{map_unit.section}_{map_unit.type}.mubin",
            rstb_calc.calculate_file_size_with_ext(
                bytes(aoc_bytes), util.get_settings("wiiu"), ".mubin"
            ),
        ),
        "main": (
            f"Map/MainField/{map_unit.section}/"
            f"{map_unit.section}_{map_unit.type}.mubin",
            rstb_calc.calculate_file_size_with_ext(
                bytes(base_bytes), util.get_settings("wiiu"), ".mubin"
            ),
        ),
    }
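
# Shape of the value merge_map returns (sizes hypothetical); perform_merge
# flattens these pairs into the {canonical_path: rstb_size} dict written to
# logs/map.log:
#
#     {
#         util.get_dlc_path(): ("Aoc/0010/Map/MainField/E-4/E-4_Dynamic.mubin", 123456),
#         "main": ("Map/MainField/E-4/E-4_Dynamic.mubin", 120832),
#     }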