def merge_dungeonstatic(diffs: dict = None):
    """Merges all changes to the CDungeon Static.smubin

    :param diffs: Pre-collected diffs keyed by dungeon map name. When falsy,
    diffs are loaded from every installed mod that has a `logs/dstatic.yml`.
    """
    if not diffs:
        diffs = {}
        loader = yaml.CSafeLoader
        yaml_util.add_constructors(loader)
        for mod in util.get_installed_mods():
            # Build the log path once instead of once for the existence
            # check and again for the read.
            dstatic_log = mod.path / 'logs' / 'dstatic.yml'
            if dstatic_log.exists():
                diffs.update(
                    yaml.load(dstatic_log.read_bytes(), Loader=loader))
    if not diffs:
        return
    new_static = byml.Byml(
        util.decompress_file(
            str(util.get_game_file(
                'aoc/0010/Map/CDungeon/Static.smubin')))).parse()
    # Map names of dungeons already present in the stock file, index-aligned
    # with new_static['StartPos'].
    base_dungeons = [dungeon['Map'] for dungeon in new_static['StartPos']]
    for dungeon, diff in diffs.items():
        if dungeon not in base_dungeons:
            # Entirely new dungeon: append the whole entry.
            new_static['StartPos'].append(diff)
        else:
            # Existing dungeon: overwrite only the changed keys.
            # Hoist the index lookup out of the key loop (loop-invariant).
            idx = base_dungeons.index(dungeon)
            for key, value in diff.items():
                new_static['StartPos'][idx][key] = value
    output_static = (util.get_master_modpack_dir() / 'aoc' / '0010' / 'Map' /
                     'CDungeon' / 'Static.smubin')
    output_static.parent.mkdir(parents=True, exist_ok=True)
    output_static.write_bytes(
        util.compress(byml.Writer(new_static, True).get_bytes()))
def get_mod_diff(self, mod: BcmlMod):
    """Returns the logged diff for *mod*, or an empty dict if none is logged."""
    if not self.is_mod_logged(mod):
        return {}
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    log_path = mod.path / 'logs' / self._log_name
    with log_path.open('r', encoding='utf-8') as log:
        return yaml.load(log, Loader=loader)
def yml_to_byml_dir(tmp_dir: Path, ext: str = '.byml'):
    """ Converts YAML files in given temp dir to BYML

    :param tmp_dir: Directory to search recursively for `.yml` files.
    :param ext: Extension for the converted BYML output files.
    """
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    # rglob() already prepends '**/', so '*.yml' is the correct pattern
    # (the original '**/*.yml' was redundant but equivalent).
    for yml in tmp_dir.rglob('*.yml'):
        with yml.open('r', encoding='utf-8') as y_file:
            # Pass the loader by keyword for consistency with the rest of
            # the file (it was positional here).
            root = yaml.load(y_file, Loader=loader)
        # Write the BYML alongside the source file, then delete the YAML.
        with (yml.with_name(yml.stem + ext)).open('wb') as b_file:
            byml.Writer(root, True).write(b_file)
        yml.unlink()
def merge_actorinfo(verbose: bool = False):
    """Merges installed changes to actor info

    Collects `logs/actorinfo.yml` from every actor-info mod, merges the
    entries over the stock ActorInfo, and writes the compressed result to
    the master modpack.

    :param verbose: Print per-entry progress messages.
    """
    mods = get_actorinfo_mods()
    actor_path = (util.get_master_modpack_dir() / 'content' / 'Actor' /
                  'ActorInfo.product.sbyml')
    if not mods:
        # Nothing to merge; remove any stale merged file from a previous run.
        print('No actor info merging necessary.')
        if actor_path.exists():
            actor_path.unlink()
        return
    print('Loading modded actor info...')
    # modded_actors: actor hash -> merged actor info dict, later mods
    # overwriting earlier ones via dict_merge.
    modded_actors = {}
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    for mod in mods:
        with (mod.path / 'logs' / 'actorinfo.yml').open(
                'r', encoding='utf-8') as a_file:
            entries = yaml.load(a_file, Loader=loader)
            util.dict_merge(modded_actors, entries, overwrite_lists=True)
            if verbose:
                print(f'Loaded {len(entries)} entries from {mod.name}')
        # Free the per-mod entries promptly; only the merged dict is kept.
        del entries
    print('Loading unmodded actor info...')
    actorinfo = get_stock_actorinfo()
    print('Merging changes...')
    # 'Hashes' and 'Actors' are parallel lists: the actor at Hashes[i]
    # is described by Actors[i].
    for actor_hash, actor_info in modded_actors.items():
        if actor_hash in actorinfo['Hashes']:
            # Existing actor: merge the modded fields into its entry.
            idx = actorinfo['Hashes'].index(actor_hash)
            util.dict_merge(actorinfo['Actors'][idx], actor_info,
                            overwrite_lists=True)
            if verbose:
                print(f' Updated entry for {actorinfo["Actors"][idx]}')
        else:
            # New actor: append to both parallel lists.
            actorinfo['Hashes'].append(actor_hash)
            actorinfo['Actors'].append(actor_info)
            if verbose:
                print(f' Added entry for {actor_info["name"]}')
    print('Sorting new actor info...')
    actorinfo['Hashes'].sort()
    # Hashes below 2**31 fit a signed 32-bit int; larger ones must be
    # written as unsigned for the BYML format.
    actorinfo['Hashes'] = list(
        map(lambda x: byml.Int(x) if x < 2147483648 else byml.UInt(x),
            actorinfo['Hashes']))
    # Actors are keyed by the CRC32 of their name, so sorting by that value
    # keeps the parallel lists aligned after the hash sort above.
    actorinfo['Actors'].sort(
        key=lambda x: zlib.crc32(x['name'].encode('utf-8')))
    print('Saving new actor info...')
    buf = BytesIO()
    byml.Writer(actorinfo, True).write(buf)
    actor_path.parent.mkdir(parents=True, exist_ok=True)
    actor_path.write_bytes(util.compress(buf.getvalue()))
    print('Actor info merged successfully')
def get_events_for_mod(mod: util.BcmlMod) -> dict:
    """ Gets all of the logged event info changes for a mod

    :return: Returns a dict of new and modded event info entries
    :rtype: dict
    """
    # NOTE: annotation fixed from `-> {}` (a dict instance) to `-> dict`.
    events = {}
    log_path = mod.path / 'logs' / 'eventinfo.yml'
    if log_path.exists():
        loader = yaml.CSafeLoader
        yaml_util.add_constructors(loader)
        # Use a context manager so the file handle is closed (the original
        # passed an open handle to yaml.load and leaked it).
        with log_path.open('r', encoding='utf-8') as e_file:
            events = yaml.load(e_file, Loader=loader)
    return events
def _convert_map_log(log: Path):
    """Rewrites a map log in place, converting modded-actor hash IDs to strings."""
    map_loader = yaml.CLoader
    byu.add_constructors(map_loader)
    old_diff = yaml.load(log.read_text("utf-8"), Loader=map_loader)
    converted = {
        unit: {
            "add": changes["add"],
            "del": changes["del"],
            "mod": {str(hid): entry for hid, entry in changes["mod"].items()},
        }
        for unit, changes in old_diff.items()
    }
    map_dumper = yaml.CDumper
    byu.add_representers(map_dumper)
    log.write_text(
        yaml.dump(converted, Dumper=map_dumper, allow_unicode=True),
        encoding="utf-8")
def get_all_map_diffs() -> dict:
    """
    Consolidates diffs for installed map unit mods into a single set of
    additions, modifications, and deletions.

    :return: Returns a dict of modded map units with their added, modified,
    and deleted actors.
    :rtype: dict of str: dict
    """
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    # Group every mod's diff by map unit, in install order.
    per_map = {}
    for mod in util.get_installed_mods():
        map_log = mod.path / 'logs' / 'map.yml'
        if not map_log.exists():
            continue
        with map_log.open('r', encoding='utf-8') as y_file:
            map_yml = yaml.load(y_file, Loader=loader)
        for file, diff in map_yml.items():
            per_map.setdefault(Map(*file.split('_')), []).append(diff)
    consolidated = {}
    for unit, unit_diffs in per_map.items():
        entry = {
            'add': [],
            'mod': {},
            # Union of every mod's deletions for this unit.
            'del': list({hash_id
                         for diff in unit_diffs
                         for hash_id in diff['del']})
        }
        # Later mods win for modifications (dict insert order).
        for diff in unit_diffs:
            for hash_id, actor in diff['mod'].items():
                entry['mod'][hash_id] = deepcopy(actor)
        # For additions, walk mods newest-first so the latest version of
        # each actor (by HashId) is the one kept.
        seen_hashes = []
        for diff in reversed(unit_diffs):
            for actor in diff['add']:
                if actor['HashId'] not in seen_hashes:
                    seen_hashes.append(actor['HashId'])
                    entry['add'].append(actor)
        consolidated[unit] = entry
    return consolidated
def merge_savedata(verbose: bool = False, force: bool = False):
    """ Merges install savedata mods and saves the new Bootup.pack, fixing
    the RSTB if needed

    :param verbose: Print per-entry progress messages.
    :param force: Re-merge even if the log hash says nothing changed.
    """
    mods = get_savedata_mods()
    slog_path = util.get_master_modpack_dir() / 'logs' / 'savedata.log'
    if not mods:
        # Fixed copy-paste bug: this message said 'gamedata' in the
        # savedata merger.
        print('No savedata merging necessary.')
        if slog_path.exists():
            slog_path.unlink()
        if (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').exists():
            (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').unlink()
        return
    if slog_path.exists() and not force:
        # Skip the merge when the installed mod set hasn't changed since
        # the last run (log stores a hash of the mod list).
        with slog_path.open('r') as l_file:
            if xxhash.xxh32(str(mods)).hexdigest() == l_file.read():
                print('No savedata merging necessary.')
                return
    new_entries = []
    new_hashes = []
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    print('Loading savedata mods...')
    for mod in mods:
        # encoding specified for consistency with the rest of the file.
        with open(mod.path / 'logs' / 'savedata.yml',
                  encoding='utf-8') as s_file:
            yml = yaml.load(s_file, Loader=loader)
            for entry in yml:
                # First mod to provide a HashValue wins; later duplicates
                # are skipped.
                if entry['HashValue'] in new_hashes:
                    continue
                new_entries.append(entry)
                new_hashes.append(entry['HashValue'])
                if verbose:
                    print(f'  Added {entry["DataName"]} from {mod.name}')
    savedata = get_stock_savedata()
    merged_entries = []
    # The last two save files (saveformat_6/7) are special and handled
    # separately below.
    save_files = sorted(savedata.list_files())[0:-2]
    print('Loading stock savedata...')
    for file in save_files:
        merged_entries.extend(byml.Byml(savedata.get_file_data(
            file).tobytes()).parse()['file_list'][1])
    print('Merging changes...')
    merged_entries.extend(new_entries)
    merged_entries.sort(key=lambda x: x['HashValue'])
    special_bgsv = [
        savedata.get_file_data('/saveformat_6.bgsvdata').tobytes(),
        savedata.get_file_data('/saveformat_7.bgsvdata').tobytes(),
    ]
    print('Creating and injecting new savedataformat.sarc...')
    new_savedata = sarc.SARCWriter(True)
    # Entries are chunked 8192 per bgsvdata file.
    num_files = ceil(len(merged_entries) / 8192)
    for i in range(num_files):
        end_pos = (i + 1) * 8192
        if end_pos > len(merged_entries):
            end_pos = len(merged_entries)
        buf = BytesIO()
        byml.Writer({
            'file_list': [
                {
                    'IsCommon': False,
                    'IsCommonAtSameAccount': False,
                    'IsSaveSecureCode': True,
                    'file_name': 'game_data.sav'
                },
                merged_entries[i * 8192:end_pos]
            ],
            'save_info': [
                {
                    'directory_num': byml.Int(8),
                    'is_build_machine': True,
                    'revision': byml.Int(18203)
                }
            ]
        }, True).write(buf)
        new_savedata.add_file(f'/saveformat_{i}.bgsvdata', buf.getvalue())
    # Re-append the two special stock files after the generated chunks.
    new_savedata.add_file(f'/saveformat_{num_files}.bgsvdata',
                          special_bgsv[0])
    new_savedata.add_file(
        f'/saveformat_{num_files + 1}.bgsvdata', special_bgsv[1])
    bootup_rstb = inject_savedata_into_bootup(new_savedata)
    (util.get_master_modpack_dir() / 'logs').mkdir(parents=True,
                                                   exist_ok=True)
    with (util.get_master_modpack_dir() / 'logs' /
          'savedata.sarc').open('wb') as s_file:
        new_savedata.write(s_file)
    print('Updating RSTB...')
    rstable.set_size('GameData/savedataformat.sarc', bootup_rstb)
    slog_path.parent.mkdir(parents=True, exist_ok=True)
    # Record the mod-set hash so unchanged installs short-circuit next run.
    with slog_path.open('w', encoding='utf-8') as l_file:
        l_file.write(xxhash.xxh32(str(mods)).hexdigest())
def merge_gamedata(verbose: bool = False, force: bool = False):
    """ Merges installed gamedata mods and saves the new Bootup.pack,
    fixing the RSTB if needed

    :param verbose: Print per-entry progress messages.
    :param force: Re-merge even if the log hash says nothing changed.
    """
    mods = get_gamedata_mods()
    glog_path = util.get_master_modpack_dir() / 'logs' / 'gamedata.log'
    if not mods:
        # Nothing to merge; clean up any stale artifacts from earlier runs.
        print('No gamedata merging necessary.')
        if glog_path.exists():
            glog_path.unlink()
        if (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').exists():
            (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').unlink()
        return
    if glog_path.exists() and not force:
        # Skip the merge when the installed mod set hasn't changed
        # (log stores a hash of the mod list).
        with glog_path.open('r') as l_file:
            if xxhash.xxh32(str(mods)).hexdigest() == l_file.read():
                print('No gamedata merging necessary.')
                return
    # modded_entries: data type -> {DataName: entry}; later mods overwrite
    # earlier ones via dict.update().
    modded_entries = {}
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    print('Loading gamedata mods...')
    for mod in mods:
        with (mod.path / 'logs' / 'gamedata.yml').open('r') as g_file:
            yml = yaml.load(g_file, Loader=loader)
            for data_type in yml:
                if data_type not in modded_entries:
                    modded_entries[data_type] = {}
                modded_entries[data_type].update(yml[data_type])
                if verbose:
                    print(f' Added entries for {data_type} from {mod.name}')
    gamedata = get_stock_gamedata()
    # merged_entries: data type -> list of stock entries, later patched.
    merged_entries = {}
    print('Loading stock gamedata...')
    for yml in gamedata.list_files():
        base_yml = byml.Byml(gamedata.get_file_data(yml).tobytes()).parse()
        for data_type in base_yml:
            if data_type not in merged_entries:
                merged_entries[data_type] = []
            merged_entries[data_type].extend(base_yml[data_type])
    print('Merging changes...')
    # Pass 1: replace stock entries whose DataName has a modded version.
    for data_type in merged_entries:
        if data_type in modded_entries:
            for entry in [entry for entry in merged_entries[data_type]
                          if entry['DataName'] in modded_entries[data_type]]:
                i = merged_entries[data_type].index(entry)
                if verbose:
                    print(f' {entry["DataName"]} has been modified')
                merged_entries[data_type][i] = deepcopy(
                    modded_entries[data_type][entry['DataName']])
            print(f'Merged modified {data_type} entries')
    # Pass 2: append modded entries whose DataName is not in stock.
    # NOTE: `entry` here iterates the DataName *keys* of the modded dict
    # (the inner comprehension deliberately shadows it with stock entries).
    for data_type in modded_entries:
        for entry in [entry for entry in modded_entries[data_type]
                      if entry not in [entry['DataName']
                                       for entry in merged_entries[data_type]]]:
            if verbose:
                print(f' {entry} has been added')
            merged_entries[data_type].append(modded_entries[data_type][entry])
        print(f'Merged new {data_type} entries')
    print('Creating and injecting new gamedata.sarc...')
    new_gamedata = sarc.SARCWriter(True)
    # Entries are chunked 4096 per bgdata file, per data type.
    for data_type in merged_entries:
        num_files = ceil(len(merged_entries[data_type]) / 4096)
        for i in range(num_files):
            end_pos = (i+1) * 4096
            if end_pos > len(merged_entries[data_type]):
                end_pos = len(merged_entries[data_type])
            buf = BytesIO()
            byml.Writer(
                {data_type: merged_entries[data_type][i*4096:end_pos]},
                be=True).write(buf)
            new_gamedata.add_file(f'/{data_type}_{i}.bgdata', buf.getvalue())
    bootup_rstb = inject_gamedata_into_bootup(new_gamedata)
    (util.get_master_modpack_dir() / 'logs').mkdir(parents=True,
                                                   exist_ok=True)
    with (util.get_master_modpack_dir() / 'logs' /
          'gamedata.sarc').open('wb') as g_file:
        new_gamedata.write(g_file)
    print('Updating RSTB...')
    rstable.set_size('GameData/gamedata.sarc', bootup_rstb)
    glog_path.parent.mkdir(parents=True, exist_ok=True)
    # Record the mod-set hash so unchanged installs short-circuit next run.
    with glog_path.open('w', encoding='utf-8') as l_file:
        l_file.write(xxhash.xxh32(str(mods)).hexdigest())