def merge_dungeonstatic(diffs: dict = None):
    """Merges all changes to the CDungeon Static.smubin"""
    if not diffs:
        # No diffs passed in: collect the dstatic logs of every installed mod.
        diffs = {}
        yaml_loader = yaml.CSafeLoader
        yaml_util.add_constructors(yaml_loader)
        for installed in util.get_installed_mods():
            log_file = installed.path / 'logs' / 'dstatic.yml'
            if log_file.exists():
                diffs.update(yaml.load(log_file.read_bytes(), Loader=yaml_loader))
    if not diffs:
        return
    # Parse the stock DLC CDungeon Static file as the merge base.
    stock_bytes = util.decompress_file(
        str(util.get_game_file('aoc/0010/Map/CDungeon/Static.smubin')))
    merged = byml.Byml(stock_bytes).parse()
    stock_maps = [entry['Map'] for entry in merged['StartPos']]
    for dungeon_name, changes in diffs.items():
        if dungeon_name in stock_maps:
            # Known dungeon: overlay each changed key onto the stock entry.
            target = merged['StartPos'][stock_maps.index(dungeon_name)]
            for field, new_value in changes.items():
                target[field] = new_value
        else:
            # Dungeon not in the stock file: append the whole entry.
            merged['StartPos'].append(changes)
    out_file = (util.get_master_modpack_dir() / 'aoc' / '0010' / 'Map'
                / 'CDungeon' / 'Static.smubin')
    out_file.parent.mkdir(parents=True, exist_ok=True)
    out_file.write_bytes(util.compress(byml.Writer(merged, True).get_bytes()))
def _generate_byml(self) -> None:
    """Converts every .yml file under the build assets dir into a BYML file
    of the same name (minus the '.yml' suffix), deleting the YAML source."""
    print('generating BYMLs')
    byml.yaml_util.add_constructors(yaml.CSafeLoader)
    for yml_path in self.build_assets_dir.glob('**/*.yml'):
        with yml_path.open('r') as in_file:
            parsed = yaml.load(in_file, Loader=yaml.CSafeLoader)
        byml_writer = byml.Writer(parsed, be=self.wiiu, version=2)
        # The output path is the YAML path with the 4-char '.yml' tail dropped.
        with Path(str(yml_path)[:-4]).open('wb') as out_file:
            byml_writer.write(out_file)  # type: ignore
        yml_path.unlink()
def yml_to_byml_dir(tmp_dir: Path, ext: str = '.byml'):
    """ Converts YAML files in given temp dir to BYML

    :param tmp_dir: Directory whose .yml files will be converted in place
    :type tmp_dir: class:`pathlib.Path`
    :param ext: Extension for the converted BYML files, defaults to '.byml'
    :type ext: str, optional
    """
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    # rglob() is already recursive (it prepends '**/' itself), so a bare
    # pattern matches the same files as the redundant '**/*.yml'.
    for yml in tmp_dir.rglob('*.yml'):
        with yml.open('r', encoding='utf-8') as y_file:
            root = yaml.load(y_file, Loader=loader)
        # Write the BYML next to the YAML source, then remove the source.
        with (yml.with_name(yml.stem + ext)).open('wb') as b_file:
            byml.Writer(root, True).write(b_file)
        yml.unlink()
def merge_actorinfo(verbose: bool = False):
    """Merges installed changes to actor info

    :param verbose: Whether to display verbose output, defaults to False
    :type verbose: bool, optional
    """
    mods = get_actorinfo_mods()
    actor_path = (util.get_master_modpack_dir() / 'content' / 'Actor' /
                  'ActorInfo.product.sbyml')
    if not mods:
        # Nothing to merge: remove any previously merged actor info.
        print('No actor info merging necessary.')
        if actor_path.exists():
            actor_path.unlink()
        return

    print('Loading modded actor info...')
    modded_actors = {}
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    for mod in mods:
        with (mod.path / 'logs' / 'actorinfo.yml').open(
                'r', encoding='utf-8') as a_file:
            entries = yaml.load(a_file, Loader=loader)
            util.dict_merge(modded_actors, entries, overwrite_lists=True)
            if verbose:
                print(f'Loaded {len(entries)} entries from {mod.name}')
        del entries

    print('Loading unmodded actor info...')
    actorinfo = get_stock_actorinfo()

    print('Merging changes...')
    for actor_hash, actor_info in modded_actors.items():
        if actor_hash in actorinfo['Hashes']:
            # Existing actor: merge the modded fields into the stock entry.
            idx = actorinfo['Hashes'].index(actor_hash)
            util.dict_merge(actorinfo['Actors'][idx], actor_info,
                            overwrite_lists=True)
            if verbose:
                # Print only the actor's name (the original dumped the whole
                # merged dict here, unlike the "Added entry" branch below).
                print(f' Updated entry for {actorinfo["Actors"][idx]["name"]}')
        else:
            # New actor: append hash and info at matching positions.
            actorinfo['Hashes'].append(actor_hash)
            actorinfo['Actors'].append(actor_info)
            if verbose:
                print(f' Added entry for {actor_info["name"]}')

    print('Sorting new actor info...')
    actorinfo['Hashes'].sort()
    # BYML stores signed and unsigned 32-bit ints as distinct types; hashes
    # >= 2^31 must be written as UInt to round-trip correctly.
    actorinfo['Hashes'] = list(
        map(lambda x: byml.Int(x) if x < 2147483648 else byml.UInt(x),
            actorinfo['Hashes']))
    # Actors are kept sorted by the CRC32 of their name, matching the order
    # of the sorted hash list.
    actorinfo['Actors'].sort(
        key=lambda x: zlib.crc32(x['name'].encode('utf-8')))

    print('Saving new actor info...')
    buf = BytesIO()
    byml.Writer(actorinfo, True).write(buf)
    actor_path.parent.mkdir(parents=True, exist_ok=True)
    actor_path.write_bytes(util.compress(buf.getvalue()))
    print('Actor info merged successfully')
def merge_events():
    """ Merges all installed event info mods """
    event_mods = [mod for mod in util.get_installed_mods()
                  if (mod.path / 'logs' / 'eventinfo.yml').exists()]
    merged_events = util.get_master_modpack_dir() / 'logs' / 'eventinfo.byml'
    event_merge_log = util.get_master_modpack_dir() / 'logs' / 'eventinfo.log'
    # The merge log stores a hash of the contributing mod set so the merge
    # can be skipped when nothing has changed.
    event_mod_hash = str(hash(tuple(event_mods)))
    if not event_mods:
        print('No event info merging necessary')
        # Remove stale artifacts. Check each file independently: the original
        # only removed the log when the byml existed, which both leaked a
        # stale log and raised FileNotFoundError if the log was missing.
        if merged_events.exists():
            merged_events.unlink()
        if event_merge_log.exists():
            event_merge_log.unlink()
        try:
            # Restore the stock event info into the master Bootup.pack.
            stock_eventinfo = util.get_nested_file_bytes(
                str(util.get_game_file('Pack/Bootup.pack')) +
                '//Event/EventInfo.product.sbyml',
                unyaz=False
            )
            util.inject_file_into_bootup(
                'Event/EventInfo.product.sbyml',
                stock_eventinfo
            )
        except FileNotFoundError:
            pass
        return
    if event_merge_log.exists() and event_merge_log.read_text() == event_mod_hash:
        print('No event info merging necessary')
        return

    print('Loading event info mods...')
    modded_events = {}
    for mod in event_mods:
        modded_events.update(get_events_for_mod(mod))
    # Overlay the modded events onto the stock event info.
    new_events = get_stock_eventinfo()
    for event, data in modded_events.items():
        new_events[event] = data

    print('Writing new event info...')
    event_bytes = byml.Writer(new_events, be=True).get_bytes()
    util.inject_file_into_bootup(
        'Event/EventInfo.product.sbyml',
        util.compress(event_bytes),
        create_bootup=True
    )
    print('Saving event info merge log...')
    event_merge_log.write_text(event_mod_hash)
    merged_events.write_bytes(event_bytes)

    print('Updating RSTB...')
    rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(
        event_bytes, True, '.byml')
    rstable.set_size('Event/EventInfo.product.byml', rstb_size)
def _generate_gamedata_config(self) -> None:
    """Clears IsOneTrigger on every flag slated for reset, then rewrites
    only the bgdata files that were actually edited into the build's
    gamedata archive directory."""
    print('[ShrineRush] generating GameData configuration')
    reset_names: typing.Set[str] = {f.name for f in self.flags_to_reset}
    dest_dir = (self.build_assets_dir / 'Pack' / 'Bootup.pack'
                / 'GameData' / 'gamedata.ssarc')
    for file_name, bgdata in self.gamedata_bgdata.items():
        touched = False
        for flag_list in bgdata.values():
            for flag_entry in flag_list:
                if flag_entry['DataName'] in reset_names:
                    flag_entry['IsOneTrigger'] = False
                    touched = True
        # Skip files where nothing changed.
        if touched:
            with (dest_dir / file_name).open('wb') as out_file:
                byml.Writer(bgdata, be=self.wiiu, version=2).write(out_file)  # type: ignore
def merge_savedata(verbose: bool = False, force: bool = False):
    """ Merges install savedata mods and saves the new Bootup.pack, fixing the RSTB if needed

    :param verbose: Whether to display verbose output, defaults to False
    :type verbose: bool, optional
    :param force: Whether to merge even if the installed mod set is unchanged, defaults to False
    :type force: bool, optional
    """
    mods = get_savedata_mods()
    slog_path = util.get_master_modpack_dir() / 'logs' / 'savedata.log'
    if not mods:
        # Nothing to merge: clear any previously merged savedata artifacts.
        print('No gamedata merging necessary.')
        if slog_path.exists():
            slog_path.unlink()
        if (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').exists():
            (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').unlink()
        return
    if slog_path.exists() and not force:
        # The log stores a hash of the mod set; skip the merge if unchanged.
        with slog_path.open('r') as l_file:
            if xxhash.xxh32(str(mods)).hexdigest() == l_file.read():
                print('No savedata merging necessary.')
                return

    new_entries = []
    new_hashes = []
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    print('Loading savedata mods...')
    for mod in mods:
        with open(mod.path / 'logs' / 'savedata.yml') as s_file:
            yml = yaml.load(s_file, Loader=loader)
            for entry in yml:
                # De-duplicate by HashValue: the first mod to provide an
                # entry wins.
                if entry['HashValue'] in new_hashes:
                    continue
                else:
                    new_entries.append(entry)
                    new_hashes.append(entry['HashValue'])
                    if verbose:
                        print(f' Added {entry["DataName"]} from {mod.name}')

    savedata = get_stock_savedata()
    merged_entries = []
    # The last two files in the stock archive are handled specially below
    # (saveformat_6/7), so they are excluded from the entry merge.
    save_files = sorted(savedata.list_files())[0:-2]
    print('Loading stock savedata...')
    for file in save_files:
        # file_list[1] holds the list of savedata entries in each bgsvdata.
        merged_entries.extend(byml.Byml(savedata.get_file_data(
            file).tobytes()).parse()['file_list'][1])
    print('Merging changes...')
    merged_entries.extend(new_entries)
    merged_entries.sort(key=lambda x: x['HashValue'])
    # Preserved verbatim from the stock archive and re-appended at the end.
    special_bgsv = [
        savedata.get_file_data('/saveformat_6.bgsvdata').tobytes(),
        savedata.get_file_data('/saveformat_7.bgsvdata').tobytes(),
    ]
    print('Creating and injecting new savedataformat.sarc...')
    new_savedata = sarc.SARCWriter(True)
    # Entries are split into bgsvdata files of at most 8192 each.
    num_files = ceil(len(merged_entries) / 8192)
    for i in range(num_files):
        end_pos = (i+1) * 8192
        if end_pos > len(merged_entries):
            end_pos = len(merged_entries)
        buf = BytesIO()
        byml.Writer({
            'file_list': [
                {
                    'IsCommon': False,
                    'IsCommonAtSameAccount': False,
                    'IsSaveSecureCode': True,
                    'file_name': 'game_data.sav'
                },
                merged_entries[i*8192:end_pos]
            ],
            'save_info': [
                {
                    # NOTE(review): these values mirror the stock savedata
                    # header; presumably game-version specific — confirm
                    # before changing.
                    'directory_num': byml.Int(8),
                    'is_build_machine': True,
                    'revision': byml.Int(18203)
                }
            ]
        }, True).write(buf)
        new_savedata.add_file(f'/saveformat_{i}.bgsvdata', buf.getvalue())
    # Re-append the two special stock files after the generated ones.
    new_savedata.add_file(f'/saveformat_{num_files}.bgsvdata', special_bgsv[0])
    new_savedata.add_file(
        f'/saveformat_{num_files + 1}.bgsvdata', special_bgsv[1])
    bootup_rstb = inject_savedata_into_bootup(new_savedata)
    (util.get_master_modpack_dir() / 'logs').mkdir(parents=True, exist_ok=True)
    with (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').open('wb') as s_file:
        new_savedata.write(s_file)
    print('Updating RSTB...')
    rstable.set_size('GameData/savedataformat.sarc', bootup_rstb)
    # Record the mod-set hash so an identical install skips the merge.
    slog_path.parent.mkdir(parents=True, exist_ok=True)
    with slog_path.open('w', encoding='utf-8') as l_file:
        l_file.write(xxhash.xxh32(str(mods)).hexdigest())
def merge_gamedata(verbose: bool = False, force: bool = False):
    """ Merges installed gamedata mods and saves the new Bootup.pack, fixing the RSTB if needed

    :param verbose: Whether to display verbose output, defaults to False
    :type verbose: bool, optional
    :param force: Whether to merge even if the installed mod set is unchanged, defaults to False
    :type force: bool, optional
    """
    mods = get_gamedata_mods()
    glog_path = util.get_master_modpack_dir() / 'logs' / 'gamedata.log'
    if not mods:
        # Nothing to merge: clear any previously merged gamedata artifacts.
        print('No gamedata merging necessary.')
        if glog_path.exists():
            glog_path.unlink()
        if (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').exists():
            (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').unlink()
        return
    if glog_path.exists() and not force:
        # The log stores a hash of the mod set; skip the merge if unchanged.
        with glog_path.open('r') as l_file:
            if xxhash.xxh32(str(mods)).hexdigest() == l_file.read():
                print('No gamedata merging necessary.')
                return

    modded_entries = {}
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    print('Loading gamedata mods...')
    for mod in mods:
        with (mod.path / 'logs' / 'gamedata.yml').open('r') as g_file:
            yml = yaml.load(g_file, Loader=loader)
            for data_type in yml:
                if data_type not in modded_entries:
                    modded_entries[data_type] = {}
                # Later mods override earlier ones per DataName.
                modded_entries[data_type].update(yml[data_type])
                if verbose:
                    print(f' Added entries for {data_type} from {mod.name}')

    gamedata = get_stock_gamedata()
    merged_entries = {}

    print('Loading stock gamedata...')
    for yml in gamedata.list_files():
        base_yml = byml.Byml(gamedata.get_file_data(yml).tobytes()).parse()
        for data_type in base_yml:
            if data_type not in merged_entries:
                merged_entries[data_type] = []
            merged_entries[data_type].extend(base_yml[data_type])

    print('Merging changes...')
    # Replace stock entries whose DataName was modified by a mod.
    for data_type in merged_entries:
        if data_type in modded_entries:
            for entry in [entry for entry in merged_entries[data_type]
                          if entry['DataName'] in modded_entries[data_type]]:
                i = merged_entries[data_type].index(entry)
                if verbose:
                    print(f' {entry["DataName"]} has been modified')
                merged_entries[data_type][i] = deepcopy(
                    modded_entries[data_type][entry['DataName']])
            print(f'Merged modified {data_type} entries')
    # Append modded entries whose DataName is not in the stock data. Hoist
    # the existing names into a set built once per data type: the original
    # rebuilt the full DataName list for every candidate (O(n*m)).
    for data_type in modded_entries:
        stock_names = {entry['DataName']
                       for entry in merged_entries[data_type]}
        for entry in [entry for entry in modded_entries[data_type]
                      if entry not in stock_names]:
            if verbose:
                print(f' {entry} has been added')
            merged_entries[data_type].append(modded_entries[data_type][entry])
        print(f'Merged new {data_type} entries')

    print('Creating and injecting new gamedata.sarc...')
    new_gamedata = sarc.SARCWriter(True)
    for data_type in merged_entries:
        # Entries are split into bgdata files of at most 4096 each.
        num_files = ceil(len(merged_entries[data_type]) / 4096)
        for i in range(num_files):
            end_pos = (i+1) * 4096
            if end_pos > len(merged_entries[data_type]):
                end_pos = len(merged_entries[data_type])
            buf = BytesIO()
            byml.Writer(
                {data_type: merged_entries[data_type][i*4096:end_pos]},
                be=True).write(buf)
            new_gamedata.add_file(f'/{data_type}_{i}.bgdata', buf.getvalue())
    bootup_rstb = inject_gamedata_into_bootup(new_gamedata)
    (util.get_master_modpack_dir() / 'logs').mkdir(parents=True, exist_ok=True)
    with (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').open('wb') as g_file:
        new_gamedata.write(g_file)

    print('Updating RSTB...')
    rstable.set_size('GameData/gamedata.sarc', bootup_rstb)

    # Record the mod-set hash so an identical install skips the merge.
    glog_path.parent.mkdir(parents=True, exist_ok=True)
    with glog_path.open('w', encoding='utf-8') as l_file:
        l_file.write(xxhash.xxh32(str(mods)).hexdigest())
def byml_to_bin(data):
    """Serializes parsed BYML data to little-endian binary.

    :param data: The parsed BYML document to serialize
    :return: The serialized BYML file contents
    :rtype: bytes
    """
    stream = io.BytesIO()
    # Writer.write() writes into the stream and returns None, so the original
    # `return byml.Writer(...).write(stream)` always returned None. Pull the
    # serialized bytes back out of the buffer instead.
    byml.Writer(data, be=False, version=2).write(stream)
    return stream.getvalue()
def merge_map(map_pair: tuple, rstb_calc: rstb.SizeCalculator, no_del: bool = False,
              link_del: bool = False, verbose: bool = False) -> dict:
    """ Merges changes to a mainfield map and returns the RSTB values

    :param map_pair: A tuple containing class:`Map` to merge and the changes as a dict
    :type map_pair: tuple
    :param rstb_calc: An `rstb` library SizeCalculator object
    :type rstb_calc: class:`rstb.SizeCalculator`
    :param verbose: Whether to display verbose output, defaults to False
    :type verbose: bool, optional
    :return: Returns a dict containing the aoc and base RSTB paths for the merged map
    units along with their new size values
    :rtype: dict of str: int
    """
    # NOTE(review): link_del is accepted but never used in this body.
    map_unit, changes = map_pair
    if verbose:
        print(f'Merging {len(changes)} versions of {"_".join(map_unit)}...')
    new_map = get_stock_map(map_unit)
    stock_hashes = [obj['HashId'] for obj in new_map['Objs']]
    # Replace modified actors in place; a HashId missing from the stock map
    # is treated as an addition instead.
    for hash_id, actor in changes['mod'].items():
        try:
            new_map['Objs'][stock_hashes.index(hash_id)] = deepcopy(actor)
        except ValueError:
            changes['add'].append(actor)
    if not no_del:
        # Delete from the highest stock index downward so earlier pops do not
        # shift the positions of entries still waiting to be deleted.
        for map_del in sorted(changes['del'],
                              key=lambda change: stock_hashes.index(change)
                              if change in stock_hashes else -1,
                              reverse=True):
            if map_del in stock_hashes:
                try:
                    new_map['Objs'].pop(stock_hashes.index(map_del))
                except IndexError:
                    # Index from the (stale) stock hash list is out of range;
                    # fall back to locating the actor by its HashId.
                    try:
                        obj_to_delete = next(
                            iter([
                                actor for actor in new_map['Objs']
                                if actor['HashId'] == map_del
                            ]))
                        new_map['Objs'].remove(obj_to_delete)
                    except (StopIteration, ValueError):
                        print(f'Could not delete actor with HashId {map_del}')
    # Append added actors whose HashId is not already in the stock map.
    new_map['Objs'].extend([
        change for change in changes['add']
        if change['HashId'] not in stock_hashes
    ])
    new_map['Objs'].sort(key=lambda actor: actor['HashId'])

    # Write the full merged map (including DLC actors) to the aoc tree.
    aoc_out: Path = util.get_master_modpack_dir() / 'aoc' / '0010' / 'Map' / \
        'MainField' / map_unit.section / \
        f'{map_unit.section}_{map_unit.type}.smubin'
    aoc_out.parent.mkdir(parents=True, exist_ok=True)
    aoc_bytes = byml.Writer(new_map, be=True).get_bytes()
    aoc_out.write_bytes(util.compress(aoc_bytes))
    # The base-game copy must not contain DLC-only actors.
    new_map['Objs'] = [
        obj
        for obj in new_map['Objs']
        if not obj['UnitConfigName'].startswith('DLC')
    ]
    (util.get_master_modpack_dir() / 'content' / 'Map' / 'MainField' /
     map_unit.section).mkdir(parents=True, exist_ok=True)
    base_out = util.get_master_modpack_dir() / 'content' / 'Map' / 'MainField' / \
        map_unit.section / f'{map_unit.section}_{map_unit.type}.smubin'
    base_out.parent.mkdir(parents=True, exist_ok=True)
    base_bytes = byml.Writer(new_map, be=True).get_bytes()
    base_out.write_bytes(util.compress(base_bytes))
    # RSTB entries use the uncompressed .mubin paths and sizes.
    return {
        'aoc': (f'Aoc/0010/Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.mubin',
                rstb_calc.calculate_file_size_with_ext(aoc_bytes, True, '.mubin')),
        'main': (f'Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.mubin',
                 rstb_calc.calculate_file_size_with_ext(base_bytes, True, '.mubin'))
    }