Example #1
def inject_savedata_into_bootup(bgsvdata: sarc.SARCWriter, bootup_path: Path = None) -> int:
    """
    Packs a savedata SARC into Bootup.pack and returns the RSTB size of the new savedataformat.sarc

    :param bgsvdata: A SARCWriter for the new savedata
    :type bgsvdata: :class:`sarc.SARCWriter`
    :param bootup_path: Path to the Bootup.pack to update, defaults to a master BCML copy
    :type bootup_path: :class:`pathlib.Path`, optional
    :returns: Returns the RSTB size of the new savedataformat.sarc
    :rtype: int
    """
    if not bootup_path:
        master_boot = util.get_master_modpack_dir() / 'content' / 'Pack' / 'Bootup.pack'
        bootup_path = master_boot if master_boot.exists() \
            else util.get_game_file('Pack/Bootup.pack')
    with bootup_path.open('rb') as b_file:
        bootup_pack = sarc.read_file_and_make_sarc(b_file)
    new_pack = sarc.make_writer_from_sarc(bootup_pack)
    new_pack.delete_file('GameData/savedataformat.ssarc')
    savedata_bytes = bgsvdata.get_bytes()
    new_pack.add_file('GameData/savedataformat.ssarc',
                      util.compress(savedata_bytes))
    (util.get_master_modpack_dir() / 'content' / 'Pack').mkdir(parents=True, exist_ok=True)
    with (util.get_master_modpack_dir() / 'content' / 'Pack' / 'Bootup.pack').open('wb') as b_file:
        new_pack.write(b_file)
    return rstb.SizeCalculator().calculate_file_size_with_ext(savedata_bytes, True, '.sarc')
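A minimal usage sketch (assumptions: the surrounding BCML modules util and rstable are importable alongside this function, and the savedata payload below is a placeholder only):

import sarc

writer = sarc.SARCWriter(True)  # big-endian SARC writer, as merge_savedata() uses later in these examples
writer.add_file('/saveformat_0.bgsvdata', b'\x00' * 16)  # placeholder payload
new_size = inject_savedata_into_bootup(writer)
rstable.set_size('GameData/savedataformat.sarc', new_size)  # RSTB fix-up, as in merge_savedata()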
Example #2
    def perform_merge(self):
        no_del = self._options.get("no_del", False)
        shutil.rmtree(
            str(util.get_master_modpack_dir() / util.get_dlc_path() /
                ("0010" if util.get_settings("wiiu") else "") / "Map" /
                "MainField"),
            ignore_errors=True,
        )
        shutil.rmtree(
            str(util.get_master_modpack_dir() / util.get_content_path() /
                "Map" / "MainField"),
            ignore_errors=True,
        )
        log_path = util.get_master_modpack_dir() / "logs" / "map.log"
        if log_path.exists():
            log_path.unlink()
        print("Loading map mods...")
        map_diffs = self.consolidate_diffs(self.get_all_diffs())
        util.vprint("All map diffs:")
        util.vprint(map_diffs)
        if not map_diffs:
            print("No map merge necessary")
            return
        aoc_pack = (util.get_master_modpack_dir() / util.get_dlc_path() /
                    ("0010" if util.get_settings("wiiu") else "") / "Pack" /
                    "AocMainField.pack")
        if not aoc_pack.exists() or aoc_pack.stat().st_size > 0:
            print("Emptying AocMainField.pack...")
            aoc_pack.parent.mkdir(parents=True, exist_ok=True)
            aoc_pack.write_bytes(b"")

        rstb_vals = {}
        rstb_calc = rstb.SizeCalculator()
        print("Merging modded map units...")

        pool = self._pool or Pool(maxtasksperchild=500)
        rstb_results = pool.map(
            partial(merge_map, rstb_calc=rstb_calc, no_del=no_del),
            map_diffs.items(),
        )
        for result in rstb_results:
            rstb_vals[result[util.get_dlc_path()][0]] = result[
                util.get_dlc_path()][1]
            rstb_vals[result["main"][0]] = result["main"][1]
        if not self._pool:
            pool.close()
            pool.join()

        print("Adjusting RSTB...")
        log_path.parent.mkdir(parents=True, exist_ok=True)
        with log_path.open("w", encoding="utf-8") as l_file:
            for canon, val in rstb_vals.items():
                l_file.write(f"{canon},{val}\n")
        print("Map merge complete")
Example #3
def merge_dungeonstatic(diffs: dict = None):
    """Merges all changes to the CDungeon Static.smubin"""
    if not diffs:
        try:
            (util.get_master_modpack_dir() / "logs" /
             "dungeonstatic.smubin").unlink()
        except FileNotFoundError:
            pass
        return

    try:
        new_static = oead.byml.from_binary(
            util.decompress((util.get_aoc_dir() / "Map" / "CDungeon" /
                             "Static.smubin").read_bytes()))
    except FileNotFoundError:
        new_static = oead.byml.from_binary(
            util.get_nested_file_bytes(
                f"{util.get_game_file('Pack/Bootup.pack')}//Map/CDungeon/Static.smubin"
            ))

    base_names = [
        f"{str(spawn_point['Map'])}___{str(spawn_point['PosName'])}"
        for spawn_point in new_static["StartPos"]
    ]
    for spawn_name, diff in diffs.items():
        if "___" not in spawn_name:
            spawn_name = f"{spawn_name}___Entrance_1"
        if spawn_name not in base_names:
            new_static["StartPos"].append(diff)
        else:
            for key, value in diff.items():
                new_static["StartPos"][base_names.index(
                    spawn_name)][key] = value

    data = util.compress(
        oead.byml.to_binary(new_static, big_endian=util.get_settings("wiiu")))
    try:
        util.get_aoc_dir()
        output_static = (util.get_master_modpack_dir() / util.get_dlc_path() /
                         ("0010" if util.get_settings("wiiu") else "") /
                         "Map" / "CDungeon" / "Static.smubin")
    except FileNotFoundError:
        output_static = util.get_master_modpack_dir(
        ) / "logs" / "dungeonstatic.smubin"
        util.inject_file_into_sarc(
            "Map/CDungeon/Static.smubin",
            data,
            "Pack/Bootup.pack",
            create_sarc=True,
        )
    output_static.parent.mkdir(parents=True, exist_ok=True)
    output_static.write_bytes(data)
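A sketch of the diffs argument this merger expects (the entry below is hypothetical, not taken from the game files): keys follow the Map___PosName convention, and each value holds the StartPos fields to overwrite or append:

example_diffs = {
    # hypothetical spawn point; real diffs come from the installed mods' logs
    "Dungeon000___Entrance_1": {"Map": "Dungeon000", "PosName": "Entrance_1"},
}
merge_dungeonstatic(example_diffs)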
Example #4
def merge_maps(no_del: bool = False,
               link_del: bool = False,
               verbose: bool = False,
               original_pool: Pool = None):
    """Merges all installed modifications to mainfield maps"""
    aoc_pack = util.get_master_modpack_dir() / 'aoc' / '0010' / \
        'Pack' / 'AocMainField.pack'
    if not aoc_pack.exists() or aoc_pack.stat().st_size > 0:
        print('Emptying AocMainField.pack...')
        aoc_pack.parent.mkdir(parents=True, exist_ok=True)
        aoc_pack.write_bytes(b'')
    shutil.rmtree(str(util.get_master_modpack_dir() / 'aoc' / '0010' / 'Map' /
                      'MainField'),
                  ignore_errors=True)
    shutil.rmtree(str(util.get_master_modpack_dir() / 'content' / 'Map' /
                      'MainField'),
                  ignore_errors=True)
    log_path = util.get_master_modpack_dir() / 'logs' / 'map.log'
    if log_path.exists():
        log_path.unlink()
    print('Loading map mods...')
    map_diffs = get_all_map_diffs()
    if not map_diffs:
        print('No map merge necessary')
        return

    rstb_vals = {}
    rstb_calc = rstb.SizeCalculator()
    print('Merging modded map units...')
    num_threads = min(cpu_count() - 1, len(map_diffs))
    pool = original_pool or Pool(processes=num_threads)
    rstb_results = pool.map(
        partial(merge_map,
                rstb_calc=rstb_calc,
                no_del=no_del,
                link_del=link_del,
                verbose=verbose), list(map_diffs.items()))
    for result in rstb_results:
        rstb_vals[result['aoc'][0]] = result['aoc'][1]
        rstb_vals[result['main'][0]] = result['main'][1]
    if not original_pool:
        pool.close()
        pool.join()

    print('Adjusting RSTB...')
    with log_path.open('w', encoding='utf-8') as l_file:
        for canon, val in rstb_vals.items():
            l_file.write(f'{canon},{val}\n')
    print('Map merge complete')
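A usage sketch for the standalone merger above, assuming BCML and at least one map mod are installed; passing original_pool keeps merge_maps() from creating and closing its own pool:

from multiprocessing import Pool

with Pool() as shared_pool:
    merge_maps(no_del=True, original_pool=shared_pool)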
Example #5
 def get_bootup_injection(self):
     tmp_sarc = util.get_master_modpack_dir() / 'logs' / 'savedata.sarc'
     if tmp_sarc.exists():
         return ('GameData/savedataformat.ssarc',
                 util.compress(tmp_sarc.read_bytes()))
     else:
         return
Example #6
def merge_dungeonstatic(diffs: dict = None):
    """Merges all changes to the CDungeon Static.smubin"""
    if not diffs:
        return

    new_static = oead.byml.from_binary(
        util.decompress((util.get_aoc_dir() / "Map" / "CDungeon" /
                         "Static.smubin").read_bytes()))

    base_dungeons = [str(dungeon["Map"]) for dungeon in new_static["StartPos"]]
    for dungeon, diff in diffs.items():
        if dungeon not in base_dungeons:
            new_static["StartPos"].append(diff)
        else:
            for key, value in diff.items():
                new_static["StartPos"][base_dungeons.index(
                    dungeon)][key] = value

    output_static = (util.get_master_modpack_dir() / util.get_dlc_path() /
                     ("0010" if util.get_settings("wiiu") else "") / "Map" /
                     "CDungeon" / "Static.smubin")
    output_static.parent.mkdir(parents=True, exist_ok=True)
    output_static.write_bytes(
        util.compress(
            oead.byml.to_binary(new_static,
                                big_endian=util.get_settings("wiiu"))))
Example #7
def merge_dungeonstatic(diffs: dict = None):
    """Merges all changes to the CDungeon Static.smubin"""
    if not diffs:
        diffs = {}
        loader = yaml.CSafeLoader
        yaml_util.add_constructors(loader)
        for mod in [mod for mod in util.get_installed_mods() \
                    if (mod.path / 'logs' / 'dstatic.yml').exists()]:
            diffs.update(
                yaml.load((mod.path / 'logs' / 'dstatic.yml').read_bytes(),
                          Loader=loader))

    if not diffs:
        return

    new_static = byml.Byml(
        util.decompress_file(
            str(util.get_game_file(
                'aoc/0010/Map/CDungeon/Static.smubin')))).parse()

    base_dungeons = [dungeon['Map'] for dungeon in new_static['StartPos']]
    for dungeon, diff in diffs.items():
        if dungeon not in base_dungeons:
            new_static['StartPos'].append(diff)
        else:
            for key, value in diff.items():
                new_static['StartPos'][base_dungeons.index(
                    dungeon)][key] = value

    output_static = util.get_master_modpack_dir() / 'aoc' / '0010' / 'Map' / \
        'CDungeon' / 'Static.smubin'
    output_static.parent.mkdir(parents=True, exist_ok=True)
    output_static.write_bytes(
        util.compress(byml.Writer(new_static, True).get_bytes()))
Example #8
 def perform_merge(self):
     diffs = self.consolidate_diffs(self.get_all_diffs())
     output: Path
     static_data: Path
     try:
         util.get_aoc_dir()
         output = (util.get_master_modpack_dir() / util.get_dlc_path() /
                   ("0010" if util.get_settings("wiiu") else "") /
                   STATIC_PATH)
         static_data = util.get_game_file("Map/MainField/Static.smubin",
                                          aoc=True).read_bytes()
     except FileNotFoundError:
         output = util.get_master_modpack_dir(
         ) / "logs" / "mainstatic.smubin"
         static_data = util.get_nested_file_bytes(
             (str(util.get_game_file("Pack/Bootup.pack")) +
              "//Map/MainField/Static.smubin"),
             unyaz=False,
         )
     if not diffs:
         try:
             output.unlink()
         except FileNotFoundError:
             pass
         return
     stock_static = oead.byml.from_binary(util.decompress(static_data))
     merged = Hash()
     for cat in stock_static:
         if cat in diffs:
             items = {get_id(item): item for item in stock_static[cat]}
             util.dict_merge(items, diffs[cat])
             merged[cat] = Array([
                 item for _, item in items.items() if "remove" not in item
             ])
         else:
             merged[cat] = stock_static[cat]
     data = util.compress(
         oead.byml.to_binary(merged, big_endian=util.get_settings("wiiu")))
     output.parent.mkdir(parents=True, exist_ok=True)
     output.write_bytes(data)
     if "mainstatic" in str(output):
         util.inject_file_into_sarc(
             "Map/MainField/Static.smubin",
             data,
             "Pack/Bootup.pack",
             create_sarc=True,
         )
Example #9
 def get_bootup_injection(self):
     tmp_sarc = util.get_master_modpack_dir() / "logs" / "mainstatic.smubin"
     if tmp_sarc.exists():
         return (
             "Map/MainField/Static.smubin",
             tmp_sarc,
         )
     return
Example #10
def merge_actorinfo(verbose: bool = False):
    """Merges installed changes to actor info"""
    mods = get_actorinfo_mods()
    actor_path = (util.get_master_modpack_dir() / 'content' / 'Actor' /
                  'ActorInfo.product.sbyml')
    if not mods:
        print('No actor info merging necessary.')
        if actor_path.exists():
            actor_path.unlink()
        return

    print('Loading modded actor info...')
    modded_actors = {}
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    for mod in mods:
        with (mod.path / 'logs' / 'actorinfo.yml').open(
                'r', encoding='utf-8') as a_file:
            entries = yaml.load(a_file, Loader=loader)
            util.dict_merge(modded_actors, entries, overwrite_lists=True)
            if verbose:
                print(f'Loaded {len(entries)} entries from {mod.name}')
            del entries
    print('Loading unmodded actor info...')
    actorinfo = get_stock_actorinfo()

    print('Merging changes...')
    for actor_hash, actor_info in modded_actors.items():
        if actor_hash in actorinfo['Hashes']:
            idx = actorinfo['Hashes'].index(actor_hash)
            util.dict_merge(actorinfo['Actors'][idx],
                            actor_info,
                            overwrite_lists=True)
            if verbose:
                print(f'  Updated entry for {actorinfo["Actors"][idx]["name"]}')
        else:
            actorinfo['Hashes'].append(actor_hash)
            actorinfo['Actors'].append(actor_info)
            if verbose:
                print(f'  Added entry for {actor_info["name"]}')

    print('Sorting new actor info...')
    actorinfo['Hashes'].sort()
    actorinfo['Hashes'] = list(
        map(lambda x: byml.Int(x)
            if x < 2147483648 else byml.UInt(x), actorinfo['Hashes']))
    actorinfo['Actors'].sort(
        key=lambda x: zlib.crc32(x['name'].encode('utf-8')))

    print('Saving new actor info...')
    buf = BytesIO()
    byml.Writer(actorinfo, True).write(buf)
    actor_path.parent.mkdir(parents=True, exist_ok=True)
    actor_path.write_bytes(util.compress(buf.getvalue()))
    print('Actor info merged successfully')
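The Hashes list parallels the Actors list and appears to hold CRC32 hashes of actor names, which is why the code above sorts Actors by crc32(name). A lookup sketch (the actor name is only an example):

import zlib

actorinfo = get_stock_actorinfo()
name = "Enemy_Bokoblin_Junior"  # example actor name
actor_hash = zlib.crc32(name.encode("utf-8"))
if actor_hash in actorinfo["Hashes"]:
    entry = actorinfo["Actors"][actorinfo["Hashes"].index(actor_hash)]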
Example #11
def merge_savedata(verbose: bool = False, force: bool = False):
    """ Merges install savedata mods and saves the new Bootup.pack, fixing the RSTB if needed"""
    mods = get_savedata_mods()
    slog_path = util.get_master_modpack_dir() / 'logs' / 'savedata.log'
    if not mods:
        print('No savedata merging necessary.')
        if slog_path.exists():
            slog_path.unlink()
        if (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').exists():
            (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').unlink()
        return
    if slog_path.exists() and not force:
        with slog_path.open('r') as l_file:
            if xxhash.xxh32(str(mods)).hexdigest() == l_file.read():
                print('No savedata merging necessary.')
                return

    new_entries = []
    new_hashes = []
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    print('Loading savedata mods...')
    for mod in mods:
        with open(mod.path / 'logs' / 'savedata.yml') as s_file:
            yml = yaml.load(s_file, Loader=loader)
            for entry in yml:
                if entry['HashValue'] in new_hashes:
                    continue
                else:
                    new_entries.append(entry)
                    new_hashes.append(entry['HashValue'])
                    if verbose:
                        print(f'  Added {entry["DataName"]} from {mod.name}')

    savedata = get_stock_savedata()
    merged_entries = []
    save_files = sorted(savedata.list_files())[0:-2]

    print('Loading stock savedata...')
    for file in save_files:
        merged_entries.extend(byml.Byml(savedata.get_file_data(
            file).tobytes()).parse()['file_list'][1])

    print('Merging changes...')
    merged_entries.extend(new_entries)
    merged_entries.sort(key=lambda x: x['HashValue'])

    special_bgsv = [
        savedata.get_file_data('/saveformat_6.bgsvdata').tobytes(),
        savedata.get_file_data('/saveformat_7.bgsvdata').tobytes(),
    ]

    print('Creating and injecting new savedataformat.sarc...')
    new_savedata = sarc.SARCWriter(True)
    num_files = ceil(len(merged_entries) / 8192)
    for i in range(num_files):
        end_pos = (i+1) * 8192
        if end_pos > len(merged_entries):
            end_pos = len(merged_entries)
        buf = BytesIO()
        byml.Writer({
            'file_list': [
                {
                    'IsCommon': False,
                    'IsCommonAtSameAccount': False,
                    'IsSaveSecureCode': True,
                    'file_name': 'game_data.sav'
                },
                merged_entries[i*8192:end_pos]
            ],
            'save_info': [
                {
                    'directory_num': byml.Int(8),
                    'is_build_machine': True,
                    'revision': byml.Int(18203)
                }
            ]
        }, True).write(buf)
        new_savedata.add_file(f'/saveformat_{i}.bgsvdata', buf.getvalue())
    new_savedata.add_file(f'/saveformat_{num_files}.bgsvdata', special_bgsv[0])
    new_savedata.add_file(
        f'/saveformat_{num_files + 1}.bgsvdata', special_bgsv[1])
    bootup_rstb = inject_savedata_into_bootup(new_savedata)
    (util.get_master_modpack_dir() / 'logs').mkdir(parents=True, exist_ok=True)
    with (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').open('wb') as s_file:
        new_savedata.write(s_file)

    print('Updating RSTB...')
    rstable.set_size('GameData/savedataformat.sarc', bootup_rstb)

    slog_path.parent.mkdir(parents=True, exist_ok=True)
    with slog_path.open('w', encoding='utf-8') as l_file:
        l_file.write(xxhash.xxh32(str(mods)).hexdigest())
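A usage sketch: savedata.log stores an xxhash of the installed mod list, so a repeat call is skipped unless the list changes or force=True is passed:

merge_savedata(verbose=True)              # merges and writes savedata.log
merge_savedata(verbose=True)              # skipped: mod list hash matches the log
merge_savedata(verbose=True, force=True)  # remerges regardless of the log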
Example #12
def merge_gamedata(verbose: bool = False, force: bool = False):
    """ Merges installed gamedata mods and saves the new Bootup.pack, fixing the RSTB if needed """
    mods = get_gamedata_mods()
    glog_path = util.get_master_modpack_dir() / 'logs' / 'gamedata.log'
    if not mods:
        print('No gamedata merging necessary.')
        if glog_path.exists():
            glog_path.unlink()
        if (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').exists():
            (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').unlink()
        return
    if glog_path.exists() and not force:
        with glog_path.open('r') as l_file:
            if xxhash.xxh32(str(mods)).hexdigest() == l_file.read():
                print('No gamedata merging necessary.')
                return

    modded_entries = {}
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    print('Loading gamedata mods...')
    for mod in mods:
        with (mod.path / 'logs' / 'gamedata.yml').open('r') as g_file:
            yml = yaml.load(g_file, Loader=loader)
            for data_type in yml:
                if data_type not in modded_entries:
                    modded_entries[data_type] = {}
                modded_entries[data_type].update(yml[data_type])
                if verbose:
                    print(f'  Added entries for {data_type} from {mod.name}')

    gamedata = get_stock_gamedata()
    merged_entries = {}

    print('Loading stock gamedata...')
    for yml in gamedata.list_files():
        base_yml = byml.Byml(gamedata.get_file_data(yml).tobytes()).parse()
        for data_type in base_yml:
            if data_type not in merged_entries:
                merged_entries[data_type] = []
            merged_entries[data_type].extend(base_yml[data_type])

    print('Merging changes...')
    for data_type in merged_entries:
        if data_type in modded_entries:
            for entry in [entry for entry in merged_entries[data_type]
                          if entry['DataName'] in modded_entries[data_type]]:
                i = merged_entries[data_type].index(entry)
                if verbose:
                    print(f'  {entry["DataName"]} has been modified')
                merged_entries[data_type][i] = deepcopy(
                    modded_entries[data_type][entry['DataName']])
            print(f'Merged modified {data_type} entries')

    for data_type in modded_entries:
        for entry in [entry for entry in modded_entries[data_type]
                      if entry not in [entry['DataName'] for entry in merged_entries[data_type]]]:
            if verbose:
                print(f'  {entry} has been added')
            merged_entries[data_type].append(modded_entries[data_type][entry])
        print(f'Merged new {data_type} entries')

    print('Creating and injecting new gamedata.sarc...')
    new_gamedata = sarc.SARCWriter(True)
    for data_type in merged_entries:
        num_files = ceil(len(merged_entries[data_type]) / 4096)
        for i in range(num_files):
            end_pos = (i+1) * 4096
            if end_pos > len(merged_entries[data_type]):
                end_pos = len(merged_entries[data_type])
            buf = BytesIO()
            byml.Writer(
                {data_type: merged_entries[data_type][i*4096:end_pos]}, be=True).write(buf)
            new_gamedata.add_file(f'/{data_type}_{i}.bgdata', buf.getvalue())
    bootup_rstb = inject_gamedata_into_bootup(new_gamedata)
    (util.get_master_modpack_dir() / 'logs').mkdir(parents=True, exist_ok=True)
    with (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').open('wb') as g_file:
        new_gamedata.write(g_file)

    print('Updating RSTB...')
    rstable.set_size('GameData/gamedata.sarc', bootup_rstb)

    glog_path.parent.mkdir(parents=True, exist_ok=True)
    with glog_path.open('w', encoding='utf-8') as l_file:
        l_file.write(xxhash.xxh32(str(mods)).hexdigest())
Example #13
    def perform_merge(self):
        shutil.rmtree(
            str(util.get_master_modpack_dir() / util.get_dlc_path() /
                ("0010" if util.get_settings("wiiu") else "") / "Map" /
                "MainField"),
            ignore_errors=True,
        )
        shutil.rmtree(
            str(util.get_master_modpack_dir() / util.get_content_path() /
                "Map" / "MainField"),
            ignore_errors=True,
        )
        log_path = util.get_master_modpack_dir() / "logs" / "map.log"
        if log_path.exists():
            log_path.unlink()
        print("Loading map mods...")
        map_diffs = self.consolidate_diffs(self.get_all_diffs())
        util.vprint("All map diffs:")
        util.vprint(map_diffs)
        if not map_diffs:
            print("No map merge necessary")
            return
        aoc_pack = (util.get_master_modpack_dir() / util.get_dlc_path() /
                    ("0010" if util.get_settings("wiiu") else "") / "Pack" /
                    "AocMainField.pack")
        if not aoc_pack.exists() or aoc_pack.stat().st_size > 0:
            print("Emptying AocMainField.pack...")
            aoc_pack.parent.mkdir(parents=True, exist_ok=True)
            aoc_pack.write_bytes(b"")

        rstb_vals = {}
        rstb_calc = rstb.SizeCalculator()
        print("Merging modded map units...")

        pool = self._pool or Pool(maxtasksperchild=500)
        rstb_results = pool.map(
            partial(merge_map, rstb_calc=rstb_calc),
            map_diffs.items(),
        )
        for result in rstb_results:
            rstb_vals[result[util.get_dlc_path()][0]] = result[
                util.get_dlc_path()][1]
            rstb_vals[result["main"][0]] = result["main"][1]
        if not self._pool:
            pool.close()
            pool.join()

        stock_static = [m for m in map_diffs if m[1] == "Static"]
        if stock_static:
            title_path = (util.get_master_modpack_dir() /
                          util.get_content_path() / "Pack" / "TitleBG.pack")
            if not title_path.exists():
                title_path.parent.mkdir(parents=True, exist_ok=True)
                shutil.copyfile(util.get_game_file("Pack/TitleBG.pack"),
                                title_path)
            title_bg: oead.SarcWriter = oead.SarcWriter.from_sarc(
                oead.Sarc(title_path.read_bytes()))
            for static in stock_static:
                del title_bg.files[
                    f"Map/MainField/{static[0]}/{static[0]}_Static.smubin"]
            title_path.write_bytes(title_bg.write()[1])
        print("Adjusting RSTB...")
        log_path.parent.mkdir(parents=True, exist_ok=True)
        with log_path.open("w", encoding="utf-8") as l_file:
            for canon, val in rstb_vals.items():
                l_file.write(f"{canon},{val}\n")
        print("Map merge complete")
Example #14
def merge_map(map_pair: tuple,
              rstb_calc: rstb.SizeCalculator) -> Dict[str, Tuple[str, int]]:
    map_unit, changes = map_pair[0], map_pair[1]
    util.vprint(f'Merging {len(changes)} versions of {"_".join(map_unit)}...')
    new_map = get_stock_map(map_unit)
    stock_obj_hashes = [int(obj["HashId"]) for obj in new_map["Objs"]]
    for hash_id, actor in changes["Objs"]["mod"].items():
        try:
            new_map["Objs"][stock_obj_hashes.index(int(hash_id))] = actor
        except ValueError:
            changes["Objs"]["add"].append(actor)
    for map_del in sorted(
            changes["Objs"]["del"],
            key=lambda change: stock_obj_hashes.index(change)
            if change in stock_obj_hashes else -1,
            reverse=True,
    ):
        if int(map_del) in stock_obj_hashes:
            try:
                new_map["Objs"].pop(stock_obj_hashes.index(map_del))
            except IndexError:
                try:
                    obj_to_delete = next(
                        iter([
                            actor for actor in new_map["Objs"]
                            if actor["HashId"] == map_del
                        ]))
                    new_map["Objs"].remove(obj_to_delete)
                except (StopIteration, ValueError):
                    util.vprint(
                        f"Could not delete actor with HashId {map_del}")
    new_map["Objs"].extend([
        change for change in changes["Objs"]["add"]
        if int(change["HashId"]) not in stock_obj_hashes
    ])
    new_map["Objs"] = sorted(new_map["Objs"],
                             key=lambda actor: int(actor["HashId"]))

    if len(new_map["Rails"]):
        stock_rail_hashes = [int(rail["HashId"]) for rail in new_map["Rails"]]
        for hash_id, rail in changes["Rails"]["mod"].items():
            try:
                new_map["Rails"][stock_rail_hashes.index(int(hash_id))] = rail
            except ValueError:
                changes["Rails"]["add"].append(rail)
        for map_del in sorted(
                changes["Rails"]["del"],
                key=lambda change: stock_rail_hashes.index(int(change))
                if int(change) in stock_rail_hashes else -1,
                reverse=True,
        ):
            if int(map_del) in stock_rail_hashes:
                try:
                    new_map["Rails"].pop(stock_rail_hashes.index(int(map_del)))
                except IndexError:
                    try:
                        obj_to_delete = next(
                            iter([
                                rail for rail in new_map["Rails"]
                                if rail["HashId"] == map_del
                            ]))
                        new_map["Rails"].remove(obj_to_delete)
                    except (StopIteration, ValueError):
                        util.vprint(
                            f"Could not delete rail with HashId {map_del}")
        new_map["Rails"].extend([
            change for change in changes["Rails"]["add"]
            if int(change["HashId"]) not in stock_rail_hashes
        ])
        new_map["Rails"] = sorted(new_map["Rails"],
                                  key=lambda rail: int(rail["HashId"]))

    aoc_out: Path = (util.get_master_modpack_dir() / util.get_dlc_path() /
                     ("0010" if util.get_settings("wiiu") else "") / "Map" /
                     "MainField" / map_unit.section /
                     f"{map_unit.section}_{map_unit.type}.smubin")
    aoc_out.parent.mkdir(parents=True, exist_ok=True)
    aoc_bytes = oead.byml.to_binary(new_map,
                                    big_endian=util.get_settings("wiiu"))
    aoc_out.write_bytes(util.compress(aoc_bytes))
    new_map["Objs"] = [
        obj for obj in new_map["Objs"]
        if not str(obj["UnitConfigName"]).startswith("DLC")
    ]
    (util.get_master_modpack_dir() / util.get_content_path() / "Map" /
     "MainField" / map_unit.section).mkdir(parents=True, exist_ok=True)
    base_out = (util.get_master_modpack_dir() / util.get_content_path() /
                "Map" / "MainField" / map_unit.section /
                f"{map_unit.section}_{map_unit.type}.smubin")
    base_out.parent.mkdir(parents=True, exist_ok=True)
    base_bytes = oead.byml.to_binary(new_map,
                                     big_endian=util.get_settings("wiiu"))
    base_out.write_bytes(util.compress(base_bytes))
    return {
        util.get_dlc_path(): (
            f"Aoc/0010/Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.mubin",
            rstb_calc.calculate_file_size_with_ext(bytes(aoc_bytes),
                                                   util.get_settings("wiiu"),
                                                   ".mubin"),
        ),
        "main": (
            f"Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.mubin",
            rstb_calc.calculate_file_size_with_ext(bytes(base_bytes),
                                                   util.get_settings("wiiu"),
                                                   ".mubin"),
        ),
    }
Example #15
def merge_texts(lang: str = 'USen',
                tmp_dir: Path = util.get_work_dir() / 'tmp_text',
                verbose: bool = False,
                original_pool: multiprocessing.Pool = None):
    """
    Merges installed text mods and saves the new Bootup_XXxx.pack, fixing the RSTB if needed

    :param lang: The game language to use, defaults to USen.
    :type lang: str, optional
    :param tmp_dir: The temp directory to extract to, defaults to "tmp_text" in BCML's work dir.
    :type tmp_dir: :class:`pathlib.Path`, optional
    :param verbose: Whether to display more detailed output, defaults to False
    :type verbose: bool, optional
    :param original_pool: An existing multiprocessing pool to reuse for merging, defaults to None
    :type original_pool: :class:`multiprocessing.Pool`, optional
    """
    print(f'Loading text mods for language {lang}...')
    text_mods = get_modded_text_entries(lang)
    if not text_mods:
        print('No text merging necessary.')
        old_path = util.get_master_modpack_dir() / 'content' / 'Pack' / \
            f'Bootup_{lang}.pack'
        if old_path.exists():
            old_path.unlink()
        return
    if verbose:
        print(f'  Found {len(text_mods)} text mods to be merged')

    if tmp_dir.exists():
        if verbose:
            print('Cleaning temp directory...')
        shutil.rmtree(tmp_dir, ignore_errors=True)
    print('Extracting clean MSYTs...')
    try:
        extract_ref_msyts(lang, for_merge=True, tmp_dir=tmp_dir)
    except FileNotFoundError:
        return
    merge_dir = tmp_dir / 'merged'
    merge_dir.mkdir(parents=True, exist_ok=True)

    print('Merging modified text files...')
    modded_text_files = list(merge_dir.rglob('**/*.msyt'))
    num_threads = min(multiprocessing.cpu_count(), len(modded_text_files))
    pool = original_pool or multiprocessing.Pool(processes=num_threads)
    thread_merger = partial(threaded_merge_texts,
                            merge_dir=merge_dir,
                            text_mods=text_mods,
                            verbose=verbose)
    results = pool.map(thread_merger, modded_text_files)
    for merge_count, rel_path in results:
        if merge_count > 0:
            print(f'  Merged {merge_count} versions of {rel_path}')
    if not original_pool:
        pool.close()
        pool.join()
    print('Generating merged MSBTs...')
    msyt_to_msbt(tmp_dir)

    added_texts = get_added_text_mods(lang)
    if added_texts:
        print('Adding mod-original MSBTs...')
        for added_text in added_texts:
            for msbt in added_text.list_files():
                Path(merge_dir / msbt).parent.mkdir(parents=True,
                                                    exist_ok=True)
                Path(merge_dir / msbt).write_bytes(
                    added_text.get_file_data(msbt).tobytes())

    print(f'Creating new Bootup_{lang}.pack...')
    tmp_boot_path = bootup_from_msbts(lang)[0]
    merged_boot_path = util.get_modpack_dir() / '9999_BCML' / 'content' / \
        'Pack' / f'Bootup_{lang}.pack'
    if merged_boot_path.exists():
        if verbose:
            print(f'  Removing old Bootup_{lang}.pack...')
        merged_boot_path.unlink()
    merged_boot_path.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy(str(tmp_boot_path), str(merged_boot_path))

    rstb_path = util.get_modpack_dir() / '9999_BCML' / 'content' / 'System' / 'Resource' /\
                                         'ResourceSizeTable.product.srsizetable'
    if rstb_path.exists():
        table: rstb.ResourceSizeTable = rstb.util.read_rstb(
            str(rstb_path), True)
    else:
        table = rstable.get_stock_rstb()
    msg_path = f'Message/Msg_{lang}.product.sarc'
    if table.is_in_table(msg_path):
        print('Correcting RSTB...')
        table.delete_entry(msg_path)
    rstb_path.parent.mkdir(parents=True, exist_ok=True)
    rstb.util.write_rstb(table, str(rstb_path), True)
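A usage sketch for a non-default language ('EUen' is only an example code and assumes the installed mods actually ship that language), reusing an outside pool:

import multiprocessing

with multiprocessing.Pool() as pool:
    merge_texts(lang='EUen', verbose=True, original_pool=pool)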
Example #16
def merge_map(map_pair: tuple,
              rstb_calc: rstb.SizeCalculator,
              no_del: bool = False,
              link_del: bool = False,
              verbose: bool = False) -> dict:
    """
    Merges changes to a mainfield map and returns the RSTB values

    :param map_pair: A tuple containing the :class:`Map` to merge and the changes as a dict
    :type map_pair: tuple
    :param rstb_calc: An `rstb` library SizeCalculator object
    :type rstb_calc: :class:`rstb.SizeCalculator`
    :param verbose: Whether to display verbose output, defaults to False
    :type verbose: bool, optional
    :return: Returns a dict containing the aoc and base RSTB paths for the merged map units along
    with their new size values
    :rtype: dict of str: int
    """
    map_unit, changes = map_pair
    if verbose:
        print(f'Merging {len(changes)} versions of {"_".join(map_unit)}...')
    new_map = get_stock_map(map_unit)
    stock_hashes = [obj['HashId'] for obj in new_map['Objs']]
    for hash_id, actor in changes['mod'].items():
        try:
            new_map['Objs'][stock_hashes.index(hash_id)] = deepcopy(actor)
        except ValueError:
            changes['add'].append(actor)
    if not no_del:
        for map_del in sorted(changes['del'], key=lambda change: stock_hashes.index(change) \
                              if change in stock_hashes else -1, reverse=True):
            if map_del in stock_hashes:
                try:
                    new_map['Objs'].pop(stock_hashes.index(map_del))
                except IndexError:
                    try:
                        obj_to_delete = next(
                            iter([
                                actor for actor in new_map['Objs']
                                if actor['HashId'] == map_del
                            ]))
                        new_map['Objs'].remove(obj_to_delete)
                    except (StopIteration, ValueError):
                        print(f'Could not delete actor with HashId {map_del}')
    new_map['Objs'].extend([
        change for change in changes['add']
        if change['HashId'] not in stock_hashes
    ])
    new_map['Objs'].sort(key=lambda actor: actor['HashId'])

    aoc_out: Path = util.get_master_modpack_dir() / 'aoc' / '0010' / 'Map' / \
        'MainField' / map_unit.section / \
        f'{map_unit.section}_{map_unit.type}.smubin'
    aoc_out.parent.mkdir(parents=True, exist_ok=True)
    aoc_bytes = byml.Writer(new_map, be=True).get_bytes()
    aoc_out.write_bytes(util.compress(aoc_bytes))
    new_map['Objs'] = [
        obj for obj in new_map['Objs']
        if not obj['UnitConfigName'].startswith('DLC')
    ]
    (util.get_master_modpack_dir() / 'content' / 'Map' / 'MainField' /
     map_unit.section).mkdir(parents=True, exist_ok=True)
    base_out = util.get_master_modpack_dir() / 'content' / 'Map' / 'MainField' / \
        map_unit.section / f'{map_unit.section}_{map_unit.type}.smubin'
    base_out.parent.mkdir(parents=True, exist_ok=True)
    base_bytes = byml.Writer(new_map, be=True).get_bytes()
    base_out.write_bytes(util.compress(base_bytes))
    return {
        'aoc':
        (f'Aoc/0010/Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.mubin',
         rstb_calc.calculate_file_size_with_ext(aoc_bytes, True, '.mubin')),
        'main':
        (f'Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.mubin',
         rstb_calc.calculate_file_size_with_ext(base_bytes, True, '.mubin'))
    }
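merge_map() is normally fanned out over a multiprocessing pool by merge_maps() above; a sketch of merging a single unit directly, assuming get_all_map_diffs() returns at least one entry:

import rstb

calc = rstb.SizeCalculator()
map_unit, changes = next(iter(get_all_map_diffs().items()))
sizes = merge_map((map_unit, changes), calc, no_del=False, verbose=True)
print(sizes['aoc'], sizes['main'])  # (canonical RSTB path, new size) pairs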