Example #1
def inject_savedata_into_bootup(bgsvdata: sarc.SARCWriter, bootup_path: Path = None) -> int:
    """
    Packs a savedata SARC into Bootup.pack and returns the RSTB size of the new savedataformat.sarc

    :param bgsvdata: A SARCWriter for the new savedata
    :type bgsvdata: :class:`sarc.SARCWriter`
    :param bootup_path: Path to the Bootup.pack to update, defaults to a master BCML copy
    :type bootup_path: :class:`pathlib.Path`, optional
    :returns: The RSTB size of the new savedataformat.sarc
    :rtype: int
    """
    if not bootup_path:
        master_boot = util.get_master_modpack_dir() / 'content' / 'Pack' / 'Bootup.pack'
        bootup_path = master_boot if master_boot.exists() \
            else util.get_game_file('Pack/Bootup.pack')
    with bootup_path.open('rb') as b_file:
        bootup_pack = sarc.read_file_and_make_sarc(b_file)
    new_pack = sarc.make_writer_from_sarc(bootup_pack)
    new_pack.delete_file('GameData/savedataformat.ssarc')
    savedata_bytes = bgsvdata.get_bytes()
    new_pack.add_file('GameData/savedataformat.ssarc',
                      util.compress(savedata_bytes))
    (util.get_master_modpack_dir() / 'content' / 'Pack').mkdir(parents=True, exist_ok=True)
    with (util.get_master_modpack_dir() / 'content' / 'Pack' / 'Bootup.pack').open('wb') as b_file:
        new_pack.write(b_file)
    return rstb.SizeCalculator().calculate_file_size_with_ext(savedata_bytes, True, '.sarc')
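
A minimal usage sketch for inject_savedata_into_bootup (the flag file name and its source path are placeholders; a working BCML setup is assumed so that util can locate the master modpack and game dump):

from pathlib import Path

import sarc

# build the new savedata SARC: big-endian writer (Wii U), one placeholder bgsvdata file
writer = sarc.SARCWriter(True)
writer.add_file('/saveformat_0.bgsvdata', Path('saveformat_0.bgsvdata').read_bytes())

# pack it into the master Bootup.pack and keep the returned size for an RSTB update
new_size = inject_savedata_into_bootup(writer)
print(f'GameData/savedataformat.sarc RSTB size: {new_size}')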
Example #2
def bootup_from_msbts(
    lang: str = 'USen',
    msbt_dir: Path = util.get_work_dir() / 'tmp_text' / 'merged'
) -> (Path, int):
    """
    Generates a new Bootup_XXxx.pack from a directory of MSBT files

    :param lang: The game language to use, defaults to USen.
    :type lang: str, optional
    :param msbt_dir: The directory to pull MSBTs from, defaults to "tmp_text/merged" in BCML's
    working directory.
    :type msbt_dir: :class:`pathlib.Path`, optional
    :returns: A tuple with the path to the new Bootup_XXxx.pack and the RSTB size of the new
    Msg_XXxx.product.sarc
    :rtype: (:class:`pathlib.Path`, int)
    """
    new_boot_path = msbt_dir.parent / f'Bootup_{lang}.pack'
    with new_boot_path.open('wb') as new_boot:
        s_msg = sarc.SARCWriter(True)
        for new_msbt in msbt_dir.rglob('*.msbt'):
            with new_msbt.open('rb') as f_new:
                s_msg.add_file(
                    str(new_msbt.relative_to(msbt_dir)).replace('\\', '/'),
                    f_new.read())
        new_msg_stream = io.BytesIO()
        s_msg.write(new_msg_stream)
        unyaz_bytes = new_msg_stream.getvalue()
        rsize = rstb.SizeCalculator().calculate_file_size_with_ext(
            unyaz_bytes, True, '.sarc')
        new_msg_bytes = util.compress(unyaz_bytes)
        s_boot = sarc.SARCWriter(True)
        s_boot.add_file(f'Message/Msg_{lang}.product.ssarc', new_msg_bytes)
        s_boot.write(new_boot)
    return new_boot_path, rsize
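
A usage sketch for bootup_from_msbts (the language code and directory are illustrative; the MSBTs must already be laid out as they should appear inside Msg_XXxx.product.sarc):

from pathlib import Path

boot_path, msg_size = bootup_from_msbts('EUen', Path('merged_text'))
print(f'Wrote {boot_path.name}; Msg_EUen.product.sarc RSTB size: {msg_size}')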
Example #3
def merge_dungeonstatic(diffs: dict = None):
    """Merges all changes to the CDungeon Static.smubin"""
    if not diffs:
        return

    new_static = oead.byml.from_binary(
        util.decompress((util.get_aoc_dir() / "Map" / "CDungeon" /
                         "Static.smubin").read_bytes()))

    base_dungeons = [str(dungeon["Map"]) for dungeon in new_static["StartPos"]]
    for dungeon, diff in diffs.items():
        if dungeon not in base_dungeons:
            new_static["StartPos"].append(diff)
        else:
            for key, value in diff.items():
                new_static["StartPos"][base_dungeons.index(
                    dungeon)][key] = value

    output_static = (util.get_master_modpack_dir() / util.get_dlc_path() /
                     ("0010" if util.get_settings("wiiu") else "") / "Map" /
                     "CDungeon" / "Static.smubin")
    output_static.parent.mkdir(parents=True, exist_ok=True)
    output_static.write_bytes(
        util.compress(
            oead.byml.to_binary(new_static,
                                big_endian=util.get_settings("wiiu"))))
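
A sketch of the diffs structure merge_dungeonstatic expects. The dungeon name and coordinates are invented; Map and PosName appear in the code above, while Rotate and Translate are typical of these StartPos entries. oead.byml.from_text is used so the override lands as proper BYML types:

import oead

# override the Translate hash of one existing dungeon's spawn point
diffs = {
    "Dungeon001": oead.byml.from_text(
        "{Translate: {X: 123.0, Y: 456.0, Z: 789.0}}"
    )
}
merge_dungeonstatic(diffs)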
Example #4
 def get_bootup_injection(self):
     tmp_sarc = util.get_master_modpack_dir() / 'logs' / 'savedata.sarc'
     if tmp_sarc.exists():
         return ('GameData/savedataformat.ssarc',
                 util.compress(tmp_sarc.read_bytes()))
     else:
         return
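
A sketch of how the returned injection tuple might be consumed, mirroring the delete-then-add pattern of inject_savedata_into_bootup above. Both merger (an instance exposing get_bootup_injection) and boot_writer (a sarc.SARCWriter built from the stock Bootup.pack) are assumed to exist:

injection = merger.get_bootup_injection()
if injection:
    dest, data = injection
    boot_writer.delete_file(dest)  # the stock pack already contains this file
    boot_writer.add_file(dest, data)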
Example #5
def merge_dungeonstatic(diffs: dict = None):
    """Merges all changes to the CDungeon Static.smubin"""
    if not diffs:
        diffs = {}
        loader = yaml.CSafeLoader
        yaml_util.add_constructors(loader)
        for mod in [mod for mod in util.get_installed_mods() \
                    if (mod.path / 'logs' / 'dstatic.yml').exists()]:
            diffs.update(
                yaml.load((mod.path / 'logs' / 'dstatic.yml').read_bytes(),
                          Loader=loader))

    if not diffs:
        return

    new_static = byml.Byml(
        util.decompress_file(
            str(util.get_game_file(
                'aoc/0010/Map/CDungeon/Static.smubin')))).parse()

    base_dungeons = [dungeon['Map'] for dungeon in new_static['StartPos']]
    for dungeon, diff in diffs.items():
        if dungeon not in base_dungeons:
            new_static['StartPos'].append(diff)
        else:
            for key, value in diff.items():
                new_static['StartPos'][base_dungeons.index(
                    dungeon)][key] = value

    output_static = util.get_master_modpack_dir() / 'aoc' / '0010' / 'Map' / \
        'CDungeon' / 'Static.smubin'
    output_static.parent.mkdir(parents=True, exist_ok=True)
    output_static.write_bytes(
        util.compress(byml.Writer(new_static, True).get_bytes()))
Example #6
def merge_actorinfo(verbose: bool = False):
    """Merges installed changes to actor info"""
    mods = get_actorinfo_mods()
    actor_path = (util.get_master_modpack_dir() / 'content' / 'Actor' /
                  'ActorInfo.product.sbyml')
    if not mods:
        print('No actor info merging necessary.')
        if actor_path.exists():
            actor_path.unlink()
        return

    print('Loading modded actor info...')
    modded_actors = {}
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    for mod in mods:
        with (mod.path / 'logs' / 'actorinfo.yml').open(
                'r', encoding='utf-8') as a_file:
            entries = yaml.load(a_file, Loader=loader)
            util.dict_merge(modded_actors, entries, overwrite_lists=True)
            if verbose:
                print(f'Loaded {len(entries)} entries from {mod.name}')
            del entries
    print('Loading unmodded actor info...')
    actorinfo = get_stock_actorinfo()

    print('Merging changes...')
    for actor_hash, actor_info in modded_actors.items():
        if actor_hash in actorinfo['Hashes']:
            idx = actorinfo['Hashes'].index(actor_hash)
            util.dict_merge(actorinfo['Actors'][idx],
                            actor_info,
                            overwrite_lists=True)
            if verbose:
                print(f'  Updated entry for {actorinfo["Actors"][idx]["name"]}')
        else:
            actorinfo['Hashes'].append(actor_hash)
            actorinfo['Actors'].append(actor_info)
            if verbose:
                print(f'  Added entry for {actor_info["name"]}')

    print('Sorting new actor info...')
    actorinfo['Hashes'].sort()
    actorinfo['Hashes'] = list(
        map(lambda x: byml.Int(x)
            if x < 2147483648 else byml.UInt(x), actorinfo['Hashes']))
    actorinfo['Actors'].sort(
        key=lambda x: zlib.crc32(x['name'].encode('utf-8')))

    print('Saving new actor info...')
    buf = BytesIO()
    byml.Writer(actorinfo, True).write(buf)
    actor_path.parent.mkdir(parents=True, exist_ok=True)
    actor_path.write_bytes(util.compress(buf.getvalue()))
    print('Actor info merged successfully')
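
The Hashes and Actors lists above stay index-aligned, and each hash is the CRC32 of the actor's name (which is why the final sort keys Actors by zlib.crc32). A small illustration with a made-up actor entry; field values other than name are placeholders:

import zlib

name = 'Armor_9999_Head'                       # hypothetical custom actor
actor_hash = zlib.crc32(name.encode('utf-8'))  # key used in actorinfo['Hashes']
modded_actors = {
    actor_hash: {'name': name, 'instSize': 9000}  # merged into the matching 'Actors' entry
}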
Example #7
def merge_dungeonstatic(diffs: dict = None):
    """Merges all changes to the CDungeon Static.smubin"""
    if not diffs:
        try:
            (util.get_master_modpack_dir() / "logs" /
             "dungeonstatic.smubin").unlink()
        except FileNotFoundError:
            pass
        return

    try:
        new_static = oead.byml.from_binary(
            util.decompress((util.get_aoc_dir() / "Map" / "CDungeon" /
                             "Static.smubin").read_bytes()))
    except FileNotFoundError:
        new_static = oead.byml.from_binary(
            util.get_nested_file_bytes(
                f"{util.get_game_file('Pack/Bootup.pack')}//Map/CDungeon/Static.smubin"
            ))

    base_names = [
        f"{str(spawn_point['Map'])}___{str(spawn_point['PosName'])}"
        for spawn_point in new_static["StartPos"]
    ]
    for spawn_name, diff in diffs.items():
        if "___" not in spawn_name:
            spawn_name = f"{spawn_name}___Entrance_1"
        if spawn_name not in base_names:
            new_static["StartPos"].append(diff)
        else:
            for key, value in diff.items():
                new_static["StartPos"][base_names.index(
                    spawn_name)][key] = value

    data = util.compress(
        oead.byml.to_binary(new_static, big_endian=util.get_settings("wiiu")))
    try:
        util.get_aoc_dir()
        output_static = (util.get_master_modpack_dir() / util.get_dlc_path() /
                         ("0010" if util.get_settings("wiiu") else "") /
                         "Map" / "CDungeon" / "Static.smubin")
    except FileNotFoundError:
        output_static = util.get_master_modpack_dir(
        ) / "logs" / "dungeonstatic.smubin"
        util.inject_file_into_sarc(
            "Map/CDungeon/Static.smubin",
            data,
            "Pack/Bootup.pack",
            create_sarc=True,
        )
    output_static.parent.mkdir(parents=True, exist_ok=True)
    output_static.write_bytes(data)
Example #8
 def perform_merge(self):
     diffs = self.consolidate_diffs(self.get_all_diffs())
     output: Path
     static_data: bytes
     try:
         util.get_aoc_dir()
         output = (util.get_master_modpack_dir() / util.get_dlc_path() /
                   ("0010" if util.get_settings("wiiu") else "") /
                   STATIC_PATH)
         static_data = util.get_game_file("Map/MainField/Static.smubin",
                                          aoc=True).read_bytes()
     except FileNotFoundError:
         output = util.get_master_modpack_dir(
         ) / "logs" / "mainstatic.smubin"
         static_data = util.get_nested_file_bytes(
             (str(util.get_game_file("Pack/Bootup.pack")) +
              "//Map/MainField/Static.smubin"),
             unyaz=False,
         )
     if not diffs:
         try:
             output.unlink()
         except FileNotFoundError:
             pass
         return
     stock_static = oead.byml.from_binary(util.decompress(static_data))
     merged = Hash()
     for cat in stock_static:
         if cat in diffs:
             items = {get_id(item): item for item in stock_static[cat]}
             util.dict_merge(items, diffs[cat])
             merged[cat] = Array([
                 item for _, item in items.items() if "remove" not in item
             ])
         else:
             merged[cat] = stock_static[cat]
     data = util.compress(
         oead.byml.to_binary(merged, big_endian=util.get_settings("wiiu")))
     output.parent.mkdir(parents=True, exist_ok=True)
     output.write_bytes(data)
     if "mainstatic" in str(output):
         util.inject_file_into_sarc(
             "Map/MainField/Static.smubin",
             data,
             "Pack/Bootup.pack",
             create_sarc=True,
         )
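
A toy illustration of the merge-and-delete pattern used in perform_merge: stock entries are keyed by ID, diffs overwrite or add entries, and anything carrying a "remove" marker is dropped. Plain dicts and dict.update stand in for the oead types and util.dict_merge:

stock = {
    '0x01': {'HashId': 1, 'Translate': {'X': 0.0, 'Y': 0.0, 'Z': 0.0}},
    '0x02': {'HashId': 2, 'Translate': {'X': 5.0, 'Y': 0.0, 'Z': 5.0}},
}
diffs = {
    '0x02': {'HashId': 2, 'Translate': {'X': 9.0, 'Y': 0.0, 'Z': 9.0}},  # modified entry
    '0x01': {'HashId': 1, 'remove': True},                               # marked for deletion
}
stock.update(diffs)
merged = [item for item in stock.values() if 'remove' not in item]
# merged now holds only the updated 0x02 entry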
Example #9
def merge_map(map_pair: tuple,
              rstb_calc: rstb.SizeCalculator) -> Dict[str, Tuple[str, int]]:
    map_unit, changes = map_pair[0], map_pair[1]
    util.vprint(f'Merging {len(changes)} versions of {"_".join(map_unit)}...')
    new_map = get_stock_map(map_unit)
    stock_obj_hashes = [int(obj["HashId"]) for obj in new_map["Objs"]]
    for hash_id, actor in changes["Objs"]["mod"].items():
        try:
            new_map["Objs"][stock_obj_hashes.index(int(hash_id))] = actor
        except ValueError:
            changes["Objs"]["add"].append(actor)
    for map_del in sorted(
            changes["Objs"]["del"],
            key=lambda change: stock_obj_hashes.index(int(change))
            if int(change) in stock_obj_hashes else -1,
            reverse=True,
    ):
        if int(map_del) in stock_obj_hashes:
            try:
                new_map["Objs"].pop(stock_obj_hashes.index(map_del))
            except IndexError:
                try:
                    obj_to_delete = next(
                        iter([
                            actor for actor in new_map["Objs"]
                            if actor["HashId"] == map_del
                        ]))
                    new_map["Objs"].remove(obj_to_delete)
                except (StopIteration, ValueError):
                    util.vprint(
                        f"Could not delete actor with HashId {map_del}")
    new_map["Objs"].extend([
        change for change in changes["Objs"]["add"]
        if int(change["HashId"]) not in stock_obj_hashes
    ])
    new_map["Objs"] = sorted(new_map["Objs"],
                             key=lambda actor: int(actor["HashId"]))

    if len(new_map["Rails"]):
        stock_rail_hashes = [int(rail["HashId"]) for rail in new_map["Rails"]]
        for hash_id, rail in changes["Rails"]["mod"].items():
            try:
                new_map["Rails"][stock_rail_hashes.index(int(hash_id))] = rail
            except ValueError:
                changes["Rails"]["add"].append(rail)
        for map_del in sorted(
                changes["Rails"]["del"],
                key=lambda change: stock_rail_hashes.index(int(change))
                if int(change) in stock_rail_hashes else -1,
                reverse=True,
        ):
            if int(map_del) in stock_rail_hashes:
                try:
                    new_map["Rails"].pop(stock_rail_hashes.index(int(map_del)))
                except IndexError:
                    try:
                        obj_to_delete = next(
                            iter([
                                rail for rail in new_map["Rails"]
                                if rail["HashId"] == map_del
                            ]))
                        new_map["Rails"].remove(obj_to_delete)
                    except (StopIteration, ValueError):
                        util.vprint(
                            f"Could not delete rail with HashId {map_del}")
        new_map["Rails"].extend([
            change for change in changes["Rails"]["add"]
            if int(change["HashId"]) not in stock_rail_hashes
        ])
        new_map["Rails"] = sorted(new_map["Rails"],
                                  key=lambda rail: int(rail["HashId"]))

    aoc_out: Path = (util.get_master_modpack_dir() / util.get_dlc_path() /
                     ("0010" if util.get_settings("wiiu") else "") / "Map" /
                     "MainField" / map_unit.section /
                     f"{map_unit.section}_{map_unit.type}.smubin")
    aoc_out.parent.mkdir(parents=True, exist_ok=True)
    aoc_bytes = oead.byml.to_binary(new_map,
                                    big_endian=util.get_settings("wiiu"))
    aoc_out.write_bytes(util.compress(aoc_bytes))
    new_map["Objs"] = [
        obj for obj in new_map["Objs"]
        if not str(obj["UnitConfigName"]).startswith("DLC")
    ]
    (util.get_master_modpack_dir() / util.get_content_path() / "Map" /
     "MainField" / map_unit.section).mkdir(parents=True, exist_ok=True)
    base_out = (util.get_master_modpack_dir() / util.get_content_path() /
                "Map" / "MainField" / map_unit.section /
                f"{map_unit.section}_{map_unit.type}.smubin")
    base_out.parent.mkdir(parents=True, exist_ok=True)
    base_bytes = oead.byml.to_binary(new_map,
                                     big_endian=util.get_settings("wiiu"))
    base_out.write_bytes(util.compress(base_bytes))
    return {
        util.get_dlc_path(): (
            f"Aoc/0010/Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.mubin",
            rstb_calc.calculate_file_size_with_ext(bytes(aoc_bytes),
                                                   util.get_settings("wiiu"),
                                                   ".mubin"),
        ),
        "main": (
            f"Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.mubin",
            rstb_calc.calculate_file_size_with_ext(bytes(base_bytes),
                                                   util.get_settings("wiiu"),
                                                   ".mubin"),
        ),
    }
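
A sketch of driving merge_map over several map units and collecting the RSTB updates it returns. map_diffs is hypothetical: a dict of Map(section, type) named tuples to their consolidated changes, as BCML's map diff logs would supply:

import rstb

map_diffs: dict = ...  # hypothetical: Map(section, type) -> consolidated changes

calc = rstb.SizeCalculator()
rstb_updates = {}
for pair in map_diffs.items():
    for path, size in merge_map(pair, calc).values():
        rstb_updates[path] = size
# rstb_updates now maps each .mubin resource path to its new RSTB size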
Example #10
def merge_map(map_pair: tuple,
              rstb_calc: rstb.SizeCalculator,
              no_del: bool = False,
              link_del: bool = False,
              verbose: bool = False) -> dict:
    """
    Merges changes to a mainfield map and returns the RSTB values

    :param map_pair: A tuple containing the :class:`Map` to merge and the changes as a dict
    :type map_pair: tuple
    :param rstb_calc: An `rstb` library SizeCalculator object
    :type rstb_calc: :class:`rstb.SizeCalculator`
    :param no_del: Whether to skip processing actor deletions, defaults to False
    :type no_del: bool, optional
    :param verbose: Whether to display verbose output, defaults to False
    :type verbose: bool, optional
    :return: A dict containing the aoc and base RSTB paths for the merged map units along
    with their new size values
    :rtype: dict of str: (str, int)
    """
    map_unit, changes = map_pair
    if verbose:
        print(f'Merging {len(changes)} versions of {"_".join(map_unit)}...')
    new_map = get_stock_map(map_unit)
    stock_hashes = [obj['HashId'] for obj in new_map['Objs']]
    for hash_id, actor in changes['mod'].items():
        try:
            new_map['Objs'][stock_hashes.index(hash_id)] = deepcopy(actor)
        except ValueError:
            changes['add'].append(actor)
    if not no_del:
        for map_del in sorted(changes['del'], key=lambda change: stock_hashes.index(change) \
                              if change in stock_hashes else -1, reverse=True):
            if map_del in stock_hashes:
                try:
                    new_map['Objs'].pop(stock_hashes.index(map_del))
                except IndexError:
                    try:
                        obj_to_delete = next(
                            iter([
                                actor for actor in new_map['Objs']
                                if actor['HashId'] == map_del
                            ]))
                        new_map['Objs'].remove(obj_to_delete)
                    except (StopIteration, ValueError):
                        print(f'Could not delete actor with HashId {map_del}')
    new_map['Objs'].extend([
        change for change in changes['add']
        if change['HashId'] not in stock_hashes
    ])
    new_map['Objs'].sort(key=lambda actor: actor['HashId'])

    aoc_out: Path = util.get_master_modpack_dir() / 'aoc' / '0010' / 'Map' / \
        'MainField' / map_unit.section / \
        f'{map_unit.section}_{map_unit.type}.smubin'
    aoc_out.parent.mkdir(parents=True, exist_ok=True)
    aoc_bytes = byml.Writer(new_map, be=True).get_bytes()
    aoc_out.write_bytes(util.compress(aoc_bytes))
    new_map['Objs'] = [
        obj for obj in new_map['Objs']
        if not obj['UnitConfigName'].startswith('DLC')
    ]
    (util.get_master_modpack_dir() / 'content' / 'Map' / 'MainField' /
     map_unit.section).mkdir(parents=True, exist_ok=True)
    base_out = util.get_master_modpack_dir() / 'content' / 'Map' / 'MainField' / \
        map_unit.section / f'{map_unit.section}_{map_unit.type}.smubin'
    base_out.parent.mkdir(parents=True, exist_ok=True)
    base_bytes = byml.Writer(new_map, be=True).get_bytes()
    base_out.write_bytes(util.compress(base_bytes))
    return {
        'aoc':
        (f'Aoc/0010/Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.mubin',
         rstb_calc.calculate_file_size_with_ext(aoc_bytes, True, '.mubin')),
        'main':
        (f'Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.mubin',
         rstb_calc.calculate_file_size_with_ext(base_bytes, True, '.mubin'))
    }