Example 1
0
def _clean_sarc(old_sarc: oead.Sarc,
                base_sarc: oead.Sarc) -> Optional[oead.SarcWriter]:
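    """Rebuilds base_sarc keeping only files that are new or, for non-AAMP files,
    changed relative to old_sarc; returns None if nothing needs to be kept."""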
    old_files = {f.name for f in old_sarc.get_files()}
    new_sarc = oead.SarcWriter(endian=oead.Endianness.Big if util.get_settings(
        "wiiu") else oead.Endianness.Little)
    can_delete = True
    for nest_file, file_data in [(f.name, f.data)
                                 for f in base_sarc.get_files()]:
        ext = Path(nest_file).suffix
        if ext in {".yml", ".bak"}:
            continue
        if nest_file in old_files:
            old_data = util.unyaz_if_needed(old_sarc.get_file(nest_file).data)
        file_data = util.unyaz_if_needed(file_data)
        if nest_file not in old_files or (file_data != old_data
                                          and ext not in util.AAMP_EXTS):
            if (ext in util.SARC_EXTS and nest_file in old_files
                    and nest_file not in SPECIAL):
                nest_old_sarc = oead.Sarc(old_data)
                nest_base_sarc = oead.Sarc(file_data)
                nest_new_sarc = _clean_sarc(nest_old_sarc, nest_base_sarc)
                if nest_new_sarc:
                    new_bytes = nest_new_sarc.write()[1]
                    if ext.startswith(".s") and ext != ".sarc":
                        new_bytes = util.compress(new_bytes)
                    new_sarc.files[nest_file] = oead.Bytes(new_bytes)
                    can_delete = False
                else:
                    continue
            else:
                if ext.startswith(".s") and ext != ".sarc":
                    file_data = util.compress(file_data)
                new_sarc.files[nest_file] = oead.Bytes(file_data)
                can_delete = False
    return None if can_delete else new_sarc
Example 2
0
def threaded_merge(item, verbose: bool) -> Tuple[str, dict]:
    """Deep merges an individual file, suitable for multiprocessing"""
    file, stuff = item
    failures = {}

    base_file = util.get_game_file(file, file.startswith('aoc'))
    if (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    file_ext = os.path.splitext(file)[1]
    if file_ext in util.SARC_EXTS and (util.get_master_modpack_dir() /
                                       file).exists():
        base_file = (util.get_master_modpack_dir() / file)
    file_bytes = base_file.read_bytes()
    yazd = file_bytes[0:4] == b'Yaz0'
    file_bytes = file_bytes if not yazd else util.decompress(file_bytes)
    magic = file_bytes[0:4]

    if magic == b'SARC':
        new_sarc, sub_failures = nested_patch(sarc.SARC(file_bytes), stuff)
        del file_bytes
        new_bytes = new_sarc.get_bytes()
        for failure, contents in sub_failures.items():
            print(f'Some patches to {failure} failed to apply.')
            failures[failure] = contents
    else:
        try:
            if magic == b'AAMP':
                aamp_contents = aamp.Reader(file_bytes).parse()
                for change in stuff:
                    aamp_contents = _aamp_merge(aamp_contents, change)
                aamp_bytes = aamp.Writer(aamp_contents).get_bytes()
                del aamp_contents
                new_bytes = aamp_bytes if not yazd else util.compress(
                    aamp_bytes)
            else:
                raise ValueError(f'{file} is not a SARC or AAMP file.')
        except ValueError:
            new_bytes = file_bytes
            del file_bytes
            print(f'Deep merging file {file} failed. No changes were made.')

    new_bytes = new_bytes if not yazd else util.compress(new_bytes)
    output_file = (util.get_master_modpack_dir() / file)
    if base_file == output_file:
        output_file.unlink()
    output_file.parent.mkdir(parents=True, exist_ok=True)
    output_file.write_bytes(new_bytes)
    del new_bytes
    if magic == b'SARC' and verbose:
        print(f'Finished patching files inside {file}')
    elif verbose:
        print(f'Finished patching {file}')
    return util.get_canon_name(file), failures
Example 3
0
def nested_patch(pack: sarc.SARC, nest: dict) -> Tuple[sarc.SARCWriter, dict]:
    """
    Recursively patches deep merge files in a SARC

    :param pack: The SARC to patch recursively.
    :type pack: class:`sarc.SARC`
    :param nest: A dict of nested patches to apply.
    :type nest: dict
    :return: Returns a new SARC with patches applied and a dict of any failed patches.
    :rtype: (class:`sarc.SARCWriter`, dict)
    """
    new_sarc = sarc.make_writer_from_sarc(pack)
    failures = {}

    for file, stuff in nest.items():
        file_bytes = pack.get_file_data(file).tobytes()
        yazd = file_bytes[0:4] == b'Yaz0'
        file_bytes = util.decompress(file_bytes) if yazd else file_bytes

        if isinstance(stuff, dict):
            sub_sarc = sarc.SARC(file_bytes)
            new_sarc.delete_file(file)
            new_sub_sarc, sub_failures = nested_patch(sub_sarc, stuff)
            for failure in sub_failures:
                failures[file + '//' + failure] = sub_failures[failure]
            del sub_sarc
            new_bytes = new_sub_sarc.get_bytes()
            new_sarc.add_file(
                file, new_bytes if not yazd else util.compress(new_bytes))

        elif isinstance(stuff, list):
            try:
                if file_bytes[0:4] == b'AAMP':
                    aamp_contents = aamp.Reader(file_bytes).parse()
                    for change in stuff:
                        aamp_contents = _aamp_merge(aamp_contents, change)
                    aamp_bytes = aamp.Writer(aamp_contents).get_bytes()
                    del aamp_contents
                    new_bytes = aamp_bytes if not yazd else util.compress(
                        aamp_bytes)
                    cache_merged_aamp(file, new_bytes)
                else:
                    raise ValueError(
                        'Wait, what the heck, this isn\'t an AAMP file?!')
            except ValueError:
                new_bytes = pack.get_file_data(file).tobytes()
                print(f'Deep merging {file} failed. No changes were made.')

            new_sarc.delete_file(file)
            new_sarc.add_file(file, new_bytes)
    return new_sarc, failures
Example 4
0
def nested_patch(pack: oead.Sarc, nest: dict) -> Tuple[oead.SarcWriter, dict]:
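    """Recursively applies shop data patches to files nested inside a SARC,
    returning a new SarcWriter and a dict of any failed patches."""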
    new_sarc: oead.SarcWriter = oead.SarcWriter.from_sarc(pack)
    failures: dict = {}

    for file, stuff in nest.items():
        file_bytes = pack.get_file(file).data
        yazd = file_bytes[0:4] == b"Yaz0"
        file_bytes = util.decompress(file_bytes) if yazd else file_bytes

        if isinstance(stuff, dict):
            sub_sarc = oead.Sarc(file_bytes)
            new_sub_sarc, sub_failures = nested_patch(sub_sarc, stuff)
            for failure in sub_failures:
                failures[file + "//" + failure] = sub_failures[failure]
            del sub_sarc
            new_bytes = bytes(new_sub_sarc.write()[1])
            new_sarc.files[file] = new_bytes if not yazd else util.compress(
                new_bytes)

        elif isinstance(stuff, ParameterList):
            try:
                if file_bytes[0:4] == b"AAMP":
                    aamp_contents = ParameterIO.from_binary(file_bytes)
                    try:
                        file_ext = os.path.splitext(file)[1]
                        aamp_contents = shop_merge(
                            aamp_contents,
                            file_ext.replace(".", ""),
                            stuff.lists["Additions"],
                            stuff.lists["Removals"],
                        )
                        aamp_bytes = ParameterIO.to_binary(aamp_contents)
                    except:  # pylint: disable=bare-except
                        raise RuntimeError(
                            f"AAMP file {file} could not be merged.")
                    del aamp_contents
                    new_bytes = aamp_bytes if not yazd else util.compress(
                        aamp_bytes)
                    cache_merged_shop(file, new_bytes)
                else:
                    raise ValueError(
                        "Wait, what the heck, this isn't an AAMP file?!")
            except ValueError:
                new_bytes = pack.get_file(file).data
                print(f"Deep merging {file} failed. No changes were made.")

            new_sarc.files[file] = oead.Bytes(new_bytes)
    return new_sarc, failures
Example 5
0
def merge_sarcs(file_name: str, sarcs: List[Union[Path, bytes]]) -> Tuple[str, bytes]:
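    """Merges multiple versions of the same SARC, keeping modified files and
    recursing into nested SARCs; returns the file name and the merged bytes."""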
    opened_sarcs: List[sarc.SARC] = []
    if isinstance(sarcs[0], Path):
        for i, sarc_path in enumerate(sarcs):
            sarcs[i] = sarc_path.read_bytes()
    for sarc_bytes in sarcs:
        sarc_bytes = util.unyaz_if_needed(sarc_bytes)
        try:
            opened_sarcs.append(sarc.SARC(sarc_bytes))
        except ValueError:
            continue

    all_files = {key for open_sarc in opened_sarcs for key in open_sarc.list_files()}
    nested_sarcs = {}
    new_sarc = sarc.SARCWriter(be=True)
    files_added = []

    # for file in all_files:
    #     dm_cache = util.get_master_modpack_dir() / 'logs' / 'dm' / file
    #     if dm_cache.exists():
    #         file_data = dm_cache.read_bytes()
    #         new_sarc.add_file(file, file_data)
    #         files_added.append(file)

    for opened_sarc in reversed(opened_sarcs):
        for file in [file for file in opened_sarc.list_files() if file not in files_added]:
            data = opened_sarc.get_file_data(file).tobytes()
            if util.is_file_modded(file.replace('.s', '.'), data, count_new=True):
                if Path(file).suffix not in util.SARC_EXTS:
                    new_sarc.add_file(file, data)
                    files_added.append(file)
                else:
                    if file not in nested_sarcs:
                        nested_sarcs[file] = []
                    nested_sarcs[file].append(util.unyaz_if_needed(data))
    for file, sub_sarcs in nested_sarcs.items():
        merged_bytes = merge_sarcs(file, sub_sarcs)[1]
        if Path(file).suffix.startswith('.s') and not file.endswith('.sarc'):
            merged_bytes = util.compress(merged_bytes)
        new_sarc.add_file(file, merged_bytes)
        files_added.append(file)
    for file in [file for file in all_files if file not in files_added]:
        for opened_sarc in [open_sarc for open_sarc in opened_sarcs \
                            if file in open_sarc.list_files()]:
            new_sarc.add_file(file, opened_sarc.get_file_data(file).tobytes())
            break

    if 'Bootup.pack' in file_name:
        for merger in [merger() for merger in mergers.get_mergers() if merger.is_bootup_injector()]:
            inject = merger.get_bootup_injection()
            if not inject:
                continue
            file, data = inject
            try:
                new_sarc.delete_file(file)
            except KeyError:
                pass
            new_sarc.add_file(file, data)

    return (file_name, new_sarc.get_bytes())
Example 6
0
def _clean_sarc_file(file: Path, hashes: dict, tmp_dir: Path):
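    """Strips unmodified files from an extracted SARC, rewriting it with only the
    changed entries or deleting it entirely if nothing differs from the stock copy."""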
    canon = util.get_canon_name(file.relative_to(tmp_dir))
    try:
        stock_file = util.get_game_file(file.relative_to(tmp_dir))
    except FileNotFoundError:
        return
    try:
        old_sarc = oead.Sarc(util.unyaz_if_needed(stock_file.read_bytes()))
    except (RuntimeError, ValueError, oead.InvalidDataError):
        return
    if canon not in hashes:
        return
    try:
        base_sarc = oead.Sarc(util.unyaz_if_needed(file.read_bytes()))
    except (RuntimeError, ValueError, oead.InvalidDataError):
        return
    new_sarc = _clean_sarc(old_sarc, base_sarc)
    if not new_sarc:
        file.unlink()
    else:
        write_bytes = new_sarc.write()[1]
        file.write_bytes(
            write_bytes
            if not (file.suffix.startswith(".s") and file.suffix != ".ssarc")
            else util.compress(write_bytes)
        )
Example 7
0
 def get_bootup_injection(self):
     tmp_sarc = util.get_master_modpack_dir() / "logs" / "gamedata.sarc"
     if tmp_sarc.exists():
         return ("GameData/gamedata.ssarc",
                 util.compress(tmp_sarc.read_bytes()))
     else:
         return
Example 8
0
    def perform_merge(self):
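        """Merges all installed event info mods into Bootup.pack and updates the RSTB."""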
        merged_events = util.get_master_modpack_dir() / "logs" / "eventinfo.byml"
        event_merge_log = util.get_master_modpack_dir() / "logs" / "eventinfo.log"

        print("Loading event info mods...")
        modded_events = self.consolidate_diffs(self.get_all_diffs())
        event_mod_hash = hash(str(modded_events))
        if not modded_events:
            print("No event info merging necessary")
            if merged_events.exists():
                merged_events.unlink()
                event_merge_log.unlink()
                try:
                    stock_eventinfo = util.get_nested_file_bytes(
                        (
                            str(util.get_game_file("Pack/Bootup.pack"))
                            + "//Event/EventInfo.product.sbyml"
                        ),
                        unyaz=False,
                    )
                    util.inject_file_into_sarc(
                        "Event/EventInfo.product.sbyml",
                        stock_eventinfo,
                        "Pack/Bootup.pack",
                    )
                except FileNotFoundError:
                    pass
            return
        if event_merge_log.exists() and event_merge_log.read_text() == str(event_mod_hash):
            print("No event info merging necessary")
            return

        new_events = get_stock_eventinfo()
        for event, data in modded_events.items():
            new_events[event] = data
        del modded_events

        print("Writing new event info...")
        event_bytes = oead.byml.to_binary(
            new_events, big_endian=util.get_settings("wiiu")
        )
        del new_events
        util.inject_file_into_sarc(
            "Event/EventInfo.product.sbyml",
            util.compress(event_bytes),
            "Pack/Bootup.pack",
            create_sarc=True,
        )
        print("Saving event info merge log...")
        event_merge_log.parent.mkdir(parents=True, exist_ok=True)
        event_merge_log.write_text(str(event_mod_hash))
        merged_events.write_bytes(event_bytes)

        print("Updating RSTB...")
        rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(
            bytes(event_bytes), True, ".byml"
        )
        del event_bytes
        rstable.set_size("Event/EventInfo.product.byml", rstb_size)
Example 9
0
    def perform_merge(self):
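        """Merges all installed quest mods into QuestProduct.sbquestpack in TitleBG.pack."""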
        merged_quests = util.get_master_modpack_dir() / "logs" / "quests.byml"
        print("Loading quest mods...")
        diffs = self.consolidate_diffs(self.get_all_diffs())
        if not diffs:
            print("No quest merging necessary")
            if merged_quests.exists():
                merged_quests.unlink()
                try:
                    util.inject_file_into_sarc(
                        "Quest/QuestProduct.sbquestpack",
                        util.get_nested_file_bytes(
                            (f'{str(util.get_game_file("Pack/TitleBG.pack"))}'
                             "//Quest/QuestProduct.sbquestpack"),
                            unyaz=False,
                        ),
                        "Pack/TitleBG.pack",
                    )
                except FileNotFoundError:
                    pass
            return
        print("Loading stock quests...")
        quests = get_stock_quests()
        stock_names = [q["Name"] for q in quests]

        print("Merging quest mods...")
        for name, mod in diffs["mod"].items():
            try:
                quests[stock_names.index(name)] = mod
            except (IndexError, ValueError):
                diffs["add"].append(mod)
        for delete in reversed(diffs["del"]):
            try:
                quests.remove(quests[stock_names.index(delete)])
            except ValueError:
                pass
            except IndexError:
                raise RuntimeError(
                    f"An error occurred when attempting to remove a quest from your "
                    "game. Most of the time this means the mod was accidentally made "
                    "using the base game 1.0 TitleBG.pack instead of the latest updated "
                    "version. Please contact the mod author for assistance.")
        added_names = set()
        for add in diffs["add"]:
            if add["Name"] not in added_names:
                quests.append(add)
                added_names.add(add["Name"])

        print("Writing new quest pack...")
        data = oead.byml.to_binary(quests,
                                   big_endian=util.get_settings("wiiu"))
        merged_quests.parent.mkdir(parents=True, exist_ok=True)
        merged_quests.write_bytes(data)
        util.inject_file_into_sarc(
            "Quest/QuestProduct.sbquestpack",
            util.compress(data),
            "Pack/TitleBG.pack",
            create_sarc=True,
        )
Example 10
0
 def get_bootup_injection(self):
     tmp_sarc = util.get_master_modpack_dir() / "logs" / "savedata.sarc"
     if tmp_sarc.exists():
         return (
             "GameData/savedataformat.ssarc",
             util.compress(tmp_sarc.read_bytes()),
         )
     return None
Example 11
0
def _yml_to_byml(file: Path):
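    """Converts a YAML file to binary BYML in place, compressing the result when the
    output extension starts with ".s", then deletes the YAML source."""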
    data = oead.byml.to_binary(
        oead.byml.from_text(file.read_text("utf-8")),
        big_endian=util.get_settings("wiiu"),
    )
    out = file.with_suffix("")
    out.write_bytes(data if not out.suffix.startswith(".s") else util.compress(data))
    file.unlink()
Example 12
0
 def get_bootup_injection(self):
     tmp_sarc = util.get_master_modpack_dir() / "logs" / "effects.byml"
     if tmp_sarc.exists():
         return (
             "Ecosystem/StatusEffectList.sbyml",
             util.compress(tmp_sarc.read_bytes()),
         )
     return
Example 13
0
 def get_bootup_injection(self):
     tmp_sarc = util.get_master_modpack_dir() / 'logs' / 'eventinfo.byml'
     if tmp_sarc.exists():
         return (
             'Event/EventInfo.product.sbyml',
             util.compress(tmp_sarc.read_bytes())
         )
     else:
         return
Example 14
0
 def get_bootup_injection(self):
     tmp_sarc = util.get_master_modpack_dir() / "logs" / "areadata.byml"
     if tmp_sarc.exists():
         return (
             "Ecosystem/AreaData.sbyml",
             util.compress(tmp_sarc.read_bytes()),
         )
     else:
         return
Example 15
0
 def get_bootup_injection(self):
     tmp_sarc = util.get_master_modpack_dir() / "logs" / "eventinfo.byml"
     if tmp_sarc.exists():
         return (
             "Event/EventInfo.product.sbyml",
             util.compress(tmp_sarc.read_bytes()),
         )
     else:
         return
Example 16
0
def _convert_sarc_file(pack: Path, to_wiiu: bool) -> list:
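    """Converts a SARC file between Wii U and Switch formats in place and returns a
    list of any conversion errors."""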
    data = pack.read_bytes()
    if not data:
        return []
    sarc = oead.Sarc(util.unyaz_if_needed(data))
    new_bytes, error = _convert_sarc(sarc, to_wiiu)
    pack.write_bytes(
        util.compress(new_bytes) if pack.suffix.startswith(".s")
        and pack.suffix != ".sarc" else new_bytes)
    return error
Example 17
0
    def perform_merge(self):
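        """Merges modded actor info into a new ActorInfo.product.sbyml in the master modpack."""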
        actor_path = (util.get_master_modpack_dir() / util.get_content_path() /
                      "Actor" / "ActorInfo.product.sbyml")
        print("Loading modded actor info...")
        modded_actors = self.consolidate_diffs(self.get_all_diffs())
        if not modded_actors:
            print("No actor info merging necessary.")
            if actor_path.exists():
                actor_path.unlink()
            return

        print("Loading unmodded actor info...")
        actorinfo = get_stock_actorinfo()
        stock_actors = {
            crc32(actor["name"].encode("utf8")): actor
            for actor in actorinfo["Actors"]
        }

        print("Merging changes...")
        new_hashes = set()
        for actor_hash, actor_info in modded_actors.items():
            if isinstance(actor_hash, str):
                actor_hash = int(actor_hash)
            if actor_hash in stock_actors:
                util.dict_merge(stock_actors[actor_hash],
                                actor_info,
                                overwrite_lists=True)
            else:
                actorinfo["Actors"].append(actor_info)
                new_hashes.add(actor_hash)

        print("Sorting new actor info...")
        actorinfo["Hashes"] = oead.byml.Array([
            oead.S32(x) if x < 2147483648 else oead.U32(x)
            for x in sorted(new_hashes | set(stock_actors.keys()))
        ])
        try:
            actorinfo["Actors"] = sorted(
                actorinfo["Actors"],
                key=lambda x: crc32(x["name"].encode("utf-8")))
        except KeyError as err:
            if str(err) == "'name'":
                raise RuntimeError(
                    "Your actor info mods could not be merged. "
                    "This usually indicates a corrupt game dump.") from err
            else:
                raise

        print("Saving new actor info...")
        actor_path.parent.mkdir(parents=True, exist_ok=True)
        actor_path.write_bytes(
            util.compress(
                oead.byml.to_binary(actorinfo,
                                    big_endian=util.get_settings("wiiu"))))
        print("Actor info merged successfully")
Example 18
0
def _convert_actorpack(actor_pack: Path, to_wiiu: bool) -> Union[None, str]:
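    """Converts the Havok physics files inside an actor pack between Wii U and Switch
    formats, restoring stock data for unmodified files that cannot be converted."""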
    error = None
    sarc = oead.Sarc(util.unyaz_if_needed(actor_pack.read_bytes()))
    new_sarc = oead.SarcWriter.from_sarc(sarc)
    new_sarc.set_endianness(oead.Endianness.Big if to_wiiu else oead.Endianness.Little)
    for file in sarc.get_files():
        if "Physics/" in file.name and "Actor/" not in file.name:
            ext = file.name[file.name.rindex(".") :]
            if ext in NO_CONVERT_EXTS:
                if not util.is_file_modded(
                    util.get_canon_name(file.name, allow_no_source=True),
                    file.data,
                    count_new=True,
                ):
                    actor_name = file.name[
                        file.name.rindex("/") : file.name.rindex(".")
                    ]
                    try:
                        pack_path = util.get_game_file(
                            f"Actor/Pack/{actor_name}.sbactorpack"
                        )
                        stock_data = util.get_nested_file_bytes(
                            f"{str(pack_path)}//{file.name}"
                        )
                        if stock_data:
                            new_sarc.files[file.name] = stock_data
                        else:
                            raise FileNotFoundError(file.name)
                    except (FileNotFoundError, AttributeError):
                        error = (
                            "This mod contains a Havok file not supported by the "
                            f"converter: {file.name}"
                        )
                else:
                    error = (
                        "This mod contains a Havok file not supported by the"
                        f" converter: {file.name}"
                    )
            else:
                if file.data[0:4] == b"AAMP":
                    continue
                try:
                    hk = Havok.from_bytes(bytes(file.data))
                except:  # pylint: disable=bare-except
                    return f"Could not parse Havok file {file.name}"
                if to_wiiu:
                    hk.to_wiiu()
                else:
                    hk.to_switch()
                hk.serialize()
                new_sarc.files[file.name] = hk.to_bytes()
    actor_pack.write_bytes(util.compress(new_sarc.write()[1]))
    return error
Example 19
0
    def perform_merge(self):
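        """Merges status effect mods into StatusEffectList.sbyml in Bootup.pack and updates the RSTB."""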
        merged_effects = util.get_master_modpack_dir() / "logs" / "effects.byml"
        print("Loading status effect mods...")
        diffs = self.consolidate_diffs(self.get_all_diffs())
        if not diffs:
            print("No status effect merging necessary...")
            if merged_effects.exists():
                merged_effects.unlink()
                try:
                    stock_effects = util.get_nested_file_bytes(
                        (
                            str(util.get_game_file("Pack/Bootup.pack"))
                            + "//Ecosystem/StatusEffectList.sbyml"
                        ),
                        unyaz=False,
                    )
                    util.inject_file_into_sarc(
                        "Ecosystem/StatusEffectList.sbyml",
                        stock_effects,
                        "Pack/Bootup.pack",
                    )
                    del stock_effects
                except FileNotFoundError:
                    pass
            return
        util.vprint("All status effect diffs:")
        util.vprint(diffs)

        effects = get_stock_effects()
        util.dict_merge(effects, diffs, overwrite_lists=True)
        del diffs

        print("Writing new effects list...")
        effect_bytes = oead.byml.to_binary(
            oead.byml.Array([effects]), big_endian=util.get_settings("wiiu")
        )
        del effects
        util.inject_file_into_sarc(
            "Ecosystem/StatusEffectList.sbyml",
            util.compress(effect_bytes),
            "Pack/Bootup.pack",
            create_sarc=True,
        )
        print("Saving status effect merge log...")
        merged_effects.parent.mkdir(parents=True, exist_ok=True)
        merged_effects.write_bytes(effect_bytes)

        print("Updating RSTB...")
        rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(
            effect_bytes, True, ".byml"
        )
        del effect_bytes
        rstable.set_size("Ecosystem/StatusEffectList.byml", rstb_size)
Example 20
0
    def perform_merge(self):
        merged_quests = util.get_master_modpack_dir() / "logs" / "quests.byml"
        print("Loading quest mods...")
        diffs = self.consolidate_diffs(self.get_all_diffs())
        if not diffs:
            print("No quest merging necessary")
            if merged_quests.exists():
                merged_quests.unlink()
                try:
                    util.inject_file_into_sarc(
                        "Quest/QuestProduct.sbquestpack",
                        util.get_nested_file_bytes(
                            (f'{str(util.get_game_file("Pack/TitleBG.pack"))}'
                             "//Quest/QuestProduct.sbquestpack"),
                            unyaz=False,
                        ),
                        "Pack/TitleBG.pack",
                    )
                except FileNotFoundError:
                    pass
            return
        print("Loading stock quests...")
        quests = get_stock_quests()
        stock_names = [q["Name"] for q in quests]

        print("Merging quest mods...")
        for name, mod in diffs["mod"].items():
            try:
                quests[stock_names.index(name)] = mod
            except (IndexError, ValueError):
                diffs["add"].append(mod)
        for delete in reversed(diffs["del"]):
            try:
                quests.remove(quests[stock_names.index(delete)])
            except ValueError:
                pass
        added_names = set()
        for add in diffs["add"]:
            if add["Name"] not in added_names:
                quests.append(add)
                added_names.add(add["Name"])

        print("Writing new quest pack...")
        data = oead.byml.to_binary(quests,
                                   big_endian=util.get_settings("wiiu"))
        merged_quests.parent.mkdir(parents=True, exist_ok=True)
        merged_quests.write_bytes(data)
        util.inject_file_into_sarc(
            "Quest/QuestProduct.sbquestpack",
            util.compress(data),
            "Pack/TitleBG.pack",
            create_sarc=True,
        )
Example 21
0
def set_size(entry: str, size: int):
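    """Sets the size of a resource in the current master RSTB."""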
    rstb_path = (util.get_master_modpack_dir() / util.get_content_path() /
                 "System" / "Resource" /
                 "ResourceSizeTable.product.srsizetable")
    if rstb_path.exists():
        table = read_rstb(rstb_path, be=util.get_settings("wiiu"))
    else:
        table = get_stock_rstb()
        rstb_path.parent.mkdir(parents=True, exist_ok=True)
    table.set_size(entry, size)
    buf = io.BytesIO()
    table.write(buf, be=util.get_settings("wiiu"))
    rstb_path.write_bytes(util.compress(buf.getvalue()))
Example 22
0
def merge_events():
    """ Merges all installed event info mods """
    event_mods = [mod for mod in util.get_installed_mods() \
                  if (mod.path / 'logs' / 'eventinfo.yml').exists()]
    merged_events = util.get_master_modpack_dir() / 'logs' / 'eventinfo.byml'
    event_merge_log = util.get_master_modpack_dir() / 'logs' / 'eventinfo.log'
    event_mod_hash = str(hash(tuple(event_mods)))

    if not event_mods:
        print('No event info merging necessary')
        if merged_events.exists():
            merged_events.unlink()
            event_merge_log.unlink()
            try:
                stock_eventinfo = util.get_nested_file_bytes(
                    str(util.get_game_file('Pack/Bootup.pack')) + '//Event/EventInfo.product.sbyml',
                    unyaz=False
                )
                util.inject_file_into_bootup(
                    'Event/EventInfo.product.sbyml',
                    stock_eventinfo
                )
            except FileNotFoundError:
                pass
        return
    if event_merge_log.exists() and event_merge_log.read_text() == event_mod_hash:
        print('No event info merging necessary')
        return

    print('Loading event info mods...')
    modded_events = {}
    for mod in event_mods:
        modded_events.update(get_events_for_mod(mod))
    new_events = get_stock_eventinfo()
    for event, data in modded_events.items():
        new_events[event] = data

    print('Writing new event info...')
    event_bytes = byml.Writer(new_events, be=True).get_bytes()
    util.inject_file_into_bootup(
        'Event/EventInfo.product.sbyml',
        util.compress(event_bytes),
        create_bootup=True
    )
    print('Saving event info merge log...')
    event_merge_log.write_text(event_mod_hash)
    merged_events.write_bytes(event_bytes)

    print('Updating RSTB...')
    rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(event_bytes, True, '.byml')
    rstable.set_size('Event/EventInfo.product.byml', rstb_size)
Example 23
0
def merge_aamp_files(file: str, tree: dict):
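    """Merges a tree of AAMP edits into the given SARC file and writes the result to
    the master modpack."""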
    try:
        base_file = util.get_game_file(file)
    except FileNotFoundError:
        util.vprint(f"Skipping {file}, not found in dump")
        return
    if (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    sarc = Sarc(util.unyaz_if_needed(base_file.read_bytes()))
    new_data = _merge_in_sarc(sarc, tree)
    if base_file.suffix.startswith(".s") and base_file.suffix != ".ssarc":
        new_data = util.compress(new_data)
    (util.get_master_modpack_dir() / file).parent.mkdir(parents=True,
                                                        exist_ok=True)
    (util.get_master_modpack_dir() / file).write_bytes(new_data)
Example 24
0
 def perform_merge(self):
     print("Loading modded SARC list...")
     sarcs = {
         s: ss
         for s, ss in self.consolidate_diffs(self.get_all_diffs()).items()
         if ss
     }
     if "only_these" in self._options:
         for sarc_file in self._options["only_these"]:
             master_path = util.get_master_modpack_dir() / sarc_file
             if master_path.exists():
                 master_path.unlink()
         for sarc_file in [
                 file for file in sarcs
                 if file not in self._options["only_these"]
         ]:
             del sarcs[sarc_file]
     else:
         for file in [
                 file
                 for file in util.get_master_modpack_dir().rglob("**/*")
                 if file.suffix in util.SARC_EXTS
         ]:
             file.unlink()
     for sarc_file in sarcs:
         try:
             sarcs[sarc_file].insert(0, util.get_game_file(sarc_file))
         except FileNotFoundError:
             continue
     if not sarcs:
         print("No SARC merging necessary")
         return
     print(f"Merging {len(sarcs)} SARC files...")
     pool = self._pool or Pool(maxtasksperchild=500)
     results = pool.starmap(merge_sarcs, sarcs.items())
     for result in results:
         file, file_data = result
         output_path = util.get_master_modpack_dir() / file
         output_path.parent.mkdir(parents=True, exist_ok=True)
         if output_path.suffix.startswith(".s"):
             file_data = util.compress(file_data)
         output_path.write_bytes(file_data)
     if not self._pool:
         pool.close()
         pool.join()
     print("Finished merging SARCs")
Example 25
0
def _pack_sarc(folder: Path, tmp_dir: Path, hashes: dict):
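    """Repacks a folder into a SARC containing only files that differ from the stock
    SARC when one exists, then replaces the folder with the packed file."""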
    packed = oead.SarcWriter(
        endian=oead.Endianness.Big
        if util.get_settings("wiiu")
        else oead.Endianness.Little
    )
    try:
        canon = util.get_canon_name(
            folder.relative_to(tmp_dir).as_posix(), allow_no_source=True
        )
        if canon not in hashes:
            raise FileNotFoundError("File not in game dump")
        stock_file = util.get_game_file(folder.relative_to(tmp_dir))
        try:
            old_sarc = oead.Sarc(util.unyaz_if_needed(stock_file.read_bytes()))
        except (RuntimeError, ValueError, oead.InvalidDataError):
            raise ValueError("Cannot open file from game dump")
        old_files = {f.name for f in old_sarc.get_files()}
    except (FileNotFoundError, ValueError):
        for file in {f for f in folder.rglob("**/*") if f.is_file()}:
            packed.files[file.relative_to(folder).as_posix()] = file.read_bytes()
    else:
        for file in {
            f
            for f in folder.rglob("**/*")
            if f.is_file() and f.suffix not in EXCLUDE_EXTS
        }:
            file_data = file.read_bytes()
            xhash = xxhash.xxh64_intdigest(util.unyaz_if_needed(file_data))
            file_name = file.relative_to(folder).as_posix()
            if file_name in old_files:
                old_hash = xxhash.xxh64_intdigest(
                    util.unyaz_if_needed(old_sarc.get_file(file_name).data)
                )
            if file_name not in old_files or (xhash != old_hash):
                packed.files[file_name] = file_data
    finally:
        shutil.rmtree(folder)
        if not packed.files:
            return  # pylint: disable=lost-exception
        sarc_bytes = packed.write()[1]
        folder.write_bytes(
            util.compress(sarc_bytes)
            if (folder.suffix.startswith(".s") and not folder.suffix == ".sarc")
            else sarc_bytes
        )
Example 26
0
def inject_files_into_bootup(files: list, datas: list):
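    """Replaces the given files inside Bootup.pack with the provided data, preserving
    Yaz0 compression."""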
    bootup_path: Path = root_dir() / "content" / "Pack" / "Bootup.pack"
    sarc_data = bootup_path.read_bytes()
    yaz = sarc_data[0:4] == b"Yaz0"
    if yaz:
        sarc_data = bcmlutil.decompress(sarc_data)
    old_sarc = oead.Sarc(sarc_data)
    del sarc_data
    new_sarc = oead.SarcWriter.from_sarc(old_sarc)
    del old_sarc
    for idx in range(len(files)):
        new_sarc.files[files[idx]] = (datas[idx] if isinstance(
            datas[idx], bytes) else bytes(datas[idx]))
    new_bytes = new_sarc.write()[1]
    del new_sarc
    bootup_path.write_bytes(
        new_bytes if not yaz else bcmlutil.compress(new_bytes))
    del new_bytes
Example 27
0
def _clean_sarc(file: Path, hashes: dict, tmp_dir: Path):
    canon = util.get_canon_name(file.relative_to(tmp_dir))
    try:
        stock_file = util.get_game_file(file.relative_to(tmp_dir))
    except FileNotFoundError:
        return
    with stock_file.open('rb') as old_file:
        old_sarc = sarc.read_file_and_make_sarc(old_file)
        if not old_sarc:
            return
        old_files = set(old_sarc.list_files())
    if canon not in hashes:
        return
    with file.open('rb') as s_file:
        base_sarc = sarc.read_file_and_make_sarc(s_file)
    if not base_sarc:
        return
    new_sarc = sarc.SARCWriter(True)
    can_delete = True
    for nest_file in base_sarc.list_files():
        canon = nest_file.replace('.s', '.')
        ext = Path(canon).suffix
        if ext in {'.yml', '.bak'}:
            continue
        file_data = base_sarc.get_file_data(nest_file).tobytes()
        xhash = xxhash.xxh32(util.unyaz_if_needed(file_data)).hexdigest()
        if nest_file in old_files:
            old_hash = xxhash.xxh32(
                util.unyaz_if_needed(
                    old_sarc.get_file_data(nest_file).tobytes())).hexdigest()
        if nest_file not in old_files or (xhash != old_hash
                                          and ext not in util.AAMP_EXTS):
            can_delete = False
            new_sarc.add_file(nest_file, file_data)
    del old_sarc
    if can_delete:
        del new_sarc
        file.unlink()
    else:
        with file.open('wb') as s_file:
            if file.suffix.startswith('.s') and file.suffix != '.ssarc':
                s_file.write(util.compress(new_sarc.get_bytes()))
            else:
                new_sarc.write(s_file)
Example 28
0
def _merge_in_sarc(sarc: Sarc, edits: dict) -> ByteString:
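    """Recursively merges shop data edits into the files of a SARC and returns the
    new SARC bytes."""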
    new_sarc = SarcWriter.from_sarc(sarc)
    for file, stuff in edits.items():
        if isinstance(stuff, dict):
            try:
                ofile = sarc.get_file(file)
                if ofile is None:
                    raise FileNotFoundError(f"Could not find nested file {file} in SARC")
                sub_sarc = Sarc(util.unyaz_if_needed(ofile.data))
            except (
                InvalidDataError,
                ValueError,
                AttributeError,
                RuntimeError,
                FileNotFoundError,
            ):
                util.vprint(f"Couldn't merge into nested SARC {file}")
                continue
            nsub_bytes = _merge_in_sarc(sub_sarc, stuff)
            new_sarc.files[file] = (
                util.compress(nsub_bytes)
                if file[file.rindex(".") :].startswith(".s")
                else nsub_bytes
            )
        elif isinstance(stuff, ParameterList):
            try:
                ofile = sarc.get_file(file)
                if ofile is None:
                    raise FileNotFoundError(f"Could not find nested file {file} in SARC")
                pio = ParameterIO.from_binary(ofile.data)
            except (
                AttributeError,
                ValueError,
                InvalidDataError,
                FileNotFoundError,
            ) as err:
                util.vprint(f"Couldn't open {file}: {err}")
                continue
            new_pio = merge_shopdata(pio, stuff)
            new_sarc.files[file] = new_pio.to_binary()
    return new_sarc.write()[1]
Example 29
0
 def perform_merge(self):
     print('Loading modded SARC list...')
     sarcs = self.consolidate_diffs(self.get_all_diffs())
     if 'only_these' in self._options:
         for sarc_file in self._options['only_these']:
             master_path = (util.get_master_modpack_dir() / sarc_file)
             if master_path.exists():
                 master_path.unlink()
         for sarc_file in [
                 file for file in sarcs
                 if file not in self._options['only_these']
         ]:
             del sarcs[sarc_file]
     else:
         for file in [file for file in util.get_master_modpack_dir().rglob('**/*') \
                     if file.suffix in util.SARC_EXTS]:
             file.unlink()
     for sarc_file in sarcs:
         try:
             sarcs[sarc_file].insert(0, util.get_game_file(sarc_file))
         except FileNotFoundError:
             continue
     if not sarcs:
         print('No SARC merging necessary')
         return
     num_threads = min(cpu_count(), len(sarcs))
     pool = self._pool or Pool(processes=num_threads)
     print(f'Merging {len(sarcs)} SARC files...')
     results = pool.starmap(merge_sarcs, sarcs.items())
     for result in results:
         file, data = result
         output_path = util.get_master_modpack_dir() / file
         output_path.parent.mkdir(parents=True, exist_ok=True)
         if output_path.suffix.startswith('.s'):
             data = util.compress(data)
         output_path.write_bytes(data)
     if not self._pool:
         pool.close()
         pool.join()
     print('Finished merging SARCs')
Example 30
0
def set_size(entry: str, size: int):
    """
    Sets the size of a resource in the current master RSTB

    :param entry: The resource to set
    :type entry: str
    :param size: The resource size
    :type size: int
    """
    rstb_path = util.get_master_modpack_dir() / 'content' / 'System' / 'Resource' /\
                'ResourceSizeTable.product.srsizetable'
    if rstb_path.exists():
        table = read_rstb(rstb_path, be=True)
    else:
        table = get_stock_rstb()
        rstb_path.parent.mkdir(parents=True, exist_ok=True)
    table.set_size(entry, size)
    buf = io.BytesIO()
    table.write(buf, be=True)
    rstb_path.write_bytes(
        util.compress(buf.getvalue())
    )