Exemple #1
0
def msbt_to_msyt(folder: Path, pool: multiprocessing.Pool = None):
    """Converts MSBTs in given temp dir to MSYTs.

    Runs the MSYT tool once over the whole folder, retries any files that
    failed individually in a process pool, then deletes all MSBT files.

    :param folder: Temp directory containing .msbt files to convert.
    :param pool: Optional existing process pool to reuse; if omitted, a
        temporary pool is created and cleaned up here.
    :returns: List of MSBT paths that still failed to convert.
    """
    if system() == "Windows":
        # suppress the console window that would otherwise flash up
        subprocess.run(
            [MSYT_PATH, "export", "-d", str(folder)],
            creationflags=util.CREATE_NO_WINDOW,
            check=False,
        )
    else:
        subprocess.run([MSYT_PATH, "export", "-d", str(folder)], check=False)
    # a conversion succeeded iff a sibling .msyt now exists
    fix_msbts = [
        msbt for msbt in folder.rglob("**/*.msbt")
        if not msbt.with_suffix(".msyt").exists()
    ]
    if fix_msbts:
        print("Some MSBTs failed to convert. Trying again individually...")
        this_pool = pool or multiprocessing.Pool(maxtasksperchild=500)
        try:
            # `partial(_msyt_file)` with no bound args was a no-op wrapper;
            # map the worker directly
            this_pool.map(_msyt_file, fix_msbts)
            fix_msbts = [
                msbt for msbt in folder.rglob("**/*.msbt")
                if not msbt.with_suffix(".msyt").exists()
            ]
        finally:
            # only close a pool we created ourselves, even on error
            if not pool:
                this_pool.close()
                this_pool.join()
    if fix_msbts:
        print(
            f"{len(fix_msbts)} MSBT files failed to convert. They will not be merged."
        )
        util.vprint(fix_msbts)
    for msbt_file in folder.rglob("**/*.msbt"):
        Path(msbt_file).unlink()
    return fix_msbts
Exemple #2
0
def _convert_text_logs(logs_path: Path):
    """Convert legacy text logs (YAML diffs and newtexts SARCs) to texts.json.

    :param logs_path: The mod's logs directory.
    """
    diffs = {}
    with Pool(maxtasksperchild=500) as pool:
        for diff in pool.imap_unordered(_convert_text_log,
                                        logs_path.glob("texts_*.yml")):
            diffs.update(diff)
    fails = set()
    for text_pack in logs_path.glob("newtexts_*.sarc"):
        lang = text_pack.stem[9:]  # strip the "newtexts_" prefix
        sarc = oead.Sarc(text_pack.read_bytes())
        for file in sarc.get_files():
            if lang not in diffs:
                diffs[lang] = {}
            try:
                diffs[lang].update(
                    {file.name: read_msbt(bytes(file.data))["entries"]})
            except RuntimeError:
                print(
                    f"Warning: {file.name} could not be processed and will not be used"
                )
                fails.add(file.name)
                continue
        text_pack.unlink()
    # report failures once, after all packs are processed (the original
    # printed this inside the loop, once per pack, even with zero failures)
    if fails:
        util.vprint(f"{len(fails)} text files failed to process:\n{fails}")
    (logs_path / "texts.json").write_text(json.dumps(diffs,
                                                     ensure_ascii=False,
                                                     indent=2),
                                          encoding="utf-8")
Exemple #3
0
def log_drop_file(file: str, mod_dir: Path):
    """Log the drop table diff for one nested drop file.

    :param file: Nested path of the drop file, e.g. "Pack/X.pack//Actor/...".
    :param mod_dir: Root of the mod's temp directory.
    :returns: ``{file: drop_table}`` with stock-identical entries replaced by
        ``util.UNDERRIDE``, or ``{}`` for files inside Bootup.pack.
    """
    if "Bootup.pack" in file:
        return {}
    drop = ParameterIO.from_binary(
        util.get_nested_file_bytes(str(mod_dir) + "/" + file))
    drop_table = _drop_to_dict(drop)
    del drop
    try:
        # split "base.pack//nested/path" into the outer archive and the
        # nested part (which keeps its leading "//" for re-joining)
        base_file = file[:file.index("//")]
        sub_file = file[file.index("//"):]
        ref_drop = ParameterIO.from_binary(
            util.get_nested_file_bytes(
                str(util.get_game_file(base_file)) + sub_file))
        ref_table = _drop_to_dict(ref_drop)
        del ref_drop
        for table, contents in drop_table.items():
            if table not in ref_table:
                continue
            # items present in both the modded and the stock table
            for item, prob in {(i, p)
                               for i, p in contents["items"].items()
                               if i in ref_table[table]["items"]}:
                if prob == ref_table[table]["items"][item]:
                    # unchanged from stock: mark so merging can underride it
                    drop_table[table]["items"][item] = util.UNDERRIDE
        del ref_table
    except (
            FileNotFoundError,
            oead.InvalidDataError,
            AttributeError,
            RuntimeError,
            ValueError,
    ):
        # no usable stock copy: keep the mod's table as-is
        util.vprint(f"Could not load stock {file}")
    return {file: drop_table}
Exemple #4
0
def _get_diffs_from_sarc(sarc: Sarc, ref_sarc: Sarc, edits: dict,
                         path: str) -> dict:
    """Recursively diff AAMP files in a SARC against their stock versions.

    :param sarc: The modded SARC.
    :param ref_sarc: The stock reference SARC.
    :param edits: Nested dict of files to check; a truthy value means the
        entry is itself a nested SARC to recurse into.
    :param path: Nested path prefix used for diff keys and error messages.
    :returns: Dict mapping nested file paths to their AAMP diffs.
    :raises ValueError: If a nested file cannot be read or parsed as AAMP.
    """
    diffs = {}
    # renamed from `edits` to avoid shadowing the parameter
    for file, sub_edits in edits.items():
        if sub_edits:
            # nested SARC: recurse
            try:
                rsub_sarc = Sarc(
                    util.unyaz_if_needed(ref_sarc.get_file(file).data))
            except (AttributeError, InvalidDataError, ValueError,
                    RuntimeError) as err:
                util.vprint(f'Skipping "{path}//{file}", {err}')
                continue
            sub_sarc = Sarc(util.unyaz_if_needed(sarc.get_file(file).data))
            diffs.update(
                _get_diffs_from_sarc(sub_sarc, rsub_sarc, sub_edits,
                                     path + "//" + file))
            del sub_sarc
            del rsub_sarc
        else:
            full_path = f"{path}//{file}"
            try:
                ref_pio = ParameterIO.from_binary(ref_sarc.get_file(file).data)
            except AttributeError:
                # file not present in the stock SARC: nothing to diff against
                continue
            try:
                pio = ParameterIO.from_binary(sarc.get_file(file).data)
            except AttributeError as err:
                raise ValueError(
                    f"Failed to read nested file:\n{path}//{file}") from err
            except (ValueError, RuntimeError, InvalidDataError) as err:
                # chain the cause (the original dropped it here)
                raise ValueError(
                    f"Failed to parse AAMP file:\n{path}//{file}") from err
            diffs.update({full_path: get_aamp_diff(pio, ref_pio)})
    return diffs
Exemple #5
0
def _get_diffs_from_sarc(sarc: Sarc, ref_sarc: Sarc, edits: Union[dict, list],
                         path: str) -> dict:
    """Recursively diff AAMP files in a SARC against their stock versions.

    :param sarc: The modded SARC.
    :param ref_sarc: The stock reference SARC.
    :param edits: Either a dict (keys are nested SARCs to recurse into) or a
        list of AAMP file names to diff directly.
    :param path: Nested path prefix used for diff keys and error messages.
    :returns: Dict mapping nested file paths to their AAMP diffs.
    """
    diffs = {}
    if isinstance(edits, dict):
        # renamed from `edits` to avoid shadowing the parameter
        for file, sub_edits in edits.items():
            try:
                rsub_sarc = Sarc(
                    util.unyaz_if_needed(ref_sarc.get_file(file).data))
            except (AttributeError, InvalidDataError, ValueError,
                    RuntimeError) as e:
                util.vprint(f'Skipping "{path}//{file}", {e}')
                continue
            sub_sarc = Sarc(util.unyaz_if_needed(sarc.get_file(file).data))
            diffs.update(
                _get_diffs_from_sarc(sub_sarc, rsub_sarc, sub_edits,
                                     path + "//" + file))
            del sub_sarc
            del rsub_sarc
    else:
        for file in edits:
            full_path = f"{path}//{file}"
            try:
                ref_pio = ParameterIO.from_binary(ref_sarc.get_file(file).data)
            except AttributeError:
                # file not present in the stock SARC: nothing to diff against
                continue
            pio = ParameterIO.from_binary(sarc.get_file(file).data)
            diffs.update({full_path: get_aamp_diff(pio, ref_pio)})
    return diffs
Exemple #6
0
 def consolidate_diffs(self, diffs: list):
     """Merge a list of actor info diffs into a single oead byml Hash."""
     merged = {}
     for single_diff in diffs:
         util.dict_merge(merged, single_diff, overwrite_lists=True)
     util.vprint("All actor info diffs:")
     util.vprint(oead.byml.to_text(merged))
     return oead.byml.Hash(merged)
Exemple #7
0
def link_master_mod(output: Path = None):
    """Build the merged BCML graphic pack by linking/copying all mod files.

    :param output: Destination folder; defaults to the configured export dir
        (no-op if no export dir is set and no output given).
    :raises RuntimeError: If the output folder cannot be created/populated.
    """
    util.create_bcml_graphicpack_if_needed()
    if not output:
        if not util.get_settings("export_dir"):
            return
        output = Path(util.get_settings("export_dir"))
    if output.exists():
        # rebuild the merged pack from scratch every time
        shutil.rmtree(output, ignore_errors=True)
    try:
        output.mkdir(parents=True, exist_ok=True)
        if not util.get_settings("no_cemu"):
            # NOTE: the original called output.mkdir() a second time here;
            # the duplicate call was redundant and has been removed
            shutil.copy(util.get_master_modpack_dir() / "rules.txt",
                        output / "rules.txt")
    except (OSError, PermissionError, FileExistsError,
            FileNotFoundError) as err:
        raise RuntimeError(
            "There was a problem creating the master BCML graphic pack. "
            "It may be a one time fluke, so try remerging and/or restarting BCML. "
            "This can also happen if BCML and/or Cemu are installed into Program "
            "Files or any folder which requires administrator (or root) permissions. "
            "You can try running BCML as administrator or root, but bear in mind this "
            "is not officially supported. If the problem persists, good luck, because "
            "it's something wonky about your PC, I guess.") from err

    # enabled mods only; reverse-sorted so earlier-linked files win, since
    # already-existing targets are skipped below
    mod_folders: List[Path] = sorted(
        [
            item for item in util.get_modpack_dir().glob("*")
            if item.is_dir() and not (item / ".disabled").exists()
        ],
        reverse=True,
    )
    util.vprint(mod_folders)
    # hardlink when allowed (fast, no duplicate disk use); else copy
    link_or_copy: Any = os.link if not util.get_settings(
        "no_hardlinks") else copyfile
    for mod_folder in mod_folders:
        for item in mod_folder.rglob("**/*"):
            rel_path = item.relative_to(mod_folder)
            exists = (output / rel_path).exists()
            is_log = str(rel_path).startswith("logs")
            is_opt = str(rel_path).startswith("options")
            is_meta = str(rel_path).startswith("meta")
            # loose top-level non-.txt files are skipped as BCML metadata
            is_extra = (len(rel_path.parts) == 1 and rel_path.suffix != ".txt"
                        and not item.is_dir())
            if exists or is_log or is_extra or is_meta or is_opt:
                continue
            if item.is_dir():
                (output / rel_path).mkdir(parents=True, exist_ok=True)
            elif item.is_file():
                try:
                    link_or_copy(str(item), str(output / rel_path))
                except OSError:
                    # hardlinking can fail (e.g. across filesystems):
                    # fall back to copying for the rest of the run
                    if link_or_copy is os.link:
                        link_or_copy = copyfile
                        link_or_copy(str(item), str(output / rel_path))
                    else:
                        raise
Exemple #8
0
 def get_mods(self, params):
     """Return installed mods as JSON dicts, optionally including disabled ones."""
     if not params:
         params = {"disabled": False}
     installed = util.get_installed_mods(params["disabled"])
     mods = [entry.to_json() for entry in installed]
     util.vprint(mods)
     return mods
Exemple #9
0
def link_master_mod(output: Path = None):
    """Build the merged BCML graphic pack by linking/copying all mod files.

    :param output: Destination folder; defaults to Cemu's BreathOfTheWild_BCML
        graphic pack (no-op if ``no_cemu`` is set and no output given).
    :raises RuntimeError: If the output folder cannot be created/populated.
    """
    util.create_bcml_graphicpack_if_needed()
    if not output:
        if util.get_settings("no_cemu"):
            return
        output = util.get_cemu_dir() / "graphicPacks" / "BreathOfTheWild_BCML"
    if output.exists():
        # rebuild the merged pack from scratch every time
        shutil.rmtree(output, ignore_errors=True)
    try:
        output.mkdir(parents=True, exist_ok=True)
        if not util.get_settings("no_cemu"):
            shutil.copy(util.get_master_modpack_dir() / "rules.txt",
                        output / "rules.txt")
    except (OSError, PermissionError, FileExistsError,
            FileNotFoundError) as err:
        raise RuntimeError(
            "There was a problem creating the master BCML graphic pack. "
            "It may be a one time fluke, so try remerging and/or restarting BCML. "
            "If the problem persists, good luck, because it's something wonky about your "
            "PC, I guess.") from err

    # enabled mods only; reverse-sorted so earlier-linked files win, since
    # already-existing targets are skipped below
    mod_folders: List[Path] = sorted(
        [
            item for item in util.get_modpack_dir().glob("*")
            if item.is_dir() and not (item / ".disabled").exists()
        ],
        reverse=True,
    )
    util.vprint(mod_folders)
    # hardlink when allowed (fast, no duplicate disk use); else copy
    link_or_copy = os.link if not util.get_settings(
        "no_hardlinks") else copyfile
    for mod_folder in mod_folders:
        for item in mod_folder.rglob("**/*"):
            rel_path = item.relative_to(mod_folder)
            exists = (output / rel_path).exists()
            is_log = str(rel_path).startswith("logs")
            is_opt = str(rel_path).startswith("options")
            is_meta = str(rel_path).startswith("meta")
            # loose top-level non-.txt files are skipped as BCML metadata
            is_extra = (len(rel_path.parts) == 1 and rel_path.suffix != ".txt"
                        and not item.is_dir())
            if exists or is_log or is_extra or is_meta or is_opt:
                continue
            if item.is_dir():
                (output / rel_path).mkdir(parents=True, exist_ok=True)
            elif item.is_file():
                try:
                    link_or_copy(str(item), str(output / rel_path))
                except OSError:
                    # hardlinking can fail (e.g. across filesystems):
                    # fall back to copying for the rest of the run
                    if link_or_copy is os.link:
                        link_or_copy = copyfile
                        link_or_copy(str(item), str(output / rel_path))
                    else:
                        raise
Exemple #10
0
    def perform_merge(self):
        """Merge all status effect diffs into Bootup.pack and log the result.

        If no diffs exist, restores the stock StatusEffectList and removes
        the stale merge log instead.
        """
        merged_effects = util.get_master_modpack_dir() / "logs" / "effects.byml"
        print("Loading status effect mods...")
        diffs = self.consolidate_diffs(self.get_all_diffs())
        if not diffs:
            print("No status effect merging necessary...")
            if merged_effects.exists():
                # a previous merge left artifacts: remove the log and put
                # the stock file back into Bootup.pack
                merged_effects.unlink()
                try:
                    stock_effects = util.get_nested_file_bytes(
                        (
                            str(util.get_game_file("Pack/Bootup.pack"))
                            + "//Ecosystem/StatusEffectList.sbyml"
                        ),
                        unyaz=False,
                    )
                    util.inject_file_into_sarc(
                        "Ecosystem/StatusEffectList.sbyml",
                        stock_effects,
                        "Pack/Bootup.pack",
                    )
                    del stock_effects
                except FileNotFoundError:
                    # no stock dump available; best-effort cleanup only
                    pass
            return
        util.vprint("All status effect diffs:")
        util.vprint(diffs)

        effects = get_stock_effects()
        util.dict_merge(effects, diffs, overwrite_lists=True)
        del diffs

        print("Writing new effects list...")
        # the game stores the effect list as a single-element byml array
        effect_bytes = oead.byml.to_binary(
            oead.byml.Array([effects]), big_endian=util.get_settings("wiiu")
        )
        del effects
        util.inject_file_into_sarc(
            "Ecosystem/StatusEffectList.sbyml",
            util.compress(effect_bytes),
            "Pack/Bootup.pack",
            create_sarc=True,
        )
        print("Saving status effect merge log...")
        merged_effects.parent.mkdir(parents=True, exist_ok=True)
        merged_effects.write_bytes(effect_bytes)

        print("Updating RSTB...")
        # recalculate the resource size for the uncompressed byml
        rstb_size = rstb.SizeCalculator().calculate_file_size_with_ext(
            effect_bytes, True, ".byml"
        )
        del effect_bytes
        rstable.set_size("Ecosystem/StatusEffectList.byml", rstb_size)
Exemple #11
0
 def consolidate_diffs(self, diffs):
     """Collect per-mod SARC diff paths into one dict keyed by SARC path.

     :param diffs: Dict mapping mods to the SARCs they modify.
     :returns: Dict mapping each modded SARC to the list of mod file paths
         that provide it (only paths that actually exist are appended).
     """
     all_sarcs = set()
     all_diffs = {}
     for mod in sorted(diffs.keys(), key=lambda mod: mod.priority):
         all_sarcs |= set(diffs[mod])
     for modded_sarc in all_sarcs:
         for mod, diff in diffs.items():
             if modded_sarc in diff:
                 # setdefault replaces the non-idiomatic
                 # `if not modded_sarc in all_diffs` manual init
                 entry = all_diffs.setdefault(modded_sarc, [])
                 if (mod.path / modded_sarc).exists():
                     entry.append(mod.path / modded_sarc)
     util.vprint("All SARC diffs:")
     util.vprint(all_diffs)
     return all_diffs
Exemple #12
0
def merge_aamp_files(file: str, tree: dict):
    """Merge the AAMP edits in *tree* into *file*, writing the result to the
    master modpack."""
    try:
        base_file = util.get_game_file(file)
    except FileNotFoundError:
        util.vprint(f"Skipping {file}, not found in dump")
        return
    dest = util.get_master_modpack_dir() / file
    # prefer an already-merged copy over the stock dump as the base
    if dest.exists():
        base_file = dest
    sarc = Sarc(util.unyaz_if_needed(base_file.read_bytes()))
    new_data = _merge_in_sarc(sarc, tree)
    suffix = base_file.suffix
    if suffix.startswith(".s") and suffix != ".ssarc":
        new_data = util.compress(new_data)
    dest.parent.mkdir(parents=True, exist_ok=True)
    dest.write_bytes(new_data)
Exemple #13
0
def get_shop_diffs(file: str, tree: dict, tmp_dir: Path) -> Optional[dict]:
    """Diff the shop data in a modded SARC against its stock counterpart,
    returning None if either side cannot be loaded."""
    load_errors = (FileNotFoundError, InvalidDataError, ValueError, RuntimeError)
    try:
        ref_sarc = Sarc(util.unyaz_if_needed(util.get_game_file(file).read_bytes()))
    except load_errors as err:
        util.vprint(f"{file} ignored on stock side, cuz {err}")
        return None
    try:
        sarc = Sarc(util.unyaz_if_needed((tmp_dir / file).read_bytes()))
    except load_errors:
        util.vprint(f"{file} corrupt, ignored")
        return None
    result = _get_diffs_from_sarc(sarc, ref_sarc, tree, file)
    del sarc, ref_sarc
    return result
Exemple #14
0
def _make_bnp_logs(tmp_dir: Path, options: dict):
    """Generate merger logs for a BNP in progress, then strip files whose
    content is now captured by a log.

    :param tmp_dir: The mod's temp directory.
    :param options: Options dict passed through to log generation.
    """
    util.vprint(install.generate_logs(tmp_dir, options=options))

    print("Removing unnecessary files...")

    if (tmp_dir / "logs" / "map.yml").exists():
        print("Removing map units...")
        # MainField map units named like "A-1_*.smubin" are covered by map.yml
        for file in [
            file
            for file in tmp_dir.rglob("**/*.smubin")
            if fnmatch(file.name, "[A-Z]-[0-9]_*.smubin") and "MainField" in file.parts
        ]:
            file.unlink()

    if set((tmp_dir / "logs").glob("*texts*")):
        print("Removing language bootup packs...")
        # text logs replace per-language Bootup packs
        for bootup_lang in (tmp_dir / util.get_content_path() / "Pack").glob(
            "Bootup_*.pack"
        ):
            bootup_lang.unlink()

    if (tmp_dir / "logs" / "actorinfo.yml").exists() and (
        tmp_dir / util.get_content_path() / "Actor" / "ActorInfo.product.sbyml"
    ).exists():
        print("Removing ActorInfo.product.sbyml...")
        (
            tmp_dir / util.get_content_path() / "Actor" / "ActorInfo.product.sbyml"
        ).unlink()

    if (tmp_dir / "logs" / "gamedata.yml").exists() or (
        tmp_dir / "logs" / "savedata.yml"
    ).exists():
        print("Removing gamedata sarcs...")
        # drop the gamedata/savedata archives from Bootup.pack since their
        # contents are logged; rewrite the pack without them
        bsarc = oead.Sarc(
            (tmp_dir / util.get_content_path() / "Pack" / "Bootup.pack").read_bytes()
        )
        csarc = oead.SarcWriter.from_sarc(bsarc)
        bsarc_files = {f.name for f in bsarc.get_files()}
        if "GameData/gamedata.ssarc" in bsarc_files:
            del csarc.files["GameData/gamedata.ssarc"]
        if "GameData/savedataformat.ssarc" in bsarc_files:
            del csarc.files["GameData/savedataformat.ssarc"]
        (tmp_dir / util.get_content_path() / "Pack" / "Bootup.pack").write_bytes(
            csarc.write()[1]
        )
Exemple #15
0
 def get_mod_info(self, params):
     """Return display metadata (changes, description, preview image, …)
     for a single installed mod."""
     mod = BcmlMod.from_json(params["mod"])
     util.vprint(mod)
     img = ""
     try:
         img = base64.b64encode(mod.get_preview().read_bytes()).decode("utf8")
     except:  # pylint: disable=bare-except
         # the preview image is optional; any failure just leaves it blank
         pass
     changes = [
         m.NAME.upper() for m in mergers.get_mergers() if m().is_mod_logged(mod)
     ]
     return {
         "changes": changes,
         "desc": mod.description,
         "date": mod.date,
         "processed": (mod.path / ".processed").exists(),
         "image": img,
         "url": mod.url,
     }
Exemple #16
0
 def consolidate_diffs(self, diffs: ParameterIO):
     """Rebuild a nested dict of diffs from the flat ParameterIO log.

     :param diffs: The logged diff ParameterIO; its "Filenames" object maps
         indices to nested paths, and its lists hold the actual diffs.
     :returns: Nested dict keyed by path segments, or None if empty.
     """
     if not diffs:
         return None
     consolidated: Dict[str, Any] = {}
     for _, file in diffs.objects["Filenames"].params.items():
         try:
             util.dict_merge(
                 consolidated,
                 # fold "a//b//c" into {"a": {"b": {"c": diff}}}
                 reduce(
                     lambda res, cur: {cur: res},
                     reversed(file.v.split("//")),
                     diffs.lists[file.v],
                 ),
             )
         except KeyError as err:
             util.vprint(diffs)
             # chain the KeyError so the real cause isn't lost
             raise Exception(
                 f"{_}: {file} in diff lists: {file.v in diffs.lists}"
             ) from err
     return consolidated
Exemple #17
0
def _merge_in_sarc(sarc: Sarc, edits: dict) -> ByteString:
    """Apply nested shop data edits to a SARC, returning the new SARC bytes.

    :param sarc: The SARC to merge into.
    :param edits: Dict mapping file names either to nested edit dicts
        (nested SARCs) or to ParameterLists (AAMP shop diffs).
    :returns: The rebuilt SARC's bytes.
    """
    new_sarc = SarcWriter.from_sarc(sarc)
    for file, stuff in edits.items():
        if isinstance(stuff, dict):
            # dict edits target a nested SARC: recurse
            try:
                ofile = sarc.get_file(file)
                if ofile is None:  # was `== None`; identity check is correct
                    raise FileNotFoundError(f"Could not find nested file {file} in SARC")
                sub_sarc = Sarc(util.unyaz_if_needed(ofile.data))
            except (
                InvalidDataError,
                ValueError,
                AttributeError,
                RuntimeError,
                FileNotFoundError,
            ):
                util.vprint(f"Couldn't merge into nested SARC {file}")
                continue
            nsub_bytes = _merge_in_sarc(sub_sarc, stuff)
            # re-compress if the extension marks it as yaz0-compressed
            new_sarc.files[file] = (
                util.compress(nsub_bytes)
                if file[file.rindex(".") :].startswith(".s")
                else nsub_bytes
            )
        elif isinstance(stuff, ParameterList):
            try:
                ofile = sarc.get_file(file)
                if ofile is None:  # was `== None`; identity check is correct
                    raise FileNotFoundError(f"Could not find nested file {file} in SARC")
                pio = ParameterIO.from_binary(ofile.data)
            except (
                AttributeError,
                ValueError,
                InvalidDataError,
                FileNotFoundError,
            ) as err:
                util.vprint(f"Couldn't open {file}: {err}")
                continue
            new_pio = merge_shopdata(pio, stuff)
            new_sarc.files[file] = new_pio.to_binary()
    return new_sarc.write()[1]
Exemple #18
0
 def install_mod(self, params: dict):
     """Install one or more mods from the frontend and refresh all merges.

     ``params`` is expected to contain "mods" (list of paths), "options",
     and optionally "selects" (per-mod option selections).
     """
     util.vprint(params)
     with Pool(maxtasksperchild=1000) as pool:
         selects = (params["selects"]
                    if "selects" in params and params["selects"] else {})
         mods = [
             install.install_mod(
                 Path(m),
                 options=params["options"],
                 selects=selects.get(m, None),
                 pool=pool,
             ) for m in params["mods"]
         ]
         util.vprint(f"Installed {len(mods)} mods")
         print(f"Installed {len(mods)} mods")
         try:
             install.refresh_merges()
             print("Install complete")
         except Exception:  # pylint: disable=broad-except
             # kill outstanding pool work before propagating the error
             pool.terminate()
             raise
Exemple #19
0
def find_modded_sarc_files(mod_sarc: Union[Path, oead.Sarc],
                           tmp_dir: Path,
                           name: str = "",
                           aoc: bool = False) -> List[str]:
    """Recursively find modified files inside a (possibly nested) SARC.

    :param mod_sarc: The SARC to scan, as a path (top level) or an already
        opened oead.Sarc (recursion).
    :param tmp_dir: The mod's temp directory, for relative path names.
    :param name: Nested path prefix (set automatically on recursion).
    :param aoc: Whether the SARC belongs to DLC content.
    :returns: List of nested paths ("pack//file") of modified files.
    """
    if isinstance(mod_sarc, Path):
        # language bootup packs are handled separately by the text merger
        if any(mod_sarc.name.startswith(exclude) for exclude in ["Bootup_"]):
            return []
        name = str(mod_sarc.relative_to(tmp_dir))
        aoc = util.get_dlc_path() in mod_sarc.parts or "Aoc" in mod_sarc.parts
        try:
            mod_sarc = oead.Sarc(util.unyaz_if_needed(mod_sarc.read_bytes()))
        except (RuntimeError, ValueError, oead.InvalidDataError):
            return []
    modded_files = []
    for file, contents in [(f.name, bytes(f.data))
                           for f in mod_sarc.get_files()]:
        # canonical name: drop the compression marker from the extension
        canon = file.replace(".s", ".")
        if aoc:
            canon = "Aoc/0010/" + canon
        contents = util.unyaz_if_needed(contents)
        nest_path = str(name).replace("\\", "/") + "//" + file
        if util.is_file_modded(canon, contents, True):
            modded_files.append(nest_path)
            util.vprint(
                f'Found modded file {canon} in {str(name).replace("//", "/")}')
            if util.is_file_sarc(canon) and ".ssarc" not in file:
                # recurse into nested SARCs (but not .ssarc)
                try:
                    nest_sarc = oead.Sarc(contents)
                except ValueError:
                    continue
                sub_mod_files = find_modded_sarc_files(nest_sarc,
                                                       name=nest_path,
                                                       tmp_dir=tmp_dir,
                                                       aoc=aoc)
                modded_files.extend(sub_mod_files)
        else:
            util.vprint(
                f'Ignored unmodded file {canon} in {str(name).replace("//", "/")}'
            )
    return modded_files
Exemple #20
0
def _merge_in_sarc(sarc: Sarc, edits: dict) -> ByteString:
    """Apply nested AAMP edits to a SARC, returning the new SARC bytes."""
    writer = SarcWriter.from_sarc(sarc)
    for name, edit in edits.items():
        if isinstance(edit, dict):
            # dict edits target a nested SARC: recurse
            try:
                nested = Sarc(util.unyaz_if_needed(sarc.get_file(name).data))
            except (InvalidDataError, ValueError, AttributeError,
                    RuntimeError):
                util.vprint(f"Couldn't merge into nested SARC {name}")
                continue
            merged = _merge_in_sarc(nested, edit)
            # re-compress if the extension marks it as yaz0-compressed
            ext = name[name.rindex("."):]
            writer.files[name] = (
                util.compress(merged) if ext.startswith(".s") else merged
            )
        elif isinstance(edit, ParameterList):
            try:
                pio = ParameterIO.from_binary(sarc.get_file(name).data)
            except (AttributeError, ValueError, InvalidDataError) as exc:
                util.vprint(f"Couldn't open {name}: {exc}")
                continue
            merge_plists(pio, edit)
            writer.files[name] = pio.to_binary()
    return writer.write()[1]
Exemple #21
0
def generate_logs(
    tmp_dir: Path,
    options: dict = None,
    pool: Optional[multiprocessing.pool.Pool] = None,
) -> List[Union[Path, str]]:
    """Generate all merger logs for the mod files in *tmp_dir*.

    :param tmp_dir: The mod's temp directory.
    :param options: Dict with "disable" (merger names to skip) and "options"
        (per-merger settings); a default is built if omitted.
    :param pool: Optional process pool to reuse; a temporary one is created
        (and cleaned up here) if not given.
    :returns: The list of modified files found.
    :raises RuntimeError: If no modified files, patches, or logs are found.
    """
    if isinstance(tmp_dir, str):
        tmp_dir = Path(tmp_dir)
    if not options:
        options = {"disable": [], "options": {}}
    if "disable" not in options:
        options["disable"] = []
    util.vprint(options)

    this_pool = pool or Pool(maxtasksperchild=500)
    print("Scanning for modified files...")
    modded_files = find_modded_files(tmp_dir, pool=pool)
    if not (modded_files or (tmp_dir / "patches").exists() or
            (tmp_dir / "logs").exists()):
        if not pool:
            # close the pool we created before bailing out, or it leaks
            this_pool.close()
            this_pool.join()
        if "options" in tmp_dir.parts:
            message = (
                f"No modified files were found in {str(tmp_dir)}. "
                f"This may mean that this option's files are identical to the "
                f"base mod's, or that the folder has an improper structure.")
        else:
            message = (
                f"No modified files were found in {str(tmp_dir)}. "
                f"This probably means this mod is not in a supported format.")
        raise RuntimeError(message)

    (tmp_dir / "logs").mkdir(parents=True, exist_ok=True)
    try:
        for i, merger_class in enumerate([
                merger_class for merger_class in mergers.get_mergers()
                if merger_class.NAME not in options["disable"]
        ]):
            merger = merger_class()  # type: ignore
            util.vprint(
                f"Merger {merger.NAME}, #{i+1} of {len(mergers.get_mergers())}"
            )
            if options is not None and merger.NAME in options["options"]:
                merger.set_options(options["options"][merger.NAME])
            merger.set_pool(this_pool)
            merger.log_diff(tmp_dir, modded_files)
        if util.get_settings("strip_gfx"):
            dev._clean_sarcs(tmp_dir,
                             util.get_hash_table(util.get_settings("wiiu")),
                             this_pool)
    except:  # pylint: disable=bare-except
        # NOTE(review): terminate() after close()/join() is a no-op once all
        # workers have exited — order kept as in the original; confirm intent
        this_pool.close()
        this_pool.join()
        this_pool.terminate()
        raise
    if not pool:
        this_pool.close()
        this_pool.join()
    util.vprint(modded_files)
    return modded_files
Exemple #22
0
def _check_modded(file: Path, tmp_dir: Path):
    """Return *file* if it differs from the stock game data, else None
    (deleting certain unmodded DLC map files along the way)."""
    rel = file.relative_to(tmp_dir).as_posix()
    try:
        canon = util.get_canon_name(rel)
    except ValueError:
        util.vprint(f"Ignored unknown file {rel}")
        return None
    if not util.is_file_modded(canon, file, True):
        # unmodded DLC MainField maps are dropped outright
        if "Aoc/0010/Map/MainField" in canon:
            file.unlink()
        util.vprint(f"Ignored unmodded file {canon}")
        return None
    util.vprint(f"Found modded file {canon}")
    return file
Exemple #23
0
def generate_logs(
    tmp_dir: Path,
    options: dict = None,
    pool: Optional[multiprocessing.pool.Pool] = None,
) -> List[Union[Path, str]]:
    """Generate all merger logs for the mod files in *tmp_dir*.

    :param tmp_dir: The mod's temp directory.
    :param options: Dict with "disable" (merger names to skip) and "options"
        (per-merger settings); a default is built if omitted.
    :param pool: Optional process pool to reuse; a temporary one is created
        (and cleaned up here) if not given.
    :returns: The list of modified files found.
    :raises RuntimeError: If no modified files are found.
    """
    if isinstance(tmp_dir, str):
        tmp_dir = Path(tmp_dir)
    if not options:
        options = {"disable": [], "options": {}}
    if "disable" not in options:
        options["disable"] = []
    util.vprint(options)

    this_pool = pool or Pool(maxtasksperchild=500)
    print("Scanning for modified files...")
    modded_files = find_modded_files(tmp_dir, pool=pool)
    if not modded_files:
        if not pool:
            # close the pool we created before bailing out, or it leaks
            this_pool.close()
            this_pool.join()
        raise RuntimeError(
            # original message was missing the space between sentences
            f"No modified files were found in {str(tmp_dir)}. "
            "This probably means this mod is not in a supported format.")

    (tmp_dir / "logs").mkdir(parents=True, exist_ok=True)
    try:
        for i, merger_class in enumerate([
                merger_class for merger_class in mergers.get_mergers()
                if merger_class.NAME not in options["disable"]
        ]):
            merger = merger_class()  # type: ignore
            util.vprint(
                f"Merger {merger.NAME}, #{i+1} of {len(mergers.get_mergers())}"
            )
            if options is not None and merger.NAME in options["options"]:
                merger.set_options(options["options"][merger.NAME])
            merger.set_pool(this_pool)
            merger.log_diff(tmp_dir, modded_files)
    except:  # pylint: disable=bare-except
        # NOTE(review): terminate() after close()/join() is a no-op once all
        # workers have exited — order kept as in the original; confirm intent
        this_pool.close()
        this_pool.join()
        this_pool.terminate()
        raise
    if not pool:
        this_pool.close()
        this_pool.join()
    util.vprint(modded_files)
    return modded_files
Exemple #24
0
    def generate_diff(self, mod_dir: Path, modded_files: List[Union[str,
                                                                    Path]]):
        """Log text changes for every language the user wants saved.

        :param mod_dir: The mod's temp directory.
        :param modded_files: Modified files detected in the mod.
        :returns: Dict mapping each save language to its text diff, or
            None if the mod modifies no language packs.
        """
        print("Checking for modified languages...")
        # languages whose Bootup packs the mod modifies
        languages = {
            util.get_file_language(file)
            for file in modded_files
            if (isinstance(file, Path) and "Bootup_" in file.name
                and "Graphic" not in file.name)
        }
        if not languages:
            return None
        util.vprint(f'Languages: {",".join(languages)}')

        # map each language to be saved onto the closest modded language
        language_map = {}
        save_langs = (LANGUAGES if self._options.get("all_langs", False) else
                      [util.get_settings("lang")])
        for lang in save_langs:
            if lang in languages:
                language_map[lang] = lang  # exact match
            elif lang[2:4] in [l[2:4] for l in languages]:
                # same language, different region (chars 2-4 are the
                # language code, e.g. the "en" in "USen")
                language_map[lang] = [
                    l for l in languages if l[2:4] == lang[2:4]
                ][0]
            else:
                # no match at all: fall back to the first modded language
                language_map[lang] = [l for l in LANGUAGES
                                      if l in languages][0]
        util.vprint(f"Language map:")
        util.vprint(language_map)

        # diff each distinct mapped language only once
        language_diffs = {}
        for language in set(language_map.values()):
            print(f"Logging text changes for {language}...")
            language_diffs[language] = diff_language(
                mod_dir / util.get_content_path() / "Pack" /
                f"Bootup_{language}.pack",
                pool=self._pool,
            )

        return {
            save_lang: language_diffs[map_lang]
            for save_lang, map_lang in language_map.items()
        }
Exemple #25
0
    def generate_diff(self, mod_dir: Path, modded_files: List[Union[str, Path]]):
        """Log text changes for each of the user's languages.

        :param mod_dir: The mod's temp directory.
        :param modded_files: Modified files detected in the mod.
        :returns: Dict mapping each user language to its text diff, or
            None if the mod modifies no language packs.
        """
        print("Checking for modified languages...")
        # languages whose Bootup packs the mod modifies
        mod_langs = {
            util.get_file_language(file)
            for file in modded_files
            if (
                isinstance(file, Path)
                and "Bootup_" in file.name
                and "Graphic" not in file.name
            )
        }
        if not mod_langs:
            return None
        util.vprint(f'Languages: {",".join(mod_langs)}')

        # find a user lang for each mod lang
        language_map = map_languages(mod_langs, util.get_user_languages())
        util.vprint("Language map:")
        util.vprint(language_map)

        language_diffs = {}
        for mod_lang, user_lang in language_map.items():
            print(f"Logging text changes for {user_lang}...")
            mod_pack = (
                mod_dir / util.get_content_path() / "Pack" / f"Bootup_{mod_lang}.pack"
            )
            if not user_lang == mod_lang:
                # region mismatch: make a temporary region-swapped copy
                mod_pack = swap_region(mod_pack, user_lang)
            ref_pack = util.get_game_file(f"Pack/Bootup_{user_lang}.pack")
            # chars 2-4 are the language code (e.g. the "en" in "USen");
            # the flag tells the differ the languages themselves differ
            language_diffs[user_lang] = rsext.mergers.texts.diff_language(
                str(mod_pack), str(ref_pack), user_lang[2:4] != mod_lang[2:4]
            )
            if not user_lang == mod_lang:
                # clean up the temporary swapped pack
                mod_pack.unlink()

        return language_diffs
Exemple #26
0
def merge_sarcs(file_name: str, sarcs: List[Union[Path,
                                                  bytes]]) -> Tuple[str, bytes]:
    """Recursively merge multiple versions of the same SARC archive.

    Versions are given lowest-priority first; the merge walks them in
    reverse so the highest-priority copy of each file wins. Nested SARCs
    are merged recursively; unmodded files fall back to stock contents.

    Args:
        file_name: Name of the SARC being merged (used for nested recursion
            and to detect Bootup.pack for merger injections).
        sarcs: Versions of the archive, as paths or raw (possibly yaz0'd)
            bytes.

    Returns:
        A ``(file_name, merged_sarc_bytes)`` tuple.
    """
    # Normalize every entry to raw bytes without mutating the caller's list
    # (the original implementation wrote the converted bytes back into it).
    raw_versions = [
        sarc.read_bytes() if isinstance(sarc, Path) else sarc for sarc in sarcs
    ]
    opened_sarcs: List[oead.Sarc] = []
    for sarc_bytes in raw_versions:
        sarc_bytes = util.unyaz_if_needed(sarc_bytes)
        try:
            opened_sarcs.append(oead.Sarc(sarc_bytes))
        except (ValueError, RuntimeError, oead.InvalidDataError):
            # Skip corrupt archives rather than failing the whole merge.
            continue

    all_files = {
        file.name
        for open_sarc in opened_sarcs for file in open_sarc.get_files()
    }
    nested_sarcs = {}
    new_sarc = oead.SarcWriter(
        endian=oead.Endianness.Big
        if util.get_settings("wiiu") else oead.Endianness.Little
    )
    files_added = set()

    # Highest-priority version first; the first occurrence of a name wins.
    for opened_sarc in reversed(opened_sarcs):
        for file in [
                f for f in opened_sarc.get_files() if f.name not in files_added
        ]:
            file_data = oead.Bytes(file.data)
            if (file.name[file.name.rindex("."):] in util.SARC_EXTS -
                    EXCLUDE_EXTS) and file.name not in SPECIAL:
                # Nested SARC: collect every version for recursive merging.
                nested_sarcs.setdefault(file.name, []).append(
                    util.unyaz_if_needed(file_data))
            elif util.is_file_modded(file.name.replace(".s", "."),
                                     file_data,
                                     count_new=True):
                # Plain modded file: copy straight into the output.
                new_sarc.files[file.name] = file_data
                files_added.add(file.name)
    util.vprint(set(nested_sarcs.keys()))
    for nested_name, nested_versions in nested_sarcs.items():
        if not nested_versions:
            continue
        # Versions were collected highest-priority first; recurse with the
        # expected lowest-first ordering.
        merged_bytes = merge_sarcs(nested_name, nested_versions[::-1])[1]
        if (Path(nested_name).suffix.startswith(".s")
                and not nested_name.endswith(".sarc")):
            merged_bytes = util.compress(merged_bytes)
        new_sarc.files[nested_name] = merged_bytes
        files_added.add(nested_name)
    # Any remaining (unmodded) file is taken from the first archive holding it.
    for file in [file for file in all_files if file not in files_added]:
        for opened_sarc in [
                open_sarc for open_sarc in opened_sarcs
                if (file in [f.name for f in open_sarc.get_files()])
        ]:
            new_sarc.files[file] = oead.Bytes(opened_sarc.get_file(file).data)
            break

    if "Bootup.pack" in file_name:
        # Bootup.pack carries merger-generated resources; inject them now.
        for merger in [
                merger() for merger in mergers.get_mergers()
                if merger.is_bootup_injector()
        ]:
            inject = merger.get_bootup_injection()
            if not inject:
                continue
            file, data = inject
            new_sarc.files[file] = data

    return (file_name, bytes(new_sarc.write()[1]))
Exemple #27
0
def threaded_merge(item) -> Tuple[str, dict]:
    """Deep merges an individual file, suitable for multiprocessing.

    Args:
        item: A ``(file, stuff)`` pair, where ``file`` is the canonical game
            path and ``stuff`` holds the patches to apply (SARC patch map or
            AAMP shop additions/removals).

    Returns:
        ``(canonical_name, failures)``; ``("", {})`` when the stock base
        file cannot be found.

    Raises:
        ValueError: If the base file is neither a SARC nor an AAMP file.
        RuntimeError: If an AAMP merge fails unexpectedly.
    """
    file, stuff = item
    failures = {}

    try:
        base_file = util.get_game_file(file,
                                       file.startswith(util.get_dlc_path()))
    except FileNotFoundError:
        return "", {}
    # Prefer an already-merged copy in the master modpack when one exists.
    if (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    file_ext = os.path.splitext(file)[1]
    file_bytes = base_file.read_bytes()
    yazd = file_bytes[0:4] == b"Yaz0"
    file_bytes = file_bytes if not yazd else util.decompress(file_bytes)
    magic = file_bytes[0:4]

    if magic == b"SARC":
        new_sarc, sub_failures = nested_patch(oead.Sarc(file_bytes), stuff)
        del file_bytes
        new_bytes = bytes(new_sarc.write()[1])
        for failure, contents in sub_failures.items():
            print(f"Some patches to {failure} failed to apply.")
            failures[failure] = contents
    elif magic == b"AAMP":
        try:
            aamp_contents = ParameterIO.from_binary(file_bytes)
            try:
                aamp_contents = shop_merge(
                    aamp_contents,
                    file_ext.replace(".", ""),
                    stuff.lists["Additions"],
                    stuff.lists["Removals"],
                )
                aamp_bytes = ParameterIO.to_binary(aamp_contents)
            except Exception as err:  # pylint: disable=broad-except
                raise RuntimeError(
                    f"AAMP file {file} could not be merged.") from err
            del aamp_contents
            new_bytes = aamp_bytes if not yazd else util.compress(aamp_bytes)
        except ValueError:
            # Unparseable AAMP: keep the base file untouched.
            new_bytes = file_bytes
            del file_bytes
            print(f"Deep merging file {file} failed. No changes were made.")
    else:
        raise ValueError(f"{file} is not a SARC or AAMP file.")

    # Re-compress only if the source file was yaz0-compressed.
    new_bytes = new_bytes if not yazd else util.compress(new_bytes)
    output_file = util.get_master_modpack_dir() / file
    if base_file == output_file:
        output_file.unlink()
    output_file.parent.mkdir(parents=True, exist_ok=True)
    output_file.write_bytes(new_bytes)
    del new_bytes
    if magic == b"SARC":
        util.vprint(f"Finished patching files inside {file}")
    else:
        util.vprint(f"Finished patching {file}")
    return util.get_canon_name(file), failures
Exemple #28
0
    def perform_merge(self):
        """Merge all modded game texts into new Bootup_<lang>.pack files.

        For each target language, applies every mod's MSYT diffs on top of
        reference text files, rebuilds the MSBTs with the external `msyt`
        tool, and writes the result into the master modpack.
        """
        # pylint: disable=unsupported-assignment-operation
        # Merge only the user's language, or every installed language when
        # the all_langs option is set.
        langs = ({util.get_settings("lang")}
                 if not self._options["all_langs"] else get_user_languages())
        for lang in langs:
            print("Loading text mods...")
            # NOTE(review): the consolidated diffs are recomputed on every
            # loop iteration even though they don't depend on `lang` —
            # presumably hoistable; confirm before changing.
            diffs = self.consolidate_diffs(self.get_all_diffs())
            if not diffs or lang not in diffs:
                print("No text merge necessary")
                for bootup in util.get_master_modpack_dir().rglob(
                        "**/Bootup_????.pack"):
                    bootup.unlink()
                # NOTE(review): this returns from the whole merge on the
                # first language with no diffs, skipping any remaining
                # languages — verify this early exit is intended rather
                # than a `continue`.
                return
            util.vprint({
                lang: {
                    file: list(entries.keys())
                    for file, entries in diffs[lang].items()
                }
            })

            print(f"Merging modded texts for {lang}...")
            saved_files = set()
            with TemporaryDirectory() as tmp:
                tmp_dir = Path(tmp)
                # All *en languages share the XXen reference text set.
                ref_lang = "XXen" if lang.endswith("en") else lang
                extract_refs(ref_lang, tmp_dir)
                tmp_dir = tmp_dir / "refs" / ref_lang
                # Reuse the instance pool when present; otherwise spin up a
                # temporary one and close it afterward.
                this_pool = self._pool or multiprocessing.Pool(
                    maxtasksperchild=500)
                this_pool.map(partial(merge_msyt, tmp_dir=tmp_dir),
                              diffs[lang].items())
                if not self._pool:
                    this_pool.close()
                    this_pool.join()

                # Rebuild binary MSBTs from the merged MSYTs in place.
                m_args = [
                    MSYT_PATH,
                    "create",
                    "-d",
                    str(tmp_dir),
                    "-p",
                    "wiiu" if util.get_settings("wiiu") else "switch",
                    "-o",
                    str(tmp_dir),
                ]
                result: subprocess.CompletedProcess
                if system() == "Windows":
                    # CREATE_NO_WINDOW keeps the console window hidden.
                    result = subprocess.run(
                        m_args,
                        capture_output=True,
                        creationflags=util.CREATE_NO_WINDOW,
                        check=False,
                        text=True,
                    )

                else:
                    result = subprocess.run(
                        m_args,
                        capture_output=True,
                        check=False,
                        text=True,
                    )
                if result.stderr:
                    raise RuntimeError(
                        f"There was an error merging game texts. {result.stderr}"
                    )

                # Pack the rebuilt MSBTs into the message SARC before the
                # temp directory is cleaned up.
                msg_sarc = oead.SarcWriter(
                    endian=oead.Endianness.Big if util.
                    get_settings("wiiu") else oead.Endianness.Little)
                for file in tmp_dir.rglob("**/*.msbt"):
                    msg_sarc.files[file.relative_to(
                        tmp_dir).as_posix()] = file.read_bytes()
                    saved_files.add(file.relative_to(tmp_dir).as_posix())
            # Nest the compressed message SARC inside a new Bootup pack.
            bootup_sarc = oead.SarcWriter(
                endian=oead.Endianness.Big if util.
                get_settings("wiiu") else oead.Endianness.Little)
            bootup_sarc.files[
                f"Message/Msg_{lang}.product.ssarc"] = util.compress(
                    msg_sarc.write()[1])

            bootup_path = (util.get_master_modpack_dir() /
                           util.get_content_path() / "Pack" /
                           f"Bootup_{lang}.pack")
            bootup_path.parent.mkdir(parents=True, exist_ok=True)
            bootup_path.write_bytes(bootup_sarc.write()[1])
            del bootup_sarc
            del msg_sarc
            print(f"{lang} texts merged successfully")
Exemple #29
0
    def perform_merge(self):
        """Merge all modded gamedata flags and rebuild gamedata.sarc.

        Consolidates every mod's gamedata diff, applies additions and
        deletions on top of stock gamedata, packs the result into a new
        gamedata.sarc injected into Bootup.pack, updates the RSTB entry,
        and records a hash of the diffs so unchanged merges can be skipped.
        """
        force = self._options.get("force", False)
        glog_path = util.get_master_modpack_dir() / "logs" / "gamedata.log"

        modded_entries = self.consolidate_diffs(self.get_all_diffs())
        util.vprint("All gamedata diffs:")
        util.vprint(modded_entries)
        if not modded_entries:
            # Nothing modded: remove any stale logs from a previous merge.
            print("No gamedata merging necessary.")
            if glog_path.exists():
                glog_path.unlink()
            sarc_log = util.get_master_modpack_dir() / "logs" / "gamedata.sarc"
            if sarc_log.exists():
                sarc_log.unlink()
            return
        if glog_path.exists() and not force:
            # Skip the merge when the diff hash matches the previous run.
            with glog_path.open("r") as l_file:
                if xxhash.xxh64_hexdigest(
                        str(modded_entries)) == l_file.read():
                    print("No gamedata merging necessary.")
                    return

        print("Loading stock gamedata...")
        gamedata = consolidate_gamedata(get_stock_gamedata())
        # Index stock entries by DataName for O(1) add/delete merging.
        merged_entries = {
            data_type:
            oead.byml.Hash({entry["DataName"]: entry
                            for entry in entries})
            for data_type, entries in gamedata.items()
        }
        del gamedata

        print("Merging changes...")
        for data_type in {d for d in merged_entries if d in modded_entries}:
            util.dict_merge(
                merged_entries[data_type],
                modded_entries[data_type]["add"],
                shallow=True,
            )
            for entry in modded_entries[data_type]["del"]:
                try:
                    del merged_entries[data_type][entry]
                except KeyError:
                    # Deleting a flag that no longer exists is harmless.
                    continue

        # Use a list comprehension here: the original set comprehension
        # scrambled flag order nondeterministically between runs.
        merged_entries = oead.byml.Hash({
            data_type:
            oead.byml.Array([value for _, value in entries.items()])
            for data_type, entries in merged_entries.items()
        })
        print("Creating and injecting new gamedata.sarc...")
        new_gamedata = oead.SarcWriter(
            endian=oead.Endianness.Big
            if util.get_settings("wiiu") else oead.Endianness.Little)
        # Flags are chunked into .bgdata files of at most 4096 entries each.
        for data_type in merged_entries:
            num_files = ceil(len(merged_entries[data_type]) / 4096)
            for i in range(num_files):
                end_pos = min((i + 1) * 4096, len(merged_entries[data_type]))
                new_gamedata.files[
                    f"/{data_type}_{i}.bgdata"] = oead.byml.to_binary(
                        oead.byml.Hash({
                            data_type:
                            merged_entries[data_type][i * 4096:end_pos]
                        }),
                        big_endian=util.get_settings("wiiu"),
                    )
        new_gamedata_bytes = new_gamedata.write()[1]
        del new_gamedata
        util.inject_file_into_sarc(
            "GameData/gamedata.ssarc",
            util.compress(new_gamedata_bytes),
            "Pack/Bootup.pack",
            create_sarc=True,
        )
        log_dir = util.get_master_modpack_dir() / "logs"
        log_dir.mkdir(parents=True, exist_ok=True)
        (log_dir / "gamedata.sarc").write_bytes(new_gamedata_bytes)

        print("Updating RSTB...")
        rstable.set_size(
            "GameData/gamedata.sarc",
            rstable.calculate_size("GameData/gamedata.sarc",
                                   new_gamedata_bytes),
        )
        del new_gamedata_bytes

        # Record the diff hash so identical future merges can be skipped.
        glog_path.parent.mkdir(parents=True, exist_ok=True)
        with glog_path.open("w", encoding="utf-8") as l_file:
            l_file.write(xxhash.xxh64_hexdigest(str(modded_entries)))
Exemple #30
0
def create_bnp_mod(mod: Path,
                   output: Path,
                   meta: dict,
                   options: Optional[dict] = None):
    """Package a mod file or folder as a BNP archive.

    Args:
        mod: The mod to convert, either an archive file or an extracted
            folder.
        output: Destination path for the finished BNP.
        meta: Mod metadata; an "id" and "platform" are filled in here.
        options: Optional merger options of the form
            ``{"options": {...}, "disable": [...]}``.

    Raises:
        FileNotFoundError: If the mod lacks a valid folder structure.
        Exception: If change-log generation fails.
    """
    if isinstance(mod, str):
        mod = Path(mod)
    if not options:
        options = {"options": {}, "disable": []}

    if mod.is_file():
        print("Extracting mod...")
        tmp_dir: Path = install.open_mod(mod)
    elif mod.is_dir():
        print(f"Loading mod from {str(mod)}...")
        # Create the temp path explicitly: Path(TemporaryDirectory().name)
        # would rely on the GC's finalizer deleting the directory before
        # copytree runs, which is fragile and CPython-specific.
        from tempfile import mkdtemp
        tmp_dir = Path(mkdtemp())
        tmp_dir.rmdir()  # copytree requires the destination not to exist
        shutil.copytree(mod, tmp_dir)
    else:
        print(f"Error: {str(mod)} is neither a valid file nor a directory")
        return

    if not ((tmp_dir / util.get_content_path()).exists() or
            (tmp_dir / util.get_dlc_path()).exists()):
        if (tmp_dir.parent / util.get_content_path()).exists():
            tmp_dir = tmp_dir.parent
        elif util.get_settings("wiiu") and (tmp_dir / "Content").exists():
            # Some Wii U dumps capitalize the content folder.
            (tmp_dir / "Content").rename(tmp_dir / "content")
        else:
            raise FileNotFoundError(
                "This mod does not appear to have a valid folder structure")

    if (tmp_dir / "rules.txt").exists():
        (tmp_dir / "rules.txt").unlink()

    if "showDepends" in meta:
        del meta["showDepends"]
    # The mod's identity is the base64 of "name==version".
    depend_string = f"{meta['name']}=={meta['version']}"
    meta["id"] = urlsafe_b64encode(depend_string.encode("utf8")).decode("utf8")
    any_platform = (options.get("options",
                                dict()).get("general",
                                            dict()).get("agnostic", False))
    meta["platform"] = ("any" if any_platform else
                        "wiiu" if util.get_settings("wiiu") else "switch")
    (tmp_dir / "info.json").write_text(dumps(meta,
                                             ensure_ascii=False,
                                             indent=2),
                                       encoding="utf-8")

    with Pool(maxtasksperchild=500) as pool:
        yml_files = set(tmp_dir.glob("**/*.yml"))
        if yml_files:
            print("Compiling YAML documents...")
            pool.map(_do_yml, yml_files)

        hashes = util.get_hash_table(util.get_settings("wiiu"))
        print("Packing SARCs...")
        _pack_sarcs(tmp_dir, hashes, pool)
        for folder in {d for d in tmp_dir.glob("options/*") if d.is_dir()}:
            _pack_sarcs(folder, hashes, pool)

        # Drop option files identical to the base mod to shrink the BNP.
        for option_dir in tmp_dir.glob("options/*"):
            for file in {
                    f
                    for f in option_dir.rglob("**/*")
                    if (f.is_file() and (tmp_dir /
                                         f.relative_to(option_dir)).exists())
            }:
                data1 = (tmp_dir / file.relative_to(option_dir)).read_bytes()
                data2 = file.read_bytes()
                if data1 == data2:
                    util.vprint(
                        f"Removing {file} from option {option_dir.name}, "
                        "identical to base mod")
                    file.unlink()
                del data1
                del data2

        # Always log texts for every language so the BNP works regardless
        # of the installing user's language setting.
        options["options"]["texts"] = {"all_langs": True}

        try:
            _make_bnp_logs(tmp_dir, options)
            for option_dir in {
                    d
                    for d in tmp_dir.glob("options/*") if d.is_dir()
            }:
                _make_bnp_logs(option_dir, options)
        except Exception as err:  # pylint: disable=broad-except
            pool.terminate()
            raise Exception(
                f"There was an error generating change logs for your mod. {str(err)}"
            ) from err

        _clean_sarcs(tmp_dir, hashes, pool)
        for folder in {d for d in tmp_dir.glob("options/*") if d.is_dir()}:
            _clean_sarcs(folder, hashes, pool)

    print("Cleaning any junk files...")
    for file in {f for f in tmp_dir.rglob("**/*") if f.is_file()}:
        if "logs" in file.parts:
            continue
        if (file.suffix in {".yml", ".json", ".bak", ".tmp", ".old"}
                and file.stem != "info"):
            file.unlink()

    print("Removing blank folders...")
    for folder in reversed(list(tmp_dir.rglob("**/*"))):
        if folder.is_dir() and not list(folder.glob("*")):
            shutil.rmtree(folder)

    print(f"Saving output file to {str(output)}...")
    x_args = [util.get_7z_path(), "a", str(output), f'{str(tmp_dir / "*")}']
    if system() == "Windows":
        subprocess.run(
            x_args,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            creationflags=util.CREATE_NO_WINDOW,
            check=True,
        )
    else:
        subprocess.run(x_args,
                       stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE,
                       check=True)
    shutil.rmtree(tmp_dir, ignore_errors=True)
    print("Conversion complete.")