Example #1
def bootup_from_msbts(
    lang: str = 'USen',
    msbt_dir: Path = util.get_work_dir() / 'tmp_text' / 'merged'
) -> (Path, int):
    """
    Generates a new Bootup_XXxx.pack from a directory of MSBT files

    :param lang: The game language to use, defaults to USen.
    :type lang: str, optional
    :param msbt_dir: The directory to pull MSBTs from, defaults to "tmp_text/merged" in BCML's
    working directory.
    :type msbt_dir: :class:`pathlib.Path`, optional
    :returns: A tuple with the path to the new Bootup_XXxx.pack and the RSTB size of the new
    Msg_XXxx.product.sarc
    :rtype: (:class:`pathlib.Path`, int)
    """
    new_boot_path = msbt_dir.parent / f'Bootup_{lang}.pack'
    with new_boot_path.open('wb') as new_boot:
        s_msg = sarc.SARCWriter(True)
        for new_msbt in msbt_dir.rglob('**/*.msbt'):
            with new_msbt.open('rb') as f_new:
                s_msg.add_file(
                    str(new_msbt.relative_to(msbt_dir)).replace('\\', '/'),
                    f_new.read())
        new_msg_stream = io.BytesIO()
        s_msg.write(new_msg_stream)
        unyaz_bytes = new_msg_stream.getvalue()
        rsize = rstb.SizeCalculator().calculate_file_size_with_ext(
            unyaz_bytes, True, '.sarc')
        new_msg_bytes = util.compress(unyaz_bytes)
        s_boot = sarc.SARCWriter(True)
        s_boot.add_file(f'Message/Msg_{lang}.product.ssarc', new_msg_bytes)
        s_boot.write(new_boot)
    return new_boot_path, rsize
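A minimal usage sketch for the function above (assumes merged MSBT files are already present under BCML's default tmp_text/merged working directory; the printed message is illustrative):

new_boot, msg_size = bootup_from_msbts(lang='USen')
print(f'Built {new_boot.name}; RSTB size for Msg_USen.product.sarc: {msg_size}')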
Example #2
def repack_archive(content_dir: Path, archive_path: Path,
                   rel_archive_dir: Path, wiiu: bool) -> bool:
    temp_archive_dir = archive_path.with_name(archive_path.name +
                                              '.PATCHER_TEMP')
    os.rename(archive_path, temp_archive_dir)

    archive = _find_sarc(content_dir / rel_archive_dir)
    if archive:
        writer = sarc.make_writer_from_sarc(archive, lambda x: True)
    else:
        writer = sarc.SARCWriter(wiiu)
    if not writer:
        return False

    for root, dirs, files in os.walk(temp_archive_dir, topdown=False):
        for file_name in files:
            host_file_path = Path(os.path.join(root, file_name))
            path_in_archive = host_file_path.relative_to(
                temp_archive_dir).as_posix()
            # For some reason, Nintendo uses paths with leading slashes in these archives. Annoying.
            if file_name == 'gamedata.ssarc' or file_name == 'savedataformat.ssarc':
                path_in_archive = '/' + path_in_archive
            with open(host_file_path, 'rb') as f:
                writer.add_file(path_in_archive, f.read())

    with open(archive_path, 'wb') as archive_file:
        writer.write(archive_file)

    if archive_path.suffix.startswith('.s'):
        sys.stderr.write('compressing...\n')
        _compress_file(archive_path)
    shutil.rmtree(temp_archive_dir)
    return True
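A hypothetical call sketch for the function above. The paths are placeholders, and the module-level helpers it relies on (_find_sarc and _compress_file) are assumed to be available:

# Rebuild the extracted directory at Pack/Bootup.pack into a real archive,
# using the stock SARC found under the game content directory as a base.
repack_archive(
    content_dir=Path('game/content'),
    archive_path=Path('patched/content/Pack/Bootup.pack'),
    rel_archive_dir=Path('Pack/Bootup.pack'),
    wiiu=True,
)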
Example #3
def merge_sarcs(file_name: str, sarcs: List[Union[Path, bytes]]) -> (str, bytes):
    opened_sarcs: List[sarc.SARC] = []
    if isinstance(sarcs[0], Path):
        for i, sarc_path in enumerate(sarcs):
            sarcs[i] = sarc_path.read_bytes()
    for sarc_bytes in sarcs:
        sarc_bytes = util.unyaz_if_needed(sarc_bytes)
        try:
            opened_sarcs.append(sarc.SARC(sarc_bytes))
        except ValueError:
            continue

    all_files = {key for open_sarc in opened_sarcs for key in open_sarc.list_files()}
    nested_sarcs = {}
    new_sarc = sarc.SARCWriter(be=True)
    files_added = []

    # for file in all_files:
    #     dm_cache = util.get_master_modpack_dir() / 'logs' / 'dm' / file
    #     if dm_cache.exists():
    #         file_data = dm_cache.read_bytes()
    #         new_sarc.add_file(file, file_data)
    #         files_added.append(file)

    for opened_sarc in reversed(opened_sarcs):
        for file in [file for file in opened_sarc.list_files() if file not in files_added]:
            data = opened_sarc.get_file_data(file).tobytes()
            if util.is_file_modded(file.replace('.s', '.'), data, count_new=True):
                if Path(file).suffix not in util.SARC_EXTS:
                    new_sarc.add_file(file, data)
                    files_added.append(file)
                else:
                    if file not in nested_sarcs:
                        nested_sarcs[file] = []
                    nested_sarcs[file].append(util.unyaz_if_needed(data))
    for file, sarcs in nested_sarcs.items():
        merged_bytes = merge_sarcs(file, sarcs)[1]
        if Path(file).suffix.startswith('.s') and not file.endswith('.sarc'):
            merged_bytes = util.compress(merged_bytes)
        new_sarc.add_file(file, merged_bytes)
        files_added.append(file)
    for file in [file for file in all_files if file not in files_added]:
        for opened_sarc in [open_sarc for open_sarc in opened_sarcs \
                            if file in open_sarc.list_files()]:
            new_sarc.add_file(file, opened_sarc.get_file_data(file).tobytes())
            break

    if 'Bootup.pack' in file_name:
        for merger in [merger() for merger in mergers.get_mergers() if merger.is_bootup_injector()]:
            inject = merger.get_bootup_injection()
            if not inject:
                continue
            file, data = inject
            try:
                new_sarc.delete_file(file)
            except KeyError:
                pass
            new_sarc.add_file(file, data)

    return (file_name, new_sarc.get_bytes())
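A short call sketch for the function above (hypothetical paths; assumes BCML's util and mergers modules are importable alongside the sarc library):

from pathlib import Path

name, merged_bytes = merge_sarcs(
    'TitleBG.pack',
    [Path('mod1/content/Pack/TitleBG.pack'), Path('mod2/content/Pack/TitleBG.pack')],
)
Path('TitleBG.pack').write_bytes(merged_bytes)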
Example #4
def _clean_sarc(file: Path, hashes: dict, tmp_dir: Path):
    canon = util.get_canon_name(file.relative_to(tmp_dir))
    try:
        stock_file = util.get_game_file(file.relative_to(tmp_dir))
    except FileNotFoundError:
        return
    with stock_file.open('rb') as old_file:
        old_sarc = sarc.read_file_and_make_sarc(old_file)
        if not old_sarc:
            return
        old_files = set(old_sarc.list_files())
    if canon not in hashes:
        return
    with file.open('rb') as s_file:
        base_sarc = sarc.read_file_and_make_sarc(s_file)
    if not base_sarc:
        return
    new_sarc = sarc.SARCWriter(True)
    can_delete = True
    for nest_file in base_sarc.list_files():
        canon = nest_file.replace('.s', '.')
        ext = Path(canon).suffix
        if ext in {'.yml', '.bak'}:
            continue
        file_data = base_sarc.get_file_data(nest_file).tobytes()
        xhash = xxhash.xxh32(util.unyaz_if_needed(file_data)).hexdigest()
        if nest_file in old_files:
            old_hash = xxhash.xxh32(
                util.unyaz_if_needed(
                    old_sarc.get_file_data(nest_file).tobytes())).hexdigest()
        if nest_file not in old_files or (xhash != old_hash
                                          and ext not in util.AAMP_EXTS):
            can_delete = False
            new_sarc.add_file(nest_file, file_data)
    del old_sarc
    if can_delete:
        del new_sarc
        file.unlink()
    else:
        with file.open('wb') as s_file:
            if file.suffix.startswith('.s') and file.suffix != '.ssarc':
                s_file.write(util.compress(new_sarc.get_bytes()))
            else:
                new_sarc.write(s_file)
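A direct, single-file call sketch for the helper above, mirroring how Example #8 below invokes it through a process pool (paths are placeholders):

from pathlib import Path

hashes = util.get_hash_table()
tmp_dir = Path('tmp_mod')
_clean_sarc(tmp_dir / 'content/Pack/TitleBG.pack', hashes=hashes, tmp_dir=tmp_dir)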
Example #5
def merge_savedata(verbose: bool = False, force: bool = False):
    """ Merges install savedata mods and saves the new Bootup.pack, fixing the RSTB if needed"""
    mods = get_savedata_mods()
    slog_path = util.get_master_modpack_dir() / 'logs' / 'savedata.log'
    if not mods:
        print('No savedata merging necessary.')
        if slog_path.exists():
            slog_path.unlink()
        if (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').exists():
            (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').unlink()
        return
    if slog_path.exists() and not force:
        with slog_path.open('r') as l_file:
            if xxhash.xxh32(str(mods)).hexdigest() == l_file.read():
                print('No savedata merging necessary.')
                return

    new_entries = []
    new_hashes = []
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    print('Loading savedata mods...')
    for mod in mods:
        with open(mod.path / 'logs' / 'savedata.yml') as s_file:
            yml = yaml.load(s_file, Loader=loader)
            for entry in yml:
                if entry['HashValue'] in new_hashes:
                    continue
                new_entries.append(entry)
                new_hashes.append(entry['HashValue'])
                if verbose:
                    print(f'  Added {entry["DataName"]} from {mod.name}')

    savedata = get_stock_savedata()
    merged_entries = []
    save_files = sorted(savedata.list_files())[0:-2]

    print('Loading stock savedata...')
    for file in save_files:
        merged_entries.extend(byml.Byml(savedata.get_file_data(
            file).tobytes()).parse()['file_list'][1])

    print('Merging changes...')
    merged_entries.extend(new_entries)
    merged_entries.sort(key=lambda x: x['HashValue'])

    special_bgsv = [
        savedata.get_file_data('/saveformat_6.bgsvdata').tobytes(),
        savedata.get_file_data('/saveformat_7.bgsvdata').tobytes(),
    ]

    print('Creating and injecting new savedataformat.sarc...')
    new_savedata = sarc.SARCWriter(True)
    num_files = ceil(len(merged_entries) / 8192)
    for i in range(num_files):
        end_pos = min((i + 1) * 8192, len(merged_entries))
        buf = BytesIO()
        byml.Writer({
            'file_list': [
                {
                    'IsCommon': False,
                    'IsCommonAtSameAccount': False,
                    'IsSaveSecureCode': True,
                    'file_name': 'game_data.sav'
                },
                merged_entries[i*8192:end_pos]
            ],
            'save_info': [
                {
                    'directory_num': byml.Int(8),
                    'is_build_machine': True,
                    'revision': byml.Int(18203)
                }
            ]
        }, True).write(buf)
        new_savedata.add_file(f'/saveformat_{i}.bgsvdata', buf.getvalue())
    new_savedata.add_file(f'/saveformat_{num_files}.bgsvdata', special_bgsv[0])
    new_savedata.add_file(
        f'/saveformat_{num_files + 1}.bgsvdata', special_bgsv[1])
    bootup_rstb = inject_savedata_into_bootup(new_savedata)
    (util.get_master_modpack_dir() / 'logs').mkdir(parents=True, exist_ok=True)
    with (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').open('wb') as s_file:
        new_savedata.write(s_file)

    print('Updating RSTB...')
    rstable.set_size('GameData/savedataformat.sarc', bootup_rstb)

    slog_path.parent.mkdir(parents=True, exist_ok=True)
    with slog_path.open('w', encoding='utf-8') as l_file:
        l_file.write(xxhash.xxh32(str(mods)).hexdigest())
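A minimal call sketch for the merger above; per the log-hash check at the top of the function, force=True re-merges even when the installed mod list has not changed:

merge_savedata(verbose=True, force=True)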
Example #6
def merge_gamedata(verbose: bool = False, force: bool = False):
    """ Merges installed gamedata mods and saves the new Bootup.pack, fixing the RSTB if needed """
    mods = get_gamedata_mods()
    glog_path = util.get_master_modpack_dir() / 'logs' / 'gamedata.log'
    if not mods:
        print('No gamedata merging necessary.')
        if glog_path.exists():
            glog_path.unlink()
        if (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').exists():
            (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').unlink()
        return
    if glog_path.exists() and not force:
        with glog_path.open('r') as l_file:
            if xxhash.xxh32(str(mods)).hexdigest() == l_file.read():
                print('No gamedata merging necessary.')
                return

    modded_entries = {}
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    print('Loading gamedata mods...')
    for mod in mods:
        with (mod.path / 'logs' / 'gamedata.yml').open('r') as g_file:
            yml = yaml.load(g_file, Loader=loader)
            for data_type in yml:
                if data_type not in modded_entries:
                    modded_entries[data_type] = {}
                modded_entries[data_type].update(yml[data_type])
                if verbose:
                    print(f'  Added entries for {data_type} from {mod.name}')

    gamedata = get_stock_gamedata()
    merged_entries = {}

    print('Loading stock gamedata...')
    for yml in gamedata.list_files():
        base_yml = byml.Byml(gamedata.get_file_data(yml).tobytes()).parse()
        for data_type in base_yml:
            if data_type not in merged_entries:
                merged_entries[data_type] = []
            merged_entries[data_type].extend(base_yml[data_type])

    print('Merging changes...')
    for data_type in merged_entries:
        if data_type in modded_entries:
            for entry in [entry for entry in merged_entries[data_type]
                          if entry['DataName'] in modded_entries[data_type]]:
                i = merged_entries[data_type].index(entry)
                if verbose:
                    print(f'  {entry["DataName"]} has been modified')
                merged_entries[data_type][i] = deepcopy(
                    modded_entries[data_type][entry['DataName']])
            print(f'Merged modified {data_type} entries')

    for data_type in modded_entries:
        existing_names = [entry['DataName'] for entry in merged_entries[data_type]]
        for entry in [entry for entry in modded_entries[data_type]
                      if entry not in existing_names]:
            if verbose:
                print(f'  {entry} has been added')
            merged_entries[data_type].append(modded_entries[data_type][entry])
        print(f'Merged new {data_type} entries')

    print('Creating and injecting new gamedata.sarc...')
    new_gamedata = sarc.SARCWriter(True)
    for data_type in merged_entries:
        num_files = ceil(len(merged_entries[data_type]) / 4096)
        for i in range(num_files):
            end_pos = min((i + 1) * 4096, len(merged_entries[data_type]))
            buf = BytesIO()
            byml.Writer(
                {data_type: merged_entries[data_type][i*4096:end_pos]}, be=True).write(buf)
            new_gamedata.add_file(f'/{data_type}_{i}.bgdata', buf.getvalue())
    bootup_rstb = inject_gamedata_into_bootup(new_gamedata)
    (util.get_master_modpack_dir() / 'logs').mkdir(parents=True, exist_ok=True)
    with (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').open('wb') as g_file:
        new_gamedata.write(g_file)

    print('Updating RSTB...')
    rstable.set_size('GameData/gamedata.sarc', bootup_rstb)

    glog_path.parent.mkdir(parents=True, exist_ok=True)
    with glog_path.open('w', encoding='utf-8') as l_file:
        l_file.write(xxhash.xxh32(str(mods)).hexdigest())
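The equivalent call for the gamedata merger above:

merge_gamedata(verbose=True, force=True)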
Example #7
def store_added_texts(new_texts: dict) -> sarc.SARCWriter:
    """ Creates a SARC to store mod-original MSBTs """
    text_sarc = sarc.SARCWriter(True)
    for msbt, msbt_data in new_texts.items():
        text_sarc.add_file(msbt, msbt_data)
    return text_sarc
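A short usage sketch for the function above, writing the resulting SARC to disk (the MSBT name and source file are illustrative):

with open('MyQuest.msbt', 'rb') as f:
    added = {'EventFlowMsg/MyQuest.msbt': f.read()}
text_sarc = store_added_texts(added)
with open('added_texts.sarc', 'wb') as out:
    text_sarc.write(out)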
Example #8
def create_bnp_mod(mod: Path, output: Path, options: dict = None):
    """[summary]
    
    :param mod: [description]
    :type mod: Path
    :param output: [description]
    :type output: Path
    :param options: [description], defaults to {}
    :type options: dict, optional
    """
    if isinstance(mod, str):
        mod = Path(mod)
    if mod.is_file():
        print('Extracting mod...')
        tmp_dir: Path = open_mod(mod)
    elif mod.is_dir():
        print(f'Loading mod from {str(mod)}...')
        tmp_dir: Path = util.get_work_dir() / \
            f'tmp_{xxhash.xxh32(str(mod)).hexdigest()}'
        shutil.copytree(str(mod), str(tmp_dir))
    else:
        print(f'Error: {str(mod)} is neither a valid file nor a directory')
        return

    print('Packing loose files...')
    pack_folders = sorted(
        {
            d
            for d in tmp_dir.rglob('**/*')
            if d.is_dir() and d.suffix in util.SARC_EXTS
        },
        key=lambda d: len(d.parts),
        reverse=True)
    for folder in pack_folders:
        new_tmp: Path = folder.with_suffix(folder.suffix + '.tmp')
        shutil.move(folder, new_tmp)
        new_sarc = sarc.SARCWriter(be=True)
        for file in {f for f in new_tmp.rglob('**/*') if f.is_file()}:
            new_sarc.add_file(
                file.relative_to(new_tmp).as_posix(), file.read_bytes())
        sarc_bytes = new_sarc.get_bytes()
        if folder.suffix.startswith('.s') and folder.suffix != '.sarc':
            sarc_bytes = util.compress(sarc_bytes)
        folder.write_bytes(sarc_bytes)
        shutil.rmtree(new_tmp)

    if not options:
        options = {}
    options['texts'] = {'user_only': False}
    pool = Pool(cpu_count())
    logged_files = generate_logs(tmp_dir, options=options, original_pool=pool)

    print('Removing unnecessary files...')
    if (tmp_dir / 'logs' / 'map.yml').exists():
        print('Removing map units...')
        for file in [file for file in logged_files if isinstance(file, Path) and \
                           fnmatch(file.name, '[A-Z]-[0-9]_*.smubin')]:
            file.unlink()
    if any((tmp_dir / 'logs').glob('*texts*')):
        print('Removing language bootup packs...')
        for bootup_lang in (tmp_dir / 'content' /
                            'Pack').glob('Bootup_*.pack'):
            bootup_lang.unlink()
    if (tmp_dir / 'logs' / 'actorinfo.yml').exists() and \
       (tmp_dir / 'content' / 'Actor' / 'ActorInfo.product.sbyml').exists():
        print('Removing ActorInfo.product.sbyml...')
        (tmp_dir / 'content' / 'Actor' / 'ActorInfo.product.sbyml').unlink()
    if (tmp_dir / 'logs' / 'gamedata.yml').exists() or (
            tmp_dir / 'logs' / 'savedata.yml').exists():
        print('Removing gamedata sarcs...')
        with (tmp_dir / 'content' / 'Pack' /
              'Bootup.pack').open('rb') as b_file:
            bsarc = sarc.read_file_and_make_sarc(b_file)
        csarc = sarc.make_writer_from_sarc(bsarc)
        bsarc_files = list(bsarc.list_files())
        if 'GameData/gamedata.ssarc' in bsarc_files:
            csarc.delete_file('GameData/gamedata.ssarc')
        if 'GameData/savedataformat.ssarc' in bsarc_files:
            csarc.delete_file('GameData/savedataformat.ssarc')
        with (tmp_dir / 'content' / 'Pack' /
              'Bootup.pack').open('wb') as b_file:
            csarc.write(b_file)

    hashes = util.get_hash_table()
    print('Creating partial packs...')
    sarc_files = {
        file
        for file in tmp_dir.rglob('**/*') if file.suffix in util.SARC_EXTS
    }
    if sarc_files:
        pool.map(partial(_clean_sarc, hashes=hashes, tmp_dir=tmp_dir),
                 sarc_files)
        pool.close()
        pool.join()

        sarc_files = {
            file
            for file in tmp_dir.rglob('**/*') if file.suffix in util.SARC_EXTS
        }
        if sarc_files:
            with (tmp_dir / 'logs' / 'packs.log').open(
                    'w', encoding='utf-8') as p_file:
                final_packs = [
                    file for file in list(tmp_dir.rglob('**/*'))
                    if file.suffix in util.SARC_EXTS
                ]
                if final_packs:
                    p_file.write('name,path\n')
                    for file in final_packs:
                        p_file.write(
                            f'{util.get_canon_name(file.relative_to(tmp_dir))},'
                            f'{file.relative_to(tmp_dir)}\n')
    else:
        if (tmp_dir / 'logs' / 'packs.log').exists():
            (tmp_dir / 'logs' / 'packs.log').unlink()

    print('Cleaning any junk files...')
    for file in tmp_dir.rglob('**/*'):
        if file.parent.stem == 'logs':
            continue
        if file.suffix in ['.yml', '.bak', '.tmp', '.old']:
            file.unlink()

    print('Removing blank folders...')
    for folder in reversed(list(tmp_dir.rglob('**/*'))):
        if folder.is_dir() and not list(folder.glob('*')):
            shutil.rmtree(folder)

    print(f'Saving output file to {str(output)}...')
    x_args = [
        str(util.get_exec_dir() / 'helpers' / '7z.exe'), 'a',
        str(output), f'{str(tmp_dir / "*")}'
    ]
    subprocess.run(x_args,
                   stdout=subprocess.PIPE,
                   stderr=subprocess.PIPE,
                   creationflags=util.CREATE_NO_WINDOW)
    print('Conversion complete.')
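A hypothetical invocation of the function above (placeholder paths; assumes a mod folder laid out with a content directory, as BCML expects):

from pathlib import Path

create_bnp_mod(mod=Path('mods/MyTestMod'), output=Path('mods/MyTestMod.bnp'))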