Example #1
def get_dungeonstatic_diff(file: Path) -> dict:
    """Returns the changes made to the Static.smubin containing shrine entrance coordinates

    :param file: The Static.smubin file to diff
    :type file: class:`pathlib.Path`
    :return: Returns a dict of shrines and their updated entrance coordinates
    :rtype: dict of str: dict
    """
    base_file = util.get_game_file('aoc/0010/Map/CDungeon/Static.smubin',
                                   aoc=True)
    base_pos = byml.Byml(util.decompress_file(
        str(base_file))).parse()['StartPos']

    mod_pos = byml.Byml(util.decompress_file(str(file))).parse()['StartPos']

    base_dungeons = [dungeon['Map'] for dungeon in base_pos]
    diffs = {}
    for dungeon in mod_pos:
        if dungeon['Map'] not in base_dungeons:
            diffs[dungeon['Map']] = dungeon
        else:
            base_dungeon = base_pos[base_dungeons.index(dungeon['Map'])]
            if dungeon['Rotate'] != base_dungeon['Rotate']:
                diffs[dungeon['Map']] = {'Rotate': dungeon['Rotate']}
            if dungeon['Translate'] != base_dungeon['Translate']:
                if dungeon['Map'] not in diffs:
                    diffs[dungeon['Map']] = {}
                diffs[dungeon['Map']]['Translate'] = dungeon['Translate']

    return diffs
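
A minimal usage sketch (the mod path below is hypothetical); the function expects a mod's compressed Static.smubin and returns only the changed or added shrine entrances:

from pathlib import Path

# Hypothetical path to a mod's shrine entrance map; decompression happens inside the function.
mod_static = Path('mods/ExampleMod/aoc/0010/Map/CDungeon/Static.smubin')
for shrine, coords in get_dungeonstatic_diff(mod_static).items():
    print(shrine, coords)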
Example #2
    def process_map_unit_unit(unit_dir: Path):
        unit_name = unit_dir.stem
        static_p = unit_dir / f"{unit_name}_Static.mubin"
        dynamic_p = unit_dir / f"{unit_name}_Dynamic.mubin"
        if not static_p.is_file() or not dynamic_p.is_file():
            return

        with static_p.open("rb") as static_f, dynamic_p.open(
                "rb") as dynamic_f:
            static_d = byml.Byml(static_f.read()).parse()
            dynamic_d = byml.Byml(dynamic_f.read()).parse()
            assert isinstance(static_d, dict) and isinstance(dynamic_d, dict)

        add_is_static_to_entries(static_d, is_static=True)
        add_is_static_to_entries(dynamic_d, is_static=False)

        objs = sorted(static_d["Objs"] + dynamic_d["Objs"],
                      key=lambda obj: obj["HashId"])
        rails = sorted(static_d["Rails"] + dynamic_d["Rails"],
                       key=lambda rail: rail["HashId"])
        merged_map_unit: dict = dict()
        for prop in ["LocationPosX", "LocationPosZ", "LocationSize"]:
            if prop in static_d:
                merged_map_unit[prop] = static_d[prop]
        merged_map_unit["Objs"] = objs
        merged_map_unit["Rails"] = rails

        merged_p = unit_dir / f"{unit_name}.muunt.yml"
        with merged_p.open("w") as f:
            dump_byml_data(merged_map_unit, f)

        static_p.unlink()
        dynamic_p.unlink()
Example #3
def process_actorinfo(dest_dir: Path, platform: str,
                      other_platform_actorinfo_path: typing.Optional[Path]):
    # Create the new ActorMeta / DevActorMeta directories (which are extensions).
    dest_devactormeta_dir = dest_dir / "Actor" / "DevActorMeta"
    dest_devactormeta_dir.mkdir(exist_ok=True)
    dest_actormeta_dir = dest_dir / "Actor" / "ActorMeta"
    dest_actormeta_dir.mkdir(exist_ok=True)

    actorinfo_byml = dest_dir / "Actor" / "ActorInfo.product.byml"
    with actorinfo_byml.open("rb") as f:
        actorinfo = byml.Byml(f.read()).parse()
        assert isinstance(actorinfo, dict)

    other_platform = "cafe" if platform == "nx" else "nx"
    other_actorinfo: typing.Optional[dict] = None
    if other_platform_actorinfo_path:
        # A temporary variable is needed to avoid a mypy error...
        tmp = byml.Byml(
            wszst_yaz0.decompress_file(
                str(other_platform_actorinfo_path))).parse()
        assert isinstance(tmp, dict)
        other_actorinfo = tmp

    def fill_in_inst_size(i: int, entry: dict) -> None:
        entry["instSizeCafe"] = -1
        entry["instSizeNx"] = -1
        entry[f"instSize{platform.capitalize()}"] = actor["instSize"]
        if other_actorinfo:
            other_actor = other_actorinfo["Actors"][i]
            assert other_actor["name"] == actor["name"]
            entry[f"instSize{other_platform.capitalize()}"] = other_actor[
                "instSize"]
        entry.pop("instSize", None)

    for i, actor in enumerate(actorinfo["Actors"]):
        is_dev_actor = not (dest_dir / "Actor" / "ActorLink" /
                            f"{actor['name']}.yml").is_file()
        actor_meta: typing.Dict[str, typing.Any] = dict()
        if is_dev_actor:
            actor_meta_path = dest_devactormeta_dir / f"{actor['name']}.yml"
            actor_meta = actor.copy()
            fill_in_inst_size(i, actor_meta)
        else:
            actor_meta_path = dest_actormeta_dir / f"{actor['name']}.yml"
            fill_in_inst_size(i, actor_meta)
            for key in _ACTOR_META_KEYS:
                if key in actor:
                    actor_meta[key] = actor[key]

        with actor_meta_path.open("w") as f:
            dump_byml_data(actor_meta, f, default_flow_style=False)

    actorinfo_byml.unlink()
Example #4
def merge_dungeonstatic(diffs: dict = None):
    """Merges all changes to the CDungeon Static.smubin"""
    if not diffs:
        diffs = {}
        loader = yaml.CSafeLoader
        yaml_util.add_constructors(loader)
        for mod in [mod for mod in util.get_installed_mods()
                    if (mod.path / 'logs' / 'dstatic.yml').exists()]:
            diffs.update(
                yaml.load((mod.path / 'logs' / 'dstatic.yml').read_bytes(),
                          Loader=loader))

    if not diffs:
        return

    new_static = byml.Byml(
        util.decompress_file(
            str(util.get_game_file(
                'aoc/0010/Map/CDungeon/Static.smubin')))).parse()

    base_dungeons = [dungeon['Map'] for dungeon in new_static['StartPos']]
    for dungeon, diff in diffs.items():
        if dungeon not in base_dungeons:
            new_static['StartPos'].append(diff)
        else:
            for key, value in diff.items():
                new_static['StartPos'][base_dungeons.index(
                    dungeon)][key] = value

    output_static = util.get_master_modpack_dir() / 'aoc' / '0010' / 'Map' / \
        'CDungeon' / 'Static.smubin'
    output_static.parent.mkdir(parents=True, exist_ok=True)
    output_static.write_bytes(
        util.compress(byml.Writer(new_static, True).get_bytes()))
Example #5
def process_gamedata(dest_dir: Path):
    gamedata_dir = dest_dir / "GameData"
    (gamedata_dir / "ShopGameDataInfo.yml").unlink()
    shutil.rmtree(gamedata_dir / "savedataformat.sarc")

    gamedata_arc_dir = gamedata_dir / "gamedata.sarc"

    flags: typing.DefaultDict[str, typing.List[typing.Any]] = defaultdict(list)
    flag_types: typing.Dict[str, str] = dict()
    for bgdata_path in sorted(gamedata_arc_dir.glob("*.bgdata")):
        series = bgdata_path.stem[:-2]
        with bgdata_path.open("rb") as f:
            gdata = byml.Byml(f.read()).parse()
        assert isinstance(gdata, dict)
        assert len(gdata) == 1
        flag_type = list(gdata.keys())[0]
        flags[series] += gdata[flag_type]
        if series in flag_types:
            assert flag_types[series] == flag_type
        flag_types[series] = flag_type

    for series, merged_flags in flags.items():
        merged_gdata: dict = dict()
        merged_gdata[flag_types[series]] = merged_flags
        dest_path = dest_dir / "GameData" / "Flag" / f"{series}.yml"
        dest_path.parent.mkdir(exist_ok=True, parents=True)
        with dest_path.open("w") as f:
            dump_byml_data(merged_gdata, f)

    shutil.rmtree(gamedata_arc_dir)
Example #6
    def generate_diff(self, mod_dir: Path, modded_files: List[Union[Path, str]]):
        try:
            actor_file = next(iter(
                [file for file in modded_files
                 if Path(file).name == 'ActorInfo.product.sbyml']))
        except StopIteration:
            return {}
        actor_info = byml.Byml(util.decompress_file(str(actor_file))).parse()
        print('Detecting modified actor info entries...')
        return get_modded_actors(actor_info)
def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument("gamedata_dir",
                        help="Path to the gamedata.sarc/ directory")
    parser.add_argument("--by-data-type",
                        help="Group flags by data type",
                        action="store_true")
    args = parser.parse_args()
    DIR = Path(args.gamedata_dir)

    byml.yaml_util.add_representers(yaml.CSafeDumper)
    yaml.add_representer(dict, represent_dict, Dumper=yaml.CSafeDumper)
    yaml.add_representer(defaultdict,
                         represent_dict_sort,
                         Dumper=yaml.CSafeDumper)

    all_flags: typing.Dict[typing.Union[int, str], list] = defaultdict(list)
    DATATYPES = ("bool", "s32", "f32", "string", "string64", "string256",
                 "vector2f", "vector3f", "vector4", "bool_array", "s32_array",
                 "f32_array", "string32_array", "string64_array",
                 "string256_array", "vector2f_array", "vector3f_array",
                 "vector4_array")

    for bgdata_path in DIR.glob("*.bgdata"):
        bgdata = byml.Byml(bgdata_path.open("rb").read()).parse()

        for datatype in DATATYPES:
            key = datatype + "_data"
            if key not in bgdata:
                continue
            flags = bgdata[key]
            for flag in flags:
                assert flag["DeleteRev"] == -1
                reset_type = flag["ResetType"]
                perms = ["-", "-"]
                if flag["IsProgramReadable"]:
                    perms[0] = "r"
                if flag["IsProgramWritable"]:
                    perms[1] = "w"
                k = datatype if args.by_data_type else reset_type
                all_flags[k].append({
                    "name": flag["DataName"],
                    "t": datatype,
                    "init": flag["InitValue"],
                    "min": flag["MinValue"],
                    "max": flag["MaxValue"],
                    "perms": "".join(perms),
                    "event": flag["IsEventAssociated"],
                    "save": flag["IsSave"],
                    "hash": flag["HashValue"] & 0xffffffff,
                    "reset_type": reset_type,
                })

    for x in all_flags.values():
        x.sort(key=itemgetter("name"))
    yaml.dump(all_flags, sys.stdout, Dumper=yaml.CSafeDumper)
Example #8
def get_stock_eventinfo() -> dict:
    """ Gets the contents of the stock `EventInfo.product.sbyml` """
    if not hasattr(get_stock_eventinfo, 'event_info'):
        get_stock_eventinfo.event_info = byml.Byml(
            util.get_nested_file_bytes(
                str(util.get_game_file('Pack/Bootup.pack')) + '//Event/EventInfo.product.sbyml',
                unyaz=True
            )
        ).parse()
    return deepcopy(get_stock_eventinfo.event_info)
def main() -> None:
    area_data = byml.Byml(wszst_yaz0.decompress_file(sys.argv[1])).parse()
    assert isinstance(area_data, list)

    t = Texttable(max_width=130)
    t.set_deco(Texttable.BORDER | Texttable.HEADER | Texttable.VLINES)
    t.header(['Idx', 'Area', 'Climate', 'Climate idx'])
    for area in area_data:
        t.add_row([area['AreaNumber'], area['Area'], area['Climate'], climate_ids.index(area['Climate'])])
    print(t.draw())
Example #10
    def _init_ai_defs(self) -> None:
        if self._ai_defs or not _rom_path:
            return

        raw_data = oead.yaz0.decompress(
            (_rom_path /
             'Pack/Bootup.pack/Actor/AIDef/AIDef_Game.product.sbyml'
             ).read_bytes())
        defs = byml.Byml(raw_data).parse()
        if isinstance(defs, dict):
            self._ai_defs = defs
Example #11
def get_modded_savedata_entries(savedata: sarc.SARC) -> list:
    """
    Gets all of the modified savedata entries from a mod's savedataformat SARC.

    :param savedata: The savedataformat.sarc to search for modded entries.
    :type savedata: class:`sarc.SARC`
    :return: Returns a list of modified savedata entries.
    :rtype: list
    """
    ref_savedata = get_stock_savedata()
    ref_hashes = []
    new_entries = []
    for file in sorted(ref_savedata.list_files())[0:-2]:
        for item in byml.Byml(ref_savedata.get_file_data(file).tobytes()).parse()['file_list'][1]:
            ref_hashes.append(item['HashValue'])
    for file in sorted(savedata.list_files())[0:-2]:
        for item in byml.Byml(savedata.get_file_data(file).tobytes()).parse()['file_list'][1]:
            if item['HashValue'] not in ref_hashes:
                new_entries.append(item)
    return new_entries
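
A hedged usage sketch; the archive path is illustrative, and sarc.read_file_and_make_sarc is used the same way as in the other examples here:

import sarc

# Illustrative path to a mod's merged savedataformat.sarc
with open('mods/ExampleMod/logs/savedata.sarc', 'rb') as s_file:
    mod_savedata = sarc.read_file_and_make_sarc(s_file)
print(len(get_modded_savedata_entries(mod_savedata)), 'new savedata entries')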
Example #12
    def generate_diff(self, mod_dir: Path, modded_files: List[Union[Path, str]]):
        if 'content/Pack/Bootup.pack//Event/EventInfo.product.sbyml' in modded_files:
            with (mod_dir / 'content' / 'Pack' / 'Bootup.pack').open('rb') as bootup_file:
                bootup_sarc = sarc.read_file_and_make_sarc(bootup_file)
            event_info = byml.Byml(
                util.decompress(
                    bootup_sarc.get_file_data('Event/EventInfo.product.sbyml').tobytes()
                )
            ).parse()
            return get_modded_events(event_info)
        else:
            return {}
Example #13
def process_questproduct(dest_dir: Path):
    byml_path = dest_dir / "Quest" / "QuestProduct.bquestpack"
    with byml_path.open("rb") as f:
        questinfo = byml.Byml(f.read()).parse()
        assert isinstance(questinfo, list)

    for quest in questinfo:
        dest_path = dest_dir / "Quest" / f"{quest['Name']}.quest.yml"
        dest_path.parent.mkdir(exist_ok=True, parents=True)
        with dest_path.open("w") as f:
            dump_byml_data(quest, f, default_flow_style=False)

    byml_path.unlink()
Example #14
def load_stage():
    tmp_objs = []
    stage_file_szs = askopenfilename(filetypes=(("SZS File", "*.szs"),
                                                ("All Files", "*.*")),
                                     title="Select a SMO stage SZS")
    done = SARCExtract.extract_szs(stage_file_szs)
    stage_file = os.path.join(stage_file_szs[:-4],
                              ntpath.basename(stage_file_szs)[:-3] + "byml")
    scenario = 0
    root = byml.Byml(open(stage_file, "rb").read()).parse()
    a = root[scenario]
    for b in a['ObjectList']:
        obj_name = ''
        stage_name = ''
        res_path = ''
        unit_cfg_name = ''
        obj_path = ''  # ensure obj_path is defined even when no ModelName is present
        obj_data = [[0, 0, 0], [0, 0, 0], [0, 0, 0]]
        for c in b:
            if c == 'ModelName':
                obj_name = str(b['ModelName'])
            elif c == 'PlacementFilename':
                stage_name = str(b['PlacementFilename'])
            elif c == 'ResourceCategory':
                res_path = str(b['ResourceCategory'])
            elif c == 'UnitConfigName':
                unit_cfg_name = str(b['UnitConfigName'])
            elif c == 'Translate':
                obj_data[0][0] = float(
                    str(b['Translate']['X']).replace(',', '.'))
                obj_data[0][1] = float(
                    str(b['Translate']['Y']).replace(',', '.'))
                obj_data[0][2] = float(
                    str(b['Translate']['Z']).replace(',', '.'))
            elif c == 'Rotate':
                obj_data[1][0] = float(str(b['Rotate']['X']).replace(',', '.'))
                obj_data[1][1] = float(str(b['Rotate']['Y']).replace(',', '.'))
                obj_data[1][2] = float(str(b['Rotate']['Z']).replace(',', '.'))
            elif c == 'Scale':
                obj_data[2][0] = float(str(b['Scale']['X']).replace(',', '.'))
                obj_data[2][1] = float(str(b['Scale']['Y']).replace(',', '.'))
                obj_data[2][2] = float(str(b['Scale']['Z']).replace(',', '.'))
        if obj_name:
            obj_path = get_model_obj(obj_name)
        if obj_path == "" or not os.path.isfile(obj_path):
            obj_path = get_model_obj(unit_cfg_name)
        print(obj_name)
        print(unit_cfg_name)
        if os.path.isfile(obj_path):
            tmp_objs.append(OBJ_test(obj_path, obj_data))
    print('Done!')
    return tmp_objs
Example #15
def byml_to_yml_dir(tmp_dir: Path, ext: str = '.byml'):
    """ Converts BYML files in given temp dir to YAML """
    dumper = yaml.CDumper
    yaml_util.add_representers(dumper)
    for data in tmp_dir.rglob(f'**/*{ext}'):
        yml_data = byml.Byml(data.read_bytes())
        with (data.with_name(data.stem + '.yml')).open(
                'w', encoding='utf-8') as y_file:
            yaml.dump(yml_data.parse(),
                      y_file,
                      Dumper=dumper,
                      allow_unicode=True,
                      encoding='utf-8')
        data.unlink()
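
For example, converting every .mubin under an extracted mod directory in place (the directory name is hypothetical) would be:

from pathlib import Path

# Hypothetical temp directory; each *.mubin becomes a *.yml next to it and the original is deleted.
byml_to_yml_dir(Path('tmp/extracted_mod'), ext='.mubin')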
Example #16
    def _load_gamedata_flags(self) -> None:
        print('loading GameData flags')
        self.gamedata_bgdata: typing.Dict[str, dict] = dict()
        self.gamedata_flags: typing.DefaultDict[str, list] = defaultdict(list)
        for bgdata_path in self.gamedata_dir.glob('*.bgdata'):
            with bgdata_path.open('rb') as f:
                bgdata = byml.Byml(f.read()).parse()
                assert isinstance(bgdata, dict)
                self.gamedata_bgdata[bgdata_path.name] = bgdata
            for data_type_key, flags in bgdata.items():
                data_type = data_type_key[:-5]
                self.gamedata_flags[data_type] += flags
        if not self.gamedata_flags:
            raise Exception(f'No bgdata was found in {self.gamedata_dir}')
Example #17
def process_eventinfo(dest_dir: Path):
    byml_path = dest_dir / "Event" / "EventInfo.product.byml"
    with byml_path.open("rb") as f:
        eventinfo = byml.Byml(f.read()).parse()
        assert isinstance(eventinfo, dict)

    for merged_event_name, event in eventinfo.items():
        event_name, entry_name = merged_event_name.split("<")
        entry_name = entry_name[:-1]

        dest_path = dest_dir / "Event" / "EventMeta" / event_name / f"{entry_name}.yml"
        dest_path.parent.mkdir(exist_ok=True, parents=True)
        with dest_path.open("w") as f:
            dump_byml_data(event, f, default_flow_style=False)

    byml_path.unlink()
def main() -> None:
    parser = argparse.ArgumentParser(
        description='Shows actors that are not spawned when in final boss mode.')
    parser.add_argument(
        'map_path',
        help='Path to a map unit (BYAML or compressed BYAML or YAML)')
    args = parser.parse_args()
    MAP_PATH: str = args.map_path

    byml.yaml_util.add_constructors(yaml.CSafeLoader)
    with open(MAP_PATH, 'rb') as f:
        if MAP_PATH.endswith('mubin'):
            map_data = byml.Byml(wszst_yaz0.decompress(f.read())).parse()
        else:
            map_data = yaml.load(f, Loader=yaml.CSafeLoader)

    pmap = Map(map_data)
    pmap.parse_obj_links()

    skip_reasons: typing.Dict[int, str] = dict()
    skipped_objs: typing.Set[int] = set()
    for obj in pmap.objs.values():
        objid = obj['HashId']
        if objid in skipped_objs or should_spawn_obj(obj):
            continue
        skipped_objs.add(objid)
        gen_group = pmap.build_gen_group(obj)
        for linked_obj in gen_group:
            skipped_objs.add(linked_obj['HashId'])
            skip_reasons[linked_obj[
                'HashId']] = f'linked to skipped object: 0x{obj["HashId"]:08x} {obj["UnitConfigName"]}'
        skip_reasons[objid] = 'skipped'

    for objid in skipped_objs:
        obj = pmap.get_obj(objid)
        print(
            f"[0x{obj['HashId']:08x}] {obj['UnitConfigName']} {tuple(obj['Translate'])}"
        )
        if '!Parameters' in obj:
            pprint.pprint(obj['!Parameters'], indent=2)
        print(f"| SKIP REASON: {skip_reasons[objid]}")
        for link in obj['__links']:
            print(f"| LINKS TO: {link.description()}")
        for link in obj['__links_to_self']:
            print(f"| LINKED BY: {link.description()}")
        print('-' * 70)
Example #19
def get_modded_map(map_unit: Union[Map, tuple], tmp_dir: Path) -> dict:
    """
    Finds the most significant available map unit in a mod for a given section and type
    and returns its contents as a dict. Checks `AocMainField.pack` first, then the unpacked
    aoc map files, and then the base game map files.

    :param map_unit: The map unit section and type.
    :type map_unit: class:`bcml.mubin.Map`
    :param tmp_dir: The path to the base directory of the mod.
    :type tmp_dir: class:`pathlib.Path`
    :return: Returns a dict representation of the requested map unit.
    :rtype: dict
    """
    if isinstance(map_unit, tuple):
        map_unit = Map(*map_unit)
    map_bytes = None
    aoc_dir = tmp_dir / 'aoc' / '0010' / 'content'
    if not aoc_dir.exists():
        aoc_dir = tmp_dir / 'aoc' / 'content' / '0010'
        if not aoc_dir.exists():
            aoc_dir = tmp_dir / 'aoc' / '0010'
    if (aoc_dir / 'Pack' / 'AocMainField.pack').exists():
        with (aoc_dir / 'Pack' / 'AocMainField.pack').open('rb') as s_file:
            map_pack = sarc.read_file_and_make_sarc(s_file)
        if map_pack:
            try:
                map_bytes = map_pack.get_file_data(
                    f'Map/MainField/{map_unit.section}/{map_unit.section}_{map_unit.type}.smubin'
                ).tobytes()
            except KeyError:
                pass
    if not map_bytes:
        aoc_map = (aoc_dir / 'Map' / 'MainField' / map_unit.section /
                   f'{map_unit.section}_{map_unit.type}.smubin')
        base_map = (tmp_dir / 'content' / 'Map' / 'MainField' / map_unit.section /
                    f'{map_unit.section}_{map_unit.type}.smubin')
        if aoc_map.exists():
            map_bytes = aoc_map.read_bytes()
        elif base_map.exists():
            map_bytes = base_map.read_bytes()
    if not map_bytes:
        raise FileNotFoundError(
            f'Oddly, the modded map {map_unit.section}_{map_unit.type}.smubin '
            'could not be found.')
    map_bytes = util.decompress(map_bytes)
    return byml.Byml(map_bytes).parse()
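
A hedged usage sketch for get_modded_map, assuming Map is the (section, type) pair described in the docstring; the section name and mod directory are illustrative:

from pathlib import Path

# Illustrative call: fetch the modded Static map unit for grid section E-4.
map_data = get_modded_map(('E-4', 'Static'), Path('tmp/extracted_mod'))
print(len(map_data['Objs']), 'objects in the modded map unit')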
def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument("gamedata_dir", help="Path to the gamedata.sarc/ directory")
    args = parser.parse_args()
    DIR = Path(args.gamedata_dir)

    byml.yaml_util.add_representers(yaml.CSafeDumper)
    yaml.add_representer(dict, represent_dict, Dumper=yaml.CSafeDumper)
    yaml.add_representer(defaultdict, represent_dict_sort, Dumper=yaml.CSafeDumper)

    result: typing.List[dict] = []

    for bgdata_path in DIR.glob("*.bgdata"):
        bgdata = byml.Byml(bgdata_path.open("rb").read()).parse()

        flags = bgdata.get("bool_data", None)
        if not flags:
            continue
        for flag in flags:
            reset_data = flag["InitValue"] >> 1
            if reset_data == 0:
                continue
            assert flag["ResetType"] == 0
            assert flag["DeleteRev"] == -1
            assert flag["IsProgramReadable"] and flag["IsProgramWritable"]
            assert flag["IsSave"]
            assert flag["MinValue"] is False and flag["MaxValue"] is True
            assert flag["InitValue"] & 1 == 0

            row = (reset_data - 1) & 0b111
            col = (reset_data - 1) >> 3

            result.append({
                "name": flag["DataName"],
                "hash": flag["HashValue"] & 0xffffffff,
                "col": col,
                "row": row,
            })

    result.sort(key=itemgetter("name"))
    yaml.dump(result, sys.stdout, Dumper=yaml.CSafeDumper)
def main() -> None:
    parser = argparse.ArgumentParser(
        description='Shows the placement generation group for a map object.')
    parser.add_argument(
        'map_path',
        help='Path to a map unit (BYAML or compressed BYAML or YAML)')
    parser.add_argument('object_id',
                        type=lambda x: int(x, 0),
                        help='Map object ID (HashId)')
    args = parser.parse_args()
    MAP_PATH: str = args.map_path
    MAP_OBJID: int = args.object_id

    byml.yaml_util.add_constructors(yaml.CSafeLoader)
    with open(MAP_PATH, 'rb') as f:
        if MAP_PATH.endswith('mubin'):
            map_data = byml.Byml(wszst_yaz0.decompress(f.read())).parse()
        else:
            map_data = yaml.load(f, Loader=yaml.CSafeLoader)

    pmap = Map(map_data)
    pmap.parse_obj_links()

    gen_group = pmap.build_gen_group(pmap.get_obj(MAP_OBJID))

    for obj in gen_group:
        print(
            f"[0x{obj['HashId']:08x} ({obj['HashId']})] {obj['UnitConfigName']} {tuple(obj['Translate'])}"
        )
        if '!Parameters' in obj:
            pprint.pprint(obj['!Parameters'], indent=2)
        for link in obj['__links']:
            print(f"| LINKS TO: {link.description()}")
        for link in obj['__links_to_self']:
            print(f"| LINKED BY: {link.description()}")
        print('-' * 70)
Example #22
def merge_gamedata(verbose: bool = False, force: bool = False):
    """ Merges installed gamedata mods and saves the new Bootup.pack, fixing the RSTB if needed """
    mods = get_gamedata_mods()
    glog_path = util.get_master_modpack_dir() / 'logs' / 'gamedata.log'
    if not mods:
        print('No gamedata merging necessary.')
        if glog_path.exists():
            glog_path.unlink()
        if (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').exists():
            (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').unlink()
        return
    if glog_path.exists() and not force:
        with glog_path.open('r') as l_file:
            if xxhash.xxh32(str(mods)).hexdigest() == l_file.read():
                print('No gamedata merging necessary.')
                return

    modded_entries = {}
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    print('Loading gamedata mods...')
    for mod in mods:
        with (mod.path / 'logs' / 'gamedata.yml').open('r') as g_file:
            yml = yaml.load(g_file, Loader=loader)
            for data_type in yml:
                if data_type not in modded_entries:
                    modded_entries[data_type] = {}
                modded_entries[data_type].update(yml[data_type])
                if verbose:
                    print(f'  Added entries for {data_type} from {mod.name}')

    gamedata = get_stock_gamedata()
    merged_entries = {}

    print('Loading stock gamedata...')
    for yml in gamedata.list_files():
        base_yml = byml.Byml(gamedata.get_file_data(yml).tobytes()).parse()
        for data_type in base_yml:
            if data_type not in merged_entries:
                merged_entries[data_type] = []
            merged_entries[data_type].extend(base_yml[data_type])

    print('Merging changes...')
    for data_type in merged_entries:
        if data_type in modded_entries:
            for entry in [entry for entry in merged_entries[data_type]
                          if entry['DataName'] in modded_entries[data_type]]:
                i = merged_entries[data_type].index(entry)
                if verbose:
                    print(f'  {entry["DataName"]} has been modified')
                merged_entries[data_type][i] = deepcopy(
                    modded_entries[data_type][entry['DataName']])
            print(f'Merged modified {data_type} entries')

    for data_type in modded_entries:
        existing_names = [entry['DataName'] for entry in merged_entries[data_type]]
        for entry_name in [name for name in modded_entries[data_type]
                           if name not in existing_names]:
            if verbose:
                print(f'  {entry_name} has been added')
            merged_entries[data_type].append(modded_entries[data_type][entry_name])
        print(f'Merged new {data_type} entries')

    print('Creating and injecting new gamedata.sarc...')
    new_gamedata = sarc.SARCWriter(True)
    for data_type in merged_entries:
        num_files = ceil(len(merged_entries[data_type]) / 4096)
        for i in range(num_files):
            end_pos = (i+1) * 4096
            if end_pos > len(merged_entries[data_type]):
                end_pos = len(merged_entries[data_type])
            buf = BytesIO()
            byml.Writer(
                {data_type: merged_entries[data_type][i*4096:end_pos]}, be=True).write(buf)
            new_gamedata.add_file(f'/{data_type}_{i}.bgdata', buf.getvalue())
    bootup_rstb = inject_gamedata_into_bootup(new_gamedata)
    (util.get_master_modpack_dir() / 'logs').mkdir(parents=True, exist_ok=True)
    with (util.get_master_modpack_dir() / 'logs' / 'gamedata.sarc').open('wb') as g_file:
        new_gamedata.write(g_file)

    print('Updating RSTB...')
    rstable.set_size('GameData/gamedata.sarc', bootup_rstb)

    glog_path.parent.mkdir(parents=True, exist_ok=True)
    with glog_path.open('w', encoding='utf-8') as l_file:
        l_file.write(xxhash.xxh32(str(mods)).hexdigest())
Example #23
import byml
import wszst_yaz0
from collections import defaultdict
import typing
import yaml
import sys

data = byml.Byml(wszst_yaz0.decompress_file(sys.argv[1])).parse()

keys: typing.List[str] = list()
keys_per_mode: typing.DefaultDict[str, typing.List[str]] = defaultdict(list)

assert isinstance(data, dict)
for event in data.values():
    for key in event.keys():
        keys.append(key)
        keys_per_mode[event["mode"]].append(key)

print(
    yaml.dump(
        {
            "keys": sorted(set(keys)),
            "keys_per_mode":
            {k: sorted(set(v))
             for k, v in keys_per_mode.items()},
        },
        default_flow_style=False))
#!/usr/bin/env python3
import pprint
import typing
import yaml

import argparse
import byml
import byml.yaml_util
from pathlib import Path
import wszst_yaz0
import zlib
from _map_utils import Map

actorinfodata = byml.Byml(
    (Path(__file__).parent.parent / 'game_files' /
     'ActorInfo.product.byml').open('rb').read()).parse()


def get_actor_data(name):
    h = zlib.crc32(name.encode())
    hashes = actorinfodata['Hashes']
    a = 0
    b = len(hashes) - 1
    while a <= b:
        m = (a + b) // 2
        if hashes[m] < h:
            a = m + 1
        elif hashes[m] > h:
            b = m - 1
        else:
            return actorinfodata['Actors'][m]
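
The lookup works because ActorInfo.product.byml stores 'Hashes' sorted in ascending order, so the CRC32 of an actor name can be binary-searched to index into 'Actors'. An illustrative call (the actor name is only an example, and None is returned when the hash is absent):

actor = get_actor_data('Enemy_Bokoblin_Junior')
if actor:
    print(actor['name'], actor.get('instSize'))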
Example #25
                        done = subprocess.Popen(command,
                                                stdout=subprocess.PIPE,
                                                shell=True).wait()
                        os.remove(inf)
            print("Done!")


init_thing()
stage_file_szs = askopenfilename(filetypes=(("SZS File", "*.szs"),
                                            ("All Files", "*.*")),
                                 title="Select a SMO stage SZS")
done = SARCExtract.extract_szs(stage_file_szs)
stage_file = os.path.join(stage_file_szs[:-4],
                          ntpath.basename(stage_file_szs)[:-3] + "byml")
scenario = 0
root = byml.Byml(open(stage_file, "rb").read()).parse()
a = root[scenario]
intybinty = 0
obj_group = OBJGroup()
for b in a['ObjectList']:
    obj_name = ''
    stage_name = ''
    res_path = ''
    unit_cfg_name = ''
    obj_data = [[0, 0, 0], [0, 0, 0], [0, 0, 0]]
    for c in b:
        if c == 'ModelName':
            obj_name = str(b['ModelName'])
        elif c == 'PlacementFilename':
            stage_name = str(b['PlacementFilename'])
        elif c == 'ResourceCategory':
Example #26
def dump_byml(data: bytes, stream=None):
    return dump_byml_data(byml.Byml(data).parse(), stream)
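
A quick usage sketch, assuming dump_byml_data accepts a writable text stream as its second argument as in the examples above; the file path is illustrative:

import sys
from pathlib import Path

# Dump an uncompressed .byml straight to stdout.
dump_byml(Path('ActorInfo.product.byml').read_bytes(), sys.stdout)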
Example #27
def get_stock_actorinfo() -> dict:
    """ Gets the unmodded contents of ActorInfo.product.sbyml """
    actorinfo = util.get_game_file('Actor/ActorInfo.product.sbyml')
    with actorinfo.open('rb') as a_file:
        return byml.Byml(util.decompress(a_file.read())).parse()
Example #28
def merge_savedata(verbose: bool = False, force: bool = False):
    """ Merges install savedata mods and saves the new Bootup.pack, fixing the RSTB if needed"""
    mods = get_savedata_mods()
    slog_path = util.get_master_modpack_dir() / 'logs' / 'savedata.log'
    if not mods:
        print('No gamedata merging necessary.')
        if slog_path.exists():
            slog_path.unlink()
        if (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').exists():
            (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').unlink()
        return
    if slog_path.exists() and not force:
        with slog_path.open('r') as l_file:
            if xxhash.xxh32(str(mods)).hexdigest() == l_file.read():
                print('No savedata merging necessary.')
                return

    new_entries = []
    new_hashes = []
    loader = yaml.CSafeLoader
    yaml_util.add_constructors(loader)
    print('Loading savedata mods...')
    for mod in mods:
        with open(mod.path / 'logs' / 'savedata.yml') as s_file:
            yml = yaml.load(s_file, Loader=loader)
            for entry in yml:
                if entry['HashValue'] in new_hashes:
                    continue
                else:
                    new_entries.append(entry)
                    new_hashes.append(entry['HashValue'])
                    if verbose:
                        print(f'  Added {entry["DataName"]} from {mod.name}')

    savedata = get_stock_savedata()
    merged_entries = []
    save_files = sorted(savedata.list_files())[0:-2]

    print('Loading stock savedata...')
    for file in save_files:
        merged_entries.extend(byml.Byml(savedata.get_file_data(
            file).tobytes()).parse()['file_list'][1])

    print('Merging changes...')
    merged_entries.extend(new_entries)
    merged_entries.sort(key=lambda x: x['HashValue'])

    special_bgsv = [
        savedata.get_file_data('/saveformat_6.bgsvdata').tobytes(),
        savedata.get_file_data('/saveformat_7.bgsvdata').tobytes(),
    ]

    print('Creating and injecting new savedataformat.sarc...')
    new_savedata = sarc.SARCWriter(True)
    num_files = ceil(len(merged_entries) / 8192)
    for i in range(num_files):
        end_pos = (i+1) * 8192
        if end_pos > len(merged_entries):
            end_pos = len(merged_entries)
        buf = BytesIO()
        byml.Writer({
            'file_list': [
                {
                    'IsCommon': False,
                    'IsCommonAtSameAccount': False,
                    'IsSaveSecureCode': True,
                    'file_name': 'game_data.sav'
                },
                merged_entries[i*8192:end_pos]
            ],
            'save_info': [
                {
                    'directory_num': byml.Int(8),
                    'is_build_machine': True,
                    'revision': byml.Int(18203)
                }
            ]
        }, True).write(buf)
        new_savedata.add_file(f'/saveformat_{i}.bgsvdata', buf.getvalue())
    new_savedata.add_file(f'/saveformat_{num_files}.bgsvdata', special_bgsv[0])
    new_savedata.add_file(
        f'/saveformat_{num_files + 1}.bgsvdata', special_bgsv[1])
    bootup_rstb = inject_savedata_into_bootup(new_savedata)
    (util.get_master_modpack_dir() / 'logs').mkdir(parents=True, exist_ok=True)
    with (util.get_master_modpack_dir() / 'logs' / 'savedata.sarc').open('wb') as s_file:
        new_savedata.write(s_file)

    print('Updating RSTB...')
    rstable.set_size('GameData/savedataformat.sarc', bootup_rstb)

    slog_path.parent.mkdir(parents=True, exist_ok=True)
    with slog_path.open('w', encoding='utf-8') as l_file:
        l_file.write(xxhash.xxh32(str(mods)).hexdigest())
Example #29
def _bgdata_from_bytes(file: str, game_dict: dict) -> dict:
    return byml.Byml(game_dict[file]).parse()
def test_to_bin_byml(benchmark, file):
    benchmark.group = "to bin: " + file
    x = byml.Byml(data[file]).parse()
    benchmark(byml_to_bin, x)
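
byml_to_bin itself is not shown in this example; a minimal sketch, assuming it simply re-serializes the parsed tree with the big-endian byml.Writer seen in the merge examples above:

def byml_to_bin(root) -> bytes:
    # Assumed helper: write a parsed BYML tree back to big-endian binary.
    return byml.Writer(root, be=True).get_bytes()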