def get_existing_project_game(project_path: str):
    """Return the game associated with an existing project directory, or `None`.

    Looks for a `project_config.json` file inside `project_path` and, if its
    "GameName" key is set to a non-empty value, resolves it via `get_game()`.
    Returns `None` when the directory, config file, or game name is missing.
    """
    project_path = Path(project_path)
    config_path = project_path / "project_config.json"
    if not (project_path.is_dir() and config_path.is_file()):
        return None
    project_config = read_json(config_path)
    game_name = project_config.get("GameName", "")
    if not game_name:
        return None
    return get_game(game_name)
def load_json_dir(self, directory: tp.Union[Path, str], clear_old_data=True):
    """Load individual Param JSON files from an unpacked Binder folder (produced by `write_json_dir()`).

    The names of the Param JSON files to be loaded from the folder are recorded in the "entries" key of the
    `DrawParamBND_manifest.json` file. Functionally very similar to `load_dict()`, but avoids the need for one
    gigantic JSON file for all Params.

    Args:
        directory: folder containing `DrawParamBND_manifest.json`.
        clear_old_data: if True (default), manifest header fields overwrite the current instance attributes;
            if False, each manifest header value must match the existing attribute, or `ValueError` is raised.

    Raises:
        FileNotFoundError: if the manifest file is absent from `directory`.
        ValueError: if `clear_old_data=False` and a manifest header field conflicts with existing data.
    """
    directory = Path(directory)
    manifest_path = directory / "DrawParamBND_manifest.json"
    if not manifest_path.is_file():
        raise FileNotFoundError(
            f"Could not find `DrawParamBND` manifest file '{manifest_path}'."
        )
    manifest = read_json(manifest_path)
    # Apply (or validate) the binder header fields recorded in the manifest.
    for field, value in self.get_manifest_header(manifest).items():
        if not clear_old_data:
            # Keeping old data: every new header value must agree with the current attribute.
            if (old_value := getattr(self, field)) != value:
                raise ValueError(
                    f"New `{field}` value {repr(value)} does not match old value {repr(old_value)}."
                )
        else:
            setattr(self, field, value)
def load_unpacked_dir(self, directory):
    """Load binder from a Soulstruct-unpacked directory containing a `binder_manifest.json` file."""
    # Normalize so callers may pass either a string or a `Path`.
    directory = Path(directory)
    if not directory.is_dir():
        raise ValueError(f"Could not find unpacked binder directory {repr(directory)}.")
    # Manifest records the binder header fields plus the list of entries to re-add.
    manifest = read_json(directory / "binder_manifest.json", encoding="shift-jis")
    header = self.get_manifest_header(manifest)
    for field in header:
        setattr(self, field, header[field])
    self.add_entries_from_manifest(manifest["entries"], directory, manifest["use_id_prefix"])
def detect(cls, binder_source: tp.Union[GameFile.Typing, dict]) -> bool:
    """Returns True if `binder_source` appears to be this subclass of `BaseBinder`. Does not support DCX sources.

    Accepted sources, checked in this order:
        - dict: treated as a binder manifest; its "version" value is compared to this class's name.
        - str/Path: an unpacked binder directory (or a direct path to its `binder_manifest.json`), whose
          manifest "version" is checked, or a packed binder file, whose four-byte header string is checked.
        - bytes / buffered binary stream / `BinaryReader`: four-byte header string is checked.

    Raises:
        TypeError: if `binder_source` is none of the supported source types.
    """
    if isinstance(binder_source, dict):
        # Manifest dictionary. Simply check version.
        return binder_source.get("version") == cls.__name__  # "BND3", "BND4", etc.
    if isinstance(binder_source, (str, Path)):
        binder_path = Path(binder_source)
        # A direct path to the manifest file stands in for its parent (unpacked) directory.
        if binder_path.is_file() and binder_path.name == "binder_manifest.json":
            binder_path = binder_path.parent
        if binder_path.is_dir():
            try:
                manifest = read_json(binder_path / "binder_manifest.json", encoding="shift_jis")
                return manifest.get("version") == cls.__name__  # "BND3", "BND4", etc.
            except FileNotFoundError:
                # Directory exists but holds no manifest: not an unpacked binder.
                return False
        elif binder_path.is_file():
            # NOTE(review): `reader` is not explicitly closed here — confirm `BinaryReader` manages this.
            reader = BinaryReader(binder_path)
            try:
                version = reader.unpack_string(length=4, encoding="ascii")
            except ValueError:
                # Header too short or not ASCII: cannot be this binder type.
                return False
            if version[:3] in {"BHF", "BDF"}:
                version = f"BXF{version[3]}"  # BXF header or data file
            return version == cls.__name__
        # Path exists as neither file nor directory.
        return False
    elif isinstance(binder_source, (bytes, io.BufferedIOBase)):
        # Wrap raw bytes/streams so the header can be read below.
        binder_source = BinaryReader(binder_source)
    if isinstance(binder_source, BinaryReader):
        # Peek at the first four bytes without disturbing the reader's current offset.
        with binder_source.temp_offset(0):
            try:
                version = binder_source.unpack_string(length=4, encoding="ascii")
            except ValueError:
                return False
            if version[:3] in {"BHF", "BDF"}:
                version = f"BXF{version[3]}"  # BXF header or data file
            return version == cls.__name__
    raise TypeError(f"Cannot detect `Binder` class from source type: {binder_source}")
def add_common_emedf_info(emedf: dict, common_emedf_path: Path | str):
    """Insert information from EMEDF JSON into dictionary.

    Currently, just adds internal argument types (`ArgType`) to each instruction argument in `emedf`,
    looked up from the common EMEDF JSON at `common_emedf_path`.
    """
    common_emedf_path = Path(common_emedf_path)
    common_emedf_raw = read_json(common_emedf_path)
    # Index common EMEDF instruction dictionaries by their (category, index) pair.
    common_emedf = {
        (cat_dict["index"], instr_dict["index"]): instr_dict
        for cat_dict in common_emedf_raw["main_classes"]
        for instr_dict in cat_dict["instrs"]
    }
    for (category, index), info in emedf.items():
        try:
            instr = common_emedf[category, index]
        except KeyError:
            # Missing IDs are tolerated only when every argument already carries a manual `internal_type`.
            if all("internal_type" in arg_dict for arg_dict in info["args"].values()):
                continue  # permitted to be missing from real EMEDF
            raise KeyError(
                f"Invalid instruction ID for common EMEDF '{common_emedf_path.name}': ({category}, {index})"
            )
        if len(info["args"]) != len(instr["args"]):
            print(category, index, info["alias"], instr["name"])
            raise ValueError(
                f"Instruction ID ({category}, {index}) has {len(instr['args'])} args in common EMEDF but has "
                f"{len(info['args'])} args in Soulstruct."
            )
        for i, arg_name in enumerate(info["args"]):
            common_arg = instr["args"][i]
            if "internal_type" in common_arg:
                continue  # already specified manually
            instr_type = int(common_arg["type"])
            try:
                # Original dictionary is copied, as many arguments across instructions refer to common dicts.
                info["args"][arg_name] = info["args"][arg_name].copy() | {"internal_type": ArgType(instr_type)}
            except ValueError:
                raise ValueError(
                    f"Instruction ({category}, {index}) argument '{arg_name}' has unrecognized type in common EMEDF: "
                    f"{instr_type}"
                )
def load_json_dir(self, directory: tp.Union[Path, str], clear_old_data=True):
    """Load individual text (FMG) JSON files from an unpacked Binder folder (produced by `write_json_dir()`).

    The names of the JSON files to be loaded from the folder are recorded in the "categories" key of the
    `item_manifest.json` and `menu_manifest.json` files, along with header information for each BND.

    Note that this will immediately modify the underlying MSGBNDs held in this `MSGDirectory` instance.

    Args:
        directory: folder containing `item_manifest.json`, `menu_manifest.json`, and the FMG JSON files.
        clear_old_data: if True (default), existing binder entries and name/menu caches are discarded and
            manifest headers overwrite binder attributes; if False, headers must match existing values and
            duplicate entry IDs are only warned about.

    Raises:
        FileNotFoundError: if either manifest, or any referenced FMG JSON file, is missing.
        KeyError: if a loaded FMG JSON lacks a required field.
        ValueError: if `clear_old_data=False` and a manifest header field conflicts with existing data.
    """
    directory = Path(directory)
    item_manifest_path = directory / "item_manifest.json"
    if not item_manifest_path.is_file():
        raise FileNotFoundError(
            f"Could not find `MSGDirectory` manifest file '{item_manifest_path}'."
        )
    item_manifest = read_json(item_manifest_path)
    menu_manifest_path = directory / "menu_manifest.json"
    if not menu_manifest_path.is_file():
        raise FileNotFoundError(
            f"Could not find `MSGDirectory` manifest file '{menu_manifest_path}'."
        )
    menu_manifest = read_json(menu_manifest_path)
    if clear_old_data:
        self._original_names = {}
        self._is_menu = {}
    # Update MSGBND binder information.
    for bnd_name, bnd, manifest in zip(
        ("item", "menu"),
        (self.item_msgbnd, self.menu_msgbnd),
        (item_manifest, menu_manifest),
    ):
        # IDs already present before this load; used below to warn about duplicates.
        # NOTE(review): IDs added during this load are not added to this set — confirm intended.
        entry_ids = set()
        if bnd is None:
            # No existing MSGBND: construct a fresh one from the manifest header.
            bnd = self.MSGBND_CLASS(manifest)
            bnd.path = Path(f"{bnd_name}.msgbnd.dcx" if self.IS_DCX else f"{bnd_name}.msgbnd")
            setattr(self, f"{bnd_name}_msgbnd", bnd)
        else:
            for field, value in bnd.get_manifest_header(manifest).items():
                if not clear_old_data:
                    # Keeping old data: manifest header must agree with the existing binder header.
                    if (old_value := getattr(bnd, field)) != value:
                        raise ValueError(
                            f"New `{field}` value {repr(value)} does not match old value {repr(old_value)}."
                        )
                else:
                    setattr(bnd, field, value)
            if clear_old_data:
                bnd.clear_entries()
            else:
                entry_ids = set(bnd.entries_by_id)
        for json_name in manifest["entries"]:
            try:
                fmg_dict = read_json(directory / json_name)
            except FileNotFoundError:
                raise FileNotFoundError(
                    f"Could not find text (FMG) JSON file '{directory / json_name}'."
                )
            for field in ("entry_id", "path", "flags", "data"):
                if field not in fmg_dict:
                    raise KeyError(
                        f"Required field `{field}` not specified in '{json_name}'."
                    )
            # FMG entry keys are serialized as JSON strings; convert back to integer IDs.
            fmg = self.FMG_CLASS({
                "entries": {int(k): v for k, v in fmg_dict["data"].items()}
            })
            if json_name.endswith("Patch.json"):
                # "Patch" JSONs are also written into non-Patch Binder entries.
                non_patch_entry_id = self._MSGBND_INDEX_NAMES[json_name.removesuffix("Patch.json")]
                non_patch_entry = bnd.BinderEntry(
                    fmg.pack(),
                    non_patch_entry_id,
                    fmg_dict["path"],  # TODO: Patch and non-Patch paths are identical in DSR, but not others.
                    fmg_dict["flags"],
                )
                if non_patch_entry_id in entry_ids:
                    _LOGGER.warning(
                        f"Binder entry ID {non_patch_entry_id} appears more than once in `MSGDirectory` "
                        f"'{bnd_name}' MSGBND. Fix this ASAP."
                    )
                bnd.add_entry(non_patch_entry)
            entry = bnd.BinderEntry(fmg.pack(), fmg_dict["entry_id"], fmg_dict["path"], fmg_dict["flags"])
            if entry.id in entry_ids:
                _LOGGER.warning(
                    f"Binder entry ID {entry.id} appears more than once in `MSGDirectory` '{bnd_name}' MSGBND. "
                    f"Fix this ASAP."
                )
            bnd.add_entry(entry)
def __init__(
    self,
    file_source: Typing = None,
    dcx_type: DCXType | None = DCXType.Null,
    **kwargs,
):
    """Base class for a game file, with key methods and automatic DCX detection.

    Args:
        file_source (None, str, Path, bytes, BufferedIOBase): a file path, `bytes` object, or binary stream
            to load the file from. It will be checked for DCX first. Set to None (default) to create a
            default instance.
        dcx_type (DCXType): optional DCX compression type enum to manually specify the DCX. Only permitted
            for `file_source` values that are not already DCX-compressed. (If you want to change the DCX for
            some reason, set `.dcx_type` directly after the instance is created.)
        kwargs: keyword arguments to pass on to `unpack` for buffered sources.
    """
    self._dcx_type = DCXType.Null
    self.dcx_type = dcx_type  # run through setter
    self.path = None  # type: tp.Optional[Path]
    if file_source is None:
        # Default (empty) instance: nothing to read.
        return
    try:
        # Subclass hook: may fully consume exotic source types (returns None) or yield a reader.
        reader = self._handle_other_source_types(file_source, **kwargs)
        if reader is None:
            return
    except InvalidGameFileTypeError:
        # Fall back to the standard source types handled here.
        if isinstance(file_source, dict):
            # Copy so `load_dict` may mutate freely without affecting the caller's dict.
            self.load_dict(file_source.copy())
            return
        if isinstance(file_source, (str, Path)):
            self.path = Path(file_source)
            if self.path.suffix == ".json":
                json_dict = read_json(self.path, encoding="utf-8")
                try:
                    self.load_dict(json_dict)
                except Exception as ex:
                    _LOGGER.error(f"Error while loading as JSON dict: {self.path}.\n {ex}")
                    raise
                return
        if isinstance(file_source, (str, Path, bytes, io.BufferedIOBase, BinderEntry)):
            reader = BinaryReader(file_source)
        elif isinstance(file_source, BinaryReader):
            reader = file_source
        else:
            raise InvalidGameFileTypeError(f"Invalid `GameFile` source type: {type(file_source)}")
    if self._is_dcx(reader):
        # NOTE(review): assumes a `DCXType.Null` member is falsy here — confirm `DCXType.__bool__`.
        if self.dcx_type:
            reader.close()
            raise ValueError("Cannot manually set `dcx_type` before reading a DCX file source.")
        try:
            # Decompression also detects and records the true DCX type.
            data, self.dcx_type = decompress(reader)
        finally:
            reader.close()
        reader = BinaryReader(data)
    try:
        self.unpack(reader, **kwargs)
    except Exception:
        _LOGGER.error(f"Error occurred while parsing game file: {self.path}. See traceback.")
        raise
    finally:
        reader.close()
def __init__(self, game_path=None, vanilla_backup=None):
    """Build the Dark Souls Remastered mod manager window.

    Args:
        game_path: path to the DARK SOULS REMASTERED install folder or to `DarkSoulsRemastered.exe` itself.
            Defaults to the `DSR_PATH` constant when not given.
        vanilla_backup: optional path to the vanilla backup folder; defaults to
            `DEFAULT_VANILLA_BACKUP` inside the detected game folder.

    Raises:
        ValueError: if `game_path` is neither the game folder nor the executable within it.
    """
    super().__init__(toplevel=True, window_title="Dark Souls Mod Manager")
    if not game_path:
        game_path = Path(DSR_PATH)
    game_path = Path(game_path)
    # Accept either the executable itself or the folder that contains it.
    if game_path.is_file() and game_path.name == "DarkSoulsRemastered.exe":
        self._game_path = game_path.parent
    elif game_path.is_dir() and (game_path / "DarkSoulsRemastered.exe").is_file():
        self._game_path = game_path
    else:
        raise ValueError(
            f"`game_path` should point to DARK SOULS REMASTERED folder or the executable within."
        )
    self.backup_path = self._game_path / self.DEFAULT_VANILLA_BACKUP if not vanilla_backup else Path(vanilla_backup)
    # Restore the previously saved mod list, if any.
    if self._manager_json_path.is_file():
        self.mods = read_json(self._manager_json_path)
    else:
        self.mods = []
    # --- GUI layout (NOTE(review): nesting reconstructed from a mangled source — verify visually). ---
    with self.set_master(padx=20, pady=20, auto_rows=0):
        self.Label(text="Mod Manager", font_size=25, pady=(5, 10))
        with self.set_master(auto_columns=0, grid_defaults={"padx": 10}):
            # Left column: mod list plus per-mod actions.
            with self.set_master(width=20, auto_rows=0):
                self.Label(text="Mod List:")
                self.mod_list = self.Listbox(font=16, height=10, width=20, sticky="ew")
                with self.set_master(auto_columns=0, pady=5, grid_defaults={"padx": 5}):
                    self.Button(text="Forget Mod", width=20, command=self._delete_mod)
                    self.Button(text="Show Mod Path", width=20, command=self._show_selected_paths)
            # Right column: mod entry fields and install/backup actions.
            with self.set_master(auto_rows=0, grid_defaults={"pady": 5}):
                self.mod_nickname = self.Entry(width=40, label="Mod Nickname:", sticky="e")
                self.mod_path = self.Entry(width=40, label="Mod Root Directory:", sticky="e")
                with self.set_master(auto_columns=0, pady=10, grid_defaults={"padx": 5}):
                    self.Button(text="Browse to Mod Root", width=20, command=self._browse_to_mod)
                    self.Button(text="Add to Mod List", width=20, command=self._add_mod)
                self.Button(text="Add Root to Selected Mod", width=30, command=self._add_root_to_selected)
                self.Button(text="Install Mod", width=40, command=self._install_mod, pady=20, bg="#622")
                self.Button(text="Create Vanilla Backup", width=40, command=self._create_vanilla_backup)
                self.Button(text="Restore Vanilla Backup", width=40, command=self._restore_vanilla_backup, bg="#226")
                self.Button(text="Delete Bak Files", width=40, command=self._delete_bak_files, bg="#226")
    # Populate the listbox with remembered mod nicknames.
    for mod_info in self.mods:
        self.mod_list.insert("end", mod_info["nickname"])
    self.set_geometry()
def _generate(old_instr_module, emedf_json_path):
    """Generate Python source for an `EMEDF` dictionary by cross-referencing an old-style
    `instructions.py` module (parsed textually) against an EMEDF JSON file.

    For every (category, index) instruction in the EMEDF JSON, the old module is scanned for a matching
    `instruction_info = (category, index, ...)` line; the enclosing `def`'s signature and docstring are then
    parsed to recover alias, args, types, and defaults.

    NOTE(review): this chunk appears truncated — the accumulated `new_module` string is never emitted here;
    confirm the remainder of this function elsewhere.
    """
    def_re = re.compile(r"^def (.*)\((.*)\):", re.DOTALL)
    arg_re = re.compile(r"(\w[\w\d_]*)(: \w[\w\d_]*)?( *= *.*)?")
    doc_re = re.compile(r"^[ \"\n]*(.*?)[ \"\n]*$", re.DOTALL)  # strips all quotes and spaces
    # base_module = PACKAGE_PATH("base/events/emevd/instructions.py").read_text().split("\n")
    # game_module = Path("instructions.py").read_text().split("\n")
    instr_module = Path(old_instr_module).read_text().split("\n")
    emedf = read_json(emedf_json_path)
    new_module = "EMEDF = {\n"
    for category_class in emedf["main_classes"]:
        category = category_class["index"]
        for instruction_class in category_class["instrs"]:
            index = instruction_class["index"]
            category_index = f"[{category}, {index}]"
            # Search module for (category, index)
            i_re = re.compile(rf"^ *instruction_info = \({category}, {index}(, .*)?\)")
            for i, line in enumerate(instr_module):
                if i_match := i_re.match(line):
                    if i_match.group(1):
                        # Internal defaults given.
                        internal_defaults = literal_eval(i_match.group(1)[2:])  # cut ", " at start
                    else:
                        internal_defaults = None
                    # Go backwards to find function def start line
                    while not instr_module[i].startswith("def "):
                        i -= 1
                    def_string = instr_module[i]
                    # Append extra lines until a line ending with "):" is found
                    while not instr_module[i].endswith("):"):
                        i += 1
                        def_string += instr_module[i]  # will omit newline characters but that's fine (good, even)
                    if not (match := def_re.match(def_string)):
                        print(f" {category_index} -- COULD NOT PARSE PYTHON DEF: {def_string}")
                        new_module += f" # TODO: {category_index} (could not parse Python def)\n"
                        break
                    instr_name = match.group(1)
                    args = [s.strip() for s in match.group(2).split(",") if s.strip()]  # includes types and defaults
                    print(f"{category_index} | {instruction_class['name']} | {instr_name}({', '.join(args)})")
                    if len(args) != len(instruction_class["args"]):
                        print(f" {category_index} -- ARG COUNT DOES NOT MATCH: {len(args)} vs. {len(instruction_class['args'])}")
                        new_module += f" # TODO: {category_index} (arg count does not match)\n"
                        break
                    # Read docstring
                    i += 1  # after def
                    indent = " " * 12
                    if instr_module[i].lstrip().startswith("\"\"\""):
                        doc_string_lines = []
                        line_tex = doc_re.match(instr_module[i]).group(1)
                        if line_tex:
                            doc_string_lines.append(line_tex)
                        # Append extra lines until closing quotes are found
                        while not instr_module[i].rstrip().endswith("\"\"\""):
                            i += 1
                            line_text = doc_re.match(instr_module[i]).group(1)
                            doc_string_lines.append(line_text)
                        # Drop a trailing empty line left behind by the closing quote line.
                        if not doc_string_lines[-1]:
                            doc_string_lines = doc_string_lines[:-1]
                        doc_string = f"\"\"\"\n{indent}" + f"\n{indent}".join(doc_string_lines) + f"\n \"\"\""
                    else:
                        doc_string = "\"TODO\""
                    arg_dicts = []
                    for arg_i, arg in enumerate(args):
                        if not (arg_match := arg_re.match(arg)):
                            raise ValueError(f" {category_index} -- COULD NOT PARSE ARG: {arg}")
                        arg_dict = {"name": arg_match.group(1)}  # type: dict[str, tp.Any]
                        if internal_defaults and len(internal_defaults) > arg_i:
                            arg_dict["internal_default"] = internal_defaults[arg_i]
                        if arg_match.group(2):
                            arg_dict["type"] = arg_match.group(2)[2:]  # skip ": "
                        else:
                            arg_dict["type"] = None
                        if arg_match.group(3):
                            arg_default = re.match(r"[ =]*(.*)", arg_match.group(3)).group(1)  # MUST match
                            if arg_default == "None":
                                # `None` default cannot be typed automatically; flag for manual attention.
                                arg_dict["default"] = "TODO"
                            else:
                                try:
                                    arg_dict["default"] = literal_eval(arg_default)
                                except ValueError:
                                    print(arg_default)
                                    raise
                                # Infer the argument type from the literal default when no annotation exists.
                                if arg_dict["type"] is None:
                                    arg_dict["type"] = type(arg_dict["default"]).__name__
                        else:
                            arg_dict["default"] = None  # indicates absence of default, NOT `None` as default
                        arg_dicts.append(arg_dict)
                    # Emit the EMEDF entry source text for this instruction.
                    new_module += f" ({category}, {index}): {{\n"
                    new_module += f" \"alias\": \"{instr_name}\",\n"
                    new_module += f" \"docstring\": {doc_string},\n"  # triple-quoted
                    if not arg_dicts:
                        new_module += f" \"args\": {{}},\n"
                    else:
                        new_module += f" \"args\": {{\n"
                        for arg_dict in arg_dicts:
                            new_module += f" \"{arg_dict['name']}\": {{\n"
                            if arg_dict['type'] is None:
                                new_module += f" \"type\": {arg_dict['type']}, # TODO\n"
                            else:
                                new_module += f" \"type\": {arg_dict['type']},\n"
                            new_module += f" \"default\": {repr(arg_dict['default'])},\n"
                            # Zero internal defaults are omitted as noise.
                            if "internal_default" in arg_dict and arg_dict["internal_default"] != 0:
                                new_module += f"{' ' * 16}\"internal_default\": {repr(arg_dict['internal_default'])},\n"
                            new_module += f" }},\n"
                        new_module += f" }},\n"
                    new_module += " },\n"
                    break
f"New `{field}` value {repr(value)} does not match old value {repr(old_value)}." ) else: setattr(self, field, value) if clear_old_data: self._entries.clear() self.params = { } # type: dict[str, list[tp.Optional[Param], tp.Optional[Param]]] entry_ids = set() else: entry_ids = set(self.entries_by_id.keys()) for json_name in manifest["entries"]: # The rest of the JSON file name doesn't actually matter; we use the BND "path" within to identify it. try: param_dict = read_json(directory / json_name, encoding="utf-8") except FileNotFoundError: raise FileNotFoundError( f"Could not find DrawParam JSON file '{directory / json_name}'." ) for field in ("entry_id", "path", "flags", "data"): if field not in param_dict: raise KeyError( f"Field `{field}` not specified in '{json_name}' in `DrawParamBND` folder." ) path_name = Path(param_dict["path"]).name if not (match := _DRAW_PARAM_FILE_NAME_RE.match(path_name)): raise ValueError( f"Invalid `DrawParam` path name in '{json_name}': '{path_name}'" ) slot = int(match.group(2)[1]) if match.group(2) else 0