def _(data: list, patches: list[Callable], previous_path: str = None) -> SJSON:
    """Apply every patch to every element of a list-valued SJSON node.

    Works on a deep copy so the caller's data is never mutated.

    Args:
        data: list node from the parsed SJSON tree.
        patches: callables applied in order to each list element.
        previous_path: dotted path of the parent node, for logging only.
    """
    copied = copy.deepcopy(data)
    # '[]' marks a list level in the logged patch path
    path = '[]' if previous_path is None else f'{previous_path}.[]'
    LOGGER.debug(f"Patching '{path}'")
    for apply_patch in patches:
        copied = [apply_patch(data=element) for element in copied]
    return copied
def uninstall() -> None:
    """Remove the installed Lua mod directory from the game's Content directory."""
    mod_dir = config.content_dir.joinpath(MOD_TARGET_DIR)
    if not mod_dir.exists():
        LOGGER.info(f"No Lua mod to uninstall from '{mod_dir}'")
        return
    # NOTE(review): distutils is deprecated (removed in Python 3.12);
    # shutil.rmtree is the drop-in replacement — confirm target Python version
    dir_util.remove_tree(str(mod_dir))
    LOGGER.info(f"Uninstalled Lua mod from '{mod_dir}'")
def status() -> bool:
    """Report whether any hash files have been stored.

    Returns:
        True if the hash directory exists and contains at least one entry,
        False otherwise.
    """
    # fixed return annotation: this function returns a bool, not None
    if config.HASH_DIR.exists() and any(config.HASH_DIR.iterdir()):
        LOGGER.info(f"Found hashes at '{config.HASH_DIR}'")
        return True
    LOGGER.info(f"No hashes found at '{config.HASH_DIR}'")
    return False
def status() -> bool:
    """Report whether any cached SJSON data has been stored.

    Returns:
        True if the SJSON data directory exists and contains at least one
        entry, False otherwise.
    """
    # fixed return annotation: this function returns a bool, not None
    if config.SJSON_DATA_DIR.exists() and any(config.SJSON_DATA_DIR.iterdir()):
        LOGGER.info(f"Found SJSON data at '{config.SJSON_DATA_DIR}'")
        return True
    LOGGER.info(f"No SJSON data found at '{config.SJSON_DATA_DIR}'")
    return False
def try_detect_hades_dirs() -> list[Path]:
    """Try to detect Hades directory from Steam and Epic Games files.

    Returns:
        Existing, valid-looking Hades directories gathered from Steam
        library files and Epic Games metadata files.
    """
    potential_hades_dirs: list[Path] = []
    # Steam: scan every known 'libraryfolders.vdf' for library paths
    for steam_library_file in [
        Path(item).joinpath('libraryfolders.vdf') for item in TRY_STEAM[config.platform]
    ]:
        if steam_library_file.exists():
            LOGGER.debug(f"Found Steam library file at '{steam_library_file}'")
            for steam_library in LIBRARY_REGEX.finditer(steam_library_file.read_text()):
                potential_hades_dirs.append(
                    Path(steam_library.group(1)).joinpath('steamapps/common/Hades'))
    # Epic Games: scan '*.item' metadata files for a Hades installation
    for epic_metadata_dir in [Path(item) for item in TRY_EPIC[config.platform]]:
        for epic_metadata_item in epic_metadata_dir.glob('*.item'):
            item = epic_metadata_item.read_text()
            search_name = DISPLAY_NAME_REGEX.search(item)
            if search_name and 'Hades' in search_name.group(1):
                LOGGER.debug(
                    f"Found potential Epic Games' Hades installation from '{epic_metadata_item}'"
                )
                # guard against malformed metadata: previously an item with a
                # display name but no install location raised AttributeError
                search_location = INSTALL_LOCATION_REGEX.search(item)
                if search_location:
                    potential_hades_dirs.append(Path(search_location.group(1)))
                else:
                    LOGGER.debug(
                        f"Could not find install location in '{epic_metadata_item}'"
                    )
    # keep only directories that exist and actually look like Hades
    return [
        hades_dir for hades_dir in potential_hades_dirs
        if hades_dir.exists() and is_valid_hades_dir(hades_dir, False)
    ]
def try_get_modimporter() -> 'Path | None':
    """Check if modimporter is available in the Content directory.

    Returns:
        Path to the first modimporter executable/script found, or None when
        none is available (fixed annotation: the original claimed '-> Path'
        but the function can return None).
    """
    for mod_importer in MOD_IMPORTERS:
        modimporter = config.content_dir.joinpath(mod_importer)
        if modimporter.exists():
            LOGGER.info(f"'modimporter' detected at '{modimporter}'")
            return modimporter
    return None
def store(file: Path) -> dict:
    """Parse an SJSON file and cache its contents as JSON.

    Args:
        file: SJSON file to parse.

    Returns:
        The parsed SJSON data.

    Raises:
        FileExistsError: a cached copy already exists and '--force' not set.
    """
    cache_file = __get_file(file)
    if cache_file.exists() and not config.force:
        raise FileExistsError(f"SJSON data file '{cache_file}' already exists")
    parsed = sjson.loads(file.read_text())
    # ensure the cache directory hierarchy exists before writing
    cache_file.parent.mkdir(parents=True, exist_ok=True)
    cache_file.write_text(json.dumps(parsed))
    LOGGER.debug(f"Saved SJSON data from '{file}' to '{cache_file}'")
    return parsed
def __patch_hook_file(original_file: Path, file: Path, import_statement: str) -> None:
    """Append the Hephaistos hook import statement to the Lua hook file.

    Reads the pristine backup ('original_file') and writes the hooked
    version to 'file', so repatching never stacks hooks.
    """
    hooked_text = original_file.read_text()
    hooked_text += f"""
-- Hephaistos hook
{import_statement}
"""
    file.write_text(hooked_text)
    LOGGER.info(f"Patched '{file}' with hook '{import_statement}'")
def status() -> bool:
    """Check whether the Lua mod is installed and its hook registered.

    Returns:
        True if the mod directory is non-empty and the hook file contains
        the mod's import statement (fixed annotation: returns bool, not None).
    """
    mod_dir = config.content_dir.joinpath(MOD_TARGET_DIR)
    if mod_dir.exists() and any(mod_dir.iterdir()):
        LOGGER.info(f"Found Lua mod at '{mod_dir}'")
        # only the scripts dir and relative path are needed here
        (_, lua_scripts_dir, relative_path_to_mod, _) = __prepare_variables()
        return patchers.patch_lua_status(lua_scripts_dir, relative_path_to_mod + MOD_ENTRY_POINT)
    LOGGER.info(f"No Lua mod found at '{mod_dir}'")
    return False
def __handle_global_args(self, args: 'argparse.Namespace') -> None:
    """Apply global CLI arguments: verbosity, Hades directory, modimporter.

    Args:
        args: parsed arguments (fixed annotation: this is a Namespace with
            'verbose' / 'hades_dir' / 'modimporter' attributes, not a
            'list[str]' as previously annotated).
    """
    # logging verbosity level, capped at the most verbose supported level
    level = ParserBase.VERBOSE_TO_LOG_LEVEL[min(args.verbose, 2)]
    LOGGER.setLevel(level)
    # hades_dir
    self.__configure_hades_dir(args.hades_dir)
    # modimporter: only look it up when not explicitly disabled
    if args.modimporter:
        config.modimporter = helpers.try_get_modimporter()
    else:
        LOGGER.info("Using '--no-modimporter': will not run 'modimporter', even if available")
def patch_sjsons() -> None:
    """Apply every registered SJSON patch to the game's SJSON files."""
    LOGGER.info("Reading SJSON data (this operation can take time, please be patient)")
    sjson_dir = config.content_dir.joinpath(SJSON_DIR)
    # SJON_PATCHES maps directory name -> {file name -> patches}
    for dirname, files in SJON_PATCHES.items():
        sub_dir = sjson_dir.joinpath(dirname)
        for filename, patches in files.items():
            target = sub_dir.joinpath(filename)
            LOGGER.debug(f"Patching SJSON file at '{target}'")
            with safe_patch_file(target) as (source_sjson, patched_file):
                __patch_sjson_file(source_sjson, patched_file, patches)
def __find_sjsons(save_dir: Path) -> list[Path]:
    """Find actual SJSON files in directory.

    Every entry under 'save_dir' is tentatively parsed as SJSON; only
    entries that parse successfully are returned.
    """
    LOGGER.debug(f"Detecting SJSON files from '{save_dir}'")
    sjsons: list[Path] = []
    for file in save_dir.rglob('*'):
        try:
            # rglob already yields Path objects (dropped redundant Path(file))
            sjson.loads(file.read_text())
        except Exception:
            # was a bare 'except:' — narrowed so KeyboardInterrupt/SystemExit
            # are no longer swallowed; parse/read failures just skip the entry
            continue
        LOGGER.debug(f"Found valid SJSON in '{file}'")
        sjsons.append(file)
    return sjsons
def __update_children(children_dict: dict, data: dict) -> dict:
    """Apply per-child callbacks to a copy of 'data' and return the copy.

    Args:
        children_dict: mapping of child key -> callback to apply to it.
        data: dict node from the parsed SJSON tree.

    Raises:
        KeyError: when an expected child key is missing.
    """
    updated = copy.deepcopy(data)
    for child_key, transform in children_dict.items():
        try:
            # keep a copy of the old value purely for the debug log below
            old_value = copy.deepcopy(updated[child_key])
            updated[child_key] = transform(updated[child_key])
            LOGGER.debug(
                f"Updated child '{child_key}' from '{old_value}' to '{updated[child_key]}'"
            )
        except KeyError:
            raise KeyError(f"Did not find '{child_key}'.")
    return updated
def __patch_sjson_data(data: dict, patch: Union[dict[str, SJSONPatch], Callable], previous_path: str = None) -> SJSON:
    """Recursively walk 'data' following the patch mapping and apply the
    callables found at the leaves; returns a patched deep copy.

    Args:
        data: node of the parsed SJSON tree.
        patch: either a mapping of key -> sub-patch (descend) or a callable
            (apply to the whole node).
        previous_path: dotted path of this node, for logging only.
    """
    patched = copy.deepcopy(data)
    # leaf case: 'patch' is a callable applied to the node as a whole
    if not isinstance(patch, dict):
        LOGGER.debug(f"Patching '{previous_path}'")
        return patch(data=patched)
    # mapping case: recurse into each listed key
    for key, sub_patches in patch.items():
        current_path = key if previous_path is None else f'{previous_path}.{key}'
        patched[key] = __patch_sjson_data(patched[key], sub_patches, current_path)
    return patched
def patch_engines() -> None:
    """Hex patch all engine binaries for the current platform with the new
    screen resolution values.

    Fills in the resolution-dependent 'replacement_args' of the shared
    HEX_PATCHES table, then patches every engine file, collecting warnings.
    """
    HEX_PATCHES['viewport']['replacement_args'] = (
        __int_to_bytes(config.new_screen.width),
        __int_to_bytes(config.new_screen.height))
    HEX_PATCHES['fullscreen_vector']['replacement_args'] = (
        __float_to_bytes(config.new_screen.width),
        __float_to_bytes(config.new_screen.height))
    # NOTE(review): height deliberately comes before width here — presumably
    # matching the binary's byte layout; confirm against the pattern definition
    HEX_PATCHES['width_height_floats']['replacement_args'] = (
        __float_to_bytes(config.new_screen.height),
        __float_to_bytes(config.new_screen.width))
    HEX_PATCHES['screencenter_vector']['replacement_args'] = (
        __float_to_bytes(config.new_screen.center_x),
        __float_to_bytes(config.new_screen.center_y))
    # initialize ONCE before the loop: resetting this flag per engine could
    # drop warnings raised while patching earlier engines
    got_any_warnings = False
    for engine, filepath in ENGINES[config.platform].items():
        hex_patches = __get_engine_specific_hex_patches(engine)
        file = config.hades_dir.joinpath(filepath)
        LOGGER.debug(f"Patching '{engine}' backend at '{file}'")
        with safe_patch_file(file) as (original_file, file):
            if not __patch_engine(original_file, file, engine, hex_patches):
                got_any_warnings = True
    if got_any_warnings:
        LOGGER.warning(
            "Hephaistos managed to apply all hex patches but did not patch everything exactly as expected."
        )
        LOGGER.warning("This is most probably due to a game update.")
        LOGGER.warning(
            "In most cases this is inconsequential and Hephaistos will work anyway, but Hephaistos might need further changes to work properly with the new version of the game."
        )
def __configure_hades_dir(self, hades_dir_arg: str) -> None:
    """Validate and store the Hades directory given on the command line.

    Sets 'config.hades_dir' / 'config.content_dir' on success; on failure
    logs advice (including auto-detected candidate directories) and exits.
    """
    # if we are on macOS and running PyInstaller executable and defaulting
    # to current directory, force working directory to be the one containing
    # the executable
    # this is a kludge around macOS calling executables from the user home
    # rather than the current directory when double-clicked on from Finder
    if config.platform == Platform.MACOS and getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS') and hades_dir_arg == '.':
        hades_dir_arg = Path(sys.argv[0]).parent
        LOGGER.debug(f"Running macOS executable from Finder: forced working directory to {hades_dir_arg}")
    config.hades_dir = Path(hades_dir_arg)
    try:
        # raises HadesNotFound when the directory does not look like Hades
        helpers.is_valid_hades_dir(config.hades_dir)
        config.content_dir = config.hades_dir.joinpath(CONTENT_DIR_PATH[config.platform])
        LOGGER.debug(f"Detected platform: {config.platform}")
    except HadesNotFound as e:
        LOGGER.error(e)
        # offer auto-detected directories as advice before bailing out
        hades_dirs = helpers.try_detect_hades_dirs()
        if len(hades_dirs) > 0:
            advice = '\n'.join(f" - {hades_dir}" for hades_dir in hades_dirs)
        else:
            advice = " - Could not auto-detect any Hades directory."
        msg = f"""Hephaistos does not seem to be located in the Hades directory:
{advice}
Please move Hephaistos directly to the Hades directory.
If you know what you're doing, you can also re-run with '--hades-dir' to manually specify Hades directory while storing Hephaistos elsewhere."""
        LOGGER.error(msg)
        # exit with status 1; prompt first when running interactively
        self.__end(1, prompt_user=config.interactive_mode)
def __prepare_variables() -> tuple[Path, Path, str, str]:
    """Create the mod directory and compute mod-related paths.

    Returns:
        (mod_dir, lua_scripts_dir, relative_path_to_mod, import_statement) —
        fixed annotation: the original claimed a 3-tuple but a 4-tuple is
        returned.
    """
    # copy mod files
    mod_dir = config.content_dir.joinpath(MOD_TARGET_DIR)
    mod_dir.mkdir(parents=True, exist_ok=True)
    # compute relative path from Hades scripts dir to mod
    lua_scripts_dir = config.content_dir.joinpath(LUA_SCRIPTS_DIR)
    relative_path_to_mod = os.path.relpath(mod_dir, lua_scripts_dir)
    # replace backward slashes with forward slashes on Windows and add trailing slash
    relative_path_to_mod = relative_path_to_mod.replace('\\', '/') + '/'
    LOGGER.debug(
        f"Computed relative path '{relative_path_to_mod}' from '{lua_scripts_dir}' to '{mod_dir}'"
    )
    import_statement = f'Import "{relative_path_to_mod + MOD_ENTRY_POINT}"'
    return (mod_dir, lua_scripts_dir, relative_path_to_mod, import_statement)
def check_version() -> str:
    """Compare current version with latest GitHub release.

    Returns:
        A human-readable message with current/latest versions, plus an
        update notice when a newer release exists. Network failures degrade
        to the VERSION_CHECK_ERROR placeholder instead of raising.
    """
    try:
        # log the API URL actually queried (previously logged the HTML
        # release-page URL, which was misleading when debugging)
        LOGGER.debug(f"Checking latest version at {config.LATEST_RELEASE_API_URL}")
        request = urllib.request.Request(config.LATEST_RELEASE_API_URL)
        # context manager ensures the connection is closed deterministically
        with urllib.request.urlopen(request) as response:
            data = json.loads(response.read().decode('utf-8'))
        latest_version = data['name']
    except urllib.error.URLError as e:
        LOGGER.debug(e, exc_info=True)
        latest_version = VERSION_CHECK_ERROR
    msg = f"""Current version: {config.VERSION}
Latest version: {latest_version}"""
    if latest_version != config.VERSION and latest_version != VERSION_CHECK_ERROR:
        msg += f"\nA new version of Hephaistos is available at: {config.LATEST_RELEASE_URL}"
    return msg
def __start(self) -> None:
    """Entry point: parse CLI arguments, dispatch the subcommand, and loop
    in interactive mode."""
    raw_args = sys.argv[1:]
    args = self.parse_args(raw_args)
    # handle global args early
    self.__handle_global_args(args)
    if not args.subcommand:
        # no subcommand given: enter interactive mode, defaulting to INFO
        # logs unless the user explicitly set a verbosity
        if not args.verbose:
            LOGGER.setLevel(logging.INFO)
        args = self.__interactive(raw_args)
    # handle subcommand args via SubcommandBase.dispatch handler
    try:
        args.dispatch(**vars(args))
    except Exception as e:
        # log any unhandled exception
        LOGGER.exception(e)
    # if in interactive mode, loop until user manually closes
    if config.interactive_mode:
        self.__restart()
    else:
        self.__end()
def try_get_profile_sjson_files() -> list[Path]:
    """Try to detect save directory and list all Profile*.sjson files."""
    save_dirs = TRY_SAVE_DIR[config.platform]()
    for save_dir in save_dirs:
        if not save_dir.exists():
            continue
        LOGGER.debug(f"Found save directory at '{save_dir}'")
        if config.platform == Platform.MS_STORE:
            # Microsoft Store save files are not actually named
            # `Profile*.sjson` and instead use random hexadecimal names with
            # no file extensions, so we need to list them by trying to parse
            # them as SJSON
            profiles = __find_sjsons(save_dir)
        else:
            profiles = list(save_dir.glob('Profile*.sjson'))
        if profiles:
            return profiles
    # no save directory yielded profiles: warn and return an empty list
    save_dirs_list = '\n'.join(f" - {save_dir}" for save_dir in save_dirs)
    msg = f"""Did not find any 'ProfileX.sjson' in save directories:
{save_dirs_list}"""
    LOGGER.warning(msg)
    return []
def __try_windows_save_dirs() -> list[Path]:
    """Return candidate Hades save directories on Windows."""
    # Windows (Steam / Epic Games) might store saves:
    # - Directly inside the Documents directory
    # - Nested inside OneDrive inside the Documents directory
    relative_save_dirs = [
        r'Saved Games\Hades',
        r'OneDrive\Saved Games\Hades',
    ]
    # Try to detect actual path to Documents folder from registry, in case user
    # has moved its Documents folder somewhere else than `%USERDIR%\Documents`
    try:
        import winreg
        sub_key = r'SOFTWARE\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders'
        with winreg.OpenKey(winreg.HKEY_CURRENT_USER, sub_key) as key:
            my_documents_path = winreg.QueryValueEx(key, r'Personal')[0]
        LOGGER.debug(f"Detected 'Documents' path from registry: {my_documents_path}")
        documents_dir = Path(my_documents_path)
    # Fall back to default `%USERDIR%\Documents` value if no registry entry
    # found or anything goes wrong
    except Exception as e:
        LOGGER.debug("Could not detect 'Documents' path from registry.")
        LOGGER.debug(e, exc_info=True)
        documents_dir = Path(os.path.expanduser(r'~\Documents'))
    return [documents_dir.joinpath(item) for item in relative_save_dirs]
def __patch_engine(original_file: Path, file: Path, engine: str, hex_patches: dict[str, HexPatch]) -> bool:
    """Return True if patch went as expected, False if any warnings happened.

    Applies every hex patch to the bytes of 'original_file' and writes the
    result to 'file'. Raises LookupError when a required pattern is absent.
    """
    binary = original_file.read_bytes()
    clean_run = True
    for patch_name, hex_patch in hex_patches.items():
        replacement = hex_patch['replacement'] % hex_patch['replacement_args']
        pattern = hex_patch['pattern']
        (binary, sub_count) = pattern.subn(replacement, binary)
        LOGGER.debug(
            f"Replaced {sub_count} occurrences of '{patch_name}' pattern {pattern.pattern} with {replacement} in '{file}'"
        )
        expected = hex_patch['expected_subs']
        # a required pattern with zero matches is fatal; a count mismatch
        # is only a warning (likely a game update)
        if sub_count == 0 and expected != 0:
            raise LookupError(
                f"Failed to apply '{patch_name}' patch in '{file}' (no occurrences found)"
            )
        if sub_count != expected:
            LOGGER.warning(
                f"Expected {expected} matches for '{patch_name}' patch in '{file}', found {sub_count}"
            )
            clean_run = False
    file.write_bytes(binary)
    LOGGER.info(f"Patched '{file}'")
    return clean_run
def __upsert_siblings(lookup_key: str, lookup_value: str, sibling_dict: dict, data: dict) -> dict:
    """Update or insert sibling keys of a matching lookup key/value pair.

    When `data[lookup_key] == lookup_value`, returns a deep copy of `data`
    where each sibling listed in `sibling_dict` ({key: (callback, default)})
    is updated via its callback, or inserted as `callback(default)` when
    absent. Returns `data` unchanged when the lookup does not match or
    `lookup_key` is missing entirely.
    """
    try:
        if data[lookup_key] == lookup_value:
            patched = copy.deepcopy(data)
            for sibling_key, (callback, default) in sibling_dict.items():
                try:
                    # copy kept only so the debug log can show the old value
                    sibling_value = copy.deepcopy(patched[sibling_key])
                    patched[sibling_key] = callback(patched[sibling_key])
                    LOGGER.debug(
                        f"Found '{lookup_key} = {lookup_value}', updated sibling '{sibling_key}' from '{sibling_value}' to '{patched[sibling_key]}'"
                    )
                except KeyError:
                    # sibling missing: insert a value derived from 'default',
                    # but only when 'default' is truthy — a falsy default
                    # means the missing sibling is silently skipped
                    if default:
                        patched[sibling_key] = callback(default)
                        LOGGER.debug(
                            f"Found '{lookup_key} = {lookup_value}', inserted sibling '{sibling_key} = {patched[sibling_key]}'"
                        )
            return patched
        return data
    except KeyError:
        # 'lookup_key' absent from data: nothing to match, return unchanged
        return data
def patch_profile_sjsons() -> None:
    """Apply the custom resolution to every detected 'ProfileX.sjson' file."""
    if not config.custom_resolution:
        return
    profile_sjsons = helpers.try_get_profile_sjson_files()
    if not profile_sjsons:
        msg = """Cannot patch custom resolution to 'ProfileX.sjson'.
This is a non-blocking issue but might prevent you from running Hades at the resolution of your choice."""
        LOGGER.warning(msg)
        return
    edited_files = []
    for file in profile_sjsons:
        LOGGER.debug(f"Analyzing '{file}'")
        data = sjson.loads(file.read_text())
        for key in ['X', 'WindowWidth']:
            data[key] = config.resolution.width
        for key in ['Y', 'WindowHeight']:
            data[key] = config.resolution.height
        # we manually set WindowX/Y in ProfileX.sjson configuration files as a
        # safeguard against WindowX/Y values overflowing when switching to
        # windowed mode while using a custom resolution larger than officially
        # supported by the main monitor, ensuring Hades will not be drawn
        # offscreen and can then be repositioned by the user
        for key in ['WindowX', 'WindowY']:
            if key not in data:
                data[key] = WINDOW_XY_DEFAULT_OFFSET
                LOGGER.debug(
                    f"'{key}' not found in '{file.name}', inserted '{key} = {WINDOW_XY_DEFAULT_OFFSET}'"
                )
            elif data[key] >= WINDOW_XY_OVERFLOW_THRESHOLD:
                data[key] = WINDOW_XY_DEFAULT_OFFSET
                LOGGER.debug(
                    f"'{key}' found in '{file.name}' but with overflowed value, reset to '{key} = {WINDOW_XY_DEFAULT_OFFSET}'"
                )
        file.write_text(sjson.dumps(data))
        edited_files.append(file)
    if edited_files:
        # distinct variable for the joined string (the original rebound the
        # list variable to a str, which read confusingly)
        files_listing = '\n'.join(f" - {file}" for file in edited_files)
        msg = f"""Applied custom resolution to:
{files_listing}"""
        LOGGER.info(msg)
def install() -> None:
    """Copy the Lua mod into the game's Content directory and register it."""
    LOGGER.debug(f"Installing Lua mod from '{config.MOD_SOURCE_DIR}'")
    (mod_dir, lua_scripts_dir, relative_path_to_mod, import_statement) = __prepare_variables()
    dir_util.copy_tree(str(config.MOD_SOURCE_DIR), str(mod_dir))
    LOGGER.debug(f"Copied '{config.MOD_SOURCE_DIR}' to '{mod_dir}'")
    __configure(mod_dir, relative_path_to_mod)
    LOGGER.info(f"Installed Lua mod to '{mod_dir}'")
    if config.modimporter:
        # run modimporter (if available) to register Hephaistos
        LOGGER.info("Running 'modimporter' to register Hephaistos")
        helpers.run_modimporter(config.modimporter)
    else:
        # otherwise register manually by patching the Lua hook file
        patchers.patch_lua(lua_scripts_dir, import_statement)
def __configure(mod_dir: Path, relative_path_to_mod: str) -> None:
    """Configure the copied mod files: viewport values and internal imports.

    Args:
        mod_dir: directory the mod files were copied to.
        relative_path_to_mod: relative path (forward slashes, trailing slash)
            from the game's Lua scripts directory to the mod directory.
    """
    # configure viewport
    mod_config_file = mod_dir.joinpath(MOD_CONFIG_FILE)
    source_text = mod_config_file.read_text()
    # raw strings for the '\g<1>' backreferences: in a regular string literal
    # '\g' is an invalid escape sequence (DeprecationWarning today, a syntax
    # error in future Python versions); the resulting text is unchanged
    patched_text = WIDTH_REGEX.sub(r'\g<1>' + str(config.new_screen.width), source_text)
    patched_text = HEIGHT_REGEX.sub(r'\g<1>' + str(config.new_screen.height), patched_text)
    patched_text = CENTER_HUD_REGEX.sub(r'\g<1>' + str(config.center_hud).lower(), patched_text)
    mod_config_file.write_text(patched_text)
    LOGGER.debug(f"Configured '{mod_config_file}'")
    # configure internal mod imports
    for file in mod_dir.glob('**/*.lua'):
        source_text = file.read_text()
        (patched_text, count) = IMPORT_REGEX.subn(rf'Import "{relative_path_to_mod}\g<1>"', source_text)
        if count:
            file.write_text(patched_text)
            LOGGER.debug(
                f"Configured '{file}' internal mod imports ({count} occurrences)"
            )
def patch_lua_status(lua_scripts_dir: Path, import_statement: str) -> bool:
    """Check whether the Lua hook file already contains the given import.

    Returns:
        True if the hook import statement is present in the hook file
        (fixed annotation: the function returns a bool, not None).
    """
    hook_file = lua_scripts_dir.joinpath(HOOK_FILE)
    LOGGER.debug(f"Checking patch status of Lua hook file at '{hook_file}'")
    text = hook_file.read_text()
    if import_statement in text:
        LOGGER.info(f"Found hook '{import_statement}' in '{hook_file}'")
        return True
    LOGGER.info(f"No hook '{import_statement}' found in '{hook_file}'")
    return False
def patch_engines_status() -> bool:
    """Check patch status of every engine binary for the current platform.

    Returns:
        True if all engine files look patched, False otherwise (fixed
        annotation: the function returns a bool, not None).
    """
    status = True
    for engine, filepath in ENGINES[config.platform].items():
        file = config.hades_dir.joinpath(filepath)
        LOGGER.debug(
            f"Checking patch status of '{engine}' backend at '{file}'")
        try:
            # store_backup=False: read-only status check, do not create backups
            with safe_patch_file(file, store_backup=False) as (original_file, file):
                if original_file is not None:
                    LOGGER.info(f"'{file}' looks patched")
                else:
                    status = False
                    LOGGER.info(f"'{file}' is not patched")
        except hashes.HashMismatch:
            # file changed since the last backup — most likely a game update
            status = False
            LOGGER.info(
                f"'{file}' has been modified since last backup: probably not patched"
            )
    return status
def safe_patch_file(
    file: Path, store_backup: bool = True
) -> Generator[Tuple[Union[SJSON, Path], Path], None, None]:
    """Context manager for patching files in a safe manner, wrapped by backup and hash handling.

    On first run:
    - Store a backup copy of the original file for restoration with the `restore` subcommand.
    - (If patching SJSON) Store a copy of the parsed SJSON data for speeding up subsequent patches.
    - Store patched hash in a text file.

    On subsequent runs, check current hash against previously stored hash:
    - If matching, repatch from the backup copy or stored SJSON data (if patching SJSON).
    - If not matching, the file has changed since the last patch.

    Yields a (source, file) pair: 'source' is the parsed SJSON data for
    SJSON files, the backup path for other files, or None when no backup is
    stored (status-only usage).
    """
    # NOTE(review): generator-based context manager — presumably decorated
    # with @contextlib.contextmanager where defined; confirm at definition site
    try:
        if hashes.check(file):
            # known file: repatch from the pristine backup / cached SJSON
            LOGGER.debug(
                f"Hash match for '{file}': repatching based on backup file")
            original_file, source_sjson = backups.get(file)
        elif store_backup:
            # first encounter: keep a pristine copy for repatching / restore
            LOGGER.debug(f"No hash stored for '{file}': storing backup file")
            original_file, source_sjson = backups.store(file)
        else:
            # status-only mode: signal "not patched" with None values
            LOGGER.debug(f"No hash stored for '{file}'")
            original_file, source_sjson = None, None
    except hashes.HashMismatch as e:
        if config.force:
            # if using '--force', discard existing backup / hash and use new file as basis
            LOGGER.debug(
                f"Hash mismatch for '{file}' but running with '--force': patching based on new file"
            )
            original_file, source_sjson = backups.store(file)
        else:
            # otherwise let caller decide what to do
            raise e
    # SJSON files are patched from parsed data; binaries from the backup path
    if file.suffix == config.SJSON_SUFFIX:
        yield (source_sjson, file)
    else:
        yield (original_file, file)
    # after the caller patched the file, record its hash for change detection
    if store_backup:
        hashes.store(file)
def handler(self, **kwargs) -> None:
    """Restore backups, discard hashes and SJSON data, uninstall Lua mod."""
    # run 'modimporter --clean' (if available) to unregister Hephaistos
    if config.modimporter:
        LOGGER.info("Running 'modimporter --clean' to unregister Hephaistos")
        helpers.run_modimporter(config.modimporter, clean_only=True)
    backups.restore()
    hashes.discard()
    sjson_data.discard()
    lua_mod.uninstall()
    # clean up Hephaistos data dir if empty (using standalone executable)
    if not any(config.HEPHAISTOS_DATA_DIR.iterdir()):
        # NOTE(review): distutils is removed in Python 3.12 — consider
        # shutil.rmtree as a replacement; confirm target Python version
        dir_util.remove_tree(str(config.HEPHAISTOS_DATA_DIR))
        LOGGER.info(f"Cleaned up empty directory '{config.HEPHAISTOS_DATA_DIR}'")
    # re-run modimporter (if available) to re-register other mods
    if config.modimporter:
        LOGGER.info("Running 'modimporter' to re-register other mods")
        helpers.run_modimporter(config.modimporter)