def test_properties(loader: AssetLoader):
    '''Property parsing is skipped when disabled, and re-done on demand.'''
    loader.wipe_cache()

    # With property parsing disabled the asset should come back bare
    with ue_parsing_context(properties=False):
        pgd = loader[TEST_PGD_PKG]
        assert not pgd.has_properties

    # Requesting properties afterwards should trigger a re-parse of the cached asset
    with ue_parsing_context(properties=True):
        pgd = loader[TEST_PGD_PKG]
        assert pgd.has_properties

    # A fresh load with properties enabled should also have them
    loader.wipe_cache()
    with ue_parsing_context(properties=True):
        pgd = loader[TEST_PGD_PKG]
        assert pgd.has_properties
def test_bulk_data(loader: AssetLoader):
    '''Bulk-data parsing is skipped when disabled, and re-done on demand.'''
    loader.wipe_cache()

    # With bulk data disabled the asset should come back without it
    with ue_parsing_context(bulk_data=False):
        pgd = loader[TEST_PGD_PKG]
        assert not pgd.has_bulk_data

    # Requesting bulk data afterwards should trigger a re-parse of the cached asset
    with ue_parsing_context(bulk_data=True):
        pgd = loader[TEST_PGD_PKG]
        assert pgd.has_bulk_data

    # A fresh load with bulk data enabled should also have it
    loader.wipe_cache()
    with ue_parsing_context(bulk_data=True):
        pgd = loader[TEST_PGD_PKG]
        assert pgd.has_bulk_data
def test_linking(loader: AssetLoader):
    '''Linking is skipped when disabled, and re-done on demand.'''
    loader.wipe_cache()

    # With linking disabled the asset should come back unlinked
    with ue_parsing_context(link=False):
        pgd = loader[TEST_PGD_PKG]
        assert not pgd.is_linked

    # Requesting linking afterwards should trigger a re-parse of the cached asset
    with ue_parsing_context(link=True):
        pgd = loader[TEST_PGD_PKG]
        assert pgd.is_linked

    # A fresh load with linking enabled should also be linked
    loader.wipe_cache()
    with ue_parsing_context(link=True):
        pgd = loader[TEST_PGD_PKG]
        assert pgd.is_linked
def explore_path(path: str, loader: AssetLoader, excludes: Iterable[str], verbose=False, disable_debug=False):
    '''
    Run hierarchy discovery over every matching asset within the given path.

    :param path: Asset path prefix to scan.
    :param loader: Loader used to find and parse assets.
    :param excludes: Assetname patterns to skip.
    :param verbose: Log a progress line every 200 assets.
    :param disable_debug: Passed to the loader as `quiet` to suppress per-asset debug output.
    :raises MissingParent: If an asset's parentage cannot be resolved.
    '''
    excludes = set(excludes)

    logger.info('Discovering hierarchy in path: %s', path)

    n = 0

    # Properties are not needed for hierarchy discovery - skip them for speed
    with ue_parsing_context(properties=False):
        asset_iterator = loader.find_assetnames('.*', path, exclude=excludes, extension=asset_extensions, return_extension=True)
        for (assetname, ext) in asset_iterator:
            n += 1
            if verbose and n % 200 == 0:
                logger.info(assetname)

            try:
                asset = loader.load_asset(assetname, quiet=disable_debug)
            except AssetLoadException:
                logger.warning("Failed to load asset: %s", assetname)
                continue

            try:
                _ingest_asset(asset, loader, ext)
            except AssetLoadException:
                logger.warning("Failed to check parentage of %s", assetname)
            except MissingParent:
                logger.exception("Missing parent for %s", assetname)
                # Re-raise the original exception intact; the previous
                # `raise MissingParent from ex` replaced it with a new,
                # argument-less instance and lost the original message.
                raise

            # Remove maps from the cache immediately as they are large and cannot be inherited from
            if ext == '.umap':
                loader.cache.remove(assetname)
def is_species(cls_name: str, loader: AssetLoader, *, skip_character_check=False) -> bool:
    '''
    Verify a class is a valid character class, complete with DCSC.

    `cls_name` should be the fullname of the class to test
    `loader` needs to be an asset loader
    `skip_character_check` to avoid checking
    '''
    # Optionally confirm the class descends from the character base class
    if not skip_character_check and not ue.hierarchy.inherits_from(cls_name, CHR_CLS):
        return False

    # Properties are not needed for this scan - parse without them for speed
    with ue_parsing_context(properties=False):
        # Walk this class and all of its ancestors, looking for a DCSC sub-component
        for ancestor_name in ue.hierarchy.find_parent_classes(cls_name, include_self=True):
            # Only game content can carry the component we're after
            if not ancestor_name.startswith('/Game'):
                return False

            ancestor_asset_name = ancestor_name[:ancestor_name.rfind('.')]

            try:
                ancestor_asset = loader[ancestor_asset_name]
            except AssetLoadException:
                logger.exception(f'Unexpected loading error while checking for DCSCs of {cls_name}')
                return False  # no way to continue - abort

            # A DCSC-derived sub-component anywhere in the chain makes this a species
            for component in findSubComponentExports(ancestor_asset):
                if ue.hierarchy.inherits_from(component, DCSC_CLS):
                    return True

    return False
def _explore_path(path: str, is_mod: bool, arkman: ArkSteamManager,
                  verbose: bool = False) -> Generator[Tuple[str, str], None, None]:
    '''
    Scan every asset under `path` and yield (export fullname, parent class fullname) pairs
    for each export worth storing in the hierarchy.

    :param path: Asset path prefix to scan.
    :param is_mod: If True, only the mod's own ignore list applies; if False,
        all of /Game/Mods is excluded as well.
    :param arkman: Manager providing the config and the asset loader.
    :param verbose: Log a progress line every 200 assets and disable quiet loading.
    :raises ValueError: If an export has no resolvable parent or an empty fullname.
    '''
    n = 0  # running count of assets visited, for progress reporting

    includes = set(arkman.config.optimisation.SearchInclude)
    mod_excludes = set(arkman.config.optimisation.SearchIgnore)
    # Core scans additionally skip all mod content
    core_excludes = set(['/Game/Mods/.*', *arkman.config.optimisation.SearchIgnore])
    excludes = mod_excludes if is_mod else core_excludes

    loader = arkman.getLoader()

    # Neither properties nor bulk data are needed for parentage discovery - skip both for speed
    with ue_parsing_context(properties=False, bulk_data=False):
        asset_iterator = loader.find_assetnames(path,
                                                include=includes,
                                                exclude=excludes,
                                                extension=ue.hierarchy.asset_extensions,
                                                return_extension=True)
        for (assetname, ext) in asset_iterator:
            n += 1
            if verbose and n % 200 == 0:
                logger.info(assetname)

            try:
                asset = loader.load_asset(assetname, quiet=not verbose)
            except AssetLoadException:
                logger.warning("Failed to load asset: %s", assetname)
                continue

            try:
                export: ExportTableItem
                # NOTE(review): relies on private helpers of ue.hierarchy - presumably
                # kept in sync with that module; confirm before reusing elsewhere
                for export in ue.hierarchy._find_exports_to_store(asset, ext):
                    parent = ue.hierarchy._get_parent_cls(export)
                    fullname = export.fullname
                    if not parent:
                        raise ValueError(f"Unexpected missing parent for export: {fullname}")
                    if not fullname:
                        raise ValueError(f"Unexpected empty export name: {export.asset.assetname}.{export.name}")

                    yield (fullname, parent)
            except AssetLoadException:
                logger.warning("Failed to check parentage of %s", assetname)

            # Remove maps from the cache immediately as they are large and cannot be inherited from
            if ext == '.umap':
                loader.cache.remove(assetname)
def gather_dcsc_properties(species_cls: ExportTableItem, *, alt=False, report=False) -> DinoCharacterStatusComponent:
    '''
    Gather combined DCSC properties from a species, respecting CharacterStatusComponentPriority.

    :param species_cls: Export of the species character class to inspect.
    :param alt: Skip the top level when extracting properties (passed as `skip_top`).
    :param report: Print each discovered DCSC and its priority to stdout.
    :raises ValueError: If `species_cls` is not a species character class.
    :return: A DCSC proxy populated with the winning DCSC's properties.
    '''
    assert species_cls.asset and species_cls.asset.loader

    if not inherits_from(species_cls, PDC_CLS):
        raise ValueError("Supplied export should be a species character class")

    loader: AssetLoader = species_cls.asset.loader
    dcscs: List[Tuple[float, ExportTableItem]] = list()

    proxy: DinoCharacterStatusComponent = get_proxy_for_type(DCSC_CLS, loader)

    # Properties must be parsed to read priorities and stat values
    with ue_parsing_context(properties=True):
        # Gather DCSCs as we traverse from UObject back towards this species class
        for cls_name in find_parent_classes(species_cls, include_self=True):
            # Only game content can carry DCSC components
            if not cls_name.startswith('/Game'):
                continue

            asset: UAsset = loader[cls_name]
            for dcsc_export in _get_dcscs_for_species(asset):
                # Calculate the priority of this DCSC, falling back to the
                # default export of its class when not set on the instance
                pri_prop = get_property(dcsc_export, "CharacterStatusComponentPriority")
                if pri_prop is None:
                    dcsc_cls = loader.load_related(dcsc_export.klass.value).default_export
                    pri_prop = get_property(dcsc_cls, "CharacterStatusComponentPriority")
                pri = 0 if pri_prop is None else float(pri_prop)

                if report:
                    print(f'DCSC from {asset.assetname} = {dcsc_export.fullname} (pri {pri_prop} = {pri})')

                dcscs.append((pri, dcsc_export))

    # Order the DCSCs by CharacterStatusComponentPriority value, ascending, so the
    # last element is the highest-priority DCSC.
    # Python's sort is stable, so it will maintain the gathered order of exports
    # with identical priorities (e.g. Deinonychus)
    dcscs.sort(key=lambda p: p[0])

    # Collect properties from the winning (highest-priority) DCSC only
    props: Dict[str, Dict[int, UEBase]] = defaultdict(lambda: defaultdict(lambda: None))  # type: ignore
    if dcscs:
        extract_properties_from_export(dcscs[-1][1], props, skip_top=alt, recurse=True, report=False)

    proxy.update(props)

    return proxy
def test_defaults(loader: AssetLoader):
    '''Default parsing behaviour: linked, with properties, without bulk data.'''
    # Loading with no context at all uses the defaults
    loader.wipe_cache()
    pgd = loader[TEST_PGD_PKG]
    assert pgd.is_linked
    assert pgd.has_properties
    assert not pgd.has_bulk_data

    # An empty parsing context should behave exactly the same
    loader.wipe_cache()
    with ue_parsing_context():
        pgd = loader[TEST_PGD_PKG]
        assert pgd.is_linked
        assert pgd.has_properties
        assert not pgd.has_bulk_data
def test_no_properties_without_link(loader: AssetLoader):
    '''Properties cannot be parsed when linking is disabled.'''
    loader.wipe_cache()
    # Even with properties requested, disabling link prevents property parsing
    with ue_parsing_context(link=False, properties=True):
        pgd = loader[TEST_PGD_PKG]
        assert not pgd.has_properties
def do_extract(root: str, excludes: Set[str]):
    '''
    Export every asset under `root` to disk in the current output format,
    printing per-directory progress and running totals.

    NOTE(review): this is a closure - `output_dir`, `format`, `loader`, `ext`,
    `create_filename`, `fmt` and `sanitise_output` come from the enclosing
    scope; confirm their definitions before moving this function.

    :param root: Asset path prefix to export.
    :param excludes: Assetname patterns to skip.
    '''
    base_dir = output_dir / format

    # Properties are required for output; bulk data is not
    with ue_parsing_context(properties=True, bulk_data=False):
        asset_iterator = loader.find_assetnames(root,
                                                exclude=excludes,
                                                extension=ue.hierarchy.asset_extensions,
                                                return_extension=True)

        total_files = 0
        total_dirs = 0
        total_bytes = 0
        prev_path = ''  # last directory printed, to detect directory changes
        for (assetname, _) in asset_iterator:
            # print(assetname)
            show_stats = False

            total_files += 1
            output_filename = create_filename(assetname, ext)
            output_path = base_dir / output_filename
            output_path.parent.mkdir(exist_ok=True, parents=True)
            print(f'► {output_path}')
            # Entering a new directory: count it and periodically show stats
            if str(output_path.parent) != prev_path:
                total_dirs += 1
                prev_path = str(output_path.parent)
                print(output_path.parent.relative_to(base_dir))
                if (total_dirs % 100) == 0:
                    show_stats = True

            try:
                # Skip existing files for
                if output_path.is_file():
                    continue

                try:
                    asset = loader[assetname]
                except Exception:
                    print(f'ERROR: Unable to parse: {assetname}')
                    continue

                loader.wipe_cache()  # No caching!

                data = sanitise_output(asset.exports)
                binary = fmt(data, asset.names.values)
                output_path.write_bytes(binary)
            finally:
                # Count bytes for both freshly-written and pre-existing (skipped) files
                if output_path.is_file():
                    total_bytes += output_path.stat().st_size
                if show_stats:
                    print()
                    print(f'Total dirs: {total_dirs:,}')
                    print(f'Total files: {total_files:,}')
                    print(f'Total bytes: {total_bytes/1024/1024:,.1f}Mo')
                    print()

    print()
    print('Grand total:')
    print()
    print(f'Total dirs: {total_dirs:,}')
    print(f'Total files: {total_files:,}')
    print(f'Total bytes: {total_bytes/1024/1024:,.1f}Mo')