def test_gather_ab_dodo(scan_and_load):
    """The Aberrant Dodo asset inherits from Dodo and gathers into a PrimalDinoCharacter proxy."""
    export = scan_and_load(DODO_AB_CHR)
    assert inherits_from(export, DODO_CHR)

    proxy: PrimalDinoCharacter = gather_properties(export)

    assert isinstance(proxy, UEProxyStructure)
    assert isinstance(proxy, PrimalDinoCharacter)
    assert str(proxy.DescriptiveName[0]) == 'Aberrant Dodo'
def _iterate_exports(self, classes: Set[str], sort: bool) -> Iterator[UEProxyStructure]:
    """Load each class and yield its gathered proxy, skipping failures.

    Classes that have a Default__ counterpart in the set are dropped in
    favour of that counterpart before iteration begins.
    """
    # Remove the plain class name wherever a Default__ twin is present.
    shadowed = {name.replace('Default__', '') for name in classes if '.Default__' in name}
    classes -= shadowed

    # Optionally sort for consistent output ordering.
    ordered = sorted(classes) if sort else classes

    for name in ordered:
        try:
            export = self.loader.load_class(name)
        except AssetLoadException:
            logger.warning('Failed to load asset during export: %s', name)
            continue

        try:
            proxy: UEProxyStructure = gather_properties(export)
        except Exception:  # pylint: disable=broad-except
            logger.warning('Failed to gather properties from asset: %s', name)
            continue

        yield proxy
def gather_cloning_data(species: PrimalDinoCharacter) -> Optional[CloningData]:
    """Compute element costs and cloning times for a species.

    Returns None when the species cannot be cloned, or when cloning
    would be free (which likely cannot be obtained naturally).
    """
    if not can_be_cloned(species):
        return None

    loader = species.get_source().asset.loader
    chamber_asset = loader[CLONING_CHAMBER_C]
    assert chamber_asset.default_export
    chamber = cast(TekCloningChamber, gather_properties(chamber_asset.default_export))

    cost_base = species.CloneBaseElementCost[0] * chamber.CloneBaseElementCostGlobalMultiplier[0]
    # skipped: CharacterLevel
    cost_level = species.CloneElementCostPerLevel[0] * chamber.CloneElementCostPerLevelGlobalMultiplier[0]

    shard_time = chamber.CloningTimePerElementShard[0]
    time_base = shard_time * cost_base  # skipped: BabyMatureSpeedMultiplier
    time_level = shard_time * cost_level  # skipped: BabyMatureSpeedMultiplier, CharacterLevel

    if cost_base == 0:
        # Free cloning, skip for sanity, it probably can't be obtained naturally.
        return None

    return CloningData(
        costBase=round(cost_base, 1),
        costLevel=round(cost_level, 2),
        timeBase=round(time_base, 1),
        timeLevel=round(time_level, 2),
    )
def test_gather_purloviatest_pgd(scan_and_load):
    """The test PGD asset gathers into a PrimalGameData proxy with the expected mod metadata."""
    export = scan_and_load(TEST_PGD_CLS)

    proxy: PrimalGameData = gather_properties(export)

    assert isinstance(proxy, UEProxyStructure)
    assert isinstance(proxy, PrimalGameData)
    assert str(proxy.ModName[0]) == 'PurloviaTEST'
    assert str(proxy.ModDescription[0]) == 'Test mod used for Purlovia'
def _gather_data_from_level(self, level: UAsset, world_data: WorldData):
    """Gather each extractable export from the level into world_data via PROXY_TYPE_MAP."""
    for export in level.exports:
        if not is_export_extractable(export):
            continue

        proxy = gather_properties(export)  # type:ignore
        handler = PROXY_TYPE_MAP.get(proxy.get_ue_type(), None)
        if handler is None:
            # No export mapping registered for this proxy type.
            logger.error(
                f'Unsupported type: no export mapping exists for "{proxy.get_ue_type()}".'
            )
        else:
            handler(world_data, proxy)  # type:ignore

        del proxy
def _extract_single(cls, option: ObjectProperty) -> Optional[models.Trade]:
    """Convert one tradable-option reference into a Trade model; None if it has no item class."""
    loader = option.asset.loader
    export = loader.load_class(option.value.value.fullname)
    trade: HexagonTradableOption = gather_properties(export)

    # Options without an item class are not real trades.
    if not trade.get('ItemClass', fallback=None):
        return None

    return models.Trade(
        bp=trade.get_source().fullname,
        item=str(trade.ItemClass[0].value.value.name),
        qty=trade.Quantity[0],
        cost=trade.ItemCost[0],
    )
def _gather_data_from_levels(self, levels: List[str], known_persistent: Optional[str] = None) -> MapInfo:
    '''
    Goes through each sublevel, gathering data and looking for the persistent level.

    :param levels: Asset names of the sublevels to scan.
    :param known_persistent: Optional asset name of the already-known persistent level.
    :returns: A MapInfo with per-category data lists and (if found) the persistent level name.
    '''
    map_info = MapInfo(data=dict())

    for assetname in levels:
        asset = self.manager.loader[assetname]

        # Check if asset is a persistent level and collect data from it.
        if not getattr(asset, 'tile_info', None) and (not known_persistent or known_persistent == assetname):
            if getattr(map_info, 'persistent_level', None):
                logger.warning(
                    f'Found another persistent level ({assetname}), but {map_info.persistent_level} was located earlier: skipping.'
                )
                continue
            map_info.persistent_level = assetname

        # Go through each export and, if valuable, gather data from it.
        for export in asset.exports:
            helper = find_gatherer_for_export(export)
            if helper:
                # Make sure the data list is initialized.
                category_name = helper.get_category_name()
                if category_name not in map_info.data:
                    map_info.data[category_name] = list()

                # Extract data using helper class.
                try:
                    for data_fragment in helper.extract(proxy=gather_properties(export)):
                        if not data_fragment:
                            continue

                        # Sanitise the data fragment to remove references to the UE tree.
                        data_fragment = sanitise_output(data_fragment)

                        # Add to the list.
                        map_info.data[category_name].append(data_fragment)
                # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
                # KeyboardInterrupt. Catch Exception only, matching ingest_level's handling.
                except Exception:  # pylint: disable=broad-except
                    logger.warning(f'Gathering properties failed for export "{export.name}" in {assetname}', exc_info=True)
                    continue

        # Preemptively remove the level from linker cache.
        self.manager.loader.cache.remove(assetname)

    return map_info
def extract_single(cls, export: ExportTableItem) -> models.Glitch:
    """Build a Glitch model (reward hexagons + world location) from a point-of-interest export."""
    poi: PointOfInterestBP = gather_properties(export)
    position = get_actor_location_vector(poi)

    result = models.Glitch(
        hexagons=poi.number_of_hexagons_to_reward_upon_fixing[0],
        x=position.x,
        y=position.y,
        z=position.z,
    )

    # -1 means no explorer note is tied to this glitch.
    note_index = poi.get('Specific_Unlocked_Explorer_Note_Index', fallback=-1)
    if note_index != -1:
        result.noteId = note_index

    # TODO: consider also exporting MyPointOfInterestData's PointTag as poiTag.
    return result
def ingest_level(self, level: UAsset):
    """Gather data fragments from every recognised export in the level, then drop it from cache."""
    assert level.assetname
    assert level.loader
    assetname = level.assetname
    loader = cast(AssetLoader, level.loader)

    # Persistent-level check: a level without tile info is the persistent one.
    if not getattr(level, 'tile_info', None) and self.persistent_level != assetname:
        if self.persistent_level:
            logger.warning(f'Found a persistent level ({assetname}), but {self.persistent_level} was located earlier')
        else:
            self.persistent_level = assetname

    # Go through each export and, if valuable, gather data from it.
    for export in level.exports:
        gatherer = find_gatherer_for_export(export)
        if not gatherer:
            continue

        # Extract data using gatherer class.
        try:
            data = gatherer.extract(proxy=gather_properties(export))
        except Exception:
            logger.warning(f'Gathering properties failed for export "{export.name}" in {assetname}', exc_info=True)
            continue

        if not data:
            continue

        # Generators produce many fragments; everything else is a single fragment.
        if isinstance(data, GeneratorType):
            fragments: list = sanitise_output(list(data))
            for fragment in fragments:
                if fragment:
                    self.data[gatherer].append(fragment)
        else:
            self.data[gatherer].append(sanitise_output(data))

    # Preemptively remove the level from linker cache.
    loader.cache.remove(assetname)
def __post_init__(self, _level):
    """Locate the PrimalWorldSettings export and derive the map name and geo data from it.

    :param _level: The UAsset of the (persistent) level to inspect.
    :raises RuntimeError: If no PrimalWorldSettings export exists in the level.
    """
    # Some maps have misnamed PrimalWorldSettings export
    # and that prevents usage of AssetLoader.load_class.
    for export in _level.exports:
        if str(export.klass.value.name) == 'PrimalWorldSettings':
            self.world_settings = gather_properties(export)
            break

    if not self.world_settings:
        raise RuntimeError(f'PrimalWorldSettings could not have been found in "{_level.assetname}".')

    # Prefer the explicit Title; fall back to the default class name.
    if str(self.world_settings.Title[0]):
        self.name = str(self.world_settings.Title[0])
    else:
        self.name = str(_level.default_class.name)

    # BUGFIX: str.rstrip removes a trailing *character set*, not a suffix
    # (e.g. 'NPC_C'.rstrip('_C') yields 'NP'), so strip the known
    # '_C'/'_P' suffixes explicitly instead.
    for suffix in ('_C', '_P'):
        if self.name.endswith(suffix):
            self.name = self.name[:-len(suffix)]

    self.latitude, self.longitude = gather_geo_data(self.world_settings)

    if getattr(self.world_settings, 'NPCRandomSpawnClassWeights', None):
        self.npcRandomSpawnClassWeights = [{
            'from': data.get_property('FromClass'),
            'to': data.get_property('ToClasses'),
            'chances': data.get_property('Weights')
        } for data in self.world_settings.NPCRandomSpawnClassWeights[0].values]
def test_gather_dodo(scan_and_load):
    """The Dodo asset gathers into a PrimalDinoCharacter proxy with the expected name."""
    export = scan_and_load(DODO_CHR)

    proxy: PrimalDinoCharacter = gather_properties(export)

    assert isinstance(proxy, UEProxyStructure)
    assert isinstance(proxy, PrimalDinoCharacter)
    assert str(proxy.DescriptiveName[0]) == 'Dodo'
def _gather_icon_parameters(mat_properties) -> Dict[str, Any]:
    """Collect scalar, vector and texture parameter values from a material instance's property dict."""
    parameters: Dict[str, Any] = dict()

    if 'ScalarParameterValues' in mat_properties:
        for param in mat_properties['ScalarParameterValues'][0].values:
            param_info = param.as_dict()
            param_name = sanitise_output(param_info['ParameterName'])
            parameters[param_name] = param_info['ParameterValue']

    if 'VectorParameterValues' in mat_properties:
        for param in mat_properties['VectorParameterValues'][0].values:
            param_info = param.as_dict()
            param_name = sanitise_output(param_info['ParameterName'])
            # Vector parameter values are nested one level deeper.
            parameters[param_name] = param_info['ParameterValue'].values[0]

    if 'TextureParameterValues' in mat_properties:
        for param in mat_properties['TextureParameterValues'][0].values:
            param_info = param.as_dict()
            param_name = sanitise_output(param_info['ParameterName'])
            parameters[param_name] = param_info['ParameterValue']

    return parameters


def collect_data(asset: UAsset) -> Tuple[str, Any]:
    """Extract data from an asset according to the CLI args, returning (filename, data).

    Modes (from the enclosing `args`):
    - --ovi: item icon shader extraction (asset must be a PrimalItem subclass).
    - --default / --export: properties of a single export.
    - otherwise: full asset extraction.

    Exits the process with status 1 on invalid argument combinations or
    unsuitable assets.
    """
    if args.default and args.export is not None:
        print("Cannot specify an export with --default", file=sys.stderr)
        sys.exit(1)

    if args.ovi:
        if not args.export and not args.default:
            # Grab the default export since we need a starting point for the proxy.
            args.default = True

        export = find_export(asset)
        assert export
        assert export.fullname

        if not inherits_from(export, PrimalItem.get_ue_type()):
            print(f"Export {export.name} is not a subclass of PrimalItem.", file=sys.stderr)
            sys.exit(1)

        proxy: UEProxyStructure = gather_properties(export)
        item = cast(PrimalItem, proxy)

        if 'ItemIconMaterialParent' not in item:
            print("Item does not use an icon shader", file=sys.stderr)
            sys.exit(1)

        name = get_item_name(item) or export.name
        data: Dict[str, Any] = dict(
            Format='2.0',
            Name=name,
            BlueprintPath=export.fullname,
        )

        assert asset.loader
        mat_instance = asset.loader.load_related(item.ItemIconMaterialParent[0]).default_export
        assert mat_instance
        mat_properties = mat_instance.properties.as_dict()

        # Convert all parameters from the material instance.
        parameters = _gather_icon_parameters(mat_properties)

        if parameters:
            data['2DMaterial'] = dict(Parent=mat_properties['Parent'][0], **parameters)
        else:
            # Export only the parent material as the instance has no parameters.
            data['2DMaterial'] = mat_properties['Parent'][0]

        data = sanitise_output(data)
        filename = create_filename(export.fullname)
    elif args.default or args.export:
        export = find_export(asset)
        assert export
        assert export.fullname
        data = sanitise_output(export.properties)
        filename = create_filename(export.fullname)
    else:
        # Full asset extraction
        data = sanitise_output(asset)
        assert asset.assetname
        filename = create_filename(asset.assetname)

    return (filename, data)
for assetname in assetnames: try: asset: UAsset = loader[assetname] except Exception: print(f"Couldn't load: {assetname}") continue if not asset.default_export or not asset.default_class: print(f"No default exports: {assetname}") continue if not inherits_from(asset.default_export, PRIMALITEM_CLS): print(f"Not an item: {assetname}") continue item: PrimalItem = gather_properties(asset.default_export) v: Dict[str, Any] = dict() v['name'] = str(item.DescriptiveNameBase[0]) v['description'] = str(item.ItemDescription[0]) v['blueprintPath'] = asset.default_class.fullname if hasattr(item, 'BaseCraftingResourceRequirements'): recipe = item.BaseCraftingResourceRequirements[0] if recipe.values: v['recipe'] = [] for entry in recipe.values: recipe_line = entry.as_dict() v['recipe'].append( dict(
max=int(d['MaxNumItems']), numItemsPower=int(d['NumItemsPower']), setWeight=int(d['SetWeight']), entries=[ decode_item_entry(entry) for entry in d['ItemEntries'].values ]) species = [] for assetname in assetnames: asset: UAsset = loader[assetname] assert asset.default_export assert asset.default_class item: DinoDrop = gather_properties(asset.default_export) item_sets: List[Any] = [] if item.has_override('ItemSets', 0): item_sets.extend(item.ItemSets[0].values) if item.has_override('AdditionalItemSets', 0): item_sets.extend(item.AdditionalItemSets[0].values) if not item_sets: continue v: Dict[str, Any] = dict() v['class'] = str(asset.default_class.name) v['sets'] = [ d for d in (decode_item_set(item_set) for item_set in item_sets) if d['entries']