Example #1
def collect_data(asset: UAsset) -> Tuple[str, Any]:
    if args.default and args.export is not None:
        print("Cannot specify an export with --default", file=sys.stderr)
        sys.exit(1)

    if args.default:
        # Produce default export only
        if not hasattr(asset, 'default_export'):
            print("Asset does not have a default export", file=sys.stderr)
            sys.exit(1)

        assert asset.default_export and asset.default_export.fullname
        data = sanitise_output(asset.default_export.properties)
        filename = create_filename(asset.default_export.fullname)

    elif args.export:
        # Single export
        as_int: Optional[int] = None
        try:
            as_int = int(args.export)
        except ValueError:
            pass

        if as_int is not None:
            # Integer-based export lookup
            if as_int < 0 or as_int >= len(asset.exports.values):
                print(f"Maximum export index for this asset is {len(asset.exports.values)-1}", file=sys.stderr)
                sys.exit(1)

            export = asset.exports[as_int]
        else:
            # Name-based export lookup
            found_indexes = []
            search_name = args.export.lower()

            for i, export in enumerate(asset.exports.values):
                if str(export.name).lower() == search_name:
                    found_indexes.append(i)

            if not found_indexes:
                print("Export with this name not found", file=sys.stderr)
                sys.exit(1)
            elif len(found_indexes) > 1:
                print("This name was found at multiple indexes:", file=sys.stderr)
                pprint(found_indexes, stream=sys.stderr)
                sys.exit(1)

            export = asset.exports.values[found_indexes[0]]

        data = sanitise_output(export.properties)
        filename = create_filename(export.fullname)
    else:
        # Full asset extraction
        data = sanitise_output(asset)
        assert asset.assetname
        filename = create_filename(asset.assetname)

    return (filename, data)
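A note on the export argument above: it accepts either a numeric index or an export name, and the dispatch hinges on a tolerant int() parse. A minimal standalone sketch of that dispatch (parse_export_arg is a hypothetical helper, not part of the script above):

def parse_export_arg(arg: str):
    # Prefer an integer index; fall back to treating the argument as a name.
    try:
        return int(arg)
    except ValueError:
        return arg

assert parse_export_arg('3') == 3
assert parse_export_arg('Default__PrimalItem') == 'Default__PrimalItem'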
Example #2
def convert_npc_remaps(pgd: UAsset) -> List[ClassRemap]:
    assert pgd.default_export

    export_data = pgd.default_export.properties
    npcs = export_data.get_property('Remap_NPC', fallback=None)
    containers = export_data.get_property('Remap_NPCSpawnEntries',
                                          fallback=None)
    remaps = []

    if npcs:
        remaps += npcs.values
    if containers:
        remaps += containers.values

    out = []
    for entry in remaps:
        d = entry.as_dict()

        # Get the class-to-remap and ensure it is a valid reference.
        # Skip otherwise, as nulls cannot be spawned.
        from_class = d.get('FromClass', None)
        if not from_class:
            continue

        # Push the remap to the output list.
        v = ClassRemap(
            from_bp=sanitise_output(from_class),
            to=sanitise_output(d.get('ToClass', None)),
        )
        out.append(v)

    return out
Example #3
def convert_single_class_swap(d):
    result = WeighedClassSwap(from_class=sanitise_output(d['FromClass']),
                              exact=bool(d.get('bExactMatch', True)),
                              to=sanitise_output(d['ToClasses']),
                              weights=d['Weights'].values)

    # Leaving `during` as None is safe: it is the field default and is therefore omitted.
    if d['ActiveEvent'] and d['ActiveEvent'].value and d['ActiveEvent'].value.value:
        result.during = str(d['ActiveEvent'])

    return result
Example #4
def segregate_container_additions(
        pgd: UAsset) -> Optional[List[RuntimeGroupAddition]]:
    if not pgd.default_export:
        return None

    export_data = pgd.default_export.properties
    d = export_data.get_property('TheNPCSpawnEntriesContainerAdditions',
                                 fallback=None)
    if not d:
        return None

    # Extract the addition entries
    changes: Dict[str, List[RuntimeGroupAddition]] = defaultdict(list)
    for add in d.values:
        add = add.as_dict()
        klass = add['SpawnEntriesContainerClass']
        entries = add['AdditionalNPCSpawnEntries'].values
        limits = add['AdditionalNPCSpawnLimits'].values
        if not klass.value.value or (not entries and not limits):
            continue

        out = RuntimeGroupAddition(
            bp=sanitise_output(klass),
            entries=[convert_group_entry(entry) for entry in entries],
            limits=convert_limit_entries(limits),
        )

        # Skip if no data
        if not out.limits and not out.entries:
            continue

        # Append to the fragment list
        changes[out.bp].append(out)

    return _merge_changes(changes)
Example #5
def main():
    arkman = ArkSteamManager()
    loader = arkman.getLoader()
    config = get_global_config()

    assetname = sys.argv[1] if len(sys.argv) > 1 else None
    if not assetname:
        print('Usage: python ueexport.py <assetname>')
        sys.exit(1)

    assetname = find_asset(assetname, loader)
    if not assetname:
        print("Not found")
        sys.exit(1)

    asset = loader[assetname]
    if not asset.default_export:
        print("Asset has no default export")
        sys.exit(2)

    export = asset.default_export
    data = sanitise_output(export.properties)

    pprint(data)

    save_as_json(data, f'output/{asset.name}.{export.name}.json', pretty=True)
Example #6
    def _extract_and_save(self, version: str, modid: Optional[str], base_path: Path, relative_path: PurePosixPath,
                          proxy_iter: Iterator[UEProxyStructure]):
        # Work out the output path (cleaned)
        clean_relative_path = PurePosixPath('/'.join(get_valid_filename(p) for p in relative_path.parts))
        output_path = Path(base_path / clean_relative_path)

        # Setup the output structure
        results: List[Any] = []
        format_version = self.get_format_version()
        output: Dict[str, Any] = dict(version=version, format=format_version)

        # Pre-data comes before the main items
        pre_data = self.get_pre_data(modid) or dict()
        pre_data = sanitise_output(pre_data)
        output.update(pre_data)

        # Main items array
        output[self.get_field()] = results

        # Do the actual export into the existing `results` list
        for proxy in proxy_iter:
            item_output = self.extract(proxy)
            if item_output:
                item_output = sanitise_output(item_output)
                results.append(item_output)

        # Make the results available to get_post_data
        self.gathered_results = results

        # Post-data comes after the main items
        post_data = self.get_post_data(modid) or {}
        post_data = sanitise_output(post_data)
        output.update(post_data)
        post_data_has_content = post_data and any(post_data.values())

        # Clear gathered data reference
        del self.gathered_results

        # Save if the data changed
        if results or post_data_has_content:
            save_json_if_changed(output, output_path, self.get_use_pretty())
        else:
            # ...but remove an existing one if the output was empty
            if output_path.is_file():
                output_path.unlink()
Example #7
    def extract(cls, proxy: UEProxyStructure) -> GatheringResult:
        volume: TogglePainVolume = cast(TogglePainVolume, proxy)
        box = get_volume_bounds(volume)
        return models.PainVolume(
            start=box.start,
            center=box.center,
            end=box.end,
            immune=sanitise_output(volume.get('ActorClassesToExclude', fallback=[])),
        )
Example #8
    def extract(cls, proxy: UEProxyStructure) -> GatheringResult:
        dispatcher: MissionDispatcher_MultiUsePylon = cast(MissionDispatcher_MultiUsePylon, proxy)
        type_id = dispatcher.MissionTypeIndex[0].value
        location = get_actor_location_vector(dispatcher)

        return models.MissionDispatcher(type_=cls.MISSION_TYPE_MAP.get(type_id, type_id),
                                        missions=sanitise_output(dispatcher.MissionTypes[0].values),
                                        x=location.x,
                                        y=location.y,
                                        z=location.z)
Example #9
def _zip_swap_outputs(
        d: Dict[str, Any]) -> Iterable[Tuple[float, Optional[str]]]:
    npcs: ArrayProperty = d['ToClasses']
    weights = d['Weights'].values
    num_weights = len(weights)

    for index, kls in enumerate(npcs.values):
        # Get weight of this class. Defaults to 1 if array is too short.
        weight = float(weights[index]) if index < num_weights else 1.0
        yield (weight, sanitise_output(kls))
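The pairing rule here is worth spelling out: every class gets a weight, and classes beyond the end of the weights array default to 1.0. A self-contained sketch of the same behaviour over plain lists (these names are stand-ins, not the UE property objects above):

def zip_weights(classes, weights):
    # Pair each class with its weight; default to 1.0 when the weights
    # array is shorter than the class list, as in _zip_swap_outputs.
    for index, kls in enumerate(classes):
        yield (float(weights[index]) if index < len(weights) else 1.0, kls)

assert list(zip_weights(['ClassA', 'ClassB'], [2])) == [(2.0, 'ClassA'), (1.0, 'ClassB')]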
Example #10
    def ingest_level(self, level: UAsset):
        assert level.assetname
        assert level.loader
        assetname = level.assetname
        loader = cast(AssetLoader, level.loader)

        # Check if asset is a persistent level and mark it as such in map info object
        if not getattr(level, 'tile_info', None) and self.persistent_level != assetname:
            if self.persistent_level:
                logger.warning(f'Found a persistent level ({assetname}), but {self.persistent_level} was located earlier')
            else:
                self.persistent_level = assetname

        # Go through each export and, if valuable, gather data from it.
        for export in level.exports:
            gatherer = find_gatherer_for_export(export)
            if not gatherer:
                continue

            # Extract data using gatherer class.
            try:
                data = gatherer.extract(proxy=gather_properties(export))
            except Exception:
                logger.warning(f'Gathering properties failed for export "{export.name}" in {assetname}', exc_info=True)
                continue

            # Add fragment to data lists
            if data:
                if isinstance(data, GeneratorType):
                    data_fragments: list = sanitise_output(list(data))
                    for fragment in data_fragments:
                        if fragment:
                            self.data[gatherer].append(fragment)
                else:
                    fragment = sanitise_output(data)
                    self.data[gatherer].append(fragment)

        # Preemptively remove the level from linker cache.
        loader.cache.remove(assetname)
Example #11
def convert_single_class_swap(d: Dict[str, Any]) -> Optional[WeighedClassSwap]:
    result = WeighedClassSwap(from_class=sanitise_output(d['FromClass']),
                              exact=bool(d.get('bExactMatch', True)),
                              to=list(_zip_swap_outputs(d)))

    if not result.from_class:
        return None

    # Leaving `during` as None is safe: it is the field default and is therefore omitted.
    if d['ActiveEvent'] and d['ActiveEvent'].value and d['ActiveEvent'].value.value:
        result.during = str(d['ActiveEvent'])

    return result
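The chained check on ActiveEvent guards against three distinct nulls: a missing property, a null reference, and a reference whose target is empty. A tiny illustration of the same guard using hypothetical wrapper types (Ref and Target are stand-ins for the nested UE value wrappers):

from dataclasses import dataclass
from typing import Optional

@dataclass
class Target:
    value: Optional[str]

@dataclass
class Ref:
    value: Optional[Target]

def is_live(prop: Optional[Ref]) -> bool:
    # Property present, reference non-null, and target non-empty.
    return bool(prop and prop.value and prop.value.value)

assert not is_live(None)
assert not is_live(Ref(value=None))
assert not is_live(Ref(value=Target(value=None)))
assert is_live(Ref(value=Target(value='Event_Summer')))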
Example #12
def convert_group_entry(struct) -> NpcGroup:
    d = struct.as_dict()
    out = NpcGroup(
        name=str(d['AnEntryName']),
        weight=d['EntryWeight'],
        species=list(),
        randomSwaps=[],
    )

    # Export zipped NPC entries
    chances = d['NPCsToSpawnPercentageChance'].values
    offsets = d['NPCsSpawnOffsets'].values
    num_chances = len(chances)
    num_offsets = len(offsets)

    for index, kls in enumerate(d['NPCsToSpawn'].values):
        # Ensure the NPC class is not null.
        # The bp field was marked as optional before, but it left dead entries with only a chance.
        # Since we know more about NPC spawning now, and AE 893735676 has shipped a very dirty update
        # around Sep 10th, 2021, skip those entries entirely.
        if kls and kls.value and kls.value.value:
            npc = NpcEntry(
                chance=chances[index] if index < num_chances else 1,
                bp=sanitise_output(kls),
                offset=sanitise_output(offsets[index] if index < num_offsets
                                       else Vector(x=0, y=0, z=0)),
            )
            out.species.append(npc)

    # Export local random class swaps if any exist
    swaps = d['NPCRandomSpawnClassWeights'].values
    for entry in swaps:
        rule = convert_single_class_swap(entry.as_dict())
        if rule:
            out.randomSwaps.append(rule)

    return out
Example #13
def convert_limit_entries(array) -> Iterable[NpcLimit]:
    already_found = set()

    for entry in array:
        d = entry.as_dict()
        npc_class = sanitise_output(d['NPCClass'])
        mult = d['MaxPercentageOfDesiredNumToAllow']

        # We've already seen this class so this rule does not matter in context of this container, skip it.
        if npc_class in already_found:
            continue

        # Only yield if the NPC class isn't a null and the max multiplier isn't 1.0.
        if npc_class and mult != 1.0:
            already_found.add(npc_class)
            yield NpcLimit(bp=npc_class, mult=mult)
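Two properties of this generator deserve a note: the first valid occurrence of a class wins, and multipliers of exactly 1.0 are no-ops that are neither yielded nor remembered, so a later non-1.0 entry for the same class can still be emitted. A self-contained sketch of that filtering over plain tuples (the data is made up for illustration):

def unique_limits(entries):
    # First valid occurrence of each class wins; multipliers of 1.0 are
    # no-ops and are neither yielded nor remembered.
    seen = set()
    for klass, mult in entries:
        if klass in seen:
            continue
        if klass and mult != 1.0:
            seen.add(klass)
            yield (klass, mult)

assert list(unique_limits([('Dodo', 0.5), ('Dodo', 0.25), ('Raptor', 1.0)])) == [('Dodo', 0.5)]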
Example #14
    def _gather_data_from_levels(self, levels: List[str], known_persistent: Optional[str] = None) -> MapInfo:
        '''
        Goes through each sublevel, gathering data and looking for the persistent level.
        '''
        map_info = MapInfo(data=dict())
        for assetname in levels:
            asset = self.manager.loader[assetname]

            # Check if asset is a persistent level and collect data from it.
            if not getattr(asset, 'tile_info', None) and (not known_persistent or known_persistent == assetname):
                if getattr(map_info, 'persistent_level', None):
                    logger.warning(
                        f'Found another persistent level ({assetname}), but {map_info.persistent_level} was located earlier: skipping.'
                    )
                    continue

                map_info.persistent_level = assetname

            # Go through each export and, if valuable, gather data from it.
            for export in asset.exports:
                helper = find_gatherer_for_export(export)
                if helper:
                    # Make sure the data list is initialized.
                    category_name = helper.get_category_name()
                    if category_name not in map_info.data:
                        map_info.data[category_name] = list()

                    # Extract data using helper class.
                    try:
                        for data_fragment in helper.extract(proxy=gather_properties(export)):
                            if not data_fragment:
                                continue

                            # Sanitise the data fragment to remove references to the UE tree.
                            data_fragment = sanitise_output(data_fragment)

                            # Add to the list.
                            map_info.data[category_name].append(data_fragment)
                    except Exception:
                        logger.warning(f'Gathering properties failed for export "{export.name}" in {assetname}', exc_info=True)
                        continue

            # Preemptively remove the level from linker cache.
            self.manager.loader.cache.remove(assetname)

        return map_info
Example #15
    def extract(cls, proxy: UEProxyStructure) -> models.WorldSettings:
        settings: PrimalWorldSettings = cast(PrimalWorldSettings, proxy)
        source: ExportTableItem = cast(ExportTableItem, proxy.get_source())

        display_name: Union[StringProperty, str]
        if settings.has_override('Title'):
            display_name = settings.Title[0]
        else:
            display_name = get_leaf_from_assetname(source.asset.assetname)
            # Strip the persistent-level suffix; rstrip('_P') would also eat trailing 'P' characters.
            if display_name.endswith('_P'):
                display_name = display_name[:-2]
            display_name = uelike_prettify(display_name)

        result = models.WorldSettings(
            source=source.asset.assetname,
            name=display_name,

            # Geo
            latOrigin=settings.LatitudeOrigin[0],
            longOrigin=settings.LongitudeOrigin[0],
            latScale=settings.LatitudeScale[0],
            longScale=settings.LongitudeScale[0],

            # Gameplay Settings
            maxDifficulty=settings.OverrideDifficultyMax[0],
            mapTextures=models.InGameMapTextureSet(
                held=sanitise_output(
                    settings.get('OverrideWeaponMapTextureFilled', 0, None)),
                emptyHeld=sanitise_output(
                    settings.get('OverrideWeaponMapTextureEmpty', 0, None)),
                empty=sanitise_output(
                    settings.get('OverrideUIMapTextureEmpty', 0, None)),
                big=sanitise_output(
                    settings.get('OverrideUIMapTextureFilled', 0, None)),
                small=sanitise_output(
                    settings.get('OverrideUIMapTextureSmall', 0, None)),
            ),
            # Spawns
            onlyEventGlobalSwaps=bool(
                settings.bPreventGlobalNonEventSpawnOverrides[0]),
            randomNPCClassWeights=list(cls._convert_class_swaps(settings)),
            # Uploads
            allowedDinoDownloads=sanitise_output(
                settings.get('AllowDownloadDinoClasses', 0, ())),
        )

        # Calculate remaining geo fields
        result.latMulti = result.latScale * 10
        result.latShift = -result.latOrigin / result.latMulti
        result.longMulti = result.longScale * 10
        result.longShift = -result.longOrigin / result.longMulti

        return result
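The derived geo fields express the standard world-to-map transform: latMulti is world units per map percentage point and latShift recentres the origin. A worked sketch with made-up origin/scale values, assuming the usual lat = y / latMulti + latShift mapping:

lat_origin, lat_scale = -400_000.0, 800.0
lat_multi = lat_scale * 10             # world units per map percentage point
lat_shift = -lat_origin / lat_multi    # origin offset, in percentage points

world_y = 0.0                          # a world-space Y coordinate
lat = world_y / lat_multi + lat_shift
assert lat == 50.0                     # the world origin lands mid-map here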
Example #16
def test_field_prop_in_constructor(field_type, field_name, value, target):
    '''
    Ensure UE values in models are accepted and converted
    correctly, when set in the constructor.
    '''
    # Setup
    create_fn = field_type.create
    field_value = create_fn(value)
    kwargs = {field_name: field_value}

    # Create model with field in constructor
    model = UETypedModel(**kwargs)

    # Convert model to dict, verify field is untouched
    output = model.dict()
    assert output[field_name] is field_value

    # Sanitise model, verify field is converted as expected
    result = sanitise_output(output)
    field_result = result[field_name]
    assert field_result == target
Example #17
    def extract(cls, proxy: UEProxyStructure) -> GatheringResult:
        dispatcher: MissionDispatcher = cast(MissionDispatcher, proxy)
        location = get_actor_location_vector(dispatcher)
        out = models.MissionDispatcher(x=location.x,
                                       y=location.y,
                                       z=location.z)

        # Genesis 1 has dispatchers that only allow specific missions.
        if isinstance(dispatcher, MissionDispatcher_MultiUsePylon):
            dispatcher_gen1 = cast(MissionDispatcher_MultiUsePylon, dispatcher)
            type_id = dispatcher_gen1.MissionTypeIndex[0].value
            out.type_ = cls.MISSION_TYPE_MAP.get(type_id, type_id)
            out.missions = sanitise_output(
                dispatcher_gen1.MissionTypes[0].values)

        # Export a flag in case this dispatcher is used to start the Rockwell Prime fight.
        # Allows for easier identification.
        if isinstance(dispatcher, MissionDispatcher_FinalBattle):
            out.isRockwellBattle = True

        return out
Example #18
def test_field_prop_as_attr(field_type, field_name, value, target):
    '''
    Ensure UE values in models are accepted and converted
    correctly, when set after creation.
    '''
    # Setup
    model = UETypedModel()
    create_fn = field_type.create
    field_value = create_fn(value)

    # Set the model's field
    setattr(model, field_name, field_value)
    assert getattr(model, field_name) is field_value

    # Convert model to dict, verify field is untouched
    output = model.dict()
    assert output[field_name] is field_value

    # Sanitise model, verify field is converted as expected
    result = sanitise_output(output)
    field_result = result[field_name]
    assert field_result == target
Example #19
    def extract(self, proxy: UEProxyStructure) -> Engram:
        engram: PrimalEngramEntry = cast(PrimalEngramEntry, proxy)

        out = Engram(
            description=engram.ExtraEngramDescription[0]
            if engram.has_override('ExtraEngramDescription') else None,
            blueprintPath=engram.get_source().fullname,
            itemBlueprintPath=sanitise_output(
                engram.get('BluePrintEntry', 0, None)),
            group=convert_engram_group(engram),
            requirements=EngramRequirements(
                characterLevel=engram.RequiredCharacterLevel[0],
                engramPoints=engram.RequiredEngramPoints[0],
            ),
            manualUnlock=bool(engram.bCanBeManuallyUnlocked[0]),
            givesBP=bool(engram.bGiveBlueprintToPlayerInventory[0]),
        )

        if 'EngramRequirementSets' in engram:
            out.requirements.otherEngrams = list(
                convert_requirement_sets(engram))

        return out
Example #20
    def extract(cls, proxy: UEProxyStructure) -> GatheringResult:
        manager: NPCZoneManager = cast(NPCZoneManager, proxy)

        # Sanity checks
        spawn_group = manager.get('NPCSpawnEntriesContainerObject', 0, None)
        count_volumes = manager.get('LinkedZoneVolumes', 0, None)
        if not spawn_group or not spawn_group.value.value or not count_volumes:
            return None

        # Export properties
        result = models.NPCManager(
            disabled=not manager.bEnabled[0],
            spawnGroup=sanitise_output(spawn_group),
            minDesiredNumberOfNPC=manager.MinDesiredNumberOfNPC[0],
            neverSpawnInWater=manager.bNeverSpawnInWater[0],
            forceUntameable=manager.bForceUntameable[0],
        )

        # Export dino counting regions
        result.locations = list(cls._extract_counting_volumes(count_volumes))
        # Export spawn points if present
        spawn_points = manager.get('SpawnPointOverrides', 0, None)
        spawn_volumes = manager.get('LinkedZoneSpawnVolumeEntries', 0, None)
        if spawn_points:
            result.spawnPoints = list(cls._extract_spawn_points(spawn_points))
        # Export spawn regions if present
        # Behaviour verified in DevKit. Dinos don't spawn in spawning volumes if
        # points were manually specified.
        elif spawn_volumes:
            result.spawnLocations = list(
                cls._extract_spawn_volumes(spawn_volumes))

        # Check if we extracted any spawn data at all, otherwise we can skip it.
        if not result.spawnPoints and not result.spawnLocations:
            return None

        return result
Example #21
    def _convert_crate_classes(cls, entries) -> Iterable[models.ObjectPath]:
        for entry in entries.values:
            klass = entry.as_dict()['CrateTemplate']
            if klass:
                yield sanitise_output(klass)
Example #22
    def _extract_and_save(self,
                          version: str,
                          modid: Optional[str],
                          base_path: Path,
                          relative_path: PurePosixPath,
                          proxy_iter: Iterator[UEProxyStructure],
                          *,
                          schema_file: Optional[PurePosixPath] = None):
        # Work out the output path (cleaned)
        clean_relative_path = PurePosixPath(*(get_valid_filename(p) for p in relative_path.parts))
        output_path = Path(base_path / clean_relative_path)

        # Setup the output structure
        results: List[Any] = []
        format_version = self.get_format_version()
        output: Dict[str, Any] = dict()
        if schema_file:
            model = self.get_schema_model()  # pylint: disable=assignment-from-none
            assert model
            expected_subtype = _get_model_list_field_type(model, self.get_field())
            output['$schema'] = str(_calculate_relative_path(clean_relative_path, schema_file))
        output['version'] = version
        output['format'] = format_version

        # Pre-data comes before the main items
        pre_data = self.get_pre_data(modid) or dict()
        pre_data = sanitise_output(pre_data)
        output.update(pre_data)

        # Main items array
        output[self.get_field()] = results

        # Do the actual export into the existing `results` list
        for proxy in proxy_iter:
            item_output = self.extract(proxy)
            if item_output:
                if schema_file and expected_subtype and not isinstance(item_output, expected_subtype):
                    raise TypeError(f"Expected {expected_subtype} from schema-enabled exported item but got {type(item_output)}")

                item_output = sanitise_output(item_output)
                results.append(item_output)

        # Make the results available to get_post_data
        self.gathered_results = results

        # Post-data comes after the main items
        post_data = self.get_post_data(modid) or {}
        post_data = sanitise_output(post_data)
        output.update(post_data)
        post_data_has_content = post_data and any(post_data.values())

        # Clear gathered data reference
        del self.gathered_results

        # Save if the data changed
        if results or post_data_has_content:
            save_json_if_changed(output, output_path, self.get_use_pretty())
        else:
            # ...but remove an existing one if the output was empty
            if output_path.is_file():
                output_path.unlink()
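The tail of this method encodes a write-or-cleanup rule: emit the file only when there is real content, and delete a stale file from a previous run otherwise. A standalone sketch of that pattern (save_or_cleanup and the plain write_text call are illustrative stand-ins for save_json_if_changed):

from pathlib import Path

def save_or_cleanup(content: str, has_content: bool, path: Path) -> None:
    if has_content:
        # Write the output (a change-aware writer would compare first).
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(content)
    elif path.is_file():
        # Remove a leftover file so empty outputs do not linger on disk.
        path.unlink()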
Example #23
def collect_data(asset: UAsset) -> Tuple[str, Any]:
    if args.default and args.export is not None:
        print("Cannot specify an export with --default", file=sys.stderr)
        sys.exit(1)

    if args.ovi:
        if not args.export and not args.default:
            # Grab the default export since we need a starting point for the proxy.
            args.default = True
        export = find_export(asset)
        assert export
        assert export.fullname

        if not inherits_from(export, PrimalItem.get_ue_type()):
            print(f"Export {export.name} is not a subclass of PrimalItem.", file=sys.stderr)
            sys.exit(1)

        proxy: UEProxyStructure = gather_properties(export)
        item = cast(PrimalItem, proxy)

        if 'ItemIconMaterialParent' not in item:
            print("Item does not use an icon shader", file=sys.stderr)
            sys.exit(1)

        name = get_item_name(item) or export.name
        data: Dict[str, Any] = dict(
            Format='2.0',
            Name=name,
            BlueprintPath=export.fullname,
        )

        assert asset.loader
        mat_instance = asset.loader.load_related(item.ItemIconMaterialParent[0]).default_export
        assert mat_instance
        mat_properties = mat_instance.properties.as_dict()

        # Convert all parameters from the material instance.
        parameters = dict()
        if 'ScalarParameterValues' in mat_properties:
            for param in mat_properties['ScalarParameterValues'][0].values:
                param_info = param.as_dict()
                param_name = sanitise_output(param_info['ParameterName'])
                parameters[param_name] = param_info['ParameterValue']
        if 'VectorParameterValues' in mat_properties:
            for param in mat_properties['VectorParameterValues'][0].values:
                param_info = param.as_dict()
                param_name = sanitise_output(param_info['ParameterName'])
                parameters[param_name] = param_info['ParameterValue'].values[0]
        if 'TextureParameterValues' in mat_properties:
            for param in mat_properties['TextureParameterValues'][0].values:
                param_info = param.as_dict()
                param_name = sanitise_output(param_info['ParameterName'])
                parameters[param_name] = param_info['ParameterValue']

        if parameters:
            data['2DMaterial'] = dict(Parent=mat_properties['Parent'][0], **parameters)
        else:
            # Export only the parent material as the instance has no parameters.
            data['2DMaterial'] = mat_properties['Parent'][0]

        data = sanitise_output(data)
        filename = create_filename(export.fullname)

    elif args.default or args.export:
        export = find_export(asset)
        assert export
        assert export.fullname
        data = sanitise_output(export.properties)
        filename = create_filename(export.fullname)

    else:
        # Full asset extraction
        data = sanitise_output(asset)
        assert asset.assetname
        filename = create_filename(asset.assetname)

    return (filename, data)
Example #24
def do_extract(root: str, excludes: Set[str]):
    base_dir = output_dir / format

    with ue_parsing_context(properties=True, bulk_data=False):
        asset_iterator = loader.find_assetnames(
            root,
            exclude=excludes,
            extension=ue.hierarchy.asset_extensions,
            return_extension=True)

        total_files = 0
        total_dirs = 0
        total_bytes = 0

        prev_path = ''
        for (assetname, _) in asset_iterator:
            # print(assetname)
            show_stats = False

            total_files += 1
            output_filename = create_filename(assetname, ext)
            output_path = base_dir / output_filename
            output_path.parent.mkdir(exist_ok=True, parents=True)
            print(f'► {output_path}')

            if str(output_path.parent) != prev_path:
                total_dirs += 1
                prev_path = str(output_path.parent)
                print(output_path.parent.relative_to(base_dir))

                if (total_dirs % 100) == 0:
                    show_stats = True

            try:
                # Skip files that already exist
                if output_path.is_file():
                    continue

                try:
                    asset = loader[assetname]
                except Exception:
                    print(f'ERROR: Unable to parse: {assetname}')
                    continue
                loader.wipe_cache()  # No caching!

                data = sanitise_output(asset.exports)
                binary = fmt(data, asset.names.values)
                output_path.write_bytes(binary)

            finally:
                if output_path.is_file():
                    total_bytes += output_path.stat().st_size

                if show_stats:
                    print()
                    print(f'Total dirs:  {total_dirs:,}')
                    print(f'Total files: {total_files:,}')
                    print(f'Total bytes: {total_bytes/1024/1024:,.1f} MiB')
                    print()

        print()
        print('Grand total:')
        print()
        print(f'Total dirs:  {total_dirs:,}')
        print(f'Total files: {total_files:,}')
        print(f'Total bytes: {total_bytes/1024/1024:,.1f} MiB')
Example #25
def convert_requirement_sets(
        engram: PrimalEngramEntry) -> Iterable[Optional[str]]:
    for struct in engram.EngramRequirementSets[0].values:
        for entry in struct.get_property('EngramEntries').values:
            yield sanitise_output(entry)