Example #1
0
    def _export_modded_spawn_groups(self, modid: str, version: str,
                                    moddata: dict):
        '''
        Gather spawn groups from a mod's PrimalGameData asset and write them
        out as `spawningGroups.json` under a per-mod publish directory.

        :param modid: Mod being exported (used only for the warning message).
        :param version: Version string recorded in the output.
        :param moddata: Mod metadata; must carry 'package', 'id', 'name', 'title'.
        '''
        pgd_assetname = moddata.get('package', None)
        if not pgd_assetname:
            logger.warning(
                f'PrimalGameData information missing for mod {modid}')
            return

        pgd_asset = self.loader[pgd_assetname]
        groups = gather_spawn_groups_from_pgd(self.loader, pgd_asset)
        if not groups:
            return

        # Assemble the output document (mod info block, version, then groups)
        dirname = get_valid_filename(f"{moddata['id']}-{moddata['name']}")
        values: Dict[str, Any] = dict(
            mod=dict(id=moddata['id'],
                     tag=moddata['name'],
                     title=moddata['title'] or moddata['name']),
            version=version,
            spawnGroups=groups,
        )

        # Ensure the per-mod directory exists, then save (only if changed)
        target_dir = (self.config.settings.OutputPath /
                      self.config.export_wiki.PublishSubDir / dirname)
        target_dir.mkdir(parents=True, exist_ok=True)
        target_file = (target_dir / 'spawningGroups').with_suffix('.json')
        self._save_json_if_changed(values, target_file)
Example #2
0
    def _export_values(self,
                       species_values: List,
                       version: str,
                       other: Optional[Dict] = None,
                       moddata: Optional[Dict] = None):
        '''
        Assemble an ASB values file and save it (only if changed).

        :param species_values: Pre-extracted species entries to embed.
        :param version: Version string recorded in the output.
        :param other: Optional extra top-level fields merged into the output
            last, so they may override the standard fields.
        :param moddata: Mod metadata; when present the output is named
            "<id>-<tag>.json" and carries a `mod` info block, otherwise the
            file is plain `values.json`.
        '''
        values: Dict[str, Any] = dict()
        values['format'] = "1.12"

        if moddata:
            # Mod exports are named after the mod and include its info block
            filename = f"{moddata['id']}-{moddata['name']}"
            filename = get_valid_filename(filename)
            title = moddata['title'] or moddata['name']
            values['mod'] = dict(id=moddata['id'],
                                 tag=moddata['name'],
                                 title=title)
        else:
            filename = 'values'

        values['version'] = version
        values['species'] = species_values

        if other:
            values.update(other)

        fullpath = (self.config.settings.OutputPath /
                    self.config.export_asb.PublishSubDir /
                    filename).with_suffix('.json')
        self._save_json_if_changed(values, fullpath)
Example #3
0
    def _extract_and_save(self,
                          version: str,
                          base_path: Path,
                          relative_path: Path,
                          levels: List[str],
                          modid: Optional[str] = None,
                          known_persistent: Optional[str] = None):
        '''
        Ingest the given level assets into a World and emit its export files as JSON.

        :param version: Version string recorded in each output file.
        :param base_path: Root output directory.
        :param relative_path: Directory beneath `base_path` for this map's files.
        :param levels: Asset names of the level assets to ingest.
        :param modid: When given, a `mod` info block is added to each output file.
        :param known_persistent: Forwarded to the World constructor
            (presumably the persistent level's asset name — confirm with World).
        :returns: None; bails out early when no world settings are found.
        '''
        # Do the actual extraction
        world = World(known_persistent)
        for assetname in levels:
            asset = self.manager.loader[assetname]
            world.ingest_level(asset)

        if not world.bind_settings():
            logger.error(
                f'No world settings could have been found for {relative_path} - data will not be emitted.'
            )
            return None

        world.convert_for_export()

        # Save
        pretty_json = self.manager.config.export_wiki.PrettyJson
        if pretty_json is None:
            pretty_json = True  # default to pretty output when unconfigured

        for file_name, data in world.construct_export_files():
            # Work out the clean output path
            output_path = (relative_path / file_name).with_suffix('.json')
            clean_relative_path = PurePosixPath(*(get_valid_filename(p)
                                                  for p in output_path.parts))

            # Remove existing file if exists and no data was found.
            # Bugfix: resolve against base_path - the bare relative path would
            # be checked/removed relative to the current working directory,
            # never matching the file actually written below.
            if not data:
                full_output_path = base_path / output_path
                if full_output_path.is_file():
                    full_output_path.unlink()
                continue

            # Work out schema path, relative to where this file will live
            schema_path = _calculate_relative_path(
                clean_relative_path, self._get_schema_file_path(file_name))

            # Setup the output structure
            output: Dict[str, Any] = dict()
            output['$schema'] = str(schema_path)
            output['version'] = version
            if modid:
                mod_data = self.manager.arkman.getModData(modid)
                assert mod_data
                title = mod_data['title'] or mod_data['name']
                output['mod'] = dict(id=modid,
                                     tag=mod_data['name'],
                                     title=title)
            output.update(data)

            # Save if the data changed
            save_json_if_changed(output, (base_path / output_path),
                                 pretty_json)
Example #4
0
    def _export_world_data(self,
                           world_data: WorldData,
                           version: str,
                           moddata: Optional[Dict] = None):
        '''
        Write a map's world data as `map.json` under a per-map publish directory.

        :param world_data: The map data to format and save.
        :param version: Version string recorded in the output.
        :param moddata: Mod metadata; when present the directory is named
            "<id>-<tag>-<map>" and the output carries a `mod` info block,
            otherwise the directory is named after the map alone.
        '''
        values: Dict[str, Any] = dict()

        if moddata:
            values['mod'] = dict(id=moddata['id'],
                                 tag=moddata['name'],
                                 title=moddata['title'] or moddata['name'])
            dirname = get_valid_filename(
                f"{moddata['id']}-{moddata['name']}-{world_data.name}")
        else:
            dirname = get_valid_filename(world_data.name)

        values['version'] = version
        values.update(world_data.format_for_json())

        # Ensure the per-map directory exists, then save (only if changed)
        target_dir = (self.config.settings.OutputPath /
                      self.config.export_wiki.PublishSubDir / dirname)
        target_dir.mkdir(parents=True, exist_ok=True)
        target_file = (target_dir / 'map').with_suffix('.json')
        self._save_json_if_changed(values, target_file)
Example #5
0
    def _extract_and_save(self, version: str, modid: Optional[str], base_path: Path, relative_path: PurePosixPath,
                          proxy_iter: Iterator[UEProxyStructure]):
        '''
        Run this exporter over the given proxies and save the result,
        removing any stale output file when nothing was produced.

        :param version: Version string recorded in the output.
        :param modid: Mod being exported (or None); forwarded to the
            pre/post data hooks.
        :param base_path: Root output directory.
        :param relative_path: Output location beneath `base_path`; every
            path segment is sanitised.
        :param proxy_iter: Proxies fed through `extract`; falsey results
            are skipped.
        '''
        # Work out the output path (cleaned).
        # Consistency: build from unpacked parts, matching the schema-enabled
        # variant of this method, instead of re-joining with '/'.
        clean_relative_path = PurePosixPath(*(get_valid_filename(p) for p in relative_path.parts))
        output_path = Path(base_path / clean_relative_path)

        # Setup the output structure
        results: List[Any] = []
        format_version = self.get_format_version()
        output: Dict[str, Any] = dict(version=version, format=format_version)

        # Pre-data comes before the main items
        pre_data = self.get_pre_data(modid) or dict()
        pre_data = sanitise_output(pre_data)
        output.update(pre_data)

        # Main items array
        output[self.get_field()] = results

        # Do the actual export into the existing `results` list
        for proxy in proxy_iter:
            item_output = self.extract(proxy)
            if item_output:
                item_output = sanitise_output(item_output)
                results.append(item_output)

        # Make the results available to get_post_data
        self.gathered_results = results

        # Post-data comes after the main items
        post_data = self.get_post_data(modid) or {}
        post_data = sanitise_output(post_data)
        output.update(post_data)
        post_data_has_content = post_data and any(post_data.values())

        # Clear gathered data reference
        del self.gathered_results

        # Save if the data changed
        if results or post_data_has_content:
            save_json_if_changed(output, output_path, self.get_use_pretty())
        else:
            # ...but remove an existing one if the output was empty
            if output_path.is_file():
                output_path.unlink()
Example #6
0
    def _extract_and_save(self,
                          version: str,
                          modid: Optional[str],
                          base_path: Path,
                          relative_path: PurePosixPath,
                          proxy_iter: Iterator[UEProxyStructure],
                          *,
                          schema_file: Optional[PurePosixPath] = None):
        '''
        Run this exporter over the given proxies and save the result,
        removing any stale output file when nothing was produced.

        :param version: Version string recorded in the output.
        :param modid: Mod being exported (or None); forwarded to the
            pre/post data hooks.
        :param base_path: Root output directory.
        :param relative_path: Output location beneath `base_path`; every
            path segment is sanitised.
        :param proxy_iter: Proxies fed through `extract`; falsey results
            are skipped.
        :param schema_file: When given, a `$schema` reference (relative to
            the output location) is emitted and every extracted item is
            type-checked against the schema model's list field type.
        :raises TypeError: If a schema is in use and an extracted item does
            not match the expected model subtype.
        '''
        # Work out the output path (cleaned)
        clean_relative_path = PurePosixPath(*(get_valid_filename(p) for p in relative_path.parts))
        output_path = Path(base_path / clean_relative_path)

        # Setup the output structure
        results: List[Any] = []
        format_version = self.get_format_version()
        output: Dict[str, Any] = dict()
        if schema_file:
            model = self.get_schema_model()  # pylint: disable=assignment-from-none # stupid pylint
            assert model
            # expected_subtype is only bound on this path; its use below is
            # re-guarded by `schema_file and expected_subtype`
            expected_subtype = _get_model_list_field_type(model, self.get_field())
            output['$schema'] = str(_calculate_relative_path(clean_relative_path, schema_file))
        output['version'] = version
        output['format'] = format_version

        # Pre-data comes before the main items
        pre_data = self.get_pre_data(modid) or dict()
        pre_data = sanitise_output(pre_data)
        output.update(pre_data)

        # Main items array
        output[self.get_field()] = results

        # Do the actual export into the existing `results` list
        for proxy in proxy_iter:
            item_output = self.extract(proxy)
            if item_output:
                if schema_file and expected_subtype and not isinstance(item_output, expected_subtype):
                    raise TypeError(f"Expected {expected_subtype} from schema-enabled exported item but got {type(item_output)}")

                item_output = sanitise_output(item_output)
                results.append(item_output)

        # Make the results available to get_post_data
        self.gathered_results = results

        # Post-data comes after the main items
        post_data = self.get_post_data(modid) or {}
        post_data = sanitise_output(post_data)
        output.update(post_data)
        post_data_has_content = post_data and any(post_data.values())

        # Clear gathered data reference
        del self.gathered_results

        # Save if the data changed
        if results or post_data_has_content:
            save_json_if_changed(output, output_path, self.get_use_pretty())
        else:
            # ...but remove an existing one if the output was empty
            if output_path.is_file():
                output_path.unlink()