class Function(TextFileBase[MutableSequence[str]], NamespaceFile):
    """Class representing a function."""

    content: TextFileContent[MutableSequence[str]] = None
    tags: Optional[MutableSequence[str]] = extra_field(default=None)

    scope = ("functions",)
    extension = ".mcfunction"

    lines = FileDeserialize()  # type: FileDeserialize[MutableSequence[str]]

    @classmethod
    def to_str(cls, content: MutableSequence[str]) -> str:
        return "\n".join(content) + "\n"

    @classmethod
    def from_str(cls, content: str) -> MutableSequence[str]:
        return content.splitlines()

    def bind(self, pack: "DataPack", namespace: str, path: str):
        super().bind(pack, namespace, path)

        for tag_name in self.tags or ():
            pack.function_tags.merge(
                {tag_name: FunctionTag({"values": [f"{namespace}:{path}"]})}
            )
class Function(TextFileBase[List[str]]):
    """Class representing a function."""

    content: TextFileContent[List[str]] = None
    tags: Optional[List[str]] = extra_field(default=None)
    prepend_tags: Optional[List[str]] = extra_field(default=None)

    scope = ("functions",)
    extension = ".mcfunction"

    lines = FileDeserialize[List[str]]()

    def append(self, other: Union["Function", Iterable[str], str]):
        """Append lines from another function."""
        self.lines.extend(
            other.lines
            if isinstance(other, Function)
            else [other]
            if isinstance(other, str)
            else other
        )

    def prepend(self, other: Union["Function", Iterable[str], str]):
        """Prepend lines from another function."""
        self.lines[0:0] = (
            other.lines
            if isinstance(other, Function)
            else [other]
            if isinstance(other, str)
            else other
        )

    @classmethod
    def default(cls) -> List[str]:
        return []

    @classmethod
    def to_str(cls, content: List[str]) -> str:
        return "\n".join(content) + "\n"

    @classmethod
    def from_str(cls, content: str) -> List[str]:
        return content.splitlines()

    def bind(self, pack: "DataPack", path: str):
        super().bind(pack, path)

        for tag_name in self.tags or ():
            pack.function_tags.merge({tag_name: FunctionTag({"values": [path]})})

        for tag_name in self.prepend_tags or ():
            function_tag = pack.function_tags.setdefault(tag_name, FunctionTag())
            function_tag.prepend(FunctionTag({"values": [path]}))
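
# Usage sketch for the Function file above. Assumptions not taken from the
# snippet itself: DataPack is importable from the package's top level, assigning
# a file to a "namespace:path" key binds it to the pack (which is what triggers
# the tag registration in bind()), and FunctionTag exposes its deserialized json
# as `.data`. The function ids are made up.
from beet import DataPack

pack = DataPack()
pack["demo:greet"] = Function(["say hello"], tags=["minecraft:tick"])
pack["demo:boot"] = Function(["function demo:greet"], prepend_tags=["minecraft:load"])

# The bound functions should now appear in the corresponding function tags.
print(pack.function_tags["minecraft:tick"].data)
print(pack.function_tags["minecraft:load"].data)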
class Sound(BinaryFile):
    """Class representing a sound file."""

    event: Optional[str] = extra_field(default=None)
    subtitle: Optional[str] = extra_field(default=None)
    replace: Optional[bool] = extra_field(default=None)
    volume: Optional[float] = extra_field(default=None)
    pitch: Optional[float] = extra_field(default=None)
    weight: Optional[int] = extra_field(default=None)
    stream: Optional[bool] = extra_field(default=None)
    attenuation_distance: Optional[int] = extra_field(default=None)
    preload: Optional[bool] = extra_field(default=None)

    scope = ("sounds",)
    extension = ".ogg"

    def bind(self, pack: "ResourcePack", path: str):
        super().bind(pack, path)

        namespace, _, path = path.partition(":")

        if self.event is not None:
            attributes = {
                "volume": self.volume,
                "pitch": self.pitch,
                "weight": self.weight,
                "stream": self.stream,
                "attenuation_distance": self.attenuation_distance,
                "preload": self.preload,
            }

            attributes = {k: v for k, v in attributes.items() if v is not None}

            event: JsonDict = {
                "sounds": [{"name": path, **attributes} if attributes else path]
            }

            if self.replace is not None:
                event["replace"] = self.replace
            if self.subtitle is not None:
                event["subtitle"] = self.subtitle

            pack[namespace].extra.merge(
                {"sounds.json": SoundConfig({self.event: event})}
            )
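
# Usage sketch for Sound. Assumptions: ResourcePack is importable from the
# package's top level and dispatches item assignment by file type, and the file
# base class accepts a source_path keyword; the sound id, event name, and .ogg
# file are made up. Setting `event` is what makes bind() merge an entry into
# the namespace's sounds.json via the SoundConfig extra file.
from beet import ResourcePack

pack = ResourcePack()
pack["demo:block/chime"] = Sound(
    source_path="chime.ogg",
    event="block.demo.chime",
    subtitle="subtitles.block.demo.chime",
    volume=0.8,
    stream=True,
)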
class Texture(PngFile):
    """Class representing a texture."""

    content: BinaryFileContent[Image] = None
    mcmeta: Optional[JsonDict] = extra_field(default=None)

    scope = ("textures",)
    extension = ".png"

    def bind(self, pack: "ResourcePack", path: str):
        super().bind(pack, path)

        if self.mcmeta is not None:
            pack.textures_mcmeta[path] = TextureMcmeta(self.mcmeta)
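
# Usage sketch for Texture, under the same ResourcePack assumptions as the
# Sound example; the texture id and animation settings are made up. The mcmeta
# extra field is what bind() copies into the pack's textures_mcmeta proxy,
# yielding the companion .png.mcmeta file next to the texture.
from beet import ResourcePack

pack = ResourcePack()
pack["demo:block/glow"] = Texture(
    source_path="glow.png",
    mcmeta={"animation": {"frametime": 4}},
)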
class Document:
    """Class representing a lectern document."""

    ctx: InitVar[Optional[Context]] = None
    path: InitVar[Optional[FileSystemPath]] = None
    text: InitVar[Optional[str]] = None
    markdown: InitVar[Optional[str]] = None
    cache: InitVar[Optional[Cache]] = None
    external_files: InitVar[Optional[FileSystemPath]] = None

    assets: ResourcePack = field(default_factory=ResourcePack)
    data: DataPack = field(default_factory=DataPack)

    loaders: List[FragmentLoader] = extra_field(default_factory=list)
    directives: DirectiveRegistry = extra_field(default_factory=DirectiveRegistry)

    text_extractor: TextExtractor = extra_field(default_factory=TextExtractor)
    markdown_extractor: MarkdownExtractor = extra_field(
        default_factory=MarkdownExtractor
    )

    text_serializer: TextSerializer = extra_field(default_factory=TextSerializer)
    markdown_serializer: MarkdownSerializer = extra_field(
        default_factory=MarkdownSerializer
    )

    def __post_init__(
        self,
        ctx: Optional[Context],
        path: Optional[FileSystemPath] = None,
        text: Optional[str] = None,
        markdown: Optional[str] = None,
        cache: Optional[Cache] = None,
        external_files: Optional[FileSystemPath] = None,
    ):
        if ctx:
            self.assets = ctx.assets
            self.data = ctx.data
            if cache is None:
                cache = ctx.cache["lectern"]

        if cache:
            self.text_extractor.cache = cache
            self.markdown_extractor.cache = cache

        self.directives.assets = self.assets
        self.directives.data = self.data

        if path:
            self.load(path)
        if text:
            self.add_text(text)
        if markdown:
            self.add_markdown(markdown, external_files)

    def load(self, path: FileSystemPath):
        """Load and extract fragments from the file at the specified location."""
        path = Path(path).resolve()

        if path.suffix == ".md":
            self.add_markdown(path.read_text(), external_files=path.parent)
        else:
            self.add_text(path.read_text())

    def add_text(self, source: str):
        """Extract pack fragments from plain text."""
        assets, data = self.text_extractor.extract(
            source=source,
            directives=self.directives.resolve(),
            loaders=self.loaders,
        )
        self.assets.merge(assets)
        self.data.merge(data)

    def add_markdown(
        self,
        source: str,
        external_files: Optional[FileSystemPath] = None,
    ):
        """Extract pack fragments from markdown."""
        assets, data = self.markdown_extractor.extract(
            source=source,
            directives=self.directives.resolve(),
            loaders=self.loaders,
            external_files=external_files,
        )
        self.assets.merge(assets)
        self.data.merge(data)

    def get_text(self) -> str:
        """Turn the data pack and the resource pack into text."""
        return self.text_serializer.serialize(
            assets=self.assets,
            data=self.data,
            mapping=self.directives.resolve().get_serialization_mapping(),
        )

    @overload
    def get_markdown(
        self,
        emit_external_files: Literal[True],
        prefix: str = "",
    ) -> Tuple[str, Dict[str, File[Any, Any]]]:
        ...

    @overload
    def get_markdown(self, emit_external_files: Literal[False] = False) -> str:
        ...
    def get_markdown(
        self,
        emit_external_files: bool = False,
        prefix: str = "",
    ) -> Union[str, Tuple[str, Dict[str, File[Any, Any]]]]:
        """Turn the data pack and the resource pack into markdown."""
        external_files: Optional[Dict[str, File[Any, Any]]] = (
            {} if emit_external_files else None
        )

        content = self.markdown_serializer.serialize(
            assets=self.assets,
            data=self.data,
            mapping=self.directives.resolve().get_serialization_mapping(),
            external_files=external_files,
            external_prefix=prefix,
        )

        if external_files is None:
            return content
        else:
            return content, external_files

    def save(
        self,
        path: FileSystemPath,
        external_files: Optional[FileSystemPath] = None,
    ):
        """Save the serialized document at the specified location."""
        path = Path(path).resolve()

        if path.suffix == ".md":
            if external_files:
                with ExternalFilesManager(
                    Path(external_files).resolve(), path
                ) as manager:
                    content, files = self.get_markdown(
                        emit_external_files=True,
                        prefix=manager.external_prefix,
                    )
                    manager.external_files.update(files)
            else:
                content = self.get_markdown()
        else:
            content = self.get_text()

        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(content)
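
# Usage sketch for Document, built from the methods above. Assumption: the
# default DirectiveRegistry provides an `@function` directive that accepts the
# markdown form shown below; the function id and output paths are made up.
document = Document()
document.add_markdown(
    "# Demo\n"
    "\n"
    "`@function demo:greet`\n"
    "\n"
    "```mcfunction\n"
    "say hello\n"
    "```\n"
)

# Serialize back out; with a markdown target, non-inline fragments would be
# emitted into the external files directory.
document.save("build/README.md", external_files="build/files")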
class Context:
    """The build context."""

    project_id: str
    project_name: str
    project_description: TextComponent
    project_author: str
    project_version: str

    directory: Path
    output_directory: Optional[Path]
    meta: JsonDict
    cache: ProjectCache
    worker: WorkerPoolHandle
    template: TemplateManager

    generate: Generator = field(init=False)

    assets: ResourcePack = field(default_factory=ResourcePack)
    data: DataPack = field(default_factory=DataPack)

    whitelist: InitVar[Optional[List[str]]] = None

    _container: ContextContainer = extra_field(init=False)
    _path_entry: str = extra_field(init=False)

    def __post_init__(self, whitelist: Optional[List[str]]):
        self._container = ContextContainer(self)
        self._path_entry = str(self.directory.resolve())

        self.generate = self.inject(Generator)
        self.generate.assets = self.assets
        self.generate.data = self.data

        self.inject(Pipeline).whitelist = whitelist

        self.template.bind(self)
        self.template.expose("generate_path", self.generate.path)
        self.template.expose("generate_id", self.generate.id)
        self.template.expose("generate_hash", self.generate.hash)
        self.template.expose("generate_objective", self.generate.objective)
        self.template.expose(
            "generate_tree",
            lambda *args, **kwargs: generate_tree(
                kwargs.pop("root") if "root" in kwargs else self.meta["render_path"],
                *args,
                name=(
                    kwargs.pop("name")
                    if "name" in kwargs
                    else self.generate["tree"][self.meta["render_path"]].format(
                        "tree_{incr}"
                    )
                ),
                **kwargs,
            ),
        )
        self.template.expose("parse_json", lambda string: json.loads(string))

    @overload
    def inject(self, cls: ServiceFactory[T]) -> T:
        ...

    @overload
    def inject(self, cls: str) -> Any:
        ...

    def inject(self, cls: Any) -> Any:
        """Retrieve the instance provided by the specified service factory."""
        if not callable(cls):
            cls = import_from_string(cls, whitelist=self.inject(Pipeline).whitelist)
        return self._container[cls]

    @contextmanager
    def activate(self):
        """Push the context directory to sys.path and handle cleanup to allow module reloading."""
        with local_import_path(self._path_entry), self.cache:
            yield self.inject(Pipeline)

    @contextmanager
    def override(self, **meta: Any):
        """Temporarily update the context meta."""
        to_restore: JsonDict = {}
        to_remove: Set[str] = set()

        for key, value in meta.items():
            if key in self.meta:
                to_restore[key] = self.meta[key]
            else:
                to_remove.add(key)
            self.meta[key] = value

        try:
            yield self
        finally:
            for key in to_remove:
                del self.meta[key]
            self.meta.update(to_restore)

    def validate(
        self,
        key: str,
        validator: Validator[T],
        options: Optional[JsonDict] = None,
    ) -> T:
        """Validate options."""
        if options is None:
            options = self.meta.get(key)

        try:
            return validator(**(options or {}))
        except BubbleException:
            raise
        except ValidationError as exc:
            explanation = format_validation_error(key, exc)
            raise InvalidOptions(key, explanation) from None
        except Exception as exc:
            raise InvalidOptions(key) from exc

    @property
    def packs(self) -> Tuple[ResourcePack, DataPack]:
        return self.assets, self.data

    def require(self, *args: PluginSpec):
        """Execute the specified plugin."""
        self.inject(Pipeline).require(*args)
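
# Usage sketch for the service-injection side of Context. Assumptions: plugins
# are plain callables taking the context, string specs passed to require() are
# resolved by the pipeline, and "my_project.other_plugin" is a hypothetical
# dotted path used only for illustration.
def my_plugin(ctx: Context):
    ctx.require("my_project.other_plugin")  # hypothetical plugin spec
    pipeline = ctx.inject(Pipeline)  # returns the per-context instance configured in __post_init__
    ctx.data["demo:setup"] = Function(["scoreboard objectives add demo dummy"])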
class Context:
    """The build context."""

    project_name: str
    project_description: TextComponent
    project_author: str
    project_version: str

    directory: Path
    output_directory: Optional[Path]
    meta: JsonDict
    cache: MultiCache
    worker: WorkerPoolHandle
    template: TemplateManager

    assets: ResourcePack = field(default_factory=ResourcePack)
    data: DataPack = field(default_factory=DataPack)

    whitelist: InitVar[Optional[List[str]]] = None

    _container: ContextContainer = extra_field(init=False)
    _path_entry: str = extra_field(init=False)

    def __post_init__(self, whitelist: Optional[List[str]]):
        self._container = ContextContainer(self)
        self._path_entry = str(self.directory.resolve())

        self.template.env.globals["ctx"] = self
        self.inject(Pipeline).whitelist = whitelist

    def inject(self, cls: Callable[["Context"], InjectedType]) -> InjectedType:
        """Retrieve the instance provided by the specified service factory."""
        return self._container[cls]

    @contextmanager
    def activate(self):
        """Push the context directory to sys.path and handle cleanup to allow module reloading."""
        not_in_path = self._path_entry not in sys.path
        if not_in_path:
            sys.path.append(self._path_entry)

        try:
            with self.cache:
                yield self.inject(Pipeline)
        finally:
            if not_in_path:
                sys.path.remove(self._path_entry)

            imported_modules = [
                name
                for name, module in sys.modules.items()
                if (filename := getattr(module, "__file__", None))
                and filename.startswith(self._path_entry)
            ]
            for name in imported_modules:
                del sys.modules[name]

    @contextmanager
    def override(self, **meta: Any):
        """Temporarily update the context meta."""
        to_restore: JsonDict = {}
        to_remove = set()

        for key, value in meta.items():
            if key in self.meta:
                to_restore[key] = self.meta[key]
            else:
                to_remove.add(key)
            self.meta[key] = value

        try:
            yield self
        finally:
            for key in to_remove:
                del self.meta[key]
            self.meta.update(to_restore)

    @property
    def packs(self) -> Tuple[ResourcePack, DataPack]:
        return self.assets, self.data

    def require(self, spec: PluginSpec):
        """Execute the specified plugin."""
        self.inject(Pipeline).require(spec)
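
# Usage sketch for override(). Assumptions: the plugin below is hypothetical
# and "release"/"version" are made-up meta keys. Values swapped in by
# override() only live for the duration of the with-block; pre-existing keys
# are restored and newly added ones are removed afterwards.
def build_release(ctx: Context):
    with ctx.override(release=True, version="1.0.0"):
        ctx.require("my_project.bundle")  # hypothetical plugin sees the temporary meta
    # ctx.meta is back to its previous state here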