def _import_tokens(self, interpreter: Interpreter, filename: str) -> None:
    """Load the tokens for *filename*, initialise them, then run them."""
    if self.TOKEN_COMPILER is not None:
        # Prefer the pre-compiled token file; fall back to constructing
        # the tokens from source if the compiled file cannot be read.
        try:
            tokens = self.TOKEN_COMPILER.read_compiled_file(filename)
        except self.TOKEN_COMPILER.exception:
            tokens = self._construct_tokens(filename)
    else:
        tokens = self._construct_tokens(filename)
    interpreter.add_stack()
    # Initialise every token before any of them is run.
    for token in tokens:
        interpreter.init(token)
    # Run the tokens with the working directory set to the imported file's
    # directory, so relative paths inside the file resolve correctly.
    with path.ChangeDir(os.path.dirname(filename)):
        for token in tokens:
            interpreter.run(token)
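# Minimal usage sketch (assumed names: a concrete subclass "MyPlugin" that
# provides TOKEN_COMPILER and _construct_tokens, and an Interpreter instance;
# neither is defined in this excerpt):
#
#     interpreter = Interpreter()
#     plugin = MyPlugin()
#     plugin._import_tokens(interpreter, "/path/to/source_file")
#
# When TOKEN_COMPILER is set, the compiled token cache is tried first and the
# tokens are rebuilt from source only if reading it fails; all tokens are
# initialised before any of them is run.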
def init(self, interpreter: Interpreter) -> None:
    """Initialises all subtokens, then itself"""
    for token in self.tokens:
        if token.functional:
            interpreter.init(token)  # type: ignore
    self._init(interpreter)