def parse(source):
    """Parse Lua *source* and return a ProgramEditor over its token list.

    The full chunk is parsed (so the token stream is completely filled),
    then every token is copied into a doubly-linked list for editing.
    """
    token_stream = CommonTokenStream(LuaLexer(InputStream(source)))
    lua_parser = LuaParser(token_stream)
    lua_parser.chunk()  # drive the parse so token_stream.tokens is populated
    linked_tokens = llist.dllist()
    for token in token_stream.tokens:
        linked_tokens.append(token)
    return ProgramEditor(None, linked_tokens)
def __init__(self, source):
    """Initialize parser state for the given Lua *source* string."""
    # token stream over the lexed source; holds a list of CommonTokens
    self._stream = CommonTokenStream(LuaLexer(InputStream(source)))
    self._line_count: int = 0
    self._right_index: int = 0
    self._last_expr_type: Optional[int] = None
    # stacks used to save/restore cursor positions during backtracking
    self._index_stack: List[int] = []
    self._right_index_stack: List[int] = []
    self.text: str = ""  # text of the last consumed token
    self.type: int = -1  # type of the last consumed token
    # expected token types, recorded when the input code is invalid
    self._expected = []
    # comments waiting to be attached to ast nodes
    self._comments_index_stack: List[int] = []
    self.comments: List[Comment] = []
    self._hidden_handled: bool = False
    self._hidden_handled_stack: List[bool] = []
def get_token_stream(source: str) -> CommonTokenStream:
    """Build and return an antlr CommonTokenStream for *source*."""
    return CommonTokenStream(LuaLexer(InputStream(source)))
def parse(source):
    """Parse Lua *source* and return the AST produced by ParseTreeVisitor.

    A ParserErrorListener is attached so syntax errors are reported
    instead of being silently recovered from.
    """
    stream = CommonTokenStream(LuaLexer(InputStream(source)))
    lua_parser = LuaParser(stream)
    lua_parser.addErrorListener(ParserErrorListener())
    parse_tree = lua_parser.chunk()
    return ParseTreeVisitor().visit(parse_tree)
def get_token_stream(source: str) -> CommonTokenStream:
    """
    Get the antlr token stream.
    """
    return CommonTokenStream(LuaLexer(InputStream(source)))