Esempio n. 1
0
def parse(source):
    """Tokenize and parse Lua *source*, returning a ProgramEditor.

    The full chunk is parsed (to populate the token stream), then every
    token is copied into a doubly-linked list so the editor can splice
    tokens in O(1).
    """
    token_stream = CommonTokenStream(LuaLexer(InputStream(source)))
    LuaParser(token_stream).chunk()

    linked_tokens = llist.dllist()
    for tok in token_stream.tokens:
        linked_tokens.append(tok)
    return ProgramEditor(None, linked_tokens)
Esempio n. 2
0
    def __init__(self, source):
        """Initialize parsing state over *source* (raw Lua code as a string).

        Builds an antlr token stream from the source text and resets all
        bookkeeping used while consuming tokens and collecting comments.
        """
        self._stream = CommonTokenStream(LuaLexer(InputStream(source)))
        # contains a list of CommonTokens
        self._line_count: int = 0
        self._right_index: int = 0  # presumably an index counted from the stream's right end — confirm against usage
        self._last_expr_type: Optional[int] = None  # token type of the last parsed expression, None until one is seen

        # following stack are used to backup values
        # (pushed/popped around speculative parsing so state can be restored)
        self._index_stack: List[int] = []
        self._right_index_stack: List[int] = []
        self.text: str = ""  # last token text
        self.type: int = -1  # last token type (-1 = no token consumed yet)

        # contains expected token in case of invalid input code
        self._expected = []

        # comments waiting to be inserted into ast nodes
        self._comments_index_stack: List[int] = []
        self.comments: List[Comment] = []
        self._hidden_handled: bool = False  # whether hidden-channel tokens (comments/whitespace) were already processed
        self._hidden_handled_stack: List[bool] = []
Esempio n. 3
0
def get_token_stream(source: str) -> CommonTokenStream:
    """Build an antlr CommonTokenStream over *source* Lua text."""
    return CommonTokenStream(LuaLexer(InputStream(source)))
Esempio n. 4
0
def parse(source):
    """Parse Lua *source* and return the AST produced by ParseTreeVisitor.

    Attaches a ParserErrorListener so syntax errors surface through it
    rather than antlr's default console reporting.
    """
    stream = CommonTokenStream(LuaLexer(InputStream(source)))
    parser = LuaParser(stream)
    parser.addErrorListener(ParserErrorListener())
    parse_tree = parser.chunk()
    return ParseTreeVisitor().visit(parse_tree)
Esempio n. 5
0
def get_token_stream(source: str) -> CommonTokenStream:
    """Get the antlr token stream for the given Lua *source* text."""
    return CommonTokenStream(LuaLexer(InputStream(source)))