def test_parse_success(self):
    """Token literals should work."""
    s = [Token('token1'), Token('token2')]
    grammar = TokenGrammar("""
        foo = token1 "token2"
        token1 = "token1"
        """)
    eq_(grammar.parse(s),
        Node('foo', s, 0, 2, children=[
            Node('token1', s, 0, 1),
            Node('', s, 1, 2)]))
Example #2
def test_parse_success(self):
    """Token literals should work."""
    s = [Token('token1'), Token('token2')]
    grammar = TokenGrammar("""
        foo = token1 "token2"
        token1 = "token1"
        """)
    self.assertEqual(grammar.parse(s),
        Node(grammar['foo'], s, 0, 2, children=[
            Node(grammar['token1'], s, 0, 1),
            Node(TokenMatcher('token2'), s, 1, 2)]))
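The two test variants above appear to be the same test against different parsimonious releases: older versions build Node with the rule name as a string, newer ones with the matching Expression object. As a self-contained sketch of the same parse outside a test class (assuming a recent parsimonious where Node carries the expression):

from parsimonious.grammar import TokenGrammar
from parsimonious.utils import Token

grammar = TokenGrammar("""
    foo = token1 "token2"
    token1 = "token1"
    """)

# TokenGrammar matches grammar literals against Token.type, not source text.
tree = grammar.parse([Token('token1'), Token('token2')])
assert tree.expr_name == 'foo'
assert tree.children[0].expr_name == 'token1'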
Example #3
def test_parse_failure(self):
    """Parse failures should work normally with token literals."""
    grammar = TokenGrammar("""
        foo = "token1" "token2"
        """)
    assert_raises(ParseError, grammar.parse,
                  [Token('tokenBOO'), Token('token2')])
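The failure case can also be exercised without nose helpers; a minimal sketch using only parsimonious's public ParseError:

from parsimonious.exceptions import ParseError
from parsimonious.grammar import TokenGrammar
from parsimonious.utils import Token

grammar = TokenGrammar("""
    foo = "token1" "token2"
    """)
try:
    grammar.parse([Token('tokenBOO'), Token('token2')])
except ParseError as error:
    # error.pos is the index into the token list where matching failed
    # (0 here: "token1" cannot match a Token of type 'tokenBOO').
    print(error.pos)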
Example #4
_GRAMMAR = """
expr = arith_expr
arith_expr = term (("PLUS" / "MINUS") term)*
term = factor (("MULT" / "DIV" / "MOD") factor)*
factor = (("PLUS" / "MINUS") factor) / atom_expr
atom_expr = atom trailer*
atom = ("LPAREN" test "RPAREN") / "ID" / literal
trailer = ("LPAREN" arglist? "RPAREN") / ("LBRACKET" test "RBRACKET") / ("DOT" "ID")
arglist = test ("COMMA" test)*

literal = int_literal / string_literal / boolean_literal / none_literal
int_literal = "INT"
string_literal = "STRING"
boolean_literal = "TRUE" / "FALSE"
none_literal = "NONE"
"""
GRAMMAR = TokenGrammar(_GRAMMAR)


class Token(parsimonious.utils.Token):
    """Adapt a lexer Lexeme for parsimonious: grammar literals match the
    token's type name; the original lexeme stays reachable from the tree."""

    def __init__(self, lexeme: Lexeme):
        super().__init__(lexeme.type.name)
        self.lexeme = lexeme


# class Analyzer(NodeVisitor):
#     def visit_literal(self, node, _):
#         pass
#
#     def visit_int_literal(self, node, _):
#         print(int(node.text[0].lexeme.matched_string))
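A sketch of how the pieces above could fit together end to end. The _GRAMMAR excerpt is truncated (it references a "test" rule defined outside the shown lines), so a toy grammar stands in for GRAMMAR here; TokenType and Lexeme are likewise stand-ins for this project's lexer types, not parsimonious API:

from dataclasses import dataclass
from enum import Enum

from parsimonious.grammar import TokenGrammar
from parsimonious.utils import Token as BaseToken


class TokenType(Enum):
    INT = 1
    PLUS = 2


@dataclass
class Lexeme:
    type: TokenType
    matched_string: str


class LexemeToken(BaseToken):
    # Mirrors the Token class above: parsimonious matches on .type.
    def __init__(self, lexeme):
        super().__init__(lexeme.type.name)
        self.lexeme = lexeme


toy = TokenGrammar("""
    sum = "INT" ("PLUS" "INT")*
    """)

lexemes = [Lexeme(TokenType.INT, '1'),
           Lexeme(TokenType.PLUS, '+'),
           Lexeme(TokenType.INT, '2')]
tree = toy.parse([LexemeToken(lex) for lex in lexemes])

# Node.text slices the original token list, so each matched lexeme stays
# reachable for a visitor like the commented-out Analyzer above:
# tree.text[0].lexeme.matched_string == '1'.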