Example #1
0
    def __init__(self, module, builder, printf, scanf, definations=None):
        """Build the parser generator and store codegen references.

        Args:
            module: llvmlite module the IR is emitted into.
            builder: llvmlite IR builder.
            printf: reference to the printf system call.
            scanf: reference to the scanf system call.
            definations: optional dict of predefined globals. Defaults to a
                fresh dict per instance.
        """
        # BUG FIX: the original used a mutable default ({}), which is shared
        # across all instances and accumulates state between constructions.
        if definations is None:
            definations = {}

        self.pg = ParserGenerator(
            # Tokens that can be accepted by our parser
            ['NUMBER', 'WRITE', 'WRITELN', 'OPEN_PAREN', 'CLOSE_PAREN',
             'SEMI_COLON', 'SUM', 'SUB', 'MUL', 'DIV', 'MOD', 'VAR', 'ASSIGN',
             'AND', 'OR', 'NOT', 'TRUE', 'FALSE',
             'EQUALS', 'LESS', 'GREATER', 'LESS_EQ', 'GREAT_EQ', 'NOT_EQUALS',
             'COMMA', 'STRING', 'IF', 'ELSE', 'OPEN_CURLY', 'CLOSE_CURLY',
             'NOPS', 'FUNCTION', 'RETURN', 'FOR', 'INPUT', 'WHILE'],
            # Operator precedence, lowest-binding first.
            precedence=[
                ('left', ['SUM', 'SUB']),
                ('left', ['MUL', 'DIV']),
                ('left', ['MOD'])
            ]
        )

        ## Setting the module, builder and printf system call reference
        self.module = module
        self.builder = builder
        self.printf = printf
        self.scanf = scanf

        ## Initializing the defaults constructs for our language
        ## Like a global string called True, False etc.
        initialize(builder, module, definations)

        # Cache void*-cast views of the language's global constants.
        self.constants = {}
        self.constants['false'] = self.builder.bitcast(globalFalse, globalVoidPtr)
        self.constants['true'] = self.builder.bitcast(globalTrue, globalVoidPtr)
        self.constants['int'] = self.builder.bitcast(globalInt, globalVoidPtr)
Example #2
0
 def __init__(self, syntax=False):
     """Configure the parser generator and register all productions.

     Args:
         syntax: flag stored on the instance; its use is defined by parse().
     """
     self.pg = ParserGenerator(
         # A list of all token names accepted by the parser.
         [
             'STRING', 'INTEGER', 'FLOAT', 'BOOLEAN', 'PI', 'E', 'PRINT',
             'ABSOLUTE', 'SIN', 'COS', 'TAN', 'POWER', 'CONSOLE_INPUT', '(',
             ')', ';', ',', '{', '}', 'LET', 'AND', 'OR', 'NOT', 'IF',
             'ELSE', '=', '==', '!=', '>=', '>', '<', '<=', 'SUM', 'SUB',
             'MUL', 'DIV', 'IDENTIFIER', 'FUNCTION'
         ],
         # A list of precedence rules with ascending precedence, to
         # disambiguate ambiguous production rules (later entries bind
         # tighter). Reformatted one level per line for readability.
         precedence=(
             ('left', ['FUNCTION']),
             ('left', ['LET']),
             ('left', ['=']),
             ('left', ['IF', 'ELSE', ';']),
             ('left', ['AND', 'OR']),
             ('left', ['NOT']),
             ('left', ['==', '!=', '>=', '>', '<', '<=']),
             ('left', ['SUM', 'SUB']),
             ('left', ['MUL', 'DIV']),
             ('left', ['STRING', 'INTEGER', 'FLOAT', 'BOOLEAN', 'PI', 'E']),
         ))
     self.syntax = syntax
     self.parse()  # register all grammar productions up front
Example #3
0
 def __init__(self):
     """Create the parser generator from the lexer's operator tokens."""
     # list(operators) iterates the dict's keys directly; a comprehension
     # over .keys() is redundant.
     self.pg = ParserGenerator(list(operators),
                               precedence=[
                                   ("left", ["ADD", "SUBTRACT"]),
                                   ("left", ["MULTIPLY", "DIVIDE"]),
                               ])
Example #4
0
    def test_empty_production(self):
        """An empty production ('values :') yields the expected LR action table."""
        pg = ParserGenerator(["VALUE"])

        @pg.production("main : values")
        def main(p):
            return p[0]

        @pg.production("values : VALUE values")
        def values_value(p):
            return [p[0]] + p[1]

        @pg.production("values :")
        def values_empty(p):
            return []

        parser = pg.build()
        # NOTE(review): negative entries appear to encode reductions and 0
        # the accept action -- confirm against rply's LR-table internals.
        assert parser.lr_table.lr_action == [
            {
                "$end": -3,
                "VALUE": 3
            },
            {
                "$end": 0
            },
            {
                "$end": -1
            },
            {
                "$end": -3,
                "VALUE": 3
            },
            {
                "$end": -2
            },
        ]
Example #5
0
    def test_basic_parser(self):
        """NUMBER PLUS NUMBER parses to the summed BoxInt (with one expected conflict)."""
        pg = ParserGenerator(["NUMBER", "PLUS"])

        @pg.production("main : expr")
        def main(p):
            return p[0]

        @pg.production("expr : expr PLUS expr")
        def expr_op(p):
            return BoxInt(p[0].getint() + p[2].getint())

        @pg.production("expr : NUMBER")
        def expr_num(p):
            return BoxInt(int(p[0].getstr()))

        # The ambiguous expr PLUS expr grammar has no precedence, so one
        # shift/reduce conflict is expected at build time.
        with self.assert_warns(
            ParserGeneratorWarning, "1 shift/reduce conflict"
        ):
            parser = pg.build()

        def f(n):
            return parser.parse(iter([
                Token("NUMBER", str(n)),
                Token("PLUS", "+"),
                Token("NUMBER", str(n))
            ])).getint()

        assert self.run(f, [12]) == 24
Example #6
0
    def test_precedence(self):
        """TIMES binds tighter than PLUS, so 3*4+5 evaluates to 17."""
        pg = ParserGenerator(["NUMBER", "PLUS", "TIMES"], precedence=[
            ("left", ["PLUS"]),
            ("left", ["TIMES"]),
        ])

        @pg.production("main : expr")
        def main(p):
            return p[0]

        @pg.production("expr : expr PLUS expr")
        @pg.production("expr : expr TIMES expr")
        def expr_binop(p):
            return BoxInt({
                "+": operator.add,
                "*": operator.mul
            }[p[1].getstr()](p[0].getint(), p[2].getint()))

        @pg.production("expr : NUMBER")
        def expr_num(p):
            return BoxInt(int(p[0].getstr()))

        parser = pg.build()

        assert parser.parse(iter([
            Token("NUMBER", "3"),
            Token("TIMES", "*"),
            Token("NUMBER", "4"),
            Token("PLUS", "+"),
            Token("NUMBER", "5")
        ])) == BoxInt(17)
Example #7
0
    def test_arithmetic(self):
        """Lex and parse "3*4+5" end to end through lexer and parser."""
        lg = LexerGenerator()
        lg.add("NUMBER", r"\d+")
        lg.add("PLUS", r"\+")
        lg.add("TIMES", r"\*")

        pg = ParserGenerator(["NUMBER", "PLUS", "TIMES"], precedence=[
            ("left", ["PLUS"]),
            ("left", ["TIMES"]),
        ])

        @pg.production("main : expr")
        def main(p):
            return p[0]

        @pg.production("expr : expr PLUS expr")
        @pg.production("expr : expr TIMES expr")
        def expr_binop(p):
            return BoxInt({
                "+": operator.add,
                "*": operator.mul
            }[p[1].getstr()](p[0].getint(), p[2].getint()))

        @pg.production("expr : NUMBER")
        def expr_num(p):
            return BoxInt(int(p[0].getstr()))

        lexer = lg.build()
        parser = pg.build()

        assert parser.parse(lexer.lex("3*4+5"))
Example #8
0
    def test_null_production(self):
        """A nullable 'none' production lets the parser accept empty input."""
        pg = ParserGenerator(["VALUE", "SPACE"])

        @pg.production("main : values")
        def main(p):
            return p[0]

        @pg.production("values : none")
        def values_empty(p):
            return []

        @pg.production("values : VALUE")
        def values_value(p):
            return [p[0].getstr()]

        @pg.production("values : values SPACE VALUE")
        def values_values(p):
            return p[0] + [p[2].getstr()]

        @pg.production("none :")
        def none(p):
            return None

        parser = pg.build()
        assert parser.parse(FakeLexer([
            Token("VALUE", "abc"),
            Token("SPACE", " "),
            Token("VALUE", "def"),
            Token("SPACE", " "),
            Token("VALUE", "ghi"),
        ])) == ["abc", "def", "ghi"]

        assert parser.parse(FakeLexer([])) == []
Example #9
0
    def test_arithmetic(self):
        """Lex and parse "3*4+5" end to end (lexer + parser built together)."""
        lg = LexerGenerator()
        lg.add("NUMBER", r"\d+")
        lg.add("PLUS", r"\+")
        lg.add("TIMES", r"\*")

        pg = ParserGenerator(["NUMBER", "PLUS", "TIMES"],
                             precedence=[
                                 ("left", ["PLUS"]),
                                 ("left", ["TIMES"]),
                             ])

        @pg.production("main : expr")
        def main(p):
            return p[0]

        @pg.production("expr : expr PLUS expr")
        @pg.production("expr : expr TIMES expr")
        def expr_binop(p):
            return BoxInt({
                "+": operator.add,
                "*": operator.mul
            }[p[1].getstr()](p[0].getint(), p[2].getint()))

        @pg.production("expr : NUMBER")
        def expr_num(p):
            return BoxInt(int(p[0].getstr()))

        lexer = lg.build()
        parser = pg.build()

        assert parser.parse(lexer.lex("3*4+5"))
Example #10
0
 def __init__(self):
     """Declare the token names accepted by the parser."""
     self.pg = ParserGenerator(
         # A list of all token names accepted by the parser.
         [
             'SCANF', 'OPEN_PAREN', 'CLOSE_PAREN', 'POINT', 'PLUS', 'COMMA',
             'SEMI_COLON', 'CONNECTION_CLASS', 'EXEC_FUNC', 'VAR', 'STRING'
         ])
Example #11
0
    def test_precedence(self):
        """TIMES binds tighter than PLUS, so 3*4+5 evaluates to 17."""
        pg = ParserGenerator(["NUMBER", "PLUS", "TIMES"], precedence=[
            ("left", ["PLUS"]),
            ("left", ["TIMES"]),
        ])

        @pg.production("main : expr")
        def main(p):
            return p[0]

        @pg.production("expr : expr PLUS expr")
        @pg.production("expr : expr TIMES expr")
        def expr_binop(p):
            return BoxInt({
                "+": operator.add,
                "*": operator.mul
            }[p[1].getstr()](p[0].getint(), p[2].getint()))

        @pg.production("expr : NUMBER")
        def expr_num(p):
            return BoxInt(int(p[0].getstr()))

        parser = pg.build()

        assert parser.parse(FakeLexer([
            Token("NUMBER", "3"),
            Token("TIMES", "*"),
            Token("NUMBER", "4"),
            Token("PLUS",  "+"),
            Token("NUMBER", "5")
        ])) == BoxInt(17)
Example #12
0
    def test_state(self):
        """A state object threads through every handler and counts reductions."""
        pg = ParserGenerator(["NUMBER", "PLUS"], precedence=[
            ("left", ["PLUS"]),
        ])

        @pg.production("main : expression")
        def main(state, p):
            state.count += 1
            return p[0]

        @pg.production("expression : expression PLUS expression")
        def expression_plus(state, p):
            state.count += 1
            return BoxInt(p[0].getint() + p[2].getint())

        @pg.production("expression : NUMBER")
        def expression_number(state, p):
            state.count += 1
            return BoxInt(int(p[0].getstr()))

        parser = pg.build()

        state = ParserState()
        assert parser.parse(iter([
            Token("NUMBER", "10"),
            Token("PLUS", "+"),
            Token("NUMBER", "12"),
            Token("PLUS", "+"),
            Token("NUMBER", "-2"),
        ]), state=state) == BoxInt(20)
        # 3 NUMBER reductions + 2 PLUS reductions + 1 main reduction.
        assert state.count == 6
Example #13
0
    def test_arithmetic(self):
        """1+4 parses to BoxInt(5); the grammar triggers one expected conflict."""
        pg = ParserGenerator(["NUMBER", "PLUS"])

        @pg.production("main : expr")
        def main(p):
            return p[0]

        @pg.production("expr : expr PLUS expr")
        def expr_op(p):
            return BoxInt(p[0].getint() + p[2].getint())

        @pg.production("expr : NUMBER")
        def expr_num(p):
            return BoxInt(int(p[0].getstr()))

        with self.assert_warns(
            ParserGeneratorWarning, "1 shift/reduce conflict"
        ):
            parser = pg.build()

        assert parser.parse(iter([
            Token("NUMBER", "1"),
            Token("PLUS", "+"),
            Token("NUMBER", "4")
        ])) == BoxInt(5)
Example #14
0
    def test_per_rule_precedence(self):
        """precedence= on a production (UMINUS) overrides token precedence."""
        pg = ParserGenerator(["NUMBER", "MINUS"], precedence=[
            ("right", ["UMINUS"]),
        ])

        @pg.production("main : expr")
        def main_expr(p):
            return p[0]

        @pg.production("expr : expr MINUS expr")
        def expr_minus(p):
            return BoxInt(p[0].getint() - p[2].getint())

        # UMINUS is a fictitious token used only to give unary minus its
        # own precedence level.
        @pg.production("expr : MINUS expr", precedence="UMINUS")
        def expr_uminus(p):
            return BoxInt(-p[1].getint())

        @pg.production("expr : NUMBER")
        def expr_number(p):
            return BoxInt(int(p[0].getstr()))

        with self.assert_warns(
            ParserGeneratorWarning, "1 shift/reduce conflict"
        ):
            parser = pg.build()

        assert parser.parse(iter([
            Token("MINUS", "-"),
            Token("NUMBER", "4"),
            Token("MINUS", "-"),
            Token("NUMBER", "5"),
        ])) == BoxInt(-9)
Example #15
0
    def test_null_production(self):
        """A nullable 'none' production lets the parser accept empty input."""
        pg = ParserGenerator(["VALUE", "SPACE"])

        @pg.production("main : values")
        def main(p):
            return p[0]

        @pg.production("values : none")
        def values_empty(p):
            return []

        @pg.production("values : VALUE")
        def values_value(p):
            return [p[0].getstr()]

        @pg.production("values : values SPACE VALUE")
        def values_values(p):
            return p[0] + [p[2].getstr()]

        @pg.production("none :")
        def none(p):
            return None

        parser = pg.build()
        assert parser.parse(iter([
            Token("VALUE", "abc"),
            Token("SPACE", " "),
            Token("VALUE", "def"),
            Token("SPACE", " "),
            Token("VALUE", "ghi"),
        ])) == ["abc", "def", "ghi"]

        assert parser.parse(iter([])) == []
Example #16
0
    def test_duplicate_precedence(self):
        """Repeating a token within precedence raises ParserGeneratorError at build()."""
        pg = ParserGenerator([], precedence=[
            ("left", ["term", "term"])
        ])

        with py.test.raises(ParserGeneratorError):
            pg.build()
Example #17
0
    def test_invalid_associativity(self):
        """An unknown associativity string raises ParserGeneratorError at build()."""
        pg = ParserGenerator([], precedence=[
            ("to-the-left", ["term"]),
        ])

        with py.test.raises(ParserGeneratorError):
            pg.build()
Example #18
0
 def __init__(self, dic_variables, dic_etiquetas):
     """Build the parser for the assembly-style instruction tokens.

     Args:
         dic_variables: variable table, stored for use by the productions.
         dic_etiquetas: label table, stored for use by the productions.
     """
     self.pg = ParserGenerator(
         # A list of all token names accepted by the parser.
         ['LODD','STOD','ADDD','SUBD','JPOS','JZER','JUMP','LOCO','LODL','STOL','ADDL','SUBL','JNEG','JNZE','CALL','PUSHI','POPI','PUSH','POP','RETN','SWAP','INSP','DESP','INPAC', 'OUTAC','HALT','DIRECCION', 'NUMERO', 'VARIABLE', 'ETIQUETA']
     )
     self.dic_variables=dic_variables
     self.dic_etiquetas=dic_etiquetas
Example #19
0
 def __init__(self):
     """Declare the tokens and left-associative SUM/SUB precedence."""
     self.pg = ParserGenerator([
         'NUMBER', 'OPEN_PARENS', 'CLOSE_PARENS', 'SEMI_COLON', 'SUM', 'SUB'
     ],
                               precedence=[
                                   ('left', ['SUM', 'SUB']),
                               ])
Example #20
0
 def __init__(self, syntax=False):
     """Configure the parser generator and register all productions.

     Args:
         syntax: flag stored on the instance; its use is defined by parse().
     """
     self.pg = ParserGenerator(
         # A list of all token names accepted by the parser.
         get_all_tokens_name(),
         # A list of precedence rules with ascending precedence, to
         # disambiguate ambiguous production rules (later entries bind
         # tighter). Reformatted one level per line for readability.
         precedence=(
             (AppConstant.LEFT, [TokenEnum.FUNCTION.name]),
             (AppConstant.LEFT, [TokenEnum.LET.name]),
             (AppConstant.LEFT, [TokenEnum.ASSIGN.name]),
             (AppConstant.LEFT, [TokenEnum.IF.name, TokenEnum.ELSE.name,
                                 TokenEnum.SEMI_COLON.name]),
             (AppConstant.LEFT, [TokenEnum.AND.name, TokenEnum.OR.name]),
             (AppConstant.LEFT, [TokenEnum.NOT.name]),
             (AppConstant.LEFT, [TokenEnum.EQ.name, TokenEnum.NEQ.name,
                                 TokenEnum.GTEQ.name, TokenEnum.GT.name,
                                 TokenEnum.LT.name, TokenEnum.LTEQ.name]),
             (AppConstant.LEFT, [TokenEnum.SUM.name, TokenEnum.SUB.name]),
             (AppConstant.LEFT, [TokenEnum.MUL.name, TokenEnum.DIV.name]),
             (AppConstant.LEFT, [TokenEnum.STRING.name, TokenEnum.INTEGER.name,
                                 TokenEnum.FLOAT.name, TokenEnum.BOOLEAN.name,
                                 TokenEnum.PI.name, TokenEnum.E.name]),
         ))
     self.syntax = syntax
     self.parse()  # register all grammar productions up front
Example #21
0
 def __init__(self):
     """Declare the language's tokens and operator precedence."""
     self.pg = ParserGenerator([
         'NUMBER',
         'STRING',
         'IDENT',
         'PRINT',
         'READ',
         'INT',
         'IF',
         'THEN',
         'ELSE',
         'END_IF',
         'FOR',
         'UNTIL',
         'STEP',
         'END_FOR',
         'OPEN_PAREN',
         'CLOSE_PAREN',
         'END_STATEMENT',
         'DECLARE_TYPE',
         'ADD',
         'SUB',
         'MULT',
         'DIV',
         'LESS',
         'MORE',
         'LESS_EQ',
         'MORE_EQ',
     ],
         # BUG FIX: the multiplication token is declared as 'MULT', but the
         # precedence table previously referenced 'MUL', so MULT/DIV never
         # received a precedence level.
         precedence=[('left', ['ADD', 'SUB']),
                     ('left', ['MULT', 'DIV'])])
     self.vars = {}
 def __init__(self, module, builder, printf):
     """Store codegen references and declare the parser's tokens.

     Args:
         module: module object the productions emit into.
         builder: IR builder used by the productions.
         printf: reference to the printf call used for output.
     """
     self.pg = ParserGenerator(
         # A list of all token names accepted by the parser.
         ['RESERVED', 'END', 'DIGIT', 'LETTER', 'SPECIAL'])
     self.module = module
     self.builder = builder
     self.printf = printf
Example #23
0
    def test_duplicate_precedence(self):
        """Repeating a token within precedence raises ParserGeneratorError at build()."""
        pg = ParserGenerator([], precedence=[
            ("left", ["term", "term"])
        ])

        with py.test.raises(ParserGeneratorError):
            pg.build()
Example #24
0
    def __init__(self):
        """Initialize parser state and the parser generator."""
        self.indent_count = 0
        self.definition_list = []

        self.pg = ParserGenerator(
            # A list of all token names, accepted by the parser.
            # BUG FIX: 'NOT' was previously listed twice; the duplicate
            # entry is redundant and has been removed.
            [
                'INTEGER', 'IDENTIFIER', 'IF', 'ELSE', 'COLON', 'NOT', 'WHILE',
                'END', 'FUNCTION', 'OPENPAREN', 'CLOSEPAREN', 'IMPORT',
                'BEGIN', 'MOVE', 'LEFTTURN', 'PUTBEEPER', 'PICKBEEPER',
                'FACENORTH', 'FACESOUTH', 'FACEWEST', 'LEFTCLEAR',
                'RIGHTCLEAR', 'FRONTCLEAR', 'PRESENT', 'INBAG', 'FOR',
                'FACEEAST', 'INDENT', 'DEDENT'
            ],
            # A list of precedence rules with ascending precedence, to
            # disambiguate ambiguous production rules.
            # NOTE(review): '[', ']', ',' and ' ' below are not declared
            # tokens above -- confirm these entries are intentional.
            precedence=[
                ('left', ['FUNCTION']),
                ('left', ['[', ']', ',']),
                ('left', ['IF', 'COLON', 'ELSE', ' ', 'WHILE']),
            ])
    def test_basic_parser(self):
        """NUMBER PLUS NUMBER parses to the summed BoxInt (one expected conflict)."""
        pg = ParserGenerator(["NUMBER", "PLUS"])

        @pg.production("main : expr")
        def main(p):
            return p[0]

        @pg.production("expr : expr PLUS expr")
        def expr_op(p):
            return BoxInt(p[0].getint() + p[2].getint())

        @pg.production("expr : NUMBER")
        def expr_num(p):
            return BoxInt(int(p[0].getstr()))

        with self.assert_warns(ParserGeneratorWarning,
                               "1 shift/reduce conflict"):
            parser = pg.build()

        def f(n):
            return parser.parse(
                FakeLexer([
                    Token("NUMBER", str(n)),
                    Token("PLUS", "+"),
                    Token("NUMBER", str(n))
                ])).getint()

        assert self.run(f, [12]) == 24
Example #26
0
 def __init__(self,
              lexer_or_tokens: Union[Lexer, Iterable[str]],
              precedence: Optional[ParserPrecedence] = None):
     """Wrap a ParserGenerator over a lexer's tokens or an explicit list.

     Args:
         lexer_or_tokens: a Lexer (its possible_tokens are used) or an
             iterable of token names.
         precedence: optional precedence rules; defaults to none.
     """
     if isinstance(lexer_or_tokens, Lexer):
         token_names = lexer_or_tokens.possible_tokens
     else:
         token_names = lexer_or_tokens
     self._pg = ParserGenerator(token_names, precedence or [])
     self._pg.error(self._handle_error)
Example #27
0
 def __init__(self, module, builder, printf):
     """Store codegen references and configure tokens plus precedence.

     Args:
         module: module object the productions emit into.
         builder: IR builder used by the productions.
         printf: reference to the printf call used for output.
     """
     self.pg = ParserGenerator(
         # A list of all token names accepted by the parser.
         [
             'NUMERO',
             'ESCREVA',
             'APAR',
             'FPAR',
             'PONTO_VIRGULA',
             'SOMA',
             'SUB',
             'MUL',
             'DIV',
         ],
         # Additive operators bind looser than multiplicative ones.
         precedence=[
             ('left', [
                 'SOMA',
                 'SUB',
             ]),
             ('left', [
                 'MUL',
                 'DIV',
             ]),
         ],
     )
     self.module = module
     self.builder = builder
     self.printf = printf
Example #28
0
 def __init__(self):
     """Declare the token names accepted by the parser."""
     self.pg = ParserGenerator([
         'ADD', 'COLON', 'PREDICTION', 'CLUSTERING', 'FLOAT', 'STATS',
         'PLOT', 'C_PLOT', 'SENTENCE', 'PRINT', 'FORECAST', 'VAR', 'EQUAL',
         'INT', 'OPEN_BRA', 'CLOSE_BRA', 'OPEN_PAREN', 'CLOSE_PAREN',
         'SEMI_COLON', 'COMA', 'METHOD', 'BOOLEAN', 'TYPE_A'
     ])
Example #29
0
    def test_invalid_associativity(self):
        """An unknown associativity string raises ParserGeneratorError at build()."""
        pg = ParserGenerator([], precedence=[
            ("to-the-left", ["term"]),
        ])

        with py.test.raises(ParserGeneratorError):
            pg.build()
Example #30
0
 def __init__(self):
     """Declare the token names accepted by the parser."""
     self.pg = ParserGenerator(
         # A list of all token names accepted by the parser.
         [
             'NUMBER', 'PRINT', 'OPEN_PAREN', 'CLOSE_PAREN', 'SEMI_COLON',
             'SUM', 'SUB', 'OPEN_QUOTE', 'CLOSE_QUOTE'
         ])
Example #31
0
    def __init__(self):
        """Build the calculator parser (cached under cache_id 'wcc')."""
        self.token_list = ["NUMBER", "PLUS", "EXIT"]
        pg = ParserGenerator(self.token_list, cache_id='wcc')

        @pg.production("main : statement")
        def main(p):
            return p[0]

        # FIX: this handler was also named 'main', shadowing the function
        # above. Harmless at runtime (the decorator registers it), but the
        # distinct name is clearer.
        @pg.production("statement : addition")
        @pg.production("statement : exit")
        def statement(p):
            return p[0]

        @pg.production("exit : EXIT")
        def exit_prod(p):
            return ExitBox()

        @pg.production("number : NUMBER")
        def number(p):
            return IntBox(p[0])

        # All three operand orders of binary addition are accepted.
        @pg.production("addition : PLUS number number")
        def addition0(p):
            return BinaryAddBox(p[1], p[2])

        @pg.production("addition : number PLUS number")
        def addition1(p):
            return BinaryAddBox(p[0], p[2])

        @pg.production("addition : number number PLUS")
        def addition2(p):
            return BinaryAddBox(p[0], p[1])

        self.parser = pg.build()
    def test_parser_state(self):
        """A state object threads through handlers; result + count == 26."""
        pg = ParserGenerator(["NUMBER", "PLUS"],
                             precedence=[
                                 ("left", ["PLUS"]),
                             ])

        @pg.production("main : expression")
        def main(state, p):
            state.count += 1
            return p[0]

        @pg.production("expression : expression PLUS expression")
        def expression_plus(state, p):
            state.count += 1
            return BoxInt(p[0].getint() + p[2].getint())

        @pg.production("expression : NUMBER")
        def expression_number(state, p):
            state.count += 1
            return BoxInt(int(p[0].getstr()))

        parser = pg.build()

        def f():
            state = ParserState()
            return parser.parse(FakeLexer([
                Token("NUMBER", "10"),
                Token("PLUS", "+"),
                Token("NUMBER", "12"),
                Token("PLUS", "+"),
                Token("NUMBER", "-2"),
            ]),
                                state=state).getint() + state.count

        # 10 + 12 + (-2) = 20 plus 6 reductions counted in state.
        assert self.run(f, []) == 26
Example #33
0
    def test_arithmetic(self):
        """1+4 parses to BoxInt(5); the grammar triggers one expected conflict."""
        pg = ParserGenerator(["NUMBER", "PLUS"])

        @pg.production("main : expr")
        def main(p):
            return p[0]

        @pg.production("expr : expr PLUS expr")
        def expr_op(p):
            return BoxInt(p[0].getint() + p[2].getint())

        @pg.production("expr : NUMBER")
        def expr_num(p):
            return BoxInt(int(p[0].getstr()))

        with self.assert_warns(
            ParserGeneratorWarning, "1 shift/reduce conflict"
        ):
            parser = pg.build()

        assert parser.parse(iter([
            Token("NUMBER", "1"),
            Token("PLUS", "+"),
            Token("NUMBER", "4")
        ])) == BoxInt(5)
Example #34
0
    def test_per_rule_precedence(self):
        """precedence= on a production (UMINUS) overrides token precedence."""
        pg = ParserGenerator(["NUMBER", "MINUS"], precedence=[
            ("right", ["UMINUS"]),
        ])

        @pg.production("main : expr")
        def main_expr(p):
            return p[0]

        @pg.production("expr : expr MINUS expr")
        def expr_minus(p):
            return BoxInt(p[0].getint() - p[2].getint())

        # UMINUS is a fictitious token giving unary minus its own level.
        @pg.production("expr : MINUS expr", precedence="UMINUS")
        def expr_uminus(p):
            return BoxInt(-p[1].getint())

        @pg.production("expr : NUMBER")
        def expr_number(p):
            return BoxInt(int(p[0].getstr()))

        with self.assert_warns(ParserGeneratorWarning, "1 shift/reduce conflict"):
            parser = pg.build()

        assert parser.parse(FakeLexer([
            Token("MINUS", "-"),
            Token("NUMBER", "4"),
            Token("MINUS", "-"),
            Token("NUMBER", "5"),
        ])) == BoxInt(-9)
Example #35
0
 def __init__(self, module, builder, printf):
     """Store codegen references and declare the parser's tokens.

     Args:
         module: module object the productions emit into.
         builder: IR builder used by the productions.
         printf: reference to the printf call used for output.
     """
     self.pg = ParserGenerator([
         'NUMBER', 'PRINT', 'OPEN_PAREN', 'CLOSE_PAREN', 'SEMI_COLON',
         'ADD', 'SUB'
     ])
     self.module = module
     self.builder = builder
     self.printf = printf
Example #36
0
 def __init__(self):
     """Declare the token names accepted by the parser."""
     self.pg = ParserGenerator(
         # A list of all token names accepted by the parser.
         [
             'NOMBRE', 'MON', 'PARENTESE1', 'PARENTESE2', 'POINT_VERG',
             'PLUS', 'MOINS', 'FOIS', 'DIVI', 'TERM', 'EGAL', 'QUOTE',
             'VERGULE', 'DOLLAR'
         ])
Example #37
0
 def __init__(self):
     """Declare the token names accepted by the SQL-like query parser."""
     self.pg = ParserGenerator(
         [
             'SELECT', 'FROM', 'WHERE', 'SEP', 'BOXPLOT', 'STR', 'GRT',
             'LSS', 'EQ'
         ]
         # TODO: add NUM at the end; for now everything is a string
     )
Example #38
0
 def __init__(self):
     """Declare the token names accepted by the parser."""
     self.pg = ParserGenerator(
         # A list of all token names accepted by the parser.
         [
             'NUMBER', 'PRINTTOLCD', 'OPEN_PAREN', 'CLOSE_PAREN',
             'SEMI_COLON', 'SUM', 'SUB', 'STRING', 'SLEEP', 'OUTPUT',
             'PINON', 'PINOFF', 'EQUAL', 'VAR', 'VARNAME', 'GETVAR'
         ])
Example #39
0
 def __init__(self):
     """Declare tokens and arithmetic operator precedence."""
     self.parseGen = ParserGenerator(
         # A list of all token names accepted by the parser.
         [
             'NUMBER', 'PRINT', 'LEFT_PAREN', 'RIGHT_PAREN', 'EOL', 'SUM',
             'SUB', 'MUL', 'DIV'
         ],
         # MUL/DIV bind tighter than SUM/SUB.
         precedence=[('left', ['SUM', 'SUB']), ('left', ['MUL', 'DIV'])])
Example #40
0
    def test_nonexistant_precedence(self):
        """Referencing an undeclared precedence name raises at build()."""
        pg = ParserGenerator(["VALUE"])

        @pg.production("main : VALUE", precedence="abc")
        def main(p):
            pass

        with py.test.raises(ParserGeneratorError):
            pg.build()
Example #41
0
    def test_production_terminal_overlap(self):
        """Naming a production after a declared terminal raises at build()."""
        pg = ParserGenerator(["VALUE"])

        @pg.production("VALUE :")
        def x(p):
            pass

        with py.test.raises(ParserGeneratorError):
            pg.build()
Example #42
0
    def test_unused_tokens(self):
        """A declared-but-unused token produces a warning at build()."""
        pg = ParserGenerator(["VALUE", "OTHER"])

        @pg.production("main : VALUE")
        def main(p):
            return p[0]

        with self.assert_warns(ParserGeneratorWarning, "Token 'OTHER' is unused"):
            pg.build()
Example #43
0
    def test_simple(self):
        """A one-production grammar returns the parsed token unchanged."""
        pg = ParserGenerator(["VALUE"])

        @pg.production("main : VALUE")
        def main(p):
            return p[0]

        parser = pg.build()

        assert parser.parse(iter([Token("VALUE", "abc")])) == Token("VALUE", "abc")
Example #44
0
def build():
    """Assemble and build the SQL parser from the generated BNF productions.

    Returns:
        The built rply parser (cached under cache_id "sql_parser").

    Raises:
        ValueError: raised by the registered error handler when the parser
            hits an unexpected token.
    """
    pg = ParserGenerator(TOKENS, cache_id="sql_parser")
    # Register one production handler per (rule, node class) pair generated
    # from the select grammar.
    for prod, cls in gen_productions(select_bnf, node_classes):
        pg.production(prod)(cls.production)

    @pg.error
    def error_handler(token):
        raise ValueError("Ran into a %s(%s) where it wasn't expected"
                         % (token.gettokentype(), token.getstr()))
    return pg.build()
Example #45
0
    def test_error_symbol(self):
        """The reserved 'error' symbol is accepted in productions at build()."""
        pg = ParserGenerator(["VALUE"])

        @pg.production("main : VALUE")
        def main(p):
            pass

        @pg.production("main : error")
        def main_error(p):
            pass

        pg.build()
Example #46
0
    def test_unused_production(self):
        """An unreachable production produces a warning at build()."""
        pg = ParserGenerator(["VALUE", "OTHER"])

        @pg.production("main : VALUE")
        def main(p):
            return p[0]

        @pg.production("unused : OTHER")
        def unused(p):
            pass

        with self.assert_warns(ParserGeneratorWarning, "Production 'unused' is not reachable"):
            pg.build()
Example #47
0
    def test_parse_error(self):
        """A ParsingError carries the offending token's source position."""
        pg = ParserGenerator(["VALUE"])

        @pg.production("main : VALUE")
        def main(p):
            return p[0]

        parser = pg.build()

        # The second VALUE is unexpected after main has been reduced; its
        # SourcePosition should surface on the raised error.
        with py.test.raises(ParsingError) as exc_info:
            parser.parse(FakeLexer([Token("VALUE", "hello"), Token("VALUE", "world", SourcePosition(5, 10, 2))]))

        assert exc_info.value.getsourcepos().lineno == 10
Example #48
0
    def test_simple_caching(self):
        """Building twice with a cache_id exercises the cache write and read paths."""
        pg = ParserGenerator(["VALUE"], cache_id="simple")

        @pg.production("main : VALUE")
        def main(p):
            return p[0]

        pg.build()
        parser = pg.build()

        assert parser.parse(iter([
            Token("VALUE", "3")
        ])) == Token("VALUE", "3")
Example #49
0
    def test_simple(self):
        """A one-production grammar yields the expected LR action table."""
        pg = ParserGenerator(["VALUE"])

        @pg.production("main : VALUE")
        def main(p):
            return p[0]

        parser = pg.build()

        # NOTE(review): negative entries appear to encode reductions and 0
        # the accept action -- confirm against rply's LR-table internals.
        assert parser.lr_table.lr_action == {
            0: {"VALUE": 2},
            1: {"$end": 0},
            2: {"$end": -1},
        }
Example #50
0
    def test_simple_caching(self):
        """Both cache write and read paths run each test, via a random cache_id."""
        # Generate a random cache_id so that every test run does both the cache
        # write and read paths.
        pg = ParserGenerator(["VALUE"], cache_id=str(uuid.uuid4()))

        @pg.production("main : VALUE")
        def main(p):
            return p[0]

        pg.build()
        parser = pg.build()

        assert parser.parse(iter([
            Token("VALUE", "3")
        ])) == Token("VALUE", "3")
Example #51
0
def parser_from_lexer(lexer, mapping):
    """Build an rply parser over *lexer*'s token names.

    ``mapping`` maps a terminal token name to the AST value produced
    when that token stands alone as an expression.
    """
    pg = ParserGenerator(
        [rule.name for rule in lexer.rules],
        cache_id="cache",

        # NOTE: This is pretty arbitrary at the moment
        precedence=[
            ('right', ['NOT']),
            ('left', ['PARENT', 'CHILD', 'OR']),
            ('left', ['AND', 'TYPE']),
            ('left', ['L_PAREN', 'R_PAREN', 'EQUAL']),
        ])

    @pg.production("expr : L_PAREN expr R_PAREN")
    def expr(p):
        return p[1]

    @pg.production("expr : expr AND expr")
    @pg.production("expr : expr OR expr")
    @pg.production("expr : expr CHILD expr")
    @pg.production("expr : expr PARENT expr")
    def binary_operation(p):
        # Prefix form: [operator-token, lhs, rhs].
        return [p[1], p[0], p[2]]

    @pg.production("expr : expr EQUAL DATA")
    @pg.production("expr : expr TYPE DATA")
    def equal(p):
        p[2].value = p[2].value.strip("/")
        return p

    @pg.production("expr : expr NOT CHILD expr")
    @pg.production("expr : expr NOT PARENT expr")
    def not_expr(p):
        # This is a hack: fold NOT into the following operator token.
        op = p[2]
        op.name = "NOT_" + op.name
        op.value = "!" + op.value
        return [op, p[0], p[3]]

    # FIX: the inner function used to be named `kind`, shadowing the loop
    # variable of the same name.  It only worked because the for-loop
    # rebinds `kind` before the next decoration; give the handler a
    # distinct name so the shadowing trap is gone.
    for kind in mapping.keys():
        @pg.production("expr : " + kind)
        def terminal(p):
            # Dispatch by the token's own name at parse time, so one
            # handler serves every mapped terminal.
            return mapping[p[0].name]

    parser = pg.build()
    return parser
Example #52
0
    def test_default_reductions(self):
        """Reductions fire before the next token is requested.

        The recorded event order shows the final ``expr:compare`` and
        the preceding ``expr:int`` reductions happening *before* the
        lexer yields the terminating ``token:None`` — i.e. the parser
        performs default reductions without peeking ahead.
        """
        pg = ParserGenerator(
            ["INTEGER_START", "INTEGER_VALUE", "COMPARE"],
            precedence=[
                ("nonassoc", ["COMPARE"])
            ]
        )
        # Every token fetch and every reduction appends an event here.
        record = []

        @pg.production("main : expr")
        def main(p):
            record.append("main")
            return p[0]

        @pg.production("expr : expr COMPARE expr")
        def expr_compare(p):
            record.append("expr:compare")
            return BoxInt(p[0].getint() - p[2].getint())

        @pg.production("expr : INTEGER_START INTEGER_VALUE")
        def expr_int(p):
            record.append("expr:int")
            return BoxInt(int(p[1].getstr()))

        parser = pg.build()

        # Parses "10 - 5" spelled as START/VALUE token pairs.
        assert parser.parse(RecordingLexer(record, [
            Token("INTEGER_START", ""),
            Token("INTEGER_VALUE", "10"),
            Token("COMPARE", "-"),
            Token("INTEGER_START", ""),
            Token("INTEGER_VALUE", "5")
        ])) == BoxInt(5)

        # Note: both expr reductions precede "token:None" (EOF fetch).
        assert record == [
            "token:INTEGER_START",
            "token:INTEGER_VALUE",
            "expr:int",
            "token:COMPARE",
            "token:INTEGER_START",
            "token:INTEGER_VALUE",
            "expr:int",
            "expr:compare",
            "token:None",
            "main",
        ]
Example #53
0
    def test_reduce_reduce(self):
        """An ambiguous grammar warns about a reduce/reduce conflict."""
        gen = ParserGenerator(["NAME", "EQUALS", "NUMBER"])

        @gen.production("main : assign")
        def main(p):
            pass

        # NUMBER after EQUALS can reduce either directly into `assign`
        # or first into `expression` — a classic reduce/reduce clash.
        @gen.production("assign : NAME EQUALS expression")
        @gen.production("assign : NAME EQUALS NUMBER")
        def assign(p):
            pass

        @gen.production("expression : NUMBER")
        def expression(p):
            pass

        with self.assert_warns(ParserGeneratorWarning, "1 reduce/reduce conflict"):
            gen.build()
Example #54
0
    def test_parse_error_handler(self):
        """A registered @error handler receives the offending token."""
        gen = ParserGenerator(["VALUE"])

        @gen.production("main : VALUE")
        def main(p):
            return p[0]

        @gen.error
        def error_handler(token):
            raise ValueError(token)

        parser = gen.build()

        bad_token = Token("VALUE", "world")

        with py.test.raises(ValueError) as exc_info:
            parser.parse(FakeLexer([Token("VALUE", "hello"), bad_token]))

        # The very token object that broke the parse reaches the handler.
        assert exc_info.value.args[0] is bad_token
Example #55
0
    def test_error_handler_state(self):
        """With a state object, the error handler is called as (state, token)."""
        gen = ParserGenerator([])

        @gen.production("main :")
        def main(state, p):
            pass

        @gen.error
        def error(state, token):
            raise ValueError(state, token)

        parser = gen.build()

        parse_state = ParserState()
        stray = Token("VALUE", "")
        with py.test.raises(ValueError) as exc_info:
            parser.parse(iter([stray]), state=parse_state)

        # Both objects are passed through to the handler untouched.
        assert exc_info.value.args[0] is parse_state
        assert exc_info.value.args[1] is stray
Example #56
0
    def test_shift_reduce(self):
        """An expression grammar with no precedence declarations warns.

        The four binary operators with no declared precedence make every
        operator/operator adjacency ambiguous, yielding exactly the 20
        shift/reduce conflicts asserted below.
        """
        pg = ParserGenerator([
            "NAME", "NUMBER", "EQUALS", "PLUS", "MINUS", "TIMES", "DIVIDE",
            "LPAREN", "RPAREN"
        ])

        @pg.production("statement : NAME EQUALS expression")
        def statement_assign(p):
            pass

        @pg.production("statement : expression")
        def statement_expression(p):
            pass

        # Binary operators — no precedence given, hence the conflicts.
        @pg.production("expression : expression PLUS expression")
        @pg.production("expression : expression MINUS expression")
        @pg.production("expression : expression TIMES expression")
        @pg.production("expression : expression DIVIDE expression")
        def expression_binop(p):
            pass

        @pg.production("expression : MINUS expression")
        def expression_uminus(p):
            pass

        @pg.production("expression : LPAREN expression RPAREN")
        def expression_group(p):
            pass

        @pg.production("expression : NUMBER")
        def expression_number(p):
            pass

        @pg.production("expression : NAME")
        def expression_name(p):
            pass

        with self.assert_warns(
            ParserGeneratorWarning, "20 shift/reduce conflicts"
        ):
            pg.build()
Example #57
0
    def test_empty_production(self):
        """An epsilon production produces the expected LR action table.

        `values` may derive the empty string, so states 0 and 3 can
        either shift another VALUE or reduce the empty rule on $end.
        """
        pg = ParserGenerator(["VALUE"])

        @pg.production("main : values")
        def main(p):
            return p[0]

        @pg.production("values : VALUE values")
        def values_value(p):
            return [p[0]] + p[1]

        # Epsilon rule: `values` can match nothing and yield [].
        @pg.production("values :")
        def values_empty(p):
            return []

        parser = pg.build()
        assert parser.lr_table.lr_action == {
            0: {"$end": -3, "VALUE": 3},
            1: {"$end": 0},
            2: {"$end": -1},
            3: {"$end": -3, "VALUE": 3},
            4: {"$end": -2},
        }
Example #58
0
 def test_production_syntax_error(self):
     """A rule string missing the ':' separator is rejected."""
     gen = ParserGenerator([])
     with py.test.raises(ParserGeneratorError):
         gen.production("main VALUE")
Example #59
0
# Character literals: named escapes are added before the catch-all
# "char" rule (presumably rules are matched in addition order — confirm
# against the rply LexerGenerator docs).
lg.add("char_return", r"\\return")
lg.add("char_space", r"\\space")
lg.add("char", r"\\.")
lg.add("ns_symbol", NS_SYMBOL)
lg.add("symbol", SYMBOL_RE)
# String bodies allow \^X control escapes, ordinary backslash escapes,
# and any non-quote character.
lg.add("string", r'"(\\\^.|\\.|[^\"])*"')
# Tagged literals: namespaced tags before plain tags, same ordering note.
lg.add("ns_tag", "#" + NS_SYMBOL)
lg.add("tag", "#" + SYMBOL_RE)

# Whitespace, commas and newlines are separators; ';' starts a comment.
lg.ignore(r"[\s,\n]+")
lg.ignore(r";.*\n")

lexer = lg.build()

# Every token name the grammar can consume (same set as the lexer emits).
pg = ParserGenerator([
    "boolean", "nil", "float", "number", "olist", "clist",
    "omap", "cmap", "ovec", "cvec", "oset", "colon",
    "char_nl", "char_tab", "char_return", "char_space", "char",
    "symbol", "ns_symbol", "string", "tag", "ns_tag",
])

class Char(TaggedValue):
    """A character literal, stored as a value tagged 'char'."""
    def __init__(self, rep):
        TaggedValue.__init__(self, 'char', rep)

# Shared singletons for the named character-escape tokens.
NL = Char('\n')
TAB = Char('\t')
RETURN = Char('\r')
SPACE = Char(' ')

class State(object):
    """Parser configuration: tag-handler table plus unknown-tag policy."""

    def __init__(self, tagged, accept_unknown_tags):
        # Substitute an empty handler table when none was supplied.
        self.tagged = {} if tagged is None else tagged
        self.accept_unknown_tags = accept_unknown_tags
Example #60
0
# NOTE(review): the ur'...' literals make this module Python 2 only.
lex = LexerGenerator()
# Skip separators: runs of commas/semicolons/whitespace and the
# connective words "and" / "or".
lex.ignore(ur'(?:[,;\s]+|\band\b|\bor\b)+')
lex.add(u'URL', UrlPattern)
# The two specific "better than" hashtags appear before the generic
# HASHTAG rule (presumably rules match in addition order — confirm
# against the rply LexerGenerator docs).
lex.add(u'BTHASH', ur'#betterthan')
lex.add(u'IBTHASH', ur'#isbetterthan')
lex.add(u'HASHTAG', ur'#[a-zA-Z0-9_]+')
lex.add(u'MENTION', ur'@[a-zA-Z0-9_]+')
lex.add(u'FOR', ur'(for|FOR|For)')
lex.add(u'WORD', ur'[\w]+')

# Parser over the same token set; generated tables are cached on disk
# under the given cache_id.
pg = ParserGenerator([u'URL',
                      u'BTHASH',
                      u'IBTHASH',
                      u'HASHTAG',
                      u'MENTION',
                      u'FOR',
                      u'WORD'
                     ], 
                     cache_id=u'graphextractor.tweetparser')

@pg.production("betterthan : words URL bthash URL topics words")
def betterthan(p):
    """Build the AST dict for '<words> URL #betterthan URL <topics> <words>'."""
    # Seed the result with the topics entries first, so that the two
    # URL keys written below take precedence on any collision — the
    # same ordering the original update() calls produced.
    ast = dict(p[4])
    ast["better_url"] = p[1].value
    ast["worse_url"] = p[3].value
    return ast

@pg.production("bthash : BTHASH")
@pg.production("bthash : IBTHASH")