Example #1
def test_accepted_init_lexer(source_code, file_name):
    l = None
    if source_code == "" and file_name == "":
        l = lexer.Lexer()
        file_name = "CLI"
    elif source_code == "":
        l = lexer.Lexer(filename=file_name)
    elif file_name == "":
        l = lexer.Lexer(source_code=source_code)
        file_name = "CLI"
    else:
        l = lexer.Lexer(source_code, file_name)

    assert isinstance(l, lexer.Lexer) and (l.error is None) and \
        (l.filename == file_name) and (l.source_code == source_code)
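
A fixture-less test like this is normally fed by a parametrize decorator; a minimal sketch, assuming pytest and that an empty string marks an omitted argument ("prog.src" and "x = 1" are made-up values):

import pytest

@pytest.mark.parametrize("source_code, file_name", [
    ("", ""),               # neither given: defaults, filename falls back to "CLI"
    ("", "prog.src"),       # filename only
    ("x = 1", ""),          # source only, filename falls back to "CLI"
    ("x = 1", "prog.src"),  # both given
])
def test_accepted_init_lexer(source_code, file_name):
    ...  # body as above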
Example #2
def main(filename):
    try:
        # open the file for lexical analysis
        file_stream = open(filename, 'r')
        the_lexer = lexer.Lexer(file_stream)
        t = the_lexer.next_token()
        # while not at end of file,
        while t is not None and t.tokentype != token.EOS:
            # print the next token
            print(t)
            t = the_lexer.next_token()
    # in case the user chooses a bad file, let them know why it's not working
    except IOError:
        print("error: unable to open file '" + filename + "'")
        sys.exit(1)
    except error.Error as e:
        print(e)
        sys.exit(1)


if __name__ == '__main__':
    if len(sys.argv) != 2:
        print('usage:', sys.argv[0], 'source-code-file')
        sys.exit(1)
    else:
        main(sys.argv[1])
Example #3
 def __init__(self):
     lex_rules = (
         ('if', 'IF'),
         ('then', 'THEN'),
         ('else', 'ELSE'),
         ('true', 'TRUE'),
         ('false', 'FALSE'),
         ('lambda', 'LAMBDA'),
         (r'\d+', 'INT'),
         ('->', 'ARROW'),
         ('!=', '!='),
         ('==', '=='),
         ('>=', '>='),
         ('<=', '<='),
         ('<', '<'),
         ('>', '>'),
         (r'\+', '+'),
         (r'\-', '-'),
         (r'\*', '*'),
         (r'\(', '('),
         (r'\)', ')'),
         ('=', '='),
         (',', ','),
         (r'[a-zA-Z_]\w*', 'ID'),
     )
     self.lexer = lexer.Lexer(lex_rules, skip_whitespace=True)
     self.cur_token = None
     self.operators = {'!=', '==', '>=', '<=', '<', '>', '+', '-', '*'}
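
The lexer.Lexer consumed by this rule table is not shown; here is a minimal sketch of a regex-driven lexer with the same constructor shape, assuming first-match-wins semantics (the Token type and the tokens method name are assumptions):

import re
from collections import namedtuple

Token = namedtuple('Token', ['type', 'value', 'pos'])

class Lexer:
    def __init__(self, rules, skip_whitespace=True):
        # Compile each (pattern, token name) pair once, keeping rule order,
        # so earlier rules win. Note that real rule sets usually anchor
        # keywords with \b so 'if' cannot match the start of 'iffy'.
        self.rules = [(re.compile(pattern), name) for pattern, name in rules]
        self.skip_whitespace = skip_whitespace

    def tokens(self, text):
        pos = 0
        while pos < len(text):
            if self.skip_whitespace and text[pos].isspace():
                pos += 1
                continue
            for regex, name in self.rules:
                m = regex.match(text, pos)
                if m:
                    yield Token(name, m.group(), pos)
                    pos = m.end()
                    break
            else:
                raise SyntaxError('illegal character %r at %d' % (text[pos], pos))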
Example #4
 def __init__(self, rules):
     # initialize the lex rules for the grammar
     self.lx = lexer.Lexer(rules, skip_whitespace=True)
     self.cur_token = None
     self.next_token = None
     # dictionary for variables (e.g. set x  = 10)
     self.vars = {}
Example #5
def run(text):
    # Generate tokens
    lexer = lex.Lexer(text)
    tokens, error = lexer.make_tokens()
    if error:
        return error

    # Generate AST
    print("RUN2 | Tokens:", tokens)
    parser = pars.Parser(tokens)
    ast = parser.parse()
    if ast.error:
        return ast.error

    # Run program
    print("RUN3 | AST:", ast.node)
    interpreter = inter.Interpreter()
    context = inter.Context('<program>')
    context.symbol_table = global_symbol_table
    result = interpreter.visit(ast.node, context)
    print(result.value)
    return result.error
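
A run() function like this is usually driven from a small REPL loop; a minimal sketch, assuming run() is in scope (the 'lang > ' prompt is a made-up name):

while True:
    text = input('lang > ')
    if not text.strip():
        continue
    error = run(text)
    if error:
        print(error)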
Example #6
 def __init__(self, **kwargs):
     self.lexer = lexer.Lexer(**kwargs)
     self.tokens = self.lexer.tokens
     self.parser = yacc(debug=False,
                        debuglog=None,
                        module=self,
                        outputdir=tempfile.gettempdir())
Example #7
 def __init__(self, args=None):
     """
     Get the options from cli.
     """
     self.cli = cli.CLI()
     self.args = self.cli.parse_arguments(args)
     self.lexer = lexer.Lexer()
Example #8
    def TestReturnStatements(self):
        print("Running TestReturnStatements ...")        

        @dataclass
        class test:
            input: str
            expectedValue: object

        tests = [
            test("return 5", 5),
            test("return true", True),
            test("return y", "y"),
        ]

        for tt in tests:         
            l = lxr.Lexer(tt.input)
            p = prsr.Parser(l)

            program = p.ParseProgram()
            self.checkParsersErrors(p)

            if program is None:
                raise Exception('ParseProgram() returned None')
        
            if len(program.Statements) != 1:
                raise Exception(f'program.Statements does not contain 1 statements. got={len(program.Statements)}')
        
            stmt = program.Statements[0]
            if not self.testReturnStatement(stmt):
                return 

            val = stmt.Value
            if not self.testLiteralExpression(val, tt.expectedValue):
                return
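
The testLiteralExpression helper used here typically dispatches on the expected value's Python type; a minimal sketch, assuming type-specific helpers testIntegerLiteral, testBooleanLiteral, and testIdentifier exist on the same class:

    def testLiteralExpression(self, exp, expected):
        # Dispatch on the expected value's type; bool must be checked
        # before int because bool is a subclass of int in Python.
        if isinstance(expected, bool):
            return self.testBooleanLiteral(exp, expected)
        if isinstance(expected, int):
            return self.testIntegerLiteral(exp, expected)
        if isinstance(expected, str):
            return self.testIdentifier(exp, expected)
        raise Exception(f'type of expected value not handled. got={type(expected)}')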
Example #9
    def TestFunctionParameterParsing(self):
        @dataclass
        class test:
            input: str
            expectedParams: List[str]
        
        tests = [
            test("fn() {}",[]),
            test("fn(x) {}",['x']),
            test("fn(x, y, z) {}",['x', 'y', 'z'])
        ]

        for tt in tests:
            l = lxr.Lexer(tt.input)
            p = prsr.Parser(l)

            program = p.ParseProgram()
            self.checkParsersErrors(p)     

            stmt = program.Statements[0]       

            if not isinstance(stmt, ast.ExpressionStatement):
                raise Exception(f'statement is not a ast.ExpressionStatement got={program.Statements[0]}')
            
            function = stmt.Expression

            if len(function.Parameters) != len(tt.expectedParams):
                raise Exception(f'function.Parameters does not contain {len(tt.expectedParams)} parameters got {len(function.Parameters)}')

            for ix, expected in enumerate(tt.expectedParams):
                self.testLiteralExpression(function.Parameters[ix], expected)
Example #10
    def TestCallExpressionParsing(self):
        print("Running TestCallExpressionParsing ...")
        input = """
        add(1, 2 * 3, 4 + 5)
        """     
        
        l = lxr.Lexer(input)
        p = prsr.Parser(l)

        program = p.ParseProgram()
        self.checkParsersErrors(p)

        if len(program.Statements) != 1:
            raise Exception(f'program.Statements does not contain 1 statement got={len(program.Statements)}')

        stmt = program.Statements[0]
        if not isinstance(stmt, ast.ExpressionStatement):
            raise Exception(f'statement is not a ast.ExpressionStatement got={program.Statements[0]}')        

        funcCall = stmt.Expression
        if not isinstance(funcCall, ast.CallExpression):
            raise Exception(f'stmt.Expression is not a ast.CallExpression got={stmt.Expression}') 

        if not self.testIdentifier(funcCall.Function, "add"):
            return 

        if len(funcCall.Arguments) != 3:
            raise Exception(f'funcCall.Arguments does not contain 3 arguments got={len(funcCall.Arguments)}')

        self.testLiteralExpression(funcCall.Arguments[0], 1)
        self.testInfixExpression(funcCall.Arguments[1], 2, "*", 3)
        self.testInfixExpression(funcCall.Arguments[2], 4, "+", 5)
Example #11
    def TestIntegerLiteralExpressions(self):
        print("Running TestIntegerLiteralExpressions ...")
        input = """
        5
        """

        l = lxr.Lexer(input)
        p = prsr.Parser(l)

        program = p.ParseProgram()
        self.checkParsersErrors(p)

        if len(program.Statements) != 1:
            raise Exception(f'program.Statements does not contain 1 statements. got={len(program.Statements)}')

        stmt = program.Statements[0]
        if not isinstance(stmt, ast.ExpressionStatement):
            raise Exception(f'statement is not a ast.ExpressionStatement got={program.Statements[0]}')

        literal = stmt.Expression
        if not isinstance(literal, ast.IntegerLiteral):
            raise Exception(f'Expression is not a ast.IntegerLiteral got={stmt.Expression}')

        if literal.Value != 5:
            raise Exception(f'literal.Value is not 5 got={literal.Value}')

        if literal.TokenLiteral() != "5":
            raise Exception(f'literal.TokenLiteral() is not "5" got={literal.TokenLiteral()}')
Example #12
    def TestBooleanExpressions(self):
        print("Running TestBooleanExpressions ...")
        input = """
        true
        """     

        l = lxr.Lexer(input)
        p = prsr.Parser(l)

        program = p.ParseProgram()
        self.checkParsersErrors(p)

        if len(program.Statements) != 1:
            raise Exception(f'program.Statements does not contain 1 statements. got={len(program.Statements)}')

        stmt = program.Statements[0]
        if not isinstance(stmt, ast.ExpressionStatement):
            raise Exception(f'statement is not a ast.ExpressionStatement got={program.Statements[0]}')

        ident = stmt.Expression
        if not isinstance(ident, ast.Boolean):
            raise Exception(f'Expression is not a ast.Boolean got={stmt.Expression}')

        if ident.Value is not True:
            raise Exception(f'ident.Value is not True got={ident.Value}')

        if ident.TokenLiteral() != "true":
            raise Exception(f'ident.TokenLiteral() is not "true" got={ident.TokenLiteral()}')
Example #13
    def __init__(self, filename):

        lex = lexer.Lexer()
        self.tokens = lex.token_generator(filename)

        # currtok is the current token that we are looking at
        self.currtok = next(self.tokens)
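
A parser holding a token generator like this usually adds small stepping helpers; a minimal sketch (advance and match are assumed names, as is the tokentype attribute on tokens):

    def advance(self):
        # Step to the next token, tolerating the end of the stream.
        try:
            self.currtok = next(self.tokens)
        except StopIteration:
            self.currtok = None

    def match(self, tokentype):
        # Consume and return the current token if it has the expected type.
        if self.currtok is not None and self.currtok.tokentype == tokentype:
            tok = self.currtok
            self.advance()
            return tok
        raise SyntaxError(f'expected {tokentype}, got {self.currtok}')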
Example #14
def main():

    # Variable to store the contents of the file
    content = ""

    with open('test.lang', 'r') as file:
        # reading and storing the contents of the file
        content = file.read()

    ############################
    #          Lexer           #
    ############################

    # Instantiate the Lexer class with the source code
    lex = lexer.Lexer(content)
    # Tokenize the source through the lexer instance
    tokens = lex.tokenize()

    ############################
    #          Parser          #
    ############################

    # Instantiate the Parser with the token stream
    parse = parser.Par(tokens)
    # Parse the tokens through the parser instance
    parse.parse()
Example #15
    def __init__(self,
                 compiler='aspCompiler',
                 silent=False,
                 decoupled=False,
                 agent=False):

        self.silent = silent
        self.debug = False
        self.ignore_errors = False
        self.ignore_undefined = False
        self.there_were_roles = False
        self.roles = []

        self.lex = lexer.Lexer()
        self.lex.build()
        self.par = parser.Parser()
        self.par.build()

        if compiler == 'aspCompiler':
            self.comp = aspCompiler.AspCompiler(decoupled=decoupled,
                                                agent=agent)
        elif compiler == 'aspReducedCompiler':
            self.comp = aspReducedCompiler.AspReducedCompiler(
                decoupled=decoupled, agent=agent)
        else:
            if not self.silent:
                print("Could not find Compiler %s. Sticking to aspCompiler." % compiler, file=sys.stderr)
            self.comp = aspCompiler.AspCompiler(decoupled=decoupled,
                                                agent=agent)

        self.default_output_path = "./"  #"../temp/"
Example #16
def eval_test(input: str) -> MonkeyObject:
    l = lexer.Lexer(input)
    p = parser.Parser(l)
    program = p.parse_program()
    env = mobject.Environment()

    return evaluator.eval(program, env)
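
A typical call site then asserts on the returned object; a short sketch, assuming an mobject.Integer wrapper with a value field:

def test_integer_arithmetic():
    result = eval_test("5 + 5 * 2")
    assert isinstance(result, mobject.Integer)
    assert result.value == 15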
Example #17
 def __init__(self):
     lex_rules = (
         ("if", "IF"),
         ("then", "THEN"),
         ("else", "ELSE"),
         ("true", "TRUE"),
         ("false", "FALSE"),
         ("\d+", "INT"),
         ("->", "ARROW"),
         ("!=", "NE"),
         ("==", "EQ"),
         (">=", "GE"),
         (">", "GT"),
         ("<=", "LE"),
         ("<", "LT"),
         ("\+", "PLUS"),
         ("\-", "MINUS"),
         ("\*", "MUL"),
         ("\/", "DIV")("\%", "MOD"),
         ("\(", "LPAREN"),
         ("\)", "RPAREN"),
         ("=", "ASSIGN"),
         (",", "DOT"),
         ("[a-zA-Z_]\w*", "ID"),
     )
     self.lexer = lexer.Lexer(lex_rules, skip_whitesapce=True)
     self.cur_token = None
     self.operators = {"!=", "==", ">=", "<=", "<", ">", "+", "-", "*", "%"}
Example #18
 def parse_imports(self, parent_unit: ast.TranslationUnit):
     for imp_stmt in parent_unit.import_statements:
         if imp_stmt.path not in self.modules:
             # Parse
             # TODO this should be encapsulated more nicely. Currently, same code
             # as in main.py
             try:
                 with open(imp_stmt.path) as f:
                     code = f.read()
             except Exception as e:
                 raise TypecheckException(
                     f"Cannot import {imp_stmt.path}:"
                     f" '{e}'", imp_stmt)
             l = lexer.Lexer(code, imp_stmt.path)
             p = parser.Parser(l)
             child_unit = p.compile()
             # Add the parsed unit to the module map
             if not isinstance(child_unit, ast.TranslationUnit):
                 raise TypecheckException(
                     f"Importing {imp_stmt.path}"
                     " failed.", imp_stmt)
             self.modules[imp_stmt.path] = child_unit
             # Recurse
             self.parse_imports(child_unit)
         # Add to symbol table
         parent_unit.symbols_global[imp_stmt.name] = bongtypes.Module(
             imp_stmt.path)
Example #19
    def __init__(self, stmt_as_is=False):

        self.lexer = lexer.Lexer(stmt_as_is)

        # TODO: `write_tables=0` is a temporary workaround until we find a directory to settle in

        self.parser = yacc.yacc(start="program", debug=True,
                                debuglog=logging.getLogger('parser'),
                                errorlog=logging.getLogger('parser2'),
                                write_tables=0)
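
With PLY, the generated parser is then fed source text together with the lexer; a minimal sketch of such a method, assuming self.lexer exposes the input()/token() interface PLY expects:

    def parse(self, text):
        # Hand the source text and our lexer to the generated LALR parser.
        return self.parser.parse(text, lexer=self.lexer)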
Example #20
def doMain(args):
    # Parse all legacy files.
    import seqan.dddoc.core as core
    app = core.App()
    for path in args.legacy_doc_dirs:
        print('Scanning %s...' % path)
        app.loadFiles(path)
    migrated_doc = raw_doc.RawDoc()
    if args.legacy_doc_dirs:
        app.loadingComplete()
        migrated_doc.entries = migration.migrate(app.dddoc_tree)
        print('migrated_doc.entries', [e.name.text for e in migrated_doc.entries])
    # Parse all normal input files.
    fmgr = file_mgr.FileManager()
    master_doc = raw_doc.RawDoc()
    master_doc.merge(migrated_doc)
    fns = FileNameSource(args.inputs)
    for filename in fns.generate():
        if args.debug:
            print('Processing %s' % filename)
        the_file = fmgr.loadFile(filename)
        lex = lexer.Lexer(dox_tokens.LEXER_TOKENS, skip_whitespace=False)
        for comment in the_file.comments:
            # TODO(holtgrew): Also give offset.
            lex.input(comment.text, filename, comment.line, comment.col, comment.offset_col)
            parser = dox_parser.Parser()
            try:
                parser.parse(lex)
            except dox_parser.ParserError as e:
                dox_parser.printParserError(e)
                return 1
            master_doc.merge(parser.documentation)
Example #21
def TestParsingInfixExpression():
    class struct:
        def __init__(self, input, leftValue, operator, rightValue):
            self.input = input
            self.leftValue = leftValue
            self.operator = operator
            self.rightValue = rightValue
    infixTests = [struct("5 + 5;", 5, "+", 5),
                  struct("5 - 5;", 5, "-", 5),
                  struct("5 * 5;", 5, "*", 5),
                  struct("5 / 5;", 5, "/", 5),
                  struct("5 > 5;", 5, ">", 5),
                  struct("5 < 5;", 5, "<", 5),
                  struct("5 != 5;", 5, "!=", 5),
                  struct("true == true", True, "==", True),
                  struct("true != false", True, "!=", False)]
    for test in infixTests:
        l = lexer.Lexer(test.input)
        p = parser.Parser(l)
        program = p.ParseProgram()
        checkParserErrors(p)
        assert len(program.Statements) == 1
        stmt = program.Statements[0]
        assert isinstance(stmt, ast.ExpressionStatement)
        exp = stmt.Expression
        TestIntegerLiteral(exp.Left, test.leftValue)
        assert exp.Operator == test.operator
        TestIntegerLiteral(exp.Right, test.rightValue)
    print("TestParsingInfixExpression test is success")
Example #22
def main():
    file_name = __get_file_name(sys.argv)
    file_ext = __get_file_extension(file_name)

    if file_ext != ".fyp":
        eh.print_and_exit("error: invalid file extension: must be .fyp")

    if not __file_exists(file_name):
        eh.print_and_exit("error: file does not exist!")

    _file = open(file_name)
    _lex = lexer.Lexer(' '.join(
            [line.rstrip("\n") for line in _file if not __is_comment(line)]))
    _tokens = _lex.tokenize().tokens

    is_test = False

    if len(_tokens) > 2 and _tokens[2] == "test":
        is_test = True
        _tokens = _tokens[0:2] + _tokens[3:]

    _parser = parser_.Parser(_tokens)
    _eval = evaluator.Evaluator(_parser, is_test)

    if _eval.tokens[0] != "begin":
        eh.print_and_exit("error: first function must be 'begin'!")

    _eval.eval()
Example #23
    def testParse(self):
        ''' Test the parse() method (parsing an entire tree). '''
        class Handler(AbstractHandler):
            def __init__(self):
                super(Handler, self).__init__()
                self.edge_count = 0
                self.node_count = 0
                self.tree_end_count = 0
                self.leaf_count = 0

            def new_edge(self, bootstrap, length):
                self.edge_count += 1

            def new_tree_begin(self):
                self.node_count += 1

            def new_tree_end(self):
                self.tree_end_count += 1

            def new_leaf(self, name):
                self.leaf_count += 1

        l = lexer.Lexer("((A,B),C);")
        handler = Handler()
        parser = _Parser(l, handler)
        parser.parse_node()
        self.assertEqual(handler.node_count, handler.tree_end_count)
        self.assertEqual(handler.node_count, 2)
        self.assertEqual(handler.edge_count, 4)
        self.assertEqual(handler.leaf_count, 3)
Example #24
def test_parsing_infix_expressions():
    infix_tests = [
        ("5 + 5", 5, "+", 5),
        ("5 - 5", 5, "-", 5),
        ("5*5", 5, "*", 5),
        ("5 / 5", 5, "/", 5),
        ("5 >5", 5, ">", 5),
        ("5< 5", 5, "<", 5),
        ("5 == 5", 5, "==", 5),
        ("5!=5", 5, "!=", 5),
        ("true == true", True, "==", True),
        ("true!=false", True, "!=", False),
    ]

    for input, left_value, operator, right_value in infix_tests:
        l = lexer.Lexer(input)
        p = parser.Parser(l)
        program = p.parse_program()
        check_parser_errors(p)
        check_program_length(program, 1)

        statement = program.statements[0]
        check_statement(statement, ast.ExpressionStatement)

        exp = statement.expression

        infix_expression_test(exp, left_value, operator, right_value)
Example #25
 def test_operators(self):
     toks = list(lexer.Lexer("+-*/").make_token())
     self.assertEqual(toks, [
         tokens.Token(tokens.TokenType.PLUS),
         tokens.Token(tokens.TokenType.MINUS),
         tokens.Token(tokens.TokenType.MULTIPLY),
         tokens.Token(tokens.TokenType.DIVIDE)
     ])
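
assertEqual only works here if tokens compare by value; a minimal sketch of such a tokens module, assuming a dataclass-based Token:

from dataclasses import dataclass
from enum import Enum, auto
from typing import Any

class TokenType(Enum):
    PLUS = auto()
    MINUS = auto()
    MULTIPLY = auto()
    DIVIDE = auto()
    NUMBER = auto()

@dataclass
class Token:
    type: TokenType
    value: Any = None  # dataclass generates __eq__, comparing type and value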
Example #26
 def test_branch_length_handler(self):
     ''' Test that the handler works by summing up the total branch
     length. '''
     l = lexer.Lexer("(('foo' : 0.1, 'bar' : 1.0) : 2, baz)")
     handler = HandlerTest.BranchLengthSum()
     p = _Parser(l, handler)
     p.parse()
     self.assertEqual(handler.sum, 3.1)
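
The BranchLengthSum handler itself is not shown; a plausible sketch, assuming the AbstractHandler callbacks from Example #23 and that edges without an explicit length report None:

class BranchLengthSum(AbstractHandler):
    def __init__(self):
        self.sum = 0.0

    def new_edge(self, bootstrap, length):
        # Accumulate only edges that carry an explicit branch length;
        # "(('foo' : 0.1, 'bar' : 1.0) : 2, baz)" then sums to 3.1.
        if length is not None:
            self.sum += length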
Example #27
 def setUp(self):
     self.src = source.StringSource()
     self.lex = lexer.Lexer(self.src)
     self.environment = env.Environment()
     self.parser = pars.Parser(self.lex, self.environment)
     self.program = interpreter.Interpreter(self.environment, self.parser)
     self.output = io.StringIO()
     sys.stdout = self.output
Example #28
 def test_numbers(self):
     toks = list(lexer.Lexer("1245.4 .234 1234. .").make_token())
     self.assertEqual(toks, [
         tokens.Token(tokens.TokenType.NUMBER, 1245.4),
         tokens.Token(tokens.TokenType.NUMBER, .234),
         tokens.Token(tokens.TokenType.NUMBER, 1234.),
         tokens.Token(tokens.TokenType.NUMBER, 0.0),  # a lone '.' lexes as 0.0
     ])
Example #29
def main():
    content = ""
    with open('test.lang', 'r') as file:
        content = file.read()
    print(content)

    lex = lexer.Lexer(content)
    tokens = lex.tokenize()
Example #30
def get_program(input: str, n_statements: int) -> ast.Program:
    l = lexer.Lexer(input)
    p = parser.Parser(l)
    program = p.parse_program()
    check_parser_errors(p)
    check_program_length(program, n_statements)

    return program
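
A typical caller then drills into the returned program with the same helpers; a short sketch (ast.LetStatement is an assumed node type):

def test_let_statement():
    program = get_program("let x = 5", n_statements=1)
    statement = program.statements[0]
    check_statement(statement, ast.LetStatement)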