Example #1
def repl():
    line = input(">> ")
    lexer = Lexer(line)

    while (tok :=
           lexer.next_token()).type not in {TokenType.ILLEGAL, TokenType.EOF}:
        print(tok)
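The REPL above and the lexer tests below read token.type and token.literal and compare them against TokenType members. As a point of reference, a minimal sketch of what that pair might look like is given here; it is inferred from the calls in the examples, not copied from any of the quoted repositories.

from enum import Enum
from typing import NamedTuple


class TokenType(Enum):
    # Only the members Example #2 needs are sketched here; the operator,
    # comparison and keyword members used later (MINUS, BANG, LT, EQ, IF,
    # RETURN, ...) would follow the same pattern.
    ILLEGAL = "ILLEGAL"
    EOF = "EOF"
    IDENTIFIER = "IDENT"
    INT = "INT"
    ASSIGN = "="
    PLUS = "+"
    COMMA = ","
    SEMICOLON = ";"
    LPAREN = "("
    RPAREN = ")"
    LBRACE = "{"
    RBRACE = "}"
    FUNCTION = "FUNCTION"
    LET = "LET"


class Token(NamedTuple):
    type: TokenType
    literal: str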
Example #2
def test_next_token_on_source_code():
    input = """
        let five = 5;
        let ten = 10;

        let add = fn(x, y){
            x + y;
        };

        let result = add(five, ten);
    """

    expected = [
        (TokenType.LET, "let"),
        (TokenType.IDENTIFIER, "five"),
        (TokenType.ASSIGN, "="),
        (TokenType.INT, "5"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.LET, "let"),
        (TokenType.IDENTIFIER, "ten"),
        (TokenType.ASSIGN, "="),
        (TokenType.INT, "10"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.LET, "let"),
        (TokenType.IDENTIFIER, "add"),
        (TokenType.ASSIGN, "="),
        (TokenType.FUNCTION, "fn"),
        (TokenType.LPAREN, "("),
        (TokenType.IDENTIFIER, "x"),
        (TokenType.COMMA, ","),
        (TokenType.IDENTIFIER, "y"),
        (TokenType.RPAREN, ")"),
        (TokenType.LBRACE, "{"),
        (TokenType.IDENTIFIER, "x"),
        (TokenType.PLUS, "+"),
        (TokenType.IDENTIFIER, "y"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.RBRACE, "}"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.LET, "let"),
        (TokenType.IDENTIFIER, "result"),
        (TokenType.ASSIGN, "="),
        (TokenType.IDENTIFIER, "add"),
        (TokenType.LPAREN, "("),
        (TokenType.IDENTIFIER, "five"),
        (TokenType.COMMA, ","),
        (TokenType.IDENTIFIER, "ten"),
        (TokenType.RPAREN, ")"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.EOF, ""),
    ]

    lexer = Lexer(input)

    for expected_token, expected_literal in expected:
        token = lexer.next_token()

        assert token.type == expected_token
        assert token.literal == expected_literal
Example #3
    def testReturnStatement(self):
        inputs = '''return 5;
                    return 10;
                    return 838383;    :'''
        lex = Lexer(inputs)
        curtoken = lex.nextToken()
        peektoken = lex.nextToken()
        pas = Parser(lex, curtoken, peektoken)
        program = pas.parseProgram()

        self.assertEqual(3, len(program.statements))
Example #4
    def testErrors(self):
        inputs = '''let five  5;
                    let =  10;
                    let   838383;    :'''
        lex = Lexer(inputs)
        curtoken = lex.nextToken()
        peektoken = lex.nextToken()
        pas = Parser(lex, curtoken, peektoken)
        program = pas.parseProgram()

        self.assertEqual(3, len(pas.errors))
        print('Number of errors is {}'.format(len(pas.errors)))
Example #5
    def testLetStatement(self):
        inputs = '''let five = 5;
                    let ten = 10;
                    let foobar = 838383;    :'''
        lex = Lexer(inputs)
        curtoken = lex.nextToken()
        peektoken = lex.nextToken()
        pas = Parser(lex, curtoken, peektoken)
        program = pas.parseProgram()

        self.assertIsNotNone(program)
        self.assertEqual(3, len(program.statements))
        for stmt in program.statements:
            self.assertEqual(stmt.tokenLiteral(), 'let')
Example #6
def test_if_expression():
    source = "if (x < y) { x }"

    l = Lexer(source)
    p = Parser(l)

    program = p.parse_program()
    assert p.errors == []

    assert len(program.statements) == 1

    stmt = program.statements[0]

    assert type(stmt) == ast.ExpressionStatement

    assert type(stmt.expression) == ast.IfExpression

    assert stmt.expression.condition.left.value == "x"

    assert stmt.expression.condition.operator == "<"

    assert stmt.expression.condition.right.value == "y"

    # TODO Finish defining me please

    assert len(stmt.expression.consequence.statements) == 1
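
    # A plausible way to finish the TODO above, following the usual shape of this
    # test in Monkey interpreter ports (an assumption, not the original author's code):
    consequence = stmt.expression.consequence.statements[0]
    assert type(consequence) == ast.ExpressionStatement
    assert consequence.expression.value == "x"
    assert stmt.expression.alternative is None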
Example #7
    def test_nextToken(self):
        inputs = '''let five = 5;
                    let ten = 10;
                    let add = fn(x, y) {
                    x + y;
                    };
                    let result = add(five, ten);
                    !-/*5;
                    5 < 10 > 5;
                    10 == 10;
                    10 != 9;'''
        l = Lexer(inputs)
        while l.readPosition <= len(inputs):
            tk = l.nextToken()
            print('Literal is ' + tk.literal)
            self.assertEqual(tk.tokenType, testToken[tk.literal])
Example #8
    def test_is_whitespace(self):
        assert Lexer.is_whitespace(' ')
        assert Lexer.is_whitespace('\t')
        assert Lexer.is_whitespace('\n')
        assert Lexer.is_whitespace('\r')
        assert not Lexer.is_whitespace('4')
        assert not Lexer.is_whitespace('f')
        assert not Lexer.is_whitespace('$')
        assert not Lexer.is_whitespace('Z')
Example #9
def test_parsing_let_statements():
    input = """
        let x = 5;
        let y = 10;
        let foobar = 838383;
    """
    lexer = Lexer(input)
    parser = Parser(lexer)
Example #10
def start():
    env = object.Environment()
    while True:
        line = input(prompt)
        l = Lexer(line)
        p = Parser(l)
        program = p.parse_program()
        val = evaluator.eval(program, env)
        print(val)
Example #11
def test_string_literal_expression():
    source = '"hello world";'
    l = Lexer(source)
    p = Parser(l)
    program = p.parse_program()
    assert p.errors == []
    stmt = program.statements[0]
    assert isinstance(stmt, ast.ExpressionStatement)
    assert isinstance(stmt.expression, ast.StringLiteral)
    assert stmt.expression.value == "hello world"
Example #12
def test_function_parameter_parsing(source, expected):
    l = Lexer(source)
    p = Parser(l)
    program = p.parse_program()
    assert p.errors == []
    stmt = program.statements[0]
    assert type(stmt) == ast.ExpressionStatement
    assert type(stmt.expression) == ast.FunctionLiteral

    func: ast.FunctionLiteral = stmt.expression

    assert len(func.params) == len(expected)
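
Since test_function_parameter_parsing takes (source, expected), it is presumably driven by a @pytest.mark.parametrize decorator; the parameter set below is a plausible assumption rather than the repository's actual one. test_let_statements and test_prefix_expressions further down read as parametrized in the same way.

import pytest


@pytest.mark.parametrize(
    "source, expected",
    [
        ("fn() {};", []),
        ("fn(x) {};", ["x"]),
        ("fn(x, y, z) {};", ["x", "y", "z"]),
    ],
)
def test_function_parameter_parsing(source, expected):
    ...  # body as shown above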
Example #13
def test_parsing_empty_hash_literal():
    source = "{}"

    l = Lexer(source)
    p = Parser(l)
    program = p.parse_program()
    assert p.errors == []
    stmt = program.statements[0]
    assert isinstance(stmt, ast.ExpressionStatement)
    hash_literal = stmt.expression
    assert isinstance(hash_literal, ast.HashLiteral)
    assert len(hash_literal.pairs) == 0
Example #14
    def test_is_digit(self):
        assert Lexer.is_digit('9')
        assert not Lexer.is_digit(' ')
        assert not Lexer.is_digit('f')
        assert not Lexer.is_digit('U')
        assert not Lexer.is_digit('$')
        assert not Lexer.is_digit('_')
Example #15
def test_call_expression_parsing():
    source = "add(1, 2 * 3, 4 + 5);"

    l = Lexer(source)
    p = Parser(l)

    program = p.parse_program()
    assert p.errors == []

    assert len(program.statements) == 1

    stmt = program.statements[0]

    assert isinstance(stmt, ast.ExpressionStatement)

    assert isinstance(stmt.expression, ast.CallExpression)

    assert stmt.expression.function.value == "add"

    assert len(stmt.expression.arguments) == 3

    assert stmt.expression == ast.CallExpression(
        token=Token(tok_type="(", literal="("),
        function=ast.Identifier(token=Token(tok_type="IDENT", literal="add"),
                                value="add"),
        arguments=[
            ast.IntegerLiteral(token=Token(tok_type="INT", literal="1"),
                               value=1),
            ast.InfixExpression(
                token=Token(tok_type="*", literal="*"),
                left=ast.IntegerLiteral(token=Token(tok_type="INT",
                                                    literal="2"),
                                        value=2),
                operator="*",
                right=ast.IntegerLiteral(token=Token(tok_type="INT",
                                                     literal="3"),
                                         value=3),
            ),
            ast.InfixExpression(
                token=Token(tok_type="+", literal="+"),
                left=ast.IntegerLiteral(token=Token(tok_type="INT",
                                                    literal="4"),
                                        value=4),
                operator="+",
                right=ast.IntegerLiteral(token=Token(tok_type="INT",
                                                     literal="5"),
                                         value=5),
            ),
        ],
    )

    assert str(stmt.expression) == "add(1, (2 * 3), (4 + 5))"
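
The whole-tree equality assertion above only works if Token and the ast nodes implement value-based equality, e.g. as dataclasses. A hedged sketch of what that repository's Token could look like (the field name tok_type is taken from the calls above; everything else is an assumption, and the ast nodes would be defined similarly):

from dataclasses import dataclass


@dataclass(frozen=True)
class Token:
    tok_type: str
    literal: str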
Example #16
def test_next_token():
    input = "=+(){},;"

    expected = [
        (TokenType.ASSIGN, "="),
        (TokenType.PLUS, "+"),
        (TokenType.LPAREN, "("),
        (TokenType.RPAREN, ")"),
        (TokenType.LBRACE, "{"),
        (TokenType.RBRACE, "}"),
        (TokenType.COMMA, ","),
        (TokenType.SEMICOLON, ";"),
        (TokenType.EOF, ""),
    ]

    lexer = Lexer(input)

    for expected_token, expected_literal in expected:
        token = lexer.next_token()

        assert token.type == expected_token
        assert token.literal == expected_literal
Example #17
def test_parsing_array_literal():
    source = "[1, 2 * 2, 3 + 3];"
    l = Lexer(source)
    p = Parser(l)
    program = p.parse_program()
    assert p.errors == []
    stmt = program.statements[0]
    assert isinstance(stmt, ast.ExpressionStatement)
    array = stmt.expression
    assert isinstance(array, ast.ArrayLiteral)
    assert len(array.elements) == 3
    literal = array.elements[0]
    assert literal.value == 1
    assert literal.token_literal() == "1"
Example #18
def test_parsing_index_expressions():
    source = "myArray[1 + 1]"
    l = Lexer(source)
    p = Parser(l)
    program = p.parse_program()
    assert p.errors == []
    stmt = program.statements[0]
    assert isinstance(stmt, ast.ExpressionStatement)
    expression = stmt.expression
    assert isinstance(expression, ast.IndexExpression)
    left = expression.left
    assert isinstance(left, ast.Identifier)
    assert left.value == "myArray"
    assert left.token_literal() == "myArray"
    check_infix_expression(expression.index, 1, "+", "1")
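
check_infix_expression is not defined in any of these snippets. Judging from the calls in Example #18 and Example #27, it might look roughly like the sketch below; the str() coercion is only there because one call site passes the right operand as "1" and another as an int, and the real helper may well dispatch on type instead.

def check_infix_expression(expr, left, operator, right):
    # Hypothetical reconstruction: verify the node type, the operator and both operands.
    assert isinstance(expr, ast.InfixExpression)
    assert str(expr.left.value) == str(left)
    assert expr.operator == operator
    assert str(expr.right.value) == str(right)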
Example #19
def test_let_statements(source, expected_identifier, expected_value):
    l = Lexer(source)
    p = Parser(l)

    program = p.parse_program()
    assert p.errors == []

    assert program is not None

    assert len(program.statements) == 1

    stmt = program.statements[0]

    check_let_statement(stmt, expected_identifier)

    assert stmt.value.value == expected_value
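
check_let_statement is likewise not shown; a minimal sketch consistent with how the book-style tests usually perform this check (an assumption, not the repository's helper):

def check_let_statement(stmt, name):
    # Hypothetical helper: the statement must be a `let` binding the expected identifier.
    assert stmt.token_literal() == "let"
    assert isinstance(stmt, ast.LetStatement)
    assert stmt.name.value == name
    assert stmt.name.token_literal() == name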
Example #20
def test_prefix_expressions(source, operator, expected):
    l = Lexer(source)
    p = Parser(l)

    program = p.parse_program()
    assert p.errors == []

    assert len(program.statements) == 1

    stmt = program.statements[0]

    assert type(stmt) == ast.ExpressionStatement

    assert type(stmt.expression) == ast.PrefixExpression

    assert stmt.expression.operator == operator

    assert stmt.expression.right.value == expected
Example #21
def test_parsing_hash_literal():
    source = '{"one": 1, "two": 2, "three": 3}'
    l = Lexer(source)
    p = Parser(l)
    program = p.parse_program()
    assert p.errors == []
    stmt = program.statements[0]
    assert isinstance(stmt, ast.ExpressionStatement)
    hash_literal = stmt.expression
    assert isinstance(hash_literal, ast.HashLiteral)

    assert len(hash_literal.pairs) == 3

    expected = {"one": 1, "two": 2, "three": 3}

    for key, value in hash_literal.pairs.items():
        assert isinstance(key, ast.StringLiteral)
        check_integer_literal(value, expected[str(key)])
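
check_integer_literal is also assumed here; given how it is called, something like the following would fit:

def check_integer_literal(expr, value):
    # Hypothetical helper matching the call in Example #21.
    assert isinstance(expr, ast.IntegerLiteral)
    assert expr.value == value
    assert expr.token_literal() == str(value)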
Example #22
def test_function_literal_parsing():

    source = "fn(x, y) { x + y; }"

    l = Lexer(source)
    p = Parser(l)

    program = p.parse_program()
    assert p.errors == []

    assert len(program.statements) == 1

    stmt = program.statements[0]

    assert type(stmt) == ast.ExpressionStatement

    assert type(stmt.expression) == ast.FunctionLiteral

    assert [str(s) for s in stmt.expression.params] == ["x", "y"]
Example #23
def test_return_statements():
    source = """
    return 5;
    return 10;
    return 993322;
    """

    l = Lexer(source)
    p = Parser(l)

    program = p.parse_program()
    assert p.errors == []

    assert program is not None

    assert len(program.statements) == 3

    for stmt in program.statements:
        assert type(stmt) == ast.ReturnStatement
        assert stmt.token_literal() == "return"
Example #24
def test_boolean_expression():
    source = "true;"

    l = Lexer(source)
    p = Parser(l)

    program = p.parse_program()
    assert p.errors == []

    assert len(program.statements) == 1

    stmt = program.statements[0]

    assert type(stmt) == ast.ExpressionStatement

    assert type(stmt.expression) == ast.Boolean

    boolean = stmt.expression

    assert boolean.value is True
    assert boolean.token_literal() == "true"
Example #25
def test_integer_literals():
    source = "5;"

    l = Lexer(source)
    p = Parser(l)

    program = p.parse_program()
    assert p.errors == []

    assert len(program.statements) == 1

    stmt = program.statements[0]

    assert type(stmt) == ast.ExpressionStatement

    assert type(stmt.expression) == ast.IntegerLiteral

    literal = stmt.expression

    assert literal.value == 5
    assert literal.token_literal() == "5"
Example #26
def test_identifier_expression():
    source = "foobar;"

    l = Lexer(source)
    p = Parser(l)

    program = p.parse_program()
    assert p.errors == []

    assert len(program.statements) == 1

    stmt = program.statements[0]

    assert type(stmt) == ast.ExpressionStatement

    assert type(stmt.expression) == ast.Identifier

    ident = stmt.expression

    assert ident.value == "foobar"
    assert ident.token_literal() == "foobar"
Example #27
def test_parsing_hash_literal_with_expressions():
    source = '{"one": 0 + 1, "two": 10 - 8, "three": 15 / 5}'
    l = Lexer(source)
    p = Parser(l)
    program = p.parse_program()
    assert p.errors == []
    stmt = program.statements[0]
    assert isinstance(stmt, ast.ExpressionStatement)
    hash_literal = stmt.expression
    assert isinstance(hash_literal, ast.HashLiteral)

    assert len(hash_literal.pairs) == 3

    tests = {
        "one": lambda e: check_infix_expression(e, 0, "+", 1),
        "two": lambda e: check_infix_expression(e, 10, "-", 8),
        "three": lambda e: check_infix_expression(e, 15, "/", 5),
    }

    for key, value in hash_literal.pairs.items():
        assert isinstance(key, ast.StringLiteral)
        test_fn = tests[str(key)]
        test_fn(value)
Example #28
def main():

    hist_file = Path("~/.monkeyhist").expanduser()

    # Create history file if it doesn't exist already
    if not hist_file.exists():
        hist_file.touch()

    session = PromptSession(history=FileHistory(hist_file),
                            completer=monkey_completer,
                            style=style)

    env = {}

    while True:
        try:
            scanned = session.prompt(">> ",
                                     lexer=PygmentsLexer(JavascriptLexer))
        except KeyboardInterrupt:
            continue
        except EOFError:
            break
        else:
            lexer = Lexer(scanned)
            parser = Parser(lexer)
            program = parser.parse_program()
            if parser.errors:
                for error in parser.errors:
                    print(error)
            evaluated = Eval(program, env)
            session.completer = WordCompleter(
                list(set(token.keywords) | set(env)))
            if evaluated is not None:
                print(evaluated.inspect())

    print("Farewell!")
Example #29
def test_next_token_on_source_code_2():
    input = """
        !-/*5;
        5 < 10 > 5;

        if (5 < 10) {
            return true;
        } else {
            return false;
        }

        10 == 10;
        10 != 9;
    """

    expected = [
        (TokenType.BANG, "!"),
        (TokenType.MINUS, "-"),
        (TokenType.SLASH, "/"),
        (TokenType.ASTERIK, "*"),
        (TokenType.INT, "5"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.INT, "5"),
        (TokenType.LT, "<"),
        (TokenType.INT, "10"),
        (TokenType.GT, ">"),
        (TokenType.INT, "5"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.IF, "if"),
        (TokenType.LPAREN, "("),
        (TokenType.INT, "5"),
        (TokenType.LT, "<"),
        (TokenType.INT, "10"),
        (TokenType.RPAREN, ")"),
        (TokenType.LBRACE, "{"),
        (TokenType.RETURN, "return"),
        (TokenType.TRUE, "true"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.RBRACE, "}"),
        (TokenType.ELSE, "else"),
        (TokenType.LBRACE, "{"),
        (TokenType.RETURN, "return"),
        (TokenType.FALSE, "false"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.RBRACE, "}"),
        (TokenType.INT, "10"),
        (TokenType.EQ, "=="),
        (TokenType.INT, "10"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.INT, "10"),
        (TokenType.NOT_EQ, "!="),
        (TokenType.INT, "9"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.EOF, ""),
    ]

    lexer = Lexer(input)

    for expected_token, expected_literal in expected:
        token = lexer.next_token()

        assert token.type == expected_token
        assert token.literal == expected_literal
Example #30
    def test_next_token_more(self):
        input = '''
            let five = 5;
            let ten = 10;
            let add = fn(x, y) {
                x + y;
            };
            let result = add(five, ten);
            !-/*5
            5 < 10 > 5
            
            if (5 < 10) {
                return true;
            } else {
                return false;
            }
            
            10 == 10;
            10 != 9;
        '''

        tests = [
            ExpectedToken(Token.LET, "let"),
            ExpectedToken(Token.IDENTIFIER, "five"),
            ExpectedToken(Token.ASSIGN, "="),
            ExpectedToken(Token.INT, "5"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.LET, "let"),
            ExpectedToken(Token.IDENTIFIER, "ten"),
            ExpectedToken(Token.ASSIGN, "="),
            ExpectedToken(Token.INT, "10"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.LET, "let"),
            ExpectedToken(Token.IDENTIFIER, "add"),
            ExpectedToken(Token.ASSIGN, "="),
            ExpectedToken(Token.FUNCTION, "fn"),
            ExpectedToken(Token.LPAREN, "("),
            ExpectedToken(Token.IDENTIFIER, "x"),
            ExpectedToken(Token.COMMA, ","),
            ExpectedToken(Token.IDENTIFIER, "y"),
            ExpectedToken(Token.RPAREN, ")"),
            ExpectedToken(Token.LBRACE, "{"),
            ExpectedToken(Token.IDENTIFIER, "x"),
            ExpectedToken(Token.PLUS, "+"),
            ExpectedToken(Token.IDENTIFIER, "y"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.RBRACE, "}"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.LET, "let"),
            ExpectedToken(Token.IDENTIFIER, "result"),
            ExpectedToken(Token.ASSIGN, "="),
            ExpectedToken(Token.IDENTIFIER, "add"),
            ExpectedToken(Token.LPAREN, "("),
            ExpectedToken(Token.IDENTIFIER, "five"),
            ExpectedToken(Token.COMMA, ","),
            ExpectedToken(Token.IDENTIFIER, "ten"),
            ExpectedToken(Token.RPAREN, ")"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.BANG, "!"),
            ExpectedToken(Token.MINUS, "-"),
            ExpectedToken(Token.SLASH, "/"),
            ExpectedToken(Token.ASTERISK, "*"),
            ExpectedToken(Token.INT, "5"),
            ExpectedToken(Token.INT, "5"),
            ExpectedToken(Token.LT, "<"),
            ExpectedToken(Token.INT, "10"),
            ExpectedToken(Token.GT, ">"),
            ExpectedToken(Token.INT, "5"),
            ExpectedToken(Token.IF, "if"),
            ExpectedToken(Token.LPAREN, "("),
            ExpectedToken(Token.INT, "5"),
            ExpectedToken(Token.LT, "<"),
            ExpectedToken(Token.INT, "10"),
            ExpectedToken(Token.RPAREN, ")"),
            ExpectedToken(Token.LBRACE, "{"),
            ExpectedToken(Token.RETURN, "return"),
            ExpectedToken(Token.TRUE, "true"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.RBRACE, "}"),
            ExpectedToken(Token.ELSE, "else"),
            ExpectedToken(Token.LBRACE, "{"),
            ExpectedToken(Token.RETURN, "return"),
            ExpectedToken(Token.FALSE, "false"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.RBRACE, "}"),
            ExpectedToken(Token.INT, "10"),
            ExpectedToken(Token.EQ, "=="),
            ExpectedToken(Token.INT, "10"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.INT, "10"),
            ExpectedToken(Token.NOT_EQ, "!="),
            ExpectedToken(Token.INT, "9"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.EOF, "")
        ]

        lexer = Lexer(input)

        for test in tests:
            token = lexer.next_token()
            assert token.type == test.expected_type
            assert token.literal == test.expected_literal
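
ExpectedToken is not defined in the snippet; since it is constructed with (type, literal) and read back via .expected_type and .expected_literal, a namedtuple along these lines would fit (an assumption):

from collections import namedtuple

# Hypothetical definition consistent with how the test constructs and reads it.
ExpectedToken = namedtuple("ExpectedToken", ["expected_type", "expected_literal"])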