Example #1
def repl():
    line = input(">> ")
    lexer = Lexer(line)

    # Print each token until the lexer yields an ILLEGAL or EOF token.
    while (tok :=
           lexer.next_token()).type not in {TokenType.ILLEGAL, TokenType.EOF}:
        print(tok)
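
As written, repl() reads and tokenizes a single line; a caller would typically drive it in a loop. A minimal sketch, assuming the repl() defined above:

if __name__ == "__main__":
    while True:
        try:
            repl()
        except (EOFError, KeyboardInterrupt):
            # Exit cleanly when input ends (Ctrl-D) or the user interrupts (Ctrl-C).
            break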
Example #2
def test_next_token_on_source_code():
    input = """
        let five = 5;
        let ten = 10;

        let add = fn(x, y){
            x + y;
        };

        let result = add(five, ten);
    """

    expected = [
        (TokenType.LET, "let"),
        (TokenType.IDENTIFIER, "five"),
        (TokenType.ASSIGN, "="),
        (TokenType.INT, "5"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.LET, "let"),
        (TokenType.IDENTIFIER, "ten"),
        (TokenType.ASSIGN, "="),
        (TokenType.INT, "10"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.LET, "let"),
        (TokenType.IDENTIFIER, "add"),
        (TokenType.ASSIGN, "="),
        (TokenType.FUNCTION, "fn"),
        (TokenType.LPAREN, "("),
        (TokenType.IDENTIFIER, "x"),
        (TokenType.COMMA, ","),
        (TokenType.IDENTIFIER, "y"),
        (TokenType.RPAREN, ")"),
        (TokenType.LBRACE, "{"),
        (TokenType.IDENTIFIER, "x"),
        (TokenType.PLUS, "+"),
        (TokenType.IDENTIFIER, "y"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.RBRACE, "}"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.LET, "let"),
        (TokenType.IDENTIFIER, "result"),
        (TokenType.ASSIGN, "="),
        (TokenType.IDENTIFIER, "add"),
        (TokenType.LPAREN, "("),
        (TokenType.IDENTIFIER, "five"),
        (TokenType.COMMA, ","),
        (TokenType.IDENTIFIER, "ten"),
        (TokenType.RPAREN, ")"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.EOF, ""),
    ]

    lexer = Lexer(input)

    for expected_token, expected_literal in expected:
        token = lexer.next_token()

        assert token.type == expected_token
        assert token.literal == expected_literal
Example #3
def test_next_token():
    input = "=+(){},;"

    expected = [
        (TokenType.ASSIGN, "="),
        (TokenType.PLUS, "+"),
        (TokenType.LPAREN, "("),
        (TokenType.RPAREN, ")"),
        (TokenType.LBRACE, "{"),
        (TokenType.RBRACE, "}"),
        (TokenType.COMMA, ","),
        (TokenType.SEMICOLON, ";"),
        (TokenType.EOF, ""),
    ]

    lexer = Lexer(input)

    for expected_token, expected_literal in expected:
        token = lexer.next_token()

        assert token.type == expected_token
        assert token.literal == expected_literal
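
All of these tests exercise the same small surface: a TokenType enum, a Token carrying a type and a literal, and a Lexer whose next_token() walks the source and eventually returns an EOF token with an empty literal. The sketch below is not any of the implementations under test; it is an assumed minimal shape, just enough to satisfy Example #3 above (single-character tokens plus EOF).

from dataclasses import dataclass
from enum import Enum, auto


class TokenType(Enum):
    ASSIGN = auto()
    PLUS = auto()
    LPAREN = auto()
    RPAREN = auto()
    LBRACE = auto()
    RBRACE = auto()
    COMMA = auto()
    SEMICOLON = auto()
    EOF = auto()
    ILLEGAL = auto()


@dataclass
class Token:
    type: TokenType
    literal: str


# Mapping of single-character lexemes to their token types.
SINGLE_CHAR_TOKENS = {
    "=": TokenType.ASSIGN,
    "+": TokenType.PLUS,
    "(": TokenType.LPAREN,
    ")": TokenType.RPAREN,
    "{": TokenType.LBRACE,
    "}": TokenType.RBRACE,
    ",": TokenType.COMMA,
    ";": TokenType.SEMICOLON,
}


class Lexer:
    def __init__(self, source: str) -> None:
        self.source = source
        self.position = 0

    def next_token(self) -> Token:
        # Skip whitespace between tokens.
        while self.position < len(self.source) and self.source[self.position].isspace():
            self.position += 1

        # Past the end of the input: emit EOF with an empty literal.
        if self.position >= len(self.source):
            return Token(TokenType.EOF, "")

        ch = self.source[self.position]
        self.position += 1
        return Token(SINGLE_CHAR_TOKENS.get(ch, TokenType.ILLEGAL), ch)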
Example #4
def test_next_token_on_source_code_2():
    input = """
        !-/*5;
        5 < 10 > 5;

        if (5 < 10) {
            return true;
        } else {
            return false;
        }

        10 == 10;
        10 != 9;
    """

    expected = [
        (TokenType.BANG, "!"),
        (TokenType.MINUS, "-"),
        (TokenType.SLASH, "/"),
        (TokenType.ASTERISK, "*"),
        (TokenType.INT, "5"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.INT, "5"),
        (TokenType.LT, "<"),
        (TokenType.INT, "10"),
        (TokenType.GT, ">"),
        (TokenType.INT, "5"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.IF, "if"),
        (TokenType.LPAREN, "("),
        (TokenType.INT, "5"),
        (TokenType.LT, "<"),
        (TokenType.INT, "10"),
        (TokenType.RPAREN, ")"),
        (TokenType.LBRACE, "{"),
        (TokenType.RETURN, "return"),
        (TokenType.TRUE, "true"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.RBRACE, "}"),
        (TokenType.ELSE, "else"),
        (TokenType.LBRACE, "{"),
        (TokenType.RETURN, "return"),
        (TokenType.FALSE, "false"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.RBRACE, "}"),
        (TokenType.INT, "10"),
        (TokenType.EQ, "=="),
        (TokenType.INT, "10"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.INT, "10"),
        (TokenType.NOT_EQ, "!="),
        (TokenType.INT, "9"),
        (TokenType.SEMICOLON, ";"),
        (TokenType.EOF, ""),
    ]

    lexer = Lexer(input)

    for expected_token, expected_literal in expected:
        token = lexer.next_token()

        assert token.type == expected_token
        assert token.literal == expected_literal
Example #5
    def test_next_token_more(self):
        input = '''
            let five = 5;
            let ten = 10;
            let add = fn(x, y) {
                x + y;
            };
            let result = add(five, ten);
            !-/*5
            5 < 10 > 5
            
            if (5 < 10) {
                return true;
            } else {
                return false;
            }
            
            10 == 10;
            10 != 9;
        '''

        tests = [
            ExpectedToken(Token.LET, "let"),
            ExpectedToken(Token.IDENTIFIER, "five"),
            ExpectedToken(Token.ASSIGN, "="),
            ExpectedToken(Token.INT, "5"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.LET, "let"),
            ExpectedToken(Token.IDENTIFIER, "ten"),
            ExpectedToken(Token.ASSIGN, "="),
            ExpectedToken(Token.INT, "10"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.LET, "let"),
            ExpectedToken(Token.IDENTIFIER, "add"),
            ExpectedToken(Token.ASSIGN, "="),
            ExpectedToken(Token.FUNCTION, "fn"),
            ExpectedToken(Token.LPAREN, "("),
            ExpectedToken(Token.IDENTIFIER, "x"),
            ExpectedToken(Token.COMMA, ","),
            ExpectedToken(Token.IDENTIFIER, "y"),
            ExpectedToken(Token.RPAREN, ")"),
            ExpectedToken(Token.LBRACE, "{"),
            ExpectedToken(Token.IDENTIFIER, "x"),
            ExpectedToken(Token.PLUS, "+"),
            ExpectedToken(Token.IDENTIFIER, "y"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.RBRACE, "}"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.LET, "let"),
            ExpectedToken(Token.IDENTIFIER, "result"),
            ExpectedToken(Token.ASSIGN, "="),
            ExpectedToken(Token.IDENTIFIER, "add"),
            ExpectedToken(Token.LPAREN, "("),
            ExpectedToken(Token.IDENTIFIER, "five"),
            ExpectedToken(Token.COMMA, ","),
            ExpectedToken(Token.IDENTIFIER, "ten"),
            ExpectedToken(Token.RPAREN, ")"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.BANG, "!"),
            ExpectedToken(Token.MINUS, "-"),
            ExpectedToken(Token.SLASH, "/"),
            ExpectedToken(Token.ASTERISK, "*"),
            ExpectedToken(Token.INT, "5"),
            ExpectedToken(Token.INT, "5"),
            ExpectedToken(Token.LT, "<"),
            ExpectedToken(Token.INT, "10"),
            ExpectedToken(Token.GT, ">"),
            ExpectedToken(Token.INT, "5"),
            ExpectedToken(Token.IF, "if"),
            ExpectedToken(Token.LPAREN, "("),
            ExpectedToken(Token.INT, "5"),
            ExpectedToken(Token.LT, "<"),
            ExpectedToken(Token.INT, "10"),
            ExpectedToken(Token.RPAREN, ")"),
            ExpectedToken(Token.LBRACE, "{"),
            ExpectedToken(Token.RETURN, "return"),
            ExpectedToken(Token.TRUE, "true"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.RBRACE, "}"),
            ExpectedToken(Token.ELSE, "else"),
            ExpectedToken(Token.LBRACE, "{"),
            ExpectedToken(Token.RETURN, "return"),
            ExpectedToken(Token.FALSE, "false"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.RBRACE, "}"),
            ExpectedToken(Token.INT, "10"),
            ExpectedToken(Token.EQ, "=="),
            ExpectedToken(Token.INT, "10"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.INT, "10"),
            ExpectedToken(Token.NOT_EQ, "!="),
            ExpectedToken(Token.INT, "9"),
            ExpectedToken(Token.SEMICOLON, ";"),
            ExpectedToken(Token.EOF, "")
        ]

        lexer = Lexer(input)

        for test in tests:
            token = lexer.next_token()
            assert token.type == test.expected_type
            assert token.literal == test.expected_literal
Example #6
def test_next_token():
    source = """
    let five = 5;
    let ten = 10;

    let add = fn(x, y) {
      x + y;
    };

    let result = add(five, ten);
    !-/*5;
    5 < 10 > 5;

    if (5 < 10) {
       return true;
    } else { 
       return false;
    } 

    10 == 10;
    10 != 9;
    "foobar"
    "foo bar"
    [1, 2];
    {"foo": "bar"}
    """

    class TokenTest(NamedTuple):
        expected_type: token.TokenType
        expected_literal: str

    tests = [
        TokenTest(token.LET, "let"),
        TokenTest(token.IDENT, "five"),
        TokenTest(token.ASSIGN, "="),
        TokenTest(token.INT, "5"),
        TokenTest(token.SEMICOLON, ";"),
        TokenTest(token.LET, "let"),
        TokenTest(token.IDENT, "ten"),
        TokenTest(token.ASSIGN, "="),
        TokenTest(token.INT, "10"),
        TokenTest(token.SEMICOLON, ";"),
        TokenTest(token.LET, "let"),
        TokenTest(token.IDENT, "add"),
        TokenTest(token.ASSIGN, "="),
        TokenTest(token.FUNCTION, "fn"),
        TokenTest(token.LPAREN, "("),
        TokenTest(token.IDENT, "x"),
        TokenTest(token.COMMA, ","),
        TokenTest(token.IDENT, "y"),
        TokenTest(token.RPAREN, ")"),
        TokenTest(token.LBRACE, "{"),
        TokenTest(token.IDENT, "x"),
        TokenTest(token.PLUS, "+"),
        TokenTest(token.IDENT, "y"),
        TokenTest(token.SEMICOLON, ";"),
        TokenTest(token.RBRACE, "}"),
        TokenTest(token.SEMICOLON, ";"),
        TokenTest(token.LET, "let"),
        TokenTest(token.IDENT, "result"),
        TokenTest(token.ASSIGN, "="),
        TokenTest(token.IDENT, "add"),
        TokenTest(token.LPAREN, "("),
        TokenTest(token.IDENT, "five"),
        TokenTest(token.COMMA, ","),
        TokenTest(token.IDENT, "ten"),
        TokenTest(token.RPAREN, ")"),
        TokenTest(token.SEMICOLON, ";"),
        TokenTest(token.BANG, "!"),
        TokenTest(token.MINUS, "-"),
        TokenTest(token.SLASH, "/"),
        TokenTest(token.ASTERISK, "*"),
        TokenTest(token.INT, "5"),
        TokenTest(token.SEMICOLON, ";"),
        TokenTest(token.INT, "5"),
        TokenTest(token.LT, "<"),
        TokenTest(token.INT, "10"),
        TokenTest(token.GT, ">"),
        TokenTest(token.INT, "5"),
        TokenTest(token.SEMICOLON, ";"),
        TokenTest(token.IF, "if"),
        TokenTest(token.LPAREN, "("),
        TokenTest(token.INT, "5"),
        TokenTest(token.LT, "<"),
        TokenTest(token.INT, "10"),
        TokenTest(token.RPAREN, ")"),
        TokenTest(token.LBRACE, "{"),
        TokenTest(token.RETURN, "return"),
        TokenTest(token.TRUE, "true"),
        TokenTest(token.SEMICOLON, ";"),
        TokenTest(token.RBRACE, "}"),
        TokenTest(token.ELSE, "else"),
        TokenTest(token.LBRACE, "{"),
        TokenTest(token.RETURN, "return"),
        TokenTest(token.FALSE, "false"),
        TokenTest(token.SEMICOLON, ";"),
        TokenTest(token.RBRACE, "}"),
        TokenTest(token.INT, "10"),
        TokenTest(token.EQ, "=="),
        TokenTest(token.INT, "10"),
        TokenTest(token.SEMICOLON, ";"),
        TokenTest(token.INT, "10"),
        TokenTest(token.NOT_EQ, "!="),
        TokenTest(token.INT, "9"),
        TokenTest(token.SEMICOLON, ";"),
        TokenTest(token.STRING, "foobar"),
        TokenTest(token.STRING, "foo bar"),
        TokenTest(token.LBRACKET, "["),
        TokenTest(token.INT, "1"),
        TokenTest(token.COMMA, ","),
        TokenTest(token.INT, "2"),
        TokenTest(token.RBRACKET, "]"),
        TokenTest(token.SEMICOLON, ";"),
        TokenTest(token.LBRACE, "{"),
        TokenTest(token.STRING, "foo"),
        TokenTest(token.COLON, ":"),
        TokenTest(token.STRING, "bar"),
        TokenTest(token.RBRACE, "}"),
        TokenTest(token.EOF, ""),
    ]

    l = Lexer(source)

    for test in tests:
        tok = l.next_token()
        assert tok.tok_type == test.expected_type