Example #1
0
def test_lexer_parses_bare_monkey_syntax():
    """Lex a single let-statement and verify the full token stream ending in EOF."""
    lexer = Lexer("let five = 5;")

    expected_tokens = [
        Token(TokenTypes.LET, "let"),
        Token(TokenTypes.IDENT, "five"),
        Token(TokenTypes.ASSIGN, "="),
        Token(TokenTypes.INT, "5"),
        Token(TokenTypes.SEMICOLON, ";"),
        Token(TokenTypes.EOF, ""),
    ]
    for expected in expected_tokens:
        assert lexer.next_token() == expected
Example #2
0
def test_eval_integer_expression(input_data, expected_val):
    """Parse and evaluate an integer expression, comparing the produced value."""
    parser = Parser.new(Lexer(input_data))
    program = parser.parse()
    check_parse_errors(parser)

    result = eval(program)
    assert result.value == expected_val
def test_operator_precedence_parsing(input_data, expected_str):
    """Parse input and check that the stringified AST reflects operator precedence.

    Fix: removed a leftover debug `print(str(program))` call.
    """
    lexer = Lexer(input_data)
    parser = Parser.new(lexer)
    program = parser.parse()

    check_parse_errors(parser)
    # rstrip: the program's str() may carry a trailing space.
    assert str(program).rstrip(" ") == expected_str
Example #4
0
def test_bang_operator(input_data, expected_val):
    """Parse and evaluate a bang-prefixed expression, comparing the result value."""
    parser = Parser.new(Lexer(input_data))
    program = parser.parse()
    check_parse_errors(parser)

    evaluated = eval(program)
    assert evaluated.value == expected_val
Example #5
0
def test_if_else_expression(input_data, expected_output):
    """Evaluate an if/else expression; compare by value when available, else by str()."""
    parser = Parser.new(Lexer(input_data))
    program = parser.parse()
    check_parse_errors(parser)

    result = eval(program)
    # Results exposing a .value attribute compare by value; anything else
    # (e.g. a null-like object) compares by its string representation.
    if not hasattr(result, "value"):
        assert str(result) == expected_output
    else:
        assert result.value == expected_output
def test_let_statements(let_data):
    """Parse a single let-statement and round-trip it through str().

    Fix: the failure message previously claimed "Expected -> 3" although
    the check requires exactly one statement.
    """
    lexer = Lexer(let_data)
    parser = Parser.new(lexer)
    program = parser.parse()

    if program is None:
        pytest.fail("Failed to process the input")

    # Each fixture value is exactly one let-statement.
    if len(program._statements) != 1:
        pytest.fail(f"Mismatch: Expected -> 1 : Found -> {len(program._statements)}")

    assert let_data == str(program)
def test_return_statement(provide_return_data):
    """Parse three return-statements and validate each one individually."""
    parser = Parser.new(Lexer(provide_return_data))
    program = parser.parse()

    if program is None:
        pytest.fail(f"Failed to process the input")

    statements = program._statements
    if len(statements) != 3:
        pytest.fail(f"Mismatch: Expected -> 3 : Found -> {len(statements)}")

    for stmt in statements:
        assert assert_single_return_statement(stmt)
def test_pratt_infix_operators(input_data, expected_left, expected_op, expected_right):
    """Parse one infix expression and verify its operands and operator."""
    parser = Parser.new(Lexer(input_data))
    program = parser.parse()

    check_parse_errors(parser)

    statements = program._statements
    if len(statements) != 1:
        pytest.fail(f"Mismatch: Expected -> 1 : Found -> {len(statements)}")

    assert assert_simple_infix_operator_statement(
        statements[0], str(expected_left), expected_op, str(expected_right)
    )
def test_pratt_identifiers():
    """Parse a lone identifier expression statement.

    Fix: the local variable was named `input`, shadowing the builtin.
    """
    source = "foobar;"

    lexer = Lexer(source)
    parser = Parser.new(lexer)
    program = parser.parse()

    if program is None:
        pytest.fail("Failed to process the input")

    if len(program._statements) != 1:
        pytest.fail(f"Mismatch: Expected -> 1 : Found -> {len(program._statements)}")

    assert assert_simple_identifier_expression_statement(program._statements[0], source)
Example #10
0
def test_lexer_returns_correct_tokens():
    """Lex each single-character token type and the terminating EOF."""
    lexer = Lexer("=+(){},;")

    expected_stream = [
        (TokenTypes.ASSIGN, "="),
        (TokenTypes.PLUS, "+"),
        (TokenTypes.LPAREN, "("),
        (TokenTypes.RPAREN, ")"),
        (TokenTypes.LBRACE, "{"),
        (TokenTypes.RBRACE, "}"),
        (TokenTypes.COMMA, ","),
        (TokenTypes.SEMICOLON, ";"),
        (TokenTypes.EOF, ""),
    ]
    for token_type, literal in expected_stream:
        assert lexer.next_token() == Token(token_type, literal)
def test_pratt_integer_literals():
    """Parse a lone integer-literal expression statement.

    Fix: the local variable was named `input`, shadowing the builtin.
    """
    source = "5;"

    lexer = Lexer(source)
    parser = Parser.new(lexer)
    program = parser.parse()

    if program is None:
        pytest.fail("Failed to process the input")

    if len(program._statements) != 1:
        pytest.fail(f"Mismatch: Expected -> 1 : Found -> {len(program._statements)}")

    statement = program._statements[0]

    assert assert_simple_integer_expression(statement)
def test_if_expression():
    """Parse an if/else expression and round-trip it through str()."""
    source = "if (x < y) { x } else { y }"
    parser = Parser.new(Lexer(source))
    program = parser.parse()
    check_parse_errors(parser)

    if len(program._statements) != 1:
        pytest.fail(f"Mismatch: Expected -> 1 : Found -> {len(program._statements)}")

    statement = program._statements[0]

    assert isinstance(statement, ExpressionStatement), f"type mismatch {type(statement)}"

    if not isinstance(statement._expression, IfExpression):
        pytest.fail(f"Type mismatch:Got {type(statement._expression)}")

    assert str(program) == source
def test_function_literal():
    """Parse a function literal and inspect its parameters and body.

    Fix: the parameter-count failure message read the non-existent
    attribute `statement._expression_parameters` (missing dot), which
    would raise AttributeError instead of reporting the mismatch.
    """
    lexer = Lexer("fn(x, y) { x + y }")
    parser = Parser.new(lexer)
    program = parser.parse()
    check_parse_errors(parser)

    if len(program._statements) != 1:
        pytest.fail(f"Mismatch: Expected -> 1 : Found -> {len(program._statements)}")

    statement = program._statements[0]

    if not isinstance(statement._expression, FunctionLiteral):
        pytest.fail(f"Expected function literal : Found => {type(statement._expression)}")

    if len(statement._expression._parameters) != 2:
        pytest.fail(f"Expected two parameters : Got {len(statement._expression._parameters)}")

    assert statement._expression._parameters[0]._value == "x"
    assert statement._expression._parameters[1]._value == "y"

    assert str(statement._expression._block) == "(x + y)"
def test_call_expression():
    """Parse a call expression and validate its callee and arguments.

    Fix: the argument-count failure message said "Expected two parameters"
    although the check requires three arguments.
    """
    lexer = Lexer("add(1, 2 * 3, 4 + 5);")
    parser = Parser.new(lexer)
    program = parser.parse()
    check_parse_errors(parser)

    if len(program._statements) != 1:
        pytest.fail(f"Mismatch: Expected -> 1 : Found -> {len(program._statements)}")

    statement = program._statements[0]

    if not isinstance(statement, ExpressionStatement):
        pytest.fail(f"Expected ExpressionStatement : Got {type(statement)} instead")

    if not isinstance(statement._expression, CallExpression):
        pytest.fail(f"Expected Call Expression : Got => {type(statement._expression)}")

    if len(statement._expression._args) != 3:
        pytest.fail(f"Expected three arguments : Got {len(statement._expression._args)}")

    assert str(statement._expression._ident_or_func_literal) == "add"

    assert str(statement._expression._args[0]) == '1'
    assert str(statement._expression._args[1]) == '(2 * 3)'
Example #15
0
def test_lexer_returns_none_on_empty():
    """Empty input yields an EOF token immediately.

    NOTE(review): despite the name, the lexer returns an EOF token,
    not None, on empty input.
    """
    assert Lexer("").next_token() == Token(TokenTypes.EOF, "")
Example #16
0
def test_lexer_parses_minimum_monkey_syntax():
    """Lex a representative Monkey program and verify the entire token stream.

    Fix: the NOT_EQ assertion for `10 != 9;` previously expected the
    literal "==" instead of "!=".
    """
    data = """let five = 5;
let ten = 10;
   let add = fn(x, y) {
     x + y;
};
   let result = add(five, ten);
   
   !-/*5;
   5 < 10 > 5;
   
   if (5 < 10) {
       return true;
   } else {
       return false;
   }
   
   10 == 10; 
   10 != 9;
   """

    lexer = Lexer(data)

    # Expected (type, literal) pairs, in source order, ending with EOF.
    expected_stream = [
        # let five = 5;
        (TokenTypes.LET, "let"),
        (TokenTypes.IDENT, "five"),
        (TokenTypes.ASSIGN, "="),
        (TokenTypes.INT, "5"),
        (TokenTypes.SEMICOLON, ";"),
        # let ten = 10;
        (TokenTypes.LET, "let"),
        (TokenTypes.IDENT, "ten"),
        (TokenTypes.ASSIGN, "="),
        (TokenTypes.INT, "10"),
        (TokenTypes.SEMICOLON, ";"),
        # let add = fn(x, y) { x + y; };
        (TokenTypes.LET, "let"),
        (TokenTypes.IDENT, "add"),
        (TokenTypes.ASSIGN, "="),
        (TokenTypes.FUNCTION, "fn"),
        (TokenTypes.LPAREN, "("),
        (TokenTypes.IDENT, "x"),
        (TokenTypes.COMMA, ","),
        (TokenTypes.IDENT, "y"),
        (TokenTypes.RPAREN, ")"),
        (TokenTypes.LBRACE, "{"),
        (TokenTypes.IDENT, "x"),
        (TokenTypes.PLUS, "+"),
        (TokenTypes.IDENT, "y"),
        (TokenTypes.SEMICOLON, ";"),
        (TokenTypes.RBRACE, "}"),
        (TokenTypes.SEMICOLON, ";"),
        # let result = add(five, ten);
        (TokenTypes.LET, "let"),
        (TokenTypes.IDENT, "result"),
        (TokenTypes.ASSIGN, "="),
        (TokenTypes.IDENT, "add"),
        (TokenTypes.LPAREN, "("),
        (TokenTypes.IDENT, "five"),
        (TokenTypes.COMMA, ","),
        (TokenTypes.IDENT, "ten"),
        (TokenTypes.RPAREN, ")"),
        (TokenTypes.SEMICOLON, ";"),
        # !-/*5;
        (TokenTypes.BANG, "!"),
        (TokenTypes.MINUS, "-"),
        (TokenTypes.SLASH, "/"),
        (TokenTypes.ASTERISK, "*"),
        (TokenTypes.INT, "5"),
        (TokenTypes.SEMICOLON, ";"),
        # 5 < 10 > 5;
        (TokenTypes.INT, "5"),
        (TokenTypes.LT, "<"),
        (TokenTypes.INT, "10"),
        (TokenTypes.GT, ">"),
        (TokenTypes.INT, "5"),
        (TokenTypes.SEMICOLON, ";"),
        # if (5 < 10) { return true; } else { return false; }
        (TokenTypes.IF, "if"),
        (TokenTypes.LPAREN, "("),
        (TokenTypes.INT, "5"),
        (TokenTypes.LT, "<"),
        (TokenTypes.INT, "10"),
        (TokenTypes.RPAREN, ")"),
        (TokenTypes.LBRACE, "{"),
        (TokenTypes.RETURN, "return"),
        (TokenTypes.TRUE, "true"),
        (TokenTypes.SEMICOLON, ";"),
        (TokenTypes.RBRACE, "}"),
        (TokenTypes.ELSE, "else"),
        (TokenTypes.LBRACE, "{"),
        (TokenTypes.RETURN, "return"),
        (TokenTypes.FALSE, "false"),
        (TokenTypes.SEMICOLON, ";"),
        (TokenTypes.RBRACE, "}"),
        # 10 == 10;
        (TokenTypes.INT, "10"),
        (TokenTypes.EQ, "=="),
        (TokenTypes.INT, "10"),
        (TokenTypes.SEMICOLON, ";"),
        # 10 != 9;  (literal fixed from "==" to "!=")
        (TokenTypes.INT, "10"),
        (TokenTypes.NOT_EQ, "!="),
        (TokenTypes.INT, "9"),
        (TokenTypes.SEMICOLON, ";"),
        # end of input
        (TokenTypes.EOF, ""),
    ]

    for token_type, literal in expected_stream:
        assert lexer.next_token() == Token(token_type, literal)
Example #17
0
def test_is_letter():
    """is_letter accepts letters and underscore; rejects symbols and spaces."""
    lexer = Lexer("")

    for accepted in ("c", "_"):
        assert lexer.is_letter(accepted)
    for rejected in ("*", " "):
        assert lexer.is_letter(rejected) is False