Exemple #1
0
def test_handle_unexpected_token_default():
    # Without a custom handler, the parser raises UnexpectedToken itself.
    parser = Parser(Grammar(_get_token_type), _tokenizer('1 + 1'))

    with raises(UnexpectedToken) as exc_info:
        parser.parse()

    # The first token of the stream is the one reported.
    assert exc_info.value.token == '1'
Exemple #2
0
def test_handle_unexpected_token_default():
    """The default behaviour on an unexpected token is to raise."""
    grammar = Grammar(_get_token_type)
    tokens = _tokenizer('1 + 1')

    with raises(UnexpectedToken) as excinfo:
        Parser(grammar, tokens).parse()

    assert excinfo.value.token == '1'
Exemple #3
0
def evaluate(string):
    """
    Evaluate a mathematical expression. Available operators are `+`, `-`, `*`
    and `/`. Parentheses are supported. The only available numbers are
    integers. Whitespace is ignored.
    """
    tokenizer = tokenize(string)
    parser = Parser(grammar, tokenizer)
    return parser.parse()
Exemple #4
0
def evaluate(string):
    """
    Evaluate a mathematical expression. Available operators are `+`, `-`, `*`
    and `/`. Parentheses are supported. The only available numbers are
    integers. Whitespace is ignored.
    """
    tokenizer = tokenize(string)
    parser = Parser(grammar, tokenizer)
    return parser.parse()
Exemple #5
0
def test_literal():
    """A registered literal handler converts the raw token text."""
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.literal('integer')
    def to_int(token):
        return int(token)

    assert Parser(grammar, _tokenizer('1')).parse() == 1
Exemple #6
0
def test_parser_token_ahead_in_null_denotation():
    """Inside a null denotation the parser exposes the look-ahead token."""
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('b')
    grammar.symbol('EOF')

    @grammar.null_denotation('a', 10)
    def handle_a(token, parser):
        assert parser.token == 'b'
        return token

    assert Parser(grammar, iter(['a', 'b', 'EOF'])).parse() == 'a'
Exemple #7
0
def test_advance_failure():
    """advance() returns None when the expected token type is absent."""
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.null_denotation('a', 10)
    def handle_a(token, parser):
        assert parser.advance('b') is None
        return token

    assert Parser(grammar, iter(['a', 'EOF'])).parse() == 'a'
Exemple #8
0
def test_literal():
    # A literal callback is invoked with the raw token and its result
    # becomes the parsed value.
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.literal('integer')
    def parse_int(token):
        return int(token)

    result = Parser(grammar, _tokenizer('1')).parse()
    assert result == 1
Exemple #9
0
def test_parse_stops_when_lbp_less_than_rbp():
    """A left denotation whose binding power is too low never runs."""
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.null_denotation('a', 10)
    def start(token, parser):
        return 'a'

    @grammar.left_denotation('b', -1)
    def follow(token, parser, left):
        return 'b'

    assert Parser(grammar, iter(['a', 'b', 'EOF'])).parse() == 'a'
Exemple #10
0
def test_handle_unexpected_token_is_called():
    """A custom unexpected-token callback is invoked and may raise."""
    was_called = [False]

    def on_unexpected(token):
        was_called[0] = True
        raise ValueError('syntax')

    parser = Parser(Grammar(_get_token_type, on_unexpected), _tokenizer('1 + 1'))

    with raises(ValueError):
        parser.parse()

    assert was_called[0]
Exemple #11
0
def test_infix():
    """An infix operator combines the left and right operands."""
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.literal('integer')
    def as_int(token):
        return int(token)

    @grammar.infix('+', 10)
    def plus(token, left, right):
        return left + right

    assert Parser(grammar, _tokenizer('1 + 1')).parse() == 2
Exemple #12
0
def test_null_denotation_is_called_at_expression_start():
    """The null denotation handler runs for the expression's first token."""
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')
    seen = [False]

    @grammar.null_denotation('a', 10)
    def handle_a(token, parser):
        seen[0] = True
        return 'foo'

    assert Parser(grammar, iter(['a', 'EOF'])).parse() == 'foo'
    assert seen[0]
Exemple #13
0
def test_enclosing():
    """An enclosing pair yields the expression between the delimiters."""
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.literal('integer')
    def as_int(token):
        return int(token)

    @grammar.enclosing('(', ')', 100)
    def parens(left_paren, right_paren, body):
        assert body == 1
        return body

    assert Parser(grammar, _tokenizer('(1)')).parse() == 1
Exemple #14
0
def test_prefix():
    """A prefix operator receives the already-parsed operand."""
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.literal('integer')
    def as_int(token):
        return int(token)

    @grammar.prefix('-', 10)
    def negate(token, operand):
        assert operand == 1
        return -operand

    assert Parser(grammar, _tokenizer('-1')).parse() == -1
Exemple #15
0
def test_advance_failure():
    # advance('b') yields None because the next token is EOF, not 'b'.
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.null_denotation('a', 10)
    def on_a(token, parser):
        next_token = parser.advance('b')
        assert next_token is None
        return token

    result = Parser(grammar, iter(['a', 'EOF'])).parse()
    assert result == 'a'
Exemple #16
0
def test_parser_token_ahead_in_null_denotation():
    # The look-ahead token is visible while handling the current one.
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    for name in ('b', 'EOF'):
        grammar.symbol(name)

    @grammar.null_denotation('a', 10)
    def on_a(token, parser):
        assert parser.token == 'b'
        return token

    result = Parser(grammar, iter(['a', 'b', 'EOF'])).parse()
    assert result == 'a'
Exemple #17
0
def test_infix_r():
    """A right-associative infix operator groups from the right."""
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.literal('integer')
    def as_int(token):
        return int(token)

    @grammar.infix_r('**', 10)
    def power(token, left, right):
        return left ** right

    # Right associativity: 2 ** (3 ** 2) == 512, not (2 ** 3) ** 2 == 64.
    assert Parser(grammar, _tokenizer('2 ** 3 ** 2')).parse() == 512
Exemple #18
0
def test_handle_unexpected_token_is_called():
    # The grammar's unexpected-token hook fires before the error propagates.
    hook_ran = [False]

    def on_unexpected(token):
        hook_ran[0] = True
        raise ValueError('syntax')

    grammar = Grammar(_get_token_type, on_unexpected)

    with raises(ValueError):
        Parser(grammar, _tokenizer('1 + 1')).parse()

    assert hook_ran[0]
Exemple #19
0
def test_null_denotation_is_called_at_expression_start():
    # The 'a' handler must run, and its return value becomes the result.
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')
    invoked = [False]

    @grammar.null_denotation('a', 10)
    def on_a(token, parser):
        invoked[0] = True
        return 'foo'

    result = Parser(grammar, iter(['a', 'EOF'])).parse()
    assert result == 'foo'
    assert invoked[0]
Exemple #20
0
def test_parse_stops_when_lbp_less_than_rbp():
    # 'b' binds weaker than the current right binding power, so parsing
    # finishes after 'a' and the 'b' handler never runs.
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.null_denotation('a', 10)
    def on_a(token, parser):
        return 'a'

    @grammar.left_denotation('b', -1)
    def on_b(token, parser, left):
        return 'b'

    result = Parser(grammar, iter(['a', 'b', 'EOF'])).parse()
    assert result == 'a'
Exemple #21
0
def test_infix():
    # '+' receives both parsed operands and returns their sum.
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.literal('integer')
    def to_int(token):
        return int(token)

    @grammar.infix('+', 10)
    def add(token, left, right):
        return left + right

    result = Parser(grammar, _tokenizer('1 + 1')).parse()
    assert result == 2
Exemple #22
0
def test_enclosing():
    # The parenthesized body is fully parsed before the handler is called.
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.literal('integer')
    def to_int(token):
        return int(token)

    @grammar.enclosing('(', ')', 100)
    def group(left_paren, right_paren, body):
        assert body == 1
        return body

    result = Parser(grammar, _tokenizer('(1)')).parse()
    assert result == 1
Exemple #23
0
def test_prefix():
    # Unary minus negates its single, already-parsed operand.
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.literal('integer')
    def to_int(token):
        return int(token)

    @grammar.prefix('-', 10)
    def neg(token, operand):
        assert operand == 1
        return -operand

    result = Parser(grammar, _tokenizer('-1')).parse()
    assert result == -1
Exemple #24
0
def test_infix_r():
    # infix_r makes '**' right-associative: 2 ** (3 ** 2) == 512,
    # whereas left association would give (2 ** 3) ** 2 == 64.
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')

    @grammar.literal('integer')
    def to_int(token):
        return int(token)

    @grammar.infix_r('**', 10)
    def power(token, left, right):
        return left ** right

    result = Parser(grammar, _tokenizer('2 ** 3 ** 2')).parse()
    assert result == 512
Exemple #25
0
def test_ternary():
    """A ternary construct hands both separators and all three operands
    to its handler."""
    grammar = Grammar(_get_token_type)
    grammar.symbol('EOF')

    @grammar.literal('integer')
    def as_int(token):
        return int(token)

    @grammar.ternary('if', 'else', 10)
    def conditional(first_sep, second_sep, then, condition, orelse):
        assert (first_sep, second_sep) == ('if', 'else')
        assert (then, condition, orelse) == (1, 2, 3)
        return 'foo'

    assert Parser(grammar, iter(['1', 'if', '2', 'else', '3', 'EOF'])).parse() == 'foo'
Exemple #26
0
def test_ternary():
    # '1 if 2 else 3' maps onto the handler's (then, condition, orelse).
    grammar = Grammar(_get_token_type)
    grammar.symbol('EOF')

    @grammar.literal('integer')
    def to_int(token):
        return int(token)

    @grammar.ternary('if', 'else', 10)
    def on_if_else(first_sep, second_sep, then, condition, orelse):
        assert first_sep == 'if'
        assert second_sep == 'else'
        assert then == 1
        assert condition == 2
        assert orelse == 3
        return 'foo'

    tokens = iter(['1', 'if', '2', 'else', '3', 'EOF'])
    result = Parser(grammar, tokens).parse()
    assert result == 'foo'
Exemple #27
0
def test_left_denotation_is_called_after_expression_start():
    """Left denotations chain: each receives the value parsed so far."""
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')
    reached_c = [False]

    @grammar.null_denotation('a', 10)
    def on_a(token, parser):
        return 'a'

    @grammar.left_denotation('b', 10)
    def on_b(token, parser, left):
        assert left == 'a'
        return 'b'

    @grammar.left_denotation('c', 10)
    def on_c(token, parser, left):
        assert left == 'b'
        reached_c[0] = True
        return 'c'

    assert Parser(grammar, iter(['a', 'b', 'c', 'EOF'])).parse() == 'c'
    assert reached_c[0]
Exemple #28
0
def test_left_denotation_is_called_after_expression_start():
    # Each left denotation sees the result produced for the previous token.
    grammar = Grammar(_get_token_type, _handle_unexpected_token)
    grammar.symbol('EOF')
    saw_last = [False]

    @grammar.null_denotation('a', 10)
    def start(token, parser):
        return 'a'

    @grammar.left_denotation('b', 10)
    def after_a(token, parser, left):
        assert left == 'a'
        return 'b'

    @grammar.left_denotation('c', 10)
    def after_b(token, parser, left):
        assert left == 'b'
        saw_last[0] = True
        return 'c'

    result = Parser(grammar, iter(['a', 'b', 'c', 'EOF'])).parse()
    assert result == 'c'
    assert saw_last[0]
Exemple #29
0
def test_handle_unexpected_token_not_raising():
    # A hook that returns None (instead of raising) makes the parser
    # raise RuntimeError itself.
    def swallow(t):
        return None

    parser = Parser(Grammar(_get_token_type, swallow), _tokenizer('1 + 1'))

    with raises(RuntimeError):
        parser.parse()
Exemple #30
0
def test_handle_unexpected_token_not_raising():
    """The parser guards against a non-raising unexpected-token handler."""
    grammar = Grammar(_get_token_type, lambda t: None)

    with raises(RuntimeError):
        Parser(grammar, _tokenizer('1 + 1')).parse()