Example #1
0
def test_type_declaration():
    """Lexing scalar and list type declarations yields the expected token stream."""
    lex = build_lexer()
    lex.input('T : num(0); a, b : num(*); c : list bool; d: list num(0);')
    produced = [(tok.type, tok.value) for tok in lex]
    expected = [
        # T : num(0);
        ('IDENTIFIER', 'T'), (':', ':'), ('NUM_TYPE', 'num'), ('EXPRESSION', '0'), (';', ';'),
        # a, b : num(*);
        ('IDENTIFIER', 'a'), (',', ','), ('IDENTIFIER', 'b'), (':', ':'), ('NUM_TYPE', 'num'), ('*', '*'), (';', ';'),
        # c : list bool;
        ('IDENTIFIER', 'c'), (':', ':'), ('LIST_TYPE', 'list'), ('BOOL_TYPE', 'bool'), (';', ';'),
        # d : list num(0);
        ('IDENTIFIER', 'd'), (':', ':'), ('LIST_TYPE', 'list'), ('NUM_TYPE', 'num'), ('EXPRESSION', '0'), (';', ';'),
    ]
    assert produced == expected
Example #2
0
def test_illegal_character():
    """An unrecognized character ('~') raises SyntaxError carrying the error token."""
    lex = build_lexer()
    with pytest.raises(SyntaxError) as error:
        lex.input('a ~= 1;')
        list(lex)  # force full tokenization; the '~' triggers the error
    # The raised SyntaxError carries the offending lexer token in .msg.
    offending = error.value.msg
    assert offending.type == 'error'
    assert offending.lineno == 1
    assert offending.value == '~= 1;'
    assert offending.lexpos == 2
Example #3
0
def test_forall_keyword():
    """A 'forall i' precondition makes the parser report ['i'] as quantified variables."""
    spec = """
        precondition : forall i (^q[i] >= -1 and ^q[i] <= 1);
        A : num(0)
        """
    bound_vars, _precondition, _type_map = build_parser().parse(spec, lexer=build_lexer())
    assert bound_vars == ['i']
Example #4
0
def test_lineno():
    """The lexer counts newlines in its input and honors a manual lineno reset."""
    lex = build_lexer()
    lex.input('precondition(1 > 2);\nT : num(0);')
    list(lex)  # drain all tokens so every newline is counted
    assert lex.lineno == 2
    # Reset the line counter before feeding a second input.
    lex.lineno = 1
    lex.input('T : num(0);\nT : num(0);\nT : num(0);\n\n\n')
    list(lex)
    assert lex.lineno == 6
Example #5
0
def test_parser():
    """Full parse of a multi-declaration annotation.

    Checks that no forall variables are reported, the precondition expression
    is extracted verbatim, and the resulting type map equals a jsonpickle-
    serialized reference OrderedDict.
    """
    spec = """
        precondition : (^q[i] >= -1 and ^q[i] <= 1 and ^out[i] == 0);
        T, N, len, epsilon : num(0); q : list num(*); out : list bool;
        c1, c2, i : num(0); T_threshold, eta_1 : num(1); eta_2 : num(2 if q[i]+eta_2>=T_threshold else 0)
        """
    bound_vars, condition, types = build_parser().parse(spec, lexer=build_lexer())
    assert bound_vars is None
    assert condition == "^q[i] >= -1 and ^q[i] <= 1 and ^out[i] == 0"
    # Reference type map serialized with jsonpickle; "py/id" entries are
    # back-references to earlier objects (shared type instances).
    expected_types = jsonpickle.loads(
        r'{"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["T", {"py/object": "lightdp.typing.NumType", "value": "0"}]}, {"py/tuple": ["N", {"py/id": 1}]}, {"py/tuple": ["len", {"py/id": 1}]}, {"py/tuple": ["epsilon", {"py/id": 1}]}, {"py/tuple": ["q", {"py/object": "lightdp.typing.ListType", "elem_type": {"py/object": "lightdp.typing.NumType", "value": "*"}}]}, {"py/tuple": ["out", {"py/object": "lightdp.typing.ListType", "elem_type": {"py/object": "lightdp.typing.BoolType"}}]}, {"py/tuple": ["c1", {"py/object": "lightdp.typing.NumType", "value": "0"}]}, {"py/tuple": ["c2", {"py/id": 6}]}, {"py/tuple": ["i", {"py/id": 6}]}, {"py/tuple": ["T_threshold", {"py/object": "lightdp.typing.NumType", "value": "1"}]}, {"py/tuple": ["eta_1", {"py/id": 7}]}, {"py/tuple": ["eta_2", {"py/object": "lightdp.typing.NumType", "value": "2 if q[i]+eta_2>=T_threshold else 0"}]}]}]}'
    )
    assert types == expected_types
Example #6
0
def test_precondition():
    """A parenthesized precondition is lexed as one EXPRESSION token, parens stripped."""
    lex = build_lexer()
    lex.input('precondition : (q[i] >= -1 and q[i] <= 1);')
    produced = [(tok.type, tok.value) for tok in lex]
    expected = [
        ('PRECONDITION', 'precondition'),
        (':', ':'),
        ('EXPRESSION', 'q[i] >= -1 and q[i] <= 1'),
        (';', ';'),
    ]
    assert produced == expected
Example #7
0
def test_precondition_with_forall():
    """'forall i' before the expression adds FORALL and IDENTIFIER tokens."""
    lex = build_lexer()
    lex.input('precondition : forall i (q[i] >= -1 and q[i] <= 1);')
    produced = [(tok.type, tok.value) for tok in lex]
    expected = [
        ('PRECONDITION', 'precondition'),
        (':', ':'),
        ('FORALL', 'forall'),
        ('IDENTIFIER', 'i'),
        ('EXPRESSION', 'q[i] >= -1 and q[i] <= 1'),
        (';', ';'),
    ]
    assert produced == expected