Example #1
def test_get_next_token_1():
    tokenizer = Tokenizer(
        "1 if else 321 not is not < >= <= > ) * +213-432=vfe-elset else-")
    tokens = [
        t(1),
        t("if"),
        t("else"),
        t(321),
        t("not"),
        t("is not"),
        t("<"),
        t(">="),
        t("<="),
        t(">"),
        t(")"),
        t("*"),
        t("+"),
        t(213),
        t("-"),
        t(432),
        t("="),
        t("vfe"),
        t("-"),
        t("elset"),
        t("else"),
        t("-"),
        t(None)
    ]
    for token in tokens:
        assert tokenizer.get_next_token() == token
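The t(...) helper these tokenizer tests compare against is never defined in this listing. Below is a minimal sketch of what it plausibly does, assuming a value-comparing Token type; every name in the sketch is a hypothetical stand-in, not the project's actual class.

from dataclasses import dataclass
from typing import Any, Optional

KEYWORDS = {"if", "else", "elif", "not", "is", "is not", "and", "or",
            "func", "return", "for"}

@dataclass
class Token:
    # Stand-in for the project's token class; dataclass equality gives the
    # value comparison that the assertions above rely on.
    kind: str
    value: Optional[Any]

def t(value):
    # Hypothetical reconstruction of the test helper: infer the token kind
    # from the raw value. None marks end of input.
    if value is None:
        return Token("EOF", None)
    if isinstance(value, int):
        return Token("INT", value)
    if value in KEYWORDS:
        return Token("KEYWORD", value)
    return Token("SYMBOL", value)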
Example #2
def test_array_disabled():
    tokenizer = Tokenizer("[", Features.NONE)
    with pytest.raises(Exception):
        tokenizer.get_next_token()
    tokenizer = Tokenizer("]", Features.NONE)
    with pytest.raises(Exception):
        tokenizer.get_next_token()
Example #3
def test_get_next_token_3():
    tokenizer = Tokenizer("if\n{ else } if else elif")
    tokens = [
        t("if"),
        t("\n"),
        t("{"),
        t("else"),
        t("}"),
        t("if"),
        t("else"),
        t("elif"),
        t(None)
    ]
    for token in tokens:
        assert tokenizer.get_next_token() == token
Example #4
def test_if_elif():
    ast = ctrl([
        _if(_is(v("a"), c(8)), blk([asg(v("b"), c(3))])),
        _if(_is(v("a"), c(4)), blk([asg(v("b"), c(2))]))
    ], blk())
    parser = Parser(Tokenizer("if a is 8 { b = 3 } elif a is 4 { b = 2 }"))
    assert parser.statement() == ast
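The parser tests here build their expected ASTs with shorthand helpers (v, c, asg, blk, _if, ctrl, _is, and later sig, _def, call, ret, add, getitem, setitem) that the listing never defines. The sketch below shows how a few of them could look, under the assumption that the project's AST nodes compare by value; these dataclasses are hypothetical stand-ins, not the real node classes.

from dataclasses import dataclass, field
from typing import Any, List

@dataclass
class Var:
    name: str

@dataclass
class Const:
    value: Any

@dataclass
class Assign:
    target: Var
    expr: Any

@dataclass
class Block:
    statements: List[Any] = field(default_factory=list)

def v(name):                 # variable reference
    return Var(name)

def c(value):                # constant literal
    return Const(value)

def asg(target, expr):       # assignment statement
    return Assign(target, expr)

def blk(statements=None):    # block of statements
    return Block(statements or [])

# Mirrors the shape asserted above: the block { b = 3 } becomes
# blk([asg(v("b"), c(3))]).
example = blk([asg(v("b"), c(3))])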
Example #5
def test_get_next_token_2():
    tokenizer = Tokenizer(
        "(1+2)/4 > 1 + 2 and 1 < 3 or 2%5 >= 3 or 3*4 <= 4/2 and not 1 is 2 or 4 is not 5"
    )
    s = t("and")
    tokens = [
        t("("),
        t(1),
        t("+"),
        t(2),
        t(")"),
        t("/"),
        t(4),
        t(">"),
        t(1),
        t("+"),
        t(2),
        t("and"),
        t(1),
        t("<"),
        t(3),
        t("or"),
        t(2),
        t("%"),
        t(5),
        t(">="),
        t(3),
        t("or"),
        t(3),
        t("*"),
        t(4),
        t("<="),
        t(4),
        t("/"),
        t(2),
        t("and"),
        t("not"),
        t(1),
        t("is"),
        t(2),
        t("or"),
        t(4),
        t("is not"),
        t(5)
    ]
    for token in tokens:
        assert tokenizer.get_next_token() == token
Example #6
def test_advance():
    tokenizer = Tokenizer("123")
    assert tokenizer.cur_pos == 0
    assert tokenizer.cur_char == '1'
    tokenizer.advance()
    assert tokenizer.cur_pos == 1
    assert tokenizer.cur_char == '2'
    tokenizer.advance()
    assert tokenizer.cur_pos == 2
    assert tokenizer.cur_char == '3'
    tokenizer.advance()
    assert tokenizer.cur_char is None
Example #7
    def compile(self, prog, features=Features.ALL):
        """Currently this compiler simply returns an interpreter instead of compiling
        TODO: Write this compiler to increase LPProg run speed and to prevent exceeding maximum recursion depth

        Args:
            prog (str): A string containing the program.
            features (FeatureSet): The set of features to enable during compilation.

        Returns:
            LPProg: The runnable program (currently backed by an interpreter).
        """
        return LPProg(
            Parser(Tokenizer(prog, features), features).program(), features)
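A usage sketch for the method above: the listing only shows that compile() feeds the program text through Tokenizer and Parser and wraps the result in an LPProg, so the import path and the run(...) call below are assumptions for illustration.

# Assumed import path; the listing does not show the package layout.
from littlepython import Compiler

compiler = Compiler()
prog = compiler.compile("b = a + 1")  # parses with Features.ALL by default
# Hypothetical entry point: if LPProg exposes run(initial_state), executing
# the program against {"a": 1} would be expected to bind b to 2.
state = prog.run({"a": 1})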
Example #8
def test_eat_wrong_type_error():
    parser = Parser(Tokenizer("not b+1"))
    with pytest.raises(Exception):
        parser.eat(TokenTypes.INT)
    parser.eat(TokenTypes.NOT)
    with pytest.raises(Exception):
        parser.eat(TokenTypes.INT)
    parser.eat(TokenTypes.VAR)
    with pytest.raises(Exception):
        parser.eat(TokenTypes.INT)
    parser.eat(TokenTypes.ADD)
    with pytest.raises(Exception):
        parser.eat(TokenTypes.VAR)
    parser.eat(TokenTypes.INT)
Example #9
def test_elif_enabled():
    tokenizer = Tokenizer("elif", Features.ELIF)
    tokens = [t("elif")]
    for token in tokens:
        assert tokenizer.get_next_token() == token
Example #10
def test_var():
    ast = v("a")
    parser = Parser(Tokenizer("a"))
    assert parser.statement() == ast
Example #11
def test_simple():
    ast = _def(v("t"), sig([]), blk([asg(v("a"), c(1))]))

    parser = Parser(Tokenizer("func t() {a = 1}"))
    assert parser.statement() == ast
Example #12
def test_invalid_statement_in_block():
    parser = Parser(Tokenizer("func t() {a() {} }"))
    with pytest.raises(InvalidSyntaxException):
        parser.statement()
Example #13
def test_call_with_add():
    ast = asg(v("a"), call(v("t"), [add(v("b"), c(2)), add(c(1), v("c"))]))

    parser = Parser(Tokenizer("a = t(b+2, 1+c)"))
    assert parser.statement() == ast
Example #14
def test_call():
    ast = asg(v("a"), call(v("t"), [v("b"), v("c")]))

    parser = Parser(Tokenizer("a = t(b, c)"))
    assert parser.statement() == ast
Example #15
def test_binary_ops_level_4_and_below(op, left, right, op_str, left_str,
                                      right_str):
    ast = op(left, right)
    s = left_str + op_str + right_str
    parser = Parser(Tokenizer(s))
    assert parser.expression() == ast
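Several tests in this listing (this one, test_binary_ops_all_levels, test_literals, test_setitem, test_getitem) take arguments, so they are evidently driven by pytest.mark.parametrize decorators that were not captured here. The sketch below shows what such a decorator could look like for this test; the case table is hypothetical, and mul is assumed to be an AST helper alongside the add seen elsewhere in the listing.

import pytest

# Hypothetical case table; the project's real parameters are not shown here.
@pytest.mark.parametrize("op, left, right, op_str, left_str, right_str", [
    (add, c(1), c(2), "+", "1", "2"),    # "1+2" -> add(c(1), c(2))
    (mul, v("a"), c(3), "*", "a", "3"),  # "a*3" -> mul(v("a"), c(3))
])
def test_binary_ops_level_4_and_below(op, left, right, op_str, left_str,
                                      right_str):
    ast = op(left, right)
    s = left_str + op_str + right_str
    parser = Parser(Tokenizer(s))
    assert parser.expression() == ast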
Example #16
def test_elif_disabled():
    tokenizer = Tokenizer("elif", Features.NONE)
    tokens = [t("elif")]
    for token in tokens:
        assert tokenizer.get_next_token() != token
Example #17
def test_for_loop_enabled():
    tokenizer = Tokenizer("for", Features.FOR_LOOP)
    tokens = [t("for")]
    for token in tokens:
        assert tokenizer.get_next_token() == token
Example #18
def test_array_enabled():
    tokenizer = Tokenizer("[ ] []", Features.TYPE_ARRAY)
    tokens = [t("["), t("]"), t("["), t("]")]
    for token in tokens:
        assert tokenizer.get_next_token() == token
Example #19
def test_literals(var, var_str, exprs, expr_strs):
    ast = asg(var, Array(exprs))
    s = "{} = [{}]".format(var_str, ",".join(expr_strs))
    parser = Parser(Tokenizer(s))
    assert parser.statement() == ast
Example #20
def test_setitem(var, expr, val, var_str, expr_str, val_str):
    ast = setitem(var, expr, val)
    parser = Parser(Tokenizer(var_str + "[" + expr_str + "]" + "=" + val_str))
    assert parser.statement() == ast
Example #21
def test_getitem(var, expr, var_str, expr_str):
    ast = getitem(var, expr)
    parser = Parser(Tokenizer(var_str + "[" + expr_str + "]"))
    assert parser.variable() == ast
Example #22
def test_if_enabled():
    tokenizer = Tokenizer("if else", Features.IF)
    tokens = [t("if"), t("else")]
    for token in tokens:
        assert tokenizer.get_next_token() == token
Example #23
def test_if():
    ast = ctrl(
        [_if(_is(getitem(v("a"), c(0)), c(8)), blk([asg(v("b"), c(3))]))],
        blk())
    parser = Parser(Tokenizer("if a[0] is 8 { b = 3 }"))
    assert parser.statement() == ast
Example #24
def test_binary_ops_all_levels(op, left, right, op_str, left_str, right_str):
    ast = op(left, right)
    s = " ".join((left_str, op_str, right_str))
    parser = Parser(Tokenizer(s))
    assert parser.expression() == ast
Example #25
def test_for_loop_disabled():
    tokenizer = Tokenizer("for", Features.NONE)
    tokens = [t("for")]
    for token in tokens:
        assert tokenizer.get_next_token() != token
Example #26
def test_simple_return():
    ast = _def(v("t"), sig([]), blk([ret(c(1))]))

    parser = Parser(Tokenizer("func t() {return 1}"))
    assert parser.statement() == ast
Example #27
def test_func_loop_disabled():
    tokenizer = Tokenizer("func return", Features.NONE)
    tokens = [t("func"), t("return")]
    for token in tokens:
        assert tokenizer.get_next_token() != token
Example #28
def test_simple_return_var():
    ast = _def(v("t"), sig([]), blk([asg(v("a"), c(1)), ret(v("a"))]))

    parser = Parser(Tokenizer("func t() {a = 1\nreturn a}"))
    assert parser.statement() == ast
Example #29
def test_func_loop_enabled():
    tokenizer = Tokenizer("func return", Features.FUNC)
    tokens = [t("func"), t("return")]
    for token in tokens:
        assert tokenizer.get_next_token() == token
Example #30
def test_with_params():
    ast = _def(v("t"), sig([v("b"), v("c")]), blk([asg(v("a"), c(1))]))

    parser = Parser(Tokenizer("func t(b, c) {a = 1}"))
    assert parser.statement() == ast