# Example 1
def test_number():
    """number() returns the integer token at the tokenizer's position."""
    # Number at the very start of the input.
    assert Tokenizer("123").number() == t(123)
    # Number preceded by a word: step past the 4-char "bla " prefix first.
    cases = [("bla 123", 123), ("bla 1 bla", 1), ("bla 42 bla", 42)]
    for source, value in cases:
        tok = Tokenizer(source)
        remaining = 4
        while remaining:
            tok.advance()
            remaining -= 1
        assert tok.number() == t(value)
# Example 2
def test_get_next_token_3():
    """Keywords, braces and a newline tokenize in input order, ending in EOF."""
    expected_values = ["if", "\n", "{", "else", "}", "if", "else", "elif", None]
    tokenizer = Tokenizer("if\n{ else } if else elif")
    for value in expected_values:
        assert tokenizer.get_next_token() == t(value)
# Example 3
def test_array_str():
    """str() and repr() of Array nodes render bracketed, comma-joined lists."""
    def ints(*digits):
        # Build a list of Int leaf nodes from string digits.
        return [Int(t(d)) for d in digits]

    cases = [
        (Array(ints("1")), "[1]"),
        (Array(ints("1", "2", "3")), "[1, 2, 3]"),
        # Nested arrays render recursively inside the outer brackets.
        (Array(ints("1", "2", "3") + [Array(ints("1", "2", "3"))]),
         "[1, 2, 3, [1, 2, 3]]"),
    ]
    for node, expected in cases:
        rendered = str(node)
        assert rendered == expected
        assert rendered == repr(node)
# Example 4
def test_test_for_multi_word_keyword():
    """Recognizes "is not" as one keyword and rejects near-misses."""
    def tokenizer_at(source, skip=0):
        # Position a fresh tokenizer `skip` characters into the source.
        tok = Tokenizer(source)
        for _ in range(skip):
            tok.advance()
        return tok

    # Positive cases: "is not" sits at the cursor, whatever follows it.
    positives = [("is not", 0), ("is not bla", 0), ("bla is not foo bar", 4)]
    for source, skip in positives:
        assert tokenizer_at(source, skip).test_for_multi_word_keyword() == t("is not")

    # Negative cases: truncated or corrupted forms must not match.
    negatives = [("is", 0), ("is nob", 0),
                 ("bla is nob foo bar", 4), ("bla ps not foo bar", 4)]
    for source, skip in negatives:
        assert not tokenizer_at(source, skip).test_for_multi_word_keyword()
# Example 5
def test_id():
    """id() returns the identifier token at the tokenizer's position."""
    def id_at(source, skip=0):
        # Advance `skip` characters, then read one identifier.
        tok = Tokenizer(source)
        for _ in range(skip):
            tok.advance()
        return tok.id()

    assert id_at("bla") == t("bla")
    # Past the 4-char "bla " prefix the next word is read; trailing text is ignored.
    assert id_at("bla foo", 4) == t("foo")
    assert id_at("bla foo bar", 4) == t("foo")
    # Keyword spellings and underscore-prefixed names still read as identifiers here.
    assert id_at("if") == t("if")
    assert id_at("else") == t("else")
    assert id_at("_a") == t("_a")
# Example 6
def test_if_str():
    """An If node renders as `if <cond> { <TAB-indented body> }`."""
    condition = BinaryOp(t("<"), Int(t("1")), Var(t("a")))
    body = Block([Assign(t("="), Var(t("a")), Int(t("1")))])
    assert str(If(condition, body)) == "if 1 < a {\n" + TAB + "a = 1\n}"
# Example 7
def test_block_str():
    """A Block renders its statements between braces, indented by TAB."""
    statement = Assign(t("="), Var(t("a")), Int(t("1")))
    assert str(Block([statement])) == "{\n" + TAB + "a = 1\n}"
# Example 8
def test_assign_str():
    """An Assign node renders as `target = value`."""
    assert str(Assign(t("="), Var(t("a")), Int(t("1")))) == "a = 1"
# Example 9
def test_binaryop_complex_str():
    """A BinaryOp nested as a left operand is parenthesized when rendered."""
    inner = BinaryOp(t("+"), Int(t("1")), Var(t("a")))
    outer = BinaryOp(t("+"), inner, Int(t("1")))
    assert str(outer) == "(1 + a) + 1"
# Example 10
def test_token_eq_with_different_type():
    """A token never compares equal to a bare int or string."""
    for token, other in ((t("1"), 1), (t("ab"), "ab")):
        assert token != other
# Example 11
def test_elif_enabled():
    """With Features.ELIF enabled, "elif" tokenizes as the elif keyword."""
    tokenizer = Tokenizer("elif", Features.ELIF)
    assert tokenizer.get_next_token() == t("elif")
# Example 12
def test_int_str():
    """str() and repr() of an Int node both give the bare literal."""
    node = Int(t("1"))
    rendered = str(node)
    assert rendered == "1"
    assert rendered == repr(node)
# Example 13
def test_array_enabled():
    """With Features.TYPE_ARRAY enabled, brackets tokenize one at a time."""
    tokenizer = Tokenizer("[ ] []", Features.TYPE_ARRAY)
    for bracket in ("[", "]", "[", "]"):
        assert tokenizer.get_next_token() == t(bracket)
# Example 14
def test_ast_eq_dif_types():
    """Nodes of different AST classes are never equal, even with equal tokens."""
    int_node = Int(t("1"))
    var_node = Var(t("1"))
    assert int_node != var_node
# Example 15
def test_ast_eq_same_type_diff_attr():
    """An extra attribute on one of two otherwise-equal nodes breaks equality."""
    plain = Int(t("1"))
    extended = Int(t("1"))
    extended.left = "bla"  # attribute the other node lacks
    assert plain != extended
# Example 16
def test_control_block_eq_diff_type():
    """A ControlBlock never equals a node of a different class."""
    control = ControlBlock([If([], Block())])
    assert control != Var(t("1"))
# Example 17
def test_if_eq_diff_type():
    """An If node never equals a node of a different class."""
    if_node = If([], Block())
    assert if_node != Var(t("1"))
# Example 18
def test_ast_eq_same_type_same_attr_diff_value():
    """The same extra attribute with different values breaks equality."""
    first, second = Int(t("1")), Int(t("1"))
    first.left = "bla"
    second.left = "bla1"
    assert first != second
# Example 19
def test_control_block_str():
    """A ControlBlock renders as if/elif branches followed by an else block."""
    def make_body():
        return Block([Assign(t("="), Var(t("a")), Int(t("1")))])

    def make_branch():
        condition = BinaryOp(t("<"), Int(t("1")), Var(t("a")))
        return If(condition, make_body())

    # Two identical branches: the first renders as "if", the second as "elif".
    node = ControlBlock([make_branch(), make_branch()], make_body())
    expected = ("if 1 < a {\n" + TAB + "a = 1\n}"
                " elif 1 < a {\n" + TAB + "a = 1\n}"
                " else {\n" + TAB + "a = 1\n}")
    assert str(node) == expected
# Example 20
def test_for_loop_enabled():
    """With Features.FOR_LOOP enabled, "for" tokenizes as the for keyword."""
    tokenizer = Tokenizer("for", Features.FOR_LOOP)
    assert tokenizer.get_next_token() == t("for")
# Example 21
def test_for_loop_str():
    """A ForLoop renders as `for <init>; <cond>; <step> { <body> }`."""
    init = Assign(t("="), Var(t("i")), Int(t("0")))
    condition = BinaryOp(t("<"), Var(t("i")), Int(t("10")))
    step = Assign(t("="), Var(t("i")),
                  BinaryOp(t("+"), Var(t("i")), Int(t("1"))))
    body = Block([Assign(t("="), Var(t("a")), Int(t("1")))])
    node = ForLoop(init, condition, step, body)
    assert str(node) == "for i = 0; i < 10; i = i + 1 {\n" + TAB + "a = 1\n}"
# Example 22
def test_elif_disabled():
    """Without Features.ELIF, "elif" must not produce the elif keyword token."""
    tokenizer = Tokenizer("elif", Features.NONE)
    assert tokenizer.get_next_token() != t("elif")
# Example 23
def test_if_enabled():
    """With Features.IF enabled, "if" and "else" tokenize as keywords."""
    tokenizer = Tokenizer("if else", Features.IF)
    for keyword in ("if", "else"):
        assert tokenizer.get_next_token() == t(keyword)
# Example 24
def test_tokenizer_stop_iter():
    """An empty tokenizer yields one EOF token, then raises StopIteration."""
    empty = Tokenizer("")
    eof = next(empty)
    assert eof == t(None)
    with pytest.raises(StopIteration):
        next(empty)
# Example 25
def test_for_loop_disabled():
    """Without Features.FOR_LOOP, "for" must not produce the for keyword token."""
    tokenizer = Tokenizer("for", Features.NONE)
    assert tokenizer.get_next_token() != t("for")
# Example 26
def test_var_str():
    """A Var node renders as its bare name."""
    assert str(Var(t("a"))) == "a"
# Example 27
def test_func_loop_disabled():
    """Without Features.FUNC, "func" and "return" must not be keyword tokens."""
    tokenizer = Tokenizer("func return", Features.NONE)
    for word in ("func", "return"):
        assert tokenizer.get_next_token() != t(word)
# Example 28
def test_unaryop_str():
    """A UnaryOp renders as `op(operand)`."""
    assert str(UnaryOp(t("not"), Var(t("a")))) == "not(a)"
# Example 29
def test_func_loop_enabled():
    """With Features.FUNC enabled, "func" and "return" tokenize as keywords."""
    tokenizer = Tokenizer("func return", Features.FUNC)
    for word in ("func", "return"):
        assert tokenizer.get_next_token() == t(word)
# Example 30
def test_binaryop_str():
    """A flat BinaryOp renders without parentheses."""
    assert str(BinaryOp(t("+"), Int(t("1")), Var(t("a")))) == "1 + a"