import pytest

# NOTE: the import path below is an assumption; adjust it to wherever
# Tokenizer, Features, and the token-constructor helper t() actually live.
from tokenizer import Tokenizer, Features, t


def test_array_disabled():
    # With arrays disabled, either bracket should be rejected outright.
    tokenizer = Tokenizer("[", Features.NONE)
    with pytest.raises(Exception):
        tokenizer.get_next_token()
    tokenizer = Tokenizer("]", Features.NONE)
    with pytest.raises(Exception):
        tokenizer.get_next_token()


def test_get_next_token_1():
    # Mixed stream: numbers, keywords, comparison and arithmetic operators,
    # and identifiers ("vfe", "elset") that merely contain keyword substrings.
    tokenizer = Tokenizer(
        "1 if else 321 not is not < >= <= > ) * +213-432=vfe-elset else-")
    tokens = [
        t(1),
        t("if"),
        t("else"),
        t(321),
        t("not"),
        t("is not"),
        t("<"),
        t(">="),
        t("<="),
        t(">"),
        t(")"),
        t("*"),
        t("+"),
        t(213),
        t("-"),
        t(432),
        t("="),
        t("vfe"),
        t("-"),
        t("elset"),
        t("else"),
        t("-"),
        t(None)
    ]
    for token in tokens:
        assert tokenizer.get_next_token() == token


def test_get_next_token_3():
    # Newlines and braces are each emitted as their own tokens.
    tokenizer = Tokenizer("if\n{ else } if else elif")
    tokens = [
        t("if"),
        t("\n"),
        t("{"),
        t("else"),
        t("}"),
        t("if"),
        t("else"),
        t("elif"),
        t(None)
    ]
    for token in tokens:
        assert tokenizer.get_next_token() == token


def test_get_next_token_2():
    # Full boolean expression mixing arithmetic, comparisons, and logical
    # operators, including the two-word "is not" operator.
    tokenizer = Tokenizer(
        "(1+2)/4 > 1 + 2 and 1 < 3 or 2%5 >= 3 or 3*4 <= 4/2 and not 1 is 2 or 4 is not 5"
    )
    tokens = [
        t("("),
        t(1),
        t("+"),
        t(2),
        t(")"),
        t("/"),
        t(4),
        t(">"),
        t(1),
        t("+"),
        t(2),
        t("and"),
        t(1),
        t("<"),
        t(3),
        t("or"),
        t(2),
        t("%"),
        t(5),
        t(">="),
        t(3),
        t("or"),
        t(3),
        t("*"),
        t(4),
        t("<="),
        t(4),
        t("/"),
        t(2),
        t("and"),
        t("not"),
        t(1),
        t("is"),
        t(2),
        t("or"),
        t(4),
        t("is not"),
        t(5),
        t(None)
    ]
    for token in tokens:
        assert tokenizer.get_next_token() == token


def test_func_loop_enabled():
    # "func" and "return" are keywords only when the FUNC feature is on.
    tokenizer = Tokenizer("func return", Features.FUNC)
    tokens = [t("func"), t("return")]
    for token in tokens:
        assert tokenizer.get_next_token() == token


def test_elif_disabled():
    # With ELIF off, "elif" must not tokenize as the elif keyword.
    tokenizer = Tokenizer("elif", Features.NONE)
    tokens = [t("elif")]
    for token in tokens:
        assert tokenizer.get_next_token() != token


def test_func_loop_disabled():
    # With FUNC off, neither word may tokenize as its keyword.
    tokenizer = Tokenizer("func return", Features.NONE)
    tokens = [t("func"), t("return")]
    for token in tokens:
        assert tokenizer.get_next_token() != token


def test_for_loop_enabled():
    # "for" is a keyword only when the FOR_LOOP feature is on.
    tokenizer = Tokenizer("for", Features.FOR_LOOP)
    tokens = [t("for")]
    for token in tokens:
        assert tokenizer.get_next_token() == token


def test_for_loop_disabled():
    # With FOR_LOOP off, "for" must not tokenize as the for keyword.
    tokenizer = Tokenizer("for", Features.NONE)
    tokens = [t("for")]
    for token in tokens:
        assert tokenizer.get_next_token() != token


def test_array_enabled():
    # Brackets tokenize whether space-separated or adjacent.
    tokenizer = Tokenizer("[ ] []", Features.TYPE_ARRAY)
    tokens = [t("["), t("]"), t("["), t("]")]
    for token in tokens:
        assert tokenizer.get_next_token() == token


def test_if_enabled():
    # "if" and "else" are keywords when the IF feature is on.
    tokenizer = Tokenizer("if else", Features.IF)
    tokens = [t("if"), t("else")]
    for token in tokens:
        assert tokenizer.get_next_token() == token


def test_elif_enabled():
    # "elif" is a keyword when the ELIF feature is on.
    tokenizer = Tokenizer("elif", Features.ELIF)
    tokens = [t("elif")]
    for token in tokens:
        assert tokenizer.get_next_token() == token


def test_get_next_token_8():
    # The number tokenizes fine; the stray "$" that follows must raise.
    tokenizer = Tokenizer("123$")
    tokenizer.get_next_token()
    with pytest.raises(Exception):
        tokenizer.get_next_token()


def test_get_next_token_7():
    # No valid token can be produced here ("&" is illegal), so the very
    # first call must raise.
    tokenizer = Tokenizer("\r\t &")
    with pytest.raises(Exception):
        tokenizer.get_next_token()


def test_get_next_token_4():
    # Consecutive newlines collapse into a single newline token.
    tokenizer = Tokenizer("bar\n\n\n\nfoo")
    tokens = [t("bar"), t("\n"), t("foo"), t(None)]
    for token in tokens:
        assert tokenizer.get_next_token() == token
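

# A minimal sketch of how the paired enabled/disabled cases above could be
# collapsed with pytest.mark.parametrize. It assumes the same Tokenizer,
# Features, and t() helpers as the tests above; the flag/keyword pairings
# are taken from those tests, not from the tokenizer's own documentation.
@pytest.mark.parametrize("source, feature", [
    ("if else", Features.IF),
    ("elif", Features.ELIF),
    ("for", Features.FOR_LOOP),
    ("func return", Features.FUNC),
])
def test_keywords_gated_by_feature(source, feature):
    enabled = Tokenizer(source, feature)
    disabled = Tokenizer(source, Features.NONE)
    for word in source.split():
        # Feature on: the word tokenizes as its keyword.
        assert enabled.get_next_token() == t(word)
        # Feature off: the same word must not come back as that keyword.
        assert disabled.get_next_token() != t(word)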