# Example #1
# 0
def test_invalid_syntax_tokenization():
    """An unrecognized character ("@") tokenizes to InvalidSyntaxToken."""
    result = tokenizer.tokenize("@")
    assert isinstance(result, tokens.InvalidSyntaxToken)
# Example #2
# 0
def test_minus_tokenization():
    """The "-" character tokenizes to DecrementCellValueToken."""
    result = tokenizer.tokenize("-")
    assert isinstance(result, tokens.DecrementCellValueToken)
# Example #3
# 0
def test_open_square_bracket_tokenization():
    """The "[" character tokenizes to LoopStartToken."""
    result = tokenizer.tokenize("[")
    assert isinstance(result, tokens.LoopStartToken)
# Example #4
# 0
def test_close_square_bracket_tokenization():
    """The "]" character tokenizes to LoopEndToken."""
    result = tokenizer.tokenize("]")
    assert isinstance(result, tokens.LoopEndToken)
# Example #5
# 0
def test_less_sign_tokenization():
    """The "<" character tokenizes to PreviousCellToken."""
    result = tokenizer.tokenize("<")
    assert isinstance(result, tokens.PreviousCellToken)
# Example #6
# 0
def test_plus_tokenization():
    """The "+" character tokenizes to IncrementCellValueToken."""
    result = tokenizer.tokenize("+")
    assert isinstance(result, tokens.IncrementCellValueToken)
# Example #7
# 0
def test_more_sign_tokenization():
    """The ">" character tokenizes to NextCellToken."""
    result = tokenizer.tokenize(">")
    assert isinstance(result, tokens.NextCellToken)
# Example #8
# 0
def test_comma_tokenization():
    """The "," character tokenizes to GetCellValueToken."""
    result = tokenizer.tokenize(",")
    assert isinstance(result, tokens.GetCellValueToken)
# Example #9
# 0
def test_dot_tokenization():
    """The "." character tokenizes to PutCellValueToken."""
    result = tokenizer.tokenize(".")
    assert isinstance(result, tokens.PutCellValueToken)