Example #1
0
def test_invalid_syntax_tokenization():
    """An unrecognized character ('@') tokenizes to an InvalidSyntaxToken."""
    result = tokenizer.tokenize("@")
    assert isinstance(result, tokens.InvalidSyntaxToken)
Example #2
0
def test_minus_tokenization():
    """The '-' character tokenizes to a DecrementCellValueToken."""
    result = tokenizer.tokenize("-")
    assert isinstance(result, tokens.DecrementCellValueToken)
Example #3
0
def test_open_square_bracket_tokenization():
    """The '[' character tokenizes to a LoopStartToken."""
    result = tokenizer.tokenize("[")
    assert isinstance(result, tokens.LoopStartToken)
Example #4
0
def test_close_square_bracket_tokenization():
    """The ']' character tokenizes to a LoopEndToken."""
    result = tokenizer.tokenize("]")
    assert isinstance(result, tokens.LoopEndToken)
Example #5
0
def test_less_sign_tokenization():
    """The '<' character tokenizes to a PreviousCellToken."""
    result = tokenizer.tokenize("<")
    assert isinstance(result, tokens.PreviousCellToken)
Example #6
0
def test_plus_tokenization():
    """The '+' character tokenizes to an IncrementCellValueToken."""
    result = tokenizer.tokenize("+")
    assert isinstance(result, tokens.IncrementCellValueToken)
Example #7
0
def test_more_sign_tokenization():
    """The '>' character tokenizes to a NextCellToken."""
    result = tokenizer.tokenize(">")
    assert isinstance(result, tokens.NextCellToken)
Example #8
0
def test_comma_tokenization():
    """The ',' character tokenizes to a GetCellValueToken."""
    result = tokenizer.tokenize(",")
    assert isinstance(result, tokens.GetCellValueToken)
Example #9
0
def test_dot_tokenization():
    """The '.' character tokenizes to a PutCellValueToken."""
    result = tokenizer.tokenize(".")
    assert isinstance(result, tokens.PutCellValueToken)