Example #1
0
def test_tokenizer_errors() -> None:
    """A dangling escape character at the end of the input must raise ValueError."""
    bad_input = "4x + 2x^3 * 7\\"
    lexer = Tokenizer()
    # Callable form of pytest.raises: fails the test unless ValueError is raised.
    pytest.raises(ValueError, lexer.tokenize, bad_input)
Example #2
0
def test_tokenizer_tokenize() -> None:
    """Tokenizing a valid expression yields only well-formed tokens.

    Every token must have a type no greater than EOF and a non-None value.
    """
    lexer = Tokenizer()
    result: List[Token] = lexer.tokenize("4x + 2x^3 * 7!")

    for tok in result:
        print(tok)
        # EOF is the largest token type constant, so every type sorts at or below it.
        assert tok.type <= TOKEN_TYPES.EOF
        assert tok.value is not None
Example #3
0
from typing import List

from mathy_core import Token, Tokenizer

# Tokenize a small polynomial expression and show each token produced.
expression = "4x + 2x^3 * 7x"
lexer = Tokenizer()
token_list: List[Token] = lexer.tokenize(expression)

for token in token_list:
    print(f"type: {token.type}, value: {token.value}")