# Example 1
def test_tokenize_0():
    """
    Tests that tokenize_by_number matches a manually generated output
    on a specific example.
    """
    # Raw strings: '\%' is an invalid escape sequence (SyntaxWarning since
    # Python 3.12); the raw form yields the identical backslash-percent text.
    s = r' 123 klsdgh 56.7?98.2---\%-1e3'
    true_tokens = [' ', 123, ' klsdgh ', 56.7, '?', 98.2, r'---\%', -1e3]
    tokens = tokenize_by_number(s)
    assert token_lists_match(tokens, true_tokens)
# NOTE(review): this redefines test_tokenize_0 — pytest collects only the
# last definition with this name, so earlier copies never run; consider
# giving each test a distinct name.
def test_tokenize_0():
    """
    Tests that tokenize_by_number's output nearly matches (tolerant
    comparison) a manually generated token list on a specific example.
    """
    # Raw strings: '\%' is an invalid escape sequence (SyntaxWarning since
    # Python 3.12); the raw form yields the identical backslash-percent text.
    s = r' 123 klsdgh 56.7?98.2---\%-1e3'
    true_tokens = [' ', 123, ' klsdgh ', 56.7, '?', 98.2, r'---\%', -1e3]
    tokens = tokenize_by_number(s)
    assert token_lists_nearly_match(tokens, true_tokens)
# Example 3
# NOTE(review): this redefines test_tokenize_0 — pytest collects only the
# last definition with this name, so earlier copies never run; consider
# giving each test a distinct name.
def test_tokenize_0():
    """
    Tests that tokenize_by_number's output nearly matches (tolerant
    comparison) a manually generated token list on a specific example.
    """
    # Raw strings: '\%' is an invalid escape sequence (SyntaxWarning since
    # Python 3.12); the raw form yields the identical backslash-percent text.
    s = r' 123 klsdgh 56.7?98.2---\%-1e3'
    true_tokens = [' ', 123, ' klsdgh ', 56.7, '?', 98.2, r'---\%', -1e3]
    tokens = tokenize_by_number(s)
    assert token_lists_nearly_match(tokens, true_tokens)