Example #1
def match(string, token):
    # Helper: a single lexeme should produce exactly one token of the given
    # type, followed by the ('ENDMARKER', '') pair and a trailing None.
    assert tokenize([string]) == [(token, string), ('ENDMARKER', ''), None]
Example #2
def test_sequence():
    assert tokenize(['a', '123']) == [('NAME', 'a'), ('INT', '123'),
                                      ('ENDMARKER', ''), None]
Example #3
def test_keywords():
    # Every keyword tokenizes to a token type named after its own uppercased spelling.
    for keyword in KEYWORDS:
        assert tokenize([keyword]) == [(keyword.upper(), keyword),
                                       ('ENDMARKER', ''), None]
Example #4
def test_empty():
    assert tokenize([]) == [('ENDMARKER', ''), None]
Example #5
def test_colon():
    match(':', 'COLON')
    # The same check, spelled out without the match() helper from Example #1:
    assert tokenize([':']) == [('COLON', ':'), ('ENDMARKER', ''), None]
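Taken together, the five examples pin down a small contract for tokenize: it takes a list of pre-split lexemes, classifies each one as a keyword (token type is the keyword's uppercased spelling), punctuation such as COLON, an INT, or a NAME, and always terminates the result with ('ENDMARKER', '') followed by a literal None. Below is a minimal sketch that satisfies all five tests; the contents of KEYWORDS, the PUNCTUATION table, and the function body are assumptions for illustration, not the project's actual implementation.

# Minimal sketch only; assumed names and contents are marked below.
KEYWORDS = {'if', 'else', 'while', 'return'}   # assumed contents; the tests only require the uppercase mapping
PUNCTUATION = {':': 'COLON'}                   # assumed table; only COLON is exercised by the tests

def tokenize(lexemes):
    """Classify each pre-split lexeme, then append the ENDMARKER/None tail."""
    tokens = []
    for lexeme in lexemes:
        if lexeme in KEYWORDS:                        # check keywords before identifiers,
            tokens.append((lexeme.upper(), lexeme))   # since every keyword is also a valid identifier
        elif lexeme in PUNCTUATION:
            tokens.append((PUNCTUATION[lexeme], lexeme))
        elif lexeme.isdigit():
            tokens.append(('INT', lexeme))
        elif lexeme.isidentifier():
            tokens.append(('NAME', lexeme))
        else:
            raise ValueError('unrecognized lexeme: %r' % lexeme)
    tokens.append(('ENDMARKER', ''))
    tokens.append(None)   # trailing sentinel that every expected list above ends with
    return tokens

Note the literal None after the ENDMARKER pair: it is an unusual sentinel, but every expected list in the examples ends with it, so the sketch reproduces it verbatim.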