# NOTE(review): an identical `test_tokenizer` is defined again later in this
# file; the later definition shadows this one, so pytest only runs one copy.
# Consider deleting one of the two — confirm with the file's history.
def test_tokenizer():
    """Tokenizing a C declaration yields identifier and punctuation tokens in order."""
    t = tokenizer('void* my_fn(void)')
    assert not t.empty()
    assert next(t) == 'void'
    assert next(t) == '*'
    assert next(t) == 'my_fn'
    assert next(t) == '('
    assert next(t) == 'void'
    assert next(t) == ')'
    # All input consumed once the closing paren has been read.
    assert t.empty()
def test_tokenizer():
    """Tokenizing a C declaration yields identifier and punctuation tokens in order."""
    t = tokenizer('void* my_fn(void)')
    assert not t.empty()
    assert next(t) == 'void'
    assert next(t) == '*'
    assert next(t) == 'my_fn'
    assert next(t) == '('
    assert next(t) == 'void'
    assert next(t) == ')'
    # All input consumed once the closing paren has been read.
    assert t.empty()
# NOTE(review): an identical `test_tokenizer_keywords` is defined again later
# in this file; the later definition shadows this one under pytest collection.
# Consider deleting one of the two — confirm with the file's history.
def test_tokenizer_keywords():
    """A registered multi-word type name ('long long') is emitted as a single token."""
    # Register the two-word keyword so the tokenizer treats it atomically.
    define('long long', ctypes.c_longlong)
    t = tokenizer('void* my_fn(long long)')
    assert not t.empty()
    assert next(t) == 'void'
    assert next(t) == '*'
    assert next(t) == 'my_fn'
    assert next(t) == '('
    # The defined keyword comes back as one token, not two.
    assert next(t) == 'long long'
    assert next(t) == ')'
    assert t.empty()
def test_tokenizer_keywords():
    """A registered multi-word type name ('long long') is emitted as a single token."""
    # Register the two-word keyword so the tokenizer treats it atomically.
    define('long long', ctypes.c_longlong)
    t = tokenizer('void* my_fn(long long)')
    assert not t.empty()
    assert next(t) == 'void'
    assert next(t) == '*'
    assert next(t) == 'my_fn'
    assert next(t) == '('
    # The defined keyword comes back as one token, not two.
    assert next(t) == 'long long'
    assert next(t) == ')'
    assert t.empty()