def run_test(self, input, *expected):
  """Tokenize *input* and assert the token stream equals *expected*.

  An END token is appended to *expected* automatically if the caller
  did not supply one, so individual tests don't have to repeat it.

  :param input: source string handed to tokenize().
  :param expected: the Token instances the input should produce
    (END terminator optional).
  """
  found = tokenize(input)
  extended = list(expected)
  # Guard the empty case: extended[-1] would raise IndexError when a test
  # asserts that the input yields no tokens besides the terminator.
  if not extended or not extended[-1].has_type(Token.END):
    extended.append(Token.end())
  # assertEquals is the deprecated alias (removed in Python 3.12);
  # use the canonical assertEqual.
  self.assertEqual(extended, found)
def id(phase, *names):
  """Shorthand: wrap *names* into a Path and return an identifier token
  tagged with *phase*.

  NOTE: intentionally shadows the builtin id() — this module's test
  shorthand convention.
  """
  path = data.Path(list(names))
  return Token.identifier(data.Identifier(phase, path))
#!/usr/bin/python from neutrino import ast, data from neutrino.token import Token, tokenize import unittest # Convenience shorthands wd = Token.word pt = Token.punctuation op = lambda v: Token.operation(v, False) aop = lambda v: Token.operation(v, True) tg = Token.tag lt = Token.literal ed = Token.end qt = Token.quote dec = data.DecimalFraction def id(phase, *names): return Token.identifier(data.Identifier(phase, data.Path(list(names)))) class TokenTest(unittest.TestCase): def run_test(self, input, *expected): found = tokenize(input) extended = list(expected) if not extended[-1].has_type(Token.END): extended.append(Token.end()) self.assertEquals(extended, found) def test_simple_tokens(self):