def testTokenize(self):
    g = [TokenFoo, TokenBar]
    tokenizer = parsers.Tokenizer(grammar=g)
    tokens = tokenizer.tokenize(
        os.path.join(os.path.dirname(__file__), "testfiles", "test_tokenizer.txt"))
    # The fixture file should yield exactly one TokenFoo followed by one TokenBar.
    t = tokens.next()
    self.assertEqual(t.__class__, TokenFoo)
    t = tokens.next()
    self.assertEqual(t.__class__, TokenBar)
    # After the two expected tokens, the stream must be exhausted.
    self.assertRaises(StopIteration, tokens.next)
def __init__(self):
    self._tokenizer = parsers.Tokenizer(grammar=_fullGrammarOutput())
def testInit(self):  # fold>>
    g = [TokenFoo, TokenBar]
    tokenizer = parsers.Tokenizer(grammar=g)
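# A hedged sketch, not part of the original file: the tests above assume
# TokenFoo and TokenBar are defined elsewhere and that the grammar is a
# plain list of token classes. Definitions along these lines would satisfy
# them, though the Token base class and its "pattern" attribute are
# assumptions about the parsers module, not its confirmed API:
#
#     class TokenFoo(parsers.Token):
#         pattern = r"foo"   # hypothetical regex this token matches
#
#     class TokenBar(parsers.Token):
#         pattern = r"bar"   # hypothetical
#
# with testfiles/test_tokenizer.txt containing text such as "foo bar", so
# that testTokenize sees one TokenFoo, one TokenBar, then StopIteration.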