Example no. 1
 def testTokenize(self):
     """Tokenizing the fixture file yields TokenFoo, then TokenBar, then stops.

     Builds a Tokenizer over a two-token grammar, runs it against
     testfiles/test_tokenizer.txt, and checks the token stream's class
     sequence and exhaustion.
     """
     g = [ TokenFoo, TokenBar ] 
     tokenizer = parsers.Tokenizer(grammar = g)
     tokens = tokenizer.tokenize(os.path.join(os.path.dirname(__file__),"testfiles","test_tokenizer.txt"))
     # next(iterator) instead of iterator.next(): the .next() method is
     # Python 2 only; the builtin works on Python 2.6+ and Python 3.
     t = next(tokens)
     self.assertEqual(t.__class__, TokenFoo)
     t = next(tokens)
     self.assertEqual(t.__class__, TokenBar)
     # assertRaises forwards extra args to the callable, so pass the
     # builtin `next` with `tokens` rather than the Py2-only bound method.
     self.assertRaises(StopIteration, next, tokens)
Example no. 2
 def __init__(self):
     """Set up the tokenizer backed by the full output grammar."""
     grammar = _fullGrammarOutput()
     self._tokenizer = parsers.Tokenizer(grammar=grammar)
Example no. 3
 def testInit(self): # fold>>
     g = [ TokenFoo, TokenBar ] 
     tokenizer = parsers.Tokenizer(grammar = g)