def test_nexttoken(self):
    """Tokenizer splits a string into the expected ITEM/WORD/PUNCTUATION
    tokens, in order, and the stream is exhausted right after the last one.

    Covers bracketed items, whitespace/paren punctuation, and a non-ASCII
    word ("ícaro").
    """
    t = Tokenizer()
    s = u"[item1] word1 [item2] word2(ícaro)"
    t.init(StringIO(s))
    tokens = [
        Token(Kinds.ITEM, u"item1"),
        Token(Kinds.PUNCTUATION, u" "),
        Token(Kinds.WORD, u"word1"),
        Token(Kinds.PUNCTUATION, u" "),
        Token(Kinds.ITEM, u"item2"),
        Token(Kinds.PUNCTUATION, u" "),
        Token(Kinds.WORD, u"word2"),
        Token(Kinds.PUNCTUATION, u"("),
        Token(Kinds.WORD, u"ícaro"),
        Token(Kinds.PUNCTUATION, u")"),
    ]
    g = t.tokens()
    for expected in tokens:
        # next(g) works on both Python 2 and 3 (g.next() is Py2-only);
        # assertEqual replaces the deprecated assertEquals alias.
        actual = next(g)
        self.assertEqual(expected, actual)
    # BUG FIX: the original `try: g.next() / except StopIteration: pass`
    # passed silently even when the tokenizer yielded EXTRA tokens.
    # assertRaises makes exhaustion of the stream an explicit assertion.
    self.assertRaises(StopIteration, next, g)