class TestTokenizer(TestCase):
    """Unit tests for Tokenizer's sentence-tokenization entry points.

    Relies on module-level fixtures defined elsewhere in this file:
    ``test_file`` (path to a sample document), ``test_paragraph`` (its raw
    text), and ``test_sentences`` (the expected list of sentence tokens).
    """

    def setUp(self):
        # Fresh tokenizer per test so state cannot leak between cases.
        self.tokenizer = Tokenizer()

    def test_from_file(self):
        """Tokenizing from a file path yields the expected sentence list."""
        tokens = self.tokenizer.from_file(test_file)
        self.assertListEqual(test_sentences, tokens)

    def test_from_text(self):
        """Tokenizing from an in-memory string yields the same sentences."""
        tokens = self.tokenizer.from_text(test_paragraph)
        self.assertListEqual(test_sentences, tokens)

    def test_stream_tokens(self):
        """Streaming API yields sentences one at a time, in order."""
        stream = self.tokenizer.stream_tokens(test_paragraph)
        for i, token in enumerate(stream):
            self.assertEqual(token, test_sentences[i])
    # NOTE(review): removed a stray, mis-indented duplicate of setUp that
    # followed the class body (it was a SyntaxError as written).