Example #1
    def test_to_vw(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2], tokenizer=self.tokenizer)
        result = StringIO()
        stream.to_vw(result)

        benchmark = " 1 doc1| failure:1 doomed:1\n 1 doc2| set:1 success:1\n"
        self.assertEqual(benchmark, result.getvalue())
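All of these tests lean on the same implicit fixture: self.doc1 and self.doc2 are paths to two small text files, and self.tokenizer turns raw text into a token list. A minimal sketch of such a fixture follows; the file contents are recoverable from the test_info_stream benchmark below, but the temp-file layout, the SimpleTokenizer class, and its text_to_token_list hook are assumptions rather than the suite's actual setUp, as is the guess that the streamer is rosetta's rosetta.text.streamers.TextFileStreamer.

    # Hedged sketch of the shared fixture. Assumptions: TextFileStreamer takes
    # doc ids from file basenames and calls the tokenizer's text_to_token_list;
    # SimpleTokenizer is a hypothetical stand-in for the suite's tokenizer.
    import os
    import shutil
    import tempfile
    import unittest
    from io import StringIO

    from scipy import sparse
    from rosetta.text.streamers import TextFileStreamer


    class SimpleTokenizer(object):
        """Lowercase, split on whitespace, and drop a couple of stopwords."""
        stops = {'to', 'for'}

        def text_to_token_list(self, text):
            return [w for w in text.lower().split() if w not in self.stops]


    class TestTextFileStreamer(unittest.TestCase):
        def setUp(self):
            # Two one-line documents; the 'doc1'/'doc2' ids come from filenames.
            self.tempdir = tempfile.mkdtemp()
            self.doc1 = os.path.join(self.tempdir, 'doc1.txt')
            self.doc2 = os.path.join(self.tempdir, 'doc2.txt')
            with open(self.doc1, 'w') as f:
                f.write('doomed to failure\n')
            with open(self.doc2, 'w') as f:
                f.write('set for success\n')
            self.tokenizer = SimpleTokenizer()

        def tearDown(self):
            shutil.rmtree(self.tempdir)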
Example #2
    def test_to_scipyspare(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2], tokenizer=self.tokenizer)

        result = stream.to_scipysparse()
        benchmark = sparse.csr_matrix([[1, 1, 0, 0], [0, 0, 1, 1]])

        compare = result.toarray() == benchmark.toarray()
        self.assertTrue(compare.all())
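Densifying both matrices with toarray() is fine at this 2x4 scale, but the equality check can stay sparse. A sketch of an equivalent assertion, relying only on standard scipy.sparse behavior (!= between two sparse matrices yields a sparse boolean result whose nnz counts the mismatched entries):

    # Sketch: compare sparse matrices without densifying them.
    def assert_sparse_equal(testcase, a, b):
        testcase.assertEqual(a.shape, b.shape)
        testcase.assertEqual((a != b).nnz, 0)  # zero mismatching entries

Inside the test this would read assert_sparse_equal(self, result, benchmark).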
Example #3
    def test_token_stream(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2], tokenizer=self.tokenizer)
        token_benchmark = [["doomed", "failure"], ["set", "success"]]
        id_benchmark = ["doc1", "doc2"]
        token_result = []
        for each in stream.token_stream(cache_list=["doc_id"]):
            token_result.append(each)

        self.assertEqual(token_benchmark, token_result)
        self.assertEqual(id_benchmark, stream.__dict__["doc_id_cache"])
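Passing cache_list=['doc_id'] asks the streamer to record each document's id as a side effect while it yields token lists; afterwards the ids sit in a doc_id_cache attribute. Reaching through stream.__dict__, as the assertion above does, works, but since doc_id_cache is a plain instance attribute, ordinary attribute access is equivalent and reads better. A sketch:

    # Sketch: doc_id_cache is a plain instance attribute, so
    # stream.doc_id_cache == stream.__dict__['doc_id_cache'].
    tokens = list(stream.token_stream(cache_list=['doc_id']))
    ids = stream.doc_id_cache  # ['doc1', 'doc2'] for the fixture above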
Example #4
    def test_info_stream(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2], tokenizer=self.tokenizer)
        token_benchmark = [["doomed", "failure"], ["set", "success"]]
        text_benchmark = ["doomed to failure\n", "set for success\n"]

        token_result = []
        text_result = []
        for each in stream.info_stream():
            token_result.append(each["tokens"])
            text_result.append(each["text"])

        self.assertEqual(token_benchmark, token_result)
        self.assertEqual(text_benchmark, text_result)
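info_stream() yields one dict per document; the assertions above confirm 'tokens' and 'text' keys (the text is the raw file contents, the tokens are the tokenizer's output). The two accumulator lists can be filled in a single pass, as in this sketch, which assumes nothing beyond the keys the test already exercises:

    # Sketch: one pass over info_stream(), pulling both confirmed keys.
    infos = list(stream.info_stream())
    token_result = [info['tokens'] for info in infos]
    text_result = [info['text'] for info in infos]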