Example #1
    def test_to_vw(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2], tokenizer=self.tokenizer)
        result = StringIO()
        stream.to_vw(result)

        benchmark = " 1 doc1| failure:1 doomed:1\n 1 doc2| set:1 success:1\n"
        self.assertEqual(benchmark, result.getvalue())
Example #2
    def test_to_vw(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2],
                                  tokenizer=self.tokenizer)
        result = StringIO()
        stream.to_vw(result)

        benchmark = " 1 doc1| failure:1 doomed:1\n 1 doc2| set:1 success:1\n"
        self.assertEqual(benchmark, result.getvalue())
Example #3
    def test_to_scipysparse(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2], tokenizer=self.tokenizer)

        result = stream.to_scipysparse()
        benchmark = sparse.csr_matrix([[1, 1, 0, 0], [0, 0, 1, 1]])

        compare = result.toarray() == benchmark.toarray()
        self.assertTrue(compare.all())
Example #4
    def test_to_scipysparse(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2],
                                  tokenizer=self.tokenizer)

        result = stream.to_scipysparse()
        benchmark = sparse.csr_matrix([[1, 1, 0, 0], [0, 0, 1, 1]])

        compare = result.toarray() == benchmark.toarray()
        self.assertTrue(compare.all())
Example #5
    def test_token_stream(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2], tokenizer=self.tokenizer)
        token_benchmark = [["doomed", "failure"], ["set", "success"]]
        id_benchmark = ["doc1", "doc2"]
        token_result = []
        for each in stream.token_stream(cache_list=["doc_id"]):
            token_result.append(each)

        self.assertEqual(token_benchmark, token_result)
        self.assertEqual(id_benchmark, stream.__dict__["doc_id_cache"])
Example #6
    def test_token_stream(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2],
                                  tokenizer=self.tokenizer)
        token_benchmark = [['doomed', 'failure'], ['set', 'success']]
        id_benchmark = ['doc1', 'doc2']
        token_result = []
        for each in stream.token_stream(cache_list=['doc_id']):
            token_result.append(each)

        self.assertEqual(token_benchmark, token_result)
        self.assertEqual(id_benchmark, stream.__dict__['doc_id_cache'])
Example #7
    def test_token_stream(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2],
                                  tokenizer=self.tokenizer)
        token_benchmark = [['doomed', 'failure'],
                           ['set', 'success']]
        id_benchmark = ['doc1', 'doc2']
        token_result = []
        for each in stream.token_stream(cache_list=['doc_id']):
            token_result.append(each)

        self.assertEqual(token_benchmark, token_result)
        self.assertEqual(id_benchmark, stream.__dict__['doc_id_cache'])
Example #8
    def test_info_stream(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2], tokenizer=self.tokenizer)
        token_benchmark = [["doomed", "failure"], ["set", "success"]]
        text_benchmark = ["doomed to failure\n", "set for success\n"]

        token_result = []
        text_result = []
        for each in stream.info_stream():
            token_result.append(each["tokens"])
            text_result.append(each["text"])

        self.assertEqual(token_benchmark, token_result)
        self.assertEqual(text_benchmark, text_result)
Example #9
    def test_info_stream(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2],
                                  tokenizer=self.tokenizer)
        token_benchmark = [['doomed', 'failure'], ['set', 'success']]
        text_benchmark = ['doomed to failure\n', 'set for success\n']

        token_result = []
        text_result = []
        for each in stream.info_stream():
            token_result.append(each['tokens'])
            text_result.append(each['text'])

        self.assertEqual(token_benchmark, token_result)
        self.assertEqual(text_benchmark, text_result)
Example #10
    def test_info_stream(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2],
                                  tokenizer=self.tokenizer)
        token_benchmark = [['doomed', 'failure'],
                           ['set', 'success']]
        text_benchmark = ['doomed to failure\n', 'set for success\n']

        token_result = []
        text_result = []
        for each in stream.info_stream():
            token_result.append(each['tokens'])
            text_result.append(each['text'])

        self.assertEqual(token_benchmark, token_result)
        self.assertEqual(text_benchmark, text_result)
Example #11
    def test_to_scipysparse(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2],
                                  tokenizer=self.tokenizer)

        result = stream.to_scipysparse()
        benchmark = sparse.csr_matrix([[1, 1, 0, 0], [0, 0, 1, 1]])

        compare = result.toarray() == benchmark.toarray()
        self.assertTrue(compare.all())
Example #12
    def test_to_scipysparse(self):
        stream = TextFileStreamer(path_list=[self.doc1, self.doc2],
                                  tokenizer=self.tokenizer)

        result = stream.to_scipysparse()
        benchmark = sparse.csr_matrix([[1, 1, 0, 0], [0, 0, 1, 1]])

        compare = result.toarray() == benchmark.toarray()
        self.assertTrue(compare.all())
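Every example above refers to self.doc1, self.doc2, and self.tokenizer without showing the fixture that creates them. The sketch below is a minimal, hypothetical setUp/tearDown reconstructed purely from the benchmark values in the tests; the class name, file layout, and the whitespace/stopword tokenizer are assumptions rather than the library's actual test fixture, and it further assumes that TextFileStreamer accepts a plain callable as its tokenizer and derives doc_id from the file name.

import os
import shutil
import tempfile
import unittest


class TestTextFileStreamer(unittest.TestCase):
    # Hypothetical fixture reconstructed from the benchmark values in the
    # examples above; the real test module may set things up differently.

    def setUp(self):
        # Two one-line documents matching the text benchmarks
        # 'doomed to failure\n' and 'set for success\n'.  The files are named
        # doc1/doc2 on the assumption that TextFileStreamer derives doc_id
        # from the file name.
        self.tempdir = tempfile.mkdtemp()
        self.doc1 = os.path.join(self.tempdir, 'doc1')
        self.doc2 = os.path.join(self.tempdir, 'doc2')
        with open(self.doc1, 'w') as f:
            f.write('doomed to failure\n')
        with open(self.doc2, 'w') as f:
            f.write('set for success\n')

        def tokenizer(text):
            # Stand-in tokenizer: lowercase, split on whitespace, and drop
            # the stopwords 'to' and 'for', so the token benchmarks come out
            # as ['doomed', 'failure'] and ['set', 'success'].  The real
            # suite presumably uses a tokenizer object from the library.
            return [w for w in text.lower().split() if w not in ('to', 'for')]

        self.tokenizer = tokenizer

    def tearDown(self):
        shutil.rmtree(self.tempdir)

If those assumptions hold, the test methods above can be dropped into this class and run with python -m unittest, provided TextFileStreamer, sparse (scipy.sparse), and StringIO are imported in the test module.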