Example no. 1
0
 def test__load_files(self):
     """_load_files must return a non-empty collection of statements."""
     tp = TopicProcessor()
     # Topics are not inspected here; only the statements need to be non-empty.
     statements, _ = tp._load_files()
     # assertTrue reports the offending value on failure, unlike a bare fail()
     self.assertTrue(len(statements))
Example no. 2
0
 def test__build_dictionary(self):
     """_build_dictionarys must produce a dictionary with more than one entry."""
     tp = TopicProcessor()
     _, topics = tp._load_files()
     # Only the forward dictionary is asserted on; the reverse map and the
     # topic count are part of the return contract but unchecked here.
     dictionary, _, _ = tp._build_dictionarys(topics)
     # assertGreater gives a diagnostic with both operands, unlike fail()
     self.assertGreater(len(dictionary), 1)
Example no. 3
0
 def test__create_tokenizer(self):
     """_create_tokenizer must return a keras Tokenizer instance."""
     tp = TopicProcessor()
     statements, _ = tp._load_files()
     tokenizer = tp._create_tokenizer(statements)
     # assertIsInstance names the actual type on failure, unlike fail()
     self.assertIsInstance(tokenizer, keras.preprocessing.text.Tokenizer)
Example no. 4
0
 def test__vectorize_text(self):
     """Vectorizing statements must transform them away from the raw text."""
     processor = TopicProcessor()
     raw_statements, _ = processor._load_files()
     vectors = processor._vectorize_text(raw_statements)
     self.assertNotEqual(raw_statements, vectors)
Example no. 5
0
 def test__clean_text(self):
     """Cleaning must change the statements from their loaded form."""
     processor = TopicProcessor()
     raw_statements, raw_topics = processor._load_files()
     # The cleaned topics are not asserted on in this test.
     cleaned_statements, _ = processor._clean_text(raw_statements, raw_topics)
     self.assertNotEqual(raw_statements, cleaned_statements)