Example #1
    # Excerpt from PyThaiNLP's tokenizer test suite. Assumes
    # "from pythainlp.tokenize import Tokenizer"; FROZEN_DICT_TRIE is a
    # prebuilt dictionary trie from an older PyThaiNLP release, so its exact
    # import path depends on the installed version.
    def test_Tokenizer(self):
        # Empty and None inputs should both produce an empty token list.
        t_test = Tokenizer(FROZEN_DICT_TRIE)
        self.assertEqual(t_test.word_tokenize(""), [])
        t_test.set_tokenize_engine("longest")  # switch to longest-match segmentation
        self.assertEqual(t_test.word_tokenize(None), [])

        # Without a custom dictionary, the default word list is used;
        # a single Thai character comes back as one token.
        t_test = Tokenizer()
        self.assertEqual(t_test.word_tokenize("ก"), ["ก"])

Example #2
    # The same test against a newer PyThaiNLP release, where the default
    # dictionary trie is exposed as DEFAULT_WORD_DICT_TRIE. Assumes:
    # "from pythainlp.tokenize import Tokenizer, DEFAULT_WORD_DICT_TRIE".
    def test_Tokenizer(self):
        _tokenizer = Tokenizer(DEFAULT_WORD_DICT_TRIE)
        self.assertEqual(_tokenizer.word_tokenize(""), [])
        _tokenizer.set_tokenize_engine("longest")
        self.assertEqual(_tokenizer.word_tokenize(None), [])

        _tokenizer = Tokenizer()
        self.assertEqual(_tokenizer.word_tokenize("ก"), ["ก"])
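
For context, here is a minimal standalone sketch of the same API, assuming PyThaiNLP is installed. The two-word custom dictionary is made up for illustration, but Tokenizer, word_tokenize, and set_tokenize_engine are exactly the calls exercised by the tests above; Tokenizer also accepts a prebuilt trie (as in the tests) or a path to a dictionary file.

    from pythainlp.tokenize import Tokenizer

    # Hypothetical two-word custom dictionary for illustration only.
    custom_words = ["ทดสอบ", "ภาษาไทย"]
    tokenizer = Tokenizer(custom_words)
    print(tokenizer.word_tokenize("ทดสอบภาษาไทย"))  # expected: ['ทดสอบ', 'ภาษาไทย']

    # Switch the segmentation engine at runtime, as the tests do.
    tokenizer.set_tokenize_engine("longest")
    print(tokenizer.word_tokenize("ทดสอบภาษาไทย"))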