def test_split_tokenize(self):
    """
    Verify that split_tokenize breaks the handled punctuation out into
    separate tokens and drops surrounding whitespace.
    """
    tokens = DictionaryAgent.split_tokenize(' this is a test! ')
    self.assertEqual(tokens, ['this', 'is', 'a', 'test', '!'])
def construct_search_query(self, labels: List[str]) -> List[str]:
    """
    Construct the search query from the first label.

    :param labels: non-empty list of label strings; only the first entry is used
    :return: single-element list containing the space-joined tokens of the
        first label, with filtered words removed
    """
    assert labels
    tokens = DictionaryAgent.split_tokenize(labels[0])
    kept = [tok for tok in tokens if not filter_word(tok)]
    return [' '.join(kept)]