Example #1
from deltas import wikitext_split  # tokenizer assumed to come from the deltas library

def _process_tokens(text):
    # Tokenize the text (treating None as empty) and return the tokens as a list.
    return list(wikitext_split.tokenize(text or ""))
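A minimal usage sketch (the input string is hypothetical), assuming _process_tokens is defined as above with the deltas wikitext_split tokenizer:

tokens = _process_tokens("'''Bold''' text with a [[link]].")
print(tokens[:5])  # deltas tokens subclass str, so they print as plain text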