示例#1
0
def _process_tokens(text):
    """Tokenize *text* using the ``wikitext_split`` tokenizer.

    Falsy input (``None`` or ``""``) is treated as the empty string, so the
    function always returns a list (possibly empty) rather than raising.

    :param text: raw wikitext string, or ``None``
    :returns: list of tokens produced by ``wikitext_split.tokenize``
    """
    # list(...) replaces the pass-through comprehension `[t for t in ...]`:
    # same result, clearer intent, iterates at C speed.
    return list(wikitext_split.tokenize(text or ""))
示例#2
0
def _process_tokens(text):
    """Tokenize *text* using the ``wikitext_split`` tokenizer.

    Falsy input (``None`` or ``""``) is treated as the empty string, so the
    function always returns a list (possibly empty) rather than raising.

    :param text: raw wikitext string, or ``None``
    :returns: list of tokens produced by ``wikitext_split.tokenize``
    """
    # list(...) replaces the pass-through comprehension `[t for t in ...]`:
    # same result, clearer intent, iterates at C speed.
    return list(wikitext_split.tokenize(text or ""))