Example #1
def tokenize(s, *args, **kwargs):
    """ Returns a list of sentences, where punctuation marks have been split from words.
    """
    return parser.find_tokens(text_type(s), *args, **kwargs)
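This snippet is a module-level wrapper in which `parser` and `text_type` are defined elsewhere in the same module, so it is not runnable on its own. Below is a minimal usage sketch, assuming the wrapper is the `tokenize()` exported by the CLiPS Pattern library's `pattern.en` module; the sample sentence is only illustrative.

# Minimal usage sketch, assuming pattern.en exposes this tokenize() wrapper.
from pattern.en import tokenize

# tokenize() splits the input into sentences and separates punctuation from words.
sentences = tokenize("Hello, world! This is a test.")
for sentence in sentences:
    print(sentence)  # e.g. "Hello , world !" with punctuation split off as tokens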
Example #2
def split(s, token=[WORD, POS, CHUNK, PNP]):
    """ Returns a parsed Text from the given parsed string.
    """
    return Text(text_type(s), token)
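Here `Text`, `WORD`, `POS`, `CHUNK`, and `PNP` are module-level names from the same library, so the snippet again assumes its surrounding module. A minimal sketch of how it might be used, assuming `pattern.en` provides both `parse()` (which produces the tagged string) and this `split()` wrapper (which turns that string into a Text of Sentence objects):

# Minimal usage sketch, assuming pattern.en exposes parse() and this split() wrapper.
from pattern.en import parse, split

tagged = parse("The cat sat on the mat.")  # tagged, chunked string in the parser's slash format
text = split(tagged)                       # Text object: an iterable of Sentence objects
for sentence in text:
    for word in sentence.words:
        print(word.string, word.type)      # each token and its part-of-speech tag

Note that the default `token=[WORD, POS, CHUNK, PNP]` simply lists which annotation layers to keep when building the Text; the list is passed through unmodified, so the mutable default is harmless here.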