Example #1
0
 def __init__(self,
              split_chars=' ',
              punctuation_chars=None,
              before_concatenation_rule=None,
              after_concatenation_rule=None):
     """Initialize the tokenizer, forwarding all options to the base class.

     Args:
         split_chars: Character(s) used to split the input text
             (defaults to a single space).
         punctuation_chars: Optional punctuation characters handled
             separately from regular tokens.
         before_concatenation_rule: Optional rule applied before
             token concatenation.
         after_concatenation_rule: Optional rule applied after
             token concatenation.
     """
     # Use super() instead of naming Tokenizer directly so the call
     # survives a base-class rename and cooperates with the MRO.
     super().__init__(split_chars=split_chars,
                      punctuation_chars=punctuation_chars,
                      before_concatenation_rule=before_concatenation_rule,
                      after_concatenation_rule=after_concatenation_rule)
Example #2
0
 def __init__(self, split_chars=' '):
     """Initialize the tokenizer with the given split character(s).

     Args:
         split_chars: Character(s) used to split the input text
             (defaults to a single space).
     """
     # super() is preferred over an explicit Tokenizer.__init__ call:
     # it tracks the MRO and survives a base-class rename. Pass the
     # argument by keyword for clarity and to match the base signature.
     super().__init__(split_chars=split_chars)