def __init__(self, string, contiguous_word_characters=None):
  """Initialize word iteration over *string*.

  When *contiguous_word_characters* is None, the tokenizer module's
  default character set is extended with a fixed group of extra
  symbols before the base word_iterator is constructed.
  """
  if contiguous_word_characters is None:
    extra = r"\*?[]^+-.:"
    contiguous_word_characters = (
      tokenizer.default_contiguous_word_characters + extra)
  cfg = tokenizer.settings(
    contiguous_word_characters=contiguous_word_characters)
  tokenizer.word_iterator.__init__(
    self, input_string=string, list_of_settings=[cfg])
 def __init__(self, string, contiguous_word_characters=None):
    """Set up word iteration over *string*.

    If *contiguous_word_characters* is None, the tokenizer default
    set is augmented with a fixed run of additional symbols.
    """
    if contiguous_word_characters is None:
      contiguous_word_characters = (
        tokenizer.default_contiguous_word_characters + r"\*?[]^+-.:")
    config = [tokenizer.settings(
      contiguous_word_characters=contiguous_word_characters)]
    tokenizer.word_iterator.__init__(
      self, input_string=string, list_of_settings=config)
Beispiel #3
0
def exercise_pickle():
    """Round-trip tokenizer objects through pickle and check key fields.

    Uses the stdlib ``pickle`` module directly: on Python 3 the old
    cPickle accelerator was merged into ``pickle``, so the previous
    ``six.moves`` indirection added a third-party dependency for no
    benefit.
    """
    import pickle
    for p in [pickle]:
        # word: the stored value must survive a dump/load cycle
        o = tokenizer.word(value="hello")
        restored = p.loads(p.dumps(o))
        assert restored.value == "hello"
        # settings: meta_comment must survive
        o = tokenizer.settings(meta_comment="%")
        restored = p.loads(p.dumps(o))
        assert restored.meta_comment == "%"
        # word_iterator: the underlying character stream must survive
        o = tokenizer.word_iterator(input_string="all")
        restored = p.loads(p.dumps(o))
        assert restored.char_iter.input_string == "all"
Beispiel #4
0
def exercise_pickle():
  """Round-trip tokenizer objects through every available pickle module.

  On Python 2 both ``pickle`` and the C-accelerated ``cPickle`` are
  exercised; on Python 3 ``cPickle`` no longer exists (it was merged
  into ``pickle``), so the unconditional ``import cPickle`` of the
  original would raise ModuleNotFoundError.  Guarding the import keeps
  the Python 2 behavior while making the test runnable on Python 3.
  """
  import pickle
  try:
    import cPickle
    modules = [pickle, cPickle]
  except ImportError:  # Python 3: cPickle is gone
    modules = [pickle]
  for p in modules:
    # word: the stored value must survive a dump/load cycle
    o = tokenizer.word(value="hello")
    restored = p.loads(p.dumps(o))
    assert restored.value == "hello"
    # settings: meta_comment must survive
    o = tokenizer.settings(meta_comment="%")
    restored = p.loads(p.dumps(o))
    assert restored.meta_comment == "%"
    # word_iterator: the underlying character stream must survive
    o = tokenizer.word_iterator(input_string="all")
    restored = p.loads(p.dumps(o))
    assert restored.char_iter.input_string == "all"