示例#1
0
 def as_words(self, python_object, master):
   """Convert python_object into a list of phil words.

   None and Auto are rendered as their sentinel names; any other
   object is formatted as one "%.10g" word per parameter.
   """
   if (python_object is None):
     return [tokenizer.word(value="None")]
   if (python_object is Auto):
     return [tokenizer.word(value="Auto")]
   words = []
   for parameter in python_object.parameters():
     words.append(tokenizer.word(value="%.10g" % parameter))
   return words
示例#2
0
 def as_words(self, python_object, master):
     """Render python_object as phil words.

     The None and Auto singletons map to their literal names; other
     objects yield one "%.10g"-formatted word per parameter.
     """
     for singleton, label in ((None, "None"), (Auto, "Auto")):
         if (python_object is singleton):
             return [tokenizer.word(value=label)]
     return [tokenizer.word(value="%.10g" % p)
             for p in python_object.parameters()]
示例#3
0
def parse_str(value):
  """Round-trip value through a triple-quote-tokenized phil word.

  Raises ValueError (re-raised unchanged) if value cannot be
  represented as a phil word.
  """
  #value = value.decode("utf-8")
  try:
    word = tokenizer.word(value, quote_token='"""')
    phil_string = str(word)
  # Python-3-compatible except syntax; the old "except ValueError, e:"
  # form is a SyntaxError on Py3, and the bound name was unused anyway.
  except ValueError:
    raise
示例#4
0
def parse_str(value):
  """Round-trip value through a triple-quote-tokenized phil word.

  Raises ValueError (re-raised unchanged) if value cannot be
  represented as a phil word.
  """
  #value = value.decode("utf-8")
  try:
    word = tokenizer.word(value, quote_token='"""')
    phil_string = str(word)
  # Python-3-compatible except syntax; "except ValueError, e:" is a
  # SyntaxError on Py3 and the bound exception was never used.
  except ValueError:
    raise
示例#5
0
def exercise_pickle():
    """Verify tokenizer objects survive a pickle round trip."""
    # TODO: verify this is intended change for py2/3 compat
    from six.moves import cPickle as pickle

    def roundtrip(module, obj):
        # serialize then immediately deserialize with the same module
        return module.loads(module.dumps(obj))

    for module in [pickle]:
        word = roundtrip(module, tokenizer.word(value="hello"))
        assert word.value == "hello"
        settings = roundtrip(module, tokenizer.settings(meta_comment="%"))
        assert settings.meta_comment == "%"
        iterator = roundtrip(module, tokenizer.word_iterator(input_string="all"))
        assert iterator.char_iter.input_string == "all"
示例#6
0
def exercise_pickle():
  """Verify tokenizer objects survive a pickle round trip.

  Exercises both pickle and cPickle on Python 2; cPickle was removed
  in Python 3 (its C implementation was folded into pickle), so it is
  skipped there instead of raising ImportError.
  """
  import pickle
  modules = [pickle]
  try:
    import cPickle  # Python 2 only
    modules.append(cPickle)
  except ImportError:
    pass
  for p in modules:
    word = p.loads(p.dumps(tokenizer.word(value="hello")))
    assert word.value == "hello"
    settings = p.loads(p.dumps(tokenizer.settings(meta_comment="%")))
    assert settings.meta_comment == "%"
    iterator = p.loads(p.dumps(tokenizer.word_iterator(input_string="all")))
    assert iterator.char_iter.input_string == "all"
示例#7
0
 def as_words(self, python_object, master):
   """Return python_object upper-cased as a single phil word.

   None maps to the literal word "None".
   """
   if (python_object is None):
     return [tokenizer.word(value="None")]
   upper_value = python_object.upper()
   return [tokenizer.word(value=upper_value)]
示例#8
0
 def as_words(self, python_object, master):
     """Encode python_object as phil words.

     The None/Auto singletons become bare sentinel words; anything
     else is stringified into a single double-quoted word.
     """
     sentinel = None
     if (python_object is None):
         sentinel = "None"
     elif (python_object is Auto):
         sentinel = "Auto"
     if sentinel is not None:
         return [tokenizer.word(value=sentinel)]
     quoted = tokenizer.word(value=str(python_object), quote_token='"')
     return [quoted]
示例#9
0
 def as_words(self, python_object, master):
   """Render python_object as a single double-quoted phil word.

   The None and Auto singletons are emitted unquoted under their
   literal names.
   """
   for singleton, text in ((None, "None"), (Auto, "Auto")):
     if python_object is singleton:
       return [tokenizer.word(value=text)]
   return [tokenizer.word(value=str(python_object), quote_token='"')]