def to_text(self):
    """Render the multidict's query parameters as a unicode
    'key=value&key2=value2' string, escaping each element."""
    pairs = [u'='.join((escape_query_element(unicode(key), to_bytes=False),
                        escape_query_element(unicode(value), to_bytes=False)))
             for key, value in self.iteritems(multi=True)]
    return u'&'.join(pairs)
def _cell_char(done, cell_id):
    # One display character per cell: the cell's value if solved,
    # '.' for a solved-but-EMPTY cell, '?' while still unknown.
    if done.already_done(cell_id):
        value = done[cell_id][0]
        return unicode(value) if value != EMPTY else u_lit(".")
    return u_lit("?")


def print_pos(pos, output):
    """Pretty-print the hexagonal board *pos* to *output* as an ASCII diamond.

    The top half (rows 0..size-1) widens by one cell per row and the bottom
    half (rows size..2*size-2) narrows again; leading spaces keep rows
    centered.  Fix vs. original: locals renamed so the builtins ``hex`` and
    ``id`` are no longer shadowed, and the duplicated cell-formatting logic
    is factored into ``_cell_char``.
    """
    hex_grid = pos.hex   # renamed from 'hex' to avoid shadowing the builtin
    done = pos.done
    size = hex_grid.size
    # Top half (including the middle row).
    for y in xrange(size):
        print(u_lit(" ") * (size - y - 1), end=u_lit(""), file=output)
        for x in xrange(size + y):
            cell_id = hex_grid.get_by_pos((x, y)).id
            print(u_lit("%s ") % _cell_char(done, cell_id),
                  end=u_lit(""), file=output)
        print(end=u_lit("\n"), file=output)
    # Bottom half.
    for y in xrange(1, size):
        print(u_lit(" ") * y, end=u_lit(""), file=output)
        for x in xrange(y, size * 2 - 1):
            cell_id = hex_grid.get_by_pos((x, size + y - 1)).id
            print(u_lit("%s ") % _cell_char(done, cell_id),
                  end=u_lit(""), file=output)
        print(end=u_lit("\n"), file=output)
def test_unicode(self):
    """unicode() of an empty hash is JSON '{}'; a stored field round-trips."""
    self.assertEqual(json.loads(unicode(self.obj)), {})
    expected = random.randint(1, 10)
    self.obj.conn.hset(self.key, 'a', expected)
    decoded = json.loads(unicode(self.obj))
    self.assertEqual(int(decoded.get('a')), expected)
def to_bytes(self):
    """Render the multidict's query parameters as an encoded byte string.

    note: uses '%20' instead of '+' for spaces, based partially
    on observed behavior in chromium.
    """
    pairs = ['='.join((escape_query_element(unicode(key), to_bytes=True),
                       escape_query_element(unicode(value), to_bytes=True)))
             for key, value in self.iteritems(multi=True)]
    return '&'.join(pairs)
def test_get(self):
    """get() on a fresh object is the empty string; after put() it echoes
    the stored value."""
    self.assertEqual(self.obj.get(), unicode())
    expected = random.randint(1, 32)
    self.obj.put(expected)
    self.assertEqual(int(self.obj.get()), expected)
def parse_url(url_str, encoding=DEFAULT_ENCODING, strict=False):
    """Parse *url_str* into a dict of URL components.

    Bytes input is decoded with *encoding*; anything else is coerced to
    unicode.  *strict* selects the stricter URL regex.  Raises ValueError
    when the string does not match the URL grammar.

    Fix vs. original: the bare ``except:`` around the best-effort IDNA
    decode also swallowed SystemExit/KeyboardInterrupt; it now catches
    only ``Exception``.
    """
    if isinstance(url_str, str):
        url_str = url_str.decode(encoding)
    else:
        url_str = unicode(url_str)
    um = (_URL_RE_STRICT if strict else _URL_RE).match(url_str)
    try:
        gs = um.groupdict()
    except AttributeError:
        # um is None when the regex did not match at all.
        raise ValueError('could not parse url: %r' % url_str)
    if gs['authority']:
        try:
            # Best-effort IDNA decode of internationalized hostnames;
            # leave the authority as-is if decoding fails.
            gs['authority'] = gs['authority'].decode('idna')
        except Exception:
            pass
    else:
        gs['authority'] = ''
    user, pw, family, host, port = parse_authority(gs['authority'])
    gs['username'] = user
    gs['password'] = pw
    gs['family'] = family
    gs['host'] = host
    gs['port'] = port
    return gs
def test_put(self):
    """Repeated put() of one value makes get() report (value, repeat-count)."""
    value = random.randint(1, 32)
    count = random.randint(1, 5)
    for _ in xrange(count):
        self.obj.put(value)
    self.assertEqual(self.obj.get(), (unicode(value), count))
def _flatten_obj_instance(self, obj):
    """Recursively flatten an instance and return a json-friendly dict

    Dispatches on the object's capabilities in a fixed priority order:
    custom handler, module, dict subclass, __dict__-bearing instance,
    sequence subclass, non-complex object, __slots__ instance.  Objects
    matching none of these fall through and return None implicitly.
    """
    data = {}
    has_class = hasattr(obj, '__class__')
    has_dict = hasattr(obj, '__dict__')
    has_slots = not has_dict and hasattr(obj, '__slots__')
    # Support objects with __getstate__(); this ensures that
    # both __setstate__() and __getstate__() are implemented
    has_getstate = hasattr(obj, '__getstate__')
    has_getstate_support = has_getstate and hasattr(obj, '__setstate__')
    if has_class and not util.is_module(obj):
        module, name = _getclassdetail(obj)
        if self.unpicklable:
            #ksteinfe
            # Local patch: classes from the "decodes.core" package are tagged
            # with the bare class name instead of the dotted module path.
            if module[:12] == "decodes.core":
                data[tags.OBJECT] = name
            else:
                data[tags.OBJECT] = '%s.%s' % (module, name)
        # Check for a custom handler
        handler = handlers.get(type(obj))
        if handler is not None:
            return handler(self).flatten(obj, data)
    if util.is_module(obj):
        if self.unpicklable:
            data[tags.REPR] = '%s/%s' % (obj.__name__, obj.__name__)
        else:
            # Lossy mode: a module is reduced to its string form.
            data = unicode(obj)
        return data
    if util.is_dictionary_subclass(obj):
        self._flatten_dict_obj(obj, data)
        if has_getstate_support:
            self._getstate(obj, data)
        return data
    if has_dict:
        # Support objects that subclasses list and set
        if util.is_sequence_subclass(obj):
            return self._flatten_sequence_obj(obj, data)
        if has_getstate_support:
            return self._getstate(obj, data)
        # hack for zope persistent objects; this unghostifies the object
        getattr(obj, '_', None)
        return self._flatten_dict_obj(obj.__dict__, data)
    if util.is_sequence_subclass(obj):
        return self._flatten_sequence_obj(obj, data)
    if util.is_noncomplex(obj):
        return [self._flatten(v) for v in obj]
    if has_slots:
        return self._flatten_newstyle_with_slots(obj, data)
def flatten(self, obj, data):
    """Flatten a __reduce__-able object into *data* via its reduce tuple;
    in non-unpicklable mode just stringify it."""
    pickler = self.context
    if not pickler.unpicklable:
        return unicode(obj)
    reduce_cls, reduce_args = obj.__reduce__()
    flatten = pickler.flatten
    # First reduce argument is raw bytes: base64 it; flatten the rest.
    flat_args = [util.b64encode(reduce_args[0])]
    for extra in reduce_args[1:]:
        flat_args.append(flatten(extra, reset=False))
    data['__reduce__'] = (flatten(reduce_cls, reset=False), flat_args)
    return data
def http_request_host(self):  # TODO: name
    """Build an HTTP Host header value: idna-encoded host, wrapped in
    brackets for IPv6, with ':port' appended when a port is set."""
    encoded_host = self.host.encode('idna')
    if self.family == socket.AF_INET6:
        parts = ['[', encoded_host, ']']
    else:
        parts = [encoded_host]
    if self.port:
        parts.append(':')
        parts.append(unicode(self.port))
    return ''.join(parts)
def _flatten_key_value_pair(self, k, v, data):
    """Flatten a key/value pair into the passed-in dictionary.

    Unpicklable pairs are skipped.  Non-string keys are either encoded
    with the JSON_KEY tag (when ``self.keys`` is set) or stringified.

    Fix vs. original: the bare ``except:`` around ``repr(k)`` also
    swallowed SystemExit/KeyboardInterrupt; it now catches only
    ``Exception``.
    """
    if not util.is_picklable(k, v):
        return data
    # JSON object keys must be strings; encode or stringify anything else.
    if not isinstance(k, (str, unicode)):
        if self.keys:
            k = tags.JSON_KEY + encode(k, reset=False, keys=True,
                                       context=self, backend=self.backend,
                                       make_refs=self.make_refs)
        else:
            try:
                k = repr(k)
            except Exception:
                # Fall back to unicode() only when repr() itself fails.
                k = unicode(k)
    data[k] = self._flatten(v)
    return data
def __init__(self, url_str=None, encoding=None, strict=False):
    """Build a URL object from *url_str* (text, bytes, or another URL)."""
    encoding = encoding or DEFAULT_ENCODING
    # TODO: encoded query strings have an encoding behind the
    # percent-escaping, but otherwise is this member necessary?
    # if not, be more explicit
    self.encoding = encoding
    url_dict = {}
    if url_str:
        if isinstance(url_str, URL):
            url_str = url_str.to_text()  # better way to copy URLs?
        url_dict = parse_url(url_str, encoding=encoding, strict=strict)
    empty = unicode()
    self.path_params = empty  # TODO: support parsing path params?
    for attr in self._attrs:
        value = url_dict.get(attr, empty) or empty
        # Percent-decode only the attributes that are allowed to carry
        # escapes and actually contain one.
        if attr in self._quotable_attrs and '%' in value:
            value = unquote(value)
        setattr(self, attr, value)
    self.query_params = QueryParamDict.from_string(self.query)
def get_authority(self, idna=True):
    """Assemble the authority component: [user[:password]@]host[:port].

    IPv6 hosts are bracketed; other hosts are idna-encoded unless *idna*
    is false.
    """
    parts = []
    if self.username:
        parts.append(self.username)
        if self.password:
            parts.append(':')
            parts.append(self.password)
        parts.append('@')
    if self.host:
        if self.family == socket.AF_INET6:
            parts.append('[')
            parts.append(self.host)
            parts.append(']')
        elif idna:
            parts.append(self.host.encode('idna'))
        else:
            parts.append(self.host)
    if self.port:
        parts.append(':')
        parts.append(unicode(self.port))
    return u''.join(parts)
def parse_url(url):
    """Parse *url*, IDNA-encode its netloc, default an empty path to '/',
    and return the ParseResult with every field utf-8 encoded."""
    try:
        url = unicode(url)
    except UnicodeDecodeError:
        # Keep byte input as-is when it cannot be decoded.
        pass
    parsed = urlparse(url)
    if not (parsed.scheme and parsed.netloc):
        raise ValueError("invalid URL, no schema supplied: %r" % url)
    try:
        parsed = parsed._replace(
            netloc=parsed.netloc.encode('idna').decode('utf-8'))
    except UnicodeError:
        raise ValueError('invalid characters in url: %r' % parsed.netloc)
    if not parsed.path:
        parsed = parsed._replace(path=u'/')
    for field, value in parsed._asdict().items():
        parsed = parsed._replace(**{field: get_encoded(value)})
    return parsed
def itemgetter(obj, getter=operator.itemgetter(0)):
    """Return the item extracted from *obj* by *getter* (first element by
    default) coerced to unicode text."""
    value = getter(obj)
    return unicode(value)
def get_encoded(val):
    """Coerce *val* to utf-8 bytes, stringifying non-string input first."""
    if isinstance(val, (unicode, bytes)):
        return val.encode('utf-8')
    return unicode(val).encode('utf-8')
def test_unicode(self):
    """A fresh object serializes with zeroed count/total and state == 1."""
    snapshot = json.loads(unicode(self.obj))
    self.assertEqual(int(snapshot.get('count')), 0)
    self.assertEqual(int(snapshot.get('state')), 1)
    self.assertEqual(int(snapshot.get('total')), 0)
def _ds_load(obj):
    """Load the pickled dataset blob for *obj*, memoizing it in _ds_cache."""
    cache_key = unicode(obj)
    if cache_key not in _ds_cache:
        _ds_cache[cache_key] = blob.load(obj._name('pickle'), obj.jobid)
        # Fold any nested cache entries the loaded blob carries into the
        # shared cache as well.
        _ds_cache.update(_ds_cache[cache_key].get('cache', ()))
    return _ds_cache[cache_key]
def getvalue(self):
    """Concatenate the buffered fragments into one unicode string."""
    return unicode().join(self.data)