def test_backcompat_bytes_quoted_printable(self):
    """Test decoding bytes objects from older jsonpickle versions"""
    b1 = b'foo'
    b2 = b'foo\xff'

    # older versions of jsonpickle used a quoted-printable encoding
    expect = b1
    actual = self.unpickler.restore({tags.BYTES: unicode('foo')})
    self.assertEqual(expect, actual)

    expect = b2
    actual = self.unpickler.restore({tags.BYTES: unicode('foo=FF')})
    self.assertEqual(expect, actual)
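# Hedged illustration, not part of the test suite above: the legacy
# quoted-printable payload stored under tags.BYTES can be decoded with the
# standard-library quopri module.  The helper name decode_legacy_bytes is
# hypothetical and only shows the encoding the test exercises.
import quopri

def decode_legacy_bytes(payload):
    """Decode a quoted-printable string such as 'foo=FF' back into bytes."""
    return quopri.decodestring(payload.encode('ascii'))

assert decode_legacy_bytes('foo') == b'foo'
assert decode_legacy_bytes('foo=FF') == b'foo\xff'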
def test_unicode_mixin(self):
    obj = UnicodeMixin('test')
    self.assertEqual(type(obj), UnicodeMixin)
    self.assertEqual(unicode(obj), unicode('test'))

    # Encode into JSON
    content = jsonpickle.encode(obj)

    # Resurrect from JSON
    new_obj = jsonpickle.decode(content)
    new_obj += ' passed'

    self.assertEqual(unicode(new_obj), unicode('test passed'))
    self.assertEqual(type(new_obj), UnicodeMixin)
    self.assertTrue(new_obj.ok())
def _restore_from_dict(self, obj, instance, ignorereserved=True):
    restore_key = self._restore_key_fn()
    method = _obj_setattr

    for k, v in sorted(obj.items(), key=util.itemgetter):
        # ignore the reserved attribute
        if ignorereserved and k in tags.RESERVED:
            continue
        if isinstance(k, numeric_types):
            str_k = unicode(k)
        else:
            str_k = k
        self._namestack.append(str_k)
        k = restore_key(k)
        # step into the namespace
        value = self._restore(v)
        if (util.is_noncomplex(instance) or
                util.is_dictionary_subclass(instance)):
            instance[k] = value
        else:
            setattr(instance, k, value)

        # This instance has an instance variable named `k` that is
        # currently a proxy and must be replaced
        if isinstance(value, _Proxy):
            self._proxies.append((instance, k, value, method))

        # step out
        self._namestack.pop()
def flatten(self, obj, data):
    pickler = self.context
    if not pickler.unpicklable:
        return unicode(obj)
    flatten = pickler.flatten
    data['__reduce__'] = [flatten(i, reset=False) for i in obj.__reduce__()]
    return data
def _flatten_obj_instance(self, obj):
    """Recursively flatten an instance and return a json-friendly dict
    """
    data = {}
    has_class = hasattr(obj, "__class__")
    has_dict = hasattr(obj, "__dict__")
    has_slots = not has_dict and hasattr(obj, "__slots__")
    has_getstate = has_dict and hasattr(obj, "__getstate__")
    has_getstate_support = has_getstate and hasattr(obj, "__setstate__")
    HandlerClass = handlers.get(type(obj))

    if has_class and not util.is_module(obj):
        module, name = _getclassdetail(obj)
        if self.unpicklable:
            data[tags.OBJECT] = "%s.%s" % (module, name)

    # Check for a custom handler
    if HandlerClass:
        handler = HandlerClass(self)
        flat_obj = handler.flatten(obj, data)
        self._mkref(flat_obj)
        return flat_obj

    if util.is_module(obj):
        if self.unpicklable:
            data[tags.REPR] = "%s/%s" % (obj.__name__, obj.__name__)
        else:
            data = unicode(obj)
        return data

    if util.is_dictionary_subclass(obj):
        return self._flatten_dict_obj(obj, data)

    if has_dict:
        # Support objects that subclasses list and set
        if util.is_sequence_subclass(obj):
            return self._flatten_sequence_obj(obj, data)

        # Support objects with __getstate__(); this ensures that
        # both __setstate__() and __getstate__() are implemented
        if has_getstate_support:
            state = self._flatten(obj.__getstate__())
            if self.unpicklable:
                data[tags.STATE] = state
            else:
                data = state
            return data

        # hack for zope persistent objects; this unghostifies the object
        getattr(obj, "_", None)
        return self._flatten_dict_obj(obj.__dict__, data)

    if util.is_sequence_subclass(obj):
        return self._flatten_sequence_obj(obj, data)

    if util.is_noncomplex(obj):
        return [self._flatten(v) for v in obj]

    if has_slots:
        return self._flatten_newstyle_with_slots(obj, data)
def test_backend(self):
    expected_pickled = unicode(
        '{"things":[{'
        '"child":null,'
        '"name":"data",'
        '"py/object":"backend_test.Thing"}'
        ']}')
    self.assertEncodeDecode(expected_pickled)
def test_object_dict_keys(self):
    """Test that we handle random objects as keys."""
    thing = Thing('random')
    pickle = jsonpickle.encode({thing: True})
    actual = jsonpickle.decode(pickle)
    self.assertEqual(actual, {unicode('Thing("random")'): True})
def flatten_dtype(self, dtype, data):
    if hasattr(dtype, 'tostring'):
        data['dtype'] = dtype.tostring()
    else:
        dtype = unicode(dtype)
        prefix = '(numpy.record, '
        if dtype.startswith(prefix):
            dtype = dtype[len(prefix):-1]
        data['dtype'] = dtype
def flatten(self, obj, data):
    pickler = self.context
    if not pickler.unpicklable:
        return unicode(obj)
    cls, args = obj.__reduce__()
    flatten = pickler.flatten
    payload = util.b64encode(args[0])
    args = [payload] + [flatten(i, reset=False) for i in args[1:]]
    data['__reduce__'] = (flatten(cls, reset=False), args)
    return data
def test_backend(self):
    if PY3:
        return self.skip('no demjson for python3')
    expected_pickled = unicode(
        '{"things":[{'
        '"child":null,'
        '"name":"data",'
        '"py/object":"backend_test.Thing"}'
        ']}')
    self.assertEncodeDecode(expected_pickled)
def _flatten_key_value_pair(self, k, v, data):
    """Flatten a key/value pair into the passed-in dictionary."""
    if not util.is_picklable(k, v):
        return data
    if not isinstance(k, (str, unicode)):
        try:
            k = repr(k)
        except:
            k = unicode(k)
    data[k] = self.flatten(v)
    return data
def test_backend(self):
    if PY3:
        self.skipTest('no demjson for python3')
        return
    expected_pickled = unicode(
        '{"things":[{'
        '"child":null,'
        '"name":"data",'
        '"py/object":"jsonpickle._samples.Thing"}'
        ']}')
    self.assertEncodeDecode(expected_pickled)
def _restore_dict(self, obj):
    data = {}
    restore_key = self._restore_key_fn()
    for k, v in sorted(obj.items(), key=util.itemgetter):
        if isinstance(k, numeric_types):
            str_k = unicode(k)
        else:
            str_k = k
        self._namestack.append(str_k)
        k = restore_key(k)
        data[k] = self._restore(v)
        self._namestack.pop()
    return data
def _flatten_key_value_pair(self, k, v, data):
    """Flatten a key/value pair into the passed-in dictionary."""
    if not util.is_picklable(k, v):
        return data

    if not isinstance(k, (str, unicode)):
        if self.keys:
            k = tags.JSON_KEY + encode(
                k, reset=False, keys=True,
                context=self, backend=self.backend,
                make_refs=self.make_refs)
        else:
            try:
                k = repr(k)
            except:
                k = unicode(k)

    data[k] = self._flatten(v)
    return data
def _flatten_key_value_pair(self, k, v, data):
    """Flatten a key/value pair into the passed-in dictionary."""
    if not util.is_picklable(k, v):
        return data

    if self.keys:
        if not isinstance(k, (str, unicode)) or k.startswith(tags.JSON_KEY):
            k = self._escape_key(k)
    else:
        if not isinstance(k, (str, unicode)):
            try:
                k = repr(k)
            except:
                k = unicode(k)

    data[k] = self._flatten(v)
    return data
def _flatten_key_value_pair(self, k, v, data):
    """Flatten a key/value pair into the passed-in dictionary."""
    if not util.is_picklable(k, v):
        return data

    if self.keys:
        if not isinstance(k, (str, unicode)) or k.startswith(tags.JSON_KEY):
            k = self._escape_key(k)
    else:
        if k is None:
            k = 'null'  # for compatibility with common json encoders
        if not isinstance(k, (str, unicode)):
            try:
                k = repr(k)
            except:
                k = unicode(k)

    data[k] = self._flatten(v)
    return data
def _flatten_key_value_pair(self, k, v, data):
    """Flatten a key/value pair into the passed-in dictionary."""
    if not util.is_picklable(k, v):
        return data

    if self.keys:
        if not isinstance(k, (str, unicode)) or k.startswith(tags.JSON_KEY):
            k = self._escape_key(k)
    else:
        if k is None:
            k = 'null'  # for compatibility with common json encoders
        if self.numeric_keys and isinstance(k, numeric_types):
            pass
        elif not isinstance(k, (str, unicode)):
            try:
                k = repr(k)
            except:
                k = unicode(k)

    data[k] = self._flatten(v)
    return data
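# Hedged standalone sketch, not part of jsonpickle: the key-handling precedence
# from the newest _flatten_key_value_pair above, minus the self.keys escaping
# branch.  The function name normalize_key is hypothetical and plain str/repr
# are used instead of the py2/py3 unicode alias for brevity.
def normalize_key(k, numeric_keys=False):
    if k is None:
        return 'null'            # match common json encoders
    if numeric_keys and isinstance(k, (int, float)):
        return k                 # let the json backend stringify numbers
    if not isinstance(k, str):
        try:
            return repr(k)
        except Exception:
            return str(k)
    return k

# e.g. normalize_key((1, 2)) == '(1, 2)' and normalize_key(None) == 'null'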
def test_bytes_unicode(self):
    b1 = b'foo'
    b2 = b'foo\xff'
    u1 = unicode('foo')

    # unicode strings get encoded/decoded as is
    encoded = self.pickler.flatten(u1)
    self.assertTrue(encoded == u1)
    self.assertTrue(type(encoded) is unicode)
    decoded = self.unpickler.restore(encoded)
    self.assertTrue(decoded == u1)
    self.assertTrue(type(decoded) is unicode)

    # bytestrings are wrapped in PY3 but in PY2 we try to decode first
    encoded = self.pickler.flatten(b1)
    if PY2:
        self.assertEqual(encoded, u1)
        self.assertEqual(type(encoded), unicode)
    else:
        self.assertNotEqual(encoded, u1)
        b64ustr = base64.encodestring(b'foo').decode('utf-8')
        self.assertEqual({tags.B64: b64ustr}, encoded)
        self.assertEqual(type(encoded[tags.B64]), unicode)
    decoded = self.unpickler.restore(encoded)
    self.assertTrue(decoded == b1)
    if PY2:
        self.assertTrue(type(decoded) is unicode)
    else:
        self.assertTrue(type(decoded) is bytes)

    # bytestrings that we can't decode to UTF-8 will always be wrapped
    encoded = self.pickler.flatten(b2)
    self.assertNotEqual(encoded, b2)
    b64ustr = base64.encodestring(b'foo\xff').decode('utf-8')
    self.assertEqual({tags.B64: b64ustr}, encoded)
    self.assertEqual(type(encoded[tags.B64]), unicode)
    decoded = self.unpickler.restore(encoded)
    self.assertEqual(decoded, b2)
    self.assertTrue(type(decoded) is bytes)
def test_bytes_unicode(self):
    b1 = b'foo'
    b2 = b'foo\xff'
    u1 = unicode('foo')

    # unicode strings get encoded/decoded as is
    encoded = self.pickler.flatten(u1)
    self.assertTrue(encoded == u1)
    self.assertTrue(type(encoded) is unicode)
    decoded = self.unpickler.restore(encoded)
    self.assertTrue(decoded == u1)
    self.assertTrue(type(decoded) is unicode)

    # bytestrings are wrapped in PY3 but in PY2 we try to decode first
    encoded = self.pickler.flatten(b1)
    if PY2:
        self.assertTrue(encoded == u1)
        self.assertTrue(type(encoded) is unicode)
    else:
        self.assertTrue(encoded != u1)
        self.assertTrue(encoded == {tags.BYTES: 'foo'})
        self.assertTrue(type(encoded[tags.BYTES]) is unicode)
    decoded = self.unpickler.restore(encoded)
    self.assertTrue(decoded == b1)
    if PY2:
        self.assertTrue(type(decoded) is unicode)
    else:
        self.assertTrue(type(decoded) is bytes)

    # bytestrings that we can't decode to UTF-8 will always be wrapped
    encoded = self.pickler.flatten(b2)
    self.assertTrue(encoded != b2)
    self.assertTrue(encoded == {tags.BYTES: 'foo=FF'})
    self.assertTrue(type(encoded[tags.BYTES]) is unicode)
    decoded = self.unpickler.restore(encoded)
    self.assertTrue(decoded == b2)
    self.assertTrue(type(decoded) is bytes)
def _flatten_obj_instance(self, obj):
    """Recursively flatten an instance and return a json-friendly dict
    """
    data = {}
    has_class = hasattr(obj, '__class__')
    has_dict = hasattr(obj, '__dict__')
    has_slots = not has_dict and hasattr(obj, '__slots__')
    has_getstate = has_dict and hasattr(obj, '__getstate__')
    has_getstate_support = has_getstate and hasattr(obj, '__setstate__')
    HandlerClass = handlers.BaseHandler._registry.get(type(obj))

    if has_class and not util.is_module(obj):
        module, name = _getclassdetail(obj)
        if self.unpicklable:
            data[tags.OBJECT] = '%s.%s' % (module, name)

    # Check for a custom handler
    if HandlerClass:
        handler = HandlerClass(self)
        flat_obj = handler.flatten(obj, data)
        self._mkref(flat_obj)
        return flat_obj

    if util.is_module(obj):
        if self.unpicklable:
            data[tags.REPR] = '%s/%s' % (obj.__name__, obj.__name__)
        else:
            data = unicode(obj)
        return data

    if util.is_dictionary_subclass(obj):
        return self._flatten_dict_obj(obj, data)

    if has_dict:
        # Support objects that subclasses list and set
        if util.is_collection_subclass(obj):
            return self._flatten_collection_obj(obj, data)

        # Support objects with __getstate__(); this ensures that
        # both __setstate__() and __getstate__() are implemented
        if has_getstate_support:
            state = self.flatten(obj.__getstate__())
            if self.unpicklable:
                data[tags.STATE] = state
            else:
                data = state
            return data

        # hack for zope persistent objects; this unghostifies the object
        getattr(obj, '_', None)
        return self._flatten_dict_obj(obj.__dict__, data)

    if util.is_collection_subclass(obj):
        return self._flatten_collection_obj(obj, data)

    if util.is_noncomplex(obj):
        return [self.flatten(v) for v in obj]

    if has_slots:
        return self._flatten_newstyle_with_slots(obj, data)
def flatten(self, obj, data):
    data['dtype'] = unicode(obj.dtype)
    data['values'] = self.context.flatten(obj.tolist(), reset=False)
    return data
def itemgetter(obj, getter=operator.itemgetter(0)):
    return unicode(getter(obj))
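# Hedged usage sketch, not part of util.py: this itemgetter is what the
# unpickler methods above pass to sorted() so that dict items with mixed key
# types compare by the unicode form of the key instead of raising an int/str
# comparison error under Python 3.  Assumes the py2/py3 `unicode` alias used
# throughout these modules (i.e. `str` on Python 3).
mixed = {3: 'c', 'a': 'b'}
ordered = sorted(mixed.items(), key=itemgetter)
# ordered == [(3, 'c'), ('a', 'b')] because unicode('3') < unicode('a')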
def test_unicode(self):
    self.assertEqual(unicode('a string'), self.pickler.flatten('a string'))
    self.assertEqual(unicode('a string'),
                     self.unpickler.restore('a string'))
def _flatten_obj_instance(self, obj):
    """Recursively flatten an instance and return a json-friendly dict
    """
    data = {}
    has_class = hasattr(obj, '__class__')
    has_dict = hasattr(obj, '__dict__')
    has_slots = not has_dict and hasattr(obj, '__slots__')
    has_getnewargs = hasattr(obj, '__getnewargs__')
    has_getnewargs_ex = hasattr(obj, '__getnewargs_ex__')
    has_getinitargs = hasattr(obj, '__getinitargs__')
    has_reduce, has_reduce_ex = util.has_reduce(obj)

    # Support objects with __getstate__(); this ensures that
    # both __setstate__() and __getstate__() are implemented
    has_getstate = hasattr(obj, '__getstate__')

    if has_class:
        cls = obj.__class__
    else:
        cls = type(obj)

    # Check for a custom handler
    class_name = util.importable_name(cls)
    handler = handlers.get(class_name)
    if handler is not None:
        if self.unpicklable:
            data[tags.OBJECT] = class_name
        return handler(self).flatten(obj, data)

    reduce_val = None
    if has_class and not util.is_module(obj):
        if self.unpicklable:
            class_name = util.importable_name(cls)
            data[tags.OBJECT] = class_name

    # test for a reduce implementation, and redirect before doing anything else
    # if that is what reduce requests
    if has_reduce_ex:
        try:
            # we're implementing protocol 2
            reduce_val = obj.__reduce_ex__(2)
        except TypeError:
            # A lot of builtin types have a reduce which just raises a TypeError
            # we ignore those
            pass

    if has_reduce and not reduce_val:
        try:
            reduce_val = obj.__reduce__()
        except TypeError:
            # A lot of builtin types have a reduce which just raises a TypeError
            # we ignore those
            pass

    if reduce_val:
        try:
            # At this stage, we only handle the case where __reduce__ returns a string
            # other reduce functionality is implemented further down
            if isinstance(reduce_val, (str, unicode)):
                varpath = iter(reduce_val.split('.'))
                # curmod will be transformed by the loop into the value to pickle
                curmod = sys.modules[next(varpath)]
                for modname in varpath:
                    curmod = getattr(curmod, modname)
                # replace obj with value retrieved
                return self._flatten(curmod)
        except KeyError:
            # well, we can't do anything with that, so we ignore it
            pass

    if has_getnewargs_ex:
        data[tags.NEWARGSEX] = list(
            map(self._flatten, obj.__getnewargs_ex__()))

    if has_getnewargs and not has_getnewargs_ex:
        data[tags.NEWARGS] = self._flatten(obj.__getnewargs__())

    if has_getinitargs:
        data[tags.INITARGS] = self._flatten(obj.__getinitargs__())

    if has_getstate:
        try:
            state = obj.__getstate__()
        except TypeError:
            # Has getstate but it cannot be called, e.g. file descriptors
            # in Python3
            self._pickle_warning(obj)
            return None
        else:
            return self._getstate(state, data)

    if util.is_module(obj):
        if self.unpicklable:
            data[tags.REPR] = '%s/%s' % (obj.__name__, obj.__name__)
        else:
            data = unicode(obj)
        return data

    if util.is_dictionary_subclass(obj):
        self._flatten_dict_obj(obj, data)
        return data

    if util.is_sequence_subclass(obj):
        return self._flatten_sequence_obj(obj, data)

    if util.is_noncomplex(obj):
        return [self._flatten(v) for v in obj]

    if util.is_iterator(obj):
        # force list in python 3
        data[tags.ITERATOR] = list(
            map(self._flatten, islice(obj, self._max_iter)))
        return data

    if reduce_val and not isinstance(reduce_val, (str, unicode)):
        # at this point, reduce_val should be some kind of iterable
        # pad out to len 5
        rv_as_list = list(reduce_val)
        insufficiency = 5 - len(rv_as_list)
        if insufficiency:
            rv_as_list += [None] * insufficiency

        if rv_as_list[0].__name__ == '__newobj__':
            rv_as_list[0] = tags.NEWOBJ

        data[tags.REDUCE] = list(map(self._flatten, rv_as_list))

        # lift out iterators, so we don't have to iterator and uniterator their content
        # on unpickle
        if data[tags.REDUCE][3]:
            data[tags.REDUCE][3] = data[tags.REDUCE][3][tags.ITERATOR]
        if data[tags.REDUCE][4]:
            data[tags.REDUCE][4] = data[tags.REDUCE][4][tags.ITERATOR]

        return data

    if has_dict:
        # Support objects that subclasses list and set
        if util.is_sequence_subclass(obj):
            return self._flatten_sequence_obj(obj, data)

        # hack for zope persistent objects; this unghostifies the object
        getattr(obj, '_', None)
        return self._flatten_dict_obj(obj.__dict__, data)

    if has_slots:
        return self._flatten_newstyle_with_slots(obj, data)

    self._pickle_warning(obj)
    return None
def test_is_primitive_unicode(self):
    self.assertTrue(util.is_primitive(unicode('hello')))
    self.assertTrue(util.is_primitive(unicode('')))
    self.assertTrue(util.is_primitive(unicode('hello')))
def test_is_primitive_unicode(self):
    self.assertTrue(is_primitive(unicode('hello')))
    self.assertTrue(is_primitive(unicode('')))
    self.assertTrue(is_primitive(unicode('hello')))
def _flatten_obj_instance(self, obj):
    """Recursively flatten an instance and return a json-friendly dict
    """
    data = {}
    has_class = hasattr(obj, '__class__')
    has_dict = hasattr(obj, '__dict__')
    has_slots = not has_dict and hasattr(obj, '__slots__')
    has_getnewargs = util.has_method(obj, '__getnewargs__')
    has_getnewargs_ex = util.has_method(obj, '__getnewargs_ex__')
    has_getinitargs = util.has_method(obj, '__getinitargs__')
    has_reduce, has_reduce_ex = util.has_reduce(obj)

    # Support objects with __getstate__(); this ensures that
    # both __setstate__() and __getstate__() are implemented
    has_getstate = hasattr(obj, '__getstate__')
    # not using has_method since __getstate__() is handled separately below

    if has_class:
        cls = obj.__class__
    else:
        cls = type(obj)

    # Check for a custom handler
    class_name = util.importable_name(cls)
    handler = handlers.get(cls, handlers.get(class_name))
    if handler is not None:
        if self.unpicklable:
            data[tags.OBJECT] = class_name
        return handler(self).flatten(obj, data)

    reduce_val = None
    if has_class and not util.is_module(obj):
        if self.unpicklable:
            class_name = util.importable_name(cls)
            data[tags.OBJECT] = class_name

    # test for a reduce implementation, and redirect before doing anything else
    # if that is what reduce requests
    if has_reduce_ex:
        try:
            # we're implementing protocol 2
            reduce_val = obj.__reduce_ex__(2)
        except TypeError:
            # A lot of builtin types have a reduce which just raises a TypeError
            # we ignore those
            pass

    if has_reduce and not reduce_val:
        try:
            reduce_val = obj.__reduce__()
        except TypeError:
            # A lot of builtin types have a reduce which just raises a TypeError
            # we ignore those
            pass

    if reduce_val:
        try:
            # At this stage, we only handle the case where __reduce__ returns a string
            # other reduce functionality is implemented further down
            if isinstance(reduce_val, (str, unicode)):
                varpath = iter(reduce_val.split('.'))
                # curmod will be transformed by the loop into the value to pickle
                curmod = sys.modules[next(varpath)]
                for modname in varpath:
                    curmod = getattr(curmod, modname)
                # replace obj with value retrieved
                return self._flatten(curmod)
        except KeyError:
            # well, we can't do anything with that, so we ignore it
            pass

    if has_getnewargs_ex:
        data[tags.NEWARGSEX] = list(
            map(self._flatten, obj.__getnewargs_ex__()))

    if has_getnewargs and not has_getnewargs_ex:
        data[tags.NEWARGS] = self._flatten(obj.__getnewargs__())

    if has_getinitargs:
        data[tags.INITARGS] = self._flatten(obj.__getinitargs__())

    if has_getstate:
        try:
            state = obj.__getstate__()
        except TypeError:
            # Has getstate but it cannot be called, e.g. file descriptors
            # in Python3
            self._pickle_warning(obj)
            return None
        else:
            return self._getstate(state, data)

    if util.is_module(obj):
        if self.unpicklable:
            data[tags.REPR] = '%s/%s' % (obj.__name__, obj.__name__)
        else:
            data = unicode(obj)
        return data

    if util.is_dictionary_subclass(obj):
        self._flatten_dict_obj(obj, data)
        return data

    if util.is_sequence_subclass(obj):
        return self._flatten_sequence_obj(obj, data)

    if util.is_noncomplex(obj):
        return [self._flatten(v) for v in obj]

    if util.is_iterator(obj):
        # force list in python 3
        data[tags.ITERATOR] = list(
            map(self._flatten, islice(obj, self._max_iter)))
        return data

    if reduce_val and not isinstance(reduce_val, (str, unicode)):
        # at this point, reduce_val should be some kind of iterable
        # pad out to len 5
        rv_as_list = list(reduce_val)
        insufficiency = 5 - len(rv_as_list)
        if insufficiency:
            rv_as_list += [None] * insufficiency

        if rv_as_list[0].__name__ == '__newobj__':
            rv_as_list[0] = tags.NEWOBJ

        data[tags.REDUCE] = list(map(self._flatten, rv_as_list))

        # lift out iterators, so we don't have to iterator and uniterator their content
        # on unpickle
        if data[tags.REDUCE][3]:
            data[tags.REDUCE][3] = data[tags.REDUCE][3][tags.ITERATOR]
        if data[tags.REDUCE][4]:
            data[tags.REDUCE][4] = data[tags.REDUCE][4][tags.ITERATOR]

        return data

    if has_dict:
        # Support objects that subclasses list and set
        if util.is_sequence_subclass(obj):
            return self._flatten_sequence_obj(obj, data)

        # hack for zope persistent objects; this unghostifies the object
        getattr(obj, '_', None)
        return self._flatten_dict_obj(obj.__dict__, data)

    if has_slots:
        return self._flatten_newstyle_with_slots(obj, data)

    # catchall return for data created above without a return
    # (e.g. __getnewargs__ is not supposed to be the end of the story)
    if data:
        return data

    self._pickle_warning(obj)
    return None
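# Hedged, self-contained example of the wire format the flattener above
# produces for a plain instance with a __dict__.  The Point class is
# illustrative and not taken from the jsonpickle sources or tests.
import jsonpickle

class Point(object):
    def __init__(self, x, y):
        self.x = x
        self.y = y

encoded = jsonpickle.encode(Point(1, 2))
# roughly: {"py/object": "__main__.Point", "x": 1, "y": 2}
# objects with __getstate__/__setstate__ instead carry their state under
# tags.STATE, and bare iterators are captured under tags.ITERATOR.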
def test_is_primitive_unicode(self):
    self.assertTrue(is_primitive(unicode("hello")))
    self.assertTrue(is_primitive(unicode("")))
    self.assertTrue(is_primitive(unicode("hello")))
def blocks_from_given_navigator(self, a_navigator):
    return lambda index: \
        self._sectors[unicode(a_navigator.get_sector())][index - 1]
def flatten(self, obj, data):
    data['dtype'] = unicode(obj)
    return data
def add_block(self, sector, block):
    self._sectors[unicode(sector)].append(block)
    return len(self._sectors[unicode(sector)])