def loads(self, s):
    """Unpickle the byte string *s* and return the resulting object.

    Persistent references inside the stream are resolved through
    ``self._get_object``; a failed lookup surfaces as UnpicklingError.
    """
    unpickler = Unpickler(BytesIO(s))
    unpickler.persistent_load = self._get_object
    try:
        return unpickler.load()
    except KeyError as err:
        # A missing class/object id raises KeyError deep in the load;
        # re-raise it as the pickle-level error callers expect.
        raise UnpicklingError("Could not find Node class for %s" % err)
def load(filename): print "* Loading..." copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method) in_file = open(filename, "rb") unpickler = Unpickler(in_file) unpickler.persistent_load = persistent_load loaded = unpickler.load() print "* Loaded!" return loaded
def restore(pickle_buffer):
    """Rebuild and return the object pickled into *pickle_buffer*.

    The buffer is rewound first, so the whole stream is always read;
    persistent references resolve through the module-level
    ``persistent_load`` hook.
    """
    # Re-register bound-method pickling support before unpickling.
    copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)
    pickle_buffer.seek(0)  # rewind: caller may have just written it
    loader = Unpickler(pickle_buffer)
    loader.persistent_load = persistent_load
    restored = loader.load()
    return restored
def unpack(self, packet):
    """Deserialize *packet* (a pickle byte string) into an object.

    Persistent ids embedded in the stream are resolved to tokens via
    ``self.world.get_token``.

    NOTE(review): unpickling is unsafe on untrusted input -- confirm
    packets only ever come from a trusted peer.
    """
    from pickle import Unpickler
    from io import BytesIO

    stream = BytesIO(packet)
    unpickler = Unpickler(stream)

    def _resolve(token_id):
        # Persistent ids arrive as strings; the world keys tokens by int.
        return self.world.get_token(int(token_id))

    unpickler.persistent_load = _resolve
    return unpickler.load()
def _cloneByPickle(self, obj): """Returns a deep copy of a ZODB object, loading ghosts as needed. """ modifier = getToolByName(self, 'portal_modifier') callbacks = modifier.getOnCloneModifiers(obj) if callbacks is not None: pers_id, pers_load, inside_orefs, outside_orefs = callbacks[0:4] else: inside_orefs, outside_orefs = (), () stream = StringIO() p = Pickler(stream, 1) if callbacks is not None: p.persistent_id = pers_id p.dump(aq_base(obj)) approxSize = stream.tell() stream.seek(0) u = Unpickler(stream) if callbacks is not None: u.persistent_load = pers_load return approxSize, u.load(), inside_orefs, outside_orefs
def loads(self, data):
    """Unpickle *data* and return the object.

    Persistent references are resolved through ``self.persistent_load``.
    """
    source = StringIO(data)
    loader = Unpickler(source)
    loader.persistent_load = self.persistent_load
    return loader.load()