def cloneByPickle(obj, ignore_list=()):
    """Makes a copy of a ZODB object, loading ghosts as needed.

    Ignores specified objects along the way, replacing them with
    placeholder objects in the copy.
    """
    ignore_dict = {}
    for o in ignore_list:
        ignore_dict[id(o)] = o

    def persistent_id(ob, ignore_dict=ignore_dict):
        if id(ob) in ignore_dict:
            return 'ignored'
        if getattr(ob, '_p_changed', 0) is None:
            ob._p_changed = 0
        return None

    def persistent_load(ref):
        assert ref == 'ignored'
        # Return a placeholder object that will be replaced by
        # removeNonVersionedData().
        placeholder = SimpleItem()
        placeholder.id = "ignored_subobject"
        return placeholder

    stream = BytesIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()

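# Illustrative sketch (not part of the original module): the same
# "deep copy via pickle, short-circuiting selected sub-objects" pattern
# shown with only the standard library.  The Node class, clone_ignoring()
# and the 'ignored' marker value are assumptions made up for this example;
# the real cloneByPickle() above relies on SimpleItem/Pickler/Unpickler
# imported elsewhere.
import io
import pickle


class Node(object):
    def __init__(self, name, child=None):
        self.name = name
        self.child = child


def clone_ignoring(obj, ignore_list=()):
    ignored_ids = {id(o) for o in ignore_list}

    class _Pickler(pickle.Pickler):
        def persistent_id(self, ob):
            # Objects in the ignore list are written as a persistent
            # reference instead of being pickled.
            return 'ignored' if id(ob) in ignored_ids else None

    class _Unpickler(pickle.Unpickler):
        def persistent_load(self, ref):
            assert ref == 'ignored'
            return None  # placeholder for the ignored sub-object

    stream = io.BytesIO()
    _Pickler(stream, 2).dump(obj)
    stream.seek(0)
    return _Unpickler(stream).load()


if __name__ == '__main__':
    shared = Node('shared')
    root = Node('root', child=shared)
    copy = clone_ignoring(root, ignore_list=[shared])
    assert copy.name == 'root' and copy.child is None
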
def dumps(obj):
    def getpersid(obj):
        if hasattr(obj, 'getoid'):
            return obj.getoid()
        return None

    s = BytesIO()
    p = Pickler(s, _protocol)
    if sys.version_info[0] < 3:
        p.inst_persistent_id = getpersid
    else:
        p.persistent_id = getpersid
    p.dump(obj)
    p.dump(None)
    return s.getvalue()

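# Hedged usage sketch for dumps() above: any object exposing getoid() is
# written as a persistent reference rather than pickled inline, and a
# matching unpickler resolves the oid back to a live object.  MyRef, the
# oid string and the registry are assumptions for illustration only; the
# real module takes Pickler/_protocol from its own imports.
import io
import pickle


class MyRef(object):
    def __init__(self, oid):
        self.oid = oid

    def getoid(self):
        return self.oid


def getpersid(obj):
    # Same convention as the nested getpersid() in dumps().
    if hasattr(obj, 'getoid'):
        return obj.getoid()
    return None


ref = MyRef('oid-1')
buf = io.BytesIO()
p = pickle.Pickler(buf, 2)
p.persistent_id = getpersid   # Python 3 spelling; Python 2 used inst_persistent_id
p.dump({'payload': ref})
p.dump(None)                  # dumps() appends a trailing None record

buf.seek(0)
registry = {'oid-1': ref}
u = pickle.Unpickler(buf)
u.persistent_load = registry.__getitem__
assert u.load()['payload'] is ref
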
def zodb_pickle(obj):
    """Create a pickle in the format expected by ZODB."""
    f = BytesIO()
    p = Pickler(f, _protocol)
    if sys.version_info[0] < 3:
        p.inst_persistent_id = _persistent_id
    else:
        p.persistent_id = _persistent_id
    klass = obj.__class__
    assert not hasattr(obj, '__getinitargs__'), "not ready for constructors"
    args = None

    mod = getattr(klass, '__module__', None)
    if mod is not None:
        klass = mod, klass.__name__

    state = obj.__getstate__()

    p.dump((klass, args))
    p.dump(state)
    return f.getvalue()

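# Hedged counterpart sketch: reading back the two-record layout produced by
# zodb_pickle() above, i.e. one pickle for ((module, name), args) followed by
# one pickle for the instance state.  The Point class and the toy_* helpers
# are assumptions for this example; the real code uses _persistent_id and
# _protocol defined elsewhere in the module.
import io
import pickle


class Point(object):
    def __init__(self, x, y):
        self.x = x
        self.y = y


def toy_zodb_pickle(obj):
    f = io.BytesIO()
    p = pickle.Pickler(f, 2)
    klass = obj.__class__
    p.dump(((klass.__module__, klass.__name__), None))   # class record
    state = obj.__getstate__() if hasattr(obj, '__getstate__') else obj.__dict__
    p.dump(state)                                         # state record
    return f.getvalue()


def toy_zodb_unpickle(data):
    f = io.BytesIO(data)
    u = pickle.Unpickler(f)
    (mod, name), args = u.load()   # first record: class metadata
    state = u.load()               # second record: instance state
    klass = getattr(__import__(mod, fromlist=[name]), name)
    obj = klass.__new__(klass)
    obj.__dict__.update(state)
    return obj


pt = toy_zodb_unpickle(toy_zodb_pickle(Point(1, 2)))
assert (pt.x, pt.y) == (1, 2)
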
def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle,
                         committedData=b''):
    # class_tuple, old, committed, newstate = ('',''), 0, 0, 0
    try:
        prfactory = PersistentReferenceFactory()
        newpickle = self._crs_untransform_record_data(newpickle)
        file = BytesIO(newpickle)
        unpickler = Unpickler(file)
        unpickler.find_global = find_global
        unpickler.persistent_load = prfactory.persistent_load
        meta = unpickler.load()
        if isinstance(meta, tuple):
            klass = meta[0]
            newargs = meta[1] or ()
            if isinstance(klass, tuple):
                klass = find_global(*klass)
        else:
            klass = meta
            newargs = ()

        if klass in _unresolvable:
            raise ConflictError

        inst = klass.__new__(klass, *newargs)

        try:
            resolve = inst._p_resolveConflict
        except AttributeError:
            _unresolvable[klass] = 1
            raise ConflictError

        oldData = self.loadSerial(oid, oldSerial)
        if not committedData:
            committedData = self.loadSerial(oid, committedSerial)

        if newpickle == oldData:
            # old -> new diff is empty, so merge is trivial
            return committedData
        if committedData == oldData:
            # old -> committed diff is empty, so merge is trivial
            return newpickle

        newstate = unpickler.load()
        old = state(self, oid, oldSerial, prfactory, oldData)
        committed = state(self, oid, committedSerial, prfactory, committedData)

        resolved = resolve(old, committed, newstate)

        file = BytesIO()
        pickler = Pickler(file, _protocol)
        if sys.version_info[0] < 3:
            pickler.inst_persistent_id = persistent_id
        else:
            pickler.persistent_id = persistent_id
        pickler.dump(meta)
        pickler.dump(resolved)
        return self._crs_transform_record_data(file.getvalue())
    except (ConflictError, BadClassName):
        pass
    except:
        # If anything else went wrong, catch it here and avoid passing an
        # arbitrary exception back to the client.  The error here will mask
        # the original ConflictError.  A client can recover from a
        # ConflictError, but not necessarily from other errors.  But log
        # the error so that any problems can be fixed.
        logger.error("Unexpected error", exc_info=True)

    raise ConflictError(oid=oid, serials=(committedSerial, oldSerial),
                        data=newpickle)

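# Hedged sketch of the hook tryToResolveConflict() ultimately calls: a class
# whose _p_resolveConflict() merges two concurrent writes by re-applying both
# deltas to the common ancestor state.  Counter is an invented example class
# (the same idea as BTrees.Length); a real persistent class would also
# inherit from persistent.Persistent.
class Counter(object):
    def __init__(self, value=0):
        self.value = value

    def __getstate__(self):
        return self.value

    def __setstate__(self, state):
        self.value = state

    def _p_resolveConflict(self, old_state, committed_state, new_state):
        # old_state:       state both transactions started from
        # committed_state: state the winning transaction wrote
        # new_state:       state this (losing) transaction tried to write
        return committed_state + (new_state - old_state)


# Two transactions both increment from 5: one commits 6, the other also
# produced 6; the merged state is 7, as if both increments were applied.
c = Counter()
assert c._p_resolveConflict(5, 6, 6) == 7
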
def _importDuringCommit(self, transaction, f, return_oid_list):
    """Import data during two-phase commit.

    Invoked by the transaction manager mid commit.
    Appends one item, the OID of the first object created,
    to return_oid_list.
    """
    oids = {}

    # IMPORTANT: This code should be consistent with the code in
    # serialize.py. It is currently out of date and doesn't handle
    # weak references.

    def persistent_load(ooid):
        """Remap a persistent id to a new ID and create a ghost for it."""
        klass = None
        if isinstance(ooid, tuple):
            ooid, klass = ooid
        if not isinstance(ooid, bytes):
            assert isinstance(ooid, str)
            # this happens on Python 3 when all bytes in the oid are < 0x80
            ooid = ooid.encode('ascii')

        if ooid in oids:
            oid = oids[ooid]
        else:
            if klass is None:
                oid = self._storage.new_oid()
            else:
                oid = self._storage.new_oid(), klass
            oids[ooid] = oid

        return Ghost(oid)

    while 1:
        header = f.read(16)
        if header == export_end_marker:
            break
        if len(header) != 16:
            raise ExportError("Truncated export file")

        # Extract header information
        ooid = header[:8]
        length = u64(header[8:16])
        data = f.read(length)

        if len(data) != length:
            raise ExportError("Truncated export file")

        if oids:
            oid = oids[ooid]
            if isinstance(oid, tuple):
                oid = oid[0]
        else:
            oids[ooid] = oid = self._storage.new_oid()
            return_oid_list.append(oid)

        # Blob support
        blob_begin = f.read(len(blob_begin_marker))
        if blob_begin == blob_begin_marker:
            # Copy the blob data to a temporary file
            # and remember the name
            blob_len = u64(f.read(8))
            blob_filename = mktemp()
            blob_file = open(blob_filename, "wb")
            cp(f, blob_file, blob_len)
            blob_file.close()
        else:
            f.seek(-len(blob_begin_marker), 1)
            blob_filename = None

        pfile = BytesIO(data)
        unpickler = Unpickler(pfile)
        unpickler.persistent_load = persistent_load

        newp = BytesIO()
        pickler = Pickler(newp, _protocol)
        if sys.version_info[0] < 3:
            pickler.inst_persistent_id = persistent_id
        else:
            pickler.persistent_id = persistent_id

        pickler.dump(unpickler.load())
        pickler.dump(unpickler.load())
        data = newp.getvalue()

        if blob_filename is not None:
            self._storage.storeBlob(oid, None, data, blob_filename,
                                    '', transaction)
        else:
            self._storage.store(oid, None, data, '', transaction)

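# Hedged sketch of the core move in _importDuringCommit(): copy a data record
# by unpickling it with a persistent_load that rewrites old oids to freshly
# allocated ones, then re-pickling.  The Ref marker class, new_oid(),
# remap_record() and the sample record are assumptions for this example; the
# real code uses Ghost/persistent_id from the surrounding module.
import io
import itertools
import pickle

_counter = itertools.count(1)


def new_oid():
    return 'new-%d' % next(_counter)


class Ref(object):
    """Stand-in for the Ghost marker: carries the remapped oid."""
    def __init__(self, oid):
        self.oid = oid


def remap_record(data, oid_map):
    class _Load(pickle.Unpickler):
        def persistent_load(self, ooid):
            # Allocate a new oid the first time each old oid is seen.
            if ooid not in oid_map:
                oid_map[ooid] = new_oid()
            return Ref(oid_map[ooid])

    class _Dump(pickle.Pickler):
        def persistent_id(self, obj):
            return obj.oid if isinstance(obj, Ref) else None

    out = io.BytesIO()
    _Dump(out, 2).dump(_Load(io.BytesIO(data)).load())
    return out.getvalue()


class _MakeRef(pickle.Pickler):
    def persistent_id(self, obj):
        return obj.oid if isinstance(obj, Ref) else None


# Build a record that refers to 'old-1', then remap it.
buf = io.BytesIO()
_MakeRef(buf, 2).dump({'child': Ref('old-1')})
mapping = {}
remap_record(buf.getvalue(), mapping)
assert mapping == {'old-1': 'new-1'}
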