def cloneByPickle(obj, ignore_list=()):
    """Makes a copy of a ZODB object, loading ghosts as needed.

    Ignores specified objects along the way, replacing them with None
    in the copy.
    """
    ignore_dict = {}
    for o in ignore_list:
        ignore_dict[id(o)] = o

    def persistent_id(ob, ignore_dict=ignore_dict):
        if ignore_dict.has_key(id(ob)):
            return 'ignored'
        if getattr(ob, '_p_changed', 0) is None:
            ob._p_changed = 0
        return None

    def persistent_load(ref):
        assert ref == 'ignored'
        # Return a placeholder object that will be replaced by
        # removeNonVersionedData().
        placeholder = SimpleItem()
        placeholder.id = "ignored_subobject"
        return placeholder

    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()
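# A minimal, self-contained sketch of the persistent_id /
# persistent_load pairing used above, outside of ZODB and Zope. The
# clone_ignoring name and the use of the standard pickle module are
# assumptions for illustration, not part of the code above: objects in
# ignore_list pickle as an out-of-band marker and come back as None.
from io import BytesIO
from pickle import Pickler, Unpickler

def clone_ignoring(obj, ignore_list=()):
    ignored_ids = set(id(o) for o in ignore_list)

    def persistent_id(ob):
        # Marked objects bypass normal pickling entirely.
        return 'ignored' if id(ob) in ignored_ids else None

    def persistent_load(ref):
        assert ref == 'ignored'
        return None  # The ignored subobject becomes None in the copy.

    stream = BytesIO()
    p = Pickler(stream, 2)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()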
def serialize(self, obj, deferred=False):
    """Serialize an object

    :param obj: The object to serialize.
    :param deferred: When this is true Deferred objects are serialized
        and their values are loaded on deserialization. When this is
        false Deferred objects are not serializable.
    """
    if deferred:
        args = {}

        def persistent_id(obj):
            if isinstance(obj, Deferred):
                args[obj.id] = obj
                return obj.id
            return None
    else:
        args = None

        def persistent_id(obj):
            if isinstance(obj, Deferred):
                raise PicklingError('%s cannot be serialized' % obj)
            return None

    data = StringIO()
    pickle = Pickler(data, HIGHEST_PROTOCOL)
    pickle.persistent_id = persistent_id
    pickle.dump(obj)
    msg = data.getvalue()
    return (msg, args) if deferred else msg
def _cloneByPickle(self, obj):
    """Returns a deep copy of a ZODB object, loading ghosts as needed.
    """
    modifier = getToolByName(self, 'portal_modifier')
    callbacks = modifier.getOnCloneModifiers(obj)
    if callbacks is not None:
        pers_id, pers_load, inside_orefs, outside_orefs = callbacks[0:4]
    else:
        inside_orefs, outside_orefs = (), ()

    stream = StringIO()
    p = Pickler(stream, 1)
    if callbacks is not None:
        p.persistent_id = pers_id

    cmf_uid = getattr(obj, 'cmf_uid', None)
    if IUniqueIdAnnotation.providedBy(cmf_uid):
        setattr(obj, 'cmf_uid', cmf_uid())

    try:
        p.dump(aq_base(obj))
    except TypeError:
        # just try again, this then seems to work
        # WTF?
        p.dump(aq_base(obj))

    approxSize = stream.tell()
    stream.seek(0)
    u = Unpickler(stream)
    if callbacks is not None:
        u.persistent_load = pers_load
    return approxSize, u.load(), inside_orefs, outside_orefs
def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle,
                         committedData=''):
    # class_tuple, old, committed, newstate = ('', ''), 0, 0, 0
    try:
        prfactory = PersistentReferenceFactory()
        file = StringIO(newpickle)
        unpickler = Unpickler(file)
        unpickler.find_global = find_global
        unpickler.persistent_load = prfactory.persistent_load
        meta = unpickler.load()
        if isinstance(meta, tuple):
            klass = meta[0]
            newargs = meta[1] or ()
            if isinstance(klass, tuple):
                klass = find_global(*klass)
        else:
            klass = meta
            newargs = ()

        if klass in _unresolvable:
            return None

        newstate = unpickler.load()
        inst = klass.__new__(klass, *newargs)

        try:
            resolve = inst._p_resolveConflict
        except AttributeError:
            _unresolvable[klass] = 1
            return None

        old = state(self, oid, oldSerial, prfactory)
        committed = state(self, oid, committedSerial, prfactory,
                          committedData)

        resolved = resolve(old, committed, newstate)

        file = StringIO()
        pickler = Pickler(file, 1)
        pickler.persistent_id = persistent_id
        pickler.dump(meta)
        pickler.dump(resolved)
        return file.getvalue(1)
    except (ConflictError, BadClassName):
        return None
    except:
        # If anything else went wrong, catch it here and avoid passing an
        # arbitrary exception back to the client.  The error here will mask
        # the original ConflictError.  A client can recover from a
        # ConflictError, but not necessarily from other errors.  But log
        # the error so that any problems can be fixed.
        logger.error("Unexpected error", exc_info=True)
        return None
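# For context, a sketch of the _p_resolveConflict() hook that
# tryToResolveConflict() looks up above. The Counter class is
# hypothetical, but the hook's three-state signature and its use of
# plain state mappings (never live persistent objects) follow ZODB's
# conflict-resolution protocol.
from persistent import Persistent

class Counter(Persistent):
    def _p_resolveConflict(self, oldState, savedState, newState):
        # Merge two concurrent increments by re-applying both deltas
        # to the common ancestor state.
        saved_delta = savedState['count'] - oldState['count']
        new_delta = newState['count'] - oldState['count']
        resolved = oldState.copy()
        resolved['count'] = oldState['count'] + saved_delta + new_delta
        return resolved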
def cloneByPickle(obj):
    """Makes a copy of a ZODB object, loading ghosts as needed.
    """
    def persistent_id(o):
        if getattr(o, '_p_changed', 0) is None:
            o._p_changed = 0
        return None

    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()
def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle,
                         committedData=''):
    # class_tuple, old, committed, newstate = ('', ''), 0, 0, 0
    try:
        prfactory = PersistentReferenceFactory()
        file = StringIO(newpickle)
        unpickler = Unpickler(file)
        unpickler.persistent_load = prfactory.persistent_load
        class_tuple = unpickler.load()[0]
        if bad_class(class_tuple):
            return 0
        newstate = unpickler.load()
        klass = load_class(class_tuple)
        if klass is None:
            return 0
        inst = klass.__basicnew__()

        try:
            resolve = inst._p_resolveConflict
        except AttributeError:
            bad_classes[class_tuple] = 1
            return 0

        old = state(self, oid, oldSerial, prfactory)
        committed = state(self, oid, committedSerial, prfactory,
                          committedData)

        resolved = resolve(old, committed, newstate)

        file = StringIO()
        pickler = Pickler(file, 1)
        pickler.persistent_id = persistent_id
        pickler.dump(class_tuple)
        pickler.dump(resolved)
        return file.getvalue(1)
    except ConflictError:
        return 0
    except:
        # If anything else went wrong, catch it here and avoid passing an
        # arbitrary exception back to the client.  The error here will mask
        # the original ConflictError.  A client can recover from a
        # ConflictError, but not necessarily from other errors.  But log
        # the error so that any problems can be fixed.
        zLOG.LOG("Conflict Resolution", zLOG.ERROR,
                 "Unexpected error", error=sys.exc_info())
        return 0
def copyOf(source):
    """Copies a ZODB object, loading subobjects as needed.

    Re-ghostifies objects along the way to save memory.
    """
    former_ghosts = []
    zclass_refs = {}

    def persistent_id(ob, former_ghosts=former_ghosts,
                      zclass_refs=zclass_refs):
        if getattr(ob, '_p_changed', 0) is None:
            # Load temporarily.
            former_ghosts.append(ob)
            ob._p_changed = 0
        if hasattr(ob, '__bases__'):
            m = getattr(ob, '__module__', None)
            if (m is not None and isinstance(m, StringType)
                    and m.startswith('*')):
                n = getattr(ob, '__name__', None)
                if n is not None:
                    # Pickling a ZClass instance.  Store the reference to
                    # the ZClass class separately, so that the pickler
                    # and unpickler don't trip over the apparently
                    # missing module.
                    ref = (m, n)
                    zclass_refs[ref] = ob
                    return ref
        return None

    def persistent_load(ref, zclass_refs=zclass_refs):
        return zclass_refs[ref]

    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(source)
    if former_ghosts:
        for g in former_ghosts:
            del g._p_changed
        del former_ghosts[:]
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()
def zodb_pickle(obj):
    """Create a pickle in the format expected by ZODB."""
    f = StringIO()
    p = Pickler(f, 1)
    p.persistent_id = _persistent_id
    klass = obj.__class__
    assert not hasattr(obj, '__getinitargs__'), "not ready for constructors"
    args = None

    mod = getattr(klass, '__module__', None)
    if mod is not None:
        klass = mod, klass.__name__

    state = obj.__getstate__()

    p.dump((klass, args))
    p.dump(state)
    return f.getvalue(1)
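# A sketch of reading such a record back (the zodb_unpickle name is
# illustrative, not from the code above).  ZODB data records are two
# consecutive pickles on one stream, class metadata first and instance
# state second, which is why zodb_pickle() calls p.dump() twice.
def zodb_unpickle(data):
    f = StringIO(data)
    u = Unpickler(f)
    klass_info = u.load()   # the (klass, args) pair written first
    state = u.load()        # the __getstate__() result written second
    return klass_info, state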
def _cloneByPickle(self, obj):
    """Returns a deep copy of a ZODB object, loading ghosts as needed.
    """
    modifier = getToolByName(self, 'portal_modifier')
    callbacks = modifier.getOnCloneModifiers(obj)
    if callbacks is not None:
        pers_id, pers_load, inside_orefs, outside_orefs = callbacks[0:4]
    else:
        inside_orefs, outside_orefs = (), ()

    stream = StringIO()
    p = Pickler(stream, 1)
    if callbacks is not None:
        p.persistent_id = pers_id
    p.dump(aq_base(obj))
    approxSize = stream.tell()
    stream.seek(0)
    u = Unpickler(stream)
    if callbacks is not None:
        u.persistent_load = pers_load
    return approxSize, u.load(), inside_orefs, outside_orefs
def find_unmanaged(obj, managed):
    """Gathers the list of unmanaged subobjects from an object.

    'managed' is a list of subobjects known to be managed.
    """
    d = {}
    for m in managed:
        d[id(m)] = m
    outfile = StringIO()
    p = Pickler(outfile, 1)  # Binary pickle
    unmanaged = []

    def persistent_id(ob, d_get=d.get, unmanaged=unmanaged):
        if d_get(id(ob)) is not None:
            # Don't search inside managed subobjects.
            return 'managed'
        if hasattr(ob, '_p_oid'):
            unmanaged.append(ob)
        return None

    p.persistent_id = persistent_id
    p.dump(obj)
    return unmanaged
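# Usage sketch (the folder and known_managed names are hypothetical):
# the pickling above is purely a traversal device; the bytes written to
# outfile are discarded, and only the side effects of the persistent_id
# hook, collecting unmanaged persistent subobjects, are kept.
#
#     orphans = find_unmanaged(folder, known_managed)
#     for ob in orphans:
#         print 'unmanaged subobject:', ob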
def dump(data, stream):
    p = Pickler(stream)
    p.persistent_id = persistent_id
    p.dump(data)
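# The natural counterpart to dump() above installs a matching
# persistent_load hook on an Unpickler; a minimal sketch, assuming a
# persistent_load function is defined alongside persistent_id in the
# same module:
def load(stream):
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()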
def _importDuringCommit(self, transaction, f, return_oid_list):
    """Import data during two-phase commit.

    Invoked by the transaction manager mid commit.
    Appends one item, the OID of the first object created,
    to return_oid_list.
    """
    oids = {}

    # IMPORTANT: This code should be consistent with the code in
    # serialize.py. It is currently out of date and doesn't handle
    # weak references.

    def persistent_load(ooid):
        """Remap a persistent id to a new ID and create a ghost for it."""
        klass = None
        if isinstance(ooid, tuple):
            ooid, klass = ooid
        if ooid in oids:
            oid = oids[ooid]
        else:
            if klass is None:
                oid = self._storage.new_oid()
            else:
                oid = self._storage.new_oid(), klass
            oids[ooid] = oid
        return Ghost(oid)

    version = self._version

    while 1:
        header = f.read(16)
        if header == export_end_marker:
            break
        if len(header) != 16:
            raise ExportError("Truncated export file")

        # Extract header information
        ooid = header[:8]
        length = u64(header[8:16])
        data = f.read(length)

        if len(data) != length:
            raise ExportError("Truncated export file")

        if oids:
            oid = oids[ooid]
            if isinstance(oid, tuple):
                oid = oid[0]
        else:
            oids[ooid] = oid = self._storage.new_oid()
            return_oid_list.append(oid)

        # Blob support
        blob_begin = f.read(len(blob_begin_marker))
        if blob_begin == blob_begin_marker:
            # Copy the blob data to a temporary file
            # and remember the name
            blob_len = u64(f.read(8))
            blob_filename = mktemp()
            blob_file = open(blob_filename, "wb")
            cp(f, blob_file, blob_len)
            blob_file.close()
        else:
            f.seek(-len(blob_begin_marker), 1)
            blob_filename = None

        pfile = StringIO(data)
        unpickler = Unpickler(pfile)
        unpickler.persistent_load = persistent_load

        newp = StringIO()
        pickler = Pickler(newp, 1)
        pickler.persistent_id = persistent_id

        pickler.dump(unpickler.load())
        pickler.dump(unpickler.load())
        data = newp.getvalue()

        if blob_filename is not None:
            self._storage.storeBlob(oid, None, data, blob_filename,
                                    version, transaction)
        else:
            self._storage.store(oid, None, data, version, transaction)
def serialize(self, event):
    assert IFullSerializationEvent.providedBy(event)
    assert isinstance(event.obj, Persistent)

    # Allow pickling of cyclic references to the object.
    event.serialized('self', event.obj, False)

    # Ignore previously serialized attributes
    state = self.cleanDictCopy(event.obj.__dict__)
    for key in state.keys():
        if key.startswith('_v_'):
            del state[key]
    for attrname in event.get_seralized_attributes():
        if attrname in state:
            del state[attrname]
    if not state:
        # No data needs to be stored
        return ''

    outfile = StringIO()
    p = Pickler(outfile, 1)  # Binary pickle
    unmanaged = []

    def persistent_id(ob, identify_internal=event.identify_internal,
                      unmanaged=unmanaged):
        ref = identify_internal(ob)
        if ref is None:
            if hasattr(ob, '_p_oid'):
                # Persistent objects that end up in the remainder
                # are unmanaged.  Tell ZODB about them so that
                # ZODB can deal with them specially.
                unmanaged.append(ob)
        return ref

    # Preserve order to a reasonable extent by storing a list
    # instead of a dictionary.
    state_list = state.items()
    state_list.sort()
    p.persistent_id = persistent_id
    try:
        p.dump(state_list)
    except UnpickleableError, exc:
        # Try to reveal which attribute is unpickleable.
        attrname = None
        attrvalue = None
        for key, value in state_list:
            del unmanaged[:]
            outfile.seek(0)
            outfile.truncate()
            p = Pickler(outfile)
            p.persistent_id = persistent_id
            try:
                p.dump(value)
            except UnpickleableError:
                attrname = key
                attrvalue = value
                break
        if attrname is not None:
            # Provide a more informative exception.
            if os.environ.get('APE_TRACE_UNPICKLEABLE'):
                # Provide an opportunity to examine
                # the "attrvalue" attribute.
                import pdb
                pdb.set_trace()
            raise RuntimeError(
                'Unable to pickle the %s attribute, %s, '
                'of %s at %s. %s.' % (
                    repr(attrname), repr(attrvalue), repr(event.obj),
                    repr(event.oid), str(exc)))
        else:
            # Couldn't help.
            raise
def dumps(self, obj, protocol=None, bin=None):
    src = BytesIO()
    p = Pickler(src)
    p.persistent_id = self._get_ids
    p.dump(obj)
    return src.getvalue()
def _importDuringCommit(self, transaction, f, return_oid_list):
    """Import data during two-phase commit.

    Invoked by the transaction manager mid commit.
    Appends one item, the OID of the first object created,
    to return_oid_list.
    """
    oids = {}

    # IMPORTANT: This code should be consistent with the code in
    # serialize.py. It is currently out of date and doesn't handle
    # weak references.

    def persistent_load(ooid):
        """Remap a persistent id to a new ID and create a ghost for it."""
        klass = None
        if isinstance(ooid, tuple):
            ooid, klass = ooid
        if ooid in oids:
            oid = oids[ooid]
        else:
            if klass is None:
                oid = self._storage.new_oid()
            else:
                oid = self._storage.new_oid(), klass
            oids[ooid] = oid
        return Ghost(oid)

    version = self._version

    while 1:
        h = f.read(16)
        if h == export_end_marker:
            break
        if len(h) != 16:
            raise ExportError("Truncated export file")
        l = u64(h[8:16])
        p = f.read(l)
        if len(p) != l:
            raise ExportError("Truncated export file")
        ooid = h[:8]

        if oids:
            oid = oids[ooid]
            if isinstance(oid, tuple):
                oid = oid[0]
        else:
            oids[ooid] = oid = self._storage.new_oid()
            return_oid_list.append(oid)

        pfile = StringIO(p)
        unpickler = Unpickler(pfile)
        unpickler.persistent_load = persistent_load

        newp = StringIO()
        pickler = Pickler(newp, 1)
        pickler.persistent_id = persistent_id

        pickler.dump(unpickler.load())
        pickler.dump(unpickler.load())
        p = newp.getvalue()

        self._storage.store(oid, None, p, version, transaction)
def _importDuringCommit(self, transaction, file, return_oid_list):
    '''Invoked by the transaction manager mid commit.

    Appends one item, the OID of the first object created,
    to return_oid_list.
    '''
    oids = {}
    storage = self._storage
    new_oid = storage.new_oid
    store = storage.store
    read = file.read

    def persistent_load(ooid, Ghost=Ghost, StringType=StringType,
                        atoi=string.atoi, TupleType=type(()), oids=oids,
                        wrote_oid=oids.has_key, new_oid=storage.new_oid):
        "Remap a persistent id to a new ID and create a ghost for it."
        if type(ooid) is TupleType:
            ooid, klass = ooid
        else:
            klass = None
        if wrote_oid(ooid):
            oid = oids[ooid]
        else:
            if klass is None:
                oid = new_oid()
            else:
                oid = new_oid(), klass
            oids[ooid] = oid
        Ghost = Ghost()
        Ghost.oid = oid
        return Ghost

    version = self._version

    while 1:
        h = read(16)
        if h == export_end_marker:
            break
        if len(h) != 16:
            raise POSException.ExportError, 'Truncated export file'
        l = u64(h[8:16])
        p = read(l)
        if len(p) != l:
            raise POSException.ExportError, 'Truncated export file'
        ooid = h[:8]

        if oids:
            oid = oids[ooid]
            if type(oid) is TupleType:
                oid = oid[0]
        else:
            oids[ooid] = oid = storage.new_oid()
            return_oid_list.append(oid)

        pfile = StringIO(p)
        unpickler = Unpickler(pfile)
        unpickler.persistent_load = persistent_load

        newp = StringIO()
        pickler = Pickler(newp, 1)
        pickler.persistent_id = persistent_id

        pickler.dump(unpickler.load())
        pickler.dump(unpickler.load())
        p = newp.getvalue()
        plen = len(p)

        store(oid, None, p, version, transaction)
def _dumps(self, obj):
    buffer = StringIO()
    pickler = Pickler(buffer)
    pickler.persistent_id = self.__persistent_id
    pickler.dump(obj)
    return buffer.getvalue()
def commit(self, object, transaction):
    if object is self:
        # We registered ourself.  Execute a commit action, if any.
        if self.__onCommitActions is not None:
            method_name, args, kw = self.__onCommitActions.pop(0)
            apply(getattr(self, method_name), (transaction,) + args, kw)
        return

    oid = object._p_oid
    invalid = self._invalid
    if oid is None or object._p_jar is not self:
        # new object
        oid = self.new_oid()
        object._p_jar = self
        object._p_oid = oid
        self._creating.append(oid)
    elif object._p_changed:
        if invalid(oid) and not hasattr(object, '_p_resolveConflict'):
            raise ConflictError(object=object)
        self._invalidating.append(oid)
    else:
        # Nothing to do
        return

    stack = [object]

    # Create a special persistent_id that passes T and the subobject
    # stack along:
    #
    # def persistent_id(object,
    #                   self=self,
    #                   stackup=stackup, new_oid=self.new_oid):
    #     if (not hasattr(object, '_p_oid') or
    #         type(object) is ClassType):
    #         return None
    #
    #     oid = object._p_oid
    #
    #     if oid is None or object._p_jar is not self:
    #         oid = self.new_oid()
    #         object._p_jar = self
    #         object._p_oid = oid
    #         stackup(object)
    #
    #     klass = object.__class__
    #
    #     if klass is ExtensionKlass:
    #         return oid
    #
    #     if hasattr(klass, '__getinitargs__'):
    #         return oid
    #
    #     module = getattr(klass, '__module__', '')
    #     if module:
    #         klass = module, klass.__name__
    #
    #     return oid, klass

    file = StringIO()
    seek = file.seek
    pickler = Pickler(file, 1)
    pickler.persistent_id = new_persistent_id(self, stack)
    dbstore = self._storage.store
    file = file.getvalue
    cache = self._cache
    get = cache.get
    dump = pickler.dump
    clear_memo = pickler.clear_memo

    version = self._version

    while stack:
        object = stack[-1]
        del stack[-1]
        oid = object._p_oid
        serial = getattr(object, '_p_serial', '\0\0\0\0\0\0\0\0')
        if serial == '\0\0\0\0\0\0\0\0':
            # new object
            self._creating.append(oid)
        else:
            # XXX We should never get here
            if invalid(oid) and not hasattr(object, '_p_resolveConflict'):
                raise ConflictError(object=object)
            self._invalidating.append(oid)

        klass = object.__class__

        if klass is ExtensionKlass:
            # Yee Ha!
            dict = {}
            dict.update(object.__dict__)
            del dict['_p_jar']
            args = object.__name__, object.__bases__, dict
            state = None
        else:
            if hasattr(klass, '__getinitargs__'):
                args = object.__getinitargs__()
                len(args)  # XXX Assert it's a sequence
            else:
                args = None  # New no-constructor protocol!

            module = getattr(klass, '__module__', '')
            if module:
                klass = module, klass.__name__
            __traceback_info__ = klass, oid, self._version
            state = object.__getstate__()

        seek(0)
        clear_memo()
        dump((klass, args))
        dump(state)
        p = file(1)
        s = dbstore(oid, serial, p, version, transaction)
        self._store_count = self._store_count + 1

        # Put the object in the cache before handling the
        # response, just in case the response contains the
        # serial number for a newly created object
        try:
            cache[oid] = object
        except:
            # Dang, I bet its wrapped:
            if hasattr(object, 'aq_base'):
                cache[oid] = object.aq_base
            else:
                raise

        self._handle_serial(s, oid)
def serialize(self, event):
    assert IFullSerializationEvent.isImplementedBy(event)
    assert isinstance(event.obj, Persistent)

    # Allow pickling of cyclic references to the object.
    event.serialized('self', event.obj, False)

    # Ignore previously serialized attributes
    state = event.obj.__dict__.copy()
    for key in state.keys():
        if key.startswith('_v_'):
            del state[key]
    for attrname in event.get_seralized_attributes():
        if state.has_key(attrname):
            del state[attrname]
    if not state:
        # No data needs to be stored
        return ''

    outfile = StringIO()
    p = Pickler(outfile, 1)  # Binary pickle
    unmanaged = []

    def persistent_id(ob, identify_internal=event.identify_internal,
                      unmanaged=unmanaged):
        ref = identify_internal(ob)
        if ref is None:
            if hasattr(ob, '_p_oid'):
                # Persistent objects that end up in the remainder
                # are unmanaged.  Tell ZODB about them so that
                # ZODB can deal with them specially.
                unmanaged.append(ob)
        return ref

    # Preserve order to a reasonable extent by storing a list
    # instead of a dictionary.
    state_list = state.items()
    state_list.sort()
    p.persistent_id = persistent_id
    try:
        p.dump(state_list)
    except UnpickleableError, exc:
        # Try to reveal which attribute is unpickleable.
        attrname = None
        attrvalue = None
        for key, value in state_list:
            del unmanaged[:]
            outfile.seek(0)
            outfile.truncate()
            p = Pickler(outfile)
            p.persistent_id = persistent_id
            try:
                p.dump(value)
            except UnpickleableError:
                attrname = key
                attrvalue = value
                break
        if attrname is not None:
            # Provide a more informative exception.
            if os.environ.get('APE_TRACE_UNPICKLEABLE'):
                # Provide an opportunity to examine
                # the "attrvalue" attribute.
                import pdb
                pdb.set_trace()
            raise RuntimeError(
                'Unable to pickle the %s attribute, %s, '
                'of %s at %s. %s.' % (
                    repr(attrname), repr(attrvalue), repr(event.obj),
                    repr(event.oid), str(exc)))
        else:
            # Couldn't help.
            raise
def __init__(self, file, protocol=2):
    pickler = Pickler(file, protocol)
    pickler.persistent_id = self.persistent_id
    self.dump = pickler.dump
    self.clear_memo = pickler.clear_memo
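# Hypothetical usage of the wrapper above (the ObjectWriter class name
# and its persistent_id method are assumptions about the surrounding
# class): binding dump and clear_memo lets one Pickler, and therefore
# one shared memo, be reused across several consecutive records.
#
#     writer = ObjectWriter(open('records.pkl', 'wb'))
#     writer.dump(first_record)
#     writer.clear_memo()   # forget memoized subobjects between records
#     writer.dump(second_record)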
def serialize(self, io_file, ignore_user_types, iface_bitmaps,
              cur_serialized_objs, pending_objs, reference_counting):
    # Reference counting information.  The first integer is the
    # position in the buffer at which reference counting starts; this
    # avoids "holding" unnecessary information during a store or update
    # on disk.  In the new serialization this will be done through
    # padding.
    # TODO: use padding instead once new serialization is implemented
    IntegerWrapper().write(io_file, 0)

    cur_master_loc = self.get_master_location()
    if cur_master_loc is not None:
        StringWrapper().write(io_file, str(cur_master_loc))
    else:
        StringWrapper().write(io_file, str("x"))

    if hasattr(self, "__getstate__"):
        # The object has a user-defined serialization method; use that.
        dco_extradata = self.__dclay_instance_extradata
        last_loaded_flag = dco_extradata.loaded_flag
        last_persistent_flag = dco_extradata.persistent_flag
        dco_extradata.loaded_flag = True
        dco_extradata.persistent_flag = False

        # Pickle the result of the user serialization.
        if six.PY2:
            import cPickle as pickle
        elif six.PY3:
            import _pickle as pickle
        state = pickle.dumps(self.__getstate__(), protocol=-1)

        # Restore the previous values (probably False and True).
        dco_extradata.loaded_flag = last_loaded_flag
        dco_extradata.persistent_flag = last_persistent_flag

        StringWrapper(mode="binary").write(io_file, state)
    else:
        # Regular dataClay-provided serialization.
        # Get the list of properties, making sure it is sorted.
        properties = sorted(
            self.get_class_extradata().properties.values(),
            key=attrgetter('position'))
        logger.verbose("Serializing list of properties: %s", properties)

        for p in properties:
            try:
                value = object.__getattribute__(
                    self, "%s%s" % (DCLAY_PROPERTY_PREFIX, p.name))
            except AttributeError:
                value = None
            logger.verbose("Serializing property %s with value %s",
                           p.name, value)

            if value is None:
                BooleanWrapper().write(io_file, False)
            else:
                if isinstance(p.type, UserType):
                    if not ignore_user_types:
                        BooleanWrapper().write(io_file, True)
                        SerializationLibUtilsSingleton.serialize_association(
                            io_file, value, cur_serialized_objs,
                            pending_objs, reference_counting)
                    else:
                        BooleanWrapper().write(io_file, False)
                else:
                    BooleanWrapper().write(io_file, True)
                    pck = Pickler(io_file, protocol=-1)
                    pck.persistent_id = PersistentIdPicklerHelper(
                        cur_serialized_objs, pending_objs,
                        reference_counting)
                    pck.dump(value)

    # Reference counting.
    # TODO: this should be removed in the new serialization (by using
    # padding to directly access reference counters inside metadata).
    cur_stream_pos = io_file.tell()
    io_file.seek(0)
    IntegerWrapper().write(io_file, cur_stream_pos)
    io_file.seek(cur_stream_pos)
    reference_counting.serialize_reference_counting(
        self.get_object_id(), io_file)