Example #1
0
def zodb_unpickle(data):
    """Unpickle an object stored using the format expected by ZODB.

    A ZODB data record is two pickles back to back: the first describes
    the class, the second carries the instance state.  Returns a new
    instance with that state applied via __setstate__.
    """
    f = StringIO(data)
    u = Unpickler(f)
    # Resolve persistent references through the module-level hook.
    u.persistent_load = persistent_load
    klass_info = u.load()
    if isinstance(klass_info, types.TupleType):
        if isinstance(klass_info[0], type):
            # Unclear:  what is the second part of klass_info?
            klass, xxx = klass_info
            assert not xxx
        else:
            # Older record layout: either ((modname, klassname), ...) or a
            # bare (modname, klassname) pair.
            if isinstance(klass_info[0], tuple):
                modname, klassname = klass_info[0]
            else:
                modname, klassname = klass_info
            if modname == "__main__":
                ns = globals()
            else:
                mod = import_helper(modname)
                ns = mod.__dict__
            try:
                klass = ns[klassname]
            except KeyError:
                # NOTE(review): on this path klass stays unbound, so the
                # inst = klass() below raises NameError — confirm intended.
                print >> sys.stderr, "can't find %s in %r" % (klassname, ns)
        inst = klass()
    else:
        raise ValueError("expected class info: %s" % repr(klass_info))
    # Second pickle: the instance state.
    state = u.load()
    inst.__setstate__(state)
    return inst
 def setstate(self, object):
     """
     Unlike the 'stock' Connection class' setstate, this method
     doesn't raise ConflictErrors.  This is potentially dangerous
     for applications that need absolute consistency, but
     sessioning is not one of those.
     """
     oid=object._p_oid
     invalid = self._invalid
     if invalid(None):
         # only raise a conflict if there was
         # a mass invalidation, but not if we see this
         # object's oid as invalid
         raise ConflictError, `oid`
     # Fetch the current record; a ZODB record is two pickles back to
     # back -- class metadata first, then the instance state.
     p, serial = self._storage.load(oid, self._version)
     file=StringIO(p)
     unpickler=Unpickler(file)
     unpickler.persistent_load=self._persistent_load
     # Discard the class-metadata pickle.
     unpickler.load()
     state = unpickler.load()
     if hasattr(object, '__setstate__'):
         object.__setstate__(state)
     else:
         # No __setstate__ hook: copy the state dict item by item.
         d=object.__dict__
         for k,v in state.items(): d[k]=v
     # Remember the serial so later commits can detect conflicts.
     object._p_serial=serial
Example #3
0
def zodb_unpickle(data):
    """Unpickle an object stored using the format expected by ZODB.

    data holds two consecutive pickles: class information followed by
    instance state.  Returns a new instance with the state applied.
    """
    f = StringIO(data)
    u = Unpickler(f)
    # Resolve persistent references through the module-level hook.
    u.persistent_load = persistent_load
    klass_info = u.load()
    if isinstance(klass_info, tuple):
        if isinstance(klass_info[0], type):
            # Unclear:  what is the second part of klass_info?
            klass, xxx = klass_info
            assert not xxx
        else:
            # Older record layout: ((modname, klassname), ...) or a bare
            # (modname, klassname) pair.
            if isinstance(klass_info[0], tuple):
                modname, klassname = klass_info[0]
            else:
                modname, klassname = klass_info
            if modname == "__main__":
                ns = globals()
            else:
                mod = import_helper(modname)
                ns = mod.__dict__
            try:
                klass = ns[klassname]
            except KeyError:
                # NOTE(review): klass stays unbound here, so inst = klass()
                # below raises NameError — confirm this is intended.
                print >> sys.stderr, "can't find %s in %r" % (klassname, ns)
        inst = klass()
    else:
        raise ValueError("expected class info: %s" % repr(klass_info))
    # Second pickle: the instance state.
    state = u.load()
    inst.__setstate__(state)
    return inst
Example #4
0
def cloneByPickle(obj, ignore_list=()):
    """Makes a copy of a ZODB object, loading ghosts as needed.

    Objects in ignore_list are not copied; each occurrence is replaced
    in the copy by a placeholder SimpleItem that is later removed by
    removeNonVersionedData().

    :param obj: persistent object to deep-copy.
    :param ignore_list: iterable of objects to exclude from the copy.
    :return: the copied object graph.
    """
    # Index ignored objects by identity; holding them in the dict also
    # keeps their id() values from being reused while we pickle.
    ignore_dict = {}
    for o in ignore_list:
        ignore_dict[id(o)] = o

    def persistent_id(ob, ignore_dict=ignore_dict):
        # Tag ignored objects with a sentinel persistent id.
        if id(ob) in ignore_dict:  # dict.has_key() was removed in Python 3
            return 'ignored'
        # Un-ghostify so the object's state is available to the pickler.
        if getattr(ob, '_p_changed', 0) is None:
            ob._p_changed = 0
        return None

    def persistent_load(ref):
        assert ref == 'ignored'
        # Return a placeholder object that will be replaced by
        # removeNonVersionedData().
        placeholder = SimpleItem()
        placeholder.id = "ignored_subobject"
        return placeholder

    # Deep-copy by round-tripping through a pickle.
    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()
def _cloneByPickle(self, obj):
    """Returns a deep copy of a ZODB object, loading ghosts as needed.

    Returns (approxSize, clone, inside_orefs, outside_orefs) where
    approxSize is the pickle's byte length and the oref tuples come
    from the portal_modifier's on-clone callbacks (empty when no
    callbacks are registered).
    """
    modifier = getToolByName(self, 'portal_modifier')
    callbacks = modifier.getOnCloneModifiers(obj)
    if callbacks is not None:
        pers_id, pers_load, inside_orefs, outside_orefs = callbacks[0:4]
    else:
        inside_orefs, outside_orefs = (), ()

    stream = StringIO()
    p = Pickler(stream, 1)
    if callbacks is not None:
        p.persistent_id = pers_id
    # Replace a cmf_uid annotation with its plain value before pickling.
    cmf_uid = getattr(obj, 'cmf_uid', None)
    if IUniqueIdAnnotation.providedBy(cmf_uid):
        setattr(obj, 'cmf_uid', cmf_uid())
    try:
        # aq_base strips acquisition wrappers so only the bare object
        # is pickled.
        p.dump(aq_base(obj))
    except TypeError:
        # just try again, this then seems to work
        # WTF?
        p.dump(aq_base(obj))
    approxSize = stream.tell()
    stream.seek(0)
    u = Unpickler(stream)
    if callbacks is not None:
        u.persistent_load = pers_load
    return approxSize, u.load(), inside_orefs, outside_orefs
Example #6
0
def tryToResolveConflict(self,
                         oid,
                         committedSerial,
                         oldSerial,
                         newpickle,
                         committedData=''):
    """Attempt application-level conflict resolution for oid.

    Delegates to the object's _p_resolveConflict(old, committed, new)
    hook.  Returns the re-pickled resolved record on success; raises
    ConflictError when resolution is impossible.
    """
    # class_tuple, old, committed, newstate = ('',''), 0, 0, 0
    try:
        prfactory = PersistentReferenceFactory()
        newpickle = self._crs_untransform_record_data(newpickle)
        file = StringIO(newpickle)
        unpickler = Unpickler(file)
        unpickler.find_global = find_global
        unpickler.persistent_load = prfactory.persistent_load
        # First pickle: class metadata, either klass or (klass, newargs).
        meta = unpickler.load()
        if isinstance(meta, tuple):
            klass = meta[0]
            newargs = meta[1] or ()
            if isinstance(klass, tuple):
                klass = find_global(*klass)
        else:
            klass = meta
            newargs = ()

        # Classes that failed before are cached so we can bail out fast.
        if klass in _unresolvable:
            raise ConflictError

        newstate = unpickler.load()
        inst = klass.__new__(klass, *newargs)

        try:
            resolve = inst._p_resolveConflict
        except AttributeError:
            _unresolvable[klass] = 1
            raise ConflictError

        old = state(self, oid, oldSerial, prfactory)
        committed = state(self, oid, committedSerial, prfactory, committedData)

        resolved = resolve(old, committed, newstate)

        # Re-serialize: same meta pickle, followed by the resolved state.
        file = StringIO()
        pickler = Pickler(file, 1)
        pickler.inst_persistent_id = persistent_id
        pickler.dump(meta)
        pickler.dump(resolved)
        return self._crs_transform_record_data(file.getvalue(1))
    except (ConflictError, BadClassName):
        pass
    except:
        # If anything else went wrong, catch it here and avoid passing an
        # arbitrary exception back to the client.  The error here will mask
        # the original ConflictError.  A client can recover from a
        # ConflictError, but not necessarily from other errors.  But log
        # the error so that any problems can be fixed.
        logger.error("Unexpected error", exc_info=True)

    raise ConflictError(oid=oid,
                        serials=(committedSerial, oldSerial),
                        data=newpickle)
def cloneByPickle(obj, ignore_list=()):
    """Makes a copy of a ZODB object, loading ghosts as needed.

    Objects in ignore_list are skipped; each is replaced in the copy by
    a placeholder SimpleItem that removeNonVersionedData() cleans up.

    :param obj: persistent object to deep-copy.
    :param ignore_list: iterable of objects to exclude from the copy.
    :return: the copied object graph.
    """
    # Map id(ob) -> ob for fast identity checks; keeping the objects
    # referenced also guarantees their id() values stay valid.
    ignore_dict = {}
    for o in ignore_list:
        ignore_dict[id(o)] = o

    def persistent_id(ob, ignore_dict=ignore_dict):
        # Mark ignored objects with a sentinel persistent id.
        if id(ob) in ignore_dict:  # was dict.has_key(), removed in Python 3
            return 'ignored'
        # Activate ghosts so their state gets pickled.
        if getattr(ob, '_p_changed', 0) is None:
            ob._p_changed = 0
        return None

    def persistent_load(ref):
        assert ref == 'ignored'
        # Return a placeholder object that will be replaced by
        # removeNonVersionedData().
        placeholder = SimpleItem()
        placeholder.id = "ignored_subobject"
        return placeholder

    # Deep-copy by pickling and immediately unpickling.
    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()
Example #8
0
File: core.py Project: solebox/WorQ
    def deserialize(self, message, task_id=None):
        """Deserialize an object

        :param message: A serialized object (string).
        :param deferred: When true load deferreds. When false
            raise an error if the message contains deferreds.
        """
        failures = []
        if task_id is None:
            # Without a task id there is nothing to resolve references
            # against, so any persistent reference is an error.
            def persistent_load(task_id):
                raise UnpicklingError('message contained references to '
                                      'external objects: %s' % task_id)
        else:
            arguments = {
                key: loads(raw)
                for key, raw in self._queue.get_arguments(task_id).items()
            }

            def persistent_load(arg_id):
                resolved = arguments[arg_id]
                if isinstance(resolved, TaskFailure):
                    failures.append(resolved)
                return resolved

        unpickler = Unpickler(StringIO(message))
        unpickler.persistent_load = persistent_load
        result = unpickler.load()
        if failures and not result.on_error_pass:
            # TODO detect errors earlier, fail earlier, cancel enqueued tasks
            self.set_result(result, failures[0])
            result = None
        return result
Example #9
0
    def deserialize(self, event, state):
        """Restore event.obj's state from a (possibly text-encoded) pickle."""
        assert IFullDeserializationEvent.isImplementedBy(event)
        assert isinstance(event.obj, Persistent)

        # Set up to resolve cyclic references to the object.
        event.deserialized('self', event.obj)

        state = state.strip()
        if not state:
            return
        # Text-encoded pickles start with a pound sign.
        # (A pound sign is not a valid pickle opcode.)
        data = decode_from_text(state) if state.startswith('#') else state
        unpickler = Unpickler(StringIO(data))
        unpickler.persistent_load = event.resolve_internal
        loaded = unpickler.load()
        if not hasattr(loaded, 'items'):
            # Turn the list of (key, value) pairs back into a dictionary.
            loaded = dict(loaded)
        event.obj.__dict__.update(loaded)
        try:
            unmanaged = unpickler.load()
        except EOFError:
            # old pickle with no list of unmanaged objects
            pass
        else:
            event.upos.extend(unmanaged)
Example #10
0
    def deserialize(self, message, task_id=None):
        """Deserialize an object

        :param message: A serialized object (string).
        :param deferred: When true load deferreds. When false
            raise an error if the message contains deferreds.
        """
        fail = []
        if task_id is None:
            def persistent_load(task_id):
                # No task context: persistent references cannot be resolved.
                raise UnpicklingError('message contained references to '
                    'external objects: %s' % task_id)
        else:
            raw_args = self._queue.get_arguments(task_id)
            args = {name: loads(blob) for name, blob in raw_args.items()}

            def persistent_load(arg_id):
                value = args[arg_id]
                if isinstance(value, TaskFailure):
                    fail.append(value)
                return value

        unpickler = Unpickler(StringIO(message))
        unpickler.persistent_load = persistent_load
        obj = unpickler.load()
        if fail and not obj.on_error_pass:
            # TODO detect errors earlier, fail earlier, cancel enqueued tasks
            self.set_result(obj, fail[0])
            obj = None
        return obj
 def loads(self, s):
     """Unpickle s, resolving persistent references via _get_object.

     Raises UnpicklingError when a referenced Node class is unknown.
     """
     up = Unpickler(BytesIO(s))
     up.persistent_load = self._get_object
     try:
         return up.load()
     except KeyError, e:
         # A missing Node class surfaces as KeyError from _get_object.
         raise UnpicklingError("Could not find Node class for %s" % e)
Example #12
0
 def setstate(self, object):
     """
     Unlike the 'stock' Connection class' setstate, this method
     doesn't raise ConflictErrors.  This is potentially dangerous
     for applications that need absolute consistency, but
     sessioning is not one of those.
     """
     oid = object._p_oid
     invalid = self._invalid
     if invalid(None):
         # only raise a conflict if there was
         # a mass invalidation, but not if we see this
         # object's oid as invalid
         raise ConflictError, ` oid `
     # Load the current record: class metadata pickle followed by the
     # instance-state pickle.
     p, serial = self._storage.load(oid, self._version)
     file = StringIO(p)
     unpickler = Unpickler(file)
     unpickler.persistent_load = self._persistent_load
     # Discard the class-metadata pickle.
     unpickler.load()
     state = unpickler.load()
     if hasattr(object, '__setstate__'):
         object.__setstate__(state)
     else:
         # No __setstate__ hook: copy the state dict item by item.
         d = object.__dict__
         for k, v in state.items():
             d[k] = v
     # Record the serial for later conflict detection.
     object._p_serial = serial
Example #13
0
 def loads(self, s):
     """Unpickle s, resolving persistent references via _get_object.

     Raises UnpicklingError when a referenced Node class is unknown.
     """
     up = Unpickler(BytesIO(s))
     up.persistent_load = self._get_object
     try:
         return up.load()
     except KeyError, e:
         # A missing Node class surfaces as KeyError from _get_object.
         raise UnpicklingError, "Could not find Node class for %s" % e
Example #14
0
    def setklassstate(self, object):
        """Reconstruct the state of a ZClass (ExtensionKlass) object.

        Loads the (klass, args) record for the object, rebuilds the
        class by calling klass with args, and copies the rebuilt
        __dict__ over the existing object in place.
        """
        try:
            oid=object._p_oid
            __traceback_info__=oid
            p, serial = self._storage.load(oid, self._version)
            file=StringIO(p)
            unpickler=Unpickler(file)
            unpickler.persistent_load=self._persistent_load

            copy = unpickler.load()

            klass, args = copy

            # Only ExtensionKlass records can be handled here.
            if klass is not ExtensionKlass:
                LOG('ZODB',ERROR,
                    "Unexpected klass when setting class state on %s"
                    % getattr(object,'__name__','(?)'))
                return

            # Rebuild the class and swap its namespace into the existing
            # object so outstanding references stay valid.
            copy=apply(klass,args)
            object.__dict__.clear()
            object.__dict__.update(copy.__dict__)

            object._p_oid=oid
            object._p_jar=self
            object._p_changed=0
            object._p_serial=serial
        except:
            # Log with full traceback, then propagate to the caller.
            LOG('ZODB',ERROR, 'setklassstate failed', error=sys.exc_info())
            raise
def state(self, oid, serial, prfactory, p=''):
    """Return the unpickled state for oid as of the given serial.

    Uses p as the raw record when supplied, otherwise loads it from
    storage; persistent references are resolved through prfactory.
    """
    record = p or self.loadSerial(oid, serial)
    unpickler = Unpickler(StringIO(record))
    unpickler.persistent_load = prfactory.persistent_load
    unpickler.load()  # the leading pickle is class metadata; discard it
    return unpickler.load()
Example #16
0
def state(self, oid, serial, prfactory, p=''):
    """Load and return the object state for oid at serial.

    p, when non-empty, is used as the raw record instead of reading
    from storage.  References resolve through prfactory.
    """
    record = p or self.loadSerial(oid, serial)
    unpickler = Unpickler(StringIO(record))
    unpickler.find_global = find_global
    unpickler.persistent_load = prfactory.persistent_load
    unpickler.load()  # discard the leading class-metadata pickle
    return unpickler.load()
Example #17
0
 def oldstate(self, object, serial):
     """Return the historical state of object at the given serial."""
     data = self._storage.loadSerial(object._p_oid, serial)
     unpickler = Unpickler(StringIO(data))
     unpickler.persistent_load = self._persistent_load
     unpickler.load()  # skip the class-metadata pickle
     return unpickler.load()
Example #18
0
def state(self, oid, serial, prfactory, p=''):
    """Return the unpickled state of oid as of the given serial."""
    if not p:
        p = self.loadSerial(oid, serial)
    u = Unpickler(StringIO(p))
    u.find_global = find_global
    u.persistent_load = prfactory.persistent_load
    u.load()  # the first pickle (class tuple) is not needed
    return u.load()
Example #19
0
def state(self, oid, serial, prfactory, p=""):
    """Fetch, untransform, and unpickle the state of oid at serial."""
    raw = p or self.loadSerial(oid, serial)
    record = self._crs_untransform_record_data(raw)
    unpickler = Unpickler(StringIO(record))
    unpickler.find_global = find_global
    unpickler.persistent_load = prfactory.persistent_load
    unpickler.load()  # skip the class-metadata pickle
    return unpickler.load()
Example #20
0
 def _get_unpickler(self, file):
     """Return an Unpickler whose persistent_load resolves references
     through the connection's instance cache."""
     connection = self.connection
     get_instance = connection.get_cache().get_instance

     def persistent_load(oid_klass):
         # A persistent reference is an (oid, klass) pair.
         oid, klass = oid_klass
         return get_instance(oid, klass, connection)

     u = Unpickler(file)
     u.persistent_load = persistent_load
     return u
def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle,
                         committedData=''):
    """Attempt application-level conflict resolution for oid.

    Delegates to the object's _p_resolveConflict(old, committed, new)
    hook.  Returns the re-pickled resolved record on success; raises
    ConflictError when resolution is impossible.
    """
    # class_tuple, old, committed, newstate = ('',''), 0, 0, 0
    try:
        prfactory = PersistentReferenceFactory()
        newpickle = self._crs_untransform_record_data(newpickle)
        file = StringIO(newpickle)
        unpickler = Unpickler(file)
        unpickler.find_global = find_global
        unpickler.persistent_load = prfactory.persistent_load
        # First pickle: class metadata, either klass or (klass, newargs).
        meta = unpickler.load()
        if isinstance(meta, tuple):
            klass = meta[0]
            newargs = meta[1] or ()
            if isinstance(klass, tuple):
                klass = find_global(*klass)
        else:
            klass = meta
            newargs = ()

        # Classes that failed before are cached so we bail out fast.
        if klass in _unresolvable:
            raise ConflictError

        newstate = unpickler.load()
        inst = klass.__new__(klass, *newargs)

        try:
            resolve = inst._p_resolveConflict
        except AttributeError:
            _unresolvable[klass] = 1
            raise ConflictError

        old = state(self, oid, oldSerial, prfactory)
        committed = state(self, oid, committedSerial, prfactory, committedData)

        resolved = resolve(old, committed, newstate)

        # Re-serialize: same meta pickle, followed by the resolved state.
        file = StringIO()
        pickler = Pickler(file,1)
        pickler.inst_persistent_id = persistent_id
        pickler.dump(meta)
        pickler.dump(resolved)
        return self._crs_transform_record_data(file.getvalue(1))
    except (ConflictError, BadClassName):
        pass
    except:
        # If anything else went wrong, catch it here and avoid passing an
        # arbitrary exception back to the client.  The error here will mask
        # the original ConflictError.  A client can recover from a
        # ConflictError, but not necessarily from other errors.  But log
        # the error so that any problems can be fixed.
        logger.error("Unexpected error", exc_info=True)

    raise ConflictError(oid=oid, serials=(committedSerial, oldSerial),
                        data=newpickle)
Example #22
0
 def _get_unpickler(self, file):
     """Return an Unpickler that resolves references through the
     connection cache, making ghost instances for objects not yet
     cached."""
     def persistent_load(oid_klass):
         oid, klass = oid_klass
         cached = self.connection.cache_get(oid)
         if cached is not None:
             return cached
         # Go ahead and make the ghost instance.
         ghost = klass.__new__(klass)
         ghost._p_oid = oid
         ghost._p_connection = self.connection
         ghost._p_set_status_ghost()
         self.connection.cache_set(oid, ghost)
         return ghost

     u = Unpickler(file)
     u.persistent_load = persistent_load
     return u
Example #23
0
def copyOf(source):
    """Copies a ZODB object, loading subobjects as needed.

    Re-ghostifies objects along the way to save memory.
    """
    # Ghosts activated during pickling, to be re-ghosted afterwards.
    former_ghosts = []
    # ZClass references captured by persistent_id and resolved again by
    # persistent_load on the way back in.
    zclass_refs = {}

    def persistent_id(ob, former_ghosts=former_ghosts,
                      zclass_refs=zclass_refs):
        if getattr(ob, '_p_changed', 0) is None:
            # Load temporarily.
            former_ghosts.append(ob)
            ob._p_changed = 0
        if hasattr(ob, '__bases__'):
            m = getattr(ob, '__module__', None)
            if (m is not None
                and isinstance(m, StringType)
                and m.startswith('*')):
                n = getattr(ob, '__name__', None)
                if n is not None:
                    # Pickling a ZClass instance.  Store the reference to
                    # the ZClass class separately, so that the pickler
                    # and unpickler don't trip over the apparently
                    # missing module.
                    ref = (m, n)
                    zclass_refs[ref] = ob
                    return ref
        return None

    def persistent_load(ref, zclass_refs=zclass_refs):
        return zclass_refs[ref]

    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(source)
    # Return temporarily-activated objects to ghost state before
    # unpickling the copy.
    if former_ghosts:
        for g in former_ghosts:
            del g._p_changed
        del former_ghosts[:]
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()
def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle,
                         committedData=''):
    """Attempt application-level conflict resolution for oid.

    Returns the re-pickled resolved record on success, or 0 when the
    conflict cannot be resolved.
    """
    # class_tuple, old, committed, newstate = ('',''), 0, 0, 0
    try:
        prfactory = PersistentReferenceFactory()
        file = StringIO(newpickle)
        unpickler = Unpickler(file)
        unpickler.persistent_load = prfactory.persistent_load
        # First pickle holds (class_tuple, args); only the tuple matters.
        class_tuple = unpickler.load()[0]
        if bad_class(class_tuple):
            return 0
        newstate = unpickler.load()
        klass = load_class(class_tuple)
        if klass is None:
            return 0
        inst = klass.__basicnew__()

        try:
            resolve = inst._p_resolveConflict
        except AttributeError:
            # Remember classes without a resolver to short-circuit later.
            bad_classes[class_tuple] = 1
            return 0

        old = state(self, oid, oldSerial, prfactory)
        committed = state(self, oid, committedSerial, prfactory, committedData)

        resolved = resolve(old, committed, newstate)

        # Re-serialize: class tuple first, then the resolved state.
        file = StringIO()
        pickler = Pickler(file,1)
        pickler.persistent_id = persistent_id
        pickler.dump(class_tuple)
        pickler.dump(resolved)
        return file.getvalue(1)
    except ConflictError:
        return 0
    except:
        # If anything else went wrong, catch it here and avoid passing an
        # arbitrary exception back to the client.  The error here will mask
        # the original ConflictError.  A client can recover from a
        # ConflictError, but not necessarily from other errors.  But log
        # the error so that any problems can be fixed.
        zLOG.LOG("Conflict Resolution", zLOG.ERROR,
                 "Unexpected error", error=sys.exc_info())
        return 0
Example #25
0
    def __getitem__(self, oid, tt=type(())):
        """Return the object for oid, loading and caching it on demand.

        tt is bound at definition time to the tuple type as a speed
        micro-optimization (Python 2 era).
        """
        obj = self._cache.get(oid, None)
        if obj is not None:
            return obj

        __traceback_info__ = (oid)
        p, serial = self._storage.load(oid, self._version)
        __traceback_info__ = (oid, p)
        file=StringIO(p)
        unpickler=Unpickler(file)
        unpickler.persistent_load=self._persistent_load

        try:
            # First pickle of the record: (klass, args).
            object = unpickler.load()
        except:
            # NOTE(review): raising a string is Python-2-only behavior
            # (TypeError on Python 3).
            raise "Could not load oid %s, pickled data in traceback info may\
            contain clues" % (oid)

        klass, args = object

        if type(klass) is tt:
            # klass is a (module, name) pair; resolve it via the class
            # factory.
            module, name = klass
            klass=self._db._classFactory(self, module, name)

        if (args is None or
            not args and not hasattr(klass,'__getinitargs__')):
            # No constructor arguments: make a bare instance.
            object=klass.__basicnew__()
        else:
            object=apply(klass,args)
            if klass is not ExtensionKlass:
                object.__dict__.clear()

        object._p_oid=oid
        object._p_jar=self
        object._p_changed=None
        object._p_serial=serial

        self._cache[oid] = object
        if oid=='\0\0\0\0\0\0\0\0':
            self._root_=object # keep a ref
        return object
    def _cloneByPickle(self, obj):
        """Returns a deep copy of a ZODB object, loading ghosts as needed.
        """
        modifier = getToolByName(self, 'portal_modifier')
        callbacks = modifier.getOnCloneModifiers(obj)
        if callbacks is None:
            inside_orefs, outside_orefs = (), ()
        else:
            pers_id, pers_load, inside_orefs, outside_orefs = callbacks[:4]

        buffer = StringIO()
        pickler = Pickler(buffer, 1)
        if callbacks is not None:
            pickler.persistent_id = pers_id
        pickler.dump(aq_base(obj))
        approxSize = buffer.tell()
        buffer.seek(0)
        unpickler = Unpickler(buffer)
        if callbacks is not None:
            unpickler.persistent_load = pers_load
        return approxSize, unpickler.load(), inside_orefs, outside_orefs
Example #27
0
    def _cloneByPickle(self, obj):
        """Returns a deep copy of a ZODB object, loading ghosts as needed.
        """
        modifier = getToolByName(self, 'portal_modifier')
        callbacks = modifier.getOnCloneModifiers(obj)
        have_callbacks = callbacks is not None
        if have_callbacks:
            pers_id, pers_load, inside_orefs, outside_orefs = callbacks[0:4]
        else:
            inside_orefs, outside_orefs = (), ()

        out = StringIO()
        writer = Pickler(out, 1)
        if have_callbacks:
            writer.persistent_id = pers_id
        writer.dump(aq_base(obj))
        approxSize = out.tell()
        out.seek(0)
        reader = Unpickler(out)
        if have_callbacks:
            reader.persistent_load = pers_load
        return approxSize, reader.load(), inside_orefs, outside_orefs
Example #28
0
    def _importDuringCommit(self, transaction, f, return_oid_list):
        """Import data during two-phase commit.

        Invoked by the transaction manager mid commit.
        Appends one item, the OID of the first object created,
        to return_oid_list.
        """
        # Maps old (exported) oids to newly allocated oids, optionally
        # paired with a class for ghost creation.
        oids = {}

        # IMPORTANT: This code should be consistent with the code in
        # serialize.py. It is currently out of date and doesn't handle
        # weak references.

        def persistent_load(ooid):
            """Remap a persistent id to a new ID and create a ghost for it."""

            klass = None
            if isinstance(ooid, tuple):
                ooid, klass = ooid

            if ooid in oids:
                oid = oids[ooid]
            else:
                if klass is None:
                    oid = self._storage.new_oid()
                else:
                    oid = self._storage.new_oid(), klass
                oids[ooid] = oid

            return Ghost(oid)

        version = self._version

        # Each record: 8-byte old oid + 8-byte big-endian length header,
        # followed by the record's pickle data.
        while 1:
            h = f.read(16)
            if h == export_end_marker:
                break
            if len(h) != 16:
                raise ExportError("Truncated export file")
            l = u64(h[8:16])
            p = f.read(l)
            if len(p) != l:
                raise ExportError("Truncated export file")

            ooid = h[:8]
            if oids:
                oid = oids[ooid]
                if isinstance(oid, tuple):
                    oid = oid[0]
            else:
                # First record: its new oid is what the caller gets back.
                oids[ooid] = oid = self._storage.new_oid()
                return_oid_list.append(oid)

            pfile = StringIO(p)
            unpickler = Unpickler(pfile)
            unpickler.persistent_load = persistent_load

            # Re-pickle both halves of the record (class metadata and
            # state) so persistent ids are remapped to the new oids.
            newp = StringIO()
            pickler = Pickler(newp, 1)
            pickler.persistent_id = persistent_id

            pickler.dump(unpickler.load())
            pickler.dump(unpickler.load())
            p = newp.getvalue()

            self._storage.store(oid, None, p, version, transaction)
Example #29
0
    def _importDuringCommit(self, transaction, f, return_oid_list):
        """Import data during two-phase commit.

        Invoked by the transaction manager mid commit.
        Appends one item, the OID of the first object created,
        to return_oid_list.
        """
        # Maps old (exported) oids to newly allocated oids, optionally
        # paired with a class for ghost creation.
        oids = {}

        # IMPORTANT: This code should be consistent with the code in
        # serialize.py. It is currently out of date and doesn't handle
        # weak references.

        def persistent_load(ooid):
            """Remap a persistent id to a new ID and create a ghost for it."""

            klass = None
            if isinstance(ooid, tuple):
                ooid, klass = ooid

            if ooid in oids:
                oid = oids[ooid]
            else:
                if klass is None:
                    oid = self._storage.new_oid()
                else:
                    oid = self._storage.new_oid(), klass
                oids[ooid] = oid

            return Ghost(oid)

        version = self._version

        # Each record: 8-byte old oid + 8-byte big-endian length header,
        # then the pickle data, then optionally a blob section.
        while 1:
            header = f.read(16)
            if header == export_end_marker:
                break
            if len(header) != 16:
                raise ExportError("Truncated export file")

            # Extract header information
            ooid = header[:8]
            length = u64(header[8:16])
            data = f.read(length)

            if len(data) != length:
                raise ExportError("Truncated export file")

            if oids:
                oid = oids[ooid]
                if isinstance(oid, tuple):
                    oid = oid[0]
            else:
                # First record: its new oid is what the caller gets back.
                oids[ooid] = oid = self._storage.new_oid()
                return_oid_list.append(oid)

            # Blob support
            blob_begin = f.read(len(blob_begin_marker))
            if blob_begin == blob_begin_marker:
                # Copy the blob data to a temporary file
                # and remember the name
                blob_len = u64(f.read(8))
                # NOTE(review): mktemp() is race-prone; a NamedTemporaryFile
                # would be safer -- confirm before changing behavior.
                blob_filename = mktemp()
                blob_file = open(blob_filename, "wb")
                cp(f, blob_file, blob_len)
                blob_file.close()
            else:
                # No blob marker: rewind so the next header read is aligned.
                f.seek(-len(blob_begin_marker),1)
                blob_filename = None

            pfile = StringIO(data)
            unpickler = Unpickler(pfile)
            unpickler.persistent_load = persistent_load

            # Re-pickle both halves (class metadata and state) so
            # persistent ids are remapped to the new oids.
            newp = StringIO()
            pickler = Pickler(newp, 1)
            pickler.persistent_id = persistent_id

            pickler.dump(unpickler.load())
            pickler.dump(unpickler.load())
            data = newp.getvalue()

            if blob_filename is not None:
                self._storage.storeBlob(oid, None, data, blob_filename, 
                                        version, transaction)
            else:
                self._storage.store(oid, None, data, version, transaction)
Example #30
0
    def _importDuringCommit(self, transaction, f, return_oid_list):
        """Import data during two-phase commit.

        Invoked by the transaction manager mid commit.
        Appends one item, the OID of the first object created,
        to return_oid_list.
        """
        # Maps old (exported) oids to newly allocated oids, optionally
        # paired with a class for ghost creation.
        oids = {}

        # IMPORTANT: This code should be consistent with the code in
        # serialize.py. It is currently out of date and doesn't handle
        # weak references.

        def persistent_load(ooid):
            """Remap a persistent id to a new ID and create a ghost for it."""

            klass = None
            if isinstance(ooid, tuple):
                ooid, klass = ooid

            if ooid in oids:
                oid = oids[ooid]
            else:
                if klass is None:
                    oid = self._storage.new_oid()
                else:
                    oid = self._storage.new_oid(), klass
                oids[ooid] = oid

            return Ghost(oid)

        # Each record: 8-byte old oid + 8-byte big-endian length header,
        # then the pickle data, then optionally a blob section.
        while 1:
            header = f.read(16)
            if header == export_end_marker:
                break
            if len(header) != 16:
                raise ExportError("Truncated export file")

            # Extract header information
            ooid = header[:8]
            length = u64(header[8:16])
            data = f.read(length)

            if len(data) != length:
                raise ExportError("Truncated export file")

            if oids:
                oid = oids[ooid]
                if isinstance(oid, tuple):
                    oid = oid[0]
            else:
                # First record: its new oid is what the caller gets back.
                oids[ooid] = oid = self._storage.new_oid()
                return_oid_list.append(oid)

            # Blob support
            blob_begin = f.read(len(blob_begin_marker))
            if blob_begin == blob_begin_marker:
                # Copy the blob data to a temporary file
                # and remember the name
                blob_len = u64(f.read(8))
                # NOTE(review): mktemp() is race-prone; a NamedTemporaryFile
                # would be safer -- confirm before changing behavior.
                blob_filename = mktemp()
                blob_file = open(blob_filename, "wb")
                cp(f, blob_file, blob_len)
                blob_file.close()
            else:
                # No blob marker: rewind so the next header read is aligned.
                f.seek(-len(blob_begin_marker), 1)
                blob_filename = None

            pfile = StringIO(data)
            unpickler = Unpickler(pfile)
            unpickler.persistent_load = persistent_load

            # Re-pickle both halves (class metadata and state) so
            # persistent ids are remapped to the new oids.
            newp = StringIO()
            pickler = Pickler(newp, 1)
            pickler.inst_persistent_id = persistent_id

            pickler.dump(unpickler.load())
            pickler.dump(unpickler.load())
            data = newp.getvalue()

            if blob_filename is not None:
                self._storage.storeBlob(oid, None, data, blob_filename, "", transaction)
            else:
                self._storage.store(oid, None, data, "", transaction)
Example #31
0
def load(stream):
    """Unpickle and return the next object from *stream*.

    Persistent references are resolved through the module-level
    ``persistent_load`` hook.
    """
    unpickler = Unpickler(stream)
    unpickler.persistent_load = persistent_load
    return unpickler.load()
Example #32
0
 def __init__(self, file):
     """Wrap *file* in an Unpickler and expose its load/noload entry points.

     The unpickler's persistent_load is wired to this object's
     ``persistent_load`` method before the bound methods are published.
     """
     u = Unpickler(file)
     u.persistent_load = self.persistent_load
     self.load, self.noload = u.load, u.noload
Example #33
0
    def _importDuringCommit(self, transaction, file, return_oid_list):
        '''
        Invoked by the transaction manager mid commit.
        Appends one item, the OID of the first object created,
        to return_oid_list.
        '''
        oids = {}
        # Hoist storage entry points to locals for the hot loop below.
        storage = self._storage
        new_oid = storage.new_oid
        store = storage.store
        read = file.read

        # The default arguments bind globals/locals once at definition time
        # (classic Python 2 speed idiom); oids is shared with the loop below.
        def persistent_load(ooid,
                            Ghost=Ghost, StringType=StringType,
                            atoi=string.atoi, TupleType=type(()),
                            oids=oids, wrote_oid=oids.has_key,
                            new_oid=storage.new_oid):

            "Remap a persistent id to a new ID and create a ghost for it."

            # A tuple persistent id carries (oid, klass).
            if type(ooid) is TupleType: ooid, klass = ooid
            else: klass=None

            if wrote_oid(ooid): oid=oids[ooid]
            else:
                # First sighting of this old oid: allocate a fresh one,
                # keeping the klass alongside it when known.
                if klass is None: oid=new_oid()
                else: oid=new_oid(), klass
                oids[ooid]=oid

            # Rebinds the local name Ghost to an instance (shadowing the
            # default-arg class) and tags it with the remapped oid.
            Ghost=Ghost()
            Ghost.oid=oid
            return Ghost

        version = self._version

        # Read export records (16-byte header + pickle data) until the
        # end-of-export marker.
        while 1:
            h=read(16)
            if h==export_end_marker: break
            if len(h) != 16:
                raise POSException.ExportError, 'Truncated export file'
            l=u64(h[8:16])
            p=read(l)
            if len(p) != l:
                raise POSException.ExportError, 'Truncated export file'

            ooid=h[:8]
            if oids:
                # Already seen via a persistent reference; unwrap (oid, klass).
                oid=oids[ooid]
                if type(oid) is TupleType: oid=oid[0]
            else:
                # First record: allocate its oid and report it to the caller.
                oids[ooid] = oid = storage.new_oid()
                return_oid_list.append(oid)

            # Re-pickle the record, remapping persistent references to the
            # newly allocated oids.
            pfile=StringIO(p)
            unpickler=Unpickler(pfile)
            unpickler.persistent_load=persistent_load

            newp=StringIO()
            pickler=Pickler(newp,1)
            pickler.persistent_id=persistent_id

            # Each record holds two pickles: class metadata, then state.
            pickler.dump(unpickler.load())
            pickler.dump(unpickler.load())
            p=newp.getvalue()
            plen=len(p)

            store(oid, None, p, version, transaction)
Example #34
0
    def _importDuringCommit(self, transaction, f, return_oid_list):
        """Import export-format data during two-phase commit.

        Called by the transaction manager mid commit; appends the OID of
        the first object created to *return_oid_list*.
        """
        oids = {}

        # IMPORTANT: This code should be consistent with the code in
        # serialize.py. It is currently out of date and doesn't handle
        # weak references.

        def persistent_load(ooid):
            """Remap a persistent id to a new ID and create a ghost for it."""
            klass = None
            if isinstance(ooid, tuple):
                ooid, klass = ooid

            try:
                oid = oids[ooid]
            except KeyError:
                oid = self._storage.new_oid()
                if klass is not None:
                    oid = oid, klass
                oids[ooid] = oid

            return Ghost(oid)

        version = self._version

        # Consume 16-byte headers (old oid + data length) followed by the
        # pickle payload, until the end-of-export marker appears.
        while True:
            header = f.read(16)
            if header == export_end_marker:
                break
            if len(header) != 16:
                raise ExportError("Truncated export file")
            length = u64(header[8:16])
            data = f.read(length)
            if len(data) != length:
                raise ExportError("Truncated export file")

            ooid = header[:8]
            if oids:
                oid = oids[ooid]
                if isinstance(oid, tuple):
                    oid = oid[0]
            else:
                # First record: allocate its oid and hand it to the caller.
                oids[ooid] = oid = self._storage.new_oid()
                return_oid_list.append(oid)

            # Re-pickle the record so persistent references point at the
            # freshly allocated oids.
            source = StringIO(data)
            unpickler = Unpickler(source)
            unpickler.persistent_load = persistent_load

            sink = StringIO()
            pickler = Pickler(sink, 1)
            pickler.persistent_id = persistent_id

            # Two pickles per record: class metadata, then object state.
            pickler.dump(unpickler.load())
            pickler.dump(unpickler.load())

            self._storage.store(oid, None, sink.getvalue(), version,
                                transaction)
Example #35
0
def load(stream):
    """Read and return one pickled object from *stream*.

    Uses the module-level ``persistent_load`` hook to resolve
    persistent references.
    """
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()
Example #36
0
def unpickle_state(data):
    """Return the state pickle stored in *data*.

    A ZODB data record holds two pickles; the leading class record is
    read and discarded, and the second (the state) is returned.
    """
    u = Unpickler(StringIO(data))
    u.persistent_load = PersistentReferenceFactory().persistent_load
    u.load()  # first pickle is the class tuple; throw it away
    return u.load()
Example #37
0
 def _loads(self, data):
     """Unpickle *data* (a pickle string) using this object's private
     persistent-load hook, and return the resulting object."""
     u = Unpickler(StringIO(data))
     u.persistent_load = self.__persistent_load
     return u.load()
Example #38
0
    def setstate(self, object):
        # Load the persistent state for *object* from storage, handling
        # invalidation/conflict detection along the way.
        oid = object._p_oid

        if self._storage is None:
            msg = ("Shouldn't load state for %s "
                   "when the connection is closed" % `oid`)
            LOG('ZODB', ERROR, msg)
            raise RuntimeError(msg)

        try:
            p, serial = self._storage.load(oid, self._version)
            self._load_count = self._load_count + 1

            # XXX this is quite conservative!
            # We need, however, to avoid reading data from a transaction
            # that committed after the current "session" started, as
            # that might lead to mixing of cached data from earlier
            # transactions and new inconsistent data.
            #
            # Note that we (carefully) wait until after we call the
            # storage to make sure that we don't miss an invaildation
            # notifications between the time we check and the time we
            # read.
            if self._invalid(oid):
                # Objects whose class lacks _p_independent cannot tolerate
                # a stale read: register for abort and raise immediately.
                if not hasattr(object.__class__, '_p_independent'):
                    get_transaction().register(self)
                    raise ReadConflictError(object=object)
                invalid = 1
            else:
                invalid = 0

            # A data record contains two pickles: class metadata (skipped)
            # and the object state.
            file = StringIO(p)
            unpickler = Unpickler(file)
            unpickler.persistent_load = self._persistent_load
            unpickler.load()
            state = unpickler.load()

            if hasattr(object, '__setstate__'):
                object.__setstate__(state)
            else:
                # No custom __setstate__: merge the state dict directly
                # into the instance __dict__.
                d = object.__dict__
                for k, v in state.items():
                    d[k] = v

            object._p_serial = serial

            if invalid:
                # Deferred conflict check: _p_independent objects may accept
                # the possibly-stale state and clear their invalidation.
                if object._p_independent():
                    try:
                        del self._invalidated[oid]
                    except KeyError:
                        pass
                else:
                    get_transaction().register(self)
                    raise ConflictError(object=object)

        except ConflictError:
            raise
        except:
            # Log any other failure with full traceback, then re-raise.
            LOG('ZODB',ERROR, "Couldn't load state for %s" % `oid`,
                error=sys.exc_info())
            raise
Example #39
0
    def deserialize(self, io_file, iface_bitmaps, metadata,
                    cur_deserialized_python_objs):
        """Reciprocal to serialize."""
        # Reads this object's persisted fields back from *io_file*:
        # padding, master location, then either a pickled __setstate__
        # payload or the per-property wire format.
        logger.verbose("Deserializing object %s", str(self.get_object_id()))

        # Put slow debugging info inside here:
        #
        # NOTE: new implementation of ExecutionGateway assert is not needed and wrong
        # if logger.isEnabledFor(DEBUG):
        #     klass = self.__class__
        #     logger.debug("Deserializing instance %r from class %s",
        #                  self, klass.__name__)
        #     logger.debug("The previous class is from module %s, in file %s",
        #                  klass.__module__, inspect.getfile(klass))
        #     logger.debug("The class extradata is:\n%s", klass._dclay_class_extradata)
        #     assert klass._dclay_class_extradata == self._dclay_class_extradata
        #
        # LOADED FLAG = TRUE only once deserialization is finished to avoid concurrent problems!
        # # This may be due to race conditions. It may need to do some extra locking
        # if self.__dclay_instance_extradata.loaded_flag:
        #     logger.debug("Loaded Flag is True")
        # else:
        #     self.__dclay_instance_extradata.loaded_flag = True
        """ reference counting """
        """ discard padding """
        # Consume and ignore one integer of padding from the stream.
        IntegerWrapper().read(io_file)
        """ deserialize master_location """
        # "x" is the sentinel for "no master location"; otherwise the
        # string is a UUID.
        des_master_loc_str = StringWrapper().read(io_file)
        if des_master_loc_str == "x":
            self.__dclay_instance_extradata.master_location = None
        else:
            self.__dclay_instance_extradata.master_location = UUID(
                des_master_loc_str)

        if hasattr(self, "__setstate__"):
            # The object has a user-defined deserialization method.

            # Use pickle, and use that method instead
            if six.PY2:
                import cPickle as pickle
            elif six.PY3:
                import _pickle as pickle

            # NOTE(review): pickle.loads on stream data — assumes the
            # payload is trusted (written by serialize); confirm.
            state = pickle.loads(StringWrapper(mode="binary").read(io_file))
            self.__setstate__(state)

        else:
            # Regular dataClay provided deserialization

            # Start by getting the properties
            # Properties are read in a fixed order (sorted by position) so
            # the wire layout matches what serialize wrote.
            properties = sorted(self.get_class_extradata().properties.values(),
                                key=attrgetter('position'))

            logger.trace("Tell io_file before loop: %s", io_file.tell())
            logger.verbose("Deserializing list of properties: %s", properties)

            for p in properties:

                logger.trace("Tell io_file in loop: %s", io_file.tell())
                # A boolean flag precedes each property: null vs present.
                not_null = BooleanWrapper().read(io_file)
                value = None
                if not_null:
                    logger.debug("Not null property %s", p.name)
                    if isinstance(p.type, UserType):
                        try:
                            logger.debug("Property %s is an association",
                                         p.name)
                            value = DeserializationLibUtilsSingleton.deserialize_association(
                                io_file, iface_bitmaps, metadata,
                                cur_deserialized_python_objs, getRuntime())
                        except KeyError as e:
                            # Association target missing: log and leave the
                            # property as None rather than aborting.
                            logger.error('Failed to deserialize association',
                                         exc_info=True)
                    else:
                        try:
                            # Plain value: unpickle with the persistent-load
                            # helper so embedded references are resolved.
                            upck = Unpickler(io_file)
                            upck.persistent_load = PersistentLoadPicklerHelper(
                                metadata, cur_deserialized_python_objs,
                                getRuntime())
                            value = upck.load()
                        except:
                            # Best-effort: report the traceback and keep
                            # value as None.
                            traceback.print_exc()

                logger.debug("Setting value %s for property %s", value, p.name)

                # object.__setattr__ bypasses any dataClay attribute hooks
                # while writing the prefixed backing attribute.
                object.__setattr__(self,
                                   "%s%s" % (DCLAY_PROPERTY_PREFIX, p.name),
                                   value)
        """ reference counting bytes here """
        """ TODO: discard bytes? """