Пример #1
0
    def checkConflictErrorDoesntImport(self):
        """Building a ConflictError from unresolvable pickle data must not
        raise ImportError (Python 2 variant of this test).

        The pickle in ``data`` is doctored so its GLOBAL opcode names a
        module path that cannot be imported; ConflictError must still be
        constructible and must report that path in its message.
        """
        from ZODB.serialize import ObjectWriter
        from ZODB.POSException import ConflictError
        from ZODB.tests.MinPO import MinPO
        import cPickle as pickle

        obj = MinPO()
        data = ObjectWriter().serialize(obj)

        # The pickle contains a GLOBAL ('c') opcode resolving to MinPO's
        # module and class.
        self.assert_('cZODB.tests.MinPO\nMinPO\n' in data)

        # Fiddle the pickle so it points to something "impossible" instead.
        data = data.replace('cZODB.tests.MinPO\nMinPO\n',
                            'cpath.that.does.not.exist\nlikewise.the.class\n')
        # Pickle can't resolve that GLOBAL opcode -- gets ImportError.
        self.assertRaises(ImportError, pickle.loads, data)

        # Verify that building ConflictError doesn't get ImportError.
        try:
            raise ConflictError(object=obj, data=data)
        # 'except E as v' (valid since Python 2.6) replaces the
        # Python-2-only 'except E, v' spelling.
        except ConflictError as detail:
            # And verify that the msg names the impossible path.
            self.assert_('path.that.does.not.exist.likewise.the.class' in
                         str(detail))
        else:
            # Unreachable today (the raise above always fires), but fail
            # loudly rather than silently pass if that ever changes.
            self.fail("expected ConflictError, but no exception raised")
    def test_ConflictErrorDoesntImport(self):
        """Constructing a ConflictError from a pickle whose class path is
        unresolvable must not raise ImportError, and the resulting message
        must still name that class path."""
        from ZODB.serialize import ObjectWriter
        from ZODB.POSException import ConflictError
        from ZODB.tests.MinPO import MinPO

        obj = MinPO()
        data = ObjectWriter().serialize(obj)

        # MinPO's pickle carries a GLOBAL ('c') opcode naming its module
        # and class.
        marker = b'cZODB.tests.MinPO\nMinPO\n'
        self.assertTrue(marker in data)

        # Redirect that GLOBAL opcode to a module path that cannot exist.
        bogus = b'cpath.that.does.not.exist\nlikewise.the.class\n'
        data = data.replace(marker, bogus)
        # Unpickling now fails with ImportError on the bogus path.
        self.assertRaises(ImportError, loads, data)

        # ConflictError itself must still be constructible from that data.
        try:
            raise ConflictError(object=obj, data=data)
        except ConflictError as exc:
            # The message names the impossible class path.
            self.assertTrue(
                'path.that.does.not.exist.likewise.the.class' in str(exc))
        else:
            self.fail("expected ConflictError, but no exception raised")
Пример #3
0
    def checkPersistentIdHandlesDescriptor(self):
        """ObjectWriter.persistent_id must return None when handed a class
        object (a Persistent subclass), not a persistent instance."""
        from ZODB.serialize import ObjectWriter

        class P(Persistent):
            pass

        # Inline the writer: only its persistent_id result matters here.
        self.assertEqual(ObjectWriter(None).persistent_id(P), None)
Пример #4
0
    def test_PersistentIdHandlesDescriptor(self):
        """persistent_id of a class object (Persistent subclass) is None."""
        from ZODB.serialize import ObjectWriter

        class P(Persistent):
            pass

        writer = ObjectWriter(None)
        result = writer.persistent_id(P)
        self.assertEqual(result, None)
def canSerialize(obj):
  """Tell whether ``obj`` can be stored in the ZODB.

  Containers are checked element by element (recursively); objects that
  implement ``__getstate__`` are checked through
  ``ZODB.serialize.ObjectWriter``; anything else is round-tripped through
  cPickle.

  :param obj: any Python object.
  :return: True when serialization is possible, False otherwise.
  """
  container_type_tuple = (list, tuple, dict, set, frozenset)

  # If object is a container, we need to check its elements for presence of
  # objects that cannot be put inside the zodb.
  if isinstance(obj, container_type_tuple):
    if isinstance(obj, dict):
      result_list = []
      for key, value in obj.iteritems():
        result_list.append(canSerialize(key))
        result_list.append(canSerialize(value))
    else:
      result_list = [canSerialize(element) for element in obj]
    return all(result_list)
  # If obj is an object and implements __getstate__, ZODB.serialize can
  # check if we can store it.
  elif isinstance(obj, object) and hasattr(obj, '__getstate__'):
    # Need to unwrap the variable, otherwise we get a TypeError, because
    # objects cannot be pickled while inside an acquisition wrapper.
    unwrapped_obj = Acquisition.aq_base(obj)
    writer = ObjectWriter(unwrapped_obj)
    # Use a distinct loop variable: the original shadowed ``obj``.
    for subobject in writer:
      try:
        writer.serialize(subobject)
      # writer.serialize() relies on the object's own __getstate__, so any
      # exception type can surface here.  'except Exception' still catches
      # them all, but (unlike a bare 'except:') lets KeyboardInterrupt and
      # SystemExit propagate.
      except Exception:
        return False
    return True
  else:
    # If cannot serialize object with ZODB.serialize, try with cPickle.
    # Only a dump of the object is not enough. Dumping and trying to
    # load it will properly raise errors in all possible situations,
    # for example: if the user defines a dict with an object of a class
    # that he created the dump will still work, but the load will fail.
    try:
      cPickle.loads(cPickle.dumps(obj))
    # For unknown reasons, catching cPickle.PicklingError the "normal" way
    # isn't working, possibly related to the weirdness reported in
    # http://bugs.python.org/issue1457119 -- so, as a temporary quickfix,
    # match the exception's class name as a string instead.  This should be
    # properly rewritten as soon as possible.
    except Exception as e:
      if type(e).__name__ in ('PicklingError', 'TypeError', 'NameError', 'AttributeError'):
        return False
      # Bare 'raise' keeps the original traceback ('raise e' would
      # discard it on Python 2).
      raise
    else:
      return True
Пример #6
0
def canSerialize(obj):
    """Tell whether ``obj`` can be stored in the ZODB.

    Containers are checked element by element (recursively); persistent
    objects (``__getstate__`` plus ``_p_jar``) are checked through
    ``ZODB.serialize.ObjectWriter``; anything else is round-tripped
    through cPickle.

    :param obj: any Python object.
    :return: True when serialization is possible, False otherwise.
    """
    container_type_tuple = (list, tuple, dict, set, frozenset)

    # if object is a container, we need to check its elements for presence of
    # objects that cannot be put inside the zodb
    if isinstance(obj, container_type_tuple):
        if isinstance(obj, dict):
            result_list = []
            for key, value in obj.iteritems():
                result_list.append(canSerialize(key))
                result_list.append(canSerialize(value))
        else:
            result_list = [canSerialize(element) for element in obj]
        return all(result_list)
    # if obj is an object and implements __getstate__, ZODB.serialize can check
    # if we can store it
    elif isinstance(obj, object) and hasattr(obj, '__getstate__') and hasattr(
            obj, '_p_jar'):
        # Need to unwrap the variable, otherwise we get a TypeError, because
        # objects cannot be pickled while inside an acquisition wrapper.
        unwrapped_obj = Acquisition.aq_base(obj)
        writer = ObjectWriter(unwrapped_obj)
        # Distinct loop variable: the original shadowed ``obj``.
        for subobject in writer:
            try:
                writer.serialize(subobject)
            # writer.serialize() relies on the object's own __getstate__, so
            # any exception type can surface here.  'except Exception' still
            # catches them all, but (unlike a bare 'except:') lets
            # KeyboardInterrupt and SystemExit propagate.
            except Exception:
                return False
        return True
    else:
        # If cannot serialize object with ZODB.serialize, try with cPickle.
        # Only a dump of the object is not enough. Dumping and trying to
        # load it will properly raise errors in all possible situations,
        # for example: if the user defines a dict with an object of a class
        # that he created the dump will still work, but the load will fail.
        try:
            cPickle.loads(cPickle.dumps(obj))
        # These are the exception types observed from failed pickle
        # round-trips (see http://bugs.python.org/issue1457119 for the
        # history of why name-based matching was once needed here).
        except (cPickle.PicklingError, TypeError, NameError,
                AttributeError):
            return False
        else:
            return True
Пример #7
0
    def __new__(cls, obj, recursive=False):
        """Walk the persistent records reachable from ``obj`` and
        re-serialize any whose stored class path differs from the live
        class (pickle migration).

        The instance is initialised as both an ObjectReader and an
        ObjectWriter on ``obj``'s connection.  Oids are processed from
        ``oid_set`` until exhausted; newly discovered oids are queued,
        already-migrated ones are skipped via ``migrated_oid_set``.

        :param obj: a persistent object (may be acquisition-wrapped).
        :param recursive: when False, some container types are traversed
            lazily instead of being loaded eagerly -- TODO confirm exact
            semantics of Ghost/LazyBTree against their definitions.
        """
        self = object.__new__(cls)
        obj = aq_base(obj)
        connection = obj._p_jar
        # Initialise both base classes explicitly on the same connection.
        ObjectReader.__init__(self, connection, connection._cache,
                              connection._db.classFactory)
        ObjectWriter.__init__(self, obj)
        migrated_oid_set = set()
        oid_set = {obj._p_oid}
        while oid_set:
            oid = oid_set.pop()
            obj = ObjectReader.load_oid(self, oid)
            obj._p_activate()
            klass = obj.__class__
            self.lazy = None
            if not recursive:
                # NOTE(review): classes whose _setOb is exactly
                # OFS.Folder._setOb are marked for lazy (Ghost) traversal;
                # WorkflowMethod wrappers are unwrapped to reach the real
                # function before comparison.
                _setOb = getattr(klass, '_setOb', None)
                if _setOb:
                    if isinstance(_setOb, WorkflowMethod):
                        _setOb = _setOb._m
                    import six
                    if six.get_unbound_function(
                            _setOb) is six.get_unbound_function(
                                OFS_Folder._setOb):
                        self.lazy = Ghost
                # BTrees (except Length) get their own lazy traversal.
                elif klass.__module__[:
                                      7] == 'BTrees.' and klass.__name__ != 'Length':
                    self.lazy = LazyBTree()
            self.oid_dict = {}
            self.oid_set = set()
            # Load the raw record and unpickle it manually so the stored
            # class path can be inspected.
            p, serial = self._conn._storage.load(oid, '')
            unpickler = self._get_unpickler(p)

            def find_global(*args):
                # Fires on the record's first GLOBAL opcode (its class):
                # a mismatch with the live class means this record needs
                # migrating, unless it is an old BTrees name.  Subsequent
                # GLOBALs go straight to _get_class.
                self.do_migrate = args != (klass.__module__, klass.__name__) and \
                                  not isOldBTree('%s.%s' % args)
                unpickler.find_global = self._get_class
                return self._get_class(*args)

            unpickler.find_global = find_global
            unpickler.load()  # class
            state = unpickler.load()
            if isinstance(self.lazy, LazyPersistent):
                # Queue oids referenced from the state for later visits.
                self.oid_set.update(self.lazy.getOidList(state))
            migrated_oid_set.add(oid)
            oid_set |= self.oid_set - migrated_oid_set
            self.oid_set = None
            if self.do_migrate:
                log.debug('PickleUpdater: migrate %r (%r)', obj, klass)
                # Re-serialize under the current class path and mark the
                # object changed so the transaction persists it.
                self.setGhostState(obj, self.serialize(obj))
                obj._p_changed = 1
Пример #8
0
    def _commit(self, transaction):
        """Commit changes to an object.

        Flushes any pending import, validates every registered object
        (conflict and ownership checks), then stores modified/added
        objects through ObjectWriter -- including objects added as a
        side effect during the store phase itself.
        """

        if self._import:
            # We are importing an export file. We always do this
            # while making a savepoint so we can copy export data
            # directly to our storage, typically a TmpStore.
            self._importDuringCommit(transaction, *self._import)
            self._import = None

        # Just in case an object is added as a side-effect of storing
        # a modified object.  If, for example, a __getstate__() method
        # calls add(), the newly added objects will show up in
        # _added_during_commit.  This sounds insane, but has actually
        # happened.

        self._added_during_commit = []

        if self._invalidatedCache:
            raise ConflictError()

        for obj in self._registered_objects:
            oid = obj._p_oid
            assert oid
            # An oid already recorded as conflicting is a hard failure.
            if oid in self._conflicts:
                raise ReadConflictError(object=obj)

            if obj._p_jar is not self:
                # Object belongs to another connection.
                raise InvalidObjectReference(obj, obj._p_jar)
            elif oid in self._added:
                # Newly added objects must not carry a real serial yet.
                assert obj._p_serial == z64
            elif obj._p_changed:
                # Modified object: an invalidation means a concurrent
                # write; only proceed if the class can resolve conflicts.
                if oid in self._invalidated:
                    resolve = getattr(obj, "_p_resolveConflict", None)
                    if resolve is None:
                        raise ConflictError(object=obj)
                self._modified.append(oid)
            else:
                # Nothing to do.  It's been said that it's legal, e.g., for
                # an object to set _p_changed to false after it's been
                # changed and registered.
                continue

            self._store_objects(ObjectWriter(obj), transaction)

        # Store anything that was added while storing, then reset the
        # side-effect list.
        for obj in self._added_during_commit:
            self._store_objects(ObjectWriter(obj), transaction)
        self._added_during_commit = None
Пример #9
0
    def __new__(cls, obj, recursive=False):
        """Walk the persistent records reachable from ``obj`` and
        re-serialize any whose stored class path differs from the live
        class (pickle migration).  Python-2-only variant: relies on
        ``im_func`` for unbound-method comparison.

        :param obj: a persistent object (may be acquisition-wrapped).
        :param recursive: when False, some container types are traversed
            lazily instead of being loaded eagerly -- TODO confirm exact
            semantics of Ghost/LazyBTree against their definitions.
        """
        assert cls.get, "Persistent migration of pickle requires ZODB >= 3.5"
        self = object.__new__(cls)
        obj = aq_base(obj)
        connection = obj._p_jar
        # Initialise both base classes explicitly on the same connection.
        ObjectReader.__init__(self, connection, connection._cache, connection._db.classFactory)
        ObjectWriter.__init__(self, obj)
        migrated_oid_set = set()
        oid_set = set((obj._p_oid,))
        while oid_set:
            oid = oid_set.pop()
            obj = self.get(oid)
            obj._p_activate()
            klass = obj.__class__
            self.lazy = None
            if not recursive:
                # NOTE(review): classes whose _setOb is exactly
                # OFS.Folder._setOb are marked for lazy (Ghost) traversal;
                # WorkflowMethod wrappers are unwrapped first.
                _setOb = getattr(klass, "_setOb", None)
                if _setOb:
                    if isinstance(_setOb, WorkflowMethod):
                        _setOb = _setOb._m
                    if _setOb.im_func is OFS_Folder._setOb.im_func:
                        self.lazy = Ghost
                # BTrees (except Length) get their own lazy traversal.
                elif klass.__module__[:7] == "BTrees." and klass.__name__ != "Length":
                    self.lazy = LazyBTree()
            self.oid_dict = {}
            self.oid_set = set()
            # Load the raw record and unpickle it manually so the stored
            # class path can be inspected.
            p, serial = self._conn._storage.load(oid, "")
            unpickler = self._get_unpickler(p)

            def find_global(*args):
                # Fires on the record's first GLOBAL opcode (its class):
                # a mismatch with the live class means this record needs
                # migrating, unless it is an old BTrees name.
                self.do_migrate = args != (klass.__module__, klass.__name__) and not isOldBTree("%s.%s" % args)
                unpickler.find_global = self._get_class
                return self._get_class(*args)

            unpickler.find_global = find_global
            unpickler.load()  # class
            state = unpickler.load()
            if isinstance(self.lazy, LazyPersistent):
                # Queue oids referenced from the state for later visits.
                self.oid_set.update(self.lazy.getOidList(state))
            migrated_oid_set.add(oid)
            oid_set |= self.oid_set - migrated_oid_set
            self.oid_set = None
            if self.do_migrate:
                log.debug("PickleUpdater: migrate %r (%r)", obj, klass)
                # Re-serialize under the current class path and mark the
                # object changed so the transaction persists it.
                self.setGhostState(obj, self.serialize(obj))
                obj._p_changed = 1
Пример #10
0
 def persistent_id(self, obj):
     """Return the (possibly remapped) persistent id for ``obj``.

     String oids already seen during this record's traversal are looked
     up in ``self.oid_dict``; a miss activates the object before falling
     back to the stock ObjectWriter behaviour.
     """
     assert type(obj) is not Ghost
     oid = self.getOid(obj)
     if type(oid) is str:
         # Membership test instead of try/except KeyError.
         if oid in self.oid_dict:
             return self.oid_dict[oid]
         obj._p_activate()
     return ObjectWriter.persistent_id(self, obj)
Пример #11
0
 def persistent_id(self, obj):
   """Return the (possibly remapped) persistent id for ``obj``.

   String oids with a cached mapping in ``self.oid_dict`` short-circuit;
   otherwise the object is activated and the stock ObjectWriter logic
   runs.
   """
   assert type(obj) is not Ghost
   oid = self.getOid(obj)
   if type(oid) is str:
     try:
       mapped = self.oid_dict[oid]
     except KeyError:
       # Not cached yet: load the object's state before delegating.
       obj._p_activate()
     else:
       return mapped
   return ObjectWriter.persistent_id(self, obj)
Пример #12
0
 def load_persistent(self, oid, klass):
   """Load a persistent object, recording its oid/persistent-id mapping
   while a traversal is in progress (``self.oid_set`` is not None)."""
   obj = ObjectReader.load_persistent(self, oid, klass)
   # Guard clause: outside a traversal there is nothing to record.
   if self.oid_set is None:
     return obj
   if not self.lazy:
     self.oid_set.add(oid)
   obj._p_activate()
   oid_klass = ObjectWriter.persistent_id(self, obj)
   self.oid_dict[oid] = oid_klass
   # Any change in the (oid, class) pair means a migration is needed.
   if oid_klass != (oid, klass):
     self.do_migrate = True
   return obj
Пример #13
0
def register(self, obj):
    """
    Serialize early to inspect PicklingErrors

    Raise any PicklingErrors when the object is added to the
    transaction as opposed to when the transaction is committed.
    Under pdb, for example, this allows inspecting the code that made
    the change resulting in the PicklingError.

    Requires either zope.testrunner or zope.testing which can be included using
    the 'zodb' or 'zodb-testing' extras respectively.
    """
    orig_register(self, obj)

    import cPickle as pickle

    ow = ObjectWriter(obj)
    # Swap in a plain pickler so this early pass doesn't assign oids.
    ow._p = pickle.Pickler(ow._file, 1)
    # Serialize immediately so pickling errors surface now, not at commit.
    ow.serialize(obj)