Ejemplo n.º 1 (puntuación: 0)
 def get_state(self, data, load=True):
     """Unpickle *data*, a class pickle followed by a state pickle.

     The payload is two concatenated pickles: first the object's
     class, then its state.  If the first byte of the state section
     is COMPRESSED_START_BYTE, the state is assumed to be
     zlib-compressed and is decompressed in place before use.

     data: bytes holding the two concatenated pickles.
     load: when true, return the unpickled state object; when false,
           return the raw (possibly decompressed) state pickle bytes.
     """
     self.load_count += 1
     s = BytesIO()
     s.write(data)
     s.seek(0)
     unpickler = self._get_unpickler(s)
     # First pickle is the class.  Its value is unused here, but
     # loading it advances the stream to the start of the state.
     unpickler.load()
     position = s.tell()
     # Guard the index so an empty state section does not raise
     # IndexError, and reuse `position` rather than calling s.tell()
     # a second time.
     if position < len(data) and data[position] == COMPRESSED_START_BYTE:
         # This is almost certainly a compressed pickle.
         try:
             decompressed = decompress(data[position:])
         except zlib_error:
             pass # not valid zlib after all; let the unpickler try anyway.
         else:
             # Overwrite the compressed tail with the decompressed
             # pickle, then rewind to the start of the state.
             s.write(decompressed)
             s.seek(position)
     if load:
         return unpickler.load()
     else:
         return s.read()
Ejemplo n.º 2 (puntuación: 0)
 def get_state(self, data, load=True):
     """Unpickle the two-pickle payload *data* (class, then state).

     Returns the unpickled state object when *load* is true, and the
     raw (possibly decompressed) state pickle bytes otherwise.  A
     state section whose first byte is COMPRESSED_START_BYTE is
     treated as zlib-compressed and expanded in place first.
     """
     self.load_count += 1
     stream = BytesIO(data)
     unpickler = self._get_unpickler(stream)
     klass = unpickler.load()  # advances past the class pickle
     state_start = stream.tell()
     if data[state_start] == COMPRESSED_START_BYTE:
         # This is almost certainly a compressed pickle.
         try:
             expanded = decompress(data[state_start:])
         except zlib_error:
             # Not valid zlib after all; let the unpickler try anyway.
             pass
         else:
             # Replace the compressed tail with the expanded pickle
             # and rewind to the start of the state section.
             stream.write(expanded)
             stream.seek(state_start)
     return unpickler.load() if load else stream.read()
Ejemplo n.º 3 (puntuación: 0)
class ObjectWriter (object):
    """
    Serializes objects for storage in the database.

    The client is responsible for calling the close() method to avoid
    leaking memory.  The ObjectWriter uses a Pickler internally, and
    Pickler objects do not participate in garbage collection.
    """

    def __init__(self, connection):
        # Reusable pickle buffer; get_state() rewinds and truncates it
        # on every call instead of allocating a new BytesIO.
        self.sio = BytesIO()
        self.pickler = Pickler(self.sio, PICKLE_PROTOCOL)
        # Route instances through _persistent_id() during pickling;
        # call_if_persistent presumably filters so that non-persistent
        # objects are pickled inline -- TODO confirm its semantics.
        self.pickler.persistent_id = method(
            call_if_persistent, self._persistent_id)
        # Persistent objects first assigned an oid while pickling;
        # consumed by gen_new_objects().
        self.objects_found = []
        self.refs = set() # populated by _persistent_id()
        self.connection = connection

    def close(self):
        # see ObjectWriter.__doc__
        # Explicitly break cycle involving pickler: replace the bound
        # method (which references self) with a harmless builtin, then
        # drop the pickler itself.
        self.pickler.persistent_id = int
        self.pickler = None

    def _persistent_id(self, obj):
        """(PersistentBase) -> (oid:str, klass:type)
        This is called on PersistentBase instances during pickling.

        Side effects: an object seen for the first time (oid is None)
        is bound to this connection, given a fresh oid, and recorded in
        objects_found; every call adds the oid to self.refs.
        """
        if obj._p_oid is None:
            obj._p_oid = self.connection.new_oid()
            obj._p_connection = self.connection
            self.objects_found.append(obj)
        elif obj._p_connection is not self.connection:
            # References may not cross connections.
            raise ValueError(
                "Reference to %r has a different connection." % obj)
        self.refs.add(obj._p_oid)
        return obj._p_oid, type(obj)

    def gen_new_objects(self, obj):
        """Yield obj, then each object in self.objects_found.

        Replaces itself on the instance so that a second call raises
        RuntimeError.
        """
        def once(obj):
            raise RuntimeError('gen_new_objects() already called.')
        self.gen_new_objects = once
        yield obj # The modified object is also a "new" object.
        for obj in self.objects_found:
            yield obj

    def get_state(self, obj):
        """(obj) -> (data:bytes, refs:bytes)

        data is the pickle of obj's class followed by the pickle of
        obj.__getstate__() (the state part compressed when
        WRITE_COMPRESSED_STATE_PICKLES is set); refs is the joined set
        of oids referenced while pickling the state.
        """
        self.sio.seek(0) # recycle BytesIO instance
        self.sio.truncate()
        self.pickler.clear_memo()
        self.pickler.dump(type(obj))
        # Only references made by the state pickle are tracked.
        self.refs.clear()
        position = self.sio.tell()
        self.pickler.dump(obj.__getstate__())
        uncompressed = self.sio.getvalue()
        # Split the shared buffer back into the two pickles.
        pickled_type = uncompressed[:position]
        pickled_state = uncompressed[position:]
        if WRITE_COMPRESSED_STATE_PICKLES:
            state = compress(pickled_state)
        else:
            state = pickled_state
        data = pickled_type + state
        # The object's own oid is not an outgoing reference.
        self.refs.discard(obj._p_oid)
        return data, join_bytes(self.refs)
Ejemplo n.º 4 (puntuación: 0)
class ObjectWriter(object):
    """
    Serializes objects for storage in the database.

    The client is responsible for calling the close() method to avoid
    leaking memory.  The ObjectWriter uses a Pickler internally, and
    Pickler objects do not participate in garbage collection.
    """
    def __init__(self, connection):
        self.connection = connection
        # One buffer and one pickler, reused across get_state() calls.
        self.sio = BytesIO()
        self.pickler = Pickler(self.sio, PICKLE_PROTOCOL)
        self.pickler.persistent_id = method(call_if_persistent,
                                            self._persistent_id)
        # New persistent objects discovered during pickling.
        self.objects_found = []
        self.refs = set()  # filled in by _persistent_id()

    def close(self):
        # see ObjectWriter.__doc__
        # Break the reference cycle through the pickler by swapping the
        # bound method for a harmless builtin, then drop the pickler.
        self.pickler.persistent_id = int
        self.pickler = None

    def _persistent_id(self, obj):
        """(PersistentBase) -> (oid:str, klass:type)
        This is called on PersistentBase instances during pickling.
        """
        oid = obj._p_oid
        if oid is None:
            # First sighting: bind the object to this connection.
            oid = self.connection.new_oid()
            obj._p_oid = oid
            obj._p_connection = self.connection
            self.objects_found.append(obj)
        elif obj._p_connection is not self.connection:
            raise ValueError("Reference to %r has a different connection." %
                             obj)
        self.refs.add(oid)
        return oid, type(obj)

    def gen_new_objects(self, obj):
        """Yield obj, then every object recorded in objects_found.

        Single-use: replaces itself on the instance so a repeat call
        raises RuntimeError.
        """
        def once(obj):
            raise RuntimeError('gen_new_objects() already called.')

        self.gen_new_objects = once
        yield obj  # The modified object is also a "new" object.
        for found in self.objects_found:
            yield found

    def get_state(self, obj):
        """Return (data, refs): the class+state pickles of obj and the
        joined oids referenced by its state."""
        sio = self.sio
        sio.seek(0)  # recycle the BytesIO instance
        sio.truncate()
        self.pickler.clear_memo()
        self.pickler.dump(type(obj))
        # Track only the references made by the state pickle.
        self.refs.clear()
        split = sio.tell()
        self.pickler.dump(obj.__getstate__())
        raw = sio.getvalue()
        class_pickle, state_pickle = raw[:split], raw[split:]
        if WRITE_COMPRESSED_STATE_PICKLES:
            state_pickle = compress(state_pickle)
        data = class_pickle + state_pickle
        # An object's own oid is not an outgoing reference.
        self.refs.discard(obj._p_oid)
        return data, join_bytes(self.refs)