Example #1
0
 def setUp(self):
     # Build a fresh in-memory sqlite fixture for every test.
     self.options = MockOptionsWithMemoryDB()
     connection = sqlite_connect(
         self.options, "pfx-ignored", close_async=False)
     # Sanity check: the mock options must actually route us to :memory:.
     assert connection.rs_db_filename == ':memory:', connection
     self.connection = connection
     self.db = self._makeOne()
Example #2
0
    def test_delete_stale_objects_on_save(self):
        """
        An object row that exists in the sqlite store but whose tid has
        been superseded (and whose newer value was evicted from memory)
        is removed from the store when the cache is written back.
        """
        from relstorage.cache.local_database import Database
        from relstorage.cache.persistence import sqlite_connect
        c = self._makeOne(cache_local_dir=":memory:")

        # Seed the persistent store with (oid=0, tid=0).
        conn = sqlite_connect(c.options, 'ignored-pfx', close_async=False)
        self.addCleanup(conn.close)
        db = Database.from_connection(conn)
        db.store_temp([(0, 0, b'state', 0)])
        db.move_from_temp()
        self.assertEqual(dict(db.oid_to_tid), {0: 0})
        conn.commit()
        # Pretend we loaded this from the db
        c[(0, 0)] = (b'state', 0)
        c._min_allowed_writeback[0] = 0

        # Pass a newer version through
        c[(0, 1)] = (b'state', 1)
        self.assertEqual(c._min_allowed_writeback[0], 1)
        # Evict it so we don't have it to write.
        del c._bucket0[(0, 1)]

        # But it gets removed based on having seen it and knowing
        # it's there.
        conn._rs_has_closed = True # Prevent closing by the write method
        try:
            c.write_to_sqlite(conn)
        finally:
            conn._rs_has_closed = False

        # The stale row must be gone even though nothing new was written.
        self.assertEmpty(db.oid_to_tid)
Example #3
0
    def save(self, object_index=None, checkpoints=None, **sqlite_args):
        """
        Write the cache contents to the persistent sqlite database.

        Does nothing unless a cache directory is configured and the
        cache has grown past its initial weight. Returns 1 when a write
        was attempted, 0 when the database could not be opened (a
        testing signal), and None when saving was skipped.
        """
        options = self.options
        # Guard clause: nothing to persist, or persistence disabled.
        if not options.cache_local_dir or self.size <= self.__initial_weight:
            return

        try:
            conn = sqlite_connect(options, self.prefix, **sqlite_args)
        except FAILURE_TO_OPEN_DB_EXCEPTIONS:
            logger.exception("Failed to open sqlite to write")
            return 0

        with closing(conn):
            self.write_to_sqlite(conn, checkpoints, object_index)
        # Testing: signal that we tried to write something.
        return 1
Example #4
0
    def remove_invalid_persistent_oids(self, bad_oids):
        """
        Remove data from the persistent cache for the given oids.
        """
        options = self.options
        if not options.cache_local_dir:
            return

        removed = 0
        # ``source`` doubles as the thing we log about: either a
        # placeholder string or the actual connection we pruned.
        source = '(no oids to remove)'
        if bad_oids:
            source = sqlite_connect(options, self.prefix, close_async=False)
            with closing(source):
                database = Database.from_connection(source)
                removed = database.remove_invalid_persistent_oids(bad_oids)
        logger.debug("Removed %d invalid OIDs from %s", removed, source)
Example #5
0
    def restore(self):
        """
        Load the data from the persistent database.

        Returns the checkpoint data last saved, which may be None if
        there was no data.
        """
        options = self.options
        # Persistence disabled: nothing to read.
        if not options.cache_local_dir:
            return
        try:
            conn = sqlite_connect(options, self.prefix)
        except FAILURE_TO_OPEN_DB_EXCEPTIONS:
            # Best-effort: an unreadable cache file is not fatal.
            logger.exception("Failed to read data from sqlite")
            return
        with closing(conn):
            return self.read_from_sqlite(conn)
Example #6
0
    def restore(self, row_filter=None):
        """
        Load the data from the persistent database.

        If *row_filter* is given, it is a ``callable(checkpoints, row_iter)``
        that should return an iterator of four-tuples: ``(oid, key_tid, state, state_tid)``
        from the input rows ``(oid, state_tid, actual_tid)``. It is guaranteed
        that you won't see the same oid more than once.
        """
        options = self.options
        # Persistence disabled: nothing to read.
        if not options.cache_local_dir:
            return
        try:
            conn = sqlite_connect(options, self.prefix, close_async=False)
        except FAILURE_TO_OPEN_DB_EXCEPTIONS:
            # Best-effort: an unreadable cache file is not fatal.
            logger.exception("Failed to read data from sqlite")
            return
        with closing(conn):
            self.read_from_sqlite(conn, row_filter)
Example #7
0
    def save(self, **sqlite_args):
        """
        Write the cache contents to the persistent sqlite database.

        Returns 1 when a write was attempted, 0 on failure to open or a
        corrupted-cache condition (a testing signal), and None when
        saving was skipped entirely.
        """
        options = self.options
        # Guard clause: persistence disabled or nothing to write.
        if not options.cache_local_dir or not self.__bucket.size:
            return

        try:
            conn = sqlite_connect(options, self.prefix, **sqlite_args)
        except FAILURE_TO_OPEN_DB_EXCEPTIONS:
            logger.exception("Failed to open sqlite to write")
            return 0

        with closing(conn):
            try:
                self.write_to_sqlite(conn)
            except CacheCorruptedError:
                # The cache_trace_analysis.rst test fills us with junk
                # data and triggers this; drop everything rather than
                # persist garbage.
                logger.exception("Failed to save cache")
                self.flush_all()
                return 0
        # Testing: signal that we tried to write something.
        return 1
Example #8
0
 def connect(self):
     """Open a sqlite connection using this object's options and timeout."""
     return sqlite_connect(
         self.options, "pfx-ignored", timeout=self.timeout)
Example #9
0
 def connect(self):
     """Open a sqlite connection with synchronous close semantics."""
     return sqlite_connect(
         self.options, "pfx-ignored",
         close_async=False, timeout=self.timeout)