    def update_record(self, pathname, warebox_size, storage_size, lmtime,
                      warebox_etag, storage_etag):

        lmtime_str = lmtime.strftime('%Y-%m-%d %H:%M:%S')
        AbstractCache.update_record(self, pathname, warebox_size, storage_size,
                                    lmtime_str, warebox_etag, storage_etag)
    def get_all_records(self):
        records = AbstractCache.get_all_records(self)
        result = []
        for record in records:
            op_id, operation_str, timestamp_str = record
            # Pickled data is stored as binary data in a BLOB field,
            # so convert it back to a byte string before unpickling
            operation = pickle.loads(str(operation_str))
            timestamp = datetime.strptime(timestamp_str, '%Y-%m-%d %H:%M:%S')
            result.append((op_id, operation, timestamp))
        return result
    def get_all_records(self):
        """
        Return the cached records as a list of tuples, or False on error.

        Each row is represented as a tuple containing the column values.

        @return: a list of tuples; each tuple contains
                 (pathname, warebox_size, storage_size,
                  lmtime, warebox_etag, storage_etag)
        """
        records = AbstractCache.get_all_records(self)
        result = []
        for record in records:
            (pathname, warebox_size, storage_size,
             lmtime, warebox_etag, storage_etag) = record
            # lmtime is stored as text; parse it back into a datetime object
            lmtime = datetime.datetime.strptime(lmtime, '%Y-%m-%d %H:%M:%S')
            result.append((pathname, warebox_size, storage_size, lmtime,
                           warebox_etag, storage_etag))
        return result
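# A minimal usage sketch for the file-metadata cache methods shown above
# (update_record and get_all_records). The class name FilesCache, the database
# file name and the sample values are assumptions made only for illustration;
# nothing beyond the two methods themselves comes from the snippets.
from datetime import datetime

cache = FilesCache('files_cache.db')
cache.update_record('docs/report.txt', 1024, 1024,
                    datetime(2013, 5, 1, 12, 30), 'local-etag', 'remote-etag')
# Each record comes back with lmtime already parsed into a datetime object
(pathname, warebox_size, storage_size,
 lmtime, warebox_etag, storage_etag) = cache.get_all_records()[0]
assert lmtime == datetime(2013, 5, 1, 12, 30)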
    def update_record(self, op_id, operation, transaction_timestamp):
        # Pickled data is stored as binary data in a BLOB field; buffer()
        # wraps the pickled byte string so sqlite stores it as a BLOB (Python 2)
        operation_str = buffer(pickle.dumps(operation))
        timestamp_str = transaction_timestamp.strftime('%Y-%m-%d %H:%M:%S')
        AbstractCache.update_record(self, op_id, operation_str, timestamp_str)
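# Round-trip sketch for the pickling update_record()/get_all_records() pair.
# The class name TransactionCache and the operation payload are illustrative
# assumptions. Note that buffer() and pickle.loads(str(...)) are Python 2
# idioms; under Python 3 one would typically pass
# sqlite3.Binary(pickle.dumps(operation)) and unpickle the bytes directly.
from datetime import datetime

cache = TransactionCache('transactions.db')
operation = {'verb': 'UPLOAD', 'pathname': 'docs/report.txt'}
cache.update_record(1, operation, datetime(2013, 5, 1, 12, 30))
op_id, restored, timestamp = cache.get_all_records()[0]
assert restored == operation and timestamp == datetime(2013, 5, 1, 12, 30)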
    def __init__(self, database_file):
        logger = logging.getLogger("FR.%s" % self.__class__.__name__)
        AbstractCache.__init__(self, database_file, TABLE_NAME, SCHEMA, KEY,
                               logger)
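# The __init__ above relies on module-level constants that the snippets do not
# show. The values below are hypothetical, given purely to illustrate the
# expected shapes: a table name, a list of column definitions, and the name of
# the primary-key column.
TABLE_NAME = 'files'
SCHEMA = ['pathname text', 'warebox_size int', 'storage_size int',
          'lmtime text', 'warebox_etag text', 'storage_etag text']
KEY = 'pathname'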
class Test(unittest.TestCase):

    def setUp(self):
        self.cache = AbstractCache(FILENAME, DATABASE_NAME, SCHEMA, 'first')
        self.cache1 = AbstractCache('cache1.db', DATABASE_NAME, SCHEMA, 'first')
        self.cache2 = AbstractCache('cache2.db', DATABASE_NAME, SCHEMA, 'first')

    def tearDown(self):
        self.cache.destroy()
        self.cache1.destroy()
        self.cache2.destroy()
        assert not os.path.exists(FILENAME)
        assert not os.path.exists('cache1.db')
        assert not os.path.exists('cache2.db')

    def test_reload_with_wrong_schema(self):
        with self.assertRaises(WrongSchema):
            AbstractCache(FILENAME, DATABASE_NAME, SCHEMA[2:], 'first')

    def test_reload_and_destroy(self):
        newCache = AbstractCache(FILENAME, DATABASE_NAME, SCHEMA, 'first')
        newCache.destroy()
        self.assertFalse(os.path.exists(FILENAME))

    def test_set_wrong_key(self):
        with self.assertRaises(NonexistentKey):
            AbstractCache(FILENAME, DATABASE_NAME, SCHEMA, 'bad')

    def test_set_key(self):
        self.cache.key = 'second'

    def insert_right_number_columns(self):
        self.record = (1, 2, 3, 'foo')
        self.cache.update_record(*self.record)

    def test_update_new_record(self):
        self.insert_right_number_columns()
        self.assertEqual(self.cache.get_all_records(), [self.record])

    def test_update_existing_record(self):
        self.cache.update_record(1, 2, 3, 'foo')
        self.cache.update_record(1, 4, 5, 'bar')
        self.assertEqual(self.cache.get_all_records(), [(1, 4, 5, 'bar')])

    def test_update_record_wrong_number_columns(self):
        with self.assertRaises(WrongNumberOfParameters):
            self.cache.update_record(1, 2, 3, 'foo', '123')

    def test_update_fields_without_parameters(self):
        self.insert_right_number_columns()
        with self.assertRaises(WrongNumberOfParameters):
            self.cache.update_record_fields(1)

    def test_update_fields_with_too_many_parameters(self):
        self.insert_right_number_columns()
        with self.assertRaises(WrongNumberOfParameters):
            self.cache.update_record_fields(1,
                                            second=5,
                                            third=10,
                                            fourth='foo',
                                            fifth='alpha')

    def test_update_fields_with_nonexisting_column(self):
        self.insert_right_number_columns()
        with self.assertRaises(UnknownColumn):
            self.cache.update_record_fields(1,
                                            second=5,
                                            third=10,
                                            fifth='alpha')

    def test_update_fields_all_columns(self):
        self.insert_right_number_columns()
        self.cache.update_record_fields(1, second=5, third=10, fourth='alpha')
        self.assertIn((1, 5, 10, u'alpha'), self.cache.get_all_records())

    def test_update_fields_some_columns(self):
        self.insert_right_number_columns()
        self.cache.update_record_fields(1, fourth='alpha')
        self.assertIn((1, 2, 3, u'alpha'), self.cache.get_all_records())

    def test_get_non_existent_record(self):
        self.assertIsNone(self.cache.get_record(123))

    def test_delete_non_existent_record(self):
        self.cache.delete_record('foo')

    def test_delete_record(self):
        self.insert_right_number_columns()
        self.cache.delete_record(1)
        self.assertIsNone(self.cache.get_record(1))
        self.test_emptyness()

    def test_delete_all_records(self):
        self.insert_right_number_columns()
        record2 = (5, 6, 7, 'foo')
        self.cache.update_record(*record2)
        self.cache.delete_records([1, 5])
        self.test_emptyness()

    def test_delete_some_records(self):
        self.insert_right_number_columns()
        record2 = (5, 6, 7, 'foo')
        record3 = (9, 3, 1, 'bar')
        self.cache.update_record(*record2)
        self.cache.update_record(*record3)
        self.cache.delete_records([1, 5])
        self.assertEqual([(9, 3, 1, 'bar')], self.cache.get_all_records())

    def test_emptyness(self):
        self.assertEqual(self.cache.get_all_records(), [])

    def test_multi_cache_transaction(self):
        with self.cache1.transaction(self.cache2) as (c1, c2):
            c1.update_record(1, 2, 3, 'foo')
            c2.update_record(4, 5, 6, 'bar')
        self.assertEqual([(1, 2, 3, 'foo')], self.cache1.get_all_records())
        self.assertEqual([(4, 5, 6, 'bar')], self.cache2.get_all_records())

    def test_rollback_for_multi_cache_transaction(self):
        try:
            with self.cache1.transaction(self.cache2) as (c1, c2):
                c1.update_record(1, 2, 3, 'foo')
                c2.update_record(4, 5, 6, 'bar')
                raise Exception()
        except Exception:
            self.assertEqual([], self.cache1.get_all_records())
            self.assertEqual([], self.cache2.get_all_records())
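# The two transaction tests above assume AbstractCache provides a transaction()
# context manager that commits every involved cache on success and rolls all
# of them back when an exception escapes the with block. Below is a minimal
# sketch of how such a method could look, written as a stand-alone function but
# intended as a method on AbstractCache, and assuming each cache keeps its
# sqlite3 connection in self.connection. This is an illustration, not the
# library's actual implementation.
from contextlib import contextmanager

@contextmanager
def transaction(self, *others):
    # Yield this cache together with the other caches so the caller can
    # update several of them atomically
    caches = (self,) + others
    try:
        yield caches
        for cache in caches:
            cache.connection.commit()
    except Exception:
        for cache in caches:
            cache.connection.rollback()
        raise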