def __init__(self, store, cache_expiration_time=60, cache_size_in_mb=2000, cache_id=None, max_archive_size_in_mb=4, cache_dir='/tmp/cloudfusion'):
    """Wrap *store* with a persistent, chunking, multiprocessing-safe cache.

    :param store: the store whose access should be cached
    :param cache_expiration_time: the time in seconds until any cache entry is expired
    :param cache_size_in_mb: Approximate (soft) limit of the cache in MB.
    :param cache_id: Serves as identifier for a persistent cache instance; a random
        id is generated if *None* is given.
    :param max_archive_size_in_mb: the maximum size of an archive
    :param cache_dir: Cache directory on local hard drive disk, default value is */tmp/cloudfusion*."""
    # Prevent simultaneous access to store (synchronous use of __deepcopy__
    # by _store SyncThread and a different method):
    self.store = SynchronizeProxy(store, private_methods_to_synchronize=['__deepcopy__'])
    self.max_archive_size_in_mb = max_archive_size_in_mb
    if cache_id is None:  # 'is None' (identity), not '== None'
        cache_id = str(random.random())
    self.logger = logging.getLogger(self.get_logging_handler())
    self.logger.debug("creating ChunkTransparentMultiprocessingCachingStore object")
    if cache_expiration_time < 240:
        self.logger.warning("Be aware of the synchronization issue https://github.com/joe42/CloudFusion/issues/16 "
                            "or to avoid the issue set cache_expiration_time to more than 240 seconds.")
    self.cache_expiration_time = cache_expiration_time
    self.time_of_last_flush = time.time()
    # Strip a trailing slash so the concatenation below yields exactly one separator.
    self.cache_dir = cache_dir[:-1] if cache_dir[-1:] == '/' else cache_dir
    temp_dir = self.cache_dir + "/cachingstore_" + cache_id
    cache = PersistentLRUCache(temp_dir, cache_expiration_time, cache_size_in_mb)
    cache.set_resize_intervall(10)
    self.entries = SynchronizeProxy(cache)  # [shares_resource: write self.entries]
    self.sync_thread = ChunkStoreSyncThread(self.entries, self.store, temp_dir, self.logger)
    self.sync_thread.start()
def test_resize_zerosize():
    """With maxsize 0, adding a second entry must evict the first one."""
    cache = PersistentLRUCache(directory=directory, expiration_time=0.00001, maxsize_in_MB=0)
    cache.set_resize_intervall(0)
    cache.refresh("some_key", "43", time.time())
    time.sleep(0.001)
    assert "some_key" in cache.get_keys()
    cache.refresh("some_other_key", "42", time.time())
    assert "some_other_key" in cache.get_keys()
    assert "some_key" not in cache.get_keys()  # deleted due to internal resize
    assert cache.get_value("some_other_key") == "42"
def test_resize():
    """Cache stays under its soft size limit, keeping only the newest entries."""
    payload = "a" * 2000000  # ~2 MB per entry
    cache = PersistentLRUCache(directory=directory, expiration_time=0.00001, maxsize_in_MB=30)
    cache.set_resize_intervall(0)
    for i in range(10, 62):
        cache.refresh(str(i), payload, time.time())
        time.sleep(0.001)
        assert cache.get_size_of_cached_data() < 30000003
        # Everything older than the 14 most recent entries must be gone...
        for evicted in range(10, i - 14 + 1):
            assert str(evicted) not in cache.get_keys()
        # ...while the 14 most recent entries are still retrievable.
        for kept in list(range(10, i + 1))[-14:]:
            assert cache.get_value(str(kept)) == payload
def __init__(self, store, cache_expiration_time=60, cache_size_in_mb=2000, cache_id=None, cache_dir='/tmp/cloudfusion/'):
    """Wrap *store* with a persistent cache that is flushed by a background thread.

    :param store: the store whose access should be cached
    :param cache_expiration_time: the time in seconds until any cache entry is expired
    :param cache_size_in_mb: Approximate limit of the cache in MB.
    :param cache_id: Serves as identifier for a persistent cache instance; a random
        id is generated if *None* is given.
    :param cache_dir: Cache directory on local hard drive disk, default value is */tmp/cloudfusion*. """
    # Prevent simultaneous access to store (synchronous use of __deepcopy__
    # by _store SyncThread and a different method):
    self.store = SynchronizeProxy(store, private_methods_to_synchronize=['__deepcopy__'])
    if cache_id is None:  # 'is None' (identity), not '== None'
        cache_id = str(random.random())
    self.logger = logging.getLogger(self.get_logging_handler())
    self.logger.debug("creating CachingStore object")
    if cache_expiration_time < 240:
        self.logger.warning("Be aware of the synchronization issue https://github.com/joe42/CloudFusion/issues/16 "
                            "or to avoid the issue set cache_expiration_time to more than 240 seconds.")
    self.cache_expiration_time = cache_expiration_time
    self.time_of_last_flush = time.time()
    # Strip a trailing slash so the concatenation below yields exactly one separator.
    self.cache_dir = cache_dir[:-1] if cache_dir[-1:] == '/' else cache_dir
    cache = PersistentLRUCache(self.cache_dir + "/cachingstore_" + cache_id,
                               cache_expiration_time, cache_size_in_mb)
    cache.set_resize_intervall(10)
    self.entries = SynchronizeProxy(cache)  # [shares_resource: write self.entries]
    self.sync_thread = StoreSyncThread(self.entries, self.store, self.logger)
    self.sync_thread.start()
def test_resize_dirty():
    """Entries added via write() are not evicted by resize, even with maxsize 0."""
    payload = "a" * 2000000  # ~2 MB per entry
    cache = PersistentLRUCache(directory=directory, maxsize_in_MB=0)
    cache.set_resize_intervall(0)
    for idx in range(10, 62):
        cache.write(str(idx), payload)
    # 52 entries of ~2 MB each remain cached despite the zero-size limit.
    assert cache.get_size_of_cached_data() > 50000000