def do_test_artifact_cache(self, artifact_cache):
    """Run the standard lifecycle checks against ``artifact_cache``.

    Inserts an artifact, stomps the backing file, recovers the original
    content from the cache, and finally deletes the cache entry.

    :param artifact_cache: any cache object exposing ``has`` /
      ``use_cached_files`` / ``insert`` / ``delete`` and ``artifact_root``.
    """
    key = CacheKey('muppet_key', 'fake_hash', 42, [])
    with temporary_file(artifact_cache.artifact_root) as f:
        # Write the file.
        f.write(TEST_CONTENT1)
        path = f.name
        f.close()
        # Cache it.
        self.assertFalse(artifact_cache.has(key))
        self.assertFalse(bool(artifact_cache.use_cached_files(key)))
        artifact_cache.insert(key, [path])
        self.assertTrue(artifact_cache.has(key))
        # Stomp it.
        with open(path, 'w') as outfile:
            outfile.write(TEST_CONTENT2)
        # Recover it from the cache.
        self.assertTrue(bool(artifact_cache.use_cached_files(key)))
        # Check that it was recovered correctly.
        with open(path, 'r') as infile:
            content = infile.read()
        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(content, TEST_CONTENT1)
        # Delete it.
        artifact_cache.delete(key)
        self.assertFalse(artifact_cache.has(key))
def combine_cache_keys(cache_keys):
    """Combine a non-empty list of cache keys into one composite key.

    A single key is returned unchanged. Otherwise the keys are sorted so
    the combined key is independent of input order (commutativity), then
    their ids and hashes are comma-joined and their source counts summed.

    :param cache_keys: non-empty list of CacheKey instances.
    :returns: a CacheKey representing the whole set.
    """
    if len(cache_keys) == 1:
        return cache_keys[0]
    # Sort for commutativity: the same set of keys always combines identically.
    sorted_cache_keys = sorted(cache_keys)
    combined_id = ','.join(key.id for key in sorted_cache_keys)
    combined_hash = ','.join(key.hash for key in sorted_cache_keys)
    # sum() replaces reduce(lambda x, y: x + y, ..., 0) — same result, and it
    # does not depend on the reduce builtin that Python 3 moved to functools.
    combined_num_sources = sum(key.num_sources for key in sorted_cache_keys)
    return CacheKey(combined_id, combined_hash, combined_num_sources, [])
def test_failed_multiproc(self):
    """A failing REST cache reports failure from subprocesses, never raises."""
    ctx = create_context()
    cache_key = CacheKey('muppet_key', 'fake_hash', 55)
    # Failed requests should return failure status, but not raise exceptions
    with self.setup_rest_cache(return_failed=True) as cache:
        lookup = ctx.subproc_map(call_use_cached_files, [(cache, cache_key)])
        self.assertFalse(lookup[0])
        with self.setup_test_file(cache.artifact_root) as path:
            ctx.subproc_map(call_insert, [(cache, cache_key, [path], False)])
            lookup = ctx.subproc_map(call_use_cached_files, [(cache, cache_key)])
            self.assertFalse(lookup[0])
def test_multiproc(self):
    """Insert and retrieve through subprocesses against both cache types.

    The local and the RESTful cache must pass the identical miss/insert/hit
    sequence, so the duplicated body is folded into one loop.
    """
    context = create_context()
    key = CacheKey('muppet_key', 'fake_hash', 42)
    # Same expectations for both cache implementations.
    for setup_cache in (self.setup_local_cache, self.setup_rest_cache):
        with setup_cache() as cache:
            # assertEquals is a deprecated alias of assertEqual.
            self.assertEqual(
                context.subproc_map(call_use_cached_files, [(cache, key)]), [False])
            with self.setup_test_file(cache.artifact_root) as path:
                context.subproc_map(call_insert, [(cache, key, [path], False)])
                self.assertEqual(
                    context.subproc_map(call_use_cached_files, [(cache, key)]), [True])
def test_local_backed_remote_cache(self):
    """The combined cache must find what either layer has, and must backfill
    the local layer on a remote hit."""
    with self.setup_server() as url:
        with self.setup_local_cache() as local_cache:
            scratch = TempLocalArtifactCache(local_cache.artifact_root, 0)
            remote_only = RESTfulArtifactCache(local_cache.artifact_root, url, scratch)
            combined = RESTfulArtifactCache(local_cache.artifact_root, url, local_cache)
            key = CacheKey('muppet_key', 'fake_hash', 42)
            every_cache = (local_cache, remote_only, combined)
            with self.setup_test_file(local_cache.artifact_root) as path:
                # Initially no cache knows the key...
                for cache in every_cache:
                    self.assertFalse(cache.has(key))
                # ...and none can serve it.
                for cache in every_cache:
                    self.assertFalse(bool(cache.use_cached_files(key)))
                # A failed lookup must leave every cache unchanged.
                for cache in every_cache:
                    self.assertFalse(cache.has(key))
                # Add to only remote cache.
                remote_only.insert(key, [path])
                # Remote (and therefore the combined view) now has it; local does not.
                self.assertFalse(local_cache.has(key))
                self.assertTrue(remote_only.has(key))
                self.assertTrue(combined.has(key))
                # Successfully using via remote should NOT change local.
                self.assertTrue(bool(remote_only.use_cached_files(key)))
                self.assertFalse(local_cache.has(key))
                # Successfully using via combined SHOULD backfill local.
                self.assertTrue(bool(combined.use_cached_files(key)))
                self.assertTrue(local_cache.has(key))
                self.assertTrue(bool(local_cache.use_cached_files(key)))
def key_for(self, tid, sources):
    """Build a cache key for ``tid``, using the id itself as the hash and
    the number of sources as the source count."""
    source_count = len(sources)
    return CacheKey(tid, tid, source_count)
def key_for_target(self, target, sources=None, transitive=False, fingerprint_strategy=None):
    """Build a cache key from the target's id (used as both id and hash)
    and its chunking-unit count; the remaining parameters are accepted for
    interface compatibility and ignored here."""
    target_id = target.id
    return CacheKey(target_id, target_id, target.num_chunking_units)
def key_for_target(self, target, sources=None, transitive=False):
    """Build a cache key from the target's id (used as both id and hash)
    and the payload's chunking-unit count; ``sources`` and ``transitive``
    are accepted for interface compatibility and ignored here."""
    target_id = target.id
    return CacheKey(target_id, target_id, target.payload.num_chunking_units, [])