def do_test_artifact_cache(artifact_cache):
    """Exercise the basic insert / recover / delete cycle of an artifact cache.

    Writes a temp file under the cache's artifact root, caches it, stomps
    the on-disk copy, recovers the original content from the cache, and
    finally deletes the cache entry.
    """
    key = CacheKey('muppet_key', 'fake_hash', 42)
    with temporary_file(artifact_cache.artifact_root) as tmp:
        # Write known content, then close so the file can be reopened below.
        tmp.write(TEST_CONTENT1)
        tmp_path = tmp.name
        tmp.close()

        # Nothing cached under this key yet.
        assert not artifact_cache.has(key)
        assert not artifact_cache.use_cached_files(key)

        # Cache the file.
        artifact_cache.insert(key, [tmp_path])
        assert artifact_cache.has(key)

        # Stomp the on-disk copy with different content.
        with open(tmp_path, 'w') as stomped:
            stomped.write(TEST_CONTENT2)

        # Recover from the cache and verify the original content came back.
        assert artifact_cache.use_cached_files(key)
        with open(tmp_path, 'r') as recovered:
            restored_content = recovered.read()
        assert restored_content == TEST_CONTENT1

        # Delete the entry; the key must no longer be present.
        artifact_cache.delete(key)
        assert not artifact_cache.has(key)
def combine_cache_keys(cache_keys):
    """Combine multiple cache keys into a single one.

    A single key is returned unchanged. Otherwise the keys are sorted
    first so the combination is commutative (the same set of keys yields
    the same result regardless of argument order), then their ids, hashes
    and source counts are merged into a new CacheKey.

    :param cache_keys: a non-empty sequence of CacheKey objects.
    :returns: a CacheKey representing the whole sequence.
    """
    if len(cache_keys) == 1:
        return cache_keys[0]
    # Sort for commutativity.
    sorted_cache_keys = sorted(cache_keys)
    combined_id = ','.join(key.id for key in sorted_cache_keys)
    combined_hash = ','.join(key.hash for key in sorted_cache_keys)
    # sum() is the idiomatic form of reduce(lambda x, y: x + y, ..., 0)
    # and needs no functools import on Python 3.
    combined_num_sources = sum(key.num_sources for key in sorted_cache_keys)
    return CacheKey(combined_id, combined_hash, combined_num_sources, [])
def test_use_cache():
    """Verify that a cached file can be materialized into a staging dir."""
    with test_env() as (f, cache):
        key = CacheKey('muppet_key', 'fake_hash', 42)
        cache.insert(key, [f.name])
        with temporary_dir() as staging:
            staged_path = os.path.join(staging, os.path.basename(f.name))
            assert not os.path.exists(staged_path)
            # The callback copies each cached file into the staging dir.
            copy_into_staging = lambda src, rel: shutil.copyfile(
                src, os.path.join(staging, rel))
            cache.use_cached_files(key, copy_into_staging)
            assert os.path.exists(staged_path)
            with open(staged_path) as staged:
                assert staged.read() == TEST_CONTENT
def key_for(self, id, sources):
    """Build a CacheKey for *id*; the id doubles as the (fake) hash.

    NOTE(review): ``id`` shadows the builtin, but the parameter name is
    part of the visible interface and is kept for caller compatibility.
    """
    num_sources = len(sources)
    return CacheKey(id, id, num_sources)
def key_for_target(self, target, sources=None, fingerprint_extra=None):
    """Build a CacheKey for *target*; the target id doubles as the hash.

    ``sources`` and ``fingerprint_extra`` are accepted for interface
    compatibility but are not used by this implementation.
    """
    target_id = target.id
    return CacheKey(target_id, target_id, target.num_sources)
def key_for(self, tid, sources):
    """Build a CacheKey for *tid*; the id also serves as the hash."""
    source_count = len(sources)
    return CacheKey(tid, tid, source_count, [])