def test_existing_cache(self):
  """Reopens a pre-populated cache and checks edits made while a named
  cache is installed are persisted back on uninstall."""
  dest_dir = os.path.join(self.tempdir, 'dest')
  with local_caching.NamedCache(self.cache_dir, self.policies) as cache:
    # Assume test_clean passes: pre-populate cache '1' with a file 'x'.
    dir_a = os.path.join(dest_dir, u'a')
    dir_b = os.path.join(dest_dir, u'b')
    cache.install(dir_a, u'1')
    write_file(os.path.join(dest_dir, u'a', u'x'), u'x')
    cache.uninstall(dir_a, u'1')

    # Test starts here.
    cache.install(dir_a, u'1')
    cache.install(dir_b, u'2')
    self.assertEqual({'a', 'b'}, set(os.listdir(dest_dir)))
    self.assertFalse(cache.available)
    self.assertEqual(['named'], os.listdir(cache.cache_dir))
    self.assertEqual(
        'x', read_file(os.path.join(dest_dir, u'a', u'x')))
    # Mutate both installed caches, then put them back.
    write_file(os.path.join(dir_a, 'x'), 'x2')
    write_file(os.path.join(dir_b, 'y'), 'y')
    cache.uninstall(dir_a, '1')
    cache.uninstall(dir_b, '2')
    self.assertEqual(3, len(os.listdir(cache.cache_dir)))
    # The updated content must now live in the cache's internal storage,
    # reachable through the 'named' symlinks.
    stored_1 = os.path.join(cache.cache_dir, cache._lru['1'])
    stored_2 = os.path.join(cache.cache_dir, cache._lru['2'])
    self.assertEqual('x2', read_file(os.path.join(stored_1, 'x')))
    self.assertEqual('y', read_file(os.path.join(stored_2, 'y')))
    self.assertEqual(os.readlink(cache._get_named_path('1')), stored_1)
    self.assertEqual(os.readlink(cache._get_named_path('2')), stored_2)
def test_clean_caches_disk(self):
  # Create an isolated cache and a named cache each with 2 items. Ensure that
  # one item from each is removed.
  start = self._now
  self._free_disk = 100000

  # Setup caches.
  policies = _get_policies(min_free_space=1000)
  named_cache = local_caching.NamedCache(
      tempfile.mkdtemp(dir=self.tempdir, prefix='nc'), policies)
  short_names = self._prepare_named_cache(named_cache)
  isolated_cache = local_caching.DiskContentAddressedCache(
      tempfile.mkdtemp(dir=self.tempdir, prefix='ic'), policies, trim=False)
  self._prepare_isolated_cache(isolated_cache)
  # Populating the caches must not have advanced the fake clock.
  self.assertEqual(start, self._now)

  # Request trimming.
  self._free_disk = 950
  trimmed = local_caching.trim_caches(
      [isolated_cache, named_cache],
      self.tempdir,
      min_free_space=policies.min_free_space,
      max_age_secs=policies.max_age_secs)
  # Enough to free 50 bytes. The following sums to 56.
  self.assertEqual([1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7], trimmed)

  # Cache verification: items 8..10 remain in each cache.
  self._verify_named_cache(named_cache, short_names, range(8, 11))
  self._verify_isolated_cache(isolated_cache, range(8, 11))
def process_named_cache_options(parser, options, time_fn=None):
  """Validates named cache options and returns a CacheManager.

  Calls parser.error() (which does not return) on invalid flag combinations.
  Returns None when --named-cache-root is not set.
  """
  if options.named_caches and not options.named_cache_root:
    parser.error('--named-cache is specified, but --named-cache-root is empty')
  for name, path in options.named_caches:
    if not CACHE_NAME_RE.match(name):
      parser.error(
          'cache name %r does not match %r' % (name, CACHE_NAME_RE.pattern))
    if not path:
      parser.error('cache path cannot be empty')
  if not options.named_cache_root:
    return None
  # Make these configurable later if there is use case but for now it's fairly
  # safe values.
  # In practice, a fair chunk of bots are already recycled on a daily schedule
  # so this code doesn't have any effect to them, unless they are preloaded
  # with a really old cache.
  policies = local_caching.CachePolicies(
      # 1TiB.
      max_cache_size=1024*1024*1024*1024,
      min_free_space=options.min_free_space,
      max_items=50,
      max_age_secs=MAX_AGE_SECS)
  root_dir = unicode(os.path.abspath(options.named_cache_root))
  return local_caching.NamedCache(root_dir, policies, time_fn=time_fn)
def test_corrupted(self): os.mkdir(self.cache_dir) with open(os.path.join(self.cache_dir, u'state.json'), 'w') as f: f.write('}}}}') fs.makedirs(os.path.join(self.cache_dir, 'a'), 0777) with local_caching.NamedCache(self.cache_dir, self.policies) as cache: self.assertFalse(os.path.isdir(cache.cache_dir)) self.make_caches(cache, ['a']) self.assertTrue(fs.islink(os.path.join(cache.cache_dir, 'named', 'a')))
def test_trim(self):
  """trim() drops the oldest entries down to the max_items policy."""
  with local_caching.NamedCache(self.cache_dir, self.policies) as cache:
    total = self.policies.max_items + 10
    self.make_caches(cache, range(total))
    self.assertEqual(len(cache), total)
    cache.trim()
    self.assertEqual(len(cache), self.policies.max_items)
    # The 10 oldest entries (0..9) are gone; the rest survive.
    survivors = set(map(str, xrange(10, 10 + self.policies.max_items)))
    self.assertEqual(
        survivors,
        set(os.listdir(os.path.join(cache.cache_dir, 'named'))))
def test_get_timestamp(self):
  """Each named cache records the (fake) time at which it was created."""
  # Mutable cell so the lambda sees updates made below.
  fake_now = [0]
  with local_caching.NamedCache(
      self.cache_dir, self.policies, time_fn=lambda: fake_now[0]) as cache:
    for i in xrange(10):
      self.make_caches(cache, [i])
      fake_now[0] += 1
    for i in xrange(10):
      self.assertEqual(i, cache.get_timestamp(str(i)))
def get_cache(self, policies):
  # Factory used by the shared test harness to build the cache under test.
  root = self.cache_dir
  return local_caching.NamedCache(root, policies)
def test_run_command_caches(self):
  # This test puts a file into a named cache, remove it, runs a test that
  # updates the named cache, remaps it and asserts the content was updated.
  #
  # Directories:
  #   <root_dir>/
  #   <root_dir>/c - <cache_dir> named cache root
  #   <root_dir>/dest - <dest_dir> used for manual cache update
  #   <root_dir>/w - <self.work_dir> used by the task.
  cache_dir = os.path.join(self.root_dir, u'c')
  dest_dir = os.path.join(self.root_dir, u'dest')
  policies = local_caching.CachePolicies(0, 0, 0, 0)

  # Inject file 'bar' in the named cache 'foo'.
  named_cache = local_caching.NamedCache(cache_dir, policies)
  named_cache.install(dest_dir, 'foo')
  with open(os.path.join(dest_dir, 'bar'), 'wb') as f:
    f.write('thecache')
  named_cache.uninstall(dest_dir, 'foo')
  self.assertFalse(os.path.exists(dest_dir))
  self._expect_files([u'c/*/bar', u'c/state.json'])

  # Maps the cache 'foo' as 'cache_foo'. This runs inside self.work_dir.
  # This runs the command for real.
  script = (
      'import os\n'
      'print "hi"\n'
      'with open("cache_foo/bar", "rb") as f:\n'
      ' cached = f.read()\n'
      'with open("../../result", "wb") as f:\n'
      ' f.write(cached)\n'
      'with open("cache_foo/bar", "wb") as f:\n'
      ' f.write("updated_cache")\n')
  task_details = get_task_details(
      script, caches=[{'name': 'foo', 'path': 'cache_foo', 'hint': '100'}])
  expected = {
      u'exit_code': 0,
      u'hard_timeout': False,
      u'io_timeout': False,
      u'must_signal_internal_failure': None,
      u'version': task_runner.OUT_VERSION,
  }
  self.assertEqual(expected, self._run_command(task_details))
  self._expect_files([
      u'c/*/bar',
      u'c/state.json',
      u'result',
      u'w/run_isolated_args.json',
  ])

  # Ensure the 'result' file written by the task contained foo/bar.
  with open(os.path.join(self.root_dir, 'result'), 'rb') as f:
    self.assertEqual('thecache', f.read())
  os.remove(os.path.join(self.root_dir, 'result'))

  # Remap the cache with a fresh NamedCache instance and verify the task's
  # update to 'bar' was persisted.
  named_cache = local_caching.NamedCache(cache_dir, policies)
  self.assertFalse(os.path.exists(dest_dir))
  self._expect_files(
      [u'c/*/bar', u'c/state.json', u'w/run_isolated_args.json'])
  named_cache.install(dest_dir, 'foo')
  self._expect_files(
      [u'dest/bar', u'c/state.json', u'w/run_isolated_args.json'])
  with open(os.path.join(dest_dir, 'bar'), 'rb') as f:
    self.assertEqual('updated_cache', f.read())
  named_cache.uninstall(dest_dir, 'foo')
  self.assertFalse(os.path.exists(dest_dir))

  # Now look at the updates sent by the bot as seen by the server.
  self.expectTask()
def test_get_oldest(self):
  """get_oldest() is None when empty, else the least recently added name."""
  with local_caching.NamedCache(self.cache_dir, self.policies) as cache:
    self.assertIsNone(cache.get_oldest())
    self.make_caches(cache, range(10))
    self.assertEqual(u'0', cache.get_oldest())