def setup_rest_cache(self, local=None, return_failed=False):
    """Yield a RESTfulArtifactCache talking to a freshly started test server.

    :param local: optional backing local cache; a throwaway temp cache is created if omitted.
    :param return_failed: forwarded to setup_server to make the server simulate failures.
    """
    with temporary_dir() as artifact_root:
        backing_cache = local or TempLocalArtifactCache(artifact_root, 0)
        with self.setup_server(return_failed=return_failed) as server:
            yield RESTfulArtifactCache(artifact_root, BestUrlSelector([server.url]), backing_cache)
def test_local_backed_remote_cache(self):
    """Make sure that the combined cache finds what it should and that it backfills."""
    with self.setup_server() as server:
        with self.setup_local_cache() as local:
            scratch = TempLocalArtifactCache(local.artifact_root, local.artifact_extraction_root, 0)
            remote = RESTfulArtifactCache(local.artifact_root, BestUrlSelector([server.url]), scratch)
            combined = RESTfulArtifactCache(local.artifact_root, BestUrlSelector([server.url]), local)
            every_cache = (local, remote, combined)
            cache_key = CacheKey("muppet_key", "fake_hash")
            with self.setup_test_file(local.artifact_root) as artifact_path:
                # Initially no cache knows about the key...
                for cache in every_cache:
                    self.assertFalse(cache.has(cache_key))
                # ...and none can serve it.
                for cache in every_cache:
                    self.assertFalse(cache.use_cached_files(cache_key))
                # A failed lookup must leave every cache unchanged.
                for cache in every_cache:
                    self.assertFalse(cache.has(cache_key))
                # Populate only the remote cache.
                remote.insert(cache_key, [artifact_path])
                self.assertFalse(local.has(cache_key))
                self.assertTrue(remote.has(cache_key))
                self.assertTrue(combined.has(cache_key))
                # Reading via the bare remote must NOT backfill the local cache.
                self.assertTrue(remote.use_cached_files(cache_key))
                self.assertFalse(local.has(cache_key))
                # Reading via the combined cache SHOULD backfill the local cache.
                self.assertTrue(combined.use_cached_files(cache_key))
                self.assertTrue(local.has(cache_key))
                self.assertTrue(local.use_cached_files(cache_key))
def test_restful_cache(self):
    """An ftp:// URL is rejected; a valid REST cache passes the standard artifact-cache checks."""
    # NOTE: the BestUrlSelector constructor itself raises for the ftp URL, so it must be
    # evaluated inside the assertRaises context.
    with self.assertRaises(InvalidRESTfulCacheProtoError):
        RESTfulArtifactCache('foo', BestUrlSelector(['ftp://localhost/bar']), 'foo')
    with self.setup_rest_cache() as cache:
        self.do_test_artifact_cache(cache)
def create_remote_cache(remote_spec, local_cache):
    # Probe the '|'-separated spec; only reachable URLs are kept. Returns None when none are.
    urls = self.get_available_urls(remote_spec.split('|'))
    if not urls:
        return None
    selector = BestUrlSelector(
        ['{}/{}'.format(url.rstrip('/'), self._stable_name) for url in urls])
    backing = local_cache or TempLocalArtifactCache(artifact_root, compression)
    return RESTfulArtifactCache(artifact_root, selector, backing)
def test_local_backed_remote_cache_corrupt_artifact(self):
    """Ensure that a combined cache clears outputs after a failure to extract an artifact."""
    with temporary_dir() as remote_cache_dir:
        with self.setup_server(cache_root=remote_cache_dir) as server:
            with self.setup_local_cache() as local:
                scratch = TempLocalArtifactCache(
                    local.artifact_root, local.artifact_extraction_root, compression=1)
                remote = RESTfulArtifactCache(
                    local.artifact_root, BestUrlSelector([server.url]), scratch)
                combined = RESTfulArtifactCache(
                    local.artifact_root, BestUrlSelector([server.url]), local)
                cache_key = CacheKey("muppet_key", "fake_hash")
                results_dir = os.path.join(local.artifact_root, "a/sub/dir")
                safe_mkdir(results_dir)
                self.assertTrue(os.path.exists(results_dir))
                with self.setup_test_file(results_dir) as artifact_path:
                    # Store the artifact only remotely, then corrupt it in place.
                    remote.insert(cache_key, [artifact_path])
                    self.assertTrue(server.corrupt_artifacts(r".*muppet_key.*") == 1)
                    # Extraction of the corrupt artifact must fail...
                    self.assertFalse(combined.use_cached_files(cache_key, results_dir=results_dir))
                    # ...without backfilling locally, and the results_dir must survive but be empty.
                    self.assertFalse(local.has(cache_key))
                    self.assertTrue(os.path.exists(results_dir))
                    self.assertTrue(len(os.listdir(results_dir)) == 0)
def test_restful_cache_failover(self):
    """After the bad URL exhausts its failure budget, the selector fails over to the good one."""
    unreachable_url = 'http://badhost:123'
    with temporary_dir() as artifact_root:
        local = TempLocalArtifactCache(artifact_root, 0)
        with self.setup_server() as good_server:
            # max_failures=0: a single failure is enough to rotate to the next URL.
            selector = BestUrlSelector([unreachable_url, good_server.url], max_failures=0)
            artifact_cache = RESTfulArtifactCache(artifact_root, selector, local)
            # The first attempt hits the unreachable host and raises.
            with self.assertRaises(NonfatalArtifactCacheError) as ex:
                self.do_test_artifact_cache(artifact_cache)
            self.assertIn('Failed to HEAD', str(ex.exception))
            # With fail-over, the second attempt lands on the good server and succeeds.
            self.do_test_artifact_cache(artifact_cache)
def create_remote_cache(remote_spec, local_cache):
    # Probe the '|'-separated spec; only reachable URLs are kept. Returns None when none are.
    urls = self.get_available_urls(remote_spec.split("|"))
    if not urls:
        return None
    selector = BestUrlSelector(
        ["{}/{}".format(url.rstrip("/"), self._cache_dirname) for url in urls])
    backing = local_cache or TempLocalArtifactCache(artifact_root, compression)
    return RESTfulArtifactCache(
        artifact_root,
        selector,
        backing,
        read_timeout=self._options.read_timeout,
        write_timeout=self._options.write_timeout,
    )
class TestBestUrlSelector(BaseTest):
    """Tests for BestUrlSelector's consecutive-failure fail-over behavior."""

    def setUp(self):
        self.url1 = 'http://host1:123'
        self.url2 = 'https://host2:456'
        self.unsupported_url = 'ftp://ftpserver'
        # max_failures=1: a second consecutive failure triggers fail-over to the next URL.
        self.best_url_selector = BestUrlSelector([self.url1, self.url2], max_failures=1)

    def call_url(self, expected_url, with_error=False):
        """Select the current best URL, assert it matches, and optionally simulate a failure.

        The RequestException is swallowed here; the selector's context manager has already
        observed it and updated its consecutive-failure count.
        """
        try:
            with self.best_url_selector.select_best_url() as url:
                # Fixed deprecated assertEquals alias (removed in Python 3.12).
                self.assertEqual(urlparse.urlparse(expected_url), url)
                if with_error:
                    raise RequestException('error connecting to {}'.format(url))
        except RequestException:
            pass

    def test_unsupported_protocol(self):
        with self.assertRaises(InvalidRESTfulCacheProtoError):
            BestUrlSelector([self.unsupported_url])

    def test_select_next_url_after_max_consecutive_failures(self):
        self.call_url(self.url1, with_error=True)
        # A success call will reset the counter.
        self.call_url(self.url1)
        # Too many failures for url1, switch to url2.
        self.call_url(self.url1, with_error=True)
        self.call_url(self.url1, with_error=True)
        self.call_url(self.url2)
        # Too many failures for url2, switch to url1.
        self.call_url(self.url2, with_error=True)
        self.call_url(self.url2, with_error=True)
        self.call_url(self.url1)
class TestBestUrlSelector(TestBase):
    """Tests for BestUrlSelector's consecutive-failure fail-over behavior."""

    def setUp(self):
        self.url1 = 'http://host1:123'
        self.url2 = 'https://host2:456'
        self.unsupported_url = 'ftp://ftpserver'
        # max_failures=1: a second consecutive failure triggers fail-over to the next URL.
        self.best_url_selector = BestUrlSelector([self.url1, self.url2], max_failures=1)

    def call_url(self, expected_url, with_error=False):
        """Select the current best URL, assert it matches, and optionally simulate a failure.

        The RequestException is swallowed here; the selector's context manager has already
        observed it and updated its consecutive-failure count.
        """
        try:
            with self.best_url_selector.select_best_url() as url:
                # Fixed deprecated assertEquals alias (removed in Python 3.12).
                self.assertEqual(urlparse.urlparse(expected_url), url)
                if with_error:
                    raise RequestException('error connecting to {}'.format(url))
        except RequestException:
            pass

    def test_unsupported_protocol(self):
        with self.assertRaises(InvalidRESTfulCacheProtoError):
            BestUrlSelector([self.unsupported_url])

    def test_select_next_url_after_max_consecutive_failures(self):
        self.call_url(self.url1, with_error=True)
        # A success call will reset the counter.
        self.call_url(self.url1)
        # Too many failures for url1, switch to url2.
        self.call_url(self.url1, with_error=True)
        self.call_url(self.url1, with_error=True)
        self.call_url(self.url2)
        # Too many failures for url2, switch to url1.
        self.call_url(self.url2, with_error=True)
        self.call_url(self.url2, with_error=True)
        self.call_url(self.url1)
def test_unsupported_protocol(self):
    """Constructing a selector with an ftp:// URL must raise InvalidRESTfulCacheProtoError."""
    # Callable form is safe here: only the BestUrlSelector call itself raises.
    self.assertRaises(InvalidRESTfulCacheProtoError, BestUrlSelector, [self.unsupported_url])
def setUp(self):
    # Candidate endpoints: one http, one https, plus an ftp URL the selector must reject.
    self.url1, self.url2 = 'http://host1:123', 'https://host2:456'
    self.unsupported_url = 'ftp://ftpserver'
    # max_failures=1: a second consecutive failure triggers fail-over to the next URL.
    self.best_url_selector = BestUrlSelector([self.url1, self.url2], max_failures=1)