def use_cached_files(self, cache_key):
    """Fetch the tarball for cache_key from the remote cache and extract it.

    Returns the extracted TarballArtifact on success, or None if the key is
    absent or any error occurs (remote reads are best-effort).
    """
    remote_path = self._remote_path_for_key(cache_key)
    try:
        # Ask the remote cache for the tarball; None means a cache miss.
        resp = self._request('GET', remote_path)
        if resp is None:
            return None
        with temporary_file() as outfile:
            # Stream the response body into a local temp file.
            bytes_read = 0
            for chunk in resp.iter_content(self.READ_SIZE):
                outfile.write(chunk)
                bytes_read += len(chunk)
            # Close so the data is flushed before the extractor reads it.
            outfile.close()
            self.log.debug('Read %d bytes from artifact cache at %s'
                           % (bytes_read, self._url_string(remote_path)))
            # Unpack the downloaded tarball into the artifact root.
            artifact = TarballArtifact(self.artifact_root, outfile.name, self.compress)
            artifact.extract()
            return artifact
    except Exception as e:
        # Best-effort: a broken remote cache must not fail the build.
        self.log.warn('Error while reading from remote artifact cache: %s' % e)
        return None
def test_non_existent_tarball_extraction(self):
    """Extracting a tarball that was never created must raise ArtifactError."""
    with temporary_dir() as tmpdir:
        missing = TarballArtifact(artifact_root=tmpdir,
                                  artifact_extraction_root=tmpdir,
                                  tarfile_="vapor.tar")
        with self.assertRaises(ArtifactError):
            missing.extract()
def use_cached_files(self, cache_key):
    """Fetch the tarball for cache_key from the remote cache and extract it.

    Returns the extracted TarballArtifact, or None on a miss or any error.
    """
    remote_path = self._remote_path_for_key(cache_key)
    try:
        # None from _request means the artifact is not in the remote cache.
        response = self._request('GET', remote_path)
        if response is None:
            return None
        with temporary_file() as outfile:
            total_bytes = 0
            # Copy the response body in READ_SIZE chunks.
            while True:
                data = response.read(self.READ_SIZE)
                outfile.write(data)
                total_bytes += len(data)
                # A short read signals the end of the stream.
                if len(data) < self.READ_SIZE:
                    break
            # Close so the data is flushed before the extractor reads it.
            outfile.close()
            self.log.debug('Read %d bytes from artifact cache at %s'
                           % (total_bytes,self._url_string(remote_path)))
            # Unpack the downloaded tarball into the artifact root.
            artifact = TarballArtifact(self.artifact_root, outfile.name, self.compress)
            artifact.extract()
            return artifact
    except Exception as e:
        # Best-effort: a broken remote cache must not fail the build.
        self.log.warn('Error while reading from remote artifact cache: %s' % e)
        return None
def test_does_not_exist_when_no_tar_file(self):
    """exists() is False when the backing tar file was never written."""
    with temporary_dir() as tmpdir:
        artifact_root = os.path.join(tmpdir, 'artifacts')
        cache_root = os.path.join(tmpdir, 'cache')
        safe_mkdir(cache_root)
        tar_path = os.path.join(cache_root, 'some.tar')
        self.assertFalse(TarballArtifact(artifact_root, tar_path).exists())
def test_corrupt_tarball_extraction(self):
    """A file with non-tar content must raise ArtifactError on extract()."""
    with temporary_dir() as tmpdir:
        bogus = self.touch_file_in(tmpdir, content="invalid")
        corrupt = TarballArtifact(artifact_root=tmpdir,
                                  artifact_extraction_root=tmpdir,
                                  tarfile_=bogus)
        with self.assertRaises(ArtifactError):
            corrupt.extract()
def try_insert(self, cache_key, paths):
    """Tar up `paths` and PUT the tarball to the remote cache under `cache_key`.

    The tarball is fully built in a local temp file first so the PUT streams
    a complete artifact.

    Raises:
      self.CacheError: if the PUT request fails.
    """
    with temporary_file_path() as tarfile:
        artifact = TarballArtifact(self.artifact_root, tarfile, self.compress)
        artifact.collect(paths)
        with open(tarfile, 'rb') as infile:
            remote_path = self._remote_path_for_key(cache_key)
            if not self._request('PUT', remote_path, body=infile):
                # Bug fix: the old message hard-coded 'Error: 404', but
                # _request gives us no status code here, so any failure
                # (5xx, timeout, ...) was misreported as a 404. Report
                # only what we actually know.
                raise self.CacheError('Failed to PUT to %s.'
                                      % self._url_string(remote_path))
def test_exists_true_when_exists(self):
    """exists() is True once collect() has written the tar file."""
    with temporary_dir() as tmpdir:
        artifact_root = os.path.join(tmpdir, 'artifacts')
        cache_root = os.path.join(tmpdir, 'cache')
        safe_mkdir(cache_root)
        source_file = self.touch_file_in(artifact_root)
        artifact = TarballArtifact(artifact_root, os.path.join(cache_root, 'some.tar'))
        artifact.collect([source_file])
        self.assertTrue(artifact.exists())
def test_get_paths_after_collect(self):
    """get_paths() reports exactly the files that collect() archived."""
    with temporary_dir() as tmpdir:
        artifact_root = os.path.join(tmpdir, 'artifacts')
        cache_root = os.path.join(tmpdir, 'cache')
        safe_mkdir(cache_root)
        collected = self.touch_file_in(artifact_root)
        artifact = TarballArtifact(artifact_root, os.path.join(cache_root, 'some.tar'))
        artifact.collect([collected])
        self.assertEqual([collected], list(artifact.get_paths()))
def test_get_paths_after_collect(self):
    """get_paths() reports exactly the files that collect() archived."""
    with temporary_dir() as tmpdir:
        artifact_root = os.path.join(tmpdir, 'artifacts')
        cache_root = os.path.join(tmpdir, 'cache')
        safe_mkdir(cache_root)
        file_path = self.touch_file_in(artifact_root)
        artifact = TarballArtifact(artifact_root, os.path.join(cache_root, 'some.tar'))
        artifact.collect([file_path])
        # Fix: assertEquals is a deprecated alias (removed in Python 3.12);
        # the sibling test at this granularity already uses assertEqual.
        self.assertEqual([file_path], list(artifact.get_paths()))
def use_cached_files(self, cache_key):
    """Extract the locally cached tarball for cache_key, if one exists.

    Returns the TarballArtifact on success, or None if the cache file is
    missing or any error occurs (local reads are best-effort).
    """
    try:
        tarfile = self._cache_file_for_key(cache_key)
        # Guard clause: no cache file means a plain miss.
        if not os.path.exists(tarfile):
            return None
        artifact = TarballArtifact(self.artifact_root, tarfile, self._compress)
        artifact.extract()
        return artifact
    except Exception as e:
        # Best-effort: a corrupt local cache must not fail the build.
        self.log.warn('Error while reading from local artifact cache: %s' % e)
        return None
def _artifact(self, path):
    """Wrap `path` in a TarballArtifact configured with this cache's settings."""
    return TarballArtifact(self.artifact_root,
                           self.artifact_extraction_root,
                           path,
                           self._compression,
                           dereference=self._dereference)
def try_insert(self, cache_key, paths):
    """Archive `paths` into the local cache under `cache_key`.

    The tarball is first written to a unique temp name on the same
    filesystem and then renamed into place, so readers never observe a
    partially written artifact.
    """
    tarfile = self._cache_file_for_key(cache_key)
    safe_mkdir_for(tarfile)
    # Write to a temporary name (on the same filesystem), and move it atomically, so if we
    # crash in the middle we don't leave an incomplete or missing artifact.
    # uuid4 makes the temp name unique per attempt.
    tarfile_tmp = tarfile + '.' + str(uuid.uuid4()) + '.tmp'
    if os.path.exists(tarfile_tmp):
        os.unlink(tarfile_tmp)
    artifact = TarballArtifact(self.artifact_root, tarfile_tmp, self._compress)
    artifact.collect(paths)
    # Note: Race condition here if multiple pants runs (in different workspaces)
    # try to write the same thing at the same time. However since rename is atomic,
    # this should not result in corruption. It may however result in a missing artifact
    # If we crash between the unlink and the rename. But that's OK.
    if os.path.exists(tarfile):
        os.unlink(tarfile)
    os.rename(tarfile_tmp, tarfile)
def test_non_existent_tarball_extraction(self):
    """extract() on a path that was never written must raise ArtifactError."""
    with temporary_dir() as tmpdir:
        ghost = TarballArtifact(artifact_root=tmpdir, tarfile_='vapor.tar')
        with self.assertRaises(ArtifactError):
            ghost.extract()
def _artifact(self, path):
    """Wrap `path` in a TarballArtifact using this cache's root and compression."""
    compression = self._compression
    return TarballArtifact(self.artifact_root, path, compression)
def test_corrupt_tarball_extraction(self):
    """extract() on a file containing garbage must raise ArtifactError."""
    with temporary_dir() as tmpdir:
        garbage = self.touch_file_in(tmpdir, content='invalid')
        corrupt = TarballArtifact(artifact_root=tmpdir, tarfile_=garbage)
        with self.assertRaises(ArtifactError):
            corrupt.extract()