def use_cached_files(self, cache_key):
  # This implementation fetches the appropriate tarball and extracts it.
  remote_path = self._remote_path_for_key(cache_key)
  try:
    # Send an HTTP request for the tarball.
    response = self._request('GET', remote_path)
    if response is None:
      return None

    done = False
    with temporary_file() as outfile:
      total_bytes = 0
      # Read the data in a loop.
      while not done:
        data = response.read(self.READ_SIZE)
        outfile.write(data)
        if len(data) < self.READ_SIZE:
          done = True
        total_bytes += len(data)
      outfile.close()
      self.log.debug('Read %d bytes from artifact cache at %s' %
                     (total_bytes, self._url_string(remote_path)))

      # Extract the tarfile.
      artifact = TarballArtifact(self.artifact_root, outfile.name, self.compress)
      artifact.extract()
      return artifact
  except Exception as e:
    self.log.warn('Error while reading from remote artifact cache: %s' % e)
    return None
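
# Illustrative only: a standalone sketch of the same chunked-download pattern, written
# against the Python standard library (urllib.request, tempfile) rather than the pants
# helpers used above (_request, temporary_file). The chunk size and function name are
# assumptions for this example.
import tempfile
import urllib.request

READ_SIZE = 4 * 1024 * 1024  # assumed chunk size

def fetch_to_temp_file(url):
  """Stream the response at `url` into a named temporary file; return (path, bytes read)."""
  with urllib.request.urlopen(url) as response:
    with tempfile.NamedTemporaryFile(delete=False) as outfile:
      total_bytes = 0
      while True:
        data = response.read(READ_SIZE)
        if not data:
          break
        outfile.write(data)
        total_bytes += len(data)
  return outfile.name, total_bytes
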
def try_insert(self, cache_key, paths):
  with temporary_file_path() as tarfile:
    artifact = TarballArtifact(self.artifact_root, tarfile, self.compress)
    artifact.collect(paths)

    with open(tarfile, 'rb') as infile:
      remote_path = self._remote_path_for_key(cache_key)
      if not self._request('PUT', remote_path, body=infile):
        raise self.CacheError('Failed to PUT to %s.' % self._url_string(remote_path))
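
# Illustrative only: one way the _request helper above might issue the PUT, streaming the
# open tarball file as the request body via http.client. The host/path split and the 2xx
# check are assumptions for this sketch; the real pants helper may behave differently.
import http.client

def put_file(host, path, infile):
  """PUT the contents of an open file to http://<host><path>; return True on a 2xx status."""
  conn = http.client.HTTPConnection(host)
  try:
    conn.request('PUT', path, body=infile)
    response = conn.getresponse()
    return 200 <= response.status < 300
  finally:
    conn.close()
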
def use_cached_files(self, cache_key):
  try:
    tarfile = self._cache_file_for_key(cache_key)
    if os.path.exists(tarfile):
      artifact = TarballArtifact(self.artifact_root, tarfile, self._compress)
      artifact.extract()
      return artifact
    else:
      return None
  except Exception as e:
    self.log.warn('Error while reading from local artifact cache: %s' % e)
    return None
def try_insert(self, cache_key, paths):
  tarfile = self._cache_file_for_key(cache_key)
  safe_mkdir_for(tarfile)
  # Write to a temporary name (on the same filesystem), and move it atomically, so if we
  # crash in the middle we don't leave an incomplete or missing artifact.
  tarfile_tmp = tarfile + '.' + str(uuid.uuid4()) + '.tmp'
  if os.path.exists(tarfile_tmp):
    os.unlink(tarfile_tmp)

  artifact = TarballArtifact(self.artifact_root, tarfile_tmp, self._compress)
  artifact.collect(paths)

  # Note: Race condition here if multiple pants runs (in different workspaces)
  # try to write the same thing at the same time. However since rename is atomic,
  # this should not result in corruption. It may however result in a missing artifact
  # if we crash between the unlink and the rename. But that's OK.
  if os.path.exists(tarfile):
    os.unlink(tarfile)
  os.rename(tarfile_tmp, tarfile)
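
# Illustrative only: the write-to-temp-then-rename pattern used above, reduced to a
# standalone helper. Writing to a uniquely named file on the same filesystem and then
# publishing it with os.rename means readers never observe a partially written file
# (os.rename is atomic on POSIX within one filesystem). Names here are hypothetical.
import os
import uuid

def atomic_write(path, data):
  """Write `data` (bytes) to `path` without ever exposing a partial file."""
  tmp_path = '%s.%s.tmp' % (path, uuid.uuid4())
  with open(tmp_path, 'wb') as tmp:
    tmp.write(data)
  os.rename(tmp_path, path)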