@contextmanager
def _locked_tempdir(self, dir=None, **kwargs):
    with lockfile(self._basedir) as cachelock:
        cachelock.lock(shared=False, timeout=0)
        with _tempdir(dir=self._basedir, **kwargs) as tmpdir, \
                lockfile(tmpdir) as tmplock:
            tmplock.lock(shared=False, timeout=0)
            cachelock.unlock()
            yield (tmpdir, tmplock)
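The lockfile() and _tempdir() helpers used above are assumed rather than shown; _tempdir() is taken to be a context manager that creates a temporary directory under dir and cleans it up on error. A minimal sketch of lockfile(), assuming a flock()-based lock on a well-known file inside the directory and a polling loop for non-zero timeouts (the '.lock' filename, the _LockFile class name, and the retry interval are illustrative guesses, not the actual implementation):

import errno
import fcntl
import os
import time
from contextlib import contextmanager


class _LockFile(object):
    """Wraps an open fd; lock()/unlock() map onto flock()."""

    def __init__(self, fd):
        self.fd = fd

    def lock(self, shared=True, timeout=None):
        op = fcntl.LOCK_SH if shared else fcntl.LOCK_EX
        if timeout is None:
            fcntl.flock(self.fd, op)  # block until the lock is granted
            return
        deadline = time.time() + timeout
        while True:
            try:
                fcntl.flock(self.fd, op | fcntl.LOCK_NB)
                return
            except IOError as e:
                # timeout=0 amounts to a single non-blocking attempt
                if e.errno != errno.EAGAIN or time.time() >= deadline:
                    raise
                time.sleep(0.1)

    def unlock(self):
        fcntl.flock(self.fd, fcntl.LOCK_UN)


@contextmanager
def lockfile(dirpath):
    # One lock file per directory; keeping the fd open keeps any lock alive.
    fd = os.open(os.path.join(dirpath, '.lock'), os.O_RDWR | os.O_CREAT, 0o644)
    try:
        yield _LockFile(fd)
    finally:
        os.close(fd)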
@contextmanager
def clone_temporary(self, destination=None, checkout=None, refs=(),
                    describeable=False):
    utime(join(self._repo_path, 'timestamp'), None)
    # TODO: Long term: lock a separate ref namespace and clone that
    was_exclusive = not self._shared
    self._gitlock.lock(timeout=0, shared=True)
    self._shared = True
    self._clone(destination=destination, checkout=checkout, refs=refs,
                describeable=describeable, shared=True)
    try:
        with lockfile(self._repo_path) as templock:
            templock.lock(timeout=0, shared=True)
            yield (templock.fd,)
    finally:
        rmtree(destination, ignore_errors=True)
        if was_exclusive:
            try:
                self._gitlock.lock(timeout=1, shared=False)
            except IOError as e:
                if e.errno != EAGAIN:
                    raise
            else:
                self._shared = False
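For illustration, assuming repo is a repository object obtained from the cache, usage looks roughly like this; the destination path and run_build() step are invented, and the caller keeps the yielded file descriptors open for as long as it needs the clone's objects pinned in the cache:

with repo.clone_temporary(destination='/tmp/project-build',
                          checkout='master') as held_fds:
    run_build('/tmp/project-build')  # hypothetical build step
# on exit the checkout is removed and an attempt is made to convert
# the repository lock back to exclusive so auto-gc can run again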
@contextmanager
def resolve_tree(self, treeish, refs=None):
    utime(join(self._repo_path, 'timestamp'), None)
    if self._objects_missing(objects=(treeish,)):
        self._update(commits=(treeish,), refspecs=refs)
    output = self._gitcmd(check_output, ('rev-parse', treeish + '^{tree}'))
    treesha1 = output.strip()
    # TODO: Long term: Fine grained locking here by a ref namespace
    was_exclusive = not self._shared
    self._gitlock.lock(timeout=0, shared=True)
    self._shared = True
    try:
        with lockfile(self._repo_path) as templock:
            templock.lock(timeout=0, shared=True)
            yield (treesha1, (templock.fd,))
    finally:
        if was_exclusive:
            try:
                self._gitlock.lock(timeout=1, shared=False)
            except IOError as e:
                if e.errno != EAGAIN:
                    raise
            else:
                self._shared = False
@contextmanager
def compose(self, source_artifacts=()):
    with self._locked_tempdir(prefix='compose.') \
            as (composedir, composelock), \
            lockfile(self._basedir) as cachelock:
        cachelock.lock(shared=True, timeout=0)
        for artifact in source_artifacts:
            self._hardlink_artifact(artifact, composedir)
        composelock.lock(shared=True, timeout=0)
        cachelock.unlock()
        yield (composedir, composelock)
def _cachedirs(self):
    try:
        gitdirs = listdir(self._base_directory)
    except OSError as e:
        if e.errno == ENOENT:
            return
        raise
    for gitdir in gitdirs:
        with lockfile(join(self._base_directory, gitdir)) as l:
            try:
                l.lock(timeout=1, shared=True)
            except IOError as e:
                # Skip dir if we can't get the lock
                if e.errno == EAGAIN:
                    continue
                raise
            urlpath = join(self._base_directory, gitdir, 'repo_url')
            with open(urlpath, 'r') as f:
                url = f.read()
            yield url
@contextmanager
def get_repo(self, repo_url):
    # Ensure parent directory exists
    try:
        mkdir(self._base_directory, 0755)
    except OSError as e:
        if e.errno != EEXIST:
            raise
    repo_path = join(self._base_directory, sha256(repo_url).hexdigest())
    # Make repository directory
    try:
        mkdir(repo_path, 0775)
    except OSError as e:
        if e.errno != EEXIST:
            raise
    with lockfile(repo_path) as repolock:
        # Set up repository
        try:
            repolock.lock(timeout=0, shared=False)
        except IOError as e:
            if e.errno != EAGAIN:
                raise
        else:
            self._init_repo(repo_url, repo_path, join(repo_path, 'repo'))
        # Either taking the exclusive lock failed because setup is in
        # progress, or setup has already finished.
        # So either take a fresh read lock with a timeout, on the
        # assumption it will be initialised soon, or convert the
        # exclusive lock we hold into a read lock.
        try:
            repolock.lock(timeout=self._setup_timeout, shared=True)
        except IOError as e:
            if e.errno == EAGAIN:
                # Timed out during setup.
                # TODO: recommend increasing the timeout?
                pass
            raise
        # TODO: Exclusive lock could have been taken to delete,
        # need to retry if this was the case,
        # and give up at some point.
        assert exists(repo_path)
        with lockfile(join(repo_path, 'repo')) as gitlock:
            shared = True
            if self._allow_autogc:
                try:
                    gitlock.lock(timeout=0, shared=False)
                    shared = False
                except IOError as e:
                    if e.errno != EAGAIN:
                        raise
            # If we can't auto-gc, attempt a shared lock; if we did take
            # the exclusive lock above this is a no-op re-lock, which
            # saves a bit of logic.
            try:
                gitlock.lock(timeout=0, shared=shared)
            except IOError as e:
                if e.errno == EAGAIN:
                    # Exclusive lock taken
                    # TODO: Nice message
                    pass
                raise
            yield LocalIndividualCoarseCachedGitRepository(
                self, repolock, gitlock, repo_url, repo_path, shared)
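A rough usage sketch of the two layers together; the cache class name, URL, and extract_tree() consumer are invented for illustration, and the repository object is only valid while get_repo()'s locks are held:

cache = GitRepoCache('/var/cache/gits')  # hypothetical cache class and base directory
with cache.get_repo('git://example.com/project.git') as repo:
    with repo.resolve_tree('refs/heads/master') as (treesha1, held_fds):
        # treesha1 names the tree of the resolved commit; held_fds keep a
        # shared lock on the repository while the tree is consumed
        extract_tree(repo, treesha1)  # hypothetical consumer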
class LHAC(object):

    @contextmanager
    def _locked_tempdir(self, dir=None, **kwargs):
        with lockfile(self._basedir) as cachelock:
            cachelock.lock(shared=False, timeout=0)
            with _tempdir(dir=self._basedir, **kwargs) as tmpdir, \
                    lockfile(tmpdir) as tmplock:
                tmplock.lock(shared=False, timeout=0)
                cachelock.unlock()
                yield (tmpdir, tmplock)

    def _hardlink_artifact(self, artifact, composedir):
        artifactdir = join(self._basedir, artifact)
        with lockfile(artifactdir) as artifactlock:
            artifactlock.lock(shared=True)
            check_call(['cp', '-al', join(artifactdir, 'tree'), composedir])

    @contextmanager
    def compose(self, source_artifacts=()):
        with self._locked_tempdir(prefix='compose.') \
                as (composedir, composelock), \
                lockfile(self._basedir) as cachelock:
            cachelock.lock(shared=True, timeout=0)
            for artifact in source_artifacts:
                self._hardlink_artifact(artifact, composedir)
            composelock.lock(shared=True, timeout=0)
            cachelock.unlock()
            yield (composedir, composelock)
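Putting it together, a hedged example of composing cached artifacts; LHAC's constructor is not shown above, so the base-directory argument, artifact names, and deploy() step are all invented:

cache = LHAC('/var/cache/artifacts')  # assumed constructor taking the base directory
artifacts = ('base-rootfs.abc123', 'devel-tools.def456')
with cache.compose(source_artifacts=artifacts) as (composedir, composelock):
    # composedir contains hardlinked copies of each artifact's tree; the
    # shared locks taken along the way stop them being garbage collected
    # for as long as composelock is held
    deploy(composedir)  # hypothetical deployment step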