def _refresh_repositories():
    """Fetch and hard-reset the repositories-metadata clone to origin/master.

    An exclusive ``flock`` is held on the metadata file while the reset
    happens, because jens-gitlab-producer and jens-update access that file
    asynchronously; the reader implements the same protocol on its side.

    Raises:
        JensError: if the metadata file cannot be opened/locked/unlocked,
            or if any underlying git operation fails.
    """
    settings = Settings()
    logging.debug("Refreshing repositories metadata...")
    path = settings.REPO_METADATADIR
    try:
        git.fetch(path)
        try:
            metadata = open(settings.REPO_METADATA, 'r')
        except IOError as error:
            raise JensError("Could not open '%s' to put a lock on it"
                            % settings.REPO_METADATA)
        # jens-gitlab-producer collaborates with jens-update asynchronously
        # so have to make sure that exclusive access to the file when writing
        # is guaranteed. Of course, the reader will have to implement the same
        # protocol on the other end.
        #
        # The with-block guarantees the descriptor is closed (and therefore
        # the flock dropped) even if git.reset raises mid-flight; previously
        # that path leaked the open file and kept the lock until GC.
        with metadata:
            try:
                logging.info("Trying to acquire a lock to refresh the metadata...")
                fcntl.flock(metadata, fcntl.LOCK_EX)
                logging.debug("Lock acquired")
            except IOError as error:
                raise JensError("Could not lock '%s'" % settings.REPO_METADATA)
            git.reset(path, "origin/master", hard=True)
            try:
                logging.debug("Trying to release the lock used to refresh the metadata...")
                fcntl.flock(metadata, fcntl.LOCK_UN)
                logging.debug("Lock released")
            except IOError as error:
                raise JensError("Could not unlock '%s'" % settings.REPO_METADATA)
    except JensGitError as error:
        raise JensError("Couldn't refresh repositories metadata (%s)" % error)
def _expand_clones(partition, name, inventory, inventory_lock, new_refs,
                   moved_refs, deleted_refs):
    """Materialise clones for refs newly added to a bare repository.

    For each ref in ``new_refs`` a working clone is created next to the
    bare repository and the shared ``inventory`` mapping is updated.
    Failures are logged and the half-created clone is removed; the
    function never raises for a single bad ref.

    Args:
        partition: repository partition (e.g. modules/hostgroups).
        name: repository short name.
        inventory: shared proxy dict mapping name -> list of refs.
        inventory_lock: optional lock guarding inventory mutations.
        new_refs: refs to clone; ``moved_refs``/``deleted_refs`` are
            accepted for interface compatibility but not handled here.
    """
    settings = Settings()
    bare_path = _compose_bare_repository_path(name, partition)
    if new_refs:
        logging.debug("Processing new refs of %s/%s (%s)...",
                      partition, name, new_refs)
        for refname in new_refs:
            clone_path = _compose_clone_repository_path(name, partition, refname)
            logging.info("Populating new ref '%s'", clone_path)
            try:
                if ref_is_commit(refname):
                    commit_id = refname.replace(settings.HASHPREFIX, '')
                    logging.debug("Will create a clone pointing to '%s'",
                                  commit_id)
                    git.clone(clone_path, "%s" % bare_path, shared=True)
                    git.reset(clone_path, commit_id, hard=True)
                else:
                    git.clone(clone_path, "%s" % bare_path, branch=refname)
                # Needs reset so the proxy notices about the change on the mutable
                # http://docs.python.org/2.7/library/multiprocessing.html#managers
                # Locking on the assignment is guaranteed by the library, but
                # additional locking is needed as A = A + 1 is a critical section.
                if inventory_lock:
                    inventory_lock.acquire()
                try:
                    inventory[name] += [refname]
                finally:
                    # Release even if the proxy assignment raises, so other
                    # workers are not deadlocked.
                    if inventory_lock:
                        inventory_lock.release()
            # Py3 syntax: the original "except JensGitError, error" form is
            # a SyntaxError on Python 3.
            except JensGitError as error:
                if os.path.isdir(clone_path):
                    shutil.rmtree(clone_path)
                logging.error("Unable to create clone '%s' (%s)",
                              clone_path, error)
def _refresh_environments():
    """Fetch and hard-reset the environments-metadata clone to origin/master.

    Raises:
        JensError: if any underlying git operation fails.
    """
    settings = Settings()
    logging.debug("Refreshing environment metadata...")
    path = settings.ENV_METADATADIR
    try:
        git.fetch(path)
        git.reset(path, "origin/master", hard=True)
    # Py3 syntax: the original "except JensGitError, error" form is a
    # SyntaxError on Python 3.
    except JensGitError as error:
        raise JensError("Couldn't refresh environments metadata (%s)" % error)
def _refresh_environments():
    """Bring the environments-metadata clone in sync with its remote.

    Fetches the remote and hard-resets the working tree to origin/master,
    wrapping any git failure into a JensError.
    """
    settings = Settings()
    logging.debug("Refreshing environment metadata...")
    metadata_path = settings.ENV_METADATADIR
    try:
        git.fetch(metadata_path)
        git.reset(metadata_path, "origin/master", hard=True)
    except JensGitError as error:
        raise JensError("Couldn't refresh environments metadata (%s)" % error)
def test_get_head_existing_repository(self):
    """get_head must report the tip commit in short and long form."""
    (bare_repo, user_repo) = create_fake_repository(self.sandbox_path, ['qa'])
    clone_path = "%s/_clone" % self.settings.CLONEDIR
    git_wrapper.clone(clone_path, bare_repo, bare=False, branch='qa')
    # Advance the branch upstream, then bring the clone up to date.
    expected_sha = add_commit_to_branch(user_repo, 'qa')
    git_wrapper.fetch(clone_path)
    git_wrapper.reset(clone_path, 'origin/qa', hard=True)
    # Default, explicit long, and abbreviated (7-char) forms.
    self.assertEqual(git_wrapper.get_head(clone_path), expected_sha)
    self.assertEqual(git_wrapper.get_head(clone_path, short=False),
                     expected_sha)
    self.assertEqual(git_wrapper.get_head(clone_path, short=True),
                     expected_sha[:7])
def test_get_head_existing_repository(self):
    """get_head must report the tip commit in short and long form.

    Fix: ``assertEquals`` is a deprecated alias, removed in Python 3.12;
    use ``assertEqual``.
    """
    (bare, user) = create_fake_repository(self.sandbox_path, ['qa'])
    jens_clone = "%s/_clone" % self.settings.CLONEDIR
    git_wrapper.clone(jens_clone, bare, bare=False, branch='qa')
    # Advance the branch upstream, then sync the clone to it.
    commit_id = add_commit_to_branch(user, 'qa')
    git_wrapper.fetch(jens_clone)
    git_wrapper.reset(jens_clone, 'origin/qa', hard=True)
    self.assertEqual(git_wrapper.get_head(jens_clone), commit_id)
    self.assertEqual(git_wrapper.get_head(jens_clone, short=False), commit_id)
    self.assertEqual(git_wrapper.get_head(jens_clone, short=True),
                     commit_id[0:7])
def test_reset_and_fetch_refs_match_after_remote_commit(self):
    """fetch + hard reset must track remote adds and removals of a file."""
    (bare_repo, user_repo) = create_fake_repository(self.sandbox_path, ['qa'])
    jens_bare = "%s/_bare" % self.settings.BAREDIR
    git_wrapper.clone(jens_bare, bare_repo, bare=True)
    jens_clone = "%s/_clone" % self.settings.CLONEDIR
    git_wrapper.clone(jens_clone, jens_bare, bare=False, branch='qa')

    fname = 'should_be_checkedout'
    # A commit adding the file must appear in the clone after sync.
    expected_sha = add_commit_to_branch(user_repo, 'qa', fname=fname)
    for repo in (jens_bare, jens_clone):
        git_wrapper.fetch(repo)
    git_wrapper.reset(jens_clone, 'origin/qa', hard=True)
    self.assertEqual(get_repository_head(jens_clone), expected_sha)
    self.assertTrue(os.path.isfile("%s/%s" % (jens_clone, fname)))

    # A commit removing the file must make it disappear after sync.
    new_commit = add_commit_to_branch(user_repo, 'qa', fname=fname,
                                      remove=True)
    for repo in (jens_bare, jens_clone):
        git_wrapper.fetch(repo)
    git_wrapper.reset(jens_clone, 'origin/qa', hard=True)
    self.assertFalse(os.path.isfile("%s/%s" % (jens_clone, fname)))
def test_reset_and_fetch_refs_match_after_remote_commit(self):
    """fetch + hard reset must track remote adds and removals of a file.

    Fixes: ``assertEquals`` is a deprecated alias removed in Python 3.12
    (use ``assertEqual``); the unused local ``new_commit`` is dropped.
    """
    (bare, user) = create_fake_repository(self.sandbox_path, ['qa'])
    jens_bare = "%s/_bare" % self.settings.BAREDIR
    git_wrapper.clone(jens_bare, bare, bare=True)
    jens_clone = "%s/_clone" % self.settings.CLONEDIR
    git_wrapper.clone(jens_clone, jens_bare, bare=False, branch='qa')
    fname = 'should_be_checkedout'
    # Remote commit adding the file: clone must pick it up after sync.
    commit_id = add_commit_to_branch(user, 'qa', fname=fname)
    git_wrapper.fetch(jens_bare)
    git_wrapper.fetch(jens_clone)
    git_wrapper.reset(jens_clone, 'origin/qa', hard=True)
    self.assertEqual(get_repository_head(jens_clone), commit_id)
    self.assertTrue(os.path.isfile("%s/%s" % (jens_clone, fname)))
    # Remote commit removing the file: clone must drop it after sync.
    add_commit_to_branch(user, 'qa', fname=fname, remove=True)
    git_wrapper.fetch(jens_bare)
    git_wrapper.fetch(jens_clone)
    git_wrapper.reset(jens_clone, 'origin/qa', hard=True)
    self.assertFalse(os.path.isfile("%s/%s" % (jens_clone, fname)))
# Fetch the metadata remote and hard-reset to origin/master while holding
# an exclusive flock on the metadata file (shared with jens-gitlab-producer).
# NOTE(review): this is a statement run, not a def — `path`/`settings` are
# assumed to be bound by the enclosing scope.
# Fix: the Python-2-only "except X, error" clauses are SyntaxErrors on
# Python 3; converted to "except X as error".
try:
    git.fetch(path)
    try:
        metadata = open(settings.REPO_METADATA, 'r')
    except IOError as error:
        raise JensError("Could not open '%s' to put a lock on it"
                        % settings.REPO_METADATA)
    # jens-gitlab-producer collaborates with jens-update asynchronously
    # so have to make sure that exclusive access to the file when writing
    # is guaranteed. Of course, the reader will have to implement the same
    # protocol on the other end.
    try:
        logging.info("Trying to acquire a lock to refresh the metadata...")
        fcntl.flock(metadata, fcntl.LOCK_EX)
        logging.debug("Lock acquired")
    except IOError as error:
        metadata.close()
        raise JensError("Could not lock '%s'" % settings.REPO_METADATA)
    git.reset(path, "origin/master", hard=True)
    try:
        logging.debug(
            "Trying to release the lock used to refresh the metadata...")
        fcntl.flock(metadata, fcntl.LOCK_UN)
        logging.debug("Lock released")
    except IOError as error:
        raise JensError("Could not unlock '%s'" % settings.REPO_METADATA)
    finally:
        metadata.close()
except JensGitError as error:
    raise JensError("Couldn't refresh repositories metadata (%s)" % error)
def _expand_clones(partition, name, inventory, inventory_lock, new_refs,
                   moved_refs, deleted_refs):
    """Synchronise working clones with changes detected in a bare repo.

    Creates clones for ``new_refs``, fetch+hard-resets clones for
    ``moved_refs`` and removes clones (and inventory entries) for
    ``deleted_refs``.  Per-ref failures are logged, never raised.

    Args:
        partition: repository partition (e.g. modules/hostgroups).
        name: repository short name.
        inventory: shared proxy dict mapping name -> list of refs.
        inventory_lock: optional lock guarding inventory mutations.
        new_refs, moved_refs, deleted_refs: refs to add/update/remove.
    """
    settings = Settings()
    bare_path = _compose_bare_repository_path(name, partition)
    if new_refs:
        logging.debug("Processing new refs of %s/%s (%s)...",
                      partition, name, new_refs)
        for refname in new_refs:
            clone_path = _compose_clone_repository_path(name, partition, refname)
            logging.info("Populating new ref '%s'", clone_path)
            try:
                if ref_is_commit(refname):
                    commit_id = refname.replace(settings.HASHPREFIX, '')
                    logging.debug("Will create a clone pointing to '%s'",
                                  commit_id)
                    git.clone(clone_path, "%s" % bare_path, shared=True)
                    git.reset(clone_path, commit_id, hard=True)
                else:
                    git.clone(clone_path, "%s" % bare_path, branch=refname)
                # Needs reset so the proxy notices about the change on the mutable
                # http://docs.python.org/2.7/library/multiprocessing.html#managers
                # Locking on the assignment is guaranteed by the library, but
                # additional locking is needed as A = A + 1 is a critical section.
                if inventory_lock:
                    inventory_lock.acquire()
                try:
                    inventory[name] += [refname]
                finally:
                    # Fix: release in a finally so a failing proxy update
                    # cannot leave the lock held forever.
                    if inventory_lock:
                        inventory_lock.release()
            except JensGitError as error:
                if os.path.isdir(clone_path):
                    shutil.rmtree(clone_path)
                logging.error("Unable to create clone '%s' (%s)",
                              clone_path, error)
    if moved_refs:
        logging.debug("Processing moved refs of %s/%s (%s)...",
                      partition, name, moved_refs)
        for refname in moved_refs:
            clone_path = _compose_clone_repository_path(name, partition, refname)
            logging.info("Updating ref '%s'", clone_path)
            try:
                # If this fails, the bare would have the correct HEADs
                # but the clone will be out of date and won't ever be
                # updated until a new commit arrives to the bare.
                # Reason: a lock file left behind because Git was killed
                # mid-flight.
                git.fetch(clone_path)
                git.reset(clone_path, "origin/%s" % refname, hard=True)
                logging.info("Updated ref '%s' (%s)", clone_path,
                             git.get_head(clone_path, short=True))
            except JensGitError as error:
                logging.error("Unable to refresh clone '%s' (%s)",
                              clone_path, error)
    if deleted_refs:
        logging.debug("Processing deleted refs of %s/%s (%s)...",
                      partition, name, deleted_refs)
        for refname in deleted_refs:
            clone_path = _compose_clone_repository_path(name, partition, refname)
            logging.info("Removing %s", clone_path)
            try:
                if os.path.isdir(clone_path):
                    shutil.rmtree(clone_path)
                # Fix: take the lock *before* the membership check — the
                # previous check-then-act outside the lock raced with
                # concurrent inventory writers — and release it in a
                # finally so exceptions cannot leak the lock.
                if inventory_lock:
                    inventory_lock.acquire()
                try:
                    if refname in inventory[name]:
                        element = inventory[name]
                        element.remove(refname)
                        inventory[name] = element
                        logging.info("%s/%s deleted from inventory",
                                     name, refname)
                finally:
                    if inventory_lock:
                        inventory_lock.release()
            except OSError as error:
                logging.error("Couldn't delete %s/%s/%s (%s)",
                              partition, name, refname, error)
def test_reset_to_head(self):
    """Resetting a repository to its own current HEAD must not fail."""
    (_bare, user_repo) = create_fake_repository(self.sandbox_path, ['qa'])
    current_head = get_repository_head(user_repo)
    git_wrapper.reset(user_repo, current_head)
def test_reset_to_commit(self):
    """reset() must move HEAD back to a previously recorded commit.

    Fix: the original test added a commit and reset, but asserted
    nothing (``commit_id`` was unused), so it could never fail on a
    broken reset.  Verify HEAD actually points at the old commit again.
    """
    (bare, user) = create_fake_repository(self.sandbox_path, ['qa'])
    head = get_repository_head(user)
    commit_id = add_commit_to_branch(user, "master")
    git_wrapper.reset(user, head)
    # After the reset, HEAD must be back at the recorded commit.
    self.assertEqual(get_repository_head(user), head)
error) if moved_refs: logging.debug("Processing moved refs of %s/%s (%s)...", partition, name, moved_refs) for refname in moved_refs: clone_path = _compose_clone_repository_path(name, partition, refname) logging.info("Updating ref '%s'", clone_path) try: # If this fails, the bare would have the correct HEADs # but the clone will be out of date and won't ever be # updated until a new commit arrives to the bare. # Reason: a lock file left behind because Git was killed # mid-flight. git.fetch(clone_path) git.reset(clone_path, "origin/%s" % refname, hard=True) logging.info("Updated ref '%s' (%s)", clone_path, git.get_head(clone_path, short=True)) except JensGitError, error: logging.error("Unable to refresh clone '%s' (%s)", clone_path, error) if deleted_refs: logging.debug("Processing deleted refs of %s/%s (%s)...", partition, name, deleted_refs) for refname in deleted_refs: clone_path = _compose_clone_repository_path(name, partition, refname) logging.info("Removing %s", clone_path) try: if os.path.isdir(clone_path): shutil.rmtree(clone_path)