def test_fetch_existing_bare_repository_and_prune(self):
    (bare, user) = create_fake_repository(self.sandbox_path, ['qa', 'f'])
    jens_bare = "%s/_bare" % self.settings.BAREDIR
    git_wrapper.clone(jens_bare, bare, bare=True)
    git_wrapper.fetch(jens_bare, prune=True)
    self.assertTrue('f' in git_wrapper.get_refs(jens_bare))
    remove_branch_from_repo(user, 'f')
    git_wrapper.fetch(jens_bare, prune=False)
    self.assertTrue('f' in git_wrapper.get_refs(jens_bare))
    git_wrapper.fetch(jens_bare, prune=True)
    self.assertFalse('f' in git_wrapper.get_refs(jens_bare))
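# A minimal sketch (not part of the test suite) of the plain git behaviour the
# prune test above relies on. The function name and the subprocess-based
# implementation are illustrative assumptions, not the Jens git_wrapper API;
# only the 'git fetch --prune' semantics are taken as given.
import subprocess

def _fetch_with_optional_prune(repo_path, prune=True):
    """Fetch from the configured remotes, optionally pruning stale refs."""
    cmd = ['git', 'fetch']
    if prune:
        # --prune removes remote-tracking refs whose upstream branch is gone,
        # which is why the deleted 'f' branch only disappears after a pruning
        # fetch in the test above.
        cmd.append('--prune')
    subprocess.check_call(cmd, cwd=repo_path)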
def test_get_head_existing_repository(self):
    (bare, user) = create_fake_repository(self.sandbox_path, ['qa'])
    jens_clone = "%s/_clone" % self.settings.CLONEDIR
    git_wrapper.clone(jens_clone, bare, bare=False, branch='qa')
    commit_id = add_commit_to_branch(user, 'qa')
    git_wrapper.fetch(jens_clone)
    git_wrapper.reset(jens_clone, 'origin/qa', hard=True)
    self.assertEqual(git_wrapper.get_head(jens_clone), commit_id)
    self.assertEqual(git_wrapper.get_head(jens_clone, short=False), commit_id)
    self.assertEqual(git_wrapper.get_head(jens_clone, short=True), commit_id[0:7])
def setUp(self):
    super(MetadataTest, self).setUp()
    self.settings = Settings()
    (self.environments_bare, self.environments) = \
        create_fake_repository(self.sandbox_path)
    shutil.rmtree(self.settings.ENV_METADATADIR)
    clone(self.settings.ENV_METADATADIR, self.environments_bare,
        branch='master')
    (self.repositories_bare, self.repositories) = \
        create_fake_repository(self.sandbox_path)
    shutil.rmtree(self.settings.REPO_METADATADIR)
    clone(self.settings.REPO_METADATADIR, self.repositories_bare,
        branch='master')
def setUp(self):
    super(MaintenanceTest, self).setUp()
    self.settings = Settings()
    # validate_directories() expects both directories below to look
    # like a Git repository.
    (self.environments_bare, self.environments) = \
        create_fake_repository(self.sandbox_path)
    shutil.rmtree(self.settings.ENV_METADATADIR)
    clone(self.settings.ENV_METADATADIR, self.environments_bare,
        branch='master')
    (self.repositories_bare, self.repositories) = \
        create_fake_repository(self.sandbox_path)
    shutil.rmtree(self.settings.REPO_METADATADIR)
    clone(self.settings.REPO_METADATADIR, self.repositories_bare,
        branch='master')
def _create_new_repositories(new_repositories, partition, definition,
        inventory, desired):
    settings = Settings()
    created = []
    for repository in new_repositories:
        logging.info("Cloning and expanding %s/%s...", partition, repository)
        bare_path = _compose_bare_repository_path(repository, partition)
        bare_url = definition['repositories'][partition][repository]
        try:
            git.clone(bare_path, bare_url, bare=True)
        except JensGitError as error:
            logging.error("Unable to clone '%s' (%s). Skipping.",
                repository, error)
            if os.path.exists(bare_path):
                shutil.rmtree(bare_path)
            continue
        try:
            refs = list(git.get_refs(bare_path).keys())
        except JensGitError as error:
            logging.error("Unable to get refs of '%s' (%s). Skipping.",
                repository, error)
            shutil.rmtree(bare_path)
            logging.debug("Bare repository %s has been removed", bare_path)
            continue

        # Check if the repository has the mandatory branches
        if all([ref in refs for ref in settings.MANDATORY_BRANCHES]):
            # Expand only the mandatory and available requested branches;
            # commits will always be attempted to be expanded.
            new = set(settings.MANDATORY_BRANCHES)
            new = new.union([ref for ref in desired.get(repository, [])
                             if ref_is_commit(ref) or ref in refs])
            inventory[repository] = []
            _expand_clones(partition, repository, inventory, None,
                new, [], [])
            created.append(repository)
        else:
            logging.error("Repository '%s' lacks some of the mandatory "
                "branches. Skipping.", repository)
            shutil.rmtree(bare_path)
            logging.debug("Bare repository %s has been removed", bare_path)
    return created
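# Illustrative only: a hypothetical shape of the 'definition' and 'desired'
# arguments that _create_new_repositories() reads above. The partition,
# repository name and URL are made up; only the nesting
# definition['repositories'][partition][repository] and
# desired.get(repository, []) is taken from the code itself.
example_definition = {
    'repositories': {
        'modules': {
            'apache': 'http://git.example.org/it-puppet-module-apache',
        },
    },
}
example_desired = {
    # Branch names are expanded only if present in the bare repository;
    # commit refs (settings.HASHPREFIX + sha) are always attempted.
    'apache': ['qa'],
}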
def test_reset_and_fetch_refs_match_after_remote_commit(self):
    (bare, user) = create_fake_repository(self.sandbox_path, ['qa'])
    jens_bare = "%s/_bare" % self.settings.BAREDIR
    git_wrapper.clone(jens_bare, bare, bare=True)
    jens_clone = "%s/_clone" % self.settings.CLONEDIR
    git_wrapper.clone(jens_clone, jens_bare, bare=False, branch='qa')
    fname = 'should_be_checkedout'
    commit_id = add_commit_to_branch(user, 'qa', fname=fname)
    git_wrapper.fetch(jens_bare)
    git_wrapper.fetch(jens_clone)
    git_wrapper.reset(jens_clone, 'origin/qa', hard=True)
    self.assertEqual(get_repository_head(jens_clone), commit_id)
    self.assertTrue(os.path.isfile("%s/%s" % (jens_clone, fname)))
    new_commit = add_commit_to_branch(user, 'qa', fname=fname, remove=True)
    git_wrapper.fetch(jens_bare)
    git_wrapper.fetch(jens_clone)
    git_wrapper.reset(jens_clone, 'origin/qa', hard=True)
    self.assertFalse(os.path.isfile("%s/%s" % (jens_clone, fname)))
def _expand_clones(partition, name, inventory, inventory_lock,
        new_refs, moved_refs, deleted_refs):
    settings = Settings()
    bare_path = _compose_bare_repository_path(name, partition)

    if new_refs:
        logging.debug("Processing new refs of %s/%s (%s)...",
            partition, name, new_refs)
        for refname in new_refs:
            clone_path = _compose_clone_repository_path(name, partition, refname)
            logging.info("Populating new ref '%s'", clone_path)
            try:
                if ref_is_commit(refname):
                    commit_id = refname.replace(settings.HASHPREFIX, '')
                    logging.debug("Will create a clone pointing to '%s'", commit_id)
                    git.clone(clone_path, "%s" % bare_path, shared=True)
                    git.reset(clone_path, commit_id, hard=True)
                else:
                    git.clone(clone_path, "%s" % bare_path, branch=refname)
                # The value has to be reassigned so the Manager proxy notices
                # the change on the mutable object:
                # http://docs.python.org/2.7/library/multiprocessing.html#managers
                # Locking on the assignment is guaranteed by the library, but
                # additional locking is needed as A = A + 1 is a critical section.
                if inventory_lock:
                    inventory_lock.acquire()
                inventory[name] += [refname]
                if inventory_lock:
                    inventory_lock.release()
            except JensGitError as error:
                if os.path.isdir(clone_path):
                    shutil.rmtree(clone_path)
                logging.error("Unable to create clone '%s' (%s)",
                    clone_path, error)

    if moved_refs:
        logging.debug("Processing moved refs of %s/%s (%s)...",
            partition, name, moved_refs)
        for refname in moved_refs:
            clone_path = _compose_clone_repository_path(name, partition, refname)
            logging.info("Updating ref '%s'", clone_path)
            try:
                # If this fails, the bare would have the correct HEADs
                # but the clone will be out of date and won't ever be
                # updated until a new commit arrives to the bare.
                # Reason: a lock file left behind because Git was killed
                # mid-flight.
                git.fetch(clone_path)
                git.reset(clone_path, "origin/%s" % refname, hard=True)
                logging.info("Updated ref '%s' (%s)", clone_path,
                    git.get_head(clone_path, short=True))
            except JensGitError as error:
                logging.error("Unable to refresh clone '%s' (%s)",
                    clone_path, error)

    if deleted_refs:
        logging.debug("Processing deleted refs of %s/%s (%s)...",
            partition, name, deleted_refs)
        for refname in deleted_refs:
            clone_path = _compose_clone_repository_path(name, partition, refname)
            logging.info("Removing %s", clone_path)
            try:
                if os.path.isdir(clone_path):
                    shutil.rmtree(clone_path)
                if refname in inventory[name]:
                    if inventory_lock:
                        inventory_lock.acquire()
                    element = inventory[name]
                    element.remove(refname)
                    inventory[name] = element
                    if inventory_lock:
                        inventory_lock.release()
                logging.info("%s/%s deleted from inventory", name, refname)
            except OSError as error:
                logging.error("Couldn't delete %s/%s/%s (%s)",
                    partition, name, refname, error)
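# A minimal, self-contained sketch of the multiprocessing Manager-proxy
# pattern the comments in _expand_clones() refer to; the function and variable
# names here are illustrative and not part of Jens. In-place mutation of a
# list held inside a Manager dict is not propagated, so the value has to be
# read, modified and reassigned, and a lock guards that read-modify-write.
from multiprocessing import Manager, Lock

def _add_ref(inventory, lock, name, refname):
    with lock:
        # 'inventory[name] += [refname]' reads a copy of the list, extends it
        # locally and then triggers __setitem__ on the proxy, so the change is
        # visible to other processes; the lock makes the whole step atomic.
        inventory[name] += [refname]

if __name__ == '__main__':
    manager = Manager()
    inventory = manager.dict({'apache': []})
    lock = Lock()
    _add_ref(inventory, lock, 'apache', 'qa')
    print(dict(inventory))  # {'apache': ['qa']}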
def test_fetch_existing_bare_repository(self):
    (bare, user) = create_fake_repository(self.sandbox_path, ['qa'])
    new_bare_path = "%s/cloned" % self.settings.CLONEDIR
    git_wrapper.clone(new_bare_path, bare, bare=True)
    git_wrapper.fetch(new_bare_path)
def test_clone_mirrored_repository(self):
    (bare, user) = create_fake_repository(self.sandbox_path, ['qa'])
    clone_path = "%s/repo" % self.settings.CLONEDIR
    git_wrapper.clone(clone_path, bare, shared=True)
    self.assertTrue(os.path.isfile("%s/.git/objects/info/alternates" % clone_path))
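# Background sketch, assuming 'shared=True' corresponds to 'git clone --shared'
# (the helper name below is hypothetical, not the Jens wrapper). A shared
# clone borrows objects from the source repository via
# .git/objects/info/alternates instead of copying them, which is exactly what
# the assertion in the test above checks for.
import subprocess

def _clone_shared(target_path, source_path):
    # --shared records source_path in the alternates file of the new clone.
    subprocess.check_call(['git', 'clone', '--shared', source_path, target_path])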
def test_clone_existing_repository(self):
    (bare, user) = create_fake_repository(self.sandbox_path, ['qa'])
    git_wrapper.clone("%s/repo" % self.settings.CLONEDIR, user)
def test_clone_existing_bare_repository_specific_branch(self):
    (bare, user) = create_fake_repository(self.sandbox_path, ['qa', 'foo'])
    git_wrapper.clone("%s/repo" % self.settings.CLONEDIR, bare,
        bare=False, branch='foo')