def prepare_repo(repos_dir, repo_name, repo_url):
    # Confirm that a git repo exists at the given local directory. If it does
    # not exist, then attempt to clone it. If it already exists, then
    # fetch new branches and prune old branches.
    log.debug("Preparing repo " + repo_name)
    repo_path = os.path.join(repos_dir, repo_name)
    dot_git_path = os.path.join(repo_path, '.git')
    clone_the_repo = False

    if not os.path.isdir(repo_path):
        log.warning(f"Directory {repo_path} not found")
        clone_the_repo = True
    elif not os.path.isdir(dot_git_path):
        # If we find a directory in our mounted volume that is not a git
        # repo, then something is seriously wrong. Delete the directory and
        # start again with a clean repo.
        log.warning(f"Deleting invalid git repo found at {repo_path}")
        shutil.rmtree(repo_path)
        clone_the_repo = True
    else:
        log.debug(f"Directory {repo_path} is apparently a git repo")

    if clone_the_repo:
        log.info(f"Cloning {repo_name} ...")
        os.chdir(repos_dir)
        git.clone(repo_url)
    else:
        # The git repo exists, so bring it up to date
        log.info(f"Fetching changes to {repo_name}")
        os.chdir(repo_path)
        git.fetch('--all')

    return repo_path
def run(self, sorted_repos, versions_file):
    self.log("`git checkout` repositories...")

    versions = None
    with open(versions_file, 'r') as f:
        versions = json.load(f)

    cd(self._basedir)
    for repo in sorted_repos:
        if repo not in versions:
            self.log("skipping {0}, no version specified.".format(repo))
            continue
        where = versions[repo]  # where to checkout
        self.log("Checkout {0} -> {1}".format(repo, where))
        with push_pop(repo):
            git.fetch()
            git.checkout("--quiet", where)
            # in case the repo was updated rather than freshly cloned:
            git.reset("--hard", where)
    self.log("done checking out repos.")
def lookup_branch(self, branch_name):
    if not stdout(git('ls-remote', '--heads', self.name, branch_name)):
        return None
    # The branch exists in the remote
    git.fetch(self.git_remote.name, branch_name)
    git_branch = self.gl_repo.git_repo.lookup_branch(
        self.git_remote.name + '/' + branch_name, pygit2.GIT_BRANCH_REMOTE)
    return RemoteBranch(git_branch, self.gl_repo)
def ensure_i18n_remote(self, args: argparse.Namespace) -> None:
    """
    Make sure we have a git remote for the i18n repo.
    """
    k = {"_cwd": args.root}
    if b"i18n" not in git.remote(**k).stdout:
        git.remote.add("i18n", args.url, **k)
    git.fetch("i18n", **k)
def update(conf, args):
    '''Apply updates from the upstream repository.'''
    print('Checking for updates...')

    # fetch changes from the canonical repo
    git.fetch(constants.GIT_REMOTE, no_tags=True, quiet=True)

    # get a list of the commit messages for the incoming changes
    updates = git('--no-pager', 'log', '..FETCH_HEAD', oneline=True)
    updates = [tuple(m.split(None, 1)) for m in updates.splitlines()]

    # print out a list of the incoming updates
    if len(updates) > 0:
        print('Available updates:')

        max_updates = 10
        for commit, msg in updates[:max_updates]:
            print(color.yellow('*'), msg)

        # print a special message if too many updates are available
        if len(updates) > max_updates:
            print('...and', color.green(len(updates) - max_updates), 'more!')
            print('Run `git log ..FETCH_HEAD` to see the full list')

        # bail if we have uncommitted changes (git exits non-0 in this case)
        if git.diff(exit_code=True, quiet=True, _ok_code=(0, 1)).exit_code != 0:
            raise ValueError('The repository has uncommitted changes. Handle them, '
                             'then try updating again.')

        print('Applying the update...')

        # stash _all_ changes to the repo
        git.stash(include_untracked=True, all=True, quiet=True)

        # squash all the fetched commits together and merge them into master
        git.merge('@{u}', squash=True, quiet=True)

        # add a nice update commit that includes the latest upstream commit hash
        commit_message = 'Update dotparty to %s' % updates[0][0]
        git.commit(m=commit_message, quiet=True)

        # TODO: if squash merge failed, roll back to the pre-update state and
        # complain with instructions for the user to do their own update.

        # un-stash all our old changes
        git.stash('pop', quiet=True)

        # push our changes back up to the remote
        git.push(quiet=True)

        print('Update successful!')
    else:
        print('Already up-to-date!')
def lookup_tag(self, tag_name):
    tag_info = stdout(git('ls-remote', '--tags', self.name, tag_name))
    if not tag_info:
        return None
    # The tag exists in the remote
    git.fetch(self.git_remote.name, tag_name)
    regex = re.compile(r'(.*)\trefs/tags/.*')
    commit_id = regex.match(tag_info).group(1)
    commit = self.gl_repo.git_repo.get(commit_id).peel(pygit2.GIT_OBJ_COMMIT)
    return RemoteTag(self.git_remote.name, tag_name, commit)
def main():
    options = json.load(sys.stdin)
    github_remote_url = f"{get_github_url()}/{options['target_remote']}.git"

    if len(list(pathlib.Path('.').iterdir())) > 0:
        print("Found existing files in work directory", file=sys.stderr)
        assert pathlib.Path('.git').exists(), \
            "if files are present in the work dir, it must be a git work tree"
        remote = "origin"
        if options["source_remote_name"] == remote:
            remote = remote + "2"
        add_or_set_git_remote(remote, github_remote_url)
        print(f"Fetching from {github_remote_url}", file=sys.stderr)
        git.fetch(remote)
        print(f"Checking out {options['target_branch']} "
              f"from {remote}/{options['target_branch']}", file=sys.stderr)
        git.checkout("-B", options['target_branch'],
                     f"{remote}/{options['target_branch']}")
        print("Cleaning work tree", file=sys.stderr)
        git.reset("--hard", "HEAD")
        git.clean("-fdx")
    else:
        print(f"Cloning {options['target_branch']} from {github_remote_url}",
              file=sys.stderr)
        git.clone("--branch", options['target_branch'], github_remote_url, ".")

    if options['target_remote'] != options['source_remote']:
        source_remote_name = options['source_remote_name']
        add_or_set_git_remote(
            source_remote_name,
            f"{get_github_url()}/{options['source_remote']}.git")
        print(f"Fetching from {get_github_url()}/{options['source_remote']}.git",
              file=sys.stderr)
        git.fetch(source_remote_name)

    set_git_author_info(f"GitHub Action {os.environ['GITHUB_ACTION']}",
                        "action@localhost")

    try:
        git("cherry-pick", options['source_commits'])
        print(f"Source commits ({options['source_commits']}) were successfully cherry-picked "
              f"onto {options['target_remote']}:{options['target_branch']}",
              file=sys.stderr)
    except sh.ErrorReturnCode:
        print(f"Source commits ({options['source_commits']}) could not be cherry-picked "
              f"onto {options['target_remote']}:{options['target_branch']}",
              file=sys.stderr)
        raise
def lookup_branch(self, branch_name):
    if not stdout(git('ls-remote', '--heads', self.name, branch_name)):
        return None
    # The branch exists in the remote
    git.fetch(self.git_remote.name, branch_name)
    git_branch = self.gl_repo.git_repo.lookup_branch(
        self.git_remote.name + '/' + branch_name, pygit2.GIT_BRANCH_REMOTE)
    # Check again for the branch being None and retry with a full fetch,
    # as observed in https://github.com/sdg-mit/gitless/issues/211
    if git_branch is None:
        git.fetch(self.git_remote.name)
        git_branch = self.gl_repo.git_repo.lookup_branch(
            self.git_remote.name + '/' + branch_name, pygit2.GIT_BRANCH_REMOTE)
    return RemoteBranch(git_branch, self.gl_repo)
def manage_checkout(self):
    logger.info('manage checkout')
    if not os.path.exists(self.checkouts_dir):
        os.makedirs(self.checkouts_dir)
    if not os.path.exists(self.checkout_dir):
        logger.info('git clone %s %s' % (self.url, self.checkout_dir))
        git.clone(self.url, self.checkout_dir)
    else:
        logger.info('git fetch -a')
        git.fetch('-a', _cwd=self.checkout_dir)
        logger.info('git pull --rebase')
        git.pull('--rebase', _cwd=self.checkout_dir)
def repository(namespace, name, branch='master'):
    '''Returns a repository'''
    with TemporaryDirectory() as download_path:
        old_directory = str(pwd()).strip()
        try:
            git.clone('https://github.com/{0}/{1}.git'.format(namespace, name),
                      download_path)
            cd(download_path)
            git.fetch('origin', branch)
            git.checkout(branch)
            yield (download_path,
                   git('rev-parse', 'HEAD'),
                   redis.Dict(key="{0}.{1}".format(namespace, name)))
        except ErrorReturnCode_128:
            mkdir(download_path)
            yield (None, None, None)
        cd(old_directory)
def merge_and_check(base, head):
    """Merge <head> into <base>, then run some tests.

    Only modifies the working tree---doesn't actually create a merge commit.
    Resets and cleans the repo and leaves it in a detached HEAD state.

    Raises sh.ErrorReturnCode if the merge or the tests fail.
    """
    # Make sure we're up to date
    git.fetch()

    # Make sure we can do a clean checkout
    git.reset(hard=True)
    git.clean('-dfx')
    git.checkout('origin/' + base)

    # Merge the working tree, but don't modify the index
    git.merge('origin/' + head, no_commit=True)

    # Check the PR!
    check()
def deploy_code(repo):
    logging.info("Deploying repo: %s, back2github: %s", repo["dir"], repo["sync"])
    try:
        logging.info("Change working directory to %s", repo["dir"])
        sh.cd(repo["dir"])

        logging.info("Update code, git pull origin")
        git.pull("origin", _out=logging.info, _err=logging.error)
        logging.info("done")

        logging.info("Delete old branches, git fetch -p")
        git.fetch("origin", "-p", _out=logging.info, _err=logging.error)
        logging.info("done")

        if repo["sync"]:
            logging.info("Back2github, git push github")
            git.push("github", _out=logging.info, _err=logging.error, _bg=True)
    except Exception as e:
        logging.info("Deploy error: %s", e)
def lookup_branches(self, branch_names):
    try:
        git.fetch(self.git_remote.name, branch_names)
    except Exception:
        return None

    remote_branches = []
    for branch_name in branch_names:
        git_branch = self.gl_repo.git_repo.lookup_branch(
            self.git_remote.name + '/' + branch_name, pygit2.GIT_BRANCH_REMOTE)
        # Check again for the branch being None and retry with a full fetch,
        # as observed in https://github.com/sdg-mit/gitless/issues/211
        if git_branch is None:
            git.fetch(self.git_remote.name)
            git_branch = self.gl_repo.git_repo.lookup_branch(
                self.git_remote.name + '/' + branch_name, pygit2.GIT_BRANCH_REMOTE)
        remote_branches.append(RemoteBranch(git_branch, self.gl_repo))
    return remote_branches
def clone(self):
    self.tmp_dir = tempfile.mkdtemp(suffix='facio')

    try:
        from sh import git
    except ImportError:
        raise Exception  # TODO: Custom exception

    try:
        git = git.bake(_cwd=self.tmp_dir)
        git.clone(self.repo, self.tmp_dir)
        git.fetch('--all')
        git.checkout('master')  # TODO: Branch prompt to the user
    except Exception:
        raise Exception  # TODO: Custom exception

    rmtree(os.path.join(self.tmp_dir, '.git'))

    with indent(4, quote=' >'):
        puts(blue('Clone complete'))
def sync(path, git_reference):
    new_env = resetEnv()
    logger.debug('Syncing {}'.format(path))
    old_path = os.getcwd()
    jobid = get_current_job().id
    _open_console(jobid)
    try:
        os.chdir(path)
        _log_console(jobid, 'Syncing project with Git.\n')
        _l = lambda line: _log_console(jobid, str(line))
        git.fetch(_out=_l, _err=_l, _env=new_env).wait()
        git.reset('--hard', 'origin/{}'.format(git_reference),
                  _out=_l, _err=_l, _env=new_env).wait()
        git.submodule('sync', _out=_l, _err=_l, _env=new_env).wait()
        git.submodule('update', _out=_l, _err=_l, _env=new_env).wait()
    except Exception:
        logger.error(
            'Failed to sync project at {}'.format(path), exc_info=True
        )
    _close_console(jobid)
    os.chdir(old_path)
def clone(self):
    """
    Clone the git repository into a temporary directory.
    """
    try:
        from sh import git
    except ImportError:
        raise FacioException('Git must be installed to use git+ '
                             'template paths')
    temp_directory = self.get_temp_directory()
    self.out('Git Cloning {0} to {1}'.format(self.path, temp_directory))
    try:
        git = git.bake(_cwd=temp_directory)
        git.clone(self.path, temp_directory)
        git.fetch('--all')
        git.checkout('master')
    except Exception:
        raise FacioException('Failed to clone git repository '
                             'at {0}'.format(self.path))
    return temp_directory
def load_code(repo, ref):
    print("loading code from " + repo)
    os.chdir(cwd)

    # Look up our original repo so that we only load objects once.
    base_repo = redis.get("source:" + repo)
    if base_repo is None:
        r = requests.get("https://api.github.com/repos/" + repo,
                         auth=(config["overall"]["github-username"],
                               github_personal_access_token))
        r = r.json()
        base_repo = "source"
        if "source" in r:
            base_repo = r["source"]["full_name"]
        redis.set("source:" + repo, base_repo)
    if base_repo == "source":
        base_repo = repo
    if type(base_repo) is bytes:
        base_repo = base_repo.decode("utf-8")
    print("Source repo of " + repo + " is " + base_repo)

    repo_path = "repos/" + base_repo
    github_base_url = "https://github.com/" + base_repo + ".git"
    github_head_url = "https://github.com/" + repo + ".git"

    print("waiting for repo lock")
    with redis.lock(base_repo, timeout=5 * 60, blocking_timeout=20 * 60):
        if not os.path.isdir(repo_path):
            os.makedirs(repo_path)
            git.clone(github_base_url, repo_path)
            # We must make .tmp after cloning because cloning will fail when the
            # directory isn't empty.
            os.makedirs(repo_path + "/.tmp")
        os.chdir(repo_path)
        git.fetch(github_head_url, ref)
    print("loaded", repo, ref)
def main():
    github_remote_url = f"{get_github_url()}/{get_input('target_remote')}.git"
    work_dir = pathlib.Path(get_input("work_dir"))

    if work_dir.is_dir() and len(list(work_dir.iterdir())) > 0:
        os.chdir(work_dir)
        remote = "origin"
        if get_input("source_remote_name") == remote:
            remote = remote + "2"
        add_or_set_git_remote(remote, github_remote_url)
        git.fetch(remote)
        git.checkout("-B", get_input("target_branch"),
                     f"{remote}/{get_input('target_branch')}")
        git.reset("--hard", "HEAD")
        git.clean("-fdx")
    else:
        git.clone("--branch", get_input("target_branch"), github_remote_url,
                  str(work_dir))
        os.chdir(work_dir)

    if get_input("target_remote") != get_input("source_remote"):
        source_remote_name = get_input("source_remote_name")
        add_or_set_git_remote(
            source_remote_name,
            f"{get_github_url()}/{get_input('source_remote')}.git")
        git.fetch(source_remote_name)

    set_git_author_info(f"GitHub Action {os.environ['GITHUB_ACTION']}",
                        "action@localhost")

    try:
        git("cherry-pick", get_input("source_commits"))
        print("Source commits were cherry-picked successfully", file=sys.stderr)
    except sh.ErrorReturnCode:
        print("Source commits could not be cherry-picked", file=sys.stderr)
        raise
def _get_provisioner_repo(self):
    # Use the configured git repository, if any
    provisioner_git_repo = self.config.get('git_repo')
    provisioner_git_revision = self.config.get('git_revision')

    git_local_mirror = self._get_mirror_path(provisioner_git_repo)
    zabbix_repo = self.global_config.get('zabbix_repo', ZABBIX_REPO)
    lock_path = get_lock_path_from_repo(provisioner_git_repo)
    log("Getting provisioner features from {r}".format(r=provisioner_git_repo),
        self._log_file)

    try:
        output = git("ls-remote", "--exit-code", provisioner_git_repo,
                     provisioner_git_revision).strip()
        log("Provisioner repository checked successfully with output: " + output,
            self._log_file)
    except sh.ErrorReturnCode:
        log("Invalid provisioner repository or invalid credentials. "
            "Please check your yaml 'config.yml' file", self._log_file)
        raise

    try:
        git_acquire_lock(lock_path, self._log_file)

        # Creates the Provisioner local mirror
        if not os.path.exists(git_local_mirror):
            log("Creating local mirror [{r}] for the first time".format(
                r=git_local_mirror), self._log_file)
            os.makedirs(git_local_mirror)
            os.chdir(git_local_mirror)
            git.init(['--bare'])
            git.remote(['add', self.name, provisioner_git_repo])
            git.remote(['add', 'zabbix', zabbix_repo])

        log("Fetching local mirror [{r}] remotes".format(r=git_local_mirror),
            self._log_file)
        os.chdir(git_local_mirror)
        git.fetch(['--all'])
    finally:
        git_release_lock(lock_path, self._log_file)

    log("Cloning [{r}] repo with local mirror reference".format(
        r=provisioner_git_repo), self._log_file)
    git.clone(['--reference', git_local_mirror, provisioner_git_repo,
               '-b', provisioner_git_revision,
               '--single-branch', self.local_repo_path + '/'])

    if os.path.exists(self.local_repo_path + '/.gitmodules'):
        os.chdir(self.local_repo_path)
        log("Re-map submodules on local git mirror", self._log_file)
        git_remap_submodule(self.local_repo_path, zabbix_repo,
                            git_local_mirror, self._log_file)
        log("Submodule init and update", self._log_file)
        git.submodule('init')
        git.submodule('update')
def tag(obj, tag_name, remote, yes):
    current_branch = get_current_branch()
    remote_url = git.remote("get-url", remote).strip()
    gh, repo = obj

    tag = split_version(tag_name)
    tag_name = format_version(tag)
    major, minor, fix = tag

    with Spinner(f"Checking for milestone for tag {tag_name}"):
        tag_milestone = None
        for ms in repo.get_milestones(state="all"):
            if ms.title == tag_name:
                tag_milestone = ms
                break
        assert tag_milestone is not None, "Did not find milestone for tag"

    release_branch_name = f"release/v{major}.{minor:>02}.X"

    with Spinner("Refreshing branches"):
        git.fetch(all=True, prune=True)

    if fix == 0:
        # new minor release
        with Spinner(f"Checking out and updating {default_branch_name}"):
            git.checkout(default_branch_name)
            git.pull()

        assert not check_branch_exists(
            release_branch_name
        ), "For new minor: release branch CANNOT exist yet"

        with Spinner(f"Creating {release_branch_name}"):
            git.checkout("-b", release_branch_name)
    else:
        assert check_branch_exists(
            release_branch_name), "For new fix: release branch MUST exist"

        with Spinner(f"Checking out {release_branch_name}"):
            git.checkout(release_branch_name)

    # we are now on the release branch

    version_file = Path("version_number")
    assert version_file.exists(), "Version number file not found"

    current_version_string = version_file.read_text()
    print(f"Current version: [bold]{current_version_string}[/bold]")

    if fix == 0:
        assert current_version_string == "9.9.9", "Unexpected current version string found"
    else:
        assert current_version_string != f"{major}.{minor}.{fix-1}", "Unexpected current version string found"

    version_string = f"{major}.{minor}.{fix}"
    with Spinner(
            f"Bumping version number in '{version_file}' to '{version_string}'"):
        with version_file.open("w") as fh:
            fh.write(version_string)

    with Spinner("Committing"):
        git.add(version_file)
        git.commit(m=f"Bump version number to {version_string}")

    with Spinner(f"Creating tag {tag_name}"):
        git.tag(tag_name)

    print(
        f"I will now: push tag [bold green]{tag_name}[/bold green] and branch "
        f"[bold green]{release_branch_name}[/bold green] to [bold]{remote_url}[/bold]"
    )

    if not confirm("Continue?", yes=yes):
        raise SystemExit("Aborting")

    with Spinner(f"Pushing branch {release_branch_name}"):
        git.push("-u", remote, release_branch_name)

    with Spinner(f"Pushing tag {tag_name}"):
        git.push(remote, tag_name)
def ensure_i18n_remote(self, args: argparse.Namespace) -> None:
    k = {'_cwd': args.root}
    if b'i18n' not in git.remote(**k).stdout:
        git.remote.add('i18n', args.url, **k)
    git.fetch('i18n', **k)
def fetch():
    print("git fetch")
    return git.fetch()
def _update(self):
    git.fetch(self.remote_name, self.branch_name)
    self.git_branch = self.gl_repo.git_repo.lookup_branch(
        self.remote_name + '/' + self.branch_name, pygit2.GIT_BRANCH_REMOTE)
def git_fetch(remote, branch, file_path):
    '''Fetch remote changes'''
    p = git.fetch(remote, branch, _cwd=dir_path(file_path), _tty_out=False)
    p.wait()
    show_msg('Repo updated', 'Info')
def ensure_i18n_remote(self, args):
    k = {'_cwd': args.root}
    if b'i18n' not in git.remote(**k).stdout:
        git.remote.add('i18n', args.url, **k)
    git.fetch('i18n', **k)