def get_updates_info(verbose=False):
    """Collect UpdateInfo objects for all open rdo-update gerrit reviews.

    Queries the gerrit host for open reviews of the 'rdo-update' project,
    fetches each review into a throwaway git repo and parses its update file.
    Reviews that fail to parse are skipped (logged when verbose).

    :param verbose: log progress and per-review errors
    :returns: list of UpdateInfo objects (possibly empty)
    """
    # throwaway repo used by get_review_update_info to fetch/checkout reviews
    gitdir = tempfile.mkdtemp(prefix='rdopkg-list-updates')
    uinfos = []
    prev_cwd = os.getcwd()
    os.chdir(gitdir)
    try:
        cmd.git('init', log_cmd=False)
        f_project = filters.OrFilter()
        f_project.add_items('project', 'rdo-update')
        f_other = filters.Items()
        f_other.add_items('is', 'open')
        query = reviews.Query(cfg['RDO_UPDATE_GERRIT_HOST'])
        for review in query.filter(f_project, f_other):
            try:
                url = review.get('url', '???')
                if verbose:
                    log.info("Processing update review: %s" % url)
                uinfo = get_review_update_info(review, gitdir)
                uinfos.append(uinfo)
            except Exception as ex:
                # best-effort: a broken review must not abort the listing
                if verbose:
                    log.warn("Error processing update review: %s: %s",
                             type(ex).__name__, str(ex))
                pass
    finally:
        # always restore cwd and clean up the temporary repo
        os.chdir(prev_cwd)
        shutil.rmtree(gitdir)
    return uinfos
def update_branch_in_gerrit(project, branch_name, commit_id):
    """Force branch_name to point at commit_id and sync it with origin.

    Assumes the current working directory is the repository checkout.
    NOTE: the ``project`` argument is not used in the body.
    """
    steps = (
        ('checkout', 'master'),
        ('branch', '-f', branch_name, commit_id),
        ('checkout', branch_name),
        ('pull', 'origin', branch_name),
        ('push', 'origin', branch_name),
    )
    for step in steps:
        git(*step)
def final_spec_diff(branch=None):
    """Print a summary of what the last distgit commit changed."""
    _ensure_branch(branch)
    print("Important distgit changes:")
    # diff restricted to the .spec file itself
    git('--no-pager', 'diff', 'HEAD~..HEAD', '--',
        specfile.Spec().fn, direct=True)
    print("")
    # then the full file list touched by the last commit
    git('--no-pager', 'log', '--name-status', 'HEAD~..HEAD', direct=True)
    print("\nRequested distgit update finished, see last commit.")
def add_patches(extra=False, filtered=False):
    """Commit a series of dummy patches onto the master-patches branch.

    :param extra: also add patches expected to be excluded by patches_base
    :param filtered: also add DROP-IN-RPM patches expected to be filtered
        out by the exclusion filter (new, default-off parameter; callers
        that pass filtered=True previously got a TypeError here)
    """
    git('checkout', 'master-patches')
    if extra:
        _do_patch('foofile', "#meh\n", 'Look, excluded patch')
        _do_patch('foofile', "#nope\n", 'Yet another excluded patch')
    _do_patch('foofile', "#huehue, change\n", 'Crazy first patch')
    if filtered:
        _do_patch('foofile', "#fix ci\n", 'DROP-IN-RPM: ci fix')
        _do_patch('foofile', "#and now for real\n", 'DROP-IN-RPM: moar ci fix')
    _do_patch('foofile', "#lol, another change\n", 'Epic bugfix of doom MK2')
    if filtered:
        _do_patch('foofile', "#oooops\n", 'DROP-IN-RPM: even moar ci fix')
    git('checkout', 'master')
def prep_patches_branch(tag='1.2.3'):
    """Create an orphan master-patches branch with a single base commit.

    :param tag: version tag placed on the base commit (new parameter;
        default preserves the previously hard-coded '1.2.3', while tests
        that call ``prep_patches_branch(tag=...)`` can pick their version)
    """
    git('checkout', '--orphan', 'master-patches')
    # context manager guarantees the handle is closed even if write fails
    with open('foofile', 'w') as f:
        f.write("#not really a patch\n")
    git('add', 'foofile')
    git('commit', '-m', 'Create this test branch')
    git('tag', tag)
    git('checkout', 'master')
def rebase_nightly(upstream_branch, patches_branch, distgit_branch=None,
                   lame_patches=None):
    """Rebase patches_branch on top of a linearized upstream_branch.

    Works on temporary copies of both branches, drops commits listed in
    lame_patches and commits discarded by the spec's patches_base, then
    rebases the real patches_branch onto the result. Temporary branches
    are always deleted and the original distgit branch restored.

    :param upstream_branch: branch with upstream commits to base on
    :param patches_branch: downstream patches branch to be rebased
    :param distgit_branch: branch to check out at the end (default: current)
    :param lame_patches: commits to remove from the upstream copy
    """
    spec = specfile.Spec()
    stable_tag, n_commits = spec.get_patches_base()
    if not distgit_branch:
        distgit_branch = git.current_branch()
    # tmp branches
    tmp_patches_branch = "tmp-" + patches_branch
    tmp_upstream_branch = "tmp-" + upstream_branch
    # nightly_parent serves to keep the parent commit (patches_base)
    # everything will be rebased on top of it
    nightly_parent = "nightly-parent"
    # create the temporary branches
    git.create_branch(tmp_upstream_branch, upstream_branch)
    git.create_branch(tmp_patches_branch, patches_branch)
    git.checkout(tmp_upstream_branch)
    if lame_patches is not None:
        for commit in lame_patches:
            git.remove(commit)
    git.linearize(stable_tag)
    try:
        git.checkout(tmp_patches_branch)
        first_commit, last_commit = get_discarded_range(stable_tag, n_commits)
        # remove the discarded commits defined in the specfile from the
        # tmp patches branch
        if first_commit is not None:
            git('rebase', '--onto', first_commit + '^', last_commit,
                '--strategy', 'recursive', '-X', 'ours')
            git.create_branch(nightly_parent, last_commit)
        # add stable commits below downstream patches
        git('rebase', tmp_upstream_branch)
        if first_commit:
            # put everything on top of the commits that are discarded in
            # patches_base when running update-patches
            git('rebase', nightly_parent,
                '--strategy', 'recursive', '-X', 'theirs')
        # rebase tmp patches in the patches branch
        git.checkout(patches_branch)
        git('rebase', tmp_patches_branch)
    finally:
        # clean up temporary branches and restore the distgit branch
        git.delete_branch(tmp_upstream_branch)
        git.delete_branch(tmp_patches_branch)
        git.delete_branch(nightly_parent)
        git("checkout", distgit_branch)
def _clone(self):
    """Clone self.url into base_path/repo_name, logging when verbose."""
    if self.verbose:
        info = dict(desc=self.repo_desc,
                    space=len(self.repo_desc) * ' ',
                    url=self.url,
                    path=self.repo_path)
        log.info("Cloning {desc} repo: {url}\n"
                 " {space} into: {path}".format(**info))
    with helpers.cdir(self.base_path):
        cmd.git('clone', self.url, self.repo_name, log_cmd=self.verbose)
def test_filter_out(tmpdir):
    """DROP-IN-RPM patches must be filtered out of the distgit update."""
    dist_path = common.prep_spec_test(tmpdir, 'empty-ex-filter')
    with dist_path.as_cwd():
        common.prep_patches_branch()
        head_before = git('rev-parse', 'HEAD')
        common.add_patches(extra=True, filtered=True)
        actions.update_patches('master',
                               local_patches_branch='master-patches',
                               version='1.2.3')
        head_after = git('rev-parse', 'HEAD')
        common.assert_distgit(dist_path, 'patched-filter')
        assert head_before != head_after, "New commit not created"
def test_filter_out(tmpdir):
    """Excluded DROP-IN-RPM patches must not land in the distgit."""
    dist_path = common.prep_spec_test(tmpdir, 'empty-ex-filter')
    with dist_path.as_cwd():
        common.prep_patches_branch()
        sha_before = git('rev-parse', 'HEAD')
        common.add_patches(extra=True, filtered=True)
        update_patches('master',
                       local_patches_branch='master-patches',
                       version='1.2.3')
        sha_after = git('rev-parse', 'HEAD')
        common.assert_distgit(dist_path, 'patched-filter')
        assert sha_before != sha_after, "New commit not created"
def test_update_double_patches_base(tmpdir):
    """update-patches must abort on a spec with duplicate patches_base.

    (Removed the unused ``spec_path`` local — it was never read.)
    """
    dist_path = common.prep_spec_test(tmpdir, 'double-patches')
    with dist_path.as_cwd():
        common.prep_patches_branch()
        commit_before = git('rev-parse', 'HEAD')
        with pytest.raises(rdopkg.utils.exception.DuplicatePatchesBaseError):
            update_patches('master',
                           local_patches_branch='master-patches',
                           version='1.2.3')
        commit_after = git('rev-parse', 'HEAD')
        common.assert_distgit(dist_path, 'double-patches')
        assert commit_before == commit_after, \
            "Commit created on double patches_base (error)"
def get_review_update_info(review, gitdir):
    """Fetch a review's current patch set and parse its update file.

    Expects cwd to be a git repo (the caller chdir'd into gitdir —
    the argument itself is not used here).
    """
    url = review['url']
    patch_set = review['currentPatchSet']
    ref = patch_set['ref']
    uploader = patch_set['uploader']
    authors = ["%s <%s>" % (uploader['name'], uploader['email'])]
    cmd.git('fetch', cfg['RDO_UPDATE_REPO'], ref, log_cmd=False)
    cmd.git('checkout', 'FETCH_HEAD', log_cmd=False)
    upf = rdoupdate.actions.get_last_commit_update('.')
    update = rdoupdate.actions.check_file(upf)
    return UpdateInfo(upf, update, authors, gerrit_url=url,
                      gerrit_apprs=patch_set.get('approvals', []))
def get_review_update_info(review, gitdir):
    """Build an UpdateInfo by fetching the review's current patch set.

    Expects cwd to be a git repo (the caller chdir'd into gitdir —
    the argument itself is not used here).
    """
    url = review['url']
    patch_set = review['currentPatchSet']
    ref = patch_set['ref']
    uploader = patch_set['uploader']
    authors = ["%s <%s>" % (uploader['name'], uploader['email'])]
    cmd.git('fetch', cfg['RDO_UPDATE_GERRIT_SSH'], ref, log_cmd=False)
    cmd.git('checkout', 'FETCH_HEAD', log_cmd=False)
    upf = rdoupdate.actions.get_last_commit_update('.')
    update = rdoupdate.actions.check_file(upf)
    return UpdateInfo(upf, update, authors, gerrit_url=url,
                      gerrit_apprs=patch_set.get('approvals', []))
def add_patches(extra=False, filtered=False):
    """Commit a series of dummy patches onto master-patches.

    extra adds patches expected to be excluded via patches_base;
    filtered adds DROP-IN-RPM patches expected to be filtered out.
    """
    git('checkout', 'master-patches')
    series = []
    if extra:
        series.append(("#meh\n", 'Look, excluded patch'))
        series.append(("#nope\n", 'Yet another excluded patch'))
    series.append(("#huehue, change\n", 'Crazy first patch'))
    if filtered:
        series.append(("#fix ci\n", 'DROP-IN-RPM: ci fix'))
        series.append(("#and now for real\n", 'DROP-IN-RPM: moar ci fix'))
    series.append(("#lol, another change\n", 'Epic bugfix of doom MK2'))
    if filtered:
        series.append(("#oooops\n", 'DROP-IN-RPM: even moar ci fix'))
    for content, msg in series:
        _do_patch('foofile', content, msg)
    git('checkout', 'master')
def diff(version, new_version, bump_only=False, no_diff=False,
         version_tag_style=None):
    """Show a diffstat between the tags of version and new_version.

    Also attempts a requirements diff (best-effort) and waits for the
    user to confirm before returning. No-op when bump_only or no_diff.
    """
    if bump_only or no_diff:
        return
    vtag_from = guess.version2tag(version, version_tag_style)
    vtag_to = guess.version2tag(new_version, version_tag_style)
    git('--no-pager', 'diff', '--stat', '%s..%s' % (vtag_from, vtag_to),
        direct=True)
    try:
        reqdiff(vtag_from, vtag_to)
    except Exception:
        # requirements diff is informational only; ignore failures
        pass
    # NOTE(review): raw_input is Python 2 only — confirm this module
    # is not expected to run under Python 3
    raw_input("Press <Enter> to continue after you inspected the diff. ")
def rebase_nightly(upstream_branch, patches_branch, distgit_branch=None,
                   lame_patches=None):
    """Rebase patches_branch on top of a linearized upstream_branch.

    Uses temporary copies of both branches; drops lame_patches and the
    commits discarded via the spec's patches_base, then rebases the real
    patches branch onto the rebuilt history. Temporary branches are
    deleted and distgit_branch restored in all cases.
    """
    spec = specfile.Spec()
    stable_tag, n_commits = spec.get_patches_base()
    if not distgit_branch:
        distgit_branch = git.current_branch()
    # tmp branches
    tmp_patches_branch = "tmp-" + patches_branch
    tmp_upstream_branch = "tmp-" + upstream_branch
    # nightly_parent serves to keep the parent commit (patches_base)
    # everything will be rebased on top of it
    nightly_parent = "nightly-parent"
    # create the temporary branches
    git.create_branch(tmp_upstream_branch, upstream_branch)
    git.create_branch(tmp_patches_branch, patches_branch)
    git.checkout(tmp_upstream_branch)
    if lame_patches is not None:
        for commit in lame_patches:
            git.remove(commit)
    git.linearize(stable_tag)
    try:
        git.checkout(tmp_patches_branch)
        first_commit, last_commit = get_discarded_range(stable_tag, n_commits)
        # remove the discarded commits defined in the specfile from the
        # tmp patches branch
        if first_commit is not None:
            git("rebase", "--onto", first_commit + "^", last_commit,
                "--strategy", "recursive", "-X", "ours")
            git.create_branch(nightly_parent, last_commit)
        # add stable commits below downstream patches
        git("rebase", tmp_upstream_branch)
        if first_commit:
            # put everything on top of the commits that are discarded in
            # patches_base when running update-patches
            git("rebase", nightly_parent,
                "--strategy", "recursive", "-X", "theirs")
        # rebase tmp patches in the patches branch
        git.checkout(patches_branch)
        git("rebase", tmp_patches_branch)
    finally:
        # clean up temporary branches and restore the distgit branch
        git.delete_branch(tmp_upstream_branch)
        git.delete_branch(tmp_patches_branch)
        git.delete_branch(nightly_parent)
        git("checkout", distgit_branch)
def tag_patches_branch(package, local_patches_branch, patches_branch,
                       force=False, push=False):
    """Tag local_patches_branch with this package's NVR.

    With push=True the tag is pushed to the remote of patches_branch,
    otherwise a hint is printed instead.
    """
    nvr_tag = package + '-' + specfile.Spec().get_nvr(epoch=False)
    tag_args = ['tag', nvr_tag, local_patches_branch]
    if force:
        tag_args += ['-f']
    git(*tag_args)
    if push:
        remote = patches_branch.partition('/')[0]
        git('push', remote, nvr_tag)
    else:
        print('Not pushing tag. Run "git push patches %s" by hand.' % nvr_tag)
def test_update_dense(tmpdir):
    """update-patches on a dense spec must produce the patched asset."""
    dist_path = common.prep_spec_test(tmpdir, 'empty-dense')
    spec_path = dist_path.join('foo.spec')
    with dist_path.as_cwd():
        common.prep_patches_branch(dist_path)
        spec_before = spec_path.read()
        head_before = git('rev-parse', 'HEAD')
        common.add_patches(extra=True)
        actions.update_patches('master',
                               local_patches_branch='master-patches',
                               version='1.2.3')
        spec_after = spec_path.read()
        head_after = git('rev-parse', 'HEAD')
        common.assert_distgit(dist_path, 'patched-dense')
        assert head_before != head_after, "New commit not created"
def test_update_dense(tmpdir):
    """Dense spec: update-patches must commit the expected result."""
    dist_path = common.prep_spec_test(tmpdir, 'empty-dense')
    spec_path = dist_path.join('foo.spec')
    with dist_path.as_cwd():
        common.prep_patches_branch()
        spec_before = spec_path.read()
        sha_before = git('rev-parse', 'HEAD')
        common.add_patches(extra=True)
        update_patches('master',
                       local_patches_branch='master-patches',
                       version='1.2.3')
        spec_after = spec_path.read()
        sha_after = git('rev-parse', 'HEAD')
        common.assert_distgit(dist_path, 'patched-dense')
        assert sha_before != sha_after, "New commit not created"
def get_upstream_patches(version, local_patches_branch,
                         patches_branch=None, upstream_branch=None,
                         new_milestone=None):
    """List patches in local_patches_branch missing from patches_branch.

    Returns a dict with a 'changes' changelog list (prefixed with a
    rebase summary line) and, when patches were found, a
    'new_milestone' value carrying a .pN / pN patch-count suffix.
    """
    # TODO: nuke this, looks unused
    # the subject placeholder is passed as '\%s'; the backslashes are
    # stripped from the output below
    patches = git("log", "--cherry-pick", "--pretty=format:\%s",
                  "%(remote)s...%(local)s" % {'remote': patches_branch,
                                              'local': local_patches_branch})
    changes = [p.strip().replace('\\', '') for p in patches.split('\n')
               if p != '']
    if not changes:
        log.warn("No new patches detected in %s." % local_patches_branch)
        helpers.confirm("Do you want to continue anyway?", default_yes=False)
    n_patches = len(changes)
    changes.insert(0, ("Rebase %s changes from %s" % (n_patches,
                                                      upstream_branch)))
    args = {'changes': changes}
    if n_patches > 0:
        if new_milestone:
            new_milestone += '.p%d' % n_patches
        else:
            new_milestone = 'p%d' % n_patches
        args['new_milestone'] = new_milestone
    return args
def test_update_git_am_buildarch_fail(tmpdir):
    """git-am apply method with bad BuildArch must fail the sanity check
    and leave no new commit behind.

    (Removed the unused ``spec_path``/``spec_before``/``spec_after``
    locals — their values were never asserted.)
    """
    dist_path = common.prep_spec_test(tmpdir, 'git-am-fail')
    with dist_path.as_cwd():
        common.prep_patches_branch()
        commit_before = git('rev-parse', 'HEAD')
        common.add_patches(extra=True)
        with pytest.raises(rdopkg.utils.exception.BuildArchSanityCheckFailed):
            update_patches('master',
                           local_patches_branch='master-patches',
                           version='1.2.3')
        commit_after = git('rev-parse', 'HEAD')
        apply_method = specfile.Spec().patches_apply_method()
        assert apply_method == 'git-am'
        assert commit_before == commit_after, "New commit created"
def test_update_autosetup(tmpdir):
    """update-patches must support the %autosetup patch apply method.

    (Removed the unused ``spec_path``/``spec_before``/``spec_after``
    locals — their values were never asserted.)
    """
    dist_path = common.prep_spec_test(tmpdir, 'autosetup')
    with dist_path.as_cwd():
        common.prep_patches_branch()
        commit_before = git('rev-parse', 'HEAD')
        common.add_patches(extra=True)
        update_patches('master',
                       local_patches_branch='master-patches',
                       version='1.2.3')
        commit_after = git('rev-parse', 'HEAD')
        apply_method = specfile.Spec().patches_apply_method()
        assert apply_method == 'autosetup'
        common.assert_distgit(dist_path, 'patched-autosetup')
        assert commit_before != commit_after, "New commit not created"
def test_update_autosetup(tmpdir):
    """%autosetup specs must be updated and committed correctly."""
    dist_path = common.prep_spec_test(tmpdir, 'autosetup')
    spec_path = dist_path.join('foo.spec')
    with dist_path.as_cwd():
        common.prep_patches_branch(dist_path)
        spec_before = spec_path.read()
        head_before = git('rev-parse', 'HEAD')
        common.add_patches(extra=True)
        actions.update_patches('master',
                               local_patches_branch='master-patches',
                               version='1.2.3')
        spec_after = spec_path.read()
        head_after = git('rev-parse', 'HEAD')
        assert specfile.Spec().patches_apply_method() == 'autosetup'
        common.assert_distgit(dist_path, 'patched-autosetup')
        assert head_before != head_after, "New commit not created"
def test_update_git_am_buildarch_fail(tmpdir):
    """BuildArch sanity check must fail under git-am, leaving HEAD alone."""
    dist_path = common.prep_spec_test(tmpdir, 'git-am-fail')
    spec_path = dist_path.join('foo.spec')
    with dist_path.as_cwd():
        common.prep_patches_branch(dist_path)
        spec_before = spec_path.read()
        head_before = git('rev-parse', 'HEAD')
        common.add_patches(extra=True)
        with pytest.raises(rdopkg.utils.exception.BuildArchSanityCheckFailed):
            actions.update_patches('master',
                                   local_patches_branch='master-patches',
                                   version='1.2.3')
        spec_after = spec_path.read()
        head_after = git('rev-parse', 'HEAD')
        assert specfile.Spec().patches_apply_method() == 'git-am'
        assert head_before == head_after, "New commit created"
def git_check_remote(self):
    """Verify the repo's 'origin' fetch remote matches self.url.

    :raises RepoError: when origin points elsewhere
    """
    assert self.url
    with self.repo_dir():
        remotes = cmd.git('remote', '-v', log_cmd=False)
        # raw string: \s and \( are regex escapes, not string escapes
        # (non-raw form raises invalid-escape warnings on Python 3.6+)
        pattern = r'^origin\s+%s\s+\(fetch\)$' % re.escape(self.url)
        if not re.search(pattern, remotes, re.MULTILINE):
            raise exception.RepoError(what="origin isn't set to expected URL: "
                                           "%s" % self.url)
def git_check_remote(self):
    """Verify the repo's 'origin' fetch remote matches self.url.

    :raises RepoError: when origin points elsewhere
    """
    assert self.url
    with self.repo_dir():
        remotes = cmd.git('remote', '-v', log_cmd=False)
        # raw string: \s and \( are regex escapes, not string escapes
        # (non-raw form raises invalid-escape warnings on Python 3.6+)
        pattern = r'^origin\s+%s\s+\(fetch\)$' % re.escape(self.url)
        if not re.search(pattern, remotes, re.MULTILINE):
            raise exception.RepoError(what="origin isn't set to expected URL: "
                                           "%s" % self.url)
def _push_pkg(upf):
    """Copy an update's packages into their destination repos and record
    the push in the update repo.

    NOTE: closure — relies on self, updated_repos, updated_repo_bases and
    copy_package from the enclosing scope. On failure, already-copied
    packages are removed before re-raising.
    """
    log.info("\nPushing update {t.bold}{upf}{t.normal}".format(
        t=log.term, upf=upf))
    update = self._load_update_file(upf)
    pushed_rpms = []
    try:
        _updated_repos = set()
        _updated_repo_bases = set()
        _pushed_build_tmp_paths = []
        for build in update.builds:
            src_path = self._build_tmp_path(upf, build)
            # a build dir shared by several builds is only pushed once
            if src_path in _pushed_build_tmp_paths:
                continue
            build_rpms = helpers.find_files(src_path, ext='.rpm')
            dest_repo_base_path = self._dest_repo_base_path(build.repo)
            if not os.path.isdir(dest_repo_base_path):
                raise exception.NotADirectory(path=dest_repo_base_path)
            dest_path = self._build_dest_path(build)
            for rpm in build_rpms:
                pushed_path = copy_package(rpm, dest_path,
                                           overwrite=self.overwrite)
                pushed_rpms.append(pushed_path)
            _pushed_build_tmp_paths.append(src_path)
            _updated_repo_bases.add(dest_repo_base_path)
            _updated_repos.add(
                self._dest_repo_path(build.repo, build.dist))
        with helpers.cdir(self.update_repo_path):
            helpers.ensure_dir(self.pushed_dir)
            upf_base = os.path.basename(upf)
            pushed_upf = os.path.join(self.pushed_dir, upf_base)
            pushed_files_fn = pushed_upf + self.pushed_files_ext
            git('mv', upf, pushed_upf)
            # record the list of pushed package paths next to the update
            with open(pushed_files_fn, 'w') as pushed_files_f:
                pushed_files_f.writelines(
                    map(lambda x: "%s\n" % x, pushed_rpms))
            git('add', pushed_files_fn)
            try:
                git('commit', '-m',
                    "Push %s" % rdoupdate.core.pp_update(upf))
            except Exception:
                # BUG FIX: was git('git', 'reset', '--hard') which ran
                # the bogus command "git git reset --hard"
                git('reset', '--hard')
                raise
        updated_repos.update(_updated_repos)
        updated_repo_bases.update(_updated_repo_bases)
    except Exception:
        if pushed_rpms:
            log.warn("Final push failed for %s, cleaning copied "
                     "packages" % upf)
            for rpm in pushed_rpms:
                log.info("{t.warn}remove{t.normal} {rpm}".format(
                    t=log.term, rpm=rpm))
                os.remove(rpm)
        raise
def _push_pkg(upf):
    """Copy an update's packages into their destination repos and record
    the push in the update repo.

    NOTE: closure — relies on self, updated_repos, updated_repo_bases and
    copy_package from the enclosing scope. On failure, already-copied
    packages are removed before re-raising.
    """
    log.info("\nPushing update {t.bold}{upf}{t.normal}".format(
        t=log.term, upf=upf))
    update = self._load_update_file(upf)
    pushed_rpms = []
    try:
        _updated_repos = set()
        _updated_repo_bases = set()
        _pushed_build_tmp_paths = []
        for build in update.builds:
            src_path = self._build_tmp_path(upf, build)
            # a build dir shared by several builds is only pushed once
            if src_path in _pushed_build_tmp_paths:
                continue
            build_rpms = helpers.find_files(src_path, ext='.rpm')
            dest_repo_base_path = self._dest_repo_base_path(build.repo)
            if not os.path.isdir(dest_repo_base_path):
                raise exception.NotADirectory(path=dest_repo_base_path)
            dest_path = self._build_dest_path(build)
            for rpm in build_rpms:
                pushed_path = copy_package(rpm, dest_path,
                                           overwrite=self.overwrite)
                pushed_rpms.append(pushed_path)
            _pushed_build_tmp_paths.append(src_path)
            _updated_repo_bases.add(dest_repo_base_path)
            _updated_repos.add(self._dest_repo_path(build.repo, build.dist))
        with helpers.cdir(self.update_repo_path):
            helpers.ensure_dir(self.pushed_dir)
            upf_base = os.path.basename(upf)
            pushed_upf = os.path.join(self.pushed_dir, upf_base)
            pushed_files_fn = pushed_upf + self.pushed_files_ext
            git('mv', upf, pushed_upf)
            # record the list of pushed package paths next to the update
            with open(pushed_files_fn, 'w') as pushed_files_f:
                pushed_files_f.writelines(
                    map(lambda x: "%s\n" % x, pushed_rpms))
            git('add', pushed_files_fn)
            try:
                git('commit', '-m',
                    "Push %s" % rdoupdate.core.pp_update(upf))
            except Exception:
                # BUG FIX: was git('git', 'reset', '--hard') which ran
                # the bogus command "git git reset --hard"
                git('reset', '--hard')
                raise
        updated_repos.update(_updated_repos)
        updated_repo_bases.update(_updated_repo_bases)
    except Exception:
        if pushed_rpms:
            log.warn("Final push failed for %s, cleaning copied "
                     "packages" % upf)
            for rpm in pushed_rpms:
                log.info("{t.warn}remove{t.normal} {rpm}".format(
                    t=log.term, rpm=rpm))
                os.remove(rpm)
        raise
def test_update_noop(tmpdir):
    """Running update-patches twice must not create a second commit."""
    dist_path = common.prep_spec_test(tmpdir, 'patched')
    spec_path = dist_path.join('foo.spec')
    with dist_path.as_cwd():
        common.prep_patches_branch(dist_path)
        spec_before = spec_path.read()
        common.add_patches()
        actions.update_patches('master',
                               local_patches_branch='master-patches',
                               version='1.2.3')
        assert spec_path.read() == spec_before
        head_before = git('rev-parse', 'HEAD')
        actions.update_patches('master',
                               local_patches_branch='master-patches',
                               version='1.2.3')
        head_after = git('rev-parse', 'HEAD')
        assert head_before == head_after, "Commit created for no-op"
def _test_new_version(asset, dir, steps):
    """Drive `rdopkg new-version` through steps, checking spec each time."""
    dist_path = common.prep_spec_test(dir, asset)
    spec_path = dist_path.join('foo.spec')
    log.log.setLevel(log.WARN)
    with dist_path.as_cwd():
        common.prep_patches_branch()
        for step in steps:
            new_version, spec_version, spec_release_parts, spec_milestone = \
                step
            head_before = git('rev-parse', 'HEAD')
            common.add_patches(tag=new_version)
            rdopkg('new-version', '-l', '-d', new_version)
            # after
            head_after = git('rev-parse', 'HEAD')
            common.assert_spec_version(spec_version, spec_release_parts,
                                       spec_milestone)
            assert head_before != head_after
def test_update_noop(tmpdir):
    """A second update-patches run on identical input must be a no-op."""
    dist_path = common.prep_spec_test(tmpdir, 'patched')
    spec_path = dist_path.join('foo.spec')
    with dist_path.as_cwd():
        common.prep_patches_branch()
        spec_before = spec_path.read()
        common.add_patches()
        update_patches('master',
                       local_patches_branch='master-patches',
                       version='1.2.3')
        assert spec_path.read() == spec_before
        sha_before = git('rev-parse', 'HEAD')
        update_patches('master',
                       local_patches_branch='master-patches',
                       version='1.2.3')
        sha_after = git('rev-parse', 'HEAD')
        assert sha_before == sha_after, "Commit created for no-op"
def fetch_patches_branch(local_patches_branch, gerrit_patches_chain,
                         force=False):
    """Fetch a gerrit review chain into local_patches_branch.

    Aborts (or asks, with force=True) when CI has not verified the
    current patch set; asks for confirmation on negative code reviews.

    :raises CantGuess: current patch set cannot be determined
    :raises UnverifiedPatch: CI verification missing and force is False
    """
    review_n = _review_number(gerrit_patches_chain)
    gerrit_host, gerrit_port = guess.gerrit_from_repo()
    query = GerritQuery(gerrit_host, gerrit_port)
    review = query('--current-patch-set', review_n)
    current_ps = review.get('currentPatchSet', {})
    patchset_n = current_ps.get('number')
    if not patchset_n:
        raise exception.CantGuess(
            msg='Failed to determine current patch set for review: %s' %
                gerrit_patches_chain)
    gerrit_ref = _review_ref(review_n, patchset_n)
    git('fetch', 'patches', gerrit_ref)
    approvals = current_ps.get('approvals', [])
    jenkins = [
        a for a in approvals
        if a.get('type') == 'Verified'
        and a.get('by', {}).get('username') == 'jenkins'
    ]
    # BUG FIX: approval dicts use lowercase 'value' (cf. jenkins[0]['value']
    # below); 'Value' never matched, so every code review scored 0 and the
    # negative-review warning could never trigger
    code_reviews = [
        int(a.get('value', 0)) for a in approvals
        if a.get('type') == 'Code-Review'
    ]
    if not jenkins:
        verified = 0
    else:
        verified = int(jenkins[0]['value'])
    if verified != 1:
        if force:
            log.warn("Ref %s has not been validated by CI." %
                     gerrit_patches_chain)
            helpers.confirm("Do you want to continue anyway?",
                            default_yes=False)
        else:
            raise exception.UnverifiedPatch()
    if any(cr < 0 for cr in code_reviews):
        log.warn("Ref %s has at least one negative review." %
                 gerrit_patches_chain)
        helpers.confirm("Do you want to continue anyway?",
                        default_yes=False)
    git('update-ref', 'refs/heads/%s' % local_patches_branch, 'FETCH_HEAD')
def fetch_patches_branch(local_patches_branch, gerrit_patches_chain,
                         force=False):
    """Fetch a gerrit review chain into local_patches_branch.

    Aborts (or asks, with force=True) when CI has not verified the
    current patch set; asks for confirmation on negative code reviews.

    :raises CantGuess: current patch set cannot be determined
    :raises UnverifiedPatch: CI verification missing and force is False
    """
    review_n = _review_number(gerrit_patches_chain)
    gerrit_host, gerrit_port = guess.gerrit_from_repo()
    query = GerritQuery(gerrit_host, gerrit_port)
    review = query('--current-patch-set', review_n)
    current_ps = review.get('currentPatchSet', {})
    patchset_n = current_ps.get('number')
    if not patchset_n:
        raise exception.CantGuess(
            msg='Failed to determine current patch set for review: %s' %
                gerrit_patches_chain)
    gerrit_ref = _review_ref(review_n, patchset_n)
    git('fetch', 'patches', gerrit_ref)
    approvals = current_ps.get('approvals', [])
    jenkins = [a for a in approvals
               if a.get('type') == 'Verified'
               and a.get('by', {}).get('username') == 'jenkins']
    # BUG FIX: approval dicts use lowercase 'value' (cf. jenkins[0]['value']
    # below); 'Value' never matched, so every code review scored 0 and the
    # negative-review warning could never trigger
    code_reviews = [int(a.get('value', 0)) for a in approvals
                    if a.get('type') == 'Code-Review']
    if not jenkins:
        verified = 0
    else:
        verified = int(jenkins[0]['value'])
    if verified != 1:
        if force:
            log.warn(
                "Ref %s has not been validated by CI." % gerrit_patches_chain)
            helpers.confirm("Do you want to continue anyway?",
                            default_yes=False)
        else:
            raise exception.UnverifiedPatch()
    if any(cr < 0 for cr in code_reviews):
        log.warn(
            "Ref %s has at least one negative review." % gerrit_patches_chain)
        helpers.confirm("Do you want to continue anyway?",
                        default_yes=False)
    git('update-ref', 'refs/heads/%s' % local_patches_branch, 'FETCH_HEAD')
def rebase_patches_branch(new_version, local_patches_branch,
                          patches_branch=None, local_patches=False,
                          patches_style=None, version_tag_style=None,
                          bump_only=False):
    """Rebase the local patches branch on the new version tag.

    For non-review patches style, also force-pushes the rebased branch
    and the version tag to the patches remote (after confirmation;
    a user abort is swallowed silently). No-op when bump_only.
    """
    if bump_only:
        return
    git.checkout(local_patches_branch)
    new_version_tag = guess.version2tag(new_version, version_tag_style)
    git('rebase', new_version_tag, direct=True)
    if patches_style != 'review':
        if local_patches or not patches_branch:
            return
        if _is_same_commit(local_patches_branch, patches_branch):
            log.info("%s is up to date, no need for push." % patches_branch)
            return
        try:
            remote, branch = git.remote_branch_split(patches_branch)
            helpers.confirm("Push %s to %s / %s (with --force)?" % (
                local_patches_branch, remote, branch))
            git('push', '--force', remote,
                '%s:%s' % (local_patches_branch, branch))
            # push the tag
            git('push', '--force', remote, new_version_tag)
        except exception.UserAbort:
            pass
def _test_patch(asset, version, dir):
    """Run `rdopkg patch -l` on asset; verify spec version and new commit."""
    dist_path = common.prep_spec_test(dir, asset)
    spec_path = dist_path.join('foo.spec')
    log.log.setLevel(log.WARN)
    with dist_path.as_cwd():
        spec_version, spec_release_parts, spec_milestone = version
        tag = spec_version + spec_milestone if spec_milestone else spec_version
        common.prep_patches_branch(tag=tag)
        head_before = git('rev-parse', 'HEAD')
        common.add_patches()
        rdopkg('patch', '-l')
        # after
        head_after = git('rev-parse', 'HEAD')
        common.assert_spec_version(spec_version, spec_release_parts,
                                   spec_milestone)
        assert head_before != head_after
def prep_spec_test(tmpdir, distgit):
    """Copy the distgit spec asset into tmpdir/dist and git-import it."""
    dist_path = tmpdir.join('dist')
    shutil.copytree(os.path.join(ASSETS_DIR, 'spec', distgit),
                    str(dist_path))
    with dist_path.as_cwd():
        for args in (('init',),
                     ('add', '.'),
                     ('commit', '-m', 'Initial import')):
            git(*args)
    return dist_path
def test_update_empty(tmpdir):
    """No patches -> no-op; adding patches then creates exactly one commit."""
    dist_path = common.prep_spec_test(tmpdir, 'empty')
    spec_path = dist_path.join('foo.spec')
    with dist_path.as_cwd():
        common.prep_patches_branch(dist_path)
        spec_before = spec_path.read()
        head_before = git('rev-parse', 'HEAD')
        actions.update_patches('master',
                               local_patches_branch='master-patches',
                               version='1.2.3')
        spec_after = spec_path.read()
        head_after = git('rev-parse', 'HEAD')
        assert spec_after == spec_before
        assert head_before == head_after, "Commit created for no-op"
    with dist_path.as_cwd():
        common.add_patches()
        actions.update_patches('master',
                               local_patches_branch='master-patches',
                               version='1.2.3')
        spec_after = spec_path.read()
        head_after = git('rev-parse', 'HEAD')
        common.assert_distgit(dist_path, 'patched')
        assert head_before != head_after, "New commit not created"
def prep_push_test(tmpdir, update_repo, dest):
    """Copy update-repo and dest assets into tmpdir; git-import the former."""
    rdoup_path = tmpdir.join('rdo-update')
    dest_path = tmpdir.join('dest')
    shutil.copytree(os.path.join(ASSETS_DIR, 'rdo-update.git', update_repo),
                    str(rdoup_path))
    shutil.copytree(os.path.join(ASSETS_DIR, 'dest', dest), str(dest_path))
    with rdoup_path.as_cwd():
        for args in (('init',),
                     ('add', '.'),
                     ('commit', '-m', 'Initial import')):
            git(*args)
    return rdoup_path, dest_path
def _fetch(self, force=False):
    """Fetch origin and hard-reset local master to origin/master.

    Skipped when FETCH_HEAD is younger than cfg['FETCH_PERIOD'] seconds,
    unless force is True.
    """
    with self.repo_dir():
        fresh = False
        if not force:
            try:
                # FETCH_HEAD's mtime records when the last fetch happened
                age = int(time.time()) - os.path.getmtime('.git/FETCH_HEAD')
                fresh = age < cfg['FETCH_PERIOD']
            except Exception:
                # no FETCH_HEAD yet (never fetched) -> treat as stale
                fresh = False
        if fresh:
            return
        if self.verbose:
            log.info("Fetching %s repo: %s" % (self.repo_desc,
                                               self.repo_path))
        cmd.git('fetch', 'origin', log_cmd=self.verbose)
        cmd.git('checkout', '-f', 'master', log_cmd=self.verbose)
        cmd.git('reset', '--hard', 'origin/master', log_cmd=self.verbose)
def prep_new_patches_branch(new_version, local_patches_branch, patches_branch,
                            local_patches=False, bump_only=False,
                            patches_style=None, version_tag_style=None):
    """Prepare the local patches branch for a new version.

    review style: force the local branch to the new version tag and
    force-push branch + tag (after confirmation; abort is swallowed).
    Otherwise: reset the local branch to the remote patches branch,
    unless local_patches or bump_only is set.
    """
    if patches_style == 'review':
        new_version_tag = guess.version2tag(new_version, version_tag_style)
        try:
            remote, branch = git.remote_branch_split(patches_branch)
            helpers.confirm("Push %s to %s/%s (with --force)?" % (
                new_version_tag, remote, branch))
            git('branch', '--force', local_patches_branch, new_version_tag)
            git('push', '--force', remote,
                '%s:%s' % (local_patches_branch, branch))
            # push the tag
            git('push', '--force', remote, new_version_tag)
        except exception.UserAbort:
            pass
    else:
        if not (local_patches or bump_only):
            _reset_branch(local_patches_branch, remote_branch=patches_branch)
def _fetch(self, force=False):
    """Fetch origin and hard-reset local master to origin/master.

    The fetch is skipped when FETCH_HEAD is younger than
    cfg['FETCH_PERIOD'] seconds, unless force is True.
    """
    need_fetch = True
    with self.repo_dir():
        if not force:
            try:
                # FETCH_HEAD's mtime records when the last fetch happened
                t_fetch = os.path.getmtime('.git/FETCH_HEAD')
                t_now = int(time.time())
                delta = t_now - t_fetch
                if delta < cfg['FETCH_PERIOD']:
                    need_fetch = False
            except Exception:
                # no FETCH_HEAD yet (never fetched) -> just fetch
                pass
        if need_fetch:
            if self.verbose:
                log.info("Fetching %s repo: %s" % (self.repo_desc,
                                                   self.repo_path))
            cmd.git('fetch', 'origin', log_cmd=self.verbose)
            cmd.git('checkout', '-f', 'master', log_cmd=self.verbose)
            cmd.git('reset', '--hard', 'origin/master', log_cmd=self.verbose)
def get_projects_status(sf_url, user, release, rdo_project=None,
                        branch_template='stable/%s'):
    """Compiles the latest tag/hash for rdo_project for <release> on branch.

    Clones the openstack releases repo, reads each deliverable descriptor
    for <release>, and for every project known to rdoinfo clones its RDO
    repo and compares commit times between the upstream hash and the tip
    of the stable branch.

    NOTE: Python 2 code (print statements).

    :returns: (projects, missing_from_rdo) — per-project status dicts and
        upstream repo URLs for projects absent from rdoinfo
    """
    url = 'ssh://%s@%s:29418' % (user, sf_url)
    rdoinfo = rdoinfoutils.fetch_rdoinfo()
    branch = branch_template % release
    projects = {}
    missing_from_rdo = {}
    # template copied (deepcopy) for each project below
    status = {'upstream': {'name': None, 'version': None, 'hash': None,
                           'timestamp': None},
              'rdo': {'hash': None, 'timestamp': None},
              'branch_is_synced': False}
    tempdir = create_and_get_tempdir()
    releases_dir = os.path.join(tempdir, 'releases')
    # checkout the releases repo
    print "Cloning releases info ... "
    clone(releases_info, releases_dir)
    release_dir = os.path.join(releases_dir, 'deliverables/%s' % release)
    for descriptor in glob.glob(os.path.join(release_dir, '*.yaml')):
        version, infos, repos = get_repos_infos_for_project(descriptor)
        for project in infos.keys():
            try:
                (name, distgit, upstream, sfdistgit, maints, conf,
                 mdistgit, patches) = rdoinfoutils.fetch_project_infos(
                    rdoinfo, project)
            except Exception as e:
                # project unknown to rdoinfo -> remember its upstream URL
                print "Skipping %s: %s" % (project, e)
                missing_from_rdo[project] = os_git_root + repos.get(project)
                continue
            p_status = copy.deepcopy(status)
            if rdo_project and name != rdo_project:
                continue
            desc_file = descriptor.split('/')[-1]
            if rdo_project:
                print 'Info for %s found in %s' % (rdo_project, desc_file)
            p_status['upstream'] = {'name': project, 'version': version,
                                    'hash': infos[project],
                                    'timestamp': None}
            if not os.path.isdir(os.path.join(tempdir, project)):
                try:
                    clone('%s/%s.git' % (url, name),
                          os.path.join(tempdir, project))
                except CommandFailed:
                    # we already get the output, so do nothing
                    p_status['rdo'] = {}
                    projects[name] = p_status
                    continue
            with cdir(os.path.join(tempdir, project)):
                if git.ref_exists('refs/remotes/origin/%s' % branch):
                    git('checkout', 'origin/%s' % branch)
                    current = git('log', '-1', '--pretty=format:%H')
                    commit = infos[project]
                    rdo_time = get_commit_time(current)
                    upstream_time = get_commit_time(commit)
                    p_status['upstream']['timestamp'] = upstream_time
                    p_status['rdo']['hash'] = current
                    p_status['rdo']['timestamp'] = rdo_time
                    if upstream_time <= rdo_time:
                        # rdo is up-to-date and more
                        p_status['branch_is_synced'] = True
            projects[name] = p_status
    nuke_dir(tempdir)
    return projects, missing_from_rdo
def get_reqs_from_ref(ref):
    """Return parsed requirements.txt as found at the given git ref."""
    contents = git('show', '%s:requirements.txt' % ref, log_cmd=False)
    return parse_reqs_txt(contents)
def get_review(self, review_id):
    """Download a gerrit review into the checkout via git-review -d."""
    with self.repo_dir(), helpers.setenv(USERNAME=self.user):
        cmd.git('review', '-d', str(review_id), direct=True)
# Script entry point: clone rpmfactory-puppet packages from rdoinfo and
# generate spec files for those shipping metadata.json.
# NOTE: Python 2 code ('except Exception, e' and print statements).
if __name__ == '__main__':
    rdoinfo = fetch_rdoinfo(RDOINFO)
    wdir = tempfile.mkdtemp()
    # optional single argument narrows the run to one project
    if len(sys.argv) == 2:
        rdoinfo['packages'] = [r for r in rdoinfo['packages']
                               if r['project'] == sys.argv[1]]
    for pkg in rdoinfo['packages']:
        if pkg['conf'] == 'rpmfactory-puppet':
            upstream_url = pkg['upstream']
            project = pkg['project']
            print "Attempt to clone %s from %s to %s" % (project,
                                                         upstream_url,
                                                         wdir)
            pdir = os.path.join(wdir, project)
            # start from a clean clone dir
            if os.path.isdir(pdir):
                shutil.rmtree(pdir)
            try:
                with cdir(wdir):
                    git('clone', upstream_url, project)
            except Exception, e:
                print "[FAILED] Clone from %s (%s)" % (upstream_url, e)
            with cdir(pdir):
                # only puppet modules with metadata.json get a spec file
                if os.path.isfile('metadata.json'):
                    print "Attempt to generate spec file for %s" % project
                    generate_spec_file(wdir, project, pkg)
def _do_patch(fn, content, msg):
    """Write content to fn and commit it with message msg.

    Uses a with-statement so the file handle is closed even when the
    write fails (the old open/close pair leaked on error).
    """
    with open(fn, 'w') as f:
        f.write(content)
    git('add', fn)
    git('commit', '-m', msg)
def review_patch(branch):
    """Submit branch to the review-patches remote via git-review -y."""
    args = ("review", "-y", "-r", "review-patches", branch)
    git(*args, direct=True)
def setup_review(self):
    """Run git-review setup (-s) inside the repo as self.user."""
    with self.repo_dir(), helpers.setenv(USERNAME=self.user):
        cmd.git('review', '-s')
def submit_update_file(self, id, msg=''):
    """Commit the update file on a temporary branch and send it to gerrit.

    :param id: update id; its file must exist (UpdateFileNotFound otherwise)
    :param msg: optional extra text appended to the commit message
    """
    upfile_path = self._upfile_path(id)
    with self.repo_dir():
        if not os.path.isfile(upfile_path):
            raise exception.UpdateFileNotFound(path=upfile_path)
        update = rdoupdate.actions.check_file(upfile_path)
        branch = update_summary(update)
        commit_msg = "New %s" % id
        if msg:
            commit_msg += "\n\n%s\n" % msg
        log.info("Setting up gerrit.")
        cmd.git('review', '-s', direct=True)
        cmd.git('branch', branch, 'origin/master')
        try:
            cmd.git('checkout', '-f', branch)
            cmd.git('add', upfile_path)
            cmd.git('commit', '-F', '-', input=commit_msg)
            log.info("Submitting update for review.")
            cmd.git('review', direct=True)
        finally:
            # BUG FIX: cleanup used to sit after an empty ``finally: pass``,
            # so a failed review left the temp branch checked out and
            # undeleted; it now runs on success and failure alike
            cmd.git('checkout', '-f', 'master')
            cmd.git('branch', '-D', branch)
def review_spec(branch):
    """Submit branch to the review-origin remote via git-review."""
    args = ("review", "-r", "review-origin", branch)
    git(*args, direct=True)
def clone(repo, root_dir):
    """Clone repo into root_dir unless that directory already exists."""
    if os.path.isdir(root_dir):
        return
    git('clone', repo, root_dir)
def review(self):
    """Run git-review inside the repo as self.user."""
    with self.repo_dir(), helpers.setenv(USERNAME=self.user):
        cmd.git('review', direct=True)
def get_commit_time(commit_id):
    """Return the committer timestamp (unix epoch, %ct) of commit_id."""
    log_args = ('log', '-1', '--date=short', '--pretty=format:%ct',
                '-U', commit_id, '--no-patch')
    return git(*log_args)