def get_updates_info(verbose=False):
    gitdir = tempfile.mkdtemp(prefix='rdopkg-list-updates')
    uinfos = []
    prev_cwd = os.getcwd()
    os.chdir(gitdir)
    try:
        cmd.git('init', log_cmd=False)
        f_project = filters.OrFilter()
        f_project.add_items('project', 'rdo-update')
        f_other = filters.Items()
        f_other.add_items('is', 'open')
        query = reviews.Query(cfg['RDO_UPDATE_GERRIT_HOST'])
        for review in query.filter(f_project, f_other):
            try:
                url = review.get('url', '???')
                if verbose:
                    log.info("Processing update review: %s" % url)
                uinfo = get_review_update_info(review, gitdir)
                uinfos.append(uinfo)
            except Exception as ex:
                if verbose:
                    log.warn("Error processing update review: %s: %s",
                             type(ex).__name__, str(ex))
    finally:
        os.chdir(prev_cwd)
        shutil.rmtree(gitdir)
    return uinfos
def get_upstream_patches(version, local_patches_branch,
                         patches_branch=None, upstream_branch=None,
                         new_milestone=None):
    # TODO: nuke this, looks unused
    patches = git("log", "--cherry-pick", "--pretty=format:\%s",
                  "%(remote)s...%(local)s" % {'remote': patches_branch,
                                              'local': local_patches_branch})
    changes = [p.strip().replace('\\', '')
               for p in patches.split('\n') if p != '']

    if not changes:
        log.warn("No new patches detected in %s." % local_patches_branch)
        helpers.confirm("Do you want to continue anyway?", default_yes=False)

    n_patches = len(changes)
    changes.insert(0, ("Rebase %s changes from %s"
                       % (n_patches, upstream_branch)))
    args = {'changes': changes}
    if n_patches > 0:
        if new_milestone:
            new_milestone += '.p%d' % n_patches
        else:
            new_milestone = 'p%d' % n_patches
        args['new_milestone'] = new_milestone
    return args
def _add_amendables(msg, amendables):
    # Rather than go with an OrderedDict, keep it simple with a list
    AMENDABLES_ORDER = [
        'Change-Id',
    ]
    changeid_regex = 'Change-Id: (?P<id>I[a-fA-F0-9]+)'
    AMENDABLES = {
        'Change-Id': {
            'regex': re.compile(changeid_regex),
            'msg': '%(msg)s\n\nChange-Id: %(id)s\n'
        },
    }
    previous_commit = git.current_commit_message()

    for unknown in set(amendables).difference(AMENDABLES_ORDER):
        log.warn('"%s" is not a valid amendable field' % unknown)

    for a in AMENDABLES_ORDER:
        if a not in amendables:
            continue
        regex = AMENDABLES[a]['regex']
        match = regex.search(previous_commit)
        if match:
            rdict = match.groupdict()
            rdict['msg'] = msg
            msg = AMENDABLES[a]['msg'] % rdict
    return msg
def prep_new_patches_branch(new_version,
                            local_patches_branch, patches_branch,
                            local_patches=False, bump_only=False,
                            patches_style=None, version_tag_style=None,
                            unattended=False, no_push_patches=False):
    if patches_style == 'review':
        if no_push_patches:
            return
        new_version_tag = guess.version2tag(new_version, version_tag_style)
        try:
            remote, branch = git.remote_branch_split(patches_branch)
            if unattended:
                log.warn('Unattended mode: force pushing patches')
            else:
                helpers.confirm("Push %s to %s/%s (with --force)?" % (
                    new_version_tag, remote, branch))
            git('branch', '--force', local_patches_branch, new_version_tag)
            git('push', '--force', remote,
                '%s:%s' % (local_patches_branch, branch))
            # push the tag
            git('push', '--force', remote, new_version_tag)
        except exception.UserAbort:
            pass
    else:
        if not (local_patches or bump_only):
            _reset_branch(local_patches_branch, remote_branch=patches_branch)
def new_build(self, srpm_url, release, dist, watch=False):
    copr = rdo_copr_name(release, dist)
    url = self.copr_api_url('coprs/%s/%s/new_build/' % (self.owner, copr))
    data = {
        'pkgs': srpm_url,
    }
    req = requests.post(url,
                        auth=(self.user['login'], self.user['token']),
                        data=data)
    output = _get_copr_data(req, self.user, type='new_build')
    build_ids = output.get('ids')
    if not build_ids:
        raise exception.CoprError(
            error="copr didn't return id of new build.(?!)")
    build_id = build_ids[0]
    if watch:
        log.info("\nWatching build (may be safely interrupted)...")
        prevstatus = None
        try:
            while True:
                try:
                    status = self._fetch_build_status(build_id)
                except exception.CoprError as ex:
                    log.warn("Failed to get build status: %s" % ex)
                    break

                if prevstatus != status:
                    now = datetime.datetime.now()
                    if status in ['pending', 'waiting', 'running']:
                        cstatus = log.term.bold(status)
                    elif status == 'succeeded':
                        cstatus = log.term.good(status)
                    elif status == 'failed':
                        cstatus = log.term.error(status)
                    elif status == 'skipped':
                        cstatus = ("{t.magenta}{st}{t.normal} (build "
                                   "already done)".format(t=log.term,
                                                          st=status))
                    else:
                        cstatus = log.term.warn(status)
                    log.info("[%s] %s" % (now.strftime('%H:%M:%S'), cstatus))
                    prevstatus = status

                if status in ['succeeded', 'failed', 'canceled', 'skipped']:
                    break

                time.sleep(60)
        except KeyboardInterrupt:
            pass
        except Exception as ex:
            log.warn("Error during copr build monitoring: %s" % ex)
    return build_id
def query(filter, package, verbose=False):
    r = _query.query_rdo(filter, package, verbose=verbose)
    if not r:
        log.warn('No distrepos information in rdoinfo for %s' % filter)
        return
    if verbose:
        print('')
    _query.pretty_print_query_results(r)
def _push_pkg(upf):
    log.info("\nPushing update {t.bold}{upf}{t.normal}".format(
        t=log.term, upf=upf))
    update = self._load_update_file(upf)
    pushed_rpms = []
    try:
        _updated_repos = set()
        _updated_repo_bases = set()
        _pushed_build_tmp_paths = []
        for build in update.builds:
            src_path = self._build_tmp_path(upf, build)
            if src_path in _pushed_build_tmp_paths:
                continue
            build_rpms = helpers.find_files(src_path, ext='.rpm')
            dest_repo_base_path = self._dest_repo_base_path(build.repo)
            if not os.path.isdir(dest_repo_base_path):
                raise exception.NotADirectory(path=dest_repo_base_path)
            dest_path = self._build_dest_path(build)
            for rpm in build_rpms:
                pushed_path = copy_package(rpm, dest_path,
                                           overwrite=self.overwrite)
                pushed_rpms.append(pushed_path)
            _pushed_build_tmp_paths.append(src_path)
            _updated_repo_bases.add(dest_repo_base_path)
            _updated_repos.add(self._dest_repo_path(build.repo, build.dist))

        with helpers.cdir(self.update_repo_path):
            helpers.ensure_dir(self.pushed_dir)
            upf_base = os.path.basename(upf)
            pushed_upf = os.path.join(self.pushed_dir, upf_base)
            pushed_files_fn = pushed_upf + self.pushed_files_ext
            git('mv', upf, pushed_upf)
            pushed_files_f = open(pushed_files_fn, 'w')
            pushed_files_f.writelines(
                map(lambda x: "%s\n" % x, pushed_rpms))
            pushed_files_f.close()
            git('add', pushed_files_fn)
            try:
                git('commit', '-m',
                    "Push %s" % rdoupdate.core.pp_update(upf))
            except Exception:
                # roll back the staged move if the commit fails
                git('reset', '--hard')
                raise

        updated_repos.update(_updated_repos)
        updated_repo_bases.update(_updated_repo_bases)
    except Exception:
        if pushed_rpms:
            log.warn("Final push failed for %s, cleaning copied "
                     "packages" % upf)
            for rpm in pushed_rpms:
                log.info("{t.warn}remove{t.normal} {rpm}".format(
                    t=log.term, rpm=rpm))
                os.remove(rpm)
        raise
def set_source1(spec):
    """ Ensure Source1 is our -changes tarball. """
    source_value = '%{name}-%{version}-%{commit}-changes.tar.gz'
    # This get_tag() will raise if Source1 is not present:
    source1 = spec.get_tag('Source1', expand_macros=False)
    if source1 != source_value:
        # Log the existing Source1, then clobber it.
        msg = 'overwriting Source1: %s with new -changes.tar.gz' % source1
        log.warn(msg)
        spec.set_tag('Source1', source_value)
def review_patches_branch(local_patches_branch, patches_style=None,
                          bump_only=False, unattended=False):
    if patches_style != 'review' or bump_only:
        return
    try:
        if unattended:
            log.warn("Unattended mode: sending %s branch for review"
                     % local_patches_branch)
        else:
            helpers.confirm("Send %s branch for review?"
                            % local_patches_branch)
        rpmfactory.review_patch(local_patches_branch)
    except exception.UserAbort:
        pass
def parse_reqs_txt(txt):
    reqs = []
    lines = sorted(txt.split('\n'), key=lambda l: l.lower())
    for line in lines:
        # skip empty lines and lines that don't start with a package name
        # (comments, pip options, ...)
        if not line or re.match(r'\W', line):
            continue
        # strip trailing comments
        line = re.sub(r'\s*(?:#.*)$', '', line)
        m = re.match(r'([^<>=!\s]+)\s*(.*)$', line)
        if not m:
            log.warn("Failed to parse requirement: %s" % line)
            continue
        r = DiffReq(name=m.group(1), vers=m.group(2))
        reqs.append(r)
    return reqs
def add_actions_modules(self, package, override=True):
    added = []
    for importer, modname, ispkg in pkgutil.iter_modules(package.__path__):
        modpath = '%s.%s' % (package.__name__, modname)
        if not ispkg:
            continue
        try:
            mod = importer.find_module(modname).load_module(modname)
        except ImportError:
            log.warn("Failed to import module: %s" % modpath)
            continue
        if not hasattr(mod, 'ACTIONS'):
            continue
        self.add_actions_module(mod, name=modname, override=override)
        added.append(modname)
    return added
def check_new_patches(version, local_patches_branch,
                      patches_style=None, local_patches=False,
                      patches_branch=None, changes=None,
                      version_tag_style=None):
    if not changes:
        changes = []
    if local_patches or patches_style == 'review':
        head = local_patches_branch
    else:
        if not patches_branch:
            raise exception.RequiredActionArgumentNotAvailable(
                action='check_new_patches', arg='patches_branch')
        head = patches_branch

    version_tag = guess.version2tag(version, version_tag_style)
    patches = git.get_commit_bzs(version_tag, head)
    spec = specfile.Spec()

    n_git_patches = len(patches)
    n_spec_patches = spec.get_n_patches()
    n_skip_patches = spec.get_n_excluded_patches()
    n_ignore_patches = 0

    ignore_regex = spec.get_patches_ignore_regex()
    if ignore_regex:
        patches = flatten(_partition_patches(patches, ignore_regex))
        n_ignore_patches = n_git_patches - len(patches)

    patch_subjects = []
    for hash, subject, bzs in patches:
        subj = subject
        bzstr = ' '.join(map(lambda x: 'rhbz#%s' % x, bzs))
        if bzstr != '':
            subj += ' (%s)' % bzstr
        patch_subjects.append(subj)

    n_base_patches = n_skip_patches + n_spec_patches
    log.debug("Total patches in git:%d spec:%d skip:%d ignore:%d" % (
        n_git_patches, n_spec_patches, n_skip_patches, n_ignore_patches))
    if n_base_patches > 0:
        patch_subjects = patch_subjects[0:-n_base_patches]

    if not patch_subjects:
        log.warn("No new patches detected in %s." % head)
        helpers.confirm("Do you want to continue anyway?",
                        default_yes=False)
    changes.extend(patch_subjects)
    return {'changes': changes}
def get_patches_branch(local_patches_branch, patches_branch,
                       local_patches=False, patches_style=None,
                       gerrit_patches_chain=None,
                       bump_only=False, force=False):
    if local_patches or bump_only:
        return
    if patches_style == 'review':
        if not gerrit_patches_chain:
            gerrit_patches_chain = guess.gerrit_patches_chain()
        if gerrit_patches_chain:
            rpmfactory.fetch_patches_branch(local_patches_branch,
                                            gerrit_patches_chain,
                                            force)
        else:
            log.warn("Review patches chain not found. No patches yet?")
    else:
        _reset_branch(local_patches_branch, remote_branch=patches_branch)
def fetch_patches_branch(local_patches_branch, gerrit_patches_chain,
                         force=False):
    review_n = _review_number(gerrit_patches_chain)
    gerrit_host, gerrit_port = guess.gerrit_from_repo()
    query = GerritQuery(gerrit_host, gerrit_port)
    review = query('--current-patch-set', review_n)
    current_ps = review.get('currentPatchSet', {})
    patchset_n = current_ps.get('number')
    if not patchset_n:
        raise exception.CantGuess(
            msg='Failed to determine current patch set for review: %s'
                % gerrit_patches_chain)
    gerrit_ref = _review_ref(review_n, patchset_n)
    git('fetch', 'patches', gerrit_ref)

    approvals = current_ps.get('approvals', [])
    jenkins = [a for a in approvals
               if a.get('type') == 'Verified'
               and a.get('by', {}).get('username') == 'jenkins']
    code_reviews = [int(a.get('value', 0)) for a in approvals
                    if a.get('type') == 'Code-Review']
    if not jenkins:
        verified = 0
    else:
        verified = int(jenkins[0]['value'])

    if verified != 1:
        if force:
            log.warn("Ref %s has not been validated by CI."
                     % gerrit_patches_chain)
            helpers.confirm("Do you want to continue anyway?",
                            default_yes=False)
        else:
            raise exception.UnverifiedPatch()
    if any(cr < 0 for cr in code_reviews):
        log.warn("Ref %s has at least one negative review."
                 % gerrit_patches_chain)
        helpers.confirm("Do you want to continue anyway?",
                        default_yes=False)

    git('update-ref', 'refs/heads/%s' % local_patches_branch,
        'FETCH_HEAD')
def init(self, force_fetch=False):
    if not self.url:
        if not os.path.isdir(self.repo_path):
            raise exception.NotADirectory(path=self.repo_path)
        return
    if self.base_path and not os.path.isdir(self.base_path):
        if self.verbose:
            log.info("Creating base directory: %s" % self.base_path)
        os.makedirs(self.base_path)
    if not os.path.isdir(self.repo_path):
        self._clone()
    else:
        try:
            self.git_check_remote()
        except exception.RepoError as e:
            if self.verbose:
                log.warn("%s repo didn't pass the checks, renewing: %s"
                         % (self.repo_desc, e))
            self._nuke()
            self._clone()
        else:
            self._fetch(force=force_fetch)
def update_rdoinfo_check(update):
    fail_msg = None
    warn = False
    rls = defaultdict(set)
    for b in update.builds:
        rls[b.repo].add(b)
    log.info("Checking update using rdoinfo...")
    for r, builds in rls.items():
        rbuilds = guess.builds(r)
        rbuilds_ = list(rbuilds)
        if not rbuilds:
            msg = 'Unknown OpenStack release: %s' % r
            log.warn(msg)
            if not fail_msg:
                fail_msg = msg
            continue
        for b in builds:
            found = False
            for dist_, src_ in rbuilds:
                if b.dist == dist_:
                    found = True
                    rbuilds_ = [x for x in rbuilds_ if x[0] != b.dist]
                    break
            if not found:
                msg = 'Unexpected %s build: %s' % (r, b.dist)
                log.warn(msg)
                if not fail_msg:
                    fail_msg = msg
        for dist_, src_ in rbuilds_:
            log.info('Missing %s build: %s' % (r, dist_))
            warn = True
    if fail_msg:
        raise exception.UpdateCheckFailed(fail=fail_msg)
    if warn:
        helpers.confirm("Submit anyway?")
def clone(
        package,
        force_fetch=False,
        use_master_distgit=False,
        gerrit_remotes=False,
        review_user=None,
        distro='rdo'):
    rdo = rdoinfo.get_distroinfo(distro=distro)
    ri = rdo.get_info()
    pkg = get_package(ri, package)
    if not pkg:
        raise exception.InvalidPackage(package=package)
    if use_master_distgit:
        try:
            distgit = pkg['master-distgit']
            distgit_str = 'master-distgit'
        except KeyError:
            raise exception.InvalidUsage(
                msg="-m/--use-master-distgit used but 'master-distgit' "
                    "missing in rdoinfo for package: %s" % package)
    else:
        distgit = pkg['distgit']
        distgit_str = 'distgit'
    log.info("Cloning {dg} into ./{t.bold}{pkg}{t.normal}/".format(
        t=log.term, dg=distgit_str, pkg=package))
    patches = pkg.get('patches')
    upstream = pkg.get('upstream')
    review_patches = pkg.get('review-patches')
    review_origin = pkg.get('review-origin')

    git('clone', distgit, package)
    with helpers.cdir(package):
        if gerrit_remotes:
            log.info('Adding gerrit-origin remote...')
            git('remote', 'add', 'gerrit-origin', distgit)
        if patches:
            log.info('Adding patches remote...')
            git('remote', 'add', 'patches', patches)
            if gerrit_remotes:
                log.info('Adding gerrit-patches remote...')
                git('remote', 'add', 'gerrit-patches', patches)
        else:
            log.warn("'patches' remote information not available in rdoinfo.")
        if upstream:
            log.info('Adding upstream remote...')
            git('remote', 'add', 'upstream', upstream)
        else:
            log.warn("'upstream' remote information not available "
                     "in rdoinfo.")
        if patches or upstream:
            git('fetch', '--all')

        if not review_user:
            # USERNAME is an env var used by gerrit
            review_user = os.environ.get('USERNAME') or os.environ.get('USER')
        msg_user = ('Using {t.bold}{u}{t.normal} as gerrit username, '
                    'you can change it with '
                    '{t.cmd}git remote set-url {r} ...{t.normal}')
        if review_patches:
            log.info('Adding gerrit remote for patch chains reviews...')
            r = tidy_ssh_user(review_patches, review_user)
            log.info(msg_user.format(u=review_user, r='review-patches',
                                     t=log.term))
            git('remote', 'add', 'review-patches', r)
        else:
            log.warn("'review-patches' remote information not available"
                     " in rdoinfo.")
        if review_origin:
            log.info('Adding gerrit remote for reviews...')
            r = tidy_ssh_user(review_origin, review_user)
            log.info(msg_user.format(u=review_user, r='review-origin',
                                     t=log.term))
            git('remote', 'add', 'review-origin', r)
        else:
            log.warn("'review-origin' remote information not available"
                     " in rdoinfo.")
        git('remote', '-v', direct=True)
def _update_failed(self, update_file, error, stage):
    errs = str(error)
    self.fails.append((update_file, errs, stage))
    log.warn("Failed during %s for %s: %s" % (stage, update_file, errs))
def new_version_setup(patches_branch=None, local_patches=False,
                      version=None, new_version=None, version_tag_style=None,
                      new_sources=None, no_new_sources=None,
                      unattended=False, bug=None, bump_only=False):
    args = {}
    if new_version:
        # support both version and tag
        ver, _ = guess.tag2version(new_version)
        if ver != new_version:
            new_version = ver
        args['new_version'] = new_version
        new_version_tag = guess.version2tag(new_version, version_tag_style)
        if not bump_only and not git.object_type(new_version_tag):
            raise exception.InvalidGitRef(ref=new_version_tag)
    else:
        ub = guess.upstream_branch()
        if not git.ref_exists('refs/remotes/%s' % ub):
            msg = ("Upstream branch not found: %s\n"
                   "Can't guess latest version.\n\n"
                   "a) provide new version (git tag) yourself\n"
                   " $ rdopkg new-version 1.2.3\n\n"
                   "b) add upstream git remote:\n"
                   " $ git remote add -f upstream GIT_URL\n"
                   % ub)
            raise exception.CantGuess(msg=msg)
        new_version_tag = git.get_latest_tag(ub)
        new_version, _ = guess.tag2version(new_version_tag)
        args['new_version'] = new_version
        log.info("Latest version detected from %s: %s" % (ub, new_version))

    if version == new_version:
        if unattended:
            log.info("Package is already at version %s\n" % version)
            raise exception.UserAbort(exit_code=0)
        helpers.confirm(
            msg="It seems the package is already at version %s\n\n"
                "Run new-version anyway?" % version,
            default_yes=False)

    changelog = 'Update to %s' % new_version
    if bug:
        changelog += ' (%s)' % bug
    args['changes'] = [changelog]
    args['new_patches_base'] = new_version_tag

    spec = specfile.Spec()
    rpm_version = spec.get_tag('Version')
    rpm_milestone = spec.get_milestone()
    new_rpm_version, new_milestone = specfile.version_parts(new_version)

    args['new_rpm_version'] = new_rpm_version
    if new_milestone:
        args['new_milestone'] = new_milestone

    if (rpm_version != new_rpm_version or
            bool(new_milestone) != bool(rpm_milestone)):
        if new_milestone:
            args['new_release'] = '0.1'
        else:
            args['new_release'] = '1'

    if not local_patches and not bump_only:
        if not patches_branch or \
                not git.ref_exists('refs/remotes/' + patches_branch):
            log.warn("Patches branch '%s' not found. Running in --bump-only "
                     "mode." % patches_branch)
            args['bump_only'] = True

    if new_sources or no_new_sources:
        if new_sources and no_new_sources:
            raise exception.InvalidUsage(
                msg="DOES NOT COMPUTE: both -n and -N don't make sense.")
        # new_sources == not no_new_sources
    else:
        new_sources = guess.new_sources()
    args['new_sources'] = new_sources
    return args
def clone(
        package,
        force_fetch=False,
        use_master_distgit=False,
        gerrit_remotes=False,
        review_user=None):
    inforepo = rdoinfo.get_default_inforepo()
    inforepo.init(force_fetch=force_fetch)
    pkg = inforepo.get_package(package)
    if not pkg:
        raise exception.InvalidRDOPackage(package=package)
    if use_master_distgit:
        try:
            distgit = pkg['master-distgit']
            distgit_str = 'master-distgit'
        except KeyError:
            raise exception.InvalidUsage(
                msg="-m/--use-master-distgit used but 'master-distgit' "
                    "missing in rdoinfo for package: %s" % package)
    else:
        distgit = pkg['distgit']
        distgit_str = 'distgit'
    log.info("Cloning {dg} into ./{t.bold}{pkg}{t.normal}/".format(
        t=log.term, dg=distgit_str, pkg=package))
    patches = pkg.get('patches')
    upstream = pkg.get('upstream')
    review_patches = pkg.get('review-patches')
    review_origin = pkg.get('review-origin')

    git('clone', distgit, package)
    with helpers.cdir(package):
        if gerrit_remotes:
            log.info('Adding gerrit-origin remote...')
            git('remote', 'add', 'gerrit-origin', distgit)
        if patches:
            log.info('Adding patches remote...')
            git('remote', 'add', 'patches', patches)
            if gerrit_remotes:
                log.info('Adding gerrit-patches remote...')
                git('remote', 'add', 'gerrit-patches', patches)
        else:
            log.warn("'patches' remote information not available in rdoinfo.")
        if upstream:
            log.info('Adding upstream remote...')
            git('remote', 'add', 'upstream', upstream)
        else:
            log.warn("'upstream' remote information not available "
                     "in rdoinfo.")
        if patches or upstream:
            git('fetch', '--all')

        if not review_user:
            # USERNAME is an env var used by gerrit
            review_user = os.environ.get('USERNAME') or os.environ.get('USER')
        msg_user = ('Using {t.bold}{u}{t.normal} as gerrit username, '
                    'you can change it with '
                    '{t.cmd}git remote set-url {r} ...{t.normal}')
        if review_patches:
            log.info('Adding gerrit remote for patch chains reviews...')
            r = tidy_ssh_user(review_patches, review_user)
            log.info(msg_user.format(u=review_user, r='review-patches',
                                     t=log.term))
            git('remote', 'add', 'review-patches', r)
        else:
            log.warn("'review-patches' remote information not available"
                     " in rdoinfo.")
        if review_origin:
            log.info('Adding gerrit remote for reviews...')
            r = tidy_ssh_user(review_origin, review_user)
            log.info(msg_user.format(u=review_user, r='review-origin',
                                     t=log.term))
            git('remote', 'add', 'review-origin', r)
        else:
            log.warn("'review-origin' remote information not available"
                     " in rdoinfo.")
        git('remote', '-v', direct=True)
def new_version_setup(patches_branch=None, local_patches=False,
                      version=None, new_version=None, version_tag_style=None,
                      new_sources=None, no_new_sources=None):
    args = {}
    if new_version:
        # support both version and tag
        ver, _ = guess.tag2version(new_version)
        if ver != new_version:
            new_version = ver
        args['new_version'] = new_version
        new_version_tag = guess.version2tag(new_version, version_tag_style)
    else:
        ub = guess.upstream_branch()
        if not git.ref_exists('refs/remotes/%s' % ub):
            msg = ("Upstream branch not found: %s\n"
                   "Can't guess latest version.\n\n"
                   "a) provide new version (git tag) yourself\n"
                   " $ rdopkg new-version 1.2.3\n\n"
                   "b) add upstream git remote:\n"
                   " $ git remote add -f upstream GIT_URL\n"
                   % ub)
            raise exception.CantGuess(msg=msg)
        new_version_tag = git.get_latest_tag(ub)
        new_version, _ = guess.tag2version(new_version_tag)
        args['new_version'] = new_version
        log.info("Latest version detected from %s: %s" % (ub, new_version))

    if version == new_version:
        helpers.confirm(
            msg="It seems the package is already at version %s\n\n"
                "Run new-version anyway?" % version,
            default_yes=False)

    args['changes'] = ['Update to %s' % new_version]
    args['new_patches_base'] = new_version_tag

    spec = specfile.Spec()
    rpm_version = spec.get_tag('Version')
    rpm_milestone = spec.get_milestone()
    new_rpm_version, new_milestone = specfile.version_parts(new_version)

    args['new_rpm_version'] = new_rpm_version
    if new_milestone:
        args['new_milestone'] = new_milestone

    if (rpm_version != new_rpm_version or
            bool(new_milestone) != bool(rpm_milestone)):
        if new_milestone:
            args['new_release'] = '0.1'
        else:
            args['new_release'] = '1'

    if not local_patches:
        if not patches_branch or \
                not git.ref_exists('refs/remotes/' + patches_branch):
            log.warn("Patches branch '%s' not found. Running in --bump-only "
                     "mode." % patches_branch)
            args['bump_only'] = True

    if new_sources or no_new_sources:
        if new_sources and no_new_sources:
            raise exception.InvalidUsage(
                msg="DOES NOT COMPUTE: both -n and -N don't make sense.")
        # new_sources == not no_new_sources
    else:
        new_sources = guess.new_sources()
    args['new_sources'] = new_sources
    return args