def generate_output(content, output_format='std', ignore_no_results=False,
                    ignore_errors=False, ignore_not_yet_released=False,
                    ignore_all=False):
    """Render the filtered scan results in the requested output format.

    ``content`` is first narrowed by filter_results() using the ignore
    flags; the survivors are then serialized as JSON, YAML, or an
    ANSI-colored human-readable report ('std', the default). Returns
    the formatted string, or None for an unrecognized output_format.
    """
    filtered = filter_results(content, ignore_no_results, ignore_errors,
                              ignore_not_yet_released, ignore_all)
    if output_format == 'json':
        return json.dumps(filtered, indent=4)
    if output_format == 'yaml':
        return yamlutils.dumps(filtered)
    if output_format == 'std':
        report = []
        for repo in filtered:
            # Bold red banner naming the repository and branch.
            banner = ('\033[1m\033[91m[ Unreleased changes in '
                      '{rep} ({br}) ]\033[0m').format(rep=repo['repo'],
                                                      br=repo['branch'])
            report.append(banner)
            if repo['not_yet_released'] or repo['error']:
                # No commit range to show; emit the explanatory message.
                report.append(repo['msg'])
                continue
            commits = repo['commits']
            report.append('Changes between {start} and {end}'.format(
                start=commits['range'][0],
                end=commits['range'][1]))
            if commits.get('logs', None):
                report.append("\n".join(commits['logs']))
            # Blank separator line between repositories.
            report.append('')
        return '\n'.join(report)
def main():
    """Edit one field of a deliverable file via a subcommand.

    Parses a series name, a deliverable name, and one subcommand
    (set-release-notes, add-stable-branch, or import-eol-tag), applies
    the subcommand's handler to the loaded deliverable data, and writes
    the updated YAML back in place.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the name of the release series to scan',
    )
    parser.add_argument(
        'deliverable',
        help='the base name of the deliverable file',
    )
    subparsers = parser.add_subparsers(help='commands')
    relnote_parser = subparsers.add_parser(
        'set-release-notes',
        help='set the release-notes field',
    )
    relnote_parser.add_argument(
        'url',
        help='the release-notes URL',
    )
    relnote_parser.set_defaults(func=release_notes)
    stable_branch_parser = subparsers.add_parser(
        'add-stable-branch',
        help='add a branch',
    )
    stable_branch_parser.add_argument(
        'location',
        help='version number',
    )
    stable_branch_parser.set_defaults(func=stable_branch)
    eol_tag_parser = subparsers.add_parser(
        'import-eol-tag',
        help='find the series EOL tag and add it',
    )
    eol_tag_parser.set_defaults(func=eol_tag)
    args = parser.parse_args()

    # add_subparsers() does not require a subcommand by default, so
    # 'func' is unset when none is given; fail with a usage error
    # instead of an AttributeError below.
    if not hasattr(args, 'func'):
        parser.error('a subcommand is required')

    # Allow for independent projects.
    series = args.series
    if series.lstrip('_') == 'independent':
        series = '_independent'

    # Load existing deliverable data.
    try:
        deliverable_info = get_deliverable_data(series, args.deliverable)
    except (IOError, OSError) as e:
        parser.error(e)

    # The handler mutates deliverable_info in place.
    args.func(args, series, deliverable_info)

    deliverable_filename = 'deliverables/%s/%s.yaml' % (series,
                                                        args.deliverable)
    with open(deliverable_filename, 'w', encoding='utf-8') as f:
        f.write(yamlutils.dumps(deliverable_info))
def main():
    """Carry deliverable files forward from one series to the next.

    For every deliverable present in ``old_series`` but not yet in
    ``new_series``, copies its data into the new series directory after
    stripping the series-specific keys listed in the module-level
    IGNORE collection. Deliverables with no releases and no branches
    are skipped entirely.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'old_series',
        help='the previous release series, such as "newton"',
    )
    parser.add_argument(
        'new_series',
        help='the new release series, such as "ocata"',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    args = parser.parse_args()

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )
    # Names already defined in the new series; these are not overwritten.
    new_deliverables = set(
        deliv.name
        for deliv in all_deliv.get_deliverables(None, args.new_series)
    )

    outdir = os.path.join(args.deliverables_dir, args.new_series)
    if not os.path.exists(outdir):
        print('creating output directory {}'.format(outdir))
        os.mkdir(outdir)

    old_deliverables = all_deliv.get_deliverables(None, args.old_series)
    for deliv in old_deliverables:
        if deliv.name in new_deliverables:
            # Already present in the new series; leave it alone.
            continue
        if not deliv.is_released and not deliv.branches:
            # There were no releases for the deliverable in the
            # previous series, stop carrying it over.
            print('{} skipped (no releases in {})'.format(
                deliv.name, args.old_series))
            continue
        # Clean up some series-specific data that should not be copied
        # over.
        raw_data = deliv.data
        for key in IGNORE:
            if key in raw_data:
                del raw_data[key]
        outfilename = os.path.abspath(
            os.path.join(outdir, deliv.name + '.yaml')
        )
        with open(outfilename, 'w', encoding='utf-8') as f:
            print('{} created'.format(outfilename))
            f.write(yamlutils.dumps(raw_data))
def main():
    """Reformat the named YAML files in place.

    Each file is parsed and re-serialized through yamlutils so that it
    matches the canonical formatting, with a trailing newline ensured.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('file', nargs='*', help="Yaml file to reformat")
    args = parser.parse_args()
    for filename in args.file:
        print("Reformatting %s" % filename)
        # yamlutils.dumps produces str (it is written through text-mode
        # handles everywhere else in this file), so both ends of the
        # round-trip must use text mode; the original 'wb' write of a
        # str raised TypeError on Python 3.
        with open(filename, 'r', encoding='utf-8') as fh:
            contents = yamlutils.loads(fh.read())
        contents = yamlutils.dumps(contents)
        with open(filename, 'w', encoding='utf-8') as fh:
            fh.write(contents)
            if not contents.endswith("\n"):
                fh.write("\n")
def maybe_create_release(release_repo_path, deliverable_info,
                         last_release, change_lines,
                         latest_cycle, project, short_project,
                         max_changes_show=100, should_prompt=True):
    """Show pending changes and interactively record a new release.

    Prints the commits in ``change_lines`` (at most
    ``max_changes_show`` of them), then — when ``should_prompt`` is
    true and the user confirms — prompts for the release metadata and
    either appends the new release to the deliverable file for
    ``latest_cycle`` or, if the user declines to alter an existing
    file, prints the YAML snippet to add manually.
    """
    if last_release:
        print("%s changes to release since %s are:"
              % (len(change_lines), last_release['version']))
    else:
        print("%s changes to release are:" % (len(change_lines)))
    for sha, descr in change_lines[0:max_changes_show]:
        print(" %s %s" % (sha, descr))
    leftover_change_lines = change_lines[max_changes_show:]
    if leftover_change_lines:
        print(" and %s more changes..." % len(leftover_change_lines))
    if not should_prompt:
        return
    create_release = yes_no_prompt('Create a release in %s containing'
                                   ' those changes? ' % latest_cycle)
    if create_release:
        # NOTE(harlowja): use of an ordered-dict here is on purpose, so that
        # the ordering here stays similar to what is already being used.
        newest_release_path = os.path.join(release_repo_path,
                                           'deliverables', latest_cycle,
                                           "%s.yaml" % short_project)
        ok_change = True
        if os.path.exists(newest_release_path):
            # Existing deliverable file: load it and ask before we
            # rewrite it (re-serializing can drop comments/structure).
            with open(newest_release_path, 'rb') as fh:
                newest_release = yamlutils.loads(fh.read())
            ok_change = yes_no_prompt("Alter existing file (reformatting"
                                      " may lose comments and some existing"
                                      " yaml indenting/structure)? ")
        else:
            # New deliverable file: gather the top-level metadata.
            notes_link = to_unicode(NOTES_URL_TPL % (short_project,
                                                     latest_cycle))
            notes_link = prompt("Release notes link: ",
                                validator=NoEmptyValidator(),
                                default=notes_link)
            if deliverable_info:
                launchpad_project = to_unicode(deliverable_info['launchpad'])
            else:
                launchpad_project = prompt("Launchpad project name: ",
                                           validator=NoEmptyValidator(),
                                           default=to_unicode(short_project))
            team = prompt("Project team: ",
                          validator=NoEmptyValidator(),
                          default=to_unicode(launchpad_project))
            include_pypi_link = yes_no_prompt("Include pypi link? ")
            newest_release = collections.OrderedDict([
                ('launchpad', launchpad_project),
                ('include-pypi-link', include_pypi_link),
                ('release-notes', notes_link),
                ('releases', []),
                ('team', team),
            ])
        # Candidate hashes for the release point come from the change list.
        possible_hashes = []
        for sha, _descr in change_lines:
            possible_hashes.append(sha)
        release_kind = prompt("Release type: ",
                              validator=SetValidator(RELEASE_KINDS),
                              completer=WordCompleter(RELEASE_KINDS))
        suggested_version = generate_suggested_next_version(
            last_release, release_kind)
        if not suggested_version:
            suggested_version = ''
        version = prompt("Release version: ",
                         validator=NoEmptyValidator(),
                         default=to_unicode(suggested_version))
        highlights = prompt("Highlights (esc then enter to"
                            " exit): ", multiline=True)
        highlights = highlights.strip()
        release_hash = prompt("Hash to release at: ",
                              validator=SetValidator(possible_hashes),
                              completer=WordCompleter(possible_hashes),
                              default=possible_hashes[0])
        new_release = collections.OrderedDict([
            ('version', version),
            ('projects', [
                collections.OrderedDict([
                    ('repo', project),
                    ('hash', release_hash),
                ]),
            ]),
        ])
        if highlights:
            new_release['highlights'] = highlights
        if not ok_change:
            # User declined to rewrite the file; show what to add by hand.
            new_release = yamlutils.dumps(new_release)
            print("You may manually adjust %s and add:" % newest_release_path)
            print(new_release)
        else:
            try:
                newest_release['releases'].append(new_release)
            except KeyError:
                # Existing file had no 'releases' key yet.
                newest_release['releases'] = [new_release]
            newest_release = yamlutils.dumps(newest_release)
            with open(newest_release_path, 'wb') as fh:
                fh.write(newest_release)
def main():
    """Compute and record the next release for a deliverable.

    Command-line tool: given a series, a deliverable name, and a
    release type, works out the next version number (semver bump,
    milestone/RC, procedural re-tag, releasefix, or EOL/EM tag),
    resolves the hash to tag in each repository, appends the new
    release (and optionally a new stable branch) to the deliverable
    data, and writes the YAML file back.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the name of the release series to scan',
    )
    parser.add_argument(
        'deliverable',
        help='the base name of the deliverable file',
    )
    parser.add_argument(
        '-v', '--verbose',
        default=False,
        action='store_true',
        help='be more chatty',
    )
    parser.add_argument(
        '-i', '--interactive',
        default=False,
        action='store_true',
        help='Be interactive and only make releases when instructed')
    parser.add_argument(
        'release_type',
        choices=('bugfix', 'feature', 'major', 'milestone', 'rc',
                 'procedural', 'eol', 'em', 'releasefix'),
        help='the type of release to generate',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--force',
        default=False,
        action='store_true',
        help=('force a new tag, even if the HEAD of the '
              'branch is already tagged'),
    )
    parser.add_argument(
        '--debug',
        default=False,
        action='store_true',
        help='show tracebacks on errors',
    )
    parser.add_argument(
        '--stable-branch',
        default=False,
        action='store_true',
        help='create a new stable branch from the release',
    )
    args = parser.parse_args()

    # Set up logging, including making some loggers quiet.
    logging.basicConfig(
        format='%(levelname)7s: %(message)s',
        stream=sys.stdout,
        level=logging.DEBUG if args.verbose else logging.INFO,
    )
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)

    # BUG FIX: this used "args.release_type in 'procedural'", a
    # substring membership test that only worked because no other
    # choice happens to be a substring of 'procedural'. Use equality.
    is_procedural = args.release_type == 'procedural'
    is_retagging = is_procedural or args.release_type == 'releasefix'
    is_eol = args.release_type == 'eol'
    is_em = args.release_type == 'em'
    force_tag = args.force

    workdir = tempfile.mkdtemp(prefix='releases-')
    LOG.info('creating temporary files in %s', workdir)

    def error(msg):
        # With --debug re-raise the exception for a traceback;
        # otherwise exit via argparse's error reporting.
        if args.debug:
            raise msg
        else:
            parser.error(msg)

    def cleanup_workdir():
        if args.cleanup:
            shutil.rmtree(workdir, True)
        else:
            LOG.warning('not cleaning up %s', workdir)
    atexit.register(cleanup_workdir)

    # Allow for independent projects.
    series = args.series
    if series.lstrip('_') == 'independent':
        series = '_independent'

    # Load existing deliverable data.
    try:
        deliverable_info = get_deliverable_data(series, args.deliverable)
    except (IOError, OSError) as e:
        error(e)

    # Ensure we have a list for releases, even if it is empty.
    if deliverable_info.get('releases') is None:
        deliverable_info['releases'] = []

    try:
        release_history = get_release_history(series, args.deliverable)
        this_series_history = release_history[0]
        last_release = get_last_release(
            release_history,
            args.deliverable,
            args.release_type,
        )
    except RuntimeError as err:
        error(err)
    if last_release:
        last_version = last_release['version'].split('.')
    else:
        last_version = None
    LOG.debug('last_version %r', last_version)
    diff_start = None

    add_stable_branch = args.stable_branch or is_procedural

    # Validate new tag can be applied
    if last_version and 'eol' in last_version[0]:
        raise ValueError('Cannot create new release after EOL tagging.')

    if last_version is None:
        # Deliverables that have never been released before should
        # start at 0.1.0, indicating they are not feature complete or
        # stable but have features.
        LOG.debug('defaulting to 0.1.0 for first release')
        new_version_parts = ['0', '1', '0']
    elif args.release_type in ('milestone', 'rc'):
        force_tag = True
        if deliverable_info['release-model'] not in _USES_RCS:
            raise ValueError('Cannot compute RC for {} project {}'.format(
                deliverable_info['release-model'], args.deliverable))
        new_version_parts = increment_milestone_version(
            last_version, args.release_type)
        LOG.debug('computed new version %s release type %s',
                  new_version_parts, args.release_type)
        # We are going to take some special steps for the first
        # release candidate, so figure out if that is what this
        # release will be.
        if args.release_type == 'rc' and new_version_parts[-1][3:] == '1':
            add_stable_branch = True
    elif args.release_type == 'procedural':
        # NOTE(dhellmann): We always compute the new version based on
        # the highest version on the branch, rather than the branch
        # base. If the differences are only patch levels the results
        # do not change, but if there was a minor version update then
        # the new version needs to be incremented based on that.
        new_version_parts = increment_version(
            last_version, (0, feature_increment(last_release), 0))
        # NOTE(dhellmann): Save the SHAs for the commits where the
        # branch was created in each repo, even though that is
        # unlikely to be the same as the last_version, because commits
        # further down the stable branch will not be in the history of
        # the master branch and so we can't tag them as part of the
        # new series *AND* we always want stable branches created from
        # master.
        prev_info = get_last_series_info(series, args.deliverable)
        for b in prev_info['branches']:
            if b['name'].startswith('stable/'):
                last_branch_base = b['location'].split('.')
                break
        else:
            raise ValueError(
                'Could not find a version in branch before {}'.format(series))
        if last_version != last_branch_base:
            LOG.warning('last_version %s branch base %s',
                        '.'.join(last_version),
                        '.'.join(last_branch_base))
        for r in prev_info['releases']:
            if r['version'] == '.'.join(last_branch_base):
                last_version_hashes = {
                    p['repo']: p['hash']
                    for p in r['projects']
                }
                break
        else:
            raise ValueError(
                ('Could not find SHAs for tag '
                 '{} in old deliverable file').format('.'.join(last_version)))
    elif args.release_type == 'releasefix':
        increment = (0, 0, 1)
        new_version_parts = increment_version(last_version, increment)
        last_version_hashes = {
            p['repo']: p['hash']
            for p in last_release['projects']
        }
        # Go back 2 releases so the release announcement includes the
        # actual changes.
        try:
            diff_start_release = this_series_history[-2]
        except IndexError:
            # We do not have 2 releases in this series yet, so go back
            # to the stable branch creation point.
            prev_info = get_last_series_info(series, args.deliverable)
            for b in prev_info['branches']:
                if b['name'].startswith('stable/'):
                    diff_start = b['location']
                    LOG.info(
                        'using branch point from previous '
                        'series as diff-start: %r', diff_start)
                    break
        else:
            diff_start = diff_start_release['version']
            LOG.info('using release from same series as diff-start: %r',
                     diff_start)
    elif is_eol or is_em:
        last_version_hashes = {
            p['repo']: p['hash']
            for p in last_release['projects']
        }
        increment = None
        new_version_parts = None
        # EOL/EM tags look like '<series>-eol' / '<series>-em'.
        new_version = '{}-{}'.format(args.series, args.release_type)
    else:
        increment = {
            'bugfix': (0, 0, 1),
            'feature': (0, feature_increment(last_release), 0),
            'major': (1, 0, 0),
        }[args.release_type]
        new_version_parts = increment_version(last_version, increment)
        LOG.debug('computed new version %s', new_version_parts)

    if new_version_parts is not None:
        # The EOL/EM tag version string is computed above and the parts
        # list is set to None to avoid recomputing it here.
        new_version = '.'.join(new_version_parts)

    if 'releases' not in deliverable_info:
        deliverable_info['releases'] = []

    LOG.info('going from %s to %s', last_version, new_version)

    projects = []
    changes = 0
    for repo in deliverable_info['repository-settings'].keys():
        LOG.info('processing %s', repo)

        # Look for the most recent time the repo was tagged and use
        # that info as the old sha.
        previous_sha = None
        previous_tag = None
        found = False
        for release in reversed(deliverable_info['releases']):
            for project in release['projects']:
                if project['repo'] == repo:
                    previous_sha = project.get('hash')
                    previous_tag = release['version']
                    LOG.info('last tagged as %s at %s',
                             previous_tag, previous_sha)
                    found = True
                    break
            if found:
                break

        if is_retagging or (is_em and
                            deliverable_info['release-model'] != 'untagged'):
            # Always use the last tagged hash, which should be coming
            # from the previous series or last release.
            sha = last_version_hashes[repo]
        else:
            # Figure out the hash for the HEAD of the branch.
            gitutils.clone_repo(workdir, repo)
            branches = gitutils.get_branches(workdir, repo)
            version = 'origin/stable/%s' % series
            if not any(branch for branch in branches
                       if branch.endswith(version)):
                version = 'master'
            sha = gitutils.sha_for_tag(workdir, repo, version)
            # Check out the working repo to the sha
            gitutils.checkout_ref(workdir, repo, sha)

        if is_retagging:
            changes += 1
            LOG.info('re-tagging %s at %s (%s)', repo, sha, previous_tag)
            if is_procedural:
                comment = 'procedural tag to support creating stable branch'
            else:
                comment = 'procedural tag to handle release job failure'
            new_project = {
                'repo': repo,
                'hash': sha,
                'comment': comment,
            }
            projects.append(new_project)
        elif is_eol or is_em:
            changes += 1
            LOG.info('tagging %s %s at %s',
                     repo, args.release_type.upper(), sha)
            new_project = {
                'repo': repo,
                'hash': sha,
            }
            projects.append(new_project)
        elif previous_sha != sha or force_tag:
            # TODO(tonyb): Do this early and also prompt for release type.
            # Once we do that we can probably deprecate interactive-release
            if args.interactive:
                # NOTE(tonyb): This is pretty much just copied from
                # interactive-release
                # NOTE(review): last_version is None for a first
                # release, which would make this join fail — confirm
                # interactive mode is only used with prior releases.
                last_tag = '.'.join(last_version)
                change_lines = list(
                    clean_changes(
                        gitutils.changes_since(workdir, repo,
                                               last_tag).splitlines()))
                max_changes_show = 100
                LOG.info('')
                if last_tag:
                    LOG.info("%s changes to %s since %s are:",
                             len(change_lines), repo, last_tag)
                else:
                    LOG.info("%s changes to %s are:", len(change_lines), repo)
                # BUG FIX: the listing loop previously reused the name
                # 'sha', clobbering the HEAD sha computed above so the
                # release was recorded at the last listed change instead
                # of the branch HEAD. Use distinct loop variables.
                for change_sha, change_descr in \
                        change_lines[0:max_changes_show]:
                    LOG.info("* %s %s", change_sha[:7], change_descr)
                leftover_change_lines = change_lines[max_changes_show:]
                if leftover_change_lines:
                    LOG.info(" and %s more changes...",
                             len(leftover_change_lines))
                LOG.info('')

            changes += 1
            LOG.info('advancing %s from %s (%s) to %s',
                     repo, previous_sha, previous_tag, sha)
            new_project = {
                'repo': repo,
                'hash': sha,
            }
            projects.append(new_project)
        else:
            LOG.info('%s already tagged at most recent commit, skipping',
                     repo)

    new_release_info = {
        'version': new_version,
        'projects': projects,
    }
    if diff_start:
        new_release_info['diff-start'] = diff_start
    deliverable_info['releases'].append(new_release_info)

    if add_stable_branch:
        branch_name = 'stable/{}'.format(series)
        # First check if this branch is already defined
        if 'branches' in deliverable_info:
            for branch in deliverable_info['branches']:
                if branch.get('name') == branch_name:
                    LOG.debug('Branch %s already exists, skipping',
                              branch_name)
                    add_stable_branch = False
                    break
        if add_stable_branch:
            LOG.info('adding stable branch at %s', new_version)
            deliverable_info.setdefault('branches', []).append({
                'name': branch_name,
                'location': new_version,
            })

    create_release = changes > 0
    if create_release and args.interactive:
        create_release = yes_no_prompt(
            'Create a release in %s containing those changes? ' % series)
    if create_release:
        deliverable_filename = 'deliverables/%s/%s.yaml' % (
            series, args.deliverable)
        with open(deliverable_filename, 'w', encoding='utf-8') as f:
            f.write(yamlutils.dumps(deliverable_info))
def main():
    """Add a stable/<series> branch to matching deliverables.

    Scans the deliverable files for the configured series and, for each
    deliverable of a selected type that has at least one release and no
    such branch yet, records a new branch located at the latest release
    version (skipped under --dry-run).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--all',
        default=False,
        action='store_true',
        help='process all deliverables, including release:cycle-trailing',
    )
    parser.add_argument(
        '--verbose', '-v',
        action='store_true',
        default=False,
        help='produce detailed output',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--series',
        default=defaults.RELEASE,
        help='the name of the release series to work on (%(default)s)')
    parser.add_argument(
        '--include-clients',
        action='append_const',
        const='client-library',
        default=['library'],
        dest='types',
    )
    parser.add_argument(
        '--dry-run', '-n',
        default=False,
        action='store_true',
        help='report what action would be taken but do not take it',
    )
    parser.add_argument(
        'deliverable',
        nargs='*',
        default=[],
        help='the name(s) of the deliverable(s) to modify',
    )
    args = parser.parse_args()

    # Chatter only when --verbose was given.
    if args.verbose:
        def verbose(msg):
            print(msg)
    else:
        def verbose(msg):
            pass

    deliverables_dir = args.deliverables_dir

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        # Best-effort removal; never let atexit cleanup raise.
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    pattern = os.path.join(deliverables_dir, args.series, '*.yaml')
    verbose('Scanning {}'.format(pattern))
    new_branch = 'stable/' + args.series

    for filename in sorted(glob.glob(pattern)):
        # Strip the '.yaml' suffix to recover the deliverable name.
        deliverable_name = os.path.basename(filename)[:-5]
        if args.deliverable and deliverable_name not in args.deliverable:
            continue
        with open(filename, 'r', encoding='utf-8') as f:
            deliverable_data = yamlutils.loads(f.read())
        if deliverable_data['type'] not in args.types:
            continue
        verbose('\n{}'.format(filename))

        releases = deliverable_data.get('releases')
        if not releases:
            print('{} has no releases, not branching'.format(
                deliverable_name))
            continue

        branches = deliverable_data.setdefault('branches', [])
        matches = [b for b in branches if b['name'] == new_branch]
        for _ in matches:
            print('{} already has branch {}'.format(
                deliverable_name, new_branch))
        if matches:
            continue

        latest_release = releases[-1]
        print('{} new branch {} at {}'.format(
            deliverable_name, new_branch, latest_release['version']))
        if not args.dry_run:
            branches.append({
                'name': new_branch,
                'location': latest_release['version'],
            })
            with open(filename, 'w', encoding='utf-8') as f:
                f.write(yamlutils.dumps(deliverable_data))
def maybe_create_release(
    release_repo_path,
    deliverable_info,
    last_release,
    change_lines,
    latest_cycle,
    project,
    short_project,
    max_changes_show=100,
    should_prompt=True,
):
    """Show pending changes and interactively record a new release.

    Prints the commits in ``change_lines`` (at most
    ``max_changes_show`` of them), then — when ``should_prompt`` is
    true and the user confirms — prompts for the release metadata and
    either appends the new release to the deliverable file for
    ``latest_cycle`` or, if the user declines to alter an existing
    file, prints the YAML snippet to add manually.
    """
    if last_release:
        print("%s changes to release since %s are:" % (len(change_lines), last_release["version"]))
    else:
        print("%s changes to release are:" % (len(change_lines)))
    for sha, descr in change_lines[0:max_changes_show]:
        print(" %s %s" % (sha, descr))
    leftover_change_lines = change_lines[max_changes_show:]
    if leftover_change_lines:
        print(" and %s more changes..." % len(leftover_change_lines))
    if not should_prompt:
        return
    create_release = yes_no_prompt("Create a release in %s containing" " those changes? " % latest_cycle)
    if create_release:
        # NOTE(harlowja): use of an ordered-dict here is on purpose, so that
        # the ordering here stays similar to what is already being used.
        newest_release_path = os.path.join(release_repo_path, "deliverables", latest_cycle, "%s.yaml" % short_project)
        ok_change = True
        if os.path.exists(newest_release_path):
            # Existing deliverable file: load it and confirm before we
            # rewrite it (re-serializing can drop comments/structure).
            with open(newest_release_path, "rb") as fh:
                newest_release = yamlutils.loads(fh.read())
            ok_change = yes_no_prompt(
                "Alter existing file (reformatting"
                " may lose comments and some existing"
                " yaml indenting/structure)? "
            )
        else:
            # New deliverable file: gather the top-level metadata.
            notes_link = to_unicode(NOTES_URL_TPL % (short_project, latest_cycle))
            notes_link = prompt("Release notes link: ", validator=NoEmptyValidator(), default=notes_link)
            if deliverable_info:
                launchpad_project = to_unicode(deliverable_info["launchpad"])
            else:
                launchpad_project = prompt(
                    "Launchpad project name: ", validator=NoEmptyValidator(), default=to_unicode(short_project)
                )
            team = prompt("Project team: ", validator=NoEmptyValidator(), default=to_unicode(launchpad_project))
            include_pypi_link = yes_no_prompt("Include pypi link? ")
            newest_release = collections.OrderedDict(
                [
                    ("launchpad", launchpad_project),
                    ("include-pypi-link", include_pypi_link),
                    ("release-notes", notes_link),
                    ("releases", []),
                    ("team", team),
                ]
            )
        # Candidate hashes for the release point come from the change list.
        possible_hashes = []
        for sha, _descr in change_lines:
            possible_hashes.append(sha)
        release_kind = prompt(
            "Release type: ", validator=SetValidator(RELEASE_KINDS), completer=WordCompleter(RELEASE_KINDS)
        )
        suggested_version = generate_suggested_next_version(last_release, release_kind)
        if not suggested_version:
            suggested_version = ""
        version = prompt("Release version: ", validator=NoEmptyValidator(), default=to_unicode(suggested_version))
        highlights = prompt("Highlights (esc then enter to" " exit): ", multiline=True)
        highlights = highlights.strip()
        release_hash = prompt(
            "Hash to release at: ",
            validator=SetValidator(possible_hashes),
            completer=WordCompleter(possible_hashes),
            default=possible_hashes[0],
        )
        new_release = collections.OrderedDict(
            [("version", version), ("projects", [collections.OrderedDict([("repo", project), ("hash", release_hash)])])]
        )
        if highlights:
            new_release["highlights"] = highlights
        if not ok_change:
            # User declined to rewrite the file; show what to add by hand.
            new_release = yamlutils.dumps(new_release)
            print("You may manually adjust %s and add:" % newest_release_path)
            print(new_release)
        else:
            try:
                newest_release["releases"].append(new_release)
            except KeyError:
                # Existing file had no 'releases' key yet.
                newest_release["releases"] = [new_release]
            newest_release = yamlutils.dumps(newest_release)
            with open(newest_release_path, "wb") as fh:
                fh.write(newest_release)
def main():
    """Record final releases for deliverables whose latest tag is a
    pre-release.

    For each deliverable in the given series whose most recent release
    version contains a pre-release marker, appends a new release with
    the same project hashes and the final (three-part, .0) version
    number, then rewrites the deliverable file.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--all',
        default=False,
        action='store_true',
        help='process all deliverables, including cycle-trailing ones',
    )
    parser.add_argument(
        '--verbose', '-v',
        action='store_true',
        default=False,
        help='produce detailed output',
    )
    parser.add_argument(
        '--canary',
        action='store_true',
        default=False,
        help='process only a canary final release on release-test',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        'prior_series',
        help='the name of the previous series',
    )
    parser.add_argument(
        'series',
        help='the name of the release series to work on'
    )
    args = parser.parse_args()

    # Chatter only when --verbose was given.
    if args.verbose:
        def verbose(msg):
            print(msg)
    else:
        def verbose(msg):
            pass

    # Set up logging, including making some loggers quiet.
    logging.basicConfig(
        format='%(levelname)7s: %(message)s',
        stream=sys.stdout,
        level=logging.DEBUG if args.verbose else logging.INFO,
    )
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)

    deliverables_dir = args.deliverables_dir

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        # Best-effort removal; never let atexit cleanup raise.
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    verbose('Scanning {}/{}'.format(deliverables_dir, args.series))
    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )
    for deliv in all_deliv.get_deliverables(None, args.series):
        if args.canary and deliv.name != "release-test":
            continue
        verbose('\n{} {}'.format(deliv.name, deliv.model))

        if (deliv.model == 'cycle-trailing' or deliv.type == 'trailing'):
            # Cycle-trailing deliverables are skipped unless --all.
            verbose('# {} is a cycle-trailing project'.format(
                deliv.name))
            if not args.all:
                continue

        if not deliv.releases:
            verbose('# no releases')
            continue
        latest_release = deliv.releases[-1]
        projects = latest_release.projects
        if not projects:
            verbose('# no projects in latest release')
            continue
        # NOTE(review): this treats any version string containing 'a',
        # 'b', or 'rc' as a pre-release — confirm version strings are
        # otherwise purely numeric.
        for pre_rel in ['a', 'b', 'rc']:
            if pre_rel in str(latest_release.version):
                break
        else:  # we did not find any pre_rel
            verbose('# {} was not a release candidate'.format(
                latest_release.version))
            continue

        # The new version is the same as the latest release version
        # without the pre-release component at the end. Make sure it
        # has 3 sets of digits.
        new_version = '.'.join(
            (latest_release.version.split('.')[:-1] + ['0'])[:3]
        )

        branch = 'stable/{}'.format(args.prior_series)
        diff_start = get_prior_branch_point(
            workdir, projects[0].repo.name, branch,
        )

        deliverable_data = deliv.data
        release_data = {
            'version': new_version,
            'projects': deliv.data['releases'][-1]['projects'],
        }
        if diff_start:
            release_data['diff-start'] = diff_start
        deliverable_data['releases'].append(release_data)
        print('new version for {}: {}'.format(
            deliv.name, new_version))

        filename = os.path.join(deliverables_dir, deliv.filename)
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(yamlutils.dumps(deliverable_data))
def main():
    """Compute and record the next release for a deliverable (legacy
    variant).

    Given a series, a deliverable name, and a release type, works out
    the next version (semver bump, milestone/RC, or procedural
    re-tag), resolves the hash to tag for each project of the last
    release, appends the new release (and optionally a stable branch)
    to the deliverable data, and writes the YAML file back when
    anything changed.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the name of the release series to scan',
    )
    parser.add_argument(
        'deliverable',
        help='the base name of the deliverable file',
    )
    # FIXME(dhellmann): Add milestone and rc types.
    parser.add_argument(
        'release_type',
        choices=('bugfix', 'feature', 'major', 'milestone', 'rc',
                 'procedural'),
        help='the type of release to generate',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--force',
        default=False,
        action='store_true',
        help=('force a new tag, even if the HEAD of the '
              'branch is already tagged'),
    )
    parser.add_argument(
        '--stable-branch',
        default=False,
        action='store_true',
        help='create a new stable branch from the release',
    )
    args = parser.parse_args()

    is_procedural = args.release_type == 'procedural'
    force_tag = args.force

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            shutil.rmtree(workdir, True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    # Allow for independent projects.
    series = args.series
    if series.lstrip('_') == 'independent':
        series = '_independent'

    # Load existing deliverable data.
    try:
        deliverable_info = get_deliverable_data(
            series, args.deliverable)
    except (IOError, OSError) as e:
        parser.error(e)

    try:
        last_release = get_last_release(
            deliverable_info,
            series,
            args.deliverable,
            args.release_type,
        )
    except RuntimeError as err:
        parser.error(err)
    last_version = last_release['version'].split('.')

    add_stable_branch = args.stable_branch or is_procedural

    if args.release_type in ('milestone', 'rc'):
        force_tag = True
        if deliverable_info['release-model'] not in _USES_RCS:
            raise ValueError('Cannot compute RC for {} project {}'.format(
                deliverable_info['release-model'], args.deliverable))
        new_version_parts = increment_milestone_version(
            last_version, args.release_type)
        # We are going to take some special steps for the first
        # release candidate, so figure out if that is what this
        # release will be.
        if args.release_type == 'rc' and new_version_parts[-1][3:] == '1':
            add_stable_branch = True
    elif args.release_type == 'procedural':
        # NOTE(dhellmann): We always compute the new version based on
        # the highest version on the branch, rather than the branch
        # base. If the differences are only patch levels the results
        # do not change, but if there was a minor version update then
        # the new version needs to be incremented based on that.
        new_version_parts = increment_version(last_version, (0, 1, 0))
        # NOTE(dhellmann): Save the SHAs for the commits where the
        # branch was created in each repo, even though that is
        # unlikely to be the same as the last_version, because commits
        # further down the stable branch will not be in the history of
        # the master branch and so we can't tag them as part of the
        # new series *AND* we always want stable branches created from
        # master.
        prev_info = get_last_series_info(series, args.deliverable)
        for b in prev_info['branches']:
            if b['name'].startswith('stable/'):
                last_branch_base = b['location'].split('.')
                break
        else:
            raise ValueError(
                'Could not find a version in branch before {}'.format(
                    series)
            )
        if last_version != last_branch_base:
            print('WARNING: last_version {} branch base {}'.format(
                '.'.join(last_version), '.'.join(last_branch_base)))
        for r in prev_info['releases']:
            if r['version'] == '.'.join(last_branch_base):
                last_version_hashes = {
                    p['repo']: p['hash']
                    for p in r['projects']
                }
                break
        else:
            raise ValueError(
                ('Could not find SHAs for tag '
                 '{} in old deliverable file').format(
                    '.'.join(last_version))
            )
    else:
        increment = {
            'bugfix': (0, 0, 1),
            'feature': (0, 1, 0),
            'major': (1, 0, 0),
        }[args.release_type]
        new_version_parts = increment_version(last_version, increment)

    new_version = '.'.join(new_version_parts)

    if 'releases' not in deliverable_info:
        deliverable_info['releases'] = []

    print('going from %s to %s' % (last_version, new_version))

    projects = []
    changes = 0
    for project in last_release['projects']:
        if args.release_type == 'procedural':
            # Always use the last tagged hash, which should be coming
            # from the previous series.
            sha = last_version_hashes[project['repo']]
        else:
            # Figure out the hash for the HEAD of the branch.
            gitutils.clone_repo(workdir, project['repo'])
            branches = gitutils.get_branches(workdir, project['repo'])
            version = 'origin/stable/%s' % series
            if not any(branch for branch in branches
                       if branch.endswith(version)):
                version = 'master'
            sha = gitutils.sha_for_tag(workdir, project['repo'], version)

        if is_procedural:
            changes += 1
            print('re-tagging %s at %s (%s)' % (project['repo'], sha,
                                                last_release['version']))
            new_project = {
                'repo': project['repo'],
                'hash': sha,
                'comment': 'procedural tag to support creating stable branch',
            }
            if 'tarball-base' in project:
                new_project['tarball-base'] = project['tarball-base']
            projects.append(new_project)
        elif project['hash'] != sha or force_tag:
            changes += 1
            print('advancing %s from %s to %s' % (project['repo'],
                                                  project['hash'], sha))
            new_project = {
                'repo': project['repo'],
                'hash': sha,
            }
            if 'tarball-base' in project:
                new_project['tarball-base'] = project['tarball-base']
            projects.append(new_project)
        else:
            print('{} already tagged at most recent commit, skipping'.format(
                project['repo']))

    deliverable_info['releases'].append({
        'version': new_version,
        'projects': projects,
    })

    if add_stable_branch:
        branch_name = 'stable/{}'.format(series)
        # First check if this branch is already defined
        if 'branches' in deliverable_info:
            for branch in deliverable_info['branches']:
                if branch.get('name') == branch_name:
                    # BUG FIX: message previously read 'already existes';
                    # the newer variant of this tool spells it correctly.
                    print('Branch {} already exists, skipping'.format(
                        branch_name))
                    add_stable_branch = False
                    break
        if add_stable_branch:
            print('adding stable branch at {}'.format(new_version))
            deliverable_info.setdefault('branches', []).append({
                'name': branch_name,
                'location': new_version,
            })

    if changes > 0:
        deliverable_filename = 'deliverables/%s/%s.yaml' % (
            series, args.deliverable)
        with open(deliverable_filename, 'w', encoding='utf-8') as f:
            f.write(yamlutils.dumps(deliverable_info))