def eol_tag(args, series, deliverable_info):
    """Record the <series>-eol tag as a release in the deliverable data.

    Scans every repository listed in the deliverable's
    repository-settings for an existing ``<series>-eol`` git tag and,
    when at least one repo carries it, appends a release entry mapping
    that tag to the tagged SHAs. Mutates deliverable_info in place.
    """
    scratch_dir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % scratch_dir)

    def _remove_scratch_dir():
        # Best-effort cleanup at interpreter exit (errors ignored).
        shutil.rmtree(scratch_dir, True)
    atexit.register(_remove_scratch_dir)

    eol_tag_name = '{}-eol'.format(series)
    tagged_projects = []
    for repo_name in deliverable_info['repository-settings'].keys():
        if not gitutils.tag_exists(repo_name, eol_tag_name):
            print('No {} tag for {}'.format(eol_tag_name, repo_name))
            continue
        # Clone so we can resolve the tag to its commit SHA.
        gitutils.clone_repo(scratch_dir, repo_name)
        tagged_projects.append({
            'repo': repo_name,
            'hash': gitutils.sha_for_tag(scratch_dir, repo_name,
                                         eol_tag_name),
        })
    # Only record the EOL release if at least one repo was tagged.
    if tagged_projects:
        deliverable_info['releases'].append({
            'version': eol_tag_name,
            'projects': tagged_projects,
        })
def main():
    """Report on the newest release listed in each modified deliverable file.

    For every deliverable YAML file given (or touched by the latest
    commit), prints governance/team details, then clones each affected
    repository and shows the git logs, requirements diffs, and
    tag/ancestry checks for the commit that would receive the new tag.
    Returns 0.
    """
    if not sys.stdout.encoding:
        # Wrap sys.stdout with a writer that knows how to handle
        # encoding Unicode data.
        import codecs
        wrapped_stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdout = wrapped_stdout
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Default to the deliverable files touched by the most recent commit.
    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # NOTE(review): bare except silently hides any cleanup
            # failure; shutil.rmtree(workdir, ignore_errors=True)
            # would be the clearer equivalent.
            try:
                shutil.rmtree(workdir)
            except:
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    team_data = governance.get_team_data()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        with open(filename, 'r') as f:
            # NOTE(review): yaml.load() without an explicit Loader is
            # deprecated and unsafe on untrusted input; yaml.safe_load(f)
            # is preferred.
            deliverable_info = yaml.load(f.read())

        # The series name is the directory that holds the deliverable file.
        series = os.path.basename(
            os.path.dirname(
                os.path.abspath(filename)
            )
        )
        if series == '_independent':
            default_model = 'independent'
        else:
            default_model = 'no release model specified'

        header('Release model')
        print(deliverable_info.get('release-model', default_model))

        header('Team details')
        if 'team' in deliverable_info:
            team_name = deliverable_info['team']
            team_dict = team_data.get(team_name)
            if team_dict:
                team = governance.Team(team_name, team_dict)
                print('found team %s' % team_name)
                print(' PTL: %(name)s (%(irc)s)\n' % team.ptl)
                deliverable_name = os.path.basename(filename)[:-5]  # remove .yaml
                deliverable = team.deliverables.get(deliverable_name)
                if deliverable:
                    print('found deliverable %s' % deliverable_name)
                    for rn, repo in sorted(deliverable.repositories.items()):
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print(' %s' % t)
                        print('')
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') %
                          (deliverable_name, team_name))
            else:
                print('no team %r found, cannot report on governance status' %
                      team_name)
        else:
            print('no team name given, cannot report on governance status')

        if series == defaults.RELEASE:
            branch = 'master'
        else:
            branch = 'stable/' + series

        # If there are no releases listed, this is probably a new
        # deliverable file for initializing a new series. We don't
        # need to list its changes.
        if not deliverable_info.get('releases'):
            header('No releases')
            print('no releases were found, assuming an initialization file')
            continue

        # assume the releases are in order and take the last one
        new_release = deliverable_info['releases'][-1]

        # build a map between version numbers and the release details
        by_version = {
            str(r['version']): r
            for r in deliverable_info['releases']
        }

        for project in new_release['projects']:
            tag_exists = gitutils.commit_exists(
                project['repo'],
                new_release['version'],
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project['repo'], new_release['version']))

            # Check out the code.
            print('\nChecking out repository {}'.format(project['repo']))
            subprocess.check_call(
                ['zuul-cloner',
                 '--branch', branch,
                 '--workspace', workdir,
                 'git://git.openstack.org',
                 project['repo'],
                 ]
            )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project['repo'],
                '{}^'.format(project['hash'])
            )
            previous_release = by_version.get(previous_tag)

            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x['repo']: x
                    for x in previous_release['projects']
                }.get(project['repo'])
                if previous_project is not None:
                    # NOTE(review): this reassignment is a no-op —
                    # start_range is already previous_tag.
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project['hash'])
            else:
                git_range = project['hash']

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release['version'])
            print('\ngit describe %s\n' % project['hash'])
            try:
                subprocess.check_call(
                    ['git', 'describe', project['hash']],
                    cwd=os.path.join(workdir, project['repo']),
                )
            except subprocess.CalledProcessError as e:
                # git describe fails when no tag is reachable; not fatal.
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project['repo'],
                title='Check existing tags',
                ref=project['hash'],
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project['repo'],
                title='Branches containing commit',
                commit=project['hash'],
            )

            header('Relationship to HEAD')
            if series == '_independent':
                # For independent deliverables use the first real branch
                # containing the commit ('->' entries are symbolic refs).
                interesting_branches = sorted(
                    b for b in branches
                    if '->' not in b
                )
                tag_branch = interesting_branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    'HEAD',
                )
                print('HEAD of {} is {}'.format(branch, head_sha))
                tag_branch = branch

            requested_sha = gitutils.sha_for_tag(
                workdir,
                project['repo'],
                project['hash'],
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir, project['repo'], 'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            # Show any requirements changes in the upcoming release.
            # Include setup.cfg, in case the project uses "extras".
            if start_range:
                git_diff(workdir, project['repo'], git_range,
                         '*requirements*.txt')
                git_diff(workdir, project['repo'], git_range, 'setup.cfg')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir, project['repo'],
                    'Release %s will include' % new_release['version'],
                    git_range,
                    extra_args=['--graph', '--oneline', '--decorate',
                                '--topo-order'])
            git_log(workdir, project['repo'], 'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                # NOTE(review): previous_release is a release dict —
                # the later code passes previous_release['version'] to
                # git; confirm commit_exists really expects the dict here.
                previous_tag_exists = gitutils.commit_exists(
                    project['repo'],
                    previous_release,
                )
            if previous_tag_exists:
                git_log(
                    workdir, project['repo'],
                    'Patches in previous release but not in this one',
                    [project['hash'],
                     '--not',
                     previous_release['version']],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )
                header('New release %s includes previous release %s' %
                       (new_release['version'], previous_release['version']))
                if not tag_exists:
                    subprocess.check_call(
                        ['git', 'tag', new_release['version'],
                         project['hash']],
                        cwd=os.path.join(workdir, project['repo']),
                    )
                print('\ngit tag --contains %s\n' %
                      previous_release['version'])
                containing_tags = subprocess.check_output(
                    ['git', 'tag',
                     '--contains',
                     previous_release['version']],
                    cwd=os.path.join(workdir, project['repo']),
                ).split()
                # NOTE(review): on Python 3 check_output returns bytes,
                # so this membership test compares str against a list of
                # bytes and is always True — confirm target interpreter.
                print('Containing tags:', containing_tags)
                if new_release['version'] not in containing_tags:
                    print('WARNING: Missing %s' % new_release['version'])
                else:
                    print('Found new version %s' % new_release['version'])

                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project['repo'],
                    previous_release['version'],
                    project['hash'],
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

    return 0
def main():
    """Propose the next release of a deliverable.

    Advances every project listed in the deliverable's last release to
    the tip of its stable branch (or master when no stable branch
    exists), computes the next version number for the requested release
    type, and appends a new release block to the deliverable file when
    at least one repository actually has new commits.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the name of the release series to scan',
    )
    parser.add_argument(
        'deliverable',
        help='the base name of the deliverable file',
    )
    # FIXME(dhellmann): Add milestone and rc types.
    parser.add_argument(
        'release_type',
        choices=('bugfix', 'feature', 'major'),
        help='the type of release to generate',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    args = parser.parse_args()

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # Best-effort removal; ignore_errors replaces the previous
            # bare "except: pass", which hid every failure silently.
            shutil.rmtree(workdir, ignore_errors=True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    # Allow for independent projects.
    series = args.series
    if series.lstrip('_') == 'independent':
        series = '_independent'

    # Load existing deliverable data.
    deliverable_filename = 'deliverables/%s/%s.yaml' % (
        series, args.deliverable)
    try:
        with open(deliverable_filename, 'r') as f:
            deliverable_info = yaml.safe_load(f)
    except (IOError, OSError) as e:
        parser.error(e)

    # Determine the new version number: bump the component matching the
    # release type and reset all lower components to zero, so e.g. a
    # 'feature' release takes 1.2.3 to 1.3.0 (previously the lower
    # components were carried over unchanged, producing 1.3.3).
    last_release = deliverable_info['releases'][-1]
    last_version = last_release['version'].split('.')
    increment = {
        'bugfix': (0, 0, 1),
        'feature': (0, 1, 0),
        'major': (1, 0, 0),
    }[args.release_type]
    new_version_parts = []
    bumped = False
    for cur, inc in zip(last_version, increment):
        if bumped:
            new_version_parts.append('0')
        else:
            new_version_parts.append(str(int(cur) + inc))
            if inc:
                bumped = True
    new_version = '.'.join(new_version_parts)
    # Join the parts so we print '1.2.3', not the raw Python list.
    print('going from %s to %s' % ('.'.join(last_version), new_version))

    projects = []
    changes = 0
    for project in last_release['projects']:
        gitutils.clone_repo(workdir, project['repo'])

        # Prefer the tip of the series' stable branch; fall back to
        # master when no stable branch exists yet.
        branches = gitutils.get_branches(workdir, project['repo'])
        version = 'origin/stable/%s' % series
        if not any(branch for branch in branches
                   if branch.endswith(version)):
            version = 'master'

        sha = gitutils.sha_for_tag(workdir, project['repo'], version)
        if project['hash'] != sha:
            changes += 1
            print('advancing %s from %s to %s' %
                  (project['repo'], project['hash'], sha))
        projects.append({
            'repo': project['repo'],
            'hash': sha,
        })

    # The YAML dump formatter produces results that aren't very nice
    # to read, so we format the output ourselves. The file is only
    # regenerated if there are in fact changes to be made.
    if changes > 0:
        with open(deliverable_filename, 'a') as f:
            f.write(RELEASE_TEMPLATE.format(version=new_version))
            for p in projects:
                f.write(PROJECT_TEMPLATE.format(**p))
def main():
    """Create a new release entry for a deliverable.

    Computes the next version for the requested release type (bugfix,
    feature, major, milestone, rc, procedural, releasefix, eol, em),
    determines the SHA to tag in every repository of the deliverable,
    appends the release (and optionally a new stable branch) to the
    in-memory data, and rewrites the deliverable file when there is at
    least one change to record.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the name of the release series to scan',
    )
    parser.add_argument(
        'deliverable',
        help='the base name of the deliverable file',
    )
    parser.add_argument(
        '-v', '--verbose',
        default=False,
        action='store_true',
        help='be more chatty',
    )
    parser.add_argument(
        '-i', '--interactive',
        default=False,
        action='store_true',
        help='Be interactive and only make releases when instructed')
    parser.add_argument(
        'release_type',
        choices=('bugfix', 'feature', 'major', 'milestone', 'rc',
                 'procedural', 'eol', 'em', 'releasefix'),
        help='the type of release to generate',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--force',
        default=False,
        action='store_true',
        help=('force a new tag, even if the HEAD of the '
              'branch is already tagged'),
    )
    parser.add_argument(
        '--debug',
        default=False,
        action='store_true',
        help='show tracebacks on errors',
    )
    parser.add_argument(
        '--stable-branch',
        default=False,
        action='store_true',
        help='create a new stable branch from the release',
    )
    args = parser.parse_args()

    # Set up logging, including making some loggers quiet.
    logging.basicConfig(
        format='%(levelname)7s: %(message)s',
        stream=sys.stdout,
        level=logging.DEBUG if args.verbose else logging.INFO,
    )
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)

    # Use equality, not substring membership: the previous
    # "args.release_type in 'procedural'" only worked by accident
    # because no other choice happens to be a substring of 'procedural'.
    is_procedural = args.release_type == 'procedural'
    is_retagging = is_procedural or args.release_type == 'releasefix'
    is_eol = args.release_type == 'eol'
    is_em = args.release_type == 'em'
    force_tag = args.force

    workdir = tempfile.mkdtemp(prefix='releases-')
    LOG.info('creating temporary files in %s', workdir)

    def error(msg):
        # msg is an exception instance at every call site, so re-raising
        # it under --debug produces the full traceback.
        if args.debug:
            raise msg
        else:
            parser.error(msg)

    def cleanup_workdir():
        if args.cleanup:
            # Best-effort removal at exit.
            shutil.rmtree(workdir, ignore_errors=True)
        else:
            LOG.warning('not cleaning up %s', workdir)
    atexit.register(cleanup_workdir)

    # Allow for independent projects.
    series = args.series
    if series.lstrip('_') == 'independent':
        series = '_independent'

    # Load existing deliverable data.
    try:
        deliverable_info = get_deliverable_data(series, args.deliverable)
    except (IOError, OSError) as e:
        error(e)

    # Ensure we have a list for releases, even if it is empty.
    if deliverable_info.get('releases') is None:
        deliverable_info['releases'] = []

    try:
        release_history = get_release_history(series, args.deliverable)
        this_series_history = release_history[0]
        last_release = get_last_release(
            release_history,
            args.deliverable,
            args.release_type,
        )
    except RuntimeError as err:
        error(err)
    if last_release:
        last_version = last_release['version'].split('.')
    else:
        last_version = None
    LOG.debug('last_version %r', last_version)
    diff_start = None

    add_stable_branch = args.stable_branch or is_procedural

    # Validate new tag can be applied
    if last_version and 'eol' in last_version[0]:
        raise ValueError('Cannot create new release after EOL tagging.')

    if last_version is None:
        # Deliverables that have never been released before should
        # start at 0.1.0, indicating they are not feature complete or
        # stable but have features.
        LOG.debug('defaulting to 0.1.0 for first release')
        new_version_parts = ['0', '1', '0']
    elif args.release_type in ('milestone', 'rc'):
        force_tag = True
        if deliverable_info['release-model'] not in _USES_RCS:
            raise ValueError('Cannot compute RC for {} project {}'.format(
                deliverable_info['release-model'], args.deliverable))
        new_version_parts = increment_milestone_version(
            last_version, args.release_type)
        LOG.debug('computed new version %s release type %s',
                  new_version_parts, args.release_type)
        # We are going to take some special steps for the first
        # release candidate, so figure out if that is what this
        # release will be. The last part looks like '0rc1', so slicing
        # off the first 3 characters leaves the candidate number.
        if args.release_type == 'rc' and new_version_parts[-1][3:] == '1':
            add_stable_branch = True
    elif args.release_type == 'procedural':
        # NOTE(dhellmann): We always compute the new version based on
        # the highest version on the branch, rather than the branch
        # base. If the differences are only patch levels the results
        # do not change, but if there was a minor version update then
        # the new version needs to be incremented based on that.
        new_version_parts = increment_version(
            last_version, (0, feature_increment(last_release), 0))

        # NOTE(dhellmann): Save the SHAs for the commits where the
        # branch was created in each repo, even though that is
        # unlikely to be the same as the last_version, because commits
        # further down the stable branch will not be in the history of
        # the master branch and so we can't tag them as part of the
        # new series *AND* we always want stable branches created from
        # master.
        prev_info = get_last_series_info(series, args.deliverable)
        for b in prev_info['branches']:
            if b['name'].startswith('stable/'):
                last_branch_base = b['location'].split('.')
                break
        else:
            raise ValueError(
                'Could not find a version in branch before {}'.format(series))
        if last_version != last_branch_base:
            LOG.warning('last_version %s branch base %s',
                        '.'.join(last_version), '.'.join(last_branch_base))
        for r in prev_info['releases']:
            if r['version'] == '.'.join(last_branch_base):
                last_version_hashes = {
                    p['repo']: p['hash']
                    for p in r['projects']
                }
                break
        else:
            raise ValueError(
                ('Could not find SHAs for tag '
                 '{} in old deliverable file').format('.'.join(last_version)))
    elif args.release_type == 'releasefix':
        increment = (0, 0, 1)
        new_version_parts = increment_version(last_version, increment)
        last_version_hashes = {
            p['repo']: p['hash']
            for p in last_release['projects']
        }
        # Go back 2 releases so the release announcement includes the
        # actual changes.
        try:
            diff_start_release = this_series_history[-2]
        except IndexError:
            # We do not have 2 releases in this series yet, so go back
            # to the stable branch creation point.
            prev_info = get_last_series_info(series, args.deliverable)
            for b in prev_info['branches']:
                if b['name'].startswith('stable/'):
                    diff_start = b['location']
                    LOG.info('using branch point from previous '
                             'series as diff-start: %r', diff_start)
                    break
        else:
            diff_start = diff_start_release['version']
            LOG.info('using release from same series as diff-start: %r',
                     diff_start)
    elif is_eol or is_em:
        last_version_hashes = {
            p['repo']: p['hash']
            for p in last_release['projects']
        }
        increment = None
        new_version_parts = None
        # EOL/EM releases use a named tag instead of a version number.
        new_version = '{}-{}'.format(args.series, args.release_type)
    else:
        increment = {
            'bugfix': (0, 0, 1),
            'feature': (0, feature_increment(last_release), 0),
            'major': (1, 0, 0),
        }[args.release_type]
        new_version_parts = increment_version(last_version, increment)
        LOG.debug('computed new version %s', new_version_parts)

    if new_version_parts is not None:
        # The EOL/EM tag version string is computed above and the parts
        # list is set to None to avoid recomputing it here.
        new_version = '.'.join(new_version_parts)

    if 'releases' not in deliverable_info:
        deliverable_info['releases'] = []

    LOG.info('going from %s to %s', last_version, new_version)

    projects = []
    changes = 0
    for repo in deliverable_info['repository-settings'].keys():
        LOG.info('processing %s', repo)

        # Look for the most recent time the repo was tagged and use
        # that info as the old sha.
        previous_sha = None
        previous_tag = None
        found = False
        for release in reversed(deliverable_info['releases']):
            for project in release['projects']:
                if project['repo'] == repo:
                    previous_sha = project.get('hash')
                    previous_tag = release['version']
                    LOG.info('last tagged as %s at %s',
                             previous_tag, previous_sha)
                    found = True
                    break
            if found:
                break

        if is_retagging or (is_em and
                            deliverable_info['release-model'] != 'untagged'):
            # Always use the last tagged hash, which should be coming
            # from the previous series or last release.
            sha = last_version_hashes[repo]
        else:
            # Figure out the hash for the HEAD of the branch.
            gitutils.clone_repo(workdir, repo)
            branches = gitutils.get_branches(workdir, repo)
            version = 'origin/stable/%s' % series
            if not any(branch for branch in branches
                       if branch.endswith(version)):
                version = 'master'
            sha = gitutils.sha_for_tag(workdir, repo, version)
            # Check out the working repo to the sha
            gitutils.checkout_ref(workdir, repo, sha)

        if is_retagging:
            changes += 1
            LOG.info('re-tagging %s at %s (%s)', repo, sha, previous_tag)
            if is_procedural:
                comment = 'procedural tag to support creating stable branch'
            else:
                comment = 'procedural tag to handle release job failure'
            new_project = {
                'repo': repo,
                'hash': sha,
                'comment': comment,
            }
            projects.append(new_project)
        elif is_eol or is_em:
            changes += 1
            LOG.info('tagging %s %s at %s',
                     repo, args.release_type.upper(), sha)
            new_project = {
                'repo': repo,
                'hash': sha,
            }
            projects.append(new_project)
        elif previous_sha != sha or force_tag:
            # TODO(tonyb): Do this early and also prompt for release type.
            # Once we do that we can probably deprecate interactive-release
            if args.interactive:
                # NOTE(tonyb): This is pretty much just copied from
                # interactive-release
                last_tag = '.'.join(last_version)
                change_lines = list(
                    clean_changes(
                        gitutils.changes_since(workdir, repo,
                                               last_tag).splitlines()))
                max_changes_show = 100
                LOG.info('')
                if last_tag:
                    LOG.info("%s changes to %s since %s are:",
                             len(change_lines), repo, last_tag)
                else:
                    LOG.info("%s changes to %s are:",
                             len(change_lines), repo)
                # Use a distinct loop variable: the previous code bound
                # 'sha' here, clobbering the release SHA computed above
                # so the wrong hash could be recorded for the release.
                for change_sha, descr in change_lines[0:max_changes_show]:
                    LOG.info("* %s %s", change_sha[:7], descr)
                leftover_change_lines = change_lines[max_changes_show:]
                if leftover_change_lines:
                    LOG.info(" and %s more changes...",
                             len(leftover_change_lines))
                LOG.info('')

            changes += 1
            LOG.info('advancing %s from %s (%s) to %s',
                     repo, previous_sha, previous_tag, sha)
            new_project = {
                'repo': repo,
                'hash': sha,
            }
            projects.append(new_project)
        else:
            LOG.info('%s already tagged at most recent commit, skipping',
                     repo)

    new_release_info = {
        'version': new_version,
        'projects': projects,
    }
    if diff_start:
        new_release_info['diff-start'] = diff_start
    deliverable_info['releases'].append(new_release_info)

    if add_stable_branch:
        branch_name = 'stable/{}'.format(series)
        # First check if this branch is already defined
        if 'branches' in deliverable_info:
            for branch in deliverable_info['branches']:
                if branch.get('name') == branch_name:
                    LOG.debug('Branch %s already exists, skipping',
                              branch_name)
                    add_stable_branch = False
                    break
        if add_stable_branch:
            LOG.info('adding stable branch at %s', new_version)
            deliverable_info.setdefault('branches', []).append({
                'name': branch_name,
                'location': new_version,
            })

    create_release = changes > 0
    if create_release and args.interactive:
        create_release = yes_no_prompt(
            'Create a release in %s containing those changes? ' % series)

    if create_release:
        deliverable_filename = 'deliverables/%s/%s.yaml' % (
            series, args.deliverable)
        with open(deliverable_filename, 'w', encoding='utf-8') as f:
            f.write(yamlutils.dumps(deliverable_info))
def main():
    """Report on the newest release in each modified deliverable file,
    with awareness of the milestone-based release model.

    Like the plain report, but also detects whether the deliverable
    uses the cycle-with-milestones model and warns when a version looks
    like a pre-release for a project that does not use milestones.
    Returns 0.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Default to the deliverable files touched by the most recent commit.
    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # NOTE(review): bare except silently hides cleanup failures;
            # shutil.rmtree(workdir, ignore_errors=True) is the clearer
            # equivalent.
            try:
                shutil.rmtree(workdir)
            except:
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    team_data = governance.get_team_data()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        with open(filename, 'r') as f:
            # NOTE(review): yaml.load() without an explicit Loader is
            # deprecated and unsafe; yaml.safe_load(f) is preferred.
            deliverable_info = yaml.load(f.read())

        # By default assume the project does not use milestones.
        uses_milestones = False

        header('Team details')
        if 'team' in deliverable_info:
            team_name = deliverable_info['team']
            team_dict = team_data.get(team_name)
            if team_dict:
                team = governance.Team(team_name, team_dict)
                print('found team %s' % team_name)
                print(' PTL: %(name)s (%(irc)s)\n' % team.ptl)
                deliverable_name = os.path.basename(filename)[:-5]  # remove .yaml
                deliverable = team.deliverables.get(deliverable_name)
                if deliverable:
                    print('found deliverable %s' % deliverable_name)
                    for rn, repo in sorted(deliverable.repositories.items()):
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print(' %s' % t)
                        print('')
                        # NOTE(review): only the last repo's tags decide
                        # this flag; earlier repos are overwritten —
                        # confirm that is intended.
                        uses_milestones = \
                            'release:cycle-with-milestones' in repo.tags
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') %
                          (deliverable_name, team_name))
            else:
                print('no team %r found, cannot report on governance status' %
                      team_name)
        else:
            print('no team name given, cannot report on governance status')

        if uses_milestones:
            print('uses milestones')

        # The series name is the directory that holds the deliverable file.
        series = os.path.basename(
            os.path.dirname(
                os.path.abspath(filename)
            )
        )
        if series == defaults.RELEASE:
            branch = 'master'
        else:
            branch = 'stable/' + series

        # assume the releases are in order and take the last one
        new_release = deliverable_info['releases'][-1]

        # Warn if the new release looks like a milestone release but
        # the project does not use milestones.
        if not uses_milestones:
            for pre_indicator in ['a', 'b', 'rc']:
                if pre_indicator in new_release['version']:
                    # NOTE(review): deliverable_name is only bound when
                    # governance data was found above; this print can
                    # raise NameError otherwise — verify.
                    print(('WARNING: %s looks like a pre-release '
                           'but %s does not use milestones') %
                          (new_release['version'], deliverable_name))

        # build a map between version numbers and the release details
        by_version = {
            str(r['version']): r
            for r in deliverable_info['releases']
        }

        for project in new_release['projects']:
            tag_exists = gitutils.commit_exists(
                project['repo'],
                new_release['version'],
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project['repo'], new_release['version']))

            # Check out the code.
            print('\nChecking out repository {}'.format(project['repo']))
            subprocess.check_call(
                ['zuul-cloner',
                 '--branch', branch,
                 '--workspace', workdir,
                 'git://git.openstack.org',
                 project['repo'],
                 ]
            )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project['repo'],
                '{}^'.format(project['hash'])
            )
            previous_release = by_version.get(previous_tag)

            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x['repo']: x
                    for x in previous_release['projects']
                }.get(project['repo'])
                if previous_project is not None:
                    # NOTE(review): this reassignment is a no-op —
                    # start_range is already previous_tag.
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project['hash'])
            else:
                git_range = project['hash']

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release['version'])
            print('\ngit describe %s\n' % project['hash'])
            try:
                subprocess.check_call(
                    ['git', 'describe', project['hash']],
                    cwd=os.path.join(workdir, project['repo']),
                )
            except subprocess.CalledProcessError as e:
                # git describe fails when no tag is reachable; not fatal.
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project['repo'],
                title='Check existing tags',
                ref=project['hash'],
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project['repo'],
                title='Branches containing commit',
                commit=project['hash'],
            )

            header('Relationship to HEAD')
            if series == '_independent':
                # For independent deliverables use the first real branch
                # containing the commit ('->' entries are symbolic refs).
                interesting_branches = sorted(
                    b for b in branches
                    if '->' not in b
                )
                tag_branch = interesting_branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    'HEAD',
                )
                print('HEAD of {} is {}'.format(branch, head_sha))
                tag_branch = branch

            requested_sha = gitutils.sha_for_tag(
                workdir,
                project['repo'],
                project['hash'],
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir, project['repo'], 'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            # Show any requirements changes in the upcoming release.
            if start_range:
                git_diff(workdir, project['repo'], git_range,
                         '*requirements*.txt')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir, project['repo'],
                    'Release %s will include' % new_release['version'],
                    git_range,
                    extra_args=['--graph', '--oneline', '--decorate',
                                '--topo-order'])
            git_log(workdir, project['repo'], 'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                # NOTE(review): previous_release is a release dict —
                # the later code passes previous_release['version'] to
                # git; confirm commit_exists really expects the dict.
                previous_tag_exists = gitutils.commit_exists(
                    project['repo'],
                    previous_release,
                )
            if previous_tag_exists:
                git_log(
                    workdir, project['repo'],
                    'Patches in previous release but not in this one',
                    [project['hash'],
                     '--not',
                     previous_release['version']],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )
                header('New release %s includes previous release %s' %
                       (new_release['version'], previous_release['version']))
                if not tag_exists:
                    subprocess.check_call(
                        ['git', 'tag', new_release['version'],
                         project['hash']],
                        cwd=os.path.join(workdir, project['repo']),
                    )
                print('\ngit tag --contains %s\n' %
                      previous_release['version'])
                containing_tags = subprocess.check_output(
                    ['git', 'tag',
                     '--contains',
                     previous_release['version']],
                    cwd=os.path.join(workdir, project['repo']),
                ).split()
                # NOTE(review): on Python 3 check_output returns bytes,
                # so this membership test compares str against a list of
                # bytes — confirm target interpreter.
                print('Containing tags:', containing_tags)
                if new_release['version'] not in containing_tags:
                    print('WARNING: Missing %s' % new_release['version'])
                else:
                    print('Found new version %s' % new_release['version'])

                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project['repo'],
                    previous_release['version'],
                    project['hash'],
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

    return 0
def main():
    """Show the changes included in the newest release of each
    modified deliverable file.

    For every project in the most recent release entry, clones the
    repository and prints the git log between the previous release (or
    latest tag) and the commit being released, plus any unreleased
    commits sitting above it on the branch. Returns 0.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # Best-effort removal; ignore_errors replaces the previous
            # bare "except: pass", which hid every failure silently.
            shutil.rmtree(workdir, ignore_errors=True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\nChecking %s' % filename)
        with open(filename, 'r') as f:
            # safe_load refuses arbitrary YAML tags, unlike the
            # deprecated loader-less yaml.load().
            deliverable_info = yaml.safe_load(f)

        # The series name is the directory that holds the deliverable file.
        series = os.path.basename(
            os.path.dirname(
                os.path.abspath(filename)
            )
        )
        if series == defaults.RELEASE:
            branch = 'master'
        else:
            branch = 'stable/' + series

        # assume the releases are in order and take the last two
        new_release = deliverable_info['releases'][-1]
        if len(deliverable_info['releases']) >= 2:
            previous_release = deliverable_info['releases'][-2]
        else:
            previous_release = None

        for project in new_release['projects']:
            if gitutils.commit_exists(project['repo'],
                                      new_release['version']):
                print('%s %s exists already' %
                      (project['repo'], new_release['version']))
                continue

            # Check out the code.
            subprocess.check_call(
                ['zuul-cloner',
                 '--branch', branch,
                 '--workspace', workdir,
                 'git://git.openstack.org',
                 project['repo'],
                 ]
            )

            # Start the log range at the previous release, falling back
            # to the most recent tag in the repository.
            start_range = (previous_release['projects'][0]['hash']
                           if previous_release
                           else None)
            if not start_range:
                start_range = (
                    gitutils.get_latest_tag(workdir, project['repo']) or
                    None
                )
            if start_range:
                git_range = '%s..%s' % (start_range, project['hash'])
            else:
                git_range = project['hash']

            # Show the changes since the last release.
            git_log(workdir, project['repo'],
                    'Release %s will include' % new_release['version'],
                    git_range)

            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            head_sha = gitutils.sha_for_tag(workdir, project['repo'], 'HEAD')
            requested_sha = gitutils.sha_for_tag(
                workdir,
                project['repo'],
                project['hash'],
            )
            if head_sha == requested_sha:
                print('Request releases from HEAD on %s' % branch)
            else:
                git_log(workdir, project['repo'], 'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha))

            # Show more details about the commit being tagged.
            print()
            print('git describe %s' % project['hash'])
            try:
                subprocess.check_call(
                    ['git', 'describe', project['hash']],
                    cwd=os.path.join(workdir, project['repo']),
                )
            except subprocess.CalledProcessError as e:
                # git describe fails when no tag is reachable; not fatal.
                print('WARNING: Could not run git describe: %s' % e)

    return 0
def validate_releases(deliverable_info, zuul_layout,
                      series_name,
                      workdir,
                      mk_warning, mk_error):
    """Apply validation rules to the 'releases' list for the deliverable.

    :param deliverable_info: parsed deliverable YAML data
    :param zuul_layout: parsed zuul layout data, used to verify that
        release jobs are configured for each repository
    :param series_name: name of the release series being validated
    :param workdir: scratch directory where repositories are cloned
    :param mk_warning: callable taking a message string; records a warning
    :param mk_error: callable taking a message string; records an error
    """
    release_model = get_model(deliverable_info, series_name)
    is_independent = (release_model == 'independent')

    # Remember which entries are new so we can verify that they
    # appear at the end of the file.
    new_releases = {}

    release_type = deliverable_info.get('release-type', 'std')
    link_mode = deliverable_info.get('artifact-link-mode', 'tarball')

    # Track the previous release so each new entry can be checked for
    # descendancy and for covering the same set of repositories.
    prev_version = None
    prev_projects = set()
    for release in deliverable_info.get('releases', []):
        for project in release['projects']:
            # Check for release jobs (if we ship a tarball)
            if link_mode != 'none':
                project_config.require_release_jobs_for_repo(
                    deliverable_info, zuul_layout, project['repo'],
                    release_type, mk_warning, mk_error,
                )
            # Check the SHA specified for the tag.
            print('%s SHA %s ' % (project['repo'], project['hash']))
            if not is_a_hash(project['hash']):
                mk_error(
                    ('%(repo)s version %(version)s release from '
                     '%(hash)r, which is not a hash') % {
                         'repo': project['repo'],
                         'hash': project['hash'],
                         'version': release['version'],
                     }
                )
            else:
                # Report if the SHA exists or not (an error if it
                # does not).
                sha_exists = gitutils.commit_exists(
                    project['repo'], project['hash'],
                )
                if not sha_exists:
                    mk_error('No commit %(hash)r in %(repo)r'
                             % project)
                    # No point in running extra checks if the SHA just
                    # doesn't exist.
                    continue
                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                version_exists = gitutils.tag_exists(
                    project['repo'], release['version'],
                )
                gitutils.clone_repo(workdir, project['repo'])
                if version_exists:
                    # An existing tag must point at the hash recorded
                    # in the deliverable file.
                    actual_sha = gitutils.sha_for_tag(
                        workdir,
                        project['repo'],
                        release['version'],
                    )
                    if actual_sha != project['hash']:
                        mk_error(
                            ('Version %s in %s is on '
                             'commit %s instead of %s') %
                            (release['version'],
                             project['repo'],
                             actual_sha,
                             project['hash']))
                else:
                    print('Found new version {}'.format(release['version']))
                    new_releases[release['version']] = release
                    if prev_projects and project['repo'] not in prev_projects:
                        # New repo in this release; skip the ancestry
                        # checks, since there is no previous tag to
                        # compare against in this repository.
                        print('not included in previous release for %s: %s' %
                              (prev_version, ', '.join(sorted(prev_projects))))
                    else:
                        # Validate the version string format itself.
                        for e in versionutils.validate_version(
                                release['version'],
                                release_type=release_type,
                                pre_ok=(release_model in _USES_PREVER)):
                            msg = ('could not validate version %r: %s' %
                                   (release['version'], e))
                            mk_error(msg)
                        if is_independent:
                            mk_warning('skipping descendant test for '
                                       'independent project, verify '
                                       'branch manually')
                        elif not prev_version:
                            # If this is the first version in the series,
                            # check that the commit is actually on the
                            # targeted branch.
                            if not gitutils.check_branch_sha(workdir,
                                                             project['repo'],
                                                             series_name,
                                                             defaults.RELEASE,
                                                             project['hash']):
                                msg = '%s %s not present in %s branch' % (
                                    project['repo'],
                                    project['hash'],
                                    series_name,
                                )
                                mk_error(msg)
                        else:
                            # Check to see if we are re-tagging the same
                            # commit with a new version.
                            old_sha = gitutils.sha_for_tag(
                                workdir,
                                project['repo'],
                                prev_version,
                            )
                            if old_sha == project['hash']:
                                # FIXME(dhellmann): This needs a test.
                                print('Retagging the SHA with a new version')
                            else:
                                # Check to see if the commit for the new
                                # version is in the ancestors of the
                                # previous release, meaning it is actually
                                # merged into the branch.
                                is_ancestor = gitutils.check_ancestry(
                                    workdir,
                                    project['repo'],
                                    prev_version,
                                    project['hash'],
                                )
                                if not is_ancestor:
                                    mk_error(
                                        '%s %s receiving %s '
                                        'is not a descendant of %s' % (
                                            project['repo'],
                                            project['hash'],
                                            release['version'],
                                            prev_version,
                                        )
                                    )
        prev_version = release['version']
        prev_projects = set(p['repo'] for p in release['projects'])

    # Make sure that new entries have been appended to the file.
    for v, nr in new_releases.items():
        if nr != deliverable_info['releases'][-1]:
            msg = ('new release %s must be listed last, '
                   'with one new release per patch' % nr['version'])
            mk_error(msg)
def validate_releases(deliverable_info, zuul_layout,
                      series_name,
                      workdir,
                      mk_warning, mk_error):
    """Apply validation rules to the 'releases' list for the deliverable.

    :param deliverable_info: parsed deliverable YAML data
    :param zuul_layout: parsed zuul layout data, used to verify that
        release jobs are configured for each repository
    :param series_name: name of the release series being validated
    :param workdir: scratch directory where repositories are cloned
    :param mk_warning: callable taking a message string; records a warning
    :param mk_error: callable taking a message string; records an error
    """
    release_model = get_model(deliverable_info, series_name)
    is_independent = (release_model == 'independent')

    # Remember which entries are new so we can verify that they
    # appear at the end of the file.
    new_releases = {}

    release_type = deliverable_info.get('release-type', 'std')
    link_mode = deliverable_info.get('artifact-link-mode', 'tarball')

    # Track the previous release so each new entry can be checked for
    # descendancy and for covering the same set of repositories.
    prev_version = None
    prev_projects = set()
    for release in deliverable_info.get('releases', []):
        print('checking %s' % release['version'])
        for project in release['projects']:
            # Check for release jobs (if we ship a tarball)
            if link_mode != 'none':
                project_config.require_release_jobs_for_repo(
                    deliverable_info, zuul_layout, project['repo'],
                    release_type, mk_warning, mk_error,
                )
            # Check the SHA specified for the tag.
            print('%s SHA %s ' % (project['repo'], project['hash']))
            if not is_a_hash(project['hash']):
                mk_error(
                    ('%(repo)s version %(version)s release from '
                     '%(hash)r, which is not a hash') % {
                         'repo': project['repo'],
                         'hash': project['hash'],
                         'version': release['version'],
                     })
            else:
                # Report if the SHA exists or not (an error if it
                # does not).
                sha_exists = gitutils.commit_exists(
                    project['repo'], project['hash'],
                )
                if not sha_exists:
                    mk_error('No commit %(hash)r in %(repo)r'
                             % project)
                    # No point in running extra checks if the SHA just
                    # doesn't exist.
                    continue

                # Ensure we have a local copy of the repository so we
                # can scan for values that are more difficult to get
                # remotely.
                gitutils.clone_repo(workdir, project['repo'],
                                    project['hash'])

                # Check that the sdist name and tarball-base name match.
                if link_mode == 'tarball':
                    sdist = pythonutils.get_sdist_name(workdir,
                                                       project['repo'])
                    if sdist is not None:
                        # tarball-base defaults to the repository base name.
                        expected = project.get(
                            'tarball-base',
                            os.path.basename(project['repo']),
                        )
                        if sdist != expected:
                            if 'tarball-base' in project:
                                action = 'is set to'
                            else:
                                action = 'defaults to'
                            mk_error(
                                ('tarball-base for %s %s %s %r '
                                 'but the sdist name is actually %r. ' +
                                 _PLEASE)
                                % (project['repo'], release['version'],
                                   action, expected, sdist))

                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                version_exists = gitutils.tag_exists(
                    project['repo'], release['version'],
                )
                if version_exists:
                    # An existing tag must point at the hash recorded
                    # in the deliverable file.
                    actual_sha = gitutils.sha_for_tag(
                        workdir,
                        project['repo'],
                        release['version'],
                    )
                    if actual_sha != project['hash']:
                        mk_error(
                            ('Version %s in %s is on '
                             'commit %s instead of %s') %
                            (release['version'],
                             project['repo'],
                             actual_sha,
                             project['hash']))
                else:
                    print('Found new version {}'.format(release['version']))
                    new_releases[release['version']] = release
                    if prev_projects and project['repo'] not in prev_projects:
                        # New repo in this release; there is no previous
                        # tag in this repository to compare against.
                        print('not included in previous release for %s: %s' %
                              (prev_version, ', '.join(sorted(prev_projects))))
                    else:
                        # Validate the version string format itself.
                        for e in versionutils.validate_version(
                                release['version'],
                                release_type=release_type,
                                pre_ok=(release_model in _USES_PREVER)):
                            msg = ('could not validate version %r: %s' %
                                   (release['version'], e))
                            mk_error(msg)

                        # If this is a puppet module, ensure
                        # that the tag and metadata file
                        # match.
                        if puppetutils.looks_like_a_module(workdir,
                                                           project['repo']):
                            puppet_ver = puppetutils.get_version(
                                workdir, project['repo'])
                            if puppet_ver != release['version']:
                                mk_error(
                                    '%s metadata contains "%s" '
                                    'but is being tagged "%s"' % (
                                        project['repo'],
                                        puppet_ver,
                                        release['version'],
                                    ))

                        if is_independent:
                            mk_warning('skipping descendant test for '
                                       'independent project, verify '
                                       'branch manually')
                        else:
                            # If this is the first version in the series,
                            # check that the commit is actually on the
                            # targeted branch.
                            if not gitutils.check_branch_sha(
                                    workdir,
                                    project['repo'],
                                    series_name,
                                    defaults.RELEASE,
                                    project['hash']):
                                msg = '%s %s not present in %s branch' % (
                                    project['repo'],
                                    project['hash'],
                                    series_name,
                                )
                                mk_error(msg)

                            if prev_version:
                                # Check to see if we are re-tagging the same
                                # commit with a new version.
                                old_sha = gitutils.sha_for_tag(
                                    workdir,
                                    project['repo'],
                                    prev_version,
                                )
                                if old_sha == project['hash']:
                                    # FIXME(dhellmann): This needs a test.
                                    print('Retagging the SHA with '
                                          'a new version')
                                else:
                                    # Check to see if the commit for the new
                                    # version is in the ancestors of the
                                    # previous release, meaning it is actually
                                    # merged into the branch.
                                    is_ancestor = gitutils.check_ancestry(
                                        workdir,
                                        project['repo'],
                                        prev_version,
                                        project['hash'],
                                    )
                                    if not is_ancestor:
                                        mk_error(
                                            '%s %s receiving %s '
                                            'is not a descendant of %s' % (
                                                project['repo'],
                                                project['hash'],
                                                release['version'],
                                                prev_version,
                                            ))
        prev_version = release['version']
        prev_projects = set(p['repo'] for p in release['projects'])

    # Make sure that new entries have been appended to the file.
    for v, nr in new_releases.items():
        if nr != deliverable_info['releases'][-1]:
            msg = ('new release %s must be listed last, '
                   'with one new release per patch' % nr['version'])
            mk_error(msg)
def main():
    """Compute and record the next release for a deliverable.

    Supports bugfix/feature/major increments as well as milestone, rc
    and procedural releases. Determines the new version number, finds
    the hash to tag in each project repository, appends the release
    entry (and optionally a new stable branch) to the deliverable file,
    and rewrites the file when anything changed.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the name of the release series to scan',
    )
    parser.add_argument(
        'deliverable',
        help='the base name of the deliverable file',
    )
    # FIXME(dhellmann): Add milestone and rc types.
    parser.add_argument(
        'release_type',
        choices=('bugfix', 'feature', 'major', 'milestone', 'rc',
                 'procedural'),
        help='the type of release to generate',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--force',
        default=False,
        action='store_true',
        help=('force a new tag, even if the HEAD of the '
              'branch is already tagged'),
    )
    parser.add_argument(
        '--stable-branch',
        default=False,
        action='store_true',
        help='create a new stable branch from the release',
    )
    args = parser.parse_args()

    is_procedural = args.release_type == 'procedural'
    force_tag = args.force

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # ignore_errors=True: cleanup is best-effort.
            shutil.rmtree(workdir, True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    # Allow for independent projects.
    series = args.series
    if series.lstrip('_') == 'independent':
        series = '_independent'

    # Load existing deliverable data.
    try:
        deliverable_info = get_deliverable_data(
            series, args.deliverable)
    except (IOError, OSError) as e:
        parser.error(e)

    try:
        last_release = get_last_release(
            deliverable_info,
            series,
            args.deliverable,
            args.release_type,
        )
    except RuntimeError as err:
        parser.error(err)
    last_version = last_release['version'].split('.')

    add_stable_branch = args.stable_branch or is_procedural
    if args.release_type in ('milestone', 'rc'):
        force_tag = True
        if deliverable_info['release-model'] not in _USES_RCS:
            raise ValueError('Cannot compute RC for {} project {}'.format(
                deliverable_info['release-model'], args.deliverable))
        new_version_parts = increment_milestone_version(
            last_version, args.release_type)
        # We are going to take some special steps for the first
        # release candidate, so figure out if that is what this
        # release will be.
        if args.release_type == 'rc' and new_version_parts[-1][3:] == '1':
            add_stable_branch = True
    elif args.release_type == 'procedural':
        # NOTE(dhellmann): We always compute the new version based on
        # the highest version on the branch, rather than the branch
        # base. If the differences are only patch levels the results
        # do not change, but if there was a minor version update then
        # the new version needs to be incremented based on that.
        new_version_parts = increment_version(last_version, (0, 1, 0))

        # NOTE(dhellmann): Save the SHAs for the commits where the
        # branch was created in each repo, even though that is
        # unlikely to be the same as the last_version, because commits
        # further down the stable branch will not be in the history of
        # the master branch and so we can't tag them as part of the
        # new series *AND* we always want stable branches created from
        # master.
        prev_info = get_last_series_info(series, args.deliverable)
        for b in prev_info['branches']:
            if b['name'].startswith('stable/'):
                last_branch_base = b['location'].split('.')
                break
        else:
            raise ValueError(
                'Could not find a version in branch before {}'.format(
                    series)
            )
        if last_version != last_branch_base:
            print('WARNING: last_version {} branch base {}'.format(
                '.'.join(last_version), '.'.join(last_branch_base)))
        for r in prev_info['releases']:
            if r['version'] == '.'.join(last_branch_base):
                last_version_hashes = {
                    p['repo']: p['hash']
                    for p in r['projects']
                }
                break
        else:
            # Report the version we actually searched for -- the
            # branch base -- not last_version (which the original
            # message showed by mistake).
            raise ValueError(
                ('Could not find SHAs for tag '
                 '{} in old deliverable file').format(
                     '.'.join(last_branch_base))
            )
    else:
        increment = {
            'bugfix': (0, 0, 1),
            'feature': (0, 1, 0),
            'major': (1, 0, 0),
        }[args.release_type]
        new_version_parts = increment_version(last_version, increment)

    new_version = '.'.join(new_version_parts)

    if 'releases' not in deliverable_info:
        deliverable_info['releases'] = []

    # Join the parts so we show a version string, not a Python list.
    print('going from %s to %s' % ('.'.join(last_version), new_version))

    projects = []
    changes = 0
    for project in last_release['projects']:
        if args.release_type == 'procedural':
            # Always use the last tagged hash, which should be coming
            # from the previous series.
            sha = last_version_hashes[project['repo']]
        else:
            # Figure out the hash for the HEAD of the branch.
            gitutils.clone_repo(workdir, project['repo'])

            branches = gitutils.get_branches(workdir, project['repo'])
            version = 'origin/stable/%s' % series
            if not any(branch for branch in branches
                       if branch.endswith(version)):
                version = 'master'

            sha = gitutils.sha_for_tag(workdir, project['repo'], version)

        if is_procedural:
            changes += 1
            print('re-tagging %s at %s (%s)' % (project['repo'], sha,
                                                last_release['version']))
            new_project = {
                'repo': project['repo'],
                'hash': sha,
                'comment': 'procedural tag to support creating stable branch',
            }
            if 'tarball-base' in project:
                new_project['tarball-base'] = project['tarball-base']
            projects.append(new_project)
        elif project['hash'] != sha or force_tag:
            changes += 1
            print('advancing %s from %s to %s' % (project['repo'],
                                                  project['hash'],
                                                  sha))
            new_project = {
                'repo': project['repo'],
                'hash': sha,
            }
            if 'tarball-base' in project:
                new_project['tarball-base'] = project['tarball-base']
            projects.append(new_project)
        else:
            print('{} already tagged at most recent commit, skipping'.format(
                project['repo']))

    deliverable_info['releases'].append({
        'version': new_version,
        'projects': projects,
    })

    if add_stable_branch:
        branch_name = 'stable/{}'.format(series)

        # First check if this branch is already defined
        if 'branches' in deliverable_info:
            for branch in deliverable_info['branches']:
                if branch.get('name') == branch_name:
                    # Typo fix: message previously said "existes".
                    print('Branch {} already exists, skipping'.format(
                        branch_name))
                    add_stable_branch = False
                    break

        if add_stable_branch:
            print('adding stable branch at {}'.format(new_version))
            deliverable_info.setdefault('branches', []).append({
                'name': branch_name,
                'location': new_version,
            })

    if changes > 0:
        deliverable_filename = 'deliverables/%s/%s.yaml' % (
            series, args.deliverable)
        with open(deliverable_filename, 'w', encoding='utf-8') as f:
            f.write(yamlutils.dumps(deliverable_info))
def main():
    """Validate deliverable files and report any problems found.

    Checks each deliverable file for a valid launchpad project and
    verifies every release entry: the hash must be a real SHA that
    exists in the repository, existing tags must point at the recorded
    hash, and new releases must descend from the previous release.

    Returns 1 when any error was found, otherwise 0.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, validating all releases from %s'
              % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')

    errors = []

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except OSError:
                # Cleanup is best-effort; never let it change the
                # exit status. (Was a bare "except:".)
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.isfile(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # safe_load avoids constructing arbitrary objects from
            # YAML tags (yaml.load without a Loader is unsafe).
            deliverable_info = yaml.safe_load(f.read())

        # Look for the launchpad project
        try:
            lp_name = deliverable_info['launchpad']
        except KeyError:
            errors.append('No launchpad project given in %s' % filename)
            print('no launchpad project name given')
        else:
            print('launchpad project %s ' % lp_name, end='')
            lp_resp = requests.get('https://api.launchpad.net/1.0/' + lp_name)
            if (lp_resp.status_code // 100) == 4:
                print('MISSING')
                errors.append('Launchpad project %s does not exist' % lp_name)
            else:
                print('found')

        prev_version = None
        for release in deliverable_info['releases']:
            for project in release['projects']:
                print('%s SHA %s ' % (project['repo'],
                                      project['hash']),
                      end='')

                if not is_a_hash(project['hash']):
                    print('NOT A SHA HASH')
                    # The project dict has no 'version' key, so build
                    # an explicit mapping; "% project" alone raised
                    # KeyError for %(version)s here.
                    errors.append(
                        ('%(repo)s version %(version)s release from '
                         '%(hash)r, which is not a hash') % {
                             'repo': project['repo'],
                             'hash': project['hash'],
                             'version': release['version'],
                         }
                    )
                else:
                    # Report if the SHA exists or not (an error if it
                    # does not).
                    sha_exists = gitutils.commit_exists(
                        project['repo'], project['hash'],
                    )
                    if not sha_exists:
                        print('MISSING', end='')
                        errors.append('No commit %(hash)r in %(repo)r'
                                      % project)
                    else:
                        print('found ', end='')
                        # Report if the version has already been
                        # tagged. We expect it to not exist, but neither
                        # case is an error because sometimes we want to
                        # import history and sometimes we want to make new
                        # releases.
                        print('version %s ' % release['version'], end='')
                        version_exists = gitutils.commit_exists(
                            project['repo'], release['version'],
                        )
                        if version_exists:
                            gitutils.clone_repo(workdir, project['repo'])
                            actual_sha = gitutils.sha_for_tag(
                                workdir,
                                project['repo'],
                                release['version'],
                            )
                            if actual_sha == project['hash']:
                                print('found and matches SHA')
                            else:
                                print('found DIFFERENT %r' % actual_sha)
                                errors.append(
                                    ('Version %s in %s is on '
                                     'commit %s instead of %s') %
                                    (release['version'],
                                     project['repo'],
                                     actual_sha,
                                     project['hash']))
                        else:
                            print('NEW ', end='')
                            if not prev_version:
                                print()
                            else:
                                # The clone above only happens when the
                                # version already exists, so make sure
                                # the repo is present locally before the
                                # ancestry check runs against it.
                                gitutils.clone_repo(workdir,
                                                    project['repo'])
                                # Check to see if the commit for the new
                                # version is in the ancestors of the
                                # previous release, meaning it is actually
                                # merged into the branch.
                                is_ancestor = gitutils.check_ancestry(
                                    workdir,
                                    project['repo'],
                                    prev_version,
                                    project['hash'],
                                )
                                if is_ancestor:
                                    print('SHA found in descendants')
                                else:
                                    print('SHA NOT FOUND in descendants')
                                    errors.append(
                                        '%s %s is not a descendant of %s' % (
                                            project['repo'],
                                            project['hash'],
                                            prev_version)
                                    )
            prev_version = release['version']

    if errors:
        print('\n%s errors found' % len(errors))
        for e in errors:
            print(e)

    return 1 if errors else 0
def main():
    """Propose the next release of a deliverable.

    Computes the next version from the most recent release entry and
    the requested release type, looks up the current branch HEAD for
    each project repository, and appends a new release entry to the
    deliverable file when any project has advanced.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the name of the release series to scan',
    )
    parser.add_argument(
        'deliverable',
        help='the base name of the deliverable file',
    )
    # FIXME(dhellmann): Add milestone and rc types.
    parser.add_argument(
        'release_type',
        choices=('bugfix', 'feature', 'major'),
        help='the type of release to generate',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    args = parser.parse_args()

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except OSError:
                # Cleanup is best-effort; never let it change the
                # program outcome. (Was a bare "except:".)
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    # Allow for independent projects.
    series = args.series
    if series.lstrip('_') == 'independent':
        series = '_independent'

    # Load existing deliverable data.
    deliverable_filename = 'deliverables/%s/%s.yaml' % (series,
                                                        args.deliverable)
    try:
        with open(deliverable_filename, 'r') as f:
            deliverable_info = yaml.safe_load(f)
    except (IOError, OSError) as e:
        parser.error(e)

    # Determine the new version number.
    last_release = deliverable_info['releases'][-1]
    last_version = last_release['version'].split('.')
    increment = {
        'bugfix': (0, 0, 1),
        'feature': (0, 1, 0),
        'major': (1, 0, 0),
    }[args.release_type]
    new_version_parts = []
    clear = False
    for cur, inc in zip(last_version, increment):
        # Once a field has been incremented, every less-significant
        # field resets to zero (e.g. 1.2.3 + feature -> 1.3.0).
        if clear:
            new_version_parts.append('0')
        else:
            new_version_parts.append(str(int(cur) + inc))
            if inc:
                clear = True
    new_version = '.'.join(new_version_parts)

    # Join the parts so we show a version string, not a Python list.
    print('going from %s to %s' % ('.'.join(last_version), new_version))

    projects = []
    changes = 0
    for project in last_release['projects']:
        gitutils.clone_repo(workdir, project['repo'])

        branches = gitutils.get_branches(workdir, project['repo'])
        version = 'origin/stable/%s' % series
        if not any(branch for branch in branches
                   if branch.endswith(version)):
            version = 'master'

        sha = gitutils.sha_for_tag(workdir, project['repo'], version)
        if project['hash'] != sha:
            changes += 1
            print('advancing %s from %s to %s' % (project['repo'],
                                                  project['hash'],
                                                  sha))
        projects.append({
            'repo': project['repo'],
            'hash': sha,
        })

    # The YAML dump formatter produces results that aren't very nice
    # to read, so we format the output ourselves. The file is only
    # regenerated if there are in fact changes to be made.
    if changes > 0:
        with open(deliverable_filename, 'a') as f:
            f.write(RELEASE_TEMPLATE.format(version=new_version))
            for p in projects:
                f.write(PROJECT_TEMPLATE.format(**p))
def main():
    """Produce the full review report for pending releases.

    For each modified deliverable file: show governance/team details,
    clone the affected repositories, apply a temporary tag when needed,
    and print the git history, requirements diffs, and generated
    release notes that reviewers need to approve the release.

    Returns 0 so the value can be used directly as a process exit code.
    """
    if not sys.stdout.encoding:
        # Wrap sys.stdout with a writer that knows how to handle
        # encoding Unicode data.
        import codecs
        wrapped_stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdout = wrapped_stdout

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--no-shortcut', '--force', '-f',
        dest='shortcut',
        default=True,
        action='store_false',
        help='if a tag has been applied, skip the repo',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Set up logging, including making some loggers quiet.
    logging.basicConfig(
        format='%(levelname)7s: %(message)s',
        stream=sys.stdout,
        level=logging.DEBUG,
    )
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # ignore_errors=True: cleanup is best-effort.
            shutil.rmtree(workdir, True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    gov_data = governance.Governance.from_remote_repo()
    official_repos = set(r.name for r in gov_data.get_repositories())

    all_deliverables = deliverable.Deliverables(
        './deliverables',
        False,
    )

    liaison_data = liaisons.get_liaisons()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        deliv = deliverable.Deliverable.read_file(filename)

        # Only series other than 'independent' and the current default
        # series get stable branches.
        stable_branch = deliv.series not in ['independent', defaults.RELEASE]

        # By default assume the project does not use milestones.
        header('Release model')
        print(deliv.model)

        header('Team details')
        if deliv.team:
            team_name = deliv.team
            try:
                team = gov_data.get_team(team_name)
            except ValueError:
                team = None
            if team:
                print('found team %s' % team_name)
                print(' PTL : %(name)s (%(irc)s)' % team.ptl)
                for liaison in liaison_data.get(team.name.lower(), []):
                    print(' Liaison: %(name)s (%(irc)s)' % liaison)
                team_deliv = team.deliverables.get(deliv.name)
                if team_deliv:
                    print('found deliverable %s' % deliv.name)
                    for rn, repo in sorted(team_deliv.repositories.items()):
                        follows_stable_policy = 'stable:follows-policy' in repo.tags
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print(' %s' % t)
                        print('')
                        if stable_branch and follows_stable_policy:
                            banner('Needs Stable Policy Review')
                            print()
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') %
                          (deliv.name, team_name))
                if not deliv.is_independent:
                    # Show other deliverables owned by the team and
                    # included in this series.
                    team_deliv_in_series = all_deliverables.get_deliverables(
                        team.name, deliv.series)
                    if team_deliv_in_series:
                        print('Other {} deliverables in {}:'.format(
                            team.name, deliv.series))
                    for d in team_deliv_in_series:
                        print(' {} ({})'.format(d.name,
                                                d.latest_release or None))
                    print()
            else:
                print('no team %r found, cannot report on governance status' %
                      team_name)
        else:
            print('no team name given, cannot report on governance status')

        # If there are no releases listed, this is probably a new
        # deliverable file for initializing a new series. We don't
        # need to list its changes.
        if not deliv.is_released:
            header('No releases')
            print('no releases were found, assuming an initialization file')
            continue

        # assume the releases are in order and take the last one
        new_release = deliv.releases[-1]

        for project in new_release.projects:

            tag_exists = gitutils.tag_exists(
                project.repo.name,
                new_release.version,
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project.repo.name, new_release.version))
                if args.shortcut:
                    print('skipping further processing')
                    continue

            if project.repo.is_retired:
                print('%s is retired' % (project.repo.name,))
                if args.shortcut:
                    print('skipping further processing')
                    continue

            # Start by checking out master, always. We need the repo
            # checked out before we can tell if the stable branch
            # really exists.
            gitutils.clone_repo(
                workdir,
                project.repo.name,
                branch='master',
            )

            # Set some git configuration values to allow us to perform
            # local operations like tagging.
            gitutils.ensure_basic_git_config(
                workdir, project.repo.name,
                {'user.email': '*****@*****.**',
                 'user.name': 'OpenStack Proposal Bot'},
            )

            # Determine which branch we should actually be looking
            # at. Assume any series for which there is no stable
            # branch will be on 'master'.
            if gitutils.stable_branch_exists(workdir,
                                             project.repo.name,
                                             deliv.series):
                branch = 'stable/' + deliv.series
            else:
                branch = 'master'

            if branch != 'master':
                # Check out the repo again to the right branch if we
                # didn't get it the first time.
                gitutils.clone_repo(
                    workdir,
                    project.repo.name,
                    branch=branch,
                )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project.repo.name,
                '{}^'.format(project.hash),
                always=False,
            )
            try:
                previous_release = deliv.get_release(previous_tag)
            except ValueError:
                previous_release = None

            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x.repo.name: x
                    for x in previous_release.projects
                }.get(project.repo.name)
                if previous_project is not None:
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project.hash)
            else:
                git_range = project.hash

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release.version)
            print('\ngit describe %s\n' % project.hash)
            try:
                subprocess.check_call(
                    ['git', 'describe', project.hash],
                    cwd=os.path.join(workdir, project.repo.name),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project.repo.name,
                title='Check existing tags',
                ref=project.hash,
            )

            git_list_existing_branches(
                workdir=workdir,
                repo=project.repo.name,
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project.repo.name,
                title='Branches containing commit',
                commit=project.hash,
            )

            header('Relationship to HEAD')
            if deliv.is_independent:
                # Independent deliverables may tag any branch; prefer
                # whichever branch actually contains the commit.
                if branches:
                    tag_branch = branches[0]
                else:
                    tag_branch = branch
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project.repo.name,
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                if (branch in branches) or (not branches):
                    tag_branch = branch
                else:
                    tag_branch = branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project.repo.name,
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))

            requested_sha = gitutils.sha_for_tag(
                workdir,
                project.repo.name,
                project.hash,
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir, project.repo.name,
                        'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            show_watched_queries(branch, project.repo.name)

            # Show any requirements changes in the upcoming release.
            # Include setup.cfg, in case the project uses "extras".
            if start_range:
                git_diff(workdir, project.repo.name, git_range,
                         '*requirements*.txt',
                         'Requirements Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range,
                         'doc/requirements.txt',
                         'Doc Requirements Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range,
                         'setup.cfg',
                         'setup.cfg Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range,
                         'bindep.txt',
                         'bindep.txt Changes %s' % git_range)

            # Before we try to determine if the previous release
            # is an ancestor or produce the release notes we need
            # the tag to exist in the local repository.
            if not tag_exists:
                header('Applying Temporary Tag')
                print('\ngit tag {version} {hash}'.format(
                    version=new_release.version,
                    hash=project.hash,
                ))
                subprocess.check_call(
                    ['git', 'tag', new_release.version,
                     project.hash],
                    cwd=os.path.join(workdir, project.repo.name),
                )

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                previous_tag_exists = gitutils.tag_exists(
                    project.repo.name,
                    previous_release.version,
                )
            if previous_tag_exists:
                git_log(
                    workdir, project.repo.name,
                    'Patches in previous release but not in this one',
                    [previous_release.version,
                     '--not',
                     project.hash],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )

                # The tag will have been added as a local tag above if
                # it does not already exist.
                header('New release %s includes previous release %s' %
                       (new_release.version, previous_release.version))
                print('\ngit tag --contains %s\n' % previous_release.version)
                containing_tags = subprocess.check_output(
                    ['git', 'tag',
                     '--contains',
                     previous_release.version],
                    cwd=os.path.join(workdir, project.repo.name),
                ).decode('utf-8').split()
                print('Containing tags:', containing_tags)
                if new_release.version not in containing_tags:
                    print('WARNING: Missing %s' % new_release.version)
                else:
                    print('Found new version %s' % new_release.version)

                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project.repo.name,
                    previous_release.version,
                    project.hash,
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir, project.repo.name,
                    'Release %s will include' % new_release.version,
                    git_range,
                    extra_args=['--graph', '--oneline', '--decorate',
                                '--topo-order'])
            git_log(workdir, project.repo.name,
                    'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # The tag will have been added as a local tag above if it does
            # not already exist.
            header('Release Notes')
            try:
                notes = release_notes.generate_release_notes(
                    repo=project.repo.name,
                    repo_path=os.path.join(workdir, project.repo.name),
                    start_revision=new_release.diff_start or start_range or '',
                    end_revision=new_release.version,
                    show_dates=True,
                    skip_requirement_merges=True,
                    is_stable=branch.startswith('stable/'),
                    series=deliv.series,
                    email='*****@*****.**',
                    email_from='*****@*****.**',
                    email_reply_to='*****@*****.**',
                    email_tags='',
                    include_pypi_link=False,
                    changes_only=False,
                    first_release=deliv.is_first_release,
                    deliverable_file=filename,
                    description='',
                    publishing_dir_name=project.repo.name,
                )
            except Exception as e:
                logging.exception('Failed to produce release notes')
            else:
                print('\n')
                print(notes)

        # NOTE(review): this relies on ``project`` leaking out of the
        # loop above, so it reports on the last project only -- confirm
        # that is the intended behavior.
        if 'library' in deliv.type:
            show_dependency_listings(
                project.guess_sdist_name(),
                official_repos,
            )

    return 0