def eol_tag(args, series, deliverable_info):
    """Add an '<series>-eol' release entry to the deliverable data.

    For every repository listed in the deliverable's
    repository-settings, look for an existing <series>-eol tag on the
    git server. Repositories that carry the tag are cloned so the tag
    can be resolved to a SHA; repositories without the tag are
    reported and skipped. If at least one repository had the tag, a
    new release entry mapping the EOL tag to the discovered SHAs is
    appended to deliverable_info['releases'] in place.

    :param args: parsed command-line arguments (currently unused here)
    :param series: name of the release series being end-of-lifed
    :param deliverable_info: parsed deliverable data, mutated in place
    """
    tmp_dir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % tmp_dir)

    def _remove_tmp_dir():
        # ignore_errors=True: best-effort cleanup at interpreter exit
        shutil.rmtree(tmp_dir, True)

    atexit.register(_remove_tmp_dir)

    eol_tag_name = '{}-eol'.format(series)
    tagged_projects = []
    for repo in deliverable_info['repository-settings'].keys():
        if not gitutils.tag_exists(repo, eol_tag_name):
            print('No {} tag for {}'.format(eol_tag_name, repo))
            continue
        # Clone so the tag can be resolved to its commit SHA locally.
        gitutils.clone_repo(tmp_dir, repo)
        sha = gitutils.sha_for_tag(tmp_dir, repo, eol_tag_name)
        tagged_projects.append({
            'repo': repo,
            'hash': sha,
        })
    # Only record a release if at least one repo actually had the tag.
    if tagged_projects:
        deliverable_info['releases'].append({
            'version': eol_tag_name,
            'projects': tagged_projects,
        })
def validate_releases(deliverable_info, zuul_layout, series_name, workdir, mk_warning, mk_error):
    """Apply validation rules to the 'releases' list for the deliverable.

    Checks each release entry for: configured release jobs, a
    well-formed and existing SHA, tag/SHA agreement for already-tagged
    versions, version-string validity, branch membership for the first
    release in a series, and ancestry relative to the previous
    release. Problems are reported through the mk_warning/mk_error
    callbacks rather than raised.

    :param deliverable_info: parsed deliverable YAML data
    :param zuul_layout: parsed zuul layout used for release-job checks
    :param series_name: name of the series being validated
    :param workdir: scratch directory used for local git clones
    :param mk_warning: callable(msg) recording a warning
    :param mk_error: callable(msg) recording an error
    """
    release_model = get_model(deliverable_info, series_name)
    is_independent = (release_model == 'independent')
    # Remember which entries are new so we can verify that they
    # appear at the end of the file.
    new_releases = {}
    release_type = deliverable_info.get('release-type', 'std')
    link_mode = deliverable_info.get('artifact-link-mode', 'tarball')
    prev_version = None
    prev_projects = set()
    for release in deliverable_info.get('releases', []):
        for project in release['projects']:
            # Check for release jobs (if we ship a tarball)
            if link_mode != 'none':
                project_config.require_release_jobs_for_repo(
                    deliverable_info, zuul_layout, project['repo'],
                    release_type, mk_warning, mk_error,
                )
            # Check the SHA specified for the tag.
            print('%s SHA %s ' % (project['repo'], project['hash']))
            if not is_a_hash(project['hash']):
                mk_error(
                    ('%(repo)s version %(version)s release from '
                     '%(hash)r, which is not a hash') % {
                         'repo': project['repo'],
                         'hash': project['hash'],
                         'version': release['version'],
                     }
                )
            else:
                # Report if the SHA exists or not (an error if it
                # does not).
                sha_exists = gitutils.commit_exists(
                    project['repo'], project['hash'],
                )
                if not sha_exists:
                    mk_error('No commit %(hash)r in %(repo)r' % project)
                    # No point in running extra checks if the SHA just
                    # doesn't exist.
                    continue
                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                version_exists = gitutils.tag_exists(
                    project['repo'], release['version'],
                )
                # Clone now so later SHA/tag lookups can run locally.
                gitutils.clone_repo(workdir, project['repo'])
                if version_exists:
                    # Tag already on the server: it must point at the
                    # SHA recorded in the deliverable file.
                    actual_sha = gitutils.sha_for_tag(
                        workdir, project['repo'], release['version'],
                    )
                    if actual_sha != project['hash']:
                        mk_error(
                            ('Version %s in %s is on '
                             'commit %s instead of %s') %
                            (release['version'], project['repo'],
                             actual_sha, project['hash']))
                else:
                    print('Found new version {}'.format(release['version']))
                    new_releases[release['version']] = release
                    if prev_projects and project['repo'] not in prev_projects:
                        # New repo in this release; just report, since
                        # the remaining checks assume release history.
                        print('not included in previous release for %s: %s' %
                              (prev_version, ', '.join(sorted(prev_projects))))
                    else:
                        # Validate the version string itself.
                        for e in versionutils.validate_version(
                                release['version'],
                                release_type=release_type,
                                pre_ok=(release_model in _USES_PREVER)):
                            msg = ('could not validate version %r: %s' %
                                   (release['version'], e))
                            mk_error(msg)
                        if is_independent:
                            mk_warning('skipping descendant test for '
                                       'independent project, verify '
                                       'branch manually')
                        elif not prev_version:
                            # If this is the first version in the series,
                            # check that the commit is actually on the
                            # targeted branch.
                            if not gitutils.check_branch_sha(
                                    workdir, project['repo'], series_name,
                                    defaults.RELEASE, project['hash']):
                                msg = '%s %s not present in %s branch' % (
                                    project['repo'],
                                    project['hash'],
                                    series_name,
                                )
                                mk_error(msg)
                        else:
                            # Check to see if we are re-tagging the same
                            # commit with a new version.
                            old_sha = gitutils.sha_for_tag(
                                workdir, project['repo'], prev_version,
                            )
                            if old_sha == project['hash']:
                                # FIXME(dhellmann): This needs a test.
                                print('Retagging the SHA with a new version')
                            else:
                                # Check to see if the commit for the new
                                # version is in the ancestors of the
                                # previous release, meaning it is actually
                                # merged into the branch.
                                is_ancestor = gitutils.check_ancestry(
                                    workdir,
                                    project['repo'],
                                    prev_version,
                                    project['hash'],
                                )
                                if not is_ancestor:
                                    mk_error(
                                        '%s %s receiving %s '
                                        'is not a descendant of %s' % (
                                            project['repo'],
                                            project['hash'],
                                            release['version'],
                                            prev_version,
                                        )
                                    )
        prev_version = release['version']
        prev_projects = set(p['repo'] for p in release['projects'])
    # Make sure that new entries have been appended to the file.
    for v, nr in new_releases.items():
        if nr != deliverable_info['releases'][-1]:
            msg = ('new release %s must be listed last, '
                   'with one new release per patch' % nr['version'])
            mk_error(msg)
def main():
    """Check that every listed release version has a git tag.

    Scans deliverable YAML files (explicit arguments, a whole series,
    or the files modified in the latest commit) and reports any
    repo/version pair whose tag is missing from the git server.

    :returns: 1 if any tags are missing, 0 otherwise
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--series', '-s',
        help='release series to scan',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()
    # Pick the input set: explicit files, a full series, or the files
    # touched by the most recent commit; fall back to the current
    # default series if nothing was modified.
    if args.input:
        filenames = args.input
    elif args.series:
        filenames = glob.glob('deliverables/%s/*.yaml' % args.series)
    else:
        filenames = gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, validating all releases from %s'
              % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')
    errors = []
    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.exists(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # NOTE(review): yaml.load() without an explicit Loader is
            # deprecated and can construct arbitrary Python objects;
            # consider yaml.safe_load() for these data files.
            deliverable_info = yaml.load(f.read())
        for release in deliverable_info['releases']:
            for project in release['projects']:
                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                print('%s %s' % (project['repo'], release['version']),
                      end=' ')
                version_exists = gitutils.tag_exists(
                    project['repo'], release['version'],
                )
                if version_exists:
                    print('found')
                else:
                    print('MISSING')
                    errors.append(
                        '%s missing tag %s' % (
                            project['repo'],
                            release['version'],
                        )
                    )
    if errors:
        print('\n\n%s errors found' % len(errors))
        for e in errors:
            print(e)
    return 1 if errors else 0
def main():
    """Check that release tags and build artifacts exist.

    For each selected deliverable file, verifies (unless --artifacts)
    that every repo/version pair is tagged on the git server, and
    (when artifact-link-mode is 'tarball') that the tarball and its
    signature are downloadable. By default only the most recent
    release of each deliverable is checked; --all scans every release.

    :returns: 1 if anything was missing, 0 otherwise
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--series', '-s',
        help='release series to scan',
    )
    parser.add_argument(
        '--artifacts',
        default=False,
        action='store_true',
        help='only scan the build artifacts',
    )
    parser.add_argument(
        '--all',
        default=False,
        action='store_true',
        help='scan all releases, not just most recent',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()
    # Pick the input set: explicit files, a full series, or the files
    # touched by the most recent commit.
    if args.input:
        filenames = args.input
    elif args.series:
        filenames = sorted(glob.glob('deliverables/%s/*.yaml' % args.series))
    else:
        filenames = sorted(gitutils.find_modified_deliverable_files())
    if not filenames:
        print('no modified deliverable files, validating all releases from %s'
              % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')
    errors = []
    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.exists(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # NOTE(review): yaml.load() without an explicit Loader is
            # deprecated and can construct arbitrary Python objects;
            # consider yaml.safe_load() for these data files.
            deliverable_info = yaml.load(f.read())
        link_mode = deliverable_info.get('artifact-link-mode', 'tarball')
        releases = deliverable_info.get('releases', [])
        if not args.all:
            # Default: only the most recent release entry.
            releases = releases[-1:]
        for release in releases:
            for project in release['projects']:
                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                print('%s %s' % (project['repo'], release['version']),
                      end=' ')
                if not args.artifacts:
                    version_exists = gitutils.tag_exists(
                        project['repo'], release['version'],
                    )
                    if version_exists:
                        print('tag:found', end=' ')
                    else:
                        print('tag:MISSING', end=' ')
                        errors.append('%s missing tag %s' %
                                      (project['repo'], release['version']))
                # Look for the tarball associated with the tag and
                # report if that exists.
                if link_mode == 'tarball':
                    tb_url = links.tarball_url(release['version'], project)
                    if links.link_exists(tb_url):
                        print('tarball:found', end=' ')
                    else:
                        print('tarball:MISSING\n%s' % tb_url)
                        errors.append('%s missing tarball %s' %
                                      (filename, tb_url))
                    sig_url = links.signature_url(release['version'], project)
                    if links.link_exists(sig_url):
                        print('signature:found', end=' ')
                    else:
                        print('signature:MISSING\n%s' % sig_url)
                        errors.append('%s missing signature %s' %
                                      (filename, sig_url))
                print()
    if errors:
        print('\n\n%s errors found' % len(errors))
        for e in errors:
            print(e)
    return 1 if errors else 0
def main():
    """Check tags, tarballs, and PyPI uploads for deliverable releases.

    Like the tag/artifact scanner, but built on the Deliverable class
    and extended to verify wheel artifacts (py2 vs universal) with
    their signatures and the presence of sdist/bdist_wheel uploads on
    PyPI. Pre-release versions (containing 'a', 'b', or 'rc') skip the
    PyPI checks.

    :returns: 1 if anything was missing, 0 otherwise
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--series', '-s',
        help='release series to scan',
    )
    parser.add_argument(
        '--artifacts',
        default=False,
        action='store_true',
        help='only scan the build artifacts',
    )
    parser.add_argument(
        '--all',
        default=False,
        action='store_true',
        help='scan all releases, not just most recent',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()
    # Pick the input set: explicit files, a full series, or the files
    # touched by the most recent commit.
    if args.input:
        filenames = args.input
    elif args.series:
        filenames = sorted(glob.glob('deliverables/%s/*.yaml' % args.series))
    else:
        filenames = sorted(gitutils.find_modified_deliverable_files())
    if not filenames:
        print(
            'no modified deliverable files, validating all releases from %s'
            % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')
    errors = []
    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.exists(filename):
            print("File was deleted, skipping.")
            continue
        deliv = deliverable.Deliverable.read_file(filename)
        releases = deliv.releases
        if not args.all:
            # Default: only the most recent release entry.
            releases = releases[-1:]
        for release in releases:
            version = release.version
            for project in release.projects:
                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                print('\n%s %s' % (project.repo.name, version))
                if not args.artifacts:
                    version_exists = gitutils.tag_exists(
                        project.repo.name, version,
                    )
                    if version_exists:
                        print(' found tag')
                    else:
                        print(' did not find tag')
                        errors.append('%s missing tag %s' %
                                      (project.repo.name, version))
                # Look for the tarball associated with the tag and
                # report if that exists.
                if deliv.artifact_link_mode == 'tarball':
                    tb_url = links.tarball_url(version, project)
                    errors.extend(check_signed_file('tarball', tb_url))
                    if 'a' in version or 'b' in version or 'rc' in version:
                        print(' pre-releases are not uploaded to PyPI')
                        continue
                    pypi_name = project.repo.pypi_name
                    if not pypi_name:
                        pypi_name = project.guess_sdist_name()
                    pypi_info = pythonutils.get_pypi_info(pypi_name)
                    if not pypi_info:
                        print(' apparently not a python module')
                        continue
                    wheel_2_errors = list(
                        check_url('python 2 wheel',
                                  links.wheel_py2_url(version, project)))
                    wheel_both_errors = list(
                        check_url('python 2/3 wheel',
                                  links.wheel_both_url(version, project)))
                    # We only expect to find one wheel. Look for both,
                    # and minimize what we report as errors.
                    if wheel_2_errors and wheel_both_errors:
                        # We have neither wheel.
                        errors.extend(wheel_2_errors)
                        errors.extend(wheel_both_errors)
                    elif not wheel_both_errors:
                        # We have the "both" wheel, so check for the
                        # signature file.
                        errors.extend(
                            check_url(
                                'python 2/3 wheel signature',
                                links.wheel_both_url(version, project) + '.asc',
                            ))
                    elif not wheel_2_errors:
                        # We have the py2 wheel, so check for the
                        # signature file.
                        errors.extend(
                            check_url(
                                'python 2 wheel signature',
                                links.wheel_py2_url(version, project) + '.asc',
                            ))
                    if version not in pypi_info.get('releases', {}):
                        msg = ('{} dist with version {} '
                               'not uploaded to PyPI').format(
                                   pypi_name, version)
                        print(' {}'.format(msg))
                        errors.append(msg)
                    else:
                        print(' found version {} on PyPI'.format(version))
                        # Both an sdist and a wheel should be uploaded.
                        expected_types = set(['bdist_wheel', 'sdist'])
                        actual_types = set(
                            r['packagetype']
                            for r in pypi_info['releases'][version])
                        for actual in actual_types:
                            print(' found {} on PyPI'.format(actual))
                        for missing in expected_types.difference(actual_types):
                            msg = '{} not found on PyPI'.format(missing)
                            print(' {}'.format(msg))
                            errors.append(msg)
    if errors:
        print('\n\n%s errors found' % len(errors))
        for e in errors:
            print(e)
    return 1 if errors else 0
def validate_releases(deliverable_info, zuul_layout, series_name, workdir, mk_warning, mk_error):
    """Apply validation rules to the 'releases' list for the deliverable.

    Extended variant of the release validator: in addition to the
    SHA/tag/version/ancestry checks it verifies that the sdist name
    matches the tarball-base setting and that puppet-module metadata
    agrees with the version being tagged. Problems are reported
    through the mk_warning/mk_error callbacks rather than raised.

    :param deliverable_info: parsed deliverable YAML data
    :param zuul_layout: parsed zuul layout used for release-job checks
    :param series_name: name of the series being validated
    :param workdir: scratch directory used for local git clones
    :param mk_warning: callable(msg) recording a warning
    :param mk_error: callable(msg) recording an error
    """
    release_model = get_model(deliverable_info, series_name)
    is_independent = (release_model == 'independent')
    # Remember which entries are new so we can verify that they
    # appear at the end of the file.
    new_releases = {}
    release_type = deliverable_info.get('release-type', 'std')
    link_mode = deliverable_info.get('artifact-link-mode', 'tarball')
    prev_version = None
    prev_projects = set()
    for release in deliverable_info.get('releases', []):
        print('checking %s' % release['version'])
        for project in release['projects']:
            # Check for release jobs (if we ship a tarball)
            if link_mode != 'none':
                project_config.require_release_jobs_for_repo(
                    deliverable_info, zuul_layout, project['repo'],
                    release_type, mk_warning, mk_error,
                )
            # Check the SHA specified for the tag.
            print('%s SHA %s ' % (project['repo'], project['hash']))
            if not is_a_hash(project['hash']):
                mk_error(
                    ('%(repo)s version %(version)s release from '
                     '%(hash)r, which is not a hash') % {
                         'repo': project['repo'],
                         'hash': project['hash'],
                         'version': release['version'],
                     })
            else:
                # Report if the SHA exists or not (an error if it
                # does not).
                sha_exists = gitutils.commit_exists(
                    project['repo'], project['hash'],
                )
                if not sha_exists:
                    mk_error('No commit %(hash)r in %(repo)r' % project)
                    # No point in running extra checks if the SHA just
                    # doesn't exist.
                    continue
                # Ensure we have a local copy of the repository so we
                # can scan for values that are more difficult to get
                # remotely.
                gitutils.clone_repo(workdir, project['repo'],
                                    project['hash'])
                # Check that the sdist name and tarball-base name match.
                if link_mode == 'tarball':
                    sdist = pythonutils.get_sdist_name(workdir,
                                                       project['repo'])
                    if sdist is not None:
                        expected = project.get(
                            'tarball-base',
                            os.path.basename(project['repo']),
                        )
                        if sdist != expected:
                            if 'tarball-base' in project:
                                action = 'is set to'
                            else:
                                action = 'defaults to'
                            mk_error(('tarball-base for %s %s %s %r '
                                      'but the sdist name is actually %r. ' +
                                      _PLEASE)
                                     % (project['repo'], release['version'],
                                        action, expected, sdist))
                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                version_exists = gitutils.tag_exists(
                    project['repo'], release['version'],
                )
                if version_exists:
                    # Tag already on the server: it must point at the
                    # SHA recorded in the deliverable file.
                    actual_sha = gitutils.sha_for_tag(
                        workdir, project['repo'], release['version'],
                    )
                    if actual_sha != project['hash']:
                        mk_error(('Version %s in %s is on '
                                  'commit %s instead of %s') %
                                 (release['version'], project['repo'],
                                  actual_sha, project['hash']))
                else:
                    print('Found new version {}'.format(release['version']))
                    new_releases[release['version']] = release
                    if prev_projects and project['repo'] not in prev_projects:
                        # New repo in this release; just report, since
                        # the remaining checks assume release history.
                        print('not included in previous release for %s: %s' %
                              (prev_version, ', '.join(sorted(prev_projects))))
                    else:
                        # Validate the version string itself.
                        for e in versionutils.validate_version(
                                release['version'],
                                release_type=release_type,
                                pre_ok=(release_model in _USES_PREVER)):
                            msg = ('could not validate version %r: %s' %
                                   (release['version'], e))
                            mk_error(msg)
                        # If this is a puppet module, ensure
                        # that the tag and metadata file
                        # match.
                        if puppetutils.looks_like_a_module(
                                workdir, project['repo']):
                            puppet_ver = puppetutils.get_version(
                                workdir, project['repo'])
                            if puppet_ver != release['version']:
                                mk_error('%s metadata contains "%s" '
                                         'but is being tagged "%s"' % (
                                             project['repo'],
                                             puppet_ver,
                                             release['version'],
                                         ))
                        if is_independent:
                            mk_warning('skipping descendant test for '
                                       'independent project, verify '
                                       'branch manually')
                        else:
                            # If this is the first version in the series,
                            # check that the commit is actually on the
                            # targeted branch.
                            if not gitutils.check_branch_sha(
                                    workdir, project['repo'], series_name,
                                    defaults.RELEASE, project['hash']):
                                msg = '%s %s not present in %s branch' % (
                                    project['repo'],
                                    project['hash'],
                                    series_name,
                                )
                                mk_error(msg)
                            if prev_version:
                                # Check to see if we are re-tagging the same
                                # commit with a new version.
                                old_sha = gitutils.sha_for_tag(
                                    workdir, project['repo'], prev_version,
                                )
                                if old_sha == project['hash']:
                                    # FIXME(dhellmann): This needs a test.
                                    print('Retagging the SHA with '
                                          'a new version')
                                else:
                                    # Check to see if the commit for the new
                                    # version is in the ancestors of the
                                    # previous release, meaning it is actually
                                    # merged into the branch.
                                    is_ancestor = gitutils.check_ancestry(
                                        workdir,
                                        project['repo'],
                                        prev_version,
                                        project['hash'],
                                    )
                                    if not is_ancestor:
                                        mk_error('%s %s receiving %s '
                                                 'is not a descendant of %s' % (
                                                     project['repo'],
                                                     project['hash'],
                                                     release['version'],
                                                     prev_version,
                                                 ))
        prev_version = release['version']
        prev_projects = set(p['repo'] for p in release['projects'])
    # Make sure that new entries have been appended to the file.
    for v, nr in new_releases.items():
        if nr != deliverable_info['releases'][-1]:
            msg = ('new release %s must be listed last, '
                   'with one new release per patch' % nr['version'])
            mk_error(msg)
def main():
    """Check that every listed release version has a git tag.

    Scans deliverable YAML files (explicit arguments, a whole series,
    or the files modified in the latest commit) and reports any
    repo/version pair whose tag is missing from the git server.

    :returns: 1 if any tags are missing, 0 otherwise
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--series', '-s',
        help='release series to scan',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Pick the input set: explicit files, a full series, or the files
    # touched by the most recent commit; fall back to the current
    # default series if nothing was modified.
    if args.input:
        filenames = args.input
    elif args.series:
        filenames = glob.glob('deliverables/%s/*.yaml' % args.series)
    else:
        filenames = gitutils.find_modified_deliverable_files()
    if not filenames:
        print(
            'no modified deliverable files, validating all releases from %s'
            % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')

    errors = []
    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.exists(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # Use safe_load: the bare yaml.load() is deprecated and can
            # construct arbitrary Python objects; deliverable files are
            # plain mappings, so safe loading is sufficient.
            deliverable_info = yaml.safe_load(f.read())
        for release in deliverable_info['releases']:
            for project in release['projects']:
                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                print('%s %s' % (project['repo'], release['version']),
                      end=' ')
                version_exists = gitutils.tag_exists(
                    project['repo'], release['version'],
                )
                if version_exists:
                    print('found')
                else:
                    print('MISSING')
                    errors.append('%s missing tag %s' % (
                        project['repo'],
                        release['version'],
                    ))

    if errors:
        print('\n\n%s errors found' % len(errors))
        for e in errors:
            print(e)

    return 1 if errors else 0
def main():
    """Produce an interactive review report for pending releases.

    For each modified deliverable file: shows governance/team details,
    clones the affected repositories, determines the target branch,
    applies a temporary tag when needed, and prints diffs, logs,
    ancestry information, and generated release notes so a reviewer
    can evaluate the release request.

    :returns: 0 always (findings are informational, printed to stdout)
    """
    if not sys.stdout.encoding:
        # Wrap sys.stdout with a writer that knows how to handle
        # encoding Unicode data.
        import codecs
        wrapped_stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdout = wrapped_stdout

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--no-shortcut', '--force', '-f',
        dest='shortcut',
        default=True,
        action='store_false',
        help='if a tag has been applied, skip the repo',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Set up logging, including making some loggers quiet.
    logging.basicConfig(
        format='%(levelname)7s: %(message)s',
        stream=sys.stdout,
        level=logging.DEBUG,
    )
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        # Honor --no-cleanup so a reviewer can inspect the clones.
        if args.cleanup:
            shutil.rmtree(workdir, True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    gov_data = governance.Governance.from_remote_repo()
    official_repos = set(r.name for r in gov_data.get_repositories())

    all_deliverables = deliverable.Deliverables(
        './deliverables',
        False,
    )

    liaison_data = liaisons.get_liaisons()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        deliv = deliverable.Deliverable.read_file(filename)

        # Series that track a stable branch (anything other than
        # 'independent' or the current default series).
        stable_branch = deliv.series not in ['independent',
                                             defaults.RELEASE]

        # By default assume the project does not use milestones.
        header('Release model')
        print(deliv.model)

        header('Team details')
        if deliv.team:
            team_name = deliv.team
            try:
                team = gov_data.get_team(team_name)
            except ValueError:
                team = None
            if team:
                print('found team %s' % team_name)
                print(' PTL : %(name)s (%(irc)s)' % team.ptl)
                for liaison in liaison_data.get(team.name.lower(), []):
                    print(' Liaison: %(name)s (%(irc)s)' % liaison)
                team_deliv = team.deliverables.get(deliv.name)
                if team_deliv:
                    print('found deliverable %s' % deliv.name)
                    for rn, repo in sorted(team_deliv.repositories.items()):
                        follows_stable_policy = \
                            'stable:follows-policy' in repo.tags
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print(' %s' % t)
                        print('')
                        if stable_branch and follows_stable_policy:
                            banner('Needs Stable Policy Review')
                            print()
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') % (
                               deliv.name, team_name))
                if not deliv.is_independent:
                    # Show other deliverables owned by the team and
                    # included in this series.
                    team_deliv_in_series = all_deliverables.get_deliverables(
                        team.name, deliv.series)
                    if team_deliv_in_series:
                        print('Other {} deliverables in {}:'.format(
                            team.name, deliv.series))
                        for d in team_deliv_in_series:
                            print(' {} ({})'.format(d.name,
                                                    d.latest_release or None))
                        print()
            else:
                print('no team %r found, cannot report on governance status'
                      % team_name)
        else:
            print('no team name given, cannot report on governance status')

        # If there are no releases listed, this is probably a new
        # deliverable file for initializing a new series. We don't
        # need to list its changes.
        if not deliv.is_released:
            header('No releases')
            print('no releases were found, assuming an initialization file')
            continue

        # assume the releases are in order and take the last one
        new_release = deliv.releases[-1]

        for project in new_release.projects:
            tag_exists = gitutils.tag_exists(
                project.repo.name,
                new_release.version,
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project.repo.name, new_release.version))
                if args.shortcut:
                    print('skipping further processing')
                    continue

            if project.repo.is_retired:
                print('%s is retired' % (project.repo.name, ))
                if args.shortcut:
                    print('skipping further processing')
                    continue

            # Start by checking out master, always. We need the repo
            # checked out before we can tell if the stable branch
            # really exists.
            gitutils.clone_repo(
                workdir,
                project.repo.name,
                branch='master',
            )

            # Set some git configuration values to allow us to perform
            # local operations like tagging.
            gitutils.ensure_basic_git_config(
                workdir, project.repo.name,
                {'user.email': '*****@*****.**',
                 'user.name': 'OpenStack Proposal Bot'},
            )

            # Determine which branch we should actually be looking
            # at. Assume any series for which there is no stable
            # branch will be on 'master'.
            if gitutils.stable_branch_exists(workdir,
                                             project.repo.name,
                                             deliv.series):
                branch = 'stable/' + deliv.series
            else:
                branch = 'master'

            if branch != 'master':
                # Check out the repo again to the right branch if we
                # didn't get it the first time.
                gitutils.clone_repo(
                    workdir,
                    project.repo.name,
                    branch=branch,
                )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project.repo.name,
                '{}^'.format(project.hash),
                always=False,
            )
            try:
                previous_release = deliv.get_release(previous_tag)
            except ValueError:
                previous_release = None

            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x.repo.name: x
                    for x in previous_release.projects
                }.get(project.repo.name)
                if previous_project is not None:
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project.hash)
            else:
                git_range = project.hash

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release.version)
            print('\ngit describe %s\n' % project.hash)
            try:
                subprocess.check_call(
                    ['git', 'describe', project.hash],
                    cwd=os.path.join(workdir, project.repo.name),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project.repo.name,
                title='Check existing tags',
                ref=project.hash,
            )

            git_list_existing_branches(
                workdir=workdir,
                repo=project.repo.name,
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project.repo.name,
                title='Branches containing commit',
                commit=project.hash,
            )

            header('Relationship to HEAD')
            if deliv.is_independent:
                if branches:
                    tag_branch = branches[0]
                else:
                    tag_branch = branch
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project.repo.name,
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                if (branch in branches) or (not branches):
                    tag_branch = branch
                else:
                    tag_branch = branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project.repo.name,
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))

            requested_sha = gitutils.sha_for_tag(
                workdir,
                project.repo.name,
                project.hash,
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir, project.repo.name,
                        'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            show_watched_queries(branch, project.repo.name)

            # Show any requirements changes in the upcoming release.
            # Include setup.cfg, in case the project uses "extras".
            if start_range:
                git_diff(workdir, project.repo.name, git_range,
                         '*requirements*.txt',
                         'Requirements Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range,
                         'doc/requirements.txt',
                         'Doc Requirements Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range,
                         'setup.cfg',
                         'setup.cfg Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range,
                         'bindep.txt',
                         'bindep.txt Changes %s' % git_range)

            # Before we try to determine if the previous release
            # is an ancestor or produce the release notes we need
            # the tag to exist in the local repository.
            if not tag_exists:
                header('Applying Temporary Tag')
                print('\ngit tag {version} {hash}'.format(
                    version=new_release.version,
                    hash=project.hash,
                ))
                subprocess.check_call(
                    ['git', 'tag', new_release.version,
                     project.hash],
                    cwd=os.path.join(workdir, project.repo.name),
                )

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                previous_tag_exists = gitutils.tag_exists(
                    project.repo.name,
                    previous_release.version,
                )
            if previous_tag_exists:
                git_log(
                    workdir, project.repo.name,
                    'Patches in previous release but not in this one',
                    [previous_release.version,
                     '--not',
                     project.hash],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )
                # The tag will have been added as a local tag above if
                # it does not already exist.
                header('New release %s includes previous release %s' %
                       (new_release.version, previous_release.version))
                print('\ngit tag --contains %s\n' % previous_release.version)
                containing_tags = subprocess.check_output(
                    ['git', 'tag',
                     '--contains',
                     previous_release.version],
                    cwd=os.path.join(workdir, project.repo.name),
                ).decode('utf-8').split()
                print('Containing tags:', containing_tags)
                if new_release.version not in containing_tags:
                    print('WARNING: Missing %s' % new_release.version)
                else:
                    print('Found new version %s' % new_release.version)
                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project.repo.name,
                    previous_release.version,
                    project.hash,
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir, project.repo.name,
                    'Release %s will include' % new_release.version,
                    git_range,
                    extra_args=['--graph', '--oneline', '--decorate',
                                '--topo-order'])
            git_log(workdir, project.repo.name,
                    'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # The tag will have been added as a local tag above if it does
            # not already exist.
            header('Release Notes')
            try:
                notes = release_notes.generate_release_notes(
                    repo=project.repo.name,
                    repo_path=os.path.join(workdir, project.repo.name),
                    start_revision=new_release.diff_start or
                    start_range or '',
                    end_revision=new_release.version,
                    show_dates=True,
                    skip_requirement_merges=True,
                    is_stable=branch.startswith('stable/'),
                    series=deliv.series,
                    email='*****@*****.**',
                    email_from='*****@*****.**',
                    email_reply_to='*****@*****.**',
                    email_tags='',
                    include_pypi_link=False,
                    changes_only=False,
                    first_release=deliv.is_first_release,
                    deliverable_file=filename,
                    description='',
                    publishing_dir_name=project.repo.name,
                )
            except Exception as e:
                logging.exception('Failed to produce release notes')
            else:
                print('\n')
                print(notes)

            if 'library' in deliv.type:
                show_dependency_listings(
                    project.guess_sdist_name(),
                    official_repos,
                )

    return 0