def main():
    """Compare each deliverable's diff-start point against a stable branch.

    For every modified deliverable file (or all files from the current
    series), clone the projects of the latest release and report whether
    the release's ``diff-start`` tag matches the latest tag reachable from
    ``stable/<prev_series>``.

    Returns None; output is printed to stdout.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'prev_series',
        help='previous series name',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to examine, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Default to the files touched by the latest commit; fall back to
    # every deliverable in the current series when nothing was modified.
    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print(
            'no modified deliverable files, validating all releases from %s'
            % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        # Registered with atexit below; second argument True means
        # ignore_errors, so cleanup is best-effort.
        if args.cleanup:
            shutil.rmtree(workdir, True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.isfile(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            deliverable_info = yamlutils.loads(f.read())

        branch = 'stable/' + args.prev_series

        # Skip initialization files that have no releases yet.
        if not deliverable_info.get('releases'):
            print(' no releases')
            continue

        # assume the releases are in order and take the last one
        new_release = deliverable_info['releases'][-1]
        print('version {}'.format(new_release['version']))

        diff_start = new_release.get('diff-start')
        if not diff_start:
            print(' no diff-start')
            continue
        else:
            print(' diff-start: {!r}'.format(diff_start))

        for project in new_release['projects']:
            gitutils.clone_repo(workdir, project['repo'])

            # Find where the stable branch forked; if there is no such
            # branch there is nothing to compare against.
            branch_base = gitutils.get_branch_base(
                workdir, project['repo'], branch,
            )
            if branch_base:
                branch_version = gitutils.get_latest_tag(
                    workdir, project['repo'], branch_base,
                )
                if diff_start == branch_version:
                    print(' SAME')
                else:
                    print(' DIFFERENT {} at {}'.format(
                        branch_version, branch_base))
def main():
    """Produce a detailed change report for the latest release of each file.

    For every modified deliverable file, report the release model, the
    owning governance team, then clone each project and show the commits,
    requirements changes, tags and branch relationships that the new
    release would include.

    Returns 0; results are printed to stdout.
    """
    if not sys.stdout.encoding:
        # Wrap sys.stdout with a writer that knows how to handle
        # encoding Unicode data.
        import codecs
        wrapped_stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdout = wrapped_stdout

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # Best-effort removal; ignore_errors replaces the old
            # bare "except: pass" without swallowing unrelated errors.
            shutil.rmtree(workdir, ignore_errors=True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    team_data = governance.get_team_data()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        with open(filename, 'r') as f:
            # safe_load: deliverable files contain plain data only, and
            # yaml.load() without an explicit Loader is deprecated.
            deliverable_info = yaml.safe_load(f.read())

        # The series name is the directory holding the deliverable file.
        series = os.path.basename(
            os.path.dirname(
                os.path.abspath(filename)
            )
        )
        if series == '_independent':
            default_model = 'independent'
        else:
            default_model = 'no release model specified'

        header('Release model')
        print(deliverable_info.get('release-model', default_model))

        header('Team details')
        if 'team' in deliverable_info:
            team_name = deliverable_info['team']
            team_dict = team_data.get(team_name)
            if team_dict:
                team = governance.Team(team_name, team_dict)
                print('found team %s' % team_name)
                print(' PTL: %(name)s (%(irc)s)\n' % team.ptl)
                deliverable_name = os.path.basename(filename)[:-5]  # remove .yaml
                deliverable = team.deliverables.get(deliverable_name)
                if deliverable:
                    print('found deliverable %s' % deliverable_name)
                    for rn, repo in sorted(deliverable.repositories.items()):
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print(' %s' % t)
                        print('')
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') %
                          (deliverable_name, team_name))
            else:
                print('no team %r found, cannot report on governance status'
                      % team_name)
        else:
            print('no team name given, cannot report on governance status')

        if series == defaults.RELEASE:
            branch = 'master'
        else:
            branch = 'stable/' + series

        # If there are no releases listed, this is probably a new
        # deliverable file for initializing a new series. We don't
        # need to list its changes.
        if not deliverable_info.get('releases'):
            header('No releases')
            print('no releases were found, assuming an initialization file')
            continue

        # assume the releases are in order and take the last one
        new_release = deliverable_info['releases'][-1]

        # build a map between version numbers and the release details
        by_version = {
            str(r['version']): r
            for r in deliverable_info['releases']
        }

        for project in new_release['projects']:
            tag_exists = gitutils.commit_exists(
                project['repo'],
                new_release['version'],
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project['repo'], new_release['version']))

            # Check out the code.
            print('\nChecking out repository {}'.format(project['repo']))
            subprocess.check_call(
                ['zuul-cloner',
                 '--branch', branch,
                 '--workspace', workdir,
                 'git://git.openstack.org',
                 project['repo'],
                 ]
            )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project['repo'],
                '{}^'.format(project['hash'])
            )
            previous_release = by_version.get(previous_tag)

            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x['repo']: x
                    for x in previous_release['projects']
                }.get(project['repo'])
                if previous_project is not None:
                    # NOTE: this keeps the same value today; the lookup
                    # exists so a repo-specific start point could be used.
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project['hash'])
            else:
                git_range = project['hash']

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release['version'])
            print('\ngit describe %s\n' % project['hash'])
            try:
                subprocess.check_call(
                    ['git', 'describe', project['hash']],
                    cwd=os.path.join(workdir, project['repo']),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project['repo'],
                title='Check existing tags',
                ref=project['hash'],
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project['repo'],
                title='Branches containing commit',
                commit=project['hash'],
            )

            header('Relationship to HEAD')
            if series == '_independent':
                # Ignore symbolic-ref entries like "origin/HEAD -> ..."
                interesting_branches = sorted(
                    b for b in branches
                    if '->' not in b
                )
                tag_branch = interesting_branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    'HEAD',
                )
                print('HEAD of {} is {}'.format(branch, head_sha))
                tag_branch = branch
            requested_sha = gitutils.sha_for_tag(
                workdir,
                project['repo'],
                project['hash'],
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir, project['repo'], 'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            # Show any requirements changes in the upcoming release.
            # Include setup.cfg, in case the project uses "extras".
            if start_range:
                git_diff(workdir, project['repo'], git_range,
                         '*requirements*.txt')
                git_diff(workdir, project['repo'], git_range, 'setup.cfg')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir, project['repo'],
                    'Release %s will include' % new_release['version'],
                    git_range,
                    extra_args=['--graph', '--oneline', '--decorate',
                                '--topo-order'])
            git_log(workdir, project['repo'], 'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                # BUG FIX: commit_exists() takes a version string, not
                # the release dict (compare the tag_exists call above).
                previous_tag_exists = gitutils.commit_exists(
                    project['repo'],
                    previous_release['version'],
                )
            if previous_tag_exists:
                git_log(
                    workdir, project['repo'],
                    'Patches in previous release but not in this one',
                    [project['hash'],
                     '--not',
                     previous_release['version']],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )
                header('New release %s includes previous release %s' %
                       (new_release['version'],
                        previous_release['version']))
                if not tag_exists:
                    # Tag locally so "git tag --contains" below can see
                    # the new version.
                    subprocess.check_call(
                        ['git', 'tag', new_release['version'],
                         project['hash']],
                        cwd=os.path.join(workdir, project['repo']),
                    )
                print('\ngit tag --contains %s\n' %
                      previous_release['version'])
                containing_tags = subprocess.check_output(
                    ['git', 'tag',
                     '--contains',
                     previous_release['version']],
                    cwd=os.path.join(workdir, project['repo']),
                ).split()
                print('Containing tags:', containing_tags)
                if new_release['version'] not in containing_tags:
                    print('WARNING: Missing %s' % new_release['version'])
                else:
                    print('Found new version %s' % new_release['version'])
                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project['repo'],
                    previous_release['version'],
                    project['hash'],
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

    return 0
def main():
    """Show the changes that would be included in upcoming releases.

    For every modified deliverable file, clone each project of the latest
    release and list the commits since the previous release, plus any
    unreleased changes on the branch beyond the requested SHA.

    Returns 0; results are printed to stdout.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # Best-effort removal; ignore_errors replaces the old
            # bare "except: pass" without swallowing unrelated errors.
            shutil.rmtree(workdir, ignore_errors=True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\nChecking %s' % filename)
        with open(filename, 'r') as f:
            # safe_load: deliverable files contain plain data only, and
            # yaml.load() without an explicit Loader is deprecated.
            deliverable_info = yaml.safe_load(f.read())

        # The series name is the directory holding the deliverable file.
        series = os.path.basename(
            os.path.dirname(
                os.path.abspath(filename)
            )
        )
        if series == defaults.RELEASE:
            branch = 'master'
        else:
            branch = 'stable/' + series

        # assume the releases are in order and take the last two
        new_release = deliverable_info['releases'][-1]
        if len(deliverable_info['releases']) >= 2:
            previous_release = deliverable_info['releases'][-2]
        else:
            previous_release = None

        for project in new_release['projects']:
            if gitutils.commit_exists(project['repo'],
                                      new_release['version']):
                print('%s %s exists already' %
                      (project['repo'], new_release['version']))
                continue

            # Check out the code.
            subprocess.check_call(
                ['zuul-cloner',
                 '--branch', branch,
                 '--workspace', workdir,
                 'git://git.openstack.org',
                 project['repo'],
                 ]
            )

            # Start from the previous release's hash if there is one,
            # otherwise from the most recent tag in the repository.
            start_range = (previous_release['projects'][0]['hash']
                           if previous_release else None)
            if not start_range:
                start_range = (
                    gitutils.get_latest_tag(workdir, project['repo']) or None
                )

            if start_range:
                git_range = '%s..%s' % (start_range, project['hash'])
            else:
                git_range = project['hash']

            # Show the changes since the last release.
            git_log(workdir, project['repo'],
                    'Release %s will include' % new_release['version'],
                    git_range)

            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            head_sha = gitutils.sha_for_tag(workdir, project['repo'], 'HEAD')
            requested_sha = gitutils.sha_for_tag(
                workdir,
                project['repo'],
                project['hash'],
            )
            if head_sha == requested_sha:
                print('Request releases from HEAD on %s' % branch)
            else:
                git_log(workdir, project['repo'], 'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha))

            # Show more details about the commit being tagged.
            print()
            print('git describe %s' % project['hash'])
            try:
                subprocess.check_call(
                    ['git', 'describe', project['hash']],
                    cwd=os.path.join(workdir, project['repo']),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

    return 0
def main():
    """Report details about upcoming releases, with milestone checks.

    Like the detailed list-changes report, but also detects whether the
    deliverable uses the cycle-with-milestones release model and warns when
    a pre-release version is proposed for a project that does not.

    Returns 0; results are printed to stdout.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # Best-effort removal; ignore_errors replaces the old
            # bare "except: pass" without swallowing unrelated errors.
            shutil.rmtree(workdir, ignore_errors=True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    team_data = governance.get_team_data()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        with open(filename, 'r') as f:
            # safe_load: deliverable files contain plain data only, and
            # yaml.load() without an explicit Loader is deprecated.
            deliverable_info = yaml.safe_load(f.read())

        # By default assume the project does not use milestones.
        uses_milestones = False

        # BUG FIX: compute the deliverable name up front. It used to be
        # assigned only inside the team lookup branch, so the pre-release
        # warning below could raise NameError for files without a team.
        deliverable_name = os.path.basename(filename)[:-5]  # remove .yaml

        header('Team details')
        if 'team' in deliverable_info:
            team_name = deliverable_info['team']
            team_dict = team_data.get(team_name)
            if team_dict:
                team = governance.Team(team_name, team_dict)
                print('found team %s' % team_name)
                print(' PTL: %(name)s (%(irc)s)\n' % team.ptl)
                deliverable = team.deliverables.get(deliverable_name)
                if deliverable:
                    print('found deliverable %s' % deliverable_name)
                    for rn, repo in sorted(deliverable.repositories.items()):
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print(' %s' % t)
                        print('')
                        uses_milestones = ('release:cycle-with-milestones'
                                           in repo.tags)
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') %
                          (deliverable_name, team_name))
            else:
                print('no team %r found, cannot report on governance status'
                      % team_name)
        else:
            print('no team name given, cannot report on governance status')
        if uses_milestones:
            print('uses milestones')

        # The series name is the directory holding the deliverable file.
        series = os.path.basename(
            os.path.dirname(
                os.path.abspath(filename)
            )
        )
        if series == defaults.RELEASE:
            branch = 'master'
        else:
            branch = 'stable/' + series

        # assume the releases are in order and take the last one
        new_release = deliverable_info['releases'][-1]

        # Warn if the new release looks like a milestone release but
        # the project does not use milestones.
        if not uses_milestones:
            for pre_indicator in ['a', 'b', 'rc']:
                if pre_indicator in new_release['version']:
                    print(('WARNING: %s looks like a pre-release '
                           'but %s does not use milestones') %
                          (new_release['version'], deliverable_name))

        # build a map between version numbers and the release details
        by_version = {
            str(r['version']): r
            for r in deliverable_info['releases']
        }

        for project in new_release['projects']:
            tag_exists = gitutils.commit_exists(
                project['repo'],
                new_release['version'],
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project['repo'], new_release['version']))

            # Check out the code.
            print('\nChecking out repository {}'.format(project['repo']))
            subprocess.check_call(
                ['zuul-cloner',
                 '--branch', branch,
                 '--workspace', workdir,
                 'git://git.openstack.org',
                 project['repo'],
                 ]
            )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project['repo'],
                '{}^'.format(project['hash'])
            )
            previous_release = by_version.get(previous_tag)

            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x['repo']: x
                    for x in previous_release['projects']
                }.get(project['repo'])
                if previous_project is not None:
                    # NOTE: this keeps the same value today; the lookup
                    # exists so a repo-specific start point could be used.
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project['hash'])
            else:
                git_range = project['hash']

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release['version'])
            print('\ngit describe %s\n' % project['hash'])
            try:
                subprocess.check_call(
                    ['git', 'describe', project['hash']],
                    cwd=os.path.join(workdir, project['repo']),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project['repo'],
                title='Check existing tags',
                ref=project['hash'],
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project['repo'],
                title='Branches containing commit',
                commit=project['hash'],
            )

            header('Relationship to HEAD')
            if series == '_independent':
                # Ignore symbolic-ref entries like "origin/HEAD -> ..."
                interesting_branches = sorted(
                    b for b in branches
                    if '->' not in b
                )
                tag_branch = interesting_branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    'HEAD',
                )
                print('HEAD of {} is {}'.format(branch, head_sha))
                tag_branch = branch
            requested_sha = gitutils.sha_for_tag(
                workdir,
                project['repo'],
                project['hash'],
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir, project['repo'], 'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            # Show any requirements changes in the upcoming release.
            if start_range:
                git_diff(workdir, project['repo'], git_range,
                         '*requirements*.txt')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir, project['repo'],
                    'Release %s will include' % new_release['version'],
                    git_range,
                    extra_args=['--graph', '--oneline', '--decorate',
                                '--topo-order'])
            git_log(workdir, project['repo'], 'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                # BUG FIX: commit_exists() takes a version string, not
                # the release dict (compare the tag_exists call above).
                previous_tag_exists = gitutils.commit_exists(
                    project['repo'],
                    previous_release['version'],
                )
            if previous_tag_exists:
                git_log(
                    workdir, project['repo'],
                    'Patches in previous release but not in this one',
                    [project['hash'],
                     '--not',
                     previous_release['version']],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )
                header('New release %s includes previous release %s' %
                       (new_release['version'],
                        previous_release['version']))
                if not tag_exists:
                    # Tag locally so "git tag --contains" below can see
                    # the new version.
                    subprocess.check_call(
                        ['git', 'tag', new_release['version'],
                         project['hash']],
                        cwd=os.path.join(workdir, project['repo']),
                    )
                print('\ngit tag --contains %s\n' %
                      previous_release['version'])
                containing_tags = subprocess.check_output(
                    ['git', 'tag',
                     '--contains',
                     previous_release['version']],
                    cwd=os.path.join(workdir, project['repo']),
                ).split()
                print('Containing tags:', containing_tags)
                if new_release['version'] not in containing_tags:
                    print('WARNING: Missing %s' % new_release['version'])
                else:
                    print('Found new version %s' % new_release['version'])
                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project['repo'],
                    previous_release['version'],
                    project['hash'],
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

    return 0
def main():
    """Verify that every release listed in the deliverable files is tagged.

    Scans the selected deliverable files and checks that each release
    version exists as a tag on the git server. A missing tag is recorded
    as an error.

    Returns 1 if any tags are missing, otherwise 0.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--series', '-s',
        help='release series to scan',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Explicit files beat a series scan, which beats "what changed".
    if args.input:
        filenames = args.input
    elif args.series:
        filenames = glob.glob('deliverables/%s/*.yaml' % args.series)
    else:
        filenames = gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, validating all releases from %s'
              % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')

    errors = []

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.exists(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # safe_load: deliverable files contain plain data only, and
            # yaml.load() without an explicit Loader is deprecated.
            deliverable_info = yaml.safe_load(f.read())
        # Use .get() so initialization files without a releases list are
        # simply skipped instead of raising KeyError.
        for release in deliverable_info.get('releases', []):
            for project in release['projects']:
                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                print('%s %s' % (project['repo'], release['version']),
                      end=' ')
                version_exists = gitutils.tag_exists(
                    project['repo'],
                    release['version'],
                )
                if version_exists:
                    print('found')
                else:
                    print('MISSING')
                    errors.append(
                        '%s missing tag %s' % (
                            project['repo'],
                            release['version'],
                        )
                    )

    if errors:
        print('\n\n%s errors found' % len(errors))
        for e in errors:
            print(e)

    return 1 if errors else 0
def main():
    """Validate the schema and content of modified deliverable files.

    Runs the full battery of validate_* checks against each modified file,
    collecting warnings and errors; some checks only apply to the current
    series.

    Returns 1 if any errors were found, otherwise 0.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files and no arguments, '
              'skipping validation')
        return 0

    zuul_layout = project_config.get_zuul_layout_data()

    team_data = governance.get_team_data()

    errors = []
    warnings = []

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # Best-effort removal; ignore_errors replaces the old
            # bare "except: pass" without swallowing unrelated errors.
            shutil.rmtree(workdir, ignore_errors=True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.isfile(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # safe_load: deliverable files contain plain data only, and
            # yaml.load() without an explicit Loader is deprecated.
            deliverable_info = yaml.safe_load(f.read())

        # The series name is the directory holding the deliverable file.
        series_name = os.path.basename(
            os.path.dirname(filename)
        )

        # Capture the current filename in the messages so the summary at
        # the end identifies which file each problem belongs to.
        def mk_warning(msg):
            print('WARNING: {}'.format(msg))
            warnings.append('{}: {}'.format(filename, msg))

        def mk_error(msg):
            print('ERROR: {}'.format(msg))
            errors.append('{}: {}'.format(filename, msg))

        validate_launchpad(deliverable_info, mk_warning, mk_error)
        validate_team(deliverable_info, team_data, mk_warning, mk_error)
        validate_release_notes(deliverable_info, mk_warning, mk_error)
        validate_type(deliverable_info, mk_warning, mk_error)
        validate_model(deliverable_info, series_name, mk_warning, mk_error)
        validate_releases(
            deliverable_info,
            zuul_layout,
            series_name,
            workdir,
            mk_warning,
            mk_error,
        )
        # Some rules only apply to the most current release.
        if series_name == defaults.RELEASE:
            validate_new_releases(
                deliverable_info,
                filename,
                team_data,
                mk_warning,
                mk_error,
            )
        validate_branch_prefixes(
            deliverable_info,
            mk_warning,
            mk_error,
        )
        validate_stable_branches(
            deliverable_info,
            mk_warning,
            mk_error,
        )
        validate_feature_branches(
            deliverable_info,
            workdir,
            mk_warning,
            mk_error,
        )
        validate_driverfixes_branches(
            deliverable_info,
            workdir,
            mk_warning,
            mk_error,
        )

    if warnings:
        print('\n\n%s warnings found' % len(warnings))
        for w in warnings:
            print(w)

    if errors:
        print('\n\n%s errors found' % len(errors))
        for e in errors:
            print(e)

    return 1 if errors else 0
def main():
    """Check that release artifacts exist for the listed releases.

    For each selected deliverable file, verify that the git tag exists
    (unless --artifacts limits the scan), that the tarball and its
    signature are published, and that non-pre-release versions are on
    PyPI with both sdist and wheel artifacts.

    Returns 1 if any artifacts are missing, otherwise 0.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--series', '-s',
        help='release series to scan',
    )
    parser.add_argument(
        '--artifacts',
        default=False,
        action='store_true',
        help='only scan the build artifacts',
    )
    parser.add_argument(
        '--all',
        default=False,
        action='store_true',
        help='scan all releases, not just most recent',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Explicit files beat a series scan, which beats "what changed".
    if args.input:
        filenames = args.input
    elif args.series:
        filenames = sorted(glob.glob('deliverables/%s/*.yaml' % args.series))
    else:
        filenames = sorted(gitutils.find_modified_deliverable_files())
    if not filenames:
        print(
            'no modified deliverable files, validating all releases from %s'
            % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')

    errors = []

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.exists(filename):
            print("File was deleted, skipping.")
            continue
        deliv = deliverable.Deliverable.read_file(filename)
        releases = deliv.releases
        if not args.all:
            # Only the most recent release unless asked otherwise.
            releases = releases[-1:]
        for release in releases:
            version = release.version
            for project in release.projects:
                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                print('\n%s %s' % (project.repo.name, version))
                if not args.artifacts:
                    version_exists = gitutils.tag_exists(
                        project.repo.name, version,
                    )
                    if version_exists:
                        print(' found tag')
                    else:
                        print(' did not find tag')
                        errors.append('%s missing tag %s' %
                                      (project.repo.name, version))
                # Look for the tarball associated with the tag and
                # report if that exists.
                if deliv.artifact_link_mode == 'tarball':
                    tb_url = links.tarball_url(version, project)
                    errors.extend(check_signed_file('tarball', tb_url))
                    # Pre-release versions never go to PyPI, so stop here.
                    if 'a' in version or 'b' in version or 'rc' in version:
                        print(' pre-releases are not uploaded to PyPI')
                        continue
                    pypi_name = project.repo.pypi_name
                    if not pypi_name:
                        pypi_name = project.guess_sdist_name()
                    pypi_info = pythonutils.get_pypi_info(pypi_name)
                    if not pypi_info:
                        print(' apparently not a python module')
                        continue
                    wheel_2_errors = list(
                        check_url('python 2 wheel',
                                  links.wheel_py2_url(version, project)))
                    wheel_both_errors = list(
                        check_url('python 2/3 wheel',
                                  links.wheel_both_url(version, project)))
                    # We only expect to find one wheel. Look for both,
                    # and minimize what we report as errors.
                    if wheel_2_errors and wheel_both_errors:
                        # We have neither wheel.
                        errors.extend(wheel_2_errors)
                        errors.extend(wheel_both_errors)
                    elif not wheel_both_errors:
                        # We have the "both" wheel, so check for the
                        # signature file.
                        errors.extend(
                            check_url(
                                'python 2/3 wheel signature',
                                links.wheel_both_url(version, project) +
                                '.asc',
                            ))
                    elif not wheel_2_errors:
                        # We have the py2 wheel, so check for the
                        # signature file.
                        errors.extend(
                            check_url(
                                'python 2 wheel signature',
                                links.wheel_py2_url(version, project) +
                                '.asc',
                            ))
                    if version not in pypi_info.get('releases', {}):
                        msg = ('{} dist with version {} '
                               'not uploaded to PyPI').format(
                                   pypi_name, version)
                        print(' {}'.format(msg))
                        errors.append(msg)
                    else:
                        print(' found version {} on PyPI'.format(version))
                        # Expect both an sdist and a wheel for each
                        # published version.
                        expected_types = set(['bdist_wheel', 'sdist'])
                        actual_types = set(
                            r['packagetype']
                            for r in pypi_info['releases'][version])
                        for actual in actual_types:
                            print(' found {} on PyPI'.format(actual))
                        for missing in expected_types.difference(
                                actual_types):
                            msg = '{} not found on PyPI'.format(missing)
                            print(' {}'.format(msg))
                            errors.append(msg)
    if errors:
        print('\n\n%s errors found' % len(errors))
        for e in errors:
            print(e)

    return 1 if errors else 0
def main():
    """Check that tags, tarballs and signatures exist for listed releases.

    For each selected deliverable file, verify that the git tag exists
    (unless --artifacts limits the scan) and, for tarball-linked
    deliverables, that the tarball and its signature are published.

    Returns 1 if anything is missing, otherwise 0.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--series', '-s',
        help='release series to scan',
    )
    parser.add_argument(
        '--artifacts',
        default=False,
        action='store_true',
        help='only scan the build artifacts',
    )
    parser.add_argument(
        '--all',
        default=False,
        action='store_true',
        help='scan all releases, not just most recent',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Explicit files beat a series scan, which beats "what changed".
    if args.input:
        filenames = args.input
    elif args.series:
        filenames = sorted(glob.glob('deliverables/%s/*.yaml' % args.series))
    else:
        filenames = sorted(gitutils.find_modified_deliverable_files())
    if not filenames:
        print('no modified deliverable files, validating all releases from %s'
              % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')

    errors = []

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.exists(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # safe_load: deliverable files contain plain data only, and
            # yaml.load() without an explicit Loader is deprecated.
            deliverable_info = yaml.safe_load(f.read())
        link_mode = deliverable_info.get('artifact-link-mode', 'tarball')
        releases = deliverable_info.get('releases', [])
        if not args.all:
            # Only the most recent release unless asked otherwise.
            releases = releases[-1:]
        for release in releases:
            for project in release['projects']:
                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                print('%s %s' % (project['repo'], release['version']),
                      end=' ')
                if not args.artifacts:
                    version_exists = gitutils.tag_exists(
                        project['repo'],
                        release['version'],
                    )
                    if version_exists:
                        print('tag:found', end=' ')
                    else:
                        print('tag:MISSING', end=' ')
                        errors.append('%s missing tag %s' %
                                      (project['repo'], release['version']))
                # Look for the tarball associated with the tag and
                # report if that exists.
                if link_mode == 'tarball':
                    tb_url = links.tarball_url(release['version'], project)
                    if links.link_exists(tb_url):
                        print('tarball:found', end=' ')
                    else:
                        print('tarball:MISSING\n%s' % tb_url)
                        errors.append('%s missing tarball %s' %
                                      (filename, tb_url))
                    sig_url = links.signature_url(release['version'],
                                                  project)
                    if links.link_exists(sig_url):
                        print('signature:found', end=' ')
                    else:
                        print('signature:MISSING\n%s' % sig_url)
                        errors.append('%s missing signature %s' %
                                      (filename, sig_url))
                print()

    if errors:
        print('\n\n%s errors found' % len(errors))
        for e in errors:
            print(e)

    return 1 if errors else 0
def main():
    """Run the full set of deliverable-file validations.

    Validates bug tracker, team, release notes, type, release model,
    releases, and branch settings for each modified deliverable file.
    Warnings and errors are accumulated and summarized at the end.

    Returns 1 if any errors were found, 0 otherwise.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files and no arguments, '
              'skipping validation')
        return 0

    zuul_layout = project_config.get_zuul_layout_data()
    team_data = governance.get_team_data()

    errors = []
    warnings = []

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # ignore_errors replaces a bare "except: pass" -- best-effort
            # cleanup without silently swallowing unrelated exceptions.
            shutil.rmtree(workdir, ignore_errors=True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.isfile(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # safe_load: deliverable files are plain data; avoid the
            # unsafe/deprecated Loader-less yaml.load().
            deliverable_info = yaml.safe_load(f.read())

        # Series name comes from the containing directory.
        series_name = os.path.basename(os.path.dirname(filename))

        # NOTE: these closures capture the current filename; they are
        # only used within this loop iteration, so late binding is safe.
        def mk_warning(msg):
            print('WARNING: {}'.format(msg))
            warnings.append('{}: {}'.format(filename, msg))

        def mk_error(msg):
            print('ERROR: {}'.format(msg))
            errors.append('{}: {}'.format(filename, msg))

        validate_bugtracker(deliverable_info, mk_warning, mk_error)
        validate_team(deliverable_info, team_data, mk_warning, mk_error)
        validate_release_notes(deliverable_info, mk_warning, mk_error)
        validate_type(deliverable_info, mk_warning, mk_error)
        validate_model(deliverable_info, series_name, mk_warning, mk_error)
        validate_releases(
            deliverable_info,
            zuul_layout,
            series_name,
            workdir,
            mk_warning,
            mk_error,
        )
        # Some rules only apply to the most current release.
        if series_name == defaults.RELEASE:
            validate_new_releases(
                deliverable_info,
                filename,
                team_data,
                mk_warning,
                mk_error,
            )
            validate_series_open(
                deliverable_info,
                series_name,
                filename,
                mk_warning,
                mk_error,
            )
        validate_series_first(
            deliverable_info,
            series_name,
            mk_warning,
            mk_error,
        )
        validate_branch_prefixes(
            deliverable_info,
            mk_warning,
            mk_error,
        )
        validate_stable_branches(
            deliverable_info,
            series_name,
            mk_warning,
            mk_error,
        )
        validate_feature_branches(
            deliverable_info,
            workdir,
            mk_warning,
            mk_error,
        )
        validate_driverfixes_branches(
            deliverable_info,
            workdir,
            mk_warning,
            mk_error,
        )

    if warnings:
        print('\n\n%s warnings found' % len(warnings))
        for w in warnings:
            print(w)

    if errors:
        print('\n\n%s errors found' % len(errors))
        for e in errors:
            print(e)

    return 1 if errors else 0
def main(): parser = argparse.ArgumentParser() parser.add_argument( '--series', '-s', help='release series to scan', ) parser.add_argument( 'input', nargs='*', help=('YAML files to validate, defaults to ' 'files changed in the latest commit'), ) args = parser.parse_args() if args.input: filenames = args.input elif args.series: filenames = glob.glob('deliverables/%s/*.yaml' % args.series) else: filenames = gitutils.find_modified_deliverable_files() if not filenames: print( 'no modified deliverable files, validating all releases from %s' % defaults.RELEASE) filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml') errors = [] for filename in filenames: print('\nChecking %s' % filename) if not os.path.exists(filename): print("File was deleted, skipping.") continue with open(filename, 'r') as f: deliverable_info = yaml.load(f.read()) for release in deliverable_info['releases']: for project in release['projects']: # Report if the version has already been # tagged. We expect it to not exist, but neither # case is an error because sometimes we want to # import history and sometimes we want to make new # releases. print('%s %s' % (project['repo'], release['version']), end=' ') version_exists = gitutils.tag_exists( project['repo'], release['version'], ) if version_exists: print('found') else: print('MISSING') errors.append('%s missing tag %s' % ( project['repo'], release['version'], )) if errors: print('\n\n%s errors found' % len(errors)) for e in errors: print(e) return 1 if errors else 0
def main():
    """Validate release SHAs and launchpad settings in deliverable files.

    For each deliverable file: verify the launchpad project exists,
    verify each project hash looks like a SHA and exists in the repo,
    and check that already-tagged versions point at the expected commit
    and that new versions descend from the previous release.

    Returns 1 if any errors were found, 0 otherwise.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, validating all releases from %s'
              % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')

    errors = []

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # ignore_errors replaces a bare "except: pass" -- best-effort
            # cleanup without silently swallowing unrelated exceptions.
            shutil.rmtree(workdir, ignore_errors=True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.isfile(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # safe_load: plain-data files; avoid deprecated yaml.load().
            deliverable_info = yaml.safe_load(f.read())

        # Look for the launchpad project
        try:
            lp_name = deliverable_info['launchpad']
        except KeyError:
            errors.append('No launchpad project given in %s' % filename)
            print('no launchpad project name given')
        else:
            print('launchpad project %s ' % lp_name, end='')
            lp_resp = requests.get('https://api.launchpad.net/1.0/' + lp_name)
            # Any 4xx response means the project does not exist.
            if (lp_resp.status_code // 100) == 4:
                print('MISSING')
                errors.append('Launchpad project %s does not exist' % lp_name)
            else:
                print('found')

        prev_version = None
        for release in deliverable_info.get('releases', []):
            for project in release['projects']:
                print('%s SHA %s ' % (project['repo'], project['hash']),
                      end='')

                if not is_a_hash(project['hash']):
                    print('NOT A SHA HASH')
                    # The project mapping has no 'version' key, so the
                    # format string needs it supplied from the release
                    # to avoid a KeyError while reporting the error.
                    errors.append(
                        ('%(repo)s version %(version)s release from '
                         '%(hash)r, which is not a hash') % dict(
                             project, version=release['version'])
                    )
                else:
                    # Report if the SHA exists or not (an error if it
                    # does not).
                    sha_exists = gitutils.commit_exists(
                        project['repo'], project['hash'],
                    )
                    if not sha_exists:
                        print('MISSING', end='')
                        errors.append('No commit %(hash)r in %(repo)r'
                                      % project)
                    else:
                        print('found ', end='')
                    # Report if the version has already been
                    # tagged. We expect it to not exist, but neither
                    # case is an error because sometimes we want to
                    # import history and sometimes we want to make new
                    # releases.
                    print('version %s ' % release['version'], end='')
                    version_exists = gitutils.commit_exists(
                        project['repo'], release['version'],
                    )
                    if version_exists:
                        # The tag exists already; make sure it points
                        # at the commit listed in the file.
                        gitutils.clone_repo(workdir, project['repo'])
                        actual_sha = gitutils.sha_for_tag(
                            workdir,
                            project['repo'],
                            release['version'],
                        )
                        if actual_sha == project['hash']:
                            print('found and matches SHA')
                        else:
                            print('found DIFFERENT %r' % actual_sha)
                            errors.append(
                                ('Version %s in %s is on '
                                 'commit %s instead of %s') %
                                (release['version'],
                                 project['repo'],
                                 actual_sha,
                                 project['hash']))
                    else:
                        print('NEW ', end='')
                        if not prev_version:
                            print()
                        else:
                            # Check to see if the commit for the new
                            # version is in the ancestors of the
                            # previous release, meaning it is actually
                            # merged into the branch.
                            # NOTE(review): assumes check_ancestry can
                            # operate (or clones) even when this repo was
                            # not cloned above -- confirm in gitutils.
                            is_ancestor = gitutils.check_ancestry(
                                workdir,
                                project['repo'],
                                prev_version,
                                project['hash'],
                            )
                            if is_ancestor:
                                print('SHA found in descendants')
                            else:
                                print('SHA NOT FOUND in descendants')
                                errors.append(
                                    '%s %s is not a descendant of %s' % (
                                        project['repo'], project['hash'],
                                        prev_version)
                                )
            prev_version = release['version']

    if errors:
        print('\n%s errors found' % len(errors))
        for e in errors:
            print(e)

    return 1 if errors else 0
def main():
    """Produce a detailed human review report for pending releases.

    For each modified deliverable file, report governance/team details,
    then for each project in the newest release: clone the repo, apply a
    temporary tag if needed, and print commit details, branch
    relationships, requirements diffs, ancestry checks, and generated
    release notes. Intended for interactive review, not automation.
    """
    if not sys.stdout.encoding:
        # Wrap sys.stdout with a writer that knows how to handle
        # encoding Unicode data.
        import codecs
        wrapped_stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdout = wrapped_stdout

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--no-shortcut', '--force', '-f',
        dest='shortcut',
        default=True,
        action='store_false',
        help='if a tag has been applied, skip the repo',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Set up logging, including making some loggers quiet.
    logging.basicConfig(
        format='%(levelname)7s: %(message)s',
        stream=sys.stdout,
        level=logging.DEBUG,
    )
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # Second positional argument is ignore_errors=True:
            # best-effort removal at interpreter exit.
            shutil.rmtree(workdir, True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    gov_data = governance.Governance.from_remote_repo()
    official_repos = set(r.name for r in gov_data.get_repositories())
    all_deliverables = deliverable.Deliverables(
        './deliverables',
        False,
    )

    liaison_data = liaisons.get_liaisons()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        deliv = deliverable.Deliverable.read_file(filename)

        # True for anything that is not independent and not the current
        # development series, i.e. a stable-series deliverable.
        stable_branch = deliv.series not in ['independent', defaults.RELEASE]

        # By default assume the project does not use milestones.
        header('Release model')
        print(deliv.model)

        header('Team details')
        if deliv.team:
            team_name = deliv.team
            try:
                team = gov_data.get_team(team_name)
            except ValueError:
                team = None
            if team:
                print('found team %s' % team_name)
                print(' PTL : %(name)s (%(irc)s)' % team.ptl)
                for liaison in liaison_data.get(team.name.lower(), []):
                    print(' Liaison: %(name)s (%(irc)s)' % liaison)
                team_deliv = team.deliverables.get(deliv.name)
                if team_deliv:
                    print('found deliverable %s' % deliv.name)
                    for rn, repo in sorted(team_deliv.repositories.items()):
                        follows_stable_policy = 'stable:follows-policy' in repo.tags
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print(' %s' % t)
                        print('')
                        # Stable-policy repos need extra reviewer care
                        # when releasing on a stable branch.
                        if stable_branch and follows_stable_policy:
                            banner('Needs Stable Policy Review')
                            print()
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') % (
                               deliv.name, team_name))
                if not deliv.is_independent:
                    # Show other deliverables owned by the team and
                    # included in this series.
                    team_deliv_in_series = all_deliverables.get_deliverables(
                        team.name, deliv.series)
                    if team_deliv_in_series:
                        print('Other {} deliverables in {}:'.format(
                            team.name, deliv.series))
                        for d in team_deliv_in_series:
                            print(' {} ({})'.format(
                                d.name, d.latest_release or None))
                        print()
            else:
                print('no team %r found, cannot report on governance status'
                      % team_name)
        else:
            print('no team name given, cannot report on governance status')

        # If there are no releases listed, this is probably a new
        # deliverable file for initializing a new series. We don't
        # need to list its changes.
        if not deliv.is_released:
            header('No releases')
            print('no releases were found, assuming an initialization file')
            continue

        # assume the releases are in order and take the last one
        new_release = deliv.releases[-1]

        for project in new_release.projects:
            tag_exists = gitutils.tag_exists(
                project.repo.name,
                new_release.version,
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project.repo.name, new_release.version))
                if args.shortcut:
                    print('skipping further processing')
                    continue

            if project.repo.is_retired:
                print('%s is retired' % (project.repo.name, ))
                if args.shortcut:
                    print('skipping further processing')
                    continue

            # Start by checking out master, always. We need the repo
            # checked out before we can tell if the stable branch
            # really exists.
            gitutils.clone_repo(
                workdir,
                project.repo.name,
                branch='master',
            )

            # Set some git configuration values to allow us to perform
            # local operations like tagging.
            gitutils.ensure_basic_git_config(
                workdir, project.repo.name,
                {'user.email': '*****@*****.**',
                 'user.name': 'OpenStack Proposal Bot'},
            )

            # Determine which branch we should actually be looking
            # at. Assume any series for which there is no stable
            # branch will be on 'master'.
            if gitutils.stable_branch_exists(workdir,
                                             project.repo.name,
                                             deliv.series):
                branch = 'stable/' + deliv.series
            else:
                branch = 'master'

            if branch != 'master':
                # Check out the repo again to the right branch if we
                # didn't get it the first time.
                gitutils.clone_repo(
                    workdir,
                    project.repo.name,
                    branch=branch,
                )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project.repo.name,
                '{}^'.format(project.hash),
                always=False,
            )
            try:
                previous_release = deliv.get_release(previous_tag)
            except ValueError:
                previous_release = None

            start_range = previous_tag
            if previous_release:
                # Confirm the previous release actually covered this
                # repo before trusting its tag as the diff start point.
                previous_project = {
                    x.repo.name: x
                    for x in previous_release.projects
                }.get(project.repo.name)
                if previous_project is not None:
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project.hash)
            else:
                git_range = project.hash

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release.version)
            print('\ngit describe %s\n' % project.hash)
            try:
                subprocess.check_call(
                    ['git', 'describe', project.hash],
                    cwd=os.path.join(workdir, project.repo.name),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project.repo.name,
                title='Check existing tags',
                ref=project.hash,
            )

            git_list_existing_branches(
                workdir=workdir,
                repo=project.repo.name,
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project.repo.name,
                title='Branches containing commit',
                commit=project.hash,
            )

            header('Relationship to HEAD')
            if deliv.is_independent:
                # Independent deliverables have no series branch; use
                # whatever branch contains the commit, else the one we
                # checked out.
                if branches:
                    tag_branch = branches[0]
                else:
                    tag_branch = branch
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project.repo.name,
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                # Prefer the expected series branch when the commit is
                # on it (or when no branch info is available).
                if (branch in branches) or (not branches):
                    tag_branch = branch
                else:
                    tag_branch = branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project.repo.name,
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))

            requested_sha = gitutils.sha_for_tag(
                workdir,
                project.repo.name,
                project.hash,
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir, project.repo.name,
                        'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            show_watched_queries(branch, project.repo.name)

            # Show any requirements changes in the upcoming release.
            # Include setup.cfg, in case the project uses "extras".
            if start_range:
                git_diff(workdir, project.repo.name, git_range,
                         '*requirements*.txt',
                         'Requirements Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range,
                         'doc/requirements.txt',
                         'Doc Requirements Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range,
                         'setup.cfg',
                         'setup.cfg Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range,
                         'bindep.txt',
                         'bindep.txt Changes %s' % git_range)

            # Before we try to determine if the previous release
            # is an ancestor or produce the release notes we need
            # the tag to exist in the local repository.
            if not tag_exists:
                header('Applying Temporary Tag')
                print('\ngit tag {version} {hash}'.format(
                    version=new_release.version,
                    hash=project.hash,
                ))
                subprocess.check_call(
                    ['git', 'tag', new_release.version, project.hash],
                    cwd=os.path.join(workdir, project.repo.name),
                )

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                previous_tag_exists = gitutils.tag_exists(
                    project.repo.name,
                    previous_release.version,
                )
            if previous_tag_exists:
                git_log(
                    workdir, project.repo.name,
                    'Patches in previous release but not in this one',
                    [previous_release.version,
                     '--not',
                     project.hash],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )

                # The tag will have been added as a local tag above if
                # it does not already exist.
                header('New release %s includes previous release %s' %
                       (new_release.version, previous_release.version))
                print('\ngit tag --contains %s\n' % previous_release.version)
                containing_tags = subprocess.check_output(
                    ['git', 'tag',
                     '--contains',
                     previous_release.version],
                    cwd=os.path.join(workdir, project.repo.name),
                ).decode('utf-8').split()
                print('Containing tags:', containing_tags)
                if new_release.version not in containing_tags:
                    print('WARNING: Missing %s' % new_release.version)
                else:
                    print('Found new version %s' % new_release.version)

                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project.repo.name,
                    previous_release.version,
                    project.hash,
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir, project.repo.name,
                    'Release %s will include' % new_release.version,
                    git_range,
                    extra_args=['--graph', '--oneline', '--decorate',
                                '--topo-order'])
            git_log(workdir, project.repo.name,
                    'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # The tag will have been added as a local tag above if it does
            # not already exist.
            header('Release Notes')
            try:
                notes = release_notes.generate_release_notes(
                    repo=project.repo.name,
                    repo_path=os.path.join(workdir, project.repo.name),
                    start_revision=new_release.diff_start or start_range or '',
                    end_revision=new_release.version,
                    show_dates=True,
                    skip_requirement_merges=True,
                    is_stable=branch.startswith('stable/'),
                    series=deliv.series,
                    email='*****@*****.**',
                    email_from='*****@*****.**',
                    email_reply_to='*****@*****.**',
                    email_tags='',
                    include_pypi_link=False,
                    changes_only=False,
                    first_release=deliv.is_first_release,
                    deliverable_file=filename,
                    description='',
                    publishing_dir_name=project.repo.name,
                )
            # NOTE(review): 'e' is unused; the traceback is captured by
            # logging.exception. Report continues past note failures.
            except Exception as e:
                logging.exception('Failed to produce release notes')
            else:
                print('\n')
                print(notes)

            if 'library' in deliv.type:
                show_dependency_listings(
                    project.guess_sdist_name(),
                    official_repos,
                )

    return 0
def main(): parser = argparse.ArgumentParser() parser.add_argument( '--no-cleanup', dest='cleanup', default=True, action='store_false', help='do not remove temporary files', ) parser.add_argument( 'prev_series', help='previous series name', ) parser.add_argument( 'input', nargs='*', help=('YAML files to examine, defaults to ' 'files changed in the latest commit'), ) args = parser.parse_args() filenames = args.input or gitutils.find_modified_deliverable_files() if not filenames: print('no modified deliverable files, validating all releases from %s' % defaults.RELEASE) filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml') workdir = tempfile.mkdtemp(prefix='releases-') print('creating temporary files in %s' % workdir) def cleanup_workdir(): if args.cleanup: try: shutil.rmtree(workdir) except: pass else: print('not cleaning up %s' % workdir) atexit.register(cleanup_workdir) for filename in filenames: print('\nChecking %s' % filename) if not os.path.isfile(filename): print("File was deleted, skipping.") continue with open(filename, 'r') as f: deliverable_info = yaml.load(f.read()) branch = 'stable/' + args.prev_series # assume the releases are in order and take the last one new_release = deliverable_info['releases'][-1] print('version {}'.format(new_release['version'])) diff_start = new_release.get('diff-start') if not diff_start: print(' no diff-start') continue else: print(' diff-start: {!r}'.format(diff_start)) for project in new_release['projects']: gitutils.clone_repo(workdir, project['repo']) branch_base = gitutils.get_branch_base( workdir, project['repo'], branch, ) if branch_base: branch_version = gitutils.get_latest_tag( workdir, project['repo'], branch_base, ) if diff_start == branch_version: print(' SAME') else: print(' DIFFERENT {} at {}'.format( branch_version, branch_base))