# NOTE: the imports below are reconstructed for readability; the
# 'openstack_releases' package paths are an assumption based on how the
# helper modules (gitutils, governance, defaults, etc.) are referenced
# in the code that follows.
import argparse
import atexit
import glob
import os
import shutil
import subprocess
import sys
import tempfile

import requests
import yaml

from openstack_releases import defaults
from openstack_releases import gitutils
from openstack_releases import governance
from openstack_releases import project_config
from openstack_releases import puppetutils
from openstack_releases import pythonutils
from openstack_releases import versionutils


def validate_feature_branches(deliverable_info, workdir, mk_warning,
                              mk_error):
    "Apply the rules for feature branches."
    branches = deliverable_info.get('branches', [])
    for branch in branches:
        prefix, series = branch['name'].split('/')
        if prefix != 'feature':
            continue
        location = branch['location']
        if not isinstance(location, dict):
            mk_error(
                ('branch location for %s is '
                 'expected to be a mapping but got a %s' % (
                     branch['name'], type(location)))
            )
            # The other rules aren't going to be testable, so skip them.
            continue
        for repo, loc in sorted(location.items()):
            if not is_a_hash(loc):
                mk_error(
                    ('feature branches should be created from commits by SHA '
                     'but location %s for branch %s of %s does not look '
                     'like a SHA' % (
                         (loc, repo, branch['name'])))
                )
            if not gitutils.commit_exists(repo, loc):
                mk_error(
                    ('feature branches should be created from merged commits '
                     'but location %s for branch %s of %s does not exist' % (
                         (loc, repo, branch['name'])))
                )


def validate_driverfixes_branches(deliverable_info, workdir, mk_warning,
                                  mk_error):
    "Apply the rules for driverfixes branches."
    known_series = sorted(
        list(d for d in os.listdir('deliverables')
             if not d.startswith('_')))
    branches = deliverable_info.get('branches', [])
    for branch in branches:
        prefix, series = branch['name'].split('/')
        if prefix != 'driverfixes':
            continue
        location = branch['location']
        if series not in known_series:
            mk_error(('driverfixes branches must be named for known series '
                      'but %s was not found in %s' % (
                          branch['name'], known_series)))
        if not isinstance(location, dict):
            mk_error(('branch location for %s is '
                      'expected to be a mapping but got a %s' % (
                          branch['name'], type(location))))
            # The other rules aren't going to be testable, so skip them.
            continue
        for repo, loc in sorted(location.items()):
            if not is_a_hash(loc):
                mk_error((
                    'driverfixes branches should be created from commits '
                    'by SHA but location %s for branch %s of %s does not '
                    'look like a SHA' % ((loc, repo, branch['name']))))
            if not gitutils.commit_exists(repo, loc):
                mk_error((
                    'driverfixes branches should be created from merged '
                    'commits but location %s for branch %s of %s does not '
                    'exist' % ((loc, repo, branch['name']))))


def validate_feature_branches(deliverable_info, workdir, mk_warning,
                              mk_error):
    "Apply the rules for feature branches."
    branches = deliverable_info.get('branches', [])
    for branch in branches:
        prefix, series = branch['name'].split('/')
        if prefix != 'feature':
            continue
        location = branch['location']
        if not isinstance(location, dict):
            mk_error(('branch location for %s is '
                      'expected to be a mapping but got a %s' % (
                          branch['name'], type(location))))
            # The other rules aren't going to be testable, so skip them.
            continue
        for repo, loc in sorted(location.items()):
            if not is_a_hash(loc):
                mk_error(
                    ('feature branches should be created from commits by SHA '
                     'but location %s for branch %s of %s does not look '
                     'like a SHA' % ((loc, repo, branch['name']))))
            if not gitutils.commit_exists(repo, loc):
                mk_error(
                    ('feature branches should be created from merged commits '
                     'but location %s for branch %s of %s does not exist' % (
                         (loc, repo, branch['name']))))


def validate_driverfixes_branches(deliverable_info, workdir, mk_warning,
                                  mk_error):
    "Apply the rules for driverfixes branches."
    known_series = sorted(list(
        d for d in os.listdir('deliverables')
        if not d.startswith('_')
    ))
    branches = deliverable_info.get('branches', [])
    for branch in branches:
        prefix, series = branch['name'].split('/')
        if prefix != 'driverfixes':
            continue
        location = branch['location']
        if series not in known_series:
            mk_error(
                ('driverfixes branches must be named for known series '
                 'but %s was not found in %s' % (
                     branch['name'], known_series))
            )
        if not isinstance(location, dict):
            mk_error(
                ('branch location for %s is '
                 'expected to be a mapping but got a %s' % (
                     branch['name'], type(location)))
            )
            # The other rules aren't going to be testable, so skip them.
            continue
        for repo, loc in sorted(location.items()):
            if not is_a_hash(loc):
                mk_error(
                    ('driverfixes branches should be created from commits '
                     'by SHA but location %s for branch %s of %s does not '
                     'look like a SHA' % (
                         (loc, repo, branch['name'])))
                )
            if not gitutils.commit_exists(repo, loc):
                mk_error(
                    ('driverfixes branches should be created from merged '
                     'commits but location %s for branch %s of %s does not '
                     'exist' % (
                         (loc, repo, branch['name'])))
                )
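

# NOTE: is_a_hash() is used by the branch validators above but is not
# defined in this file. The sketch below is an assumption about its
# behaviour (match a full, lowercase 40-character git SHA-1); the real
# helper may accept abbreviated hashes or differ in other details.
import re

_SHA_RE = re.compile('^[a-f0-9]{40}$', re.IGNORECASE)


def is_a_hash(val):
    # Return True if the value looks like a full git commit hash.
    return _SHA_RE.match(str(val)) is not None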


def main():
    if not sys.stdout.encoding:
        # Wrap sys.stdout with a writer that knows how to handle
        # encoding Unicode data.
        import codecs
        wrapped_stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdout = wrapped_stdout

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    team_data = governance.get_team_data()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        with open(filename, 'r') as f:
            deliverable_info = yaml.safe_load(f.read())

        series = os.path.basename(
            os.path.dirname(
                os.path.abspath(filename)
            )
        )
        if series == '_independent':
            default_model = 'independent'
        else:
            default_model = 'no release model specified'

        # By default assume the project does not use milestones.
        header('Release model')
        print(deliverable_info.get('release-model', default_model))

        header('Team details')
        if 'team' in deliverable_info:
            team_name = deliverable_info['team']
            team_dict = team_data.get(team_name)
            if team_dict:
                team = governance.Team(team_name, team_dict)
                print('found team %s' % team_name)
                print('  PTL: %(name)s (%(irc)s)\n' % team.ptl)
                deliverable_name = os.path.basename(filename)[:-5]  # remove .yaml
                deliverable = team.deliverables.get(deliverable_name)
                if deliverable:
                    print('found deliverable %s' % deliverable_name)
                    for rn, repo in sorted(deliverable.repositories.items()):
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print('  %s' % t)
                        print('')
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') %
                          (deliverable_name, team_name))
            else:
                print('no team %r found, cannot report on governance status' %
                      team_name)
        else:
            print('no team name given, cannot report on governance status')

        if series == defaults.RELEASE:
            branch = 'master'
        else:
            branch = 'stable/' + series

        # If there are no releases listed, this is probably a new
        # deliverable file for initializing a new series. We don't
        # need to list its changes.
        if not deliverable_info.get('releases'):
            header('No releases')
            print('no releases were found, assuming an initialization file')
            continue

        # assume the releases are in order and take the last one
        new_release = deliverable_info['releases'][-1]

        # build a map between version numbers and the release details
        by_version = {
            str(r['version']): r
            for r in deliverable_info['releases']
        }

        for project in new_release['projects']:
            tag_exists = gitutils.commit_exists(
                project['repo'],
                new_release['version'],
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project['repo'], new_release['version']))

            # Check out the code.
            print('\nChecking out repository {}'.format(project['repo']))
            subprocess.check_call(
                ['zuul-cloner',
                 '--branch', branch,
                 '--workspace', workdir,
                 'git://git.openstack.org',
                 project['repo'],
                 ]
            )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project['repo'],
                '{}^'.format(project['hash'])
            )
            previous_release = by_version.get(previous_tag)

            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x['repo']: x
                    for x in previous_release['projects']
                }.get(project['repo'])
                if previous_project is not None:
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project['hash'])
            else:
                git_range = project['hash']

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release['version'])
            print('\ngit describe %s\n' % project['hash'])
            try:
                subprocess.check_call(
                    ['git', 'describe', project['hash']],
                    cwd=os.path.join(workdir, project['repo']),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project['repo'],
                title='Check existing tags',
                ref=project['hash'],
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project['repo'],
                title='Branches containing commit',
                commit=project['hash'],
            )

            header('Relationship to HEAD')
            if series == '_independent':
                interesting_branches = sorted(
                    b for b in branches
                    if '->' not in b
                )
                tag_branch = interesting_branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    'HEAD',
                )
                print('HEAD of {} is {}'.format(branch, head_sha))
                tag_branch = branch

            requested_sha = gitutils.sha_for_tag(
                workdir,
                project['repo'],
                project['hash'],
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir, project['repo'], 'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            # Show any requirements changes in the upcoming release.
            # Include setup.cfg, in case the project uses "extras".
            if start_range:
                git_diff(workdir, project['repo'], git_range,
                         '*requirements*.txt')
                git_diff(workdir, project['repo'], git_range, 'setup.cfg')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir, project['repo'],
                    'Release %s will include' % new_release['version'],
                    git_range,
                    extra_args=['--graph', '--oneline', '--decorate',
                                '--topo-order'])
            git_log(workdir, project['repo'], 'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                # commit_exists() expects a ref, so pass the version
                # string rather than the whole release mapping.
                previous_tag_exists = gitutils.commit_exists(
                    project['repo'],
                    previous_release['version'],
                )
            if previous_tag_exists:
                git_log(
                    workdir, project['repo'],
                    'Patches in previous release but not in this one',
                    [project['hash'],
                     '--not',
                     previous_release['version']],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )
                header('New release %s includes previous release %s' %
                       (new_release['version'], previous_release['version']))
                if not tag_exists:
                    subprocess.check_call(
                        ['git', 'tag', new_release['version'],
                         project['hash']],
                        cwd=os.path.join(workdir, project['repo']),
                    )
                print('\ngit tag --contains %s\n' %
                      previous_release['version'])
                # Decode so the version strings compare as text.
                containing_tags = subprocess.check_output(
                    ['git', 'tag',
                     '--contains',
                     previous_release['version']],
                    cwd=os.path.join(workdir, project['repo']),
                ).decode('utf-8').split()
                print('Containing tags:', containing_tags)
                if new_release['version'] not in containing_tags:
                    print('WARNING: Missing %s' % new_release['version'])
                else:
                    print('Found new version %s' % new_release['version'])

                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project['repo'],
                    previous_release['version'],
                    project['hash'],
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

    return 0
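

# NOTE: header(), git_log(), git_diff(), git_show() and
# git_branch_contains() are reporting helpers used by main() above but not
# reproduced in this file. The two sketches below are assumptions about
# their behaviour (print a section banner, then shell out to git in the
# cloned repository); the real implementations may differ.
def header(title):
    # Print a banner so the different sections of the report stand out.
    print('\n%s' % title)
    print('-' * len(title))


def git_log(workdir, repo, title, git_range, extra_args=()):
    # Print 'git log' output for the given range (a string or a list of
    # arguments) under a section header.
    header('%s %s' % (title, git_range))
    cmd = ['git', 'log', '--no-color']
    cmd.extend(extra_args)
    if isinstance(git_range, str):
        cmd.append(git_range)
    else:
        cmd.extend(git_range)
    subprocess.check_call(cmd, cwd=os.path.join(workdir, repo))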


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    team_data = governance.get_team_data()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        with open(filename, 'r') as f:
            deliverable_info = yaml.safe_load(f.read())

        # By default assume the project does not use milestones.
        uses_milestones = False

        header('Team details')
        if 'team' in deliverable_info:
            team_name = deliverable_info['team']
            team_dict = team_data.get(team_name)
            if team_dict:
                team = governance.Team(team_name, team_dict)
                print('found team %s' % team_name)
                print('  PTL: %(name)s (%(irc)s)\n' % team.ptl)
                deliverable_name = os.path.basename(filename)[:-5]  # remove .yaml
                deliverable = team.deliverables.get(deliverable_name)
                if deliverable:
                    print('found deliverable %s' % deliverable_name)
                    for rn, repo in sorted(deliverable.repositories.items()):
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print('  %s' % t)
                        print('')
                        uses_milestones = ('release:cycle-with-milestones'
                                           in repo.tags)
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') %
                          (deliverable_name, team_name))
            else:
                print('no team %r found, cannot report on governance status' %
                      team_name)
        else:
            print('no team name given, cannot report on governance status')
        if uses_milestones:
            print('uses milestones')

        series = os.path.basename(
            os.path.dirname(
                os.path.abspath(filename)
            )
        )
        if series == defaults.RELEASE:
            branch = 'master'
        else:
            branch = 'stable/' + series

        # assume the releases are in order and take the last one
        new_release = deliverable_info['releases'][-1]

        # Warn if the new release looks like a milestone release but
        # the project does not use milestones.
        if not uses_milestones:
            for pre_indicator in ['a', 'b', 'rc']:
                if pre_indicator in new_release['version']:
                    print(('WARNING: %s looks like a pre-release '
                           'but %s does not use milestones') %
                          (new_release['version'], deliverable_name))

        # build a map between version numbers and the release details
        by_version = {
            str(r['version']): r
            for r in deliverable_info['releases']
        }

        for project in new_release['projects']:
            tag_exists = gitutils.commit_exists(
                project['repo'],
                new_release['version'],
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project['repo'], new_release['version']))

            # Check out the code.
            print('\nChecking out repository {}'.format(project['repo']))
            subprocess.check_call(
                ['zuul-cloner',
                 '--branch', branch,
                 '--workspace', workdir,
                 'git://git.openstack.org',
                 project['repo'],
                 ]
            )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project['repo'],
                '{}^'.format(project['hash'])
            )
            previous_release = by_version.get(previous_tag)

            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x['repo']: x
                    for x in previous_release['projects']
                }.get(project['repo'])
                if previous_project is not None:
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project['hash'])
            else:
                git_range = project['hash']

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release['version'])
            print('\ngit describe %s\n' % project['hash'])
            try:
                subprocess.check_call(
                    ['git', 'describe', project['hash']],
                    cwd=os.path.join(workdir, project['repo']),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project['repo'],
                title='Check existing tags',
                ref=project['hash'],
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project['repo'],
                title='Branches containing commit',
                commit=project['hash'],
            )

            header('Relationship to HEAD')
            if series == '_independent':
                interesting_branches = sorted(
                    b for b in branches
                    if '->' not in b
                )
                tag_branch = interesting_branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    'HEAD',
                )
                print('HEAD of {} is {}'.format(branch, head_sha))
                tag_branch = branch

            requested_sha = gitutils.sha_for_tag(
                workdir,
                project['repo'],
                project['hash'],
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir, project['repo'], 'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            # Show any requirements changes in the upcoming release.
            if start_range:
                git_diff(workdir, project['repo'], git_range,
                         '*requirements*.txt')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir, project['repo'],
                    'Release %s will include' % new_release['version'],
                    git_range,
                    extra_args=['--graph', '--oneline', '--decorate',
                                '--topo-order'])
            git_log(workdir, project['repo'], 'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                # commit_exists() expects a ref, so pass the version
                # string rather than the whole release mapping.
                previous_tag_exists = gitutils.commit_exists(
                    project['repo'],
                    previous_release['version'],
                )
            if previous_tag_exists:
                git_log(
                    workdir, project['repo'],
                    'Patches in previous release but not in this one',
                    [project['hash'],
                     '--not',
                     previous_release['version']],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )
                header('New release %s includes previous release %s' %
                       (new_release['version'], previous_release['version']))
                if not tag_exists:
                    subprocess.check_call(
                        ['git', 'tag', new_release['version'],
                         project['hash']],
                        cwd=os.path.join(workdir, project['repo']),
                    )
                print('\ngit tag --contains %s\n' %
                      previous_release['version'])
                # Decode so the version strings compare as text.
                containing_tags = subprocess.check_output(
                    ['git', 'tag',
                     '--contains',
                     previous_release['version']],
                    cwd=os.path.join(workdir, project['repo']),
                ).decode('utf-8').split()
                print('Containing tags:', containing_tags)
                if new_release['version'] not in containing_tags:
                    print('WARNING: Missing %s' % new_release['version'])
                else:
                    print('Found new version %s' % new_release['version'])

                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project['repo'],
                    previous_release['version'],
                    project['hash'],
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

    return 0


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\nChecking %s' % filename)
        with open(filename, 'r') as f:
            deliverable_info = yaml.safe_load(f.read())

        series = os.path.basename(
            os.path.dirname(
                os.path.abspath(filename)
            )
        )
        if series == defaults.RELEASE:
            branch = 'master'
        else:
            branch = 'stable/' + series

        # assume the releases are in order and take the last two
        new_release = deliverable_info['releases'][-1]
        if len(deliverable_info['releases']) >= 2:
            previous_release = deliverable_info['releases'][-2]
        else:
            previous_release = None

        for project in new_release['projects']:
            if gitutils.commit_exists(project['repo'],
                                      new_release['version']):
                print('%s %s exists already' %
                      (project['repo'], new_release['version']))
                continue

            # Check out the code.
            subprocess.check_call(
                ['zuul-cloner',
                 '--branch', branch,
                 '--workspace', workdir,
                 'git://git.openstack.org',
                 project['repo'],
                 ]
            )

            start_range = (previous_release['projects'][0]['hash']
                           if previous_release
                           else None)
            if not start_range:
                start_range = (
                    gitutils.get_latest_tag(workdir, project['repo']) or None
                )

            if start_range:
                git_range = '%s..%s' % (start_range, project['hash'])
            else:
                git_range = project['hash']

            # Show the changes since the last release.
            git_log(workdir, project['repo'],
                    'Release %s will include' % new_release['version'],
                    git_range)

            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            head_sha = gitutils.sha_for_tag(workdir, project['repo'], 'HEAD')
            requested_sha = gitutils.sha_for_tag(
                workdir,
                project['repo'],
                project['hash'],
            )
            if head_sha == requested_sha:
                print('Request releases from HEAD on %s' % branch)
            else:
                git_log(workdir, project['repo'], 'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha))

            # Show more details about the commit being tagged.
            print()
            print('git describe %s' % project['hash'])
            try:
                subprocess.check_call(
                    ['git', 'describe', project['hash']],
                    cwd=os.path.join(workdir, project['repo']),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

    return 0


def validate_releases(deliverable_info, zuul_layout, series_name, workdir,
                      mk_warning, mk_error):
    """Apply validation rules to the 'releases' list for the deliverable.
    """
    release_model = get_model(deliverable_info, series_name)
    is_independent = (release_model == 'independent')

    # Remember which entries are new so we can verify that they
    # appear at the end of the file.
    new_releases = {}

    release_type = deliverable_info.get('release-type', 'std')
    link_mode = deliverable_info.get('artifact-link-mode', 'tarball')

    prev_version = None
    prev_projects = set()
    for release in deliverable_info.get('releases', []):
        for project in release['projects']:

            # Check for release jobs (if we ship a tarball)
            if link_mode != 'none':
                project_config.require_release_jobs_for_repo(
                    deliverable_info, zuul_layout, project['repo'],
                    release_type, mk_warning, mk_error,
                )

            # Check the SHA specified for the tag.
            print('%s SHA %s ' % (project['repo'], project['hash']))

            if not is_a_hash(project['hash']):
                mk_error(
                    ('%(repo)s version %(version)s release from '
                     '%(hash)r, which is not a hash') % {
                         'repo': project['repo'],
                         'hash': project['hash'],
                         'version': release['version'],
                     }
                )
            else:
                # Report if the SHA exists or not (an error if it
                # does not).
                sha_exists = gitutils.commit_exists(
                    project['repo'], project['hash'],
                )
                if not sha_exists:
                    mk_error('No commit %(hash)r in %(repo)r' % project)
                    # No point in running extra checks if the SHA just
                    # doesn't exist.
                    continue

                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                version_exists = gitutils.tag_exists(
                    project['repo'], release['version'],
                )
                gitutils.clone_repo(workdir, project['repo'])
                if version_exists:
                    actual_sha = gitutils.sha_for_tag(
                        workdir,
                        project['repo'],
                        release['version'],
                    )
                    if actual_sha != project['hash']:
                        mk_error(
                            ('Version %s in %s is on '
                             'commit %s instead of %s') %
                            (release['version'],
                             project['repo'],
                             actual_sha,
                             project['hash']))
                else:
                    print('Found new version {}'.format(release['version']))
                    new_releases[release['version']] = release
                    if prev_projects and project['repo'] not in prev_projects:
                        print('not included in previous release for %s: %s' % (
                            prev_version, ', '.join(sorted(prev_projects))))
                    else:
                        for e in versionutils.validate_version(
                                release['version'],
                                release_type=release_type,
                                pre_ok=(release_model in _USES_PREVER)):
                            msg = ('could not validate version %r: %s' %
                                   (release['version'], e))
                            mk_error(msg)

                        if is_independent:
                            mk_warning('skipping descendant test for '
                                       'independent project, verify '
                                       'branch manually')
                        elif not prev_version:
                            # If this is the first version in the series,
                            # check that the commit is actually on the
                            # targeted branch.
                            if not gitutils.check_branch_sha(workdir,
                                                             project['repo'],
                                                             series_name,
                                                             defaults.RELEASE,
                                                             project['hash']):
                                msg = '%s %s not present in %s branch' % (
                                    project['repo'],
                                    project['hash'],
                                    series_name,
                                )
                                mk_error(msg)
                        else:
                            # Check to see if we are re-tagging the same
                            # commit with a new version.
                            old_sha = gitutils.sha_for_tag(
                                workdir,
                                project['repo'],
                                prev_version,
                            )
                            if old_sha == project['hash']:
                                # FIXME(dhellmann): This needs a test.
                                print('Retagging the SHA with a new version')
                            else:
                                # Check to see if the commit for the new
                                # version is in the ancestors of the
                                # previous release, meaning it is actually
                                # merged into the branch.
                                is_ancestor = gitutils.check_ancestry(
                                    workdir,
                                    project['repo'],
                                    prev_version,
                                    project['hash'],
                                )
                                if not is_ancestor:
                                    mk_error(
                                        '%s %s receiving %s '
                                        'is not a descendant of %s' % (
                                            project['repo'],
                                            project['hash'],
                                            release['version'],
                                            prev_version,
                                        )
                                    )

        prev_version = release['version']
        prev_projects = set(p['repo'] for p in release['projects'])

    # Make sure that new entries have been appended to the file.
    for v, nr in new_releases.items():
        if nr != deliverable_info['releases'][-1]:
            msg = ('new release %s must be listed last, '
                   'with one new release per patch' % nr['version'])
            mk_error(msg)
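

# NOTE: validate_releases() and the branch validators report problems
# through mk_warning/mk_error callbacks instead of printing directly. The
# helper below is a hypothetical sketch of how a caller might build those
# callbacks, collecting messages into lists so the exit code can reflect
# the number of errors; the real driver code is not shown in this file.
def build_recorders(filename, warnings, errors):
    # Return (mk_warning, mk_error) closures that tag each message with
    # the deliverable file being validated and record it for later.
    def mk_warning(msg):
        print('WARNING: {}'.format(msg))
        warnings.append('{}: {}'.format(filename, msg))

    def mk_error(msg):
        print('ERROR: {}'.format(msg))
        errors.append('{}: {}'.format(filename, msg))

    return mk_warning, mk_error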


def validate_releases(deliverable_info, zuul_layout, series_name, workdir,
                      mk_warning, mk_error):
    """Apply validation rules to the 'releases' list for the deliverable.
    """
    release_model = get_model(deliverable_info, series_name)
    is_independent = (release_model == 'independent')

    # Remember which entries are new so we can verify that they
    # appear at the end of the file.
    new_releases = {}

    release_type = deliverable_info.get('release-type', 'std')
    link_mode = deliverable_info.get('artifact-link-mode', 'tarball')

    prev_version = None
    prev_projects = set()
    for release in deliverable_info.get('releases', []):
        print('checking %s' % release['version'])
        for project in release['projects']:

            # Check for release jobs (if we ship a tarball)
            if link_mode != 'none':
                project_config.require_release_jobs_for_repo(
                    deliverable_info, zuul_layout, project['repo'],
                    release_type, mk_warning, mk_error,
                )

            # Check the SHA specified for the tag.
            print('%s SHA %s ' % (project['repo'], project['hash']))

            if not is_a_hash(project['hash']):
                mk_error(
                    ('%(repo)s version %(version)s release from '
                     '%(hash)r, which is not a hash') % {
                         'repo': project['repo'],
                         'hash': project['hash'],
                         'version': release['version'],
                     })
            else:
                # Report if the SHA exists or not (an error if it
                # does not).
                sha_exists = gitutils.commit_exists(
                    project['repo'], project['hash'],
                )
                if not sha_exists:
                    mk_error('No commit %(hash)r in %(repo)r' % project)
                    # No point in running extra checks if the SHA just
                    # doesn't exist.
                    continue

                # Ensure we have a local copy of the repository so we
                # can scan for values that are more difficult to get
                # remotely.
                gitutils.clone_repo(workdir, project['repo'],
                                    project['hash'])

                # Check that the sdist name and tarball-base name match.
                if link_mode == 'tarball':
                    sdist = pythonutils.get_sdist_name(workdir,
                                                       project['repo'])
                    if sdist is not None:
                        expected = project.get(
                            'tarball-base',
                            os.path.basename(project['repo']),
                        )
                        if sdist != expected:
                            if 'tarball-base' in project:
                                action = 'is set to'
                            else:
                                action = 'defaults to'
                            mk_error(('tarball-base for %s %s %s %r '
                                      'but the sdist name is actually %r. ' +
                                      _PLEASE)
                                     % (project['repo'],
                                        release['version'],
                                        action,
                                        expected,
                                        sdist))

                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                version_exists = gitutils.tag_exists(
                    project['repo'], release['version'],
                )
                if version_exists:
                    actual_sha = gitutils.sha_for_tag(
                        workdir,
                        project['repo'],
                        release['version'],
                    )
                    if actual_sha != project['hash']:
                        mk_error(
                            ('Version %s in %s is on '
                             'commit %s instead of %s') %
                            (release['version'],
                             project['repo'],
                             actual_sha,
                             project['hash']))
                else:
                    print('Found new version {}'.format(release['version']))
                    new_releases[release['version']] = release
                    if prev_projects and project['repo'] not in prev_projects:
                        print('not included in previous release for %s: %s' % (
                            prev_version, ', '.join(sorted(prev_projects))))
                    else:
                        for e in versionutils.validate_version(
                                release['version'],
                                release_type=release_type,
                                pre_ok=(release_model in _USES_PREVER)):
                            msg = ('could not validate version %r: %s' %
                                   (release['version'], e))
                            mk_error(msg)

                        # If this is a puppet module, ensure
                        # that the tag and metadata file
                        # match.
                        if puppetutils.looks_like_a_module(workdir,
                                                           project['repo']):
                            puppet_ver = puppetutils.get_version(
                                workdir, project['repo'])
                            if puppet_ver != release['version']:
                                mk_error('%s metadata contains "%s" '
                                         'but is being tagged "%s"' % (
                                             project['repo'],
                                             puppet_ver,
                                             release['version'],
                                         ))

                        if is_independent:
                            mk_warning('skipping descendant test for '
                                       'independent project, verify '
                                       'branch manually')
                        else:
                            # If this is the first version in the series,
                            # check that the commit is actually on the
                            # targeted branch.
                            if not gitutils.check_branch_sha(
                                    workdir,
                                    project['repo'],
                                    series_name,
                                    defaults.RELEASE,
                                    project['hash']):
                                msg = '%s %s not present in %s branch' % (
                                    project['repo'],
                                    project['hash'],
                                    series_name,
                                )
                                mk_error(msg)

                            if prev_version:
                                # Check to see if we are re-tagging the same
                                # commit with a new version.
                                old_sha = gitutils.sha_for_tag(
                                    workdir,
                                    project['repo'],
                                    prev_version,
                                )
                                if old_sha == project['hash']:
                                    # FIXME(dhellmann): This needs a test.
                                    print('Retagging the SHA with '
                                          'a new version')
                                else:
                                    # Check to see if the commit for the new
                                    # version is in the ancestors of the
                                    # previous release, meaning it is
                                    # actually merged into the branch.
                                    is_ancestor = gitutils.check_ancestry(
                                        workdir,
                                        project['repo'],
                                        prev_version,
                                        project['hash'],
                                    )
                                    if not is_ancestor:
                                        mk_error(
                                            '%s %s receiving %s '
                                            'is not a descendant of %s' % (
                                                project['repo'],
                                                project['hash'],
                                                release['version'],
                                                prev_version,
                                            ))

        prev_version = release['version']
        prev_projects = set(p['repo'] for p in release['projects'])

    # Make sure that new entries have been appended to the file.
    for v, nr in new_releases.items():
        if nr != deliverable_info['releases'][-1]:
            msg = ('new release %s must be listed last, '
                   'with one new release per patch' % nr['version'])
            mk_error(msg)


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, validating all releases from %s'
              % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')

    errors = []

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.isfile(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            deliverable_info = yaml.safe_load(f.read())

        # Look for the launchpad project
        try:
            lp_name = deliverable_info['launchpad']
        except KeyError:
            errors.append('No launchpad project given in %s' % filename)
            print('no launchpad project name given')
        else:
            print('launchpad project %s ' % lp_name, end='')
            lp_resp = requests.get('https://api.launchpad.net/1.0/' + lp_name)
            if (lp_resp.status_code // 100) == 4:
                print('MISSING')
                errors.append('Launchpad project %s does not exist' % lp_name)
            else:
                print('found')

        prev_version = None
        for release in deliverable_info['releases']:
            for project in release['projects']:
                print('%s SHA %s ' % (project['repo'], project['hash']),
                      end='')

                if not is_a_hash(project['hash']):
                    print('NOT A SHA HASH')
                    # Format against an explicit mapping; the project
                    # entry itself has no 'version' key.
                    errors.append(
                        ('%(repo)s version %(version)s release from '
                         '%(hash)r, which is not a hash') % {
                             'repo': project['repo'],
                             'hash': project['hash'],
                             'version': release['version'],
                         }
                    )
                else:
                    # Report if the SHA exists or not (an error if it
                    # does not).
                    sha_exists = gitutils.commit_exists(
                        project['repo'], project['hash'],
                    )
                    if not sha_exists:
                        print('MISSING', end='')
                        errors.append('No commit %(hash)r in %(repo)r'
                                      % project)
                    else:
                        print('found ', end='')
                    # Report if the version has already been
                    # tagged. We expect it to not exist, but neither
                    # case is an error because sometimes we want to
                    # import history and sometimes we want to make new
                    # releases.
                    print('version %s ' % release['version'], end='')
                    version_exists = gitutils.commit_exists(
                        project['repo'], release['version'],
                    )
                    if version_exists:
                        gitutils.clone_repo(workdir, project['repo'])
                        actual_sha = gitutils.sha_for_tag(
                            workdir,
                            project['repo'],
                            release['version'],
                        )
                        if actual_sha == project['hash']:
                            print('found and matches SHA')
                        else:
                            print('found DIFFERENT %r' % actual_sha)
                            errors.append(
                                ('Version %s in %s is on '
                                 'commit %s instead of %s') %
                                (release['version'],
                                 project['repo'],
                                 actual_sha,
                                 project['hash']))
                    else:
                        print('NEW ', end='')
                        if not prev_version:
                            print()
                        else:
                            # Check to see if the commit for the new
                            # version is in the ancestors of the
                            # previous release, meaning it is actually
                            # merged into the branch.
                            is_ancestor = gitutils.check_ancestry(
                                workdir,
                                project['repo'],
                                prev_version,
                                project['hash'],
                            )
                            if is_ancestor:
                                print('SHA found in descendants')
                            else:
                                print('SHA NOT FOUND in descendants')
                                errors.append(
                                    '%s %s is not a descendant of %s' % (
                                        project['repo'],
                                        project['hash'],
                                        prev_version)
                                )
            prev_version = release['version']

    if errors:
        print('\n%s errors found' % len(errors))
        for e in errors:
            print(e)

    return 1 if errors else 0
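

# NOTE: each of the scripts above exposes a main() that returns a process
# exit code. A conventional entry point (an assumption; not part of the
# original file) would look like this:
if __name__ == '__main__':
    sys.exit(main())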