def main():
    """Print '<deliverable>===<version>' for each deliverable in a series.

    Deliverables without any release are collected and listed at the end.
    Returns 1 when the series directory has no deliverable files.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the name of the release series to scan',
    )
    args = parser.parse_args()

    filenames = sorted(glob.glob('deliverables/' + args.series + '/*.yaml'))
    if not filenames:
        print('no deliverable files found under {}'.format(args.series))
        return 1

    unreleased = []
    for filename in filenames:
        with open(filename, 'r', encoding='utf-8') as f:
            info = yamlutils.loads(f.read())
        name = os.path.splitext(os.path.basename(filename))[0]
        releases = info.get('releases')
        if not releases:
            unreleased.append(name)
            continue
        # assume the releases are in order and take the last one
        latest = releases[-1]
        print('{}==={}'.format(name, latest['version']))

    # print out any deliverables without releases
    if unreleased:
        print('\nMissing releases:\n {}'.format('\n '.join(unreleased)))
    return 0
def validate_one_file(filename, schema_data, debug):
    """Yield '<filename>: <error>' for every schema violation in the file.

    :param filename: path of the YAML file to check
    :param schema_data: parsed schema passed to make_validator_with_date
    :param debug: when true, re-raise the first error as RuntimeError
        so a full traceback is produced
    """
    LOG.info('Checking %s', filename)
    checker = make_validator_with_date(schema_data)
    with open(filename, 'r', encoding='utf-8') as f:
        parsed = yamlutils.loads(f.read())
    for err in checker.iter_errors(parsed):
        LOG.error(err)
        yield '{}: {}'.format(filename, err)
        if debug:
            # fail fast with a traceback when debugging
            raise RuntimeError(err)
def get_team_data(url=PROJECTS_LIST):
    """Return the parsed team data from the governance repository.

    :param url: Optional URL to the location of the projects.yaml
        file. Defaults to the most current version in the public git
        repository.
    """
    response = requests.get(url)
    return yamlutils.loads(response.text)
def main():
    """Round-trip the given YAML files through the serializer to normalize them."""
    parser = argparse.ArgumentParser()
    parser.add_argument('file', nargs='*', help="Yaml file to reformat")
    args = parser.parse_args()
    for filename in args.file:
        print("Reformatting %s" % filename)
        with open(filename, 'rb') as fh:
            contents = yamlutils.loads(fh.read())
        contents = yamlutils.dumps(contents)
        # NOTE(review): the output file is opened in binary mode, yet
        # yamlutils.dumps output is written through text-mode handles
        # elsewhere in this repo and is compared against the str "\n"
        # below -- confirm dumps() returns bytes here, otherwise these
        # writes raise TypeError.
        with open(filename, 'wb') as fh:
            fh.write(contents)
            # ensure the file ends with a trailing newline
            if not contents.endswith("\n"):
                fh.write("\n")
def show_watched_queries(branch, repo):
    """List gerrit patches for every query defined in watched_queries.yml."""
    with open('watched_queries.yml', 'r', encoding='utf-8') as f:
        watched = yamlutils.loads(f.read())
    # per-query templates override the file-level default
    default_template = watched['template']
    for query in watched['queries']:
        list_gerrit_patches(
            query['title'],
            query.get('template', default_template),
            query['query'].format(branch=branch, project=repo),
        )
def read_file(cls, filename):
    """Construct an instance from a deliverable YAML file on disk.

    The series name is taken from the parent directory (minus any
    leading '_', e.g. '_independent') and the deliverable name from
    the file's base name.
    """
    with open(filename, 'r', encoding='utf-8') as f:
        data = yamlutils.loads(f.read())
    parent = os.path.dirname(filename)
    series_name = os.path.basename(parent).lstrip('_')
    deliverable_name = os.path.basename(filename)[:-5]  # strip .yaml
    return cls(
        team=None,  # extracted from the info automatically
        series=series_name,
        name=deliverable_name,
        data=data or {},  # if the file is empty yaml returns None
    )
class TestStableStatus(base.BaseTestCase):
    """Exercise Deliverable.stable_status against canned series data."""

    # One entry per lifecycle phase so each status value is represented.
    # NOTE(review): this YAML body is reconstructed from a collapsed
    # source line -- confirm exact indentation against the original.
    _series_status_data = yamlutils.loads(
        textwrap.dedent('''
        - name: stein
          status: future
          initial-release: 2019-04-11
        - name: rocky
          status: development
          initial-release: 2018-08-30
        - name: queens
          status: maintained
          initial-release: 2018-02-28
        - name: ocata
          status: extended maintenance
          initial-release: 2017-02-22
        - name: newton
          status: end of life
          initial-release: 2016-10-06
          eol-date: 2017-10-25
        '''))

    def setUp(self):
        super().setUp()
        self.series_status = series_status.SeriesStatus(
            self._series_status_data)
        # Patch the class attribute so every Deliverable built in these
        # tests consults the canned data instead of the real file.
        self.useFixture(
            fixtures.MockPatch(
                'openstack_releases.deliverable.Deliverable._series_status_data',
                self.series_status,
            ))

    def test_default_to_series(self):
        # No override in the data: status comes from the series entry.
        d = deliverable.Deliverable(
            team='team',
            series='ocata',
            name='name',
            data={},
        )
        self.assertEqual('extended maintenance', d.stable_status)

    def test_override_series(self):
        # An explicit stable-status in the data wins over the series default.
        d = deliverable.Deliverable(
            team='team',
            series='newton',
            name='name',
            data={
                'stable-status': 'extended maintenance',
            },
        )
        self.assertEqual('extended maintenance', d.stable_status)
def test_is_eol_tag_true(self):
    # A '<series>-eol' version string must be recognized as an EOL tag.
    # NOTE(review): YAML body reconstructed from a collapsed source line.
    deliverable_data = textwrap.dedent('''
    releases:
      - version: newton-eol
        projects:
          - repo: openstack/release-test
            hash: a26e6a2e8a5e321b2e3517dbb01a7b9a56a8bfd5
    ''')
    deliv = deliverable.Deliverable(
        team='team',
        series='newton',
        name='name',
        data=yamlutils.loads(deliverable_data),
    )
    self.assertTrue(deliv.releases[-1].is_eol)
def get_zuul_project_data(url=ZUUL_PROJECTS_URL):
    """Return the data from the zuul.d/projects.yaml file.

    :param url: Optional URL to the location of the file. Defaults to
        the most current version in the public git repository.
    """
    response = requests.get(url)
    raw = yamlutils.loads(response.text)
    # The raw file is a list of single-key mappings such as:
    #
    #   - project:
    #       name: openstack/oslo.config
    #       templates:
    #         - system-required
    #         - publish-to-pypi
    #         ...
    #
    # Re-key it by repository name, since that is how we access this
    # most often:
    #
    #   openstack/oslo.config:
    #     name: openstack/oslo.config
    #     templates:
    #       - system-required
    #       - publish-to-pypi
    #       ...
    return {entry['project']['name']: entry['project'] for entry in raw}
def _load_deliverable_files(self, root_dir):
    """Read every deliverable file under root_dir and register it."""
    pattern = os.path.join(root_dir, '*/*.yaml')
    for filename in sorted(glob.glob(pattern)):
        series = self._series_from_filename(filename)
        deliverable = self._deliverable_from_filename(filename)
        with open(filename, 'r', encoding='utf-8') as f:
            d_info = yamlutils.loads(f.read())
        if self._collapse_history:
            _collapse_deliverable_history(deliverable, d_info)
        team = d_info['team']
        self._add_deliverable_file(
            filename,
            series,
            team,
            deliverable,
            d_info,
        )
def test_eol_series_for_version_tag(self):
    # A normal version number is not an EOL tag, so eol_series is ''.
    # NOTE(review): YAML body reconstructed from a collapsed source line.
    deliverable_data = textwrap.dedent('''
    releases:
      - version: 0.3.0
        projects:
          - repo: openstack/release-test
            hash: a26e6a2e8a5e321b2e3517dbb01a7b9a56a8bfd5
    ''')
    deliv = deliverable.Deliverable(
        team='team',
        series='newton',
        name='name',
        data=yamlutils.loads(deliverable_data),
    )
    self.assertEqual(
        '',
        deliv.releases[-1].eol_series,
    )
def parse_deliverable(series, repo, deliverable_file=None):
    """Parse useful information out of the deliverable file.

    Currently only parses the bug URL, but could potentially be expanded
    to get other useful settings.

    :param series: The release series being processed.
    :param repo: The name of the repo.
    :param deliverable_file: The deliverable file.
    """
    release_repo = os.path.realpath(
        os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
    if deliverable_file is None:
        deliverable_file = os.path.join('deliverables', series.lower(),
                                        '%s.yaml' % repo)
    deliverable_path = os.path.join(release_repo, deliverable_file)

    # Hard coding source URL for now
    sections = {
        'bug_url': '',
        'source_url': 'https://opendev.org/openstack/%s' % repo,
    }
    try:
        with open(deliverable_path, 'r', encoding='utf-8') as d:
            # Pass the file *contents*; every other caller of
            # yamlutils.loads in this repo hands it a string, and the
            # original code passed the file object itself.
            deliverable_info = yamlutils.loads(d.read())
    except Exception:
        # TODO(smcginnis): If the deliverable doesn't match the repo name, we
        # can try to find it by loading all deliverable data and iterating on
        # each deliverables repos to find it.
        LOG.warning('Unable to parse %s %s deliverable file', repo, series)
        return sections

    if deliverable_info.get('launchpad'):
        sections['bug_url'] = ('https://bugs.launchpad.net/%s/+bugs' %
                               deliverable_info['launchpad'])
    elif deliverable_info.get('storyboard'):
        sections['bug_url'] = (
            'https://storyboard.openstack.org/#!/project/%s' %
            deliverable_info['storyboard'])
    return sections
import datetime  # required by is_date below; missing from the original imports
import glob
import logging
import os
import os.path
import pkgutil
import sys

import jsonschema
import jsonschema.validators

from openstack_releases import yamlutils

LOG = logging.getLogger('')

# Schemas are bundled as package data and parsed once at import time.
_SERIES_SCHEMA = yamlutils.loads(
    pkgutil.get_data('openstack_releases',
                     'series_status_schema.yaml').decode('utf-8'))
_DELIVERABLE_SCHEMA = yamlutils.loads(
    pkgutil.get_data('openstack_releases',
                     'schema.yaml').decode('utf-8'))


def is_date(validator, value, instance, schema):
    """jsonschema format checker: yield an error unless instance parses as YYYY-MM-DD.

    Non-string instances are ignored (other validators handle type).
    Note this is a generator, so the 'return' on success merely stops
    iteration without yielding any error.
    """
    if not isinstance(instance, str):
        return
    try:
        return datetime.datetime.strptime(instance, "%Y-%m-%d")
    except Exception:
        yield jsonschema.ValidationError('Invalid date {!r}'.format(instance))
def main():
    """Compare each release's diff-start against the latest tag on the previous series branch."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'prev_series',
        help='previous series name',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to examine, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Default to the files touched by the latest commit; fall back to
    # every deliverable in the current default series.
    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print(
            'no modified deliverable files, validating all releases from %s'
            % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            shutil.rmtree(workdir, True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.isfile(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            deliverable_info = yamlutils.loads(f.read())
        branch = 'stable/' + args.prev_series
        if not deliverable_info.get('releases'):
            print(' no releases')
            continue
        # assume the releases are in order and take the last one
        new_release = deliverable_info['releases'][-1]
        print('version {}'.format(new_release['version']))
        diff_start = new_release.get('diff-start')
        if not diff_start:
            print(' no diff-start')
            continue
        else:
            print(' diff-start: {!r}'.format(diff_start))
        # Check whether diff-start matches the latest tag reachable from
        # the previous series' stable branch in each project repo.
        for project in new_release['projects']:
            gitutils.clone_repo(workdir, project['repo'])
            branch_base = gitutils.get_branch_base(
                workdir, project['repo'], branch,
            )
            if branch_base:
                branch_version = gitutils.get_latest_tag(
                    workdir, project['repo'], branch_base,
                )
                if diff_start == branch_version:
                    print(' SAME')
                else:
                    print(' DIFFERENT {} at {}'.format(
                        branch_version, branch_base))
def get_deliverable_data(series, deliverable):
    """Load and parse the deliverable file for a series/deliverable pair.

    :param series: release series name (directory under deliverables/)
    :param deliverable: deliverable base name (file name without .yaml)
    :returns: the parsed YAML contents of the deliverable file
    """
    deliverable_filename = 'deliverables/%s/%s.yaml' % (series, deliverable)
    with open(deliverable_filename, 'r', encoding='utf-8') as f:
        # Pass the file *contents* to the parser; the original passed the
        # file object itself, unlike every other call site in this repo.
        return yamlutils.loads(f.read())
def test_independent_series(self):
    # The canned body must produce an entry for 'independent'.
    parsed = yamlutils.loads(self._body)
    self.assertIn('independent', series_status.SeriesStatus(parsed))
def test_init(self):
    # Construction from the canned body must index the 'rocky' series.
    parsed = yamlutils.loads(self._body)
    self.assertIn('rocky', series_status.SeriesStatus(parsed))
def main():
    """Print the weekly release-team calendar skeleton for a series."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the release series, such as "newton" or "ocata"',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    args = parser.parse_args()
    series = args.series

    # Find the schedule file.
    schedule_filename = os.path.abspath(
        os.path.join(
            args.deliverables_dir,
            '..', 'doc', 'source', series,
            'schedule.yaml',
        ))
    with open(schedule_filename, 'r') as f:
        schedule_data = yamlutils.loads(f.read())

    # Fixed boilerplate header for the etherpad/calendar.
    print('Release Team Calendar for {}\n'.format(series.title()))
    print('Review dashboard: https://tiny.cc/ReleaseInbox')
    print(
        'Process document: https://releases.openstack.org/reference/process.html'
    )
    print('Storyboard: https://storyboard.openstack.org/#!/board/64')
    print(
        'Release email content: https://etherpad.opendev.org/p/relmgmt-weekly-emails'
    )
    print()
    print('Meeting info')
    print('Thursdays at 16:00 UTC in #openstack-release')
    print('Meeting stanza: #startmeeting releaseteam')
    print('Ping list: ')
    print()
    print('Review Days')
    print('[make list format]')
    print('Mon: ')
    print('Tue: ')
    print('Wed: ')
    print('Thu: ')
    print('Fri: ')
    print()
    print('First apply title formatting to all week titles. Then apply list')
    print('format to the following content, and copy-paste it in every week:')
    print()
    print('Team availability notes')
    print('Tasks')
    print('Meeting Agenda')
    print()

    # One heading per named week in the cycle schedule.
    for week in schedule_data['cycle']:
        if not week.get('name'):
            continue
        start = datetime.datetime.strptime(week['start'], '%Y-%m-%d')
        week['start_date'] = start
        end = datetime.datetime.strptime(week['end'], '%Y-%m-%d')
        week['end_date'] = end
        print('{name} ({start_date:%b %d} - {end_date:%b %d})'.format(**week),
              end='')
        if week.get('x-project'):
            print(' [', ', '.join(week['x-project']), ']')
        else:
            print()
def main():
    """Print the (older-format) weekly release-team calendar skeleton for a series."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the release series, such as "newton" or "ocata"',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    args = parser.parse_args()
    series = args.series

    # Find the schedule file.
    schedule_filename = os.path.abspath(
        os.path.join(
            args.deliverables_dir,
            '..', 'doc', 'source', series,
            'schedule.yaml',
        ))
    with open(schedule_filename, 'r') as f:
        schedule_data = yamlutils.loads(f.read())

    # Fixed boilerplate header.
    print('Release Team Calendar for {}\n'.format(series.title()))
    print('Review dashboard: http://bit.ly/ocata-relmgt-dashboard')
    print('Planning document: https://etherpad.openstack.org/p/{}-relmgt-plan'.
          format(series))
    print(
        'Process document: http://git.openstack.org/cgit/openstack/releases/tree/PROCESS.rst'
    )
    print()
    print('First apply title formatting to all week titles. Then apply list')
    print('format to the following content, and copy-paste it in every week:')
    print()
    print('Team availability notes')
    print('Tasks')
    print('Meeting Agenda')
    print('Countdown email content to send this week')
    print()

    # One heading per named week in the cycle schedule.
    for week in schedule_data['cycle']:
        if not week.get('name'):
            continue
        start = datetime.datetime.strptime(week['start'], '%Y-%m-%d')
        week['start_date'] = start
        end = datetime.datetime.strptime(week['end'], '%Y-%m-%d')
        week['end_date'] = end
        print('{name} ({start_date:%b %d} - {end_date:%b %d})'.format(**week),
              end='')
        if week.get('x-project'):
            print(' [', ', '.join(week['x-project']), ']')
        else:
            print()
def main():
    """Analyze each project's unreleased changes and optionally propose a release."""
    parser = argparse.ArgumentParser(
        description=OVERVIEW,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("-p", "--projects", metavar="FILE",
                        help="File containing projects to analyze")
    parser.add_argument("-r", "--releases", metavar="PATH",
                        help="Release repository path (default=%(default)s)",
                        default=os.getcwd())
    parser.add_argument("--only-show", action="store_true", default=False,
                        help="Only list changes and do not"
                             " prompt to propose")
    parser.add_argument('project', nargs='*', help="Project to analyze")
    args = parser.parse_args()
    release_repo_path = args.releases
    release_deliverable_path = os.path.join(release_repo_path, 'deliverables')
    try:
        # Cycle directories sort lexically; '_'-prefixed ones (e.g.
        # _independent) are not cycles.
        cycles = sorted([
            c for c in os.listdir(release_deliverable_path)
            if not c.startswith("_")
        ])
        latest_cycle = cycles[-1]
    except (IndexError, OSError):
        print("Please ensure release deliverables directory '%s' exists and"
              " it contains at least one release"
              " cycle." % (release_deliverable_path), file=sys.stderr)
        return 1
    raw_projects = []
    if args.projects:
        try:
            raw_projects.extend(read_projects(args.projects))
        except IOError:
            print("Please ensure projects '%s' file exists"
                  " and is readable." % (args.projects), file=sys.stderr)
            return 1
    raw_projects.extend(args.project)
    projects = extract_projects(raw_projects)
    if not projects:
        print("Please provide at least one project.")
        return 1
    with tempdir() as a_temp_dir:
        # Clone fresh copies of all the repos (so we have a good
        # non-altered starting set of repos, in the future we can
        # likely relax this).
        repos = clone_repos(a_temp_dir, projects)
        for project, short_project in projects:
            repo_path = repos[project]
            last_release_cycle, last_release_path = find_last_release_path(
                release_repo_path, latest_cycle, cycles, short_project)
            if last_release_path is None or last_release_cycle is None:
                last_release = None
                deliverable_info = None
            else:
                with open(last_release_path, 'rb') as fh:
                    deliverable_info = yamlutils.loads(fh.read())
                try:
                    last_release = deliverable_info['releases'][-1]
                except (IndexError, KeyError, TypeError):
                    last_release = None
            print("== Analysis of project '%s' ==" % short_project)
            if not last_release:
                # No prior release: every commit is a candidate.
                print("It has never had a release.")
                cmd = ['git', 'log', '--pretty=oneline']
                output = subprocess.check_output(
                    cmd, cwd=repo_path).decode('utf-8')
                output = output.strip()
                changes = list(clean_changes(output.splitlines()))
            else:
                print("The last release of project %s was:" % short_project)
                print(" Released in: %s" % last_release_cycle)
                print(" Version: %s" % last_release['version'])
                print(" At sha: %s" % last_release['projects'][0]['hash'])
                # Only commits since the last released hash.
                cmd = [
                    'git', 'log', '--pretty=oneline',
                    "%s..HEAD" % last_release['projects'][0]['hash']
                ]
                output = subprocess.check_output(
                    cmd, cwd=repo_path).decode('utf-8')
                output = output.strip()
                changes = list(clean_changes(output.splitlines()))
            if changes:
                maybe_create_release(release_repo_path, deliverable_info,
                                     last_release, changes, latest_cycle,
                                     project, short_project,
                                     should_prompt=not args.only_show)
            else:
                print(" No changes.")
    return 0
def _load_series_status_data(root_dir):
    """Parse series_status.yaml found under the given root directory."""
    path = os.path.join(root_dir, 'series_status.yaml')
    with open(path, 'r', encoding='utf-8') as f:
        raw = f.read()
    return yamlutils.loads(raw)
def main():
    """Analyze each project's unreleased changes and optionally propose a release.

    Same flow as the sibling implementation: locate the latest cycle,
    resolve the project list, clone fresh repos, diff each repo against
    its last release, and hand any changes to maybe_create_release().
    """
    parser = argparse.ArgumentParser(description=OVERVIEW,
                                     formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("-p", "--projects", metavar="FILE",
                        help="File containing projects to analyze")
    parser.add_argument(
        "-r", "--releases", metavar="PATH",
        help="Release repository path (default=%(default)s)",
        default=os.getcwd()
    )
    parser.add_argument(
        "--only-show", action="store_true", default=False,
        help="Only list changes and do not" " prompt to propose"
    )
    parser.add_argument("project", nargs="*", help="Project to analyze")
    args = parser.parse_args()
    release_repo_path = args.releases
    release_deliverable_path = os.path.join(release_repo_path, "deliverables")
    try:
        # Cycle directories sort lexically; '_'-prefixed ones are not cycles.
        cycles = sorted([c for c in os.listdir(release_deliverable_path)
                         if not c.startswith("_")])
        latest_cycle = cycles[-1]
    except (IndexError, OSError):
        print(
            "Please ensure release deliverables directory '%s' exists and"
            " it contains at least one release"
            " cycle." % (release_deliverable_path),
            file=sys.stderr,
        )
        return 1
    raw_projects = []
    if args.projects:
        try:
            raw_projects.extend(read_projects(args.projects))
        except IOError:
            print("Please ensure projects '%s' file exists"
                  " and is readable." % (args.projects), file=sys.stderr)
            return 1
    raw_projects.extend(args.project)
    projects = extract_projects(raw_projects)
    if not projects:
        print("Please provide at least one project.")
        return 1
    with tempdir() as a_temp_dir:
        # Clone fresh copies of all the repos (so we have a good
        # non-altered starting set of repos, in the future we can
        # likely relax this).
        repos = clone_repos(a_temp_dir, projects)
        for project, short_project in projects:
            repo_path = repos[project]
            last_release_cycle, last_release_path = find_last_release_path(
                release_repo_path, latest_cycle, cycles, short_project
            )
            if last_release_path is None or last_release_cycle is None:
                last_release = None
                deliverable_info = None
            else:
                with open(last_release_path, "rb") as fh:
                    deliverable_info = yamlutils.loads(fh.read())
                try:
                    last_release = deliverable_info["releases"][-1]
                except (IndexError, KeyError, TypeError):
                    last_release = None
            print("== Analysis of project '%s' ==" % short_project)
            if not last_release:
                print("It has never had a release.")
                cmd = ["git", "log", "--pretty=oneline"]
                # Decode so downstream code works with text; check_output
                # returns bytes, and the shas/descriptions are later
                # %s-formatted into messages (the sibling implementation
                # of this script already decodes here).
                output = subprocess.check_output(
                    cmd, cwd=repo_path).decode("utf-8")
                output = output.strip()
                changes = list(clean_changes(output.splitlines()))
            else:
                print("The last release of project %s was:" % short_project)
                print(" Released in: %s" % last_release_cycle)
                print(" Version: %s" % last_release["version"])
                print(" At sha: %s" % last_release["projects"][0]["hash"])
                cmd = ["git", "log", "--pretty=oneline",
                       "%s..HEAD" % last_release["projects"][0]["hash"]]
                output = subprocess.check_output(
                    cmd, cwd=repo_path).decode("utf-8")
                output = output.strip()
                changes = list(clean_changes(output.splitlines()))
            if changes:
                maybe_create_release(
                    release_repo_path,
                    deliverable_info,
                    last_release,
                    changes,
                    latest_cycle,
                    project,
                    short_project,
                    should_prompt=not args.only_show,
                )
            else:
                print(" No changes.")
    return 0
def maybe_create_release(
    release_repo_path,
    deliverable_info,
    last_release,
    change_lines,
    latest_cycle,
    project,
    short_project,
    max_changes_show=100,
    should_prompt=True,
):
    """Show pending changes and interactively build a new release entry.

    :param release_repo_path: path to the releases repository checkout
    :param deliverable_info: parsed deliverable data (may be None)
    :param last_release: the most recent release mapping (may be None)
    :param change_lines: list of (sha, description) tuples to release
    :param latest_cycle: name of the newest release cycle directory
    :param project: full repository name
    :param short_project: deliverable/base name of the project
    :param max_changes_show: cap on how many changes are listed
    :param should_prompt: when false, only list changes and return
    """
    if last_release:
        print("%s changes to release since %s are:" % (len(change_lines), last_release["version"]))
    else:
        print("%s changes to release are:" % (len(change_lines)))
    for sha, descr in change_lines[0:max_changes_show]:
        print(" %s %s" % (sha, descr))
    leftover_change_lines = change_lines[max_changes_show:]
    if leftover_change_lines:
        print(" and %s more changes..." % len(leftover_change_lines))
    if not should_prompt:
        return
    create_release = yes_no_prompt("Create a release in %s containing" " those changes? " % latest_cycle)
    if create_release:
        # NOTE(harlowja): use of an ordered-dict here is on purpose, so that
        # the ordering here stays similar to what is already being used.
        newest_release_path = os.path.join(release_repo_path, "deliverables", latest_cycle, "%s.yaml" % short_project)
        ok_change = True
        if os.path.exists(newest_release_path):
            # Existing deliverable file: ask before rewriting it wholesale.
            with open(newest_release_path, "rb") as fh:
                newest_release = yamlutils.loads(fh.read())
            ok_change = yes_no_prompt(
                "Alter existing file (reformatting"
                " may lose comments and some existing"
                " yaml indenting/structure)? "
            )
        else:
            # New deliverable file: gather the required metadata.
            notes_link = to_unicode(NOTES_URL_TPL % (short_project, latest_cycle))
            notes_link = prompt("Release notes link: ", validator=NoEmptyValidator(), default=notes_link)
            if deliverable_info:
                # NOTE(review): assumes existing deliverable data always
                # carries a 'launchpad' key -- confirm, else KeyError.
                launchpad_project = to_unicode(deliverable_info["launchpad"])
            else:
                launchpad_project = prompt(
                    "Launchpad project name: ", validator=NoEmptyValidator(), default=to_unicode(short_project)
                )
            team = prompt("Project team: ", validator=NoEmptyValidator(), default=to_unicode(launchpad_project))
            include_pypi_link = yes_no_prompt("Include pypi link? ")
            newest_release = collections.OrderedDict(
                [
                    ("launchpad", launchpad_project),
                    ("include-pypi-link", include_pypi_link),
                    ("release-notes", notes_link),
                    ("releases", []),
                    ("team", team),
                ]
            )
        possible_hashes = []
        for sha, _descr in change_lines:
            possible_hashes.append(sha)
        release_kind = prompt(
            "Release type: ", validator=SetValidator(RELEASE_KINDS), completer=WordCompleter(RELEASE_KINDS)
        )
        suggested_version = generate_suggested_next_version(last_release, release_kind)
        if not suggested_version:
            suggested_version = ""
        version = prompt("Release version: ", validator=NoEmptyValidator(), default=to_unicode(suggested_version))
        highlights = prompt("Highlights (esc then enter to" " exit): ", multiline=True)
        highlights = highlights.strip()
        release_hash = prompt(
            "Hash to release at: ",
            validator=SetValidator(possible_hashes),
            completer=WordCompleter(possible_hashes),
            default=possible_hashes[0],
        )
        new_release = collections.OrderedDict(
            [("version", version), ("projects", [collections.OrderedDict([("repo", project), ("hash", release_hash)])])]
        )
        if highlights:
            new_release["highlights"] = highlights
        if not ok_change:
            # User declined rewriting: print the snippet for manual merge.
            new_release = yamlutils.dumps(new_release)
            print("You may manually adjust %s and add:" % newest_release_path)
            print(new_release)
        else:
            try:
                newest_release["releases"].append(new_release)
            except KeyError:
                newest_release["releases"] = [new_release]
            newest_release = yamlutils.dumps(newest_release)
            # NOTE(review): binary-mode write of yamlutils.dumps output --
            # confirm dumps() returns bytes here, other scripts write it
            # through text-mode handles.
            with open(newest_release_path, "wb") as fh:
                fh.write(newest_release)
def maybe_create_release(release_repo_path, deliverable_info,
                         last_release, change_lines,
                         latest_cycle, project, short_project,
                         max_changes_show=100, should_prompt=True):
    """Show pending changes and interactively build a new release entry.

    :param release_repo_path: path to the releases repository checkout
    :param deliverable_info: parsed deliverable data (may be None)
    :param last_release: the most recent release mapping (may be None)
    :param change_lines: list of (sha, description) tuples to release
    :param latest_cycle: name of the newest release cycle directory
    :param project: full repository name
    :param short_project: deliverable/base name of the project
    :param max_changes_show: cap on how many changes are listed
    :param should_prompt: when false, only list changes and return
    """
    if last_release:
        print("%s changes to release since %s are:" % (len(change_lines),
                                                       last_release['version']))
    else:
        print("%s changes to release are:" % (len(change_lines)))
    for sha, descr in change_lines[0:max_changes_show]:
        print(" %s %s" % (sha, descr))
    leftover_change_lines = change_lines[max_changes_show:]
    if leftover_change_lines:
        print(" and %s more changes..." % len(leftover_change_lines))
    if not should_prompt:
        return
    create_release = yes_no_prompt('Create a release in %s containing'
                                   ' those changes? ' % latest_cycle)
    if create_release:
        # NOTE(harlowja): use of an ordered-dict here is on purpose, so that
        # the ordering here stays similar to what is already being used.
        newest_release_path = os.path.join(release_repo_path,
                                           'deliverables', latest_cycle,
                                           "%s.yaml" % short_project)
        ok_change = True
        if os.path.exists(newest_release_path):
            # Existing deliverable file: ask before rewriting it wholesale.
            with open(newest_release_path, 'rb') as fh:
                newest_release = yamlutils.loads(fh.read())
            ok_change = yes_no_prompt("Alter existing file (reformatting"
                                      " may lose comments and some existing"
                                      " yaml indenting/structure)? ")
        else:
            # New deliverable file: gather the required metadata.
            notes_link = to_unicode(NOTES_URL_TPL % (short_project,
                                                     latest_cycle))
            notes_link = prompt("Release notes link: ",
                                validator=NoEmptyValidator(),
                                default=notes_link)
            if deliverable_info:
                # NOTE(review): assumes existing deliverable data always
                # carries a 'launchpad' key -- confirm, else KeyError.
                launchpad_project = to_unicode(deliverable_info['launchpad'])
            else:
                launchpad_project = prompt("Launchpad project name: ",
                                           validator=NoEmptyValidator(),
                                           default=to_unicode(short_project))
            team = prompt("Project team: ",
                          validator=NoEmptyValidator(),
                          default=to_unicode(launchpad_project))
            include_pypi_link = yes_no_prompt("Include pypi link? ")
            newest_release = collections.OrderedDict([
                ('launchpad', launchpad_project),
                ('include-pypi-link', include_pypi_link),
                ('release-notes', notes_link),
                ('releases', []),
                ('team', team),
            ])
        possible_hashes = []
        for sha, _descr in change_lines:
            possible_hashes.append(sha)
        release_kind = prompt("Release type: ",
                              validator=SetValidator(RELEASE_KINDS),
                              completer=WordCompleter(RELEASE_KINDS))
        suggested_version = generate_suggested_next_version(
            last_release, release_kind)
        if not suggested_version:
            suggested_version = ''
        version = prompt("Release version: ",
                         validator=NoEmptyValidator(),
                         default=to_unicode(suggested_version))
        highlights = prompt("Highlights (esc then enter to"
                            " exit): ", multiline=True)
        highlights = highlights.strip()
        release_hash = prompt("Hash to release at: ",
                              validator=SetValidator(possible_hashes),
                              completer=WordCompleter(possible_hashes),
                              default=possible_hashes[0])
        new_release = collections.OrderedDict([
            ('version', version),
            ('projects', [
                collections.OrderedDict([
                    ('repo', project),
                    ('hash', release_hash),
                ]),
            ]),
        ])
        if highlights:
            new_release['highlights'] = highlights
        if not ok_change:
            # User declined rewriting: print the snippet for manual merge.
            new_release = yamlutils.dumps(new_release)
            print("You may manually adjust %s and add:" % newest_release_path)
            print(new_release)
        else:
            try:
                newest_release['releases'].append(new_release)
            except KeyError:
                newest_release['releases'] = [new_release]
            newest_release = yamlutils.dumps(newest_release)
            # NOTE(review): binary-mode write of yamlutils.dumps output --
            # confirm dumps() returns bytes here, other scripts write it
            # through text-mode handles.
            with open(newest_release_path, 'wb') as fh:
                fh.write(newest_release)
def __init__(self):
    """Load the deliverable schema bundled with the package data."""
    raw = pkgutil.get_data('openstack_releases', 'schema.yaml')
    self._raw = raw
    self._data = yamlutils.loads(raw.decode('utf-8'))
def main():
    """Add a stable/<series> branch entry to each matching deliverable file."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--all',
        default=False,
        action='store_true',
        help='process all deliverables, including release:cycle-trailing',
    )
    parser.add_argument(
        '--verbose', '-v',
        action='store_true',
        default=False,
        help='produce detailed output',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--series',
        default=defaults.RELEASE,
        help='the name of the release series to work on (%(default)s)')
    parser.add_argument(
        '--include-clients',
        action='append_const',
        const='client-library',
        default=['library'],
        dest='types',
    )
    parser.add_argument(
        '--dry-run', '-n',
        default=False,
        action='store_true',
        help='report what action would be taken but do not take it',
    )
    parser.add_argument(
        'deliverable',
        nargs='*',
        default=[],
        help='the name(s) of the deliverable(s) to modify',
    )
    args = parser.parse_args()

    if args.verbose:
        def verbose(msg):
            print(msg)
    else:
        def verbose(msg):
            pass

    deliverables_dir = args.deliverables_dir

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    pattern = os.path.join(deliverables_dir, args.series, '*.yaml')
    verbose('Scanning {}'.format(pattern))
    deliverable_files = sorted(glob.glob(pattern))

    new_branch = 'stable/' + args.series

    for filename in deliverable_files:
        deliverable_name = os.path.basename(filename)[:-5]
        # Restrict to explicitly requested deliverables, if any.
        if args.deliverable and deliverable_name not in args.deliverable:
            continue
        with open(filename, 'r', encoding='utf-8') as f:
            deliverable_data = yamlutils.loads(f.read())
        # Only branch the requested deliverable types (libraries by
        # default, plus client libraries with --include-clients).
        if deliverable_data['type'] not in args.types:
            continue
        verbose('\n{}'.format(filename))
        releases = deliverable_data.get('releases')
        if not releases:
            print('{} has no releases, not branching'.format(
                deliverable_name))
            continue
        if 'branches' not in deliverable_data:
            deliverable_data['branches'] = []
        # Skip deliverables that already have the target branch.
        skip = False
        for b in deliverable_data['branches']:
            if b['name'] == new_branch:
                print('{} already has branch {}'.format(
                    deliverable_name, new_branch))
                skip = True
        if skip:
            continue
        latest_release = releases[-1]
        print('{} new branch {} at {}'.format(
            deliverable_name, new_branch, latest_release['version']))
        if not args.dry_run:
            # Branch from the most recent release and rewrite the file.
            deliverable_data['branches'].append({
                'name': new_branch,
                'location': latest_release['version'],
            })
            with open(filename, 'w', encoding='utf-8') as f:
                f.write(yamlutils.dumps(deliverable_data))
repos: - openstack-infra/release-tools releases: repos: - openstack/releases reno: repos: - openstack/reno docs: contributor: https://docs.openstack.org/developer/reno/ specs-cookiecutter: repos: - openstack-dev/specs-cookiecutter """ TEAM_DATA = yamlutils.loads(_team_data_yaml) class TestGetRepoOwner(base.BaseTestCase): def test_repo_exists(self): owner = governance.get_repo_owner( TEAM_DATA, 'openstack/releases', ) self.assertEqual('Release Management', owner) def test_no_such_repo(self): self.assertRaises( ValueError, governance.get_repo_owner,