示例#1
0
def _initialize_team_data(app):
    """Populate the module-level deliverable and team caches.

    Loads the deliverable files from the ``deliverables`` directory and
    the governance team data, then records every team and the type of
    every deliverable in the module-level lookup tables.
    """
    global _deliverables
    global _all_teams

    _deliverables = deliverable.Deliverables('deliverables')
    for team_name, team_info in governance.get_team_data().items():
        _all_teams[team_name] = team_info
        # Remember the type of each deliverable owned by this team.
        for name, info in team_info['deliverables'].items():
            _all_deliverable_types[name] = _get_deliverable_type(name, info)
示例#2
0
def main():
    """Write a CSV dashboard of milestone-based deliverables to stdout."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--series',
        default=defaults.RELEASE,
        help='the release series, such as "newton" or "ocata"',
    )
    args = parser.parse_args()

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )

    # Only deliverables following the milestone model belong on the
    # dashboard.
    candidates = (
        deliverable.Deliverable(t, s, dn, da)
        for t, s, dn, da in all_deliv.get_deliverables(None, args.series)
    )
    interesting_deliverables = [
        d
        for d in candidates
        if d.model == MILESTONE
    ]

    # Index the governance teams by lower-cased name for the lookups below.
    teams = {}
    for team_name, team_info in governance.get_team_data().items():
        teams[team_name.lower()] = governance.Team(team_name, team_info)

    # Dump the dashboard data
    writer = csv.writer(sys.stdout)
    writer.writerow(
        ('Team', 'Deliverable Type', 'Deliverable Name', 'Pre-RC1', 'RC1',
         'Branched at', 'Latest RC', 'Release Notes', 'Comments', 'PTL Nick',
         'PTL Email', 'Liaison Nick', 'IRC Channel'))

    for deliv in sorted(interesting_deliverables,
                        key=lambda d: (d.team, d.name)):
        team = teams[deliv.team.lower()]
        row = (
            deliv.team.lower(),
            deliv.type,
            deliv.name,
            deliv.latest_release,
            '',  # RC1
            deliv.get_branch_location('stable/' + args.series),  # branched at
            '',  # latest RC
            deliv.release_notes,
            '',  # Comments
            team.data['ptl']['irc'],
            team.data['ptl']['email'],
            team.liaison[1] or '',
            team.data.get('irc-channel'),
        )
        writer.writerow(row)
示例#3
0
 def __init__(self, team, series, name, data):
     """Initialize one deliverable for a release series.

     Falls back to the team recorded in *data* when no team is given,
     collects the set of repositories referenced by the releases, and
     lazily loads the shared governance data the first time any
     instance needs it.
     """
     # Prefer the explicit team; otherwise use the one in the data.
     if team is None:
         team = data['team']
     self.team = team
     self.series = series
     self.name = name
     self._data = data
     # Every repository mentioned by any release of this deliverable.
     self.repos = {
         project['repo']
         for release in self.releases
         for project in release['projects']
     }
     # Cache the governance data on the class, shared by all instances.
     if self._governance_data is None:
         Deliverable._governance_data = governance.get_team_data()
示例#4
0
    def run(self):
        """Render the deliverables listing for the configured series.

        Reads every deliverable YAML file for the series named by the
        directive's ``series`` option, groups the deliverables by their
        governance ``type:`` tag, and emits one section per type in the
        order given by ``self._TYPE_ORDER``. Returns the parsed doc
        nodes, or a single error node when no series was configured.
        """
        env = self.state.document.settings.env
        app = env.app

        # Governance data: maps team names to team info, including the
        # deliverables each team owns and their tags.
        team_data = governance.get_team_data()

        series = self.options.get("series")
        if not series:
            # Report a document error instead of raising so Sphinx can
            # keep building the rest of the documentation.
            error = self.state_machine.reporter.error(
                "No series set for deliverable directive",
                nodes.literal_block(self.block_text, self.block_text),
                line=self.lineno,
            )
            return [error]

        # Map deliverable name -> its "type:..." governance tag.
        deliverable_types = {}
        for team in (governance.Team(n, i) for n, i in team_data.items()):
            for dn, di in team.deliverables.items():
                for tag in di.tags:
                    if tag.startswith("type:"):
                        deliverable_types[dn] = tag

        result = ViewList()

        # Read all of the deliverable data for the series.

        deliverables = collections.defaultdict(list)

        for filename in sorted(glob.glob("deliverables/%s/*.yaml" % series)):
            app.info("[deliverables] reading %s" % filename)
            deliverable_name = os.path.basename(filename)[:-5]  # strip .yaml
            deliverable_type = _get_deliverable_type(deliverable_types, deliverable_name)
            # NOTE(review): yaml.load() without an explicit Loader is
            # deprecated/unsafe on untrusted input; these files are
            # repo-local, but yaml.safe_load() would be safer -- confirm.
            with open(filename, "r") as f:
                deliverables[deliverable_type].append((deliverable_name, filename, yaml.load(f.read())))

        # Emit the sections in a fixed, curated order of types.
        for type_tag in self._TYPE_ORDER:
            self._add_deliverables(type_tag, deliverables[type_tag], series, app, result)

        # NOTE(dhellmann): Useful for debugging.
        # print('\n'.join(result))

        # Parse the generated reStructuredText into real doc nodes.
        node = nodes.section()
        node.document = self.state.document
        nested_parse_with_titles(self.state, result, node)
        return node.children
示例#5
0
def main():
    """Print a pre-release review report for modified deliverable files.

    For each modified (or explicitly listed) deliverable file, shows
    the release model, the owning team's governance details, and a
    series of git reports about the commits receiving the new tag.
    The output is meant for human review; always returns 0 unless
    there was nothing to check.
    """
    if not sys.stdout.encoding:
        # Wrap sys.stdout with a writer that knows how to handle
        # encoding Unicode data.
        import codecs
        wrapped_stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdout = wrapped_stdout

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        # Best-effort removal of the temporary checkouts at exit.
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                # Narrowed from a bare "except:" so SystemExit and
                # KeyboardInterrupt are not swallowed; cleanup failures
                # are deliberately ignored.
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    team_data = governance.get_team_data()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        with open(filename, 'r') as f:
            # NOTE(review): yaml.load() without an explicit Loader is
            # unsafe on untrusted input; these files come from the
            # repository itself, but yaml.safe_load() would be safer.
            deliverable_info = yaml.load(f.read())

        # The series name is the parent directory of the deliverable file.
        series = os.path.basename(
            os.path.dirname(
                os.path.abspath(filename)
            )
        )
        if series == '_independent':
            default_model = 'independent'
        else:
            default_model = 'no release model specified'

        # By default assume the project does not use milestones.
        header('Release model')
        print(deliverable_info.get('release-model', default_model))

        header('Team details')
        if 'team' in deliverable_info:
            team_name = deliverable_info['team']
            team_dict = team_data.get(team_name)
            if team_dict:
                team = governance.Team(team_name, team_dict)
                print('found team %s' % team_name)
                print('  PTL: %(name)s (%(irc)s)\n' % team.ptl)
                deliverable_name = os.path.basename(filename)[:-5]  # remove .yaml
                deliverable = team.deliverables.get(deliverable_name)
                if deliverable:
                    print('found deliverable %s' % deliverable_name)
                    for rn, repo in sorted(deliverable.repositories.items()):
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print('  %s' % t)
                        print('')
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') %
                          (deliverable_name, team_name))
            else:
                print('no team %r found, cannot report on governance status' %
                      team_name)
        else:
            print('no team name given, cannot report on governance status')

        if series == defaults.RELEASE:
            branch = 'master'
        else:
            branch = 'stable/' + series

        # If there are no releases listed, this is probably a new
        # deliverable file for initializing a new series. We don't
        # need to list its changes.
        if not deliverable_info.get('releases'):
            header('No releases')
            print('no releases were found, assuming an initialization file')
            continue

        # assume the releases are in order and take the last one
        new_release = deliverable_info['releases'][-1]

        # build a map between version numbers and the release details
        by_version = {
            str(r['version']): r
            for r in deliverable_info['releases']
        }

        for project in new_release['projects']:

            tag_exists = gitutils.commit_exists(
                project['repo'],
                new_release['version'],
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project['repo'], new_release['version']))

            # Check out the code.
            print('\nChecking out repository {}'.format(project['repo']))
            subprocess.check_call(
                ['zuul-cloner',
                 '--branch', branch,
                 '--workspace', workdir,
                 'git://git.openstack.org',
                 project['repo'],
                 ]
            )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project['repo'],
                '{}^'.format(project['hash'])
            )
            previous_release = by_version.get(previous_tag)

            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x['repo']: x
                    for x in previous_release['projects']
                }.get(project['repo'])
                if previous_project is not None:
                    # NOTE(review): this assignment is a no-op
                    # (start_range already equals previous_tag); kept in
                    # case a different start point was intended here --
                    # confirm.
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project['hash'])
            else:
                git_range = project['hash']

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release['version'])
            print('\ngit describe %s\n' % project['hash'])
            try:
                subprocess.check_call(
                    ['git', 'describe', project['hash']],
                    cwd=os.path.join(workdir, project['repo']),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project['repo'],
                title='Check existing tags',
                ref=project['hash'],
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project['repo'],
                title='Branches containing commit',
                commit=project['hash'],
            )

            header('Relationship to HEAD')
            if series == '_independent':
                interesting_branches = sorted(
                    b for b in branches
                    if '->' not in b
                )
                tag_branch = interesting_branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    'HEAD',
                )
                print('HEAD of {} is {}'.format(branch, head_sha))
                tag_branch = branch
            requested_sha = gitutils.sha_for_tag(
                workdir,
                project['repo'],
                project['hash'],
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir, project['repo'], 'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            # Show any requirements changes in the upcoming release.
            # Include setup.cfg, in case the project uses "extras".
            if start_range:
                git_diff(workdir, project['repo'], git_range, '*requirements*.txt')
                git_diff(workdir, project['repo'], git_range, 'setup.cfg')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir, project['repo'],
                    'Release %s will include' % new_release['version'],
                    git_range,
                    extra_args=['--graph', '--oneline', '--decorate',
                                '--topo-order'])
            git_log(workdir, project['repo'],
                    'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                previous_tag_exists = gitutils.commit_exists(
                    project['repo'],
                    # Bug fix: commit_exists() takes a version string,
                    # not the release dict -- see the new_release call
                    # above. Passing the dict could never match a tag.
                    previous_release['version'],
                )
            if previous_tag_exists:
                git_log(
                    workdir, project['repo'],
                    'Patches in previous release but not in this one',
                    [project['hash'],
                     '--not',
                     previous_release['version']],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )
                header('New release %s includes previous release %s' %
                       (new_release['version'], previous_release['version']))
                if not tag_exists:
                    subprocess.check_call(
                        ['git', 'tag', new_release['version'],
                         project['hash']],
                        cwd=os.path.join(workdir, project['repo']),
                    )
                print('\ngit tag --contains %s\n' %
                      previous_release['version'])
                containing_tags = subprocess.check_output(
                    ['git', 'tag',
                     '--contains',
                     previous_release['version']],
                    cwd=os.path.join(workdir, project['repo']),
                ).split()
                print('Containing tags:', containing_tags)
                if new_release['version'] not in containing_tags:
                    print('WARNING: Missing %s' % new_release['version'])
                else:
                    print('Found new version %s' % new_release['version'])

                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project['repo'],
                    previous_release['version'],
                    project['hash'],
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

    return 0
示例#6
0
def main():
    """Print a pre-release review report for modified deliverable files.

    For each modified (or explicitly listed) deliverable file, shows
    the owning team's governance details and a series of git reports
    about the commits receiving the new tag. Output is meant for human
    review; always returns 0 unless there was nothing to check.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        # Best-effort removal of the temporary checkouts at exit.
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            # NOTE(review): bare "except" also swallows SystemExit and
            # KeyboardInterrupt; "except Exception" would be safer.
            except:
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    team_data = governance.get_team_data()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        with open(filename, 'r') as f:
            # NOTE(review): yaml.load() without an explicit Loader is
            # unsafe on untrusted input; consider yaml.safe_load().
            deliverable_info = yaml.load(f.read())

        # By default assume the project does not use milestones.
        uses_milestones = False

        header('Team details')
        if 'team' in deliverable_info:
            team_name = deliverable_info['team']
            team_dict = team_data.get(team_name)
            if team_dict:
                team = governance.Team(team_name, team_dict)
                print('found team %s' % team_name)
                print('  PTL: %(name)s (%(irc)s)\n' % team.ptl)
                deliverable_name = os.path.basename(filename)[:-5]  # remove .yaml
                deliverable = team.deliverables.get(deliverable_name)
                if deliverable:
                    print('found deliverable %s' % deliverable_name)
                    for rn, repo in sorted(deliverable.repositories.items()):
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print('  %s' % t)
                        print('')
                    # NOTE(review): "repo" is the loop variable left over
                    # from the for loop above, so only the LAST
                    # repository's tags are consulted here -- confirm
                    # whether all repositories should be checked.
                    uses_milestones = 'release:cycle-with-milestones' in repo.tags
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') %
                          (deliverable_name, team_name))
            else:
                print('no team %r found, cannot report on governance status' %
                      team_name)
        else:
            print('no team name given, cannot report on governance status')
        if uses_milestones:
            print('uses milestones')

        # The series name is the parent directory of the deliverable file.
        series = os.path.basename(
            os.path.dirname(
                os.path.abspath(filename)
            )
        )
        if series == defaults.RELEASE:
            branch = 'master'
        else:
            branch = 'stable/' + series

        # assume the releases are in order and take the last one
        new_release = deliverable_info['releases'][-1]

        # Warn if the new release looks like a milestone release but
        # the project does not use milestones.
        if not uses_milestones:
            # NOTE(review): deliverable_name is unbound (NameError) here
            # when the team/deliverable lookup above failed; also the
            # substring test matches any version containing the letter
            # 'a' or 'b', not only pre-release suffixes -- confirm.
            for pre_indicator in ['a', 'b', 'rc']:
                if pre_indicator in new_release['version']:
                    print(('WARNING: %s looks like a pre-release '
                           'but %s does not use milestones') %
                          (new_release['version'], deliverable_name))

        # build a map between version numbers and the release details
        by_version = {
            str(r['version']): r
            for r in deliverable_info['releases']
        }

        for project in new_release['projects']:

            tag_exists = gitutils.commit_exists(
                project['repo'],
                new_release['version'],
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project['repo'], new_release['version']))

            # Check out the code.
            print('\nChecking out repository {}'.format(project['repo']))
            subprocess.check_call(
                ['zuul-cloner',
                 '--branch', branch,
                 '--workspace', workdir,
                 'git://git.openstack.org',
                 project['repo'],
                 ]
            )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project['repo'],
                '{}^'.format(project['hash'])
            )
            previous_release = by_version.get(previous_tag)

            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x['repo']: x
                    for x in previous_release['projects']
                }.get(project['repo'])
                if previous_project is not None:
                    # NOTE(review): this assignment is a no-op
                    # (start_range already equals previous_tag) --
                    # confirm whether a different start point was
                    # intended.
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project['hash'])
            else:
                git_range = project['hash']

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release['version'])
            print('\ngit describe %s\n' % project['hash'])
            try:
                subprocess.check_call(
                    ['git', 'describe', project['hash']],
                    cwd=os.path.join(workdir, project['repo']),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project['repo'],
                title='Check existing tags',
                ref=project['hash'],
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project['repo'],
                title='Branches containing commit',
                commit=project['hash'],
            )

            header('Relationship to HEAD')
            if series == '_independent':
                interesting_branches = sorted(
                    b for b in branches
                    if '->' not in b
                )
                tag_branch = interesting_branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project['repo'],
                    'HEAD',
                )
                print('HEAD of {} is {}'.format(branch, head_sha))
                tag_branch = branch
            requested_sha = gitutils.sha_for_tag(
                workdir,
                project['repo'],
                project['hash'],
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir, project['repo'], 'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            # Show any requirements changes in the upcoming release.
            if start_range:
                git_diff(workdir, project['repo'], git_range, '*requirements*.txt')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir, project['repo'],
                    'Release %s will include' % new_release['version'],
                    git_range,
                    extra_args=['--graph', '--oneline', '--decorate',
                                '--topo-order'])
            git_log(workdir, project['repo'],
                    'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                # NOTE(review): other call sites pass a version string
                # to commit_exists(); passing the release dict here
                # looks like a bug -- should likely be
                # previous_release['version'].
                previous_tag_exists = gitutils.commit_exists(
                    project['repo'],
                    previous_release,
                )
            if previous_tag_exists:
                git_log(
                    workdir, project['repo'],
                    'Patches in previous release but not in this one',
                    [project['hash'],
                     '--not',
                     previous_release['version']],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )
                header('New release %s includes previous release %s' %
                       (new_release['version'], previous_release['version']))
                if not tag_exists:
                    subprocess.check_call(
                        ['git', 'tag', new_release['version'],
                         project['hash']],
                        cwd=os.path.join(workdir, project['repo']),
                    )
                print('\ngit tag --contains %s\n' %
                      previous_release['version'])
                containing_tags = subprocess.check_output(
                    ['git', 'tag',
                     '--contains',
                     previous_release['version']],
                    cwd=os.path.join(workdir, project['repo']),
                ).split()
                print('Containing tags:', containing_tags)
                if new_release['version'] not in containing_tags:
                    print('WARNING: Missing %s' % new_release['version'])
                else:
                    print('Found new version %s' % new_release['version'])

                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project['repo'],
                    previous_release['version'],
                    project['hash'],
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

    return 0
示例#7
0
def main():
    """Validate modified deliverable files and report problems.

    Runs every validator against each modified (or explicitly listed)
    deliverable file, collecting warnings and errors. Returns 1 when
    any errors were found, 0 otherwise.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files and no arguments, '
              'skipping validation')
        return 0

    zuul_layout = project_config.get_zuul_layout_data()

    team_data = governance.get_team_data()

    # Accumulated across all files so everything is reported at once.
    errors = []
    warnings = []

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        # Best-effort removal of the temporary checkouts at exit.
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                # Narrowed from a bare "except:" so SystemExit and
                # KeyboardInterrupt are not swallowed; cleanup failures
                # are deliberately ignored.
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.isfile(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # NOTE(review): yaml.load() without an explicit Loader is
            # unsafe on untrusted input; these files come from the
            # repository itself, but yaml.safe_load() would be safer.
            deliverable_info = yaml.load(f.read())

        # The series name is the parent directory of the deliverable file.
        series_name = os.path.basename(
            os.path.dirname(filename)
        )

        # Closures binding the current filename so validators can
        # record prefixed messages without knowing which file they
        # are checking.
        def mk_warning(msg):
            print('WARNING: {}'.format(msg))
            warnings.append('{}: {}'.format(filename, msg))

        def mk_error(msg):
            print('ERROR: {}'.format(msg))
            errors.append('{}: {}'.format(filename, msg))

        validate_launchpad(deliverable_info, mk_warning, mk_error)
        validate_team(deliverable_info, team_data, mk_warning, mk_error)
        validate_release_notes(deliverable_info, mk_warning, mk_error)
        validate_type(deliverable_info, mk_warning, mk_error)
        validate_model(deliverable_info, series_name, mk_warning, mk_error)
        validate_releases(
            deliverable_info,
            zuul_layout,
            series_name,
            workdir,
            mk_warning,
            mk_error,
        )
        # Some rules only apply to the most current release.
        if series_name == defaults.RELEASE:
            validate_new_releases(
                deliverable_info,
                filename,
                team_data,
                mk_warning,
                mk_error,
            )
        validate_branch_prefixes(
            deliverable_info,
            mk_warning,
            mk_error,
        )
        validate_stable_branches(
            deliverable_info,
            mk_warning,
            mk_error,
        )
        validate_feature_branches(
            deliverable_info,
            workdir,
            mk_warning,
            mk_error,
        )
        validate_driverfixes_branches(
            deliverable_info,
            workdir,
            mk_warning,
            mk_error,
        )

    if warnings:
        print('\n\n%s warnings found' % len(warnings))
        for w in warnings:
            print(w)

    if errors:
        print('\n\n%s errors found' % len(errors))
        for e in errors:
            print(e)

    return 1 if errors else 0
def main():
    """Audit Gerrit tag ACLs for official OpenStack repositories.

    Reads Gerrit credentials from the user's gerrit.ini, then inspects
    the ``pushSignedTag`` permission on ``refs/tags/*`` for every
    official code repository and prints any group holding that
    permission; with --verbose, allowed groups are reported as OK and
    others as WARNING.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--verbose',
        '-v',
        action='store_true',
        default=False,
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    args = parser.parse_args()

    # Credentials live in the per-user config directory, e.g.
    # ~/.config/openstack-release/gerrit.ini on Linux.
    config_filename = os.path.join(
        appdirs.user_config_dir('openstack-release', 'openstack'),
        'gerrit.ini',
    )
    config = configparser.ConfigParser()
    config.read(config_filename, encoding='utf-8')

    if not config.has_option('DEFAULT', 'username'):
        parser.error('No username set in {}'.format(config_filename))
    if not config.has_option('DEFAULT', 'password'):
        parser.error('No password set in {}'.format(config_filename))

    team_data = governance.get_team_data()

    # Some deliverables were independent at one time but might not be
    # any more, so compare the independent list with the current
    # release series.
    all_independent_deliverables = set(
        name for team, series, name, deliv in deliverable.Deliverables(
            root_dir=args.deliverables_dir,
            collapse_history=True,
        ).get_deliverables(None, None))
    current_deliverables = set(
        name for team, series, name, deliv in deliverable.Deliverables(
            root_dir=args.deliverables_dir,
            collapse_history=True,
        ).get_deliverables(None, defaults.RELEASE))
    independent_deliverables = all_independent_deliverables.difference(
        current_deliverables)

    gerrit = GerritClient(
        config['DEFAULT']['username'],
        config['DEFAULT']['password'],
    )

    for repo in governance.get_repositories(team_data, code_only=True):

        if repo.deliverable.team.name in IGNORED_TEAMS:
            if args.verbose:
                print('{}: ignoring {} team'.format(
                    repo.name, repo.deliverable.team.name))
            continue

        if repo.deliverable.name in independent_deliverables:
            if args.verbose:
                print('{}: ignoring independent deliverable'.format(repo.name))
            continue

        acls = gerrit.get_access(repo.name)
        local_tag_acls = acls.get('local', {}).get('refs/tags/*', {})
        if local_tag_acls:
            rules = local_tag_acls.get('permissions',
                                       {}).get('pushSignedTag',
                                               {}).get('rules', {})
            # No explicit pushSignedTag rules means nothing to flag.
            if not rules and args.verbose:
                print('{}: OK'.format(repo.name))

            # Report each group granted pushSignedTag; only groups in
            # ALLOWED are considered acceptable.
            for group_id, permissions in rules.items():
                group_details = gerrit.get_group(group_id)
                group_name = group_details['name']
                if group_name in ALLOWED:
                    if args.verbose:
                        print('{}: {} pushSignedTag OK'.format(
                            repo.name, group_name))
                    continue
                if args.verbose:
                    print('{}: {} pushSignedTag WARNING'.format(
                        repo.name, group_name))
                else:
                    print('{}: {} pushSignedTag'.format(repo.name, group_name))
示例#9
0
def main():
    """Validate modified deliverable files.

    Runs the full set of validate_* checks against each YAML file
    given on the command line (defaulting to the files changed in the
    latest commit), collecting warnings and errors per file.

    :returns: 1 if any errors were found, 0 otherwise.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files and no arguments, '
              'skipping validation')
        return 0

    zuul_layout = project_config.get_zuul_layout_data()

    team_data = governance.get_team_data()

    errors = []
    warnings = []

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                # Best-effort cleanup; never let a cleanup failure
                # mask the validation result.
                pass
        else:
            print('not cleaning up %s' % workdir)

    atexit.register(cleanup_workdir)

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.isfile(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # safe_load avoids constructing arbitrary Python objects
            # from YAML tags in the input file.
            deliverable_info = yaml.safe_load(f.read())

        # The series name is the directory containing the deliverable
        # file, e.g. deliverables/ocata/foo.yaml -> "ocata".
        series_name = os.path.basename(os.path.dirname(filename))

        def mk_warning(msg):
            print('WARNING: {}'.format(msg))
            warnings.append('{}: {}'.format(filename, msg))

        def mk_error(msg):
            print('ERROR: {}'.format(msg))
            errors.append('{}: {}'.format(filename, msg))

        validate_bugtracker(deliverable_info, mk_warning, mk_error)
        validate_team(deliverable_info, team_data, mk_warning, mk_error)
        validate_release_notes(deliverable_info, mk_warning, mk_error)
        validate_type(deliverable_info, mk_warning, mk_error)
        validate_model(deliverable_info, series_name, mk_warning, mk_error)
        validate_releases(
            deliverable_info,
            zuul_layout,
            series_name,
            workdir,
            mk_warning,
            mk_error,
        )
        # Some rules only apply to the most current release.
        if series_name == defaults.RELEASE:
            validate_new_releases(
                deliverable_info,
                filename,
                team_data,
                mk_warning,
                mk_error,
            )
            validate_series_open(
                deliverable_info,
                series_name,
                filename,
                mk_warning,
                mk_error,
            )
        validate_series_first(
            deliverable_info,
            series_name,
            mk_warning,
            mk_error,
        )
        validate_branch_prefixes(
            deliverable_info,
            mk_warning,
            mk_error,
        )
        validate_stable_branches(
            deliverable_info,
            series_name,
            mk_warning,
            mk_error,
        )
        validate_feature_branches(
            deliverable_info,
            workdir,
            mk_warning,
            mk_error,
        )
        validate_driverfixes_branches(
            deliverable_info,
            workdir,
            mk_warning,
            mk_error,
        )

    if warnings:
        print('\n\n%s warnings found' % len(warnings))
        for w in warnings:
            print(w)

    if errors:
        print('\n\n%s errors found' % len(errors))
        for e in errors:
            print(e)

    return 1 if errors else 0
示例#10
0
def main():
    """Bootstrap final-release entries for release-candidate deliverables.

    Scans the deliverable files for the given series; for each one
    whose latest release is a pre-release (alpha/beta/rc), computes the
    final version number and appends a new release block to the file.

    cycle-trailing deliverables are skipped unless --all is given.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--all',
        default=False,
        action='store_true',
        help='process all deliverables, including release:cycle-trailing',
    )
    parser.add_argument(
        '--verbose',
        '-v',
        action='store_true',
        default=False,
        help='produce detailed output',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        'prior_series',
        help='the name of the previous series',
    )
    parser.add_argument('series',
                        help='the name of the release series to work on')
    args = parser.parse_args()

    if args.verbose:

        def verbose(msg):
            print(msg)
    else:

        def verbose(msg):
            pass

    deliverables_dir = args.deliverables_dir

    team_data = governance.get_team_data()
    teams = [governance.Team(n, i) for n, i in team_data.items()]
    deliverables = {d.name: d for t in teams for d in t.deliverables.values()}

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                pass
        else:
            print('not cleaning up %s' % workdir)

    atexit.register(cleanup_workdir)

    pattern = os.path.join(deliverables_dir, args.series, '*.yaml')
    verbose('Scanning {}'.format(pattern))
    deliverable_files = sorted(glob.glob(pattern))

    for filename in deliverable_files:
        verbose('\n{}'.format(filename))
        deliverable_name = os.path.basename(filename)[:-5]
        with open(filename, 'r') as f:
            deliverable_data = yaml.safe_load(f)
        releases = deliverable_data.get('releases')
        if not releases:
            verbose('#  no releases')
            continue
        latest_release = releases[-1]
        projects = latest_release.get('projects')
        if not projects:
            verbose('#  no projects')
            continue
        # NOTE(review): substring match — assumes pre-release versions
        # embed 'a', 'b', or 'rc' (e.g. 1.0.0.0rc1); confirm versioning
        # scheme.
        for pre_rel in ['a', 'b', 'rc']:
            if pre_rel in str(latest_release['version']):
                break
        else:  # we did not find any pre_rel
            verbose('#  not a release candidate')
            continue
        # Use a distinct local name so we do not shadow the imported
        # ``deliverable`` module.
        team_deliv = deliverables.get(deliverable_name)
        if team_deliv and 'release:cycle-trailing' in team_deliv.tags:
            verbose(
                '#  {} is a cycle-trailing project'.format(deliverable_name))
            if not args.all:
                continue
        # The new version is the same as the latest release version
        # without the pre-release component at the end. Make sure it
        # has 3 sets of digits.
        new_version = '.'.join(
            (latest_release['version'].split('.')[:-1] + ['0'])[:3])
        branch = 'stable/{}'.format(args.prior_series)
        diff_start = get_prior_branch_point(
            workdir,
            projects[0]['repo'],
            branch,
        )
        deliverable_data['releases'].append({
            'version':
            new_version,
            'diff_start':
            diff_start,
            'projects':
            latest_release['projects'],
        })
        print('new version for {}: {}'.format(os.path.basename(filename),
                                              new_version))

        # NOTE(dhellmann): PyYAML doesn't preserve layout when you
        # write the data back out, so do the formatting ourselves.
        projects = '\n'.join(
            PROJECT_TEMPLATE.format(**p) for p in latest_release['projects'])
        new_block = VERSION_TEMPLATE.format(
            version=new_version,
            diff_start=diff_start,
            diff_start_comment=('# ' if diff_start is None else ''),
            projects=projects,
        ).rstrip() + '\n'
        with open(filename, 'a') as f:
            f.write(new_block)
示例#11
0
def main():
    """Print a review report for proposed releases.

    For each modified deliverable file, clone the repositories being
    released, then show the commits, tags, branches, requirements
    changes, and release notes a reviewer needs in order to evaluate
    the release request.

    :returns: 0 always; problems are reported as printed output.
    """
    if not sys.stdout.encoding:
        # Wrap sys.stdout with a writer that knows how to handle
        # encoding Unicode data.
        import codecs
        wrapped_stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdout = wrapped_stdout

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--no-shortcut', '--force', '-f',
        dest='shortcut',
        default=True,
        action='store_false',
        help='if a tag has been applied, skip the repo',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Set up logging, including making some loggers quiet.
    logging.basicConfig(
        format='%(levelname)7s: %(message)s',
        stream=sys.stdout,
        level=logging.DEBUG,
    )
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            shutil.rmtree(workdir, True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    team_data = governance.get_team_data()
    official_repos = set(
        r.name
        for r in governance.get_repositories(team_data)
    )

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        deliv = deliverable.Deliverable.read_file(filename)

        # Anything that is not the independent series and not the
        # current series is reviewed as a stable-branch release.
        stable_branch = deliv.series not in ['independent', defaults.RELEASE]

        # By default assume the project does not use milestones.
        header('Release model')
        print(deliv.model)

        header('Team details')
        if deliv.team:
            team_name = deliv.team
            team_dict = team_data.get(team_name)
            if team_dict:
                team = governance.Team(team_name, team_dict)
                print('found team %s' % team_name)
                print('  PTL    : %(name)s (%(irc)s)' % team.ptl)
                print('  Liaison: %s (%s)\n' % team.liaison)
                team_deliv = team.deliverables.get(deliv.name)
                if team_deliv:
                    print('found deliverable %s' % deliv.name)
                    for rn, repo in sorted(team_deliv.repositories.items()):
                        follows_stable_policy = 'stable:follows-policy' in repo.tags
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print('  %s' % t)
                        print('')
                        if stable_branch and follows_stable_policy:
                            banner('Needs Stable Policy Review')
                            print()
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') %
                          (deliv.name, team_name))
            else:
                print('no team %r found, cannot report on governance status' %
                      team_name)
        else:
            print('no team name given, cannot report on governance status')

        # If there are no releases listed, this is probably a new
        # deliverable file for initializing a new series. We don't
        # need to list its changes.
        if not deliv.is_released:
            header('No releases')
            print('no releases were found, assuming an initialization file')
            continue

        # assume the releases are in order and take the last one
        new_release = deliv.releases[-1]

        for project in new_release.projects:

            tag_exists = gitutils.tag_exists(
                project.repo.name,
                new_release.version,
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project.repo.name, new_release.version))
                if args.shortcut:
                    print('skipping further processing')
                    continue

            if project.repo.is_retired:
                print('%s is retired' % (project.repo.name,))
                if args.shortcut:
                    print('skipping further processing')
                    continue

            # Start by checking out master, always. We need the repo
            # checked out before we can tell if the stable branch
            # really exists.
            gitutils.clone_repo(
                workdir,
                project.repo.name,
                branch='master',
            )

            # Set some git configuration values to allow us to perform
            # local operations like tagging.
            gitutils.ensure_basic_git_config(
                workdir, project.repo.name,
                {'user.email': '*****@*****.**',
                 'user.name': 'OpenStack Proposal Bot'},
            )

            # Determine which branch we should actually be looking
            # at. Assume any series for which there is no stable
            # branch will be on 'master'.
            if gitutils.stable_branch_exists(workdir,
                                             project.repo.name,
                                             deliv.series):
                branch = 'stable/' + deliv.series
            else:
                branch = 'master'

            if branch != 'master':
                # Check out the repo again to the right branch if we
                # didn't get it the first time.
                gitutils.clone_repo(
                    workdir,
                    project.repo.name,
                    branch=branch,
                )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project.repo.name,
                '{}^'.format(project.hash)
            )
            try:
                previous_release = deliv.get_release(previous_tag)
            except ValueError:
                previous_release = None

            # The diff/log range starts at the previous tag.
            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x.repo.name: x
                    for x in previous_release.projects
                }.get(project.repo.name)
                if previous_project is not None:
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project.hash)
            else:
                git_range = project.hash

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release.version)
            print('\ngit describe %s\n' % project.hash)
            try:
                subprocess.check_call(
                    ['git', 'describe', project.hash],
                    cwd=os.path.join(workdir, project.repo.name),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project.repo.name,
                title='Check existing tags',
                ref=project.hash,
            )

            git_list_existing_branches(
                workdir=workdir,
                repo=project.repo.name,
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project.repo.name,
                title='Branches containing commit',
                commit=project.hash,
            )

            header('Relationship to HEAD')
            if deliv.is_independent:
                if branches:
                    tag_branch = branches[0]
                else:
                    tag_branch = branch
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project.repo.name,
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                if (branch in branches) or (not branches):
                    tag_branch = branch
                else:
                    tag_branch = branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project.repo.name,
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            requested_sha = gitutils.sha_for_tag(
                workdir,
                project.repo.name,
                project.hash,
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir, project.repo.name, 'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            show_watched_queries(branch, project.repo.name)

            # Show any requirements changes in the upcoming release.
            # Include setup.cfg, in case the project uses "extras".
            if start_range:
                git_diff(workdir, project.repo.name, git_range, '*requirements*.txt',
                         'Requirements Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range, 'doc/requirements.txt',
                         'Doc Requirements Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range, 'setup.cfg',
                         'setup.cfg Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range, 'bindep.txt',
                         'bindep.txt Changes %s' % git_range)

            # Before we try to determine if the previous release
            # is an ancestor or produce the release notes we need
            # the tag to exist in the local repository.
            if not tag_exists:
                header('Applying Temporary Tag')
                print('\ngit tag {version} {hash}'.format(
                    version=new_release.version,
                    hash=project.hash,
                ))
                subprocess.check_call(
                    ['git', 'tag', new_release.version,
                     project.hash],
                    cwd=os.path.join(workdir, project.repo.name),
                )

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                previous_tag_exists = gitutils.tag_exists(
                    project.repo.name,
                    previous_release.version,
                )
            if previous_tag_exists:
                git_log(
                    workdir, project.repo.name,
                    'Patches in previous release but not in this one',
                    [previous_release.version,
                     '--not',
                     project.hash],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )

                # The tag will have been added as a local tag above if
                # it does not already exist.
                header('New release %s includes previous release %s' %
                       (new_release.version, previous_release.version))
                print('\ngit tag --contains %s\n' %
                      previous_release.version)
                containing_tags = subprocess.check_output(
                    ['git', 'tag',
                     '--contains',
                     previous_release.version],
                    cwd=os.path.join(workdir, project.repo.name),
                ).decode('utf-8').split()
                print('Containing tags:', containing_tags)
                if new_release.version not in containing_tags:
                    print('WARNING: Missing %s' % new_release.version)
                else:
                    print('Found new version %s' % new_release.version)

                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project.repo.name,
                    previous_release.version,
                    project.hash,
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir, project.repo.name,
                    'Release %s will include' % new_release.version,
                    git_range,
                    extra_args=['--graph', '--oneline', '--decorate',
                                '--topo-order'])
            git_log(workdir, project.repo.name,
                    'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # The tag will have been added as a local tag above if it does
            # not already exist.
            header('Release Notes')
            try:
                notes = release_notes.generate_release_notes(
                    repo=project.repo.name,
                    repo_path=os.path.join(workdir, project.repo.name),
                    start_revision=new_release.diff_start or start_range,
                    end_revision=new_release.version,
                    show_dates=True,
                    skip_requirement_merges=True,
                    is_stable=branch.startswith('stable/'),
                    series=deliv.series,
                    email='*****@*****.**',
                    email_from='*****@*****.**',
                    email_reply_to='*****@*****.**',
                    email_tags='',
                    include_pypi_link=False,
                    changes_only=False,
                    first_release=deliv.is_first_release,
                    repo_name=project.repo.name,
                    description='',
                    publishing_dir_name=project.repo.name,
                )
            except Exception as e:  # NOTE: 'e' unused; logging.exception records the traceback
                logging.exception('Failed to produce release notes')
            else:
                print('\n')
                print(notes)

            if 'library' in deliv.type:
                show_dependency_listings(
                    project.guess_sdist_name(),
                    official_repos,
                )

    return 0
示例#12
0
def main():
    """Dump milestone-model deliverable dashboard data as CSV on stdout."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--series',
        default=defaults.RELEASE,
        help='the release series, such as "newton" or "ocata"',
    )
    args = parser.parse_args()

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )

    # Only deliverables following the milestone release model belong
    # on this dashboard.
    interesting_deliverables = [
        d
        for d in (deliverable.Deliverable(t, s, dn, da)
                  for t, s, dn, da in
                  all_deliv.get_deliverables(None, args.series))
        if d.model == MILESTONE
    ]

    team_data = governance.get_team_data()
    teams = {
        n.lower(): governance.Team(n, i)
        for n, i in team_data.items()
    }

    # Dump the dashboard data
    writer = csv.writer(sys.stdout)
    writer.writerow(
        ('Team',
         'Deliverable Type',
         'Deliverable Name',
         'Pre-RC1',
         'RC1',
         'Branched at',
         'Latest RC',
         'Release Notes',
         'Comments',
         'PTL Nick',
         'PTL Email',
         'IRC Channel')
    )

    for deliv in sorted(interesting_deliverables,
                        key=lambda x: (x.team, x.name)):
        # Look up the team for the deliverable being written. (The
        # previous code referenced 'd', a comprehension variable that
        # does not leak in Python 3, which raised NameError here.)
        team = teams[deliv.team.lower()]
        writer.writerow(
            (deliv.team.lower(),
             deliv.type,
             deliv.name,
             deliv.latest_release,
             '',  # RC1
             deliv.get_branch_location('stable/' + args.series),  # branched at
             '',  # latest RC
             deliv.release_notes,
             '',  # Comments
             team.data['ptl']['irc'],
             team.data['ptl']['email'],
             team.data.get('irc-channel'))
        )
示例#13
0
import itertools
import operator
import os.path

from docutils import nodes
from docutils.parsers import rst
from docutils.parsers.rst import directives
from docutils.statemachine import ViewList
from sphinx.util.nodes import nested_parse_with_titles

from openstack_releases import deliverable
from openstack_releases import governance
from openstack_releases import links
from openstack_releases import series_status

# Governance team metadata, fetched once at import time and shared by
# the Sphinx helpers in this module.
_TEAM_DATA = governance.get_team_data()
# Documentation describing the stable-branch maintenance phases.
_PHASE_DOC_URL = 'https://docs.openstack.org/project-team-guide/stable-branches.html#maintenance-phases'  # noqa


def _list_table(add, headers, data, title='', columns=None):
    """Build a list-table directive.

    :param add: Function to add one row to output.
    :param headers: List of header values.
    :param data: Iterable of row data, yielding lists or tuples with rows.
    :param title: Optional caption placed on the directive line.
    :param columns: Optional list of relative column widths.
    """
    add('.. list-table:: %s' % title)
    add('   :header-rows: 1')
    if columns:
        add('   :widths: %s' % (','.join(str(c) for c in columns)))
    add('')
    # NOTE(review): the visible body ends here without emitting
    # *headers* or *data*; the snippet appears truncated — confirm
    # against the full source.
示例#14
0
 def tags(self):
     """Return the governance tags applied to this deliverable."""
     # Fetch the governance team data once and cache it on the class,
     # so all Deliverable instances share a single lookup.
     if self._governance_data is None:
         Deliverable._governance_data = governance.get_team_data()
     return governance.get_tags_for_deliverable(self._governance_data,
                                                self.team, self.name)