Code example #1
def _initialize_deliverable_data(app):
    global _deliverables

    series_status_data = series_status.SeriesStatus.from_directory(
        'deliverables')
    deliverable.Deliverable.init_series_status_data(series_status_data)
    _deliverables = deliverable.Deliverables('deliverables')
Code example #2
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'deliverable',
        help='the name of the deliverable, such as "nova" or "oslo.config"',
    )
    parser.add_argument(
        '--series',
        default=defaults.RELEASE,
        help='the release series, such as "newton" or "ocata"',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    args = parser.parse_args()

    # Deal with the inconsistency of the name for the independent
    # directory.
    series = args.series
    if series == 'independent':
        series = '_independent'

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )
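    # Only the first history entry is needed to look up the owning team.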
    for entry in all_deliv.get_deliverable_history(args.deliverable):
        deliv = deliverable.Deliverable(*entry)
        print(deliv.team)
        break
Code example #3
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'deliverable',
        help='the name of the deliverable, such as "nova" or "oslo.config"',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--file',
        default=False,
        action='store_true',
        help='deliverable arg is a file path rather than a std. deliverable')
    args = parser.parse_args()

    # If we've been told the 'deliverable' is in fact a YAML file, *or* the
    # deliverable contains a '/', just load that file directly.
    if args.file or '/' in args.deliverable:
        deliv = deliverable.Deliverable.read_file(args.deliverable)
    else:
        all_deliv = deliverable.Deliverables(
            root_dir=args.deliverables_dir,
            collapse_history=False,
        )
        deliv = next(all_deliv.get_deliverable_history(args.deliverable))

    print(deliv.team)
Code example #4
File: deliverables.py  Project: juliakreger/releases
def _initialize_deliverable_data():
    global _deliverables
    global _series_status_data

    LOG.info('Loading deliverable data...')

    _series_status_data = series_status.SeriesStatus.default()
    deliverable.Deliverable.init_series_status_data(_series_status_data)
    _deliverables = deliverable.Deliverables('deliverables')
Code example #5
def _initialize_team_data(app):
    global _deliverables
    global _all_teams

    _deliverables = deliverable.Deliverables('deliverables')
    team_data = governance.get_team_data()
    for tn, td in team_data.items():
        _all_teams[tn] = td
        for dn, dd in td['deliverables'].items():
            _all_deliverable_types[dn] = _get_deliverable_type(dn, dd)
Code example #6
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'old_series',
        help='the previous release series, such as "newton"',
    )
    parser.add_argument(
        'new_series',
        help='the new release series, such as "ocata"',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    args = parser.parse_args()

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )

    new_deliverables = set(
        deliv.name
        for deliv in
        all_deliv.get_deliverables(None, args.new_series)
    )

    outdir = os.path.join(args.deliverables_dir, args.new_series)
    if not os.path.exists(outdir):
        print('creating output directory {}'.format(outdir))
        os.mkdir(outdir)

    old_deliverables = all_deliv.get_deliverables(None, args.old_series)
    for deliv in old_deliverables:
        if deliv.name in new_deliverables:
            continue
        if not deliv.is_released and not deliv.branches:
            # There were no releases for the deliverable in the
            # previous series, stop carrying it over.
            print('{} skipped (no releases in {})'.format(
                deliv.name, args.old_series))
            continue
        # Clean up some series-specific data that should not be copied
        # over.
        raw_data = deliv.data
        for key in IGNORE:
            if key in raw_data:
                del raw_data[key]
        outfilename = os.path.abspath(
            os.path.join(outdir, deliv.name + '.yaml')
        )
        with open(outfilename, 'w', encoding='utf-8') as f:
            print('{} created'.format(outfilename))
            f.write(yamlutils.dumps(raw_data))
Code example #7
File: aclmanager.py  Project: microgle/releases
def repositories_list(deliverables_dir, series):
    """Yields (team, repo) tuples for cycle-with-milestones deliverables"""
    deliverables = deliverable.Deliverables(deliverables_dir)
    for d in deliverables.get_deliverables(None, series):
        if d.model != 'cycle-with-milestones':
            continue
        if not d.repos:
            print('WARNING: no releases for {} in {}'.format(d.name, d.series))
        for repo in sorted(d.repos):
            if repo not in EXCEPTIONS:
                yield (d.team, repo)
Code example #8
File: dashboard.py  Project: jiriproX/releases
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--series',
        default=defaults.RELEASE,
        help='the release series, such as "newton" or "ocata"',
    )
    args = parser.parse_args()

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )

    interesting_deliverables = [
        d for d in (
            deliverable.Deliverable(t, s, dn, da)
            for t, s, dn, da in all_deliv.get_deliverables(None, args.series))
        if d.model == MILESTONE
    ]

    team_data = governance.get_team_data()
    teams = {n.lower(): governance.Team(n, i) for n, i in team_data.items()}

    # Dump the dashboard data
    writer = csv.writer(sys.stdout)
    writer.writerow(
        ('Team', 'Deliverable Type', 'Deliverable Name', 'Pre-RC1', 'RC1',
         'Branched at', 'Latest RC', 'Release Notes', 'Comments', 'PTL Nick',
         'PTL Email', 'Liaison Nick', 'IRC Channel'))

    for deliv in sorted(interesting_deliverables,
                        key=lambda x: (x.team, x.name)):
        team = teams[deliv.team.lower()]
        writer.writerow((
            deliv.team.lower(),
            deliv.type,
            deliv.name,
            deliv.latest_release,
            '',  # RC1
            deliv.get_branch_location('stable/' + args.series),  # branched at
            '',  # latest RC
            deliv.release_notes,
            '',  # Comments
            team.data['ptl']['irc'],
            team.data['ptl']['email'],
            team.liaison[1] or '',
            team.data.get('irc-channel')))
Code example #9
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'old_series',
        help='the previous release series, such as "newton"',
    )
    parser.add_argument(
        'new_series',
        help='the new release series, such as "ocata"',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    args = parser.parse_args()

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )

    new_deliverables = set(
        name
        for team, series, name, data in
        all_deliv.get_deliverables(None, args.new_series)
    )

    outdir = os.path.join(args.deliverables_dir, args.new_series)
    if not os.path.exists(outdir):
        print('creating output directory {}'.format(outdir))
        os.mkdir(outdir)

    old_deliverables = all_deliv.get_deliverables(None, args.old_series)
    for team, series, name, data in old_deliverables:
        if name in new_deliverables:
            continue
        if not data.get('releases'):
            # There were no releases for the deliverable in the
            # previous series, stop carrying it over.
            print('{} skipped (no releases in {})'.format(name, args.old_series))
            continue
        # Clean up some series-specific data that should not be copied
        # over.
        for key in ['releases', 'branches', 'release-notes']:
            if key in data:
                del data[key]
        outfilename = os.path.join(outdir, name + '.yaml')
        with open(outfilename, 'w') as f:
            print('{} created'.format(outfilename))
            f.write('---\n')
            yaml.dump(data, f, default_flow_style=False)
Code example #10
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'deliverable',
        help='the name of the deliverable, such as "nova" or "oslo.config"',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    args = parser.parse_args()

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )
    for deliv in all_deliv.get_deliverable_history(args.deliverable):
        print(deliv.team)
        break
Code example #11
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--all',
        default=False,
        action='store_true',
        help='process all deliverables, including cycle-trailing ones',
    )
    parser.add_argument(
        '--verbose', '-v',
        action='store_true',
        default=False,
        help='produce detailed output',
    )
    parser.add_argument(
        '--canary',
        action='store_true',
        default=False,
        help='process only a canary final release on release-test',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        'prior_series',
        help='the name of the previous series',
    )
    parser.add_argument(
        'series',
        help='the name of the release series to work on'
    )
    args = parser.parse_args()

    if args.verbose:
        def verbose(msg):
            print(msg)
    else:
        def verbose(msg):
            pass

    # Set up logging, including making some loggers quiet.
    logging.basicConfig(
        format='%(levelname)7s: %(message)s',
        stream=sys.stdout,
        level=logging.DEBUG if args.verbose else logging.INFO,
    )
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)

    deliverables_dir = args.deliverables_dir

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    verbose('Scanning {}/{}'.format(deliverables_dir, args.series))
    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )

    for deliv in all_deliv.get_deliverables(None, args.series):

        if args.canary and deliv.name != "release-test":
            continue

        verbose('\n{} {}'.format(deliv.name, deliv.model))

        if (deliv.model == 'cycle-trailing' or deliv.type == 'trailing'):
            verbose('#  {} is a cycle-trailing project'.format(
                deliv.name))
            if not args.all:
                continue

        if not deliv.releases:
            verbose('#  no releases')
            continue

        latest_release = deliv.releases[-1]
        projects = latest_release.projects
        if not projects:
            verbose('#  no projects in latest release')
            continue
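        # Only deliverables whose latest release is a pre-release (alpha, beta,
        # or release candidate) are candidates for a proposed final release.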
        for pre_rel in ['a', 'b', 'rc']:
            if pre_rel in str(latest_release.version):
                break
        else:  # we did not find any pre_rel
            verbose('#  {} was not a release candidate'.format(
                latest_release.version))
            continue

        # The new version is the same as the latest release version
        # without the pre-release component at the end. Make sure it
        # has 3 sets of digits.
        new_version = '.'.join(
            (latest_release.version.split('.')[:-1] + ['0'])[:3]
        )

        branch = 'stable/{}'.format(args.prior_series)
        diff_start = get_prior_branch_point(
            workdir, projects[0].repo.name, branch,
        )

        deliverable_data = deliv.data
        release_data = {
            'version': new_version,
            'projects': deliv.data['releases'][-1]['projects'],
        }
        if diff_start:
            release_data['diff-start'] = diff_start
        deliverable_data['releases'].append(release_data)
        print('new version for {}: {}'.format(
            deliv.name, new_version))

        filename = os.path.join(deliverables_dir, deliv.filename)
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(yamlutils.dumps(deliverable_data))
Code example #12
def main():
    parser = argparse.ArgumentParser()
    output_mode = parser.add_mutually_exclusive_group()
    output_mode.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        default=False,
        help='show more than the deliverable name',
    )
    output_mode.add_argument(
        '-r',
        '--repos',
        action='store_true',
        default=False,
        help='show the repository names not deliverable names',
    )
    parser.add_argument(
        '--team',
        help='the name of the project team, such as "Nova" or "Oslo"',
    )
    parser.add_argument(
        '--deliverable',
        help='the name of the deliverable, such as "nova" or "oslo.config"',
    )
    parser.add_argument(
        '--series',
        default=defaults.RELEASE,
        help='the release series, such as "newton" or "ocata"',
    )
    parser.add_argument(
        '--csvfile',
        help='Save results (same as when --verbose) to CSV file',
    )
    model = parser.add_mutually_exclusive_group()
    model.add_argument(
        '--model',
        help=('the release model, such as "cycle-with-milestones"'
              ' or "independent"'),
    )
    model.add_argument(
        '--cycle-based',
        action='store_true',
        default=False,
        help='include all cycle-based code repositories',
    )
    parser.add_argument(
        '--type',
        help='deliverable type, such as "library" or "service"',
    )
    parser.add_argument(
        '--tag',
        default=[],
        action='append',
        help='look for one or more tags on the deliverable or team',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--no-stable-branch',
        default=False,
        action='store_true',
        help='limit the list to deliverables without a stable branch',
    )
    grp = parser.add_mutually_exclusive_group()
    grp.add_argument(
        '--unreleased',
        default=False,
        action='store_true',
        help='limit the list to deliverables not released in the cycle',
    )
    grp.add_argument(
        '--missing-milestone',
        help=('deliverables that do not have the specified milestone as '
              'the most current release; for example 2 would look for .0b2 '
              'in the version number (implies --model cycle-with-milestones)'),
    )
    grp.add_argument(
        '--missing-rc',
        action='store_true',
        help=('deliverables that do not have a release candidate, yet '
              '(implies --model cycle-with-milestones)'),
    )
    grp.add_argument(
        '--missing-final',
        action='store_true',
        help='deliverables that have pre-releases but no final releases, yet',
    )
    args = parser.parse_args()

    # Deal with the inconsistency of the name for the independent
    # directory.
    series = args.series
    if series == 'independent':
        series = '_independent'

    if args.missing_milestone:
        model = 'cycle-with-milestones'
        version_ending = '.0b{}'.format(args.missing_milestone)
    elif args.missing_rc:
        model = 'cycle-with-milestones'
        version_ending = None
    elif args.missing_final:
        model = args.model
        version_ending = None
    else:
        model = args.model
        version_ending = None

    verbose_template = '{name:30} {team:20}'
    if not args.unreleased:
        verbose_template += ' {latest_release:15}'
    if not args.type:
        verbose_template += ' {type:15}'
    if not args.model:
        verbose_template += ' {model:15}'

    csvfile = None
    if args.csvfile:
        csvfile = open(args.csvfile, 'w')
        fieldnames = [
            'name', 'latest_release', 'repo', 'hash', 'team', 'type', 'model'
        ]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )
    for entry in all_deliv.get_deliverables(args.team, series):
        deliv = deliverable.Deliverable(*entry)

        if args.deliverable and deliv.name != args.deliverable:
            continue

        if model and deliv.model != model:
            continue
        if args.cycle_based and not deliv.is_cycle_based:
            continue
        if args.type and deliv.type != args.type:
            continue
        if args.no_stable_branch:
            if deliv.get_branch_location('stable/' + series) is not None:
                continue
        if args.unreleased and deliv.versions:
            continue
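        # --missing-milestone and --missing-rc list deliverables that still
        # lack the tag, so skip any whose latest release already has it.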
        if version_ending and deliv.latest_release and deliv.latest_release.endswith(
                version_ending):
            continue
        if args.missing_rc and deliv.latest_release and 'rc' in deliv.latest_release:
            continue
        if args.tag:
            # Skip deliverables that are missing any of the requested tags.
            tags = deliv.tags
            ignore = False
            for t in args.tag:
                if t not in tags:
                    ignore = True
                    break
            if ignore:
                continue
        if args.missing_final and deliv.latest_release:
            if not ('rc' in deliv.latest_release or 'a' in deliv.latest_release
                    or 'b' in deliv.latest_release):
                continue

        if csvfile:
            rel = (deliv.releases or [{}])[-1]
            for prj in rel.get('projects', [{}]):
                writer.writerow({
                    'name': deliv.name,
                    'latest_release': rel.get('version', None),
                    'repo': prj.get('repo', None),
                    'hash': prj.get('hash', None),
                    'team': deliv.team,
                    'type': deliv.type,
                    'model': deliv.model,
                })
        elif args.verbose:
            print(
                verbose_template.format(
                    name=deliv.name,
                    latest_release=deliv.latest_release,
                    team=deliv.team,
                    type=deliv.type,
                    model=deliv.model,
                ))
        elif args.repos:
            for r in sorted(deliv.repos):
                print(r)
        else:
            print(deliv.name)

    if csvfile:
        csvfile.close()
Code example #13
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'repository_cache',
        help='location of existing copies of repositories',
    )
    parser.add_argument(
        'series',
        help='the release series, such as "newton" or "ocata"',
    )
    parser.add_argument(
        'deliverable',
        nargs='+',
        help='the deliverable name',
    )
    args = parser.parse_args()

    workdir = tempfile.mkdtemp(prefix='releases-')

    def cleanup_workdir():
        if args.cleanup:
            shutil.rmtree(workdir, True)

    atexit.register(cleanup_workdir)

    branch_name = 'origin/stable/' + args.series

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )
    for deliv in all_deliv.get_deliverables(None, args.series):
        if deliv.name not in args.deliverable:
            continue
        if deliv.get_branch_location(branch_name) is not None:
            # the branch is already defined for this project
            sys.stderr.write('{} already has a branch {}\n'.format(
                deliv.name, branch_name))
            continue
        # We're only importing stable branches, and those are
        # specified by the version number. We therefore only need one
        # repository, and it shouldn't matter which one. That said, we
        # might not actually find the branch in the first repo so loop
        # until we do.
        for r in deliv.repos:
            reporoot = os.path.join(args.repository_cache, r)
            version = _get_branch_base(reporoot, branch_name)
            if version:
                print(deliv.name, args.series, version)
                break
        else:
            sys.stderr.write('could not find {} in any repos for {}\n'.format(
                branch_name, deliv.name))
Code example #14
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        default=False,
        help='show more than the deliverable name',
    )
    parser.add_argument(
        '--team',
        help='the name of the project team, such as "Nova" or "Oslo"',
    )
    parser.add_argument(
        '--deliverable',
        help='the name of the deliverable, such as "nova" or "oslo.config"',
    )
    parser.add_argument(
        '--series',
        default=defaults.RELEASE,
        help='the release series, such as "newton" or "ocata"',
    )
    model = parser.add_mutually_exclusive_group()
    model.add_argument(
        '--model',
        help=('the release model, such as "cycle-with-milestones"'
              ' or "independent"'),
    )
    model.add_argument(
        '--cycle-based',
        action='store_true',
        default=False,
        help='include all cycle-based code repositories',
    )
    parser.add_argument(
        '--type',
        help='deliverable type, such as "library" or "service"',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--branch',
        default=None,
        help='branch name, defaults to stable/$series',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    args = parser.parse_args()

    if args.verbose:

        def verbose(msg):
            print(msg)
    else:

        def verbose(msg):
            pass

    # Deal with the inconsistency of the name for the independent
    # directory.
    series = args.series
    if series == 'independent':
        series = '_independent'

    branch = args.branch
    if not branch:
        branch = 'stable/{}'.format(series)

    workdir = tempfile.mkdtemp(prefix='releases-')
    verbose('creating temporary files in {}'.format(workdir))

    def cleanup_workdir():
        if args.cleanup:
            try:
                verbose('cleaning up temporary files in {}'.format(workdir))
                shutil.rmtree(workdir)
            except Exception:
                pass
        else:
            print('not cleaning up {}'.format(workdir))

    atexit.register(cleanup_workdir)

    # Count any errors for our exit code.
    errors = 0

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )
    for entry in all_deliv.get_deliverables(args.team, series):
        deliv = deliverable.Deliverable(*entry)
        branch_loc = deliv.get_branch_location(branch)
        if branch_loc is None:
            verbose('No stable branch for {}'.format(deliv.name))
            continue
        all_versions = deliv.versions
        if all_versions[-1] == branch_loc:
            verbose('Most recent release for {} ({}) is at {}'.format(
                deliv.name, branch_loc, branch))
            continue
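        # Versions recorded after the branch point were released after the
        # stable branch was cut.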
        idx = all_versions.index(branch_loc)
        late_releases = all_versions[idx + 1:]
        print('{} releases {} come after {}'.format(deliv.name, late_releases,
                                                    branch))
        for repo in sorted(deliv.repos):
            verbose('cloning {}'.format(repo))
            gitutils.clone_repo(
                workdir,
                repo,
            )
            for version in late_releases:
                containing_br = gitutils.branches_containing(
                    workdir,
                    repo,
                    version,
                )
                for cb in containing_br:
                    if branch in cb:  # allow for remote prefix
                        verbose('{} version {} is on branch {}'.format(
                            repo, version, branch))
                        break
                else:
                    print('{} version {} is not on branch {} ({})'.format(
                        repo, version, branch, containing_br))
                    errors += 1

    return (1 if errors else 0)
Code example #15
def main():
    if not sys.stdout.encoding:
        # Wrap sys.stdout with a writer that knows how to handle
        # encoding Unicode data.
        import codecs
        wrapped_stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdout = wrapped_stdout

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--no-shortcut',
        '--force',
        '-f',
        dest='shortcut',
        default=True,
        action='store_false',
        help='if a tag has been applied, skip the repo',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Set up logging, including making some loggers quiet.
    logging.basicConfig(
        format='%(levelname)7s: %(message)s',
        stream=sys.stdout,
        level=logging.DEBUG,
    )
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            shutil.rmtree(workdir, True)
        else:
            print('not cleaning up %s' % workdir)

    atexit.register(cleanup_workdir)

    gov_data = governance.Governance.from_remote_repo()
    official_repos = set(r.name for r in gov_data.get_repositories())

    all_deliverables = deliverable.Deliverables(
        './deliverables',
        False,
    )

    liaison_data = liaisons.get_liaisons()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        deliv = deliverable.Deliverable.read_file(filename)

        stable_branch = deliv.series not in ['independent', defaults.RELEASE]

        # By default assume the project does not use milestones.
        header('Release model')
        print(deliv.model)

        header('Team details')
        if deliv.team:
            team_name = deliv.team
            try:
                team = gov_data.get_team(team_name)
            except ValueError:
                team = None
            if team:
                print('found team %s' % team_name)
                print('  PTL    : %(name)s (%(irc)s)' % team.ptl)
                for liaison in liaison_data.get(team.name.lower(), []):
                    print('  Liaison: %(name)s (%(irc)s)' % liaison)
                team_deliv = team.deliverables.get(deliv.name)
                if team_deliv:
                    print('found deliverable %s' % deliv.name)
                    for rn, repo in sorted(team_deliv.repositories.items()):
                        follows_stable_policy = 'stable:follows-policy' in repo.tags
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print('  %s' % t)
                        print('')
                        if stable_branch and follows_stable_policy:
                            banner('Needs Stable Policy Review')
                            print()
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') %
                          (deliv.name, team_name))
                if not deliv.is_independent:
                    # Show other deliverables owned by the team and
                    # included in this series.
                    team_deliv_in_series = all_deliverables.get_deliverables(
                        team.name, deliv.series)
                    if team_deliv_in_series:
                        print('Other {} deliverables in {}:'.format(
                            team.name, deliv.series))
                    for d in team_deliv_in_series:
                        print('  {} ({})'.format(d.name, d.latest_release
                                                 or None))
                    print()
            else:
                print('no team %r found, cannot report on governance status' %
                      team_name)
        else:
            print('no team name given, cannot report on governance status')

        # If there are no releases listed, this is probably a new
        # deliverable file for initializing a new series. We don't
        # need to list its changes.
        if not deliv.is_released:
            header('No releases')
            print('no releases were found, assuming an initialization file')
            continue

        # assume the releases are in order and take the last one
        new_release = deliv.releases[-1]

        for project in new_release.projects:

            tag_exists = gitutils.tag_exists(
                project.repo.name,
                new_release.version,
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project.repo.name, new_release.version))
                if args.shortcut:
                    print('skipping further processing')
                    continue

            if project.repo.is_retired:
                print('%s is retired' % (project.repo.name, ))
                if args.shortcut:
                    print('skipping further processing')
                    continue

            # Start by checking out master, always. We need the repo
            # checked out before we can tell if the stable branch
            # really exists.
            gitutils.clone_repo(
                workdir,
                project.repo.name,
                branch='master',
            )

            # Set some git configuration values to allow us to perform
            # local operations like tagging.
            gitutils.ensure_basic_git_config(
                workdir,
                project.repo.name,
                {
                    'user.email': '*****@*****.**',
                    'user.name': 'OpenStack Proposal Bot'
                },
            )

            # Determine which branch we should actually be looking
            # at. Assume any series for which there is no stable
            # branch will be on 'master'.
            if gitutils.stable_branch_exists(workdir, project.repo.name,
                                             deliv.series):
                branch = 'stable/' + deliv.series
            else:
                branch = 'master'

            if branch != 'master':
                # Check out the repo again to the right branch if we
                # didn't get it the first time.
                gitutils.clone_repo(
                    workdir,
                    project.repo.name,
                    branch=branch,
                )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project.repo.name,
                '{}^'.format(project.hash),
                always=False,
            )
            try:
                previous_release = deliv.get_release(previous_tag)
            except ValueError:
                previous_release = None

            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x.repo.name: x
                    for x in previous_release.projects
                }.get(project.repo.name)
                if previous_project is not None:
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project.hash)
            else:
                git_range = project.hash

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release.version)
            print('\ngit describe %s\n' % project.hash)
            try:
                subprocess.check_call(
                    ['git', 'describe', project.hash],
                    cwd=os.path.join(workdir, project.repo.name),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project.repo.name,
                title='Check existing tags',
                ref=project.hash,
            )

            git_list_existing_branches(
                workdir=workdir,
                repo=project.repo.name,
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project.repo.name,
                title='Branches containing commit',
                commit=project.hash,
            )

            header('Relationship to HEAD')
            if deliv.is_independent:
                if branches:
                    tag_branch = branches[0]
                else:
                    tag_branch = branch
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project.repo.name,
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                if (branch in branches) or (not branches):
                    tag_branch = branch
                else:
                    tag_branch = branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project.repo.name,
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            requested_sha = gitutils.sha_for_tag(
                workdir,
                project.repo.name,
                project.hash,
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir,
                        project.repo.name,
                        'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            show_watched_queries(branch, project.repo.name)

            # Show any requirements changes in the upcoming release.
            # Include setup.cfg, in case the project uses "extras".
            if start_range:
                git_diff(workdir, project.repo.name, git_range,
                         '*requirements*.txt',
                         'Requirements Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range,
                         'doc/requirements.txt',
                         'Doc Requirements Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range, 'setup.cfg',
                         'setup.cfg Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range, 'bindep.txt',
                         'bindep.txt Changes %s' % git_range)

            # Before we try to determine if the previous release
            # is an ancestor or produce the release notes we need
            # the tag to exist in the local repository.
            if not tag_exists:
                header('Applying Temporary Tag')
                print('\ngit tag {version} {hash}'.format(
                    version=new_release.version,
                    hash=project.hash,
                ))
                subprocess.check_call(
                    ['git', 'tag', new_release.version, project.hash],
                    cwd=os.path.join(workdir, project.repo.name),
                )

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                previous_tag_exists = gitutils.tag_exists(
                    project.repo.name,
                    previous_release.version,
                )
            if previous_tag_exists:
                git_log(
                    workdir,
                    project.repo.name,
                    'Patches in previous release but not in this one',
                    [previous_release.version, '--not', project.hash],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )

                # The tag will have been added as a local tag above if
                # it does not already exist.
                header('New release %s includes previous release %s' %
                       (new_release.version, previous_release.version))
                print('\ngit tag --contains %s\n' % previous_release.version)
                containing_tags = subprocess.check_output(
                    ['git', 'tag', '--contains', previous_release.version],
                    cwd=os.path.join(workdir, project.repo.name),
                ).decode('utf-8').split()
                print('Containing tags:', containing_tags)
                if new_release.version not in containing_tags:
                    print('WARNING: Missing %s' % new_release.version)
                else:
                    print('Found new version %s' % new_release.version)

                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project.repo.name,
                    previous_release.version,
                    project.hash,
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir,
                    project.repo.name,
                    'Release %s will include' % new_release.version,
                    git_range,
                    extra_args=[
                        '--graph', '--oneline', '--decorate', '--topo-order'
                    ])
            git_log(workdir,
                    project.repo.name,
                    'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # The tag will have been added as a local tag above if it does
            # not already exist.
            header('Release Notes')
            try:
                notes = release_notes.generate_release_notes(
                    repo=project.repo.name,
                    repo_path=os.path.join(workdir, project.repo.name),
                    start_revision=new_release.diff_start or start_range or '',
                    end_revision=new_release.version,
                    show_dates=True,
                    skip_requirement_merges=True,
                    is_stable=branch.startswith('stable/'),
                    series=deliv.series,
                    email='*****@*****.**',
                    email_from='*****@*****.**',
                    email_reply_to='*****@*****.**',
                    email_tags='',
                    include_pypi_link=False,
                    changes_only=False,
                    first_release=deliv.is_first_release,
                    deliverable_file=filename,
                    description='',
                    publishing_dir_name=project.repo.name,
                )
            except Exception as e:
                logging.exception('Failed to produce release notes')
            else:
                print('\n')
                print(notes)

            if 'library' in deliv.type:
                show_dependency_listings(
                    project.guess_sdist_name(),
                    official_repos,
                )

    return 0
Code example #16
def main():
    deliverable_schema = schema.Schema()

    parser = argparse.ArgumentParser()
    output_mode = parser.add_mutually_exclusive_group()
    output_mode.add_argument(
        '-v', '--verbose',
        action='store_true',
        default=False,
        help='show more than the deliverable name',
    )
    output_mode.add_argument(
        '-r', '--repos',
        action='store_true',
        default=False,
        help='show the repository names not deliverable names',
    )
    output_mode.add_argument(
        '-a', '--all-releases',
        action='store_true',
        default=False,
        help='show all of the releases for each deliverable',
    )
    parser.add_argument(
        '--group-by',
        dest='group_key',
        default=None,
        choices=['team', 'type', 'model'],
        help='group output by the specified value',
    )
    parser.add_argument(
        '--team',
        help='the name of the project team, such as "Nova" or "Oslo"',
    )
    parser.add_argument(
        '--deliverable',
        help='the name of the deliverable, such as "nova" or "oslo.config"',
    )
    parser.add_argument(
        '--series',
        default=defaults.RELEASE,
        help='the release series, such as "newton" or "ocata"',
    )
    parser.add_argument(
        '--csvfile',
        help='Save results (same as when --verbose) to CSV file',
    )
    parser.add_argument(
        '--show-dates',
        action='store_true',
        default=False,
        help='Show last release date (in verbose mode)',
    )
    parser.add_argument(
        '--show-tags',
        action='store_true',
        default=False,
        help='Show tags associated with deliverable (in verbose mode)',
    )
    model = parser.add_mutually_exclusive_group()
    model.add_argument(
        '--model',
        help=('the release model, such as "cycle-with-rc"'
              ' or "independent"'),
        choices=sorted(deliverable_schema.release_models + ['independent']),
    )
    model.add_argument(
        '--cycle-based',
        action='store_true',
        default=False,
        help='include all cycle-based deliverables',
    )
    model.add_argument(
        '--cycle-based-no-trailing',
        action='store_true',
        default=False,
        help='include all cycle-based deliverables, except trailing ones',
    )
    parser.add_argument(
        '--type',
        default=[],
        action='append',
        choices=sorted(deliverable_schema.release_types),
        help='deliverable type, such as "library" or "service"',
    )
    parser.add_argument(
        '--tag',
        default=[],
        action='append',
        help='look for one or more tags on the deliverable or team',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--no-stable-branch',
        default=False,
        action='store_true',
        help='limit the list to deliverables without a stable branch',
    )
    grp = parser.add_mutually_exclusive_group()
    grp.add_argument(
        '--unreleased',
        default=False,
        action='store_true',
        help='limit the list to deliverables not released in the cycle',
    )
    grp.add_argument(
        '--unreleased-since',
        help=('limit the list to deliverables not released in the cycle '
              'since a given YYYY-MM-DD date'),
    )
    grp.add_argument(
        '--missing-rc',
        action='store_true',
        help=('deliverables that do not have a release candidate, yet '
              '(implies --model cycle-with-rc)'),
    )
    grp.add_argument(
        '--missing-final',
        action='store_true',
        help='deliverables that have pre-releases but no final releases, yet',
    )
    grp.add_argument(
        '--forced',
        action='store_true',
        help=('releases that have the "forced" flag applied '
              '(implies --all-releases)'),
    )
    args = parser.parse_args()

    series = args.series
    GET_REFS_API = 'https://opendev.org/api/v1/repos/{}/git/{}'
    GET_COMMIT_API = 'https://opendev.org/api/v1/repos/{}/git/commits/{}'

    if args.missing_rc:
        model = 'cycle-with-rc'
        version_ending = None
    elif args.missing_final:
        model = args.model
        version_ending = None
    else:
        model = args.model
        version_ending = None

    if args.unreleased_since:
        args.show_dates = True

    verbose_template = '{name:30} {team:20}'
    if not args.unreleased:
        verbose_template += ' {latest_release:12}'
    if args.show_dates:
        verbose_template += ' {last_release_date:11}'
    if len(args.type) != 1:
        verbose_template += ' {type:15}'
    if not args.model:
        verbose_template += ' {model:15}'
    if args.show_tags:
        verbose_template += ' {tags}'

    if args.forced:
        args.all_releases = True

    csvfile = None
    if args.csvfile:
        csvfile = open(args.csvfile, 'w')
        fieldnames = ['name', 'latest_release', 'repo', 'hash',
                      'team', 'type', 'model']
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )
    deliv_iter = list(all_deliv.get_deliverables(args.team, series))
    if args.group_key:
        deliv_iter = sorted(deliv_iter,
                            key=operator.attrgetter(args.group_key))
        name_fmt = '  {}'
    else:
        name_fmt = '{}'
    cur_group = None
    for deliv in deliv_iter:
        if args.group_key:
            deliv_group = getattr(deliv, args.group_key)

        if args.deliverable and deliv.name != args.deliverable:
            continue

        if model and deliv.model != model:
            continue
        if args.cycle_based and not deliv.is_cycle_based:
            continue
        if args.cycle_based_no_trailing and (not deliv.is_cycle_based or
                                             deliv.type == 'trailing'):
            continue
        if args.type and deliv.type not in args.type:
            continue
        if args.no_stable_branch:
            if deliv.is_branchless:
                continue
            if deliv.name == 'release-test':
                continue
            if deliv.stable_branch_type is None:
                continue
            if deliv.get_branch_location('stable/' + series) is not None:
                continue
        if args.unreleased and (deliv.is_released or not deliv.is_releasable):
            continue
        if version_ending and deliv.is_released:
            found = False
            for release in deliv.releases:
                if release.version.endswith(version_ending):
                    found = True
                    break
            if found:
                continue
        if args.missing_rc and deliv.is_released and 'rc' in deliv.latest_release:
            continue
        if args.tag:
            tags = deliv.tags
            ignore = False
            for t in args.tag:
                if t not in tags:
                    ignore = True
                    break
            if ignore:
                continue

        tag_str = '(' + ', '.join(deliv.tags) + ')'

        if args.missing_final and deliv.latest_release:
            if not ('rc' in deliv.latest_release or
                    'a' in deliv.latest_release or
                    'b' in deliv.latest_release):
                continue

        release_date = {}
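        # Look up release dates from the opendev.org Gitea API when dates are
        # displayed or --unreleased-since filtering is requested.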
        if (args.show_dates or args.unreleased_since) and deliv.is_released:
            if args.all_releases:
                versions = [a.version for a in deliv.releases]
            else:
                versions = [deliv.releases[-1].version]
            for ver in versions:
                ref = "refs/tags/{}".format(ver)
                api = GET_REFS_API.format(deliv.repos[0], ref)
                tagsjson = requests.get(api).json()

                # Gitea returns either a single tag object, or a list of
                # tag objects containing the provided string. So we need to
                # filter the list for the exact match.
                if isinstance(tagsjson, list):
                    for release_tag in tagsjson:
                        if release_tag['ref'] == ref:
                            break
                else:
                    release_tag = tagsjson

                release_sha = release_tag['object']['sha']
                api = GET_COMMIT_API.format(deliv.repos[0], release_sha)
                release_commit = requests.get(api).json()['commit']
                release_date[ver] = release_commit['author']['date'][0:10]

        if args.unreleased_since and deliv.is_released:
            if release_date[ver] >= args.unreleased_since:
                continue

        if csvfile:
            rel = (deliv.releases or [{}])[-1]
            for prj in rel.get('projects', [{}]):
                writer.writerow({
                    'name': deliv.name,
                    'latest_release': rel.get('version', None),
                    'repo': prj.get('repo', None),
                    'hash': prj.get('hash', None),
                    'team': deliv.team,
                    'type': deliv.type,
                    'model': deliv.model,
                })
        elif args.all_releases:
            for r in deliv.releases:
                if args.forced and not r.was_forced:
                    continue
                print(verbose_template.format(
                    name=deliv.name,
                    latest_release=r.version,
                    last_release_date=release_date.get(r.version, ''),
                    team=deliv.team,
                    type=deliv.type,
                    model=deliv.model,
                    tags=tag_str,
                ))
        elif args.verbose:
            print(verbose_template.format(
                name=deliv.name,
                latest_release=deliv.latest_release or '',
                last_release_date=release_date.get(deliv.latest_release, ''),
                team=deliv.team,
                type=deliv.type,
                model=deliv.model,
                tags=tag_str,
            ))
        elif args.repos:
            if args.group_key and cur_group != deliv_group:
                cur_group = deliv_group
                print(cur_group)
            for r in sorted(deliv.repos):
                print(name_fmt.format(r))
        else:
            if args.group_key and cur_group != deliv_group:
                cur_group = deliv_group
                print(cur_group)
            print(name_fmt.format(deliv.name))

    if csvfile:
        csvfile.close()
Code example #17
def main():
    deliverable_schema = schema.Schema()

    parser = argparse.ArgumentParser()
    output_mode = parser.add_mutually_exclusive_group()
    output_mode.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        default=False,
        help='show more than the deliverable name',
    )
    output_mode.add_argument(
        '-r',
        '--repos',
        action='store_true',
        default=False,
        help='show the repository names not deliverable names',
    )
    output_mode.add_argument(
        '-a',
        '--all-releases',
        action='store_true',
        default=False,
        help='show all of the releases for each deliverable',
    )
    parser.add_argument(
        '--group-by',
        dest='group_key',
        default=None,
        choices=['team', 'type', 'model'],
        help='group output by the specified value',
    )
    parser.add_argument(
        '--team',
        help='the name of the project team, such as "Nova" or "Oslo"',
    )
    parser.add_argument(
        '--deliverable',
        help='the name of the deliverable, such as "nova" or "oslo.config"',
    )
    parser.add_argument(
        '--series',
        default=defaults.RELEASE,
        help='the release series, such as "newton" or "ocata"',
    )
    parser.add_argument(
        '--csvfile',
        help='Save results (same as when --verbose) to CSV file',
    )
    model = parser.add_mutually_exclusive_group()
    model.add_argument(
        '--model',
        help=('the release model, such as "cycle-with-milestones"'
              ' or "independent"'),
        choices=sorted(deliverable_schema.release_models + ['independent']),
    )
    model.add_argument(
        '--cycle-based',
        action='store_true',
        default=False,
        help='include all cycle-based code repositories',
    )
    parser.add_argument(
        '--type',
        default=[],
        action='append',
        choices=sorted(deliverable_schema.release_types),
        help='deliverable type, such as "library" or "service"',
    )
    parser.add_argument(
        '--tag',
        default=[],
        action='append',
        help='look for one or more tags on the deliverable or team',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--no-stable-branch',
        default=False,
        action='store_true',
        help='limit the list to deliverables without a stable branch',
    )
    grp = parser.add_mutually_exclusive_group()
    grp.add_argument(
        '--unreleased',
        default=False,
        action='store_true',
        help='limit the list to deliverables not released in the cycle',
    )
    grp.add_argument(
        '--missing-milestone',
        help=('deliverables that do not have the specified milestone as '
              'their most recent release; for example, 2 would look for .0b2 '
              'in the version number (implies --model cycle-with-milestones)'),
    )
    grp.add_argument(
        '--missing-rc',
        action='store_true',
        help=('deliverables that do not yet have a release candidate '
              '(implies --model cycle-with-milestones)'),
    )
    grp.add_argument(
        '--missing-final',
        action='store_true',
        help='deliverables that have pre-releases but no final release yet',
    )
    args = parser.parse_args()

    series = args.series

    if args.missing_milestone:
        model = 'cycle-with-milestones'
        version_ending = '.0b{}'.format(args.missing_milestone)
    elif args.missing_rc:
        model = 'cycle-with-milestones'
        version_ending = None
    elif args.missing_final:
        model = args.model
        version_ending = None
    else:
        model = args.model
        version_ending = None

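    # Build the verbose output template dynamically, dropping columns that
    # would be redundant given the options already supplied (for example, the
    # latest-release column is meaningless when listing unreleased deliverables).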
    verbose_template = '{name:30} {team:20}'
    if not args.unreleased:
        verbose_template += ' {latest_release:15}'
    if len(args.type) != 1:
        verbose_template += ' {type:15}'
    if not args.model:
        verbose_template += ' {model:15}'
    verbose_template += ' {tags}'

    csvfile = None
    if args.csvfile:
        csvfile = open(args.csvfile, 'w')
        fieldnames = [
            'name', 'latest_release', 'repo', 'hash', 'team', 'type', 'model'
        ]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )
    deliv_iter = list(all_deliv.get_deliverables(args.team, series))
    if args.group_key:
        deliv_iter = sorted(deliv_iter,
                            key=operator.attrgetter(args.group_key))
        name_fmt = '  {}'
    else:
        name_fmt = '{}'
    cur_group = None
    for deliv in deliv_iter:
        if args.group_key:
            deliv_group = getattr(deliv, args.group_key)

        if args.deliverable and deliv.name != args.deliverable:
            continue

        if model and deliv.model != model:
            continue
        if args.cycle_based and not deliv.is_cycle_based:
            continue
        if args.type and deliv.type not in args.type:
            continue
        if args.no_stable_branch:
            if deliv.get_branch_location('stable/' + series) is not None:
                continue
        if args.unreleased and (deliv.is_released or not deliv.is_releasable):
            continue
        if version_ending and deliv.is_released and deliv.latest_release.endswith(
                version_ending):
            continue
        if args.missing_rc and deliv.is_released and 'rc' in deliv.latest_release:
            continue
        if args.tag:
            tags = deliv.tags
            ignore = False
            for t in args.tag:
                if t not in tags:
                    ignore = True
                    break
            if ignore:
                continue

        tag_str = '(' + ', '.join(deliv.tags) + ')'

        if args.missing_final and deliv.latest_release:
            if not ('rc' in deliv.latest_release or 'a' in deliv.latest_release
                    or 'b' in deliv.latest_release):
                continue

        if csvfile:
            rel = (deliv.releases or [{}])[-1]
            for prj in rel.get('projects', [{}]):
                writer.writerow({
                    'name': deliv.name,
                    'latest_release': rel.get('version', None),
                    'repo': prj.get('repo', None),
                    'hash': prj.get('hash', None),
                    'team': deliv.team,
                    'type': deliv.type,
                    'model': deliv.model,
                })
        elif args.all_releases:
            for r in deliv.releases:
                print(
                    verbose_template.format(
                        name=deliv.name,
                        latest_release=r.get('version', ''),
                        team=deliv.team,
                        type=deliv.type,
                        model=deliv.model,
                        tags=tag_str,
                    ))
        elif args.verbose:
            print(
                verbose_template.format(
                    name=deliv.name,
                    latest_release=deliv.latest_release or '',
                    team=deliv.team,
                    type=deliv.type,
                    model=deliv.model,
                    tags=tag_str,
                ))
        elif args.repos:
            if args.group_key and cur_group != deliv_group:
                cur_group = deliv_group
                print(cur_group)
            for r in sorted(deliv.repos):
                print(name_fmt.format(r))
        else:
            if args.group_key and cur_group != deliv_group:
                cur_group = deliv_group
                print(cur_group)
            print(name_fmt.format(deliv.name))

    if csvfile:
        csvfile.close()
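The --missing-milestone option is a plain version-suffix check: the script builds an expected ending such as ".0b2" and reports deliverables whose most recent version does not end with it. A minimal sketch of that check, using a hypothetical version string, looks like this:

# Sketch of the --missing-milestone suffix check (hypothetical version number).
milestone = 2
version_ending = '.0b{}'.format(milestone)   # '.0b2'
latest_release = '15.0.0.0b1'                # hypothetical most recent release
# Reported as missing the milestone because the version does not carry the
# expected beta suffix.
print(not latest_release.endswith(version_ending))   # True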
Code example #18
def _initialize_deliverable_data(app):
    global _deliverables

    _deliverables = deliverable.Deliverables('deliverables')
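The app argument suggests this loader is part of a Sphinx extension; a plausible registration sketch (an assumption, not shown in the example itself) would hook it to a build event:

def setup(app):
    # Assumed wiring: run the deliverable loader once the builder is initialized.
    app.connect('builder-inited', _initialize_deliverable_data)
    return {'parallel_read_safe': True}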
Code example #19
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--verbose',
        '-v',
        action='store_true',
        default=False,
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    args = parser.parse_args()

    config_filename = os.path.join(
        appdirs.user_config_dir('openstack-release', 'openstack'),
        'gerrit.ini',
    )
    config = configparser.ConfigParser()
    config.read(config_filename, encoding='utf-8')

    if not config.has_option('DEFAULT', 'username'):
        parser.error('No username set in {}'.format(config_filename))
    if not config.has_option('DEFAULT', 'password'):
        parser.error('No password set in {}'.format(config_filename))

    team_data = governance.get_team_data()

    # Some deliverables were independent at one time but might not be
    # any more, so compare the independent list with the current
    # release series.
    all_independent_deliverables = set(
        name for team, series, name, deliv in deliverable.Deliverables(
            root_dir=args.deliverables_dir,
            collapse_history=True,
        ).get_deliverables(None, None))
    current_deliverables = set(
        name for team, series, name, deliv in deliverable.Deliverables(
            root_dir=args.deliverables_dir,
            collapse_history=True,
        ).get_deliverables(None, defaults.RELEASE))
    independent_deliverables = all_independent_deliverables.difference(
        current_deliverables)

    gerrit = GerritClient(
        config['DEFAULT']['username'],
        config['DEFAULT']['password'],
    )

    for repo in governance.get_repositories(team_data, code_only=True):

        if repo.deliverable.team.name in IGNORED_TEAMS:
            if args.verbose:
                print('{}: ignoring {} team'.format(
                    repo.name, repo.deliverable.team.name))
            continue

        if repo.deliverable.name in independent_deliverables:
            if args.verbose:
                print('{}: ignoring independent deliverable'.format(repo.name))
            continue

        acls = gerrit.get_access(repo.name)
        local_tag_acls = acls.get('local', {}).get('refs/tags/*', {})
        if local_tag_acls:
            rules = local_tag_acls.get('permissions',
                                       {}).get('pushSignedTag',
                                               {}).get('rules', {})
            if not rules and args.verbose:
                print('{}: OK'.format(repo.name))

            for group_id, permissions in rules.items():
                group_details = gerrit.get_group(group_id)
                group_name = group_details['name']
                if group_name in ALLOWED:
                    if args.verbose:
                        print('{}: {} pushSignedTag OK'.format(
                            repo.name, group_name))
                    continue
                if args.verbose:
                    print('{}: {} pushSignedTag WARNING'.format(
                        repo.name, group_name))
                else:
                    print('{}: {} pushSignedTag'.format(repo.name, group_name))
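The audit script above only checks that gerrit.ini exists with credentials under [DEFAULT]; a minimal sketch for creating that file in the expected location (the credential values are placeholders) could look like this:

import configparser
import os

import appdirs

# Sketch: write the gerrit.ini file read by the script above. The username and
# password values here are placeholders for real Gerrit HTTP credentials.
config_dir = appdirs.user_config_dir('openstack-release', 'openstack')
os.makedirs(config_dir, exist_ok=True)
config = configparser.ConfigParser()
config['DEFAULT'] = {
    'username': 'example-user',
    'password': 'example-http-password',
}
with open(os.path.join(config_dir, 'gerrit.ini'), 'w', encoding='utf-8') as f:
    config.write(f)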