示例#1
0
def clone_repos(save_dir, projects):
    """Clones a bunch of openstack repos."""
    cloned = {}
    progress = tqdm(projects, unit="repo", desc="Cloning %s repos" % len(projects))
    # Each entry pairs the full project name with a short alias; only the
    # full name is needed for cloning and for the returned path map.
    for name, _short_name in progress:
        gitutils.clone_repo(save_dir, name)
        cloned[name] = os.path.join(save_dir, name)
    return cloned
示例#2
0
def eol_tag(args, series, deliverable_info):
    """Record a '<series>-eol' release for every repo that carries the tag.

    Mutates ``deliverable_info`` in place, appending a release entry only
    when at least one repository has the EOL tag.
    """
    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        # ignore_errors=True: cleanup at exit is best-effort.
        shutil.rmtree(workdir, True)

    atexit.register(cleanup_workdir)

    tag = '{}-eol'.format(series)
    tagged_projects = []
    new_release = {
        'version': tag,
        'projects': tagged_projects,
    }

    for repo_name in deliverable_info['repository-settings'].keys():
        if not gitutils.tag_exists(repo_name, tag):
            print('No {} tag for {}'.format(tag, repo_name))
            continue
        gitutils.clone_repo(workdir, repo_name)
        tagged_projects.append({
            'repo': repo_name,
            'hash': gitutils.sha_for_tag(workdir, repo_name, tag),
        })

    if tagged_projects:
        deliverable_info['releases'].append(new_release)
def get_prior_branch_point(workdir, repo, branch):
    """Return the tag of the base of the branch.

    The diff-start is the old version is the tag on the commit where
    we created the branch. To determine that, we need to clone the
    repo and look at the branch.

    See
    http://lists.openstack.org/pipermail/openstack-dev/2016-October/104901.html
    for a better description of what the desired tag info is.

    :param workdir: scratch directory to clone the repository into
    :param repo: name of the repository to inspect
    :param branch: branch whose base tag is wanted
    :returns: the tag string, or None if no suitable tag can be found
    """
    gitutils.clone_repo(workdir, repo)
    branch_base = gitutils.get_branch_base(
        workdir, repo, branch,
    )
    if branch_base:
        return gitutils.get_latest_tag(
            workdir, repo, branch_base,
        )
    # Work backwards from the most recent commit looking for the first
    # version that is not a pre-release, and assume that is the
    # previous release on a non-branching repository like for the
    # os-*-config tools.
    start = None
    while True:
        print('  looking for version before {}'.format(start))
        version = gitutils.get_latest_tag(workdir, repo, start)
        if not version:
            return None
        if not PRE_RELEASE.search(version):
            return version
        # Step to the parent of the pre-release tag and keep searching.
        start = '{}^'.format(version)
    # NOTE: the loop above returns on every path, so no trailing return
    # is needed (the original had an unreachable "return version" here).
示例#4
0
def clone_repos(save_dir, projects):
    """Clones a bunch of openstack repos."""
    description = 'Cloning %s repos' % len(projects)
    cloned_paths = {}
    # The short alias in each (project, short_project) pair is unused here.
    for full_name, _ in tqdm(projects, unit='repo', desc=description):
        gitutils.clone_repo(save_dir, full_name)
        cloned_paths[full_name] = os.path.join(save_dir, full_name)
    return cloned_paths
示例#5
0
def get_requirements_at_ref(workdir, repo, ref):
    """Check out the repo at the ref and load the list of requirements."""
    body = ''

    try:
        dest = gitutils.clone_repo(workdir, repo, ref=ref)
        processutils.check_call(['python3', 'setup.py', 'sdist'], cwd=dest)
        sdist_name = pythonutils.get_sdist_name(workdir, repo)
        requirements_filename = os.path.join(
            dest, sdist_name + '.egg-info', 'requires.txt')
        # A package with no dependencies produces no requires.txt, in
        # which case body stays empty.
        if os.path.exists(requirements_filename):
            with open(requirements_filename, 'r') as f:
                body = f.read()
    except Exception:
        # We've had a few cases where a previous version had an issue and could
        # no longer be installed. In this case, just move along.
        LOG.warning('Unable to create sdist, unable to get requirements.')
        LOG.warning('!!! Perform manual comparison for requirements changes'
                    '!!!')

    return parse_requirements(body)
示例#6
0
def get_requirements_at_ref(workdir, repo, ref):
    "Check out the repo at the ref and load the list of requirements."
    dest = gitutils.clone_repo(workdir, repo, ref=ref)
    processutils.check_call(['python', 'setup.py', 'sdist'], cwd=dest)
    sdist_name = pythonutils.get_sdist_name(workdir, repo)
    requirements_filename = os.path.join(
        dest, sdist_name + '.egg-info', 'requires.txt')
    # The package has no dependencies when requires.txt is absent.
    body = ''
    if os.path.exists(requirements_filename):
        with open(requirements_filename, 'r') as f:
            body = f.read()
    return parse_requirements(body)
示例#7
0
def main():
    """Advance a deliverable to its next version and append the release.

    Clones every repository from the deliverable's most recent release,
    resolves the HEAD of the series stable branch (or master when the
    repo is unbranched), and appends a new release entry to the
    deliverable YAML file for every repository whose HEAD moved since
    the previous tag.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the name of the release series to scan',
    )
    parser.add_argument(
        'deliverable',
        help='the base name of the deliverable file',
    )
    # FIXME(dhellmann): Add milestone and rc types.
    parser.add_argument(
        'release_type',
        choices=('bugfix', 'feature', 'major'),
        help='the type of release to generate',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    args = parser.parse_args()

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        # Best-effort removal at interpreter exit; a failed cleanup
        # should never change the program's exit status.
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                # Narrowed from a bare "except:" so SystemExit and
                # KeyboardInterrupt are no longer swallowed here.
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    # Allow for independent projects.
    series = args.series
    if series.lstrip('_') == 'independent':
        series = '_independent'

    # Load existing deliverable data.
    deliverable_filename = 'deliverables/%s/%s.yaml' % (
        series, args.deliverable)
    try:
        with open(deliverable_filename, 'r') as f:
            deliverable_info = yaml.safe_load(f)
    except (IOError, OSError) as e:
        parser.error(e)

    # Determine the new version number by adding the per-type increment
    # to each component of the previous version.
    # NOTE(review): a 'major' bump keeps the old minor/patch components
    # (e.g. 1.2.3 -> 2.2.3) -- confirm this is the intended semantics.
    last_release = deliverable_info['releases'][-1]
    last_version = last_release['version'].split('.')
    increment = {
        'bugfix': (0, 0, 1),
        'feature': (0, 1, 0),
        'major': (1, 0, 0),
    }[args.release_type]
    new_version_parts = []
    for cur, inc in zip(last_version, increment):
        new_version_parts.append(str(int(cur) + inc))
    new_version = '.'.join(new_version_parts)

    print('going from %s to %s' % (last_version, new_version))

    projects = []
    changes = 0
    for project in last_release['projects']:
        gitutils.clone_repo(workdir, project['repo'])

        # Prefer the series stable branch; fall back to master when the
        # repository has not been branched for this series yet.
        branches = gitutils.get_branches(workdir, project['repo'])
        version = 'origin/stable/%s' % series
        if not any(branch for branch in branches
                   if branch.endswith(version)):
            version = 'master'

        sha = gitutils.sha_for_tag(workdir, project['repo'], version)
        if project['hash'] != sha:
            changes += 1
            print('advancing %s from %s to %s' % (project['repo'],
                                                  project['hash'],
                                                  sha))
            projects.append({
                'repo': project['repo'],
                'hash': sha,
            })

    # The YAML dump formatter produces results that aren't very nice
    # to read, so we format the output ourselves. The file is only
    # regenerated if there are in fact changes to be made.
    if changes > 0:
        with open(deliverable_filename, 'a') as f:
            f.write(RELEASE_TEMPLATE.format(version=new_version))
            for p in projects:
                f.write(PROJECT_TEMPLATE.format(**p))
def main():
    """Report deliverable versions that are missing from a stable branch.

    For each deliverable in the selected series, finds the release at
    the stable-branch location and checks every later version; versions
    not contained in the branch are reported and counted as errors.

    :returns: 1 if any missing version was found, otherwise 0
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        default=False,
        help='show more than the deliverable name',
    )
    parser.add_argument(
        '--team',
        help='the name of the project team, such as "Nova" or "Oslo"',
    )
    parser.add_argument(
        '--deliverable',
        help='the name of the deliverable, such as "nova" or "oslo.config"',
    )
    parser.add_argument(
        '--series',
        default=defaults.RELEASE,
        help='the release series, such as "newton" or "ocata"',
    )
    model = parser.add_mutually_exclusive_group()
    model.add_argument(
        '--model',
        help=('the release model, such as "cycle-with-milestones"'
              ' or "independent"'),
    )
    model.add_argument(
        '--cycle-based',
        action='store_true',
        default=False,
        help='include all cycle-based code repositories',
    )
    parser.add_argument(
        '--type',
        help='deliverable type, such as "library" or "service"',
    )
    parser.add_argument(
        '--deliverables-dir',
        default=openstack_releases.deliverable_dir,
        help='location of deliverable files',
    )
    parser.add_argument(
        '--branch',
        default=None,
        help='branch name, defaults to stable/$series',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    args = parser.parse_args()

    if args.verbose:

        def verbose(msg):
            print(msg)
    else:

        def verbose(msg):
            pass

    # Deal with the inconsistency of the name for the independent
    # directory.
    series = args.series
    if series == 'independent':
        series = '_independent'

    branch = args.branch
    if not branch:
        branch = 'stable/{}'.format(series)

    workdir = tempfile.mkdtemp(prefix='releases-')
    verbose('creating temporary files in {}'.format(workdir))

    def cleanup_workdir():
        if args.cleanup:
            try:
                verbose('cleaning up temporary files in {}'.format(workdir))
                shutil.rmtree(workdir)
            except Exception:
                # Narrowed from a bare "except:" so SystemExit and
                # KeyboardInterrupt are not swallowed during shutdown;
                # removal failures remain best-effort.
                pass
        else:
            print('not cleaning up {}'.format(workdir))

    atexit.register(cleanup_workdir)

    # Count any errors for our exit code.
    errors = 0

    all_deliv = deliverable.Deliverables(
        root_dir=args.deliverables_dir,
        collapse_history=False,
    )
    for entry in all_deliv.get_deliverables(args.team, series):
        deliv = deliverable.Deliverable(*entry)
        branch_loc = deliv.get_branch_location(branch)
        if branch_loc is None:
            verbose('No stable branch for {}'.format(deliv.name))
            continue
        all_versions = deliv.versions
        if all_versions[-1] == branch_loc:
            verbose('Most recent release for {} ({}) is at {}'.format(
                deliv.name, branch_loc, branch))
            continue
        # Versions listed after the branch point are the candidates that
        # may not have made it onto the stable branch.
        idx = all_versions.index(branch_loc)
        late_releases = all_versions[idx + 1:]
        print('{} releases {} come after {}'.format(deliv.name, late_releases,
                                                    branch))
        for repo in sorted(deliv.repos):
            verbose('cloning {}'.format(repo))
            gitutils.clone_repo(
                workdir,
                repo,
            )
            for version in late_releases:
                containing_br = gitutils.branches_containing(
                    workdir,
                    repo,
                    version,
                )
                for cb in containing_br:
                    if branch in cb:  # allow for remote prefix
                        verbose('{} version {} is on branch {}'.format(
                            repo, version, branch))
                        break
                else:
                    # for/else: no containing branch matched.
                    print('{} version {} is not on branch {} ({})'.format(
                        repo, version, branch, containing_br))
                    errors += 1

    return (1 if errors else 0)
示例#9
0
def main():
    """Compute and record the next release for a deliverable.

    Parses the CLI arguments, derives the next version number from the
    deliverable's release history and the requested release type
    (bugfix/feature/major/milestone/rc/procedural/eol/em/releasefix),
    then appends a new release entry -- and optionally a stable branch
    entry -- to the deliverable YAML file.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the name of the release series to scan',
    )
    parser.add_argument(
        'deliverable',
        help='the base name of the deliverable file',
    )
    parser.add_argument(
        '-v',
        '--verbose',
        default=False,
        action='store_true',
        help='be more chatty',
    )
    parser.add_argument(
        '-i',
        '--interactive',
        default=False,
        action='store_true',
        help='Be interactive and only make releases when instructed')
    parser.add_argument(
        'release_type',
        choices=('bugfix', 'feature', 'major', 'milestone', 'rc', 'procedural',
                 'eol', 'em', 'releasefix'),
        help='the type of release to generate',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--force',
        default=False,
        action='store_true',
        help=('force a new tag, even if the HEAD of the '
              'branch is already tagged'),
    )
    parser.add_argument(
        '--debug',
        default=False,
        action='store_true',
        help='show tracebacks on errors',
    )
    parser.add_argument(
        '--stable-branch',
        default=False,
        action='store_true',
        help='create a new stable branch from the release',
    )
    args = parser.parse_args()

    # Set up logging, including making some loggers quiet.
    logging.basicConfig(
        format='%(levelname)7s: %(message)s',
        stream=sys.stdout,
        level=logging.DEBUG if args.verbose else logging.INFO,
    )
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)

    # NOTE(review): 'in' performs substring matching here. None of the
    # other valid choices is a substring of 'procedural', so this behaves
    # like '==', but equality would state the intent directly -- confirm.
    is_procedural = args.release_type in 'procedural'
    is_retagging = is_procedural or args.release_type == 'releasefix'
    is_eol = args.release_type == 'eol'
    is_em = args.release_type == 'em'
    force_tag = args.force

    workdir = tempfile.mkdtemp(prefix='releases-')
    LOG.info('creating temporary files in %s', workdir)

    def error(msg):
        # NOTE(review): 'raise msg' requires msg to be an exception
        # instance; the callers below pass caught exception objects.
        if args.debug:
            raise msg
        else:
            parser.error(msg)

    def cleanup_workdir():
        # ignore_errors=True: removal at exit is best-effort.
        if args.cleanup:
            shutil.rmtree(workdir, True)
        else:
            LOG.warning('not cleaning up %s', workdir)

    atexit.register(cleanup_workdir)

    # Allow for independent projects.
    series = args.series
    if series.lstrip('_') == 'independent':
        series = '_independent'

    # Load existing deliverable data.
    try:
        deliverable_info = get_deliverable_data(series, args.deliverable)
    except (IOError, OSError) as e:
        error(e)

    # Ensure we have a list for releases, even if it is empty.
    if deliverable_info.get('releases') is None:
        deliverable_info['releases'] = []

    try:
        release_history = get_release_history(series, args.deliverable)
        this_series_history = release_history[0]
        last_release = get_last_release(
            release_history,
            args.deliverable,
            args.release_type,
        )
    except RuntimeError as err:
        error(err)
    if last_release:
        last_version = last_release['version'].split('.')
    else:
        last_version = None
    LOG.debug('last_version %r', last_version)
    diff_start = None

    add_stable_branch = args.stable_branch or is_procedural

    # Validate new tag can be applied
    # An EOL tag such as 'xena-eol' has no '.' to split on, so the whole
    # string lands in the first component; this substring test catches it.
    if last_version and 'eol' in last_version[0]:
        raise ValueError('Cannot create new release after EOL tagging.')

    if last_version is None:
        # Deliverables that have never been released before should
        # start at 0.1.0, indicating they are not feature complete or
        # stable but have features.
        LOG.debug('defaulting to 0.1.0 for first release')
        new_version_parts = ['0', '1', '0']

    elif args.release_type in ('milestone', 'rc'):
        force_tag = True
        if deliverable_info['release-model'] not in _USES_RCS:
            raise ValueError('Cannot compute RC for {} project {}'.format(
                deliverable_info['release-model'], args.deliverable))
        new_version_parts = increment_milestone_version(
            last_version, args.release_type)
        LOG.debug('computed new version %s release type %s', new_version_parts,
                  args.release_type)
        # We are going to take some special steps for the first
        # release candidate, so figure out if that is what this
        # release will be.
        # NOTE(review): assumes the last component looks like '0rc<N>',
        # so [3:] yields the RC number -- confirm against
        # increment_milestone_version's output format.
        if args.release_type == 'rc' and new_version_parts[-1][3:] == '1':
            add_stable_branch = True

    elif args.release_type == 'procedural':
        # NOTE(dhellmann): We always compute the new version based on
        # the highest version on the branch, rather than the branch
        # base. If the differences are only patch levels the results
        # do not change, but if there was a minor version update then
        # the new version needs to be incremented based on that.
        new_version_parts = increment_version(
            last_version, (0, feature_increment(last_release), 0))

        # NOTE(dhellmann): Save the SHAs for the commits where the
        # branch was created in each repo, even though that is
        # unlikely to be the same as the last_version, because commits
        # further down the stable branch will not be in the history of
        # the master branch and so we can't tag them as part of the
        # new series *AND* we always want stable branches created from
        # master.
        prev_info = get_last_series_info(series, args.deliverable)
        for b in prev_info['branches']:
            if b['name'].startswith('stable/'):
                last_branch_base = b['location'].split('.')
                break
        else:
            raise ValueError(
                'Could not find a version in branch before {}'.format(series))
        if last_version != last_branch_base:
            LOG.warning('last_version %s branch base %s',
                        '.'.join(last_version), '.'.join(last_branch_base))
        for r in prev_info['releases']:
            if r['version'] == '.'.join(last_branch_base):
                last_version_hashes = {
                    p['repo']: p['hash']
                    for p in r['projects']
                }
                break
        else:
            raise ValueError(
                ('Could not find SHAs for tag '
                 '{} in old deliverable file').format('.'.join(last_version)))

    elif args.release_type == 'releasefix':
        increment = (0, 0, 1)
        new_version_parts = increment_version(last_version, increment)
        last_version_hashes = {
            p['repo']: p['hash']
            for p in last_release['projects']
        }
        # Go back 2 releases so the release announcement includes the
        # actual changes.
        try:
            diff_start_release = this_series_history[-2]
        except IndexError:
            # We do not have 2 releases in this series yet, so go back
            # to the stable branch creation point.
            prev_info = get_last_series_info(series, args.deliverable)
            for b in prev_info['branches']:
                if b['name'].startswith('stable/'):
                    diff_start = b['location']
                    LOG.info(
                        'using branch point from previous '
                        'series as diff-start: %r', diff_start)
                    break
        else:
            # try/except/else: runs only when this_series_history had at
            # least two releases and no IndexError was raised above.
            diff_start = diff_start_release['version']
            LOG.info('using release from same series as diff-start: %r',
                     diff_start)

    elif is_eol or is_em:
        last_version_hashes = {
            p['repo']: p['hash']
            for p in last_release['projects']
        }
        # EOL/EM version strings are '<series>-eol' / '<series>-em',
        # not numeric, so the parts list is left unset.
        increment = None
        new_version_parts = None
        new_version = '{}-{}'.format(args.series, args.release_type)

    else:
        increment = {
            'bugfix': (0, 0, 1),
            'feature': (0, feature_increment(last_release), 0),
            'major': (1, 0, 0),
        }[args.release_type]
        new_version_parts = increment_version(last_version, increment)
        LOG.debug('computed new version %s', new_version_parts)

    if new_version_parts is not None:
        # The EOL/EM tag version string is computed above and the parts
        # list is set to None to avoid recomputing it here.
        new_version = '.'.join(new_version_parts)

    if 'releases' not in deliverable_info:
        deliverable_info['releases'] = []

    LOG.info('going from %s to %s', last_version, new_version)

    projects = []
    changes = 0
    for repo in deliverable_info['repository-settings'].keys():
        LOG.info('processing %s', repo)

        # Look for the most recent time the repo was tagged and use
        # that info as the old sha.
        previous_sha = None
        previous_tag = None
        found = False
        for release in reversed(deliverable_info['releases']):
            for project in release['projects']:
                if project['repo'] == repo:
                    previous_sha = project.get('hash')
                    previous_tag = release['version']
                    LOG.info('last tagged as %s at %s', previous_tag,
                             previous_sha)
                    found = True
                    break
            if found:
                break

        if is_retagging or (is_em and
                            deliverable_info['release-model'] != 'untagged'):
            # Always use the last tagged hash, which should be coming
            # from the previous series or last release.
            sha = last_version_hashes[repo]

        else:
            # Figure out the hash for the HEAD of the branch.
            gitutils.clone_repo(workdir, repo)

            branches = gitutils.get_branches(workdir, repo)
            version = 'origin/stable/%s' % series
            if not any(branch
                       for branch in branches if branch.endswith(version)):
                version = 'master'

            sha = gitutils.sha_for_tag(workdir, repo, version)

            # Check out the working repo to the sha
            gitutils.checkout_ref(workdir, repo, sha)

        if is_retagging:
            changes += 1
            LOG.info('re-tagging %s at %s (%s)', repo, sha, previous_tag)
            if is_procedural:
                comment = 'procedural tag to support creating stable branch'
            else:
                comment = 'procedural tag to handle release job failure'
            new_project = {
                'repo': repo,
                'hash': sha,
                'comment': comment,
            }
            projects.append(new_project)

        elif is_eol or is_em:
            changes += 1
            LOG.info('tagging %s %s at %s', repo, args.release_type.upper(),
                     sha)
            new_project = {
                'repo': repo,
                'hash': sha,
            }
            projects.append(new_project)

        elif previous_sha != sha or force_tag:
            # TODO(tonyb): Do this early and also prompt for release type.
            # Once we do that we can probably deprecate interactive-release
            if args.interactive:
                # NOTE(tonyb): This is pretty much just copied from
                # interactive-release
                last_tag = '.'.join(last_version)
                change_lines = list(
                    clean_changes(
                        gitutils.changes_since(workdir, repo,
                                               last_tag).splitlines()))
                max_changes_show = 100
                LOG.info('')
                if last_tag:
                    LOG.info("%s changes to %s since %s are:",
                             len(change_lines), repo, last_tag)
                else:
                    LOG.info("%s changes to %s are:", len(change_lines), repo)
                for sha, descr in change_lines[0:max_changes_show]:
                    LOG.info("* %s %s", sha[:7], descr)
                leftover_change_lines = change_lines[max_changes_show:]
                if leftover_change_lines:
                    LOG.info("   and %s more changes...",
                             len(leftover_change_lines))
                LOG.info('')

            changes += 1
            LOG.info('advancing %s from %s (%s) to %s', repo, previous_sha,
                     previous_tag, sha)
            new_project = {
                'repo': repo,
                'hash': sha,
            }
            projects.append(new_project)

        else:
            LOG.info('%s already tagged at most recent commit, skipping', repo)

    new_release_info = {
        'version': new_version,
        'projects': projects,
    }
    if diff_start:
        new_release_info['diff-start'] = diff_start
    deliverable_info['releases'].append(new_release_info)

    if add_stable_branch:
        branch_name = 'stable/{}'.format(series)

        # First check if this branch is already defined
        if 'branches' in deliverable_info:
            for branch in deliverable_info['branches']:
                if branch.get('name') == branch_name:
                    LOG.debug('Branch %s already exists, skipping',
                              branch_name)
                    add_stable_branch = False
                    break

        if add_stable_branch:
            LOG.info('adding stable branch at %s', new_version)
            deliverable_info.setdefault('branches', []).append({
                'name':
                branch_name,
                'location':
                new_version,
            })

    create_release = changes > 0
    if create_release and args.interactive:
        create_release = yes_no_prompt(
            'Create a release in %s containing those changes? ' % series)

    if create_release:
        deliverable_filename = 'deliverables/%s/%s.yaml' % (series,
                                                            args.deliverable)
        with open(deliverable_filename, 'w', encoding='utf-8') as f:
            f.write(yamlutils.dumps(deliverable_info))
示例#10
0
def validate_releases(deliverable_info, zuul_layout, series_name, workdir,
                      mk_warning, mk_error):
    """Apply validation rules to the 'releases' list for the deliverable.

    :param deliverable_info: parsed deliverable YAML data
    :param zuul_layout: parsed zuul project configuration, used to check
        for release jobs
    :param series_name: name of the release series being validated
    :param workdir: scratch directory into which repositories are cloned
    :param mk_warning: callable invoked with a message for non-fatal issues
    :param mk_error: callable invoked with a message for fatal issues
    """
    release_model = get_model(deliverable_info, series_name)
    is_independent = (release_model == 'independent')

    # Remember which entries are new so we can verify that they
    # appear at the end of the file.
    new_releases = {}

    release_type = deliverable_info.get('release-type', 'std')
    link_mode = deliverable_info.get('artifact-link-mode', 'tarball')

    # Track the previous release so each release can be validated
    # against its predecessor (ancestry, re-tagging, project set).
    prev_version = None
    prev_projects = set()
    for release in deliverable_info.get('releases', []):

        print('checking %s' % release['version'])

        for project in release['projects']:

            # Check for release jobs (if we ship a tarball)
            if link_mode != 'none':
                project_config.require_release_jobs_for_repo(
                    deliverable_info,
                    zuul_layout,
                    project['repo'],
                    release_type,
                    mk_warning,
                    mk_error,
                )

            # Check the SHA specified for the tag.
            print('%s SHA %s ' % (project['repo'], project['hash']))

            if not is_a_hash(project['hash']):
                mk_error(
                    ('%(repo)s version %(version)s release from '
                     '%(hash)r, which is not a hash') % {
                         'repo': project['repo'],
                         'hash': project['hash'],
                         'version': release['version'],
                     })
            else:
                # Report if the SHA exists or not (an error if it
                # does not).
                sha_exists = gitutils.commit_exists(
                    project['repo'],
                    project['hash'],
                )
                if not sha_exists:
                    mk_error('No commit %(hash)r in %(repo)r' % project)
                    # No point in running extra checks if the SHA just
                    # doesn't exist.
                    continue

                # Ensure we have a local copy of the repository so we
                # can scan for values that are more difficult to get
                # remotely.
                gitutils.clone_repo(workdir, project['repo'], project['hash'])

                # Check that the sdist name and tarball-base name match.
                if link_mode == 'tarball':
                    sdist = pythonutils.get_sdist_name(workdir,
                                                       project['repo'])
                    if sdist is not None:
                        expected = project.get(
                            'tarball-base',
                            os.path.basename(project['repo']),
                        )
                        if sdist != expected:
                            if 'tarball-base' in project:
                                action = 'is set to'
                            else:
                                action = 'defaults to'
                            mk_error(('tarball-base for %s %s %s %r '
                                      'but the sdist name is actually %r. ' +
                                      _PLEASE) %
                                     (project['repo'], release['version'],
                                      action, expected, sdist))

                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                version_exists = gitutils.tag_exists(
                    project['repo'],
                    release['version'],
                )
                if version_exists:
                    actual_sha = gitutils.sha_for_tag(
                        workdir,
                        project['repo'],
                        release['version'],
                    )
                    if actual_sha != project['hash']:
                        mk_error(('Version %s in %s is on '
                                  'commit %s instead of %s') %
                                 (release['version'], project['repo'],
                                  actual_sha, project['hash']))
                else:
                    print('Found new version {}'.format(release['version']))
                    new_releases[release['version']] = release
                    # Repos that were not part of the previous release
                    # are only noted; the deeper checks below are skipped
                    # for them.
                    if prev_projects and project['repo'] not in prev_projects:
                        print('not included in previous release for %s: %s' %
                              (prev_version, ', '.join(sorted(prev_projects))))
                    else:

                        for e in versionutils.validate_version(
                                release['version'],
                                release_type=release_type,
                                pre_ok=(release_model in _USES_PREVER)):
                            msg = ('could not validate version %r: %s' %
                                   (release['version'], e))
                            mk_error(msg)

                        # If this is a puppet module, ensure
                        # that the tag and metadata file
                        # match.
                        if puppetutils.looks_like_a_module(
                                workdir, project['repo']):
                            puppet_ver = puppetutils.get_version(
                                workdir, project['repo'])
                            if puppet_ver != release['version']:
                                mk_error('%s metadata contains "%s" '
                                         'but is being tagged "%s"' % (
                                             project['repo'],
                                             puppet_ver,
                                             release['version'],
                                         ))

                        if is_independent:
                            mk_warning('skipping descendant test for '
                                       'independent project, verify '
                                       'branch manually')

                        else:
                            # If this is the first version in the series,
                            # check that the commit is actually on the
                            # targeted branch.
                            if not gitutils.check_branch_sha(
                                    workdir, project['repo'], series_name,
                                    defaults.RELEASE, project['hash']):
                                msg = '%s %s not present in %s branch' % (
                                    project['repo'],
                                    project['hash'],
                                    series_name,
                                )
                                mk_error(msg)

                            if prev_version:
                                # Check to see if we are re-tagging the same
                                # commit with a new version.
                                old_sha = gitutils.sha_for_tag(
                                    workdir,
                                    project['repo'],
                                    prev_version,
                                )
                                if old_sha == project['hash']:
                                    # FIXME(dhellmann): This needs a test.
                                    print('Retagging the SHA with '
                                          'a new version')
                                else:
                                    # Check to see if the commit for the new
                                    # version is in the ancestors of the
                                    # previous release, meaning it is actually
                                    # merged into the branch.
                                    is_ancestor = gitutils.check_ancestry(
                                        workdir,
                                        project['repo'],
                                        prev_version,
                                        project['hash'],
                                    )
                                    if not is_ancestor:
                                        mk_error('%s %s receiving %s '
                                                 'is not a descendant of %s' %
                                                 (
                                                     project['repo'],
                                                     project['hash'],
                                                     release['version'],
                                                     prev_version,
                                                 ))

        prev_version = release['version']
        prev_projects = set(p['repo'] for p in release['projects'])

    # Make sure that new entries have been appended to the file.
    for v, nr in new_releases.items():
        if nr != deliverable_info['releases'][-1]:
            msg = ('new release %s must be listed last, '
                   'with one new release per patch' % nr['version'])
            mk_error(msg)
示例#11
0
def main():
    """Compute and record the next release of a deliverable.

    Command-line entry point. Loads the deliverable file for the given
    series, determines the new version number from the requested release
    type (bugfix/feature/major/milestone/rc/procedural), finds the SHA to
    tag in each of the deliverable's repositories, and appends the new
    release -- and, when applicable, a new stable branch definition -- to
    the deliverable file on disk.

    The file is rewritten only if at least one repository actually needs
    a new tag.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the name of the release series to scan',
    )
    parser.add_argument(
        'deliverable',
        help='the base name of the deliverable file',
    )
    # FIXME(dhellmann): Add milestone and rc types.
    parser.add_argument(
        'release_type',
        choices=('bugfix', 'feature', 'major', 'milestone', 'rc',
                 'procedural'),
        help='the type of release to generate',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--force',
        default=False,
        action='store_true',
        help=('force a new tag, even if the HEAD of the '
              'branch is already tagged'),
    )
    parser.add_argument(
        '--stable-branch',
        default=False,
        action='store_true',
        help='create a new stable branch from the release',
    )
    args = parser.parse_args()

    is_procedural = args.release_type == 'procedural'
    force_tag = args.force

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        # Best-effort removal (ignore_errors=True) of the temporary
        # clone area, honoring --no-cleanup for debugging.
        if args.cleanup:
            shutil.rmtree(workdir, True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    # Allow for independent projects.
    series = args.series
    if series.lstrip('_') == 'independent':
        series = '_independent'

    # Load existing deliverable data.
    try:
        deliverable_info = get_deliverable_data(
            series, args.deliverable)
    except (IOError, OSError) as e:
        parser.error(e)

    try:
        last_release = get_last_release(
            deliverable_info,
            series,
            args.deliverable,
            args.release_type,
        )
    except RuntimeError as err:
        parser.error(err)
    # Version parts are kept as a list of strings until the new version
    # is assembled with '.'.join() below.
    last_version = last_release['version'].split('.')

    add_stable_branch = args.stable_branch or is_procedural
    if args.release_type in ('milestone', 'rc'):
        force_tag = True
        if deliverable_info['release-model'] not in _USES_RCS:
            raise ValueError('Cannot compute RC for {} project {}'.format(
                deliverable_info['release-model'], args.deliverable))
        new_version_parts = increment_milestone_version(
            last_version, args.release_type)
        # We are going to take some special steps for the first
        # release candidate, so figure out if that is what this
        # release will be.
        if args.release_type == 'rc' and new_version_parts[-1][3:] == '1':
            add_stable_branch = True

    elif args.release_type == 'procedural':
        # NOTE(dhellmann): We always compute the new version based on
        # the highest version on the branch, rather than the branch
        # base. If the differences are only patch levels the results
        # do not change, but if there was a minor version update then
        # the new version needs to be incremented based on that.
        new_version_parts = increment_version(last_version, (0, 1, 0))

        # NOTE(dhellmann): Save the SHAs for the commits where the
        # branch was created in each repo, even though that is
        # unlikely to be the same as the last_version, because commits
        # further down the stable branch will not be in the history of
        # the master branch and so we can't tag them as part of the
        # new series *AND* we always want stable branches created from
        # master.
        prev_info = get_last_series_info(series, args.deliverable)
        for b in prev_info['branches']:
            if b['name'].startswith('stable/'):
                last_branch_base = b['location'].split('.')
                break
        else:
            raise ValueError(
                'Could not find a version in branch before {}'.format(
                    series)
            )
        if last_version != last_branch_base:
            print('WARNING: last_version {} branch base {}'.format(
                '.'.join(last_version), '.'.join(last_branch_base)))
        for r in prev_info['releases']:
            if r['version'] == '.'.join(last_branch_base):
                last_version_hashes = {
                    p['repo']: p['hash']
                    for p in r['projects']
                }
                break
        else:
            raise ValueError(
                ('Could not find SHAs for tag '
                 '{} in old deliverable file').format(
                    '.'.join(last_version))
            )
    else:
        # Simple semver-style bump for bugfix/feature/major.
        increment = {
            'bugfix': (0, 0, 1),
            'feature': (0, 1, 0),
            'major': (1, 0, 0),
        }[args.release_type]
        new_version_parts = increment_version(last_version, increment)

    new_version = '.'.join(new_version_parts)

    if 'releases' not in deliverable_info:
        deliverable_info['releases'] = []

    print('going from %s to %s' % (last_version, new_version))

    projects = []
    changes = 0
    for project in last_release['projects']:

        if args.release_type == 'procedural':
            # Always use the last tagged hash, which should be coming
            # from the previous series.
            sha = last_version_hashes[project['repo']]

        else:
            # Figure out the hash for the HEAD of the branch.
            gitutils.clone_repo(workdir, project['repo'])

            branches = gitutils.get_branches(workdir, project['repo'])
            version = 'origin/stable/%s' % series
            if not any(branch for branch in branches
                       if branch.endswith(version)):
                version = 'master'

            sha = gitutils.sha_for_tag(workdir, project['repo'], version)

        if is_procedural:
            changes += 1
            print('re-tagging %s at %s (%s)' % (project['repo'], sha,
                                                last_release['version']))
            new_project = {
                'repo': project['repo'],
                'hash': sha,
                'comment': 'procedural tag to support creating stable branch',
            }
            if 'tarball-base' in project:
                new_project['tarball-base'] = project['tarball-base']
            projects.append(new_project)

        elif project['hash'] != sha or force_tag:
            changes += 1
            print('advancing %s from %s to %s' % (project['repo'],
                                                  project['hash'],
                                                  sha))
            new_project = {
                'repo': project['repo'],
                'hash': sha,
            }
            if 'tarball-base' in project:
                new_project['tarball-base'] = project['tarball-base']
            projects.append(new_project)

        else:
            print('{} already tagged at most recent commit, skipping'.format(
                project['repo']))

    deliverable_info['releases'].append({
        'version': new_version,
        'projects': projects,
    })

    if add_stable_branch:
        branch_name = 'stable/{}'.format(series)

        # First check if this branch is already defined
        if 'branches' in deliverable_info:
            for branch in deliverable_info['branches']:
                if branch.get('name') == branch_name:
                    # Fixed typo in message: "existes" -> "exists".
                    print('Branch {} already exists, skipping'.format(
                        branch_name))
                    add_stable_branch = False
                    break

        if add_stable_branch:
            print('adding stable branch at {}'.format(new_version))
            deliverable_info.setdefault('branches', []).append({
                'name': branch_name,
                'location': new_version,
            })

    # Only rewrite the deliverable file when something actually changed.
    if changes > 0:
        deliverable_filename = 'deliverables/%s/%s.yaml' % (
            series, args.deliverable)
        with open(deliverable_filename, 'w', encoding='utf-8') as f:
            f.write(yamlutils.dumps(deliverable_info))
示例#12
0
def main():
    """Validate deliverable files against Launchpad and git history.

    Command-line entry point. For each deliverable file (those passed on
    the command line, or the files touched by the most recent commit, or
    every file for the default series), checks that:

    * the referenced Launchpad project exists,
    * every recorded commit ID is a SHA hash that exists in its repo,
    * an already-applied version tag points at the recorded SHA, and
    * a new version's commit descends from the previous release.

    Returns 1 if any errors were found, 0 otherwise.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, validating all releases from %s'
              % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')

    errors = []

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                # Best-effort cleanup only; do not mask real errors or
                # interrupts (the original bare "except:" caught even
                # SystemExit/KeyboardInterrupt).
                pass
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.isfile(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # safe_load avoids arbitrary object construction from the
            # YAML input (yaml.load without a Loader is unsafe and
            # deprecated); these files contain plain data only.
            deliverable_info = yaml.safe_load(f)

        # Look for the launchpad project
        try:
            lp_name = deliverable_info['launchpad']
        except KeyError:
            errors.append('No launchpad project given in %s' % filename)
            print('no launchpad project name given')
        else:
            print('launchpad project %s ' % lp_name, end='')
            lp_resp = requests.get('https://api.launchpad.net/1.0/' + lp_name)
            # Any 4xx response means the project is not registered.
            if (lp_resp.status_code // 100) == 4:
                print('MISSING')
                errors.append('Launchpad project %s does not exist' % lp_name)
            else:
                print('found')

        prev_version = None
        for release in deliverable_info['releases']:
            for project in release['projects']:
                print('%s SHA %s ' % (project['repo'],
                                      project['hash']),
                      end='')

                if not is_a_hash(project['hash']):
                    print('NOT A SHA HASH')
                    errors.append(
                        ('%(repo)s version %(version)s release from '
                         '%(hash)r, which is not a hash') % project
                    )
                else:
                    # Report if the SHA exists or not (an error if it
                    # does not).
                    sha_exists = gitutils.commit_exists(
                        project['repo'], project['hash'],
                    )
                    if not sha_exists:
                        print('MISSING', end='')
                        errors.append('No commit %(hash)r in %(repo)r'
                                      % project)
                    else:
                        print('found ', end='')
                    # Report if the version has already been
                    # tagged. We expect it to not exist, but neither
                    # case is an error because sometimes we want to
                    # import history and sometimes we want to make new
                    # releases.
                    print('version %s ' % release['version'], end='')
                    version_exists = gitutils.commit_exists(
                        project['repo'], release['version'],
                    )
                    if version_exists:
                        # The tag exists already: it must point at the
                        # SHA recorded in the deliverable file.
                        gitutils.clone_repo(workdir, project['repo'])
                        actual_sha = gitutils.sha_for_tag(
                            workdir,
                            project['repo'],
                            release['version'],
                        )
                        if actual_sha == project['hash']:
                            print('found and matches SHA')
                        else:
                            print('found DIFFERENT %r' % actual_sha)
                            errors.append(
                                ('Version %s in %s is on '
                                 'commit %s instead of %s') %
                                (release['version'],
                                 project['repo'],
                                 actual_sha,
                                 project['hash']))
                    else:
                        print('NEW ', end='')
                        if not prev_version:
                            print()
                        else:
                            # Check to see if the commit for the new
                            # version is in the ancestors of the
                            # previous release, meaning it is actually
                            # merged into the branch.
                            is_ancestor = gitutils.check_ancestry(
                                workdir,
                                project['repo'],
                                prev_version,
                                project['hash'],
                            )
                            if is_ancestor:
                                print('SHA found in descendants')
                            else:
                                print('SHA NOT FOUND in descendants')
                                errors.append(
                                    '%s %s is not a descendant of %s' % (
                                        project['repo'], project['hash'],
                                        prev_version)
                                )
            prev_version = release['version']

    if errors:
        print('\n%s errors found' % len(errors))
        for e in errors:
            print(e)

    return 1 if errors else 0
示例#13
0
def main():
    """Append a simple semver-bumped release to a deliverable file.

    Command-line entry point. Loads the named deliverable file, bumps
    the last recorded version according to the release type
    (bugfix/feature/major), resolves the current HEAD SHA of each
    repository's stable branch (or master when no stable branch
    exists), and appends a release entry for every repo whose HEAD has
    advanced since the last release. The file is only touched when at
    least one repo changed.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'series',
        help='the name of the release series to scan',
    )
    parser.add_argument(
        'deliverable',
        help='the base name of the deliverable file',
    )
    # FIXME(dhellmann): Add milestone and rc types.
    parser.add_argument(
        'release_type',
        choices=('bugfix', 'feature', 'major'),
        help='the type of release to generate',
    )
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    args = parser.parse_args()

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            try:
                shutil.rmtree(workdir)
            except Exception:
                # Best-effort cleanup only; the original bare "except:"
                # also swallowed SystemExit/KeyboardInterrupt.
                pass
        else:
            print('not cleaning up %s' % workdir)

    atexit.register(cleanup_workdir)

    # Allow for independent projects.
    series = args.series
    if series.lstrip('_') == 'independent':
        series = '_independent'

    # Load existing deliverable data.
    deliverable_filename = 'deliverables/%s/%s.yaml' % (series,
                                                        args.deliverable)
    try:
        with open(deliverable_filename, 'r') as f:
            deliverable_info = yaml.safe_load(f)
    except (IOError, OSError) as e:
        parser.error(e)

    # Determine the new version number: bump the selected component and
    # zero out everything after it (e.g. 1.2.3 + feature -> 1.3.0).
    last_release = deliverable_info['releases'][-1]
    last_version = last_release['version'].split('.')
    increment = {
        'bugfix': (0, 0, 1),
        'feature': (0, 1, 0),
        'major': (1, 0, 0),
    }[args.release_type]
    new_version_parts = []
    clear = False
    for cur, inc in zip(last_version, increment):
        if clear:
            new_version_parts.append('0')
        else:
            new_version_parts.append(str(int(cur) + inc))
            if inc:
                clear = True
    new_version = '.'.join(new_version_parts)

    print('going from %s to %s' % (last_version, new_version))

    projects = []
    changes = 0
    for project in last_release['projects']:
        gitutils.clone_repo(workdir, project['repo'])

        # Prefer the series stable branch; fall back to master when the
        # repo has not been branched for this series.
        branches = gitutils.get_branches(workdir, project['repo'])
        version = 'origin/stable/%s' % series
        if not any(branch for branch in branches if branch.endswith(version)):
            version = 'master'

        sha = gitutils.sha_for_tag(workdir, project['repo'], version)
        if project['hash'] != sha:
            changes += 1
            print('advancing %s from %s to %s' %
                  (project['repo'], project['hash'], sha))
            projects.append({
                'repo': project['repo'],
                'hash': sha,
            })

    # The YAML dump formatter produces results that aren't very nice
    # to read, so we format the output ourselves. The file is only
    # regenerated if there are in fact changes to be made.
    if changes > 0:
        with open(deliverable_filename, 'a') as f:
            f.write(RELEASE_TEMPLATE.format(version=new_version))
            for p in projects:
                f.write(PROJECT_TEMPLATE.format(**p))
示例#14
0
def main():
    """Print a detailed pre-release review report for changed deliverables.

    Command-line entry point. For each modified deliverable file it
    reports team/governance details, then for every repository in the
    most recent release entry it clones the repo and shows: existing
    tags, branch membership, relationship to the branch HEAD, watched
    Gerrit queries, requirements diffs, ancestry relative to the
    previous release, the commit log being released, and the generated
    release notes.

    Output is intended for human review; the function always returns 0.
    """
    if not sys.stdout.encoding:
        # Wrap sys.stdout with a writer that knows how to handle
        # encoding Unicode data.
        import codecs
        wrapped_stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdout = wrapped_stdout

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        '--no-shortcut',
        '--force',
        '-f',
        dest='shortcut',
        default=True,
        action='store_false',
        help='if a tag has been applied, skip the repo',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to validate, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    # Set up logging, including making some loggers quiet.
    logging.basicConfig(
        format='%(levelname)7s: %(message)s',
        stream=sys.stdout,
        level=logging.DEBUG,
    )
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, skipping report')
        return 0

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        # Best-effort removal (ignore_errors=True), honoring --no-cleanup.
        if args.cleanup:
            shutil.rmtree(workdir, True)
        else:
            print('not cleaning up %s' % workdir)

    atexit.register(cleanup_workdir)

    gov_data = governance.Governance.from_remote_repo()
    official_repos = set(r.name for r in gov_data.get_repositories())

    all_deliverables = deliverable.Deliverables(
        './deliverables',
        False,
    )

    liaison_data = liaisons.get_liaisons()

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''

    for filename in filenames:
        if not os.path.exists(filename):
            print('\n%s was removed, skipping' % filename)
            continue
        print('\n' + ('=' * 80))
        print('\nChecking %s\n' % filename)
        deliv = deliverable.Deliverable.read_file(filename)

        # Any series other than 'independent' or the current development
        # series is treated as having a stable branch.
        stable_branch = deliv.series not in ['independent', defaults.RELEASE]

        # By default assume the project does not use milestones.
        header('Release model')
        print(deliv.model)

        header('Team details')
        if deliv.team:
            team_name = deliv.team
            try:
                team = gov_data.get_team(team_name)
            except ValueError:
                team = None
            if team:
                print('found team %s' % team_name)
                print('  PTL    : %(name)s (%(irc)s)' % team.ptl)
                for liaison in liaison_data.get(team.name.lower(), []):
                    print('  Liaison: %(name)s (%(irc)s)' % liaison)
                team_deliv = team.deliverables.get(deliv.name)
                if team_deliv:
                    print('found deliverable %s' % deliv.name)
                    for rn, repo in sorted(team_deliv.repositories.items()):
                        follows_stable_policy = 'stable:follows-policy' in repo.tags
                        print('\nrepo %s\ntags:' % repo.name)
                        for t in repo.tags:
                            print('  %s' % t)
                        print('')
                        if stable_branch and follows_stable_policy:
                            banner('Needs Stable Policy Review')
                            print()
                else:
                    print(('no deliverable %r found for team %r, '
                           'cannot report on governance status') %
                          (deliv.name, team_name))
                if not deliv.is_independent:
                    # Show other deliverables owned by the team and
                    # included in this series.
                    team_deliv_in_series = all_deliverables.get_deliverables(
                        team.name, deliv.series)
                    if team_deliv_in_series:
                        print('Other {} deliverables in {}:'.format(
                            team.name, deliv.series))
                    for d in team_deliv_in_series:
                        print('  {} ({})'.format(d.name, d.latest_release
                                                 or None))
                    print()
            else:
                print('no team %r found, cannot report on governance status' %
                      team_name)
        else:
            print('no team name given, cannot report on governance status')

        # If there are no releases listed, this is probably a new
        # deliverable file for initializing a new series. We don't
        # need to list its changes.
        if not deliv.is_released:
            header('No releases')
            print('no releases were found, assuming an initialization file')
            continue

        # assume the releases are in order and take the last one
        new_release = deliv.releases[-1]

        for project in new_release.projects:

            tag_exists = gitutils.tag_exists(
                project.repo.name,
                new_release.version,
            )
            if tag_exists:
                print('%s %s exists on git server already' %
                      (project.repo.name, new_release.version))
                if args.shortcut:
                    print('skipping further processing')
                    continue

            if project.repo.is_retired:
                print('%s is retired' % (project.repo.name, ))
                if args.shortcut:
                    print('skipping further processing')
                    continue

            # Start by checking out master, always. We need the repo
            # checked out before we can tell if the stable branch
            # really exists.
            gitutils.clone_repo(
                workdir,
                project.repo.name,
                branch='master',
            )

            # Set some git configuration values to allow us to perform
            # local operations like tagging.
            gitutils.ensure_basic_git_config(
                workdir,
                project.repo.name,
                {
                    'user.email': '*****@*****.**',
                    'user.name': 'OpenStack Proposal Bot'
                },
            )

            # Determine which branch we should actually be looking
            # at. Assume any series for which there is no stable
            # branch will be on 'master'.
            if gitutils.stable_branch_exists(workdir, project.repo.name,
                                             deliv.series):
                branch = 'stable/' + deliv.series
            else:
                branch = 'master'

            if branch != 'master':
                # Check out the repo again to the right branch if we
                # didn't get it the first time.
                gitutils.clone_repo(
                    workdir,
                    project.repo.name,
                    branch=branch,
                )

            # look at the previous tag for the parent of the commit
            # getting the new release
            previous_tag = gitutils.get_latest_tag(
                workdir,
                project.repo.name,
                '{}^'.format(project.hash),
                always=False,
            )
            try:
                previous_release = deliv.get_release(previous_tag)
            except ValueError:
                previous_release = None

            # NOTE(review): both branches assign start_range the same
            # previous_tag value; the lookup only confirms the previous
            # release touched this repo -- confirm intent before changing.
            start_range = previous_tag
            if previous_release:
                previous_project = {
                    x.repo.name: x
                    for x in previous_release.projects
                }.get(project.repo.name)
                if previous_project is not None:
                    start_range = previous_tag

            if start_range:
                git_range = '%s..%s' % (start_range, project.hash)
            else:
                git_range = project.hash

            # Show details about the commit being tagged.
            header('Details for commit receiving new tag %s' %
                   new_release.version)
            print('\ngit describe %s\n' % project.hash)
            try:
                subprocess.check_call(
                    ['git', 'describe', project.hash],
                    cwd=os.path.join(workdir, project.repo.name),
                )
            except subprocess.CalledProcessError as e:
                print('WARNING: Could not run git describe: %s' % e)

            git_show(
                workdir=workdir,
                repo=project.repo.name,
                title='Check existing tags',
                ref=project.hash,
            )

            git_list_existing_branches(
                workdir=workdir,
                repo=project.repo.name,
            )

            branches = git_branch_contains(
                workdir=workdir,
                repo=project.repo.name,
                title='Branches containing commit',
                commit=project.hash,
            )

            header('Relationship to HEAD')
            if deliv.is_independent:
                if branches:
                    tag_branch = branches[0]
                else:
                    tag_branch = branch
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project.repo.name,
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            else:
                if (branch in branches) or (not branches):
                    tag_branch = branch
                else:
                    tag_branch = branches[0]
                head_sha = gitutils.sha_for_tag(
                    workdir,
                    project.repo.name,
                    tag_branch,
                )
                print('HEAD of {} is {}'.format(tag_branch, head_sha))
            requested_sha = gitutils.sha_for_tag(
                workdir,
                project.repo.name,
                project.hash,
            )
            # If the sha for HEAD and the requested release don't
            # match, show any unreleased changes on the branch. We ask
            # git to give us the real SHA for the requested release in
            # case the deliverables file has the short version of the
            # hash.
            if head_sha == requested_sha:
                print('\nRequest releases from HEAD on %s' % tag_branch)
            else:
                git_log(workdir,
                        project.repo.name,
                        'Release will NOT include',
                        '%s..%s' % (requested_sha, head_sha),
                        extra_args=['--format=%h %ci %s'])

            show_watched_queries(branch, project.repo.name)

            # Show any requirements changes in the upcoming release.
            # Include setup.cfg, in case the project uses "extras".
            if start_range:
                git_diff(workdir, project.repo.name, git_range,
                         '*requirements*.txt',
                         'Requirements Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range,
                         'doc/requirements.txt',
                         'Doc Requirements Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range, 'setup.cfg',
                         'setup.cfg Changes %s' % git_range)
                git_diff(workdir, project.repo.name, git_range, 'bindep.txt',
                         'bindep.txt Changes %s' % git_range)

            # Before we try to determine if the previous release
            # is an ancestor or produce the release notes we need
            # the tag to exist in the local repository.
            if not tag_exists:
                header('Applying Temporary Tag')
                print('\ngit tag {version} {hash}'.format(
                    version=new_release.version,
                    hash=project.hash,
                ))
                subprocess.check_call(
                    ['git', 'tag', new_release.version, project.hash],
                    cwd=os.path.join(workdir, project.repo.name),
                )

            # Show any changes in the previous release but not in this
            # release, in case someone picks an "early" SHA or a
            # regular commit instead of the appropriate merge commit.
            previous_tag_exists = False
            if previous_release:
                previous_tag_exists = gitutils.tag_exists(
                    project.repo.name,
                    previous_release.version,
                )
            if previous_tag_exists:
                git_log(
                    workdir,
                    project.repo.name,
                    'Patches in previous release but not in this one',
                    [previous_release.version, '--not', project.hash],
                    extra_args=['--topo-order', '--oneline', '--no-merges'],
                )

                # The tag will have been added as a local tag above if
                # it does not already exist.
                header('New release %s includes previous release %s' %
                       (new_release.version, previous_release.version))
                print('\ngit tag --contains %s\n' % previous_release.version)
                containing_tags = subprocess.check_output(
                    ['git', 'tag', '--contains', previous_release.version],
                    cwd=os.path.join(workdir, project.repo.name),
                ).decode('utf-8').split()
                print('Containing tags:', containing_tags)
                if new_release.version not in containing_tags:
                    print('WARNING: Missing %s' % new_release.version)
                else:
                    print('Found new version %s' % new_release.version)

                is_ancestor = gitutils.check_ancestry(
                    workdir,
                    project.repo.name,
                    previous_release.version,
                    project.hash,
                )
                if is_ancestor:
                    print('SHA found in descendants')
                else:
                    print('SHA NOT FOUND in descendants')

            # Show the changes since the last release, first as a
            # graph view so we can check for bad merges, and then with
            # more detail.
            git_log(workdir,
                    project.repo.name,
                    'Release %s will include' % new_release.version,
                    git_range,
                    extra_args=[
                        '--graph', '--oneline', '--decorate', '--topo-order'
                    ])
            git_log(workdir,
                    project.repo.name,
                    'Details Contents',
                    git_range,
                    extra_args=['--no-merges', '--topo-order'])

            # The tag will have been added as a local tag above if it does
            # not already exist.
            header('Release Notes')
            try:
                notes = release_notes.generate_release_notes(
                    repo=project.repo.name,
                    repo_path=os.path.join(workdir, project.repo.name),
                    start_revision=new_release.diff_start or start_range or '',
                    end_revision=new_release.version,
                    show_dates=True,
                    skip_requirement_merges=True,
                    is_stable=branch.startswith('stable/'),
                    series=deliv.series,
                    email='*****@*****.**',
                    email_from='*****@*****.**',
                    email_reply_to='*****@*****.**',
                    email_tags='',
                    include_pypi_link=False,
                    changes_only=False,
                    first_release=deliv.is_first_release,
                    deliverable_file=filename,
                    description='',
                    publishing_dir_name=project.repo.name,
                )
            # NOTE(review): 'e' is unused here; logging.exception already
            # records the traceback. Broad catch keeps the report going
            # when note generation fails for one repo.
            except Exception as e:
                logging.exception('Failed to produce release notes')
            else:
                print('\n')
                print(notes)

            if 'library' in deliv.type:
                show_dependency_listings(
                    project.guess_sdist_name(),
                    official_repos,
                )

    return 0
示例#15
0
def main():
    """Compare each deliverable's diff-start tag to the prior branch base.

    For every deliverable file given (or every file changed in the
    latest commit), look at the most recent release entry and report
    whether its 'diff-start' value matches the latest tag at the base
    of the previous series' stable branch for each project repo.

    Returns None; results are reported on stdout.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'prev_series',
        help='previous series name',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to examine, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    filenames = args.input or gitutils.find_modified_deliverable_files()
    if not filenames:
        print('no modified deliverable files, validating all releases from %s'
              % defaults.RELEASE)
        filenames = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        if args.cleanup:
            # ignore_errors replaces the previous bare "except: pass",
            # which silently swallowed *every* exception type.
            shutil.rmtree(workdir, ignore_errors=True)
        else:
            print('not cleaning up %s' % workdir)
    atexit.register(cleanup_workdir)

    for filename in filenames:
        print('\nChecking %s' % filename)
        if not os.path.isfile(filename):
            print("File was deleted, skipping.")
            continue
        with open(filename, 'r') as f:
            # safe_load avoids executing arbitrary YAML tags and the
            # yaml.load() "no Loader" deprecation warning.
            deliverable_info = yaml.safe_load(f.read())

        branch = 'stable/' + args.prev_series

        # Guard against deliverables without any releases yet;
        # otherwise releases[-1] below raises IndexError.
        if not deliverable_info.get('releases'):
            print('  no releases')
            continue

        # assume the releases are in order and take the last one
        new_release = deliverable_info['releases'][-1]
        print('version {}'.format(new_release['version']))

        diff_start = new_release.get('diff-start')
        if not diff_start:
            print('  no diff-start')
            continue
        else:
            print('  diff-start: {!r}'.format(diff_start))

        for project in new_release['projects']:
            gitutils.clone_repo(workdir, project['repo'])

            branch_base = gitutils.get_branch_base(
                workdir, project['repo'], branch,
            )
            if branch_base:
                branch_version = gitutils.get_latest_tag(
                    workdir, project['repo'], branch_base,
                )
                if diff_start == branch_version:
                    print('  SAME')
                else:
                    print('  DIFFERENT {} at {}'.format(
                        branch_version, branch_base))
示例#16
0
def main():
    """Check diff-start tags against the base of the previous stable branch.

    Walks the selected deliverable files, finds the newest release in
    each, and prints whether its 'diff-start' agrees with the latest
    tag at the branch point of stable/<prev_series> for every project.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument(
        'prev_series',
        help='previous series name',
    )
    parser.add_argument(
        'input',
        nargs='*',
        help=('YAML files to examine, defaults to '
              'files changed in the latest commit'),
    )
    args = parser.parse_args()

    to_check = args.input or gitutils.find_modified_deliverable_files()
    if not to_check:
        print(
            'no modified deliverable files, validating all releases from %s' %
            defaults.RELEASE)
        to_check = glob.glob('deliverables/' + defaults.RELEASE + '/*.yaml')

    workdir = tempfile.mkdtemp(prefix='releases-')
    print('creating temporary files in %s' % workdir)

    def cleanup_workdir():
        # Honor --no-cleanup by leaving the scratch tree for inspection.
        if not args.cleanup:
            print('not cleaning up %s' % workdir)
            return
        shutil.rmtree(workdir, True)

    atexit.register(cleanup_workdir)

    # The target branch depends only on the CLI args, so compute it once.
    stable_branch = 'stable/' + args.prev_series

    for fname in to_check:
        print('\nChecking %s' % fname)
        if not os.path.isfile(fname):
            print("File was deleted, skipping.")
            continue
        with open(fname, 'r') as fh:
            deliv_data = yamlutils.loads(fh.read())

        releases = deliv_data.get('releases')
        if not releases:
            print('  no releases')
            continue

        # Releases are listed oldest-first, so the newest one is last.
        latest = releases[-1]
        print('version {}'.format(latest['version']))

        diff_start = latest.get('diff-start')
        if not diff_start:
            print('  no diff-start')
            continue
        print('  diff-start: {!r}'.format(diff_start))

        for proj in latest['projects']:
            repo_name = proj['repo']
            gitutils.clone_repo(workdir, repo_name)

            base = gitutils.get_branch_base(
                workdir,
                repo_name,
                stable_branch,
            )
            if not base:
                continue
            base_version = gitutils.get_latest_tag(
                workdir,
                repo_name,
                base,
            )
            if diff_start == base_version:
                print('  SAME')
            else:
                print('  DIFFERENT {} at {}'.format(base_version, base))
示例#17
0
def validate_releases(deliverable_info, zuul_layout,
                      series_name,
                      workdir,
                      mk_warning, mk_error):
    """Apply validation rules to the 'releases' list for the deliverable.

    Walks every release entry in order, validating each project's SHA,
    tag, version string, and branch ancestry. Problems are reported
    through the mk_warning/mk_error callbacks rather than raised, so a
    single run can collect all issues. Clones repositories into
    ``workdir`` as needed.
    """
    release_model = get_model(deliverable_info, series_name)
    is_independent = (release_model == 'independent')

    # Remember which entries are new so we can verify that they
    # appear at the end of the file.
    new_releases = {}

    release_type = deliverable_info.get('release-type', 'std')
    link_mode = deliverable_info.get('artifact-link-mode', 'tarball')

    # prev_version/prev_projects carry state from one release entry to
    # the next, enabling the re-tag and ancestry checks below.
    prev_version = None
    prev_projects = set()
    for release in deliverable_info.get('releases', []):

        for project in release['projects']:

            # Check for release jobs (if we ship a tarball)
            if link_mode != 'none':
                project_config.require_release_jobs_for_repo(
                    deliverable_info, zuul_layout, project['repo'],
                    release_type, mk_warning, mk_error,
                )

            # Check the SHA specified for the tag.
            print('%s SHA %s ' % (project['repo'], project['hash']))

            if not is_a_hash(project['hash']):
                mk_error(
                    ('%(repo)s version %(version)s release from '
                     '%(hash)r, which is not a hash') % {
                         'repo': project['repo'],
                         'hash': project['hash'],
                         'version': release['version'],
                         }
                )
            else:
                # Report if the SHA exists or not (an error if it
                # does not).
                sha_exists = gitutils.commit_exists(
                    project['repo'], project['hash'],
                )
                if not sha_exists:
                    mk_error('No commit %(hash)r in %(repo)r'
                             % project)
                    # No point in running extra checks if the SHA just
                    # doesn't exist.
                    continue
                # Report if the version has already been
                # tagged. We expect it to not exist, but neither
                # case is an error because sometimes we want to
                # import history and sometimes we want to make new
                # releases.
                version_exists = gitutils.tag_exists(
                    project['repo'], release['version'],
                )
                gitutils.clone_repo(workdir, project['repo'])
                if version_exists:
                    # Existing tag: only verify it points at the
                    # SHA recorded in the deliverable file.
                    actual_sha = gitutils.sha_for_tag(
                        workdir,
                        project['repo'],
                        release['version'],
                    )
                    if actual_sha != project['hash']:
                        mk_error(
                            ('Version %s in %s is on '
                             'commit %s instead of %s') %
                            (release['version'],
                             project['repo'],
                             actual_sha,
                             project['hash']))
                else:
                    print('Found new version {}'.format(release['version']))
                    new_releases[release['version']] = release
                    if prev_projects and project['repo'] not in prev_projects:
                        # Repo was not part of the previous release of
                        # this deliverable; report and skip the
                        # version/ancestry checks for it.
                        print('not included in previous release for %s: %s' %
                              (prev_version, ', '.join(sorted(prev_projects))))
                    else:

                        # Validate the version string itself
                        # (pre-releases allowed only for some models).
                        for e in versionutils.validate_version(
                                release['version'],
                                release_type=release_type,
                                pre_ok=(release_model in _USES_PREVER)):
                            msg = ('could not validate version %r: %s' %
                                   (release['version'], e))
                            mk_error(msg)

                        if is_independent:
                            mk_warning('skipping descendant test for '
                                       'independent project, verify '
                                       'branch manually')

                        elif not prev_version:
                            # If this is the first version in the series,
                            # check that the commit is actually on the
                            # targeted branch.
                            if not gitutils.check_branch_sha(workdir,
                                                             project['repo'],
                                                             series_name,
                                                             defaults.RELEASE,
                                                             project['hash']):
                                msg = '%s %s not present in %s branch' % (
                                    project['repo'],
                                    project['hash'],
                                    series_name,
                                    )
                                mk_error(msg)

                        else:
                            # Check to see if we are re-tagging the same
                            # commit with a new version.
                            old_sha = gitutils.sha_for_tag(
                                workdir,
                                project['repo'],
                                prev_version,
                            )
                            if old_sha == project['hash']:
                                # FIXME(dhellmann): This needs a test.
                                print('Retagging the SHA with a new version')
                            else:
                                # Check to see if the commit for the new
                                # version is in the ancestors of the
                                # previous release, meaning it is actually
                                # merged into the branch.
                                is_ancestor = gitutils.check_ancestry(
                                    workdir,
                                    project['repo'],
                                    prev_version,
                                    project['hash'],
                                )
                                if not is_ancestor:
                                    mk_error(
                                        '%s %s receiving %s '
                                        'is not a descendant of %s' % (
                                            project['repo'],
                                            project['hash'],
                                            release['version'],
                                            prev_version,
                                        )
                                    )

        prev_version = release['version']
        prev_projects = set(p['repo'] for p in release['projects'])

    # Make sure that new entries have been appended to the file.
    for v, nr in new_releases.items():
        if nr != deliverable_info['releases'][-1]:
            msg = ('new release %s must be listed last, '
                   'with one new release per patch' % nr['version'])
            mk_error(msg)
def main():
    """List unreleased commits on a branch for a set of repositories.

    For each repo given on the command line, clone it, find the most
    recent tag on the requested branch (falling back to master when the
    stable branch does not exist), and collect the git log between that
    tag and HEAD. Results are rendered by generate_output() in the
    requested format.

    Returns 0 on completion.
    """
    if not sys.stdout.encoding:
        # Wrap sys.stdout with a writer that knows how to handle
        # encoding Unicode data.
        import codecs
        wrapped_stdout = codecs.getwriter('UTF-8')(sys.stdout)
        sys.stdout = wrapped_stdout

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-cleanup',
        dest='cleanup',
        default=True,
        action='store_false',
        help='do not remove temporary files',
    )
    parser.add_argument("-v",
                        "--verbosity",
                        action="count",
                        help="increase output verbosity",
                        default=0)
    parser.add_argument(
        "--ignore-no-results",
        action='store_true',
        default=False,
        help="Ignore projects without difference between the HEAD and "
        "the retrieved previous tag. "
        "They will be ignored in the command output.")
    parser.add_argument("--ignore-errors",
                        action='store_true',
                        default=False,
                        help="Ignore projects in error (repos not found).")
    parser.add_argument(
        "--ignore-not-yet-released",
        action='store_true',
        default=False,
        help="Ignore projects not yet released (previous tag not found).")
    parser.add_argument(
        "-I",
        "--ignore-all",
        action='store_true',
        default=False,
        help="Ignore projects without difference between the HEAD and "
        "previous tag, projects not yet released, projects in error. "
        "Similar to call command with "
        "`--ignore-no-results --ignore-errors --ignore-not-yet-released`"
        " They will be ignored in the command output.")
    parser.add_argument("-f",
                        "--format",
                        choices=['std', 'json', 'yaml'],
                        default='std',
                        help="Output format")
    parser.add_argument(
        'branch',
        help=('Branch to analyze'),
    )
    parser.add_argument(
        'repos',
        nargs='*',
        help=('Repos to analyze, '
              'repo should be e.g. openstack/glance'),
    )
    args = parser.parse_args()

    log_level = logging.ERROR
    if args.verbosity >= 3:
        log_level = logging.DEBUG
    elif args.verbosity >= 2:
        log_level = logging.INFO
    elif args.verbosity >= 1:
        log_level = logging.WARNING

    # Set up logging, including making some loggers quiet.
    logging.basicConfig(
        format='%(levelname)7s: %(message)s',
        stream=sys.stdout,
        level=log_level,
    )
    logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)

    workdir = tempfile.mkdtemp(prefix='releases-')
    # Lazy %-style args avoid formatting when the level is disabled.
    LOG.debug('creating temporary files in %s', workdir)

    def cleanup_workdir():
        if args.cleanup:
            shutil.rmtree(workdir, True)
        else:
            LOG.info('not cleaning up %s', workdir)

    atexit.register(cleanup_workdir)

    # Remove any inherited PAGER environment variable to avoid
    # blocking the output waiting for input.
    os.environ['PAGER'] = ''
    output = []

    for repo in args.repos:
        current = {
            'repo': repo,
            'branch': args.branch,
            'commits': None,
            'error': False,
            'not_yet_released': False,
            'msg': ''
        }
        url = 'https://opendev.org/{}'.format(repo)
        # An explicit timeout keeps a stalled server from hanging the
        # whole run; requests.get() has no default timeout.
        res = requests.get(url, timeout=30)
        if res.status_code == 404:
            current.update({'error': True})
            current.update(
                {'msg': "fatal: repository '{}' not found".format(url)})
            output.append(current)
            continue

        # Start by checking out master, always. We need the repo
        # checked out before we can tell if the stable branch
        # really exists.
        gitutils.clone_repo(
            workdir,
            repo,
            branch='master',
        )

        # Set some git configuration values to allow us to perform
        # local operations like tagging.
        gitutils.ensure_basic_git_config(
            workdir,
            repo,
            {
                'user.email': '*****@*****.**',
                'user.name': 'OpenStack Proposal Bot'
            },
        )

        # Determine which branch we should actually be looking
        # at. Assume any series for which there is no stable
        # branch will be on 'master'.
        branch = args.branch.replace('stable/', '')
        if gitutils.stable_branch_exists(workdir, repo, branch):
            branch = 'stable/' + branch
        else:
            branch = 'master'

        if branch != 'master':
            # Check out the repo again to the right branch if we
            # didn't get it the first time.
            gitutils.clone_repo(
                workdir,
                repo,
                branch=branch,
            )

        # look at the previous tag for the parent of the commit
        # getting the new release
        previous_tag = gitutils.get_latest_tag(workdir, repo, always=False)

        # NOTE: the original code re-checked "start_range" (an alias of
        # previous_tag) with an identical, unreachable block; one check
        # is sufficient.
        if not previous_tag:
            current.update({'not_yet_released': True})
            current.update(
                {'msg': '{} has not yet been released'.format(repo)})
            output.append(current)
            continue

        start_range = previous_tag
        head_sha = gitutils.get_head(workdir, repo)

        commits = git_log(workdir,
                          repo, (start_range, head_sha),
                          extra_args=[
                              '--no-color', '--no-merges', '--graph',
                              '--format=%h %ci %s'
                          ])

        current.update({'commits': commits})
        output.append(current)

    LOG.debug(output)

    out = generate_output(output,
                          output_format=args.format,
                          ignore_no_results=args.ignore_no_results,
                          ignore_errors=args.ignore_errors,
                          ignore_not_yet_released=args.ignore_not_yet_released,
                          ignore_all=args.ignore_all)
    print("".join(out))
    return 0