def main():
    parser = argparse.ArgumentParser(
        description='Show or bump the version number in package.xml files.')
    parser.add_argument(
        'path',
        nargs='?',
        default='.',
        help=
        'The path to a parent folder which contains package.xml files (default: .)'
    )
    parser.add_argument('--bump',
                        choices=('major', 'minor', 'patch'),
                        help='Which part of the version number to bump?')
    args = parser.parse_args()

    try:
        packages = find_packages(args.path)
        if not packages:
            print('No packages found', file=sys.stderr)
            sys.exit(1)
        version = verify_equal_package_versions(packages.values())

        # only print the version number
        if args.bump is None:
            print(version)

        else:
            # bump the version number
            new_version = bump_version(version, args.bump)
            update_versions(packages.keys(), new_version)
            print('%s -> %s' % (version, new_version))
    except Exception as e:
        sys.exit(str(e))
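All of these snippets revolve around the same catkin_pkg calls: find_packages() returns a dict mapping relative paths to Package objects, verify_equal_package_versions() returns the shared version string and raises RuntimeError when the package.xml files disagree, and bump_version()/update_versions() compute and write a new version. As a reference, here is a minimal stand-alone sketch of that pattern (not taken from any of the projects below); it assumes catkin_pkg is installed and that the target directory contains at least one package.xml.

import os
import sys

from catkin_pkg.package_version import bump_version
from catkin_pkg.packages import find_packages, verify_equal_package_versions


def show_next_version(path='.', bump='patch'):
    # find_packages() maps relative package paths to catkin_pkg Package objects
    packages = find_packages(path)
    if not packages:
        sys.exit('No packages found in %s' % os.path.abspath(path))
    # raises RuntimeError if the package.xml files do not agree on one version
    version = verify_equal_package_versions(packages.values())
    print('current version: %s' % version)
    print('next %s version: %s' % (bump, bump_version(version, bump)))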
Example #2
def get_package_data(branch_name=None,
                     directory=None,
                     quiet=True,
                     release_directory=None):
    """
    Gets package data about the package(s) in the current branch.

    It also ignores the packages in the `packages.ignore` file in the master branch.

    :param branch_name: name of the branch you are searching on (log use only)
    """
    log = debug if quiet else info
    repo_dir = directory or os.getcwd()
    if branch_name:
        log("Looking for packages in '{0}' branch... ".format(branch_name),
            end='')
    else:
        log("Looking for packages in '{0}'... ".format(directory
                                                       or os.getcwd()),
            end='')
    # Check for package.xml(s)
    packages = find_packages(repo_dir)
    if type(packages) == dict and packages != {}:
        if len(packages) > 1:
            log("found " + str(len(packages)) + " packages.", use_prefix=False)
        else:
            log("found '" + list(packages.values())[0].name + "'.",
                use_prefix=False)
        ignored_packages = get_ignored_packages(
            release_directory=release_directory)
        for k, v in dict(packages).items():
            # Check for packages with upper case names
            if v.name.lower() != v.name:
                error(
                    "Cowardly refusing to release packages with uppercase characters in the name: "
                    + v.name)
                error("See:")
                error(
                    "  https://github.com/ros-infrastructure/bloom/issues/191")
                error(
                    "  https://github.com/ros-infrastructure/bloom/issues/76")
                error("Invalid package names, aborting.", exit=True)
            # Check for ignored packages
            if v.name in ignored_packages:
                warning(
                    "Explicitly ignoring package '{0}' because it is in the `{1}.ignored` file."
                    .format(v.name, os.environ.get('BLOOM_TRACK', 'packages')))
                del packages[k]
        if packages == {}:
            error("All packages that were found were also ignored, aborting.",
                  exit=True)
        version = verify_equal_package_versions(packages.values())
        return [p.name for p in packages.values()], version, packages
    # Otherwise we have a problem
    log("failed.", use_prefix=False)
    error("No package.xml(s) found, and '--package-name' not given, aborting.",
          use_prefix=False,
          exit=True)
Example #3
def get_version(directory=None):
    packages = find_packages(basepath=directory if directory else os.getcwd())
    try:
        version = verify_equal_package_versions(packages.values())
    except RuntimeError as err:
        traceback.print_exc()
        error("Releasing multiple packages with different versions is "
              "not supported: " + str(err))
        sys.exit(1)
    return version
Example #4
def get_version(directory=None):
    packages = find_packages(basepath=directory if directory else os.getcwd())
    try:
        version = verify_equal_package_versions(packages.values())
    except RuntimeError as err:
        traceback.print_exc()
        error("Releasing multiple packages with different versions is "
                "not supported: " + str(err))
        sys.exit(1)
    return version
Example #5
def get_package_data(branch_name=None, directory=None, quiet=True, fuerte=False):
    """
    Gets package data about the package(s) in the current branch.

    :param branch_name: name of the branch you are searching on (log use only)
    """
    log = debug if quiet else info
    repo_dir = directory if directory else os.getcwd()
    stack_path = os.path.join(repo_dir, 'stack.xml')
    if os.path.exists(stack_path) and not fuerte:
        warning("stack.xml is present but going to be ignored because this is not a release for Fuerte.")
    if branch_name:
        log("Looking for packages in '{0}' branch... ".format(branch_name), end='')
    else:
        log("Looking for packages in '{0}'... ".format(directory or os.getcwd()), end='')
    ## Check for package.xml(s)
    if not fuerte:
        packages = find_packages(repo_dir)
    else:
        packages = {}
    if type(packages) == dict and packages != {}:
        if len(packages) > 1:
            log("found " + str(len(packages)) + " packages.",
                use_prefix=False)
        else:
            log("found '" + packages.values()[0].name + "'.",
                use_prefix=False)
        version = verify_equal_package_versions(packages.values())
        return [p.name for p in packages.values()], version, packages
    ## Check for stack.xml
    has_rospkg = False
    try:
        import rospkg
        has_rospkg = True
    except ImportError:
        log(ansi('redf') + "failed." + ansi('reset'), use_prefix=False)
        warning("rospkg was not detected, stack.xml discovery is disabled",
                file=sys.stderr)
    if not has_rospkg:
        error("no package.xml(s) found, and no name specified with "
              "'--package-name', aborting.", use_prefix=False, exit=True)
    if os.path.exists(stack_path):
        log("found stack.xml.", use_prefix=False)
        stack = rospkg.stack.parse_stack_file(stack_path)
        return stack.name, stack.version, stack
    # Otherwise we have a problem
    log("failed.", use_prefix=False)
    error("no package.xml(s) or stack.xml found, and no name "
          "specified with '--package-name', aborting.",
          use_prefix=False, exit=True)
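Example #5 above (and #9 below) falls back to a legacy stack.xml when no package.xml is found, which requires rospkg. A hedged sketch of just that fallback branch, assuming rospkg is installed and stack.xml sits at the top of the repository:

import os

try:
    import rospkg
    has_rospkg = True
except ImportError:
    has_rospkg = False


def get_stack_meta(repo_dir):
    # Returns (name, version) read from stack.xml, or None if it cannot be read.
    stack_path = os.path.join(repo_dir, 'stack.xml')
    if not has_rospkg or not os.path.exists(stack_path):
        return None
    stack = rospkg.stack.parse_stack_file(stack_path)
    # the examples above only rely on the .name and .version attributes
    return stack.name, stack.version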
Example #6
def _branch_packages(src, prefix, patch, interactive, directory=None):
    # Ensure we are on the correct src branch
    current_branch = get_current_branch()
    if current_branch != src:
        info("Changing to specified source branch " + src)
        execute_command('git checkout ' + src, cwd=directory)
    # Get packages
    repo_dir = directory if directory else os.getcwd()
    packages = find_packages(repo_dir)
    if not packages:
        error("No package.xml(s) found in " + repo_dir)
        return 1
    # Verify that the packages all have the same version
    version = verify_equal_package_versions(packages.values())
    # Call git-bloom-branch on each package
    info(
      "Branching these packages: " + str([p.name for p in packages.values()])
    )
    if interactive:
        if not maybe_continue():
            error("Answered no to continue, exiting.")
            return 1
    retcode = 0
    for path in packages:
        package = packages[path]
        branch = prefix + ('' if prefix and prefix.endswith('/') else '/') \
               + package.name
        print('')  # white space
        info("Branching " + package.name + "_" + version + " to " + branch)
        ret = -1
        try:
            ret = execute_branch(src, branch, patch, False, path,
                directory=directory)
            msg = "Branching " + package.name + "_" + version + " to " + \
                branch + " returned " + str(ret)
            if ret != 0:
                warning(msg)
                retcode = ret
            else:
                info(msg)
        except Exception as err:
            traceback.print_exc()
            error("Error branching " + package.name + ": " + str(err))
            retcode = ret
        finally:
            execute_command('git checkout ' + src, cwd=directory)
    return retcode
Example #7
def get_package_data(branch_name=None, directory=None, quiet=True, release_directory=None):
    """
    Gets package data about the package(s) in the current branch.

    It also ignores the packages in the `packages.ignore` file in the master branch.

    :param branch_name: name of the branch you are searching on (log use only)
    """
    log = debug if quiet else info
    repo_dir = directory or os.getcwd()
    if branch_name:
        log("Looking for packages in '{0}' branch... ".format(branch_name), end='')
    else:
        log("Looking for packages in '{0}'... ".format(directory or os.getcwd()), end='')
    # Check for package.xml(s)
    packages = find_packages(repo_dir)
    if type(packages) == dict and packages != {}:
        if len(packages) > 1:
            log("found " + str(len(packages)) + " packages.",
                use_prefix=False)
        else:
            log("found '" + list(packages.values())[0].name + "'.",
                use_prefix=False)
        version = verify_equal_package_versions(packages.values())
        ignored_packages = get_ignored_packages(release_directory=release_directory)
        for k, v in dict(packages).items():
            # Check for packages with upper case names
            if v.name.lower() != v.name:
                error("Cowardly refusing to release packages with uppercase characters in the name: " + v.name)
                error("See:")
                error("  https://github.com/ros-infrastructure/bloom/issues/191")
                error("  https://github.com/ros-infrastructure/bloom/issues/76")
                error("Invalid package names, aborting.", exit=True)
            # Check for ignored packages
            if v.name in ignored_packages:
                warning("Explicitly ignoring package '{0}' because it is in the `{1}.ignored` file."
                        .format(v.name, os.environ.get('BLOOM_TRACK', 'packages')))
                del packages[k]
        if packages == {}:
            error("All packages that were found were also ignored, aborting.",
                  exit=True)
        return [p.name for p in packages.values()], version, packages
    # Otherwise we have a problem
    log("failed.", use_prefix=False)
    error("No package.xml(s) found, and '--package-name' not given, aborting.",
          use_prefix=False, exit=True)
Example #8
def get_upstream_meta(upstream_dir):
    meta = None
    # Check for stack.xml
    stack_path = os.path.join(upstream_dir, 'stack.xml')
    info("Checking for package.xml(s)")
    # Check for package.xml(s)
    try:
        from catkin_pkg.packages import find_packages
        from catkin_pkg.packages import verify_equal_package_versions
    except ImportError:
        error("catkin_pkg was not detected, please install it.",
              file=sys.stderr)
        sys.exit(1)
    packages = find_packages(basepath=upstream_dir)
    if packages == {}:
        if has_rospkg:
            info("package.xml(s) not found, looking for stack.xml")
            if os.path.exists(stack_path):
                info("stack.xml found")
                # Assumes you are at the top of the repo
                stack = rospkg.stack.parse_stack_file(stack_path)
                meta = {}
                meta['name'] = [stack.name]
                meta['version'] = stack.version
                meta['type'] = 'stack.xml'
            else:
                error("Neither stack.xml, nor package.xml(s) were detected.")
                sys.exit(1)
        else:
            error("Package.xml(s) were not detected.")
            sys.exit(1)
    else:
        info("package.xml(s) found")
        try:
            version = verify_equal_package_versions(packages.values())
        except RuntimeError as err:
            print_exc(traceback.format_exc())
            error("Releasing multiple packages with different versions is "
                  "not supported: " + str(err))
            sys.exit(1)
        meta = {}
        meta['version'] = version
        meta['name'] = [p.name for p in packages.values()]
        meta['type'] = 'package.xml'
    return meta
Example #9
def get_package_data(branch_name, directory=None):
    """
    Gets package data about the package(s) in the current branch.

    :param branch_name: name of the branch you are searching on (log use only)
    """
    debug("Looking for packages in '{0}'... ".format(branch_name), end='')
    ## Check for package.xml(s)
    repo_dir = directory if directory else os.getcwd()
    packages = find_packages(repo_dir)
    if type(packages) == dict and packages != {}:
        if len(packages) > 1:
            debug("found " + str(len(packages)) + " packages.",
                 use_prefix=False)
        else:
            debug("found '" + packages.values()[0].name + "'.",
                 use_prefix=False)
        version = verify_equal_package_versions(packages.values())
        return [p.name for p in packages.values()], version, packages
    ## Check for stack.xml
    has_rospkg = False
    try:
        import rospkg
        has_rospkg = True
    except ImportError:
        debug(ansi('redf') + "failed." + ansi('reset'), use_prefix=False)
        warning("rospkg was not detected, stack.xml discovery is disabled",
                file=sys.stderr)
    if not has_rospkg:
        error("no package.xml(s) found, and no name specified with "
              "'--package-name', aborting.", use_prefix=False)
        return code.NO_PACKAGE_XML_FOUND
    stack_path = os.path.join(repo_dir, 'stack.xml')
    if os.path.exists(stack_path):
        debug("found stack.xml.", use_prefix=False)
        stack = rospkg.stack.parse_stack_file(stack_path)
        return stack.name, stack.version, stack
    # Otherwise we have a problem
    debug("failed.", use_prefix=False)
    error("no package.xml(s) or stack.xml found, and not name "
          "specified with '--package-name', aborting.", use_prefix=False)
    return code.NO_PACKAGE_XML_FOUND
Example #10
def get_upstream_meta(upstream_dir):
    meta = None
    # Check for stack.xml
    stack_path = os.path.join(upstream_dir, 'stack.xml')
    info("Checking for package.xml(s)")
    # Check for package.xml(s)
    try:
        from catkin_pkg.packages import find_packages
        from catkin_pkg.packages import verify_equal_package_versions
    except ImportError:
        error("catkin_pkg was not detected, please install it.",
              file=sys.stderr)
        sys.exit(1)
    packages = find_packages(basepath=upstream_dir)
    if packages == {}:
        info("package.xml(s) not found, looking for stack.xml")
        if os.path.exists(stack_path):
            info("stack.xml found")
            # Assumes you are at the top of the repo
            stack = parse_stack_xml(stack_path)
            meta = {}
            meta['name'] = [stack.name]
            meta['version'] = stack.version
            meta['type'] = 'stack.xml'
        else:
            bailout("Neither stack.xml, nor package.xml(s) were detected.")
    else:
        info("package.xml(s) found")
        try:
            version = verify_equal_package_versions(packages.values())
        except RuntimeError as err:
            traceback.print_exc()
            bailout("Releasing multiple packages with different versions is "
                    "not supported: " + str(err))
        meta = {}
        meta['version'] = version
        meta['name'] = [p.name for p in packages.values()]
        meta['type'] = 'package.xml'
    return meta
Example #11
def main():
    parser = argparse.ArgumentParser(description='Show or bump the version number in package.xml files.')
    parser.add_argument('path', nargs='?', default='.', help='The path to a parent folder which contains package.xml files (default: .)')
    parser.add_argument('--bump', choices=('major', 'minor', 'patch'), help='Which part of the version number to bump?')
    args = parser.parse_args()

    try:
        packages = find_packages(args.path)
        if not packages:
            print('No packages found', file=sys.stderr)
            sys.exit(1)
        version = verify_equal_package_versions(packages.values())

        # only print the version number
        if args.bump is None:
            print(version)

        else:
            # bump the version number
            new_version = bump_version(version, args.bump)
            update_versions(packages.keys(), new_version)
            print('%s -> %s' % (version, new_version))
    except Exception as e:
        sys.exit(str(e))
Example #12
def _main():
    parser = argparse.ArgumentParser(
        description=
        'Runs the commands to bump the version number, commit the modified %s files and create a tag in the repository.'
        % PACKAGE_MANIFEST_FILENAME)
    parser.add_argument(
        '--bump',
        choices=('major', 'minor', 'patch'),
        default='patch',
        help='Which part of the version number to bump? (default: %(default)s)'
    )
    parser.add_argument('--version', help='Specify a specific version to use')
    parser.add_argument('--no-color',
                        action='store_true',
                        default=False,
                        help='Disables colored output')
    parser.add_argument('--no-push',
                        action='store_true',
                        default=False,
                        help='Disables pushing to remote repository')
    parser.add_argument('-t',
                        '--tag-prefix',
                        default='',
                        help='Add this prefix to the created release tag')
    parser.add_argument(
        '-y',
        '--non-interactive',
        action='store_true',
        default=False,
        help="Run without user interaction, confirming all questions with 'yes'"
    )
    args = parser.parse_args()

    if args.version and not re.match(
            r'^(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)$',
            args.version):
        parser.error(
            'The passed version must follow the conventions (positive integers x.y.z with no leading zeros)'
        )

    if args.tag_prefix and ' ' in args.tag_prefix:
        parser.error('The tag prefix must not contain spaces')

    # force --no-color if stdout is non-interactive
    if not sys.stdout.isatty():
        args.no_color = True
    # disable colors if asked
    if args.no_color:
        disable_ANSI_colors()

    base_path = '.'

    print(fmt('@{gf}Prepare the source repository for a release.'))

    # determine repository type
    vcs_type = get_repository_type(base_path)
    if vcs_type is None:
        raise RuntimeError(
            fmt("@{rf}Could not determine repository type of @{boldon}'%s'@{boldoff}"
                % base_path))
    print(fmt('Repository type: @{boldon}%s@{boldoff}' % vcs_type))

    # find packages
    try:
        packages = find_packages(base_path)
    except InvalidPackage as e:
        raise RuntimeError(
            fmt("@{rf}Invalid package at path @{boldon}'%s'@{boldoff}:\n  %s" %
                (os.path.abspath(base_path), str(e))))
    if not packages:
        raise RuntimeError(fmt('@{rf}No packages found'))
    print('Found packages: %s' % ', '.join([
        fmt('@{bf}@{boldon}%s@{boldoff}@{reset}' % p.name)
        for p in packages.values()
    ]))

    # complain about packages with non-catkin build_type as they might require additional steps before being released
    # complain about packages with upper case character since they won't be releasable with bloom
    non_catkin_pkg_names = []
    invalid_pkg_names = []
    for package in packages.values():
        build_types = [
            export.content for export in package.exports
            if export.tagname == 'build_type'
        ]
        build_type = build_types[0] if build_types else 'catkin'
        if build_type != 'catkin':
            non_catkin_pkg_names.append(package.name)
        if package.name != package.name.lower():
            invalid_pkg_names.append(package.name)
    if non_catkin_pkg_names:
        print(fmt(
            "@{yf}Warning: the following package are not of build_type catkin and may require manual steps to release': %s"
            % ', '.join([('@{boldon}%s@{boldoff}' % p)
                         for p in sorted(non_catkin_pkg_names)])),
              file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway',
                                                            default=False):
            raise RuntimeError(
                fmt("@{rf}Aborted release, verify that non-catkin packages are ready to be released or release manually."
                    ))
    if invalid_pkg_names:
        print(fmt(
            "@{yf}Warning: the following package names contain upper case characters which violate both ROS and Debian naming conventions': %s"
            % ', '.join([('@{boldon}%s@{boldoff}' % p)
                         for p in sorted(invalid_pkg_names)])),
              file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway',
                                                            default=False):
            raise RuntimeError(
                fmt("@{rf}Aborted release, fix the names of the packages."))

    local_modifications = []
    for pkg_path, package in packages.items():
        # verify that the package.xml files don't have modifications pending
        package_xml_path = os.path.join(pkg_path, PACKAGE_MANIFEST_FILENAME)
        if has_changes(base_path, package_xml_path, vcs_type):
            local_modifications.append(package_xml_path)
        # verify that metapackages are valid
        if package.is_metapackage():
            try:
                metapackage.validate_metapackage(pkg_path, package)
            except metapackage.InvalidMetapackage as e:
                raise RuntimeError(
                    fmt("@{rf}Invalid metapackage at path '@{boldon}%s@{boldoff}':\n  %s\n\nSee requirements for metapackages: %s"
                        % (os.path.abspath(pkg_path), str(e),
                           metapackage.DEFINITION_URL)))

    # fetch current version and verify that all packages have same version number
    old_version = verify_equal_package_versions(packages.values())
    if args.version:
        new_version = args.version
    else:
        new_version = bump_version(old_version, args.bump)
    tag_name = args.tag_prefix + new_version

    if (not args.non_interactive and not prompt_continue(fmt(
            "Prepare release of version '@{bf}@{boldon}%s@{boldoff}@{reset}'%s"
            %
        (new_version, " (tagged as '@{bf}@{boldon}%s@{boldoff}@{reset}')" %
         tag_name if args.tag_prefix else '')),
                                                         default=True)):
        raise RuntimeError(
            fmt("@{rf}Aborted release, use option '--bump' to release a different version and/or '--tag-prefix' to add a prefix to the tag name."
                ))

    # check for changelog entries
    missing_changelogs = []
    missing_changelogs_but_forthcoming = {}
    for pkg_path, package in packages.items():
        changelog_path = os.path.join(pkg_path, CHANGELOG_FILENAME)
        if not os.path.exists(changelog_path):
            missing_changelogs.append(package.name)
            continue
        # verify that the changelog files don't have modifications pending
        if has_changes(base_path, changelog_path, vcs_type):
            local_modifications.append(changelog_path)
        changelog = get_changelog_from_path(changelog_path, package.name)
        try:
            changelog.get_content_of_version(new_version)
        except KeyError:
            # check that forthcoming section exists
            forthcoming_label = get_forthcoming_label(changelog.rst)
            if forthcoming_label:
                missing_changelogs_but_forthcoming[package.name] = (
                    changelog_path, changelog, forthcoming_label)
            else:
                missing_changelogs.append(package.name)

    if local_modifications:
        raise RuntimeError(
            fmt('@{rf}The following files have modifications, please commit/revert them before:'
                + ''.join([('\n- @{boldon}%s@{boldoff}' % path)
                           for path in local_modifications])))

    if missing_changelogs:
        print(fmt(
            "@{yf}Warning: the following packages do not have a changelog file or entry for version '@{boldon}%s@{boldoff}': %s"
            % (new_version, ', '.join([('@{boldon}%s@{boldoff}' % p)
                                       for p in sorted(missing_changelogs)]))),
              file=sys.stderr)
        if not args.non_interactive and not prompt_continue(
                'Continue without changelogs', default=False):
            raise RuntimeError(
                fmt("@{rf}Aborted release, populate the changelog with '@{boldon}catkin_generate_changelog@{boldoff}' and review / clean up the content."
                    ))

    # verify that repository is pushable (if the vcs supports dry run of push)
    if not args.no_push:
        try_repo_push(base_path, vcs_type)

    # check for staged changes and modified and untracked files
    print(
        fmt('@{gf}Checking if working copy is clean (no staged changes, no modified files, no untracked files)...'
            ))
    is_clean = check_clean_working_copy(base_path, vcs_type)
    if not is_clean:
        print(fmt(
            '@{yf}Warning: the working copy contains other changes. Consider reverting/committing/stashing them before preparing a release.'
        ),
              file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway',
                                                            default=False):
            raise RuntimeError(
                fmt("@{rf}Aborted release, clean the working copy before trying again."
                    ))

    # for svn verify that we know how to tag that repository
    if vcs_type in ['svn']:
        tag_svn_cmd = tag_repository(base_path,
                                     vcs_type,
                                     tag_name,
                                     args.tag_prefix != '',
                                     dry_run=True)

    # tag forthcoming changelog sections
    update_changelog_sections(missing_changelogs_but_forthcoming, new_version)
    print(
        fmt("@{gf}Rename the forthcoming section@{reset} of the following packages to version '@{bf}@{boldon}%s@{boldoff}@{reset}': %s"
            % (new_version, ', '.join([
                ('@{boldon}%s@{boldoff}' % p)
                for p in sorted(missing_changelogs_but_forthcoming.keys())
            ]))))

    # bump version number
    update_versions(packages.keys(), new_version)
    print(
        fmt("@{gf}Bump version@{reset} of all packages from '@{bf}%s@{reset}' to '@{bf}@{boldon}%s@{boldoff}@{reset}'"
            % (old_version, new_version)))

    pushed = None
    if vcs_type in ['svn']:
        # for svn everything affects the remote repository immediately
        commands = []
        commands.append(
            commit_files(base_path,
                         vcs_type,
                         packages,
                         missing_changelogs_but_forthcoming,
                         tag_name,
                         dry_run=True))
        commands.append(tag_svn_cmd)
        if not args.no_push:
            print(
                fmt('@{gf}The following commands will be executed to commit the changes and tag the new version:'
                    ))
        else:
            print(
                fmt('@{gf}You can use the following commands to manually commit the changes and tag the new version:'
                    ))
        for cmd in commands:
            print(fmt('  @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd)))

        if not args.no_push:
            if not args.non_interactive:
                # confirm before modifying repository
                if not prompt_continue(
                        'Execute commands which will modify the repository',
                        default=True):
                    pushed = False
            if pushed is None:
                commit_files(base_path, vcs_type, packages,
                             missing_changelogs_but_forthcoming, tag_name)
                tag_repository(base_path, vcs_type, tag_name,
                               args.tag_prefix != '')
                pushed = True

    else:
        # for other vcs types the changes are first done locally
        print(fmt('@{gf}Committing the package.xml files...'))
        commit_files(base_path, vcs_type, packages,
                     missing_changelogs_but_forthcoming, tag_name)

        print(fmt("@{gf}Creating tag '@{boldon}%s@{boldoff}'..." % (tag_name)))
        tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '')

        try:
            commands = push_changes(base_path, vcs_type, dry_run=True)
        except RuntimeError:
            print(
                fmt('@{yf}Warning: could not determine commands to push the changes and tag to the remote repository. Do you have a remote configured for the current branch?'
                    ))
        else:
            if not args.no_push:
                print(
                    fmt('@{gf}The following commands will be executed to push the changes and tag to the remote repository:'
                        ))
            else:
                print(
                    fmt('@{gf}You can use the following commands to manually push the changes to the remote repository:'
                        ))
            for cmd in commands:
                print(fmt('  @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd)))

            if not args.no_push:
                if not args.non_interactive:
                    # confirm commands to push to remote repository
                    if not prompt_continue(
                            'Execute commands to push the local commits and tags to the remote repository',
                            default=True):
                        pushed = False
                if pushed is None:
                    push_changes(base_path, vcs_type)
                    pushed = True

    if pushed:
        print(
            fmt("@{gf}The source repository has been released successfully. The next step will be '@{boldon}bloom-release@{boldoff}'."
                ))
    else:
        msg = 'The release of the source repository has been prepared successfully but the changes have not been pushed yet. ' \
            "After pushing the changes manually the next step will be '@{boldon}bloom-release@{boldoff}'."
        if args.no_push or pushed is False:
            print(fmt('@{yf}%s' % msg))
        else:
            raise RuntimeError(fmt('@{rf}%s' % msg))
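Example #12 treats a package as plain catkin unless its package.xml declares a different build_type export. A small hedged sketch of just that check, assuming a catkin_pkg Package object whose exports carry .tagname and .content as used above:

def get_build_type(package):
    # Collect any <export><build_type>...</build_type></export> entries.
    build_types = [e.content for e in package.exports if e.tagname == 'build_type']
    # Packages without an explicit build_type export default to catkin.
    return build_types[0] if build_types else 'catkin'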
Example #13
def main(sysargs=None):
    parser = argparse.ArgumentParser(
        description=
        'Tag the forthcoming section in the changelog files with an upcoming version number'
    )
    parser.add_argument(
        '--bump',
        choices=('major', 'minor', 'patch'),
        default='patch',
        help='Which part of the version number to bump? (default: %(default)s)'
    )
    args = parser.parse_args(sysargs)

    base_path = '.'

    # find packages
    packages = find_packages(base_path)
    if not packages:
        raise RuntimeError('No packages found')
    print('Found packages: %s' % ', '.join([p.name
                                            for p in packages.values()]))

    # fetch current version and verify that all packages have same version number
    old_version = verify_equal_package_versions(packages.values())
    new_version = bump_version(old_version, args.bump)
    print('Tag version %s' % new_version)

    # check for changelog entries
    changelogs = []
    missing_forthcoming = []
    already_tagged = []
    for pkg_path, package in packages.items():
        changelog_path = os.path.join(base_path, pkg_path, CHANGELOG_FILENAME)
        if not os.path.exists(changelog_path):
            missing_forthcoming.append(package.name)
            continue
        changelog = get_changelog_from_path(changelog_path, package.name)
        if not changelog:
            missing_forthcoming.append(package.name)
            continue
        # check that forthcoming section exists
        forthcoming_label = get_forthcoming_label(changelog.rst)
        if not forthcoming_label:
            missing_forthcoming.append(package.name)
            continue
        # check that new_version section does not exist yet
        try:
            changelog.get_content_of_version(new_version)
            already_tagged.append(package.name)
            continue
        except KeyError:
            pass
        changelogs.append(
            (package.name, changelog_path, changelog, forthcoming_label))
    if missing_forthcoming:
        print(
            'The following packages do not have a forthcoming section in their changelog file: %s'
            % ', '.join(sorted(missing_forthcoming)),
            file=sys.stderr)
    if already_tagged:
        print(
            "The following packages do already have a section '%s' in their changelog file: %s"
            % (new_version, ', '.join(sorted(already_tagged))),
            file=sys.stderr)

    # rename forthcoming sections to new_version including current date
    new_changelog_data = []
    new_label = '%s (%s)' % (new_version, datetime.date.today().isoformat())
    for (pkg_name, changelog_path, changelog, forthcoming_label) in changelogs:
        print("Renaming section '%s' to '%s' in package '%s'..." %
              (forthcoming_label, new_label, pkg_name))
        data = rename_section(changelog.rst, forthcoming_label, new_label)
        new_changelog_data.append((changelog_path, data))

    print('Writing updated changelog files...')
    for (changelog_path, data) in new_changelog_data:
        with open(changelog_path, 'w') as f:
            f.write(data)
Example #14
def main(sysargs=None):
    parser = argparse.ArgumentParser(description='Tag the forthcoming section in the changelog files with an upcoming version number')
    parser.add_argument('--bump', choices=('major', 'minor', 'patch'), default='patch', help='Which part of the version number to bump? (default: %(default)s)')
    args = parser.parse_args(sysargs)

    base_path = '.'

    # find packages
    packages = find_packages(base_path)
    if not packages:
        raise RuntimeError('No packages found')
    print('Found packages: %s' % ', '.join([p.name for p in packages.values()]))

    # fetch current version and verify that all packages have same version number
    old_version = verify_equal_package_versions(packages.values())
    new_version = bump_version(old_version, args.bump)
    print('Tag version %s' % new_version)

    # check for changelog entries
    changelogs = []
    missing_forthcoming = []
    already_tagged = []
    for pkg_path, package in packages.items():
        changelog_path = os.path.join(base_path, pkg_path, CHANGELOG_FILENAME)
        if not os.path.exists(changelog_path):
            missing_forthcoming.append(package.name)
            continue
        changelog = get_changelog_from_path(changelog_path, package.name)
        if not changelog:
            missing_forthcoming.append(package.name)
            continue
        # check that forthcoming section exists
        forthcoming_label = get_forthcoming_label(changelog.rst)
        if not forthcoming_label:
            missing_forthcoming.append(package.name)
            continue
        # check that new_version section does not exist yet
        try:
            changelog.get_content_of_version(new_version)
            already_tagged.append(package.name)
            continue
        except KeyError:
            pass
        changelogs.append((package.name, changelog_path, changelog, forthcoming_label))
    if missing_forthcoming:
        print('The following packages do not have a forthcoming section in their changelog file: %s' % ', '.join(sorted(missing_forthcoming)), file=sys.stderr)
    if already_tagged:
        print("The following packages do already have a section '%s' in their changelog file: %s" % (new_version, ', '.join(sorted(already_tagged))), file=sys.stderr)

    # rename forthcoming sections to new_version including current date
    new_changelog_data = []
    new_label = '%s (%s)' % (new_version, datetime.date.today().isoformat())
    for (pkg_name, changelog_path, changelog, forthcoming_label) in changelogs:
        print("Renaming section '%s' to '%s' in package '%s'..." % (forthcoming_label, new_label, pkg_name))
        data = rename_section(changelog.rst, forthcoming_label, new_label)
        new_changelog_data.append((changelog_path, data))

    print('Writing updated changelog files...')
    for (changelog_path, data) in new_changelog_data:
        with open(changelog_path, 'wb') as f:
            f.write(data.encode('utf-8'))
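Examples #13 and #14 boil down to renaming the forthcoming changelog section to the new version plus today's date. A hedged, stand-alone sketch of that single step; the import path for get_forthcoming_label/rename_section mirrors Example #16 further below and may differ between catkin_pkg versions:

import datetime

from catkin_pkg.changelog import get_changelog_from_path
# import path as used in Example #16; an assumption, may vary across catkin_pkg versions
from catkin_pkg.cli.tag_changelog import get_forthcoming_label, rename_section


def tag_forthcoming(changelog_path, pkg_name, new_version):
    # Returns the new section label, or None if there is no forthcoming section.
    changelog = get_changelog_from_path(changelog_path, pkg_name)
    if not changelog:
        return None
    forthcoming_label = get_forthcoming_label(changelog.rst)
    if not forthcoming_label:
        return None
    new_label = '%s (%s)' % (new_version, datetime.date.today().isoformat())
    data = rename_section(changelog.rst, forthcoming_label, new_label)
    with open(changelog_path, 'wb') as f:
        f.write(data.encode('utf-8'))
    return new_label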
Example #15
def _main():
    parser = argparse.ArgumentParser(
        description='Runs the commands to bump the version number, commit the modified %s files and create a tag in the repository.' % PACKAGE_MANIFEST_FILENAME)
    parser.add_argument('--bump', choices=('major', 'minor', 'patch'), default='patch', help='Which part of the version number to bump? (default: %(default)s)')
    parser.add_argument('--version', help='Specify a specific version to use')
    parser.add_argument('--no-color', action='store_true', default=False, help='Disables colored output')
    parser.add_argument('--no-push', action='store_true', default=False, help='Disables pushing to remote repository')
    parser.add_argument('-t', '--tag-prefix', default='', help='Add this prefix to the created release tag')
    parser.add_argument('-y', '--non-interactive', action='store_true', default=False, help="Run without user interaction, confirming all questions with 'yes'")
    args = parser.parse_args()

    if args.version and not re.match(r'^(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)$', args.version):
        parser.error('The passed version must follow the conventions (positive integers x.y.z with no leading zeros)')

    if args.tag_prefix and ' ' in args.tag_prefix:
        parser.error('The tag prefix must not contain spaces')

    # force --no-color if stdout is non-interactive
    if not sys.stdout.isatty():
        args.no_color = True
    # disable colors if asked
    if args.no_color:
        disable_ANSI_colors()

    base_path = '.'

    print(fmt('@{gf}Prepare the source repository for a release.'))

    # determine repository type
    vcs_type = get_repository_type(base_path)
    if vcs_type is None:
        raise RuntimeError(fmt("@{rf}Could not determine repository type of @{boldon}'%s'@{boldoff}" % base_path))
    print(fmt('Repository type: @{boldon}%s@{boldoff}' % vcs_type))

    # find packages
    try:
        packages = find_packages(base_path)
    except InvalidPackage as e:
        raise RuntimeError(fmt("@{rf}Invalid package at path @{boldon}'%s'@{boldoff}:\n  %s" % (os.path.abspath(base_path), str(e))))
    if not packages:
        raise RuntimeError(fmt('@{rf}No packages found'))
    print('Found packages: %s' % ', '.join([fmt('@{bf}@{boldon}%s@{boldoff}@{reset}' % p.name) for p in packages.values()]))

    # complain about packages with non-catkin build_type as they might require additional steps before being released
    # complain about packages with upper case character since they won't be releasable with bloom
    non_catkin_pkg_names = []
    invalid_pkg_names = []
    for package in packages.values():
        build_types = [export.content for export in package.exports if export.tagname == 'build_type']
        build_type = build_types[0] if build_types else 'catkin'
        if build_type != 'catkin':
            non_catkin_pkg_names.append(package.name)
        if package.name != package.name.lower():
            invalid_pkg_names.append(package.name)
    if non_catkin_pkg_names:
        print(
            fmt(
                "@{yf}Warning: the following package are not of build_type catkin and may require manual steps to release': %s" %
                ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(non_catkin_pkg_names)])
            ), file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway', default=False):
            raise RuntimeError(fmt('@{rf}Aborted release, verify that non-catkin packages are ready to be released or release manually.'))
    if invalid_pkg_names:
        print(
            fmt(
                "@{yf}Warning: the following package names contain upper case characters which violate both ROS and Debian naming conventions': %s" %
                ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(invalid_pkg_names)])
            ), file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway', default=False):
            raise RuntimeError(fmt('@{rf}Aborted release, fix the names of the packages.'))

    local_modifications = []
    for pkg_path, package in packages.items():
        # verify that the package.xml files don't have modifications pending
        package_xml_path = os.path.join(pkg_path, PACKAGE_MANIFEST_FILENAME)
        if has_changes(base_path, package_xml_path, vcs_type):
            local_modifications.append(package_xml_path)
        # verify that metapackages are valid
        if package.is_metapackage():
            try:
                metapackage.validate_metapackage(pkg_path, package)
            except metapackage.InvalidMetapackage as e:
                raise RuntimeError(fmt(
                    "@{rf}Invalid metapackage at path '@{boldon}%s@{boldoff}':\n  %s\n\nSee requirements for metapackages: %s" %
                    (os.path.abspath(pkg_path), str(e), metapackage.DEFINITION_URL)))

    # fetch current version and verify that all packages have same version number
    old_version = verify_equal_package_versions(packages.values())
    if args.version:
        new_version = args.version
    else:
        new_version = bump_version(old_version, args.bump)
    tag_name = args.tag_prefix + new_version

    if (
        not args.non_interactive and
        not prompt_continue(
            fmt(
                "Prepare release of version '@{bf}@{boldon}%s@{boldoff}@{reset}'%s" %
                (new_version, " (tagged as '@{bf}@{boldon}%s@{boldoff}@{reset}')" % tag_name if args.tag_prefix else '')
            ), default=True)
    ):
        raise RuntimeError(fmt("@{rf}Aborted release, use option '--bump' to release a different version and/or '--tag-prefix' to add a prefix to the tag name."))

    # check for changelog entries
    missing_changelogs = []
    missing_changelogs_but_forthcoming = {}
    for pkg_path, package in packages.items():
        changelog_path = os.path.join(pkg_path, CHANGELOG_FILENAME)
        if not os.path.exists(changelog_path):
            missing_changelogs.append(package.name)
            continue
        # verify that the changelog files don't have modifications pending
        if has_changes(base_path, changelog_path, vcs_type):
            local_modifications.append(changelog_path)
        changelog = get_changelog_from_path(changelog_path, package.name)
        try:
            changelog.get_content_of_version(new_version)
        except KeyError:
            # check that forthcoming section exists
            forthcoming_label = get_forthcoming_label(changelog.rst)
            if forthcoming_label:
                missing_changelogs_but_forthcoming[package.name] = (changelog_path, changelog, forthcoming_label)
            else:
                missing_changelogs.append(package.name)

    if local_modifications:
        raise RuntimeError(fmt('@{rf}The following files have modifications, please commit/revert them before:' + ''.join([('\n- @{boldon}%s@{boldoff}' % path) for path in local_modifications])))

    if missing_changelogs:
        print(
            fmt(
                "@{yf}Warning: the following packages do not have a changelog file or entry for version '@{boldon}%s@{boldoff}': %s" %
                (new_version, ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(missing_changelogs)]))
            ), file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue without changelogs', default=False):
            raise RuntimeError(fmt("@{rf}Aborted release, populate the changelog with '@{boldon}catkin_generate_changelog@{boldoff}' and review / clean up the content."))

    # verify that repository is pushable (if the vcs supports dry run of push)
    if not args.no_push:
        try_repo_push(base_path, vcs_type)

    # check for staged changes and modified and untracked files
    print(fmt('@{gf}Checking if working copy is clean (no staged changes, no modified files, no untracked files)...'))
    is_clean = check_clean_working_copy(base_path, vcs_type)
    if not is_clean:
        print(fmt('@{yf}Warning: the working copy contains other changes. Consider reverting/committing/stashing them before preparing a release.'), file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway', default=False):
            raise RuntimeError(fmt('@{rf}Aborted release, clean the working copy before trying again.'))

    # for svn verify that we know how to tag that repository
    if vcs_type in ['svn']:
        tag_svn_cmd = tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '', dry_run=True)

    # tag forthcoming changelog sections
    update_changelog_sections(missing_changelogs_but_forthcoming, new_version)
    print(fmt(
        "@{gf}Rename the forthcoming section@{reset} of the following packages to version '@{bf}@{boldon}%s@{boldoff}@{reset}': %s" %
        (new_version, ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(missing_changelogs_but_forthcoming.keys())]))))

    # bump version number
    update_versions(packages.keys(), new_version)
    print(fmt("@{gf}Bump version@{reset} of all packages from '@{bf}%s@{reset}' to '@{bf}@{boldon}%s@{boldoff}@{reset}'" % (old_version, new_version)))

    pushed = None
    if vcs_type in ['svn']:
        # for svn everything affects the remote repository immediately
        commands = []
        commands.append(commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name, dry_run=True))
        commands.append(tag_svn_cmd)
        if not args.no_push:
            print(fmt('@{gf}The following commands will be executed to commit the changes and tag the new version:'))
        else:
            print(fmt('@{gf}You can use the following commands to manually commit the changes and tag the new version:'))
        for cmd in commands:
            print(fmt('  @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd)))

        if not args.no_push:
            if not args.non_interactive:
                # confirm before modifying repository
                if not prompt_continue('Execute commands which will modify the repository', default=True):
                    pushed = False
            if pushed is None:
                commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name)
                tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '')
                pushed = True

    else:
        # for other vcs types the changes are first done locally
        print(fmt('@{gf}Committing the package.xml files...'))
        commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name)

        print(fmt("@{gf}Creating tag '@{boldon}%s@{boldoff}'..." % (tag_name)))
        tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '')

        try:
            commands = push_changes(base_path, vcs_type, tag_name, dry_run=True)
        except RuntimeError:
            print(fmt('@{yf}Warning: could not determine commands to push the changes and tag to the remote repository. Do you have a remote configured for the current branch?'))
        else:
            if not args.no_push:
                print(fmt('@{gf}The following commands will be executed to push the changes and tag to the remote repository:'))
            else:
                print(fmt('@{gf}You can use the following commands to manually push the changes to the remote repository:'))
            for cmd in commands:
                print(fmt('  @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd)))

            if not args.no_push:
                if not args.non_interactive:
                    # confirm commands to push to remote repository
                    if not prompt_continue('Execute commands to push the local commits and tags to the remote repository', default=True):
                        pushed = False
                if pushed is None:
                    push_changes(base_path, vcs_type, tag_name)
                    pushed = True

    if pushed:
        print(fmt("@{gf}The source repository has been released successfully. The next step will be '@{boldon}bloom-release@{boldoff}'."))
    else:
        msg = 'The release of the source repository has been prepared successfully but the changes have not been pushed yet. ' \
            "After pushing the changes manually the next step will be '@{boldon}bloom-release@{boldoff}'."
        if args.no_push or pushed is False:
            print(fmt('@{yf}%s' % msg))
        else:
            raise RuntimeError(fmt('@{rf}%s' % msg))
Example #16
    def get_a_changelog(self, repo_path, changelog_filename="CHANGELOG.rst", bump="patch"):
        """
        @summary Generate a list of changelog entries for a repo given by a path.
        @param repo_path: Path to a local repo.
        @param bump: Which part of the version number to bump ('major', 'minor' or 'patch').
        @deprecated: This is temporary. Code is basically copied from catkin_pkg.cli.tag_changelog.main.
            This should be ported upstream to avoid duplicate maintenance.
        """
        import datetime
        from catkin_pkg.changelog import get_changelog_from_path
        from catkin_pkg.package_version import bump_version
        from catkin_pkg.packages import find_packages, verify_equal_package_versions
        from catkin_pkg.cli.tag_changelog import get_forthcoming_label, rename_section

        # find packages in the given path
        packages = find_packages(repo_path)
        if not packages:
            raise RuntimeError('No packages found')
        print('Found packages: %s' % ', '.join([p.name for p in packages.values()]))

        # fetch current version and verify that all packages have same version number
        old_version = verify_equal_package_versions(packages.values())
        new_version = bump_version(old_version, bump)
        print('Tag version %s' % new_version)

        # check for changelog entries
        changelogs = []
        missing_forthcoming = []
        already_tagged = []
        for pkg_path, package in packages.items():
            changelog_path = os.path.join(repo_path, pkg_path, changelog_filename)
            if not os.path.exists(changelog_path):
                missing_forthcoming.append(package.name)
                continue
            changelog = get_changelog_from_path(changelog_path, package.name)
            if not changelog:
                missing_forthcoming.append(package.name)
                continue
            # check that forthcoming section exists
            forthcoming_label = get_forthcoming_label(changelog.rst)
            if not forthcoming_label:
                missing_forthcoming.append(package.name)
                continue
            # check that new_version section does not exist yet
            try:
                changelog.get_content_of_version(new_version)
                already_tagged.append(package.name)
                continue
            except KeyError:
                pass
            changelogs.append((package.name, changelog_path, changelog, forthcoming_label))
        if missing_forthcoming:
            print('The following packages do not have a forthcoming section in their changelog file: %s' % ', '.join(sorted(missing_forthcoming)), file=sys.stderr)
        if already_tagged:
            print("The following packages do already have a section '%s' in their changelog file: %s" % (new_version, ', '.join(sorted(already_tagged))), file=sys.stderr)

        # rename forthcoming sections to new_version including current date
        new_changelog_data = []
        new_label = '{} ({})'.format(new_version, datetime.date.today().isoformat())
        for (pkg_name, changelog_path, changelog, forthcoming_label) in changelogs:
            print("Renaming section '{}' to '{}' in package '{}'...".format(
                forthcoming_label, new_label, pkg_name))
            data = rename_section(changelog.rst, forthcoming_label, new_label)
            new_changelog_data.append((changelog_path, data))

        print('Writing updated changelog files...')
        for (changelog_path, data) in new_changelog_data:
            with open(changelog_path, 'wb') as f:
                f.write(data.encode('utf-8'))