Example #1
    def commit(self):
        if self.disabled:
            return
        info(
            fmt("@{bf}<==@| Command successful, committing changes to working copy"
                ))
        current_branch = get_current_branch()
        if current_branch is None:
            error("Could not determine current branch.", exit=True)
        with inbranch(get_commit_hash(get_current_branch())):
            with change_directory(self.clone_dir):
                new_branches = get_branches()
                for branch in self.current_branches:
                    if branch in new_branches:
                        new_branches.remove(branch)
                for branch in get_branches(local_only=True):
                    if branch not in new_branches:
                        with inbranch(branch):
                            cmd = 'git pull --rebase origin ' + branch
                            execute_command(cmd)
                execute_command('git push --all', silent=False)
                try:
                    execute_command('git push --tags', silent=False)
                except subprocess.CalledProcessError:
                    warning(
                        "Force pushing tags from clone to working repository, "
                        "you will have to force push back to origin...")
                    execute_command('git push --force --tags', silent=False)

        self.clean_up()
Example #2
def main(sysargs=None):
    # Check that the current directory is a serviceable git/bloom repo
    ensure_clean_working_env()
    ensure_git_root()

    # Get tracks
    tracks_dict = get_tracks_dict_raw()
    if not tracks_dict['tracks']:
        error("No tracks configured, first create a track with "
              "'git-bloom-config new <track_name>'", exit=True)

    # Do argparse stuff
    parser = get_argument_parser([str(t) for t in tracks_dict['tracks']])
    parser = add_global_arguments(parser)
    args = parser.parse_args(sysargs)
    handle_global_arguments(args)

    verify_track(args.track, tracks_dict['tracks'][args.track])

    execute_track(args.track, tracks_dict['tracks'][args.track],
                  args.release_increment, args.pretend, args.debug, args.unsafe)

    # Notify the user of success and next action suggestions
    print('\n\n')
    warning("Tip: Check to ensure that the debian tags created have the same "
            "version as the upstream version you are releasing.")
    info(fmt("@{gf}@!Everything went as expected, "
         "you should check that the new tags match your expectations, and "
         "then push to the release repo with:@|"))
    info(fmt("  git push --all && git push --tags  "
             "@{kf}@!# You might have to add --force to the second command if you "
             "are over-writing existing flags"))
Example #3
 def convert_package_to_debian_data(self, package):
     data = {}
     # Name, Version, Description
     data['Name'] = package.name
     data['Version'] = package.version
     data['Description'] = debianize_string(package.description)
     # Websites
     websites = [str(url) for url in package.urls if url.type == 'website']
     homepage = websites[0] if websites else ''
     if homepage == '':
         warning("No homepage set, defaulting to ''")
     data['Homepage'] = homepage
     # Build rule templates
     if is_meta_package(package):
         data['BuildType'] = 'metapackage'
     else:
         data['BuildType'] = 'cmake'
     # Debian Increment Number
     data['DebianInc'] = self.debian_inc
     # Package name
     data['Package'] = self.get_stackage_name(package)
     # Installation prefix
     data['InstallationPrefix'] = self.install_prefix
     # Dependencies
     data['Depends'] = set([d.name for d in package.run_depends])
     build_deps = (package.build_depends + package.buildtool_depends)
     data['BuildDepends'] = set([d.name for d in build_deps])
     # Maintainers
     maintainers = []
     for m in package.maintainers:
         maintainers.append(str(m))
     data['Maintainer'] = ', '.join(maintainers)
     return data
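
For orientation, the dictionary built above ends up looking roughly like this; every value below is a made-up placeholder, not output from a real package:

# Hypothetical result of convert_package_to_debian_data() for an invented package.
data = {
    'Name': 'foo_msgs',                       # package.name
    'Version': '0.1.0',                       # package.version
    'Description': 'Example description.',    # debianized description string
    'Homepage': 'http://example.com/foo',     # first 'website' URL, or ''
    'BuildType': 'cmake',                     # 'metapackage' for metapackages
    'DebianInc': '0',                         # self.debian_inc
    'Package': 'ros-foo-msgs',                # whatever self.get_stackage_name() returns
    'InstallationPrefix': '/opt/ros/indigo',  # self.install_prefix (placeholder)
    'Depends': {'roscpp', 'std_msgs'},        # run dependency names
    'BuildDepends': {'roscpp', 'std_msgs'},   # build + buildtool dependency names
    'Maintainer': 'Jane Doe <jane@example.com>',
}
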
Example #4
 def __exit__(self, exc_type, exc_value, traceback):
     if self.current_branch is not None:
         checkout(self.current_branch,
                  raise_exc=True,
                  directory=self.directory)
     else:
         warning("Could not determine branch to return to.")
Example #5
    def commit(self):
        if self.disabled:
            return
        info(fmt("@{bf}<==@| Command successful, committing changes to working copy"))
        current_branch = get_current_branch()
        if current_branch is None:
            error("Could not determine current branch.", exit=True)
        with inbranch(get_commit_hash(get_current_branch())):
            with change_directory(self.clone_dir):
                new_branches = get_branches()
                for branch in self.current_branches:
                    if branch in new_branches:
                        new_branches.remove(branch)
                for branch in get_branches(local_only=True):
                    if branch not in new_branches:
                        with inbranch(branch):
                            cmd = 'git pull --rebase origin ' + branch
                            execute_command(cmd)
                execute_command('git push --all', silent=False)
                try:
                    execute_command('git push --tags', silent=False)
                except subprocess.CalledProcessError:
                    warning("Force pushing tags from clone to working repository, "
                            "you will have to force push back to origin...")
                    execute_command('git push --force --tags', silent=False)

        self.clean_up()
Example #6
def get_changelogs(package, releaser_history=None):
    if releaser_history is None:
        warning("No historical releaser history, using current maintainer name "
                "and email for each versioned changelog entry.")
        releaser_history = {}
    if is_debug():
        import logging
        logging.basicConfig()
        import catkin_pkg
        catkin_pkg.changelog.log.setLevel(logging.DEBUG)
    package_path = os.path.abspath(os.path.dirname(package.filename))
    changelog_path = os.path.join(package_path, CHANGELOG_FILENAME)
    if os.path.exists(changelog_path):
        changelog = get_changelog_from_path(changelog_path)
        changelogs = []
        maintainer = (package.maintainers[0].name, package.maintainers[0].email)
        for version, date, changes in changelog.foreach_version(reverse=True):
            changes_str = []
            date_str = get_rfc_2822_date(date)
            for item in changes:
                changes_str.extend(['  ' + i for i in to_unicode(item).splitlines()])
            # Each entry has (version, date, changes, releaser, releaser_email)
            releaser, email = releaser_history.get(version, maintainer)
            changelogs.append((
                version, date_str, '\n'.join(changes_str), releaser, email
            ))
        return changelogs
    else:
        warning("No {0} found for package '{1}'"
                .format(CHANGELOG_FILENAME, package.name))
        return []
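
A minimal sketch of consuming the entries returned above; 'pkg' is assumed to be an already-parsed catkin package object, and the formatting is illustrative rather than bloom's own:

# Hedged usage sketch: get_changelogs() returns a list of
# (version, date_str, changes, releaser, releaser_email) tuples.
for version, date_str, changes, releaser, email in get_changelogs(pkg):
    print('{0} ({1})  {2} <{3}>'.format(version, date_str, releaser, email))
    print(changes)
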
Example #7
    def _check_all_keys_are_valid(self, peer_packages, rosdistro):
        keys_to_resolve = set()
        key_to_packages_which_depends_on = collections.defaultdict(list)
        keys_to_ignore = set()
        for package in self.packages.values():
            evaluate_package_conditions(package, rosdistro)
            depends = [
                dep for dep in (package.run_depends + package.buildtool_export_depends)
                if dep.evaluated_condition is not False]
            build_depends = [
                dep for dep in (package.build_depends + package.buildtool_depends + package.test_depends)
                if dep.evaluated_condition is not False]
            unresolved_keys = [
                dep for dep in (depends + build_depends + package.replaces + package.conflicts)
                if dep.evaluated_condition is not False]
            keys_to_ignore = {
                dep for dep in keys_to_ignore.union(package.replaces + package.conflicts)
                if dep.evaluated_condition is not False}
            keys = [d.name for d in unresolved_keys]
            keys_to_resolve.update(keys)
            for key in keys:
                key_to_packages_which_depends_on[key].append(package.name)

        for skip_key in self.skip_keys:
            try:
                keys_to_resolve.remove(skip_key)
            except KeyError:
                warning("Key '{0}' specified by --skip-keys was not found".format(skip_key))
            else:
                warning("Skipping dependency key '{0}' per --skip-keys".format(skip_key))

        os_name = self.os_name
        rosdistro = self.rosdistro
        all_keys_valid = True
        for key in sorted(keys_to_resolve):
            for os_version in self.distros:
                try:
                    extended_peer_packages = peer_packages + [d.name for d in keys_to_ignore]
                    rule, installer_key, default_installer_key = \
                        resolve_rosdep_key(key, os_name, os_version, rosdistro, extended_peer_packages,
                                           retry=False)
                    if rule is None:
                        continue
                    if installer_key != default_installer_key:
                        error("Key '{0}' resolved to '{1}' with installer '{2}', "
                              "which does not match the default installer '{3}'."
                              .format(key, rule, installer_key, default_installer_key))
                        BloomGenerator.exit(
                            "The RPM generator does not support dependencies "
                            "which are installed with the '{0}' installer."
                            .format(installer_key),
                            returncode=code.GENERATOR_INVALID_INSTALLER_KEY)
                except (GeneratorError, RuntimeError) as e:
                    print(fmt("Failed to resolve @{cf}@!{key}@| on @{bf}{os_name}@|:@{cf}@!{os_version}@| with: {e}")
                          .format(**locals()))
                    print(fmt("@{cf}@!{0}@| is depended on by these packages: ").format(key) +
                          str(list(set(key_to_packages_which_depends_on[key]))))
                    print(fmt("@{kf}@!<== @{rf}@!Failed@|"))
                    all_keys_valid = False
        return all_keys_valid
Example #8
def get_changelogs(package, releaser_history=None):
    if releaser_history is None:
        warning(
            "No historical releaser history, using current maintainer name "
            "and email for each versioned changelog entry.")
        releaser_history = {}
    if is_debug():
        import logging
        logging.basicConfig()
        import catkin_pkg
        catkin_pkg.changelog.log.setLevel(logging.DEBUG)
    package_path = os.path.abspath(os.path.dirname(package.filename))
    changelog_path = os.path.join(package_path, CHANGELOG_FILENAME)
    if os.path.exists(changelog_path):
        changelog = get_changelog_from_path(changelog_path)
        changelogs = []
        maintainer = (package.maintainers[0].name,
                      package.maintainers[0].email)
        for version, date, changes in changelog.foreach_version(reverse=True):
            changes_str = []
            date_str = get_rfc_2822_date(date)
            for item in changes:
                changes_str.extend(
                    ['  ' + i for i in to_unicode(item).splitlines()])
            # Each entry has (version, date, changes, releaser, releaser_email)
            releaser, email = releaser_history.get(version, maintainer)
            changelogs.append(
                (version, date_str, '\n'.join(changes_str), releaser, email))
        return changelogs
    else:
        warning("No {0} found for package '{1}'".format(
            CHANGELOG_FILENAME, package.name))
        return []
Example #9
def generate_ros_distro_diff(track, repository, distro, distro_file_url, distro_file, distro_file_raw):
    with inbranch('upstream'):
        # Check for package.xml(s)
        try:
            from catkin_pkg.packages import find_packages
        except ImportError:
            debug(traceback.format_exc())
            error("catkin_pkg was not detected, please install it.",
                  file=sys.stderr, exit=True)
        packages = find_packages(os.getcwd())
        if len(packages) == 0:
            warning("No packages found, will not generate 'package: path' entries for rosdistro.")
        track_dict = get_tracks_dict_raw()['tracks'][track]
        last_version = track_dict['last_version']
        release_inc = track_dict['release_inc']
        if repository not in distro_file['repositories']:
            global _user_provided_release_url
            distro_file['repositories'][repository] = {'url': _user_provided_release_url or ''}
        distro_file['repositories'][repository]['version'] = '{0}-{1}'.format(last_version, release_inc)
        if packages and (len(packages) > 1 or packages.keys()[0] != '.'):
            distro_file['repositories'][repository]['packages'] = {}
            for path, package in packages.iteritems():
                if os.path.basename(path) == package.name:
                    distro_file['repositories'][repository]['packages'][package.name] = None
                else:
                    distro_file['repositories'][repository]['packages'][package.name] = path
    distro_file_name = os.path.join('release', distro_file_url.split('/')[-1])
    distro_dump = yaml.dump(distro_file, indent=2, default_flow_style=False)
    if distro_file_raw != distro_dump:
        udiff = difflib.unified_diff(distro_file_raw.splitlines(), distro_dump.splitlines(),
                                     fromfile=distro_file_name, tofile=distro_file_name)
        temp_dir = tempfile.mkdtemp()
        version = distro_file['repositories'][repository]['version']
        udiff_file = os.path.join(temp_dir, repository + '-' + version + '.patch')
        udiff_raw = ''
        info("Unified diff for the ROS distro file located at '{0}':".format(udiff_file))
        for line in udiff:
            if line.startswith('@@'):
                udiff_raw += line
                line = fmt('@{cf}' + line)
            if line.startswith('+'):
                if not line.startswith('+++'):
                    line += '\n'
                udiff_raw += line
                line = fmt('@{gf}' + line)
            if line.startswith('-'):
                if not line.startswith('---'):
                    line += '\n'
                udiff_raw += line
                line = fmt('@{rf}' + line)
            if line.startswith(' '):
                line += '\n'
                udiff_raw += line
            info(line, use_prefix=False, end='')
        with open(udiff_file, 'w+') as f:
            f.write(udiff_raw)
        return udiff_file, distro_dump
    else:
        warning("This release resulted in no changes to the ROS distro file...")
    return None, None
Example #10
def handle_tree(tree, directory, root_path, version):
    for path, kind in tree.items():
        if kind == 'directory':
            # Path relative to start path
            rel_path = os.path.join(directory, path)
            # If it is a file, error
            if os.path.isfile(rel_path):
                error("In patches path '{0}' is a directory".format(rel_path) +
                      ", but it exists in the upstream branch as a file.",
                      exit=True)
            # If it is not already a directory, create it
            if not os.path.isdir(rel_path):
                info("  Createing directory... '{0}'".format(rel_path))
                os.mkdir(rel_path)
            # Recurse on the directory
            handle_tree(
                ls_tree(BLOOM_CONFIG_BRANCH, os.path.join(root_path,
                                                          rel_path)), rel_path,
                root_path, version)
        if kind == 'file':
            # Path relative to start path
            rel_path = os.path.join(directory, path)
            # If the local version is a directory, error
            if os.path.isdir(rel_path):
                error("In patches path '{0}' is a file, ".format(rel_path) +
                      "but it exists in the upstream branch as a directory.",
                      exit=True)
            # If the file already exists, warn
            if os.path.isfile(rel_path):
                warning("  File '{0}' already exists, overwriting...".format(
                    rel_path))
                execute_command('git rm {0}'.format(rel_path), shell=True)
            # If package.xml, template in the version; otherwise grab the data as-is
            if path in ['stack.xml']:
                warning(
                    "  Skipping '{0}' templating, fuerte not supported".format(
                        rel_path))
            if path in ['package.xml']:
                info("  Templating '{0}' into upstream branch...".format(
                    rel_path))
                file_data = show(BLOOM_CONFIG_BRANCH,
                                 os.path.join(root_path, rel_path))
                file_data = file_data.replace(':{version}', version)
            else:
                info("  Overlaying '{0}' into upstream branch...".format(
                    rel_path))
                file_data = show(BLOOM_CONFIG_BRANCH,
                                 os.path.join(root_path, rel_path))
            # Write file
            with open(rel_path, 'wb') as f:
                # Python 2 will treat this as an ascii string but
                # Python 3 will not re-decode a utf-8 string.
                if sys.version_info.major == 2:
                    file_data = file_data.decode('utf-8').encode('utf-8')
                else:
                    file_data = file_data.encode('utf-8')
                f.write(file_data)
            # Add it with git
            execute_command('git add {0}'.format(rel_path), shell=True)
Example #11
def segment_version(full_version):
    version_list = full_version.split('.')
    if len(version_list) != 3:
        warning('Invalid version element, expected: '
                '<major>.<minor>.<patch>')
    if len(version_list) < 3:
        sys.exit(code.INVALID_VERSION)
    return version_list
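
A quick usage sketch of the behavior above; the version strings are made up:

# Assuming segment_version() from above is in scope:
print(segment_version('1.2.3'))  # -> ['1', '2', '3']
segment_version('1.2')           # warns about the format, then exits with code.INVALID_VERSION
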
Example #12
def get_package_data(branch_name=None,
                     directory=None,
                     quiet=True,
                     release_directory=None):
    """
    Gets package data about the package(s) in the current branch.

    It also ignores the packages in the `packages.ignore` file in the master branch.

    :param branch_name: name of the branch you are searching on (log use only)
    """
    log = debug if quiet else info
    repo_dir = directory or os.getcwd()
    if branch_name:
        log("Looking for packages in '{0}' branch... ".format(branch_name),
            end='')
    else:
        log("Looking for packages in '{0}'... ".format(directory
                                                       or os.getcwd()),
            end='')
    # Check for package.xml(s)
    packages = find_packages(repo_dir)
    if type(packages) == dict and packages != {}:
        if len(packages) > 1:
            log("found " + str(len(packages)) + " packages.", use_prefix=False)
        else:
            log("found '" + list(packages.values())[0].name + "'.",
                use_prefix=False)
        ignored_packages = get_ignored_packages(
            release_directory=release_directory)
        for k, v in dict(packages).items():
            # Check for packages with upper case names
            if v.name.lower() != v.name:
                error(
                    "Cowardly refusing to release packages with uppercase characters in the name: "
                    + v.name)
                error("See:")
                error(
                    "  https://github.com/ros-infrastructure/bloom/issues/191")
                error(
                    "  https://github.com/ros-infrastructure/bloom/issues/76")
                error("Invalid package names, aborting.", exit=True)
            # Check for ignored packages
            if v.name in ignored_packages:
                warning(
                    "Explicitly ignoring package '{0}' because it is in the `{1}.ignored` file."
                    .format(v.name, os.environ.get('BLOOM_TRACK', 'packages')))
                del packages[k]
        if packages == {}:
            error("All packages that were found were also ignored, aborting.",
                  exit=True)
        version = verify_equal_package_versions(packages.values())
        return [p.name for p in packages.values()], version, packages
    # Otherwise we have a problem
    log("failed.", use_prefix=False)
    error("No package.xml(s) found, and '--package-name' not given, aborting.",
          use_prefix=False,
          exit=True)
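
On success the function returns a (names, version, packages) triple, so a caller might unpack it like this (a hedged sketch, run from inside a release repository):

# Hedged usage sketch of the return value shown above.
names, version, packages = get_package_data(branch_name='upstream')
print('Releasing {0} at version {1}'.format(', '.join(names), version))
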
Example #13
def create_generators(generator_names):
    generators = {}
    for generator_name in generator_names:
        generator = load_generator(generator_name)
        if generator is not None:
            generators[generator_name] = generator()
        else:
            warning("Failed to load generator: " + str(generator_name))
    return generators
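
Usage is just a name-to-instance lookup on the returned dictionary; a hedged sketch with an illustrative generator name:

# Hedged usage sketch: the generator name here is illustrative only.
generators = create_generators(['rosdebian'])
for name, generator in generators.items():
    print(name, type(generator).__name__)
# Names that fail to load are warned about and simply left out of the dict.
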
Example #14
def create_generators(generator_names):
    generators = {}
    for generator_name in generator_names:
        generator = load_generator(load_generator_module, generator_name)
        if generator is not None:
            generators[generator_name] = generator()
        else:
            warning("Failed to load generator: " + str(generator_name))
    return generators
Example #15
 def metapackage_check(self, path, pkg):
     if pkg.is_metapackage():
         try:
             metapackage.validate_metapackage(path, pkg)
         except metapackage.InvalidMetapackage as e:
             warning("Invalid metapackage:")
             warning("  %s\n" % str(e))
             error(fmt("Refusing to release invalid metapackage '@|%s@{rf}@!', metapackage requirements:\n  @|%s" %
                   (pkg.name, metapackage.DEFINITION_URL)), exit=True)
Example #16
def auto_upstream_checkout(upstream_repo, upstream_url, devel_branch):
    info("Searching in upstream development branch for the name and version")
    info("  Upstream url: " + upstream_url)
    info("  Upstream type: " + upstream_repo.get_vcs_type_name())
    if devel_branch:
        info("  Upstream branch: " + str(devel_branch))
    # Handle special svn cases
    if upstream_repo.get_vcs_type_name() == 'svn':
        if devel_branch == '':
            upstream_url += '/trunk'
        else:
            upstream_url += '/branches/' + str(devel_branch)
        devel_branch = ''
    # Checkout to the upstream development branch
    retcode = try_vcstools_checkout(upstream_repo, upstream_url, devel_branch)
    if retcode != 0:
        return retcode
    # Look into the upstream devel branch for the version
    meta = get_upstream_meta(upstream_repo.get_path())
    if meta is None or None in meta.values():
        error("Failed to get the upstream meta data.")
        return 1
    # Summarize the package.xml/stack.xml contents
    info("Found upstream with version: " + ansi('boldon') + meta['version'] + \
         ansi('reset'))
    if meta['type'] == 'stack.xml':
        info("Upstream contains a stack called: " + ansi('boldon') + \
             meta['name'][0] + ansi('reset'))
    else:
        info("Upstream contains package" + \
             ('s: ' if len(meta['name']) > 1 else ': ') + ansi('boldon') + \
             ', '.join(meta['name']) + ansi('reset'))
    # If svn recreate upstream_repo and checkout to the tag
    if upstream_repo.get_vcs_type_name() == 'svn':
        # Remove the /trunk from the url
        upstream_url = '/'.join(upstream_url.split('/')[:-1])
        upstream_dir = upstream_repo.get_path()
        shutil.rmtree(upstream_dir)  # Delete old upstream
        upstream_repo = get_vcs_client('svn', upstream_dir)
        checkout_url = upstream_url + '/tags/' + meta['version']
        if not upstream_repo.checkout(checkout_url):
            got_it = False
            for name in meta['name']:
                warning("Didn't find the tagged version at " + checkout_url)
                checkout_url = upstream_url + '/tags/' + name + \
                               '-' + meta['version']
                warning("Trying " + checkout_url)
                if upstream_repo.checkout(checkout_url):
                    got_it = True
                    break
            if not got_it:
                error("Could not checkout upstream version")
                return 1
    # Return the meta data
    return meta
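
Note the mixed return type above: an int return code on failure, a meta dict on success. A caller would have to distinguish the two, e.g. (hedged sketch; upstream_repo, upstream_url, devel_branch, sys and info are assumed to be in scope):

# Hedged sketch: handling auto_upstream_checkout()'s int-or-dict return value.
meta = auto_upstream_checkout(upstream_repo, upstream_url, devel_branch)
if isinstance(meta, int):
    sys.exit(meta)  # non-dict returns above are error codes
info("Upstream version: " + meta['version'])
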
Example #17
def main(sysargs=None):
    from bloom.config import upconvert_bloom_to_config_branch
    upconvert_bloom_to_config_branch()

    # Check that the current directory is a serviceable git/bloom repo
    ensure_clean_working_env()
    ensure_git_root()

    # Get tracks
    tracks_dict = get_tracks_dict_raw()
    if not tracks_dict['tracks']:
        error(
            "No tracks configured, first create a track with "
            "'git-bloom-config new <track_name>'",
            exit=True)

    # Do argparse stuff
    parser = get_argument_parser([str(t) for t in tracks_dict['tracks']])
    parser = add_global_arguments(parser)
    args = parser.parse_args(sysargs)
    handle_global_arguments(args)

    os.environ['BLOOM_TRACK'] = args.track

    verify_track(args.track, tracks_dict['tracks'][args.track])

    git_clone = GitClone()
    with git_clone:
        quiet_git_clone_warning(True)
        disable_git_clone(True)
        execute_track(args.track,
                      tracks_dict['tracks'][args.track],
                      args.release_increment,
                      args.pretend,
                      args.debug,
                      args.unsafe,
                      interactive=args.interactive)
        disable_git_clone(False)
        quiet_git_clone_warning(False)
    git_clone.commit()

    # Notify the user of success and next action suggestions
    info('\n\n', use_prefix=False)
    warning("Tip: Check to ensure that the debian tags created have the same "
            "version as the upstream version you are releasing.")
    info(
        fmt("@{gf}@!Everything went as expected, "
            "you should check that the new tags match your expectations, and "
            "then push to the release repo with:@|"))
    info(
        fmt("  git push --all && git push --tags  "
            "@{kf}@!# You might have to add --force to the second command if you "
            "are over-writing existing tags"))
Example #18
 def __enter__(self):
     if self.disabled:
         return
     current_branch = get_current_branch()
     if current_branch is None:
         warning("Could not determine current branch, changing to the bloom branch")
         execute_command('git checkout bloom')
     self.orig_cwd = os.getcwd()
     os.chdir(self.clone_dir)
     if self.track_all:
         track_branches(directory=self.clone_dir)
     return os.getcwd()
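
This __enter__ pairs with the commit() shown in the first example; the git-bloom-release main() earlier in this listing drives the pair roughly like this (condensed, hedged sketch):

# Hedged sketch of the GitClone context-manager pattern used in main() above.
git_clone = GitClone()
with git_clone:          # __enter__: chdir into the clone, track branches
    do_release_work()    # hypothetical stand-in for execute_track(...)
git_clone.commit()       # push the clone's changes back to the working copy
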
Example #19
 def mfa_prompt(oauth_config_path, username):
     """Explain how to create a token for users with Multi-Factor Authentication configured."""
     warning("Receiving 401 when trying to create an oauth token can be caused by the user "
             "having two-factor authentication enabled.")
     warning("If 2FA is enabled, the user will have to create an oauth token manually.")
     warning("A token can be created at https://github.com/settings/tokens")
     warning("The resulting token can be placed in the '{oauth_config_path}' file as such:"
             .format(**locals()))
     info("")
     warning('{{"github_user": "******", "oauth_token": "TOKEN_GOES_HERE"}}'
             .format(**locals()))
     info("")
Example #20
File: git.py Project: po1/bloom
 def __enter__(self):
     if self.disabled:
         return
     current_branch = get_current_branch()
     if current_branch is None:
         warning("Could not determine current branch, changing to the bloom branch")
         execute_command('git checkout bloom')
     self.orig_cwd = os.getcwd()
     os.chdir(self.clone_dir)
     if self.track_all:
         track_branches(directory=self.clone_dir)
     return os.getcwd()
Example #21
def export_upstream(uri, tag, vcs_type, output_dir, show_uri, name):
    tag = tag if tag != ":{none}" else None
    output_dir = output_dir or os.getcwd()
    if uri.startswith("git@"):
        uri_is_path = False
    else:
        uri_parsed = urlparse(uri)
        uri = uri if uri_parsed.scheme else uri_parsed.path
        uri_is_path = False if uri_parsed.scheme else True
    name = name or "upstream"
    with temporary_directory() as tmp_dir:
        info(
            "Checking out repository at '{0}'".format(show_uri or uri)
            + (" to reference '{0}'.".format(tag) if tag else ".")
        )
        if uri_is_path:
            upstream_repo = get_vcs_client(vcs_type, uri)
        else:
            repo_path = os.path.join(tmp_dir, "upstream")
            upstream_repo = get_vcs_client(vcs_type, repo_path)
            if not upstream_repo.checkout(uri, tag or ""):
                error(
                    "Failed to clone repository at '{0}'".format(uri)
                    + (" to reference '{0}'.".format(tag) if tag else "."),
                    exit=True,
                )
        if get_root() is not None and has_submodules(upstream_repo.get_path()):
            error(
                """\
bloom does not support exporting git repositories with submodules, see:

- https://github.com/ros-infrastructure/bloom/issues/202
- https://github.com/ros-infrastructure/bloom/issues/217
- https://github.com/vcstools/vcstools/issues/84
""",
                exit=True,
            )
        tarball_prefix = "{0}-{1}".format(name, tag) if tag else name
        tarball_path = os.path.join(output_dir, tarball_prefix)
        full_tarball_path = tarball_path + ".tar.gz"
        info("Exporting to archive: '{0}'".format(full_tarball_path))
        if not upstream_repo.export_repository(tag or "", tarball_path):
            error("Failed to create archive of upstream repository at '{0}'".format(show_uri))
            if tag and vcs_type == "git":  # can only check for git repos
                with change_directory(upstream_repo.get_path()):
                    if not tag_exists(tag):
                        warning("'{0}' is not a tag in the upstream repository...".format(tag))
                    if not branch_exists(tag):
                        warning("'{0}' is not a branch in the upstream repository...".format(tag))
        if not os.path.exists(full_tarball_path):
            error("Tarball was not created.", exit=True)
        info("md5: {0}".format(calculate_file_md5(full_tarball_path)))
Example #22
def update_track(track_dict):
    for key, value in DEFAULT_TEMPLATE.iteritems():
        if key in ['actions']:
            if track_dict[key] != DEFAULT_TEMPLATE[key]:
                warning("""\
Your track's '{0}' configuration is not the same as the default, should it be updated to the default setting?"""
                        .format(key))
                if maybe_continue('n'):
                    track_dict[key] = DEFAULT_TEMPLATE[key]
        elif key not in track_dict:
            value = value.default if isinstance(value, PromptEntry) else value
            track_dict[key] = value
    return track_dict
Example #23
def check_for_updates():
    if sys.argv[0].endswith('bloom-update'):
        return
    user_bloom = os.path.join(os.path.expanduser('~'), '.bloom')
    if os.path.exists(user_bloom):
        with open(user_bloom, 'r') as f:
            raw = f.read()
        if not raw:
            return
        version_dict = json.loads(raw)
        os.remove(user_bloom)  # Remove only on successful parse
        if type(version_dict) == dict and len(version_dict) == 2 and version_dict['current'] == bloom.__version__:
            warning(UPDATE_MSG.format(**version_dict))
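
For context, the warning above only fires when ~/.bloom parses to a two-entry JSON object whose 'current' value matches the running bloom version; a hedged sketch of such a payload (the name of the second key is not visible here, 'latest' is only a guess):

import json

# Hedged sketch of the cached payload check_for_updates() expects to parse.
raw = '{"current": "0.5.10", "latest": "0.6.0"}'  # made-up versions, guessed key name
version_dict = json.loads(raw)
print(type(version_dict) == dict and len(version_dict) == 2)  # True
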
Example #24
def version_check(version):
    last_tag = get_last_tag_by_version()
    if not last_tag:
        return
    last_tag_version = last_tag.split('/')[-1]
    info(fmt("The latest upstream tag in the release repository is '@!{0}@|'."
         .format(last_tag)))
    # Ensure the new version is greater than the last tag
    if parse_version(version) < parse_version(last_tag_version):
        warning("""\
Version discrepancy:
The upstream version '{0}' isn't newer than upstream version '{1}'.
""".format(version, last_tag_version))
Example #25
def execute_track(track, track_dict, release_inc, pretend=True, debug=False, fast=False):
    info("Processing release track settings for '{0}'".format(track))
    settings = process_track_settings(track_dict, release_inc)
    # setup extra settings
    archive_dir_path = tempfile.mkdtemp()
    settings['archive_dir_path'] = archive_dir_path
    if settings['release_tag'] != ':{none}':
        archive_file = '{name}-{release_tag}.tar.gz'.format(**settings)
    else:
        archive_file = '{name}.tar.gz'.format(**settings)
    settings['archive_path'] = os.path.join(archive_dir_path, archive_file)
    # execute actions
    info("", use_prefix=False)
    info("Executing release track '{0}'".format(track))
    for action in track_dict['actions']:
        if 'bloom-export-upstream' in action and settings['vcs_type'] == 'tar':
            warning("Explicitly skipping bloom-export-upstream for tar.")
            settings['archive_path'] = settings['vcs_uri']
            continue
        templated_action = template_str(action, settings)
        info(fmt("@{bf}@!==> @|@!" + sanitize(str(templated_action))))
        if pretend:
            continue
        stdout = None
        stderr = None
        if bloom.util._quiet:
            stdout = subprocess.PIPE
            stderr = subprocess.STDOUT
        if debug and 'DEBUG' not in os.environ:
            os.environ['DEBUG'] = '1'
        if fast and 'BLOOM_UNSAFE' not in os.environ:
            os.environ['BLOOM_UNSAFE'] = '1'
        templated_action = templated_action.split()
        templated_action[0] = find_full_path(templated_action[0])
        p = subprocess.Popen(templated_action, stdout=stdout, stderr=stderr,
                             shell=False, env=os.environ.copy())
        out, err = p.communicate()
        if bloom.util._quiet:
            info(out, use_prefix=False)
        ret = p.returncode
        if ret > 0:
            error(fmt(_error + "Error running command '@!{0}'@|")
                  .format(templated_action), exit=True)
        info('', use_prefix=False)
    if not pretend:
        # Update the release_inc
        tracks_dict = get_tracks_dict_raw()
        tracks_dict['tracks'][track]['release_inc'] = settings['release_inc']
        tracks_dict['tracks'][track]['last_version'] = settings['version']
        write_tracks_dict_raw(tracks_dict,
                              'Updating release inc to: ' + str(settings['release_inc']))
Example #26
def export_upstream(uri, tag, vcs_type, output_dir, show_uri, name):
    tag = tag if tag != ':{none}' else None
    output_dir = output_dir or os.getcwd()
    if uri.startswith('git@'):
        uri_is_path = False
    else:
        uri_parsed = urlparse(uri)
        uri = uri if uri_parsed.scheme else uri_parsed.path
        uri_is_path = False if uri_parsed.scheme else True
    name = name or 'upstream'
    with temporary_directory() as tmp_dir:
        info("Checking out repository at '{0}'".format(show_uri or uri) +
             (" to reference '{0}'.".format(tag) if tag else '.'))
        if uri_is_path:
            upstream_repo = get_vcs_client(vcs_type, uri)
        else:
            repo_path = os.path.join(tmp_dir, 'upstream')
            upstream_repo = get_vcs_client(vcs_type, repo_path)
            if not upstream_repo.checkout(uri, tag or ''):
                error("Failed to clone repository at '{0}'".format(uri) +
                      (" to reference '{0}'.".format(tag) if tag else '.'),
                      exit=True)
        if get_root() is not None and has_submodules(upstream_repo.get_path()):
            error("""\
bloom does not support exporting git repositories with submodules, see:

- https://github.com/ros-infrastructure/bloom/issues/202
- https://github.com/ros-infrastructure/bloom/issues/217
- https://github.com/vcstools/vcstools/issues/84
""",
                  exit=True)
        tarball_prefix = '{0}-{1}'.format(name, tag) if tag else name
        tarball_path = os.path.join(output_dir, tarball_prefix)
        full_tarball_path = tarball_path + '.tar.gz'
        info("Exporting to archive: '{0}'".format(full_tarball_path))
        if not upstream_repo.export_repository(tag or '', tarball_path):
            error("Failed to create archive of upstream repository at '{0}'".
                  format(show_uri))
            if tag and vcs_type == 'git':  # can only check for git repos
                with change_directory(upstream_repo.get_path()):
                    if not tag_exists(tag):
                        warning(
                            "'{0}' is not a tag in the upstream repository...".
                            format(tag))
                    if not branch_exists(tag):
                        warning(
                            "'{0}' is not a branch in the upstream repository..."
                            .format(tag))
        if not os.path.exists(full_tarball_path):
            error("Tarball was not created.", exit=True)
        info("md5: {0}".format(calculate_file_md5(full_tarball_path)))
Example #27
 def place_template_files(self, build_type, debian_dir='debian'):
     # Create/Clean the debian folder
     if os.path.exists(debian_dir):
         if self.interactive:
             warning("debian directory exists: " + debian_dir)
             warning("Do you wish to overwrite it?")
             if not maybe_continue('y'):
                 error("Answered no to continue, aborting.", exit=True)
         elif 'BLOOM_CLEAR_DEBIAN_ON_GENERATION' in os.environ:
             warning("Overwriting debian directory: " + debian_dir)
             execute_command('git rm -rf ' + debian_dir)
             execute_command(
                 'git commit -m "Clearing previous debian folder"')
             if os.path.exists(debian_dir):
                 shutil.rmtree(debian_dir)
         else:
             warning("Not overwriting debian directory.")
     # Use generic place template files command
     place_template_files('.', build_type, gbp=True)
     # Commit results
     execute_command('git add ' + debian_dir)
     _, has_files, _ = execute_command('git diff --cached --name-only',
                                       return_io=True)
     if has_files:
         execute_command('git commit -m "Placing debian template files"')
Example #28
def get_package_data(branch_name=None, directory=None, quiet=True, fuerte=False):
    """
    Gets package data about the package(s) in the current branch.

    :param branch_name: name of the branch you are searching on (log use only)
    """
    log = debug if quiet else info
    repo_dir = directory if directory else os.getcwd()
    stack_path = os.path.join(repo_dir, 'stack.xml')
    if os.path.exists(stack_path) and not fuerte:
            warning("stack.xml is present but going to be ignored because this is not a release for Fuerte.")
    if branch_name:
        log("Looking for packages in '{0}' branch... ".format(branch_name), end='')
    else:
        log("Looking for packages in '{0}'... ".format(directory or os.getcwd()), end='')
    ## Check for package.xml(s)
    if not fuerte:
        packages = find_packages(repo_dir)
    else:
        packages = {}
    if type(packages) == dict and packages != {}:
        if len(packages) > 1:
            log("found " + str(len(packages)) + " packages.",
                use_prefix=False)
        else:
            log("found '" + packages.values()[0].name + "'.",
                use_prefix=False)
        version = verify_equal_package_versions(packages.values())
        return [p.name for p in packages.values()], version, packages
    ## Check for stack.xml
    has_rospkg = False
    try:
        import rospkg
        has_rospkg = True
    except ImportError:
        log(ansi('redf') + "failed." + ansi('reset'), use_prefix=False)
        warning("rospkg was not detected, stack.xml discovery is disabled",
                file=sys.stderr)
    if not has_rospkg:
        error("no package.xml(s) found, and no name specified with "
              "'--package-name', aborting.", use_prefix=False, exit=True)
    if os.path.exists(stack_path):
        log("found stack.xml.", use_prefix=False)
        stack = rospkg.stack.parse_stack_file(stack_path)
        return stack.name, stack.version, stack
    # Otherwise we have a problem
    log("failed.", use_prefix=False)
    error("no package.xml(s) or stack.xml found, and no name "
          "specified with '--package-name', aborting.",
          use_prefix=False, exit=True)
Example #29
def get_github_interface():
    # First check to see if the oauth token is stored
    oauth_config_path = os.path.join(os.path.expanduser('~'), '.config', 'bloom')
    config = {}
    if os.path.exists(oauth_config_path):
        with open(oauth_config_path, 'r') as f:
            config = json.loads(f.read())
            token = config.get('oauth_token', None)
            username = config.get('github_user', None)
            if token and username:
                return Github(username, auth=auth_header_from_oauth_token(token), token=token)
    if not os.path.isdir(os.path.dirname(oauth_config_path)):
        os.makedirs(os.path.dirname(oauth_config_path))
    # OK, now we have to ask for the username and password
    info("")
    info("Looks like bloom doesn't have an oauth token for you yet.")
    info("Therefore bloom will require your Github username and password just this once.")
    info("With your Github username and password bloom will create an oauth token on your behalf.")
    info("The token will be stored in `~/.config/bloom`.")
    info("You can delete the token from that file to have a new token generated.")
    info("Guard this token like a password, because it allows someone/something to act on your behalf.")
    info("If you need to unauthorize it, remove it from the 'Applications' menu in your Github account page.")
    info("")
    token = None
    while token is None:
        try:
            username = getpass.getuser()
            username = safe_input("Github username [{0}]: ".format(username)) or username
            password = getpass.getpass("Github password (never stored): ")
        except (KeyboardInterrupt, EOFError):
            return None
        if not password:
            error("No password was given, aborting.")
            return None
        gh = Github(username, auth=auth_header_from_basic_auth(username, password))
        try:
            token = gh.create_new_bloom_authorization(update_auth=True)
            with open(oauth_config_path, 'a') as f:
                config.update({'oauth_token': token, 'github_user': username})
                f.write(json.dumps(config))
            info("The token '{token}' was created and stored in the bloom config file: '{oauth_config_path}'"
                 .format(**locals()))
        except GithubException as exc:
            error("{0}".format(exc))
            info("")
            warning("This sometimes fails when the username or password are incorrect, try again?")
            if not maybe_continue():
                return None
    return gh
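
The config file read at the top of this function is the same one mfa_prompt describes; a hedged sketch of writing it by hand (the token value is a placeholder, and ~/.config is assumed to exist):

import json
import os

# Hedged sketch: ~/.config/bloom holding the two keys get_github_interface() reads.
oauth_config_path = os.path.join(os.path.expanduser('~'), '.config', 'bloom')
config = {'github_user': 'your-github-username', 'oauth_token': 'TOKEN_GOES_HERE'}
with open(oauth_config_path, 'w') as f:
    f.write(json.dumps(config))
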
Example #30
def import_patches(directory=None):
    # Get current branch
    current_branch = get_current_branch(directory)
    # Construct the patches branch name
    patches_branch = 'patches/' + current_branch
    # Ensure the patches branch exists and is tracked
    if branch_exists(patches_branch, False, directory=directory):
        if not branch_exists(patches_branch, True, directory=directory):
            track_branches(patches_branch, directory)
    else:
        error("The patches branch ({0}) does not ".format(patches_branch) + \
              "exist, did you use git-bloom-branch?")
        return code.BRANCH_DOES_NOT_EXIST
    # Create a swap space
    tmp_dir = tempfile.mkdtemp()
    try:
        # Get parent branch and base commit from patches branch
        config = get_patch_config(patches_branch, directory)
        parent_branch, commit = config['parent'], config['base']
        if commit != get_commit_hash(current_branch, directory):
            warning("The current commit is not the same as the most recent "
                    "rebase commit. This might mean that you have committed "
                    "since the last time you did 'git-bloom-patch export'.")
            return code.PATCHES_NOT_EXPORTED
        # Checkout to the patches branch
        checkout(patches_branch, directory=directory)
        # Copy the patches to a temp location
        patches = list_patches(directory)
        if len(patches) == 0:
            debug("No patches in the patches branch, nothing to do")
            return code.NOTHING_TO_DO
        tmp_dir_patches = []
        for patch in patches:
            tmp_dir_patches.append(os.path.join(tmp_dir, patch))
            if directory is not None:
                patch = os.path.join(directory, patch)
            shutil.copy(patch, tmp_dir)
        # Now checkout back to the original branch and import them
        checkout(current_branch, directory=directory)
        cmd = 'git am {0}*.patch'.format(tmp_dir + os.sep)
        execute_command(cmd, cwd=directory)
        # Notify the user
        info("Applied {0} patches".format(len(patches)))
    finally:
        if current_branch:
            checkout(current_branch, directory=directory)
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
    return 0
Example #31
 def post_patch(self, destination):
     # Figure out the version of the given package
     if self.name is not None:
         warning("Cannot automatically tag the release because this is "
                 "not a catkin project. Please create a tag manually with:")
         warning("  git tag -f release/" + str(self.name) + "/<version>")
         return 0
     with inbranch(destination):
         package_data = get_package_data(destination)
         if type(package_data) not in [list, tuple]:
             return package_data
     name, version, packages = package_data
     # Execute git tag
     execute_command('git tag -f ' + destination + '/' + version)
     return 0
Example #32
def handle_tree(tree, directory, root_path, version):
    for path, kind in tree.iteritems():
        if kind == 'directory':
            # Path relative to start path
            rel_path = os.path.join(directory, path)
            # If it is a file, error
            if os.path.isfile(rel_path):
                error("In patches path '{0}' is a directory".format(rel_path) +
                      ", but it exists in the upstream branch as a file.",
                      exit=True)
            # If it is not already a directory, create it
            if not os.path.isdir(rel_path):
                info("  Createing directory... '{0}'".format(rel_path))
                os.mkdir(rel_path)
            # Recurse on the directory
            handle_tree(ls_tree(BLOOM_CONFIG_BRANCH, os.path.join(root_path, rel_path)),
                        rel_path, root_path, version)
        if kind == 'file':
            # Path relative to start path
            rel_path = os.path.join(directory, path)
            # If the local version is a directory, error
            if os.path.isdir(rel_path):
                error("In patches path '{0}' is a file, ".format(rel_path) +
                      "but it exists in the upstream branch as a directory.",
                      exit=True)
            # If the file already exists, warn
            if os.path.isfile(rel_path):
                warning("  File '{0}' already exists, overwriting..."
                        .format(rel_path))
                execute_command('git rm {0}'.format(rel_path), shell=True)
            # If package.xml, template in the version; otherwise grab the data as-is
            if path in ['stack.xml']:
                warning("  Skipping '{0}' templating, fuerte not supported"
                        .format(rel_path))
            if path in ['package.xml']:
                info("  Templating '{0}' into upstream branch..."
                     .format(rel_path))
                file_data = show(BLOOM_CONFIG_BRANCH, os.path.join(root_path, rel_path))
                file_data = file_data.replace(':{version}', version)
            else:
                info("  Overlaying '{0}' into upstream branch..."
                     .format(rel_path))
                file_data = show(BLOOM_CONFIG_BRANCH, os.path.join(root_path, rel_path))
            # Write file
            with open(rel_path, 'wb') as f:
                f.write(file_data)
            # Add it with git
            execute_command('git add {0}'.format(rel_path), shell=True)
Example #33
 def handle_arguments(self, args):
     self.interactive = args.interactive
     self.debian_inc = args.debian_inc
     self.os_name = args.os_name
     self.distros = args.distros
     if self.distros in [None, []]:
         index = rosdistro.get_index(rosdistro.get_index_url())
         distribution_file = rosdistro.get_distribution_file(
             index, self.rosdistro)
         if self.os_name not in distribution_file.release_platforms:
             if args.os_not_required:
                 warning(
                     "No platforms defined for os '{0}' in release file for the "
                     "'{1}' distro. This os was not required; continuing without error."
                     .format(self.os_name, self.rosdistro))
                 sys.exit(0)
             error(
                 "No platforms defined for os '{0}' in release file for the '{1}' distro."
                 .format(self.os_name, self.rosdistro),
                 exit=True)
         self.distros = distribution_file.release_platforms[self.os_name]
     self.install_prefix = args.install_prefix
     if args.install_prefix is None:
         self.install_prefix = self.default_install_prefix
     self.prefix = args.prefix
     self.branches = match_branches_with_prefix(self.prefix,
                                                get_branches,
                                                prune=not args.match_all)
     if len(self.branches) == 0:
         error("No packages found, check your --prefix or --src arguments.",
               exit=True)
     self.packages = {}
     self.tag_names = {}
     self.names = []
     self.branch_args = []
     self.debian_branches = []
     for branch in self.branches:
         package = get_package_from_branch(branch)
         if package is None:
             # This is an ignored package
             continue
         self.packages[package.name] = package
         self.names.append(package.name)
         args = self.generate_branching_arguments(package, branch)
         # First branch is debian/[<rosdistro>/]<package>
         self.debian_branches.append(args[0][0])
         self.branch_args.extend(args)
Example #34
 def mfa_prompt(oauth_config_path, username):
     """Explain how to create a token for users with Multi-Factor Authentication configured."""
     warning(
         "Receiving 401 when trying to create an oauth token can be caused by the user "
         "having two-factor authentication enabled.")
     warning(
         "If 2FA is enabled, the user will have to create an oauth token manually."
     )
     warning("A token can be created at https://github.com/settings/tokens")
     warning(
         "The resulting token can be placed in the '{oauth_config_path}' file as such:"
         .format(**locals()))
     info("")
     warning(
         '{{"github_user": "******", "oauth_token": "TOKEN_GOES_HERE"}}'
         .format(**locals()))
     info("")
Example #35
def get_package_data(branch_name=None, directory=None, quiet=True, release_directory=None):
    """
    Gets package data about the package(s) in the current branch.

    It also ignores the packages in the `packages.ignore` file in the master branch.

    :param branch_name: name of the branch you are searching on (log use only)
    """
    log = debug if quiet else info
    repo_dir = directory or os.getcwd()
    if branch_name:
        log("Looking for packages in '{0}' branch... ".format(branch_name), end='')
    else:
        log("Looking for packages in '{0}'... ".format(directory or os.getcwd()), end='')
    # Check for package.xml(s)
    packages = find_packages(repo_dir)
    if type(packages) == dict and packages != {}:
        if len(packages) > 1:
            log("found " + str(len(packages)) + " packages.",
                use_prefix=False)
        else:
            log("found '" + list(packages.values())[0].name + "'.",
                use_prefix=False)
        version = verify_equal_package_versions(packages.values())
        ignored_packages = get_ignored_packages(release_directory=release_directory)
        for k, v in dict(packages).items():
            # Check for packages with upper case names
            if v.name.lower() != v.name:
                error("Cowardly refusing to release packages with uppercase characters in the name: " + v.name)
                error("See:")
                error("  https://github.com/ros-infrastructure/bloom/issues/191")
                error("  https://github.com/ros-infrastructure/bloom/issues/76")
                error("Invalid package names, aborting.", exit=True)
            # Check for ignored packages
            if v.name in ignored_packages:
                warning("Explicitly ignoring package '{0}' because it is in the `{1}.ignored` file."
                        .format(v.name, os.environ.get('BLOOM_TRACK', 'packages')))
                del packages[k]
        if packages == {}:
            error("All packages that were found were also ignored, aborting.",
                  exit=True)
        return [p.name for p in packages.values()], version, packages
    # Otherwise we have a problem
    log("failed.", use_prefix=False)
    error("No package.xml(s) found, and '--package-name' not given, aborting.",
          use_prefix=False, exit=True)
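The discovery step above is a thin wrapper over catkin_pkg; a rough standalone sketch of just that part, assuming a local checkout in the current directory:

import os
from catkin_pkg.packages import find_packages, verify_equal_package_versions

repo_dir = os.getcwd()
packages = find_packages(repo_dir)  # {relative_path: Package}
if packages:
    version = verify_equal_package_versions(packages.values())
    print(sorted(p.name for p in packages.values()), version)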
Example #40
0
def handle_tree(tree, directory, root_path, version):
    for path, kind in tree.iteritems():
        if kind == "directory":
            # Path relative to start path
            rel_path = os.path.join(directory, path)
            # If it is a file, error
            if os.path.isfile(rel_path):
                error(
                    "In patches path '{0}' is a directory".format(rel_path)
                    + ", but it exists in the upstream branch as a file.",
                    exit=True,
                )
            # If it is not already a directory, create it
            if not os.path.isdir(rel_path):
                info("  Createing directory... '{0}'".format(rel_path))
                os.mkdir(rel_path)
            # Recurse on the directory
            handle_tree(ls_tree("bloom", os.path.join(root_path, rel_path)), rel_path, root_path, version)
        if kind == "file":
            # Path relative to start path
            rel_path = os.path.join(directory, path)
            # If the local version is a directory, error
            if os.path.isdir(rel_path):
                error(
                    "In patches path '{0}' is a file, ".format(rel_path)
                    + "but it exists in the upstream branch as a directory.",
                    exit=True,
                )
            # If the file already exists, warn
            if os.path.isfile(rel_path):
                warning("  File '{0}' already exists, overwriting...".format(rel_path))
                execute_command("git rm {0}".format(rel_path), shell=True)
            # For package.xml/stack.xml, template the version in; otherwise copy the data as-is
            if path in ["package.xml", "stack.xml"]:
                info("  Templating '{0}' into upstream branch...".format(rel_path))
                file_data = show("bloom", os.path.join(root_path, rel_path))
                file_data = file_data.replace(":{version}", version)
            else:
                info("  Overlaying '{0}' into upstream branch...".format(rel_path))
                file_data = show("bloom", os.path.join(root_path, rel_path))
            # Write file
            with open(rel_path, "wb") as f:
                f.write(file_data)
            # Add it with git
            execute_command("git add {0}".format(rel_path), shell=True)
Example #41
0
File: release.py Project: po1/bloom
def generate_ros_distro_diff(track, repository, distro, distro_file_url=ROS_DISTRO_FILE):
    distro_file_url = distro_file_url.format(distro)
    distro_file_raw = fetch_distro_file(distro_file_url)
    distro_file = yaml.load(distro_file_raw)
    with inbranch('upstream'):
        # Check for package.xml(s)
        try:
            from catkin_pkg.packages import find_packages
        except ImportError:
            error("catkin_pkg was not detected, please install it.",
                  file=sys.stderr, exit=True)
        packages = find_packages(os.getcwd())
        if len(packages) == 0:
            warning("No packages found, will not generate 'package: path' entries for rosdistro.")
        track_dict = get_tracks_dict_raw()['tracks'][track]
        last_version = track_dict['last_version']
        release_inc = track_dict['release_inc']
        distro_file['repositories'][repository]['version'] = '{0}-{1}'.format(last_version, release_inc)
        if packages and (len(packages) > 1 or packages.keys()[0] != '.'):
            distro_file['repositories'][repository]['packages'] = {}
            for path, package in packages.iteritems():
                distro_file['repositories'][repository]['packages'][package.name] = path
    distro_file_name = distro_file_url.split('/')[-1]
    # distro_dump_orig = yaml.dump(distro_file_orig, indent=2, default_flow_style=False)
    distro_dump = yaml.dump(distro_file, indent=2, default_flow_style=False)
    udiff = difflib.unified_diff(distro_file_raw.splitlines(), distro_dump.splitlines(),
                                 fromfile=distro_file_name, tofile=distro_file_name)
    if udiff:
        info("Unified diff for the ROS distro file located at '{0}':".format(distro_file_url))
        for line in udiff:
            if line.startswith('@@'):
                line = fmt('@{cf}' + line)
            if line.startswith('+'):
                if not line.startswith('+++'):
                    line += '\n'
                line = fmt('@{gf}' + line)
            if line.startswith('-'):
                if not line.startswith('---'):
                    line += '\n'
                line = fmt('@{rf}' + line)
            if line.startswith(' '):
                line += '\n'
            info(line, use_prefix=False, end='')
    else:
        warning("This release resulted in no changes to the ROS distro file...")
Example #42
0
File: git.py Project: po1/bloom
 def __init__(self, directory=None, track_all=True):
     self.disabled = get_git_clone_state()
     if self.disabled:
         warning('Skipping transactional safety mechanism, be careful...')
         return
     self.tmp_dir = None
     self.directory = directory if directory is not None else os.getcwd()
     if get_root(directory) is None:
         raise RuntimeError("Provided directory, '" + str(directory) +
                            "', is not a git repository")
     self.track_all = track_all
     if self.track_all:
         track_branches(directory=directory)
     self.current_branches = get_branches()
     self.tmp_dir = tempfile.mkdtemp()
     self.clone_dir = os.path.join(self.tmp_dir, 'clone')
     self.repo_url = 'file://' + os.path.abspath(self.directory)
     execute_command('git clone ' + self.repo_url + ' ' + self.clone_dir)
Example #43
0
 def handle_arguments(self, args):
     self.interactive = args.interactive
     self.debian_inc = args.debian_inc
     self.os_name = args.os_name
     self.distros = args.distros
     if self.distros in [None, []]:
         index = rosdistro.get_index(rosdistro.get_index_url())
         distribution_file = rosdistro.get_distribution_file(index, self.rosdistro)
         if self.os_name not in distribution_file.release_platforms:
             if args.os_not_required:
                 warning("No platforms defined for os '{0}' in release file for the "
                         "'{1}' distro. This os was not required; continuing without error."
                         .format(self.os_name, self.rosdistro))
                 sys.exit(0)
             error("No platforms defined for os '{0}' in release file for the '{1}' distro."
                   .format(self.os_name, self.rosdistro), exit=True)
         self.distros = distribution_file.release_platforms[self.os_name]
     self.install_prefix = args.install_prefix
     if args.install_prefix is None:
         self.install_prefix = self.default_install_prefix
     self.prefix = args.prefix
     self.branches = match_branches_with_prefix(self.prefix, get_branches, prune=not args.match_all)
     if len(self.branches) == 0:
         error(
             "No packages found, check your --prefix or --src arguments.",
             exit=True
         )
     self.packages = {}
     self.tag_names = {}
     self.names = []
     self.branch_args = []
     self.debian_branches = []
     for branch in self.branches:
         package = get_package_from_branch(branch)
         if package is None:
             # This is an ignored package
             continue
         self.packages[package.name] = package
         self.names.append(package.name)
         args = self.generate_branching_arguments(package, branch)
         # First branch is debian/[<rosdistro>/]<package>
         self.debian_branches.append(args[0][0])
         self.branch_args.extend(args)
Example #44
0
def get_package_data(branch_name, directory=None):
    """
    Gets package data about the package(s) in the current branch.

    :param branch_name: name of the branch you are searching on (log use only)
    """
    debug("Looking for packages in '{0}'... ".format(branch_name), end='')
    ## Check for package.xml(s)
    repo_dir = directory if directory else os.getcwd()
    packages = find_packages(repo_dir)
    if type(packages) == dict and packages != {}:
        if len(packages) > 1:
            debug("found " + str(len(packages)) + " packages.",
                 use_prefix=False)
        else:
            debug("found '" + packages.values()[0].name + "'.",
                 use_prefix=False)
        version = verify_equal_package_versions(packages.values())
        return [p.name for p in packages.values()], version, packages
    ## Check for stack.xml
    has_rospkg = False
    try:
        import rospkg
        has_rospkg = True
    except ImportError:
        debug(ansi('redf') + "failed." + ansi('reset'), use_prefix=False)
        warning("rospkg was not detected, stack.xml discovery is disabled",
                file=sys.stderr)
    if not has_rospkg:
        error("no package.xml(s) found, and no name specified with "
              "'--package-name', aborting.", use_prefix=False)
        return code.NO_PACKAGE_XML_FOUND
    stack_path = os.path.join(repo_dir, 'stack.xml')
    if os.path.exists(stack_path):
        debug("found stack.xml.", use_prefix=False)
        stack = rospkg.stack.parse_stack_file(stack_path)
        return stack.name, stack.version, stack
    # Otherwise we have a problem
    debug("failed.", use_prefix=False)
    error("no package.xml(s) or stack.xml found, and not name "
          "specified with '--package-name', aborting.", use_prefix=False)
    return code.NO_PACKAGE_XML_FOUND
Example #45
0
 def __init__(self, directory=None, track_all=True):
     self.disabled = get_git_clone_state()
     self.disabled_quiet = get_git_clone_state_quiet()
     if self.disabled:
         if not self.disabled_quiet:
             warning("Skipping transactional safety mechanism, be careful...")
         return
     self.tmp_dir = None
     self.directory = directory if directory is not None else os.getcwd()
     if get_root(directory) is None:
         raise RuntimeError("Provided directory, '" + str(directory) + "', is not a git repository")
     self.track_all = track_all
     if self.track_all:
         track_branches(directory=directory)
     self.current_branches = get_branches()
     self.tmp_dir = tempfile.mkdtemp()
     self.clone_dir = os.path.join(self.tmp_dir, "clone")
     self.repo_url = "file://" + os.path.abspath(self.directory)
     info(fmt("@!@{gf}+++@| Cloning working copy for safety"))
     execute_command("git clone " + self.repo_url + " " + self.clone_dir)
Example #46
0
 def __init__(self, directory=None, track_all=True):
     self.disabled = get_git_clone_state()
     self.disabled_quiet = get_git_clone_state_quiet()
     if self.disabled:
         if not self.disabled_quiet:
             warning('Skipping transactional safety mechanism, be careful...')
         return
     self.tmp_dir = None
     self.directory = directory if directory is not None else os.getcwd()
     if get_root(directory) is None:
         raise RuntimeError("Provided directory, '" + str(directory) +
                            "', is not a git repository")
     self.track_all = track_all
     if self.track_all:
         track_branches(directory=directory)
     self.current_branches = get_branches()
     self.tmp_dir = tempfile.mkdtemp()
     self.clone_dir = os.path.join(self.tmp_dir, 'clone')
     self.repo_url = 'file://' + os.path.abspath(self.directory)
     info(fmt("@!@{gf}+++@| Cloning working copy for safety"))
     execute_command('git clone ' + self.repo_url + ' ' + self.clone_dir)
Example #47
0
def _set_trim_sub_dir(sub_dir, force, config, directory):
    debug("_set_trim_sub_dir(" + str(sub_dir) + ", " + str(force) + ", " +
          str(config) + ", " + str(directory) + ")")
    if sub_dir is not None:
        if config['trim'] != '' and config['trim'] != sub_dir:
            warning("You are trying to set the trim sub directory to " +
                    sub_dir + ", but it is already set to " + config['trim'] +
                    ".")
            if not force:
                warning("Changing the sud directory is not advised. "
                        "If you are sure you want to do this, use "
                        "'--force'")
                return None
            else:
                warning("Forcing the change of the sub directory.")
        # Make the sub_dir absolute
        git_root = get_root(directory)
        sub_dir_abs = os.path.join(git_root, sub_dir)
        # Make sure it is a directory
        if not os.path.isdir(sub_dir_abs):
            error("The given sub directory, (" + sub_dir + ") does not "
                  "exist in the git repository at " + git_root)
            return None
        # Set the trim sub directory
        config['trim'] = sub_dir
    return config
Example #48
0
 def post_patch(self, destination, color='bluef'):
     if destination in self.debian_branches:
         return
     # Tag after patches have been applied
     with inbranch(destination):
         # Tag
         tag_name = self.tag_names[destination]
         if tag_exists(tag_name):
             if self.interactive:
                 warning("Tag exists: " + tag_name)
                 warning("Do you wish to overwrite it?")
                 if not maybe_continue('y'):
                     error("Answered no to continue, aborting.", exit=True)
             else:
                 warning("Overwriting tag: " + tag_name)
         else:
             info("Creating tag: " + tag_name)
         execute_command('git tag -f ' + tag_name)
     # Report of success
     name = destination.split('/')[-1]
     package = self.packages[name]
     distro = destination.split('/')[-2]
     info(ansi(color) + "####" + ansi('reset'), use_prefix=False)
     info(ansi(color) + "#### " + ansi('greenf') + "Successfully" +
          ansi(color) + " generated '" + ansi('boldon') + distro +
          ansi('boldoff') + "' debian for package"
          " '" + ansi('boldon') + package.name + ansi('boldoff') + "'" +
          " at version '" + ansi('boldon') + package.version + "-" +
          str(self.debian_inc) + ansi('boldoff') + "'" + ansi('reset'),
          use_prefix=False)
     info(ansi(color) + "####\n" + ansi('reset'), use_prefix=False)
Example #49
0
def export_upstream(uri, tag, vcs_type, output_dir, show_uri, name):
    tag = tag if tag != ':{none}' else None
    output_dir = output_dir or os.getcwd()
    if uri.startswith('git@'):
        uri_is_path = False
    else:
        uri_parsed = urlparse(uri)
        uri = uri if uri_parsed.scheme else uri_parsed.path
        uri_is_path = False if uri_parsed.scheme else True
    name = name or 'upstream'
    with temporary_directory() as tmp_dir:
        info("Checking out repository at '{0}'".format(show_uri or uri) +
            (" to reference '{0}'.".format(tag) if tag else '.'))
        if uri_is_path:
            upstream_repo = get_vcs_client(vcs_type, uri)
        else:
            repo_path = os.path.join(tmp_dir, 'upstream')
            upstream_repo = get_vcs_client(vcs_type, repo_path)
            if not upstream_repo.checkout(uri, tag or ''):
                error("Failed to clone repository at '{0}'".format(uri) +
                      (" to reference '{0}'.".format(tag) if tag else '.'),
                      exit=True)
        tarball_prefix = '{0}-{1}'.format(name, tag) if tag else name
        tarball_path = os.path.join(output_dir, tarball_prefix)
        full_tarball_path = tarball_path + '.tar.gz'
        info("Exporting to archive: '{0}'".format(full_tarball_path))
        if not upstream_repo.export_repository(tag or '', tarball_path):
            error("Failed to create archive of upstream repository at '{0}'"
                  .format(show_uri))
            if tag:
                with change_directory(upstream_repo.get_path()):
                    if not tag_exists(tag):
                        warning("'{0}' is not a tag in the upstream repository..."
                                .format(tag))
                    if not branch_exists(tag):
                        warning("'{0}' is not a branch in the upstream repository..."
                                .format(tag))
        if not os.path.exists(full_tarball_path):
            error("Tarball was not created.", exit=True)
        info("md5: {0}".format(calculate_file_md5(full_tarball_path)))
Example #50
0
 def post_patch(self, destination, color='bluef'):
     # Tag after patches have been applied
     with inbranch(destination):
         # Tag
         tag_name = self.tag_names[destination]
         if tag_exists(tag_name):
             if self.interactive:
                 warning("Tag exists: " + tag_name)
                 warning("Do you wish to overwrite it?")
                 if not maybe_continue('y'):
                     error("Answered no to continue, aborting.")
                     return code.ANSWERED_NO_TO_CONTINUE
             else:
                 warning("Overwriting tag: " + tag_name)
         else:
             info("Creating tag: " + tag_name)
         execute_command('git tag -f ' + tag_name)
     # Report of success
     name = destination.split('/')[-1]
     stackage, kind = self.packages[name]
     distro = destination.split('/')[-2]
     info(ansi(color) + "####" + ansi('reset'), use_prefix=False)
     info(
         ansi(color) + "#### " + ansi('greenf') + "Successfully" + \
         ansi(color) + " generated '" + ansi('boldon') + distro + \
         ansi('boldoff') + "' debian for " + kind + \
         " '" + ansi('boldon') + stackage.name + ansi('boldoff') + "'" + \
         " at version '" + ansi('boldon') + stackage.version + \
         "-" + str(self.debian_inc) + ansi('boldoff') + "'" + \
         ansi('reset'),
         use_prefix=False
     )
     info(ansi(color) + "####\n" + ansi('reset'), use_prefix=False)
Example #51
0
 def post_patch(self, destination, color='bluef'):
     if destination in self.rpm_branches:
         return
     # Tag after patches have been applied
     with inbranch(destination):
         # Tag
         tag_name = self.tag_names[destination]
         if tag_exists(tag_name):
             if self.interactive:
                 warning("Tag exists: " + tag_name)
                 warning("Do you wish to overwrite it?")
                 if not maybe_continue('y'):
                     error("Answered no to continue, aborting.", exit=True)
             else:
                 warning("Overwriting tag: " + tag_name)
         else:
             info("Creating tag: " + tag_name)
         execute_command('git tag -f ' + tag_name)
     # Report of success
     name = destination.split('/')[-1]
     package = self.packages[name]
     distro = destination.split('/')[-2]
     info(ansi(color) + "####" + ansi('reset'), use_prefix=False)
     info(
         ansi(color) + "#### " + ansi('greenf') + "Successfully" +
         ansi(color) + " generated '" + ansi('boldon') + distro +
         ansi('boldoff') + "' RPM for package"
         " '" + ansi('boldon') + package.name + ansi('boldoff') + "'" +
         " at version '" + ansi('boldon') + package.version +
         "-" + str(self.rpm_inc) + ansi('boldoff') + "'" +
         ansi('reset'),
         use_prefix=False
     )
     info(ansi(color) + "####\n" + ansi('reset'), use_prefix=False)
Example #52
0
    def post_patch(self, destination):
        # Figure out the version of the given package
        if self.name is not None:
            warning("""\
Cannot automatically tag the release because this is not a catkin project.""")
            warning("""\
Please checkout the release branch and then create a tag manually with:""")
            warning("  git checkout release/" + str(self.name))
            warning("  git tag -f release/" + str(self.name) + "/<version>")
            return
        with inbranch(destination):
            name, version, packages = get_package_data(destination)
        # Execute git tag
        release_tag = destination + '/' + version + '-' + self.release_inc
        execute_command('git tag ' + release_tag)
Example #53
0
def ensure_clean_working_env(force=False, git_status=True, directory=None):
    """
    Checks the environment to ensure it is clean, raises SystemExit otherwise.

    Clean is defined as:
        - In a git repository
        - In a valid branch (force overrides)
        - Does not have local changes (force overrides)
        - Does not have untracked files (force overrides)

    :param force: If True, overrides a few of the fail conditions
    :param directory: directory in which to run this command

    :raises: subprocess.CalledProcessError if any git calls fail
    :raises: SystemExit if any git calls fail
    """
    def ecwe_fail(code, show_git_status):
        if not bloom.util._quiet and show_git_status:
            info('\n++ git status:\n', use_prefix=False)
            os.system('git status')
        error(code, exit=True)

    # Is it a git repo
    if get_root(directory) is None:
        error("Not in a valid git repository", exit=True)
    # Are we on a branch?
    current_branch = get_current_branch(directory)
    if current_branch is None:
        msg = "Could not determine current branch"
        if not force:
            return ecwe_fail(msg, git_status)
        else:
            warning(msg)
    # Are there local changes?
    if has_changes(directory):
        msg = "Current git working branch has local changes"
        if not force:
            return ecwe_fail(msg, git_status)
        else:
            warning(msg)
    # Are there untracked files or directories?
    if has_untracked_files(directory):
        msg = "Current git working branch has untracked files/directories"
        if not force:
            return ecwe_fail(msg, git_status)
        else:
            warning(msg)
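An equivalent cleanliness check can be done with git plumbing alone; a rough sketch of the same idea, not the bloom helpers used above:

import subprocess

def working_tree_is_clean(directory=None):
    # Empty porcelain output means no local changes and no untracked files.
    out = subprocess.check_output(['git', 'status', '--porcelain'], cwd=directory)
    return out.strip() == b''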
Example #54
0
 def place_template_files(self, build_type, rpm_dir='rpm'):
     # Create/Clean the rpm folder
     if os.path.exists(rpm_dir):
         if self.interactive:
             warning("rpm directory exists: " + rpm_dir)
             warning("Do you wish to overwrite it?")
             if not maybe_continue('y'):
                 error("Answered no to continue, aborting.", exit=True)
         else:
             warning("Overwriting rpm directory: " + rpm_dir)
         execute_command('git rm -rf ' + rpm_dir)
         execute_command('git commit -m "Clearing previous rpm folder"')
         if os.path.exists(rpm_dir):
             shutil.rmtree(rpm_dir)
     # Use generic place template files command
     place_template_files('.', build_type, gbp=True)
     # Commit results
     execute_command('git add ' + rpm_dir)
     execute_command('git commit -m "Placing rpm template files"')
Example #55
0
 def place_template_files(self, arch_dir='arch'):
     # Create/Clean the arch folder
     if os.path.exists(arch_dir):
         if self.interactive:
             warning("arch directory exists: " + arch_dir)
             warning("Do you wish to overwrite it?")
             if not maybe_continue('y'):
                 error("Answered no to continue, aborting.", exit=True)
         else:
             warning("Overwriting arch directory: " + arch_dir)
         execute_command('git rm -rf ' + arch_dir)
         execute_command('git commit -m "Clearing previous arch folder"')
         if os.path.exists(arch_dir):
             shutil.rmtree(arch_dir)
     # Use generic place template files command
     place_template_files('.')
     # Commit results
     execute_command('git add ' + arch_dir)
     execute_command('git commit -m "Placing arch template files"')
Example #56
0
def update_track(track_dict):
    for key, value in DEFAULT_TEMPLATE.items():
        if key in ['actions']:
            if track_dict[key] != DEFAULT_TEMPLATE[key]:
                warning(
                    "Your track's '{0}' configuration is not the same as the default."
                    .format(key))
                default = 'n'
                if key == 'actions' and track_dict[key] in ACTION_LIST_HISTORY:
                    default = 'y'
                    warning("Unless you have manually modified your 'actions' "
                            "(the commands which get run for a release), "
                            "you should update to the new default.")
                warning("Should it be updated to the default setting?")
                if maybe_continue(default):
                    track_dict[key] = DEFAULT_TEMPLATE[key]
        elif key not in track_dict:
            value = value.default if isinstance(value, PromptEntry) else value
            track_dict[key] = value
    return track_dict
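Stripped of the prompts, update_track reduces to a defaults merge; a minimal sketch with a stand-in template (not bloom's real DEFAULT_TEMPLATE):

DEFAULT_TEMPLATE = {'actions': ['echo release'], 'devel_branch': None}  # illustrative only

def fill_missing_keys(track_dict, template=DEFAULT_TEMPLATE):
    for key, value in template.items():
        track_dict.setdefault(key, value)
    return track_dict

print(fill_missing_keys({'actions': ['custom action']}))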
Example #57
0
def generate_substitutions_from_package(package,
                                        os_name,
                                        os_version,
                                        ros_distro,
                                        installation_prefix='/usr',
                                        deb_inc=0,
                                        peer_packages=None,
                                        releaser_history=None,
                                        fallback_resolver=None,
                                        native=False):
    peer_packages = peer_packages or []
    data = {}
    # Name, Version, Description
    data['Name'] = package.name
    data['Version'] = package.version
    data['Description'] = format_description(package.description)
    # Websites
    websites = [str(url) for url in package.urls if url.type == 'website']
    homepage = websites[0] if websites else ''
    if homepage == '':
        warning("No homepage set, defaulting to ''")
    data['Homepage'] = homepage
    # Debian Increment Number
    data['DebianInc'] = '' if native else '-{0}'.format(deb_inc)
    # Debian Package Format
    data['format'] = 'native' if native else 'quilt'
    # Package name
    data['Package'] = sanitize_package_name(package.name)
    # Installation prefix
    data['InstallationPrefix'] = installation_prefix
    # Resolve dependencies
    depends = package.run_depends + package.buildtool_export_depends
    build_depends = package.build_depends + package.buildtool_depends + package.test_depends
    unresolved_keys = depends + build_depends + package.replaces + package.conflicts
    # The installer key is not considered here; it was already validated during the earlier key checks
    resolved_deps = resolve_dependencies(
        unresolved_keys, os_name, os_version, ros_distro,
        peer_packages + [d.name for d in package.replaces + package.conflicts],
        fallback_resolver)
    data['Depends'] = sorted(set(format_depends(depends, resolved_deps)))
    data['BuildDepends'] = sorted(
        set(format_depends(build_depends, resolved_deps)))
    data['Replaces'] = sorted(
        set(format_depends(package.replaces, resolved_deps)))
    data['Conflicts'] = sorted(
        set(format_depends(package.conflicts, resolved_deps)))
    # Set the distribution
    data['Distribution'] = os_version
    # Use the time stamp to set the date strings
    stamp = datetime.datetime.now(tz.tzlocal())
    data['Date'] = stamp.strftime('%a, %d %b %Y %T %z')
    data['YYYY'] = stamp.strftime('%Y')
    # Maintainers
    maintainers = []
    for m in package.maintainers:
        maintainers.append(str(m))
    data['Maintainer'] = maintainers[0]
    data['Maintainers'] = ', '.join(maintainers)
    # Changelog
    changelogs = get_changelogs(package, releaser_history)
    if changelogs and package.version not in [x[0] for x in changelogs]:
        warning("")
        warning(
            "A CHANGELOG.rst was found, but no changelog for this version was found."
        )
        warning(
            "You REALLY should have a entry (even a blank one) for each version of your package."
        )
        warning("")
    if not changelogs:
        # Ensure at least a minimal changelog
        changelogs = []
    if package.version not in [x[0] for x in changelogs]:
        changelogs.insert(0, (
            package.version, get_rfc_2822_date(datetime.datetime.now()),
            '  * Autogenerated, no changelog for this version found in CHANGELOG.rst.',
            package.maintainers[0].name, package.maintainers[0].email))
    bad_changelog = False
    # Make sure that the first change log is the version being released
    if package.version != changelogs[0][0]:
        error("")
        error("The version of the first changelog entry '{0}' is not the "
              "same as the version being currently released '{1}'.".format(
                  package.version, changelogs[0][0]))
        bad_changelog = True
    # Make sure that the current version is the latest in the changelog
    for changelog in changelogs:
        if parse_version(package.version) < parse_version(changelog[0]):
            error("")
            error(
                "There is at least one changelog entry, '{0}', which has a "
                "newer version than the version of package '{1}' being released, '{2}'."
                .format(changelog[0], package.name, package.version))
            bad_changelog = True
    if bad_changelog:
        error("This is almost certainly by mistake, you should really take a "
              "look at the changelogs for the package you are releasing.")
        error("")
        if not maybe_continue('n', 'Continue anyways'):
            sys.exit("User quit.")
    data['changelogs'] = changelogs
    # Use debhelper version 7 for oneric, otherwise 9
    data['debhelper_version'] = 7 if os_version in ['oneiric'] else 9
    # Summarize dependencies
    summarize_dependency_mapping(data, depends, build_depends, resolved_deps)

    def convertToUnicode(obj):
        if sys.version_info.major == 2:
            if isinstance(obj, str):
                return unicode(obj.decode('utf8'))
            elif isinstance(obj, unicode):
                return obj
        else:
            if isinstance(obj, bytes):
                return str(obj.decode('utf8'))
            elif isinstance(obj, str):
                return obj
        if isinstance(obj, list):
            for i, val in enumerate(obj):
                obj[i] = convertToUnicode(val)
            return obj
        elif isinstance(obj, type(None)):
            return None
        elif isinstance(obj, tuple):
            obj_tmp = list(obj)
            for i, val in enumerate(obj_tmp):
                obj_tmp[i] = convertToUnicode(obj_tmp[i])
            return tuple(obj_tmp)
        elif isinstance(obj, int):
            return obj
        raise RuntimeError('need to deal with type %s' % (str(type(obj))))

    for item in data.items():
        data[item[0]] = convertToUnicode(item[1])

    return data
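The version-ordering guard and the Date field above rely only on pkg_resources and datetime; a compact sketch of both with made-up values:

import datetime
from dateutil import tz
from pkg_resources import parse_version

release_version = '1.2.0'  # hypothetical
changelog_versions = ['1.2.0', '1.1.0']  # newest first, as expected above
print(any(parse_version(release_version) < parse_version(v) for v in changelog_versions))

stamp = datetime.datetime.now(tz.tzlocal())
print(stamp.strftime('%a, %d %b %Y %T %z'))  # RFC 2822-style timestamp, as in the snippet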
Example #58
0
def execute_track(track,
                  track_dict,
                  release_inc,
                  pretend=True,
                  debug=False,
                  fast=False):
    info("Processing release track settings for '{0}'".format(track))
    settings = process_track_settings(track_dict, release_inc)
    # setup extra settings
    archive_dir_path = tempfile.mkdtemp()
    settings['archive_dir_path'] = archive_dir_path
    if settings['release_tag'] != ':{none}':
        archive_file = '{name}-{release_tag}.tar.gz'.format(**settings)
    else:
        archive_file = '{name}.tar.gz'.format(**settings)
    settings['archive_path'] = os.path.join(archive_dir_path, archive_file)
    # execute actions
    info("", use_prefix=False)
    info("Executing release track '{0}'".format(track))
    for action in track_dict['actions']:
        if 'bloom-export-upstream' in action and settings['vcs_type'] == 'tar':
            warning("Explicitly skipping bloom-export-upstream for tar.")
            settings['archive_path'] = settings['vcs_uri']
            continue
        templated_action = template_str(action, settings)
        info(fmt("@{bf}@!==> @|@!" + sanitize(str(templated_action))))
        if pretend:
            continue
        stdout = None
        stderr = None
        if bloom.util._quiet:
            stdout = subprocess.PIPE
            stderr = subprocess.STDOUT
        if debug and 'DEBUG' not in os.environ:
            os.environ['DEBUG'] = '1'
        if fast and 'BLOOM_UNSAFE' not in os.environ:
            os.environ['BLOOM_UNSAFE'] = '1'
        templated_action = templated_action.split()
        templated_action[0] = find_full_path(templated_action[0])
        p = subprocess.Popen(templated_action,
                             stdout=stdout,
                             stderr=stderr,
                             shell=False,
                             env=os.environ.copy())
        out, err = p.communicate()
        if bloom.util._quiet:
            info(out, use_prefix=False)
        ret = p.returncode
        if ret > 0:
            if ('bloom-generate' in templated_action[0]
                    and ret == code.GENERATOR_NO_ROSDEP_KEY_FOR_DISTRO):
                error(
                    fmt(_error +
                        "The following generator action reported that it is missing one or more"
                        ))
                error(
                    fmt("    @|rosdep keys, but that the key exists in other platforms:"
                        ))
                error(fmt("@|'@!{0}'@|").format(templated_action))
                info('', use_prefix=False)
                error(
                    fmt("@|If you are @!@_@{rf}absolutely@| sure that this key is unavailable for the platform in"
                        ))
                error(
                    fmt("@|question, the generator can be skipped and you can proceed with the release."
                        ))
                if maybe_continue(
                        'n',
                        'Skip generator action and continue with release'):
                    info("\nAction skipped, continuing with release.\n")
                    continue

                info('', use_prefix=False)

            error(fmt(_error + "Error running command '@!{0}'@|").format(
                templated_action),
                  exit=True)
        info('', use_prefix=False)
    if not pretend:
        # Update the release_inc
        tracks_dict = get_tracks_dict_raw()
        tracks_dict['tracks'][track]['release_inc'] = settings['release_inc']
        tracks_dict['tracks'][track]['last_version'] = settings['version']
        # if release tag is set to ask and a custom value is used
        if settings['version'] != settings['release_tag']:
            tracks_dict['tracks'][track]['last_release'] = settings[
                'release_tag']
        write_tracks_dict_raw(
            tracks_dict,
            'Updating release inc to: ' + str(settings['release_inc']))
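Each action is a template expanded against the track settings and run as a subprocess; a stripped-down sketch of that loop (the action and settings are illustrative, not a real track configuration):

import os
import subprocess

settings = {'name': 'upstream', 'release_tag': '1.2.0'}  # made-up track settings
actions = ['echo releasing {name} at {release_tag}']

for action in actions:
    cmd = action.format(**settings).split()
    proc = subprocess.Popen(cmd, env=os.environ.copy())
    proc.communicate()
    if proc.returncode != 0:
        raise SystemExit("Error running command: " + ' '.join(cmd))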