Example #1
def find_package(self, name):
    if 'base' in self.options and self.options['base'] is not None:
        base_abspath = self.env.relfn2path(self.options['base'])[1]
        packages = find_packages(base_abspath)
        package = next((package for package in packages.values()
                        if package.name == name), None)
    else:
        if not ROSObjectDescription._ros_packages:
            packages = {}
            base_paths = self.env.config.ros_base_path
            if not base_paths:
                base_paths = ['.']
            for base_path in base_paths:
                if base_path.startswith('/'):
                    base_abspath = base_path
                else:
                    base_abspath = self.env.relfn2path(base_path)[1]
                found_packages = find_packages(base_abspath)
                for package in found_packages.values():
                    packages[package.name] = package
            ROSObjectDescription._ros_packages = packages
        package = ROSObjectDescription._ros_packages.get(name, None)
    if not package:
        self.state_machine.reporter.warning(
            'cannot find package %s' % name,
            line=self.lineno)
    return package
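The snippet assumes `_ros_packages` is a class attribute used as a lazily filled, class-wide cache. A minimal sketch of that assumption:

from sphinx.directives import ObjectDescription


class ROSObjectDescription(ObjectDescription):
    # Hypothetical declaration: class-wide package cache, filled lazily
    # by find_package() on first use.
    _ros_packages = None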
Example #2
def test_find_packages_invalid_version():
    version = ':{version}'
    path = 'src/foo'
    _create_pkg_in_dir(path, version)
    try:
        find_packages(path.split('/')[0])
        assert False, 'Must raise'
    except InvalidPackage as e:
        exception_message = str(e)
        assert version in exception_message
        assert path in exception_message
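The fixture `_create_pkg_in_dir` is not shown in this example; a minimal sketch of such a helper, assuming it writes a bare package.xml carrying the (intentionally invalid) version string:

import os

def _create_pkg_in_dir(path, version, name='foo'):
    # Hypothetical test fixture: write a minimal package.xml whose
    # <version> tag contains the intentionally invalid version string.
    os.makedirs(path, exist_ok=True)
    with open(os.path.join(path, 'package.xml'), 'w') as f:
        f.write(
            '<?xml version="1.0"?>\n'
            '<package format="2">\n'
            '  <name>%s</name>\n'
            '  <version>%s</version>\n'
            '  <description>test package</description>\n'
            '  <maintainer email="dev@example.com">dev</maintainer>\n'
            '  <license>BSD</license>\n'
            '</package>\n' % (name, version)
        )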
Example #3
def dry_run(context, packages, no_deps, start_with):
    # Print Summary
    log(context.summary())
    # Get all the packages in the context source space
    # Suppress warnings since this is a utility function
    workspace_packages = find_packages(context.source_space_abs, exclude_subspaces=True, warnings=[])
    # Find list of packages in the workspace
    packages_to_be_built, packages_to_be_built_deps, all_packages = determine_packages_to_be_built(
        packages, context, workspace_packages)
    # Assert start_with package is in the workspace
    verify_start_with_option(start_with, packages, all_packages, packages_to_be_built + packages_to_be_built_deps)
    if not no_deps:
        # Extend packages to be built to include their deps
        packages_to_be_built.extend(packages_to_be_built_deps)
        # Also re-sort
        packages_to_be_built = topological_order_packages(dict(packages_to_be_built))
    # Print packages
    log("Packages to be built:")
    max_name_len = str(max([len(pkg.name) for pth, pkg in packages_to_be_built]))
    prefix = clr('@{pf}' + ('------ ' if start_with else '- ') + '@|')
    for pkg_path, pkg in packages_to_be_built:
        build_type = get_build_type(pkg)
        if build_type == 'catkin' and 'metapackage' in [e.tagname for e in pkg.exports]:
            build_type = 'metapackage'
        if start_with and pkg.name == start_with:
            start_with = None
        log(clr("{prefix}@{cf}{name:<" + max_name_len + "}@| (@{yf}{build_type}@|)")
            .format(prefix=clr('@!@{kf}(skip)@| ') if start_with else prefix, name=pkg.name, build_type=build_type))
    log("Total packages: " + str(len(packages_to_be_built)))
def test_unicode_templating():
    with redirected_stdio():
        packages = dict([(pkg.name, pkg) for path, pkg in find_packages(test_data_dir).items()])
        assert 'bad_changelog_pkg' in packages
        chlogs = get_changelogs(packages['bad_changelog_pkg'])
        template = "@(changelog)"
        em.expand(template, {'changelog': chlogs[0][2]})
Example #5
def cmake_input_changed(source_path, build_path, cmake_args=None, filename='catkin_make'):
    # get current input
    packages = find_packages(source_path)
    package_paths = os.pathsep.join(sorted(packages.keys()))
    cmake_args = ' '.join(cmake_args) if cmake_args else ''

    # file to store current input
    changed = False
    input_filename = os.path.join(build_path, '%s.cache' % filename)
    if not os.path.exists(input_filename):
        changed = True
    else:
        # compare with previously stored input
        with open(input_filename, 'r') as f:
            previous_package_paths = f.readline().rstrip()
            previous_cmake_args = f.readline().rstrip()
        if package_paths != previous_package_paths:
            changed = True
        if cmake_args != previous_cmake_args:
            changed = True

    # store current input for next invocation
    with open(input_filename, 'w') as f:
        f.write('%s\n%s' % (package_paths, cmake_args))

    return changed
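A minimal usage sketch (the 'src' and 'build' paths are illustrative, and 'build' must exist because the cache file is written there):

import os

os.makedirs('build', exist_ok=True)
if cmake_input_changed('src', 'build', cmake_args=['-DCMAKE_BUILD_TYPE=Release']):
    print('package layout or CMake arguments changed; CMake should be re-run')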
Example #6
def get_dependencies(source_folder, build_depends=True, test_depends=True):
    # get the dependencies
    print("Get the dependencies of source folder %s" % source_folder)
    append_pymodules_if_needed()
    from catkin_pkg import packages

    pkgs = packages.find_packages(source_folder)
    local_packages = list(pkgs.keys())
    if len(pkgs) > 0:
        print("In folder %s, found packages %s" % (source_folder, ", ".join(local_packages)))
    else:
        raise BuildException(
            "Found no packages in folder %s. Are you sure your packages have a package.xml file?" % source_folder
        )

    depends = []
    for name, pkg in pkgs.items():
        if build_depends:
            for d in pkg.build_depends + pkg.buildtool_depends:
                if d.name not in depends and d.name not in local_packages:
                    depends.append(d.name)
        if test_depends:
            for d in pkg.test_depends:
                if d.name not in depends and d.name not in local_packages:
                    depends.append(d.name)

    return depends
Example #7
def generate_ros_distro_diff(track, repository, distro, distro_file_url, distro_file, distro_file_raw):
    with inbranch('upstream'):
        # Check for package.xml(s)
        try:
            from catkin_pkg.packages import find_packages
        except ImportError:
            debug(traceback.format_exc())
            error("catkin_pkg was not detected, please install it.",
                  file=sys.stderr, exit=True)
        packages = find_packages(os.getcwd())
        if len(packages) == 0:
            warning("No packages found, will not generate 'package: path' entries for rosdistro.")
        track_dict = get_tracks_dict_raw()['tracks'][track]
        last_version = track_dict['last_version']
        release_inc = track_dict['release_inc']
        if repository not in distro_file['repositories']:
            global _user_provided_release_url
            distro_file['repositories'][repository] = {'url': _user_provided_release_url or ''}
        distro_file['repositories'][repository]['version'] = '{0}-{1}'.format(last_version, release_inc)
        if packages and (len(packages) > 1 or list(packages.keys())[0] != '.'):
            distro_file['repositories'][repository]['packages'] = {}
            for path, package in packages.items():
                if os.path.basename(path) == package.name:
                    distro_file['repositories'][repository]['packages'][package.name] = None
                else:
                    distro_file['repositories'][repository]['packages'][package.name] = path
    distro_file_name = os.path.join('release', distro_file_url.split('/')[-1])
    distro_dump = yaml.dump(distro_file, indent=2, default_flow_style=False)
    if distro_file_raw != distro_dump:
        udiff = difflib.unified_diff(distro_file_raw.splitlines(), distro_dump.splitlines(),
                                     fromfile=distro_file_name, tofile=distro_file_name)
        temp_dir = tempfile.mkdtemp()
        version = distro_file['repositories'][repository]['version']
        udiff_file = os.path.join(temp_dir, repository + '-' + version + '.patch')
        udiff_raw = ''
        info("Unified diff for the ROS distro file located at '{0}':".format(udiff_file))
        for line in udiff:
            if line.startswith('@@'):
                udiff_raw += line
                line = fmt('@{cf}' + line)
            if line.startswith('+'):
                if not line.startswith('+++'):
                    line += '\n'
                udiff_raw += line
                line = fmt('@{gf}' + line)
            if line.startswith('-'):
                if not line.startswith('---'):
                    line += '\n'
                udiff_raw += line
                line = fmt('@{rf}' + line)
            if line.startswith(' '):
                line += '\n'
                udiff_raw += line
            info(line, use_prefix=False, end='')
        with open(udiff_file, 'w+') as f:
            f.write(udiff_raw)
        return udiff_file, distro_dump
    else:
        warning("This release resulted in no changes to the ROS distro file...")
    return None, None
Example #8
def get_package_list_for_remote_repo(
    ros_distro, repo, version, vcs, url, branch
):
    with temporary_directory() as tmp_dir:
        pkgs_to_ignore = []
        if version == 'latest':
            # If "latest" in a release repository, use combined upstream branch
            branch = 'upstream'
        logger.info("Cloning '{0}' from '{1}' @ '{2}' with '{3}'...".format(
            repo, url, branch, vcs
        ))
        client = vcstools.get_vcs_client(vcs, tmp_dir)
        if not client.checkout(url, version=branch, shallow=True):
            raise RuntimeError(
                "Failed to checkout branch '{0}' from '{1}'"
                .format(branch, url)
            )
        # Find the packages in the repository
        pkg_names = [pkg.name for pth, pkg in find_packages(tmp_dir).items()]
        if version == 'latest':
            # Also consider ignored list
            client.update(version='master')
            if os.path.exists(ros_distro + '.ignored'):
                with open(ros_distro + '.ignored', 'r') as f:
                    pkgs_to_ignore = [l.strip() for l in f.read().split() if l]
        return [p for p in pkg_names if p not in pkgs_to_ignore]
Example #9
def find_catkin_packages_in(path, verbose=False):
    """
    :returns: a list of packages in a given directory
    :raises: OSError if the path doesn't exist
    """
    global _catkin_packages_cache
    if not os.path.exists(path):
        raise OSError("given path '{0}' does not exist".format(path))
    if verbose:
        print("Looking for packages in '{0}'... ".format(path),
              end='', file=sys.stderr)
    path = os.path.abspath(path)
    if path in _catkin_packages_cache:
        if verbose:
            print('found in cache.', file=sys.stderr)
        return _catkin_packages_cache[path]
    packages = find_packages(path)
    if type(packages) == dict and packages != {}:
        package_names = [package.name for package in packages.values()]
        if verbose:
            print('found ' + str(len(packages)) + ' packages.')
            for package in package_names:
                print('    {0}'.format(package))
        _catkin_packages_cache[path] = package_names
        return package_names
    else:
        if verbose:
            print('failed to find packages.', file=sys.stderr)
        return []
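The function relies on a module-level cache declared elsewhere; a minimal usage sketch with that assumption made explicit:

_catkin_packages_cache = {}  # assumed module-level cache used by the function above

names = find_catkin_packages_in('.', verbose=True)
print('\n'.join(names))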
Example #10
def find_enclosing_package(search_start_path=None, ws_path=None, warnings=None, symlinks=True):
    """Get the package containing a specific directory.

    :param search_start_path: The path to crawl upward to find a package, CWD if None
    :param ws_path: The path at which the search should stop
    """

    search_path = search_start_path or getcwd(symlinks=symlinks)
    stop_path = ws_path or '/'
    child_path = '.'

    while search_path != stop_path:
        # Find packages under the search path
        try:
            pkgs = find_packages(search_path, warnings=warnings)
        except RuntimeError:
            return None

        # Check if the directory is a catkin package
        if child_path in pkgs:
            return pkgs[child_path].name

        # Update search path
        (search_path, child_path) = os.path.split(search_path)

    return None
Example #11
def _get_non_catkin_packages(basepath):
    from catkin_pkg.packages import find_packages
    pkgs = []
    packages = find_packages(basepath)
    for pkg in packages.values():
        if _is_non_catkin_package(pkg):
            pkgs.append(pkg.name)
    return pkgs
Example #12
def get_pkg_map():
    from catkin_pkg import packages, workspaces
    pkg_map = {}
    for ws in workspaces.get_spaces():
        pkgs = packages.find_packages(ws)
        for pkg in pkgs.values():
            if pkg.name not in pkg_map:
                pkg_map[pkg.name] = pkg
    return pkg_map
Example #13
def extract_notification_recipients(path):
    from catkin_pkg.packages import find_packages
    pkgs = find_packages(path)
    notification_recipients = set([])
    for pkg in pkgs.values():
        for m in pkg.maintainers:
            notification_recipients.add(m.email)
    if notification_recipients:
        print('Notification recipients: %s' % ' '.join(sorted(notification_recipients)))
Example #14
def main(args=None, get_subs_fn=None):
    get_subs_fn = get_subs_fn or get_subs
    _place_template_files = True
    _process_template_files = True
    package_path = os.getcwd()
    if args is not None:
        package_path = args.package_path or os.getcwd()
        _place_template_files = args.place_template_files
        _process_template_files = args.process_template_files

    pkgs_dict = find_packages(package_path)
    if len(pkgs_dict) == 0:
        sys.exit("No packages found in path: '{0}'".format(package_path))
    if len(pkgs_dict) > 1:
        sys.exit("Multiple packages found, " "this tool only supports one package at a time.")

    os_data = create_default_installer_context().get_os_name_and_version()
    os_name, os_version = os_data
    ros_distro = os.environ.get("ROS_DISTRO", "indigo")

    # Allow args overrides
    os_name = args.os_name or os_name
    os_version = args.os_version or os_version
    ros_distro = args.ros_distro or ros_distro

    # Summarize
    info(
        fmt("@!@{gf}==> @|")
        + fmt(
            "Generating debs for @{cf}%s:%s@| for package(s) %s"
            % (os_name, os_version, [p.name for p in pkgs_dict.values()])
        )
    )

    for path, pkg in pkgs_dict.items():
        template_files = None
        try:
            subs = get_subs_fn(pkg, os_name, os_version, ros_distro)
            if _place_template_files:
                # Place template files
                place_template_files(path)
            if _process_template_files:
                # Just process existing template files
                template_files = process_template_files(path, subs)
            if not _place_template_files and not _process_template_files:
                # If neither, do both
                place_template_files(path)
                template_files = process_template_files(path, subs)
            if template_files is not None:
                for template_file in template_files:
                    os.remove(os.path.normpath(template_file))
        except (KeyboardInterrupt, EOFError):
            sys.exit(1)
        except Exception as exc:
            debug(traceback.format_exc())
            error(type(exc).__name__ + ": " + str(exc), exit=True)
Example #15
def get_version(directory=None):
    packages = find_packages(basepath=directory if directory else os.getcwd())
    try:
        version = verify_equal_package_versions(packages.values())
    except RuntimeError as err:
        traceback.print_exc()
        error("Releasing multiple packages with different versions is "
                "not supported: " + str(err))
        sys.exit(1)
    return version
Example #16
def print_build_env(context, package_name):
    workspace_packages = find_packages(context.source_space_abs, exclude_subspaces=True, warnings=[])
    # Load the environment used by this package for building
    for pth, pkg in workspace_packages.items():
        if pkg.name == package_name:
            environ = get_env_loader(pkg, context)(os.environ)
            print(format_env_dict(environ))
            return 0
    print('[build] Error: Package `{}` not in workspace.'.format(package_name),
          file=sys.stderr)
    return 1
Example #17
def get_pkg_map():
    pkg_map = {}
    for ws in workspaces.get_spaces():
        pkgs = packages.find_packages(ws)
        for pkg in pkgs.values():
            # packages.find_packages(workspaces.get_spaces()) returns packages in high-priority-first order, so we should not overwrite a package map entry that has already been found
            # https://github.com/ros-infrastructure/catkin_pkg/blob/fa4b136b16e2d2886ab97257684f6bff243edefb/src/catkin_pkg/workspaces.py#L43
            # https://github.com/ros-infrastructure/catkin_pkg/blob/fa4b136b16e2d2886ab97257684f6bff243edefb/src/catkin_pkg/packages.py#L71
            if pkg.name not in pkg_map:
                pkg_map[pkg.name] = pkg
    return pkg_map
Example #18
def main(opts):
    folders = opts.folders or [os.getcwd()]
    for folder in folders:
        for pkg_pth, pkg in find_packages(folder).items():
            if not opts.depends_on or not [x for x in opts.depends_on if x not in [d.name for d in pkg.build_depends]]:
                print(pkg.name)
                if opts.deps:
                    for dep in pkg.build_depends:
                        print('  build: ' + dep.name)
                    for dep in pkg.run_depends:
                        print('  run:   ' + dep.name)
Example #19
def get_package_data(branch_name=None, directory=None, quiet=True, fuerte=False):
    """
    Gets package data about the package(s) in the current branch.

    :param branch_name: name of the branch you are searching on (log use only)
    """
    log = debug if quiet else info
    repo_dir = directory if directory else os.getcwd()
    stack_path = os.path.join(repo_dir, 'stack.xml')
    if os.path.exists(stack_path) and not fuerte:
        warning("stack.xml is present but going to be ignored because this is not a release for Fuerte.")
    if branch_name:
        log("Looking for packages in '{0}' branch... ".format(branch_name), end='')
    else:
        log("Looking for packages in '{0}'... ".format(directory or os.getcwd()), end='')
    ## Check for package.xml(s)
    if not fuerte:
        packages = find_packages(repo_dir)
    else:
        packages = {}
    if type(packages) == dict and packages != {}:
        if len(packages) > 1:
            log("found " + str(len(packages)) + " packages.",
                use_prefix=False)
        else:
            log("found '" + packages.values()[0].name + "'.",
                use_prefix=False)
        version = verify_equal_package_versions(packages.values())
        return [p.name for p in packages.values()], version, packages
    ## Check for stack.xml
    has_rospkg = False
    try:
        import rospkg
        has_rospkg = True
    except ImportError:
        log(ansi('redf') + "failed." + ansi('reset'), use_prefix=False)
        warning("rospkg was not detected, stack.xml discovery is disabled",
                file=sys.stderr)
    if not has_rospkg:
        error("no package.xml(s) found, and no name specified with "
              "'--package-name', aborting.", use_prefix=False, exit=True)
    if os.path.exists(stack_path):
        log("found stack.xml.", use_prefix=False)
        stack = rospkg.stack.parse_stack_file(stack_path)
        return stack.name, stack.version, stack
    # Otherwise we have a problem
    log("failed.", use_prefix=False)
    error("no package.xml(s) or stack.xml found, and no name "
          "specified with '--package-name', aborting.",
          use_prefix=False, exit=True)
Example #20
def main(args=None, get_subs_fn=None):
    get_subs_fn = get_subs_fn or get_subs
    _place_template_files = True
    _process_template_files = True
    package_path = os.getcwd()
    if args is not None:
        package_path = args.package_path or os.getcwd()
        _place_template_files = args.place_template_files
        _process_template_files = args.process_template_files

    pkgs_dict = find_packages(package_path)
    if len(pkgs_dict) == 0:
        sys.exit("No packages found in path: '{0}'".format(package_path))
    if len(pkgs_dict) > 1:
        sys.exit("Multiple packages found, this tool only supports one package at a time.")

    ros_distro = os.environ.get('ROS_DISTRO', 'groovy')

    # Allow args overrides
    ros_distro = args.ros_distro or ros_distro

    # Summarize
    info(fmt("@!@{gf}==> @|") +
         fmt("Generating Homebrew formula for package(s) %s" %
            ([p.name for p in pkgs_dict.values()])))

    for path, pkg in pkgs_dict.items():
        template_files = None
        try:
            subs = get_subs_fn(pkg, ros_distro)
            if _place_template_files:
                # Place template files
                place_template_files(path)
            if _process_template_files:
                # Just process existing template files
                template_files = process_template_files(path, subs)
            if not _place_template_files and not _process_template_files:
                # If neither, do both
                place_template_files(path)
                template_files = process_template_files(path, subs)
            if template_files is not None:
                for template_file in template_files:
                    os.remove(os.path.normpath(template_file))
        except (KeyboardInterrupt, EOFError):
            sys.exit(1)
        except Exception as exc:
            debug(traceback.format_exc())
            error(type(exc).__name__ + ": " + str(exc), exit=True)
Example #21
def get_built_unbuilt_packages(context, workspace_packages):
    """Get list of packages in workspace which have not been built."""

    # Get the names of all packages which have already been built
    built_packages = set([
        pkg.name for (path, pkg) in
        find_packages(context.package_metadata_path(), warnings=[]).items()])

    # Get names of all unbuilt packages
    unbuilt_pkgs = set()
    for path, pkg in workspace_packages.items():
        if 'metapackage' not in [e.tagname for e in pkg.exports]:
            if pkg.name not in built_packages:
                unbuilt_pkgs.add(pkg.name)

    return built_packages, unbuilt_pkgs
Example #22
def test_validate_metapackage(self):
    pkgs_dict = find_packages(test_data_dir)
    for path, package in pkgs_dict.items():
        path = os.path.join(test_data_dir, path)
        assert package.name in test_expectations, 'Unknown test %s' % package.name
        exc, excreg, warnreg = test_expectations[package.name]
        with assert_warning(warnreg):
            if exc is not None:
                if excreg is not None:
                    with self.assertRaisesRegex(exc, excreg):
                        _validate_metapackage(path, package)
                else:
                    with self.assertRaises(exc):
                        _validate_metapackage(path, package)
            else:
                _validate_metapackage(path, package)
Example #23
def _branch_packages(src, prefix, patch, interactive, directory=None):
    # Ensure we are on the correct src branch
    current_branch = get_current_branch()
    if current_branch != src:
        info("Changing to specified source branch " + src)
        execute_command('git checkout ' + src, cwd=directory)
    # Get packages
    repo_dir = directory if directory else os.getcwd()
    packages = find_packages(repo_dir)
    if not packages:
        error("No package.xml(s) found in " + repo_dir)
        return 1
    # Verify that the packages all have the same version
    version = verify_equal_package_versions(packages.values())
    # Call git-bloom-branch on each package
    info(
      "Branching these packages: " + str([p.name for p in packages.values()])
    )
    if interactive:
        if not maybe_continue():
            error("Answered no to continue, exiting.")
            return 1
    retcode = 0
    for path in packages:
        package = packages[path]
        branch = prefix + ('' if prefix and prefix.endswith('/') else '/') \
               + package.name
        print('')  # white space
        info("Branching " + package.name + "_" + version + " to " + branch)
        ret = -1
        try:
            ret = execute_branch(src, branch, patch, False, path,
                directory=directory)
            msg = "Branching " + package.name + "_" + version + " to " + \
                branch + " returned " + str(ret)
            if ret != 0:
                warning(msg)
                retcode = ret
            else:
                info(msg)
        except Exception as err:
            traceback.print_exc()
            error("Error branching " + package.name + ": " + str(err))
            retcode = ret
        finally:
            execute_command('git checkout ' + src, cwd=directory)
    return retcode
Example #24
def determine_packages_to_be_built(packages, context):
    """Returns list of packages which should be built, and those package's deps.

    :param packages: list of packages to be built, if None all packages are built
    :type packages: list
    :param context: Workspace context
    :type context: :py:class:`catkin_tools.verbs.catkin_build.context.Context`
    :returns: tuple of packages to be built and those packages' deps
    :rtype: tuple
    """
    start = time.time()
    workspace_packages = find_packages(context.source_space_abs, exclude_subspaces=True)
    # If there are no packages raise
    if not workspace_packages:
        sys.exit("No packages were found in the source space '{0}'".format(context.source_space_abs))
    log("Found '{0}' packages in {1}."
        .format(len(workspace_packages), format_time_delta(time.time() - start)))

    # Order the packages by topology
    ordered_packages = topological_order_packages(workspace_packages)
    # Set the packages in the workspace for the context
    context.packages = ordered_packages
    # Determine the packages which should be built
    packages_to_be_built = []
    packages_to_be_built_deps = []
    if packages:
        # First assert all of the packages given are in the workspace
        workspace_package_names = dict([(pkg.name, (path, pkg)) for path, pkg in ordered_packages])
        for package in packages:
            if package not in workspace_package_names:
                sys.exit("Given package '{0}' is not in the workspace".format(package))
            # If metapackage, include run depends which are in the workspace
            package_obj = workspace_package_names[package][1]
            if 'metapackage' in [e.tagname for e in package_obj.exports]:
                for rdep in package_obj.run_depends:
                    if rdep.name in workspace_package_names:
                        packages.append(rdep.name)
        # Limit the packages to be built to just the provided packages
        for pkg_path, package in ordered_packages:
            if package.name in packages:
                packages_to_be_built.append((pkg_path, package))
                # Get the recursive dependencies for each of these packages
                pkg_deps = get_cached_recursive_build_depends_in_workspace(package, ordered_packages)
                packages_to_be_built_deps.extend(pkg_deps)
    else:
        packages_to_be_built = ordered_packages
    return packages_to_be_built, packages_to_be_built_deps, ordered_packages
Example #25
def main(opts):

    if opts.folders:
        folders = opts.folders
    else:
        # Load the context
        ctx = Context.load(opts.workspace, opts.profile, load_env=False)

        if not ctx:
            print(clr("@{rf}ERROR: Could not determine workspace.@|"), file=sys.stderr)
            sys.exit(1)

        folders = [ctx.source_space_abs]

    list_entry_format = '@{pf}-@| @{cf}%s@|' if not opts.unformatted else '%s'

    opts.depends_on = set(opts.depends_on) if opts.depends_on else set()
    warnings = []
    try:
        for folder in folders:
            for pkg_pth, pkg in find_packages(folder, warnings=warnings).items():
                build_depend_names = [d.name for d in pkg.build_depends]
                is_build_dep = opts.depends_on.intersection(
                    build_depend_names)
                run_depend_names = [d.name for d in pkg.run_depends]
                is_run_dep = opts.depends_on.intersection(
                    run_depend_names)
                if not opts.depends_on or is_build_dep or is_run_dep:
                    print(clr(list_entry_format % pkg.name))
                    if opts.deps:
                        if build_depend_names:
                            print(clr('  @{yf}build_depend:@|'))
                            for dep in build_depend_names:
                                print(clr('  @{pf}-@| %s' % dep))
                        if run_depend_names:
                            print(clr('  @{yf}run_depend:@|'))
                            for dep in run_depend_names:
                                print(clr('  @{pf}-@| %s' % dep))
    except InvalidPackage as ex:
        message = '\n'.join(ex.args)
        print(clr("@{rf}Error:@| The directory %s contains an invalid package."
                  " See below for details:\n\n%s" % (folder, message)))

    # Print out warnings
    if not opts.quiet:
        for warning in warnings:
            print(clr("@{yf}Warning:@| %s" % warning), file=sys.stderr)
Example #26
def determine_packages_to_be_cleaned(context, include_dependents, packages):
    """Returns list of packages which should be cleaned, and those packages' deps.

    :param context: Workspace context
    :type context: :py:class:`catkin_tools.verbs.catkin_build.context.Context`
    :param packages: list of package names to be cleaned
    :type packages: list
    :returns: full list of package names to be cleaned
    :rtype: list
    """

    # Get all the cached packages in the context source space
    workspace_packages = find_packages(context.package_metadata_path(), exclude_subspaces=True, warnings=[])
    # Order the packages by topology
    ordered_packages = topological_order_packages(workspace_packages)

    # Create a dict of all packages in the workspace by name
    workspace_packages_by_name = dict([(pkg.name, (path, pkg)) for path, pkg in ordered_packages])

    # Initialize empty output
    packages_to_be_cleaned = set()

    # Expand metapackages into their constituents
    for package_name in packages:
        # This is ok if it's orphaned
        if package_name not in workspace_packages_by_name:
            packages_to_be_cleaned.add(package_name)
        else:
            # Get the package object
            package = workspace_packages_by_name[package_name][1]
            # If metapackage, include run depends which are in the workspace
            if 'metapackage' in [e.tagname for e in package.exports]:
                for rdep in package.run_depends:
                    if rdep.name in workspace_packages_by_name:
                        packages_to_be_cleaned.add(rdep.name)
            else:
                packages_to_be_cleaned.add(package_name)

    # Determine the packages that depend on the given packages
    if include_dependents:
        for package_name in list(packages_to_be_cleaned):
            # Get the packages that depend on the packages to be cleaned
            dependents = get_recursive_build_dependents_in_workspace(package_name, ordered_packages)
            packages_to_be_cleaned.update([pkg.name for _, pkg in dependents])

    return [workspace_packages_by_name[n] for n in packages_to_be_cleaned if n in workspace_packages_by_name]
Example #27
def list_moveit_packages():
    """
    Creates MoveIt! List
    """
    output = ''
    packages = find_packages(os.getcwd())

    for path, package in packages.items():
        output += "<tr>"
        output += populate_package_data(path, package)
        output += "</tr>"

    # Save to file
    basepath = os.path.dirname(os.path.realpath(__file__))
    template_file(os.path.join(basepath, 'maintainer_table_template.html'),
                  os.path.join(basepath, 'index.html'),
                  {'CONTENTS' : output})
Example #28
def get_package_data(branch_name=None, directory=None, quiet=True, release_directory=None):
    """
    Gets package data about the package(s) in the current branch.

    It also ignores the packages in the `packages.ignore` file in the master branch.

    :param branch_name: name of the branch you are searching on (log use only)
    """
    log = debug if quiet else info
    repo_dir = directory or os.getcwd()
    if branch_name:
        log("Looking for packages in '{0}' branch... ".format(branch_name), end='')
    else:
        log("Looking for packages in '{0}'... ".format(directory or os.getcwd()), end='')
    # Check for package.xml(s)
    packages = find_packages(repo_dir)
    if type(packages) == dict and packages != {}:
        if len(packages) > 1:
            log("found " + str(len(packages)) + " packages.",
                use_prefix=False)
        else:
            log("found '" + list(packages.values())[0].name + "'.",
                use_prefix=False)
        version = verify_equal_package_versions(packages.values())
        ignored_packages = get_ignored_packages(release_directory=release_directory)
        for k, v in dict(packages).items():
            # Check for packages with upper case names
            if v.name.lower() != v.name:
                error("Cowardly refusing to release packages with uppercase characters in the name: " + v.name)
                error("See:")
                error("  https://github.com/ros-infrastructure/bloom/issues/191")
                error("  https://github.com/ros-infrastructure/bloom/issues/76")
                error("Invalid package names, aborting.", exit=True)
            # Check for ignored packages
            if v.name in ignored_packages:
                warning("Explicitly ignoring package '{0}' because it is in the `{1}.ignored` file."
                        .format(v.name, os.environ.get('BLOOM_TRACK', 'packages')))
                del packages[k]
        if packages == {}:
            error("All packages that were found were also ignored, aborting.",
                  exit=True)
        return [p.name for p in packages.values()], version, packages
    # Otherwise we have a problem
    log("failed.", use_prefix=False)
    error("No package.xml(s) found, and '--package-name' not given, aborting.",
          use_prefix=False, exit=True)
Example #29
def get_upstream_meta(upstream_dir):
    meta = None
    # Check for stack.xml
    stack_path = os.path.join(upstream_dir, 'stack.xml')
    info("Checking for package.xml(s)")
    # Check for package.xml(s)
    try:
        from catkin_pkg.packages import find_packages
        from catkin_pkg.packages import verify_equal_package_versions
    except ImportError:
        error("catkin_pkg was not detected, please install it.",
              file=sys.stderr)
        sys.exit(1)
    packages = find_packages(basepath=upstream_dir)
    if packages == {}:
        if has_rospkg:
            info("package.xml(s) not found, looking for stack.xml")
            if os.path.exists(stack_path):
                info("stack.xml found")
                # Assumes you are at the top of the repo
                stack = rospkg.stack.parse_stack_file(stack_path)
                meta = {}
                meta['name'] = [stack.name]
                meta['version'] = stack.version
                meta['type'] = 'stack.xml'
            else:
                error("Neither stack.xml, nor package.xml(s) were detected.")
                sys.exit(1)
        else:
            error("Package.xml(s) were not detected.")
            sys.exit(1)
    else:
        info("package.xml(s) found")
        try:
            version = verify_equal_package_versions(packages.values())
        except RuntimeError as err:
            traceback.print_exc()
            error("Releasing multiple packages with different versions is "
                  "not supported: " + str(err))
            sys.exit(1)
        meta = {}
        meta['version'] = version
        meta['name'] = [p.name for p in packages.values()]
        meta['type'] = 'package.xml'
    return meta
Example #30
def generate_ros_distro_diff(track, repository, distro, distro_file_url=ROS_DISTRO_FILE):
    distro_file_url = distro_file_url.format(distro)
    distro_file_raw = fetch_distro_file(distro_file_url)
    distro_file = yaml.safe_load(distro_file_raw)
    with inbranch('upstream'):
        # Check for package.xml(s)
        try:
            from catkin_pkg.packages import find_packages
        except ImportError:
            error("catkin_pkg was not detected, please install it.",
                  file=sys.stderr, exit=True)
        packages = find_packages(os.getcwd())
        if len(packages) == 0:
            warning("No packages found, will not generate 'package: path' entries for rosdistro.")
        track_dict = get_tracks_dict_raw()['tracks'][track]
        last_version = track_dict['last_version']
        release_inc = track_dict['release_inc']
        distro_file['repositories'][repository]['version'] = '{0}-{1}'.format(last_version, release_inc)
        if packages and (len(packages) > 1 or list(packages.keys())[0] != '.'):
            distro_file['repositories'][repository]['packages'] = {}
            for path, package in packages.items():
                distro_file['repositories'][repository]['packages'][package.name] = path
    distro_file_name = distro_file_url.split('/')[-1]
    # distro_dump_orig = yaml.dump(distro_file_orig, indent=2, default_flow_style=False)
    distro_dump = yaml.dump(distro_file, indent=2, default_flow_style=False)
    udiff = list(difflib.unified_diff(distro_file_raw.splitlines(), distro_dump.splitlines(),
                                      fromfile=distro_file_name, tofile=distro_file_name))
    if udiff:
        info("Unified diff for the ROS distro file located at '{0}':".format(distro_file_url))
        for line in udiff:
            if line.startswith('@@'):
                line = fmt('@{cf}' + line)
            if line.startswith('+'):
                if not line.startswith('+++'):
                    line += '\n'
                line = fmt('@{gf}' + line)
            if line.startswith('-'):
                if not line.startswith('---'):
                    line += '\n'
                line = fmt('@{rf}' + line)
            if line.startswith(' '):
                line += '\n'
            info(line, use_prefix=False, end='')
    else:
        warning("This release resulted in no changes to the ROS distro file...")
Example #31
def dry_run(context, packages, no_deps, start_with):
    # Print Summary
    log(context.summary())
    # Get all the packages in the context source space
    # Suppress warnings since this is a utility function
    workspace_packages = find_packages(context.source_space_abs,
                                       exclude_subspaces=True,
                                       warnings=[])
    # Find list of packages in the workspace
    packages_to_be_built, packages_to_be_built_deps, all_packages = determine_packages_to_be_built(
        packages, context, workspace_packages)
    # Assert start_with package is in the workspace
    verify_start_with_option(start_with, packages, all_packages,
                             packages_to_be_built + packages_to_be_built_deps)
    if not no_deps:
        # Extend packages to be built to include their deps
        packages_to_be_built.extend(packages_to_be_built_deps)
        # Also re-sort
        packages_to_be_built = topological_order_packages(
            dict(packages_to_be_built))
    # Print packages
    log("Packages to be built:")
    max_name_len = str(
        max([len(pkg.name) for pth, pkg in packages_to_be_built]))
    prefix = clr('@{pf}' + ('------ ' if start_with else '- ') + '@|')
    for pkg_path, pkg in packages_to_be_built:
        build_type = pkg.get_build_type()
        if build_type == 'catkin' and 'metapackage' in [
                e.tagname for e in pkg.exports
        ]:
            build_type = 'metapackage'
        if start_with and pkg.name == start_with:
            start_with = None
        log(
            clr("{prefix}@{cf}{name:<" + max_name_len +
                "}@| (@{yf}{build_type}@|)").format(
                    prefix=clr('@!@{kf}(skip)@| ') if start_with else prefix,
                    name=pkg.name,
                    build_type=build_type))
    log("Total packages: " + str(len(packages_to_be_built)))
Example #32
def get_upstream_meta(upstream_dir):
    meta = None
    # Check for stack.xml
    stack_path = os.path.join(upstream_dir, 'stack.xml')
    info("Checking for package.xml(s)")
    # Check for package.xml(s)
    try:
        from catkin_pkg.packages import find_packages
        from catkin_pkg.packages import verify_equal_package_versions
    except ImportError:
        error("catkin_pkg was not detected, please install it.",
              file=sys.stderr)
        sys.exit(1)
    packages = find_packages(basepath=upstream_dir)
    if packages == {}:
        info("package.xml(s) not found, looking for stack.xml")
        if os.path.exists(stack_path):
            info("stack.xml found")
            # Assumes you are at the top of the repo
            stack = parse_stack_xml(stack_path)
            meta = {}
            meta['name'] = [stack.name]
            meta['version'] = stack.version
            meta['type'] = 'stack.xml'
        else:
            bailout("Neither stack.xml, nor package.xml(s) were detected.")
    else:
        info("package.xml(s) found")
        try:
            version = verify_equal_package_versions(packages.values())
        except RuntimeError as err:
            traceback.print_exc()
            bailout("Releasing multiple packages with different versions is "
                    "not supported: " + str(err))
        meta = {}
        meta['version'] = version
        meta['name'] = [p.name for p in packages.values()]
        meta['type'] = 'package.xml'
    return meta
Example #33
def find_enclosing_package(search_start_path=None,
                           ws_path=None,
                           warnings=None,
                           symlinks=True):
    """Get the package containing the current directory."""

    search_start_path = search_start_path or getcwd(symlinks=symlinks)
    child_path = ''

    while True:
        pkgs = find_packages(search_start_path, warnings=warnings)

        # Check if the previous directory is a catkin package
        if child_path in pkgs:
            return pkgs[child_path].name

        # Update search path or end
        (search_start_path, child_path) = os.path.split(search_start_path)
        if len(child_path) == 0 or search_start_path == ws_path:
            break

    return None
Example #34
def update(packages, workspace, context, use_preprint, num_threads):
    """Update packages from the available remotes.

    Args:
        packages (list): A list of packages provided by the user.
        workspace (str): Path to a workspace (without src/ in the end).
        context (Context): Current context. Needed to find current packages.
        use_preprint (bool): Show status messages while cloning
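        num_threads (int): Number of threads used to update packages in parallel.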

    Returns:
        int: Return code. 0 if success. Git error code otherwise.
    """
    ws_path = path.join(workspace, 'src')
    workspace_packages = find_packages(context.source_space_abs,
                                       exclude_subspaces=True,
                                       warnings=[])
    updater = Updater(ws_path=ws_path,
                      packages=workspace_packages,
                      use_preprint=use_preprint,
                      num_threads=num_threads)
    updater.update_packages(packages)
    return 0
Example #35
def get_dependencies(source_folder, build_depends=True, test_depends=True):
    """
    Get the dependencies of all packages in the given folder.

    @param source_folder: path of folder to search packages in
    @type  source_folder: str
    @param build_depends: get build dependencies
    @type  build_depends: bool
    @param test_depends: get test dependencies
    @type  test_depends: bool

    @return: build and/or test dependencies
    @rtype:  list
    """
    print "Get the dependencies of source folder %s" % source_folder
    append_pymodules_if_needed()
    from catkin_pkg import packages
    pkgs = packages.find_packages(source_folder)
    local_packages = [p.name for p in pkgs.values()]
    if len(pkgs) > 0:
        print "In folder %s, found packages %s" % (source_folder,
                                                   ', '.join(local_packages))
    else:
        raise BuildException(
            "Found no packages in folder %s. Are you sure your packages have a packages.xml file?"
            % source_folder)

    depends = []
    for name, pkg in pkgs.iteritems():
        if build_depends:
            for dep in pkg.build_depends + pkg.buildtool_depends:
                if not dep.name in depends and not dep.name in local_packages:
                    depends.append(dep.name)
        if test_depends:
            for dep in pkg.test_depends + pkg.run_depends:
                if not dep.name in depends and not dep.name in local_packages:
                    depends.append(dep.name)

    return depends
Example #36
def package_index_from_package_path(package_paths):
    """Find all packages on the given list of paths

    Iterates over the given list of paths in reverse order so that packages
    found in the paths at the beginning of the list get overlaid onto packages
    with the same name which were found in paths farther back in the list.

    The resulting dictionary is keyed by the package name (so packages with
    duplicate names are overlaid) and the values are the
    :py:class:`catkin_pkg.package.Package` class

    @note Is this actually implemented as a function in a general ros package?

    :param package_paths: list of paths to search
    :type package_paths: list
    :returns: dictionary of package objects keyed by name of the package
    :rtype: dict
    """
    result = {}
    for path in reversed(package_paths):
        for unused_package_path, package in find_packages(path).items():
            result[package.name] = package
    return result
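A minimal usage sketch (the two workspace paths are illustrative): because the list is walked in reverse, a package found under overlay_ws shadows a same-named package from base_ws.

index = package_index_from_package_path(['overlay_ws/src', 'base_ws/src'])
for name, package in sorted(index.items()):
    print(name, package.version)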
Example #37
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(description='Find a catkin package')
    parser.add_argument('pkg', help='The name of the package')
    parser.add_argument('base_path',
                        nargs='?',
                        default=os.curdir,
                        help='The base path to crawl for packages')

    args = parser.parse_args(argv)

    try:
        packages = find_packages(args.base_path)
        catkin_pkg = [
            path for path, p in packages.items() if p.name == args.pkg
        ]
        if catkin_pkg:
            print(catkin_pkg[0])
        else:
            print("Could not find package '%s'." % args.pkg, file=sys.stderr)
            sys.exit(2)
    except RuntimeError as e:
        print('ERROR: ' + str(e), file=sys.stderr)
        sys.exit(1)
Example #38
def find_enclosing_package(search_start_path=None,
                           ws_path=None,
                           warnings=None,
                           symlinks=True):
    """Get the package containing a specific directory.

    :param search_start_path: The path to crawl upward to find a package, CWD if None
    :type search_start_path: str
    :param ws_path: The path at which the search should stop
    :type ws_path: str
    :param warnings: Print warnings if None or return them in the given list
    :type warnings: list
    :param symlinks: If True, then get the path considering symlinks. If False,
        resolve the path to the actual path.
    :type symlinks: bool
    :returns: The name of the enclosing package, or None if no package is found
    """

    search_path = search_start_path or getcwd(symlinks=symlinks)
    stop_path = ws_path or '/'
    child_path = '.'

    while search_path != stop_path:
        # Find packages under the search path
        try:
            pkgs = find_packages(search_path, warnings=warnings)
        except RuntimeError:
            return None

        # Check if the directory is a catkin package
        if child_path in pkgs:
            return pkgs[child_path].name

        # Update search path
        (search_path, child_path) = os.path.split(search_path)

    return None
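A minimal usage sketch, assuming it is run from somewhere inside a catkin package:

pkg_name = find_enclosing_package(warnings=[])
if pkg_name is not None:
    print('enclosing package: ' + pkg_name)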
Example #39
def get_packages_to_test(context, packages):
    packages_to_test = find_packages(context.source_space_abs,
                                     exclude_subspaces=True,
                                     warnings=[]).values()

    if packages:
        # One or more package names specified explicitly by the user.
        packages_to_test = [
            package for package in packages_to_test if package.name in packages
        ]
    else:
        # No packages specified, default to everything in workspace, subject to whitelist and blacklist.
        def filter_whitelist_blacklist(packages):
            for package in packages:
                if context.whitelist and package.name not in context.whitelist:
                    continue
                if context.blacklist and package.name in context.blacklist:
                    continue
                yield package

        packages_to_test = filter_whitelist_blacklist(packages_to_test)

    # Filter out metapackages and build_type values we don't support.
    def filter_exports(packages):
        for package in packages:
            keep = True
            for export in package.exports:
                if export.tagname == 'metapackage':
                    keep = False
                if export.tagname == 'build_type':
                    keep = False
            if keep:
                yield package

    packages_to_test = filter_exports(packages_to_test)

    return list(packages_to_test)
Example #40
def main(args=None, get_subs_fn=None):
    get_subs_fn = get_subs_fn or get_subs
    _place_template_files = True
    _process_template_files = True
    package_path = os.getcwd()
    skip_package_names = None

    if args is not None:
        package_path = args.package_path or os.getcwd()
        _place_template_files = args.place_template_files
        _process_template_files = args.process_template_files

        # create dependency-skip package name lists
        skip_name_path = args.skip_package_names or ''
        if os.path.isfile(os.path.abspath(skip_name_path)):
            with open(os.path.abspath(skip_name_path), 'r') as f:
                skip_package_names = []
                for line in f:
                    # comment string
                    if line[:1] == '#':
                        continue
                    # add lists
                    skip_package_names.append(line.strip())

    pkgs_dict = find_packages(package_path)
    if len(pkgs_dict) == 0:
        sys.exit("No packages found in path: '{0}'".format(package_path))
    if len(pkgs_dict) > 1:
        sys.exit("Multiple packages found, "
                 "this tool only supports one package at a time.")

    os_data = create_default_installer_context().get_os_name_and_version()
    os_name, os_version = os_data
    ros_distro = os.environ.get('ROS_DISTRO', 'indigo')

    # Allow args overrides
    os_name = args.os_name or os_name
    os_version = args.os_version or os_version
    ros_distro = args.ros_distro or ros_distro

    # Summarize
    info(
        fmt("@!@{gf}==> @|") +
        fmt("Generating debs for @{cf}%s:%s@| for package(s) %s" %
            (os_name, os_version, [p.name for p in pkgs_dict.values()])))

    for path, pkg in pkgs_dict.items():
        template_files = None
        try:
            subs = get_subs_fn(pkg, os_name, os_version, ros_distro,
                               args.native, skip_package_names)
            if _place_template_files:
                # Place template files
                place_template_files(path, pkg.get_build_type())
            if _process_template_files:
                # Just process existing template files
                template_files = process_template_files(path, subs)
            if not _place_template_files and not _process_template_files:
                # If neither, do both
                place_template_files(path, pkg.get_build_type())
                template_files = process_template_files(path, subs)
            if template_files is not None:
                for template_file in template_files:
                    os.remove(os.path.normpath(template_file))
        except (KeyboardInterrupt, EOFError):
            sys.exit(1)
        except Exception as exc:
            debug(traceback.format_exc())
            error(type(exc).__name__ + ": " + str(exc), exit=True)
Example #41
def determine_packages_to_be_cleaned(context, include_dependents, packages):
    """Returns list of packages which should be cleaned, and those packages' deps.

    :param context: Workspace context
    :type context: :py:class:`catkin_tools.verbs.catkin_build.context.Context`
    :param include_dependents: Also clean dependents of the packages to be cleaned
    :type include_dependents: bool
    :param packages: list of package names to be cleaned
    :type packages: list
    :returns: full list of package names to be cleaned
    :rtype: list
    """

    # Get all the cached packages in the context source space
    workspace_packages = find_packages(context.package_metadata_path(),
                                       exclude_subspaces=True,
                                       warnings=[])
    # Order the packages by topology
    ordered_packages = topological_order_packages(workspace_packages)
    # Set the packages in the workspace for the context
    context.packages = ordered_packages

    # Create a dict of all packages in the workspace by name
    workspace_packages_by_name = dict([(pkg.name, (path, pkg))
                                       for path, pkg in ordered_packages])

    # Initialize empty output
    packages_to_be_cleaned = set()

    # Expand glob patterns in packages
    expanded_packages = []
    for package_name in packages:
        expanded_packages.extend(
            expand_glob_package(package_name, workspace_packages_by_name))
    packages = expanded_packages

    # Expand metapackages into their constituents
    for package_name in packages:
        # This is ok if it's orphaned
        if package_name not in workspace_packages_by_name:
            packages_to_be_cleaned.add(package_name)
        else:
            # Get the package object
            package = workspace_packages_by_name[package_name][1]
            # If metapackage, include run depends which are in the workspace
            if 'metapackage' in [e.tagname for e in package.exports]:
                for rdep in package.run_depends:
                    if rdep.name in workspace_packages_by_name:
                        packages_to_be_cleaned.add(rdep.name)
            else:
                packages_to_be_cleaned.add(package_name)

    # Determine the packages that depend on the given packages
    if include_dependents:
        for package_name in list(packages_to_be_cleaned):
            # Get the packages that depend on the packages to be cleaned
            dependents = get_recursive_build_dependents_in_workspace(
                package_name, ordered_packages)
            packages_to_be_cleaned.update([pkg.name for _, pkg in dependents])

    return [
        workspace_packages_by_name[n] for n in packages_to_be_cleaned
        if n in workspace_packages_by_name
    ]
Example #42
def stage_runtime_dependencies(
    part_src: str,
    part_install: str,
    ros_version: str,
    ros_distro: str,
    target_arch: str,
):
    click.echo("Staging runtime dependencies...")
    # TODO: support python packages (only apt currently supported)
    apt_packages: Set[str] = set()

    installed_pkgs = catkin_packages.find_packages(part_install).values()
    for pkg in catkin_packages.find_packages(part_src).values():
        # Evaluate the conditions of all dependencies
        pkg.evaluate_conditions({
            "ROS_VERSION": ros_version,
            "ROS_DISTRO": ros_distro,
            "ROS_PYTHON_VERSION": "3",
        })
        # Retrieve only the 'exec_depends' whose condition evaluates to true
        for dep in (exec_dep for exec_dep in pkg.exec_depends
                    if exec_dep.evaluated_condition):
            # No need to resolve this dependency if we know it's local
            if any(p for p in installed_pkgs if p.name == dep.name):
                continue

            cmd = ["rosdep", "resolve", dep.name, "--rosdistro", ros_distro]
            try:
                click.echo(f"Running {cmd!r}")
                proc = subprocess.run(
                    cmd,
                    check=True,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                    env=dict(PATH=os.environ["PATH"]),
                )
            except subprocess.CalledProcessError as error:
                click.echo(f"failed to run {cmd!r}: {error.output}")
                # proc is undefined when the command fails, so skip this
                # dependency instead of crashing below
                continue

            parsed = _parse_rosdep_resolve_dependencies(
                dep,
                proc.stdout.decode().strip())
            apt_packages |= parsed.pop("apt", set())

            if parsed:
                click.echo(f"unhandled dependencies: {parsed!r}")

    if apt_packages:
        package_names = sorted(apt_packages)
        install_path = pathlib.Path(part_install)
        stage_packages_path = install_path.parent / "stage_packages"

        click.echo(f"Fetching stage packages: {package_names!r}")
        Repo.fetch_stage_packages(
            package_names=package_names,
            base="core20",
            stage_packages_path=stage_packages_path,
            target_arch=target_arch,
        )

        click.echo(f"Unpacking stage packages: {package_names!r}")
        Repo.unpack_stage_packages(stage_packages_path=stage_packages_path,
                                   install_path=install_path)
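
The helper _parse_rosdep_resolve_dependencies is not shown on this page. Based on the documented output of 'rosdep resolve' (an installer marker line such as '#apt' followed by the resolved package names), its behavior is probably close to this hedged sketch:

from typing import Dict, Set

def parse_rosdep_resolve_output(output: str) -> Dict[str, Set[str]]:
    # Lines starting with '#' name the installer (e.g. '#apt', '#pip');
    # subsequent lines list the resolved system packages for that installer.
    dependencies: Dict[str, Set[str]] = {}
    installer = None
    for line in output.splitlines():
        line = line.strip()
        if line.startswith('#'):
            installer = line[1:].strip()
            dependencies.setdefault(installer, set())
        elif line and installer is not None:
            dependencies[installer].add(line)
    return dependencies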
Example #43
0
def main(sysargs=None):
    parser = argparse.ArgumentParser(description='Generate a REP-0132 %s' %
                                     CHANGELOG_FILENAME)
    parser.add_argument(
        '-a',
        '--all',
        action='store_true',
        default=False,
        help=
        'Generate changelog for all versions instead of only the forthcoming one (only supported when no changelog file exists yet)'
    )
    parser.add_argument(
        '--print-root',
        action='store_true',
        default=False,
        help=
        'Output changelog content to the console as if the repository root contained only a single package'
    )
    parser.add_argument(
        '--skip-contributors',
        action='store_true',
        default=False,
        help='Skip adding the list of contributors to the changelog')
    parser.add_argument('--skip-merges',
                        action='store_true',
                        default=False,
                        help='Skip adding merge commits to the changelog')
    parser.add_argument(
        '-y',
        '--non-interactive',
        action='store_true',
        default=False,
        help="Run without user interaction, confirming all questions with 'yes'"
    )
    args = parser.parse_args(sysargs)

    base_path = '.'
    logging.basicConfig(format='%(message)s', level=logging.DEBUG)

    vcs_client = get_vcs_client(base_path)

    if args.print_root:
        # printing status messages to stderr to allow piping the changelog to a file
        if args.all:
            print('Querying all tags and commit information...',
                  file=sys.stderr)
            tag2log_entries = get_all_changes(vcs_client,
                                              skip_merges=args.skip_merges)
            print('Generating changelog output with all versions...',
                  file=sys.stderr)
        else:
            print('Querying commit information since latest tag...',
                  file=sys.stderr)
            tag2log_entries = get_forthcoming_changes(
                vcs_client, skip_merges=args.skip_merges)
            print('Generating changelog files with forthcoming version...',
                  file=sys.stderr)
        print('', file=sys.stderr)
        data = generate_changelog_file('repository-level',
                                       tag2log_entries,
                                       vcs_client=vcs_client)
        print(data)
        return 0

    # find packages
    packages = find_packages(base_path)
    if not packages:
        raise RuntimeError('No packages found')
    print('Found packages: %s' %
          ', '.join(sorted(p.name for p in packages.values())))

    # check for missing changelogs
    missing_changelogs = []
    for pkg_path, package in packages.items():
        changelog_path = os.path.join(base_path, pkg_path, CHANGELOG_FILENAME)
        if not os.path.exists(changelog_path):
            missing_changelogs.append(package.name)

    if args.all and not missing_changelogs:
        raise RuntimeError(
            'All packages already have a changelog. Either remove (some of) them before using --all or invoke the script without --all.'
        )

    if args.all and len(missing_changelogs) != len(packages):
        ignored = set([p.name
                       for p in packages.values()]) - set(missing_changelogs)
        print(
            'The following packages already have a changelog file and will be ignored: %s'
            % ', '.join(sorted(ignored)),
            file=sys.stderr)

    # prompt to switch to --all
    if not args.all and missing_changelogs:
        print('Some of the packages have no changelog file: %s' %
              ', '.join(sorted(missing_changelogs)))
        print(
            'You might consider using --all to generate the changelogs for all versions (not only the forthcoming one).'
        )
        if not args.non_interactive and not prompt_continue(
                'Continue without --all option', default=False):
            raise RuntimeError(
                'Skipping generation, rerun the script with --all.')

    if args.all:
        print('Querying all tags and commit information...')
        tag2log_entries = get_all_changes(vcs_client,
                                          skip_merges=args.skip_merges)
        print('Generating changelog files with all versions...')
        generate_changelogs(base_path,
                            packages,
                            tag2log_entries,
                            logger=logging,
                            vcs_client=vcs_client,
                            skip_contributors=args.skip_contributors)
    else:
        print('Querying commit information since latest tag...')
        tag2log_entries = get_forthcoming_changes(vcs_client,
                                                  skip_merges=args.skip_merges)
        # separate packages with/without a changelog file
        packages_without = {
            pkg_path: package
            for pkg_path, package in packages.items()
            if package.name in missing_changelogs
        }
        if packages_without:
            print('Generating changelog files with forthcoming version...')
            generate_changelogs(base_path,
                                packages_without,
                                tag2log_entries,
                                logger=logging,
                                vcs_client=vcs_client,
                                skip_contributors=args.skip_contributors)
        packages_with = {
            pkg_path: package
            for pkg_path, package in packages.items()
            if package.name not in missing_changelogs
        }
        if packages_with:
            print('Updating forthcoming section of changelog files...')
            update_changelogs(base_path,
                              packages_with,
                              tag2log_entries,
                              logger=logging,
                              vcs_client=vcs_client,
                              skip_contributors=args.skip_contributors)
    print('Done.')
    print(
        'Please review the extracted commit messages and consolidate the changelog entries before committing the files!'
    )
    return 0
Example #44
0
def get_distro(distro_name):
    # Fetch the cached rosdistro distribution for the given distro name
    index = get_index(get_index_url())
    return get_cached_distribution(index, distro_name)


def get_package_repo(distro, name):
    return distro.repositories[distro.release_packages[name].repository_name].source_repository


if __name__ == '__main__':
    freeze_support()
    if 'ROS_SHARE_DIR' not in os.environ:
        raise ValueError('expected ROS_SHARE_DIR to be defined in os.environ')

    if 'ROS_DISTRO' not in os.environ:
        raise ValueError('expected ROS_DISTRO to be defined in os.environ')

    packages = find_packages(os.environ['ROS_SHARE_DIR'])
    distro = get_distro(os.environ['ROS_DISTRO'])
    data = {}
    data['version'] = 1
    reg = data['registrations'] = []
    for key, value in packages.items():
        try:
            lic = value.licenses[0]
            repo = get_package_repo(distro, value.name)
            url = repo.url
            version = repo.version
            git_query = Popen(['git', 'ls-remote', url, version], stdout=PIPE, stderr=PIPE)
            (git_status, error) = git_query.communicate()
            if git_query.poll() == 0:
                s_list = git_status.splitlines()
            else:
                s_list = []  # avoid a NameError when the git query fails
            for line in s_list:
Example #45
0
def main(opts):
    # Initialize dictionary version of opts namespace
    opts_vars = vars(opts) if opts else {}

    # Get the workspace (either the given directory or the enclosing ws)
    workspace_hint = opts_vars.get('workspace', None) or os.getcwd()
    workspace = find_enclosing_workspace(workspace_hint)

    if not workspace:
        print(clr("@{rf}ERROR: No workspace found containing '%s'@|" %
                  workspace_hint),
              file=sys.stderr)
        sys.exit(1)

    # Load the context to get the subspaces
    ctx = Context.load(workspace, opts.profile, opts, load_env=False)

    path = None

    if opts.space:
        # Get the subspace
        if opts.space == 'src':
            path = ctx.source_space_abs
        elif opts.space == 'build':
            path = ctx.build_space_abs
        elif opts.space == 'devel':
            path = ctx.devel_space_abs
        elif opts.space == 'install':
            path = ctx.install_space_abs

    if opts.package:
        # Get the path to the given package
        path = path or ctx.source_space_abs
        if opts.space == 'build':
            path = os.path.join(path, opts.package)
        elif opts.space in ['devel', 'install']:
            path = os.path.join(path, 'share', opts.package)
        else:
            try:
                packages = find_packages(path, warnings=[])
                catkin_package = [
                    pkg_path for pkg_path, p in packages.items()
                    if p.name == opts.package
                ]
                if catkin_package:
                    path = os.path.join(path, catkin_package[0])
                else:
                    print(clr(
                        "@{rf}ERROR: Could not locate a package named '%s' in path '%s'@|"
                        % (opts.package, path)),
                          file=sys.stderr)
                    sys.exit(2)
            except RuntimeError as e:
                print(clr('@{rf}ERROR: %s@|' % str(e)), file=sys.stderr)
                sys.exit(1)
    elif not opts.space:
        # Get the path to the workspace root
        path = workspace

    # Check if the path exists
    if opts.existing_only and not os.path.exists(path):
        print(clr("@{rf}ERROR: Requested path '%s' does not exist.@|" % path),
              file=sys.stderr)
        sys.exit(1)

    # Make the path relative if desired
    if opts.relative:
        path = os.path.relpath(path, os.getcwd())

    # Print the path
    print(path)
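
As a design note, the space-to-path dispatch above could be table-driven; a sketch assuming the Context object exposes the same four *_space_abs attributes used in the example:

def space_path(ctx, space):
    # Same mapping as the if/elif chain above, expressed as a lookup table;
    # returns None for an unknown space, matching the original behavior
    return {
        'src': ctx.source_space_abs,
        'build': ctx.build_space_abs,
        'devel': ctx.devel_space_abs,
        'install': ctx.install_space_abs,
    }.get(space)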
Example #46
0
def _create_unmerged_devel_setup(context):
    # Find all of the leaf packages in the workspace
    # where leaf means that nothing in the workspace depends on it
    workspace_packages = find_packages(context.source_space_abs,
                                       exclude_subspaces=True)
    ordered_packages = topological_order_packages(workspace_packages)
    workspace_packages = dict([(p.name, p)
                               for pth, p in workspace_packages.items()])
    dependencies = set([])
    for name, pkg in workspace_packages.items():
        dependencies.update([
            d.name for d in pkg.buildtool_depends + pkg.build_depends +
            pkg.run_depends
        ])
    leaf_packages = []
    for name, pkg in workspace_packages.items():
        if pkg.name not in dependencies:
            leaf_packages.append(pkg.name)
    assert leaf_packages, 'there should always be at least one leaf package'
    leaf_sources = []
    for pkg_name in leaf_packages:
        source_path = os.path.join(context.devel_space_abs, pkg_name,
                                   'setup.sh')
        if os.path.isfile(source_path):
            leaf_sources.append('. {0}'.format(source_path))
    # In addition to the leaf packages, we need to source the recursive run depends of the leaf packages
    run_depends = get_recursive_run_depends_in_workspace(
        [workspace_packages[p] for p in leaf_packages], ordered_packages)
    run_depends_sources = []
    for run_dep_name in [p.name for pth, p in run_depends]:
        source_path = os.path.join(context.devel_space_abs, run_dep_name,
                                   'setup.sh')
        if os.path.isfile(source_path):
            run_depends_sources.append('. {0}'.format(source_path))
    # Create the setup.sh file
    setup_sh_path = os.path.join(context.devel_space_abs, 'setup.sh')
    env_file = """\
#!/usr/bin/env sh
# generated from within catkin_tools/verbs/catkin_build/build.py

# This file aggregates the many setup.sh files in the various
# unmerged devel spaces in this folder.
# This is accomplished by sourcing each leaf package and all the
# recursive run dependencies of those leaf packages

# Source the first package's setup.sh without the --extend option
{first_source}

# remove all passed in args, resetting $@, $*, $#, $n
shift $#
# set the --extend arg for the rest of the packages' setup.sh scripts
set -- $@ "--extend"
# source setup.sh for each of the leaf packages in the workspace
{leaf_sources}

# And now the setup.sh for each of their recursive run dependencies
{run_depends_sources}
""".format(first_source=leaf_sources[0],
           leaf_sources='\n'.join(leaf_sources[1:]),
           run_depends_sources='\n'.join(run_depends_sources))
    with open(setup_sh_path, 'w') as f:
        f.write(env_file)
    # Make this file executable
    os.chmod(setup_sh_path, stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR)
    # Create the setup.bash file
    setup_bash_path = os.path.join(context.devel_space_abs, 'setup.bash')
    with open(setup_bash_path, 'w') as f:
        f.write("""\
#!/usr/bin/env bash
# generated from within catkin_tools/verbs/catkin_build/build.py

CATKIN_SHELL=bash

# source setup.sh from same directory as this file
_BUILD_SETUP_DIR=$(builtin cd "`dirname "${BASH_SOURCE[0]}"`" && pwd)
. "$_BUILD_SETUP_DIR/setup.sh"
""")
    # Make this file executable
    os.chmod(setup_bash_path, stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR)
    setup_zsh_path = os.path.join(context.devel_space_abs, 'setup.zsh')
    with open(setup_zsh_path, 'w') as f:
        f.write("""\
#!/usr/bin/env zsh
# generated from within catkin_tools/verbs/catkin_build/build.py

CATKIN_SHELL=zsh

# source setup.sh from same directory as this file
_BUILD_SETUP_DIR=$(builtin cd -q "`dirname "$0"`" && pwd)
emulate sh # emulate POSIX
. "$_BUILD_SETUP_DIR/setup.sh"
emulate zsh # back to zsh mode
""")
    # Make this file executable
    os.chmod(setup_zsh_path, stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR)
Example #47
0
    def test_collect_warnings(self):
        """Tests warnings collection"""
        warnings = []
        pkgs_dict = find_packages(test_data_dir, warnings=warnings)

        # list.sort() returns None, so compare sorted copies instead
        self.assertEqual(sorted(warnings), sorted(test_expected_warnings))
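
The warnings keyword relied on by this test is a standard catkin_pkg feature: passing a list suppresses printing and collects the warning strings instead. A minimal usage sketch (the 'src' path is a placeholder):

from catkin_pkg.packages import find_packages

warnings = []
packages = find_packages('src', warnings=warnings)  # 'src' is a placeholder path
for warning in warnings:
    print('WARNING:', warning)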
Example #48
0
def build_isolated_workspace(context,
                             packages=None,
                             start_with=None,
                             no_deps=False,
                             unbuilt=False,
                             n_jobs=None,
                             force_cmake=False,
                             pre_clean=False,
                             force_color=False,
                             quiet=False,
                             interleave_output=False,
                             no_status=False,
                             limit_status_rate=10.0,
                             lock_install=False,
                             no_notify=False,
                             continue_on_failure=False,
                             summarize_build=None,
                             relaxed_constraints=False,
                             influx_url=None,
                             influx_db=None):
    """Builds a catkin workspace in isolation

    This function will find all of the packages in the source space, start some
    executors, feed them packages to build based on dependencies and topological
    ordering, and then monitor the output of the executors, handling logging,
    build start/failure/completion events, and the shutdown of the executors
    when appropriate.

    :param context: context in which to build the catkin workspace
    :type context: :py:class:`catkin_tools.verbs.catkin_build.context.Context`
    :param packages: list of packages to build, by default their dependencies will also be built
    :type packages: list
    :param start_with: package to start with, skipping all packages which precede it in the topological order
    :type start_with: str
    :param no_deps: If True, the dependencies of packages will not be built first
    :type no_deps: bool
    :param n_jobs: maximum number of package build jobs to run in parallel
    :type n_jobs: int
    :param force_cmake: forces invocation of CMake if True, default is False
    :type force_cmake: bool
    :param force_color: forces colored output even if terminal does not support it
    :type force_color: bool
    :param quiet: suppresses the output of commands unless there is an error
    :type quiet: bool
    :param interleave_output: prints the output of commands as they are received
    :type interleave_output: bool
    :param no_status: disables status bar
    :type no_status: bool
    :param limit_status_rate: maximum rate, in Hz, at which the status line is updated; 0 places no limit
    :type limit_status_rate: float
    :param lock_install: causes executors to synchronize on access of install commands
    :type lock_install: bool
    :param no_notify: suppresses system notifications
    :type no_notify: bool
    :param continue_on_failure: do not stop building other jobs on error
    :type continue_on_failure: bool
    :param summarize_build: if True, summarize the build at the end; if None, summarize only when
        continue_on_failure is True and the build fails; if False, never summarize
    :type summarize_build: bool
    :param relaxed_constraints: if True, do not use exec_deps for topological ordering
    :type relaxed_constraints: bool
    :param influx_url: URL used to access an InfluxDB instance, of the form user:password@host:port
    :type influx_url: str
    :param influx_db: database name in InfluxDB
    :type influx_db: str

    :raises: SystemExit if buildspace is a file or no packages were found in the source space
        or if the provided options are invalid
    """
    pre_start_time = time.time()

    # Assert that the limit_status_rate is valid
    if limit_status_rate < 0:
        sys.exit(
            clr("[build] @!@{rf}Error:@| The value of --status-rate must be "
                "greater than or equal to zero."))

    # Declare a buildspace marker describing the build config for error checking
    buildspace_marker_data = {
        'workspace': context.workspace,
        'profile': context.profile,
        'install': context.install,
        'install_space': context.install_space_abs,
        'devel_space': context.devel_space_abs,
        'source_space': context.source_space_abs
    }

    # Check build config
    if os.path.exists(
            os.path.join(context.build_space_abs, BUILDSPACE_MARKER_FILE)):
        with open(os.path.join(
                context.build_space_abs,
                BUILDSPACE_MARKER_FILE)) as buildspace_marker_file:
            existing_buildspace_marker_data = yaml.safe_load(
                buildspace_marker_file)
            misconfig_lines = ''
            for (k, v) in existing_buildspace_marker_data.items():
                new_v = buildspace_marker_data.get(k, None)
                if new_v != v:
                    misconfig_lines += (
                        '\n - %s: %s (stored) is not %s (commanded)' %
                        (k, v, new_v))
            if len(misconfig_lines) > 0:
                sys.exit(
                    clr("\n@{rf}Error:@| Attempting to build a catkin workspace using build space: "
                        "\"%s\" but that build space's most recent configuration "
                        "differs from the commanded one in ways which will cause "
                        "problems. Fix the following options or use @{yf}`catkin "
                        "clean -b`@| to remove the build space: %s" %
                        (context.build_space_abs, misconfig_lines)))

    # Summarize the context
    summary_notes = []
    if force_cmake:
        summary_notes += [
            clr("@!@{cf}NOTE:@| Forcing CMake to run for each package.")
        ]
    log(context.summary(summary_notes))

    # Make sure there is a build folder and it is not a file
    if os.path.exists(context.build_space_abs):
        if os.path.isfile(context.build_space_abs):
            sys.exit(
                clr("[build] @{rf}Error:@| Build space '{0}' exists but is a file and not a folder."
                    .format(context.build_space_abs)))
    # If it doesn't exist, create it
    else:
        log("[build] Creating build space: '{0}'".format(
            context.build_space_abs))
        os.makedirs(context.build_space_abs)

    # Write the current build config for config error checking
    with open(os.path.join(context.build_space_abs, BUILDSPACE_MARKER_FILE),
              'w') as buildspace_marker_file:
        buildspace_marker_file.write(
            yaml.dump(buildspace_marker_data, default_flow_style=False))

    # Get all the packages in the context source space
    # Suppress warnings since this is a utility function
    workspace_packages = find_packages(context.source_space_abs,
                                       exclude_subspaces=True,
                                       warnings=[])

    # Get packages which have not been built yet
    built_packages, unbuilt_pkgs = get_built_unbuilt_packages(
        context, workspace_packages)

    # Handle unbuilt packages
    if unbuilt:
        # Check if there are any unbuilt
        if len(unbuilt_pkgs) > 0:
            # Add the unbuilt packages
            packages.extend(list(unbuilt_pkgs))
        else:
            log("[build] No unbuilt packages to be built.")
            return

    # If no_deps is given, ensure packages to build are provided
    if no_deps and packages is None:
        log(
            clr("[build] @!@{rf}Error:@| With no_deps, you must specify packages to build."
                ))
        return

    # Find list of packages in the workspace
    packages_to_be_built, packages_to_be_built_deps, all_packages = determine_packages_to_be_built(
        packages, context, workspace_packages)

    if not no_deps:
        # Extend packages to be built to include their deps
        packages_to_be_built.extend(packages_to_be_built_deps)

    # Also re-sort
    try:
        packages_to_be_built = topological_order_packages(
            dict(packages_to_be_built))
    except AttributeError:
        log(
            clr("[build] @!@{rf}Error:@| The workspace packages have a circular "
                "dependency, and cannot be built. Please run `catkin list "
                "--deps` to determine the problematic package(s)."))
        return

    # Check the number of packages to be built
    if len(packages_to_be_built) == 0:
        log(clr('[build] No packages to be built.'))

    # Assert start_with package is in the workspace
    verify_start_with_option(start_with, packages, all_packages,
                             packages_to_be_built)

    # Populate .catkin file if we're not installing
    # NOTE: This is done to prevent the Catkin CMake code from doing it,
    # which isn't parallel-safe. Catkin CMake only modifies this file if
    # its package source path isn't found.
    if not context.install:
        dot_catkin_file_path = os.path.join(context.devel_space_abs, '.catkin')
        # If the file exists, get the current paths
        if os.path.exists(dot_catkin_file_path):
            with open(dot_catkin_file_path, 'r') as f:
                dot_catkin_paths = f.read().split(';')
        else:
            dot_catkin_paths = []

        # Update the list with the new packages (in topological order)
        packages_to_be_built_paths = [
            os.path.join(context.source_space_abs, path)
            for path, pkg in packages_to_be_built
        ]

        new_dot_catkin_paths = [
            path for path in [
                os.path.join(context.source_space_abs, path)
                for path, pkg in all_packages
            ] if path in dot_catkin_paths or path in packages_to_be_built_paths
        ]

        # Write the new file if it's different, otherwise, leave it alone
        if dot_catkin_paths == new_dot_catkin_paths:
            wide_log("[build] Package table is up to date.")
        else:
            wide_log("[build] Updating package table.")
            with open(dot_catkin_file_path, 'w') as f:
                f.write(';'.join(new_dot_catkin_paths))

    # Remove packages before start_with
    if start_with is not None:
        for path, pkg in list(packages_to_be_built):
            if pkg.name != start_with:
                wide_log(
                    clr("@!@{pf}Skipping@|  @{gf}---@| @{cf}{}@|").format(
                        pkg.name))
                packages_to_be_built.pop(0)
            else:
                break

    # Get the names of all packages to be built
    packages_to_be_built_names = [p.name for _, p in packages_to_be_built]
    packages_to_be_built_deps_names = [
        p.name for _, p in packages_to_be_built_deps
    ]

    # Generate prebuild and prebuild clean jobs, if necessary
    prebuild_jobs = {}
    setup_util_present = os.path.exists(
        os.path.join(context.devel_space_abs, '_setup_util.py'))
    catkin_present = 'catkin' in (packages_to_be_built_names +
                                  packages_to_be_built_deps_names)
    catkin_built = 'catkin' in built_packages
    prebuild_built = 'catkin_tools_prebuild' in built_packages

    # Handle the prebuild jobs if the develspace is linked
    prebuild_pkg_deps = []
    if context.link_devel:
        prebuild_pkg = None

        # Construct a dictionary to lookup catkin package by name
        pkg_dict = dict([(pkg.name, (pth, pkg)) for pth, pkg in all_packages])

        if setup_util_present:
            # Setup util is already there, determine if it needs to be
            # regenerated
            if catkin_built:
                if catkin_present:
                    prebuild_pkg_path, prebuild_pkg = pkg_dict['catkin']
            elif prebuild_built:
                if catkin_present:
                    # TODO: Clean prebuild package
                    ct_prebuild_pkg_path = get_prebuild_package(
                        context.build_space_abs, context.devel_space_abs,
                        force_cmake)
                    ct_prebuild_pkg = parse_package(ct_prebuild_pkg_path)

                    prebuild_jobs[
                        'catkin_tools_prebuild'] = create_catkin_clean_job(
                            context,
                            ct_prebuild_pkg,
                            ct_prebuild_pkg_path,
                            dependencies=[],
                            dry_run=False,
                            clean_build=True,
                            clean_devel=True,
                            clean_install=True)

                    # TODO: Build catkin package
                    prebuild_pkg_path, prebuild_pkg = pkg_dict['catkin']
                    prebuild_pkg_deps.append('catkin_tools_prebuild')
            else:
                # How did these get here??
                log("Warning: devel space setup files have an unknown origin.")
        else:
            # Setup util needs to be generated
            if catkin_built or prebuild_built:
                log("Warning: generated devel space setup files have been deleted."
                    )

            if catkin_present:
                # Build catkin package
                prebuild_pkg_path, prebuild_pkg = pkg_dict['catkin']
            else:
                # Generate and build an explicit prebuild package
                prebuild_pkg_path = get_prebuild_package(
                    context.build_space_abs, context.devel_space_abs,
                    force_cmake)
                prebuild_pkg = parse_package(prebuild_pkg_path)

        if prebuild_pkg is not None:
            # Create the prebuild job
            prebuild_job = create_catkin_build_job(
                context,
                prebuild_pkg,
                prebuild_pkg_path,
                build_dependencies=prebuild_pkg_deps,
                run_dependencies=[],
                force_cmake=force_cmake,
                pre_clean=pre_clean,
                prebuild=True)

            # Add the prebuild job
            prebuild_jobs[prebuild_job.jid] = prebuild_job

    # Remove prebuild jobs from normal job list
    for prebuild_jid, prebuild_job in prebuild_jobs.items():
        if prebuild_jid in packages_to_be_built_names:
            packages_to_be_built_names.remove(prebuild_jid)

    # Initial jobs list is just the prebuild jobs
    jobs = [] + list(prebuild_jobs.values())

    # Get all build type plugins
    build_job_creators = {
        ep.name: ep.load()['create_build_job']
        for ep in pkg_resources.iter_entry_points(group='catkin_tools.jobs')
    }

    # It's a problem if there aren't any build types available
    if len(build_job_creators) == 0:
        sys.exit(
            'Error: No build types available. Please check your catkin_tools installation.'
        )

    # Construct jobs
    for pkg_path, pkg in all_packages:
        if pkg.name not in packages_to_be_built_names:
            continue

        # Ignore metapackages
        if 'metapackage' in [e.tagname for e in pkg.exports]:
            continue

        # Get actual execution deps
        build_deps = [
            p.name for _, p in get_cached_recursive_build_depends_in_workspace(
                pkg, packages_to_be_built) if p.name not in prebuild_jobs
        ]
        build_for_run_deps = [
            p.name for _, p in get_cached_recursive_run_depends_in_workspace(
                pkg, packages_to_be_built) if p.name not in prebuild_jobs
        ]

        # All jobs depend on the prebuild jobs if they're defined
        if not no_deps:
            if relaxed_constraints:
                build_for_run_deps = [
                    p.name for _, p in
                    get_recursive_build_depends_for_run_depends_in_workspace(
                        [pkg], packages_to_be_built)
                    if p.name not in prebuild_jobs
                ]
            else:
                # revert to interpreting all dependencies as build dependencies
                build_deps = list(set(build_deps + build_for_run_deps))
                build_for_run_deps = []

            for j in prebuild_jobs.values():
                build_deps.append(j.jid)

        # Determine the job parameters
        build_job_kwargs = dict(context=context,
                                package=pkg,
                                package_path=pkg_path,
                                build_dependencies=build_deps,
                                run_dependencies=build_for_run_deps,
                                force_cmake=force_cmake,
                                pre_clean=pre_clean)

        # Create the job based on the build type
        build_type = get_build_type(pkg)

        if build_type in build_job_creators:
            jobs.append(build_job_creators[build_type](**build_job_kwargs))
        else:
            wide_log(
                clr("[build] @!@{yf}Warning:@| Skipping package `{}` because it "
                    "has an unsupported package build type: `{}`").format(
                        pkg.name, build_type))

            wide_log(clr("[build] Note: Available build types:"))
            for bt_name in build_job_creators.keys():
                wide_log(clr("[build]  - `{}`".format(bt_name)))

    # Queue for communicating status
    event_queue = Queue()

    status_queue = Queue()
    monitoring_queue = Queue()

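    # Fan every event out from the shared event_queue to each consumer queue
    # (console status and InfluxDB monitoring); a None event signals shutdown.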
    class ForwardingQueue(threading.Thread):
        def __init__(self, queues):
            super(ForwardingQueue, self).__init__()
            self.keep_running = True
            self.queues = queues

        def run(self):
            while self.keep_running:
                event = event_queue.get(True)
                for queue in self.queues:
                    queue.put(event)
                if event is None:
                    break

    queue_thread = ForwardingQueue([status_queue, monitoring_queue])

    threads = [queue_thread]

    try:
        # Spin up status output thread
        status_thread = ConsoleStatusController(
            'build', ['package', 'packages'],
            jobs,
            n_jobs, [pkg.name for _, pkg in context.packages],
            [p for p in context.whitelist], [p for p in context.blacklist],
            status_queue,
            show_notifications=not no_notify,
            show_active_status=not no_status,
            show_buffered_stdout=not quiet and not interleave_output,
            show_buffered_stderr=not interleave_output,
            show_live_stdout=interleave_output,
            show_live_stderr=interleave_output,
            show_stage_events=not quiet,
            show_full_summary=(summarize_build is True),
            pre_start_time=pre_start_time,
            active_status_rate=limit_status_rate)
        threads.append(status_thread)

        if influx_db is not None:
            if not have_influx_db:
                sys.exit(
                    clr("[build] @!@{rf}Error:@| InfluxDB monitoring is not "
                        "possible, cannot import influxdb"))

            match = re.match('^(.+):(.+)@(.+):(.+)$', influx_url)
            if not match:
                sys.exit(
                    clr("[build] @!@{rf}Error:@| The value of --influx has to "
                        "be of the form username:password@host:port"))
            username, password, host, port = match.groups()

            influxdb_thread = InfluxDBStatusController(monitoring_queue,
                                                       influx_db, host, port,
                                                       username, password)

            threads.append(influxdb_thread)

        for thread in threads:
            thread.start()

        # Initialize locks
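        # (FakeLock is presumably a no-op stand-in that satisfies the lock
        # interface, so install steps only serialize when lock_install is set)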
        locks = {
            'installspace': asyncio.Lock() if lock_install else FakeLock()
        }

        # Block while running N jobs asynchronously
        try:
            all_succeeded = run_until_complete(
                execute_jobs('build',
                             jobs,
                             locks,
                             event_queue,
                             context.log_space_abs,
                             max_toplevel_jobs=n_jobs,
                             continue_on_failure=continue_on_failure,
                             continue_without_deps=False,
                             relaxed_constraints=relaxed_constraints))
        except Exception:
            all_succeeded = False
            for thread in threads:
                thread.keep_running = False
            for thread in threads:
                thread.join(1.0)
            wide_log(str(traceback.format_exc()))

        event_queue.put(None)

        for thread in threads:
            thread.join(1.0)

        # Warn user about new packages
        now_built_packages, now_unbuilt_pkgs = get_built_unbuilt_packages(
            context, workspace_packages)
        new_pkgs = [p for p in unbuilt_pkgs if p not in now_unbuilt_pkgs]
        if len(new_pkgs) > 0:
            log(
                clr("[build] @/@!Note:@| @/Workspace packages have changed, "
                    "please re-source setup files to use them.@|"))

        if all_succeeded:
            # Create isolated devel setup if necessary
            if context.isolate_devel:
                if not context.install:
                    _create_unmerged_devel_setup(context, now_unbuilt_pkgs)
                else:
                    _create_unmerged_devel_setup_for_install(context)
            return 0
        else:
            return 1

    except KeyboardInterrupt:
        wide_log("[build] Interrupted by user!")
        event_queue.put(None)

        return 130  # 128 + SIGINT: the conventional exit code for Ctrl-C interruption
Example #49
0
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Lists available binary packages and versions which are '
        'needed to satisfy rosdep keys for ROS packages in the workspace')

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)

    add_argument_output_dir(parser)
    add_argument_package_selection_args(parser)
    add_argument_skip_rosdep_keys(parser)
    parser.add_argument('--package-root',
                        nargs='+',
                        help='The path to the directory containing packages')
    args = parser.parse_args(argv)

    workspace_root = args.package_root[-1]
    os.chdir(workspace_root)

    with Scope('SUBSECTION', 'mark packages with IGNORE files'):
        all_packages = locate_packages(workspace_root)
        selected_packages = all_packages
        if args.package_selection_args:
            print('Using package selection arguments:',
                  args.package_selection_args)
            selected_packages = locate_packages(
                workspace_root, extra_args=args.package_selection_args)

            to_ignore = all_packages.keys() - selected_packages.keys()
            print('Ignoring %d packages' % len(to_ignore))
            for package in sorted(to_ignore):
                print('-', package)
                package_root = all_packages[package]
                Path(package_root, 'COLCON_IGNORE').touch()

        print('There are %d packages which meet selection criteria' %
              len(selected_packages))

    with Scope('SUBSECTION', 'Enumerating packages needed to build'):
        # find all of the underlay packages
        underlay_pkgs = {}
        all_underlay_pkg_names = set()
        for package_root in args.package_root[0:-1]:
            print("Crawling for packages in '%s'" % package_root)
            underlay_pkgs.update(find_packages(package_root))

            # Check for a colcon index for non-ROS package detection
            colcon_index = os.path.join(package_root, 'colcon-core',
                                        'packages')
            try:
                all_underlay_pkg_names.update(os.listdir(colcon_index))
            except FileNotFoundError:
                pass

        underlay_pkg_names = [pkg.name for pkg in underlay_pkgs.values()]
        print('Found the following ROS underlay packages:')
        for pkg_name in sorted(underlay_pkg_names):
            print('  -', pkg_name)

        # get direct build dependencies
        package_root = args.package_root[-1]
        print("Crawling for packages in '%s'" % package_root)
        pkgs = find_packages(package_root)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following ROS packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        # get build dependencies and map them to binary packages
        all_pkgs = set(pkgs.values()).union(underlay_pkgs.values())

        for pkg in all_pkgs:
            pkg.evaluate_conditions(os.environ)
        for pkg in all_pkgs:
            for group_depend in pkg.group_depends:
                if group_depend.evaluated_condition:
                    group_depend.extract_group_members(all_pkgs)

        dependency_keys_build = get_dependencies(
            all_pkgs, 'build', _get_build_and_recursive_run_dependencies,
            pkgs.values())

        dependency_keys_test = get_dependencies(
            all_pkgs, 'run and test', _get_test_and_recursive_run_dependencies,
            pkgs.values())

        if args.skip_rosdep_keys:
            dependency_keys_build.difference_update(args.skip_rosdep_keys)
            dependency_keys_test.difference_update(args.skip_rosdep_keys)

        # remove all non-ROS packages and packages which are present but
        # specifically ignored
        every_package_name = all_packages.keys() | all_underlay_pkg_names
        dependency_keys_build -= every_package_name
        dependency_keys_test -= every_package_name

        context = initialize_resolver(args.rosdistro_name, args.os_name,
                                      args.os_code_name)

        os_pkg_names_build = resolve_names(dependency_keys_build, **context)
        os_pkg_names_test = resolve_names(dependency_keys_test, **context)

        os_pkg_names_test -= os_pkg_names_build

    with Scope('SUBSECTION', 'Resolving packages versions using apt cache'):
        apt_cache = Cache()
        os_pkg_versions = get_binary_package_versions(
            apt_cache, os_pkg_names_build | os_pkg_names_test)

    with open(os.path.join(args.output_dir, 'install_list_build.txt'),
              'w') as out_file:
        for package in sorted(os_pkg_names_build):
            out_file.write('%s=%s\n' % (package, os_pkg_versions[package]))

    with open(os.path.join(args.output_dir, 'install_list_test.txt'),
              'w') as out_file:
        for package in sorted(os_pkg_names_test):
            out_file.write('%s=%s\n' % (package, os_pkg_versions[package]))
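
The evaluate_conditions / evaluated_condition machinery used above comes from catkin_pkg's support for package.xml format 3 (REP 149). A small self-contained illustration with a made-up manifest:

from catkin_pkg.package import parse_package_string

pkg = parse_package_string("""\
<package format="3">
  <name>demo</name>
  <version>0.0.0</version>
  <description>demo</description>
  <maintainer email="dev@example.com">dev</maintainer>
  <license>MIT</license>
  <exec_depend condition="$ROS_VERSION == 2">rclpy</exec_depend>
</package>
""")
pkg.evaluate_conditions({'ROS_VERSION': '2'})
# Only dependencies whose condition evaluated to true remain relevant
print([d.name for d in pkg.exec_depends if d.evaluated_condition])  # ['rclpy']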
Example #50
0
def main(opts):

    # Load the context
    ctx = Context.load(opts.workspace, opts.profile, load_env=False)

    if not ctx:
        sys.exit(clr("@{rf}ERROR: Could not determine workspace.@|"))

    if opts.directory:
        folders = opts.directory
    else:
        folders = [ctx.source_space_abs]

    list_entry_format = '@{pf}-@| @{cf}%s@|' if not opts.unformatted else '%s'

    opts.depends_on = set(opts.depends_on) if opts.depends_on else set()
    warnings = []
    for folder in folders:
        try:
            packages = find_packages(folder, warnings=warnings)
            ordered_packages = topological_order_packages(packages)
            if ordered_packages and ordered_packages[-1][0] is None:
                sys.exit(
                    clr("@{rf}ERROR: Circular dependency within packages:@| %s"
                        % ordered_packages[-1][1]))
            packages_by_name = {
                pkg.name: (pth, pkg)
                for pth, pkg in ordered_packages
            }

            if opts.depends_on or opts.rdepends_on:

                dependents = set()

                for pth, pkg in ordered_packages:
                    is_dep = opts.depends_on.intersection(
                        [p.name for p in pkg.build_depends + pkg.run_depends])
                    if is_dep:
                        dependents.add(pkg.name)

                for pth, pkg in [
                        packages_by_name.get(n, (None, None))
                        for n in opts.rdepends_on
                ]:
                    if pkg is None:
                        continue
                    rbd = get_recursive_build_dependents_in_workspace(
                        pkg.name, ordered_packages)
                    rrd = get_recursive_run_dependents_in_workspace(
                        pkg.name, ordered_packages)
                    dependents.update([p.name for _, p in rbd])
                    dependents.update([p.name for _, p in rrd])

                filtered_packages = [(pth, pkg)
                                     for pth, pkg in ordered_packages
                                     if pkg.name in dependents]
            elif opts.this:
                this_package = find_enclosing_package(
                    search_start_path=getcwd(),
                    ws_path=ctx.workspace,
                    warnings=[])
                if this_package is None:
                    sys.exit(1)
                if this_package in packages_by_name:
                    filtered_packages = [packages_by_name[this_package]]
                else:
                    filtered_packages = []
            else:
                filtered_packages = ordered_packages

            for pkg_pth, pkg in filtered_packages:
                print(clr(list_entry_format % pkg.name))
                if opts.rdeps:
                    build_deps = [
                        p
                        for dp, p in get_recursive_build_depends_in_workspace(
                            pkg, ordered_packages)
                    ]
                    run_deps = [
                        p for dp, p in get_recursive_run_depends_in_workspace(
                            [pkg], ordered_packages)
                    ]
                else:
                    build_deps = [
                        dep for dep in pkg.build_depends
                        if dep.evaluated_condition
                    ]
                    run_deps = [
                        dep for dep in pkg.run_depends
                        if dep.evaluated_condition
                    ]

                if opts.deps or opts.rdeps:
                    if len(build_deps) > 0:
                        print(clr('  @{yf}build_depend:@|'))
                        for dep in build_deps:
                            print(clr('  @{pf}-@| %s' % dep.name))
                    if len(run_deps) > 0:
                        print(clr('  @{yf}run_depend:@|'))
                        for dep in run_deps:
                            print(clr('  @{pf}-@| %s' % dep.name))
        except InvalidPackage as ex:
            sys.exit(
                clr("@{rf}Error:@| The file %s is an invalid package.xml file."
                    " See below for details:\n\n%s" %
                    (ex.package_path, ex.msg)))

    # Print out warnings
    if not opts.quiet:
        for warning in warnings:
            print(clr("@{yf}Warning:@| %s" % warning), file=sys.stderr)
Example #51
0
def run_build_and_test(workspace, rosdistro):
    # need to install dependencies, hack python path, import stuff
    call(['apt-get', 'update'])
    call(['ls', '/home/package'])
    apt_get_install(['python-rosdistro', 'python-catkin-pkg'])  # already there
    if os.path.abspath("/usr/lib/pymodules/python2.7") not in sys.path:
        sys.path.append("/usr/lib/pymodules/python2.7")
    from rosdistro import get_index, get_index_url, get_source_file
    from catkin_pkg import packages

    # Find packages to build
    call(['mkdir', '-p', workspace + '/src/'])
    call(['cp', '-r', '/tmp/build/src', workspace])
    print('Searching for something yummy to build...')
    pkgs = packages.find_packages(workspace + '/src')
    building = [p.name for p in pkgs.values()]
    if len(pkgs) > 0:
        print('  Found packages: %s' % ', '.join(building))
    else:
        raise BuildException('No packages to build or test.')

    # Get build + test dependencies
    print('Examining build dependencies.')
    build_depends = []
    for pkg in pkgs.values():
        for d in pkg.build_depends + pkg.buildtool_depends + pkg.test_depends:
            if d.name not in build_depends and d.name not in building:
                build_depends.append(d.name)
    print('Installing: %s' % ', '.join(build_depends))
    rosdep = RosDepResolver(rosdistro)
    apt_get_install(rosdep.to_aptlist(build_depends))
    pip_install(rosdep.to_piplist(build_depends))

    call(['cp', '-r', '/tmp/build/src', workspace])

    # Get environment
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % rosdistro)

    os.makedirs(workspace + '/build')
    if os.path.exists(workspace + '/test'):
        shutil.rmtree(workspace + '/test')
    os.makedirs(workspace + '/test')
    os.chdir(workspace + '/build')

    print('catkin_init_workspace')
    if not find('CMakeLists.txt', '../src'):
        call(['catkin_init_workspace', '../src'], ros_env)
    # Workaround for nosetest 1.3.1 issue with non-absolute paths on Trusty
    #  (https://github.com/nose-devs/nose/issues/779)
    test_dir = os.path.realpath('../test')
    call(['cmake', '../src', '-DCATKIN_TEST_RESULTS_DIR=' + test_dir], ros_env)

    print('make')
    call(['make'], ros_env)
    print('make tests')
    call(['make', 'tests'], ros_env)

    # now install the run depends
    print('Examining run dependencies.')
    run_depends = []
    for pkg in pkgs.values():
        for d in pkg.run_depends:
            if d.name not in run_depends and d.name not in building:
                run_depends.append(d.name)
    print('Installing: %s' % ', '.join(run_depends))
    apt_get_install(rosdep.to_aptlist(run_depends))
    pip_install(rosdep.to_piplist(run_depends))

    # Run the tests
    print('make run_tests')
    ros_env = get_ros_env('./devel/setup.bash')
    test_results = call(['make', 'run_tests'], ros_env, return_output=True)

    # Output test results to a file
    f = open(workspace + '/testresults', 'w')

    # Metrics from tests
    gtest_pass = list()
    gtest_fail = list()
    pnose_fail = list()
    pnose_total = 0  # can only count these?
    rostest_pass = 0
    rostest_fail = 0
    rostest_err = 0

    for line in test_results.split('\n'):
        # Is this a gtest pass?
        if line.find(GTESTPASS) > -1:
            name = line[line.find(GTESTPASS) + len(GTESTPASS) +
                        1:].split(' ')[0]
            gtest_pass.append(name)
        # How about a gtest fail?
        if line.find(GTESTFAIL) > -1:
            name = line[line.find(GTESTFAIL) + len(GTESTFAIL) +
                        1:].split(' ')[0]
            gtest_fail.append(name)
        # pnose fail?
        if line.find(PNOSEFAIL) > -1:
            name = line.split(' ')[2].rstrip()
            pnose_fail.append(name)
        # pnose failed to configure? (issue #17)
        if line.find(PNOSECONFIGFAIL) > -1:
            pnose_fail.append('python configure')
        # pnose exception?
        if line.find(PNOSEEXCEPTION) > -1:
            pnose_fail.append('python exception')
        # is this our total for python?
        if line.find('Ran ') > -1:
            pnose_total += int(line.split(' ')[1])
        # Is this a rostest pass?
        if line.find(ROSTESTPASS) > -1:
            rostest_pass += int(line[line.find(ROSTESTPASS) +
                                     len(ROSTESTPASS):].split(' ')[0])
        # Is this a rostest fail?
        if line.find(ROSTESTFAIL) > -1:
            l = line[line.find(ROSTESTFAIL) + len(ROSTESTFAIL):]
            while len(l) > 0:
                try:
                    rostest_fail += int(l.split(' ')[0])
                    break
                except ValueError:
                    # Might have formatting attached; remove 1 character at a time
                    l = l[0:-1]
        # Is this a rostest error?
        if line.find(ROSTESTERROR) > -1:
            l = line[line.find(ROSTESTERROR) + len(ROSTESTERROR):]
            while len(l) > 0:
                try:
                    rostest_err += int(l.split(' ')[0])
                    break
                except ValueError:
                    # Might have formatting attached; remove 1 character at a time
                    l = l[0:-1]

    # determine if we failed
    passed = len(gtest_pass) + pnose_total - len(pnose_fail) + rostest_pass
    failed = len(gtest_fail) + len(pnose_fail) + rostest_fail + rostest_err
    if failed > 0:
        f.write('*' * 70 + '\n')
        f.write('Failed ' + str(failed) + ' of ' + str(passed + failed) +
                ' tests.\n')
        for test in gtest_fail + pnose_fail:
            f.write('  failed: ' + test + '\n')
        f.write('See details below\n')
        f.write('*' * 70 + '\n')
    else:
        f.write('Passed ' + str(passed) + ' tests.\n')

    f.write('\n')
    f.write(test_results)
    f.close()

    # Hack so the buildbot can delete this later
    call(['chmod', '777', workspace + '/testresults'])
    cleanup()
Example #52
0
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument('--workspace-root',
                        required=True,
                        help='The root path of the workspace to compile')
    parser.add_argument('--rosdoc-lite-dir',
                        required=True,
                        help='The root path of the rosdoc_lite repository')
    parser.add_argument('--catkin-sphinx-dir',
                        required=True,
                        help='The root path of the catkin-sphinx repository')
    parser.add_argument('--rosdoc-index-dir',
                        required=True,
                        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument('--os-name',
                        required=True,
                        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument('--os-code-name',
                        required=True,
                        help="The OS code name (e.g. 'xenial')")
    parser.add_argument('--arch',
                        required=True,
                        help="The architecture (e.g. 'amd64')")
    add_argument_build_tool(parser, required=True)
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)
    index = get_index(config.rosdistro_index_url)

    condition_context = get_package_condition_context(index,
                                                      args.rosdistro_name)

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        for pkg in pkgs.values():
            pkg.evaluate_conditions(condition_context)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(args.workspace_root, 'src',
                                args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating updated manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api',
                    pkg_name, 'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping, no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)

                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(
                    rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping, data unchanged' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(args.output_dir, 'api', pkg_name,
                                   'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'update rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependendencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(
                set([d.name for d in depends
                     if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependendencies for metapackage '%s'" %
                      pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(
                    set([
                        d.name for d in depends
                        if d.name in valid_package_names
                    ]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(pkg.name, ros_dependency_names)
        rosdoc_index.write_modified_data(args.output_dir,
                                         ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(args.output_dir,
                                                      'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(
                        os.path.join(pkg_changelog_doc_path, 'changelog.html'),
                        'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes
            ]
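            # a <rosdoc> export may reference its own package root via the
            # ${prefix} placeholder; substitute the absolute package path
            # and fall back to the conventional rosdoc.yaml in that root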
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(
                prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(args.output_dir, 'rosdoc_tags',
                               '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourselves;
            # bad things happen when we do
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url':
                '../../../api/%s/html' % pkg.name,
                'location':
                'file://%s' %
                os.path.join(args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package':
                pkg.name,
            }

            # fetch generator specific output folders from rosdoc_lite
            if pkg.name in rosdoc_config_files:
                output_folders = get_generator_output_folders(
                    rosdoc_config_files[pkg.name], pkg.name)
                if 'doxygen' in output_folders:
                    data['docs_url'] += '/' + output_folders['doxygen']

            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    doc_build_files = get_doc_build_files(config, args.rosdistro_name)
    doc_build_file = doc_build_files[args.doc_build_name]

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(
                rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(
                rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = '%s/%s/api/%s/html' % \
                (doc_build_file.canonical_base_url, args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(config.jenkins_url,
                                              args.rosdistro_name,
                                              args.doc_build_name,
                                              args.repository_name,
                                              args.os_name, args.os_code_name,
                                              args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(config.jenkins_url,
                                                build_files,
                                                args.rosdistro_name,
                                                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(config.jenkins_url,
                                                    build_files,
                                                    args.rosdistro_name,
                                                    pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(args.output_dir, 'manifests', pkg.name,
                               'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope('SUBSECTION',
               'overwrite CMakeLists.txt files to only generate messages'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type'
            ]
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" % pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope('SUBSECTION', 'determine dependencies and generate Dockerfile'):
        # initialize rosdep view
        context = initialize_resolver(args.rosdistro_name, args.os_name,
                                      args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            # since catkin is not a run dependency but provides the setup files
            get_os_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_os_package_name(args.rosdistro_name, 'genmsg'),
        ]

        if str(condition_context['ROS_PYTHON_VERSION']) == '3':
            # the following are required by rosdoc_lite
            debian_pkg_names.extend([
                'python3-catkin-pkg-modules', 'python3-kitchen',
                'python3-rospkg-modules', 'python3-sphinx', 'python3-yaml'
            ])
        else:
            if str(condition_context['ROS_PYTHON_VERSION']) != '2':
                print('Unknown Python version, falling back to Python 2:',
                      condition_context)
            # the following are required by rosdoc_lite
            debian_pkg_names.extend([
                'python-catkin-pkg-modules', 'python-epydoc', 'python-kitchen',
                'python-rospkg', 'python-sphinx', 'python-yaml'
            ])

        if args.build_tool == 'colcon':
            debian_pkg_names.append('python3-colcon-ros')
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_os_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print('  -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(pkgs.values(), 'build, run and doc',
                                   _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages whose dependencies have not been released yet
            print(
                '# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in failing build'
            )
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        # generate Dockerfile
        data = {
            'os_name':
            args.os_name,
            'os_code_name':
            args.os_code_name,
            'arch':
            args.arch,
            'build_tool':
            doc_build_file.build_tool,
            'distribution_repository_urls':
            args.distribution_repository_urls,
            'distribution_repository_keys':
            get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),
            'environment_variables': [
                'ROS_PYTHON_VERSION={}'.format(
                    condition_context['ROS_PYTHON_VERSION'])
            ],
            'rosdistro_name':
            args.rosdistro_name,
            'uid':
            get_user_id(),
            'dependencies':
            debian_pkg_names,
            'dependency_versions':
            debian_pkg_versions,
            'install_lists': [],
            'canonical_base_url':
            doc_build_file.canonical_base_url,
            'ordered_pkg_tuples':
            ordered_pkg_tuples,
            'rosdoc_config_files':
            rosdoc_config_files,
        }
        create_dockerfile('doc/doc_task.Dockerfile.em', data,
                          args.dockerfile_dir)
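The crawl step at the top of this example is easy to reproduce in isolation. Below is a minimal sketch, assuming catkin_pkg is installed and 'ws/src' contains package.xml files; the condition context key is illustrative:

import os

from catkin_pkg.packages import find_packages

source_space = os.path.join('ws', 'src')
# find_packages() maps each package path, relative to the crawled root,
# to a catkin_pkg Package object
pkgs = find_packages(source_space)
# evaluate_conditions() resolves conditional dependencies declared in
# package.xml format 3, e.g. conditions on ROS_PYTHON_VERSION
for pkg in pkgs.values():
    pkg.evaluate_conditions({'ROS_PYTHON_VERSION': '3'})
for pkg_name in sorted(pkg.name for pkg in pkgs.values()):
    print('  -', pkg_name)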
Example #53
0
def build_workspace_isolated(workspace='.',
                             sourcespace=None,
                             buildspace=None,
                             develspace=None,
                             installspace=None,
                             merge=False,
                             install=False,
                             force_cmake=False,
                             colorize=True,
                             build_packages=None,
                             quiet=False,
                             cmake_args=None,
                             make_args=None,
                             catkin_make_args=None,
                             continue_from_pkg=False,
                             only_pkg_with_deps=None,
                             destdir=None,
                             use_ninja=False):
    '''
    Runs ``cmake``, ``make`` and optionally ``make install`` for all
    catkin packages in sourcespace_dir.  It creates several folders
    in the current working directory. For non-catkin packages it runs
    ``cmake``, ``make`` and ``make install`` for each, installing it to
    the devel space or install space if the ``install`` option is specified.

    :param workspace: path to the current workspace, ``str``
    :param sourcespace: workspace folder containing catkin packages, ``str``
    :param buildspace: path to build space location, ``str``
    :param develspace: path to devel space location, ``str``
    :param installspace: path to install space (CMAKE_INSTALL_PREFIX), ``str``
    :param merge: if True, build each catkin package into the same
        devel space (not affecting plain cmake packages), ``bool``
    :param install: if True, install all packages to the install space,
        ``bool``
    :param force_cmake: (optional), if True calls cmake explicitly for each
        package, ``bool``
    :param colorize: if True, colorize cmake output and other messages,
        ``bool``
    :param build_packages: specific packages to build (all parent packages
        in the topological order must have been built before), ``str``
    :param quiet: if True, hides some build output, ``bool``
    :param cmake_args: additional arguments for cmake, ``[str]``
    :param make_args: additional arguments for make, ``[str]``
    :param catkin_make_args: additional arguments for make but only for catkin
        packages, ``[str]``
    :param continue_from_pkg: indicates whether or not the build should
        continue from the first reached package in ``build_packages``
        onward, skipping the packages before it, ``bool``
    :param only_pkg_with_deps: only consider the specific packages and their
        recursive dependencies and ignore all other packages in the workspace,
        ``[str]``
    :param destdir: define DESTDIR for the cmake invocation, ``str``
    :param use_ninja: if True, use ninja instead of make, ``bool``
    '''
    if not colorize:
        disable_ANSI_colors()

    # Check workspace existence
    if not os.path.exists(workspace):
        sys.exit("Workspace path '{0}' does not exist.".format(workspace))
    workspace = os.path.abspath(workspace)

    # Check source space existence
    if sourcespace is None:
        sourcespace = os.path.join(workspace, 'src')
    if not os.path.exists(sourcespace):
        sys.exit('Could not find source space: {0}'.format(sourcespace))
    print('Base path: ' + str(workspace))
    print('Source space: ' + str(sourcespace))

    # Check build space
    if buildspace is None:
        buildspace = os.path.join(workspace, 'build_isolated')
    if not os.path.exists(buildspace):
        os.mkdir(buildspace)
    print('Build space: ' + str(buildspace))

    # Check devel space
    if develspace is None:
        develspace = os.path.join(workspace, 'devel_isolated')
    print('Devel space: ' + str(develspace))

    # Check install space
    if installspace is None:
        installspace = os.path.join(workspace, 'install_isolated')
    print('Install space: ' + str(installspace))

    if cmake_args:
        print("Additional CMake Arguments: " + " ".join(cmake_args))
    else:
        cmake_args = []

    if not any(arg.startswith('-G') for arg in cmake_args):
        if not use_ninja:
            cmake_args += ['-G', 'Unix Makefiles']
        else:
            cmake_args += ['-G', 'Ninja']
    elif use_ninja:
        print(
            colorize_line(
                "Error: either specify a generator using '-G...' or '--use-ninja' but not both"
            ))
        sys.exit(1)

    if make_args:
        print("Additional make Arguments: " + " ".join(make_args))
    else:
        make_args = []

    if catkin_make_args:
        print("Additional make Arguments for catkin packages: " +
              " ".join(catkin_make_args))
    else:
        catkin_make_args = []

    # Find packages
    packages = find_packages(sourcespace, exclude_subspaces=True)
    if not packages:
        print(fmt("@{yf}No packages found in source space: %s@|" %
                  sourcespace))

    # whitelist packages and their dependencies in workspace
    if only_pkg_with_deps:
        package_names = [p.name for p in packages.values()]
        unknown_packages = [
            name for name in only_pkg_with_deps if name not in package_names
        ]
        if unknown_packages:
            sys.exit('Packages not found in the workspace: %s' %
                     ', '.join(unknown_packages))

        whitelist_pkg_names = get_package_names_with_recursive_dependencies(
            packages, only_pkg_with_deps)
        print('Whitelisted packages: %s' %
              ', '.join(sorted(whitelist_pkg_names)))
        packages = {
            path: p
            for path, p in packages.items() if p.name in whitelist_pkg_names
        }

    # verify that specified package exists in workspace
    if build_packages:
        packages_by_name = {p.name: path for path, p in packages.items()}
        unknown_packages = [
            p for p in build_packages if p not in packages_by_name
        ]
        if unknown_packages:
            sys.exit('Packages not found in the workspace: %s' %
                     ', '.join(unknown_packages))

    # Report topological ordering
    ordered_packages = topological_order_packages(packages)
    unknown_build_types = []
    msg = []
    msg.append('@{pf}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' +
               ('~' * len(str(len(ordered_packages)))))
    msg.append('@{pf}~~@|  traversing %d packages in topological order:' %
               len(ordered_packages))
    for path, package in ordered_packages:
        if path is None:
            print(
                fmt('@{rf}Error: Circular dependency in subset of packages: @!%s@|'
                    % package))
            sys.exit('Cannot build workspace with circular dependency')

        export_tags = [e.tagname for e in package.exports]
        if 'build_type' in export_tags:
            build_type_tag = [
                e.content for e in package.exports if e.tagname == 'build_type'
            ][0]
        else:
            build_type_tag = 'catkin'
        if build_type_tag == 'catkin':
            msg.append('@{pf}~~@|  - @!@{bf}' + package.name + '@|')
        elif build_type_tag == 'cmake':
            msg.append('@{pf}~~@|  - @!@{bf}' + package.name + '@|' +
                       ' (@!@{cf}plain cmake@|)')
        else:
            msg.append('@{pf}~~@|  - @!@{bf}' + package.name + '@|' +
                       ' (@{rf}unknown@|)')
            unknown_build_types.append(package)
    msg.append('@{pf}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' +
               ('~' * len(str(len(ordered_packages)))))
    for index in range(len(msg)):
        msg[index] = fmt(msg[index])
    print('\n'.join(msg))

    # Error if there are packages with unknown build_types
    if unknown_build_types:
        print(colorize_line('Error: Packages with unknown build types exist'))
        sys.exit('Cannot build workspace with packages of unknown build_type')

    # Check to see if the workspace has changed
    cmake_args_with_spaces = list(cmake_args)
    if develspace:
        cmake_args_with_spaces.append('-DCATKIN_DEVEL_PREFIX=' + develspace)
    if installspace:
        cmake_args_with_spaces.append('-DCMAKE_INSTALL_PREFIX=' + installspace)
    if not force_cmake and cmake_input_changed(
            packages,
            buildspace,
            cmake_args=cmake_args_with_spaces,
            filename='catkin_make_isolated'):
        print(
            'The packages or cmake arguments have changed, forcing cmake invocation'
        )
        force_cmake = True

    ensure_workspace_marker(workspace)

    # Build packages
    pkg_develspace = None
    last_env = None
    for index, path_package in enumerate(ordered_packages):
        path, package = path_package
        if merge:
            pkg_develspace = develspace
        else:
            pkg_develspace = os.path.join(develspace, package.name)
        if not build_packages or package.name in build_packages:
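            # with continue_from_pkg, reaching one of the requested packages
            # clears the filter so it and every later package get built too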
            if continue_from_pkg and build_packages and package.name in build_packages:
                build_packages = None
            try:
                print()
                last_env = build_package(path,
                                         package,
                                         workspace,
                                         buildspace,
                                         pkg_develspace,
                                         installspace,
                                         install,
                                         force_cmake,
                                         quiet,
                                         last_env,
                                         cmake_args,
                                         make_args,
                                         catkin_make_args,
                                         destdir=destdir,
                                         use_ninja=use_ninja,
                                         number=index + 1,
                                         of=len(ordered_packages))
            except subprocess.CalledProcessError as e:
                _print_build_error(package, e)
                # Let users know how to reproduce
                # First add the cd to the build folder of the package
                cmd = 'cd ' + quote(os.path.join(buildspace,
                                                 package.name)) + ' && '
                # Then reproduce the command called
                if isinstance(e.cmd, list):
                    # quote arguments to allow copy-n-paste of command
                    cmd += ' '.join([quote(arg) for arg in e.cmd])
                else:
                    cmd += e.cmd
                print(fmt("\n@{rf}Reproduce this error by running:"))
                print(fmt("@{gf}@!==> @|") + cmd + "\n")
                sys.exit('Command failed, exiting.')
            except Exception as e:
                print("Unhandled exception of type '{0}':".format(
                    type(e).__name__))
                import traceback
                traceback.print_exc()
                _print_build_error(package, e)
                sys.exit('Command failed, exiting.')
        else:
            cprint("Skipping package: '@!@{bf}" + package.name + "@|'")
            last_env = get_new_env(package, pkg_develspace, installspace,
                                   install, last_env, destdir)

    # Provide a top level devel space environment setup script
    if not os.path.exists(develspace):
        os.makedirs(develspace)
    if not build_packages:
        generated_env_sh = os.path.join(develspace, 'env.sh')
        generated_setup_util_py = os.path.join(develspace, '_setup_util.py')
        if not merge and pkg_develspace:
            # generate env.sh and setup.sh|bash|zsh which relay to last devel space
            with open(generated_env_sh, 'w') as f:
                f.write("""\
#!/usr/bin/env sh
# generated from catkin.builder module

{0} "$@"
""".format(os.path.join(pkg_develspace, 'env.sh')))
            os.chmod(generated_env_sh,
                     stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR)

            for shell in ['sh', 'bash', 'zsh']:
                with open(os.path.join(develspace, 'setup.%s' % shell),
                          'w') as f:
                    f.write("""\
#!/usr/bin/env {1}
# generated from catkin.builder module

. "{0}/setup.{1}"
""".format(pkg_develspace, shell))

            # remove _setup_util.py file which might have been generated for an empty devel space before
            if os.path.exists(generated_setup_util_py):
                os.remove(generated_setup_util_py)

        elif not pkg_develspace:
            # generate env.sh and setup.sh|bash|zsh for an empty devel space
            if 'CMAKE_PREFIX_PATH' in os.environ:
                variables = {
                    'CATKIN_GLOBAL_BIN_DESTINATION':
                    'bin',
                    'CATKIN_LIB_ENVIRONMENT_PATHS':
                    "'lib'",
                    'CATKIN_PKGCONFIG_ENVIRONMENT_PATHS':
                    "os.path.join('lib', 'pkgconfig')",
                    'CMAKE_PREFIX_PATH_AS_IS':
                    ';'.join(os.environ['CMAKE_PREFIX_PATH'].split(
                        os.pathsep)),
                    'PYTHON_EXECUTABLE':
                    sys.executable,
                    'PYTHON_INSTALL_DIR':
                    get_python_install_dir(),
                }
                with open(generated_setup_util_py, 'w') as f:
                    f.write(
                        configure_file(
                            os.path.join(get_cmake_path(), 'templates',
                                         '_setup_util.py.in'), variables))
                os.chmod(generated_setup_util_py,
                         stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR)
            else:
                sys.exit(
                    "Unable to process CMAKE_PREFIX_PATH from environment. Cannot generate environment files."
                )

            variables = {'SETUP_FILENAME': 'setup'}
            with open(generated_env_sh, 'w') as f:
                f.write(
                    configure_file(
                        os.path.join(get_cmake_path(), 'templates',
                                     'env.sh.in'), variables))
            os.chmod(generated_env_sh,
                     stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR)

            variables = {'SETUP_DIR': develspace}
            for shell in ['sh', 'bash', 'zsh']:
                with open(os.path.join(develspace, 'setup.%s' % shell),
                          'w') as f:
                    f.write(
                        configure_file(
                            os.path.join(get_cmake_path(), 'templates',
                                         'setup.%s.in' % shell), variables))
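A minimal invocation sketch for the function above, assuming the 'catkin' Python package is importable and a workspace exists at ~/catkin_ws; all remaining parameters keep their defaults:

import os

from catkin.builder import build_workspace_isolated

build_workspace_isolated(
    workspace=os.path.expanduser('~/catkin_ws'),
    merge=False,        # one devel space per package
    install=False,      # stop after populating the devel space(s)
    force_cmake=False,  # reuse existing CMake caches where possible
)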
def main(argv=sys.argv[1:]):
    global templates
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease overlay' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_overlay_arguments(parser)
    parser.add_argument(
        '--underlay-packages',
        nargs='+',
        help='Names of packages on which the overlay builds '
        '(by default package names come from packages found in '
        "'ws/src')")
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        '--json',
        action='store_true',
        help='Output overlay information as JSON instead of a shell script')
    group.add_argument('--vcstool',
                       action='store_true',
                       help='Output overlay information as vcstool repos file')

    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for overlay workspace
    underlay_package_names = args.underlay_packages
    if underlay_package_names is None:
        packages = find_packages('ws/src')
        underlay_package_names = [pkg.name for pkg in packages.values()]
    print('Underlay workspace contains %d packages:%s' %
          (len(underlay_package_names), ''.join([
              '\n- %s' % pkg_name
              for pkg_name in sorted(underlay_package_names)
          ])),
          file=sys.stderr)

    overlay_package_names = get_overlay_package_names(
        args.pkg,
        args.exclude_pkg,
        args.level,
        underlay_package_names,
        dist_cache.release_package_xmls,
        output=True)
    print('Overlay workspace will contain %d packages:%s' %
          (len(overlay_package_names), ''.join([
              '\n- %s' % pkg_name for pkg_name in sorted(overlay_package_names)
          ])),
          file=sys.stderr)

    repositories = {}
    for pkg_name in overlay_package_names:
        repositories[pkg_name] = \
            get_repository_specification_for_released_package(
                dist_file, pkg_name)
    scms = [(repositories[k], 'ws_overlay/src/%s' % k)
            for k in sorted(repositories.keys())]

    if args.json:
        print(json.dumps([vars(r) for r, p in scms], sort_keys=True, indent=2))
    elif args.vcstool:
        print('repositories:')
        for r, p in scms:
            print('  %s:' % p)
            print('    type: ' + r.type)
            print('    url: ' + r.url)
            print('    version: ' + r.version)
    else:
        value = expand_template('prerelease/prerelease_overlay_script.sh.em',
                                {'scms': scms},
                                options={BANGPATH_OPT: False})
        print(value)
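The fallback above relies on the shape of find_packages()'s return value: a dict mapping each package path, relative to the crawled root, to a Package object. A short sketch, assuming catkin_pkg is installed, that inverts it into a name-to-path lookup:

from catkin_pkg.packages import find_packages

packages = find_packages('ws/src')  # {relative path: Package}
path_by_name = {pkg.name: path for path, pkg in packages.items()}
print(sorted(path_by_name))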
Example #55
0
def fetch(packages, workspace, context, default_urls, use_preprint,
          num_threads, pull_after_fetch):
    """Fetch dependencies of a package.

    Args:
        packages (list): A list of packages provided by the user.
        workspace (str): Path to a workspace (without src/ in the end).
        context (Context): Current context. Needed to find current packages.
        default_urls (set(str)): A set of URLs where we search for packages.
        use_preprint (bool): Show status messages while cloning.
        num_threads (int): Number of threads used to fetch in parallel.
        pull_after_fetch (bool): When True, pull updates for the workspace
            packages once fetching has finished.

    Returns:
        int: Return code. 0 if success. Git error code otherwise.
    """
    fetch_all = False
    if not packages:
        fetch_all = True

    ws_path = path.join(workspace, 'src')
    ignore_pkgs = Tools.list_all_ros_pkgs()

    already_fetched = set()
    packages = set(packages)

    global_error_code = Downloader.NO_ERROR

    # loop until there are no new dependencies left to download
    while True:
        log.info(" Searching for dependencies.")
        deps_to_fetch = {}
        workspace_packages = find_packages(context.source_space_abs,
                                           exclude_subspaces=True,
                                           warnings=[])
        available_pkgs = [pkg.name for pkg in workspace_packages.values()]
        initial_cloned_pkgs = len(already_fetched)
        for package_path, package in workspace_packages.items():
            if package.name in already_fetched:
                continue
            if fetch_all or (package.name in packages):
                parser = Parser(default_urls=default_urls,
                                pkg_name=package.name)
                package_folder = path.join(ws_path, package_path)
                deps_to_fetch = Tools.update_deps_dict(
                    deps_to_fetch, parser.get_dependencies(package_folder))
                if deps_to_fetch is None:
                    sys.exit(1)
                already_fetched.add(package.name)
                for new_dep_name in deps_to_fetch.keys():
                    # make sure we don't stop until we analyzed all
                    # dependencies as we have just added these repositories
                    # we must analyze their dependencies too even if we wanted
                    # to download dependencies for one project only.
                    packages.add(new_dep_name)
                # Update default url to use the new version of it further on.
                default_urls.update(parser.default_urls)
        try:
            downloader = Downloader(ws_path=ws_path,
                                    available_pkgs=available_pkgs,
                                    ignore_pkgs=ignore_pkgs,
                                    use_preprint=use_preprint,
                                    num_threads=num_threads)
        except ValueError as e:
            log.critical(" Encountered error. Abort.")
            log.critical(" Error message: %s", e.message)
            return 1
        error_code = downloader.download_dependencies(deps_to_fetch)
        if len(already_fetched) == initial_cloned_pkgs:
            log.info(" No new dependencies. Done.")
            break
        if error_code != 0:
            global_error_code = error_code
        log.info(" New packages available. Process their dependencies now.")
    if pull_after_fetch:
        updater = Updater(ws_path=ws_path,
                          packages=workspace_packages,
                          use_preprint=use_preprint,
                          num_threads=num_threads)
        updater.update_packages(packages)
    return global_error_code
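The loop above is a fixed-point iteration: each pass crawls the source space again, analyzes any packages that appeared since the previous pass, and clones their dependencies, stopping once a full pass finds nothing new. A stripped-down sketch of the pattern, where crawl_dependencies() and clone() are placeholders for the Parser and Downloader used in the real code:

from catkin_pkg.packages import find_packages

def fetch_until_stable(src_path, crawl_dependencies, clone):
    analyzed = set()
    while True:
        new_dep_names = []
        for _, pkg in find_packages(src_path).items():
            if pkg.name in analyzed:
                continue
            analyzed.add(pkg.name)
            new_dep_names.extend(crawl_dependencies(pkg))
        if not new_dep_names:
            return analyzed  # a full pass added nothing new
        # clone() is assumed to skip packages already in the workspace;
        # the next pass will pick up whatever it cloned
        clone(new_dep_names)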
def document_workspace(
    context,
    packages=None,
    start_with=None,
    no_deps=False,
    n_jobs=None,
    force_color=False,
    quiet=False,
    interleave_output=False,
    no_status=False,
    limit_status_rate=10.0,
    no_notify=False,
    continue_on_failure=False,
    summarize_build=None
):
    pre_start_time = time.time()

    # Get all the packages in the context source space
    # Suppress warnings since this is a utility function
    workspace_packages = find_packages(context.source_space_abs, exclude_subspaces=True, warnings=[])

    # If no_deps is given, ensure packages to build are provided
    if no_deps and packages is None:
        log(fmt("[document] @!@{rf}Error:@| With no_deps, you must specify packages to build."))
        return

    # Find list of packages in the workspace
    packages_to_be_documented, packages_to_be_documented_deps, all_packages = determine_packages_to_be_built(
        packages, context, workspace_packages)

    if not no_deps:
        # Extend packages to be documented to include their deps
        packages_to_be_documented.extend(packages_to_be_documented_deps)

    # Also re-sort
    try:
        packages_to_be_documented = topological_order_packages(dict(packages_to_be_documented))
    except AttributeError:
        log(fmt("[document] @!@{rf}Error:@| The workspace packages have a circular "
                "dependency, and cannot be documented. Please run `catkin list "
                "--deps` to determine the problematic package(s)."))
        return

    # Check the number of packages to be documented
    if len(packages_to_be_documented) == 0:
        log(fmt('[document] No packages to be documented.'))

    # Assert start_with package is in the workspace
    verify_start_with_option(
        start_with,
        packages,
        all_packages,
        packages_to_be_documented + packages_to_be_documented_deps)

    # Remove packages before start_with
    if start_with is not None:
        for path, pkg in list(packages_to_be_documented):
            if pkg.name != start_with:
                wide_log(fmt("@!@{pf}Skipping@|  @{gf}---@| @{cf}{}@|").format(pkg.name))
                packages_to_be_documented.pop(0)
            else:
                break

    # Get the names of all packages to be documented
    packages_to_be_documented_names = [p.name for _, p in packages_to_be_documented]
    packages_to_be_documented_deps_names = [p.name for _, p in packages_to_be_documented_deps]

    jobs = []

    # Construct jobs
    for pkg_path, pkg in all_packages:
        if pkg.name not in packages_to_be_documented_names:
            continue

        # Get actual execution deps
        deps = [
            p.name for _, p
            in get_cached_recursive_build_depends_in_workspace(pkg, packages_to_be_documented)
        ]

        jobs.append(create_package_job(context, pkg, pkg_path, deps))

    # Special job for post-job summary sphinx step.
    jobs.append(create_summary_job(context, package_names=packages_to_be_documented_names))

    # Queue for communicating status
    event_queue = Queue()

    try:
        # Spin up status output thread
        status_thread = ConsoleStatusController(
            'document',
            ['package', 'packages'],
            jobs,
            n_jobs,
            [pkg.name for _, pkg in context.packages],
            [p for p in context.whitelist],
            [p for p in context.blacklist],
            event_queue,
            show_notifications=not no_notify,
            show_active_status=not no_status,
            show_buffered_stdout=not quiet and not interleave_output,
            show_buffered_stderr=not interleave_output,
            show_live_stdout=interleave_output,
            show_live_stderr=interleave_output,
            show_stage_events=not quiet,
            show_full_summary=(summarize_build is True),
            pre_start_time=pre_start_time,
            active_status_rate=limit_status_rate)
        status_thread.start()

        # Block while running N jobs asynchronously
        try:
            all_succeeded = run_until_complete(execute_jobs(
                'document',
                jobs,
                None,
                event_queue,
                context.log_space_abs,
                max_toplevel_jobs=n_jobs,
                continue_on_failure=continue_on_failure,
                continue_without_deps=False))

        except Exception:
            status_thread.keep_running = False
            all_succeeded = False
            status_thread.join(1.0)
            wide_log(str(traceback.format_exc()))
        status_thread.join(1.0)

        return 0 if all_succeeded else 1

    except KeyboardInterrupt:
        wide_log("[document] Interrupted by user!")
        event_queue.put(None)

        return 130  # 128 + SIGINT
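The re-sort above relies on topological_order_packages(), which returns (path, package) tuples ordered so that every package appears after its dependencies, and which reports a circular dependency as an entry whose path is None (the same convention checked in the build_workspace_isolated example further up). A minimal sketch, assuming catkin_pkg is installed:

from catkin_pkg.packages import find_packages
from catkin_pkg.topological_order import topological_order_packages

for path, pkg in topological_order_packages(find_packages('ws/src')):
    if path is None:  # pkg holds the names in the dependency cycle
        raise SystemExit('circular dependency: %s' % pkg)
    print(path, pkg.name)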
Example #57
0
def _main():
    parser = argparse.ArgumentParser(
        description=
        'Runs the commands to bump the version number, commit the modified %s files and create a tag in the repository.'
        % PACKAGE_MANIFEST_FILENAME)
    parser.add_argument(
        '--bump',
        choices=('major', 'minor', 'patch'),
        default='patch',
        help='Which part of the version number to bump? (default: %(default)s)'
    )
    parser.add_argument('--version', help='Specify a specific version to use')
    parser.add_argument('--no-color',
                        action='store_true',
                        default=False,
                        help='Disables colored output')
    parser.add_argument('--no-push',
                        action='store_true',
                        default=False,
                        help='Disables pushing to remote repository')
    parser.add_argument('-t',
                        '--tag-prefix',
                        default='',
                        help='Add this prefix to the created release tag')
    parser.add_argument(
        '-y',
        '--non-interactive',
        action='store_true',
        default=False,
        help="Run without user interaction, confirming all questions with 'yes'"
    )
    args = parser.parse_args()

    if args.version and not re.match(
            r'^(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)$',
            args.version):
        parser.error(
            'The passed version must follow the conventions (positive integers x.y.z with no leading zeros)'
        )

    if args.tag_prefix and ' ' in args.tag_prefix:
        parser.error('The tag prefix must not contain spaces')

    # force --no-color if stdout is non-interactive
    if not sys.stdout.isatty():
        args.no_color = True
    # disable colors if asked
    if args.no_color:
        disable_ANSI_colors()

    base_path = '.'

    print(fmt('@{gf}Prepare the source repository for a release.'))

    # determine repository type
    vcs_type = get_repository_type(base_path)
    if vcs_type is None:
        raise RuntimeError(
            fmt("@{rf}Could not determine repository type of @{boldon}'%s'@{boldoff}"
                % base_path))
    print(fmt('Repository type: @{boldon}%s@{boldoff}' % vcs_type))

    # find packages
    try:
        packages = find_packages(base_path)
    except InvalidPackage as e:
        raise RuntimeError(
            fmt("@{rf}Invalid package at path @{boldon}'%s'@{boldoff}:\n  %s" %
                (os.path.abspath(base_path), str(e))))
    if not packages:
        raise RuntimeError(fmt('@{rf}No packages found'))
    print('Found packages: %s' % ', '.join([
        fmt('@{bf}@{boldon}%s@{boldoff}@{reset}' % p.name)
        for p in packages.values()
    ]))

    # complain about packages with non-catkin build_type as they might require additional steps before being released
    # complain about packages with upper case characters since they won't be releasable with bloom
    non_catkin_pkg_names = []
    invalid_pkg_names = []
    for package in packages.values():
        build_types = [
            export.content for export in package.exports
            if export.tagname == 'build_type'
        ]
        build_type = build_types[0] if build_types else 'catkin'
        if build_type != 'catkin':
            non_catkin_pkg_names.append(package.name)
        if package.name != package.name.lower():
            invalid_pkg_names.append(package.name)
    if non_catkin_pkg_names:
        print(fmt(
            "@{yf}Warning: the following package are not of build_type catkin and may require manual steps to release': %s"
            % ', '.join([('@{boldon}%s@{boldoff}' % p)
                         for p in sorted(non_catkin_pkg_names)])),
              file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway',
                                                            default=False):
            raise RuntimeError(
                fmt("@{rf}Aborted release, verify that non-catkin packages are ready to be released or release manually."
                    ))
    if invalid_pkg_names:
        print(fmt(
            "@{yf}Warning: the following package names contain upper case characters which violate both ROS and Debian naming conventions': %s"
            % ', '.join([('@{boldon}%s@{boldoff}' % p)
                         for p in sorted(invalid_pkg_names)])),
              file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway',
                                                            default=False):
            raise RuntimeError(
                fmt("@{rf}Aborted release, fix the names of the packages."))

    local_modifications = []
    for pkg_path, package in packages.items():
        # verify that the package.xml files don't have modifications pending
        package_xml_path = os.path.join(pkg_path, PACKAGE_MANIFEST_FILENAME)
        if has_changes(base_path, package_xml_path, vcs_type):
            local_modifications.append(package_xml_path)
        # verify that metapackages are valid
        if package.is_metapackage():
            try:
                metapackage.validate_metapackage(pkg_path, package)
            except metapackage.InvalidMetapackage as e:
                raise RuntimeError(
                    fmt("@{rf}Invalid metapackage at path '@{boldon}%s@{boldoff}':\n  %s\n\nSee requirements for metapackages: %s"
                        % (os.path.abspath(pkg_path), str(e),
                           metapackage.DEFINITION_URL)))

    # fetch current version and verify that all packages have same version number
    old_version = verify_equal_package_versions(packages.values())
    if args.version:
        new_version = args.version
    else:
        new_version = bump_version(old_version, args.bump)
    tag_name = args.tag_prefix + new_version

    if (not args.non_interactive and not prompt_continue(fmt(
            "Prepare release of version '@{bf}@{boldon}%s@{boldoff}@{reset}'%s"
            %
        (new_version, " (tagged as '@{bf}@{boldon}%s@{boldoff}@{reset}')" %
         tag_name if args.tag_prefix else '')),
                                                         default=True)):
        raise RuntimeError(
            fmt("@{rf}Aborted release, use option '--bump' to release a different version and/or '--tag-prefix' to add a prefix to the tag name."
                ))

    # check for changelog entries
    missing_changelogs = []
    missing_changelogs_but_forthcoming = {}
    for pkg_path, package in packages.items():
        changelog_path = os.path.join(pkg_path, CHANGELOG_FILENAME)
        if not os.path.exists(changelog_path):
            missing_changelogs.append(package.name)
            continue
        # verify that the changelog files don't have modifications pending
        if has_changes(base_path, changelog_path, vcs_type):
            local_modifications.append(changelog_path)
        changelog = get_changelog_from_path(changelog_path, package.name)
        try:
            changelog.get_content_of_version(new_version)
        except KeyError:
            # check that forthcoming section exists
            forthcoming_label = get_forthcoming_label(changelog.rst)
            if forthcoming_label:
                missing_changelogs_but_forthcoming[package.name] = (
                    changelog_path, changelog, forthcoming_label)
            else:
                missing_changelogs.append(package.name)

    if local_modifications:
        raise RuntimeError(
            fmt('@{rf}The following files have modifications, please commit/revert them first:'
                + ''.join([('\n- @{boldon}%s@{boldoff}' % path)
                           for path in local_modifications])))

    if missing_changelogs:
        print(fmt(
            "@{yf}Warning: the following packages do not have a changelog file or entry for version '@{boldon}%s@{boldoff}': %s"
            % (new_version, ', '.join([('@{boldon}%s@{boldoff}' % p)
                                       for p in sorted(missing_changelogs)]))),
              file=sys.stderr)
        if not args.non_interactive and not prompt_continue(
                'Continue without changelogs', default=False):
            raise RuntimeError(
                fmt("@{rf}Aborted release, populate the changelog with '@{boldon}catkin_generate_changelog@{boldoff}' and review / clean up the content."
                    ))

    # verify that repository is pushable (if the vcs supports dry run of push)
    if not args.no_push:
        try_repo_push(base_path, vcs_type)

    # check for staged changes and modified and untracked files
    print(
        fmt('@{gf}Checking if working copy is clean (no staged changes, no modified files, no untracked files)...'
            ))
    is_clean = check_clean_working_copy(base_path, vcs_type)
    if not is_clean:
        print(fmt(
            '@{yf}Warning: the working copy contains other changes. Consider reverting/committing/stashing them before preparing a release.'
        ),
              file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway',
                                                            default=False):
            raise RuntimeError(
                fmt("@{rf}Aborted release, clean the working copy before trying again."
                    ))

    # for svn verify that we know how to tag that repository
    if vcs_type in ['svn']:
        tag_svn_cmd = tag_repository(base_path,
                                     vcs_type,
                                     tag_name,
                                     args.tag_prefix != '',
                                     dry_run=True)

    # tag forthcoming changelog sections
    update_changelog_sections(missing_changelogs_but_forthcoming, new_version)
    print(
        fmt("@{gf}Rename the forthcoming section@{reset} of the following packages to version '@{bf}@{boldon}%s@{boldoff}@{reset}': %s"
            % (new_version, ', '.join([
                ('@{boldon}%s@{boldoff}' % p)
                for p in sorted(missing_changelogs_but_forthcoming.keys())
            ]))))

    # bump version number
    update_versions(packages.keys(), new_version)
    print(
        fmt("@{gf}Bump version@{reset} of all packages from '@{bf}%s@{reset}' to '@{bf}@{boldon}%s@{boldoff}@{reset}'"
            % (old_version, new_version)))

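    # pushed semantics: None = push not attempted yet, False = the user
    # declined the push, True = changes and tag were pushed to the remote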
    pushed = None
    if vcs_type in ['svn']:
        # for svn everything affects the remote repository immediately
        commands = []
        commands.append(
            commit_files(base_path,
                         vcs_type,
                         packages,
                         missing_changelogs_but_forthcoming,
                         tag_name,
                         dry_run=True))
        commands.append(tag_svn_cmd)
        if not args.no_push:
            print(
                fmt('@{gf}The following commands will be executed to commit the changes and tag the new version:'
                    ))
        else:
            print(
                fmt('@{gf}You can use the following commands to manually commit the changes and tag the new version:'
                    ))
        for cmd in commands:
            print(fmt('  @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd)))

        if not args.no_push:
            if not args.non_interactive:
                # confirm before modifying repository
                if not prompt_continue(
                        'Execute commands which will modify the repository',
                        default=True):
                    pushed = False
            if pushed is None:
                commit_files(base_path, vcs_type, packages,
                             missing_changelogs_but_forthcoming, tag_name)
                tag_repository(base_path, vcs_type, tag_name,
                               args.tag_prefix != '')
                pushed = True

    else:
        # for other vcs types the changes are first done locally
        print(fmt('@{gf}Committing the package.xml files...'))
        commit_files(base_path, vcs_type, packages,
                     missing_changelogs_but_forthcoming, tag_name)

        print(fmt("@{gf}Creating tag '@{boldon}%s@{boldoff}'..." % (tag_name)))
        tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '')

        try:
            commands = push_changes(base_path, vcs_type, dry_run=True)
        except RuntimeError:
            print(
                fmt('@{yf}Warning: could not determine commands to push the changes and tag to the remote repository. Do you have a remote configured for the current branch?'
                    ))
        else:
            if not args.no_push:
                print(
                    fmt('@{gf}The following commands will be executed to push the changes and tag to the remote repository:'
                        ))
            else:
                print(
                    fmt('@{gf}You can use the following commands to manually push the changes to the remote repository:'
                        ))
            for cmd in commands:
                print(fmt('  @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd)))

            if not args.no_push:
                if not args.non_interactive:
                    # confirm commands to push to remote repository
                    if not prompt_continue(
                            'Execute commands to push the local commits and tags to the remote repository',
                            default=True):
                        pushed = False
                if pushed is None:
                    push_changes(base_path, vcs_type)
                    pushed = True

    if pushed:
        print(
            fmt("@{gf}The source repository has been released successfully. The next step will be '@{boldon}bloom-release@{boldoff}'."
                ))
    else:
        msg = 'The release of the source repository has been prepared successfully but the changes have not been pushed yet. ' \
            "After pushing the changes manually the next step will be '@{boldon}bloom-release@{boldoff}'."
        if args.no_push or pushed is False:
            print(fmt('@{yf}%s' % msg))
        else:
            raise RuntimeError(fmt('@{rf}%s' % msg))
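
The subtle step above is update_changelog_sections: it rewrites the forthcoming heading of each CHANGELOG.rst to the new version and the current date. A minimal, self-contained sketch of that renaming (plain string manipulation under assumed RST conventions; the real implementation parses the changelog through catkin_pkg, and rename_forthcoming_section is an illustrative name):

import re
from datetime import date

def rename_forthcoming_section(rst_text, new_version, forthcoming_label='Forthcoming'):
    # Replace the forthcoming section title and its underline with
    # "<version> (<date>)" so the changelog matches the released version.
    new_title = '%s (%s)' % (new_version, date.today().isoformat())
    pattern = re.compile(r'^%s\n[-~^=]+$' % re.escape(forthcoming_label),
                         re.MULTILINE)
    return pattern.sub(new_title + '\n' + '-' * len(new_title), rst_text)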
Example #58
def main(opts):
    actions = ['all', 'build', 'devel', 'install', 'cmake_cache', 'orphans', 'setup_files']
    if not any([v for (k, v) in vars(opts).items() if k in actions]):
        print("[clean] No actions performed. See `catkin clean -h` for usage.")
        return 0

    needs_force = False

    # Load the context
    ctx = Context.load(opts.workspace, opts.profile, opts, strict=True, load_env=False)

    if not ctx:
        if not opts.workspace:
            print(
                "catkin clean: error: The current or desired workspace could not be "
                "determined. Please run `catkin clean` from within a catkin "
                "workspace or specify the workspace explicitly with the "
                "`--workspace` option.")
        else:
            print(
                "catkin clean: error: Could not clean workspace \"%s\" because it "
                "either does not exist or it has no catkin_tools metadata." %
                opts.workspace)
        return 1

    # Remove the requested spaces
    if opts.all:
        opts.build = opts.devel = opts.install = True

    if opts.build:
        if os.path.exists(ctx.build_space_abs):
            print("[clean] Removing buildspace: %s" % ctx.build_space_abs)
            shutil.rmtree(ctx.build_space_abs)
    else:
        # Orphan removal
        if opts.orphans:
            if os.path.exists(ctx.build_space_abs):
                # TODO: Check for merged build and report error

                # Get all enabled packages in source space
                # Suppress warnings since this is looking for packages which no longer exist
                found_source_packages = [
                    pkg.name for (path, pkg) in find_packages(ctx.source_space_abs, warnings=[]).items()]

                # Iterate over all packages with build dirs
                print("[clean] Removing orphaned build directories from %s" % ctx.build_space_abs)
                no_orphans = True
                for pkg_build_name in os.listdir(ctx.build_space_abs):
                    if pkg_build_name not in exempt_build_files:
                        pkg_build_path = os.path.join(ctx.build_space_abs, pkg_build_name)
                        # Remove package build dir if not found
                        if pkg_build_name not in found_source_packages:
                            no_orphans = False
                            print(" - Removing %s" % pkg_build_path)
                            shutil.rmtree(pkg_build_path)

                if no_orphans:
                    print("[clean] No orphans found, nothing removed from buildspace.")
                else:
                    # Remove the develspace
                    # TODO: For isolated devel, this could just remove individual packages
                    if os.path.exists(ctx.devel_space_abs):
                        print("[clean] Removing develspace: %s" % ctx.devel_space_abs)
                        shutil.rmtree(ctx.devel_space_abs)
                        needs_force = True
            else:
                print("[clean] No buildspace exists, no potential for orphans.")
                return 0

        # CMake Cache removal
        if opts.cmake_cache:
            # Clear the CMakeCache for each package
            if os.path.exists(ctx.build_space_abs):
                # Remove CMakeCaches
                print("[clean] Removing CMakeCache.txt files from %s" % ctx.build_space_abs)
                for pkg_build_name in os.listdir(ctx.build_space_abs):
                    if pkg_build_name not in exempt_build_files:
                        pkg_build_path = os.path.join(ctx.build_space_abs, pkg_build_name)
                        ccache_path = os.path.join(pkg_build_path, 'CMakeCache.txt')

                        if os.path.exists(ccache_path):
                            print(" - Removing %s" % ccache_path)
                            os.remove(ccache_path)
                            needs_force = True
            else:
                print("[clean] No buildspace exists, no CMake caches to clear.")

    if opts.devel:
        if os.path.exists(ctx.devel_space_abs):
            print("[clean] Removing develspace: %s" % ctx.devel_space_abs)
            shutil.rmtree(ctx.devel_space_abs)
    else:
        if opts.setup_files:
            print("[clean] Removing setup files from develspace: %s" % ctx.devel_space_abs)
            for filename in setup_files:
                full_path = os.path.join(ctx.devel_space_abs, filename)
                if os.path.exists(full_path):
                    print(" - Removing %s" % full_path)
                    os.remove(full_path)
                    needs_force = True

    if opts.install:
        if os.path.exists(ctx.install_space_abs):
            print("[clean] Removing installspace: %s" % ctx.install_space_abs)
            shutil.rmtree(ctx.install_space_abs)

    if needs_force:
        print(
            "NOTE: Parts of the workspace have been cleaned which will "
            "necessitate re-configuring CMake on the next build.")
        update_metadata(ctx.workspace, ctx.profile, 'build', {'needs_force': True})

    return 0
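
The orphan scan in main above reduces to a set difference between the entries of the build space and the packages still present in the source space. A stripped-down sketch of that idea (find_orphaned_build_dirs is an illustrative name, and the exempt entries are assumed stand-ins for catkin_tools' exempt_build_files):

import os

def find_orphaned_build_dirs(build_space, source_package_names,
                             exempt=('catkin_tools_prebuild', '.built_by')):
    # Anything in the build space that is neither exempt metadata nor a
    # package that still exists in the source space is an orphan.
    orphans = []
    for name in os.listdir(build_space):
        if name in exempt:
            continue
        if name not in source_package_names:
            orphans.append(os.path.join(build_space, name))
    return orphans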
Example #59
def make_main():
    args = _parse_args()
    cmake_args = args.cmake_args

    if args.no_color:
        terminal_color.disable_ANSI_colors()

    (base_path, build_path, devel_path, source_path) = common.get_default_paths()
    doc_path = config_cache.get_doc_prefix_from_config_cmake(base_path)

    validate_build_space(base_path)  # raises a RuntimeError if there is a problem

    # Install rosdeps if requested
    if args.install_rosdeps:
        install_rosdeps(base_path, source_path, settings.get_default_track(), args.no_color)
        return
    if args.install_rosdeps_track is not None:
        install_rosdeps(base_path, source_path, args.install_rosdeps_track, args.no_color)
        return

    # Clear out previous temporaries if requested
    if args.pre_clean:
        console.pretty_print("Pre-cleaning before building.", console.cyan)
        shutil.rmtree(devel_path, ignore_errors=True)
        shutil.rmtree(build_path, ignore_errors=True)
        shutil.rmtree(doc_path, ignore_errors=True)

    # check for new build
    if not os.path.exists(build_path):
        os.mkdir(build_path)
    #if not os.path.exists(devel_path):
    #    os.mkdir(devel_path)

    # ensure toplevel cmake file exists
    toplevel_cmake = os.path.join(source_path, 'CMakeLists.txt')
    if not os.path.exists(toplevel_cmake):
        return fmt('@{rf}No toplevel cmake file@|')

    # did source paths get added to the original location?
    check_and_update_source_repo_paths(source_path)

    packages = find_packages(source_path, exclude_subspaces=True)

    # verify that specified package exists in workspace
    if args.pkg:
        packages_by_name = {p.name: path for path, p in packages.items()}
        if args.pkg not in packages_by_name:
            raise RuntimeError('Package %s not found in the workspace' % args.pkg)

    # check if cmake must be run (either for a changed list of package paths or changed cmake arguments)
    force_cmake, _ = builder.cmake_input_changed(packages, build_path, cmake_args=cmake_args)

    # check if toolchain.cmake, config.cmake exist
    toolchain_file = os.path.join(base_path, 'toolchain.cmake')
    toolchain_cmd = "-DCMAKE_TOOLCHAIN_FILE=%s" % toolchain_file if os.path.isfile(toolchain_file) else None
    config_file = os.path.join(base_path, 'config.cmake')
    config_cmd = "-C%s" % config_file if os.path.isfile(config_file) else None

    # Help find catkin cmake and python
    unused_catkin_toplevel, catkin_python_path, unused_catkin_cmake_path = common.find_catkin(base_path)
    pkg_config_paths = common.generate_pkg_config_path(base_path)
    env = os.environ.copy()
    # PYTHONPATH
    # Don't add to the environment variable - this mucks up catkin's catkin_generated/setup_cached.py
    # environment later (how? I can't remember - something to do with the default underlay).
    # Maybe we can do away with this now catkin can look up install spaces?
    #try:
    #    env['PYTHONPATH'] = env['PYTHONPATH'] + os.pathsep + catkin_python_path
    #except KeyError:
    #    env['PYTHONPATH'] = catkin_python_path
    sys.path.append(catkin_python_path)
    # PKG_CONFIG_PATH
    for path in pkg_config_paths:
        try:
            env['PKG_CONFIG_PATH'] = env['PKG_CONFIG_PATH'] + os.pathsep + path
        except KeyError:
            env['PKG_CONFIG_PATH'] = path

    if args.doc_only:
        console.pretty_println('Generating documentation only', console.bold_white)
        make_doc(source_path, doc_path, packages)
        return

    # consider calling cmake
    makefile = os.path.join(build_path, 'Makefile')
    if not os.path.exists(makefile) or args.force_cmake or force_cmake:
        cmd = ['cmake', source_path]
        if toolchain_cmd:
            cmd.append(toolchain_cmd)
        if config_cmd:
            cmd.append(config_cmd)
        cmd += cmake_args

        #new_env = common.generate_underlays_environment(base_path)
        try:
            builder.print_command_banner(cmd, build_path, color=not args.no_color)
            if args.no_color:
                builder.run_command(cmd, build_path, env=env)
            else:
                builder.run_command_colorized(cmd, build_path, env=env)
        except subprocess.CalledProcessError:
            return fmt('@{rf}Invoking @{boldon}"cmake"@{boldoff} failed')
    else:
        cmd = ['make', 'cmake_check_build_system']
        #new_env = common.generate_environment(base_path) # underlays + current workspace
        try:
            builder.print_command_banner(cmd, build_path, color=not args.no_color)
            if args.no_color:
                builder.run_command(cmd, build_path, env=env)
            else:
                builder.run_command_colorized(cmd, build_path, env=env)
        except subprocess.CalledProcessError:
            return fmt('@{rf}Invoking @{boldon}"make cmake_check_build_system"@{boldoff} failed')

    insert_yujin_make_signature(base_path, devel_path)

    # invoke make
    if not args.cmake_only:
        if args.target:
            cmd = ['make', args.target]
        elif args.install:
            cmd = ['make', 'install']
        elif args.tests:
            cmd = ['make', 'tests']
        elif args.run_tests:
            cmd = ['make', 'test']
        else:
            cmd = ['make']
        if args.jobs == '':
            # bare -j: let make run unlimited parallel jobs
            cmd.append('-j')
        else:
            jobs = args.jobs
            if not jobs:
                # fall back to ROS_PARALLEL_JOBS (raw make flags), then to the CPU count
                if 'ROS_PARALLEL_JOBS' in os.environ:
                    ros_parallel_jobs = os.environ['ROS_PARALLEL_JOBS']
                    cmd += [arg for arg in ros_parallel_jobs.split(' ') if arg]
                else:
                    jobs = multiprocessing.cpu_count()
            if jobs:
                # limit both the number of jobs (-j) and the load average (-l)
                cmd.append('-j%d' % jobs)
                cmd.append('-l%d' % jobs)
        cmd += args.make_args
        try:
            make_path = build_path
            if args.pkg:
                make_path = os.path.join(make_path, packages_by_name[args.pkg])
            builder.print_command_banner(cmd, make_path, color=not args.no_color)
            builder.run_command(cmd, make_path, env=env)
        except subprocess.CalledProcessError:
            return fmt('@{rf}Invoking @{boldon}"make"@{boldoff} failed')

    if args.doc:
        make_doc(source_path, doc_path, packages)
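
The job-flag logic near the end of make_main encodes a precedence: an explicit --jobs value wins, otherwise ROS_PARALLEL_JOBS is split into raw make flags, and only then does the CPU count apply. A minimal sketch of that precedence (make_job_flags is an illustrative name, not part of the tool):

import multiprocessing
import os

def make_job_flags(jobs=None):
    # An explicit job count takes precedence over the environment.
    if jobs:
        return ['-j%d' % int(jobs), '-l%d' % int(jobs)]
    # ROS_PARALLEL_JOBS may hold arbitrary make flags, e.g. "-j4 -l4".
    if 'ROS_PARALLEL_JOBS' in os.environ:
        return [arg for arg in os.environ['ROS_PARALLEL_JOBS'].split(' ') if arg]
    # Fall back to one job (and a matching load limit) per CPU.
    n = multiprocessing.cpu_count()
    return ['-j%d' % n, '-l%d' % n]
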
def find_in_workspaces(search_dirs=None,
                       project=None,
                       path=None,
                       _workspaces=get_workspaces(),
                       considered_paths=None,
                       first_matching_workspace_only=False,
                       first_match_only=False,
                       workspace_to_source_spaces=None,
                       source_path_to_packages=None):
    '''
    Find all paths which match the search criteria.
    All workspaces are searched in order.
    For each workspace, each subfolder in search_dirs, the project name and the path are concatenated to define a candidate path.
    If the candidate path exists it is appended to the result list.
    Note: the search might return multiple paths for 'share' from devel- and source-space.

    :param search_dirs: The list of subfolders to search in (default contains all valid values: 'bin', 'etc', 'lib', 'libexec', 'share'), ``list``
    :param project: The project name to search for (optional, not possible with the global search_dirs folders 'bin' and 'lib'), ``str``
    :param path: The path, ``str``
    :param _workspaces: the list of workspaces to use (optional, used for unit tests)
    :param considered_paths: if not None, the function will append all paths that were searched
    :param first_matching_workspace_only: if True, return only the results found in the first workspace that yields any
    :param first_match_only: if True, return only the first path found (supersedes first_matching_workspace_only)
    :param workspace_to_source_spaces: the dictionary is populated with mappings from workspaces to source paths; pass in the same dictionary to avoid repeated reading of the catkin marker file
    :param source_path_to_packages: the dictionary is populated with mappings from source paths to packages; pass in the same dictionary to avoid repeated crawling
    :raises ValueError: if search_dirs contains an invalid folder name
    :returns: List of paths
    '''
    search_dirs = _get_valid_search_dirs(search_dirs, project)
    if 'libexec' in search_dirs:
        search_dirs.insert(search_dirs.index('libexec'), 'lib')

    if workspace_to_source_spaces is None:
        workspace_to_source_spaces = {}
    if source_path_to_packages is None:
        source_path_to_packages = {}

    paths = []
    existing_paths = []
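    # StopIteration is raised below purely to break out of the nested
    # loops as soon as the first match is found; it is caught at the end.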
    try:
        for workspace in (_workspaces or []):
            for sub in search_dirs:
                # search in workspace
                p = os.path.join(workspace, sub)
                if project:
                    p = os.path.join(p, project)
                if path:
                    p = os.path.join(p, path)
                paths.append(p)
                if os.path.exists(p):
                    existing_paths.append(p)
                    if first_match_only:
                        raise StopIteration

                # for search in share also consider source spaces
                if project is not None and sub == 'share':
                    if workspace not in workspace_to_source_spaces:
                        workspace_to_source_spaces[
                            workspace] = get_source_paths(workspace)
                    for source_path in workspace_to_source_spaces[workspace]:
                        if source_path not in source_path_to_packages:
                            source_path_to_packages[
                                source_path] = find_packages(source_path)
                        matching_packages = [
                            p for p, pkg in
                            source_path_to_packages[source_path].items()
                            if pkg.name == project
                        ]
                        if matching_packages:
                            p = source_path
                            if matching_packages[0] != os.curdir:
                                p = os.path.join(p, matching_packages[0])
                            if path is not None:
                                p = os.path.join(p, path)
                            paths.append(p)
                            if os.path.exists(p):
                                existing_paths.append(p)
                                if first_match_only:
                                    raise StopIteration

            if first_matching_workspace_only and existing_paths:
                break

    except StopIteration:
        pass

    if considered_paths is not None:
        considered_paths.extend(paths)

    return existing_paths
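
A typical call site might look like this (a hypothetical usage sketch: it assumes a sourced catkin environment so that get_workspaces() returns something, and 'my_pkg' is an illustrative package name):

considered = []
matches = find_in_workspaces(search_dirs=['share'],
                             project='my_pkg',
                             path='launch',
                             considered_paths=considered,
                             first_match_only=True)
if matches:
    print('found: %s' % matches[0])
else:
    print('no match among %d candidate paths' % len(considered))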