Example #1
def build_workspace_in_isolation(sourcespace_dir, buildspace_parent_dir):
    sourcespace_dir = os.path.abspath(sourcespace_dir)
    packages = topological_order(sourcespace_dir)

    buildspace_dir = os.path.abspath(os.path.join(buildspace_parent_dir, "build_isolated"))
    if not os.path.exists(buildspace_dir):
        os.makedirs(buildspace_dir)

    last_package_build_dir = None
    for path, package in packages:
        if "metapackage" in [e.tagname for e in package.exports]:
            print('\n+++ Skipping metapackage "%s"' % path)
            continue
        print('\n+++ Building package "%s"\n' % path)
        package_build_dir = os.path.join(buildspace_dir, package.name)
        if not os.path.exists(package_build_dir):
            os.mkdir(package_build_dir)

        makefile = os.path.join(package_build_dir, "Makefile")
        if not os.path.exists(makefile):
            cmd = ["cmake", os.path.dirname(package.filename), "-DCATKIN_STATIC_ENV=1"]
            _run_command_with_env(cmd, package_build_dir, last_package_build_dir)

        cmd = ["make", "-j8"]
        _run_command_with_env(cmd, package_build_dir, last_package_build_dir)

        last_package_build_dir = package_build_dir
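
Both this example and the next iterate over the result of topological_order(), which returns (path, Package) tuples sorted so that each package comes after its dependencies. A minimal sketch of that pattern, assuming a hypothetical source space at ~/catkin_ws/src:

import os
from catkin_pkg.topological_order import topological_order

# Hypothetical source space; any directory tree containing package.xml files works.
sourcespace = os.path.expanduser('~/catkin_ws/src')

# topological_order() returns a list of (path, Package) tuples ordered so that
# every package appears after the packages it depends on.
for path, package in topological_order(sourcespace):
    export_tags = [e.tagname for e in package.exports]
    print(package.name, path, 'metapackage' in export_tags)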
Example #2
def build_workspace_in_isolation(sourcespace_dir, buildspace_parent_dir):
    sourcespace_dir = os.path.abspath(sourcespace_dir)
    packages = topological_order(sourcespace_dir)

    buildspace_dir = os.path.abspath(
        os.path.join(buildspace_parent_dir, 'build_isolated'))
    if not os.path.exists(buildspace_dir):
        os.makedirs(buildspace_dir)

    last_package_build_dir = None
    for path, package in packages:
        if 'metapackage' in [e.tagname for e in package.exports]:
            print('\n+++ Skipping metapackage "%s"' % path)
            continue
        print('\n+++ Building package "%s"\n' % path)
        package_build_dir = os.path.join(buildspace_dir, package.name)
        if not os.path.exists(package_build_dir):
            os.mkdir(package_build_dir)

        makefile = os.path.join(package_build_dir, 'Makefile')
        if not os.path.exists(makefile):
            cmd = [
                'cmake',
                os.path.dirname(package.filename), '-DCATKIN_STATIC_ENV=1'
            ]
            _run_command_with_env(cmd, package_build_dir,
                                  last_package_build_dir)

        cmd = ['make', '-j8']
        _run_command_with_env(cmd, package_build_dir, last_package_build_dir)

        last_package_build_dir = package_build_dir
Example #3
def get_packages_in_workspace(
        workspace: pathlib.Path,
        root_packages: Iterable[str] = []) -> Mapping[str, Package]:
    """Get a list of all packages in a workspace. Optionally filter to only include direct dependencies of a
    root package list.
    """
    if root_packages is None:
        return {}

    packages = {}

    # Load all packages and their descriptions (processed package.xml)
    for package in topological_order(str(workspace)):
        packages[package[1].name] = (package[1])

    if root_packages == []:
        return packages

    # Traverse the dependency tree starting with root_packages
    queued = set(root_packages)
    processed = set()
    filtered = set()

    while queued:
        package = queued.pop()
        processed.add(package)
        try:
            package_description = packages[package]
            filtered.add(package)
        except Exception:
            continue

        for dependency in get_debian_depends(
                package_description) | get_debian_build_depends(
                    package_description):
            if dependency.name not in processed:
                queued.add(dependency.name)

    # Return the subset of packages found to be dependencies of root_package_list
    return {package: packages[package] for package in filtered}
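
A usage sketch for the function above; the workspace path and the root package name are purely illustrative:

import pathlib

# Hypothetical call: return 'my_robot_bringup' plus every workspace package it
# (transitively) depends on, as a name -> Package mapping.
workspace = pathlib.Path('~/catkin_ws/src').expanduser()
selected = get_packages_in_workspace(workspace, root_packages=['my_robot_bringup'])
for name, package in selected.items():
    print(name, package.version)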
Example #4
    def _get_packages(self):
        to = topological_order.topological_order(self._workspace)

        result = {}
        for pname, pkg in to:
            pkg_info = {}
            pkg_info["path"] = os.path.dirname(pkg.filename)
            pkg_info["depends"] = list(
                set([str(d.name) for d in pkg.build_depends] + [str(d.name) for d in pkg.exec_depends])
            )
            pkg_info["description"] = pkg.description
            pkg_info["licenses"] = pkg.licenses
            pkg_info["authors"] = [(a.name) for a in pkg.authors]
            pkg_info["maintainers"] = [(m.name) for m in pkg.maintainers]
            # prefix, pkg_info['package'] = os.path.split(pkg_info['path'])
            prefix, pkg_info["package"] = os.path.split(pkg_info["path"])
            prefix, pkg_info["repo"] = os.path.split(prefix)
            if pkg_info["repo"] in self._workspace:
                pkg_info["repo"] = pkg_info["package"]
            if pkg_info["repo"]:
                pkg_info["release_status"] = self._get_release_status(pkg_info["repo"])
            result[pkg.name] = pkg_info
        self._pkgs = result
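
For reference, each value stored in self._pkgs by the method above has roughly the following shape; all values here are hypothetical:

# Hypothetical entry in self._pkgs after _get_packages() has run.
pkg_info_example = {
    'path': '/home/user/ws/src/my_repo/my_package',
    'depends': ['roscpp', 'std_msgs'],
    'description': 'An example package description.',
    'licenses': ['BSD'],
    'authors': ['Jane Doe'],
    'maintainers': ['Jane Doe'],
    'package': 'my_package',
    'repo': 'my_repo',
    'release_status': 'released',  # only present when a repo name was resolved
}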
Example #5
        print("  ubuntu:")
        print("    focal: [%s]" % rosified_name)


def print_usage():
    print(
        'Usage: multibloom.py rosdep | generate\n'
        'Verb meanings:\n'
        '  rosdep   - Generate a rosdep yaml file for\n'
        '             the packages in src\n'
        '  generate - Build .deb packages for all\n'
        '             packages in src\n',
        file=sys.stderr)


if __name__ == '__main__':
    if len(os.sys.argv) < 2:
        print_usage()
        exit(1)
    print('Listing packages in %s in topological order' % PATH_PREFIX,
          file=sys.stderr)
    packages = topological_order(PATH_PREFIX)
    if os.sys.argv[1] == 'rosdep':
        generate_rosdep(packages)
    elif os.sys.argv[1] == 'generate':
        for package in packages:
            generate_package(package)
    else:
        print_usage()
        exit(1)
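
generate_rosdep and generate_package are not shown in this example; judging from the printed tail at the top of the snippet, generate_rosdep emits one rosdep YAML stanza per package. A hedged sketch of such a helper, where the rosified Debian-style name is an assumption:

def generate_rosdep(packages):
    # Sketch only: print a rosdep YAML entry per package, mapping the catkin
    # name to an assumed Debian-style key (ros-<distro>-<name>).
    for _path, package in packages:
        rosified_name = 'ros-noetic-' + package.name.replace('_', '-')
        print('%s:' % package.name)
        print('  ubuntu:')
        print('    focal: [%s]' % rosified_name)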
Example #6
def build_workspace_in_isolation(sourcespace_dir, install=False, merge=False, force_cmake=False, number_of_threads=None):
    '''
    Runs ``cmake``, ``make`` and optionally ``make install`` for all
    catkin packages in sourcespace_dir.  It creates several folders
    in the current working directory.

    :param sourcespace_dir: folder with catkin packages, ``str``
    :param install: (optional), if True also installs in local install dir, ``bool``
    :param merge: (optional), if True uses a single devel (and install) folder for all packages, ``bool``
    :param force_cmake: (optional), if True calls cmake explicitly for each package, ``bool``
    :param number_of_threads: (optional), the number of threads to use for make (default is the number of CPUs), ``int``
    '''
    sourcespace_dir = os.path.abspath(sourcespace_dir)
    packages = topological_order(sourcespace_dir)
    build_isolated_dir = os.path.abspath('.')

    if not number_of_threads:
        try:
            number_of_threads = multiprocessing.cpu_count()
        except NotImplementedError:
            pass

    last_env_to_source = None
    last_space_dir = None
    for path, package in packages:
        if 'metapackage' in [e.tagname for e in package.exports]:
            print('\n+++ Skipping metapackage "%s"' % path)
            continue
        print('\n+++ Building package "%s"\n' % path)

        package_build_dir = os.path.join(build_isolated_dir, package.name)
        if not os.path.exists(package_build_dir):
            os.mkdir(package_build_dir)

        if not merge:
            package_devel_dir = os.path.join(build_isolated_dir, package.name + '__devel')
        else:
            package_devel_dir = os.path.join(build_isolated_dir, 'devel')

        if install:
            if not merge:
                package_install_dir = os.path.join(build_isolated_dir, package.name + '__install')
            else:
                package_install_dir = os.path.join(build_isolated_dir, 'install')
            if not os.path.exists(package_install_dir):
                os.mkdir(package_install_dir)

        makefile = os.path.join(package_build_dir, 'Makefile')
        if not os.path.exists(makefile) or force_cmake:
            cmd = ['cmake', os.path.dirname(package.filename), '-DCATKIN_STATIC_ENV=1', '-DCATKIN_DEVEL_PREFIX=%s' % package_devel_dir]
            if install:
                cmd.append('-DCMAKE_INSTALL_PREFIX=%s' % package_install_dir)
            _run_command_with_env(cmd, package_build_dir, last_env_to_source)

        cmd = ['make']
        if number_of_threads:
            cmd.append('-j%d' % number_of_threads)
        _run_command_with_env(cmd, package_build_dir, last_env_to_source)

        if not install:
            last_env_to_source = os.path.join(package_build_dir, 'catkin_generated', 'env_cached.sh')
            last_space_dir = package_devel_dir
        else:
            cmd = ['make', 'install']
            _run_command_with_env(cmd, package_build_dir, last_env_to_source)
            last_env_to_source = os.path.join(package_install_dir, 'env_cached.sh')
            last_space_dir = package_install_dir

    if last_space_dir:
        print('\n+++ DONE')
        space_attribute = 'latest' if not merge else 'merged'
        space_type = 'devel space' if not install else 'install space'
        print('\n+++ The %s %s is: %s' % (space_attribute, space_type, last_space_dir))
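
_run_command_with_env is an internal helper that several of these examples call but do not define. A plausible sketch, assuming it sources the previous package's environment file (when there is one) before running the command in the package build directory; the real implementation may differ:

import subprocess

def _run_command_with_env(cmd, cwd, last_env_to_source=None):
    # Sketch only: run `cmd` in `cwd`, optionally after sourcing the environment
    # file produced by the previously built package so its devel/install space
    # is visible to this build.
    if last_env_to_source:
        cmd = ['/bin/sh', '-c', '. "%s" && exec "$@"' % last_env_to_source, 'sh'] + cmd
    print('==> %s (in %s)' % (' '.join(cmd), cwd))
    subprocess.check_call(cmd, cwd=cwd)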
Example #7
import sys
import os
from catkin_pkg.topological_order import topological_order

if len(sys.argv) != 2:
    raise RuntimeError('invalid call - missing workspace root')
if not os.path.isdir(sys.argv[1]):
    raise RuntimeError('invalid call - not a directory')

f = open("coverity-analysis.sh","w+")

prefix = 'cd ~/catkin_ws/build/'
suffix = ' && make clean && make'

f.write(prefix)
f.write((suffix + ' && ' + prefix).join(e[1].name for e in topological_order(sys.argv[1])))
f.write(suffix)

f.close()
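
To make the join above concrete: for a hypothetical workspace whose topological order is pkg_a, pkg_b, the generated coverity-analysis.sh contains a single command chain like the one printed below.

prefix = 'cd ~/catkin_ws/build/'
suffix = ' && make clean && make'
names = ['pkg_a', 'pkg_b']  # hypothetical topological order

# cd ~/catkin_ws/build/pkg_a && make clean && make && cd ~/catkin_ws/build/pkg_b && make clean && make
print(prefix + (suffix + ' && ' + prefix).join(names) + suffix)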
Example #8
def build_workspace_isolated(
    workspace='.',
    sourcespace=None,
    buildspace=None,
    develspace=None,
    installspace=None,
    merge=False,
    install=False,
    jobs=None,
    force_cmake=False,
    colorize=True,
    quiet=False,
    cmake_args=[]
):
    '''
    Runs ``cmake``, ``make`` and optionally ``make install`` for all
    catkin packages in sourcespace_dir.  It creates several folders
    in the current working directory. For non-catkin packages it runs
    ``cmake``, ``make`` and ``make install`` for each, installing it to
    the devel space or install space if the ``install`` option is specified.

    :param workspace: path to the current workspace, ``str``
    :param sourcespace: workspace folder containing catkin packages, ``str``
    :param buildspace: path to build space location, ``str``
    :param develspace: path to devel space location, ``str``
    :param installspace: path to install space (CMAKE_INSTALL_PREFIX), ``str``
    :param merge: if True, build each catkin package into the same
        devel space. does not work with non-catkin packages, ``bool``
    :param install: if True, install all packages to the install space,
        ``bool``
    :param jobs: number of parallel build jobs to run (make -jN -lN), ``int``
    :param force_cmake: (optional), if True calls cmake explicitly for each
        package, ``bool``
    :param colorize: if True, colorize cmake output and other messages,
        ``bool``
    :param quiet: if True, hides some build output, ``bool``
    :param cmake_args: additional arguments for cmake, ``[str]``
    '''
    if not colorize:
        disable_ANSI_colors()

    # Check workspace existence
    if not os.path.exists(workspace):
        sys.exit("Workspace path '{0}' does not exist.".format(workspace))
    workspace = os.path.abspath(workspace)

    if cmake_args:
        print("Additional CMake Arguments: " + " ".join(cmake_args))

    # Check source space existence
    if sourcespace is None:
        ws_sourcespace = os.path.join(workspace, 'src')
        if not os.path.exists(ws_sourcespace):
            sys.exit("Could not find source space: {0}".format(sourcespace))
        sourcespace = ws_sourcespace
    sourcespace = os.path.abspath(sourcespace)
    print('Base path: ' + str(workspace))
    print('Source space: ' + str(sourcespace))

    # Check build space
    if buildspace is None:
        buildspace = os.path.join(workspace, 'build_isolated')
    buildspace = os.path.abspath(buildspace)
    if not os.path.exists(buildspace):
        os.mkdir(buildspace)
    print('Build space: ' + str(buildspace))

    # Check devel space
    if develspace is None:
        develspace = os.path.join(workspace, 'devel_isolated')
    develspace = os.path.abspath(develspace)
    print('Devel space: ' + str(develspace))

    # Check install space
    if installspace is None:
        installspace = os.path.join(workspace, 'install_isolated')
    installspace = os.path.abspath(installspace)
    print('Install space: ' + str(installspace))

    # Check jobs
    if not jobs:
        try:
            jobs = multiprocessing.cpu_count()
        except NotImplementedError:
            jobs = 1
    jobs = int(jobs)

    # Find packages
    packages = topological_order(sourcespace)
    if not packages:
        sys.exit("No packages found in source space: {0}".format(sourcespace))

    # Report topological ordering
    unknown_build_types = []
    msg = []
    msg.append('@{pf}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
    msg.append('@{pf}~~@|  traversing packages in topological order:')
    for path, package in packages:
        export_tags = [e.tagname for e in package.exports]
        if 'metapackage' in export_tags:
            msg.append(
                '@{pf}~~@|  - @!@{bf}' + package.name + '@|' +
                ' (@{cf}metapackage@|)'
            )
        else:
            if 'build_type' in export_tags:
                build_type_tag = [e.content for e in package.exports
                                            if e.tagname == 'build_type'][0]
            else:
                build_type_tag = 'catkin'
            if build_type_tag == 'catkin':
                msg.append('@{pf}~~@|  - @!@{bf}' + package.name + '@|')
            elif build_type_tag == 'cmake':
                msg.append(
                    '@{pf}~~@|  - @!@{bf}' + package.name + '@|' +
                    ' (@!@{cf}plain cmake@|)'
                )
            else:
                msg.append(
                    '@{pf}~~@|  - @!@{bf}' + package.name + '@|' +
                    ' (@{rf}unknown@|)'
                )
                unknown_build_types.append(package)
    msg.append('@{pf}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
    for index in range(len(msg)):
        msg[index] = fmt(msg[index])
    print('\n'.join(msg))

    # Error if there are packages with unknown build_types
    if unknown_build_types:
        print(colorize_line('Error: Packages with unknown build types exist'))
        sys.exit('Can not build workspace with packages of unknown build_type')

    # Check to see if the workspace has changed
    if cmake_input_changed(
        sourcespace, buildspace, cmake_args, 'catkin_make_isolated'
    ):
        force_cmake = True
        print(colorize_line(
            'Warning: packages or cmake arguments have changed, forcing cmake'
        ))

    # Build packages
    original_develspace = copy.deepcopy(develspace)
    last_env = None
    for index, path_package in enumerate(packages):
        path, package = path_package
        if not merge:
            develspace = os.path.join(original_develspace, package.name)
        try:
            last_env = build_package(
                path, package,
                workspace, buildspace, develspace, installspace,
                install, jobs, force_cmake, quiet, last_env, cmake_args,
                number=index + 1, of=len(packages)
            )
        except Exception as e:
            cprint(
                '@{rf}@!<==@| ' +
                'Failed to process package \'@!@{bf}' +
                package.name + '@|\': \n  ' +
                ('KeyboardInterrupt' if isinstance(e, KeyboardInterrupt)
                        else str(e))
            )
            if isinstance(e, subprocess.CalledProcessError):
                cmd = ' '.join(e.cmd) if isinstance(e.cmd, list) else e.cmd
                print(fmt("\n@{rf}Reproduce this error by running:"))
                print(fmt("@{gf}@!==> @|") + cmd + "\n")
            sys.exit('Command failed, exiting.')

    # Provide a top level devel space environment setup script
    if not merge:
        target_setup = os.path.join(original_develspace, 'setup')
        with open(target_setup + '.sh', 'w') as f:
            f.write("""\
# Generated from catkin.builder module
. "{0}/setup.sh"
""".format(develspace))
        with open(target_setup + '.bash', 'w') as f:
            f.write("""\
# Generated from catkin.builder module
CATKIN_SHELL=bash
source "{0}"
""".format(target_setup + '.sh'))
        with open(target_setup + '.zsh', 'w') as f:
            f.write("""\
# Generated from catkin.builder module
CATKIN_SHELL=zsh
emulate sh # emulate POSIX
. "{0}"
emulate zsh # back to zsh mode
""".format(target_setup + '.sh'))
Example #9
#!/usr/bin/env python

from sys import argv
from catkin_pkg.topological_order import topological_order
from . import resolve_deps, get_rosdep_data, get_rosdeps

packages = [package for _path, package in topological_order(argv[1])]
print(', '.join(resolve_deps(get_rosdep_data(argv[2]), get_rosdeps(packages))))