Example 1
def find_packages(basepath, use_cache=True):
    global _cache
    if use_cache:
        if _cache is None: _load_cache()
        distro_id = os.environ.get("ROS_DISTRO")
    packages = {}
    package_paths = []
    for dirpath, dirnames, filenames in os.walk(basepath, followlinks=True):
        if "CATKIN_IGNORE" in filenames:
            del dirnames[:]
            continue
        elif PACKAGE_MANIFEST_FILENAME in filenames:
            package_paths.append(os.path.relpath(dirpath, basepath))
            del dirnames[:]
            continue
        # prune hidden directories in place so os.walk does not descend into them
        # (removing items while iterating over the same list would skip entries)
        dirnames[:] = [d for d in dirnames if not d.startswith('.')]
    cache_updated = False
    for path in package_paths:
        pkg_dir = os.path.realpath(os.path.join(basepath, path))
        if use_cache:
            last_modified = os.path.getmtime(
                os.path.join(pkg_dir, PACKAGE_MANIFEST_FILENAME))
            path_ts = _cache.local_paths[
                pkg_dir].timestamp if pkg_dir in _cache.local_paths else 0
            if last_modified > path_ts:
                manifest = parse_package(pkg_dir)
                _cache.local_paths[pkg_dir] = CacheItem(
                    manifest, last_modified)
                cache_updated = True
            else:
                manifest = _cache.local_paths[pkg_dir].data
            if distro_id not in _cache.packages:
                _cache.packages[distro_id] = {}
            manifest_ts = _cache.packages[distro_id][
                manifest.name].timestamp if manifest.name in _cache.packages[
                    distro_id] else 0
            if last_modified > manifest_ts:
                _cache.packages[distro_id][manifest.name] = CacheItem(
                    PackageCacheData(path=pkg_dir, manifest=manifest),
                    last_modified)
                cache_updated = True
        else:
            manifest = parse_package(pkg_dir)
        packages[path] = manifest
    if cache_updated:
        _store_cache()
    return packages
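A minimal usage sketch for the function above; the workspace path is hypothetical, and use_cache=False sidesteps the module-level cache helpers:

packages = find_packages('/path/to/ws/src', use_cache=False)  # hypothetical path
for rel_path, manifest in packages.items():
    print('%s: %s %s' % (rel_path, manifest.name, manifest.version))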
Example 2
File: cli.py Project: ros2/rosidl
def convert_files_to_idl(extension, conversion_function, argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Convert {extension} files to .idl'.format_map(locals()))
    parser.add_argument(
        'interface_files', nargs='+',
        help='The interface files to convert')
    args = parser.parse_args(argv)

    for interface_file in args.interface_files:
        interface_file = pathlib.Path(interface_file)
        package_dir = interface_file.parent.absolute()
        while (
            len(package_dir.parents) and
            not package_exists_at(str(package_dir))
        ):
            package_dir = package_dir.parent
        if not package_dir.parents:
            print(
                "Could not find package for '{interface_file}'"
                .format_map(locals()), file=sys.stderr)
            continue
        warnings = []
        pkg = parse_package(package_dir, warnings=warnings)

        conversion_function(
            package_dir, pkg.name,
            interface_file.absolute().relative_to(package_dir),
            interface_file.parent)
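A hedged usage sketch for convert_files_to_idl: the converter below is a print-only stand-in that matches the call signature used above (package dir, package name, interface path relative to the package, output dir), and the .msg path is hypothetical:

def print_only_converter(package_dir, package_name, interface_file, output_dir):
    # Stand-in for a real conversion function (e.g. from rosidl_adapter).
    print('%s: would convert %s into %s' % (package_name, interface_file, output_dir))

convert_files_to_idl('.msg', print_only_converter, ['example_msgs/msg/Example.msg'])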
Example 3
def convert_files_to_idl(extension, conversion_function, argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Convert {extension} files to .idl'.format_map(locals()))
    parser.add_argument('interface_files',
                        nargs='+',
                        help='The interface files to convert')
    args = parser.parse_args(argv)

    for interface_file in args.interface_files:
        interface_file = pathlib.Path(interface_file)
        package_dir = interface_file.parent.absolute()
        while (len(package_dir.parents)
               and not package_exists_at(str(package_dir))):
            package_dir = package_dir.parent
        if not package_dir.parents:
            print("Could not find package for '{interface_file}'".format_map(
                locals()),
                  file=sys.stderr)
            continue
        warnings = []
        pkg = parse_package(package_dir, warnings=warnings)

        conversion_function(package_dir, pkg.name,
                            interface_file.absolute().relative_to(package_dir),
                            interface_file.parent)
Example 4
def package_name(path):
    '''
    Return the package name and package path for the given directory.

    The results are cached!

    :return: tuple of package name and package path
    :rtype: tuple(str, str) or tuple(None, None)
    '''
    if path is not None and path and path != os.path.sep:
        dir_path = path
        if not os.path.isdir(dir_path):
            dir_path = os.path.dirname(dir_path)
        if dir_path in PACKAGE_CACHE:
            return PACKAGE_CACHE[dir_path]
        package = os.path.basename(dir_path)
        try:
            fileList = os.listdir(dir_path)
            for f in fileList:
                if f == MANIFEST_FILE:
                    PACKAGE_CACHE[dir_path] = (package, dir_path)
                    return (package, dir_path)
                if CATKIN_SUPPORTED and f == PACKAGE_FILE:
                    try:
                        pkg = parse_package(os.path.join(dir_path, f))
                        PACKAGE_CACHE[dir_path] = (pkg.name, dir_path)
                        return (pkg.name, dir_path)
                    except Exception:
                        return (None, None)
            PACKAGE_CACHE[dir_path] = package_name(os.path.dirname(dir_path))
            return PACKAGE_CACHE[dir_path]
        except OSError:
            return (None, None)
    return (None, None)
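Usage sketch with a hypothetical path; the function walks upward through parent directories until it finds a manifest, so it also resolves files nested below the package root:

name, pkg_path = package_name('/path/to/ws/src/my_pkg/src')
if name is not None:
    print('belongs to package %s at %s' % (name, pkg_path))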
Example 5
def extract_notification_recipients(docspace, doc_conf):
    repo_path = os.path.realpath("%s" % (docspace))
    _, manifest_packages, catkin_packages, _ = build_repo_structure(repo_path, doc_conf, [])
    notification_recipients = set([])
    email_pattern = re.compile(r'([a-zA-Z0-9._%+-]+@[a-zA-Z0-9._%-]+\.[a-zA-Z]{2,6})')
    for package_name in set(catkin_packages.keys()) | set(manifest_packages.keys()):
        if package_name in catkin_packages:
            package_path = catkin_packages[package_name]
            from catkin_pkg.package import parse_package
            pkg = parse_package(package_path)
            for m in pkg.maintainers:
                notification_recipients.add(m.email)
        else:
            package_path = manifest_packages[package_name]
            from rospkg import MANIFEST_FILE, STACK_FILE
            from rospkg.manifest import parse_manifest_file
            if os.path.exists(os.path.join(package_path, MANIFEST_FILE)):
                pkg = parse_manifest_file(package_path, MANIFEST_FILE)
            elif os.path.exists(os.path.join(package_path, STACK_FILE)):
                pkg = parse_manifest_file(package_path, STACK_FILE)
            else:
                assert False, "Path '%s' contains neither a manifest.xml nor a stack.xml file" % package_path
            if pkg.author:
                for email in email_pattern.finditer(pkg.author):
                    notification_recipients.add(email.group(1))
    if notification_recipients:
        print('Notification recipients: %s' % ' '.join(sorted(notification_recipients)))
Example 6
    def get_paths():
        gazebo_model_path = []
        gazebo_plugin_path = []
        gazebo_media_path = []

        for package_name in get_package_names():
            package_share_path = get_package_share_directory(package_name)
            package_file_path = os.path.join(package_share_path, PACKAGE_MANIFEST_FILENAME)
            if os.path.isfile(package_file_path):
                try:
                    package = parse_package(package_file_path)
                except InvalidPackage:
                    continue
                for export in package.exports:
                    if export.tagname == 'gazebo_ros':
                        if 'gazebo_model_path' in export.attributes:
                            xml_path = export.attributes['gazebo_model_path']
                            xml_path = xml_path.replace('${prefix}', package_share_path)
                            gazebo_model_path.append(xml_path)
                        if 'plugin_path' in export.attributes:
                            xml_path = export.attributes['plugin_path']
                            xml_path = xml_path.replace('${prefix}', package_share_path)
                            gazebo_plugin_path.append(xml_path)
                        if 'gazebo_media_path' in export.attributes:
                            xml_path = export.attributes['gazebo_media_path']
                            xml_path = xml_path.replace('${prefix}', package_share_path)
                            gazebo_media_path.append(xml_path)

        gazebo_model_path = os.pathsep.join(gazebo_model_path)
        gazebo_plugin_path = os.pathsep.join(gazebo_plugin_path)
        gazebo_media_path = os.pathsep.join(gazebo_media_path)

        return gazebo_model_path, gazebo_plugin_path, gazebo_media_path
Example 7
def extract_notification_recipients(docspace, doc_conf):
    repo_path = os.path.realpath("%s" % (docspace))
    _, manifest_packages, catkin_packages, _ = build_repo_structure(repo_path, doc_conf, [])
    notification_recipients = set([])
    email_pattern = re.compile(r"([a-zA-Z0-9._%+-]+@[a-zA-Z0-9._%-]+\.[a-zA-Z]{2,6})")
    for package_name in set(catkin_packages.keys()) | set(manifest_packages.keys()):
        if package_name in catkin_packages:
            package_path = catkin_packages[package_name]
            from catkin_pkg.package import parse_package

            pkg = parse_package(package_path)
            for m in pkg.maintainers:
                notification_recipients.add(m.email)
        else:
            package_path = manifest_packages[package_name]
            from rospkg import MANIFEST_FILE, STACK_FILE
            from rospkg.manifest import parse_manifest_file

            if os.path.exists(os.path.join(package_path, MANIFEST_FILE)):
                pkg = parse_manifest_file(package_path, MANIFEST_FILE)
            elif os.path.exists(os.path.join(package_path, STACK_FILE)):
                pkg = parse_manifest_file(package_path, STACK_FILE)
            else:
                assert False, "Path '%s' contains neither a manifest.xml nor a stack.xml file" % package_path
            if pkg.author:
                for email in email_pattern.finditer(pkg.author):
                    notification_recipients.add(email.group(1))
    if notification_recipients:
        print("Notification recipients: %s" % " ".join(sorted(notification_recipients)))
Example 8
def package_name(dir):
  '''
  Returns for given directory a tuple of package name and package path or None values.
  The results are cached!
  @rtype: C{(name, path)}
  '''
  if dir is not None and dir and dir != os.path.sep and os.path.isdir(dir):
    if dir in PACKAGE_CACHE:
      return PACKAGE_CACHE[dir]
    package = os.path.basename(dir)
    fileList = os.listdir(dir)
    for file in fileList:
      if file == MANIFEST_FILE:
        PACKAGE_CACHE[dir] = (package, dir)
        return (package, dir)
      if CATKIN_SUPPORTED and file == PACKAGE_FILE:
        try:
          pkg = parse_package(os.path.join(dir, file))
          PACKAGE_CACHE[dir] = (pkg.name, dir)
          return (pkg.name, dir)
        except Exception:
          return (None, None)
    PACKAGE_CACHE[dir] = package_name(os.path.dirname(dir))
    return PACKAGE_CACHE[dir]
  return (None, None)
Example 9
def _get_package(path: str):
    """Get the ROS package for the given path."""
    from catkin_pkg.package import has_ros_schema_reference
    from catkin_pkg.package import InvalidPackage
    from catkin_pkg.package import package_exists_at
    from catkin_pkg.package import parse_package

    if not package_exists_at(path):
        return None

    try:
        pkg = parse_package(path)
    except (AssertionError, InvalidPackage) as e:  # noqa: F841
        if has_ros_schema_reference(path):
            logger.debug(
                "Found ROS schema reference in package manifest in '{path}'".
                format_map(locals()))
            logger.warning(
                "Failed to parse ROS package manifest in '{path}': {e}".
                format_map(locals()))
        else:
            logger.debug('Failed to parse potential ROS package manifest in'
                         "'{path}': {e}".format_map(locals()))
        return None

    pkg.evaluate_conditions(os.environ)
    return pkg
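Usage sketch with a hypothetical path; because evaluate_conditions has already run against os.environ, condition attributes on dependencies are resolved:

pkg = _get_package('/path/to/ws/src/my_pkg')
if pkg is not None:
    print(pkg.name, [dep.name for dep in pkg.exec_depends])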
Example 10
def package_name(path):
    '''
    Returns for given directory a tuple of package name and package path or None values.
    The results are cached!
    @rtype: C{(name, path)}
    '''
    if path is not None and path and path != os.path.sep and os.path.isdir(path):
        if path in PACKAGE_CACHE:
            return PACKAGE_CACHE[path]
        package = os.path.basename(path)
        fileList = os.listdir(path)
        for f in fileList:
            if f == MANIFEST_FILE:
                PACKAGE_CACHE[path] = (package, path)
                return (package, path)
            if CATKIN_SUPPORTED and f == PACKAGE_FILE:
                try:
                    pkg = parse_package(os.path.join(path, f))
                    PACKAGE_CACHE[path] = (pkg.name, path)
                    return (pkg.name, path)
                except Exception:
                    return (None, None)
        PACKAGE_CACHE[path] = package_name(os.path.dirname(path))
        return PACKAGE_CACHE[path]
    return (None, None)
Example 11
def get_sources(rosdistro_index_url, rosdistro_name, pkg_name, os_name, os_code_name, sources_dir):
    from rosdistro import get_distribution_file
    from rosdistro import get_index

    index = get_index(rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    if pkg_name not in dist_file.release_packages:
        return "Not a released package name: %s" % pkg_name

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]
    if not repo.release_repository.version:
        return "Repository '%s' has no release version" % repo_name

    pkg_version = repo.release_repository.version
    tag = _get_source_tag(rosdistro_name, pkg_name, pkg_version, os_name, os_code_name)

    cmd = [
        "git",
        "clone",
        "--branch",
        tag,
        # fetch all branches and tags but no history
        "--depth",
        "1",
        "--no-single-branch",
        repo.release_repository.url,
        sources_dir,
    ]

    print("Invoking '%s'" % " ".join(cmd))
    subprocess.check_call(cmd)

    # ensure that the package version is correct
    source_version = dpkg_parsechangelog(sources_dir, ["Version"])[0]
    if not source_version.startswith(pkg_version) or (
        len(source_version) > len(pkg_version) and source_version[len(pkg_version)] in "0123456789"
    ):
        raise RuntimeError(
            (
                "The cloned package version from the GBP (%s) does not match "
                + "the expected package version from the distribution file (%s)"
            )
            % (source_version, pkg_version)
        )

    # output package version for job description
    print("Package '%s' version: %s" % (pkg_name, source_version))

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package

    pkg = parse_package(sources_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print("Package maintainer emails: %s" % " ".join(sorted(maintainer_emails)))
Example 12
def get_sources(rosdistro_index_url, rosdistro_name, pkg_name, os_name,
                os_code_name, sources_dir):
    from rosdistro import get_cached_distribution
    from rosdistro import get_index
    index = get_index(rosdistro_index_url)
    dist_file = get_cached_distribution(index, rosdistro_name)
    if pkg_name not in dist_file.release_packages:
        return 'Not a released package name: %s' % pkg_name

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]
    if not repo.release_repository.version:
        return "Repository '%s' has no release version" % repo_name

    pkg_version = repo.release_repository.version
    tag = _get_source_tag(rosdistro_name, pkg_name, pkg_version, os_name,
                          os_code_name)

    cmd = [
        'git',
        'clone',
        '--branch',
        tag,
        # fetch all branches and tags but no history
        '--depth',
        '1',
        '--no-single-branch',
        repo.release_repository.url,
        sources_dir
    ]

    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd)

    # ensure that the package version is correct
    source_version = dpkg_parsechangelog(sources_dir, ['Version'])[0]
    if not source_version.startswith(pkg_version) or \
            (len(source_version) > len(pkg_version) and
             source_version[len(pkg_version)] in '0123456789'):
        raise RuntimeError(
            ('The cloned package version from the GBP (%s) does not match ' +
             'the expected package version from the distribution file (%s)') %
            (source_version, pkg_version))

    # output package version for job description
    print("Package '%s' version: %s" % (pkg_name, source_version))

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(sources_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
Example 13
    def test_parse_generated(self):
        maint = self.get_maintainer()
        pack = PackageTemplate(name='bar',
                               package_format=2,
                               version='0.0.1',
                               version_compatibility='0.0.0',
                               urls=[Url('foo')],
                               description='pdesc',
                               maintainers=[maint],
                               licenses=['BSD'])
        try:
            rootdir = tempfile.mkdtemp()
            file1 = os.path.join(rootdir, 'CMakeLists.txt')
            file2 = os.path.join(rootdir, PACKAGE_MANIFEST_FILENAME)
            create_package_files(rootdir, pack, 'groovy')
            self.assertTrue(os.path.isfile(file1))
            self.assertTrue(os.path.isfile(file2))

            pack_result = parse_package(file2)
            self.assertEqual(pack.name, pack_result.name)
            self.assertEqual(pack.package_format, pack_result.package_format)
            self.assertEqual(pack.version, pack_result.version)
            self.assertEqual(pack.version_compatibility,
                             pack_result.version_compatibility)
            self.assertEqual(pack.description, pack_result.description)
            self.assertEqual(pack.maintainers[0].name,
                             pack_result.maintainers[0].name)
            self.assertEqual(pack.maintainers[0].email,
                             pack_result.maintainers[0].email)
            self.assertEqual(pack.authors, pack_result.authors)
            self.assertEqual(pack.urls[0].url, pack_result.urls[0].url)
            self.assertEqual('website', pack_result.urls[0].type)
            self.assertEqual(pack.licenses, pack_result.licenses)
            self.assertEqual(pack.build_depends, pack_result.build_depends)
            self.assertEqual(pack.buildtool_depends,
                             pack_result.buildtool_depends)
            self.assertEqual(pack.run_depends, pack_result.run_depends)
            self.assertEqual(pack.test_depends, pack_result.test_depends)
            self.assertEqual(pack.conflicts, pack_result.conflicts)
            self.assertEqual(pack.replaces, pack_result.replaces)
            self.assertEqual(pack.exports, pack_result.exports)

            rdict = generate_distutils_setup(package_xml_path=file2)
            self.assertEqual(
                {
                    'name': 'bar',
                    'maintainer': u('John Foo'),
                    'maintainer_email': '*****@*****.**',
                    'description': 'pdesc',
                    'license': 'BSD',
                    'version': '0.0.1',
                    'author': '',
                    'url': 'foo'
                }, rdict)
        finally:
            shutil.rmtree(rootdir)
Example 15
    def test_parse_package_valid(self):
        filename = os.path.join(test_data_dir, 'valid_package.xml')
        package = parse_package(filename)
        assert package.filename == filename
        assert not package.is_metapackage()
        assert package.name == 'valid_package'
        assert package.description == 'valid_package description'
        assert package.version == '0.1.0'
        assert package.licenses == ['BSD']
        assert [x.name for x in package.run_depends] == ['foo', 'bar', 'baz']
Example 16
def get_depends(pkg):
    """Get dependencies written as run_depend in package.xml"""
    global pkg_map
    if pkg_map is None:
        pkg_map = get_pkg_map()
    pkg_obj = pkg_map[pkg]
    pkg_xml_path = pkg_obj.filename
    depends = map(lambda x: x.name,
                  package.parse_package(pkg_xml_path).exec_depends)
    depends = list(set(depends))  # remove duplicates
    return depends
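Usage sketch; 'std_msgs' is just an example name and must exist in the map returned by get_pkg_map():

for dep in get_depends('std_msgs'):
    print(dep)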
Example 18
def find_packages(basepath, use_cache=True):
    global _cache
    if use_cache:
        _load_cache()
        distro_id = os.environ.get("ROS_DISTRO", None)
    packages = {}
    package_paths = find_package_paths(basepath)
    cache_updated = False
    for path in package_paths:
        pkg_dir = os.path.realpath(os.path.join(basepath, path))
        if use_cache:
            last_modified = os.path.getmtime(
                os.path.join(pkg_dir, PACKAGE_MANIFEST_FILENAME))
            path_ts = _cache.local_paths[
                pkg_dir].timestamp if pkg_dir in _cache.local_paths else 0
            if last_modified > path_ts:
                manifest = parse_package(pkg_dir)
                _cache.local_paths[pkg_dir] = CacheItem(
                    manifest, last_modified)
                cache_updated = True
            else:
                manifest = _cache.local_paths[pkg_dir].data
            if distro_id not in _cache.packages:
                _cache.packages[distro_id] = {}
            manifest_ts = _cache.packages[distro_id][
                manifest.name].timestamp if manifest.name in _cache.packages[
                    distro_id] else 0
            if last_modified > manifest_ts:
                _cache.packages[distro_id][manifest.name] = CacheItem(
                    PackageCacheData(path=pkg_dir, manifest=manifest),
                    last_modified)
                cache_updated = True
        else:
            manifest = parse_package(pkg_dir)
        packages[path] = manifest
    if cache_updated:
        _store_cache()
    for package in packages.values():
        if hasattr(package, "evaluate_conditions"):
            package.evaluate_conditions(os.environ)
    return packages
Example 19
def find_packages(basepath, use_cache=True):
    global _cache
    if use_cache:
        if _cache is None: _load_cache()
        distro_id = os.environ.get("ROS_DISTRO")
    packages = {}
    package_paths = []
    for dirpath, dirnames, filenames in os.walk(basepath, followlinks=True):
        if "CATKIN_IGNORE" in filenames:
            del dirnames[:]
            continue
        elif PACKAGE_MANIFEST_FILENAME in filenames:
            package_paths.append(os.path.relpath(dirpath, basepath))
            del dirnames[:]
            continue
        # prune hidden directories in place so os.walk does not descend into them
        dirnames[:] = [d for d in dirnames if not d.startswith('.')]
    cache_updated = False
    for path in package_paths:
        pkg_dir = os.path.realpath(os.path.join(basepath, path))
        if use_cache:
            last_modified = os.path.getmtime(os.path.join(pkg_dir, PACKAGE_MANIFEST_FILENAME))
            path_ts = _cache.local_paths[pkg_dir].timestamp if pkg_dir in _cache.local_paths else 0
            if last_modified > path_ts:
                manifest = parse_package(pkg_dir)
                _cache.local_paths[pkg_dir] = CacheItem(manifest, last_modified)
                cache_updated = True
            else:
                manifest = _cache.local_paths[pkg_dir].data
            if distro_id not in _cache.packages:
                _cache.packages[distro_id] = {}
            manifest_ts = _cache.packages[distro_id][manifest.name].timestamp if manifest.name in _cache.packages[distro_id] else 0
            if last_modified > manifest_ts:
                _cache.packages[distro_id][manifest.name] = CacheItem(PackageCacheData(path=pkg_dir, manifest=manifest), last_modified)
                cache_updated = True
        else:
            manifest = parse_package(pkg_dir)
        packages[path] = manifest
    if cache_updated:
        _store_cache()
    return packages
Example 20
def _get_package_name(test_case: type) -> str:
    path = Path(inspect.getfile(test_case))

    while not path.samefile(path.root):
        if (path / "package.xml").exists():
            package = parse_package(path / "package.xml", warnings=[])
            return package.name

        path = path.parent

    raise Exception(
        f"Could not determine package name for TestCase {test_case}")
Example 21
def find_catkin_packages(srcdir,
                         subdir=None,
                         cache=None,
                         cache_id="workspace_packages"):
    cached_paths = {}
    cache_update = False
    if cache is not None:
        cached_paths = cache.get_object(cache_id,
                                        WORKSPACE_PACKAGE_CACHE_VERSION,
                                        cached_paths)
    package_paths = []
    base_path = srcdir if subdir is None else os.path.join(srcdir, subdir)
    for curdir, subdirs, files in os_walk(base_path, followlinks=True):
        if "CATKIN_IGNORE" in files:
            del subdirs[:]
            continue
        if PACKAGE_MANIFEST_FILENAME in files:
            package_paths.append(os.path.relpath(curdir, srcdir))
            del subdirs[:]
            continue
        subdirs[:] = [d for d in subdirs if not d.startswith(".")]  # prune in place so the walk skips hidden dirs
    result = {}
    discovered_paths = {}
    for path in package_paths:
        try:
            cur_ts = os.path.getmtime(
                os.path.join(srcdir, path, PACKAGE_MANIFEST_FILENAME))
            manifest = None
            if path in cached_paths:
                old_ts = cached_paths[path]["t"]
                if old_ts == cur_ts:
                    manifest = cached_paths[path]["m"]
            if manifest is None:
                cache_update = True
                manifest = parse_package(
                    os.path.join(srcdir, path, PACKAGE_MANIFEST_FILENAME))
            if manifest.name not in result:
                result[manifest.name] = []
            result[manifest.name].append(
                Package(manifest=manifest, workspace_path=path))
            discovered_paths[path] = {"t": cur_ts, "m": manifest}
        except InvalidPackage as e:
            msg(str(e) + "\n")
            fatal("invalid package in workspace")
    if subdir is not None:
        for path, entry in iteritems(cached_paths):
            if not path_has_prefix(path, subdir):
                discovered_paths[path] = entry
    if cache is not None:
        if cache_update or len(cached_paths) != len(discovered_paths):
            cache.set_object(cache_id, WORKSPACE_PACKAGE_CACHE_VERSION,
                             discovered_paths)
    return result
Example 23
def main(argv=sys.argv[1:]):
    """
    Reads given package_xml and writes extracted variables to outfile.
    """
    parser = argparse.ArgumentParser(description="Read package.xml and write extracted variables to stdout")
    parser.add_argument('package_xml')
    parser.add_argument('outfile')
    args = parser.parse_args(argv)
    package = parse_package(args.package_xml)

    lines = _get_output(package)
    with open(args.outfile, 'w') as ofile:
        ofile.write('\n'.join(lines))
Example 24
    def test_parse_generated(self):
        maint = self.get_maintainer()
        pack = PackageTemplate(name='bar',
                               package_format=1,
                               version='0.0.0',
                               version_abi='pabi',
                               urls=[Url('foo')],
                               description='pdesc',
                               maintainers=[maint],
                               licenses=['BSD'])
        try:
            rootdir = tempfile.mkdtemp()
            file1 = os.path.join(rootdir, 'CMakeLists.txt')
            file2 = os.path.join(rootdir, PACKAGE_MANIFEST_FILENAME)
            create_package_files(rootdir, pack, 'groovy')
            self.assertTrue(os.path.isfile(file1))
            self.assertTrue(os.path.isfile(file2))

            pack_result = parse_package(file2)
            self.assertEqual(pack.name, pack_result.name)
            self.assertEqual(pack.package_format, pack_result.package_format)
            self.assertEqual(pack.version, pack_result.version)
            self.assertEqual(pack.version_abi, pack_result.version_abi)
            self.assertEqual(pack.description, pack_result.description)
            self.assertEqual(pack.maintainers[0].name, pack_result.maintainers[0].name)
            self.assertEqual(pack.maintainers[0].email, pack_result.maintainers[0].email)
            self.assertEqual(pack.authors, pack_result.authors)
            self.assertEqual(pack.urls[0].url, pack_result.urls[0].url)
            self.assertEqual('website', pack_result.urls[0].type)
            self.assertEqual(pack.licenses, pack_result.licenses)
            self.assertEqual(pack.build_depends, pack_result.build_depends)
            self.assertEqual(pack.buildtool_depends, pack_result.buildtool_depends)
            self.assertEqual(pack.run_depends, pack_result.run_depends)
            self.assertEqual(pack.test_depends, pack_result.test_depends)
            self.assertEqual(pack.conflicts, pack_result.conflicts)
            self.assertEqual(pack.replaces, pack_result.replaces)
            self.assertEqual(pack.exports, pack_result.exports)

            rdict = generate_distutils_setup(package_xml_path=file2)
            self.assertEqual({'name': 'bar',
                              'maintainer': u'John Foo',
                              'maintainer_email': '*****@*****.**',
                              'description': 'pdesc',
                              'license': 'BSD',
                              'version': '0.0.0',
                              'author': '',
                              'url': 'foo'}, rdict)
        finally:
            shutil.rmtree(rootdir)
Example 25
def get_local_package_versions() -> dict:
    """
    Return local package name and versions.

    :return: dictionary of local package name and version
    """
    local_packages = {}
    package_name_prefixes = get_packages_with_prefixes()
    if package_name_prefixes:
        for name, prefix in package_name_prefixes.items():
            file_path = os.path.join(prefix, 'share', name)
            package_obj = parse_package(file_path)
            local_packages[name] = package_obj.version or ''
    return local_packages
Example 26
    def processPackage(self, s, ws):
        verbose("Testing package candidate in folder '%s'." % s)
        if isPackageSrc(s):
            p = parse_package(s)
            verbose("Found package named '%s'." % p.name)
            if p.name == self.searchPackageName:
                if self.cnt == 0:
                    self.workspace = Workspace(ws)
                    self.src = s
                    okay('-- Found {0} in workspace {1} with source {2}'.format(self.searchPackageName, ws, s))
                elif self.workspace._develspace != ws or self.src != s:
                    error('-- Package ambiguous also in workspace {0} with source {1}'.format(ws, s))
                self.cnt += 1
        else:
            verbose("Not a package.")
    def _find_plugins(self, export_tag, discovery_data):
        crawl = True
        if discovery_data:
            data = discovery_data.get_settings('rqt_gui.RospkgPluginProvider')
            export_data = data.get_settings(export_tag)
            crawl = export_tag not in data.child_groups()

        plugins = []
        if crawl:
            qDebug("RospkgPluginProvider._find_plugins() crawling for plugins of type '%s'" %
                   export_tag)
            for package_name, package_path in get_packages_with_prefixes().items():
                package_share_path = os.path.join(package_path, 'share', package_name)
                package_file_path = os.path.join(
                    package_share_path, PACKAGE_MANIFEST_FILENAME)
                if os.path.isfile(package_file_path):
                    # only try to import catkin if a PACKAGE_FILE is found
                    try:
                        package = parse_package(package_file_path)
                    except InvalidPackage as e:
                        qWarning('Could not parse package file "%s":\n%s' % (package_file_path, e))
                        continue
                    for export in package.exports:
                        if export.tagname != export_tag or 'plugin' not in export.attributes:
                            continue
                        plugin_xml_path = export.attributes['plugin']
                        plugin_xml_path = plugin_xml_path.replace('${prefix}', package_share_path)
                        plugins.append([package_name, plugin_xml_path])
                    continue
            # write crawling information to cache
            if discovery_data:
                plugins_by_package = {}
                for (package_name, export) in plugins:
                    if package_name not in plugins_by_package:
                        plugins_by_package[package_name] = []
                    plugins_by_package[package_name].append(export)
                for package_name, exports in plugins_by_package.items():
                    export_data.set_value(package_name, os.pathsep.join([str(e) for e in exports]))

        else:
            # use cached information
            for package_name in export_data.all_keys():
                exports = export_data.value(package_name)
                if exports:
                    for export in exports.split(os.pathsep):
                        plugins.append([package_name, export])

        return plugins
Example 28
def _get_package(path: str):
    """Get the ROS package for the given path."""
    from catkin_pkg.package import InvalidPackage
    from catkin_pkg.package import package_exists_at
    from catkin_pkg.package import parse_package

    if not package_exists_at(path):
        return None

    try:
        pkg = parse_package(path)
    except (AssertionError, InvalidPackage):
        return None

    pkg.evaluate_conditions(os.environ)
    return pkg
Example 29
    def plugin_help_request(self, plugin_descriptor):
        package_name = plugin_descriptor.attributes()['package_name']
        package_path = get_package_path(package_name)
        try:
            package = parse_package(package_path)
        except (InvalidPackage, IOError):
            return

        if len(package.urls) == 0:
            return
        url_str = package.urls[0].url
        for url in package.urls:
            if url.type == 'website':
                url_str = url.url
                break
        webbrowser.open(url_str)
Example 30
    def _add_package(self, package_name, parent=None):
        """
        adds object based on package_name to self.packages
        :param parent: packagename which referenced package_name (for debugging only)
        """
        if self._hide_package(package_name):
            return
        if package_name in self.packages:
            return False

        catkin_package = self._is_package_wet(package_name)
        if catkin_package is None:
            return False
        self.packages[package_name] = {'is_catkin': catkin_package}

        if self.with_stacks:
            try:
                if self._is_package_wet(package_name):
                    # get stackname (metapackage) where package_name belongs to
                    stackname = None
                    # for all metapackages
                    for name in self.rosstack.list():
                        # if package_name is one of dependency of metapackage (name)
                        if package_name in self.rosstack.get_depends(
                                name, implicit=False):
                            # if package_name is buildtool depend, remove it.
                            from catkin_pkg.package import parse_package
                            p = parse_package(
                                self.rosstack.get_path(name) + '/package.xml')
                            if package_name not in [
                                    d.name for d in p.buildtool_depends
                            ]:
                                stackname = name
                else:
                    # for old dry package
                    stackname = self.rospack.stack_of(package_name)
            except ResourceNotFound as e:
                print(
                    'RosPackageGraphDotcodeGenerator._add_package(%s), parent %s: ResourceNotFound:'
                    % (package_name, parent), e)
                stackname = None
            if stackname is not None and stackname != '':
                if stackname not in self.stacks:
                    self._add_stack(stackname)
                self.stacks[stackname]['packages'].append(package_name)
        return True
Example 31
    def _get_packages(self, path):
        result = {}
        if os.path.isdir(path):
            fileList = os.listdir(path)
            if MANIFEST_FILE in fileList:
                return {os.path.basename(path): path}
            if CATKIN_SUPPORTED and PACKAGE_FILE in fileList:
                try:
                    pkg = parse_package(path)
                    return {pkg.name: path}
                except Exception:
                    pass
                return {}
            for f in fileList:
                ret = self._get_packages(os.path.join(path, f))
                result.update(ret)
        return result
Example 32
def get_ros2_messages():
    pkgs = []
    msgs = []
    rules = []
    # get messages from packages
    resource_type = 'rosidl_interfaces'
    resources = ament_index_python.get_resources(resource_type)
    for package_name, prefix_path in resources.items():
        pkgs.append(package_name)
        resource, _ = ament_index_python.get_resource(resource_type,
                                                      package_name)
        interfaces = resource.splitlines()
        message_names = {
            i[4:-4]
            for i in interfaces
            if i.startswith('msg/') and i[-4:] in ('.idl', '.msg')
        }

        for message_name in sorted(message_names):
            msgs.append(Message(package_name, message_name, prefix_path))
        # check package manifest for mapping rules
        package_path = os.path.join(prefix_path, 'share', package_name)
        pkg = parse_package(package_path)
        for export in pkg.exports:
            if export.tagname != 'ros1_bridge':
                continue
            if 'mapping_rules' not in export.attributes:
                continue
            rule_file = os.path.join(package_path,
                                     export.attributes['mapping_rules'])
            with open(rule_file, 'r') as h:
                content = yaml.safe_load(h)
            if not isinstance(content, list):
                print(
                    "The content of the mapping rules in '%s' is not a list" %
                    rule_file,
                    file=sys.stderr)
                continue
            for data in content:
                if all(n not in data
                       for n in ('ros1_service_name', 'ros2_service_name')):
                    try:
                        rules.append(MessageMappingRule(data, package_name))
                    except Exception as e:  # noqa: B902
                        print('%s' % str(e), file=sys.stderr)
    return pkgs, msgs, rules
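Usage sketch; the result depends on which packages registering 'rosidl_interfaces' resources are present in the current ament index:

pkgs, msgs, rules = get_ros2_messages()
print('%d packages, %d messages, %d mapping rules' % (len(pkgs), len(msgs), len(rules)))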
Example 33
def get_packages(path):
    result = {}
    if os.path.isdir(path):
        fileList = os.listdir(path)
        if MANIFEST_FILE in fileList:
            return {os.path.basename(path): path}
        if CATKIN_SUPPORTED and PACKAGE_FILE in fileList:
            try:
                pkg = parse_package(path)
                return {pkg.name: path}
            except Exception:
                pass
            return {}
        for f in fileList:
            ret = get_packages(os.path.join(path, f))
            result = {**ret, **result}  # entries already in result win, as in the Python 2 dict(...) original
    return result
Example 34
def calculate_missing(base_pkg, missing, file_deps, use_test_depends=False):
    """
    Calculate missing package dependencies in the manifest. This is
    mainly used as a subroutine of roslaunch_deps().
    
    @param base_pkg: name of package where initial walk begins (unused).
    @type  base_pkg: str
    @param missing: dictionary mapping package names to set of missing package dependencies.
    @type  missing: { str: set(str) }
    @param file_deps: dictionary mapping launch file names to RoslaunchDeps of each file
    @type  file_deps: { str: RoslaunchDeps}
    @param use_test_depends [bool]: use test_depends as installed package
    @type  use_test_depends: [bool]
    @return: missing (see parameter)
    @rtype: { str: set(str) }
    """
    rospack = rospkg.RosPack()
    for launch_file in file_deps.keys():
        pkg = rospkg.get_package_name(
            os.path.dirname(os.path.abspath(launch_file)))

        if pkg is None:  # cannot determine package
            print("ERROR: cannot determine package for [%s]" % launch_file,
                  file=sys.stderr)
            continue
        m = rospack.get_manifest(pkg)
        d_pkgs = set([d.name for d in m.depends])
        if m.is_catkin:
            # for catkin packages consider the run dependencies instead,
            # otherwise unreleased packages will not appear in the dependency list,
            # since rospkg uses rosdep to decide which dependencies to return
            from catkin_pkg.package import parse_package
            p = parse_package(os.path.dirname(m.filename))
            d_pkgs = set([d.name for d in p.run_depends])
            if use_test_depends:
                for d in p.test_depends:
                    d_pkgs.add(d.name)
        # make sure we don't count ourselves as a dep
        d_pkgs.add(pkg)

        diff = list(set(file_deps[launch_file].pkgs) - d_pkgs)
        if pkg not in missing:
            missing[pkg] = set()
        missing[pkg].update(diff)
    return missing
Example 35
def main(argv=sys.argv[1:]):
    """Read given package_xml and writes extracted variables to outfile."""
    parser = argparse.ArgumentParser(
        description='Read package.xml and write extracted variables to stdout')
    parser.add_argument('package_xml')
    parser.add_argument('outfile')
    args = parser.parse_args(argv)
    package = parse_package(args.package_xml)

    # Force utf8 encoding for python3.
    # This way unicode files can still be processed on non-unicode locales.
    kwargs = {}
    if sys.version_info.major >= 3:
        kwargs['encoding'] = 'utf8'

    lines = _get_output(package)
    with open(args.outfile, 'w', **kwargs) as ofile:
        ofile.write('\n'.join(lines))
Example 36
def find_catkin_packages(srcdir, subdir=None, cache=None, cache_id="workspace_packages"):
    cached_paths = {}
    cache_update = False
    if cache is not None:
        cached_paths = cache.get_object(cache_id, WORKSPACE_PACKAGE_CACHE_VERSION, cached_paths)
    package_paths = []
    base_path = srcdir if subdir is None else os.path.join(srcdir, subdir)
    for curdir, subdirs, files in os_walk(base_path, followlinks=True):
        if "CATKIN_IGNORE" in files:
            del subdirs[:]
            continue
        if PACKAGE_MANIFEST_FILENAME in files:
            package_paths.append(os.path.relpath(curdir, srcdir))
            del subdirs[:]
            continue
        subdirs[:] = [d for d in subdirs if not d.startswith(".")]  # prune in place so the walk skips hidden dirs
    result = {}
    discovered_paths = {}
    for path in package_paths:
        try:
            cur_ts = os.path.getmtime(os.path.join(srcdir, path, PACKAGE_MANIFEST_FILENAME))
            manifest = None
            if path in cached_paths:
                old_ts = cached_paths[path]["t"]
                if old_ts == cur_ts:
                    manifest = cached_paths[path]["m"]
            if manifest is None:
                cache_update = True
                manifest = parse_package(os.path.join(srcdir, path, PACKAGE_MANIFEST_FILENAME))
            if manifest.name not in result:
                result[manifest.name] = []
            result[manifest.name].append(Package(manifest=manifest, workspace_path=path))
            discovered_paths[path] = {"t": cur_ts, "m": manifest}
        except InvalidPackage as e:
            msg(str(e) + "\n")
            fatal("invalid package in workspace")
    if subdir is not None:
        for path, entry in iteritems(cached_paths):
            if not path_has_prefix(path, subdir):
                discovered_paths[path] = entry
    if cache is not None:
        if cache_update or len(cached_paths) != len(discovered_paths):
            cache.set_object(cache_id, WORKSPACE_PACKAGE_CACHE_VERSION, discovered_paths)
    return result
Example 37
    def _build(self, package_path):
        package = parse_package(package_path)
        package_dir = self._builder.get_package_path(package)

        print "Building {} version {}".format(package.name, package.version)

        print "Validating package"
        package.validate()

        print "Updating rosdep"
        self._builder.rosdep_update()

        print "Checking rosdep sanity"
        self._builder.rosdep_sanity_check()

        # XXX: rosdep only resolves public dependencies
        # print "Installing dependencies"
        # self._builder.rosdep_install_deps(package_dir)

        print "Creating tempdir"
        self.tempdir = tempfile.mkdtemp()

        print "Copying sources to tempdir"
        builddir = self._get_builddir()
        shutil.copytree(src=package_dir, dst=builddir, symlinks=True)

        print "Writing debian metadata"
        self._write_debfiles(package)

        print "Creating upstream tarball"
        debian_name = self._builder.catkin_to_apt_name(package.name)
        upstream_filename = os.path.join(
            self.tempdir, '{}_{}.orig.tar.gz'.format(debian_name,
                                                     package.version))
        with tarfile.open(upstream_filename, 'w:gz') as tar:
            tar.add(builddir, arcname='')

        print "Running dpkg-buildpackage"
        subprocess.check_output(args=['dpkg-buildpackage', '-d', '-uc', '-us'],
                                cwd=builddir)

        print "Copying deb package to {}".format(os.getcwd())
        for f in glob.glob(os.path.join(self.tempdir, '*.deb')):
            shutil.copy(f, os.getcwd())
Example 38
    def _find_plugins(self, export_tag):
        plugins = []
        r = RosPack()
        for package_name in r.list():
            package_path = r.get_path(package_name)
            manifest_file_path = os.path.join(package_path, MANIFEST_FILE)
            if os.path.isfile(manifest_file_path):
                try:
                    manifest = parse_manifest_file(package_path, MANIFEST_FILE)
                except InvalidManifest as e:
                    qWarning('Could not parse manifest "%s":\n%s' %
                             (manifest_file_path, e))
                    continue
                exports = manifest.get_export(export_tag, 'plugin')
                for export in exports:
                    plugins.append([package_name, str(export)])
                continue

            package_file_path = os.path.join(package_path, PACKAGE_FILE)
            if os.path.isfile(package_file_path):
                # only try to import catkin if a PACKAGE_FILE is found
                try:
                    from catkin_pkg.package import parse_package, InvalidPackage
                except ImportError as e:
                    qWarning(
                        'Package "%s" has a package file, but import of parser failed:\n%s'
                        % (package_path, e))
                    continue
                try:
                    package = parse_package(package_file_path)
                except InvalidPackage as e:
                    qWarning('Could not parse package file "%s":\n%s' %
                             (package_file_path, e))
                    continue
                for export in package.exports:
                    if export.tagname != export_tag or 'plugin' not in export.attributes:
                        continue
                    plugin_xml_path = export.attributes['plugin']
                    plugin_xml_path = plugin_xml_path.replace(
                        '${prefix}', package_path)
                    plugins.append([package_name, plugin_xml_path])
                continue
        return plugins
Example 39
def process_package(package_name, soft_fail=True):
    # type: (str, bool) -> Tuple[List[str], List[str]]
    try:
        package_path = find_in_workspaces(
            project=package_name,
            path="package.xml",
            first_match_only=True,
        )[0]
    except IndexError:
        if not soft_fail:
            raise RuntimeError(
                "Unable to process package {}".format(package_name))
        else:
            # This is not a catkin dependency
            return [], []
    else:
        package = parse_package(package_path)
        dependencies = package.build_depends + package.exec_depends + package.test_depends
        return parse_exported_requirements(package), dependencies
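Usage sketch with a hypothetical package name; with the default soft_fail=True, a name that cannot be resolved via find_in_workspaces yields two empty lists instead of raising:

requirements, dependencies = process_package('my_pkg')
print(requirements, [d.name for d in dependencies])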
Example 40
 def processPackage(self, s, ws):
     verbose("Testing package candidate in folder '%s'." % s)
     if isPackageSrc(s):
         p = parse_package(s)
         verbose("Found package named '%s'." % p.name)
         if (p.name == self.searchPackageName):
             if self.cnt == 0:
                 self.workspace = Workspace(ws)
                 self.src = s
                 okay(
                     '-- Found {0} in workspace {1} with source {2}'.format(
                         self.searchPackageName, ws, s))
             elif self.workspace._develspace != ws or self.src != s:
                 error(
                     '-- Package ambiguous also in workspace {0} with source {1}'
                     .format(ws, s))
             self.cnt += 1
     else:
         verbose("Is no package.")
Example 41
def main(argv=sys.argv[1:]):
    """
    Reads given package_xml and writes extracted variables to outfile.
    """
    parser = argparse.ArgumentParser(description="Read package.xml and write extracted variables to stdout")
    parser.add_argument('package_xml')
    parser.add_argument('outfile')
    args = parser.parse_args(argv)
    package = parse_package(args.package_xml)

    # Force utf8 encoding for python3.
    # This way unicode files can still be processed on non-unicode locales.
    kwargs = {}
    if sys.version_info.major >= 3:
        kwargs['encoding'] = 'utf8'

    lines = _get_output(package)
    with open(args.outfile, 'w', **kwargs) as ofile:
        ofile.write('\n'.join(lines))
Example 42
def calculate_missing(base_pkg, missing, file_deps, use_test_depends=False):
    """
    Calculate missing package dependencies in the manifest. This is
    mainly used as a subroutine of roslaunch_deps().
    
    @param base_pkg: name of package where initial walk begins (unused).
    @type  base_pkg: str
    @param missing: dictionary mapping package names to set of missing package dependencies.
    @type  missing: { str: set(str) }
    @param file_deps: dictionary mapping launch file names to RoslaunchDeps of each file
    @type  file_deps: { str: RoslaunchDeps}
    @param use_test_depends [bool]: use test_depends as installed package
    @type  use_test_depends: [bool]
    @return: missing (see parameter)
    @rtype: { str: set(str) }
    """
    rospack = rospkg.RosPack()
    for launch_file in file_deps.keys():
        pkg = rospkg.get_package_name(os.path.dirname(os.path.abspath(launch_file)))

        if pkg is None:  # cannot determine package
            print("ERROR: cannot determine package for [%s]" % launch_file, file=sys.stderr)
            continue
        m = rospack.get_manifest(pkg)
        d_pkgs = set([d.name for d in m.depends])
        if m.is_catkin:
            # for catkin packages consider the run dependencies instead,
            # otherwise unreleased packages will not appear in the dependency list,
            # since rospkg uses rosdep to decide which dependencies to return
            from catkin_pkg.package import parse_package
            p = parse_package(os.path.dirname(m.filename))
            d_pkgs = set([d.name for d in p.run_depends])
            if use_test_depends:
                for d in p.test_depends:
                    d_pkgs.add(d.name)
        # make sure we don't count ourselves as a dep
        d_pkgs.add(pkg)

        diff = list(set(file_deps[launch_file].pkgs) - d_pkgs)
        if pkg not in missing:
            missing[pkg] = set()
        missing[pkg].update(diff)
    return missing
Example 43
def get_pkg_desc(path):
    # Check whether we've got a catkin or non-catkin package
    if is_catkin(path):
        pkg_desc = parse_package(path)
        print("Documenting a catkin package")
    else:
        rp = rospkg.RosPack()
        package = os.path.basename(path)
        try:
            ros_path = os.path.realpath(rp.get_path(package))
        except rospkg.common.ResourceNotFound as e:
            sys.stderr.write("Rospack could not find the %s. Are you sure it's on your ROS_PACKAGE_PATH?\n" % package)
            sys.exit(1)
        if ros_path != path:
            sys.stderr.write("The path passed in does not match that returned \
                             by rospack. Requested path: %s. Rospack path: %s.\n" % (path, ros_path))
            sys.exit(1)
        pkg_desc = rp.get_manifest(package)
        print("Documenting a non-catkin package")
    return pkg_desc
Example 44
def get_package(path):
    """Get the ROS package for the given path."""
    if not package_exists_at(path):
        raise RuntimeError(
            f"Failed to find a ROS package at given path '{path}'")

    try:
        package = parse_package(path)
    except (AssertionError, InvalidPackage) as e:
        if has_ros_schema_reference(path):
            raise RuntimeError(
                f"Failed to parse ROS package manifest in '{path}': {e}")
        else:
            raise RuntimeError(
                f"Failed to parse potential ROS package manifest in '{path}': {e}"
            )

    package.evaluate_conditions(os.environ)
    return package
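A hedged usage sketch (the path is illustrative). Since get_package() already calls evaluate_conditions() with os.environ, condition attributes on the parsed dependencies are resolved by the time it returns:

pkg = get_package('/path/to/my_pkg')
print(pkg.name, pkg.version)
# After evaluate_conditions(), each dependency carries an evaluated_condition
# flag reflecting conditions such as a $ROS_VERSION check in package.xml.
print([d.name for d in pkg.build_depends if d.evaluated_condition])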
Example 45
def get_ros2_messages():
    pkgs = []
    msgs = []
    rules = []
    # get messages from packages
    resource_type = 'rosidl_interfaces'
    resources = ament_index_python.get_resources(resource_type)
    for package_name, prefix_path in resources.items():
        pkgs.append(package_name)
        resource, _ = ament_index_python.get_resource(resource_type, package_name)
        interfaces = resource.splitlines()
        message_names = {
            i[4:-4]
            for i in interfaces
            if i.startswith('msg/') and i[-4:] in ('.idl', '.msg')}

        for message_name in sorted(message_names):
            msgs.append(Message(package_name, message_name, prefix_path))
        # check package manifest for mapping rules
        package_path = os.path.join(prefix_path, 'share', package_name)
        pkg = parse_package(package_path)
        for export in pkg.exports:
            if export.tagname != 'ros1_bridge':
                continue
            if 'mapping_rules' not in export.attributes:
                continue
            rule_file = os.path.join(package_path, export.attributes['mapping_rules'])
            with open(rule_file, 'r') as h:
                content = yaml.safe_load(h)
            if not isinstance(content, list):
                print(
                    "The content of the mapping rules in '%s' is not a list" % rule_file,
                    file=sys.stderr)
                continue
            for data in content:
                if all(n not in data for n in ('ros1_service_name', 'ros2_service_name')):
                    try:
                        rules.append(MessageMappingRule(data, package_name))
                    except Exception as e:
                        print(str(e), file=sys.stderr)
    return pkgs, msgs, rules
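For reference, a hedged sketch of the mapping-rules YAML this function consumes; the rule content is illustrative only:

import yaml

example_rules = yaml.safe_load("""
- ros1_package_name: foo_msgs
  ros2_package_name: foo_interfaces
""")
# Mirrors the check above: the top-level document must be a list of rules.
assert isinstance(example_rules, list)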
Example 46
def get_sourcedeb(
        rosdistro_name, package_name, sourcedeb_dir,
        skip_download_sourcedeb=False):
    # ensure that no source subfolder exists
    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert not subfolders, \
        ("Sourcedeb directory '%s' must not have any " +
         "subfolders starting with '%s-'") % (sourcedeb_dir, package_name)

    if not skip_download_sourcedeb:
        # download sourcedeb
        cmd = ['apt-get', 'source', debian_package_name, '--download-only']
        print("Invoking '%s'" % ' '.join(cmd))
        subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # extract sourcedeb
    filenames = _get_package_dsc_filename(sourcedeb_dir, debian_package_name)
    assert len(filenames) == 1, filenames
    dsc_filename = filenames[0]
    cmd = ['dpkg-source', '-x', dsc_filename]
    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # ensure that one source subfolder exists
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(source_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
Example 47
    def _find_plugins(self, export_tag, discovery_data):
        crawl = True
        if discovery_data:
            data = discovery_data.get_settings('rqt_gui.RospkgPluginProvider')
            export_data = data.get_settings(export_tag)
            crawl = export_tag not in data.child_groups()

        plugins = []
        if crawl:
            qDebug("RospkgPluginProvider._find_plugins() crawling for plugins of type '%s'" % export_tag)
            r = RospkgPluginProvider.rospack
            for package_name in r.list():
                package_path = r.get_path(package_name)
                manifest_file_path = os.path.join(package_path, MANIFEST_FILE)
                if os.path.isfile(manifest_file_path):
                    try:
                        manifest = parse_manifest_file(package_path, MANIFEST_FILE)
                    except InvalidManifest as e:
                        qWarning('Could not parse manifest "%s":\n%s' % (manifest_file_path, e))
                        continue
                    exports = manifest.get_export(export_tag, 'plugin')
                    for export in exports:
                        plugins.append([package_name, str(export)])
                    continue

                package_file_path = os.path.join(package_path, PACKAGE_FILE)
                if os.path.isfile(package_file_path):
                    # only try to import catkin if a PACKAGE_FILE is found
                    try:
                        from catkin_pkg.package import parse_package, InvalidPackage
                    except ImportError as e:
                        qWarning('Package "%s" has a package file, but import of parser failed:\n%s' % (package_path, e))
                        continue
                    try:
                        package = parse_package(package_file_path)
                    except InvalidPackage as e:
                        qWarning('Could not parse package file "%s":\n%s' % (package_file_path, e))
                        continue
                    for export in package.exports:
                        if export.tagname != export_tag or 'plugin' not in export.attributes:
                            continue
                        plugin_xml_path = export.attributes['plugin']
                        plugin_xml_path = plugin_xml_path.replace('${prefix}', package_path)
                        plugins.append([package_name, plugin_xml_path])
                    continue

            # write crawling information to cache
            if discovery_data:
                plugins_by_package = {}
                for (package_name, export) in plugins:
                    if package_name not in plugins_by_package:
                        plugins_by_package[package_name] = []
                    plugins_by_package[package_name].append(export)
                for package_name, exports in plugins_by_package.items():
                    export_data.set_value(package_name, os.pathsep.join([str(e) for e in exports]))

        else:
            # use cached information
            for package_name in export_data.all_keys():
                exports = export_data.value(package_name)
                if exports:
                    for export in exports.split(os.pathsep):
                        plugins.append([package_name, export])

        return plugins
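For orientation, the catkin branch above matches an export of roughly this shape in package.xml (a sketch; the tag name equals the export_tag argument, commonly 'rqt_gui'):

#   <export>
#     <rqt_gui plugin="${prefix}/plugin.xml"/>
#   </export>
#
# The '${prefix}' placeholder is replaced with the package path before the
# plugin manifest path is recorded.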
Example 48
    def test_parse_generated_multi(self):
        # test with multiple attributes filled
        maint = self.get_maintainer()
        pack = PackageTemplate(name='bar',
                               package_format=1,
                               version='0.0.0',
                               version_abi='pabi',
                               description='pdesc',
                               maintainers=[maint, maint],
                               authors=[maint, maint],
                               licenses=['BSD', 'MIT'],
                               urls=[Url('foo', 'bugtracker'), Url('bar')],
                               build_depends=[Dependency('dep1')],
                               buildtool_depends=[Dependency('dep2'),
                                                  Dependency('dep3')],
                               run_depends=[Dependency('dep4', version_lt='4')],
                               test_depends=[Dependency('dep5',
                                                        version_gt='4',
                                                        version_lt='4')],
                               conflicts=[Dependency('dep6')],
                               replaces=[Dependency('dep7'),
                                         Dependency('dep8')],
                               exports=[Export('architecture_independent'),
                                        Export('meta_package')])

        def assertEqualDependencies(deplist1, deplist2):
            if len(deplist1) != len(deplist2):
                return False
            for depx, depy in zip(deplist1, deplist2):
                for attr in ['name', 'version_lt', 'version_lte',
                             'version_eq', 'version_gte', 'version_gt']:
                    if getattr(depx, attr) != getattr(depy, attr):
                        return False
            return True

        try:
            rootdir = tempfile.mkdtemp()
            file2 = os.path.join(rootdir, 'package.xml')
            create_package_files(rootdir, pack, {})
            self.assertTrue(os.path.isfile(file2))

            pack_result = parse_package(file2)
            self.assertEqual(pack.name, pack_result.name)
            self.assertEqual(pack.package_format, pack_result.package_format)
            self.assertEqual(pack.version, pack_result.version)
            self.assertEqual(pack.version_abi, pack_result.version_abi)
            self.assertEqual(pack.description, pack_result.description)
            self.assertEqual(len(pack.maintainers), len(pack_result.maintainers))
            self.assertEqual(len(pack.authors), len(pack_result.authors))
            self.assertEqual(len(pack.urls), len(pack_result.urls))
            self.assertEqual(pack.urls[0].url, pack_result.urls[0].url)
            self.assertEqual(pack.urls[0].type, pack_result.urls[0].type)
            self.assertEqual(pack.licenses, pack_result.licenses)
            self.assertTrue(assertEqualDependencies(pack.build_depends,
                                                    pack_result.build_depends))
            self.assertTrue(assertEqualDependencies(pack.buildtool_depends,
                                                    pack_result.buildtool_depends))
            self.assertTrue(assertEqualDependencies(pack.run_depends,
                                                    pack_result.run_depends))
            self.assertTrue(assertEqualDependencies(pack.test_depends,
                                                    pack_result.test_depends))
            self.assertTrue(assertEqualDependencies(pack.conflicts,
                                                    pack_result.conflicts))
            self.assertTrue(assertEqualDependencies(pack.replaces,
                                                    pack_result.replaces))
            self.assertEqual(pack.exports[0].tagname, pack_result.exports[0].tagname)
            self.assertEqual(pack.exports[1].tagname, pack_result.exports[1].tagname)

            rdict = parse_package_for_distutils(file2)
            self.assertEqual({'name': 'bar',
                              'maintainer': u'John Foo <*****@*****.**>, John Foo <*****@*****.**>',
                              'description': 'pdesc',
                              'license': 'BSD, MIT',
                              'version': '0.0.0',
                              'author': u'John Foo <*****@*****.**>, John Foo <*****@*****.**>',
                              'url': 'bar',
                              'keywords': ['ROS']}, rdict)
        finally:
            shutil.rmtree(rootdir)
Example 49
# This file was created by sphinx-quickstart on Fri Jun 17 16:07:12 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os
from catkin_pkg.package import parse_package, PACKAGE_MANIFEST_FILENAME
catkin_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
catkin_package = parse_package(os.path.join(catkin_dir, PACKAGE_MANIFEST_FILENAME))

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
Example 50
def process_package_xml(args, directory=None):
    cwd = directory if directory else '.'
    xml_path = os.path.join(cwd, 'package.xml')
    if not os.path.exists(xml_path):
        bailout("No package.xml file found at: {0}".format(xml_path))
    try:
        from catkin_pkg.package import parse_package
    except ImportError:
        error("catkin_pkg was not detected, please install it.",
              file=sys.stderr)
        sys.exit(1)
    package = parse_package(xml_path)

    data = {}
    data['Name'] = package.name
    data['Version'] = package.version
    data['Description'] = debianize_string(package.description)
    websites = [str(url) for url in package.urls if url.type == 'website']
    homepage = websites[0] if websites else ''
    if homepage == '':
        warning("No homepage set, defaulting to ''")
    data['Homepage'] = homepage

    data['Catkin-ChangelogType'] = ''
    data['Catkin-DebRulesType'] = 'cmake'
    data['Catkin-DebRulesFile'] = ''
    # data['Catkin-CopyrightType'] = package.copyright
    # data['copyright'] = package.copyright

    data['DebianInc'] = args.debian_revision
    if args.rosdistro == 'backports':
        data['Package'] = sanitize_package_name("%s" % (package.name))
    else:
        data['Package'] = \
            sanitize_package_name("ros-%s-%s" % (args.rosdistro, package.name))

    data['ROS_DISTRO'] = args.rosdistro

    # allow override of these values
    if args.rosdistro == 'backports':
        data['INSTALL_PREFIX'] = \
            args.install_prefix if args.install_prefix is not None else '/usr'
    else:
        data['INSTALL_PREFIX'] = \
            args.install_prefix if args.install_prefix is not None \
                                else '/opt/ros/%s' % args.rosdistro

    data['Depends'] = set([d.name for d in package.run_depends])
    build_deps = (package.build_depends + package.buildtool_depends)
    data['BuildDepends'] = set([d.name for d in build_deps])

    maintainers = []
    for m in package.maintainers:
        maintainers.append(str(m))
    data['Maintainer'] = ', '.join(maintainers)

    # Go over the different subfolders and find all the packages
    package_descriptions = {}

    # search for manifest in current folder and direct subfolders
    for dir_name in [cwd] + os.listdir(cwd):
        if not os.path.isdir(dir_name):
            continue
        dir_path = os.path.join('.', dir_name)
        for file_name in os.listdir(dir_path):
            if file_name == 'manifest.xml':
                # parse the manifest (raises if it is not valid)
                manifest = rospkg.parse_manifest_file(dir_path, file_name)
                # remove markups
                if manifest.description is None:
                    manifest.description = ''
                description = debianize_string(manifest.description)
                if dir_name == '.':
                    dir_name = package.name
                package_descriptions[dir_name] = description
    # Enhance the description with the list of packages in the stack
    if package_descriptions:
        if data['Description']:
            data['Description'] += '\n .\n'
        data['Description'] += ' This stack contains the packages:'
        for name, description in package_descriptions.items():
            data['Description'] += '\n * %s: %s' % (name, description)

    return data
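A hedged usage sketch; the namespace below carries only the attributes process_package_xml() reads, and all values are illustrative:

import argparse

args = argparse.Namespace(
    rosdistro='indigo',
    debian_revision=0,
    install_prefix=None)
data = process_package_xml(args, directory='/path/to/stack')
print(data['Package'], sorted(data['Depends']))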
Example 51
def build_isolated_workspace(
    context,
    packages=None,
    start_with=None,
    no_deps=False,
    unbuilt=False,
    n_jobs=None,
    force_cmake=False,
    pre_clean=False,
    force_color=False,
    quiet=False,
    interleave_output=False,
    no_status=False,
    limit_status_rate=10.0,
    lock_install=False,
    no_notify=False,
    continue_on_failure=False,
    summarize_build=None,
):
    """Builds a catkin workspace in isolation

    This function will find all of the packages in the source space, start some
    executors, feed them packages to build based on dependencies and topological
    ordering, and then monitor the output of the executors, handling loggings of
    the builds, starting builds, failing builds, and finishing builds of
    packages, and handling the shutdown of the executors when appropriate.

    :param context: context in which to build the catkin workspace
    :type context: :py:class:`catkin_tools.verbs.catkin_build.context.Context`
    :param packages: list of packages to build, by default their dependencies will also be built
    :type packages: list
    :param start_with: package to start with, skipping all packages which precede it in the topological order
    :type start_with: str
    :param no_deps: If True, the dependencies of packages will not be built first
    :type no_deps: bool
    :param n_jobs: number of package build jobs to run in parallel
    :type n_jobs: int
    :param force_cmake: forces invocation of CMake if True, default is False
    :type force_cmake: bool
    :param force_color: forces colored output even if terminal does not support it
    :type force_color: bool
    :param quiet: suppresses the output of commands unless there is an error
    :type quiet: bool
    :param interleave_output: prints the output of commands as they are received
    :type interleave_output: bool
    :param no_status: disables status bar
    :type no_status: bool
    :param limit_status_rate: maximum rate at which status updates are displayed; 0 places no limit.
    :type limit_status_rate: float
    :param lock_install: causes executors to synchronize on access of install commands
    :type lock_install: bool
    :param no_notify: suppresses system notifications
    :type no_notify: bool
    :param continue_on_failure: do not stop building other jobs on error
    :type continue_on_failure: bool
    :param summarize_build: if True, summarizes the build at the end; if None and continue_on_failure is True
        and the build fails, the build will be summarized; if False, it never will be.
    :type summarize_build: bool

    :raises: SystemExit if buildspace is a file or no packages were found in the source space
        or if the provided options are invalid
    """
    pre_start_time = time.time()

    # Assert that the limit_status_rate is valid
    if limit_status_rate < 0:
        sys.exit("[build] @!@{rf}Error:@| The value of --status-rate must be greater than or equal to zero.")

    # Declare a buildspace marker describing the build config for error checking
    buildspace_marker_data = {
        'workspace': context.workspace,
        'profile': context.profile,
        'install': context.install,
        'install_space': context.install_space_abs,
        'devel_space': context.devel_space_abs,
        'source_space': context.source_space_abs}

    # Check build config
    if os.path.exists(os.path.join(context.build_space_abs, BUILDSPACE_MARKER_FILE)):
        with open(os.path.join(context.build_space_abs, BUILDSPACE_MARKER_FILE)) as buildspace_marker_file:
            existing_buildspace_marker_data = yaml.safe_load(buildspace_marker_file)
            misconfig_lines = ''
            for (k, v) in existing_buildspace_marker_data.items():
                new_v = buildspace_marker_data.get(k, None)
                if new_v != v:
                    misconfig_lines += (
                        '\n - %s: %s (stored) is not %s (commanded)' %
                        (k, v, new_v))
            if len(misconfig_lines) > 0:
                sys.exit(clr(
                    "\n@{rf}Error:@| Attempting to build a catkin workspace using build space: "
                    "\"%s\" but that build space's most recent configuration "
                    "differs from the commanded one in ways which will cause "
                    "problems. Fix the following options or use @{yf}`catkin "
                    "clean -b`@| to remove the build space: %s" %
                    (context.build_space_abs, misconfig_lines)))

    # Summarize the context
    summary_notes = []
    if force_cmake:
        summary_notes += [clr("@!@{cf}NOTE:@| Forcing CMake to run for each package.")]
    log(context.summary(summary_notes))

    # Make sure there is a build folder and it is not a file
    if os.path.exists(context.build_space_abs):
        if os.path.isfile(context.build_space_abs):
            sys.exit(clr(
                "[build] @{rf}Error:@| Build space '{0}' exists but is a file and not a folder."
                .format(context.build_space_abs)))
    # If it doesn't exist, create it
    else:
        log("[build] Creating build space: '{0}'".format(context.build_space_abs))
        os.makedirs(context.build_space_abs)

    # Write the current build config for config error checking
    with open(os.path.join(context.build_space_abs, BUILDSPACE_MARKER_FILE), 'w') as buildspace_marker_file:
        buildspace_marker_file.write(yaml.dump(buildspace_marker_data, default_flow_style=False))

    # Get all the packages in the context source space
    # Suppress warnings since this is a utility function
    workspace_packages = find_packages(context.source_space_abs, exclude_subspaces=True, warnings=[])

    # Get packages which have not been built yet
    unbuilt_pkgs = get_unbuilt_packages(context, workspace_packages)

    # Handle unbuilt packages
    if unbuilt:
        # Check if there are any unbuilt
        if len(unbuilt_pkgs) > 0:
            # Add the unbuilt packages
            packages.extend(list(unbuilt_pkgs))
        else:
            log("[build] No unbuilt packages to be built.")
            return

    # If no_deps is given, ensure packages to build are provided
    if no_deps and packages is None:
        log(clr("[build] @!@{rf}Error:@| With no_deps, you must specify packages to build."))
        return

    # Find list of packages in the workspace
    packages_to_be_built, packages_to_be_built_deps, all_packages = determine_packages_to_be_built(
        packages, context, workspace_packages)

    if not no_deps:
        # Extend packages to be built to include their deps
        packages_to_be_built.extend(packages_to_be_built_deps)

    # Also re-sort
    try:
        packages_to_be_built = topological_order_packages(dict(packages_to_be_built))
    except AttributeError:
        log(clr("[build] @!@{rf}Error:@| The workspace packages have a circular "
                "dependency, and cannot be built. Please run `catkin list "
                "--deps` to determine the problematic package(s)."))
        return

    # Check the number of packages to be built
    if len(packages_to_be_built) == 0:
        log(clr('[build] No packages to be built.'))
        return

    # Assert start_with package is in the workspace
    verify_start_with_option(
        start_with,
        packages,
        all_packages,
        packages_to_be_built + packages_to_be_built_deps)

    # Populate .catkin file if we're not installing
    # NOTE: This is done to prevent the catkin CMake code from doing it,
    # which isn't parallel-safe. The catkin CMake code only modifies this
    # file if its package source path isn't found.
    if not context.install:
        dot_catkin_file_path = os.path.join(context.devel_space_abs, '.catkin')
        # If the file exists, get the current paths
        if os.path.exists(dot_catkin_file_path):
            with open(dot_catkin_file_path, 'r') as dot_catkin_file:
                dot_catkin_paths = dot_catkin_file.read().split(';')
        else:
            dot_catkin_paths = []

        # Update the list with the new packages (in topological order)
        packages_to_be_built_paths = [
            os.path.join(context.source_space_abs, path)
            for path, pkg in packages_to_be_built
        ]

        new_dot_catkin_paths = [
            abs_path
            for abs_path in [os.path.join(context.source_space_abs, path) for path, pkg in all_packages]
            if abs_path in dot_catkin_paths or abs_path in packages_to_be_built_paths
        ]

        # Write the new file if it's different, otherwise, leave it alone
        if dot_catkin_paths == new_dot_catkin_paths:
            wide_log("[build] Package table is up to date.")
        else:
            wide_log("[build] Updating package table.")
            with open(dot_catkin_file_path, 'w') as dot_catkin_file:
                dot_catkin_file.write(';'.join(new_dot_catkin_paths))

    # Remove packages before start_with
    if start_with is not None:
        for path, pkg in list(packages_to_be_built):
            if pkg.name != start_with:
                wide_log(clr("@!@{pf}Skipping@| @{gf}---@| @{cf}{}@|").format(pkg.name))
                packages_to_be_built.pop(0)
            else:
                break

    # Get the names of all packages to be built
    packages_to_be_built_names = [p.name for _, p in packages_to_be_built]
    packages_to_be_built_deps_names = [p.name for _, p in packages_to_be_built_deps]

    # Generate prebuild jobs, if necessary
    prebuild_jobs = {}
    setup_util_exists = os.path.exists(os.path.join(context.devel_space_abs, '_setup_util.py'))
    if context.link_devel and (not setup_util_exists or (force_cmake and len(packages) == 0)):
        wide_log('[build] Preparing linked develspace...')

        pkg_dict = dict([(pkg.name, (pth, pkg)) for pth, pkg in all_packages])

        if 'catkin' in packages_to_be_built_names + packages_to_be_built_deps_names:
            # Use catkin as the prebuild package
            prebuild_pkg_path, prebuild_pkg = pkg_dict['catkin']
        else:
            # Generate explicit prebuild package
            prebuild_pkg_path = generate_prebuild_package(context.build_space_abs, context.devel_space_abs, force_cmake)
            prebuild_pkg = parse_package(prebuild_pkg_path)

        # Create the prebuild job
        prebuild_job = create_catkin_build_job(
            context,
            prebuild_pkg,
            prebuild_pkg_path,
            dependencies=[],
            force_cmake=force_cmake,
            pre_clean=pre_clean,
            prebuild=True)

        # Add the prebuild job
        prebuild_jobs[prebuild_job.jid] = prebuild_job

    # Remove prebuild jobs from normal job list
    for prebuild_jid, prebuild_job in prebuild_jobs.items():
        if prebuild_jid in packages_to_be_built_names:
            packages_to_be_built_names.remove(prebuild_jid)

    # Initial jobs list is just the prebuild jobs
    jobs = [] + list(prebuild_jobs.values())

    # Get all build type plugins
    build_job_creators = {
        ep.name: ep.load()['create_build_job']
        for ep in pkg_resources.iter_entry_points(group='catkin_tools.jobs')
    }

    # It's a problem if there aren't any build types available
    if len(build_job_creators) == 0:
        sys.exit('Error: No build types available. Please check your catkin_tools installation.')

    # Construct jobs
    for pkg_path, pkg in all_packages:
        if pkg.name not in packages_to_be_built_names:
            continue

        # Ignore metapackages
        if 'metapackage' in [e.tagname for e in pkg.exports]:
            continue

        # Get actual execution deps
        deps = [
            p.name for _, p
            in get_cached_recursive_build_depends_in_workspace(pkg, packages_to_be_built)
            if p.name not in prebuild_jobs
        ]

        # All jobs depend on the prebuild job if it's defined
        for j in prebuild_jobs.values():
            deps.append(j.jid)

        # Determine the job parameters
        build_job_kwargs = dict(
            context=context,
            package=pkg,
            package_path=pkg_path,
            dependencies=deps,
            force_cmake=force_cmake,
            pre_clean=pre_clean)

        # Create the job based on the build type
        build_type = get_build_type(pkg)

        if build_type in build_job_creators:
            jobs.append(build_job_creators[build_type](**build_job_kwargs))
        else:
            wide_log(clr(
                "[build] @!@{yf}Warning:@| Skipping package `{}` because it "
                "has an unsupported package build type: `{}`"
            ).format(pkg.name, build_type))

            wide_log(clr("[build] Note: Available build types:"))
            for bt_name in build_job_creators.keys():
                wide_log(clr("[build]  - `{}`".format(bt_name)))

    # Queue for communicating status
    event_queue = Queue()

    try:
        # Spin up status output thread
        status_thread = ConsoleStatusController(
            'build',
            ['package', 'packages'],
            jobs,
            n_jobs,
            [pkg.name for _, pkg in context.packages],
            [p for p in context.whitelist],
            [p for p in context.blacklist],
            event_queue,
            show_notifications=not no_notify,
            show_active_status=not no_status,
            show_buffered_stdout=not quiet and not interleave_output,
            show_buffered_stderr=not interleave_output,
            show_live_stdout=interleave_output,
            show_live_stderr=interleave_output,
            show_stage_events=not quiet,
            show_full_summary=(summarize_build is True),
            pre_start_time=pre_start_time,
            active_status_rate=limit_status_rate)
        status_thread.start()

        # Initialize locks
        locks = {
            'installspace': asyncio.Lock() if lock_install else FakeLock()
        }

        # Block while running N jobs asynchronously
        try:
            all_succeeded = run_until_complete(execute_jobs(
                'build',
                jobs,
                locks,
                event_queue,
                os.path.join(context.build_space_abs, '_logs'),
                max_toplevel_jobs=n_jobs,
                continue_on_failure=continue_on_failure,
                continue_without_deps=False))
        except Exception:
            status_thread.keep_running = False
            all_succeeded = False
            status_thread.join(1.0)
            wide_log(str(traceback.format_exc()))

        status_thread.join(1.0)

        # Warn user about new packages
        now_unbuilt_pkgs = get_unbuilt_packages(context, workspace_packages)
        new_pkgs = [p for p in unbuilt_pkgs if p not in now_unbuilt_pkgs]
        if len(new_pkgs) > 0:
            log(clr("[build] @/@!Note:@| @/Workspace packages have changed, "
                    "please re-source setup files to use them.@|"))

        if all_succeeded:
            # Create isolated devel setup if necessary
            if context.isolate_devel:
                if not context.install:
                    _create_unmerged_devel_setup(context, now_unbuilt_pkgs)
                else:
                    _create_unmerged_devel_setup_for_install(context)
            return 0
        else:
            return 1

    except KeyboardInterrupt:
        wide_log("[build] Interrupted by user!")
        event_queue.put(None)
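This function backs a `catkin build`-style verb; a rough programmatic sketch, where the Context construction details are assumptions about the catkin_tools API:

# from catkin_tools.context import Context
#
# ctx = Context.load(workspace='/path/to/ws')
# rc = build_isolated_workspace(ctx, packages=['my_pkg'], n_jobs=4)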
Example 52
def parse_manifest_file(dirpath, manifest_name):
    """
    Parse manifest file (package, stack).  Type will be inferred from manifest_name.
    
    :param dirpath: directory of manifest file, ``str``
    :param manifest_name: ``MANIFEST_FILE`` or ``STACK_FILE``, ``str``

    :returns: return :class:`Manifest` instance, populated with parsed fields
    :raises: :exc:`InvalidManifest`
    :raises: :exc:`IOError`
    """
    filename = os.path.join(dirpath, manifest_name)
    if not os.path.isfile(filename):
        # hack for backward compatibility
        package_filename = os.path.join(dirpath, PACKAGE_FILE)
        if not os.path.isfile(package_filename):
            raise IOError("Invalid/non-existent manifest file: %s" % filename)
        manifest = Manifest(filename=filename, is_catkin=True)

        # extract all information from package.xml
        from catkin_pkg.package import parse_package
        p = parse_package(package_filename)
        # put these into manifest
        manifest.description = p.description
        manifest.author = ', '.join([('Maintainer: %s' % str(m)) for m in p.maintainers] + [str(a) for a in p.authors])
        manifest.license = ', '.join(p.licenses)
        if p.urls:
            manifest.url = str(p.urls[0])
        manifest.version = p.version
        for export in p.exports:
            manifest.exports.append(Export(export.tagname, export.attributes, export.content))

        # split ros and system dependencies (using rosdep)
        try:
            from rosdep2.rospack import init_rospack_interface, is_ros_package, is_system_dependency, is_view_empty
            global _static_rosdep_view
            # initialize rosdep view once
            if _static_rosdep_view is None:
                _static_rosdep_view = init_rospack_interface()
                if is_view_empty(_static_rosdep_view):
                    sys.stderr.write("the rosdep view is empty: call 'sudo rosdep init' and 'rosdep update'\n")
                    _static_rosdep_view = False
            if _static_rosdep_view:
                depends = set([])
                rosdeps = set([])
                for d in (p.buildtool_depends + p.build_depends + p.run_depends):
                    if is_ros_package(_static_rosdep_view, d.name):
                        depends.add(d.name)
                    if is_system_dependency(_static_rosdep_view, d.name):
                        rosdeps.add(d.name)
                for name in depends:
                    manifest.depends.append(Depend(name, 'package'))
                for name in rosdeps:
                    manifest.rosdeps.append(RosDep(name))
        except ImportError:
            pass

        return manifest

    with open(filename, 'r') as f:
        return parse_manifest(manifest_name, f.read(), filename)
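A short usage sketch (the path is illustrative): for a rosbuild package the manifest.xml is parsed directly, while for a catkin package the backward-compatibility branch above synthesizes a Manifest from package.xml:

m = parse_manifest_file('/path/to/my_pkg', MANIFEST_FILE)
print(m.is_catkin, m.description)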
Example 53
def get_sourcedeb(
        rosdistro_index_url, rosdistro_name, package_name, sourcedeb_dir,
        skip_download_sourcedeb=False):
    # ensure that no source subfolder exists
    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert not subfolders, \
        ("Sourcedeb directory '%s' must not have any " +
         "subfolders starting with '%s-'") % (sourcedeb_dir, package_name)

    if not skip_download_sourcedeb:
        # get expected package version from rosdistro
        from rosdistro import get_distribution_cache
        from rosdistro import get_index
        index = get_index(rosdistro_index_url)
        dist_cache = get_distribution_cache(index, rosdistro_name)
        dist_file = dist_cache.distribution_file
        assert package_name in dist_file.release_packages
        pkg = dist_file.release_packages[package_name]
        repo = dist_file.repositories[pkg.repository_name]
        package_version = repo.release_repository.version

        # get the exact sourcedeb version
        showsrc_output = subprocess.check_output([
            'apt-cache', 'showsrc', debian_package_name]).decode()
        line_prefix = 'Version: '
        debian_package_versions = [
            l[len(line_prefix):] for l in showsrc_output.splitlines()
            if l.startswith(line_prefix + package_version)]
        assert len(debian_package_versions) == 1, \
            "Failed to find sourcedeb with version '%s', only found: %s" % \
            (package_version, ', '.join(debian_package_versions))

        # download sourcedeb
        apt_script = os.path.join(
            os.path.dirname(__file__), 'wrapper', 'apt.py')
        cmd = [
            sys.executable, apt_script,
            'source', '--download-only', '--only-source',
            debian_package_name + '=' + debian_package_versions[0]]
        print("Invoking '%s'" % ' '.join(cmd))
        subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # extract sourcedeb
    filenames = _get_package_dsc_filename(sourcedeb_dir, debian_package_name)
    assert len(filenames) == 1, filenames
    dsc_filename = filenames[0]
    cmd = ['dpkg-source', '-x', dsc_filename]
    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # ensure that one source subfolder exists
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(source_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
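Note the version pinning: the download goes through 'apt source <name>=<version>' so the fetched sourcedeb matches the rosdistro release exactly. A sketch of the equivalent manual invocation (package name and version are illustrative):

#   apt-get source --download-only --only-source \
#       ros-indigo-roscpp=1.11.16-0trusty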
Example 54
#!/usr/bin/env python

from __future__ import print_function
import sys

from catkin_pkg.package import parse_package

package = parse_package(sys.argv[1])

values = {}
values["VERSION"] = '"%s"' % package.version

values["MAINTAINER"] = '"%s"' % (", ".join([str(m) for m in package.maintainers]))

values["BUILD_DEPENDS"] = " ".join(['"%s"' % str(d) for d in package.build_depends])
values["RUN_DEPENDS"] = " ".join(['"%s"' % str(d) for d in package.run_depends])

with open(sys.argv[2], "w") as ofile:
    print(r'set(_CATKIN_CURRENT_PACKAGE "%s")' % package.name, file=ofile)
    for k, v in values.items():
        print("set(%s_%s %s)" % (package.name, k, v), file=ofile)
Example 55
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--workspace', type=str, default='.')
    parser.add_argument('distribution_file', type=str)
    parser.add_argument('package_names', type=str, nargs='+')
    args = parser.parse_args()

    # Load the distribution file.
    with open(args.distribution_file, 'rb') as distribution_file:
        distribution_raw = yaml.safe_load(distribution_file)

    packages_raw = distribution_raw.get('repositories')
    if packages_raw is None:
        raise ValueError('Distribution is missing the "repositories" key.')

    repositories = {
        name: Repository(name, options)
        for name, options in packages_raw.iteritems() }

    # Build a map from package name to the repository that contains it, based
    # solely on the information in the distribution file.
    distribution_package_map = dict()

    for repository in repositories.itervalues():
        for package_name in repository.packages:
            existing_repository = distribution_package_map.get(package_name)
            if existing_repository is not None:
                raise ValueError(
                    'Duplicate package "{:s}" in repositories "{:s}" and'
                    ' "{:s}".'.format(
                        package_name, existing_repository.name,
                        repository.name))

            distribution_package_map[package_name] = Package(
                package_name, repository)

    # Aggregate a map of packages that we know about.
    package_map = dict(distribution_package_map)
    done_packages = set() # installed and processed
    installed_packages = set() # installed, but not processed yet
    pending_packages = set(args.package_names)

    while pending_packages:
        package_name = pending_packages.pop()

        print('Processing package "{:s}"'.format(package_name))

        package = package_map.get(package_name)
        if package is None:
            raise ValueError(
                'Package "{:s}" is not in the distribution.'.format(
                    package_name))

        # Checkout the repository.
        repository = package.repository

        if repository.location is None:
            repository.location = os.path.join(args.workspace, repository.name)

            print('  Checking out "{:s}" repository => {:s}'.format(
                repository.name, repository.location))

            client = vcstools.get_vcs_client(
                repository.vcs_type, repository.location)

            if client.detect_presence():
                detected_url = client.get_url()

                if not client.url_matches(detected_url, repository.vcs_uri):
                    raise ValueError(
                        'Directory "{:s}" already contains a VCS repository with'
                        ' URL "{:s}". This does not match the requested URL'
                        ' "{:s}".'.format(repository_name, detected_url, repository.vcs_uri))

                client.update(version=repository.vcs_version)
            else:
                client.checkout(repository.vcs_uri, version=repository.vcs_version)

            # Search for packages in the repository.
            repository_package_map = dict()
            rospkg.list_by_path(
                manifest_name='package.xml',
                path=repository.location,
                cache=repository_package_map)

            if package.name not in repository_package_map:
                raise ValueError(
                    'Repository "{:s}" checked out from the "{:s}" repository'
                    ' "{:s}" does not contain the package "{:s}".'.format(
                        repository.name, repository.vcs_type,
                        repository.vcs_uri, package.name))

            # Mark all of these packages as installed.
            for package_name, location in repository_package_map.iteritems():
                installed_package = package_map.get(package_name)

                if installed_package is None:
                    installed_package = Package(package_name, repository)
                    package_map[package_name] = installed_package
                elif (installed_package.repository != repository or
                      installed_package.location is not None):
                    raise ValueError(
                        'Repository "{:s} installed duplicate package "{:s}"'
                        ' in directory "{:s}". This package was already installed'
                        ' by repository "{:s}" in directory "{:s}".'.format(
                            repository.name, package_name, location,
                            installed_package.repository.name,
                            installed_package.location))

                installed_package.location = location

                print('    Found package "{:s}" => {:s}'.format(
                    installed_package.name, installed_package.location))

            installed_packages.update(repository_package_map.iterkeys())

        # Crawl dependencies.
        package_xml_path = os.path.join(package.location, 'package.xml')
        package_manifest = parse_package(package_xml_path)

        all_depends = set()
        for dependency_type in DEPENDENCY_TYPES:
            for dependency in getattr(package_manifest, dependency_type):
                all_depends.add(dependency.name)

        # Only keep the dependencies that we know about.
        def annotate_package_name(package_name):
            if package_name in done_packages:
                return package_name + '*'
            elif package_name in installed_packages:
                return package_name + '^'
            else:
                return package_name

        known_depends = all_depends.intersection(
            distribution_package_map.iterkeys())
        if known_depends:
            print('  Depends on:', ' '.join(
                sorted(map(annotate_package_name, known_depends))))

        pending_packages.update(known_depends)
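A hedged sketch of how this entry point might be driven (the script name and arguments are assumptions); in the dependency listing, packages already processed are annotated with '*' and packages checked out but not yet processed with '^':

#   python checkout_distribution.py --workspace ~/ws distribution.yaml my_pkg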