Example #1
0
def get_recursive_dependencies(distro,
                               package_names,
                               excludes=None,
                               limit_depth=None):
    """Return the recursive dependency set of *package_names* in *distro*.

    Walks buildtool/build/run/test dependencies, skipping anything in
    *excludes*, optionally capping the walk at *limit_depth* levels.
    The input packages themselves are removed from the result.
    """
    excluded = set(excludes or [])
    collected = set()
    dep_walker = DependencyWalker(distro)
    # Route the walker's stderr chatter through the logger while we run.
    original_stderr = sys.stderr
    sys.stderr = CustomLogger()
    try:
        for name in package_names:
            try:
                collected |= dep_walker.get_recursive_depends(
                    name, ['buildtool', 'build', 'run', 'test'],
                    ros_packages_only=True,
                    ignore_pkgs=collected | excluded,
                    limit_depth=limit_depth)
            except AssertionError as e:
                raise RuntimeError(
                    "Failed to fetch recursive dependencies of package '%s': %s"
                    % (name, e))
    finally:
        sys.stderr = original_stderr
    collected.difference_update(package_names)
    return collected
Example #2
0
def _gen_recipe_for_package(distro, pkg_name, pkg, repo, ros_pkg,
                            pkg_rosinstall, tar_dir, md5_cache, sha256_cache,
                            patches, incs):
    """Build a yoctoRecipe for *pkg_name* from rosdistro metadata.

    Registers the package's buildtool/build/run dependencies on the recipe
    and, when the package.xml can be fetched, fills in license,
    description and homepage from it.  Returns the recipe either way.
    """
    pkg_dep_walker = DependencyWalker(distro)
    pkg_buildtool_deps = pkg_dep_walker.get_depends(pkg_name, "buildtool")
    pkg_build_deps = pkg_dep_walker.get_depends(pkg_name, "build")
    pkg_run_deps = pkg_dep_walker.get_depends(pkg_name, "run")
    src_uri = pkg_rosinstall[0]['tar']['uri']

    pkg_recipe = yoctoRecipe(pkg_name, distro, src_uri, tar_dir, md5_cache,
                             sha256_cache, patches, incs)
    # add run dependencies
    for rdep in pkg_run_deps:
        pkg_recipe.add_depend(rdep)

    # add build dependencies
    for bdep in pkg_build_deps:
        pkg_recipe.add_depend(bdep)

    # add build tool dependencies
    for tdep in pkg_buildtool_deps:
        pkg_recipe.add_depend(tdep)

    # parse through package xml; metadata is best-effort, so a failed
    # fetch still returns a usable (if sparsely filled) recipe
    try:
        pkg_xml = ros_pkg.get_package_xml(distro.name)
    except Exception:
        warn("fetch metadata for package {}".format(pkg_name))
        return pkg_recipe
    pkg_fields = PackageMetadata(pkg_xml)
    pkg_recipe.pkg_xml = pkg_xml
    pkg_recipe.license = pkg_fields.upstream_license
    pkg_recipe.description = pkg_fields.description
    pkg_recipe.homepage = pkg_fields.homepage
    return pkg_recipe
Example #3
0
def get_recursive_dependencies_on(distro,
                                  package_names,
                                  excludes=None,
                                  limit=None):
    """Return the set of packages that recursively depend on *package_names*."""
    excluded = set(excludes or [])
    limited_to = set(limit or [])

    # to improve performance limit search space if possible
    if limited_to:
        released_names, _ = get_package_names(distro)
        excluded |= set(released_names) - limited_to - set(package_names)

    dependents = set()
    dep_walker = DependencyWalker(distro)
    # Route the walker's stderr chatter through the logger while we run.
    original_stderr = sys.stderr
    sys.stderr = CustomLogger()
    try:
        for name in package_names:
            dependents |= dep_walker.get_recursive_depends_on(
                name, ['buildtool', 'build', 'run', 'test'],
                ignore_pkgs=dependents | excluded)
    finally:
        sys.stderr = original_stderr
    dependents.difference_update(package_names)
    return dependents
Example #4
0
def _gen_ebuild_for_package(distro, pkg_name, pkg, repo, ros_pkg,
                            pkg_rosinstall):
    """Build an Ebuild for *pkg_name* from rosdistro metadata.

    Registers run/build/buildtool dependencies (flagging those that are
    released ROS packages), adds the default keywords, and fills
    license/description/homepage from the package.xml when it can be
    fetched.  Returns the ebuild either way.
    """
    pkg_ebuild = Ebuild()

    pkg_ebuild.distro = distro.name
    pkg_ebuild.src_uri = pkg_rosinstall[0]['tar']['uri']
    pkg_names = get_package_names(distro)
    pkg_dep_walker = DependencyWalker(distro)

    pkg_buildtool_deps = pkg_dep_walker.get_depends(pkg_name, "buildtool")
    pkg_build_deps = pkg_dep_walker.get_depends(pkg_name, "build")
    pkg_run_deps = pkg_dep_walker.get_depends(pkg_name, "run")

    pkg_keywords = ['x86', 'amd64', 'arm', 'arm64']

    # add run dependencies
    for rdep in pkg_run_deps:
        pkg_ebuild.add_run_depend(rdep, rdep in pkg_names[0])

    # add build dependencies
    for bdep in pkg_build_deps:
        pkg_ebuild.add_build_depend(bdep, bdep in pkg_names[0])

    # add build tool dependencies
    for tdep in pkg_buildtool_deps:
        pkg_ebuild.add_build_depend(tdep, tdep in pkg_names[0])

    # add keywords
    for key in pkg_keywords:
        pkg_ebuild.add_keyword(key)

    # parse through package xml; metadata is best-effort, so a failed
    # fetch still returns a usable ebuild
    try:
        pkg_xml = ros_pkg.get_package_xml(distro.name)
    except Exception:
        warn("fetch metadata for package {}".format(pkg_name))
        return pkg_ebuild
    pkg_fields = xmltodict.parse(pkg_xml)

    pkg_ebuild.upstream_license = pkg_fields['package']['license']
    pkg_ebuild.description = pkg_fields['package']['description']
    if isinstance(pkg_ebuild.description, str):
        pkg_ebuild.description = pkg_ebuild.description.replace('`', "")
    # Keep the description within ebuild-friendly bounds.
    if len(pkg_ebuild.description) > 80:
        pkg_ebuild.description = pkg_ebuild.description[:80]
    try:
        # <url> may be absent, a plain string, or a dict with @type/#text
        # depending on how xmltodict parsed the package.xml.
        if 'url' not in pkg_fields['package']:
            warn("no website field for package {}".format(pkg_name))
        elif isinstance(pkg_fields['package']['url'], str):
            pkg_ebuild.homepage = pkg_fields['package']['url']
        elif '@type' in pkg_fields['package']['url']:
            if pkg_fields['package']['url']['@type'] == 'website':
                if '#text' in pkg_fields['package']['url']:
                    pkg_ebuild.homepage = pkg_fields['package']['url']['#text']
        else:
            warn("failed to parse website for package {}".format(pkg_name))
    except TypeError as e:
        warn("failed to parse website package {}: {}".format(pkg_name, e))
    return pkg_ebuild
Example #5
0
def _gen_recipe_for_package(distro, pkg_name, pkg, repo, ros_pkg,
                            pkg_rosinstall, tar_dir):
    """Build a yoctoRecipe for *pkg_name* from rosdistro metadata.

    Registers buildtool/build/run dependencies and fills license,
    description and homepage from the package.xml when it can be fetched.
    Returns the recipe either way.
    """
    pkg_dep_walker = DependencyWalker(distro)
    pkg_buildtool_deps = pkg_dep_walker.get_depends(pkg_name, "buildtool")
    pkg_build_deps = pkg_dep_walker.get_depends(pkg_name, "build")
    pkg_run_deps = pkg_dep_walker.get_depends(pkg_name, "run")
    src_uri = pkg_rosinstall[0]['tar']['uri']

    pkg_recipe = yoctoRecipe(pkg_name, distro, src_uri, tar_dir)
    # add run dependencies
    for rdep in pkg_run_deps:
        pkg_recipe.add_depend(rdep)

    # add build dependencies
    for bdep in pkg_build_deps:
        pkg_recipe.add_depend(bdep)

    # add build tool dependencies
    for tdep in pkg_buildtool_deps:
        pkg_recipe.add_depend(tdep)

    # parse through package xml; metadata is best-effort, so a failed
    # fetch still returns a usable recipe
    try:
        pkg_xml = ros_pkg.get_package_xml(distro.name)
    except Exception:
        warn("fetch metadata for package {}".format(pkg_name))
        return pkg_recipe
    pkg_fields = xmltodict.parse(pkg_xml)

    pkg_recipe.pkg_xml = pkg_xml
    pkg_recipe.license = pkg_fields['package']['license']
    pkg_recipe.description = pkg_fields['package']['description']
    if not isinstance(pkg_recipe.description, str):
        # xmltodict returns a dict when the element carries attributes
        if '#text' in pkg_recipe.description:
            pkg_recipe.description = pkg_recipe.description['#text']
        else:
            pkg_recipe.description = "None"
    pkg_recipe.description = pkg_recipe.description.replace('`', "")
    # Keep the description within recipe-friendly bounds.
    if len(pkg_recipe.description) > 80:
        pkg_recipe.description = pkg_recipe.description[:80]
    try:
        # <url> may be absent, a plain string, or a dict with @type/#text.
        if 'url' not in pkg_fields['package']:
            warn("no website field for package {}".format(pkg_name))
        elif sys.version_info <= (3, 0):
            # Python 2: xmltodict may hand back a byte string.
            # Fixed: this previously assigned to pkg_recipe.recipe; every
            # other branch (and the ebuild twin of this function) sets
            # homepage.
            pkg_recipe.homepage = pkg_fields['package']['url'].decode()
        elif isinstance(pkg_fields['package']['url'], str):
            pkg_recipe.homepage = pkg_fields['package']['url']
        elif '@type' in pkg_fields['package']['url']:
            if pkg_fields['package']['url']['@type'] == 'website':
                if '#text' in pkg_fields['package']['url']:
                    pkg_recipe.homepage =\
                        pkg_fields['package']['url']['#text']
        else:
            warn("failed to parse website for package {}".format(pkg_name))
    except TypeError as e:
        warn("failed to parse website package {}: {}".format(pkg_name, e))
    return pkg_recipe
Example #6
0
def get_recursive_dependencies(distro, package_names, excludes=None, limit_depth=None):
    """Return the recursive dependency set of *package_names* in *distro*,
    minus the input packages and anything listed in *excludes*."""
    ignored = set(excludes or [])
    result = set()
    dep_walker = DependencyWalker(distro)
    # Divert the walker's stderr output to the logger for the duration.
    previous_stderr = sys.stderr
    sys.stderr = CustomLogger()
    try:
        for pkg in package_names:
            try:
                result |= dep_walker.get_recursive_depends(
                    pkg,
                    ['buildtool', 'build', 'run', 'test'],
                    ros_packages_only=True,
                    ignore_pkgs=result | ignored,
                    limit_depth=limit_depth)
            except AssertionError as e:
                raise RuntimeError(
                    "Failed to fetch recursive dependencies of package '%s': %s"
                    % (pkg, e))
    finally:
        sys.stderr = previous_stderr
    result -= set(package_names)
    return result
Example #7
0
def _gen_recipe_for_package(distro, pkg_name, pkg, repo, ros_pkg,
                            pkg_rosinstall, tar_dir, md5_cache, sha256_cache,
                            skip_keys):
    """Build a yoctoRecipe for *pkg_name*, registering each dependency
    category (build, buildtool, export, buildtool-export, exec, test)
    with its matching hook and flagging released ROS packages."""
    pkg_names = get_package_names(distro)
    dep_walker = DependencyWalker(distro,
                                  evaluate_condition_context=os.environ)
    src_uri = pkg_rosinstall[0]['tar']['uri']

    # parse through package xml (retried; failure aborts with err_msg)
    err_msg = 'Failed to fetch metadata for package {}'.format(pkg_name)
    pkg_xml = retry_on_exception(ros_pkg.get_package_xml,
                                 distro.name,
                                 retry_msg='Could not get package xml!',
                                 error_msg=err_msg)

    pkg_recipe = yoctoRecipe(
        pkg.repository_name,
        len(ros_pkg.repository.package_names),
        pkg_name,
        pkg_xml,
        distro,
        src_uri,
        tar_dir,
        md5_cache,
        sha256_cache,
        skip_keys,
    )

    released = pkg_names[0]
    # Map each rosdistro dependency type onto the recipe hook that
    # registers it; the second argument flags released ROS packages.
    categories = (
        ('build', pkg_recipe.add_build_depend),
        ('buildtool', pkg_recipe.add_buildtool_depend),
        ('build_export', pkg_recipe.add_export_depend),
        ('buildtool_export', pkg_recipe.add_buildtool_export_depend),
        ('exec', pkg_recipe.add_run_depend),
        ('test', pkg_recipe.add_test_depend),
    )
    for dep_type, register in categories:
        for dep in dep_walker.get_depends(pkg_name, dep_type):
            register(dep, dep in released)

    return pkg_recipe
Example #8
0
def get_recursive_dependencies_on(distro, package_names, excludes=None, limit=None):
    """Return the set of packages that recursively depend on *package_names*."""
    ignored = set(excludes or [])
    limit_set = set(limit or [])

    # Narrow the search space up front when a limit set is supplied.
    if limit_set:
        released_names, _ = get_package_names(distro)
        ignored.update(set(released_names) - limit_set - set(package_names))

    result = set()
    dep_walker = DependencyWalker(distro)
    # Divert the walker's stderr output to the logger for the duration.
    previous_stderr = sys.stderr
    sys.stderr = CustomLogger()
    try:
        for pkg in package_names:
            result |= dep_walker.get_recursive_depends_on(
                pkg,
                ['buildtool', 'build', 'run', 'test'],
                ignore_pkgs=result | ignored)
    finally:
        sys.stderr = previous_stderr
    result -= set(package_names)
    return result
Example #9
0
    def __init__(self, distro_name, python_version=None):
        """Set up a dependency walker for the given ROS distribution.

        Fetches the cached rosdistro distribution and configures the
        ROS_* environment variables that package.xml condition
        evaluation depends on.

        :param distro_name: name of the ROS distribution
        :param python_version: override for ROS_PYTHON_VERSION; defaults
            to the value recorded in the rosdistro index
        """
        index = get_index(get_index_url())
        self._distro = get_cached_distribution(index, distro_name)
        self.distro_name = distro_name
        # set up ROS environments
        if python_version is None:
            python_version = index.distributions[distro_name]["python_version"]
        os.environ["ROS_PYTHON_VERSION"] = "{0}".format(python_version)
        os.environ["ROS_DISTRO"] = "{0}".format(distro_name)
        # Remove workspace-related variables so condition evaluation is
        # not influenced by a previously sourced ROS environment.
        if "ROS_ROOT" in os.environ:
            os.environ.pop("ROS_ROOT")
        if "ROS_PACKAGE_PATH" in os.environ:
            os.environ.pop("ROS_PACKAGE_PATH")
        # The walker evaluates package.xml conditions against os.environ,
        # which is why the variables above are set first.
        self._walker = DependencyWalker(self._distro,
                                        evaluate_condition_context=os.environ)

        # cache distribution type
        self._distribution_type = index.distributions[distro_name][
            "distribution_type"]
        self._python_version = index.distributions[distro_name][
            "python_version"]
        self.build_packages = set()

        os.environ["ROS_VERSION"] = "1" if self.check_ros1() else "2"
Example #10
0
def _gen_ebuild_for_package(
    distro, pkg_name, pkg, repo, ros_pkg, pkg_rosinstall
):
    """Build an Ebuild for *pkg_name* from rosdistro metadata.

    Registers run/build/buildtool/test dependencies (flagging released
    ROS packages), adds the default keywords, and fills license,
    description, homepage and build type from the package.xml when it
    can be fetched.  Returns the ebuild either way.
    """
    pkg_ebuild = Ebuild()

    pkg_ebuild.distro = distro.name
    pkg_ebuild.src_uri = pkg_rosinstall[0]['tar']['uri']
    pkg_names = get_package_names(distro)
    pkg_dep_walker = DependencyWalker(distro)

    pkg_buildtool_deps = pkg_dep_walker.get_depends(pkg_name, "buildtool")
    pkg_build_deps = pkg_dep_walker.get_depends(pkg_name, "build")
    pkg_run_deps = pkg_dep_walker.get_depends(pkg_name, "run")
    pkg_test_deps = pkg_dep_walker.get_depends(pkg_name, "test")

    pkg_keywords = ['x86', 'amd64', 'arm', 'arm64']

    # add run dependencies
    for rdep in pkg_run_deps:
        pkg_ebuild.add_run_depend(rdep, rdep in pkg_names[0])

    # add build dependencies
    for bdep in pkg_build_deps:
        pkg_ebuild.add_build_depend(bdep, bdep in pkg_names[0])

    # add build tool dependencies
    for tdep in pkg_buildtool_deps:
        pkg_ebuild.add_build_depend(tdep, tdep in pkg_names[0])

    # add test dependencies
    for test_dep in pkg_test_deps:
        pkg_ebuild.add_test_depend(test_dep, test_dep in pkg_names[0])

    # add keywords
    for key in pkg_keywords:
        pkg_ebuild.add_keyword(key)

    # parse through package xml; metadata is best-effort
    try:
        pkg_xml = ros_pkg.get_package_xml(distro.name)
    except Exception:
        warn("fetch metadata for package {}".format(pkg_name))
        return pkg_ebuild
    # Use a distinct name: the original shadowed the 'pkg' parameter here.
    metadata = PackageMetadata(pkg_xml)
    pkg_ebuild.upstream_license = metadata.upstream_license
    pkg_ebuild.description = metadata.description
    pkg_ebuild.homepage = metadata.homepage
    pkg_ebuild.build_type = metadata.build_type
    return pkg_ebuild
Example #11
0
def _get_blocked_releases_info(config_url, rosdistro_name, repo_names=None):
    """Collect, per repository, what is blocking its release into *rosdistro_name*.

    Compares the previous distribution with the current one and, for every
    repository that was released previously but not yet in the current
    distribution, records which unreleased repositories block it and
    (transitively) which repositories it blocks.

    :param config_url: URL of the build config index
    :param rosdistro_name: name of the target ROS distribution
    :param repo_names: optional iterable restricting which repositories
        are checked; inputs unknown to the previous distribution are
        reported and ignored
    :returns: dict mapping repository name to an info dict with keys such
        as 'released', 'version', 'url', 'maintainers', 'repos_blocked_by',
        'repos_blocking' and 'recursive_repos_blocking'
    """
    import rosdistro
    from rosdistro.dependency_walker import DependencyWalker

    config = get_config_index(config_url)

    index = rosdistro.get_index(config.rosdistro_index_url)

    print('Checking packages for "%s" distribution' % rosdistro_name)

    # Find the previous distribution to the current one
    try:
        prev_rosdistro_name = _prev_rosdistro(index, rosdistro_name)
    except ValueError as e:
        print(e.args[0], file=sys.stderr)
        exit(-1)

    cache = rosdistro.get_distribution_cache(index, rosdistro_name)
    distro_file = cache.distribution_file

    prev_cache = rosdistro.get_distribution_cache(index, prev_rosdistro_name)
    prev_distribution = rosdistro.get_cached_distribution(index,
                                                          prev_rosdistro_name,
                                                          cache=prev_cache)

    prev_distro_file = prev_cache.distribution_file

    # Dependencies are taken from the previous distribution: unreleased
    # repos have no dependency info in the current one yet.
    dependency_walker = DependencyWalker(prev_distribution)

    # Check missing dependencies for packages that were in the previous
    # distribution that have not yet been released in the current distribution
    # Filter repos without a version or a release repository
    prev_repo_names = set(_released_repos(prev_distro_file))

    if repo_names is not None:
        ignored_inputs = prev_repo_names.difference(repo_names)
        prev_repo_names.intersection_update(repo_names)
        repo_names = prev_repo_names

        if len(ignored_inputs) > 0:
            print(
                'Ignoring inputs for which repository info not found in previous distribution '
                '(did you list a package instead of a repository?):')
            print('\n'.join(
                sorted('\t{0}'.format(repo) for repo in ignored_inputs)))

    current_repo_names = set(_released_repos(distro_file))

    # Get a list of currently released packages
    current_package_names = set(
        _released_packages(distro_file, current_repo_names))

    released_repos = prev_repo_names.intersection(current_repo_names)

    if prev_repo_names.issubset(current_repo_names):
        print('All inputs already released in {0}.'.format(rosdistro_name))

    repos_info = defaultdict(dict)
    unprocessed_repos = prev_repo_names
    # Worklist loop: processing one repo can discover blocking repos that
    # were not part of the inputs; those are queued for the next pass.
    while unprocessed_repos:
        print('Processing repos:\n%s' %
              '\n'.join(['- %s' % r for r in sorted(unprocessed_repos)]))
        new_repos_to_process = set(
        )  # set containing repos that come up while processing others

        for repo_name in unprocessed_repos:
            repos_info[repo_name]['released'] = repo_name in released_repos

            if repo_name in released_repos:
                repo = distro_file.repositories[repo_name]
                version = repo.release_repository.version
                repos_info[repo_name]['version'] = version

            else:
                # Gather info on which required repos have not been released yet
                # Assume dependencies will be the same as in the previous distribution and find
                # which ones have been released
                repo = prev_distro_file.repositories[repo_name]
                release_repo = repo.release_repository
                package_dependencies = set()
                packages = release_repo.package_names
                # Accumulate all dependencies for those packages
                for package in packages:
                    package_dependencies.update(
                        _package_dependencies(dependency_walker, package))

                # For all package dependencies, check if they are released yet
                unreleased_pkgs = package_dependencies.difference(
                    current_package_names)
                # Remove the packages which this repo provides
                unreleased_pkgs = unreleased_pkgs.difference(packages)

                # Get maintainer info and repo of unreleased packages
                maintainers = defaultdict(dict)
                repos_blocked_by = set()
                for pkg_name in unreleased_pkgs:
                    unreleased_repo_name = \
                        prev_distro_file.release_packages[pkg_name].repository_name
                    repos_blocked_by.add(unreleased_repo_name)
                    maintainers[unreleased_repo_name].update(
                        dict(_maintainers(prev_distribution, pkg_name)))
                if maintainers:
                    repos_info[repo_name]['maintainers'] = maintainers

                repos_info[repo_name]['repos_blocked_by'] = {}
                for blocking_repo_name in repos_blocked_by:
                    # Get url of blocking repos
                    repo_url = _repo_url(prev_distribution, blocking_repo_name)
                    repos_info[repo_name]['repos_blocked_by'].update(
                        {blocking_repo_name: repo_url})

                    # Mark blocking relationship in other direction
                    if blocking_repo_name not in repos_info:
                        new_repos_to_process.add(blocking_repo_name)
                        repos_info[blocking_repo_name] = {}
                    if 'repos_blocking' not in repos_info[blocking_repo_name]:
                        repos_info[blocking_repo_name]['repos_blocking'] = set(
                            [])
                    repos_info[blocking_repo_name]['repos_blocking'].add(
                        repo_name)

            # Get url of repo
            repo_url = _repo_url(prev_distribution, repo_name)
            if repo_url:
                repos_info[repo_name]['url'] = repo_url

            new_repos_to_process.discard(
                repo_name)  # this repo has been fully processed now

        for repo_name in repos_info.keys():
            # Recursively get all repos being blocked by this repo
            recursive_blocks = set([])
            repos_to_check = set([repo_name])
            while repos_to_check:
                next_repo_to_check = repos_to_check.pop()
                blocks = repos_info[next_repo_to_check].get(
                    'repos_blocking', set([]))
                new_blocks = blocks - recursive_blocks
                repos_to_check |= new_blocks
                recursive_blocks |= new_blocks
            if recursive_blocks:
                repos_info[repo_name][
                    'recursive_repos_blocking'] = recursive_blocks
        unprocessed_repos = new_repos_to_process

    return repos_info
    'robot',
    'viz',
    'desktop',
    'perception',
    'simulators',
    'desktop_full',
]

# Get packages which make up each layer of the variants
mp_sets = {}
# Load both distributions so hydro metapackages can be compared to indigo.
index = get_index(get_index_url())
hydro = get_cached_distribution(index, 'hydro')
indigo = get_cached_distribution(index, 'indigo')
dist_file = get_distribution_file(index, 'hydro')
indigo_dist_file = get_distribution_file(index, 'indigo')
# Walk dependencies against the hydro distribution.
dw = DependencyWalker(hydro)
for mp in keys:
    # print("Fetching deps for: ", mp)
    deps = list(set(metapackages[mp].run_depends))
    mp_sets[mp] = set([])
    for dep in deps:
        mp_sets[mp].update(set([dep.name]))
        if dep.name in keys:
            continue
        # print(" ", dep.name)
        previous_pkgs = set([])
        for mp_, mp_set in mp_sets.items():
            if mp == mp_:
                continue
            previous_pkgs.update(mp_set)
        mp_sets[mp].update(dw.get_recursive_depends(
Example #13
0
def get_blocking_info(distro_key, repo_names, depth):
    """Classify repositories by what blocks their release into *distro_key*.

    Compares the previous distribution to *distro_key* and partitions the
    input repositories into: already released, blocked by unreleased
    repositories, unblocked but blocking others, and unblocked leaves.

    NOTE(review): this function uses Python 2 only constructs (see the
    dict.keys().sort() and .iteritems() notes below); it will not run
    unmodified on Python 3.

    :param distro_key: target distribution name, or None for the latest
    :param repo_names: iterable of repositories to check, or None for all
        released repositories of the previous distribution
    :param depth: recursion limit for the dependency walk
    :returns: tuple (released_repos, blocked_repos,
        unblocked_blocking_repos, unblocked_unblocking_repos)
    """
    prev_distro_key = None

    index = rosdistro.get_index(rosdistro.get_index_url())
    valid_distro_keys = index.distributions.keys()
    # NOTE(review): Python 2 only — in Python 3 dict.keys() is a view
    # without a .sort() method (would need sorted(...) instead).
    valid_distro_keys.sort()
    if distro_key is None:
        distro_key = valid_distro_keys[-1]
    print('Checking packages for "%s" distribution' % distro_key)

    # Find the previous distribution to the current one
    try:
        i = valid_distro_keys.index(distro_key)
    except ValueError:
        print('Distribution key not found in list of valid distributions.')
        exit(-1)
    if i == 0:
        print('No previous distribution found.')
        exit(-1)
    prev_distro_key = valid_distro_keys[i - 1]

    cache = rosdistro.get_distribution_cache(index, distro_key)
    distro_file = cache.distribution_file

    prev_cache = rosdistro.get_distribution_cache(index, prev_distro_key)
    prev_distribution = rosdistro.get_cached_distribution(
        index, prev_distro_key, cache=prev_cache)

    prev_distro_file = prev_cache.distribution_file

    # Dependencies are taken from the previous distribution, since
    # unreleased repos have no info in the current one yet.
    dependency_walker = DependencyWalker(prev_distribution)

    if repo_names is None:
        # Check missing dependencies for packages that were in the previous
        # distribution that have not yet been released in the current distribution
        # Filter repos without a version or a release repository
        keys = prev_distro_file.repositories.keys()
        prev_repo_names = set(
            repo for repo in keys if is_released(repo, prev_distro_file))
        repo_names = prev_repo_names
        ignored_inputs = []
    else:
        prev_repo_names = set(
            repo for repo in repo_names if is_released(repo, prev_distro_file))
        ignored_inputs = list(set(repo_names).difference(prev_repo_names))
        if len(ignored_inputs) > 0:
            print('Ignoring inputs for which repository info not found in previous distribution' +
                    ' (did you list a package instead of a repository?):')
            print('\n'.join(
                sorted('\t{0}'.format(repo) for repo in ignored_inputs)))

    keys = distro_file.repositories.keys()
    current_repo_names = set(
        repo for repo in keys if is_released(repo, distro_file))

    released_repos = prev_repo_names.intersection(
        current_repo_names)

    unreleased_repos = list(prev_repo_names.difference(
        current_repo_names))

    # Get a list of currently released packages
    current_package_names = set(
        pkg for repo in current_repo_names
        for pkg in distro_file.repositories[repo].release_repository.package_names)

    # Construct a dictionary where keys are repository names and values are a list
    # of the repos blocking/blocked by that repo
    blocked_repos = {}
    blocking_repos = {}
    unblocked_blocking_repos = set()

    if len(unreleased_repos) == 0:
        print('All inputs already released in {0}.'.format(
            distro_key))

    # Process repo dependencies
    unblocked_repos = set()
    total_blocking_repos = set()

    for repository_name in unreleased_repos:
        repo = prev_distro_file.repositories[repository_name]
        release_repo = repo.release_repository
        package_dependencies = set()
        packages = release_repo.package_names
        # Accumulate all dependencies for those packages
        for package in packages:
            recursive_dependencies = dependency_walker.get_recursive_depends(
                package, ['build', 'run', 'buildtool'], ros_packages_only=True,
                limit_depth=depth)
            package_dependencies = package_dependencies.union(
                recursive_dependencies)

        # For all package dependencies, check if they are released yet
        unreleased_pkgs = package_dependencies.difference(
            current_package_names)
        # remove the packages which this repo provides.
        unreleased_pkgs = unreleased_pkgs.difference(packages)
        # Now get the repositories for these packages.
        blocking_repos_for_this_repo = set(prev_distro_file.release_packages[pkg].repository_name
                            for pkg in unreleased_pkgs)
        if len(blocking_repos_for_this_repo) == 0:
            unblocked_repos.add(repository_name)
        else:
            # Get the repository for the unreleased packages
            blocked_repos[repository_name] = blocking_repos_for_this_repo
            total_blocking_repos |= blocking_repos_for_this_repo

            for blocking_repo in blocking_repos_for_this_repo:
                try:
                    blocking_repos[blocking_repo] |= set([repository_name])
                except KeyError:
                    blocking_repos[blocking_repo] = set([repository_name])

    unblocked_blocking_repos_names = total_blocking_repos.intersection(unblocked_repos)
    # NOTE(review): .iteritems() is Python 2 only (Python 3: .items()).
    unblocked_blocking_repos = {
        repo:blocking for repo, blocking in blocking_repos.iteritems()
        if repo in unblocked_blocking_repos_names
        }
    unblocked_leaf_repos = unblocked_repos.difference(unblocked_blocking_repos_names)

    # Double-check repositories that we think are leaf repos
    for repo in unblocked_leaf_repos:
        # Check only one level of depends_on
        # NOTE(review): 'package' here is the loop variable leaked from the
        # dependency-accumulation loop above, not a package of this repo —
        # this looks like a bug (should presumably iterate this repo's own
        # packages); confirm before relying on this branch.
        depends_on = dependency_walker.get_depends_on(package, 'build') | \
            dependency_walker.get_depends_on(package, 'run') | \
            dependency_walker.get_depends_on(package, 'buildtool')
        if len(depends_on) != 0:
            # There are packages that depend on this "leaf", but we didn't find
            # them initially because they weren't related to our inputs
            for package in depends_on:
                depends_on_repo = prev_distro_file.release_packages[package].repository_name
                try:
                    unblocked_blocking_repos[repo] |= set([depends_on_repo])
                except KeyError:
                    unblocked_blocking_repos[repo] = set([depends_on_repo])

    unblocked_unblocking_repos = unblocked_leaf_repos.difference(
        unblocked_blocking_repos.keys())

    # Sanity check: every input repo must land in exactly one partition.
    if not len(repo_names) == (len(ignored_inputs) + len(released_repos) + len(blocked_repos.keys()) +
        len(unblocked_blocking_repos.keys()) + len(unblocked_unblocking_repos)):
        raise Exception('Somewhere a repo has not been accounted for')
    return released_repos, blocked_repos, unblocked_blocking_repos, unblocked_unblocking_repos
Example #14
0
        exit(-1)
    prev_distro_key = args.comparison
else:
    prev_distro_key = valid_distro_keys[i - 1]

# Load the current and previous distribution files for comparison.
cache = rosdistro.get_distribution_cache(index, distro_key)
distro_file = cache.distribution_file

prev_cache = rosdistro.get_distribution_cache(index, prev_distro_key)
prev_distribution = rosdistro.get_cached_distribution(index,
                                                      prev_distro_key,
                                                      cache=prev_cache)

prev_distro_file = prev_cache.distribution_file

# Dependencies are walked against the previous distribution.
dependency_walker = DependencyWalker(prev_distribution)

if repo_names_argument is None:
    # Check missing dependencies for packages that were in the previous
    # distribution that have not yet been released in the current distribution
    # Filter repos without a version or a release repository
    repo_names_argument = prev_distro_file.repositories.keys()

# Keep only inputs that were actually released in the previous distribution.
prev_repo_names = set(repo for repo in repo_names_argument
                      if is_released(repo, prev_distro_file))

keys = distro_file.repositories.keys()
current_repo_names = set(repo for repo in keys
                         if is_released(repo, distro_file))

# Print the repositories that will be eliminated from the input
def _test_repositories(ros_distro, repo_list, version_list, workspace, test_depends_on,
                       repo_sourcespace, dependson_sourcespace, repo_buildspace, dependson_buildspace,
                       sudo=False, no_chroot=False):
    from catkin_pkg.package import InvalidPackage, parse_package_string
    from rosdistro import get_cached_release, get_index, get_index_url, get_source_file
    from rosdistro.dependency_walker import DependencyWalker
    from rosdistro.manifest_provider import get_release_tag

    index = get_index(get_index_url())
    print "Parsing rosdistro file for %s" % ros_distro
    release = get_cached_release(index, ros_distro)
    print "Parsing devel file for %s" % ros_distro
    source_file = get_source_file(index, ros_distro)

    # Create rosdep object
    print "Create rosdep object"
    rosdep_resolver = rosdep.RosDepResolver(ros_distro, sudo, no_chroot)

    # download the repo_list from source
    print "Creating rosinstall file for repo list"
    rosinstall = ""
    for repo_name, version in zip(repo_list, version_list):
        if version == 'devel':
            if repo_name not in source_file.repositories:
                raise BuildException("Repository %s does not exist in Devel Distro" % repo_name)
            print "Using devel distro file to download repositories"
            rosinstall += _generate_rosinstall_for_repo(source_file.repositories[repo_name])
        else:
            if repo_name not in release.repositories:
                raise BuildException("Repository %s does not exist in Ros Distro" % repo_name)
            repo = release.repositories[repo_name]
            if version not in ['latest', 'master']:
                assert repo.version is not None, 'Repository "%s" does not have a version set' % repo_name
            assert 'release' in repo.tags, 'Repository "%s" does not have a "release" tag set' % repo_name
            for pkg_name in repo.package_names:
                release_tag = get_release_tag(repo, pkg_name)
                if version in ['latest', 'master']:
                    release_tag = '/'.join(release_tag.split('/')[:-1])
                print 'Using tag "%s" of release distro file to download package "%s from repo "%s' % (version, pkg_name, repo_name)
                rosinstall += _generate_rosinstall_for_repo(release.repositories[repo_name], version=release_tag)
    print "rosinstall file for all repositories: \n %s" % rosinstall
    with open(os.path.join(workspace, "repo.rosinstall"), 'w') as f:
        f.write(rosinstall)
    print "Install repo list from source"
    os.makedirs(repo_sourcespace)
    call("rosinstall %s %s/repo.rosinstall --catkin" % (repo_sourcespace, workspace))

    # get the repositories build dependencies
    print "Get build dependencies of repo list"
    repo_build_dependencies = get_dependencies(repo_sourcespace, build_depends=True, test_depends=False)
    # ensure that catkin gets installed, for non-catkin packages so that catkin_make_isolated is available
    if 'catkin' not in repo_build_dependencies:
        repo_build_dependencies.append('catkin')
    print "Install build dependencies of repo list: %s" % (', '.join(repo_build_dependencies))
    apt_get_install(repo_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file for repositories that use catkin
    print "Removing the CMakeLists.txt file generated by rosinstall"
    os.remove(os.path.join(repo_sourcespace, 'CMakeLists.txt'))
    print "Create a new CMakeLists.txt file using catkin"

    # get environment
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)

    # check if source workspace contains only package built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(repo_sourcespace)

    # make build folder and change into it
    os.makedirs(repo_buildspace)
    os.chdir(repo_buildspace)

    # make test results dir
    test_results_dir = os.path.join(workspace, 'test_results')
    if os.path.exists(test_results_dir):
        shutil.rmtree(test_results_dir)
    os.makedirs(test_results_dir)

    if not non_catkin_pkgs:
        print "Build catkin workspace"
        call("catkin_init_workspace %s" % repo_sourcespace, ros_env)
        repos_test_results_dir = os.path.join(test_results_dir, 'repos')
        call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s" % (repo_sourcespace, repos_test_results_dir), ros_env)
        #ros_env_repo = get_ros_env(os.path.join(repo_buildspace, 'devel/setup.bash'))

        # build repositories and tests
        print "Build repo list"
        call("make", ros_env)
        call("make tests", ros_env)

        # get the repositories test and run dependencies
        print "Get test and run dependencies of repo list"
        repo_test_dependencies = get_dependencies(repo_sourcespace, build_depends=False, test_depends=True)
        print "Install test and run dependencies of repo list: %s" % (', '.join(repo_test_dependencies))
        apt_get_install(repo_test_dependencies, rosdep_resolver, sudo)

        # run tests
        print "Test repo list"
        call("make run_tests", ros_env)

    else:
        print "Build workspace with non-catkin packages in isolation"
        # work around catkin_make_isolated issue (at least with version 0.5.65 of catkin)
        os.makedirs(os.path.join(repo_buildspace, 'devel_isolated'))
        call('catkin_make_isolated --source %s --install-space install_isolated --install' % repo_sourcespace, ros_env)
        setup_file = os.path.join(repo_buildspace, 'install_isolated', 'setup.sh')
        ros_env = get_ros_env(setup_file)

    # see if we need to do more work or not
    if not test_depends_on:
        print "We're not testing the depends-on repositories"
        ensure_test_results(test_results_dir)
        return

    # get repo_list depends-on list
    print "Get list of wet repositories that build-depend on repo list: %s" % ', '.join(repo_list)
    walker = DependencyWalker(release)
    depends_on = set([])
    try:
        for repo_name in repo_list:
            print('repo_name', repo_name)
            repo = release.repositories[repo_name]
            for pkg_name in repo.package_names:
                print('pkg_name', pkg_name)
                depends_on |= walker.get_recursive_depends_on(pkg_name, ['buildtool', 'build'], ignore_pkgs=depends_on)
                print('depends_on', depends_on)
    except RuntimeError:
        print "Exception %s: If you are not in the rosdistro and only in the devel", \
            " builds there will be no depends on"
        depends_on = set([])

    print "Build depends_on list of pkg list: %s" % (', '.join(depends_on))
    if len(depends_on) == 0:
        print "No wet packages depend on our repo list. Test finished here"
        ensure_test_results(test_results_dir)
        return

    # install depends_on packages from source from release repositories
    rosinstall = ''
    non_catkin_pkgs = []
    for pkg_name in depends_on:
        repo = release.repositories[release.packages[pkg_name].repository_name]
        if repo.version is None:
            continue
        pkg_xml = release.get_package_xml(pkg_name)
        if pkg_xml is None:
            raise BuildException('Could not retrieve package.xml for package "%s" from rosdistro cache' % pkg_name)
        try:
            pkg = parse_package_string(pkg_xml)
        except InvalidPackage as e:
            raise BuildException('package.xml for package "%s" from rosdistro cache is invalid: %s' % (pkg_name, e))
        if _is_non_catkin_package(pkg):
            non_catkin_pkgs.append(pkg.name)
        rosinstall += _generate_rosinstall_for_pkg(repo, pkg_name)

    if non_catkin_pkgs:
        print 'Non-catkin packages depend on our repo list (%s). Skipping depends_on packages here' % ', '.join(sorted(non_catkin_pkgs))
        create_test_result(test_results_dir, failure='Non-catkin packages depend on the repos (%s). Skip building and testing depends_on packages.' % ', '.join(sorted(non_catkin_pkgs)))
        return

    print "Rosinstall for depends_on:\n %s" % rosinstall
    with open(workspace + "/depends_on.rosinstall", 'w') as f:
        f.write(rosinstall)
    print "Created rosinstall file for depends on"

    # install all repository and system dependencies of the depends_on list
    print "Install all depends_on from source: %s" % (', '.join(depends_on))
    os.makedirs(dependson_sourcespace)
    call("rosinstall --catkin %s %s/depends_on.rosinstall" % (dependson_sourcespace, workspace))

    # check if depends_on workspace contains only package built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(dependson_sourcespace)
    if non_catkin_pkgs:
        print 'Non-catkin packages depend on our repo list (%s). Skipping depends_on packages here' % ', '.join(sorted(non_catkin_pkgs))
        create_test_result(test_results_dir, failure='Non-catkin packages depend on the repos (%s). Skip building and testing depends_on packages.' % ', '.join(sorted(non_catkin_pkgs)))
        return

    # get build and test dependencies of depends_on list
    dependson_build_dependencies = []
    for d in get_dependencies(dependson_sourcespace, build_depends=True, test_depends=False):
        print "  Checking dependency %s" % d
        if d in dependson_build_dependencies:
            print "    Already in dependson_build_dependencies"
        if d in depends_on:
            print "    Is a direct dependency of the repo list, and is installed from source"
        if d in repo_list:
            print "    Is one of the repositories tested"
        if not d in dependson_build_dependencies and not d in depends_on and not d in repo_list:
            dependson_build_dependencies.append(d)
    print "Build dependencies of depends_on list are %s" % (', '.join(dependson_build_dependencies))
    dependson_test_dependencies = []
    for d in get_dependencies(dependson_sourcespace, build_depends=False, test_depends=True):
        if not d in dependson_test_dependencies and not d in depends_on and not d in repo_list:
            dependson_test_dependencies.append(d)
    print "Test dependencies of depends_on list are %s" % (', '.join(dependson_test_dependencies))

    # install build dependencies
    print "Install all build dependencies of the depends_on list"
    apt_get_install(dependson_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file again
    print "Removing the CMakeLists.txt file generated by rosinstall"
    os.remove(os.path.join(dependson_sourcespace, 'CMakeLists.txt'))
    os.makedirs(dependson_buildspace)
    os.chdir(dependson_buildspace)
    print "Create a new CMakeLists.txt file using catkin"
    call("catkin_init_workspace %s" % dependson_sourcespace, ros_env)
    depends_on_test_results_dir = os.path.join(test_results_dir, 'depends_on')
    call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s" % (dependson_sourcespace, depends_on_test_results_dir), ros_env)
    #ros_env_depends_on = get_ros_env(os.path.join(dependson_buildspace, 'devel/setup.bash'))

    # build repositories
    print "Build depends-on packages"
    call("make", ros_env)

    # install test dependencies
    print "Install all test dependencies of the depends_on list"
    apt_get_install(dependson_test_dependencies, rosdep_resolver, sudo)

    # test repositories
    print "Test depends-on packages"
    call("make run_tests", ros_env)
    ensure_test_results(test_results_dir)
Example #16
0
def _get_blocked_releases_info(config_url, rosdistro_name, repo_names=None):
    """Collect, for each repository, information on what blocks its release.

    Compares the given distribution against the previous one from the index
    and, for every repository released in the previous distribution, records
    whether it has been released in the current one and - if not - which
    other unreleased repositories its packages (recursively) depend on.

    :param config_url: URL of the build-farm config index
    :param rosdistro_name: distribution to check; when None the newest
        distribution key from the index is used
    :param repo_names: optional repository names to restrict the check to;
        when None, all repos released in the previous distribution are used
    :returns: dict mapping repository name to an info dict with keys such as
        'released', 'version', 'maintainers', 'repos_blocked_by',
        'repos_blocking', 'recursive_repos_blocking' and 'url'
    """
    import rosdistro
    from rosdistro.dependency_walker import DependencyWalker
    from catkin_pkg.package import InvalidPackage, parse_package_string

    prev_rosdistro_name = None

    config = get_config_index(config_url)

    index = rosdistro.get_index(config.rosdistro_index_url)
    # Sorted so that "the previous distribution" is the lexicographic
    # predecessor of the requested one.
    valid_rosdistro_names = list(index.distributions.keys())
    valid_rosdistro_names.sort()
    if rosdistro_name is None:
        rosdistro_name = valid_rosdistro_names[-1]
    print('Checking packages for "%s" distribution' % rosdistro_name)

    # Find the previous distribution to the current one
    try:
        i = valid_rosdistro_names.index(rosdistro_name)
    except ValueError:
        print('Distribution key not found in list of valid distributions.',
              file=sys.stderr)
        exit(-1)
    if i == 0:
        print('No previous distribution found.', file=sys.stderr)
        exit(-1)
    prev_rosdistro_name = valid_rosdistro_names[i - 1]

    cache = rosdistro.get_distribution_cache(index, rosdistro_name)
    distro_file = cache.distribution_file

    prev_cache = rosdistro.get_distribution_cache(index, prev_rosdistro_name)
    prev_distribution = rosdistro.get_cached_distribution(index,
                                                          prev_rosdistro_name,
                                                          cache=prev_cache)

    prev_distro_file = prev_cache.distribution_file

    # Dependencies are resolved against the PREVIOUS distribution (see below).
    dependency_walker = DependencyWalker(prev_distribution)

    if repo_names is None:
        # Check missing dependencies for packages that were in the previous
        # distribution that have not yet been released in the current distribution
        # Filter repos without a version or a release repository
        keys = prev_distro_file.repositories.keys()
        prev_repo_names = set(repo for repo in keys
                              if _is_released(repo, prev_distro_file))
        repo_names = prev_repo_names
        ignored_inputs = []
    else:
        prev_repo_names = set(repo for repo in repo_names
                              if _is_released(repo, prev_distro_file))
        # Inputs without release info in the previous distro cannot be analyzed
        ignored_inputs = list(set(repo_names).difference(prev_repo_names))
        if len(ignored_inputs) > 0:
            print(
                'Ignoring inputs for which repository info not found in previous distribution '
                '(did you list a package instead of a repository?):')
            print('\n'.join(
                sorted('\t{0}'.format(repo) for repo in ignored_inputs)))

    keys = distro_file.repositories.keys()
    current_repo_names = set(repo for repo in keys
                             if _is_released(repo, distro_file))

    # Get a list of currently released packages
    current_package_names = set(
        pkg for repo in current_repo_names for pkg in
        distro_file.repositories[repo].release_repository.package_names)

    released_repos = prev_repo_names.intersection(current_repo_names)

    unreleased_repos = list(prev_repo_names.difference(current_repo_names))

    if len(unreleased_repos) == 0:
        print('All inputs already released in {0}.'.format(rosdistro_name))

    # Fixed-point iteration: processing a repo can discover new blocking
    # repos, which are processed in the next round until none remain.
    repos_info = defaultdict(dict)
    unprocessed_repos = prev_repo_names
    while unprocessed_repos:
        print('Processing repos:\n%s' %
              '\n'.join(['- %s' % r for r in sorted(unprocessed_repos)]))
        new_repos_to_process = set(
        )  # set containing repos that come up while processing others

        for repo_name in unprocessed_repos:
            repos_info[repo_name]['released'] = repo_name in released_repos

            if repo_name in released_repos:
                repo = distro_file.repositories[repo_name]
                version = repo.release_repository.version
                repos_info[repo_name]['version'] = version

            else:
                # Gather info on which required repos have not been released yet
                # Assume dependencies will be the same as in the previous distribution and find
                # which ones have been released
                repo = prev_distro_file.repositories[repo_name]
                release_repo = repo.release_repository
                package_dependencies = set()
                packages = release_repo.package_names
                # Accumulate all dependencies for those packages
                for package in packages:
                    try:
                        package_dependencies |= dependency_walker.get_recursive_depends(
                            package, ['build', 'buildtool', 'run', 'test'],
                            ros_packages_only=True,
                            limit_depth=1)
                    except AssertionError as e:
                        # Missing dependency info is reported but not fatal
                        print(e, file=sys.stderr)

                # For all package dependencies, check if they are released yet
                unreleased_pkgs = package_dependencies.difference(
                    current_package_names)
                # Remove the packages which this repo provides
                unreleased_pkgs = unreleased_pkgs.difference(packages)

                # Get maintainer info and repo of unreleased packages
                maintainers = {}
                repos_blocked_by = set()
                for pkg_name in unreleased_pkgs:
                    unreleased_repo_name = \
                        prev_distro_file.release_packages[pkg_name].repository_name
                    repos_blocked_by.add(unreleased_repo_name)
                    pkg_xml = prev_distribution.get_release_package_xml(
                        pkg_name)
                    if pkg_xml is not None:
                        try:
                            pkg = parse_package_string(pkg_xml)
                        except InvalidPackage:
                            # maintainer info is best-effort only; skip
                            # packages with invalid manifests
                            pass
                        else:
                            pkg_maintainers = {
                                m.name: m.email
                                for m in pkg.maintainers
                            }
                            if unreleased_repo_name not in maintainers:
                                maintainers[unreleased_repo_name] = {}
                            maintainers[unreleased_repo_name].update(
                                pkg_maintainers)
                if maintainers:
                    repos_info[repo_name]['maintainers'] = maintainers

                repos_info[repo_name]['repos_blocked_by'] = {}
                for blocking_repo_name in repos_blocked_by:
                    # Get url of blocking repos
                    repo_url = None
                    blocking_repo = prev_distro_file.repositories[
                        blocking_repo_name]
                    if blocking_repo.source_repository:
                        repo_url = blocking_repo.source_repository.url
                    elif blocking_repo.doc_repository:
                        repo_url = blocking_repo.doc_repository.url
                    repos_info[repo_name]['repos_blocked_by'].update(
                        {blocking_repo_name: repo_url})

                    # Mark blocking relationship in other direction
                    if blocking_repo_name not in repos_info:
                        new_repos_to_process.add(blocking_repo_name)
                        repos_info[blocking_repo_name] = {}
                    if 'repos_blocking' not in repos_info[blocking_repo_name]:
                        repos_info[blocking_repo_name]['repos_blocking'] = set(
                            [])
                    repos_info[blocking_repo_name]['repos_blocking'].add(
                        repo_name)

            # Get url of repo
            repo_url = None
            if repo.source_repository:
                repo_url = repo.source_repository.url
            elif repo.doc_repository:
                repo_url = repo.doc_repository.url
            if repo_url:
                repos_info[repo_name]['url'] = repo_url

            new_repos_to_process.discard(
                repo_name)  # this repo has been fully processed now

        for repo_name in repos_info.keys():
            # Recursively get all repos being blocked by this repo
            recursive_blocks = set([])
            repos_to_check = set([repo_name])
            while repos_to_check:
                next_repo_to_check = repos_to_check.pop()
                blocks = repos_info[next_repo_to_check].get(
                    'repos_blocking', set([]))
                new_blocks = blocks - recursive_blocks
                repos_to_check |= new_blocks
                recursive_blocks |= new_blocks
            if recursive_blocks:
                repos_info[repo_name][
                    'recursive_repos_blocking'] = recursive_blocks
        unprocessed_repos = new_repos_to_process

    return repos_info
def _test_repositories(ros_distro, repo_list, version_list, workspace, test_depends_on,
                       repo_sourcespace, dependson_sourcespace, repo_buildspace, dependson_buildspace,
                       sudo=False, no_chroot=False):
    from catkin_pkg.package import InvalidPackage, parse_package_string
    from rosdistro import get_cached_release, get_index, get_index_url, get_source_file
    from rosdistro.dependency_walker import DependencyWalker
    from rosdistro.manifest_provider import get_release_tag

    index = get_index(get_index_url())
    print "Parsing rosdistro file for %s" % ros_distro
    release = get_cached_release(index, ros_distro)
    print "Parsing devel file for %s" % ros_distro
    source_file = get_source_file(index, ros_distro)

    # Create rosdep object
    print "Create rosdep object"
    rosdep_resolver = rosdep.RosDepResolver(ros_distro, sudo, no_chroot)

    # download the repo_list from source
    print "Creating rosinstall file for repo list"
    rosinstall = ""
    for repo_name, version in zip(repo_list, version_list):
        if version == 'devel':
            if repo_name not in source_file.repositories:
                raise BuildException("Repository %s does not exist in Devel Distro" % repo_name)
            print "Using devel distro file to download repositories"
            rosinstall += _generate_rosinstall_for_repo(source_file.repositories[repo_name])
        else:
            if repo_name not in release.repositories:
                raise BuildException("Repository %s does not exist in Ros Distro" % repo_name)
            repo = release.repositories[repo_name]
            if version not in ['latest', 'master']:
                assert repo.version is not None, 'Repository "%s" does not have a version set' % repo_name
            assert 'release' in repo.tags, 'Repository "%s" does not have a "release" tag set' % repo_name
            for pkg_name in repo.package_names:
                release_tag = get_release_tag(repo, pkg_name)
                if version in ['latest', 'master']:
                    release_tag = '/'.join(release_tag.split('/')[:-1])
                print 'Using tag "%s" of release distro file to download package "%s from repo "%s' % (version, pkg_name, repo_name)
                rosinstall += _generate_rosinstall_for_repo(release.repositories[repo_name], version=release_tag)
    print "rosinstall file for all repositories: \n %s" % rosinstall
    with open(os.path.join(workspace, "repo.rosinstall"), 'w') as f:
        f.write(rosinstall)
    print "Install repo list from source"
    os.makedirs(repo_sourcespace)
    call("rosinstall %s %s/repo.rosinstall --catkin" % (repo_sourcespace, workspace))

    # get the repositories build dependencies
    print "Get build dependencies of repo list"
    repo_build_dependencies = get_dependencies(repo_sourcespace, build_depends=True, test_depends=False)
    # ensure that catkin gets installed, for non-catkin packages so that catkin_make_isolated is available
    if 'catkin' not in repo_build_dependencies:
        repo_build_dependencies.append('catkin')
    print "Install build dependencies of repo list: %s" % (', '.join(repo_build_dependencies))
    apt_get_install(repo_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file for repositories that use catkin
    print "Removing the CMakeLists.txt file generated by rosinstall"
    os.remove(os.path.join(repo_sourcespace, 'CMakeLists.txt'))
    print "Create a new CMakeLists.txt file using catkin"

    # get environment
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)

    # check if source workspace contains only package built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(repo_sourcespace)

    # make build folder and change into it
    os.makedirs(repo_buildspace)
    os.chdir(repo_buildspace)

    # make test results dir
    test_results_dir = os.path.join(workspace, 'test_results')
    if os.path.exists(test_results_dir):
        shutil.rmtree(test_results_dir)
    os.makedirs(test_results_dir)

    if not non_catkin_pkgs:
        print "Build catkin workspace"
        call("catkin_init_workspace %s" % repo_sourcespace, ros_env)
        repos_test_results_dir = os.path.join(test_results_dir, 'repos')
        call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s" % (repo_sourcespace, repos_test_results_dir), ros_env)
        #ros_env_repo = get_ros_env(os.path.join(repo_buildspace, 'devel/setup.bash'))

        # build repositories and tests
        print "Build repo list"
        call("make", ros_env)
        call("make tests", ros_env)

        # get the repositories test and run dependencies
        print "Get test and run dependencies of repo list"
        repo_test_dependencies = get_dependencies(repo_sourcespace, build_depends=False, test_depends=True)
        print "Install test and run dependencies of repo list: %s" % (', '.join(repo_test_dependencies))
        apt_get_install(repo_test_dependencies, rosdep_resolver, sudo)

        # run tests
        print "Test repo list"
        call("make run_tests", ros_env)

    else:
        print "Build workspace with non-catkin packages in isolation"
        # work around catkin_make_isolated issue (at least with version 0.5.65 of catkin)
        os.makedirs(os.path.join(repo_buildspace, 'devel_isolated'))
        call('catkin_make_isolated --source %s --install-space install_isolated --install' % repo_sourcespace, ros_env)
        setup_file = os.path.join(repo_buildspace, 'install_isolated', 'setup.sh')
        ros_env = get_ros_env(setup_file)

    # don't do depends-on on things not in release
    not_in_release = set(repo_list) - set(release.repositories.keys())
    if not_in_release:
        print "Removed [%s] repositories which are not in the " %\
            ', '.join(sorted(not_in_release)), \
            "release file for depends-on testing"
        repo_list = list(set(repo_list) - not_in_release)

    # see if we need to do more work or not
    if not test_depends_on:
        print "We're not testing the depends-on repositories"
        ensure_test_results(test_results_dir)
        return

    # get repo_list depends-on list
    print "Get list of wet repositories that build-depend on repo list: %s" % ', '.join(repo_list)
    walker = DependencyWalker(release)
    depends_on = set([])
    try:
        for repo_name in repo_list:
            print('repo_name', repo_name)
            repo = release.repositories[repo_name]
            for pkg_name in repo.package_names:
                print('pkg_name', pkg_name)
                depends_on |= walker.get_recursive_depends_on(pkg_name, ['buildtool', 'build'], ignore_pkgs=depends_on)
                print('depends_on', depends_on)
    except RuntimeError:
        print "Exception %s: If you are not in the rosdistro and only in the devel", \
            " builds there will be no depends on"
        depends_on = set([])

    print "Build depends_on list of pkg list: %s" % (', '.join(depends_on))
    if len(depends_on) == 0:
        print "No wet packages depend on our repo list. Test finished here"
        ensure_test_results(test_results_dir)
        return

    # install depends_on packages from source from release repositories
    rosinstall = ''
    non_catkin_pkgs = []
    for pkg_name in depends_on:
        repo = release.repositories[release.packages[pkg_name].repository_name]
        if repo.version is None:
            continue
        pkg_xml = release.get_package_xml(pkg_name)
        if pkg_xml is None:
            raise BuildException('Could not retrieve package.xml for package "%s" from rosdistro cache' % pkg_name)
        try:
            pkg = parse_package_string(pkg_xml)
        except InvalidPackage as e:
            raise BuildException('package.xml for package "%s" from rosdistro cache is invalid: %s' % (pkg_name, e))
        if _is_non_catkin_package(pkg):
            non_catkin_pkgs.append(pkg.name)
        rosinstall += _generate_rosinstall_for_pkg(repo, pkg_name)

    if non_catkin_pkgs:
        print 'Non-catkin packages depend on our repo list (%s). Skipping depends_on packages here' % ', '.join(sorted(non_catkin_pkgs))
        create_test_result(test_results_dir, failure='Non-catkin packages depend on the repos (%s). Skip building and testing depends_on packages.' % ', '.join(sorted(non_catkin_pkgs)))
        return

    print "Rosinstall for depends_on:\n %s" % rosinstall
    with open(workspace + "/depends_on.rosinstall", 'w') as f:
        f.write(rosinstall)
    print "Created rosinstall file for depends on"

    # install all repository and system dependencies of the depends_on list
    print "Install all depends_on from source: %s" % (', '.join(depends_on))
    os.makedirs(dependson_sourcespace)
    call("rosinstall --catkin %s %s/depends_on.rosinstall" % (dependson_sourcespace, workspace))

    # check if depends_on workspace contains only package built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(dependson_sourcespace)
    if non_catkin_pkgs:
        print 'Non-catkin packages depend on our repo list (%s). Skipping depends_on packages here' % ', '.join(sorted(non_catkin_pkgs))
        create_test_result(test_results_dir, failure='Non-catkin packages depend on the repos (%s). Skip building and testing depends_on packages.' % ', '.join(sorted(non_catkin_pkgs)))
        return

    # get build and test dependencies of depends_on list
    dependson_build_dependencies = []
    for d in get_dependencies(dependson_sourcespace, build_depends=True, test_depends=False):
        print "  Checking dependency %s" % d
        if d in dependson_build_dependencies:
            print "    Already in dependson_build_dependencies"
        if d in depends_on:
            print "    Is a direct dependency of the repo list, and is installed from source"
        if d in repo_list:
            print "    Is one of the repositories tested"
        if not d in dependson_build_dependencies and not d in depends_on and not d in repo_list:
            dependson_build_dependencies.append(d)
    print "Build dependencies of depends_on list are %s" % (', '.join(dependson_build_dependencies))
    dependson_test_dependencies = []
    for d in get_dependencies(dependson_sourcespace, build_depends=False, test_depends=True):
        if not d in dependson_test_dependencies and not d in depends_on and not d in repo_list:
            dependson_test_dependencies.append(d)
    print "Test dependencies of depends_on list are %s" % (', '.join(dependson_test_dependencies))

    # install build dependencies
    print "Install all build dependencies of the depends_on list"
    apt_get_install(dependson_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file again
    print "Removing the CMakeLists.txt file generated by rosinstall"
    os.remove(os.path.join(dependson_sourcespace, 'CMakeLists.txt'))
    os.makedirs(dependson_buildspace)
    os.chdir(dependson_buildspace)
    print "Create a new CMakeLists.txt file using catkin"
    call("catkin_init_workspace %s" % dependson_sourcespace, ros_env)
    depends_on_test_results_dir = os.path.join(test_results_dir, 'depends_on')
    call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s" % (dependson_sourcespace, depends_on_test_results_dir), ros_env)
    #ros_env_depends_on = get_ros_env(os.path.join(dependson_buildspace, 'devel/setup.bash'))

    # build repositories
    print "Build depends-on packages"
    call("make", ros_env)

    # install test dependencies
    print "Install all test dependencies of the depends_on list"
    apt_get_install(dependson_test_dependencies, rosdep_resolver, sudo)

    # test repositories
    print "Test depends-on packages"
    call("make run_tests", ros_env)
    ensure_test_results(test_results_dir)
Example #18
0
        print('Valid rosdistros are %s.' % valid_comparison_keys, file=sys.stderr)
        exit(-1)
    prev_distro_key = args.comparison
else:
    prev_distro_key = valid_distro_keys[i - 1]

# Load the distribution caches for the current and the previous distro key.
cache = rosdistro.get_distribution_cache(index, distro_key)
distro_file = cache.distribution_file

prev_cache = rosdistro.get_distribution_cache(index, prev_distro_key)
prev_distribution = rosdistro.get_cached_distribution(index,
                                                      prev_distro_key,
                                                      cache=prev_cache)

prev_distro_file = prev_cache.distribution_file

dependency_walker = DependencyWalker(prev_distribution)

if repo_names_argument is None:
    # No explicit repository list was given: check missing dependencies for
    # every repository that was in the previous distribution and has not yet
    # been released in the current one (unreleased repos filtered below).
    repo_names_argument = prev_distro_file.repositories.keys()

# Repositories from the input that were released in the previous distribution.
prev_repo_names = {repo for repo in repo_names_argument
                   if is_released(repo, prev_distro_file)}

# Repositories released in the current distribution.
keys = distro_file.repositories.keys()
current_repo_names = {repo for repo in keys if is_released(repo, distro_file)}

# Print the repositories that will be eliminated from the input
def _get_blocked_releases_info(config_url, rosdistro_name, repo_names=None):
    """Collect release-blocking information for repositories of a rosdistro.

    Compares *rosdistro_name* against the previous distribution of the same
    distribution type.  For every repository that was released previously but
    has not yet been released in the current distribution, determines which
    other unreleased repositories block it, based on the package dependencies
    recorded in the previous distribution.

    :param config_url: URL of the buildfarm configuration index
    :param rosdistro_name: name of the distribution to check; when ``None``
        the newest distribution in the rosdistro index is used
    :param repo_names: optional iterable of repository names to restrict the
        check to; when ``None`` all repositories released in the previous
        distribution are considered
    :returns: dict mapping repository name to an info dict with keys such as
        ``released``, ``version``, ``url``, ``maintainers``,
        ``repos_blocked_by``, ``repos_blocking`` and
        ``recursive_repos_blocking``
    """
    import rosdistro
    from rosdistro.dependency_walker import DependencyWalker
    from catkin_pkg.package import InvalidPackage, parse_package_string

    prev_rosdistro_name = None

    config = get_config_index(config_url)

    index = rosdistro.get_index(config.rosdistro_index_url)
    valid_rosdistro_names = list(index.distributions.keys())
    valid_rosdistro_names.sort()
    # default to the newest (alphabetically last) distribution
    if rosdistro_name is None:
        rosdistro_name = valid_rosdistro_names[-1]
    print('Checking packages for "%s" distribution' % rosdistro_name)

    # skip distributions with a different type if the information is available
    distro_type = index.distributions[rosdistro_name].get('distribution_type')
    if distro_type is not None:
        valid_rosdistro_names = [
            n for n in valid_rosdistro_names
            if distro_type == index.distributions[n].get('distribution_type')]

    # Find the previous distribution to the current one
    try:
        i = valid_rosdistro_names.index(rosdistro_name)
    except ValueError:
        print('Distribution key not found in list of valid distributions.', file=sys.stderr)
        exit(-1)
    if i == 0:
        print('No previous distribution found.', file=sys.stderr)
        exit(-1)
    prev_rosdistro_name = valid_rosdistro_names[i - 1]

    cache = rosdistro.get_distribution_cache(index, rosdistro_name)
    distro_file = cache.distribution_file

    prev_cache = rosdistro.get_distribution_cache(index, prev_rosdistro_name)
    prev_distribution = rosdistro.get_cached_distribution(
        index, prev_rosdistro_name, cache=prev_cache)

    prev_distro_file = prev_cache.distribution_file

    # dependencies are resolved against the *previous* distribution, since
    # unreleased repos have no dependency info in the current one yet
    dependency_walker = DependencyWalker(prev_distribution)

    if repo_names is None:
        # Check missing dependencies for packages that were in the previous
        # distribution that have not yet been released in the current distribution
        # Filter repos without a version or a release repository
        keys = prev_distro_file.repositories.keys()
        prev_repo_names = set(
            repo for repo in keys if _is_released(repo, prev_distro_file))
        repo_names = prev_repo_names
        ignored_inputs = []
    else:
        prev_repo_names = set(
            repo for repo in repo_names if _is_released(repo, prev_distro_file))
        # inputs not released in the previous distro cannot be analyzed
        ignored_inputs = list(set(repo_names).difference(prev_repo_names))
        if len(ignored_inputs) > 0:
            print(
                'Ignoring inputs for which repository info not found in previous distribution '
                '(did you list a package instead of a repository?):')
            print('\n'.join(
                sorted('\t{0}'.format(repo) for repo in ignored_inputs)))

    keys = distro_file.repositories.keys()
    current_repo_names = set(
        repo for repo in keys if _is_released(repo, distro_file))

    # Get a list of currently released packages
    current_package_names = set(
        pkg for repo in current_repo_names
        for pkg in distro_file.repositories[repo].release_repository.package_names)

    released_repos = prev_repo_names.intersection(
        current_repo_names)

    unreleased_repos = list(prev_repo_names.difference(
        current_repo_names))

    if len(unreleased_repos) == 0:
        print('All inputs already released in {0}.'.format(
            rosdistro_name))

    repos_info = defaultdict(dict)
    # worklist loop: processing a repo can discover new blocking repos which
    # are queued for the next pass, until no new repos come up
    unprocessed_repos = prev_repo_names
    while unprocessed_repos:
        print('Processing repos:\n%s' %
              '\n'.join(['- %s' % r for r in sorted(unprocessed_repos)]))
        new_repos_to_process = set()  # set containing repos that come up while processing others

        for repo_name in unprocessed_repos:
            repos_info[repo_name]['released'] = repo_name in released_repos

            if repo_name in released_repos:
                repo = distro_file.repositories[repo_name]
                version = repo.release_repository.version
                repos_info[repo_name]['version'] = version

            else:
                # Gather info on which required repos have not been released yet
                # Assume dependencies will be the same as in the previous distribution and find
                # which ones have been released
                repo = prev_distro_file.repositories[repo_name]
                release_repo = repo.release_repository
                package_dependencies = set()
                packages = release_repo.package_names
                # Accumulate all dependencies for those packages
                for package in packages:
                    try:
                        package_dependencies |= dependency_walker.get_recursive_depends(
                            package, ['build', 'buildtool', 'run', 'test'], ros_packages_only=True,
                            limit_depth=1)
                    except AssertionError as e:
                        # a package may be missing from the previous distro;
                        # report and continue with the remaining packages
                        print(e, file=sys.stderr)

                # For all package dependencies, check if they are released yet
                unreleased_pkgs = package_dependencies.difference(
                    current_package_names)
                # Remove the packages which this repo provides
                unreleased_pkgs = unreleased_pkgs.difference(packages)

                # Get maintainer info and repo of unreleased packages
                maintainers = {}
                repos_blocked_by = set()
                for pkg_name in unreleased_pkgs:
                    unreleased_repo_name = \
                        prev_distro_file.release_packages[pkg_name].repository_name
                    repos_blocked_by.add(unreleased_repo_name)
                    pkg_xml = prev_distribution.get_release_package_xml(pkg_name)
                    if pkg_xml is not None:
                        try:
                            pkg = parse_package_string(pkg_xml)
                        except InvalidPackage:
                            # ignore packages with malformed package.xml
                            pass
                        else:
                            pkg_maintainers = {m.name: m.email for m in pkg.maintainers}
                            if unreleased_repo_name not in maintainers:
                                maintainers[unreleased_repo_name] = {}
                            maintainers[unreleased_repo_name].update(pkg_maintainers)
                if maintainers:
                    repos_info[repo_name]['maintainers'] = maintainers

                repos_info[repo_name]['repos_blocked_by'] = {}
                for blocking_repo_name in repos_blocked_by:
                    # Get url of blocking repos
                    repo_url = None
                    blocking_repo = prev_distro_file.repositories[blocking_repo_name]
                    if blocking_repo.source_repository:
                        repo_url = blocking_repo.source_repository.url
                    elif blocking_repo.doc_repository:
                        repo_url = blocking_repo.doc_repository.url
                    repos_info[repo_name]['repos_blocked_by'].update(
                        {blocking_repo_name: repo_url})

                    # Mark blocking relationship in other direction
                    if blocking_repo_name not in repos_info:
                        new_repos_to_process.add(blocking_repo_name)
                        repos_info[blocking_repo_name] = {}
                    if 'repos_blocking' not in repos_info[blocking_repo_name]:
                        repos_info[blocking_repo_name]['repos_blocking'] = set([])
                    repos_info[blocking_repo_name]['repos_blocking'].add(repo_name)

            # Get url of repo
            repo_url = None
            if repo.source_repository:
                repo_url = repo.source_repository.url
            elif repo.doc_repository:
                repo_url = repo.doc_repository.url
            if repo_url:
                repos_info[repo_name]['url'] = repo_url

            new_repos_to_process.discard(repo_name)  # this repo has been fully processed now

        for repo_name in repos_info.keys():
            # Recursively get all repos being blocked by this repo
            # (transitive closure over the 'repos_blocking' edges)
            recursive_blocks = set([])
            repos_to_check = set([repo_name])
            while repos_to_check:
                next_repo_to_check = repos_to_check.pop()
                blocks = repos_info[next_repo_to_check].get('repos_blocking', set([]))
                new_blocks = blocks - recursive_blocks
                repos_to_check |= new_blocks
                recursive_blocks |= new_blocks
            if recursive_blocks:
                repos_info[repo_name]['recursive_repos_blocking'] = recursive_blocks
        unprocessed_repos = new_repos_to_process

    return repos_info
Example #20
0
def _get_blocked_source_entries_info(config_url, rosdistro_name):
    """Determine which repositories block missing source entries.

    Repositories that were released in the previous distribution but have no
    source entry in *rosdistro_name* are considered missing.  For each missing
    repository the (also missing) repositories providing its package
    dependencies are recorded as blockers, both directly and recursively.

    :param config_url: URL of the buildfarm configuration index
    :param rosdistro_name: name of the distribution to check
    :returns: dict mapping repository name to an info dict with keys
        ``released``, ``version``, ``url``, ``maintainers``,
        ``repos_blocked_by``, ``repos_blocking`` and
        ``recursive_repos_blocking``
    """
    from rosdistro import get_cached_distribution
    from rosdistro import get_index
    from rosdistro.dependency_walker import DependencyWalker

    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)

    print('Getting blocked source entries for', rosdistro_name)

    try:
        prev_rosdistro_name = _prev_rosdistro(index, rosdistro_name)
    except ValueError as e:
        print(e.args[0], file=sys.stderr)
        exit(-1)

    print('Comparing', rosdistro_name, 'with', prev_rosdistro_name)

    dist = get_cached_distribution(index, rosdistro_name)
    prev_dist = get_cached_distribution(index, prev_rosdistro_name)

    # dependencies are resolved against the previous distribution since the
    # missing repos have no entries in the current one
    prev_walker = DependencyWalker(prev_dist)

    prev_released_repos = set(_released_repos(prev_dist))
    source_entry_repos = set(_source_entry_repos(dist))
    missing_repos = prev_released_repos.difference(source_entry_repos)

    # Assume repos will provide the same packages as previous distro
    missing_packages = set(_released_packages(prev_dist, missing_repos))

    repos_info = defaultdict(dict)

    # Give all repos some basic info
    for repo_name in prev_released_repos.union(source_entry_repos).union(
            missing_repos):
        repos_info[repo_name]['url'] = ''
        repos_info[repo_name]['repos_blocking'] = set()
        repos_info[repo_name]['recursive_repos_blocking'] = set()
        repos_info[repo_name]['released'] = False
        repos_info[repo_name]['version'] = 'no'
        repos_info[repo_name]['repos_blocked_by'] = {}
        repos_info[repo_name]['maintainers'] = defaultdict(dict)

    for repo_name in prev_released_repos:
        repos_info[repo_name]['url'] = _repo_url(prev_dist, repo_name)

    # has a source entry? Call that 'released' with a 'version' to reuse _format_repo_table_row
    for repo_name in source_entry_repos:
        repos_info[repo_name]['released'] = True
        repos_info[repo_name]['version'] = 'yes'

    # Determine which repos directly block the missing ones
    for repo_name in missing_repos:
        package_dependencies = set()
        for pkg in _released_packages(prev_dist, (repo_name, )):
            package_dependencies.update(_package_dependencies(
                prev_walker, pkg))

        for dep in package_dependencies:
            if dep in missing_packages:
                blocking_repo = prev_dist.release_packages[dep].repository_name
                if blocking_repo == repo_name:
                    # ignore packages in the same repo
                    continue
                # record the blocking repo's own URL (was previously the
                # blocked repo's URL, mirroring the mapping built in
                # _get_blocked_releases_info)
                repos_info[repo_name]['repos_blocked_by'][blocking_repo] = \
                    _repo_url(prev_dist, blocking_repo)
                repos_info[repo_name]['maintainers'][blocking_repo].update(
                    dict(_maintainers(prev_dist, dep)))

                # Mark blocking relationship in other direction
                repos_info[blocking_repo]['repos_blocking'].add(repo_name)

    # Compute which repos block another recursively
    # (transitive closure over the 'repos_blocking' edges)
    for repo_name in repos_info.keys():
        checked_repos = set()
        repos_to_check = set([repo_name])
        while repos_to_check:
            next_repo = repos_to_check.pop()
            new_repos_blocking = repos_info[next_repo]['repos_blocking']
            repos_info[repo_name]['recursive_repos_blocking'].update(
                new_repos_blocking)
            checked_repos.add(next_repo)
            repos_to_check.update(repos_info[next_repo]
                                  ['repos_blocking'].difference(checked_repos))

    return repos_info
Example #21
0
class Distro(object):
    """Wrapper around a cached rosdistro distribution.

    Sets up the ROS_* environment variables used for conditional dependency
    evaluation and exposes dependency / release lookups for packages.
    """

    def __init__(self, distro_name, python_version=None):
        index = get_index(get_index_url())
        self._distro = get_cached_distribution(index, distro_name)
        self.distro_name = distro_name
        # set up ROS environments for condition evaluation
        distro_info = index.distributions[distro_name]
        if python_version is None:
            python_version = distro_info["python_version"]
        os.environ["ROS_PYTHON_VERSION"] = "{0}".format(python_version)
        os.environ["ROS_DISTRO"] = "{0}".format(distro_name)
        # drop variables from any sourced ROS workspace
        os.environ.pop("ROS_ROOT", None)
        os.environ.pop("ROS_PACKAGE_PATH", None)
        self._walker = DependencyWalker(self._distro,
                                        evaluate_condition_context=os.environ)

        # cache distribution type and python version
        self._distribution_type = distro_info["distribution_type"]
        self._python_version = distro_info["python_version"]
        self.build_packages = set()

        os.environ["ROS_VERSION"] = "1" if self.check_ros1() else "2"

    @property
    def name(self):
        """Name of the wrapped distribution."""
        return self.distro_name

    def add_packages(self, packages):
        """Record the set of packages being built locally."""
        self.build_packages = set(packages)

    def get_depends(self, pkg):
        """Return the set of recursive ROS dependencies of *pkg*."""
        dependency_types = [
            "buildtool",
            "buildtool_export",
            "build",
            "build_export",
            "run",
            "test",
            "exec",
        ]
        return set(self._walker.get_recursive_depends(
            pkg, dependency_types, ros_packages_only=True))

    def get_released_repo(self, pkg_name):
        """Return (url, release_tag) for the released package *pkg_name*."""
        release_pkg = self._distro.release_packages[pkg_name]
        release_repo = self._distro.repositories[
            release_pkg.repository_name].release_repository
        tag = get_release_tag(release_repo, pkg_name)
        return release_repo.url, tag

    def check_package(self, pkg_name):
        """Return True if *pkg_name* is released or built locally."""
        return (pkg_name in self._distro.release_packages
                or pkg_name in self.build_packages)

    def get_version(self, pkg_name):
        """Return the upstream version of *pkg_name* (release inc stripped)."""
        release_pkg = self._distro.release_packages[pkg_name]
        release_repo = self._distro.repositories[
            release_pkg.repository_name].release_repository
        version, _, _ = release_repo.version.partition("-")
        return version

    def get_release_package_xml(self, pkg_name):
        """Return the package.xml string of the released *pkg_name*."""
        return self._distro.get_release_package_xml(pkg_name)

    def check_ros1(self):
        """Return True if this is a ROS 1 distribution."""
        return self._distribution_type == "ros1"

    def get_python_version(self):
        """Return the Python major version used by this distribution."""
        return self._python_version