def _gen_recipe_for_package(distro, pkg_name, pkg, repo, ros_pkg,
                            pkg_rosinstall, tar_dir, md5_cache, sha256_cache,
                            skip_keys):
    """Create a yoctoRecipe for ``pkg_name`` and register its dependencies.

    Fetches the package.xml (with retries), constructs the recipe, then
    records build/buildtool/export/exec/test dependencies on it.  The second
    argument to each ``add_*_depend`` call flags whether the dependency is
    itself a known package of this distro.
    """
    known_pkgs = get_package_names(distro)
    walker = DependencyWalker(distro,
                              evaluate_condition_context=os.environ)
    # Resolve every dependency category up front, in one place.
    dep_categories = ("buildtool", "build", "build_export",
                      "buildtool_export", "exec", "test")
    deps = {cat: walker.get_depends(pkg_name, cat) for cat in dep_categories}
    src_uri = pkg_rosinstall[0]['tar']['uri']

    # parse through package xml (retrying on transient fetch errors)
    err_msg = 'Failed to fetch metadata for package {}'.format(pkg_name)
    pkg_xml = retry_on_exception(ros_pkg.get_package_xml,
                                 distro.name,
                                 retry_msg='Could not get package xml!',
                                 error_msg=err_msg)

    pkg_recipe = yoctoRecipe(
        pkg.repository_name,
        len(ros_pkg.repository.package_names),
        pkg_name,
        pkg_xml,
        distro,
        src_uri,
        tar_dir,
        md5_cache,
        sha256_cache,
        skip_keys,
    )
    # Route each dependency category to the matching recipe method.
    adders = (
        ("build", pkg_recipe.add_build_depend),
        ("buildtool", pkg_recipe.add_buildtool_depend),
        ("build_export", pkg_recipe.add_export_depend),
        ("buildtool_export", pkg_recipe.add_buildtool_export_depend),
        ("exec", pkg_recipe.add_run_depend),
        ("test", pkg_recipe.add_test_depend),
    )
    for cat, add in adders:
        for dep in deps[cat]:
            add(dep, dep in known_pkgs[0])

    return pkg_recipe
def _gen_ebuild_for_package(distro, pkg_name, pkg, repo, ros_pkg,
                            pkg_rosinstall):
    """Generate an Ebuild for ``pkg_name`` from its rosdistro metadata.

    Populates dependencies and keywords first, then parses the package.xml
    for license, description, and homepage.  If the package.xml cannot be
    fetched, a warning is logged and the partially filled Ebuild is
    returned.
    """
    pkg_ebuild = Ebuild()

    pkg_ebuild.distro = distro.name
    pkg_ebuild.src_uri = pkg_rosinstall[0]['tar']['uri']
    pkg_names = get_package_names(distro)
    pkg_dep_walker = DependencyWalker(distro)

    pkg_buildtool_deps = pkg_dep_walker.get_depends(pkg_name, "buildtool")
    pkg_build_deps = pkg_dep_walker.get_depends(pkg_name, "build")
    pkg_run_deps = pkg_dep_walker.get_depends(pkg_name, "run")

    pkg_keywords = ['x86', 'amd64', 'arm', 'arm64']

    # add run dependencies
    for rdep in pkg_run_deps:
        pkg_ebuild.add_run_depend(rdep, rdep in pkg_names[0])

    # add build dependencies
    for bdep in pkg_build_deps:
        pkg_ebuild.add_build_depend(bdep, bdep in pkg_names[0])

    # add build tool dependencies (recorded as build depends by Ebuild)
    for tdep in pkg_buildtool_deps:
        pkg_ebuild.add_build_depend(tdep, tdep in pkg_names[0])

    # add keywords
    for key in pkg_keywords:
        pkg_ebuild.add_keyword(key)

    # parse through package xml
    try:
        pkg_xml = ros_pkg.get_package_xml(distro.name)
    except Exception:
        # Fixed truncated message: previously read "fetch metadata for
        # package {}" with no verb.
        warn("Failed to fetch metadata for package {}".format(pkg_name))
        return pkg_ebuild
    pkg_fields = xmltodict.parse(pkg_xml)

    pkg_ebuild.upstream_license = pkg_fields['package']['license']
    pkg_ebuild.description = pkg_fields['package']['description']
    if isinstance(pkg_ebuild.description, str):
        # backticks would break the generated ebuild's shell quoting
        pkg_ebuild.description = pkg_ebuild.description.replace('`', "")
    # keep the description within one line of the ebuild header
    if len(pkg_ebuild.description) > 80:
        pkg_ebuild.description = pkg_ebuild.description[:80]
    try:
        # 'url' may be absent, a plain string, or a dict with @type/#text
        # depending on how xmltodict collapsed the element.
        if 'url' not in pkg_fields['package']:
            warn("no website field for package {}".format(pkg_name))
        elif isinstance(pkg_fields['package']['url'], str):
            pkg_ebuild.homepage = pkg_fields['package']['url']
        elif '@type' in pkg_fields['package']['url']:
            if pkg_fields['package']['url']['@type'] == 'website':
                if '#text' in pkg_fields['package']['url']:
                    pkg_ebuild.homepage = pkg_fields['package']['url']['#text']
        else:
            warn("failed to parse website for package {}".format(pkg_name))
    except TypeError as e:
        # e.g. 'url' parsed as a list of dicts — report and keep going
        warn("failed to parse website package {}: {}".format(pkg_name, e))
    return pkg_ebuild
def regenerate_pkg(overlay, pkg, distro, preserve_existing=False):
    """(Re)generate the Gentoo ebuild and metadata for ``pkg``.

    Returns a ``(installer, unresolved_deps)`` tuple.  ``installer`` is
    ``None`` when generation was skipped or failed on unresolved
    dependencies; ``unresolved_deps`` lists the missing dependencies in
    that case.

    Raises RuntimeError for unknown packages and re-raises generation or
    I/O errors after logging them.
    """
    version = get_pkg_version(distro, pkg)
    ebuild_name =\
        '/ros-{0}/{1}/{1}-{2}.ebuild'.format(distro.name, pkg, version)
    ebuild_name = overlay.repo.repo_dir + ebuild_name
    patch_path = '/ros-{}/{}/files'.format(distro.name, pkg)
    patch_path = overlay.repo.repo_dir + patch_path
    has_patches = os.path.exists(patch_path)
    pkg_names = get_package_names(distro)[0]

    if pkg not in pkg_names:
        raise RuntimeError("Unknown package '%s'" % (pkg))
    # otherwise, remove a (potentially) existing ebuild.
    existing = glob.glob('{0}/ros-{1}/{2}/*.ebuild'.format(
        overlay.repo.repo_dir, distro.name, pkg))
    if preserve_existing and os.path.isfile(ebuild_name):
        ok("ebuild for package '%s' up to date, skipping..." % pkg)
        return None, []
    elif existing:
        overlay.repo.remove_file(existing[0])
        manifest_file = '{0}/ros-{1}/{2}/Manifest'.format(
            overlay.repo.repo_dir, distro.name, pkg)
        overlay.repo.remove_file(manifest_file)
    try:
        current = gentoo_installer(distro, pkg, has_patches)
        current.ebuild.name = pkg
    except Exception:
        err('Failed to generate installer for package {}!'.format(pkg))
        raise
    try:
        ebuild_text = current.ebuild_text()
        metadata_text = current.metadata_text()
    except UnresolvedDependency:
        dep_err = 'Failed to resolve required dependencies for'
        err("{0} package {1}!".format(dep_err, pkg))
        unresolved = current.ebuild.get_unresolved()
        for dep in unresolved:
            err(" unresolved: \"{}\"".format(dep))
        return None, current.ebuild.get_unresolved()
    except KeyError as ke:
        err("Failed to parse data for package {}!".format(pkg))
        raise ke
    make_dir("{}/ros-{}/{}".format(overlay.repo.repo_dir, distro.name, pkg))
    success_msg = 'Successfully generated installer for package'
    ok('{0} \'{1}\'.'.format(success_msg, pkg))

    try:
        # Fixed resource leak: the files were previously opened and written
        # without ever being closed.  Context managers guarantee the data is
        # flushed to disk even if a write fails.
        ebuild_path = '{0}/ros-{1}/{2}/{2}-{3}.ebuild'.format(
            overlay.repo.repo_dir, distro.name, pkg, version)
        metadata_path = '{0}/ros-{1}/{2}/metadata.xml'.format(
            overlay.repo.repo_dir, distro.name, pkg)
        with open(ebuild_path, "w") as ebuild_file:
            ebuild_file.write(ebuild_text)
        with open(metadata_path, "w") as metadata_file:
            metadata_file.write(metadata_text)
    except Exception:
        err("Failed to write ebuild/metadata to disk!")
        raise
    return current, []
def regenerate_installer(overlay, pkg, distro, preserve_existing, tar_dir):
    """(Re)generate the OpenEmbedded recipe for ``pkg``.

    Returns a ``(installer, unresolved_deps)`` tuple, mirroring
    ``regenerate_pkg``: ``None`` plus the unresolved list on dependency
    failure, ``None`` plus ``[]`` when skipped or the package.xml is
    missing.
    """
    make_dir("{0}/recipes-ros-{1}".format(overlay.repo.repo_dir, distro.name))
    version = get_pkg_version(distro, pkg)
    pkg_names = get_package_names(distro)[0]

    if pkg not in pkg_names:
        raise RuntimeError("Unknown package '%s'" % pkg)

    # check for an existing recipe
    existing = glob.glob('{0}/recipes-ros-{1}/{2}/*.bb'.format(
        overlay.repo.repo_dir, distro.name, pkg))

    if preserve_existing and existing:
        # Fixed user-facing typo: "skpping" -> "skipping"
        ok("recipe for package '%s' up to date, skipping..." % pkg)
        return None, []
    elif existing:
        overlay.repo.remove_file(existing[0])
    try:
        current = oe_installer(distro, pkg, tar_dir)
        # OE component names use dashes instead of underscores
        current.recipe.name = pkg.replace('_', '-')
    except Exception:
        err('Failed to generate installer for package {}!'.format(pkg))
        raise
    try:
        recipe_text = current.recipe_text()
    except UnresolvedDependency:
        dep_err = 'Failed to resolve required dependencies for'
        err("{0} package {1}!".format(dep_err, pkg))
        unresolved = current.recipe.get_unresolved()
        for dep in unresolved:
            err(" unresolved: \"{}\"".format(dep))
        return None, current.recipe.get_unresolved()
    except NoPkgXml:
        err("Could not fetch pkg!")
        return None, []
    except KeyError as ke:
        err("Failed to parse data for package {}!".format(pkg))
        raise ke
    make_dir("{0}/recipes-ros-{1}/{2}".format(overlay.repo.repo_dir,
                                              distro.name,
                                              pkg.replace('_', '-')))
    success_msg = 'Successfully generated installer for package'
    ok('{0} \'{1}\'.'.format(success_msg, pkg))
    recipe_name = '{0}/recipes-ros-{1}/{2}/{2}_{3}.bb'.format(
        overlay.repo.repo_dir, distro.name, pkg.replace('_', '-'), version)
    try:
        with open('{0}'.format(recipe_name), "w") as recipe_file:
            recipe_file.write(recipe_text)
    except Exception:
        err("Failed to write recipe to disk!")
        raise
    return current, []
def _expand_keywords(distro_name, keywords):
    """Expand special keyword tokens into the set of package/stack names."""
    expanded = set()
    if ARG_ALL_PACKAGES in keywords:
        wet = get_wet_distro(distro_name)
        released, _ = get_package_names(wet)
        expanded |= set(released)
        # groovy is the last distro that still carries dry stacks
        if distro_name == 'groovy':
            dry = get_dry_distro(distro_name)
            stacks, _ = get_stack_names(dry)
            expanded |= set(stacks)
    if ARG_CURRENT_ENVIRONMENT in keywords:
        expanded |= set(_get_packages_in_environment())
    return expanded
def _expand_keywords(distro_name, keywords):
    """Translate keyword arguments into concrete package/stack names."""
    names = set()
    if ARG_ALL_PACKAGES in keywords:
        names.update(get_package_names(get_wet_distro(distro_name))[0])
        if distro_name == 'groovy':
            # only groovy still has a dry (rosbuild) distro to consult
            names.update(get_stack_names(get_dry_distro(distro_name))[0])
    if ARG_CURRENT_ENVIRONMENT in keywords:
        names.update(_get_packages_in_environment())
    return names
# Example #7
def _gen_ebuild_for_package(
    distro, pkg_name, pkg, repo, ros_pkg, pkg_rosinstall
):
    """Create and populate an Ebuild for ``pkg_name``.

    Dependencies and keywords are always added; license/description/
    homepage/build_type come from the package.xml when it can be fetched.
    """
    ebuild = Ebuild()

    ebuild.distro = distro.name
    ebuild.src_uri = pkg_rosinstall[0]['tar']['uri']
    known = get_package_names(distro)
    walker = DependencyWalker(distro)

    # resolve every dependency category before touching the ebuild
    deps = {
        cat: walker.get_depends(pkg_name, cat)
        for cat in ("buildtool", "build", "run", "test")
    }

    # add run dependencies
    for dep in deps["run"]:
        ebuild.add_run_depend(dep, dep in known[0])

    # add build dependencies (buildtool deps are build deps for an ebuild)
    for dep in deps["build"]:
        ebuild.add_build_depend(dep, dep in known[0])
    for dep in deps["buildtool"]:
        ebuild.add_build_depend(dep, dep in known[0])

    # add test dependencies
    for dep in deps["test"]:
        ebuild.add_test_depend(dep, dep in known[0])

    # add keywords
    for keyword in ('x86', 'amd64', 'arm', 'arm64'):
        ebuild.add_keyword(keyword)

    # parse through package xml
    try:
        pkg_xml = ros_pkg.get_package_xml(distro.name)
    except Exception:
        warn("fetch metadata for package {}".format(pkg_name))
        return ebuild
    metadata = PackageMetadata(pkg_xml)
    ebuild.upstream_license = metadata.upstream_license
    ebuild.description = metadata.description
    ebuild.homepage = metadata.homepage
    ebuild.build_type = metadata.build_type
    return ebuild
def generate_installers(
    distro,  # ros distro
    overlay,  # repo instance
    gen_pkg_func,  # function to call for generating
    preserve_existing=True,  # don't regenerate if installer exists
    *args,  # any additional args for gen_pkg_func
    **kwargs  # any additional keyword arguments
):
    """Run ``gen_pkg_func`` for every package of the distro.

    Returns ``(installers, borkd_pkgs, changes)``: the successfully
    generated package names, a dict of packages with unresolved
    dependencies, and human-readable change descriptions.
    """
    distro_name = distro.name
    pkg_names = get_package_names(distro)
    total = float(len(pkg_names[0]))
    borkd_pkgs = dict()
    changes = []
    installers = []
    bad_installers = []
    succeeded = 0
    failed = 0
    # packages listed here are skipped entirely
    skip_list = kwargs.get('skip_keys', ())

    info("Generating installers for distro '%s'" % distro_name)
    for idx, pkg in enumerate(sorted(pkg_names[0])):
        if pkg in skip_list:
            warn("Package '%s' is in skip-keys list, skipping..." % pkg)
            continue
        version = get_pkg_version(distro, pkg)
        percent = '%.1f' % (100 * (float(idx) / total))
        try:
            current, current_info = gen_pkg_func(overlay, pkg, distro,
                                                 preserve_existing, *args)
            if current_info and not current:
                # we are missing dependencies
                failed_msg = "{0}%: Failed to generate".format(percent)
                failed_msg += " installer for package '%s'!" % pkg
                err(failed_msg)
                borkd_pkgs[pkg] = current_info
                failed += 1
                continue
            if not current and preserve_existing:
                # the existing installer was kept untouched
                succeeded += 1
                continue
            success_msg = 'Successfully generated installer for package'
            ok('{0}%: {1} \'{2}\'.'.format(percent, success_msg, pkg))
            succeeded += 1
            if current_info:
                changes.append('*{0} {1} --> {2}*'.format(
                    pkg, current_info, version))
            else:
                changes.append('*{0} {1}*'.format(pkg, version))
            installers.append(pkg)
        except UnknownBuildType as ub:
            err("{0}%: Unknown Build type '{1}' for package '{2}'".format(
                percent, str(ub), pkg))
            failed += 1
        except KeyError:
            failed_msg = 'Failed to generate installer'
            err("{0}%: {1} for package {2}!".format(percent, failed_msg, pkg))
            bad_installers.append(pkg)
            failed += 1
    results = 'Generated {0} / {1}'.format(succeeded, failed + succeeded)
    results += ' for distro {0}'.format(distro_name)
    info("------ {0} ------\n".format(results))

    if len(borkd_pkgs) > 0:
        warn("Unresolved:")
        for broken in borkd_pkgs.keys():
            warn("{}:".format(broken))
            warn("  {}".format(borkd_pkgs[broken]))

    return installers, borkd_pkgs, changes
 def analyse(self, root_pkg):
     """Pretty-print the unreleased package names of the distro."""
     # released names are discarded; only the unreleased set is reported
     _, unreleased = get_package_names(self._distro)
     pprint(unreleased)
def generate_rosinstall(distro_name, names,
    deps=False, deps_up_to=None, deps_depth=None, deps_only=False,
    wet_only=False, dry_only=False, catkin_only=False, non_catkin_only=False,
    excludes=None,
    flat=False,
    tar=False):
    """Build rosinstall entry data for the requested packages/stacks.

    Pipeline: classify the requested names, ``deps_up_to`` names, and
    ``excludes`` into wet packages vs. dry stacks (expanding special
    keywords like the all-packages / current-environment markers); filter
    by wet/dry flags; optionally extend with recursive dependencies
    (bounded by ``deps_depth``), intersect with dependents of
    ``deps_up_to``, drop the input names when ``deps_only`` is set, and
    filter by catkin build type; finally emit rosinstall entries for what
    remains.

    Raises RuntimeError when no packages/stacks survive the unreleased or
    exclusion filtering.  ``flat`` and ``tar`` are passed through to the
    wet rosinstall generator (presumably controlling checkout layout and
    tarball vs. VCS entries — confirm against generate_wet_rosinstall).
    """
    # classify package/stack names
    names, keywords = _split_special_keywords(names)
    names, unknown_names = _classify_names(distro_name, names)
    if unknown_names:
        logger.warn('The following not released packages/stacks will be ignored: %s' % (', '.join(sorted(unknown_names))))
    if keywords:
        expanded_names, unknown_names = _classify_names(distro_name, _expand_keywords(distro_name, keywords))
        if unknown_names:
            logger.warn('The following not released packages/stacks from the %s will be ignored: %s' % (ROS_PACKAGE_PATH, ', '.join(sorted(unknown_names))))
        names.update(expanded_names)
    if not names.wet_package_names and not names.dry_stack_names:
        raise RuntimeError('No packages/stacks left after ignoring not released')
    logger.debug('Packages/stacks: %s' % ', '.join(sorted(names.wet_package_names | names.dry_stack_names)))

    # classify deps-up-to
    deps_up_to_names, keywords = _split_special_keywords(deps_up_to or [])
    deps_up_to_names, unknown_names = _classify_names(distro_name, deps_up_to_names)
    if unknown_names:
        logger.warn("The following not released '--deps-up-to' packages/stacks will be ignored: %s" % (', '.join(sorted(unknown_names))))
    if keywords:
        expanded_names, unknown_names = _classify_names(distro_name, _expand_keywords(distro_name, keywords))
        if unknown_names:
            logger.warn("The following not released '--deps-up-to' packages/stacks from the %s will be ignored: %s" % (ROS_PACKAGE_PATH, ', '.join(sorted(unknown_names))))
        deps_up_to_names.update(expanded_names)
    if deps_up_to:
        logger.debug('Dependencies up to: %s' % ', '.join(sorted(deps_up_to_names.wet_package_names | deps_up_to_names.dry_stack_names)))

    # classify excludes
    exclude_names, keywords = _split_special_keywords(excludes or [])
    exclude_names, unknown_names = _classify_names(distro_name, exclude_names)
    if unknown_names:
        logger.warn("The following not released '--exclude' packages/stacks will be ignored: %s" % (', '.join(sorted(unknown_names))))
    if keywords:
        expanded_names, unknown_names = _classify_names(distro_name, _expand_keywords(distro_name, keywords))
        exclude_names.update(expanded_names)
    if excludes:
        logger.debug('Excluded packages/stacks: %s' % ', '.join(sorted(exclude_names.wet_package_names | exclude_names.dry_stack_names)))

    # work on a copy so the original 'names' sets stay intact for the
    # intersection/deps_only steps below
    result = copy.deepcopy(names)
    # clear wet packages if not requested
    if dry_only:
        result.wet_package_names.clear()
    # clear dry packages if not requested and no dependencies
    if wet_only and not deps and not deps_up_to:
        result.dry_stack_names.clear()

    # remove excluded names from the list of wet and dry names
    result.wet_package_names -= exclude_names.wet_package_names
    result.dry_stack_names -= exclude_names.dry_stack_names
    if not result.wet_package_names and not result.dry_stack_names:
        raise RuntimeError('No packages/stacks left after applying the exclusions')

    if result.wet_package_names:
        logger.debug('Wet packages: %s' % ', '.join(sorted(result.wet_package_names)))
    if result.dry_stack_names:
        logger.debug('Dry stacks: %s' % ', '.join(sorted(result.dry_stack_names)))

    # extend the names with recursive dependencies
    if deps or deps_up_to:
        # add dry dependencies
        if result.dry_stack_names:
            dry_distro = get_dry_distro(distro_name)
            _, unreleased_stack_names = get_stack_names(dry_distro)
            excludes = exclude_names.dry_stack_names | deps_up_to_names.dry_stack_names | set(unreleased_stack_names)
            dry_dependencies, wet_dependencies = get_recursive_dependencies_of_dry(dry_distro, result.dry_stack_names, excludes=excludes)
            logger.debug('Dry stacks including dependencies: %s' % ', '.join(sorted(dry_dependencies)))
            result.dry_stack_names |= dry_dependencies

            if not dry_only:
                # add wet dependencies of dry stuff
                logger.debug('Wet dependencies of dry stacks: %s' % ', '.join(sorted(wet_dependencies)))
                for depend in wet_dependencies:
                    if depend in exclude_names.wet_package_names or depend in deps_up_to_names.wet_package_names:
                        continue
                    wet_distro = get_wet_distro(distro_name)
                    assert depend in wet_distro.release_packages, "Package '%s' does not have a version" % depend
                    result.wet_package_names.add(depend)
        # add wet dependencies
        if result.wet_package_names:
            wet_distro = get_wet_distro(distro_name)
            _, unreleased_package_names = get_package_names(wet_distro)
            excludes = exclude_names.wet_package_names | deps_up_to_names.wet_package_names | set(unreleased_package_names)
            result.wet_package_names |= get_recursive_dependencies_of_wet(wet_distro, result.wet_package_names, excludes=excludes, limit_depth=deps_depth)
            logger.debug('Wet packages including dependencies: %s' % ', '.join(sorted(result.wet_package_names)))

    # intersect result with recursive dependencies on
    if deps_up_to:
        # intersect with wet dependencies on
        if deps_up_to_names.wet_package_names:
            wet_distro = get_wet_distro(distro_name)
            # wet depends on do not include the names since they are excluded to stop recursion asap
            wet_package_names = get_recursive_dependencies_on_of_wet(wet_distro, deps_up_to_names.wet_package_names, excludes=names.wet_package_names, limit=result.wet_package_names)
            # keep all names which are already in the result set
            wet_package_names |= result.wet_package_names & names.wet_package_names
            result.wet_package_names = wet_package_names
        else:
            result.wet_package_names.clear()
        logger.debug('Wet packages after intersection: %s' % ', '.join(sorted(result.wet_package_names)))

        # intersect with dry dependencies on
        dry_dependency_names = result.wet_package_names | deps_up_to_names.dry_stack_names
        if dry_dependency_names and not wet_only:
            dry_distro = get_dry_distro(distro_name)
            # dry depends on do not include the names since they are excluded to stop recursion asap
            dry_stack_names = get_recursive_dependencies_on_of_dry(dry_distro, dry_dependency_names, excludes=names.dry_stack_names, limit=result.dry_stack_names)
            # keep all names which are already in the result set
            dry_stack_names |= result.dry_stack_names & names.dry_stack_names
            result.dry_stack_names = dry_stack_names
        else:
            result.dry_stack_names.clear()
        logger.debug('Dry stacks after intersection: %s' % ', '.join(sorted(result.dry_stack_names)))

    # exclude passed in names
    if deps_only:
        result.wet_package_names -= set(names.wet_package_names)
        result.dry_stack_names -= set(names.dry_stack_names)

    # exclude wet packages based on build type
    if catkin_only or non_catkin_only:
        wet_distro = get_wet_distro(distro_name)
        for pkg_name in list(result.wet_package_names):
            pkg_xml = wet_distro.get_release_package_xml(pkg_name)
            try:
                pkg = parse_package_string(pkg_xml)
            except InvalidPackage as e:
                logger.warn("The package '%s' has an invalid manifest and will be ignored: %s" % (pkg_name, e))
                result.wet_package_names.remove(pkg_name)
                continue
            # a package without an explicit build_type export defaults to catkin
            build_type = ([e.content for e in pkg.exports if e.tagname == 'build_type'][0]) if 'build_type' in [e.tagname for e in pkg.exports] else 'catkin'
            # XOR keeps catkin packages for catkin_only and drops them for non_catkin_only
            if catkin_only ^ (build_type == 'catkin'):
                result.wet_package_names.remove(pkg_name)

    # get wet and/or dry rosinstall data
    rosinstall_data = []
    if not dry_only and result.wet_package_names:
        logger.debug('Generate rosinstall entries for wet packages: %s' % ', '.join(sorted(result.wet_package_names)))
        wet_distro = get_wet_distro(distro_name)
        wet_rosinstall_data = generate_wet_rosinstall(wet_distro, result.wet_package_names, flat=flat, tar=tar)
        rosinstall_data += wet_rosinstall_data
    if not wet_only and result.dry_stack_names:
        logger.debug('Generate rosinstall entries for dry stacks: %s' % ', '.join(sorted(result.dry_stack_names)))
        dry_distro = get_dry_distro(distro_name)
        dry_rosinstall_data = generate_dry_rosinstall(dry_distro, result.dry_stack_names)
        rosinstall_data += dry_rosinstall_data
    return rosinstall_data
def test_get_package_names():
    """get_package_names splits packages into released and unreleased."""
    dist = _get_test_dist()
    expected = {'archie', 'betty', 'veronica'}
    # all packages start out released (index 0)
    assert set(get_package_names(dist)[0]) == expected

    # dropping the release version moves them to the unreleased set (index 1)
    dist.repositories['riverdale'].release_repository.version = None
    assert set(get_package_names(dist)[1]) == expected
# Example #12
def main():
    """Entry point: generate OpenEmbedded recipes for a ROS distro.

    Parses CLI arguments, then either (a) files a previously saved pull
    request (``--pr-only``), (b) regenerates only the packages named via
    ``--only``, or (c) regenerates recipes for the whole distro plus the
    distro-wide include/rosdep files, committing the changes and filing or
    saving a pull request depending on ``--dry-run``.
    """
    overlay = None
    parser = get_parser('Generate OpenEmbedded recipes for ROS packages',
                        exclude_all=True,
                        require_rosdistro=True,
                        require_dryrun=True)
    parser.add_argument('--tar-archive-dir',
                        help='location to store archived packages',
                        type=str)
    args = parser.parse_args(sys.argv[1:])
    pr_comment = args.pr_comment
    skip_keys = set(args.skip_keys) if args.skip_keys else set()
    if args.pr_only:
        # PR-only mode: push a previously saved PR and exit.
        if args.dry_run:
            parser.error('Invalid args! cannot dry-run and file PR')
        if not args.output_repository_path:
            parser.error('Invalid args! no repository specified')
        try:
            prev_overlay = RepoInstance(args.output_repository_path, False)
            msg, title = load_pr()
            prev_overlay.pull_request(msg, title=title)
            clean_up()
            sys.exit(0)
        except Exception as e:
            err('Failed to file PR!')
            err('reason: {0}'.format(e))
            sys.exit(1)
    warn('"{0}" distro detected...'.format(args.ros_distro))
    """
    No longer supporting generation for multiple targets, but left the code in
    place to handle them in case it might be needed again in the future.
    """
    selected_targets = [args.ros_distro]
    preserve_existing = args.only
    now = os.getenv('SUPERFLORE_GENERATION_DATETIME',
                    get_utcnow_timestamp_str())
    repo_org = 'ros'
    repo_name = 'meta-ros'
    if args.upstream_repo:
        repo_org, repo_name = url_to_repo_org(args.upstream_repo)
    # open cached tar file if it exists
    with TempfileManager(args.output_repository_path) as _repo:
        if not args.output_repository_path:
            # give our group write permissions to the temp dir
            os.chmod(_repo, 17407)
        # clone if args.output_repository_path is None
        overlay = RosMeta(
            _repo,
            not args.output_repository_path,
            branch=(('superflore/{}'.format(now))
                    if not args.no_branch else None),
            org=repo_org,
            repo=repo_name,
            from_branch=args.upstream_branch,
        )
        if not args.only:
            pr_comment = pr_comment or (
                'Recipes generated by **superflore** for all packages in ROS '
                'distribution {}.\n'.format(selected_targets[0]))
        else:
            pr_comment = pr_comment or (
                'Recipes generated by **superflore** for package(s) {} in ROS '
                'distribution {}.\n'.format(args.only, args.ros_distro))
        # generate installers
        total_installers = dict()
        total_changes = dict()
        if args.tar_archive_dir:
            srcrev_filename = '%s/srcrev_cache.pickle' % args.tar_archive_dir
        else:
            srcrev_filename = None
        with CacheManager(srcrev_filename) as srcrev_cache:
            if args.only:
                # --only mode: regenerate just the named packages, commit,
                # and file/save a PR before exiting.
                distro = get_distro(args.ros_distro)
                for pkg in args.only:
                    if pkg in skip_keys:
                        warn("Package '%s' is in skip-keys list, skipping..." %
                             pkg)
                        continue
                    info("Regenerating package '%s'..." % pkg)
                    try:
                        regenerate_pkg(
                            overlay,
                            pkg,
                            distro,
                            False,  # preserve_existing
                            srcrev_cache,
                            skip_keys=skip_keys,
                        )
                    except KeyError:
                        err("No package to satisfy key '%s' available "
                            "packages in selected distro: %s" %
                            (pkg, get_package_names(distro)))
                        sys.exit(1)
                # Commit changes and file pull request
                title =\
                    '{{{0}}} Selected recipes generated from '\
                    'files/{0}/generated/cache.yaml '\
                    'as of {1}\n'.format(
                            args.ros_distro,
                            now)
                regen_dict = dict()
                regen_dict[args.ros_distro] = args.only
                delta = "Regenerated: '%s'\n" % args.only
                overlay.add_generated_files(args.ros_distro)
                commit_msg = '\n'.join([
                    get_pr_text(
                        title + '\n' +
                        pr_comment.replace('**superflore**', 'superflore'),
                        markup=''), delta
                ])
                overlay.commit_changes(args.ros_distro, commit_msg)
                if args.dry_run:
                    save_pr(overlay, args.only, '', pr_comment, title=title)
                    sys.exit(0)
                file_pr(overlay,
                        delta,
                        '',
                        pr_comment,
                        distro=args.ros_distro,
                        title=title)
                ok('Successfully synchronized repositories!')
                sys.exit(0)

            # Full-distro mode: wipe and regenerate everything.
            overlay.clean_ros_recipe_dirs(args.ros_distro)
            for adistro in selected_targets:
                yoctoRecipe.reset()
                distro = get_distro(adistro)

                distro_installers, _, distro_changes =\
                    generate_installers(
                        distro,
                        overlay,
                        regenerate_pkg,
                        preserve_existing,
                        srcrev_cache,
                        skip_keys,
                        skip_keys=skip_keys,
                        is_oe=True,
                    )
                total_changes[adistro] = distro_changes
                total_installers[adistro] = distro_installers
                # Emit the distro-wide include/support files alongside the
                # per-package recipes.
                yoctoRecipe.generate_ros_distro_inc(
                    _repo, args.ros_distro,
                    overlay.get_file_revision_logs(
                        'meta-ros{0}-{1}/files/{1}/generated/cache.yaml'.
                        format(yoctoRecipe._get_ros_version(args.ros_distro),
                               args.ros_distro)), distro.release_platforms,
                    skip_keys)
                yoctoRecipe.generate_superflore_datetime_inc(
                    _repo, args.ros_distro, now)
                yoctoRecipe.generate_rosdep_resolve(_repo, args.ros_distro)
                yoctoRecipe.generate_newer_platform_components(
                    _repo, args.ros_distro)
                overlay.add_generated_files(args.ros_distro)

        num_changes = 0
        for distro_name in total_changes:
            num_changes += len(total_changes[distro_name])

        if num_changes == 0:
            info('ROS distro is up to date.')
            summary = overlay.get_change_summary(args.ros_distro)
            if len(summary) == 0:
                info('Exiting...')
                clean_up()
                sys.exit(0)
            else:
                info('But there are some changes in other regenerated files:'
                     '%s' % summary)

        # remove duplicates
        delta = gen_delta_msg(total_changes, markup='')
        # Commit changes and file pull request
        title = '{{{0}}} Sync to files/{0}/generated/'\
            'cache.yaml as of {1}\n'.format(
                args.ros_distro,
                now)
        commit_msg = '\n'.join([
            get_pr_text(title + '\n' +
                        pr_comment.replace('**superflore**', 'superflore'),
                        markup=''), delta
        ])
        overlay.commit_changes(args.ros_distro, commit_msg)
        delta = gen_delta_msg(total_changes)
        if args.dry_run:
            info('Running in dry mode, not filing PR')
            save_pr(
                overlay,
                delta,
                '',
                pr_comment,
                title=title,
            )
            sys.exit(0)
        file_pr(overlay, delta, '', comment=pr_comment, title=title)
        clean_up()
        ok('Successfully synchronized repositories!')
# Example #13
def regenerate_pkg(overlay, pkg, distro, preserve_existing, tar_dir, md5_cache,
                   sha256_cache, skip_keys):
    """Generate the OpenEmbedded recipe for a single ROS package.

    On success the generated recipe is written into the overlay under
    ``generated-recipes-<distro>/<component>/`` and ``(installer, [])`` is
    returned.  When generation is skipped or fails, the package name is
    recorded in ``yoctoRecipe.not_generated_recipes`` and ``(None, [])``
    is returned instead.

    :param overlay: overlay repository wrapper (provides ``repo.repo_dir``
        and ``repo.remove_file``)
    :param pkg: ROS package name to generate a recipe for
    :param distro: distro object the package belongs to
    :param preserve_existing: if True, keep an already-generated recipe
    :param tar_dir: directory used for source tarballs
    :param md5_cache: cache of md5 checksums
    :param sha256_cache: cache of sha256 checksums
    :param skip_keys: rosdep keys to skip
    :raises RuntimeError: if *pkg* is not a known package in *distro*
    :raises KeyError: if the package version cannot be looked up
    """
    known_pkgs = get_package_names(distro)[0]
    if pkg not in known_pkgs:
        yoctoRecipe.not_generated_recipes.add(pkg)
        raise RuntimeError("Unknown package '%s'" % pkg)
    try:
        version = get_pkg_version(distro, pkg, is_oe=True)
    except KeyError:
        yoctoRecipe.not_generated_recipes.add(pkg)
        raise
    base_dir = overlay.repo.repo_dir
    component = yoctoRecipe.convert_to_oe_name(
        distro.release_packages[pkg].repository_name)
    oe_name = yoctoRecipe.convert_to_oe_name(pkg)
    # Look for any previously generated recipe for this package.
    matches = glob.glob('{0}/generated-recipes-{1}/{2}/{3}*.bb'.format(
        base_dir, distro.name, component, oe_name))
    if matches:
        if preserve_existing:
            ok("recipe for package '%s' up to date, skipping..." % pkg)
            yoctoRecipe.not_generated_recipes.add(pkg)
            return None, []
        overlay.repo.remove_file(matches[0], True)
    try:
        installer = oe_installer(distro, pkg, tar_dir, md5_cache,
                                 sha256_cache, skip_keys)
    except InvalidPackage as e:
        err('Invalid package: ' + str(e))
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, []
    except Exception as e:
        err('Failed generating installer for {}! {}'.format(pkg, str(e)))
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, []
    try:
        recipe_text = installer.recipe_text()
    except NoPkgXml as nopkg:
        err("Could not fetch pkg! {}".format(str(nopkg)))
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, []
    except KeyError as ke:
        err("Failed to parse data for package {}! {}".format(pkg, str(ke)))
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, []
    make_dir("{0}/generated-recipes-{1}/{2}".format(base_dir, distro.name,
                                                    component))
    ok("Successfully generated installer for package '{0}'.".format(pkg))
    out_path = '{0}/generated-recipes-{1}/{2}/{3}_{4}.bb'.format(
        base_dir, distro.name, component, oe_name, version)
    try:
        with open(out_path, "w") as out_file:
            ok('Writing recipe {0}'.format(out_path))
            out_file.write(recipe_text)
            yoctoRecipe.generated_components.add(component)
            yoctoRecipe.generated_recipes[oe_name] = (version, component)
    except Exception:
        err("Failed to write recipe to disk!")
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, []
    return installer, []
def generate_rosinstall(distro_name, names,
    from_paths=None, repo_names=None,
    deps=False, deps_up_to=None, deps_depth=None, deps_only=False,
    wet_only=False, dry_only=False, catkin_only=False, non_catkin_only=False,
    excludes=None, exclude_paths=None,
    flat=False,
    tar=False,
    upstream_version_tag=False, upstream_source_version=False):
    """Build rosinstall entries for the requested packages/stacks.

    The requested ``names`` (plus packages found under ``from_paths`` and
    packages belonging to ``repo_names``) are classified into released
    "wet" packages and legacy "dry" stacks, optionally extended with
    recursive dependencies (``deps`` / ``deps_up_to`` / ``deps_depth``),
    filtered by the exclude/only flags, and finally converted into a list
    of rosinstall entry dicts which is returned.

    :param distro_name: name of the ROS distribution to resolve against
    :param names: package/stack names; may contain special keywords which
        are expanded via ``_expand_keywords``
    :param upstream_version_tag: generate entries for upstream release tags
        of whole repositories instead of per-package release repositories
    :param upstream_source_version: generate entries for upstream
        development branches (repository source information)
    :returns: list of rosinstall entry dicts
    :raises RuntimeError: if no packages/stacks remain after filtering, or
        if ``repo_names`` contains an unsupported keyword
    """

    # classify package/stack names
    names, pkg_keywords = _split_special_keywords(names)

    # find packages recursively in include paths
    if from_paths:
        include_names_from_path = set([])
        [include_names_from_path.update(_get_package_names(from_path)) for from_path in from_paths]
        logger.debug("The following wet packages found in '--from-path' will be considered: %s" % ', '.join(sorted(include_names_from_path)))
        names.update(include_names_from_path)

    # Allow special keywords in repos
    repo_names, repo_keywords = _split_special_keywords(repo_names or [])
    if set(repo_keywords).difference(set([ARG_ALL_PACKAGES])):
        raise RuntimeError('The only keyword supported by repos is %r' % (ARG_ALL_PACKAGES))

    if ARG_ALL_PACKAGES in repo_keywords:
        wet_distro = get_wet_distro(distro_name)
        repo_names = wet_distro.repositories.keys()

    # expand repository names into package names
    repo_names, unknown_repo_names = _classify_repo_names(distro_name, repo_names)
    if unknown_repo_names:
        logger.warn('The following unknown repositories will be ignored: %s' % (', '.join(sorted(unknown_repo_names))))
    wet_package_names, unreleased_repo_names = _get_packages_for_repos(distro_name, repo_names, source=upstream_source_version)
    names.update(wet_package_names)
    if unreleased_repo_names and not upstream_version_tag and not upstream_source_version:
        logger.warn('The following unreleased repositories will be ignored: %s' % ', '.join(sorted(unreleased_repo_names)))
    if unreleased_repo_names and (deps or deps_up_to) and (upstream_version_tag or upstream_source_version):
        logger.warn('The dependencies of the following unreleased repositories are unknown and will be ignored: %s' % ', '.join(sorted(unreleased_repo_names)))
    # repositories only contribute rosinstall entries of their own when an
    # upstream tag/source checkout was requested (see the wet section below)
    has_repos = ((repo_names - unreleased_repo_names) and (upstream_version_tag or upstream_source_version)) or (unreleased_repo_names and upstream_source_version)

    names, unknown_names = _classify_names(distro_name, names, source=upstream_source_version)
    if unknown_names:
        logger.warn('The following unreleased packages/stacks will be ignored: %s' % (', '.join(sorted(unknown_names))))
    if pkg_keywords:
        expanded_names, unknown_names = _classify_names(distro_name, _expand_keywords(distro_name, pkg_keywords), source=upstream_source_version)
        if unknown_names:
            logger.warn('The following unreleased packages/stacks from the %s will be ignored: %s' % (ROS_PACKAGE_PATH, ', '.join(sorted(unknown_names))))
        names.update(expanded_names)
    if not names.wet_package_names and not names.dry_stack_names and not has_repos:
        raise RuntimeError('No packages/stacks left after ignoring unreleased')
    if names.wet_package_names or names.dry_stack_names:
        logger.debug('Packages/stacks: %s' % ', '.join(sorted(names.wet_package_names | names.dry_stack_names)))
    if unreleased_repo_names:
        logger.debug('Unreleased repositories: %s' % ', '.join(sorted(unreleased_repo_names)))

    # classify deps-up-to
    deps_up_to_names, deps_keywords = _split_special_keywords(deps_up_to or [])
    deps_up_to_names, unknown_names = _classify_names(distro_name, deps_up_to_names, source=upstream_source_version)
    if unknown_names:
        logger.warn("The following unreleased '--deps-up-to' packages/stacks will be ignored: %s" % (', '.join(sorted(unknown_names))))
    if deps_keywords:
        expanded_names, unknown_names = _classify_names(distro_name, _expand_keywords(distro_name, deps_keywords), source=upstream_source_version)
        if unknown_names:
            logger.warn("The following unreleased '--deps-up-to' packages/stacks from the %s will be ignored: %s" % (ROS_PACKAGE_PATH, ', '.join(sorted(unknown_names))))
        deps_up_to_names.update(expanded_names)
    if deps_up_to:
        logger.debug('Dependencies up to: %s' % ', '.join(sorted(deps_up_to_names.wet_package_names | deps_up_to_names.dry_stack_names)))

    # classify excludes
    exclude_names, excludes_keywords = _split_special_keywords(excludes or [])
    if exclude_paths:
        exclude_names_from_path = set([])
        [exclude_names_from_path.update(_get_package_names(exclude_path)) for exclude_path in exclude_paths]
        logger.debug("The following wet packages found in '--exclude-path' will be excluded: %s" % ', '.join(sorted(exclude_names_from_path)))
        exclude_names.update(exclude_names_from_path)
    exclude_names, unknown_names = _classify_names(distro_name, exclude_names, source=upstream_source_version)
    if unknown_names:
        logger.warn("The following unreleased '--exclude' packages/stacks will be ignored: %s" % (', '.join(sorted(unknown_names))))
    if excludes_keywords:
        expanded_names, unknown_names = _classify_names(distro_name, _expand_keywords(distro_name, excludes_keywords), source=upstream_source_version)
        exclude_names.update(expanded_names)
    if excludes:
        logger.debug('Excluded packages/stacks: %s' % ', '.join(sorted(exclude_names.wet_package_names | exclude_names.dry_stack_names)))

    result = copy.deepcopy(names)
    # clear wet packages if not requested
    if dry_only:
        result.wet_package_names.clear()
    # clear dry packages if not requested and no dependencies
    if wet_only and not deps and not deps_up_to:
        result.dry_stack_names.clear()

    # remove excluded names from the list of wet and dry names
    result.wet_package_names -= exclude_names.wet_package_names
    result.dry_stack_names -= exclude_names.dry_stack_names
    if not result.wet_package_names and not result.dry_stack_names and not has_repos:
        raise RuntimeError('No packages/stacks left after applying the exclusions')

    if result.wet_package_names:
        logger.debug('Wet packages: %s' % ', '.join(sorted(result.wet_package_names)))
    if result.dry_stack_names:
        logger.debug('Dry stacks: %s' % ', '.join(sorted(result.dry_stack_names)))

    # extend the names with recursive dependencies
    if deps or deps_up_to:
        # add dry dependencies
        if result.dry_stack_names:
            dry_distro = get_dry_distro(distro_name)
            _, unreleased_stack_names = get_stack_names(dry_distro)
            excludes = exclude_names.dry_stack_names | deps_up_to_names.dry_stack_names | set(unreleased_stack_names)
            dry_dependencies, wet_dependencies = get_recursive_dependencies_of_dry(dry_distro, result.dry_stack_names, excludes=excludes)
            logger.debug('Dry stacks including dependencies: %s' % ', '.join(sorted(dry_dependencies)))
            result.dry_stack_names |= dry_dependencies

            if not dry_only:
                # add wet dependencies of dry stuff
                logger.debug('Wet dependencies of dry stacks: %s' % ', '.join(sorted(wet_dependencies)))
                for depend in wet_dependencies:
                    if depend in exclude_names.wet_package_names or depend in deps_up_to_names.wet_package_names:
                        continue
                    wet_distro = get_wet_distro(distro_name)
                    assert depend in wet_distro.release_packages, "Package '%s' does not have a version" % depend
                    result.wet_package_names.add(depend)
        # add wet dependencies
        if result.wet_package_names:
            wet_distro = get_wet_distro(distro_name)
            _, unreleased_package_names = get_package_names(wet_distro)
            excludes = exclude_names.wet_package_names | deps_up_to_names.wet_package_names | set(unreleased_package_names)
            result.wet_package_names |= get_recursive_dependencies_of_wet(wet_distro, result.wet_package_names, excludes=excludes,
                    limit_depth=deps_depth, source=upstream_source_version)
            logger.debug('Wet packages including dependencies: %s' % ', '.join(sorted(result.wet_package_names)))

    # intersect result with recursive dependencies on
    if deps_up_to:
        # intersect with wet dependencies on
        if deps_up_to_names.wet_package_names:
            wet_distro = get_wet_distro(distro_name)
            # wet depends on do not include the names since they are excluded to stop recursion asap
            wet_package_names = get_recursive_dependencies_on_of_wet(wet_distro, deps_up_to_names.wet_package_names, excludes=names.wet_package_names,
                    limit=result.wet_package_names, source=upstream_source_version)
            # keep all names which are already in the result set
            wet_package_names |= result.wet_package_names & names.wet_package_names
            result.wet_package_names = wet_package_names
        else:
            result.wet_package_names.clear()
        logger.debug('Wet packages after intersection: %s' % ', '.join(sorted(result.wet_package_names)))

        # intersect with dry dependencies on
        dry_dependency_names = result.wet_package_names | deps_up_to_names.dry_stack_names
        if dry_dependency_names and not wet_only:
            dry_distro = get_dry_distro(distro_name)
            # dry depends on do not include the names since they are excluded to stop recursion asap
            dry_stack_names = get_recursive_dependencies_on_of_dry(dry_distro, dry_dependency_names, excludes=names.dry_stack_names, limit=result.dry_stack_names)
            # keep all names which are already in the result set
            dry_stack_names |= result.dry_stack_names & names.dry_stack_names
            result.dry_stack_names = dry_stack_names
        else:
            result.dry_stack_names.clear()
        logger.debug('Dry stacks after intersection: %s' % ', '.join(sorted(result.dry_stack_names)))

    # exclude passed in names
    if deps_only:
        result.wet_package_names -= set(names.wet_package_names)
        result.dry_stack_names -= set(names.dry_stack_names)

    # exclude wet packages based on build type
    if catkin_only or non_catkin_only:
        wet_distro = get_wet_distro(distro_name)
        for pkg_name in list(result.wet_package_names):
            pkg_xml = wet_distro.get_release_package_xml(pkg_name)
            try:
                pkg = parse_package_string(pkg_xml)
            except InvalidPackage as e:
                logger.warn("The package '%s' has an invalid manifest and will be ignored: %s" % (pkg_name, e))
                result.wet_package_names.remove(pkg_name)
                continue
            # a package without an explicit <build_type> export defaults to 'catkin'
            build_type = ([e.content for e in pkg.exports if e.tagname == 'build_type'][0]) if 'build_type' in [e.tagname for e in pkg.exports] else 'catkin'
            # XOR: drop catkin packages when non_catkin_only, and vice versa
            if catkin_only ^ (build_type == 'catkin'):
                result.wet_package_names.remove(pkg_name)

    # get wet and/or dry rosinstall data
    rosinstall_data = []
    if not dry_only and (result.wet_package_names or has_repos):
        wet_distro = get_wet_distro(distro_name)
        if upstream_version_tag or upstream_source_version:
            # determine repositories based on package names and passed in repository names
            repos = {}
            for pkg_name in result.wet_package_names:
                if upstream_source_version and wet_distro.source_packages:
                    pkg = wet_distro.source_packages[pkg_name]
                    repos[pkg.repository_name] = wet_distro.repositories[pkg.repository_name]
                else:
                    pkg = wet_distro.release_packages[pkg_name]
                    if pkg.repository_name not in repos:
                        repo = wet_distro.repositories[pkg.repository_name]
                        release_repo = repo.release_repository
                        assert not upstream_version_tag or release_repo.version is not None, "Package '%s' in repository '%s' does not have a release version" % (pkg_name, pkg.repository_name)
                        repos[pkg.repository_name] = repo
            # If asked to get upstream development then the release state doesn't matter
            if upstream_source_version:
                repo_names = repo_names.union(unreleased_repo_names)
            for repo_name in repo_names:
                if repo_name not in repos:
                    repos[repo_name] = wet_distro.repositories[repo_name]
            # ignore repos which lack information
            repos_without_source = [repo_name for repo_name, repo in repos.items() if not repo.source_repository]
            if repos_without_source:
                logger.warn('The following repositories with an unknown upstream will be ignored: %s' % ', '.join(sorted(repos_without_source)))
                [repos.pop(repo_name) for repo_name in repos_without_source]
            if upstream_version_tag:
                repos_without_release = [repo_name for repo_name, repo in repos.items() if not repo.release_repository or not repo.release_repository.version]
                if repos_without_release:
                    logger.warn('The following repositories without a release will be ignored: %s' % ', '.join(sorted(repos_without_release)))
                    [repos.pop(repo_name) for repo_name in repos_without_release]
            logger.debug('Generate rosinstall entries for wet repositories: %s' % ', '.join(sorted(repos.keys())))
            wet_rosinstall_data = generate_rosinstall_for_repos(repos, version_tag=upstream_version_tag, tar=tar)
            rosinstall_data += wet_rosinstall_data
        else:
            logger.debug('Generate rosinstall entries for wet packages: %s' % ', '.join(sorted(result.wet_package_names)))
            wet_rosinstall_data = generate_wet_rosinstall(wet_distro, result.wet_package_names, flat=flat, tar=tar)
            rosinstall_data += wet_rosinstall_data
    if not wet_only and result.dry_stack_names:
        logger.debug('Generate rosinstall entries for dry stacks: %s' % ', '.join(sorted(result.dry_stack_names)))
        dry_distro = get_dry_distro(distro_name)
        dry_rosinstall_data = generate_dry_rosinstall(dry_distro, result.dry_stack_names)
        rosinstall_data += dry_rosinstall_data
    return rosinstall_data
Exemple #15
0
def regenerate_pkg(overlay, pkg, distro, preserve_existing=False):
    """Regenerate the Arch Linux PKGBUILD for *pkg* inside the overlay.

    :param overlay: overlay repository wrapper (provides ``repo.repo_dir``
        and ``repo.remove_file``)
    :param pkg: name of the ROS package to regenerate
    :param distro: distro object the package belongs to
    :param preserve_existing: if True and a pkgbuild already exists for
        *pkg*, skip regeneration
    :returns: tuple ``(installer, previous_version, pkg)``; on skip or on
        unresolved dependencies the first element is ``None``
    :raises RuntimeError: if *pkg* is not a known package in *distro*
    :raises KeyError: if package data cannot be parsed
    """
    version = get_pkg_version(distro, pkg)
    pkgbuild_name =\
        '/ros-{0}/{1}/{1}.pkgbuild'.format(distro.name, pkg)
    pkgbuild_name = overlay.repo.repo_dir + pkgbuild_name
    patch_path = '/ros-{}/{}/files'.format(distro.name, pkg)
    patch_path = overlay.repo.repo_dir + patch_path
    is_ros2 = get_distros()[distro.name]['distribution_type'] == 'ros2'
    has_patches = os.path.exists(patch_path)
    pkg_names = get_package_names(distro)[0]
    # collect any *.patch files shipped alongside the pkgbuild
    patches = glob.glob('%s/*.patch' % patch_path) if has_patches else None
    if pkg not in pkg_names:
        raise RuntimeError("Unknown package '%s'" % (pkg))
    # otherwise, remove a (potentially) existing pkgbuild.
    prefix = '{0}/ros-{1}/{2}/'.format(overlay.repo.repo_dir, distro.name, pkg)
    existing = glob.glob('%s*.pkgbuild' % prefix)
    previous_version = None
    if preserve_existing and os.path.isfile(pkgbuild_name):
        ok("pkgbuild for package '%s' up to date, skipping..." % pkg)
        return None, [], None
    elif existing:
        overlay.repo.remove_file(existing[0])
        # Derive the previous version from the file name by removing the
        # exact directory prefix and '.pkgbuild' suffix.  The earlier
        # str.lstrip(prefix)/str.rstrip('.pkgbuild') calls were wrong:
        # those methods strip *character sets*, not affixes, and could
        # mangle the result (e.g. versions ending in 'b', 'd', 'g', ...).
        previous_version = existing[0]
        if previous_version.startswith(prefix):
            previous_version = previous_version[len(prefix):]
        if previous_version.endswith('.pkgbuild'):
            previous_version = previous_version[:-len('.pkgbuild')]
        manifest_file = '{0}/ros-{1}/{2}/Manifest'.format(
            overlay.repo.repo_dir, distro.name, pkg
        )
        overlay.repo.remove_file(manifest_file)
    try:
        current = arch_pkgbuild(distro, pkg, has_patches)
        current.pkgbuild.name = pkg
        current.pkgbuild.version = version
        current.pkgbuild.patches = patches
        current.pkgbuild.is_ros2 = is_ros2
    except Exception:
        err('Failed to generate pkgbuild for package {}!'.format(pkg))
        raise
    try:
        pkgbuild_text = current.pkgbuild_text()
    except UnresolvedDependency:
        dep_err = 'Failed to resolve required dependencies for'
        err("{0} package {1}!".format(dep_err, pkg))
        unresolved = current.pkgbuild.get_unresolved()
        for dep in unresolved:
            err(" unresolved: \"{}\"".format(dep))
        return None, current.pkgbuild.get_unresolved(), None
    except KeyError:
        err("Failed to parse data for package {}!".format(pkg))
        raise
    make_dir(
        "{}/ros-{}/{}".format(overlay.repo.repo_dir, distro.name, pkg)
    )
    success_msg = 'Successfully generated pkgbuild for package'
    ok('{0} \'{1}\'.'.format(success_msg, pkg))

    # Build the destination path outside the try block so the error message
    # in the except clause can never reference an unbound name.
    pkgbuild_file = '{0}/ros-{1}/{2}/PKGBUILD'.format(
        overlay.repo.repo_dir,
        distro.name,
        pkg,
    )
    try:
        ok(f"writing {pkgbuild_file}")
        with open(pkgbuild_file, "w") as pkgbuild_file_f:
            pkgbuild_file_f.write(pkgbuild_text)
    except Exception:
        # fixed: the message previously contained a stray 'f' before the
        # path ("Failed to write f/...") due to a typo inside the f-string
        err(f"Failed to write {pkgbuild_file} to disk!")
        raise
    return current, previous_version, pkg
def generate_rosinstall(distro_name, names,
    from_paths=None, repo_names=None,
    deps=False, deps_up_to=None, deps_depth=None, deps_only=False,
    wet_only=False, dry_only=False, catkin_only=False, non_catkin_only=False,
    excludes=None, exclude_paths=None,
    flat=False,
    tar=False,
    upstream_version_tag=False, upstream_source_version=False):
    """Build rosinstall entries for the requested packages/stacks.

    Older variant of the generator: the requested ``names`` (plus packages
    found under ``from_paths`` and packages belonging to ``repo_names``)
    are classified into released "wet" packages and legacy "dry" stacks,
    optionally extended with recursive dependencies (``deps`` /
    ``deps_up_to`` / ``deps_depth``), filtered by the exclude/only flags,
    and finally converted into a list of rosinstall entry dicts which is
    returned.

    :param distro_name: name of the ROS distribution to resolve against
    :param names: package/stack names; may contain special keywords which
        are expanded via ``_expand_keywords``
    :param upstream_version_tag: generate entries for upstream release tags
        of whole repositories instead of per-package release repositories
    :param upstream_source_version: generate entries for upstream
        development branches (repository source information)
    :returns: list of rosinstall entry dicts
    :raises RuntimeError: if no packages/stacks remain after filtering
    """

    # classify package/stack names
    names, keywords = _split_special_keywords(names)

    # find packages recursively in include paths
    if from_paths:
        include_names_from_path = set([])
        [include_names_from_path.update(_get_package_names(from_path)) for from_path in from_paths]
        logger.debug("The following wet packages found in '--from-path' will be considered: %s" % ', '.join(sorted(include_names_from_path)))
        names.update(include_names_from_path)

    # expand repository names into package names
    repo_names, unknown_repo_names = _classify_repo_names(distro_name, repo_names)
    if unknown_repo_names:
        logger.warn('The following unknown repositories will be ignored: %s' % (', '.join(sorted(unknown_repo_names))))
    wet_package_names, unreleased_repo_names = _get_packages_for_repos(distro_name, repo_names)
    names.update(wet_package_names)
    if unreleased_repo_names and not upstream_version_tag and not upstream_source_version:
        logger.warn('The following unreleased repositories will be ignored: %s' % ', '.join(sorted(unreleased_repo_names)))
    if unreleased_repo_names and (deps or deps_up_to) and (upstream_version_tag or upstream_source_version):
        logger.warn('The dependencies of the following unreleased repositories are unknown and will be ignored: %s' % ', '.join(sorted(unreleased_repo_names)))
    # repositories only contribute rosinstall entries of their own when an
    # upstream tag/source checkout was requested (see the wet section below)
    has_repos = ((repo_names - unreleased_repo_names) and (upstream_version_tag or upstream_source_version)) or (unreleased_repo_names and upstream_source_version)

    names, unknown_names = _classify_names(distro_name, names)
    if unknown_names:
        logger.warn('The following not released packages/stacks will be ignored: %s' % (', '.join(sorted(unknown_names))))
    if keywords:
        expanded_names, unknown_names = _classify_names(distro_name, _expand_keywords(distro_name, keywords))
        if unknown_names:
            logger.warn('The following not released packages/stacks from the %s will be ignored: %s' % (ROS_PACKAGE_PATH, ', '.join(sorted(unknown_names))))
        names.update(expanded_names)
    if not names.wet_package_names and not names.dry_stack_names and not has_repos:
        raise RuntimeError('No packages/stacks left after ignoring not released')
    if names.wet_package_names or names.dry_stack_names:
        logger.debug('Packages/stacks: %s' % ', '.join(sorted(names.wet_package_names | names.dry_stack_names)))
    if unreleased_repo_names:
        logger.debug('Unreleased repositories: %s' % ', '.join(sorted(unreleased_repo_names)))

    # classify deps-up-to
    deps_up_to_names, keywords = _split_special_keywords(deps_up_to or [])
    deps_up_to_names, unknown_names = _classify_names(distro_name, deps_up_to_names)
    if unknown_names:
        logger.warn("The following not released '--deps-up-to' packages/stacks will be ignored: %s" % (', '.join(sorted(unknown_names))))
    if keywords:
        expanded_names, unknown_names = _classify_names(distro_name, _expand_keywords(distro_name, keywords))
        if unknown_names:
            logger.warn("The following not released '--deps-up-to' packages/stacks from the %s will be ignored: %s" % (ROS_PACKAGE_PATH, ', '.join(sorted(unknown_names))))
        deps_up_to_names.update(expanded_names)
    if deps_up_to:
        logger.debug('Dependencies up to: %s' % ', '.join(sorted(deps_up_to_names.wet_package_names | deps_up_to_names.dry_stack_names)))

    # classify excludes
    exclude_names, keywords = _split_special_keywords(excludes or [])
    if exclude_paths:
        exclude_names_from_path = set([])
        [exclude_names_from_path.update(_get_package_names(exclude_path)) for exclude_path in exclude_paths]
        logger.debug("The following wet packages found in '--exclude-path' will be excluded: %s" % ', '.join(sorted(exclude_names_from_path)))
        exclude_names.update(exclude_names_from_path)
    exclude_names, unknown_names = _classify_names(distro_name, exclude_names)
    if unknown_names:
        logger.warn("The following not released '--exclude' packages/stacks will be ignored: %s" % (', '.join(sorted(unknown_names))))
    if keywords:
        expanded_names, unknown_names = _classify_names(distro_name, _expand_keywords(distro_name, keywords))
        exclude_names.update(expanded_names)
    if excludes:
        logger.debug('Excluded packages/stacks: %s' % ', '.join(sorted(exclude_names.wet_package_names | exclude_names.dry_stack_names)))

    result = copy.deepcopy(names)
    # clear wet packages if not requested
    if dry_only:
        result.wet_package_names.clear()
    # clear dry packages if not requested and no dependencies
    if wet_only and not deps and not deps_up_to:
        result.dry_stack_names.clear()

    # remove excluded names from the list of wet and dry names
    result.wet_package_names -= exclude_names.wet_package_names
    result.dry_stack_names -= exclude_names.dry_stack_names
    if not result.wet_package_names and not result.dry_stack_names and not has_repos:
        raise RuntimeError('No packages/stacks left after applying the exclusions')

    if result.wet_package_names:
        logger.debug('Wet packages: %s' % ', '.join(sorted(result.wet_package_names)))
    if result.dry_stack_names:
        logger.debug('Dry stacks: %s' % ', '.join(sorted(result.dry_stack_names)))

    # extend the names with recursive dependencies
    if deps or deps_up_to:
        # add dry dependencies
        if result.dry_stack_names:
            dry_distro = get_dry_distro(distro_name)
            _, unreleased_stack_names = get_stack_names(dry_distro)
            excludes = exclude_names.dry_stack_names | deps_up_to_names.dry_stack_names | set(unreleased_stack_names)
            dry_dependencies, wet_dependencies = get_recursive_dependencies_of_dry(dry_distro, result.dry_stack_names, excludes=excludes)
            logger.debug('Dry stacks including dependencies: %s' % ', '.join(sorted(dry_dependencies)))
            result.dry_stack_names |= dry_dependencies

            if not dry_only:
                # add wet dependencies of dry stuff
                logger.debug('Wet dependencies of dry stacks: %s' % ', '.join(sorted(wet_dependencies)))
                for depend in wet_dependencies:
                    if depend in exclude_names.wet_package_names or depend in deps_up_to_names.wet_package_names:
                        continue
                    wet_distro = get_wet_distro(distro_name)
                    assert depend in wet_distro.release_packages, "Package '%s' does not have a version" % depend
                    result.wet_package_names.add(depend)
        # add wet dependencies
        if result.wet_package_names:
            wet_distro = get_wet_distro(distro_name)
            _, unreleased_package_names = get_package_names(wet_distro)
            excludes = exclude_names.wet_package_names | deps_up_to_names.wet_package_names | set(unreleased_package_names)
            result.wet_package_names |= get_recursive_dependencies_of_wet(wet_distro, result.wet_package_names, excludes=excludes, limit_depth=deps_depth)
            logger.debug('Wet packages including dependencies: %s' % ', '.join(sorted(result.wet_package_names)))

    # intersect result with recursive dependencies on
    if deps_up_to:
        # intersect with wet dependencies on
        if deps_up_to_names.wet_package_names:
            wet_distro = get_wet_distro(distro_name)
            # wet depends on do not include the names since they are excluded to stop recursion asap
            wet_package_names = get_recursive_dependencies_on_of_wet(wet_distro, deps_up_to_names.wet_package_names, excludes=names.wet_package_names, limit=result.wet_package_names)
            # keep all names which are already in the result set
            wet_package_names |= result.wet_package_names & names.wet_package_names
            result.wet_package_names = wet_package_names
        else:
            result.wet_package_names.clear()
        logger.debug('Wet packages after intersection: %s' % ', '.join(sorted(result.wet_package_names)))

        # intersect with dry dependencies on
        dry_dependency_names = result.wet_package_names | deps_up_to_names.dry_stack_names
        if dry_dependency_names and not wet_only:
            dry_distro = get_dry_distro(distro_name)
            # dry depends on do not include the names since they are excluded to stop recursion asap
            dry_stack_names = get_recursive_dependencies_on_of_dry(dry_distro, dry_dependency_names, excludes=names.dry_stack_names, limit=result.dry_stack_names)
            # keep all names which are already in the result set
            dry_stack_names |= result.dry_stack_names & names.dry_stack_names
            result.dry_stack_names = dry_stack_names
        else:
            result.dry_stack_names.clear()
        logger.debug('Dry stacks after intersection: %s' % ', '.join(sorted(result.dry_stack_names)))

    # exclude passed in names
    if deps_only:
        result.wet_package_names -= set(names.wet_package_names)
        result.dry_stack_names -= set(names.dry_stack_names)

    # exclude wet packages based on build type
    if catkin_only or non_catkin_only:
        wet_distro = get_wet_distro(distro_name)
        for pkg_name in list(result.wet_package_names):
            pkg_xml = wet_distro.get_release_package_xml(pkg_name)
            try:
                pkg = parse_package_string(pkg_xml)
            except InvalidPackage as e:
                logger.warn("The package '%s' has an invalid manifest and will be ignored: %s" % (pkg_name, e))
                result.wet_package_names.remove(pkg_name)
                continue
            # a package without an explicit <build_type> export defaults to 'catkin'
            build_type = ([e.content for e in pkg.exports if e.tagname == 'build_type'][0]) if 'build_type' in [e.tagname for e in pkg.exports] else 'catkin'
            # XOR: drop catkin packages when non_catkin_only, and vice versa
            if catkin_only ^ (build_type == 'catkin'):
                result.wet_package_names.remove(pkg_name)

    # get wet and/or dry rosinstall data
    rosinstall_data = []
    if not dry_only and (result.wet_package_names or has_repos):
        wet_distro = get_wet_distro(distro_name)
        if upstream_version_tag or upstream_source_version:
            # determine repositories based on package names and passed in repository names
            repos = {}
            for pkg_name in result.wet_package_names:
                pkg = wet_distro.release_packages[pkg_name]
                if pkg.repository_name not in repos:
                    repo = wet_distro.repositories[pkg.repository_name]
                    release_repo = repo.release_repository
                    assert not upstream_version_tag or release_repo.version is not None, "Package '%s' in repository '%s' does not have a release version" % (pkg_name, pkg.repository_name)
                    repos[pkg.repository_name] = repo
            for repo_name in repo_names:
                if repo_name not in repos:
                    repos[repo_name] = wet_distro.repositories[repo_name]
            # ignore repos which lack information
            repos_without_source = [repo_name for repo_name, repo in repos.items() if not repo.source_repository]
            if repos_without_source:
                logger.warn('The following repositories with an unknown upstream will be ignored: %s' % ', '.join(sorted(repos_without_source)))
                [repos.pop(repo_name) for repo_name in repos_without_source]
            if upstream_version_tag:
                repos_without_release = [repo_name for repo_name, repo in repos.items() if not repo.release_repository or not repo.release_repository.version]
                if repos_without_release:
                    logger.warn('The following repositories without a release will be ignored: %s' % ', '.join(sorted(repos_without_release)))
                    [repos.pop(repo_name) for repo_name in repos_without_release]
            logger.debug('Generate rosinstall entries for wet repositories: %s' % ', '.join(sorted(repos.keys())))
            wet_rosinstall_data = generate_rosinstall_for_repos(repos, version_tag=upstream_version_tag, tar=tar)
            rosinstall_data += wet_rosinstall_data
        else:
            logger.debug('Generate rosinstall entries for wet packages: %s' % ', '.join(sorted(result.wet_package_names)))
            wet_rosinstall_data = generate_wet_rosinstall(wet_distro, result.wet_package_names, flat=flat, tar=tar)
            rosinstall_data += wet_rosinstall_data
    if not wet_only and result.dry_stack_names:
        logger.debug('Generate rosinstall entries for dry stacks: %s' % ', '.join(sorted(result.dry_stack_names)))
        dry_distro = get_dry_distro(distro_name)
        dry_rosinstall_data = generate_dry_rosinstall(dry_distro, result.dry_stack_names)
        rosinstall_data += dry_rosinstall_data
    return rosinstall_data
Exemple #17
0
def generate_installers(
    distro_name,  # ros distro name
    overlay,  # repo instance
    gen_pkg_func,  # function to call for generating
    preserve_existing=True,  # don't regenerate if installer exists
    *args  # any additional args for gen_pkg_func
):
    """Generate an installer for every package in a ROS distro.

    Iterates all package names known to *distro_name* (sorted) and calls
    ``gen_pkg_func(overlay, pkg, distro, preserve_existing, *args)`` for
    each, tallying successes and failures and logging progress as a
    percentage of the total.

    :param distro_name: name of the ROS distro to look up
    :param overlay: repo instance passed through to gen_pkg_func
    :param gen_pkg_func: callable returning ``(current, bad_deps)``;
        a falsy ``current`` with non-empty ``bad_deps`` means missing
        dependencies, a falsy ``current`` with ``preserve_existing``
        means the existing installer was kept
    :param preserve_existing: if True, don't regenerate existing installers
    :param args: extra positional args forwarded to gen_pkg_func
    :returns: tuple ``(installers, borkd_pkgs, changes)`` where
        ``installers`` is the list of generated package names,
        ``borkd_pkgs`` maps package name -> unresolved dependencies, and
        ``changes`` is a list of '*pkg --> version*' strings
    """
    distro = get_distro(distro_name)
    pkg_names = get_package_names(distro)
    # guard against an empty distro so the percentage math below can
    # never raise ZeroDivisionError (the loop body won't run anyway)
    total = float(len(pkg_names[0])) or 1.0
    borkd_pkgs = dict()
    changes = []
    installers = []
    bad_installers = []
    succeeded = 0
    failed = 0

    info("Generating installers for distro '%s'" % distro_name)
    for i, pkg in enumerate(sorted(pkg_names[0])):
        version = get_pkg_version(distro, pkg)
        percent = '%.1f' % (100 * (float(i) / total))
        try:
            current, bad_deps = gen_pkg_func(overlay, pkg, distro,
                                             preserve_existing, *args)
            if not current and bad_deps:
                # we are missing dependencies
                failed_msg = "{0}%: Failed to generate".format(percent)
                failed_msg += " installer for package '%s'!" % pkg
                err(failed_msg)
                borkd_pkgs[pkg] = bad_deps
                failed += 1
                continue
            elif not current and preserve_existing:
                # don't replace the installer
                succeeded += 1
                continue
            success_msg = 'Successfully generated installer for package'
            ok('{0}%: {1} \'{2}\'.'.format(percent, success_msg, pkg))
            succeeded += 1
            changes.append('*{0} --> {1}*'.format(pkg, version))
            installers.append(pkg)
        except UnknownLicense as ul:
            err("{0}%: Unknown License '{1}'.".format(percent, str(ul)))
            bad_installers.append(pkg)
            failed += 1
        except KeyError:
            # get_pkg_version / gen_pkg_func couldn't resolve the package
            failed_msg = 'Failed to generate installer'
            err("{0}%: {1} for package {2}!".format(percent, failed_msg, pkg))
            bad_installers.append(pkg)
            failed += 1
    results = 'Generated {0} / {1}'.format(succeeded, failed + succeeded)
    results += ' for distro {0}'.format(distro_name)
    info("------ {0} ------\n".format(results))

    if borkd_pkgs:
        warn("Unresolved:")
        for broken, deps in borkd_pkgs.items():
            warn("{}:".format(broken))
            warn("  {}".format(deps))

    return installers, borkd_pkgs, changes
Exemple #18
0
def regenerate_pkg(overlay, pkg, rosdistro, preserve_existing, srcrev_cache,
                   skip_keys):
    """Generate (or regenerate) the OpenEmbedded recipe for one package.

    Locates any existing recipe (first via ``git status --porcelain``,
    then via a filesystem glob for unchanged files), removes it unless
    ``preserve_existing`` asks to keep it, builds a fresh recipe with
    :func:`oe_recipe` and writes it under
    ``meta-ros<N>-<distro>/generated-recipes/<component>/``.

    :param overlay: overlay whose ``repo`` provides git access and repo_dir
    :param pkg: ROS package name to generate a recipe for
    :param rosdistro: distro object the package belongs to
    :param preserve_existing: if True and a recipe already exists, skip
    :param srcrev_cache: cache passed through to oe_recipe
    :param skip_keys: keys to skip, passed through to oe_recipe
    :returns: tuple ``(current, previous_version, recipe)``; on any
        failure ``(None, [], None)`` and the package is recorded in
        ``yoctoRecipe.not_generated_recipes``
    :raises RuntimeError: if *pkg* is unknown to the distro
    :raises KeyError: if the package version cannot be resolved
    """
    pkg_names = get_package_names(rosdistro)[0]
    if pkg not in pkg_names:
        yoctoRecipe.not_generated_recipes.add(pkg)
        raise RuntimeError("Unknown package '%s' available packages"
                           " in selected distro: %s" %
                           (pkg, get_package_names(rosdistro)))
    try:
        version = get_pkg_version(rosdistro, pkg, is_oe=True)
    except KeyError as ke:
        yoctoRecipe.not_generated_recipes.add(pkg)
        raise ke
    repo_dir = overlay.repo.repo_dir
    component_name = yoctoRecipe.convert_to_oe_name(
        rosdistro.release_packages[pkg].repository_name)
    recipe = yoctoRecipe.convert_to_oe_name(pkg)
    # check for an existing recipe which was removed by clean_ros_recipe_dirs
    prefix = 'meta-ros{0}-{1}/generated-recipes/*/{2}_*.bb'.format(
        yoctoRecipe._get_ros_version(rosdistro.name), rosdistro.name, recipe)
    existing = overlay.repo.git.status('--porcelain', '--', prefix)
    if existing:
        # The git status --porcelain output will look like this:
        # D  meta-ros2-eloquent/generated-recipes/variants/ros-base_0.8.3-1.bb
        # we want just the path with filename
        if len(existing.split('\n')) > 1:
            warn('More than 1 recipe was output by "git status --porcelain '
                 'meta-ros{0}-{1}/generated-recipes/*/{2}_*.bb": "{3}"'.format(
                     yoctoRecipe._get_ros_version(rosdistro.name),
                     rosdistro.name, recipe, existing))
        if existing.split()[0] != 'D':
            err('Unexpected output from "git status --porcelain '
                'meta-ros{0}-{1}/generated-recipes/*/{2}_*.bb": "{3}"'.format(
                    yoctoRecipe._get_ros_version(rosdistro.name),
                    rosdistro.name, recipe, existing))

        existing = existing.split()[1]
    else:
        # If it isn't shown in git status, it could still exist as normal
        # unchanged file when --only option is being used
        import glob
        existing = glob.glob('{0}/{1}'.format(repo_dir, prefix))
        if existing:
            if len(existing) > 1:
                err('More than 1 recipe was output by "git status '
                    '--porcelain '
                    'meta-ros{0}-{1}/generated-recipes/*/{2}_*.bb": "{3}"'.
                    format(yoctoRecipe._get_ros_version(rosdistro.name),
                           rosdistro.name, recipe, existing))
            existing = existing[0]

    previous_version = None
    if preserve_existing and existing:
        ok("recipe for package '%s' up to date, skipping..." % pkg)
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, [], None
    elif existing:
        overlay.repo.remove_file(existing, True)
        idx_version = existing.rfind('_') + len('_')
        # NOTE: must strip the '.bb' *suffix*, not a character set --
        # rstrip('.bb') would also eat a trailing 'b' or '.' that is
        # legitimately part of the version (e.g. '1.0b' -> '1.0')
        previous_version = existing[idx_version:]
        if previous_version.endswith('.bb'):
            previous_version = previous_version[:-len('.bb')]
    try:
        current = oe_recipe(rosdistro, pkg, srcrev_cache, skip_keys)
    except InvalidPackage as e:
        err('Invalid package: ' + str(e))
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, [], None
    except Exception as e:
        err('Failed generating recipe for {}! {}'.format(pkg, str(e)))
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, [], None
    try:
        recipe_text = current.recipe_text()
    except NoPkgXml as nopkg:
        err("Could not fetch pkg! {}".format(str(nopkg)))
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, [], None
    except KeyError as ke:
        err("Failed to parse data for package {}! {}".format(pkg, str(ke)))
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, [], None
    make_dir("{0}/meta-ros{1}-{2}/generated-recipes/{3}".format(
        repo_dir, yoctoRecipe._get_ros_version(rosdistro.name), rosdistro.name,
        component_name))
    success_msg = 'Successfully generated recipe for package'
    ok('{0} \'{1}\'.'.format(success_msg, pkg))
    recipe_file_name = '{0}/meta-ros{1}-{2}/generated-recipes/{3}/' \
        '{4}_{5}.bb'.format(
            repo_dir,
            yoctoRecipe._get_ros_version(rosdistro.name),
            rosdistro.name,
            component_name,
            recipe,
            version
        )
    try:
        with open('{0}'.format(recipe_file_name), "w") as recipe_file:
            ok('Writing recipe {0}'.format(recipe_file_name))
            recipe_file.write(recipe_text)
            yoctoRecipe.generated_components.add(component_name)
            yoctoRecipe.generated_recipes[recipe] = (version, component_name)
    except Exception:
        err("Failed to write recipe to disk!")
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, [], None
    return current, previous_version, recipe
Exemple #19
0
def regenerate_pkg(overlay, pkg, distro, preserve_existing, srcrev_cache,
                   skip_keys):
    """Generate (or regenerate) the OpenEmbedded recipe for one package.

    Looks for an existing recipe on disk via a glob, removes it unless
    ``preserve_existing`` asks to keep it, builds a fresh recipe with
    :func:`oe_recipe` and writes it as
    ``meta-ros<N>-<distro>/generated-recipes/<component>/ros<N>-<recipe>_<version>.bb``.

    :param overlay: overlay whose ``repo`` provides repo_dir and file removal
    :param pkg: ROS package name to generate a recipe for
    :param distro: distro object the package belongs to
    :param preserve_existing: if True and a recipe already exists, skip
    :param srcrev_cache: cache passed through to oe_recipe
    :param skip_keys: keys to skip, passed through to oe_recipe
    :returns: tuple ``(current, previous_version, recipe)``; on any
        failure ``(None, [], None)`` and the package is recorded in
        ``yoctoRecipe.not_generated_recipes``
    :raises RuntimeError: if *pkg* is unknown to the distro
    :raises KeyError: if the package version cannot be resolved
    """
    pkg_names = get_package_names(distro)[0]
    if pkg not in pkg_names:
        yoctoRecipe.not_generated_recipes.add(pkg)
        raise RuntimeError("Unknown package '%s'" % pkg)
    try:
        version = get_pkg_version(distro, pkg, is_oe=True)
    except KeyError as ke:
        yoctoRecipe.not_generated_recipes.add(pkg)
        raise ke
    repo_dir = overlay.repo.repo_dir
    component_name = yoctoRecipe.convert_to_oe_name(
        distro.release_packages[pkg].repository_name)
    recipe = yoctoRecipe.convert_to_oe_name(pkg)
    # check for an existing recipe
    prefix = '{0}/meta-ros{1}-{2}/generated-recipes/{3}/{4}'.format(
        repo_dir,
        yoctoRecipe._get_ros_version(distro.name),
        distro.name,
        component_name,
        recipe,
    )
    existing = glob.glob('{}_*.bb'.format(prefix))
    previous_version = None
    if preserve_existing and existing:
        ok("recipe for package '%s' up to date, skipping..." % pkg)
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, [], None
    elif existing:
        existing = existing[0]
        overlay.repo.remove_file(existing, True)
        idx_version = existing.rfind('_') + len('_')
        # NOTE: must strip the '.bb' *suffix*, not a character set --
        # rstrip('.bb') would also eat a trailing 'b' or '.' that is
        # legitimately part of the version (e.g. '1.0b' -> '1.0')
        previous_version = existing[idx_version:]
        if previous_version.endswith('.bb'):
            previous_version = previous_version[:-len('.bb')]
    try:
        current = oe_recipe(distro, pkg, srcrev_cache, skip_keys)
    except InvalidPackage as e:
        err('Invalid package: ' + str(e))
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, [], None
    except Exception as e:
        err('Failed generating recipe for {}! {}'.format(pkg, str(e)))
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, [], None
    try:
        recipe_text = current.recipe_text()
    except NoPkgXml as nopkg:
        err("Could not fetch pkg! {}".format(str(nopkg)))
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, [], None
    except KeyError as ke:
        err("Failed to parse data for package {}! {}".format(pkg, str(ke)))
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, [], None
    make_dir("{0}/meta-ros{1}-{2}/generated-recipes/{3}".format(
        repo_dir, yoctoRecipe._get_ros_version(distro.name), distro.name,
        component_name))
    success_msg = 'Successfully generated recipe for package'
    ok('{0} \'{1}\'.'.format(success_msg, pkg))
    recipe_file_name = '{0}/meta-ros{1}-{2}/generated-recipes/{3}/' \
        'ros{1}-{4}_{5}.bb'.format(
            repo_dir,
            yoctoRecipe._get_ros_version(distro.name),
            distro.name,
            component_name,
            recipe,
            version
        )
    try:
        with open('{0}'.format(recipe_file_name), "w") as recipe_file:
            ok('Writing recipe {0}'.format(recipe_file_name))
            recipe_file.write(recipe_text)
            yoctoRecipe.generated_components.add(component_name)
            yoctoRecipe.generated_recipes['ros{0}-{1}'.format(
                yoctoRecipe._get_ros_version(distro.name),
                recipe)] = (version, component_name)
    except Exception:
        err("Failed to write recipe to disk!")
        yoctoRecipe.not_generated_recipes.add(pkg)
        return None, [], None
    return current, previous_version, recipe