def _check_all_keys_are_valid(self, peer_packages, rosdistro):
    """Check that every rosdep key used by the packages resolves on all target distros.

    Collects all dependency keys from every package in ``self.packages``
    (run/buildtool-export, build/buildtool/test, replaces and conflicts,
    condition-filtered), removes keys listed in ``self.skip_keys``, and tries
    to resolve each remaining key for every os version in ``self.distros``.

    :param peer_packages: names of packages released alongside these ones;
        such keys resolve as peers rather than through rosdep.
    :param rosdistro: ROS distro name used to evaluate package conditions.
    :returns: True if every checked key resolved everywhere, False otherwise.
    """
    keys_to_resolve = set()
    key_to_packages_which_depends_on = collections.defaultdict(list)
    keys_to_ignore = set()
    for package in self.packages.values():
        evaluate_package_conditions(package, rosdistro)
        depends = [
            dep for dep in (package.run_depends + package.buildtool_export_depends)
            if dep.evaluated_condition is not False]
        build_depends = [
            dep for dep in (package.build_depends + package.buildtool_depends + package.test_depends)
            if dep.evaluated_condition is not False]
        unresolved_keys = [
            dep for dep in (depends + build_depends + package.replaces + package.conflicts)
            if dep.evaluated_condition is not False]
        # replaces/conflicts are not real dependencies: accumulate them so
        # they can be passed as extra "peers" during resolution below.
        keys_to_ignore = {
            dep for dep in keys_to_ignore.union(package.replaces + package.conflicts)
            if dep.evaluated_condition is not False}
        keys = [d.name for d in unresolved_keys]
        keys_to_resolve.update(keys)
        for key in keys:
            key_to_packages_which_depends_on[key].append(package.name)

    for skip_key in self.skip_keys:
        try:
            keys_to_resolve.remove(skip_key)
        except KeyError:
            warning("Key '{0}' specified by --skip-keys was not found".format(skip_key))
        else:
            warning("Skipping dependency key '{0}' per --skip-keys".format(skip_key))

    os_name = self.os_name
    rosdistro = self.rosdistro
    all_keys_valid = True
    # Hoisted out of the loops below: this list does not depend on the key
    # or os_version currently being checked.
    extended_peer_packages = peer_packages + [d.name for d in keys_to_ignore]
    for key in sorted(keys_to_resolve):
        for os_version in self.distros:
            try:
                rule, installer_key, default_installer_key = \
                    resolve_rosdep_key(key, os_name, os_version, rosdistro,
                                       extended_peer_packages, retry=False)
                # A None rule means the key is a peer package; nothing to check.
                if rule is None:
                    continue
                if installer_key != default_installer_key:
                    error("Key '{0}' resolved to '{1}' with installer '{2}', "
                          "which does not match the default installer '{3}'."
                          .format(key, rule, installer_key, default_installer_key))
                    BloomGenerator.exit(
                        "The RPM generator does not support dependencies "
                        "which are installed with the '{0}' installer."
                        .format(installer_key),
                        returncode=code.GENERATOR_INVALID_INSTALLER_KEY)
            except (GeneratorError, RuntimeError) as e:
                print(fmt("Failed to resolve @{cf}@!{key}@| on @{bf}{os_name}@|:@{cf}@!{os_version}@| with: {e}")
                      .format(**locals()))
                print(fmt("@{cf}@!{0}@| is depended on by these packages: ").format(key) +
                      str(list(set(key_to_packages_which_depends_on[key]))))
                print(fmt("@{kf}@!<== @{rf}@!Failed@|"))
                all_keys_valid = False
    return all_keys_valid
def generate_substitutions_from_package(package, os_name, os_version, ros_distro,
                                        installation_prefix='/usr', deb_inc=0,
                                        peer_packages=None, releaser_history=None,
                                        fallback_resolver=None, native=False):
    """Build the substitution dictionary used to render the debian templates.

    :param package: catkin package object to generate substitutions for.
    :param os_name: target OS name (e.g. 'ubuntu').
    :param os_version: target OS codename (e.g. 'focal').
    :param ros_distro: ROS distro used to evaluate conditions and resolve keys.
    :param installation_prefix: install prefix placed in the templates.
    :param deb_inc: debian increment number appended to the version.
    :param peer_packages: names of packages being released together.
    :param releaser_history: previously recorded changelog/release history.
    :param fallback_resolver: resolver used when rosdep resolution fails.
    :param native: if True, generate a 'native' (no -inc suffix) package format.
    :returns: dict of template substitutions.
    """
    peer_packages = peer_packages or []
    data = {}
    # Name, Version, Description
    data['Name'] = package.name
    data['Version'] = package.version
    data['Description'] = format_description(package.description)
    # Websites
    websites = [str(url) for url in package.urls if url.type == 'website']
    homepage = websites[0] if websites else ''
    if homepage == '':
        warning("No homepage set, defaulting to ''")
    data['Homepage'] = homepage
    # Debian Increment Number
    data['DebianInc'] = '' if native else '-{0}'.format(deb_inc)
    # Debian Package Format
    data['format'] = 'native' if native else 'quilt'
    # Package name
    data['Package'] = sanitize_package_name(package.name)
    # Installation prefix
    data['InstallationPrefix'] = installation_prefix
    # Resolve dependencies, filtering out any whose condition evaluated False
    evaluate_package_conditions(package, ros_distro)
    depends = [
        dep for dep in (package.run_depends + package.buildtool_export_depends)
        if dep.evaluated_condition is not False]
    build_depends = [
        dep for dep in (package.build_depends + package.buildtool_depends + package.test_depends)
        if dep.evaluated_condition is not False]
    unresolved_keys = [
        dep for dep in (depends + build_depends + package.replaces + package.conflicts)
        if dep.evaluated_condition is not False]
    # The installer key is not considered here, but it is checked when the keys are checked before this
    resolved_deps = resolve_dependencies(
        unresolved_keys, os_name, os_version, ros_distro,
        peer_packages + [d.name for d in package.replaces + package.conflicts],
        fallback_resolver)
    data['Depends'] = sorted(set(format_depends(depends, resolved_deps)))
    data['BuildDepends'] = sorted(set(format_depends(build_depends, resolved_deps)))
    data['Replaces'] = sorted(set(format_depends(package.replaces, resolved_deps)))
    data['Conflicts'] = sorted(set(format_depends(package.conflicts, resolved_deps)))
    # Build-type specific substitutions.
    build_type = package.get_build_type()
    if build_type == 'catkin':
        pass
    elif build_type == 'cmake':
        pass
    elif build_type == 'ament_cmake':
        pass
    elif build_type == 'ament_python':
        # Don't set the install-scripts flag if it's already set in setup.cfg.
        package_path = os.path.abspath(os.path.dirname(package.filename))
        setup_cfg_path = os.path.join(package_path, 'setup.cfg')
        data['pass_install_scripts'] = True
        if os.path.isfile(setup_cfg_path):
            setup_cfg = SafeConfigParser()
            setup_cfg.read([setup_cfg_path])
            if (setup_cfg.has_option('install', 'install-scripts') or
                    setup_cfg.has_option('install', 'install_scripts')):
                data['pass_install_scripts'] = False
    else:
        error("Build type '{}' is not supported by this version of bloom.".
              format(build_type), exit=True)
    # Set the distribution
    data['Distribution'] = os_version
    # Use the time stamp to set the date strings
    stamp = datetime.datetime.now(tz.tzlocal())
    data['Date'] = stamp.strftime('%a, %d %b %Y %T %z')
    data['YYYY'] = stamp.strftime('%Y')
    # Maintainers
    maintainers = []
    for m in package.maintainers:
        maintainers.append(str(m))
    data['Maintainer'] = maintainers[0]
    data['Maintainers'] = ', '.join(maintainers)
    # Changelog
    changelogs = get_changelogs(package, releaser_history)
    if changelogs and package.version not in [x[0] for x in changelogs]:
        warning("")
        warning("A CHANGELOG.rst was found, but no changelog for this version was found.")
        warning("You REALLY should have a entry (even a blank one) for each version of your package.")
        warning("")
    if not changelogs:
        # Ensure at least a minimal changelog
        changelogs = []
    if package.version not in [x[0] for x in changelogs]:
        changelogs.insert(0, (
            package.version,
            get_rfc_2822_date(datetime.datetime.now()),
            '  * Autogenerated, no changelog for this version found in CHANGELOG.rst.',
            package.maintainers[0].name,
            package.maintainers[0].email))
    bad_changelog = False
    # Make sure that the first change log is the version being released
    if package.version != changelogs[0][0]:
        error("")
        error("The version of the first changelog entry '{0}' is not the "
              "same as the version being currently released '{1}'.".format(
                  package.version, changelogs[0][0]))
        bad_changelog = True
    # Make sure that the current version is the latest in the changelog
    for changelog in changelogs:
        if parse_version(package.version) < parse_version(changelog[0]):
            error("")
            error("There is at least one changelog entry, '{0}', which has a "
                  "newer version than the version of package '{1}' being released, '{2}'."
                  .format(changelog[0], package.name, package.version))
            bad_changelog = True
    if bad_changelog:
        error("This is almost certainly by mistake, you should really take a "
              "look at the changelogs for the package you are releasing.")
        error("")
        if not maybe_continue('n', 'Continue anyways'):
            sys.exit("User quit.")
    data['changelogs'] = changelogs
    # Use debhelper version 7 for oneiric, otherwise 9
    data['debhelper_version'] = 7 if os_version in ['oneiric'] else 9
    # Summarize dependencies
    summarize_dependency_mapping(data, depends, build_depends, resolved_deps)
    # Copyright
    licenses = []
    separator = '\n' + '=' * 80 + '\n\n'
    for l in package.licenses:
        if hasattr(l, 'file') and l.file is not None:
            license_file = os.path.join(os.path.dirname(package.filename), l.file)
            if not os.path.exists(license_file):
                error("License file '{}' is not found.".format(license_file), exit=True)
            # Use a context manager so the file handle is closed deterministically
            # (previously the handle was leaked).
            with open(license_file, 'r') as f:
                license_text = f.read()
            if not license_text.endswith('\n'):
                license_text += '\n'
            licenses.append(license_text)
    data['Copyright'] = separator.join(licenses)

    def convertToUnicode(obj):
        # Normalize all substitution values to unicode text (py2/py3 safe).
        if sys.version_info.major == 2:
            if isinstance(obj, str):
                return unicode(obj.decode('utf8'))
            elif isinstance(obj, unicode):
                return obj
        else:
            if isinstance(obj, bytes):
                return str(obj.decode('utf8'))
            elif isinstance(obj, str):
                return obj
        if isinstance(obj, list):
            for i, val in enumerate(obj):
                obj[i] = convertToUnicode(val)
            return obj
        elif isinstance(obj, type(None)):
            return None
        elif isinstance(obj, tuple):
            obj_tmp = list(obj)
            for i, val in enumerate(obj_tmp):
                obj_tmp[i] = convertToUnicode(obj_tmp[i])
            return tuple(obj_tmp)
        elif isinstance(obj, int):
            return obj
        raise RuntimeError('need to deal with type %s' % (str(type(obj))))

    for key, value in data.items():
        data[key] = convertToUnicode(value)

    return data
def generate_substitutions_from_package(
    package, os_name, os_version, ros_distro, installation_prefix='/usr',
    rpm_inc=0, peer_packages=None, releaser_history=None,
    fallback_resolver=None, skip_keys=None
):
    """Build the substitution dictionary used to render the RPM spec templates.

    :param package: catkin package object to generate substitutions for.
    :param os_name: target OS name (e.g. 'fedora').
    :param os_version: target OS version.
    :param ros_distro: ROS distro used to evaluate conditions and resolve keys.
    :param installation_prefix: install prefix placed in the templates.
    :param rpm_inc: RPM increment (release) number.
    :param peer_packages: names of packages being released together.
    :param releaser_history: previously recorded release history.
    :param fallback_resolver: resolver used when rosdep resolution fails.
    :param skip_keys: dependency keys to exclude from resolution.
    :returns: dict of template substitutions.
    """
    peer_packages = peer_packages or []
    skip_keys = skip_keys or set()
    data = {}
    # Name, Version, Description
    data['Name'] = package.name
    data['Version'] = package.version
    data['Description'] = rpmify_string(package.description)
    # License
    if not package.licenses or not package.licenses[0]:
        error("No license set for package '{0}', aborting.".format(package.name), exit=True)
    data['License'] = ' and '.join(package.licenses)
    data['LicenseFiles'] = sorted(set(l.file for l in package.licenses if l.file))
    # Websites
    websites = [str(url) for url in package.urls if url.type == 'website']
    data['Homepage'] = websites[0] if websites else ''
    if data['Homepage'] == '':
        warning("No homepage set")
    # RPM Increment Number
    data['RPMInc'] = rpm_inc
    # Package name
    data['Package'] = sanitize_package_name(package.name)
    # Installation prefix
    data['InstallationPrefix'] = installation_prefix
    # Resolve dependencies, honoring evaluated conditions and skip_keys
    evaluate_package_conditions(package, ros_distro)
    depends = [
        dep for dep in (package.run_depends + package.buildtool_export_depends)
        if dep.evaluated_condition is not False and dep.name not in skip_keys]
    build_depends = [
        dep for dep in (package.build_depends + package.buildtool_depends)
        if dep.evaluated_condition is not False and dep.name not in skip_keys]
    test_depends = [
        dep for dep in (package.test_depends)
        if dep.evaluated_condition is not False and dep.name not in skip_keys]
    replaces = [
        dep for dep in package.replaces if dep.evaluated_condition is not False]
    conflicts = [
        dep for dep in package.conflicts if dep.evaluated_condition is not False]
    unresolved_keys = depends + build_depends + test_depends + replaces + conflicts
    # The installer key is not considered here, but it is checked when the keys are checked before this
    resolved_deps = resolve_dependencies(
        unresolved_keys, os_name, os_version, ros_distro,
        peer_packages + [d.name for d in (replaces + conflicts)],
        fallback_resolver)
    data['Depends'] = sorted(set(format_depends(depends, resolved_deps)))
    data['BuildDepends'] = sorted(set(format_depends(build_depends, resolved_deps)))
    # Test depends that are already build depends would be redundant in the spec.
    data['TestDepends'] = sorted(
        set(format_depends(test_depends, resolved_deps)).difference(data['BuildDepends']))
    data['Replaces'] = sorted(set(format_depends(replaces, resolved_deps)))
    data['Conflicts'] = sorted(set(format_depends(conflicts, resolved_deps)))
    data['Provides'] = []
    data['Supplements'] = []
    # Build-type specific substitutions.
    build_type = package.get_build_type()
    if build_type == 'catkin':
        pass
    elif build_type == 'cmake':
        pass
    elif build_type == 'ament_cmake':
        pass
    elif build_type == 'ament_python':
        pass
    else:
        error("Build type '{}' is not supported by this version of bloom.".
              format(build_type), exit=True)
    # Set the OS and distribution
    data['OSName'] = os_name
    data['Distribution'] = os_version
    # Use the time stamp to set the date strings
    stamp = datetime.datetime.now(tz.tzlocal())
    data['Date'] = stamp.strftime('%a %b %d %Y')
    # Maintainers
    maintainers = []
    for m in package.maintainers:
        maintainers.append(str(m))
    data['Maintainer'] = maintainers[0]
    data['Maintainers'] = ', '.join(maintainers)
    # Changelog
    if releaser_history:
        # Sort by version first, then (stably) by recorded release date.
        sorted_releaser_history = sorted(releaser_history, key=LooseVersion, reverse=True)
        sorted_releaser_history = sorted(
            sorted_releaser_history,
            key=lambda k: strptime(releaser_history.get(k)[0], '%a %b %d %Y'),
            reverse=True)
        changelogs = [(v, releaser_history[v]) for v in sorted_releaser_history]
    else:
        # Ensure at least a minimal changelog
        changelogs = []
    if package.version + '-' + str(rpm_inc) not in [x[0] for x in changelogs]:
        changelogs.insert(0, (
            package.version + '-' + str(rpm_inc),
            (data['Date'], package.maintainers[0].name, package.maintainers[0].email)
        ))
    exported_tags = [e.tagname for e in package.exports]
    data['NoArch'] = 'metapackage' in exported_tags or 'architecture_independent' in exported_tags
    data['changelogs'] = changelogs
    # Summarize dependencies
    summarize_dependency_mapping(data, depends, build_depends, resolved_deps)

    def convertToUnicode(obj):
        # Normalize all substitution values to unicode text (py2/py3 safe).
        # NOTE: a duplicated dead `elif isinstance(obj, int)` branch was removed.
        if sys.version_info.major == 2:
            if isinstance(obj, str):
                return unicode(obj.decode('utf8'))
            elif isinstance(obj, unicode):
                return obj
        else:
            if isinstance(obj, bytes):
                return str(obj.decode('utf8'))
            elif isinstance(obj, str):
                return obj
        if isinstance(obj, list):
            for i, val in enumerate(obj):
                obj[i] = convertToUnicode(val)
            return obj
        elif isinstance(obj, type(None)):
            return None
        elif isinstance(obj, tuple):
            obj_tmp = list(obj)
            for i, val in enumerate(obj_tmp):
                obj_tmp[i] = convertToUnicode(obj_tmp[i])
            return tuple(obj_tmp)
        elif isinstance(obj, int):
            return obj
        raise RuntimeError('need to deal with type %s' % (str(type(obj))))

    for key, value in data.items():
        data[key] = convertToUnicode(value)

    return data
def mine_packages():
    """Mine and print dependency/version/license tables for workspace packages.

    Scans the system install space and the local workspace, resolves each
    workspace package's dependencies against ubuntu/focal + ROS noetic, and
    prints summary tables. Returns None; all output goes to stdout.
    """
    os_name = "ubuntu"
    os_version = "focal"
    ros_distro = "noetic"
    system_package = find_packages("/opt/ros/noetic/share")
    workspace_packages = find_packages("tesseract-1")
    # Guard clause: without a non-empty dict of workspace packages there is
    # nothing to mine (previously `peer_packages` would have been unbound
    # below, raising NameError).
    if not isinstance(workspace_packages, dict) or not workspace_packages:
        return
    peer_packages = [p.name for p in workspace_packages.values()]

    processed_pkgs = {}
    for pkg in workspace_packages.values():
        evaluate_package_conditions(pkg, ros_distro)
        processed_pkg = ProcessedPackage()
        processed_pkg.name = pkg.name
        processed_pkg.version = pkg.version
        processed_pkg.licenses = pkg.licenses
        processed_pkg.depends = [
            dep for dep in (pkg.run_depends + pkg.buildtool_export_depends)
            if dep.evaluated_condition is not False]
        processed_pkg.build_depends = [
            dep for dep in (pkg.build_depends + pkg.buildtool_depends + pkg.test_depends)
            if dep.evaluated_condition is not False]
        processed_pkg.replaces = [
            dep for dep in pkg.replaces if dep.evaluated_condition is not False]
        processed_pkg.conflicts = [
            dep for dep in pkg.conflicts if dep.evaluated_condition is not False]
        unresolved_keys = (processed_pkg.depends + processed_pkg.build_depends +
                           processed_pkg.replaces + processed_pkg.conflicts)
        processed_pkg.resolved_depends = resolve_dependencies(
            unresolved_keys, os_name, os_version, ros_distro,
            peer_packages + [d.name for d in (processed_pkg.replaces + processed_pkg.conflicts)],
            default_fallback_resolver)
        processed_pkgs[processed_pkg.name] = processed_pkg

    def print_dep_table(pkg, label, deps):
        # Print one dependency table for `pkg`: each dep's version and
        # licenses, sourced from the system install space, the workspace,
        # or the resolved ubuntu package info (in that order).
        print("")
        print("{:<40} {:<40} {:<15} {:<30}".format(pkg.name, label, 'Version', 'Licenses'))
        for d in deps:
            if d.name in system_package:
                dpkg = system_package[d.name]
                print("{:<40} {:<40} {:<15} {:<30}".format(
                    "", d.name, dpkg.version, ",".join(dpkg.licenses)))
            elif d.name in processed_pkgs:
                dpkg = processed_pkgs[d.name]
                print("{:<40} {:<40} {:<15} {:<30}".format(
                    "", d.name, dpkg.version, ",".join(dpkg.licenses)))
            else:
                for rd in pkg.resolved_depends[d.name]:
                    rd_pkg_version = extract_ubuntu_package_version(rd)
                    rd_pkg_licenses = extract_ubuntu_package_license(rd)
                    filtered_licenses = list(set(rd_pkg_licenses))
                    print("{:<40} {:<40} {:<15} {:<30}".format(
                        "", rd, rd_pkg_version, ",".join(filtered_licenses)))

    print("")
    # Print Workspace packages
    print("{:<40} {:<15} {:<30}".format('Workspace Package', 'Version', 'Licenses'))
    for v in processed_pkgs.values():
        print("{:<40} {:<15} {:<30}".format(v.name, v.version, ",".join(v.licenses)))

    # Print each package's formatted dependency strings
    for v in processed_pkgs.values():
        formatted_depends = format_depends(
            v.depends, v.resolved_depends, peer_packages, ros_distro)
        formatted_build_depends = format_depends(
            v.build_depends, v.resolved_depends, peer_packages, ros_distro)
        print("")
        print("{0}".format(v.name))
        print(" Depends : {0}".format(";".join(formatted_depends)))
        print(" Build Depends: {0}".format(";".join(formatted_build_depends)))

    # Print each packages dependencies
    for v in processed_pkgs.values():
        print_dep_table(v, 'Depends', v.depends)

    # Print each packages build dependencies
    for v in processed_pkgs.values():
        print_dep_table(v, 'Build Depends', v.build_depends)