import argparse
import copy
import os
import re
import sys
import time
from pathlib import Path

import yaml
from apt import Cache
from catkin_pkg.packages import find_packages
from catkin_pkg.topological_order import topological_order_packages
from rosdistro import get_distribution_file
from rosdistro import get_index

# The remaining helpers used below (the add_argument_* wrappers,
# create_dockerfile, expand_template, get_binary_package_versions,
# get_user_id, Scope, etc.) are assumed to be imported from the
# ros_buildfarm helper modules.


def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the CI job")

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_package_selection_args(parser)
    add_argument_repos_file_urls(parser)
    add_argument_repository_names(parser, optional=True)
    add_argument_skip_rosdep_keys(parser)
    add_argument_test_branch(parser)
    parser.add_argument(
        '--workspace-root', nargs='+',
        help='The root path of the workspace to compile')
    args = parser.parse_args(argv)

    assert args.repos_file_urls or args.repository_names

    debian_pkg_names = [
        'git',
        'python3-apt',
        'python3-colcon-metadata',
        'python3-colcon-package-information',
        'python3-colcon-package-selection',
        'python3-colcon-recursive-crawl',
        'python3-colcon-ros',
        'python3-rosdep',
        'python3-vcstool',
    ]

    # get versions for build dependencies
    apt_cache = Cache()
    debian_pkg_versions = get_binary_package_versions(
        apt_cache, debian_pkg_names)

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        'custom_rosdep_urls': [],

        'uid': get_user_id(),

        'build_environment_variables': [
            '%s=%s' % key_value for key_value in args.env_vars.items()],

        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,

        'repos_file_urls': args.repos_file_urls,
        'repository_names': args.repository_names,
        'test_branch': args.test_branch,
        'skip_rosdep_keys': args.skip_rosdep_keys,
        'package_selection_args': args.package_selection_args,
        'workspace_root': args.workspace_root,
    }
    create_dockerfile(
        'ci/create_workspace.Dockerfile.em', data, args.dockerfile_dir)
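
# A minimal sketch of the env-var formatting used above: '%s=%s' % (k, v)
# formats each (key, value) 2-tuple from the --env-vars mapping in one step.
# The mapping below is illustrative only.
def _demo_env_var_formatting():
    env_vars = {'CC': 'gcc', 'MAKEFLAGS': '-j4'}  # hypothetical input
    return ['%s=%s' % key_value for key_value in env_vars.items()]
    # -> ['CC=gcc', 'MAKEFLAGS=-j4'] (insertion order of the dict)
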
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for building the binarydeb")
    add_argument_rosdistro_index_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_binarydeb_dir(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    args = parser.parse_args(argv)

    debian_package_name = get_debian_package_name(
        args.rosdistro_name, args.package_name)

    # get expected package version from rosdistro
    index = get_index(args.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.package_name in dist_file.release_packages
    pkg = dist_file.release_packages[args.package_name]
    repo = dist_file.repositories[pkg.repository_name]
    package_version = repo.release_repository.version
    debian_package_version = package_version

    # build_binarydeb dependencies
    debian_pkg_names = ['apt-src']

    # add build dependencies from .dsc file
    dsc_file = get_dsc_file(
        args.binarydeb_dir, debian_package_name, debian_package_version)
    debian_pkg_names += sorted(get_build_depends(dsc_file))

    # get versions for build dependencies
    apt_cache = Cache()
    debian_pkg_versions = get_binary_package_versions(
        apt_cache, debian_pkg_names)

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'uid': get_user_id(),

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'build_environment_variables': args.env_vars,

        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,
        'install_lists': [],

        'rosdistro_name': args.rosdistro_name,
        'package_name': args.package_name,
        'binarydeb_dir': args.binarydeb_dir,
    }
    create_dockerfile(
        'release/binarydeb_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print(' -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print(' -v %s:/tmp/binarydeb' % args.binarydeb_dir)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--rosdoc-lite-dir',
        required=True,
        help='The root path of the rosdoc_lite repository')
    parser.add_argument(
        '--catkin-sphinx-dir',
        required=True,
        help='The root path of the catkin-sphinx repository')
    parser.add_argument(
        '--rosdoc-index-dir',
        required=True,
        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'trusty')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print(' -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(
            args.workspace_root, 'src', args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating update manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api',
                    pkg_name, 'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping, no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    # safe_load avoids constructing arbitrary objects
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)
                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(
                    rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping, same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(
                    args.output_dir, 'api', pkg_name, 'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'updated rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(set([
                d.name for d in depends if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependencies for metapackage '%s'" % pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(set([
                    d.name for d in depends
                    if d.name in valid_package_names]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(
                pkg.name, ros_dependency_names)
    rosdoc_index.write_modified_data(
        args.output_dir, ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(
                    args.output_dir, 'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(os.path.join(
                        pkg_changelog_doc_path, 'changelog.html'), 'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(
                args.output_dir, 'rosdoc_tags', '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourself
            # bad things happen when we do this
            assert pkg.name not in dep_names

            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping nonexistent location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url': '../../../api/%s/html' % pkg.name,
                'location': 'file://%s' % os.path.join(
                    args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package': pkg.name,
            }
            # do not write these local locations
            rosdoc_index.locations[pkg.name] = [data]

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(
                rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(
                rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = 'http://docs.ros.org/%s/api/%s/html' % \
                (args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(
                config.jenkins_url, args.rosdistro_name, args.doc_build_name,
                args.repository_name, args.os_name, args.os_code_name,
                args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name,
                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name,
                pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(
                args.output_dir, 'manifests', pkg.name, 'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope(
        'SUBSECTION',
        'overwrite CMakeLists.txt files to only generate messages'
    ):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type']
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" % pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope(
        'SUBSECTION',
        'determine dependencies and generate Dockerfile'
    ):
        # initialize rosdep view
        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            'python-catkin-pkg',
            'python-epydoc',
            'python-kitchen',
            'python-rospkg',
            'python-sphinx',
            'python-yaml',
            # since catkin is not a run dependency but provides the setup files
            get_debian_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_debian_package_name(args.rosdistro_name, 'genmsg'),
        ]
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_debian_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print(' -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(
            pkgs.values(), 'build, run and doc',
            _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use unreleased dependencies
            print('# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in '
                  'failing build')
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        build_files = get_doc_build_files(config, args.rosdistro_name)
        build_file = build_files[args.doc_build_name]

        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes]
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(
                prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        # generate Dockerfile
        data = {
            'os_name': args.os_name,
            'os_code_name': args.os_code_name,
            'arch': args.arch,

            'distribution_repository_urls': args.distribution_repository_urls,
            'distribution_repository_keys': get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),

            'rosdistro_name': args.rosdistro_name,

            'uid': get_user_id(),

            'dependencies': debian_pkg_names,
            'dependency_versions': debian_pkg_versions,

            'canonical_base_url': build_file.canonical_base_url,

            'ordered_pkg_tuples': ordered_pkg_tuples,
            'rosdoc_config_files': rosdoc_config_files,
        }
        create_dockerfile(
            'doc/doc_task.Dockerfile.em', data, args.dockerfile_dir)
def write_install_list(install_list_path, debian_pkg_names, apt_cache):
    debian_pkg_versions = get_binary_package_versions(
        apt_cache, debian_pkg_names)
    with open(install_list_path, 'w') as out_file:
        for pkg, pkg_version in sorted(debian_pkg_versions.items()):
            out_file.write('%s=%s\n' % (pkg, pkg_version))
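
# A minimal usage sketch for write_install_list, assuming python3-apt is
# available; the output path and package names are illustrative only.
def _demo_write_install_list():
    from apt import Cache
    apt_cache = Cache()
    write_install_list(
        '/tmp/install_list_build.txt',        # hypothetical destination
        ['build-essential', 'python3-yaml'],  # hypothetical package names
        apt_cache)
    # The file then holds one 'name=version' line per package, sorted by name.
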
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--rosdoc-lite-dir',
        required=True,
        help='The root path of the rosdoc_lite repository')
    parser.add_argument(
        '--catkin-sphinx-dir',
        required=True,
        help='The root path of the catkin-sphinx repository')
    parser.add_argument(
        '--rosdoc-index-dir',
        required=True,
        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'xenial')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_build_tool(parser, required=True)
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)
    index = get_index(config.rosdistro_index_url)

    condition_context = get_package_condition_context(
        index, args.rosdistro_name)
    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)
        for pkg in pkgs.values():
            pkg.evaluate_conditions(condition_context)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print(' -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(
            args.workspace_root, 'src', args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating update manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api',
                    pkg_name, 'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping, no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)
                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(
                    rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping, same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(
                    args.output_dir, 'api', pkg_name, 'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'updated rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(set([
                d.name for d in depends if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependencies for metapackage '%s'" % pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(set([
                    d.name for d in depends
                    if d.name in valid_package_names]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(pkg.name, ros_dependency_names)
    rosdoc_index.write_modified_data(
        args.output_dir, ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(
                    args.output_dir, 'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(os.path.join(
                        pkg_changelog_doc_path, 'changelog.html'), 'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes]
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(
                prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(
                args.output_dir, 'rosdoc_tags', '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourself
            # bad things happen when we do this
            assert pkg.name not in dep_names

            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping nonexistent location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url': '../../../api/%s/html' % pkg.name,
                'location': 'file://%s' % os.path.join(
                    args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package': pkg.name,
            }
            # fetch generator specific output folders from rosdoc_lite
            if pkg.name in rosdoc_config_files:
                output_folders = get_generator_output_folders(
                    rosdoc_config_files[pkg.name], pkg.name)
                if 'doxygen' in output_folders:
                    data['docs_url'] += '/' + output_folders['doxygen']
            # do not write these local locations
            rosdoc_index.locations[pkg.name] = [data]

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    doc_build_files = get_doc_build_files(config, args.rosdistro_name)
    doc_build_file = doc_build_files[args.doc_build_name]

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(
                rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(
                rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = '%s/%s/api/%s/html' % \
                (doc_build_file.canonical_base_url, args.rosdistro_name,
                 pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(
                config.jenkins_url, args.rosdistro_name, args.doc_build_name,
                args.repository_name, args.os_name, args.os_code_name,
                args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name,
                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name,
                pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(
                args.output_dir, 'manifests', pkg.name, 'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope(
        'SUBSECTION',
        'overwrite CMakeLists.txt files to only generate messages'
    ):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type']
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" % pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope(
        'SUBSECTION',
        'determine dependencies and generate Dockerfile'
    ):
        # initialize rosdep view
        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            # since catkin is not a run dependency but provides the setup files
            get_os_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_os_package_name(args.rosdistro_name, 'genmsg'),
        ]
        if '3' == str(condition_context['ROS_PYTHON_VERSION']):
            # the following are required by rosdoc_lite
            debian_pkg_names.extend([
                'python3-catkin-pkg-modules',
                'python3-kitchen',
                'python3-rospkg-modules',
                'python3-sphinx',
                'python3-yaml',
            ])
        else:
            if '2' != str(condition_context['ROS_PYTHON_VERSION']):
                print('Unknown python version, using Python 2',
                      condition_context)
            # the following are required by rosdoc_lite
            debian_pkg_names.extend([
                'python-catkin-pkg-modules',
                'python-epydoc',
                'python-kitchen',
                'python-rospkg',
                'python-sphinx',
                'python-yaml',
            ])

        if args.build_tool == 'colcon':
            debian_pkg_names.append('python3-colcon-ros')
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_os_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print(' -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(
            pkgs.values(), 'build, run and doc',
            _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use unreleased dependencies
            print('# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in '
                  'failing build')
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        # generate Dockerfile
        data = {
            'os_name': args.os_name,
            'os_code_name': args.os_code_name,
            'arch': args.arch,
            'build_tool': doc_build_file.build_tool,

            'distribution_repository_urls': args.distribution_repository_urls,
            'distribution_repository_keys': get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),

            'environment_variables': [
                'ROS_PYTHON_VERSION={}'.format(
                    condition_context['ROS_PYTHON_VERSION'])],

            'rosdistro_name': args.rosdistro_name,

            'uid': get_user_id(),

            'dependencies': debian_pkg_names,
            'dependency_versions': debian_pkg_versions,
            'install_lists': [],

            'canonical_base_url': doc_build_file.canonical_base_url,

            'ordered_pkg_tuples': ordered_pkg_tuples,
            'rosdoc_config_files': rosdoc_config_files,
        }
        create_dockerfile(
            'doc/doc_task.Dockerfile.em', data, args.dockerfile_dir)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the devel job")
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    parser.add_argument(
        '--workspace-root',
        nargs='+',
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'xenial')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_dockerfile_dir(parser)
    parser.add_argument(
        '--testing',
        action='store_true',
        help='If set, build the workspace with tests enabled and run the '
             'tests instead of installing')
    args = parser.parse_args(argv)

    # get direct build dependencies
    pkgs = {}
    for workspace_root in args.workspace_root:
        source_space = os.path.join(workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs.update(find_packages(source_space))

    pkg_names = [pkg.name for pkg in pkgs.values()]
    print("Found the following packages:")
    for pkg_name in sorted(pkg_names):
        print(' -', pkg_name)

    maintainer_emails = set([])
    for pkg in pkgs.values():
        for m in pkg.maintainers:
            maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))

    context = initialize_resolver(
        args.rosdistro_name, args.os_name, args.os_code_name)

    apt_cache = Cache()

    debian_pkg_names = [
        'build-essential',
        'python3',
    ]
    if 'catkin' not in pkg_names:
        debian_pkg_names.append(
            get_debian_package_name(args.rosdistro_name, 'catkin'))
    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print(' -', debian_pkg_name)

    debian_pkg_versions = {}

    # get build dependencies and map them to binary packages
    build_depends = get_dependencies(
        pkgs.values(), 'build', _get_build_and_recursive_run_dependencies)
    debian_pkg_names_building = resolve_names(build_depends, **context)
    debian_pkg_names_building -= set(debian_pkg_names)
    debian_pkg_names += order_dependencies(debian_pkg_names_building)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names))

    # get run and test dependencies and map them to binary packages
    run_and_test_depends = get_dependencies(
        pkgs.values(), 'run and test', _get_run_and_test_dependencies)
    debian_pkg_names_testing = resolve_names(
        run_and_test_depends, **context)
    # all additional run/test dependencies
    # are added after the build dependencies
    # in order to reuse existing images in the docker container
    debian_pkg_names_testing -= set(debian_pkg_names)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names_testing))
    if args.testing:
        debian_pkg_names += order_dependencies(debian_pkg_names_testing)

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        'uid': get_user_id(),

        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,

        'testing': args.testing,
        'prerelease_overlay': len(args.workspace_root) > 1,
    }
    create_dockerfile(
        'devel/devel_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print(' -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print(' -v %s:/tmp/catkin_workspace' % args.workspace_root[-1])
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Lists available binary packages and versions which are '
                    'needed to satisfy rosdep keys for ROS packages in the '
                    'workspace')
    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)

    add_argument_output_dir(parser)
    add_argument_package_selection_args(parser)
    add_argument_skip_rosdep_keys(parser)
    parser.add_argument(
        '--package-root', nargs='+',
        help='The path to the directory containing packages')
    args = parser.parse_args(argv)

    workspace_root = args.package_root[-1]
    os.chdir(workspace_root)

    with Scope('SUBSECTION', 'mark packages with IGNORE files'):
        all_packages = locate_packages(workspace_root)
        selected_packages = all_packages
        if args.package_selection_args:
            print(
                'Using package selection arguments:',
                args.package_selection_args)
            selected_packages = locate_packages(
                workspace_root, extra_args=args.package_selection_args)

            to_ignore = all_packages.keys() - selected_packages.keys()
            print('Ignoring %d packages' % len(to_ignore))
            for package in sorted(to_ignore):
                print('-', package)
                package_root = all_packages[package]
                Path(package_root, 'COLCON_IGNORE').touch()

        print(
            'There are %d packages which meet selection criteria' %
            len(selected_packages))

    with Scope('SUBSECTION', 'Enumerating packages needed to build'):
        # find all of the underlay packages
        underlay_pkgs = {}
        all_underlay_pkg_names = set()
        for package_root in args.package_root[0:-1]:
            print("Crawling for packages in '%s'" % package_root)
            underlay_pkgs.update(find_packages(package_root))

            # Check for a colcon index for non-ROS package detection
            colcon_index = os.path.join(
                package_root, 'colcon-core', 'packages')
            try:
                all_underlay_pkg_names.update(os.listdir(colcon_index))
            except FileNotFoundError:
                pass

        underlay_pkg_names = [pkg.name for pkg in underlay_pkgs.values()]
        print('Found the following ROS underlay packages:')
        for pkg_name in sorted(underlay_pkg_names):
            print(' -', pkg_name)

        # get direct build dependencies
        package_root = args.package_root[-1]
        print("Crawling for packages in '%s'" % package_root)
        pkgs = find_packages(package_root)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following ROS packages:')
        for pkg_name in sorted(pkg_names):
            print(' -', pkg_name)

        # get build dependencies and map them to binary packages
        all_pkgs = set(pkgs.values()).union(underlay_pkgs.values())

        for pkg in all_pkgs:
            pkg.evaluate_conditions(os.environ)
        for pkg in all_pkgs:
            for group_depend in pkg.group_depends:
                if group_depend.evaluated_condition:
                    group_depend.extract_group_members(all_pkgs)

        dependency_keys_build = get_dependencies(
            all_pkgs, 'build', _get_build_and_recursive_run_dependencies,
            pkgs.values())
        dependency_keys_test = get_dependencies(
            all_pkgs, 'run and test',
            _get_test_and_recursive_run_dependencies, pkgs.values())

        if args.skip_rosdep_keys:
            dependency_keys_build.difference_update(args.skip_rosdep_keys)
            dependency_keys_test.difference_update(args.skip_rosdep_keys)

        # remove all non-ROS packages and packages which are present but
        # specifically ignored
        every_package_name = all_packages.keys() | all_underlay_pkg_names
        dependency_keys_build -= every_package_name
        dependency_keys_test -= every_package_name

        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        os_pkg_names_build = resolve_names(dependency_keys_build, **context)
        os_pkg_names_test = resolve_names(dependency_keys_test, **context)

        os_pkg_names_test -= os_pkg_names_build

    with Scope('SUBSECTION', 'Resolving package versions using apt cache'):
        apt_cache = Cache()
        os_pkg_versions = get_binary_package_versions(
            apt_cache, os_pkg_names_build | os_pkg_names_test)

    with open(os.path.join(args.output_dir, 'install_list_build.txt'), 'w') \
            as out_file:
        for package in sorted(os_pkg_names_build):
            out_file.write('%s=%s\n' % (package, os_pkg_versions[package]))
    with open(os.path.join(args.output_dir, 'install_list_test.txt'), 'w') \
            as out_file:
        for package in sorted(os_pkg_names_test):
            out_file.write('%s=%s\n' % (package, os_pkg_versions[package]))
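
# The install lists written above pin each apt package to the version found
# in the cache, one 'name=version' per line. A hedged sketch of how such a
# list could be read back into arguments for 'apt-get install'; the real
# consumer lives elsewhere in the buildfarm and the path is illustrative.
def _read_install_list(path):
    with open(path) as f:
        # e.g. subprocess.run(['apt-get', 'install', '-y'] + pins) later
        return [line.strip() for line in f if line.strip()]
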
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the devel job")
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    parser.add_argument(
        '--workspace-root', nargs='+',
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--os-name', required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name', required=True,
        help="The OS code name (e.g. 'xenial')")
    parser.add_argument(
        '--arch', required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_build_tool(parser, required=True)
    add_argument_ros_version(parser)
    add_argument_env_vars(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_run_abichecker(parser)
    add_argument_require_gpu_support(parser)
    a1 = add_argument_build_tool_args(parser)
    a2 = add_argument_build_tool_test_args(parser)
    parser.add_argument(
        '--testing',
        action='store_true',
        help='If set, build the workspace with tests enabled and run the '
             'tests instead of installing')
    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    condition_context = dict(args.env_vars)
    condition_context['ROS_DISTRO'] = args.rosdistro_name
    condition_context['ROS_VERSION'] = args.ros_version

    # get direct build dependencies
    pkgs = get_packages_in_workspaces(args.workspace_root, condition_context)

    pkg_names = [pkg.name for pkg in pkgs.values()]
    print("Found the following packages:")
    for pkg_name in sorted(pkg_names):
        print(' -', pkg_name)

    maintainer_emails = set([])
    for pkg in pkgs.values():
        for m in pkg.maintainers:
            maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))

    context = initialize_resolver(
        args.rosdistro_name, args.os_name, args.os_code_name)

    apt_cache = Cache()

    debian_pkg_names = [
        'build-essential',
        'python3',
    ]
    if args.build_tool == 'colcon':
        debian_pkg_names += [
            'python3-colcon-metadata',
            'python3-colcon-output',
            'python3-colcon-parallel-executor',
            'python3-colcon-ros',
            'python3-colcon-test-result',
        ]
    elif 'catkin' not in pkg_names:
        debian_pkg_names += resolve_names(['catkin'], **context)
    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print(' -', debian_pkg_name)

    debian_pkg_versions = {}

    # get build dependencies and map them to binary packages
    build_depends = get_dependencies(
        pkgs.values(), 'build', _get_build_and_recursive_run_dependencies)
    debian_pkg_names_building = resolve_names(build_depends, **context)
    debian_pkg_names_building -= set(debian_pkg_names)
    debian_pkg_names += order_dependencies(debian_pkg_names_building)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names))

    # get run and test dependencies and map them to binary packages
    run_and_test_depends = get_dependencies(
        pkgs.values(), 'run and test', _get_run_and_test_dependencies)
    debian_pkg_names_testing = resolve_names(run_and_test_depends, **context)
    # all additional run/test dependencies
    # are added after the build dependencies
    # in order to reuse existing images in the docker container
    debian_pkg_names_testing -= set(debian_pkg_names)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names_testing))
    if args.testing:
        debian_pkg_names += order_dependencies(debian_pkg_names_testing)

    mapped_workspaces = [
        (workspace_root, '/tmp/ws%s' % (index if index > 1 else ''))
        for index, workspace_root in enumerate(args.workspace_root, 1)
    ]

    parent_result_space = []
    if len(args.workspace_root) > 1:
        parent_result_space = ['/opt/ros/%s' % args.rosdistro_name] + \
            [mapping[1] for mapping in mapped_workspaces[:-1]]

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        'uid': get_user_id(),

        'build_tool': args.build_tool,
        'build_tool_args': args.build_tool_args,
        'build_tool_test_args': args.build_tool_test_args,
        'ros_version': args.ros_version,

        'build_environment_variables': [
            '%s=%s' % key_value for key_value in args.env_vars.items()],

        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,
        'install_lists': [],

        'testing': args.testing,
        'run_abichecker': args.run_abichecker,
        'require_gpu_support': args.require_gpu_support,
        'workspace_root': mapped_workspaces[-1][1],
        'parent_result_space': parent_result_space,
    }
    create_dockerfile(
        'devel/devel_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print(' -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    for mapping in mapped_workspaces:
        print(' -v %s:%s' % mapping)
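
# The workspace mapping above enumerates the given roots starting at 1, so
# the first workspace is mounted at /tmp/ws and later overlays at /tmp/ws2,
# /tmp/ws3, and so on. A standalone sketch with made-up paths:
def _demo_workspace_mapping():
    workspace_roots = ['/home/user/underlay_ws', '/home/user/overlay_ws']
    return [
        (workspace_root, '/tmp/ws%s' % (index if index > 1 else ''))
        for index, workspace_root in enumerate(workspace_roots, 1)]
    # -> [('/home/user/underlay_ws', '/tmp/ws'),
    #     ('/home/user/overlay_ws', '/tmp/ws2')]
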
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Lists available binary packages and versions which are '
                    'needed to satisfy rosdep keys for ROS packages in the '
                    'workspace')
    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)

    add_argument_output_dir(parser)
    add_argument_skip_rosdep_keys(parser)
    parser.add_argument(
        '--package-root', nargs='+',
        help='The path to the directory containing packages')
    args = parser.parse_args(argv)

    with Scope('SUBSECTION', 'Enumerating packages needed to build'):
        # find all of the underlay packages
        underlay_pkgs = {}
        for package_root in args.package_root[0:-1]:
            print("Crawling for packages in '%s'" % package_root)
            underlay_pkgs.update(find_packages(package_root))

        underlay_pkg_names = [pkg.name for pkg in underlay_pkgs.values()]
        print('Found the following underlay packages:')
        for pkg_name in sorted(underlay_pkg_names):
            print(' -', pkg_name)

        # get direct build dependencies
        package_root = args.package_root[-1]
        print("Crawling for packages in '%s'" % package_root)
        pkgs = find_packages(package_root)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print(' -', pkg_name)

        # get build dependencies and map them to binary packages
        all_pkgs = set(pkgs.values()).union(underlay_pkgs.values())

        for pkg in all_pkgs:
            pkg.evaluate_conditions(os.environ)
        for pkg in all_pkgs:
            for group_depend in pkg.group_depends:
                if group_depend.evaluated_condition:
                    group_depend.extract_group_members(all_pkgs)

        dependency_keys_build = get_dependencies(
            all_pkgs, 'build', _get_build_and_recursive_run_dependencies,
            pkgs.values())
        dependency_keys_test = get_dependencies(
            all_pkgs, 'run and test',
            _get_test_and_recursive_run_dependencies, pkgs.values())

        if args.skip_rosdep_keys:
            dependency_keys_build.difference_update(args.skip_rosdep_keys)
            dependency_keys_test.difference_update(args.skip_rosdep_keys)

        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        os_pkg_names_build = resolve_names(dependency_keys_build, **context)
        os_pkg_names_test = resolve_names(dependency_keys_test, **context)

        os_pkg_names_test -= os_pkg_names_build

    with Scope('SUBSECTION', 'Resolving package versions using apt cache'):
        apt_cache = Cache()
        os_pkg_versions = get_binary_package_versions(
            apt_cache, os_pkg_names_build | os_pkg_names_test)

    with open(os.path.join(args.output_dir, 'install_list_build.txt'), 'w') \
            as out_file:
        for package in sorted(os_pkg_names_build):
            out_file.write('%s=%s\n' % (package, os_pkg_versions[package]))
    with open(os.path.join(args.output_dir, 'install_list_test.txt'), 'w') \
            as out_file:
        for package in sorted(os_pkg_names_test):
            out_file.write('%s=%s\n' % (package, os_pkg_versions[package]))
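# The install lists written above use the 'name=version' pin format that
# 'apt-get install' accepts. A hypothetical reader for such a list
# (read_install_list is illustrative and not part of the sources above):
def read_install_list(path):
    pins = {}
    with open(path) as f:
        for line in f:
            # each line is expected to look like: package=version
            name, _, version = line.strip().partition('=')
            if name:
                pins[name] = version
    return pins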
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the CI job")

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_ignore(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_package_selection_args(parser)
    add_argument_repos_file_urls(parser, required=True)
    add_argument_skip_rosdep_keys(parser)
    add_argument_test_branch(parser)
    parser.add_argument(
        '--workspace-root',
        nargs='+',
        help='The root path of the workspace to compile')
    args = parser.parse_args(argv)

    debian_pkg_names = [
        'git',
        'python3-apt',
        'python3-colcon-common-extensions',
        'python3-rosdep',
        'python3-vcstool',
    ]

    # get versions for build dependencies
    apt_cache = Cache()
    debian_pkg_versions = get_binary_package_versions(
        apt_cache, debian_pkg_names)

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,
        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'rosdistro_name': args.rosdistro_name,
        'custom_rosdep_urls': [],
        'uid': get_user_id(),
        'build_environment_variables': args.env_vars,
        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,
        'repos_file_urls': args.repos_file_urls,
        'test_branch': args.test_branch,
        'skip_rosdep_keys': args.skip_rosdep_keys,
        'build_ignore': args.build_ignore,
        'package_selection_args': args.package_selection_args,
        'workspace_root': args.workspace_root,
    }
    create_dockerfile(
        'ci/create_workspace.Dockerfile.em', data, args.dockerfile_dir)
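# A minimal sketch of the get_user_id() helper used above, assuming it simply
# reports the invoking user's numeric UID so the generated Dockerfile can
# create a matching user and keep ownership of mounted volumes consistent;
# the actual ros_buildfarm implementation may differ.
import os


def get_user_id():
    return os.getuid()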