def main(argv=None):
    """Generate the Dockerfile which creates the tasks for a 'devel' job.

    Parses the job configuration from *argv*, augments the parsed arguments
    with distribution repository keys and the invoking user's uid, and writes
    a Dockerfile based on 'devel/devel_create_tasks.Dockerfile.em'.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
      (resolved at call time, not at import time)
    """
    # NOTE: the default is resolved lazily so mutations of sys.argv made
    # after import (e.g. by test harnesses) are picked up.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Run the 'devel' job")
    add_argument_rosdistro_index_url(parser, required=True)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_custom_rosdep_urls(parser)
    parser.add_argument(
        '--prerelease-overlay', action='store_true',
        help='Operate on two catkin workspaces')
    add_argument_build_tool(parser, required=True)
    add_argument_custom_rosdep_update_options(parser)
    add_argument_ros_version(parser)
    add_argument_env_vars(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_run_abichecker(parser)
    add_argument_require_gpu_support(parser)
    a1 = add_argument_build_tool_args(parser)
    a2 = add_argument_build_tool_test_args(parser)
    # the build tool (test) args consume the remainder of the command line,
    # so they are extracted before the regular argparse pass
    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    # start from a deep copy of all parsed arguments so the template gets
    # every option, then add/override the derived values
    data = copy.deepcopy(args.__dict__)
    data.update({
        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'custom_rosdep_urls': args.custom_rosdep_urls,
        'rosdep_update_options': args.custom_rosdep_update_options,
        'uid': get_user_id(),
    })
    create_dockerfile(
        'devel/devel_create_tasks.Dockerfile.em', data, args.dockerfile_dir)
def main(argv=None):
    """Invoke the build tool on a (possibly pre-existing) workspace.

    Optionally cleans the workspace before and/or after the invocation,
    builds with testing disabled, and — when requested and the build
    succeeded — runs the ABI checker.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
      (resolved at call time, not at import time)
    :returns: the return code of the build tool invocation
    """
    # NOTE: the default is resolved lazily so mutations of sys.argv made
    # after import (e.g. by test harnesses) are picked up.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description='Invoke the build tool on a workspace')
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced (if available)')
    add_argument_build_tool(parser, required=True)
    add_argument_build_tool_args(parser)
    add_argument_run_abichecker(parser)
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--parent-result-space', nargs='*',
        help='The paths of the parent result spaces')
    parser.add_argument(
        '--clean-before',
        action='store_true',
        help='The flag if the workspace should be cleaned before the '
             'invocation')
    parser.add_argument(
        '--clean-after',
        action='store_true',
        help='The flag if the workspace should be cleaned after the '
             'invocation')
    add_argument_ros_version(parser)
    args = parser.parse_args(argv)

    ensure_workspace_exists(args.workspace_root)

    if args.clean_before:
        clean_workspace(args.workspace_root)

    env = dict(os.environ)
    # serialize make unless the caller overrides it explicitly
    env.setdefault('MAKEFLAGS', '-j1')
    env.setdefault('ROS_DISTRO', args.rosdistro_name)

    try:
        with Scope('SUBSECTION', 'build workspace in isolation and install'):
            parent_result_spaces = None
            if args.parent_result_space:
                parent_result_spaces = args.parent_result_space
            # tests are disabled for both plain CMake and catkin packages
            rc = call_build_tool(
                args.build_tool, args.rosdistro_name, args.workspace_root,
                cmake_args=['-DBUILD_TESTING=0', '-DCATKIN_SKIP_TESTING=1'],
                args=args.build_tool_args,
                install=True,
                parent_result_spaces=parent_result_spaces, env=env)
    finally:
        # clean up even if the build tool invocation raised
        if args.clean_after:
            clean_workspace(args.workspace_root)

    # only run abi-checker after successful builds and when requested
    if not rc and args.run_abichecker:
        with Scope('SUBSECTION', 'use abi checker'):
            abi_rc = call_abi_checker(
                [args.workspace_root],
                args.ros_version,
                env)
        # Never fail a build because of abi errors but make them
        # unstable by printing MAKE_BUILD_UNSTABLE. Jenkins will
        # use a plugin to make it
        if abi_rc:
            print('MAKE_BUILD_UNSTABLE')
    return rc
def main(argv=None):
    """Generate a 'Dockerfile' for building/testing a devel workspace.

    Discovers the packages in the given workspace(s), resolves their build
    and run/test dependencies to Debian packages (pinned to the versions in
    the local apt cache), and writes a Dockerfile based on
    'devel/devel_task.Dockerfile.em'.  Finally prints the volume mappings
    required when running the generated container.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
      (resolved at call time, not at import time)
    """
    # NOTE: the default is resolved lazily so mutations of sys.argv made
    # after import (e.g. by test harnesses) are picked up.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the devel job")
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    parser.add_argument(
        '--workspace-root',
        nargs='+',
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'xenial')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_build_tool(parser, required=True)
    add_argument_ros_version(parser)
    add_argument_env_vars(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_run_abichecker(parser)
    add_argument_require_gpu_support(parser)
    a1 = add_argument_build_tool_args(parser)
    a2 = add_argument_build_tool_test_args(parser)
    parser.add_argument(
        '--testing',
        action='store_true',
        help='The flag if the workspace should be built with tests enabled '
             'and instead of installing the tests are ran')
    # the build tool (test) args consume the remainder of the command line,
    # so they are extracted before the regular argparse pass
    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    # the environment variables plus distro name/version form the context
    # for evaluating package manifest conditions
    condition_context = dict(args.env_vars)
    condition_context['ROS_DISTRO'] = args.rosdistro_name
    condition_context['ROS_VERSION'] = args.ros_version

    # get direct build dependencies
    pkgs = get_packages_in_workspaces(args.workspace_root, condition_context)
    pkg_names = [pkg.name for pkg in pkgs.values()]
    print("Found the following packages:")
    for pkg_name in sorted(pkg_names):
        print(' -', pkg_name)

    maintainer_emails = set([])
    for pkg in pkgs.values():
        for m in pkg.maintainers:
            maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))

    context = initialize_resolver(
        args.rosdistro_name, args.os_name, args.os_code_name)

    apt_cache = Cache()

    debian_pkg_names = [
        'build-essential',
        'python3',
    ]
    if args.build_tool == 'colcon':
        debian_pkg_names += [
            'python3-colcon-metadata',
            'python3-colcon-output',
            'python3-colcon-parallel-executor',
            'python3-colcon-ros',
            'python3-colcon-test-result',
        ]
    elif 'catkin' not in pkg_names:
        # catkin itself is only needed when it is not part of the workspace
        debian_pkg_names += resolve_names(['catkin'], **context)
    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print(' -', debian_pkg_name)

    debian_pkg_versions = {}

    # get build dependencies and map them to binary packages
    build_depends = get_dependencies(
        pkgs.values(), 'build', _get_build_and_recursive_run_dependencies)
    debian_pkg_names_building = resolve_names(build_depends, **context)
    debian_pkg_names_building -= set(debian_pkg_names)
    debian_pkg_names += order_dependencies(debian_pkg_names_building)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names))

    # get run and test dependencies and map them to binary packages
    run_and_test_depends = get_dependencies(
        pkgs.values(), 'run and test', _get_run_and_test_dependencies)
    debian_pkg_names_testing = resolve_names(run_and_test_depends, **context)
    # all additional run/test dependencies
    # are added after the build dependencies
    # in order to reuse existing images in the docker container
    debian_pkg_names_testing -= set(debian_pkg_names)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names_testing))
    if args.testing:
        debian_pkg_names += order_dependencies(debian_pkg_names_testing)

    # map each workspace to a path inside the container; the first workspace
    # gets '/tmp/ws', further ones '/tmp/ws2', '/tmp/ws3', ...
    mapped_workspaces = [
        (workspace_root, '/tmp/ws%s' % (index if index > 1 else ''))
        for index, workspace_root in enumerate(args.workspace_root, 1)
    ]
    parent_result_space = []
    if len(args.workspace_root) > 1:
        # all but the last workspace act as underlays for the last one
        parent_result_space = ['/opt/ros/%s' % args.rosdistro_name] + \
            [mapping[1] for mapping in mapped_workspaces[:-1]]

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        'uid': get_user_id(),

        'build_tool': args.build_tool,
        'build_tool_args': args.build_tool_args,
        'build_tool_test_args': args.build_tool_test_args,
        'ros_version': args.ros_version,

        'build_environment_variables': [
            '%s=%s' % key_value for key_value in args.env_vars.items()],

        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,
        'install_lists': [],

        'testing': args.testing,
        'run_abichecker': args.run_abichecker,
        'require_gpu_support': args.require_gpu_support,
        'workspace_root': mapped_workspaces[-1][1],
        'parent_result_space': parent_result_space,
    }
    create_dockerfile(
        'devel/devel_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print(' -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    for mapping in mapped_workspaces:
        print(' -v %s:%s' % mapping)
def main(argv=None):
    """Generate a 'Dockerfile' for the CI job.

    Unlike the devel variant this does not resolve dependencies itself: it
    writes a generic install list ('install_list.txt') and references the
    pre-generated 'install_list_build.txt' (and, when testing,
    'install_list_test.txt').  The Dockerfile is rendered from
    'devel/devel_task.Dockerfile.em' and the required volume mappings are
    printed at the end.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
      (resolved at call time, not at import time)
    """
    # NOTE: the default is resolved lazily so mutations of sys.argv made
    # after import (e.g. by test harnesses) are picked up.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the CI job")

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_tool(parser, required=True)
    a1 = add_argument_build_tool_args(parser)
    a2 = add_argument_build_tool_test_args(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_install_packages(parser)
    add_argument_ros_version(parser)
    add_argument_run_abichecker(parser)
    add_argument_require_gpu_support(parser)
    add_argument_testing(parser)
    parser.add_argument(
        '--workspace-root',
        nargs='+',
        help='The root path of the workspace to compile')
    # the build tool (test) args consume the remainder of the command line,
    # so they are extracted before the regular argparse pass
    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    apt_cache = Cache()

    debian_pkg_names = set(['build-essential'])
    debian_pkg_names.update(args.install_packages)
    if args.build_tool == 'colcon':
        debian_pkg_names.update([
            'python3-catkin-pkg-modules',
            'python3-colcon-metadata',
            'python3-colcon-output',
            'python3-colcon-package-selection',
            'python3-colcon-parallel-executor',
            'python3-colcon-ros',
            'python3-colcon-test-result',
            'python3-rosdistro-modules',
        ])

    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print(' -', debian_pkg_name)

    install_list = 'install_list.txt'
    write_install_list(
        os.path.join(args.dockerfile_dir, install_list),
        debian_pkg_names, apt_cache)
    install_lists = [install_list, 'install_list_build.txt']
    if args.testing:
        install_lists.append('install_list_test.txt')

    # map each workspace to a path inside the container; the first workspace
    # gets '/tmp/ws', further ones '/tmp/ws2', '/tmp/ws3', ...
    mapped_workspaces = [
        (workspace_root, '/tmp/ws%s' % (index if index > 1 else ''))
        for index, workspace_root in enumerate(args.workspace_root, 1)
    ]

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        'uid': get_user_id(),

        'build_tool': args.build_tool,
        'build_tool_args': args.build_tool_args,
        'build_tool_test_args': args.build_tool_test_args,
        'ros_version': args.ros_version,

        'build_environment_variables': [
            '%s=%s' % key_value for key_value in args.env_vars.items()],

        'install_lists': install_lists,
        'dependencies': [],
        'dependency_versions': [],

        'testing': args.testing,
        'run_abichecker': args.run_abichecker,
        'require_gpu_support': args.require_gpu_support,
        'workspace_root': mapped_workspaces[-1][1],
        # all but the last workspace act as parent result spaces
        'parent_result_space': [
            mapping[1] for mapping in mapped_workspaces[:-1]],
    }
    create_dockerfile(
        'devel/devel_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print(' -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    for mapping in mapped_workspaces:
        print(' -v %s:%s' % mapping)
def main(argv=None):
    """Generate a 'devel' script which reproduces the Jenkins job locally.

    Configures the devel job with template hooks installed so the SCM
    checkouts and shell build steps can be captured, rewrites the captured
    scripts where necessary (reuse an existing ros_buildfarm checkout,
    override the build tool and build tool (test) args), and prints the
    expanded 'devel/devel_script.sh.em' template.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
      (resolved at call time, not at import time)
    """
    # NOTE: the default is resolved lazily so mutations of sys.argv made
    # after import (e.g. by test harnesses) are picked up.
    if argv is None:
        argv = sys.argv[1:]
    build_tool_args_helper = build_tool_args_epilog_action(
        'source', get_source_build_files)
    parser = argparse.ArgumentParser(
        description="Generate a 'devel' script",
        formatter_class=argparse.RawTextHelpFormatter)
    add_argument_config_url(parser, action=build_tool_args_helper)
    add_argument_rosdistro_name(parser, action=build_tool_args_helper)
    add_argument_build_name(parser, 'source', action=build_tool_args_helper)
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser)
    add_argument_run_abichecker(parser)
    add_argument_require_gpu_support(parser)
    a1 = add_argument_build_tool_args(parser)
    a2 = add_argument_build_tool_test_args(parser)
    # the build tool (test) args consume the remainder of the command line,
    # so they are extracted before the regular argparse pass
    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            # captured (repo_spec, path) tuples from scm.xml.em includes
            self.scms = []
            # captured (and possibly rewritten) shell build steps
            self.scripts = []

        def beforeInclude(self, *_, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                # override the default build tool when one was requested
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                if (
                    args.build_tool_args is not None or
                    args.build_tool_test_args is not None
                ):
                    # replace the first matching 'export build_tool*_args='
                    # line with the value given on the command line
                    lines = script.splitlines()
                    for i, line in enumerate(lines):
                        if (
                            line.startswith('export build_tool_args=') and
                            args.build_tool_args is not None
                        ):
                            lines[i] = 'export build_tool_args="%s"' % (
                                ' '.join(args.build_tool_args))
                            break
                        if (
                            line.startswith('export build_tool_test_args=') and
                            args.build_tool_test_args is not None
                        ):
                            lines[i] = 'export build_tool_test_args="%s"' % (
                                ' '.join(args.build_tool_test_args))
                            break
                    script = '\n'.join(lines)
                self.scripts.append(script)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    # run the job configuration only to trigger the template hooks;
    # no Jenkins master is contacted (jenkins=False, views=False)
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        jenkins=False, views=False,
        run_abichecker=args.run_abichecker,
        require_gpu_support=args.require_gpu_support)

    templates.template_hooks = None

    devel_job_name = get_devel_job_name(
        args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch)

    value = expand_template(
        'devel/devel_script.sh.em', {
            'devel_job_name': devel_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts,
            'build_tool': args.build_tool or build_file.build_tool},
        options={BANGPATH_OPT: False})
    # use the interpreter running this script instead of a plain 'python3'
    value = value.replace('python3', sys.executable)
    print(value)