def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Run the 'doc' job")
    add_argument_config_url(parser)
    add_argument_rosdistro_index_url(parser, required=True)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser, required=True)
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_custom_rosdep_urls(parser)
    add_argument_force(parser)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    data = copy.deepcopy(args.__dict__)
    data.update({
        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'custom_rosdep_urls': args.custom_rosdep_urls,
        'uid': get_user_id(),
    })
    create_dockerfile(
        'doc/doc_create_task.Dockerfile.em', data, args.dockerfile_dir)
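Each of these entry points follows the same pattern: collect the parsed arguments into a data dict and hand it, together with an empy template name, to create_dockerfile. A minimal sketch of that step, assuming the helper simply expands the template into a Dockerfile in the output directory (the real ros_buildfarm helper may do more, such as adding wrapper scripts, and the import location of expand_template is an assumption):

import os

from ros_buildfarm.templates import expand_template


def create_dockerfile_sketch(template_name, data, dockerfile_dir):
    # Expand the .em template with the collected job data and write the
    # result as a Dockerfile into the output directory.
    content = expand_template(template_name, data)
    with open(os.path.join(dockerfile_dir, 'Dockerfile'), 'w') as h:
        h.write(content)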
Example #2
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Run the 'devel' job")
    add_argument_rosdistro_index_url(parser, required=True)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_custom_rosdep_urls(parser)
    parser.add_argument(
        '--prerelease-overlay',
        action='store_true',
        help='Operate on two catkin workspaces')
    add_argument_build_tool(parser, required=True)
    add_argument_ros_version(parser)
    add_argument_env_vars(parser)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    data = copy.deepcopy(args.__dict__)
    data.update({
        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'custom_rosdep_urls': args.custom_rosdep_urls,
        'uid': get_user_id(),
    })
    create_dockerfile(
        'devel/devel_create_tasks.Dockerfile.em', data, args.dockerfile_dir)
Example #3
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(description="Run the 'doc' job")
    add_argument_config_url(parser)
    add_argument_rosdistro_index_url(parser, required=True)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser, required=True)
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_custom_rosdep_urls(parser)
    add_argument_force(parser)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    data = copy.deepcopy(args.__dict__)
    data.update({
        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'custom_rosdep_urls': args.custom_rosdep_urls,
        'uid': get_user_id(),
    })
    create_dockerfile('doc/doc_create_task.Dockerfile.em', data,
                      args.dockerfile_dir)
Example #4
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Invoke the build tool on a workspace')
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced (if available)')
    add_argument_build_tool(parser, required=True)
    add_argument_build_tool_args(parser)
    parser.add_argument('--workspace-root',
                        required=True,
                        help='The root path of the workspace to compile')
    parser.add_argument('--parent-result-space',
                        nargs='*',
                        help='The paths of the parent result spaces')
    parser.add_argument(
        '--clean-before',
        action='store_true',
        help='The flag if the workspace should be cleaned before the '
        'invocation')
    parser.add_argument(
        '--clean-after',
        action='store_true',
        help='The flag if the workspace should be cleaned after the '
        'invocation')
    args = parser.parse_args(argv)

    ensure_workspace_exists(args.workspace_root)

    if args.clean_before:
        clean_workspace(args.workspace_root)

    try:
        with Scope('SUBSECTION', 'build workspace in isolation and install'):
            parent_result_spaces = None
            if args.parent_result_space:
                parent_result_spaces = args.parent_result_space
            env = dict(os.environ)
            env.setdefault('MAKEFLAGS', '-j1')
            rc = call_build_tool(
                args.build_tool,
                args.rosdistro_name,
                args.workspace_root,
                cmake_args=['-DBUILD_TESTING=0', '-DCATKIN_SKIP_TESTING=1'],
                args=args.build_tool_args,
                install=True,
                parent_result_spaces=parent_result_spaces,
                env=env)
    finally:
        if args.clean_after:
            clean_workspace(args.workspace_root)

    return rc
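A hypothetical direct invocation of the entry point above; the distro name and workspace path are placeholders, and --build-tool is the option added by add_argument_build_tool (its name is confirmed by the string replacements in the script-generating examples later in this listing):

# Illustrative only; values are placeholders.
rc = main([
    '--rosdistro-name', 'melodic',
    '--build-tool', 'colcon',
    '--workspace-root', '/tmp/ws',
    '--clean-before',
])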
Example #5
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Invoke the build tool on a workspace')
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced (if available)')
    add_argument_build_tool(parser, required=True)
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--parent-result-space', nargs='*',
        help='The paths of the parent result spaces')
    parser.add_argument(
        '--clean-before',
        action='store_true',
        help='The flag if the workspace should be cleaned before the '
             'invocation')
    parser.add_argument(
        '--clean-after',
        action='store_true',
        help='The flag if the workspace should be cleaned after the '
             'invocation')
    args = parser.parse_args(argv)

    ensure_workspace_exists(args.workspace_root)

    if args.clean_before:
        clean_workspace(args.workspace_root)

    try:
        with Scope('SUBSECTION', 'build workspace in isolation and install'):
            parent_result_spaces = None
            if args.parent_result_space:
                parent_result_spaces = args.parent_result_space
            env = dict(os.environ)
            env['MAKEFLAGS'] = '-j1'
            rc = call_build_tool(
                args.build_tool, args.rosdistro_name, args.workspace_root,
                cmake_args=['-DBUILD_TESTING=0', '-DCATKIN_SKIP_TESTING=1'],
                install=True,
                parent_result_spaces=parent_result_spaces, env=env)
    finally:
        if args.clean_after:
            clean_workspace(args.workspace_root)

    return rc
Example #6
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(description="Run the 'CI' job")

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_tool(parser, required=True)
    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_install_packages(parser)
    a1 = add_argument_package_selection_args(parser)
    a2 = add_argument_build_tool_args(parser)
    add_argument_repos_file_urls(parser)
    add_argument_repository_names(parser, optional=True)
    add_argument_ros_version(parser)
    add_argument_skip_rosdep_keys(parser)
    add_argument_test_branch(parser)
    parser.add_argument(
        '--workspace-mount-point',
        nargs='*',
        help='Locations within the docker image where the workspace(s) '
        'will be mounted when the docker image is run.')

    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    assert args.repos_file_urls or args.repository_names

    data = copy.deepcopy(args.__dict__)
    data.update({
        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'uid': get_user_id(),
    })
    create_dockerfile('ci/ci_create_tasks.Dockerfile.em', data,
                      args.dockerfile_dir)
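add_argument_package_selection_args and add_argument_build_tool_args return their argparse actions so that extract_multiple_remainders can strip the trailing "remainder" arguments out of argv before parse_args runs. A minimal sketch of that pattern, assuming each such flag consumes everything up to the next such flag or the end of argv (the real ros_buildfarm helper may differ):

def extract_multiple_remainders_sketch(argv, actions):
    # Find each remainder flag that actually occurs in argv.
    found = []
    for action in actions:
        for option_string in action.option_strings:
            if option_string in argv:
                found.append((argv.index(option_string), action))
                break
    # Strip from right to left so the earlier indices stay valid; argv is
    # pruned in place, matching how the callers re-parse it afterwards.
    remainders = {}
    for index, action in sorted(found, key=lambda pair: pair[0], reverse=True):
        remainders[action.dest] = argv[index + 1:]
        del argv[index:]
    return remainders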
Example #7
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Run the 'devel' job")
    add_argument_rosdistro_index_url(parser, required=True)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_custom_rosdep_urls(parser)
    parser.add_argument(
        '--prerelease-overlay',
        action='store_true',
        help='Operate on two catkin workspaces')
    add_argument_build_tool(parser, required=True)
    add_argument_custom_rosdep_update_options(parser)
    add_argument_ros_version(parser)
    add_argument_env_vars(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_run_abichecker(parser)
    add_argument_require_gpu_support(parser)
    a1 = add_argument_build_tool_args(parser)
    a2 = add_argument_build_tool_test_args(parser)

    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    data = copy.deepcopy(args.__dict__)
    data.update({
        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'custom_rosdep_urls': args.custom_rosdep_urls,
        'rosdep_update_options': args.custom_rosdep_update_options,
        'uid': get_user_id(),
    })
    create_dockerfile(
        'devel/devel_create_tasks.Dockerfile.em', data, args.dockerfile_dir)
Example #8
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Run the 'CI' job")

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_ignore(parser)
    add_argument_build_tool(parser, required=True)
    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_install_packages(parser)
    add_argument_package_selection_args(parser)
    add_argument_repos_file_urls(parser, required=True)
    add_argument_ros_version(parser)
    add_argument_skip_rosdep_keys(parser)
    add_argument_test_branch(parser)
    parser.add_argument(
        '--workspace-mount-point', nargs='*',
        help='Locations within the docker image where the workspace(s) '
             'will be mounted when the docker image is run.')
    args = parser.parse_args(argv)

    data = copy.deepcopy(args.__dict__)
    data.update({
        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'uid': get_user_id(),
    })
    create_dockerfile(
        'ci/ci_create_tasks.Dockerfile.em', data, args.dockerfile_dir)
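A hypothetical invocation of the example above; the first four values are positional (see the '# Positional' comment), and the option names are inferred from the add_argument_* helper names, so treat them as assumptions:

# Illustrative only; values and option spellings are placeholders.
main([
    'melodic', 'ubuntu', 'bionic', 'amd64',
    '--build-tool', 'colcon',
    '--ros-version', '1',
    '--repos-file-urls', 'https://example.com/ci.repos',
    '--dockerfile-dir', '/tmp/docker_generating_dockers',
])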
Example #9
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the devel job")
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced')
    parser.add_argument('--workspace-root',
                        nargs='+',
                        help='The root path of the workspace to compile')
    parser.add_argument('--os-name',
                        required=True,
                        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument('--os-code-name',
                        required=True,
                        help="The OS code name (e.g. 'xenial')")
    parser.add_argument('--arch',
                        required=True,
                        help="The architecture (e.g. 'amd64')")
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_build_tool(parser, required=True)
    add_argument_ros_version(parser)
    add_argument_env_vars(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_run_abichecker(parser)
    add_argument_require_gpu_support(parser)
    a1 = add_argument_build_tool_args(parser)
    a2 = add_argument_build_tool_test_args(parser)
    parser.add_argument(
        '--testing',
        action='store_true',
        help='The flag if the workspace should be built with tests enabled '
        'and, instead of installing, the tests are run')

    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    condition_context = dict(args.env_vars)
    condition_context['ROS_DISTRO'] = args.rosdistro_name
    condition_context['ROS_VERSION'] = args.ros_version

    # get direct build dependencies
    pkgs = get_packages_in_workspaces(args.workspace_root, condition_context)
    pkg_names = [pkg.name for pkg in pkgs.values()]
    print("Found the following packages:")
    for pkg_name in sorted(pkg_names):
        print('  -', pkg_name)

    maintainer_emails = set([])
    for pkg in pkgs.values():
        for m in pkg.maintainers:
            maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))

    context = initialize_resolver(args.rosdistro_name, args.os_name,
                                  args.os_code_name)

    apt_cache = Cache()

    debian_pkg_names = [
        'build-essential',
        'python3',
    ]
    if args.build_tool == 'colcon':
        debian_pkg_names += [
            'python3-colcon-metadata',
            'python3-colcon-output',
            'python3-colcon-parallel-executor',
            'python3-colcon-ros',
            'python3-colcon-test-result',
        ]
    elif 'catkin' not in pkg_names:
        debian_pkg_names += resolve_names(['catkin'], **context)
    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print('  -', debian_pkg_name)

    debian_pkg_versions = {}

    # get build dependencies and map them to binary packages
    build_depends = get_dependencies(
        pkgs.values(), 'build', _get_build_and_recursive_run_dependencies)
    debian_pkg_names_building = resolve_names(build_depends, **context)
    debian_pkg_names_building -= set(debian_pkg_names)
    debian_pkg_names += order_dependencies(debian_pkg_names_building)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names))

    # get run and test dependencies and map them to binary packages
    run_and_test_depends = get_dependencies(pkgs.values(), 'run and test',
                                            _get_run_and_test_dependencies)
    debian_pkg_names_testing = resolve_names(run_and_test_depends, **context)
    # all additional run/test dependencies are added after the build
    # dependencies in order to reuse existing Docker image layers
    debian_pkg_names_testing -= set(debian_pkg_names)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names_testing))
    if args.testing:
        debian_pkg_names += order_dependencies(debian_pkg_names_testing)

    mapped_workspaces = [
        (workspace_root, '/tmp/ws%s' % (index if index > 1 else ''))
        for index, workspace_root in enumerate(args.workspace_root, 1)
    ]

    parent_result_space = []
    if len(args.workspace_root) > 1:
        parent_result_space = ['/opt/ros/%s' % args.rosdistro_name] + \
            [mapping[1] for mapping in mapped_workspaces[:-1]]

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,
        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'rosdistro_name': args.rosdistro_name,
        'uid': get_user_id(),
        'build_tool': args.build_tool,
        'build_tool_args': args.build_tool_args,
        'build_tool_test_args': args.build_tool_test_args,
        'ros_version': args.ros_version,
        'build_environment_variables': [
            '%s=%s' % key_value for key_value in args.env_vars.items()],
        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,
        'install_lists': [],
        'testing': args.testing,
        'run_abichecker': args.run_abichecker,
        'require_gpu_support': args.require_gpu_support,
        'workspace_root': mapped_workspaces[-1][1],
        'parent_result_space': parent_result_space,
    }
    create_dockerfile('devel/devel_task.Dockerfile.em', data,
                      args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    for mapping in mapped_workspaces:
        print('  -v %s:%s' % mapping)
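The mapped_workspaces expression above numbers stacked workspaces ws, ws2, ws3, ... inside the container; a quick illustration with placeholder paths:

roots = ['/home/user/ws_underlay', '/home/user/ws_overlay']
mapped = [(root, '/tmp/ws%s' % (index if index > 1 else ''))
          for index, root in enumerate(roots, 1)]
# -> [('/home/user/ws_underlay', '/tmp/ws'),
#     ('/home/user/ws_overlay', '/tmp/ws2')]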
Example #10
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the CI job")

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_tool(parser, required=True)
    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_install_packages(parser)
    add_argument_ros_version(parser)
    add_argument_testing(parser)
    parser.add_argument(
        '--workspace-root', nargs='*',
        action=check_len_action(1, 2),
        help='The root path of the workspace to compile')
    args = parser.parse_args(argv)

    apt_cache = Cache()

    debian_pkg_names = set(['build-essential'])
    debian_pkg_names.update(args.install_packages)
    if args.build_tool == 'colcon':
        debian_pkg_names.update([
            'python3-catkin-pkg-modules',
            'python3-colcon-ros',
            'python3-colcon-test-result',
            'python3-rosdistro-modules',
        ])

    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print('  -', debian_pkg_name)

    install_list = 'install_list.txt'
    write_install_list(
        os.path.join(args.dockerfile_dir, install_list),
        debian_pkg_names, apt_cache)
    install_lists = [install_list, 'install_list_build.txt']
    if args.testing:
        install_lists.append('install_list_test.txt')

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        'uid': get_user_id(),

        'build_tool': args.build_tool,
        'ros_version': args.ros_version,

        'build_environment_variables': args.env_vars,

        'install_lists': install_lists,
        'dependencies': [],
        'dependency_versions': [],

        'testing': args.testing,
        'prerelease_overlay': len(args.workspace_root) > 1,
    }
    create_dockerfile(
        'devel/devel_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    if len(args.workspace_root) == 1:
        print('  -v %s:/tmp/ws' % args.workspace_root[0])
    else:
        for i, workspace_root in enumerate(args.workspace_root[0:-1]):
            print('  -v %s:/tmp/ws%s' % (workspace_root, i or ''))
        print('  -v %s:/tmp/ws_overlay' % args.workspace_root[-1])
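write_install_list is not shown in this listing; a plausible sketch, assuming it pins each Debian package to the version resolved from the apt cache (the name=version file format is an assumption):

def write_install_list_sketch(path, debian_pkg_names, apt_cache):
    # get_binary_package_versions is the same helper the other examples in
    # this listing use to resolve versions from the apt cache.
    versions = get_binary_package_versions(apt_cache, debian_pkg_names)
    with open(path, 'w') as h:
        for name in sorted(debian_pkg_names):
            h.write('%s=%s\n' % (name, versions[name]))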
Example #11
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(description="Generate a 'devel' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser)
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):
        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []

        def beforeInclude(self, *_, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    configure_devel_job(args.config_url,
                        args.rosdistro_name,
                        args.source_build_name,
                        args.repository_name,
                        args.os_name,
                        args.os_code_name,
                        args.arch,
                        config=config,
                        build_file=build_file,
                        jenkins=False,
                        views=False)

    templates.template_hooks = None

    devel_job_name = get_devel_job_name(args.rosdistro_name,
                                        args.source_build_name,
                                        args.repository_name, args.os_name,
                                        args.os_code_name, args.arch)

    value = expand_template(
        'devel/devel_script.sh.em', {
            'devel_job_name': devel_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts,
            'build_tool': args.build_tool or build_file.build_tool
        },
        options={BANGPATH_OPT: False})
    value = value.replace('python3', sys.executable)
    print(value)
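The IncludeHook above rewrites the generated shell snippets in place; the clone step, for example, ends up wrapped in a directory check. A small demonstration (the clone command is a placeholder):

script = '\n'.join([
    'echo "Clone ros_buildfarm"',
    'git clone https://github.com/ros-infrastructure/ros_buildfarm.git',
])
lines = script.splitlines()
lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
lines += ['else', 'echo "Using existing ros_buildfarm folder"', 'fi']
print('\n'.join(lines))
# if [ ! -d "ros_buildfarm" ]; then
# echo "Clone ros_buildfarm"
# git clone https://github.com/ros-infrastructure/ros_buildfarm.git
# else
# echo "Using existing ros_buildfarm folder"
# fi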
Example #12
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the devel job")
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    parser.add_argument(
        '--workspace-root',
        nargs='+',
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'xenial')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_build_tool(parser, required=True)
    add_argument_ros_version(parser)
    add_argument_env_vars(parser)
    add_argument_dockerfile_dir(parser)
    parser.add_argument(
        '--testing',
        action='store_true',
        help='The flag if the workspace should be built with tests enabled '
             'and, instead of installing, the tests are run')
    args = parser.parse_args(argv)

    # get direct build dependencies
    pkgs = {}
    for workspace_root in args.workspace_root:
        source_space = os.path.join(workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        ws_pkgs = find_packages(source_space)
        for pkg in ws_pkgs.values():
            pkg.evaluate_conditions({
                'ROS_DISTRO': args.rosdistro_name,
                'ROS_VERSION': args.ros_version,
            })
        pkgs.update(ws_pkgs)

    pkg_names = [pkg.name for pkg in pkgs.values()]
    print("Found the following packages:")
    for pkg_name in sorted(pkg_names):
        print('  -', pkg_name)

    maintainer_emails = set([])
    for pkg in pkgs.values():
        for m in pkg.maintainers:
            maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))

    context = initialize_resolver(
        args.rosdistro_name, args.os_name, args.os_code_name)

    apt_cache = Cache()

    debian_pkg_names = [
        'build-essential',
        'python3',
    ]
    if args.build_tool == 'colcon':
        debian_pkg_names += [
            'python3-colcon-ros',
            'python3-colcon-test-result',
        ]
    elif 'catkin' not in pkg_names:
        debian_pkg_names += resolve_names(['catkin'], **context)
    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print('  -', debian_pkg_name)

    debian_pkg_versions = {}

    # get build dependencies and map them to binary packages
    build_depends = get_dependencies(
        pkgs.values(), 'build', _get_build_and_recursive_run_dependencies)
    debian_pkg_names_building = resolve_names(build_depends, **context)
    debian_pkg_names_building -= set(debian_pkg_names)
    debian_pkg_names += order_dependencies(debian_pkg_names_building)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names))

    # get run and test dependencies and map them to binary packages
    run_and_test_depends = get_dependencies(
        pkgs.values(), 'run and test', _get_run_and_test_dependencies)
    debian_pkg_names_testing = resolve_names(
        run_and_test_depends, **context)
    # all additional run/test dependencies are added after the build
    # dependencies in order to reuse existing Docker image layers
    debian_pkg_names_testing -= set(debian_pkg_names)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names_testing))
    if args.testing:
        debian_pkg_names += order_dependencies(debian_pkg_names_testing)

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        'uid': get_user_id(),

        'build_tool': args.build_tool,
        'ros_version': args.ros_version,

        'build_environment_variables': args.env_vars,

        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,
        'install_lists': [],

        'testing': args.testing,
        'prerelease_overlay': len(args.workspace_root) > 1,
    }
    create_dockerfile(
        'devel/devel_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print('  -v %s:/tmp/ws' % args.workspace_root[-1])
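Unlike the earlier variant that calls get_packages_in_workspaces, this example crawls each workspace with the catkin_pkg API directly; evaluate_conditions must run before the dependency traversal so that conditional dependencies in format-3 manifests (e.g. <depend condition="$ROS_VERSION == 2">) are resolved. In isolation:

from catkin_pkg.packages import find_packages

pkgs = find_packages('/tmp/ws/src')
for pkg in pkgs.values():
    # Resolve condition attributes against this distro/version context.
    pkg.evaluate_conditions({'ROS_DISTRO': 'melodic', 'ROS_VERSION': '1'})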
Example #13
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser)
    add_argument_output_dir(parser, required=True)

    group = parser.add_argument_group(
        'Repositories in underlay workspace',
        description='The repositories in the underlay workspace will be ' +
        'built and installed as well as built and tested. ' +
        'Dependencies will be provided by binary packages.')
    group.add_argument(
        'source_repos',
        nargs='*',
        default=[],
        metavar='REPO_NAME',
        help="A name of a 'repository' from the distribution file")
    group.add_argument(
        '--custom-branch',
        nargs='*',
        type=_repository_name_and_branch,
        default=[],
        metavar='REPO_NAME:BRANCH_OR_TAG_NAME',
        help="A name of a 'repository' from the distribution file followed " +
        'by a colon and a branch / tag name')
    group.add_argument(
        '--custom-repo',
        nargs='*',
        type=_repository_name_and_type_and_url_and_branch,
        default=[],
        metavar='REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME',
        help='The name, type, url and branch / tag name of a repository, '
        'e.g. "common_tutorials:git:https://github.com/ros/common_tutorials:pullrequest-1"'
    )

    add_overlay_arguments(parser)

    args = parser.parse_args(argv)

    print('Fetching buildfarm configuration...')
    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    print('Fetching rosdistro cache...')
    # Targets defined by the source build file are a subset of the targets
    # defined by the release build files. To increase the number of supported
    # pre-release targets, we combine all targets defined by all release
    # build files and use that when configuring the devel job.
    release_build_files = get_release_build_files(config, args.rosdistro_name)
    release_targets_combined = {}
    if release_build_files:
        release_targets_combined[args.os_name] = {}
        for build_name, rel_obj in release_build_files.items():
            if args.os_name not in rel_obj.targets:
                continue
            for dist_name, targets in rel_obj.targets[args.os_name].items():
                if dist_name not in release_targets_combined[args.os_name]:
                    release_targets_combined[args.os_name][dist_name] = {}
                release_targets_combined[args.os_name][dist_name].update(
                    targets)

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for underlay workspace
    repositories = {}
    for repo_name in args.source_repos:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            repositories[repo_name] = \
                dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name,
                  file=sys.stderr)
            return 1
        if not repositories[repo_name]:
            print(("The repository '%s' has no source entry in the " +
                   "distribution file") % repo_name,
                  file=sys.stderr)
            return 1

    for repo_name, custom_version in args.custom_branch:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            source_repo = dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name,
                  file=sys.stderr)
            return 1
        if not source_repo:
            print(("The repository '%s' has no source entry in the " +
                   "distribution file") % repo_name,
                  file=sys.stderr)
            return 1
        source_repo = deepcopy(source_repo)
        source_repo.version = custom_version
        repositories[repo_name] = source_repo

    for repo_name, repo_type, repo_url, version in args.custom_repo:
        if repo_name in repositories and repositories[repo_name]:
            print("custom_repos option overriding '%s' to pull via '%s' "
                  "from '%s' with version '%s'. " %
                  (repo_name, repo_type, repo_url, version),
                  file=sys.stderr)
        source_repo = RepositorySpecification(repo_name, {
            'type': repo_type,
            'url': repo_url,
            'version': version,
        })
        repositories[repo_name] = source_repo

    scms = [(repositories[k], 'ws/src/%s' % k)
            for k in sorted(repositories.keys())]

    # collect all template snippets of specific types
    class IncludeHook(Hook):
        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeInclude(self, *_, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    # pick a representative source repo to pass to the devel job template
    if index.distributions[args.rosdistro_name].get('distribution_type',
                                                    'ros1') == 'ros1':
        package_name = 'catkin'
    elif index.distributions[args.rosdistro_name].get('distribution_type',
                                                      'ros1') == 'ros2':
        package_name = 'ros_workspace'
    else:
        assert False, 'Unsupported ROS version ' + \
            str(index.distributions[args.rosdistro_name].get('distribution_type', None))
    source_repository = deepcopy(
        dist_file.repositories[package_name].source_repository)
    if not source_repository:
        print((
            "The repository '%s' does not have a source entry in the distribution "
            + 'file. We cannot generate a prerelease without a source entry.')
              % package_name,
              file=sys.stderr)
        return 1
    source_repository.name = 'prerelease'
    print('Evaluating job templates...')
    configure_devel_job(args.config_url,
                        args.rosdistro_name,
                        args.source_build_name,
                        None,
                        args.os_name,
                        args.os_code_name,
                        args.arch,
                        config=config,
                        build_file=build_file,
                        index=index,
                        dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=False,
                        views=False,
                        source_repository=source_repository,
                        build_targets=release_targets_combined)

    templates.template_hooks = None

    # derive scripts for overlay workspace from underlay
    overlay_scripts = []
    for script in hook.scripts:
        # skip cloning of ros_buildfarm repository
        if 'git clone' in script and '.git ros_buildfarm' in script:
            continue
        # skip build-and-install step
        if 'build and install' in script:
            continue

        # add prerelease overlay flag
        run_devel_job = '/run_devel_job.py'
        if run_devel_job in script:
            script = script.replace(run_devel_job,
                                    run_devel_job + ' --prerelease-overlay')

        # replace mounted workspace volume with overlay and underlay
        # used by:
        # - create_devel_task_generator.py needs to find packages in both
        # the underlay as well as the overlay workspace
        # - build_and_test.py needs to source the environment of
        # the underlay before building the overlay
        mount_volume = '-v $WORKSPACE/ws:/tmp/ws'
        if mount_volume in script:
            script = script.replace(
                mount_volume, mount_volume + ':ro ' + '-v $WORKSPACE/' +
                'ws_overlay:/tmp/ws_overlay')

        # relocate all docker files
        docker_path = '$WORKSPACE/docker_'
        if docker_path in script:
            script = script.replace(docker_path, docker_path + 'overlay_')

        # rename all docker images
        name_suffix = '_prerelease'
        if name_suffix in script:
            script = script.replace(name_suffix, name_suffix + '_overlay')

        overlay_scripts.append(script)

    from ros_buildfarm import __file__ as ros_buildfarm_file
    data = deepcopy(args.__dict__)
    data.update({
        'scms': scms,
        'scripts': hook.scripts,
        'overlay_scripts': overlay_scripts,
        'ros_buildfarm_python_path': os.path.dirname(
            os.path.dirname(os.path.abspath(ros_buildfarm_file))),
        'python_executable': sys.executable,
        'prerelease_script_path': os.path.dirname(os.path.abspath(__file__)),
        'build_tool': args.build_tool or build_file.build_tool,
    })

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # generate multiple scripts
    for script_name in [
            'prerelease', 'prerelease_build_overlay',
            'prerelease_build_underlay', 'prerelease_clone_overlay',
            'prerelease_clone_underlay']:
        content = expand_template(
            'prerelease/%s_script.sh.em' % script_name, data,
            options={BANGPATH_OPT: False})
        script_file = os.path.join(args.output_dir, script_name + '.sh')
        with open(script_file, 'w') as h:
            h.write(content)
        os.chmod(script_file, os.stat(script_file).st_mode | stat.S_IEXEC)

    print('')
    print('Generated prerelease script - to execute it run:')
    if os.path.abspath(args.output_dir) != os.path.abspath(os.curdir):
        print('  cd %s' % args.output_dir)
    print('  ./prerelease.sh')
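The type= callbacks _repository_name_and_branch and _repository_name_and_type_and_url_and_branch are not included in this listing. Minimal sketches consistent with how their results are unpacked above (a 2-tuple and a 4-tuple; error handling omitted and the exact splitting rules are assumptions):

def _repository_name_and_branch(value):
    # 'REPO_NAME:BRANCH_OR_TAG_NAME' -> (name, branch)
    name, _, branch = value.partition(':')
    return name, branch


def _repository_name_and_type_and_url_and_branch(value):
    # 'REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME' -> 4-tuple; split
    # from both ends because the URL itself contains colons.
    name, repo_type, rest = value.split(':', 2)
    url, branch = rest.rsplit(':', 1)
    return name, repo_type, url, branch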
Example #14
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Invoke the build tool on a workspace while enabling and '
                    'running the tests')
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced (if available)')
    add_argument_build_tool(parser, required=True)
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--parent-result-space', nargs='*',
        help='The paths of the parent result spaces')
    parser.add_argument(
        '--clean-before',
        action='store_true',
        help='The flag if the workspace should be cleaned before the '
             'invocation')
    parser.add_argument(
        '--clean-after',
        action='store_true',
        help='The flag if the workspace should be cleaned after the '
             'invocation')
    args = parser.parse_args(argv)

    ensure_workspace_exists(args.workspace_root)

    if args.clean_before:
        clean_workspace(args.workspace_root)

    parent_result_spaces = None
    if args.parent_result_space:
        parent_result_spaces = args.parent_result_space

    try:
        with Scope('SUBSECTION', 'build workspace in isolation'):
            test_results_dir = os.path.join(
                args.workspace_root, 'test_results')
            cmake_args = [
                '-DBUILD_TESTING=1',
                '-DCATKIN_ENABLE_TESTING=1', '-DCATKIN_SKIP_TESTING=0',
                '-DCATKIN_TEST_RESULTS_DIR=%s' % test_results_dir]
            additional_args = None
            if args.build_tool == 'colcon':
                additional_args = ['--test-result-base', test_results_dir]
            env = dict(os.environ)
            env['MAKEFLAGS'] = '-j1'
            rc = call_build_tool(
                args.build_tool, args.rosdistro_name, args.workspace_root,
                cmake_clean_cache=True,
                cmake_args=cmake_args, args=additional_args,
                parent_result_spaces=parent_result_spaces, env=env)
        if not rc:
            with Scope('SUBSECTION', 'build tests'):
                additional_args = None
                if args.build_tool == 'colcon':
                    additional_args = ['--cmake-target-skip-unavailable']
                rc = call_build_tool(
                    args.build_tool, args.rosdistro_name, args.workspace_root,
                    cmake_args=cmake_args,
                    make_args=['tests'], args=additional_args,
                    parent_result_spaces=parent_result_spaces, env=env)
            if not rc:
                make_args = ['run_tests']
                additional_args = None
                if args.build_tool == 'colcon':
                    cmake_args = None
                    make_args = None
                    additional_args = ['--test-result-base', test_results_dir]
                # for workspaces with only plain cmake packages the setup
                # files generated by catkin_make_isolated won't implicitly
                # source the underlays
                if parent_result_spaces is None:
                    parent_result_spaces = ['/opt/ros/%s' % args.rosdistro_name]
                if args.build_tool == 'catkin_make_isolated':
                    devel_space = os.path.join(
                        args.workspace_root, 'devel_isolated')
                    parent_result_spaces.append(devel_space)
                # since catkin_make_isolated doesn't provide a custom
                # environment to run tests this needs to source the devel space
                # and force a CMake run to use the new environment
                with Scope('SUBSECTION', 'run tests'):
                    rc = call_build_tool(
                        args.build_tool,
                        args.rosdistro_name, args.workspace_root,
                        cmake_args=cmake_args,
                        force_cmake=args.build_tool == 'catkin_make_isolated',
                        make_args=make_args, args=additional_args,
                        parent_result_spaces=parent_result_spaces, env=env,
                        colcon_verb='test')
    finally:
        if args.clean_after:
            clean_workspace(args.workspace_root)

    return rc
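A hypothetical direct invocation, mirroring the earlier build-and-install example (all values are placeholders):

rc = main([
    '--rosdistro-name', 'melodic',
    '--build-tool', 'catkin_make_isolated',
    '--workspace-root', '/tmp/ws',
])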
Example #15
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(description="Generate a 'CI' script")

    # Positional
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'ci')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_tool(parser)
    a1 = add_argument_package_selection_args(parser)
    a2 = add_argument_build_tool_args(parser)
    a3 = add_argument_build_tool_test_args(parser)
    add_argument_repos_file_urls(parser)
    add_argument_skip_cleanup(parser)
    add_argument_test_branch(parser)
    parser.add_argument(
        '--underlay-source-path',
        nargs='*',
        metavar='DIR_NAME',
        help='Path to one or more install spaces to use as an underlay')

    remainder_args = extract_multiple_remainders(argv, (a1, a2, a3))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    # collect all template snippets of specific types
    class IncludeHook(Hook):
        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []
            self.parameters = {}

            if args.skip_cleanup:
                self.parameters['skip_cleanup'] = 'true'
            if args.repos_file_urls is not None:
                self.parameters['repos_file_urls'] = ' '.join(
                    args.repos_file_urls)
            if args.test_branch is not None:
                self.parameters['test_branch'] = args.test_branch
            if args.package_selection_args is not None:
                self.parameters['package_selection_args'] = ' '.join(
                    args.package_selection_args)
            if args.build_tool_args is not None:
                self.parameters['build_tool_args'] = ' '.join(
                    args.build_tool_args)
            if args.build_tool_test_args is not None:
                self.parameters['build_tool_test_args'] = ' '.join(
                    args.build_tool_test_args)

        def beforeInclude(self, *_, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)
            if template_path.endswith(
                    '/snippet/property_parameters-definition.xml.em'):
                for parameter in reversed(kwargs['locals']['parameters']):
                    name = parameter['name']
                    value_type = parameter['type']
                    if value_type in ['string', 'text']:
                        default_value = parameter['default_value']
                    elif value_type == 'boolean':
                        default_value = 'true' if parameter.get(
                            'default_value', False) else 'false'
                    else:
                        continue

                    self.parameters.setdefault(name, default_value)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    config = get_config_index(args.config_url)
    build_files = get_ci_build_files(config, args.rosdistro_name)
    build_file = build_files[args.ci_build_name]

    underlay_source_paths = [
        os.path.abspath(p) for p in args.underlay_source_path or []
    ]

    configure_ci_job(args.config_url,
                     args.rosdistro_name,
                     args.ci_build_name,
                     args.os_name,
                     args.os_code_name,
                     args.arch,
                     config=config,
                     build_file=build_file,
                     jenkins=False,
                     views=False,
                     underlay_source_paths=underlay_source_paths)

    templates.template_hooks = None

    ci_job_name = get_ci_job_name(args.rosdistro_name, args.os_name,
                                  args.os_code_name, args.arch, 'script')

    value = expand_template(
        'ci/ci_script.sh.em', {
            'ci_job_name': ci_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts,
            'build_tool': args.build_tool or build_file.build_tool,
            'parameters': hook.parameters,
        },
        options={BANGPATH_OPT: False})
    value = value.replace('python3 ', sys.executable + ' ')
    print(value)
Example #16
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the CI job")

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_tool(parser, required=True)
    add_argument_build_tool_args(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_install_packages(parser)
    add_argument_ros_version(parser)
    add_argument_testing(parser)
    parser.add_argument(
        '--workspace-root', nargs='*',
        action=check_len_action(1, 2),
        help='The root path of the workspace to compile')
    args = parser.parse_args(argv)

    apt_cache = Cache()

    debian_pkg_names = set(['build-essential'])
    debian_pkg_names.update(args.install_packages)
    if args.build_tool == 'colcon':
        debian_pkg_names.update([
            'python3-catkin-pkg-modules',
            'python3-colcon-output',
            'python3-colcon-parallel-executor',
            'python3-colcon-ros',
            'python3-colcon-test-result',
            'python3-rosdistro-modules',
        ])

    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print('  -', debian_pkg_name)

    install_list = 'install_list.txt'
    write_install_list(
        os.path.join(args.dockerfile_dir, install_list),
        debian_pkg_names, apt_cache)
    install_lists = [install_list, 'install_list_build.txt']
    if args.testing:
        install_lists.append('install_list_test.txt')

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        'uid': get_user_id(),

        'build_tool': args.build_tool,
        'build_tool_args': args.build_tool_args,
        'ros_version': args.ros_version,

        'build_environment_variables': args.env_vars,

        'install_lists': install_lists,
        'dependencies': [],
        'dependency_versions': [],

        'testing': args.testing,
        'prerelease_overlay': len(args.workspace_root) > 1,
    }
    create_dockerfile(
        'devel/devel_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    if len(args.workspace_root) == 1:
        print('  -v %s:/tmp/ws' % args.workspace_root[0])
    else:
        for i, workspace_root in enumerate(args.workspace_root[0:-1]):
            print('  -v %s:/tmp/ws%s' % (workspace_root, i or ''))
        print('  -v %s:/tmp/ws_overlay' % args.workspace_root[-1])
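For two stacked workspaces the mount hints printed above come out roughly as follows (paths are placeholders):

  -v /path/to/ros_buildfarm:/tmp/ros_buildfarm:ro
  -v /home/user/ws_underlay:/tmp/ws
  -v /home/user/ws_overlay:/tmp/ws_overlay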
Example #17
def main(argv=sys.argv[1:]):
    build_tool_args_helper = build_tool_args_epilog_action(
        'source', get_source_build_files)
    parser = argparse.ArgumentParser(
        description="Generate a 'devel' script",
        formatter_class=argparse.RawTextHelpFormatter)
    add_argument_config_url(parser, action=build_tool_args_helper)
    add_argument_rosdistro_name(parser, action=build_tool_args_helper)
    add_argument_build_name(parser, 'source', action=build_tool_args_helper)
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser)
    add_argument_run_abichecker(parser)
    add_argument_require_gpu_support(parser)
    a1 = add_argument_build_tool_args(parser)
    a2 = add_argument_build_tool_test_args(parser)

    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []

        def beforeInclude(self, *_, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                if args.build_tool_args is not None or args.build_tool_test_args is not None:
                    lines = script.splitlines()
                    for i, line in enumerate(lines):
                        if (
                            line.startswith('export build_tool_args=') and
                            args.build_tool_args is not None
                        ):
                            lines[i] = 'export build_tool_args="%s"' % (
                                ' '.join(args.build_tool_args))
                            break
                        if (
                            line.startswith('export build_tool_test_args=') and
                            args.build_tool_test_args is not None
                        ):
                            lines[i] = 'export build_tool_test_args="%s"' % (
                                ' '.join(args.build_tool_test_args))
                            break
                    script = '\n'.join(lines)
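                # e.g. a template line 'export build_tool_args=""' would
                # become (with hypothetical arguments):
                #   export build_tool_args="--cmake-args -DCMAKE_BUILD_TYPE=Release"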

                self.scripts.append(script)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file, jenkins=False, views=False,
        run_abichecker=args.run_abichecker,
        require_gpu_support=args.require_gpu_support)

    templates.template_hooks = None

    devel_job_name = get_devel_job_name(
        args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch)

    value = expand_template(
        'devel/devel_script.sh.em', {
            'devel_job_name': devel_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts,
            'build_tool': args.build_tool or build_file.build_tool},
        options={BANGPATH_OPT: False})
    # only replace standalone invocations so package names like
    # 'python3-colcon-ros' are not mangled
    value = value.replace('python3 ', sys.executable + ' ')
    print(value)
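# A minimal usage sketch (values are hypothetical; the exact positional
# argument order is defined by the add_argument_* helpers and is an
# assumption here):
#
#   main([
#       'https://example.com/ros_buildfarm_config/index.yaml',  # config_url
#       'melodic',    # rosdistro_name
#       'default',    # source_build_name
#       'ros_comm',   # repository_name
#       'ubuntu', 'bionic', 'amd64',
#   ])
#
# prints a self-contained devel script to stdout.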
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'CI' script")

    # Positional
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'ci')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_ignore(parser)
    add_argument_build_tool(parser)
    add_argument_package_selection_args(parser)
    add_argument_repos_file_urls(parser)
    add_argument_skip_cleanup(parser)
    add_argument_test_branch(parser)
    parser.add_argument(
        '--underlay-source-path', nargs='*', metavar='DIR_NAME',
        help='Path to one or more install spaces to use as an underlay')
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []
            self.parameters = {}

            if args.skip_cleanup:
                self.parameters['skip_cleanup'] = 'true'
            if args.repos_file_urls is not None:
                self.parameters['repos_file_urls'] = ' '.join(args.repos_file_urls)
            if args.test_branch is not None:
                self.parameters['test_branch'] = args.test_branch
            if args.build_ignore is not None:
                self.parameters['build_ignore'] = ' '.join(args.build_ignore)
            if args.package_selection_args is not None:
                self.parameters['package_selection_args'] = ' '.join(args.package_selection_args)

        def beforeInclude(self, *_, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)
            if template_path.endswith('/snippet/property_parameters-definition.xml.em'):
                for parameter in reversed(kwargs['locals']['parameters']):
                    name = parameter['name']
                    value_type = parameter['type']
                    if value_type in ['string', 'text']:
                        default_value = parameter['default_value']
                    elif value_type == 'boolean':
                        default_value = 'true' if parameter.get(
                            'default_value', False) else 'false'
                    else:
                        continue

                    self.parameters.setdefault(name, default_value)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    config = get_config_index(args.config_url)
    build_files = get_ci_build_files(config, args.rosdistro_name)
    build_file = build_files[args.ci_build_name]

    underlay_source_paths = [os.path.abspath(p) for p in args.underlay_source_path or []]

    configure_ci_job(
        args.config_url, args.rosdistro_name, args.ci_build_name,
        args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file, jenkins=False, views=False,
        underlay_source_paths=underlay_source_paths)

    templates.template_hooks = None

    ci_job_name = get_ci_job_name(
        args.rosdistro_name, args.os_name,
        args.os_code_name, args.arch, 'script')

    value = expand_template(
        'ci/ci_script.sh.em', {
            'ci_job_name': ci_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts,
            'build_tool': args.build_tool or build_file.build_tool,
            'parameters': hook.parameters},
        options={BANGPATH_OPT: False})
    value = value.replace('python3 ', sys.executable + ' ')
    print(value)
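# A minimal usage sketch (values are hypothetical; the positional order
# defined by the add_argument_* helpers is an assumption here):
#
#   main([
#       'https://example.com/ros_buildfarm_config/index.yaml',  # config_url
#       'foxy',       # rosdistro_name
#       'default',    # ci_build_name
#       'ubuntu', 'focal', 'amd64',
#   ])
#
# prints the generated CI script to stdout.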
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the devel job")
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced')
    parser.add_argument('--workspace-root',
                        nargs='+',
                        help='The root path of the workspace to compile')
    parser.add_argument('--os-name',
                        required=True,
                        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument('--os-code-name',
                        required=True,
                        help="The OS code name (e.g. 'xenial')")
    parser.add_argument('--arch',
                        required=True,
                        help="The architecture (e.g. 'amd64')")
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_build_tool(parser, required=True)
    add_argument_ros_version(parser)
    add_argument_env_vars(parser)
    add_argument_dockerfile_dir(parser)
    parser.add_argument(
        '--testing',
        action='store_true',
        help='Flag whether the workspace should be built with tests enabled '
        'and, instead of being installed, the tests are run')
    args = parser.parse_args(argv)

    # get direct build dependencies
    pkgs = {}
    for workspace_root in args.workspace_root:
        source_space = os.path.join(workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs.update(find_packages(source_space))

    pkg_names = [pkg.name for pkg in pkgs.values()]
    print("Found the following packages:")
    for pkg_name in sorted(pkg_names):
        print('  -', pkg_name)

    maintainer_emails = set([])
    for pkg in pkgs.values():
        for m in pkg.maintainers:
            maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))

    context = initialize_resolver(args.rosdistro_name, args.os_name,
                                  args.os_code_name)

    apt_cache = Cache()

    debian_pkg_names = [
        'build-essential',
        'python3',
    ]
    if args.build_tool == 'colcon':
        debian_pkg_names += [
            'python3-colcon-ros',
            'python3-colcon-test-result',
        ]
    elif 'catkin' not in pkg_names:
        debian_pkg_names += resolve_names(['catkin'], **context)
    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print('  -', debian_pkg_name)

    debian_pkg_versions = {}

    # get build dependencies and map them to binary packages
    build_depends = get_dependencies(
        pkgs.values(), 'build', _get_build_and_recursive_run_dependencies)
    debian_pkg_names_building = resolve_names(build_depends, **context)
    debian_pkg_names_building -= set(debian_pkg_names)
    debian_pkg_names += order_dependencies(debian_pkg_names_building)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names))

    # get run and test dependencies and map them to binary packages
    run_and_test_depends = get_dependencies(pkgs.values(), 'run and test',
                                            _get_run_and_test_dependencies)
    debian_pkg_names_testing = resolve_names(run_and_test_depends, **context)
    # all additional run/test dependencies
    # are added after the build dependencies
    # in order to reuse existing images in the docker container
    debian_pkg_names_testing -= set(debian_pkg_names)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names_testing))
    if args.testing:
        debian_pkg_names += order_dependencies(debian_pkg_names_testing)

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,
        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'rosdistro_name': args.rosdistro_name,
        'uid': get_user_id(),
        'build_tool': args.build_tool,
        'ros_version': args.ros_version,
        'build_environment_variables': args.env_vars,
        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,
        'testing': args.testing,
        'prerelease_overlay': len(args.workspace_root) > 1,
    }
    create_dockerfile('devel/devel_task.Dockerfile.em', data,
                      args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print('  -v %s:/tmp/ws' % args.workspace_root[-1])
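# A minimal usage sketch (paths are hypothetical; '--build-tool',
# '--ros-version' and '--dockerfile-dir' are added by the add_argument_*
# helpers, which is an assumption here):
#
#   main([
#       '--rosdistro-name', 'melodic',
#       '--workspace-root', '/tmp/ws',
#       '--os-name', 'ubuntu',
#       '--os-code-name', 'bionic',
#       '--arch', 'amd64',
#       '--build-tool', 'catkin_make_isolated',
#       '--ros-version', '1',
#       '--dockerfile-dir', '/tmp/docker_devel',
#   ])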
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'devel' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser)
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []

        def beforeInclude(self, *_, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file, jenkins=False, views=False)

    templates.template_hooks = None

    devel_job_name = get_devel_job_name(
        args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch)

    value = expand_template(
        'devel/devel_script.sh.em', {
            'devel_job_name': devel_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts,
            'build_tool': args.build_tool or build_file.build_tool},
        options={BANGPATH_OPT: False})
    # only replace standalone invocations so package names like
    # 'python3-colcon-ros' are not mangled
    value = value.replace('python3 ', sys.executable + ' ')
    print(value)
Example #21
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the CI job")

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_tool(parser, required=True)
    a1 = add_argument_build_tool_args(parser)
    a2 = add_argument_build_tool_test_args(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_install_packages(parser)
    add_argument_ros_version(parser)
    add_argument_run_abichecker(parser)
    add_argument_require_gpu_support(parser)
    add_argument_testing(parser)
    parser.add_argument('--workspace-root',
                        nargs='+',
                        help='The root path of the workspace to compile')

    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    apt_cache = Cache()

    debian_pkg_names = set(['build-essential'])
    debian_pkg_names.update(args.install_packages)
    if args.build_tool == 'colcon':
        debian_pkg_names.update([
            'python3-catkin-pkg-modules',
            'python3-colcon-metadata',
            'python3-colcon-output',
            'python3-colcon-package-selection',
            'python3-colcon-parallel-executor',
            'python3-colcon-ros',
            'python3-colcon-test-result',
            'python3-rosdistro-modules',
        ])

    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print('  -', debian_pkg_name)

    install_list = 'install_list.txt'
    write_install_list(os.path.join(args.dockerfile_dir, install_list),
                       debian_pkg_names, apt_cache)
    install_lists = [install_list, 'install_list_build.txt']
    if args.testing:
        install_lists.append('install_list_test.txt')

    mapped_workspaces = [
        (workspace_root, '/tmp/ws%s' % (index if index > 1 else ''))
        for index, workspace_root in enumerate(args.workspace_root, 1)
    ]
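    # For example, two hypothetical workspace roots map to
    #   [('/home/user/ws_a', '/tmp/ws'), ('/home/user/ws_b', '/tmp/ws2')];
    # only the first workspace omits the numeric suffix.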

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,
        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'rosdistro_name': args.rosdistro_name,
        'uid': get_user_id(),
        'build_tool': args.build_tool,
        'build_tool_args': args.build_tool_args,
        'build_tool_test_args': args.build_tool_test_args,
        'ros_version': args.ros_version,
        'build_environment_variables': [
            '%s=%s' % key_value for key_value in args.env_vars.items()],
        'install_lists': install_lists,
        'dependencies': [],
        'dependency_versions': [],
        'testing': args.testing,
        'run_abichecker': args.run_abichecker,
        'require_gpu_support': args.require_gpu_support,
        'workspace_root': mapped_workspaces[-1][1],
        'parent_result_space': [
            mapping[1] for mapping in mapped_workspaces[:-1]],
    }
    create_dockerfile('devel/devel_task.Dockerfile.em', data,
                      args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    for mapping in mapped_workspaces:
        print('  -v %s:%s' % mapping)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument('--workspace-root',
                        required=True,
                        help='The root path of the workspace to compile')
    parser.add_argument('--rosdoc-lite-dir',
                        required=True,
                        help='The root path of the rosdoc_lite repository')
    parser.add_argument('--catkin-sphinx-dir',
                        required=True,
                        help='The root path of the catkin-sphinx repository')
    parser.add_argument('--rosdoc-index-dir',
                        required=True,
                        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument('--os-name',
                        required=True,
                        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument('--os-code-name',
                        required=True,
                        help="The OS code name (e.g. 'xenial')")
    parser.add_argument('--arch',
                        required=True,
                        help="The architecture (e.g. 'amd64')")
    add_argument_build_tool(parser, required=True)
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)
    index = get_index(config.rosdistro_index_url)

    condition_context = get_package_condition_context(index,
                                                      args.rosdistro_name)

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        for pkg in pkgs.values():
            pkg.evaluate_conditions(condition_context)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(args.workspace_root, 'src',
                                args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes
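        # e.g. current_hashes might look like (values hypothetical):
        #   {'ros_buildfarm': 2, 'rosdoc_lite': '<git sha>',
        #    'catkin-sphinx': '<git sha>', '<repository_name>': '<hash>'}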

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating updated manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api',
                    pkg_name, 'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)

                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(
                    rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(args.output_dir, 'api', pkg_name,
                                   'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'updated rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependendencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(
                set([d.name for d in depends
                     if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependendencies for metapackage '%s'" %
                      pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(
                    set([
                        d.name for d in depends
                        if d.name in valid_package_names
                    ]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(pkg.name, ros_dependency_names)
        rosdoc_index.write_modified_data(args.output_dir,
                                         ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)
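                # e.g. this drops blocks like
                #   <div class="first system-message">...</div>
                # that docutils emits for malformed rst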

                pkg_changelog_doc_path = os.path.join(args.output_dir,
                                                      'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(
                        os.path.join(pkg_changelog_doc_path, 'changelog.html'),
                        'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes
            ]
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(
                prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(args.output_dir, 'rosdoc_tags',
                               '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourselves;
            # bad things happen when we do
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url': '../../../api/%s/html' % pkg.name,
                'location': 'file://%s' % os.path.join(
                    args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package': pkg.name,
            }

            # fetch generator specific output folders from rosdoc_lite
            if pkg.name in rosdoc_config_files:
                output_folders = get_generator_output_folders(
                    rosdoc_config_files[pkg.name], pkg.name)
                if 'doxygen' in output_folders:
                    data['docs_url'] += '/' + output_folders['doxygen']

            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    doc_build_files = get_doc_build_files(config, args.rosdistro_name)
    doc_build_file = doc_build_files[args.doc_build_name]

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(
                rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(
                rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = '%s/%s/api/%s/html' % \
                (doc_build_file.canonical_base_url, args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(config.jenkins_url,
                                              args.rosdistro_name,
                                              args.doc_build_name,
                                              args.repository_name,
                                              args.os_name, args.os_code_name,
                                              args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(config.jenkins_url,
                                                build_files,
                                                args.rosdistro_name,
                                                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(config.jenkins_url,
                                                    build_files,
                                                    args.rosdistro_name,
                                                    pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(args.output_dir, 'manifests', pkg.name,
                               'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope('SUBSECTION',
               'overwrite CMakeLists.txt files to only generate messages'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type'
            ]
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" % pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope('SUBSECTION', 'determine dependencies and generate Dockerfile'):
        # initialize rosdep view
        context = initialize_resolver(args.rosdistro_name, args.os_name,
                                      args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            # since catkin is not a run dependency but provides the setup files
            get_os_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_os_package_name(args.rosdistro_name, 'genmsg'),
        ]

        if '3' == str(condition_context['ROS_PYTHON_VERSION']):
            # the following are required by rosdoc_lite
            debian_pkg_names.extend([
                'python3-catkin-pkg-modules', 'python3-kitchen',
                'python3-rospkg-modules', 'python3-sphinx', 'python3-yaml'
            ])
        else:
            if '2' != str(condition_context['ROS_PYTHON_VERSION']):
                print('Unknown python version, using Python 2',
                      condition_context)
            # the following are required by rosdoc_lite
            debian_pkg_names.extend([
                'python-catkin-pkg-modules', 'python-epydoc', 'python-kitchen',
                'python-rospkg', 'python-sphinx', 'python-yaml'
            ])

        if args.build_tool == 'colcon':
            debian_pkg_names.append('python3-colcon-ros')
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_os_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print('  -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(pkgs.values(), 'build, run and doc',
                                   _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use unreleased dependencies
            print(
                '# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in failing build'
            )
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        # generate Dockerfile
        data = {
            'os_name': args.os_name,
            'os_code_name': args.os_code_name,
            'arch': args.arch,
            'build_tool': doc_build_file.build_tool,
            'distribution_repository_urls': args.distribution_repository_urls,
            'distribution_repository_keys': get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),
            'environment_variables': [
                'ROS_PYTHON_VERSION={}'.format(
                    condition_context['ROS_PYTHON_VERSION'])],
            'rosdistro_name': args.rosdistro_name,
            'uid': get_user_id(),
            'dependencies': debian_pkg_names,
            'dependency_versions': debian_pkg_versions,
            'install_lists': [],
            'canonical_base_url': doc_build_file.canonical_base_url,
            'ordered_pkg_tuples': ordered_pkg_tuples,
            'rosdoc_config_files': rosdoc_config_files,
        }
        create_dockerfile('doc/doc_task.Dockerfile.em', data,
                          args.dockerfile_dir)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser)
    add_argument_output_dir(parser, required=True)

    group = parser.add_argument_group(
        'Repositories in underlay workspace',
        description='The repositories in the underlay workspace will be ' +
                    'built and installed as well as built and tested. ' +
                    'Dependencies will be provided by binary packages.')
    group.add_argument(
        'source_repos',
        nargs='*',
        default=[],
        metavar='REPO_NAME',
        help="A name of a 'repository' from the distribution file")
    group.add_argument(
        '--custom-branch',
        nargs='*',
        type=_repository_name_and_branch,
        default=[],
        metavar='REPO_NAME:BRANCH_OR_TAG_NAME',
        help="A name of a 'repository' from the distribution file followed " +
             'by a colon and a branch / tag name')
    group.add_argument(
        '--custom-repo',
        nargs='*',
        type=_repository_name_and_type_and_url_and_branch,
        default=[],
        metavar='REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME',
        help='The name, type, url and branch / tag name of a repository, '
             'e.g. "common_tutorials:git:https://github.com/ros/common_tutorials:pullrequest-1"')

    add_overlay_arguments(parser)

    args = parser.parse_args(argv)

    print('Fetching buildfarm configuration...')
    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    print('Fetching rosdistro cache...')
    # Targets defined by the source build file are a subset of the targets
    # defined by the release build files. To increase the number of supported
    # pre-release targets, we combine all targets defined by all release
    # build files and use that when configuring the devel job.
    release_build_files = get_release_build_files(config, args.rosdistro_name)
    release_targets_combined = {}
    if release_build_files:
        release_targets_combined[args.os_name] = {}
        for build_name, rel_obj in release_build_files.items():
            if args.os_name not in rel_obj.targets:
                continue
            for dist_name, targets in rel_obj.targets[args.os_name].items():
                if dist_name not in release_targets_combined[args.os_name]:
                    release_targets_combined[args.os_name][dist_name] = {}
                release_targets_combined[args.os_name][dist_name].update(targets)

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for underlay workspace
    repositories = {}
    for repo_name in args.source_repos:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            repositories[repo_name] = \
                dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        if not repositories[repo_name]:
            print(("The repository '%s' has no source entry in the " +
                   "distribution file") % repo_name, file=sys.stderr)
            return 1

    for repo_name, custom_version in args.custom_branch:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            source_repo = dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        if not source_repo:
            print(("The repository '%s' has no source entry in the " +
                   "distribution file") % repo_name, file=sys.stderr)
            return 1
        source_repo = deepcopy(source_repo)
        source_repo.version = custom_version
        repositories[repo_name] = source_repo

    for repo_name, repo_type, repo_url, version in args.custom_repo:
        if repo_name in repositories and repositories[repo_name]:
            print("custom_repos option overriding '%s' to pull via '%s' "
                  "from '%s' with version '%s'. " %
                  (repo_name, repo_type, repo_url, version),
                  file=sys.stderr)
        source_repo = RepositorySpecification(
            repo_name, {
                'type': repo_type,
                'url': repo_url,
                'version': version,
            })
        repositories[repo_name] = source_repo

    scms = [(repositories[k], 'ws/src/%s' % k)
            for k in sorted(repositories.keys())]
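    # e.g. for repositories {'roscpp_core': <spec>} (hypothetical) this
    # yields tuples like
    #   (<RepositorySpecification for roscpp_core>, 'ws/src/roscpp_core')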

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeInclude(self, *_, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    # use any source repo to pass to devel job template
    if index.distributions[args.rosdistro_name].get('distribution_type', 'ros1') == 'ros1':
        package_name = 'catkin'
    elif index.distributions[args.rosdistro_name].get('distribution_type', 'ros1') == 'ros2':
        package_name = 'ros_workspace'
    else:
        assert False, 'Unsupported ROS version ' + \
            str(index.distributions[args.rosdistro_name].get('distribution_type', None))
    source_repository = deepcopy(
        dist_file.repositories[package_name].source_repository)
    if not source_repository:
        print(("The repository '%s' does not have a source entry in the distribution " +
               'file. We cannot generate a prerelease without a source entry.') % package_name,
              file=sys.stderr)
        return 1
    source_repository.name = 'prerelease'
    print('Evaluating job templates...')
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        None, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        index=index, dist_file=dist_file, dist_cache=dist_cache,
        jenkins=False, views=False,
        source_repository=source_repository,
        build_targets=release_targets_combined)

    templates.template_hooks = None

    # derive scripts for overlay workspace from underlay
    overlay_scripts = []
    for script in hook.scripts:
        # skip cloning of ros_buildfarm repository
        if 'git clone' in script and '.git ros_buildfarm' in script:
            continue
        # skip build-and-install step
        if 'build and install' in script:
            continue

        # add prerelease overlay flag
        run_devel_job = '/run_devel_job.py'
        if run_devel_job in script:
            script = script.replace(
                run_devel_job, run_devel_job + ' --prerelease-overlay')

        # replace mounted workspace volume with overlay and underlay
        # used by:
        # - create_devel_task_generator.py needs to find packages in both
        #   the underlay and the overlay workspace
        # - build_and_test.py needs to source the environment of
        #   the underlay before building the overlay
        mount_volume = '-v $WORKSPACE/ws:/tmp/ws'
        if mount_volume in script:
            script = script.replace(
                mount_volume, mount_volume + ':ro ' + '-v $WORKSPACE/' +
                'ws_overlay:/tmp/ws_overlay')

        # relocate all docker files
        docker_path = '$WORKSPACE/docker_'
        if docker_path in script:
            script = script.replace(
                docker_path, docker_path + 'overlay_')

        # rename all docker images
        name_suffix = '_prerelease'
        if name_suffix in script:
            script = script.replace(
                name_suffix, name_suffix + '_overlay')

        overlay_scripts.append(script)

    from ros_buildfarm import __file__ as ros_buildfarm_file
    data = deepcopy(args.__dict__)
    data.update({
        'scms': scms,
        'scripts': hook.scripts,
        'overlay_scripts': overlay_scripts,
        'ros_buildfarm_python_path': os.path.dirname(
            os.path.dirname(os.path.abspath(ros_buildfarm_file))),
        'python_executable': sys.executable,
        'prerelease_script_path': os.path.dirname(os.path.abspath(__file__)),
        'build_tool': args.build_tool or build_file.build_tool})

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # generate multiple scripts
    for script_name in [
            'prerelease',
            'prerelease_build_overlay',
            'prerelease_build_underlay',
            'prerelease_clone_overlay',
            'prerelease_clone_underlay']:
        content = expand_template(
            'prerelease/%s_script.sh.em' % script_name, data,
            options={BANGPATH_OPT: False})
        script_file = os.path.join(args.output_dir, script_name + '.sh')
        with open(script_file, 'w') as h:
            h.write(content)
        os.chmod(script_file, os.stat(script_file).st_mode | stat.S_IEXEC)

    print('')
    print('Generated prerelease script - to execute it run:')
    if os.path.abspath(args.output_dir) != os.path.abspath(os.curdir):
        print('  cd %s' % args.output_dir)
    print('  ./prerelease.sh')
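# Per the loop above, the output directory then contains five executable
# scripts: prerelease.sh, prerelease_build_underlay.sh,
# prerelease_build_overlay.sh, prerelease_clone_underlay.sh and
# prerelease_clone_overlay.sh; running ./prerelease.sh drives the whole flow.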
Example #24
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Invoke 'rosdoc_lite' on each package of a workspace")
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced (if available)')
    parser.add_argument('--os-code-name',
                        required=True,
                        help="The OS code name (e.g. 'xenial')")
    parser.add_argument('--arch',
                        required=True,
                        help="The architecture (e.g. 'amd64')")
    add_argument_build_tool(parser, required=True)
    parser.add_argument('--workspace-root',
                        required=True,
                        help='The root path of the workspace to compile')
    parser.add_argument('--rosdoc-lite-dir',
                        required=True,
                        help='The root path of the rosdoc_lite repository')
    parser.add_argument('--catkin-sphinx-dir',
                        required=True,
                        help='The root path of the catkin-sphinx repository')
    parser.add_argument('--rosdoc-index-dir',
                        required=True,
                        help='The root path of the rosdoc_index folder')
    parser.add_argument(
        '--canonical-base-url',
        help='The canonical base URL to add to all generated HTML files')
    parser.add_argument(
        'pkg_tuples',
        nargs='*',
        help='A list of package tuples in topological order, each containing '
        'the name, the relative path and optionally the package-relative '
        'path of the rosdoc config file, separated by colons')
    add_argument_output_dir(parser, required=True)
    args = parser.parse_args(argv)

    ensure_workspace_exists(args.workspace_root)
    clean_workspace(args.workspace_root)

    with Scope('SUBSECTION', 'build workspace in isolation and install'):
        env = dict(os.environ)
        env.setdefault('MAKEFLAGS', '-j1')
        rc = call_build_tool(args.build_tool,
                             args.rosdistro_name,
                             args.workspace_root,
                             cmake_args=['-DCATKIN_SKIP_TESTING=1'],
                             install=True,
                             env=env)
    # TODO a compile error should still allow generating the docs from the static parts
    if rc:
        return rc

    rosdoc_index = RosdocIndex([
        os.path.join(args.output_dir, args.rosdistro_name),
        os.path.join(args.rosdoc_index_dir, args.rosdistro_name)
    ])

    source_space = os.path.join(args.workspace_root, 'src')
    for pkg_tuple in args.pkg_tuples:
        pkg_name, pkg_subfolder, pkg_rosdoc_config = pkg_tuple.split(':', 2)
        with Scope('SUBSECTION', 'rosdoc_lite - %s' % pkg_name):
            pkg_path = os.path.join(source_space, pkg_subfolder)

            pkg_doc_path = os.path.join(args.output_dir, 'api_rosdoc',
                                        pkg_name)
            pkg_tag_path = os.path.join(args.output_dir, 'symbols',
                                        '%s.tag' % pkg_name)

            source_cmd = [
                '.',
                os.path.join(args.workspace_root, 'install_isolated',
                             'setup.sh'),
            ]
            # for workspaces with only plain CMake packages the setup files
            # generated by catkin_make_isolated won't implicitly source the
            # underlays
            setup_file = '/opt/ros/%s/setup.sh' % args.rosdistro_name
            if os.path.exists(setup_file):
                source_cmd = ['.', setup_file, '&&'] + source_cmd
            rosdoc_lite_cmd = [
                os.path.join(args.rosdoc_lite_dir, 'scripts', 'rosdoc_lite'),
                pkg_path,
                '-o',
                pkg_doc_path,
                '-g',
                pkg_tag_path,
                '-t',
                os.path.join(args.output_dir, 'rosdoc_tags',
                             '%s.yaml' % pkg_name),
            ]
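            # run the rosdoc_lite script through the Python 3 interpreter
            # when the ROS distro targets Python 3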
            if '3' == os.environ.get('ROS_PYTHON_VERSION'):
                rosdoc_lite_cmd.insert(0, 'python3')
            print("Invoking `rosdoc_lite` for package '%s': %s" %
                  (pkg_name, ' '.join(rosdoc_lite_cmd)))
            pkg_rc = subprocess.call([
                'sh', '-c', ' '.join(source_cmd) + ' && ' +
                'PYTHONPATH=%s/src:%s/src:$PYTHONPATH ' %
                (args.rosdoc_lite_dir, args.catkin_sphinx_dir) +
                ' '.join(rosdoc_lite_cmd)
            ],
                                     stderr=subprocess.STDOUT,
                                     cwd=pkg_path)
            if pkg_rc:
                rc = pkg_rc

            # only if the rosdoc run generated a symbol file
            # create the corresponding location file
            if os.path.exists(pkg_tag_path):
                data = {
                    'docs_url':
                    '../../../api/%s/html' % pkg_name,
                    'location':
                    '%s/symbols/%s.tag' % (args.rosdistro_name, pkg_name),
                    'package':
                    pkg_name,
                }

                # fetch generator-specific output folders from rosdoc_lite
                if pkg_rosdoc_config:
                    output_folders = get_generator_output_folders(
                        pkg_rosdoc_config, pkg_name)
                    for generator, output_folder in output_folders.items():
                        data['%s_output_folder' % generator] = output_folder

                rosdoc_index.locations[pkg_name] = [data]

            if args.canonical_base_url:
                add_canonical_link(
                    pkg_doc_path, '%s/%s/api/%s' %
                    (args.canonical_base_url, args.rosdistro_name, pkg_name))

            # merge manifest.yaml files
            rosdoc_manifest_yaml_file = os.path.join(pkg_doc_path,
                                                     'manifest.yaml')
            job_manifest_yaml_file = os.path.join(args.output_dir, 'manifests',
                                                  pkg_name, 'manifest.yaml')
            if os.path.exists(rosdoc_manifest_yaml_file):
                with open(rosdoc_manifest_yaml_file, 'r') as h:
                    rosdoc_data = yaml.safe_load(h)
            else:
                # if rosdoc_lite failed to generate the file
                rosdoc_data = {}
            with open(job_manifest_yaml_file, 'r') as h:
                job_data = yaml.safe_load(h)
            rosdoc_data.update(job_data)
            with open(rosdoc_manifest_yaml_file, 'w') as h:
                yaml.safe_dump(rosdoc_data, h, default_flow_style=False)

    rosdoc_index.write_modified_data(args.output_dir, ['locations'])

    return rc
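
For reference, a minimal sketch of how each positional pkg_tuple above is decomposed; the values are illustrative, and the third field may be empty when a package has no rosdoc config file:

# Sketch: decomposing one pkg_tuple argument (made-up values)
pkg_tuple = 'std_msgs:std_msgs:rosdoc.yaml'
pkg_name, pkg_subfolder, pkg_rosdoc_config = pkg_tuple.split(':', 2)
print(pkg_name, pkg_subfolder, pkg_rosdoc_config)
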
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Invoke the build tool on a workspace while enabling and '
        'running the tests')
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced (if available)')
    add_argument_build_tool(parser, required=True)
    a1 = add_argument_build_tool_args(parser)
    a2 = add_argument_build_tool_test_args(parser)
    parser.add_argument('--workspace-root',
                        required=True,
                        help='The root path of the workspace to compile')
    parser.add_argument('--parent-result-space',
                        nargs='*',
                        help='The paths of the parent result spaces')
    parser.add_argument(
        '--clean-before',
        action='store_true',
        help='Whether the workspace should be cleaned before the '
        'invocation')
    parser.add_argument(
        '--clean-after',
        action='store_true',
        help='Whether the workspace should be cleaned after the '
        'invocation')
    add_argument_require_gpu_support(parser)

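    # extract the remainder-style values of --build-tool-args and
    # --build-tool-test-args before the regular parse; argparse would
    # otherwise consume every following option as part of the remainder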
    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    ensure_workspace_exists(args.workspace_root)

    if args.clean_before:
        clean_workspace(args.workspace_root)

    parent_result_spaces = None
    if args.parent_result_space:
        parent_result_spaces = args.parent_result_space

    try:
        with Scope('SUBSECTION', 'build workspace in isolation'):
            test_results_dir = os.path.join(args.workspace_root,
                                            'test_results')
            cmake_args = [
                '-DBUILD_TESTING=1', '-DCATKIN_ENABLE_TESTING=1',
                '-DCATKIN_SKIP_TESTING=0',
                '-DCATKIN_TEST_RESULTS_DIR=%s' % test_results_dir
            ]
            additional_args = args.build_tool_args or []
            if args.build_tool == 'colcon':
                additional_args += ['--test-result-base', test_results_dir]
            env = dict(os.environ)
            env.setdefault('MAKEFLAGS', '-j1')
            rc = call_build_tool(args.build_tool,
                                 args.rosdistro_name,
                                 args.workspace_root,
                                 cmake_clean_cache=True,
                                 cmake_args=cmake_args,
                                 args=additional_args,
                                 parent_result_spaces=parent_result_spaces,
                                 env=env)
        if not rc:
            with Scope('SUBSECTION', 'build tests'):
                additional_args = args.build_tool_args or []
                if args.build_tool == 'colcon':
                    additional_args += ['--cmake-target-skip-unavailable']
                rc = call_build_tool(args.build_tool,
                                     args.rosdistro_name,
                                     args.workspace_root,
                                     cmake_args=cmake_args,
                                     make_args=['tests'],
                                     args=additional_args,
                                     parent_result_spaces=parent_result_spaces,
                                     env=env)
            if not rc:
                make_args = ['run_tests']
                additional_args = args.build_tool_args or []
                if args.build_tool == 'colcon':
                    cmake_args = None
                    make_args = None
                    additional_args = ['--test-result-base', test_results_dir]
                additional_args += args.build_tool_test_args or []
                # for workspaces with only plain CMake packages the setup
                # files generated by catkin_make_isolated won't implicitly
                # source the underlays
                if parent_result_spaces is None:
                    parent_result_spaces = [
                        '/opt/ros/%s' % args.rosdistro_name
                    ]
                if args.build_tool == 'catkin_make_isolated':
                    devel_space = os.path.join(args.workspace_root,
                                               'devel_isolated')
                    parent_result_spaces.append(devel_space)
                # since catkin_make_isolated doesn't provide a custom
                # environment to run tests this needs to source the devel
                # space and force a CMake run to use the new environment
                with Scope('SUBSECTION', 'run tests'):
                    rc = call_build_tool(
                        args.build_tool,
                        args.rosdistro_name,
                        args.workspace_root,
                        cmake_args=cmake_args,
                        force_cmake=args.build_tool == 'catkin_make_isolated',
                        make_args=make_args,
                        args=additional_args,
                        parent_result_spaces=parent_result_spaces,
                        env=env,
                        colcon_verb='test')
    finally:
        if args.clean_after:
            clean_workspace(args.workspace_root)

    return rc
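
A condensed sketch of the staged control flow above; the placeholder stage functions stand in for the three call_build_tool invocations:

# Sketch: each stage only runs if the previous one succeeded
def build_workspace(): return 0
def build_tests(): return 0
def run_tests(): return 0

rc = build_workspace()
if not rc:
    rc = build_tests()
    if not rc:
        rc = run_tests()
# rc now holds the return code of the first failing stage, or 0
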
Example #26
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Invoke the build tool on a workspace')
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced (if available)')
    add_argument_build_tool(parser, required=True)
    add_argument_build_tool_args(parser)
    add_argument_run_abichecker(parser)
    parser.add_argument('--workspace-root',
                        required=True,
                        help='The root path of the workspace to compile')
    parser.add_argument('--parent-result-space',
                        nargs='*',
                        help='The paths of the parent result spaces')
    parser.add_argument(
        '--clean-before',
        action='store_true',
        help='Whether the workspace should be cleaned before the '
        'invocation')
    parser.add_argument(
        '--clean-after',
        action='store_true',
        help='Whether the workspace should be cleaned after the '
        'invocation')

    add_argument_ros_version(parser)

    args = parser.parse_args(argv)

    ensure_workspace_exists(args.workspace_root)

    if args.clean_before:
        clean_workspace(args.workspace_root)

    env = dict(os.environ)
    env.setdefault('MAKEFLAGS', '-j1')
    env.setdefault('ROS_DISTRO', args.rosdistro_name)

    try:
        with Scope('SUBSECTION', 'build workspace in isolation and install'):
            parent_result_spaces = None
            if args.parent_result_space:
                parent_result_spaces = args.parent_result_space
            rc = call_build_tool(
                args.build_tool,
                args.rosdistro_name,
                args.workspace_root,
                cmake_args=['-DBUILD_TESTING=0', '-DCATKIN_SKIP_TESTING=1'],
                args=args.build_tool_args,
                install=True,
                parent_result_spaces=parent_result_spaces,
                env=env)
    finally:
        if args.clean_after:
            clean_workspace(args.workspace_root)

    # only run abi-checker after successful builds and when requested
    if not rc and args.run_abichecker:
        with Scope('SUBSECTION', 'use abi checker'):
            abi_rc = call_abi_checker([args.workspace_root], args.ros_version,
                                      env)
        # Never fail a build because of ABI errors but mark it unstable
        # by printing MAKE_BUILD_UNSTABLE. Jenkins uses a plugin to pick
        # up that marker and mark the build as unstable.
        if abi_rc:
            print('MAKE_BUILD_UNSTABLE')

    return rc
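
A minimal sketch of how the ABI result is decoupled from the build result above; the return codes are made up:

# Sketch: ABI problems downgrade the job instead of failing it
rc = 0      # successful build
abi_rc = 1  # pretend the ABI checker reported incompatibilities
if not rc and abi_rc:
    print('MAKE_BUILD_UNSTABLE')  # marker for the CI log parser
# the unchanged rc is returned, so the build itself still passes
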
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--rosdoc-lite-dir',
        required=True,
        help='The root path of the rosdoc_lite repository')
    parser.add_argument(
        '--catkin-sphinx-dir',
        required=True,
        help='The root path of the catkin-sphinx repository')
    parser.add_argument(
        '--rosdoc-index-dir',
        required=True,
        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'xenial')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_build_tool(parser, required=True)
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(
            args.workspace_root, 'src', args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating updated manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api', pkg_name,
                    'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping, no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)

                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(
                    args.output_dir, 'api', pkg_name, 'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'updated rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependendencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(set([
                d.name for d in depends if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependendencies for metapackage '%s'" %
                      pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(set([
                    d.name for d in depends if d.name in valid_package_names]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(
                pkg.name, ros_dependency_names)
        rosdoc_index.write_modified_data(
            args.output_dir, ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(
                    args.output_dir, 'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(os.path.join(
                        pkg_changelog_doc_path, 'changelog.html'), 'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes]
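            # an exported rosdoc path may use the ${prefix} placeholder,
            # which is substituted with the absolute package path below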
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(
                args.output_dir, 'rosdoc_tags', '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourselves;
            # bad things happen when we do
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url': '../../../api/%s/html' % pkg.name,
                'location': 'file://%s' % os.path.join(
                    args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package': pkg.name,
            }

            # fetch generator-specific output folders from rosdoc_lite
            if pkg.name in rosdoc_config_files:
                output_folders = get_generator_output_folders(
                    rosdoc_config_files[pkg.name], pkg.name)
                if 'doxygen' in output_folders:
                    data['docs_url'] += '/' + output_folders['doxygen']

            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    doc_build_files = get_doc_build_files(config, args.rosdistro_name)
    doc_build_file = doc_build_files[args.doc_build_name]

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = '%s/%s/api/%s/html' % \
                (doc_build_file.canonical_base_url, args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(
                config.jenkins_url, args.rosdistro_name, args.doc_build_name,
                args.repository_name, args.os_name, args.os_code_name,
                args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name,
                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name, pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(
                args.output_dir, 'manifests', pkg.name, 'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope(
        'SUBSECTION',
        'overwrite CMakeLists.txt files to only generate messages'
    ):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type']
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" %
                  pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope(
        'SUBSECTION',
        'determine dependencies and generate Dockerfile'
    ):
        # initialize rosdep view
        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            'python-catkin-pkg-modules',
            'python-epydoc',
            'python-kitchen',
            'python-rospkg',
            'python-sphinx',
            'python-yaml',
            # since catkin is not a run dependency but provides the setup files
            get_debian_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_debian_package_name(args.rosdistro_name, 'genmsg'),
        ]
        if args.build_tool == 'colcon':
            debian_pkg_names.append('python3-colcon-ros')
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_debian_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print('  -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(
            pkgs.values(), 'build, run and doc', _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use unreleased dependencies
            print('# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in failing build')
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        # generate Dockerfile
        data = {
            'os_name': args.os_name,
            'os_code_name': args.os_code_name,
            'arch': args.arch,
            'build_tool': doc_build_file.build_tool,

            'distribution_repository_urls': args.distribution_repository_urls,
            'distribution_repository_keys': get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),

            'rosdistro_name': args.rosdistro_name,

            'uid': get_user_id(),

            'dependencies': debian_pkg_names,
            'dependency_versions': debian_pkg_versions,
            'install_lists': [],

            'canonical_base_url': doc_build_file.canonical_base_url,

            'ordered_pkg_tuples': ordered_pkg_tuples,
            'rosdoc_config_files': rosdoc_config_files,
        }
        create_dockerfile(
            'doc/doc_task.Dockerfile.em', data, args.dockerfile_dir)
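
A standalone sketch of the skip decision used earlier in this example: documentation is only regenerated when any tracked hash differs from the stored ones (hash values are made up):

# Sketch: regenerate docs only when any tracked hash changed
current_hashes = {'ros_buildfarm': 2, 'rosdoc_lite': 'abc123'}
stored_hashes = {'ros_buildfarm': 2, 'rosdoc_lite': 'abc123'}
skip_doc_generation = current_hashes == stored_hashes
print('skip' if skip_doc_generation else 'regenerate')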