Example #1
def configure_sync_packages_to_main_job(config_url,
                                        rosdistro_name,
                                        release_build_name,
                                        config=None,
                                        build_file=None,
                                        jenkins=None,
                                        dry_run=False):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)

    package_formats = set(package_format_mapping[os_name]
                          for os_name in build_file.targets.keys())
    assert len(package_formats) == 1
    package_format = package_formats.pop()

    job_name = get_sync_packages_to_main_job_name(rosdistro_name,
                                                  package_format)
    job_config = _get_sync_packages_to_main_job_config(rosdistro_name,
                                                       build_file,
                                                       package_format)

    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
    return (job_name, job_config)
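A minimal usage sketch for the function above, assuming the buildfarm config index behind the (hypothetical) URL is reachable; passing jenkins=False skips the Jenkins round trip, so only the generated job name and XML config are returned:

job_name, job_config = configure_sync_packages_to_main_job(
    'https://example.com/ros_buildfarm_config/index.yaml',  # hypothetical config index
    'melodic',       # rosdistro_name
    'default',       # release_build_name
    jenkins=False)   # skip configure_job(); just compute name and config
print(job_name)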
Example #2
def create_job(os_name, job_name, template_file, additional_dict):
    job_data = dict(data)
    job_data['os_name'] = os_name
    job_data.update(os_configs[os_name])
    job_data.update(additional_dict)
    job_config = expand_template(template_file, job_data)
    configure_job(jenkins, job_name, job_config, **jenkins_kwargs)
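The create_job snippet above is a nested helper: it closes over data, os_configs, jenkins and jenkins_kwargs defined in an enclosing main() (see Example #37 below). A rough sketch of the context it assumes, with placeholder values and a hypothetical Jenkins URL (imports of connect, expand_template and configure_job from ros_buildfarm are taken for granted):

data = {'time_trigger_spec': '', 'mailer_recipients': ''}   # shared template values
os_configs = {'linux': {'label_expression': 'linux', 'shell_type': 'Shell'}}
jenkins_kwargs = {'dry_run': True}                          # forwarded to configure_job
jenkins = connect('https://ci.example.org')                 # hypothetical Jenkins URL

create_job('linux', 'ci_linux', 'ci_job.xml.em', {'cmake_build_type': 'None'})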
Example #3
def configure_sync_packages_to_testing_job(config_url,
                                           rosdistro_name,
                                           release_build_name,
                                           os_code_name,
                                           arch,
                                           config=None,
                                           build_file=None,
                                           jenkins=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        jenkins = connect(config.jenkins_url)

    job_name = get_sync_packages_to_testing_job_name(rosdistro_name,
                                                     os_code_name, arch)
    job_config = _get_sync_packages_to_testing_job_config(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        config, build_file)

    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_management_view(jenkins)
        configure_job(jenkins, job_name, job_config)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'upload_main' and 'upload_testing' jobs.")
    add_argument_config_url(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    template_name = 'release/trigger_upload_repo_job.xml.em'

    config = get_index(args.config_url)
    jenkins = connect(config.jenkins_url)

    for repo in ['main', 'testing']:
        job_name = 'upload_%s' % repo
        block_when_upstream_building = 'true'
        if repo == 'testing':
            block_when_upstream_building = 'false'
        job_config = expand_template(
            template_name, {
                'block_when_upstream_building': block_when_upstream_building,
                'repo': repo,
                'upstream_job_names': get_upstream_job_names(config, repo),
                'recipients': config.notify_emails
            })

        configure_job(jenkins, job_name, job_config, dry_run=args.dry_run)
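Each main(argv=sys.argv[1:]) entry point in these examples can be turned into a script with the usual guard; a minimal sketch:

if __name__ == '__main__':
    sys.exit(main())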
def configure_sync_packages_to_main_job(config_url,
                                        rosdistro_name,
                                        release_build_name,
                                        config=None,
                                        build_file=None,
                                        jenkins=None,
                                        dry_run=False):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)

    job_name = get_sync_packages_to_main_job_name(rosdistro_name)
    job_config = _get_sync_packages_to_main_job_config(rosdistro_name,
                                                       build_file)

    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
    return (job_name, job_config)
Example #6
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    package_formats = set(
        package_format_mapping[os_name] for os_name in build_file.targets.keys())
    assert len(package_formats) == 1
    package_format = package_formats.pop()

    group_name = get_release_job_prefix(
        args.rosdistro_name, args.release_build_name)

    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    trigger_missed_jobs_job_config = get_trigger_missed_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file, package_format)
    trigger_broken_with_non_broken_upstream_job_config = \
        _get_trigger_broken_with_non_broken_upstream_job_config(
            args.rosdistro_name, args.release_build_name, build_file)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(
        jenkins, job_name, reconfigure_jobs_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(
        jenkins, job_name, trigger_jobs_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'trigger-missed-jobs')
    configure_job(
        jenkins, job_name, trigger_missed_jobs_job_config,
        dry_run=args.dry_run)

    job_name = 'import_upstream%s' % ('' if package_format == 'deb' else '_' + package_format)
    configure_job(
        jenkins, job_name, import_upstream_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % \
        (group_name, 'trigger-broken-with-non-broken-upstream')
    configure_job(
        jenkins, job_name, trigger_broken_with_non_broken_upstream_job_config,
        dry_run=args.dry_run)
Example #7
def configure_reconfigure_jobs_job(jenkins,
                                   group_name,
                                   args,
                                   config,
                                   build_file,
                                   dry_run=False):
    job_config = get_reconfigure_jobs_job_config(args, config, build_file)
    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(jenkins, job_name, job_config, dry_run=dry_run)
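A hedged call sketch for the helper above, assuming jenkins, args, config and build_file were prepared as in the surrounding main() examples and that get_release_job_prefix is available as in Example #6 (the distro and build names are hypothetical):

group_name = get_release_job_prefix('melodic', 'default')
configure_reconfigure_jobs_job(
    jenkins, group_name, args, config, build_file, dry_run=True)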
Example #8
def create_job(os_name, job_name, template_file, additional_dict):
    if args.select_jobs_regexp and not args.pattern_select_jobs_regexp.match(job_name):
        return
    job_data = dict(data)
    job_data['os_name'] = os_name
    job_data.update(os_configs[os_name])
    job_data.update(additional_dict)
    job_data.update(os_config_overrides.get(os_name, {}))
    job_config = expand_template(template_file, job_data)
    configure_job(jenkins, job_name, job_config, **jenkins_kwargs)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    group_name = get_release_job_prefix(
        args.rosdistro_name, args.release_build_name)

    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    trigger_missed_jobs_job_config = get_trigger_missed_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file)
    trigger_broken_with_non_broken_upstream_job_config = \
        _get_trigger_broken_with_non_broken_upstream_job_config(
            args.rosdistro_name, args.release_build_name, build_file)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(
        jenkins, job_name, reconfigure_jobs_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(
        jenkins, job_name, trigger_jobs_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'trigger-missed-jobs')
    configure_job(
        jenkins, job_name, trigger_missed_jobs_job_config,
        dry_run=args.dry_run)

    job_name = 'import_upstream'
    configure_job(
        jenkins, job_name, import_upstream_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % \
        (group_name, 'trigger-broken-with-non-broken-upstream')
    configure_job(
        jenkins, job_name, trigger_broken_with_non_broken_upstream_job_config,
        dry_run=args.dry_run)
Example #10
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'check_slaves' job on Jenkins")
    add_argument_config_url(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(config.notify_emails)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins)

    job_name = 'check_slaves'
    configure_job(jenkins, job_name, job_config)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'dashboard' job on Jenkins")
    add_argument_config_url(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(config.notify_emails)

    jenkins = connect(config.jenkins_url)

    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    job_name = 'dashboard'
    configure_job(jenkins, job_name, job_config, view=view)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'dashboard' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins)

    job_name = '%s_rosdistro-cache' % args.rosdistro_name
    configure_job(jenkins, job_name, job_config)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'dashboard' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins)

    job_name = '%s_rosdistro-cache' % args.rosdistro_name
    configure_job(jenkins, job_name, job_config)
Example #14
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'bloom_status' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins, dry_run=args.dry_run)

    job_name = 'bloom_status'
    configure_job(jenkins, job_name, job_config, dry_run=args.dry_run)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'failing_jobs' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args.rosdistro_name)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins, dry_run=args.dry_run)

    job_name = '%s_failing-jobs' % args.rosdistro_name
    configure_job(jenkins, job_name, job_config, dry_run=args.dry_run)
Example #16
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'repos_status_page' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins)

    prefix = get_release_job_prefix(args.rosdistro_name)
    job_name = '%s_repos-status-page' % prefix
    configure_job(jenkins, job_name, job_config)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'repos_status_page' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins)

    prefix = get_release_job_prefix(args.rosdistro_name)
    job_name = '%s_repos-status-page' % prefix
    configure_job(jenkins, job_name, job_config)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'release_status_page' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)

    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    job_name = '%s_%s_release-status-page' % \
        (args.rosdistro_name, args.release_build_name)
    configure_job(jenkins, job_name, job_config, view=view)
Example #19
def configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=None, build_file=None, jenkins=None, dry_run=False):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        jenkins = connect(config.jenkins_url)

    job_name = get_import_package_job_name(rosdistro_name)
    job_config = _get_import_package_job_config(build_file)

    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
    return (job_name, job_config)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'release_status_page' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_debian_repository_urls(parser)
    add_argument_os_code_name_and_arch_tuples(parser)
    add_argument_output_name(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)

    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    job_name = '%s_repos-status-page' % args.output_name
    configure_job(jenkins, job_name, job_config, view=view)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'blocked_source_entries_page' job on Jenkins"
    )
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins, dry_run=args.dry_run)

    prefix = get_release_job_prefix(args.rosdistro_name)
    job_name = '%s_blocked-source-entries-page' % prefix
    configure_job(jenkins, job_name, job_config, dry_run=args.dry_run)
def configure_doc_independent_job(
        config_url, doc_build_name, config=None, build_file=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_global_doc_build_files(config)
        build_file = build_files[doc_build_name]

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    job_name = 'doc_%s' % doc_build_name

    job_config = _get_doc_independent_job_config(
        config, config_url, job_name, build_file)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config)
def configure_doc_metadata_job(
        config_url, rosdistro_name, doc_build_name,
        config=None, build_file=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_doc_build_files(config, rosdistro_name)
        build_file = build_files[doc_build_name]

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    job_name = get_doc_view_name(rosdistro_name, doc_build_name)

    job_config = _get_doc_metadata_job_config(
        config, config_url, rosdistro_name, doc_build_name, build_file)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config)
def configure_sync_packages_to_main_job(
        config_url, rosdistro_name, release_build_name,
        config=None, build_file=None, jenkins=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        jenkins = connect(config.jenkins_url)

    job_name = get_sync_packages_to_main_job_name(
        rosdistro_name)
    job_config = _get_sync_packages_to_main_job_config(
        rosdistro_name, build_file)

    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_management_view(jenkins)
        configure_job(jenkins, job_name, job_config)
def configure_doc_metadata_job(
        config_url, rosdistro_name, doc_build_name,
        config=None, build_file=None, dry_run=False):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_doc_build_files(config, rosdistro_name)
        build_file = build_files[doc_build_name]

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    job_name = get_doc_view_name(rosdistro_name, doc_build_name)

    job_config = _get_doc_metadata_job_config(
        config, config_url, rosdistro_name, doc_build_name, build_file)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
def configure_doc_independent_job(
        config_url, doc_build_name, config=None, build_file=None,
        dry_run=False):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_global_doc_build_files(config)
        build_file = build_files[doc_build_name]

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    job_name = 'doc_%s' % doc_build_name

    job_config = _get_doc_independent_job_config(
        config, config_url, job_name, build_file)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
def configure_sync_packages_to_testing_job(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        config=None, build_file=None, jenkins=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        jenkins = connect(config.jenkins_url)

    job_name = get_sync_packages_to_testing_job_name(
        rosdistro_name, release_build_name, os_code_name, arch)
    job_config = _get_sync_packages_to_testing_job_config(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        config, build_file)
    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config, view)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file)

    jenkins = connect(config.jenkins_url)

    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    group_name = get_release_view_name(
        args.rosdistro_name, args.release_build_name)

    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(jenkins, job_name, reconfigure_jobs_job_config, view=view)

    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(jenkins, job_name, trigger_jobs_job_config, view=view)

    job_name = 'import_upstream'
    configure_job(jenkins, job_name, import_upstream_job_config, view=view)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'upload_main' and 'upload_testing' jobs.")
    add_argument_config_url(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    template_name = 'release/trigger_upload_repo_job.xml.em'

    config = get_index(args.config_url)
    jenkins = connect(config.jenkins_url)

    for repo in ['main', 'testing']:
        job_name = 'upload_%s' % repo
        block_when_upstream_building = 'true'
        if repo == 'testing':
            block_when_upstream_building = 'false'
        job_config = expand_template(template_name, {
            'block_when_upstream_building': block_when_upstream_building,
            'repo': repo,
            'upstream_job_names': get_upstream_job_names(config, repo),
            'recipients': config.notify_emails})

        configure_job(jenkins, job_name, job_config, dry_run=args.dry_run)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    group_name = get_release_job_prefix(args.rosdistro_name,
                                        args.release_build_name)

    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file)
    trigger_broken_with_non_broken_upstream_job_config = \
        _get_trigger_broken_with_non_broken_upstream_job_config(
            args.rosdistro_name, args.release_build_name, build_file)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins)

    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(jenkins, job_name, reconfigure_jobs_job_config)

    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(jenkins, job_name, trigger_jobs_job_config)

    job_name = 'import_upstream'
    configure_job(jenkins, job_name, import_upstream_job_config)

    job_name = '%s_%s' % \
        (group_name, 'trigger-broken-with-non-broken-upstream')
    configure_job(jenkins, job_name,
                  trigger_broken_with_non_broken_upstream_job_config)
Example #31
def configure_devel_job(
        config_url, rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None):
    """
    Configure a single Jenkins devel job.

    This includes the following steps:
    - clone the source repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the release/run_devel_job.py script
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_source_build_files(config, rosdistro_name)
        build_file = build_files[source_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    repo_names = dist_file.repositories.keys()
    repo_names = build_file.filter_repositories(repo_names)

    if repo_name not in repo_names:
        raise JobValidationError(
            "Invalid repository name '%s' " % repo_name +
            'choose one of the following: %s' % ', '.join(sorted(repo_names)))

    repo = dist_file.repositories[repo_name]

    if not repo.source_repository:
        raise JobValidationError(
            "Repository '%s' has no source section" % repo_name)
    if not repo.source_repository.version:
        raise JobValidationError(
            "Repository '%s' has no source version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' % ', '.join(sorted(
                build_file.targets[os_name][os_code_name])))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_devel_view_name(rosdistro_name, source_build_name)
        configure_devel_view(jenkins, view_name)

    job_name = get_devel_job_name(
        rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch)

    job_config = _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, repo.source_repository,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config)

    return job_name
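configure_devel_job validates the repository, OS and architecture against the build file before it touches Jenkins and raises JobValidationError otherwise; a minimal sketch of relying on that, with a hypothetical config URL and names, jenkins=False to skip configure_job() and a non-None view placeholder to skip configure_devel_view() (the JobValidationError import path is an assumption):

from ros_buildfarm.common import JobValidationError  # import path assumed

try:
    job_name = configure_devel_job(
        'https://example.com/ros_buildfarm_config/index.yaml',
        'melodic', 'default', 'some_repo', 'ubuntu', 'bionic', 'amd64',
        jenkins=False,   # skip configure_job()
        view=object())   # skip configure_devel_view()
except JobValidationError as exc:
    print(exc)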
def configure_doc_job(
        config_url, rosdistro_name, doc_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        is_disabled=False,
        groovy_script=None,
        doc_repository=None):
    """
    Configure a single Jenkins doc job.

    This includes the following steps:
    - clone the doc repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the run_doc_job.py script
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_doc_build_files(config, rosdistro_name)
        build_file = build_files[doc_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    repo_names = dist_file.repositories.keys()

    if repo_name is not None:
        if repo_name not in repo_names:
            raise JobValidationError(
                "Invalid repository name '%s' " % repo_name +
                'choose one of the following: %s' %
                ', '.join(sorted(repo_names)))

        repo = dist_file.repositories[repo_name]
        if not repo.doc_repository:
            raise JobValidationError(
                "Repository '%s' has no doc section" % repo_name)
        if not repo.doc_repository.version:
            raise JobValidationError(
                "Repository '%s' has no doc version" % repo_name)
        doc_repository = repo.doc_repository

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' % ', '.join(sorted(
                build_file.targets[os_name][os_code_name])))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        view_name = get_doc_view_name(
            rosdistro_name, doc_build_name)
        configure_doc_view(jenkins, view_name)

    job_name = get_doc_job_name(
        rosdistro_name, doc_build_name,
        repo_name, os_name, os_code_name, arch)

    job_config = _get_doc_job_config(
        config, config_url, rosdistro_name, doc_build_name,
        build_file, os_name, os_code_name, arch, doc_repository,
        repo_name, dist_cache=dist_cache, is_disabled=is_disabled)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config)

    return job_name, job_config
Example #33
def main(argv=None):
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Creates the ros2 jobs on Jenkins")
    parser.add_argument(
        '--jenkins-url',
        '-u',
        default='http://ci.ros2.org',
        help="Url of the jenkins server to which the job should be added")
    parser.add_argument(
        '--ci-scripts-repository',
        default='git@github.com:ros2/ci.git',
        help="repository from which ci scripts should be cloned")
    parser.add_argument(
        '--ci-scripts-default-branch',
        default='master',
        help=
        "default branch of the ci repository to get ci scripts from (this is a job parameter)"
    )
    parser.add_argument(
        '--commit',
        action='store_true',
        help='Actually modify the Jenkins jobs instead of only doing a dry run',
    )
    args = parser.parse_args(argv)

    data = {
        'ci_scripts_repository': args.ci_scripts_repository,
        'ci_scripts_default_branch': args.ci_scripts_default_branch,
        'default_repos_url': DEFAULT_REPOS_URL,
        'supplemental_repos_url': '',
        'time_trigger_spec': '',
        'mailer_recipients': '',
        'use_connext_default': 'true',
        'disable_connext_static_default': 'false',
        'disable_connext_dynamic_default': 'true',
        'use_osrf_connext_debs_default': 'false',
        'use_fastrtps_default': 'true',
        'use_opensplice_default': 'false',
        'ament_build_args_default': '--parallel --cmake-args -DSECURITY=ON --',
        'ament_test_args_default': '--retest-until-pass 10',
        'enable_c_coverage_default': 'false',
        'dont_notify_every_unstable_build': 'false',
        'turtlebot_demo': False,
        'build_timeout_mins': 0,
    }

    jenkins = connect(args.jenkins_url)

    os_configs = {
        'linux': {
            'label_expression': 'linux',
            'shell_type': 'Shell',
        },
        'osx': {
            'label_expression': 'osx_slave',
            'shell_type': 'Shell',
            # the current OS X slave can't handle git@github.com URLs
            'ci_scripts_repository': args.ci_scripts_repository.replace(
                'git@github.com:', 'https://github.com/'),
        },
        'windows': {
            'label_expression': 'windows_slave',
            'shell_type': 'BatchFile',
        },
        'linux-aarch64': {
            'label_expression': 'linux_aarch64',
            'shell_type': 'Shell',
            'use_connext_default': 'false',
        },
    }

    jenkins_kwargs = {}
    if not args.commit:
        jenkins_kwargs['dry_run'] = True

    # configure os specific jobs
    for os_name in sorted(os_configs.keys()):
        job_data = dict(data)
        job_data['os_name'] = os_name
        job_data.update(os_configs[os_name])

        # configure manual triggered job
        job_name = 'ci_' + os_name
        job_data['cmake_build_type'] = 'None'
        job_config = expand_template('ci_job.xml.em', job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

        # configure a manual version of the packaging job
        job_name = 'ci_packaging_' + os_name
        job_data['cmake_build_type'] = 'RelWithDebInfo'
        job_data['test_bridge_default'] = 'true'
        job_config = expand_template('packaging_job.xml.em', job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

        # all following jobs are triggered nightly with email notification
        job_data['time_trigger_spec'] = '30 7 * * *'
        # for now, skip emailing about Windows failures
        job_data['mailer_recipients'] = '*****@*****.**'

        # configure packaging job
        job_name = 'packaging_' + os_name
        job_data['cmake_build_type'] = 'RelWithDebInfo'
        job_data['test_bridge_default'] = 'true'
        job_config = expand_template('packaging_job.xml.em', job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

        # keeping the paths on Windows shorter
        os_name = os_name.replace('windows', 'win')
        # configure nightly triggered job
        job_name = 'nightly_' + os_name + '_debug'
        if os_name == 'win':
            job_name = job_name[0:-2]
        job_data['cmake_build_type'] = 'Debug'
        job_config = expand_template('ci_job.xml.em', job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

        # configure nightly coverage job on x86 Linux only
        if os_name == 'linux':
            job_name = 'nightly_' + os_name + '_coverage'
            job_data['cmake_build_type'] = 'Debug'
            job_data['enable_c_coverage_default'] = 'true'
            job_config = expand_template('ci_job.xml.em', job_data)
            configure_job(jenkins, job_name, job_config, **jenkins_kwargs)
            job_data['enable_c_coverage_default'] = 'false'

        # configure nightly triggered job
        job_name = 'nightly_' + os_name + '_release'
        if os_name == 'win':
            job_name = job_name[0:-4]
        job_data['cmake_build_type'] = 'Release'
        job_config = expand_template('ci_job.xml.em', job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

        # configure nightly triggered job with repeated testing
        job_name = 'nightly_' + os_name + '_repeated'
        if os_name == 'win':
            job_name = job_name[0:-5]
        job_data['time_trigger_spec'] = '30 7 * * *'
        job_data['cmake_build_type'] = 'None'
        job_data['ament_test_args_default'] = \
            '--retest-until-fail 20 --ctest-args -LE linter --'
        job_config = expand_template('ci_job.xml.em', job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

    # configure the launch job
    os_specific_data = collections.OrderedDict()
    for os_name in sorted(os_configs.keys()):
        os_specific_data[os_name] = dict(data)
        os_specific_data[os_name].update(os_configs[os_name])
        os_specific_data[os_name]['job_name'] = 'ci_' + os_name
    job_data = dict(data)
    job_data['ci_scripts_default_branch'] = args.ci_scripts_default_branch
    job_data['label_expression'] = 'master'
    job_data['os_specific_data'] = os_specific_data
    job_data['cmake_build_type'] = 'None'
    job_config = expand_template('ci_launcher_job.xml.em', job_data)
    configure_job(jenkins, 'ci_launcher', job_config, **jenkins_kwargs)

    # Run the turtlebot job on Linux only for now.
    for os_name in ['linux', 'linux-aarch64']:
        turtlebot_job_data = dict(data)
        turtlebot_job_data['os_name'] = os_name
        turtlebot_job_data.update(os_configs[os_name])
        turtlebot_job_data['turtlebot_demo'] = True
        # Use a turtlebot2_demo-specific repos file by default.
        turtlebot_job_data['supplemental_repos_url'] = \
            'https://raw.githubusercontent.com/ros2/turtlebot2_demo/master/turtlebot2_demo.repos'
        turtlebot_job_data['cmake_build_type'] = 'None'
        job_config = expand_template('ci_job.xml.em', turtlebot_job_data)
        configure_job(jenkins, 'ci_turtlebot-demo_%s' % (os_name), job_config,
                      **jenkins_kwargs)
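Because this main() takes argv, a dry run can be scripted directly from Python; a minimal sketch (the Jenkins URL is hypothetical, and since --commit is not passed every configure_job call stays a dry run):

main(['--jenkins-url', 'https://ci.example.org'])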
Example #34
def configure_trigger_jobs_job(jenkins, group_name, build_file, dry_run=False):
    job_config = get_trigger_jobs_job_config(group_name, build_file)
    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(jenkins, job_name, job_config, dry_run=dry_run)
def configure_release_job_with_validation(
        config_url, rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name, append_timestamp=False,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None,
        generate_import_package_job=True,
        filter_arches=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    if pkg_name not in pkg_names:
        raise JobValidationError(
            "Invalid package name '%s' " % pkg_name +
            'choose one of the following: ' + ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError(
            "Repository '%s' has no release section" % repo_name)

    if not repo.release_repository.version:
        raise JobValidationError(
            "Repository '%s' has no release version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_release_view_name(rosdistro_name, release_build_name)
        configure_release_view(jenkins, view_name)

    if generate_import_package_job:
        configure_import_package_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins)

    # sourcedeb job
    job_name = get_sourcedeb_job_name(
        rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)

    job_config = _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name, _get_target_arches(
            build_file, os_name, os_code_name, print_skipped=False),
        repo.release_repository, pkg_name,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config)

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(
            pkg_name, dist_cache, pkg_names)
        if dependency_names is None:
            return

    # binarydeb jobs
    for arch in _get_target_arches(build_file, os_name, os_code_name):
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)

        upstream_job_names = [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name,
                dependency_name, os_name, os_code_name, arch)
            for dependency_name in dependency_names]

        job_config = _get_binarydeb_job_config(
            config_url, rosdistro_name, release_build_name,
            config, build_file, os_name, os_code_name, arch,
            repo.release_repository, pkg_name, append_timestamp,
            repo_name, dist_cache=dist_cache,
            upstream_job_names=upstream_job_names)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config)
Example #36
def configure_release_job(
        config_url, rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        generate_import_package_job=True,
        generate_sync_packages_jobs=True,
        is_disabled=False, other_build_files_same_platform=None,
        groovy_script=None,
        filter_arches=None,
        dry_run=False):
    """
    Configure a Jenkins release job.

    The following jobs are created for each package:
    - M source jobs, one for each OS node name
    - M * N binary jobs, one for each combination of OS code name and arch
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    pkg_names = dist_file.release_packages.keys()

    if pkg_name not in pkg_names:
        raise JobValidationError(
            "Invalid package name '%s' " % pkg_name +
            'choose one of the following: ' + ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError(
            "Repository '%s' has no release section" % repo_name)

    if not repo.release_repository.version:
        raise JobValidationError(
            "Repository '%s' has no release version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if views is None:
        targets = []
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
        configure_release_views(
            jenkins, rosdistro_name, release_build_name, targets,
            dry_run=dry_run)

    if generate_import_package_job:
        configure_import_package_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins,
            dry_run=dry_run)

    if generate_sync_packages_jobs:
        configure_sync_packages_to_main_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins,
            dry_run=dry_run)
        for arch in build_file.targets[os_name][os_code_name]:
            configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins,
                dry_run=dry_run)

    source_job_names = []
    binary_job_names = []
    job_configs = {}

    # sourcedeb job
    # since sourcedeb jobs are potentially being shared across multiple build
    # files the configuration has to take all of them into account in order to
    # generate a job which all build files agree on
    source_job_name = get_sourcedeb_job_name(
        rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)

    # while the package is disabled in the current build file
    # it might be used by sibling build files
    is_source_disabled = is_disabled
    if is_source_disabled and other_build_files_same_platform:
        # check if sourcedeb job is used by any other build file with the same platform
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                is_source_disabled = False
                break

    job_config = _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name,
        pkg_name, repo_name, repo.release_repository, dist_cache=dist_cache,
        is_disabled=is_source_disabled,
        other_build_files_same_platform=other_build_files_same_platform)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, source_job_name, job_config, dry_run=dry_run)
    source_job_names.append(source_job_name)
    job_configs[source_job_name] = job_config

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(
            pkg_name, dist_cache, pkg_names)
        # if dependencies are not yet available in rosdistro cache
        # skip binary jobs
        if dependency_names is None:
            print(("Skipping binary jobs for package '%s' because it is not " +
                   "yet in the rosdistro cache") % pkg_name, file=sys.stderr)
            return source_job_names, binary_job_names, job_configs

    # binarydeb jobs
    for arch in build_file.targets[os_name][os_code_name]:
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)

        upstream_job_names = [source_job_name] + [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name,
                dependency_name, os_name, os_code_name, arch)
            for dependency_name in dependency_names]

        job_config = _get_binarydeb_job_config(
            config_url, rosdistro_name, release_build_name,
            config, build_file, os_name, os_code_name, arch,
            pkg_name, repo_name, repo.release_repository,
            dist_cache=dist_cache, upstream_job_names=upstream_job_names,
            is_disabled=is_disabled)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config, dry_run=dry_run)
        binary_job_names.append(job_name)
        job_configs[job_name] = job_config

    return source_job_names, binary_job_names, job_configs
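configure_release_job returns the generated names and configs, so it can also be driven without modifying Jenkins; a minimal sketch with hypothetical config URL, distro and package names, where views is set non-None and the import/sync job generation is turned off to keep the call self-contained:

source_jobs, binary_jobs, job_configs = configure_release_job(
    'https://example.com/ros_buildfarm_config/index.yaml',
    'melodic', 'default', 'std_msgs', 'ubuntu', 'bionic',
    jenkins=False,                       # skip configure_job()
    views=[],                            # skip configure_release_views()
    generate_import_package_job=False,
    generate_sync_packages_jobs=False)
print(source_jobs, binary_jobs)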
Example #37
def main(argv=None):
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(description="Creates the ros2 jobs on Jenkins")
    parser.add_argument(
        '--jenkins-url', '-u', default='https://ci.ros2.org',
        help="Url of the jenkins server to which the job should be added")
    parser.add_argument(
        '--ci-scripts-repository', default='git@github.com:ros2/ci.git',
        help="repository from which ci scripts should be cloned"
    )
    parser.add_argument(
        '--ci-scripts-default-branch', default='master',
        help="default branch of the ci repository to get ci scripts from (this is a job parameter)"
    )
    parser.add_argument(
        '--commit', action='store_true',
        help='Actually modify the Jenkins jobs instead of only doing a dry run',
    )
    parser.add_argument(
        '--select-jobs-regexp', default='',
        help='Limit the job creation to those that match the given regular expression'
    )
    parser.add_argument(
        '--context-lines', type=nonnegative_int, default=0,
        help='Set the number of diff context lines when showing differences between old and new jobs'
    )
    args = parser.parse_args(argv)

    data = {
        'build_discard': {
            'days_to_keep': 1000,
            'num_to_keep': 3000},
        'ci_scripts_repository': args.ci_scripts_repository,
        'ci_scripts_default_branch': args.ci_scripts_default_branch,
        'default_repos_url': DEFAULT_REPOS_URL,
        'supplemental_repos_url': '',
        'time_trigger_spec': '',
        'mailer_recipients': '',
        'ignore_rmw_default': {
            'rmw_connext_dynamic_cpp',
            'rmw_fastrtps_dynamic_cpp',
            'rmw_opensplice_cpp'},
        'use_connext_debs_default': 'false',
        'use_isolated_default': 'true',
        'colcon_mixin_url': 'https://raw.githubusercontent.com/colcon/colcon-mixin-repository/master/index.yaml',
        'build_args_default': '--event-handlers console_cohesion+ console_package_list+ --cmake-args -DINSTALL_EXAMPLES=OFF -DSECURITY=ON',
        'test_args_default': '--event-handlers console_direct+ --executor sequential --retest-until-pass 2 --ctest-args -LE xfail --pytest-args -m "not xfail"',
        'compile_with_clang_default': 'false',
        'enable_coverage_default': 'false',
        'dont_notify_every_unstable_build': 'false',
        'build_timeout_mins': 0,
        'ubuntu_distro': 'focal',
        'ros_distro': 'rolling',
    }

    jenkins = connect(args.jenkins_url)

    os_configs = {
        'linux': {
            'label_expression': 'linux',
            'shell_type': 'Shell',
        },
        'osx': {
            'label_expression': 'macos',
            'shell_type': 'Shell',
            # the current OS X agent can't handle git@github.com URLs
            'ci_scripts_repository': args.ci_scripts_repository.replace(
                'git@github.com:', 'https://github.com/'),
        },
        'windows-metal': {
            'label_expression': 'windows',
            'shell_type': 'BatchFile',
            'use_isolated_default': 'false',
        },
        'windows': {
            'label_expression': 'windows-container',
            'shell_type': 'BatchFile',
            'use_isolated_default': 'false',
        },
        'linux-aarch64': {
            'label_expression': 'linux_aarch64',
            'shell_type': 'Shell',
            'ignore_rmw_default': data['ignore_rmw_default'] | {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'},
        },
        'linux-armhf': {
            'label_expression': 'linux_armhf',
            'shell_type': 'Shell',
            'ignore_rmw_default': data['ignore_rmw_default'] | {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'},
            'build_args_default': data['build_args_default'].replace(
                '--cmake-args', '--cmake-args -DCMAKE_CXX_FLAGS=-Wno-psabi -DCMAKE_C_FLAGS=-Wno-psabi -DDISABLE_SANITIZERS=ON'),
        },
        'linux-centos': {
            'label_expression': 'linux',
            'shell_type': 'Shell',
            'build_args_default': '--packages-skip-by-dep image_tools ros1_bridge --packages-skip image_tools ros1_bridge ' + data['build_args_default'].replace(
                '--cmake-args', '--cmake-args -DCMAKE_POLICY_DEFAULT_CMP0072=NEW -DPYTHON_VERSION=3.6 -DDISABLE_SANITIZERS=ON'),
            'test_args_default': '--packages-skip-by-dep image_tools ros1_bridge --packages-skip image_tools ros1_bridge ' + data['test_args_default'],
        },
    }

    os_config_overrides = {
        'linux-centos': {
            'mixed_overlay_pkgs': '',
            'ignore_rmw_default': {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp', 'rmw_opensplice_cpp'},
            'use_connext_debs_default': 'false',
        },
    }

    launcher_exclude = {
        'linux-armhf',
        'linux-centos',
        'windows-metal',
    }

    jenkins_kwargs = {}
    jenkins_kwargs['context_lines'] = args.context_lines
    if not args.commit:
        jenkins_kwargs['dry_run'] = True
    if args.select_jobs_regexp:
        args.pattern_select_jobs_regexp = re.compile(args.select_jobs_regexp)

    def create_job(os_name, job_name, template_file, additional_dict):
        if args.select_jobs_regexp and not args.pattern_select_jobs_regexp.match(job_name):
            return
        job_data = dict(data)
        job_data['os_name'] = os_name
        job_data.update(os_configs[os_name])
        job_data.update(additional_dict)
        job_data.update(os_config_overrides.get(os_name, {}))
        job_config = expand_template(template_file, job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

    # configure os specific jobs
    for os_name in sorted(os_configs.keys()):
        # This short name is preserved for historical reasons, but long paths have been enabled on
        # Windows containers and their hosts
        job_os_name = os_name
        if os_name == 'windows':
            job_os_name = 'win'

        # configure manually triggered job
        create_job(os_name, 'ci_' + os_name, 'ci_job.xml.em', {
            'cmake_build_type': 'None',
        })
        # configure test jobs for experimenting with job config changes
        # Keep parameters the same as the manually triggered job above.
        create_job(os_name, 'test_ci_' + os_name, 'ci_job.xml.em', {
            'cmake_build_type': 'None',
        })

        if os_name == 'windows-metal':
            # Don't create nightlies or packaging jobs for bare-metal Windows
            continue

        packaging_label_expression = os_configs[os_name]['label_expression']
        if os_name == 'osx':
            packaging_label_expression = 'macos && mojave'

        # configure a manual version of the packaging job
        ignore_rmw_default_packaging = {'rmw_opensplice_cpp'}
        if os_name in ['linux-aarch64', 'linux-armhf']:
            ignore_rmw_default_packaging |= {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'}
        create_job(os_name, 'ci_packaging_' + os_name, 'packaging_job.xml.em', {
            'build_discard': {
                'days_to_keep': 180,
                'num_to_keep': 100,
            },
            'cmake_build_type': 'RelWithDebInfo',
            'label_expression': packaging_label_expression,
            'mixed_overlay_pkgs': 'ros1_bridge',
            'ignore_rmw_default': ignore_rmw_default_packaging,
            'use_connext_debs_default': 'true',
        })

        # configure packaging job
        create_job(os_name, 'packaging_' + os_name, 'packaging_job.xml.em', {
            'build_discard': {
                'days_to_keep': 370,
                'num_to_keep': 370,
            },
            'cmake_build_type': 'RelWithDebInfo',
            'disabled': os_name == 'linux-armhf',
            'label_expression': packaging_label_expression,
            'mixed_overlay_pkgs': 'ros1_bridge',
            'time_trigger_spec': PERIODIC_JOB_SPEC,
            'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
            'ignore_rmw_default': ignore_rmw_default_packaging,
            'use_connext_debs_default': 'true',
        })

        # create a nightly Debug packaging job on Windows
        if os_name == 'windows':
            create_job(os_name, 'packaging_' + os_name + '_debug', 'packaging_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 370,
                    'num_to_keep': 370,
                    },
                'cmake_build_type': 'Debug',
                'mixed_overlay_pkgs': 'ros1_bridge',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'ignore_rmw_default': ignore_rmw_default_packaging,
                'use_connext_debs_default': 'true',
            })

        # configure nightly triggered job
        if os_name != 'linux-armhf':
            job_name = 'nightly_' + job_os_name + '_debug'
            if os_name == 'windows':
                job_name = job_name[:15]
            create_job(os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Debug',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
            })

        # configure nightly job for testing with address sanitizer on linux
        if os_name == 'linux':
            asan_build_args = data['build_args_default'].replace('--cmake-args',
                '--cmake-args -DOSRF_TESTING_TOOLS_CPP_DISABLE_MEMORY_TOOLS=ON') + \
                ' --mixin asan-gcc --packages-up-to rcpputils'

            create_job(os_name, 'nightly_{}_address_sanitizer'.format(os_name), 'ci_job.xml.em', {
                'cmake_build_type': 'Debug',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS + ' [email protected]',
                'build_args_default': asan_build_args,
                'test_args_default': data['test_args_default'] + ' --packages-up-to rcpputils',
            })

        # configure nightly job for compiling with clang+libcxx on linux
        if os_name == 'linux':
            # Set the logging implementation to noop because log4cxx will not link properly when using libcxx.
            clang_libcxx_build_args = data['build_args_default'].replace('--cmake-args',
                '--cmake-args -DRCL_LOGGING_IMPLEMENTATION=rcl_logging_noop') + \
                ' --mixin clang-libcxx --packages-skip intra_process_demo'
            create_job(os_name, 'nightly_' + os_name + '_clang_libcxx', 'ci_job.xml.em', {
                'cmake_build_type': 'Debug',
                'compile_with_clang_default': 'true',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS + ' [email protected]',
                'build_args_default': clang_libcxx_build_args,
                # Only running tests from the lowest-level C package to ensure "working" binaries are generated.
                # We do not want to test more than this because we observe issues with the clang libcxx standard
                # library that we don't plan to tackle for now. The important part of this nightly is to make sure
                # the code compiles without emitting thread-safety related warnings.
                'test_args_default': data['test_args_default'].replace(' --retest-until-pass 2', '') + ' --packages-select rcutils'
            })

        # configure nightly job for testing rmw/rcl based packages with thread sanitizer on linux
        if os_name == 'linux':
            tsan_build_args = data['build_args_default'].replace('--cmake-args',
                '--cmake-args -DOSRF_TESTING_TOOLS_CPP_DISABLE_MEMORY_TOOLS=ON') + \
                ' --mixin tsan --packages-up-to rcpputils rcutils'

            create_job(os_name, 'nightly_' + os_name + '_thread_sanitizer', 'ci_job.xml.em', {
                'cmake_build_type': 'Debug',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS + ' [email protected]',
                'build_args_default': tsan_build_args,
                'test_args_default': data['test_args_default'] + ' --packages-select rcpputils rcutils',
            })

        # configure a manually triggered version of the coverage job

        # Proposed list of packages to maximize coverage while testing quality level
        # packages. The list is composed of the quality level packages plus the
        # packages from ros2.repos that are used by the quality level packages during
        # tests.

        # out of the list since ignored by colcon: shape_msgs, stereo_msgs, rmw_connext, rmw_cyclonedds
        quality_level_pkgs = [
            'action_msgs',
            'ament_index_cpp',
            'builtin_interfaces',
            'class_loader',
            'composition_interfaces',
            'console_bridge_vendor',
            'diagnostic_msgs',
            'fastcdr',
            'fastrtps',
            'foonathan_memory_vendor',
            'geometry_msgs',
            'libstatistics_collector',
            'libyaml_vendor',
            'lifecycle_msgs',
            'nav_msgs',
            'rcl',
            'rcl_action',
            'rcl_interfaces',
            'rcl_lifecycle',
            'rcl_logging_spdlog',
            'rcl_yaml_param_parser',
            'rclcpp',
            'rclcpp_action',
            'rclcpp_components',
            'rclcpp_lifecycle',
            'rcpputils',
            'rcutils',
            'rmw',
            'rmw_dds_common',
            'rmw_fastrtps_cpp',
            'rmw_fastrtps_shared_cpp',
            'rmw_implementation',
            'rosgraph_msgs',
            'rosidl_default_runtime',
            'rosidl_runtime_c',
            'rosidl_runtime_cpp',
            'rosidl_typesupport_c',
            'rosidl_typesupport_cpp',
            'rosidl_typesupport_fastrtps_c',
            'rosidl_typesupport_fastrtps_cpp',
            'rosidl_typesupport_interface',
            'spdlog_vendor',
            'statistics_msgs',
            'std_msgs',
            'std_srvs',
            'tracetools',
            'trajectory_msgs',
            'unique_identifier_msgs',
            'visualization_msgs',
        ]

        # out of the list since ignored by colcon: ros1_bridge
        testing_pkgs_for_quality_level = [
            'interactive_markers',
            'launch_testing_ros',
            'message_filters',
            'ros2action',
            'ros2component',
            'ros2doctor',
            'ros2interface',
            'ros2lifecycle',
            'ros2lifecycle_test_fixtures',
            'ros2param',
            'ros2topic',
            'rosbag2_compression',
            'rosbag2_converter_default_plugins',
            'rosbag2_cpp',
            'rosbag2_storage',
            'rosbag2_storage_default_plugins',
            'rosbag2_test_common',
            'rosbag2_tests',
            'rosbag2_transport',
            'rosidl_generator_c',
            'rosidl_generator_cpp',
            'rosidl_generator_py',
            'rosidl_runtime_py',
            'rosidl_typesupport_connext_c',
            'rosidl_typesupport_connext_cpp',
            'rosidl_typesupport_introspection_c',
            'rosidl_typesupport_introspection_cpp',
            'test_cli',
            'test_cli_remapping',
            'test_communication',
            'test_launch_ros',
            'test_msgs',
            'test_quality_of_service',
            'test_rclcpp',
            'test_security',
            'test_tf2',
            'tf2',
            'tf2_bullet',
            'tf2_eigen',
            'tf2_geometry_msgs',
            'tf2_kdl',
            'tf2_msgs',
            'tf2_py',
            'tf2_ros',
            'tf2_sensor_msgs',
            'tracetools_test',
        ]

        if os_name == 'linux':
            create_job(os_name, 'ci_' + os_name + '_coverage', 'ci_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 100,
                    'num_to_keep': 100,
                },
                'cmake_build_type': 'Debug',
                'enable_coverage_default': 'true',
                'build_args_default': data['build_args_default'] + ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp ' +
                                      '--packages-up-to ' + ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
                'test_args_default': data['test_args_default'] + ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp ' +
                                     '--packages-up-to ' + ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
            })
            create_job(os_name, 'test_' + os_name + '_coverage', 'ci_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 100,
                    'num_to_keep': 100,
                },
                'cmake_build_type': 'Debug',
                'enable_coverage_default': 'true',
                'build_args_default': data['build_args_default'] + ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp ' +
                                      '--packages-up-to ' + ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
                'test_args_default': data['test_args_default'] + ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp ' +
                                     '--packages-up-to ' + ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
            })

        # configure nightly coverage job on x86 Linux only
        if os_name == 'linux':
            create_job(os_name, 'nightly_' + os_name + '_coverage', 'ci_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 100,
                    'num_to_keep': 100,
                },
                'cmake_build_type': 'Debug',
                'enable_coverage_default': 'true',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'build_args_default': data['build_args_default'] +
                                      ' --packages-up-to ' + ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
                'test_args_default': data['test_args_default'] +
                                     ' --packages-up-to ' + ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
            })
            # Add a coverage job targeting Foxy.
            create_job(os_name, 'nightly_' + os_name + '_foxy_coverage', 'ci_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 100,
                    'num_to_keep': 100,
                },
                'cmake_build_type': 'Debug',
                'default_repos_url': 'https://raw.githubusercontent.com/ros2/ros2/foxy/ros2.repos',
                'enable_coverage_default': 'true',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'ros_distro': 'foxy',
                'ubuntu_distro': 'focal',
                'build_args_default': data['build_args_default'] +
                                      ' --packages-up-to ' + ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
                'test_args_default': data['test_args_default'] +
                                     ' --packages-up-to ' + ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
            })

        # configure nightly triggered job using FastRTPS dynamic
        if os_name != 'linux-armhf':
            job_name = 'nightly_' + job_os_name + '_extra_rmw' + '_release'
            if os_name == 'windows':
                job_name = job_name[:25]
            create_job(os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Release',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'ignore_rmw_default': {
                    'rmw_connext_cpp',
                    'rmw_connext_dynamic_cpp',
                    'rmw_opensplice_cpp'},
            })

        # configure nightly triggered job
        if os_name != 'linux-armhf':
            job_name = 'nightly_' + job_os_name + '_release'
            if os_name == 'windows':
                job_name = job_name[:15]
            create_job(os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Release',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
            })

        # configure nightly triggered job with repeated testing
        if os_name != 'linux-armhf':
            job_name = 'nightly_' + job_os_name + '_repeated'
            if os_name == 'windows':
                job_name = job_name[:15]
            test_args_default = os_configs.get(os_name, data).get('test_args_default', data['test_args_default'])
            test_args_default = test_args_default.replace('--retest-until-pass', '--retest-until-fail')
            test_args_default = test_args_default.replace('--ctest-args -LE xfail', '--ctest-args -LE "(linter|xfail)"')
            test_args_default = test_args_default.replace('--pytest-args -m "not xfail"', '--pytest-args -m "not linter and not xfail"')
            if job_os_name == 'linux-aarch64':
                # skip tests known to be flaky: https://github.com/ros2/rviz/issues/368
                test_args_default += ' --packages-skip rviz_common rviz_default_plugins rviz_rendering rviz_rendering_tests'
            create_job(os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'None',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'test_args_default': test_args_default,
            })

        # configure nightly triggered job for excluded (xfail) tests
        if os_name != 'linux-armhf':
            job_name = 'nightly_' + job_os_name + '_xfail'
            test_args_default = os_configs.get(os_name, data).get('test_args_default', data['test_args_default'])
            test_args_default = test_args_default.replace('--ctest-args -LE xfail', '--ctest-args -L xfail')
            test_args_default = test_args_default.replace('--pytest-args -m "not xfail"', '--pytest-args -m xfail --runxfail')
            create_job(os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'None',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'test_args_default': test_args_default,
            })

    # configure the launch job
    launcher_job_name = 'ci_launcher'
    if not args.select_jobs_regexp or args.pattern_select_jobs_regexp.match(launcher_job_name):
        os_specific_data = collections.OrderedDict()
        for os_name in sorted(os_configs.keys() - launcher_exclude):
            os_specific_data[os_name] = dict(data)
            os_specific_data[os_name].update(os_configs[os_name])
            os_specific_data[os_name]['job_name'] = 'ci_' + os_name
        job_data = dict(data)
        job_data['ci_scripts_default_branch'] = args.ci_scripts_default_branch
        job_data['label_expression'] = 'master'
        job_data['os_specific_data'] = os_specific_data
        job_data['cmake_build_type'] = 'None'
        job_config = expand_template('ci_launcher_job.xml.em', job_data)
        configure_job(jenkins, launcher_job_name, job_config, **jenkins_kwargs)
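
The create_job() helper above layers four dictionaries before expanding the job template: the shared defaults in data, the per-OS entry from os_configs, the job-specific additional_dict, and finally os_config_overrides. Because later dict.update() calls win, the overrides take precedence over everything else. A minimal, self-contained sketch of that precedence; the keys and values below are illustrative, not taken from the script:

shared_defaults = {'cmake_build_type': 'None', 'use_isolated_default': 'true'}
per_os = {'use_isolated_default': 'false'}           # e.g. an os_configs entry
per_job = {'cmake_build_type': 'RelWithDebInfo'}     # e.g. a job's additional_dict
overrides = {'use_connext_debs_default': 'false'}    # e.g. an os_config_overrides entry

job_data = dict(shared_defaults)    # copy, so the shared defaults stay untouched
job_data.update(per_os)             # per-OS settings override the shared defaults
job_data.update(per_job)            # job-specific settings override per-OS settings
job_data.update(overrides)          # OS-specific overrides win over everything
print(job_data)
# {'cmake_build_type': 'RelWithDebInfo', 'use_isolated_default': 'false',
#  'use_connext_debs_default': 'false'}
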
Example #38
0
def configure_devel_job(
        config_url, rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_source_build_files(config, rosdistro_name)
        build_file = build_files[source_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    repo_names = dist_file.repositories.keys()
    repo_names = build_file.filter_repositories(repo_names)

    if repo_name not in repo_names:
        return "Invalid repository name '%s' " % repo_name + \
            'choose one of the following: ' + \
            ', '.join(sorted(repo_names))

    repo = dist_file.repositories[repo_name]

    if not repo.source_repository:
        return "Repository '%s' has no source section" % repo_name
    if not repo.source_repository.version:
        return "Repository '%s' has no source version" % repo_name

    if os_name not in build_file.targets.keys():
        return "Invalid OS name '%s' " % os_name + \
            'choose one of the following: ' + \
            ', '.join(sorted(build_file.targets.keys()))
    if os_code_name not in build_file.targets[os_name].keys():
        return "Invalid OS code name '%s' " % os_code_name + \
            'choose one of the following: ' + \
            ', '.join(sorted(build_file.targets[os_name].keys()))
    if arch not in build_file.targets[os_name][os_code_name]:
        return "Invalid architecture '%s' " % arch + \
            'choose one of the following: ' + \
            ', '.join(sorted(
                build_file.targets[os_name][os_code_name]))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_devel_view_name(rosdistro_name, source_build_name)
        configure_devel_view(jenkins, view_name)

    job_name = get_devel_job_name(
        rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch)

    job_config = _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, repo.source_repository,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config)
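
configure_devel_job() above reports bad input by returning an error string rather than raising, walking the nested build_file.targets mapping one level at a time (OS name, then OS code name, then architecture). A small self-contained sketch of that lookup chain, with a made-up targets mapping:

targets = {'ubuntu': {'focal': ['amd64', 'arm64']}}

def validate_target(os_name, os_code_name, arch):
    # Mirror the three checks above: OS name, then OS code name, then architecture.
    if os_name not in targets:
        return "Invalid OS name '%s', choose one of: %s" % (
            os_name, ', '.join(sorted(targets)))
    if os_code_name not in targets[os_name]:
        return "Invalid OS code name '%s', choose one of: %s" % (
            os_code_name, ', '.join(sorted(targets[os_name])))
    if arch not in targets[os_name][os_code_name]:
        return "Invalid architecture '%s', choose one of: %s" % (
            arch, ', '.join(sorted(targets[os_name][os_code_name])))
    return None  # all three levels are valid

print(validate_target('ubuntu', 'focal', 'amd64'))  # None
print(validate_target('ubuntu', 'focal', 'i386'))   # error message listing amd64, arm64
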
Example #39
0
def main(argv=None):
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Creates the ros2 jobs on Jenkins")
    parser.add_argument(
        '--jenkins-url',
        '-u',
        default='https://ci.ros2.org',
        help="Url of the jenkins server to which the job should be added")
    parser.add_argument(
        '--ci-scripts-repository',
        default='[email protected]:ros2/ci.git',
        help="repository from which ci scripts should be cloned")
    parser.add_argument(
        '--ci-scripts-default-branch',
        default='master',
        help=
        "default branch of the ci repository to get ci scripts from (this is a job parameter)"
    )
    parser.add_argument(
        '--commit',
        action='store_true',
        help='Actually modify the Jenkins jobs instead of only doing a dry run',
    )
    args = parser.parse_args(argv)

    data = {
        'build_discard': {
            'days_to_keep': 1000,
            'num_to_keep': 3000
        },
        'ci_scripts_repository': args.ci_scripts_repository,
        'ci_scripts_default_branch': args.ci_scripts_default_branch,
        'default_repos_url': DEFAULT_REPOS_URL,
        'supplemental_repos_url': '',
        'time_trigger_spec': '',
        'mailer_recipients': '',
        'ignore_rmw_default': {
            'rmw_connext_dynamic_cpp', 'rmw_cyclonedds_cpp',
            'rmw_fastrtps_dynamic_cpp', 'rmw_opensplice_cpp'
        },
        'use_connext_debs_default': 'false',
        'use_isolated_default': 'true',
        'colcon_mixin_url':
        'https://raw.githubusercontent.com/colcon/colcon-mixin-repository/master/index.yaml',
        'build_args_default':
        '--event-handlers console_cohesion+ console_package_list+ --cmake-args -DINSTALL_EXAMPLES=OFF -DSECURITY=ON',
        'test_args_default':
        '--event-handlers console_direct+ --executor sequential --retest-until-pass 10',
        'compile_with_clang_default': 'false',
        'enable_c_coverage_default': 'false',
        'dont_notify_every_unstable_build': 'false',
        'turtlebot_demo': False,
        'build_timeout_mins': 0,
        'ubuntu_distro': 'bionic',
    }

    jenkins = connect(args.jenkins_url)

    os_configs = {
        'linux': {
            'label_expression': 'linux',
            'shell_type': 'Shell',
        },
        'osx': {
            'label_expression':
            'macos',
            'shell_type':
            'Shell',
            # the current OS X agent can't handle git@github URLs
            'ci_scripts_repository':
            args.ci_scripts_repository.replace('[email protected]:',
                                               'https://github.com/'),
        },
        'windows': {
            'label_expression': 'windows',
            'shell_type': 'BatchFile',
            'use_isolated_default': 'false',
        },
        'linux-aarch64': {
            'label_expression': 'linux_aarch64',
            'shell_type': 'Shell',
            'ignore_rmw_default': data['ignore_rmw_default']
            | {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'},
        },
        'linux-armhf': {
            'label_expression':
            'linux_armhf',
            'shell_type':
            'Shell',
            'ignore_rmw_default':
            data['ignore_rmw_default']
            | {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'},
            'build_args_default':
            data['build_args_default'].replace(
                '--cmake-args',
                '--cmake-args -DCMAKE_CXX_FLAGS=-Wno-psabi -DCMAKE_C_FLAGS=-Wno-psabi -DDISABLE_SANITIZERS=ON'
            ),
        },
        'linux-centos': {
            'label_expression':
            'linux',
            'shell_type':
            'Shell',
            'build_args_default':
            '--packages-skip-by-dep image_tools ros1_bridge --packages-skip image_tools ros1_bridge '
            + data['build_args_default'].replace(
                '--cmake-args',
                '--cmake-args -DCMAKE_POLICY_DEFAULT_CMP0072=NEW -DPYTHON_VERSION=3.6 -DDISABLE_SANITIZERS=ON'
            ),
            'test_args_default':
            '--packages-skip-by-dep image_tools ros1_bridge --packages-skip image_tools ros1_bridge '
            + data['test_args_default'],
        },
    }

    os_config_overrides = {
        'linux-centos': {
            'mixed_overlay_pkgs': '',
            'ignore_rmw_default': {
                'rmw_connext_cpp', 'rmw_connext_dynamic_cpp',
                'rmw_opensplice_cpp'
            },
            'use_connext_debs_default': 'false',
        },
    }

    launcher_exclude = {
        'linux-armhf',
        'linux-centos',
    }

    jenkins_kwargs = {}
    if not args.commit:
        jenkins_kwargs['dry_run'] = True

    def create_job(os_name, job_name, template_file, additional_dict):
        job_data = dict(data)
        job_data['os_name'] = os_name
        job_data.update(os_configs[os_name])
        job_data.update(additional_dict)
        job_data.update(os_config_overrides.get(os_name, {}))
        job_config = expand_template(template_file, job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

    # configure os specific jobs
    for os_name in sorted(os_configs.keys()):
        # We need to keep the paths short on Windows, so on that platform make
        # the os_name shorter just for the jobs
        job_os_name = os_name
        if os_name == 'windows':
            job_os_name = 'win'

        # configure manually triggered job
        create_job(os_name, 'ci_' + os_name, 'ci_job.xml.em', {
            'cmake_build_type': 'None',
        })
        # configure test jobs for experimenting with job config changes
        # Keep parameters the same as the manually triggered job above.
        create_job(os_name, 'test_ci_' + os_name, 'ci_job.xml.em', {
            'cmake_build_type': 'None',
        })

        # configure a manual version of the packaging job
        create_job(
            os_name, 'ci_packaging_' + os_name, 'packaging_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 180,
                    'num_to_keep': 100,
                },
                'cmake_build_type': 'RelWithDebInfo',
                'mixed_overlay_pkgs': 'ros1_bridge',
                'ignore_rmw_default':
                {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'}
                if os_name in ['linux-aarch64', 'linux-armhf'] else set(),
                'use_connext_debs_default': 'true',
            })

        # configure packaging job
        create_job(
            os_name, 'packaging_' + os_name, 'packaging_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 370,
                    'num_to_keep': 370,
                },
                'cmake_build_type': 'RelWithDebInfo',
                'mixed_overlay_pkgs': 'ros1_bridge',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'ignore_rmw_default':
                {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'}
                if os_name in ['linux-aarch64', 'linux-armhf'] else set(),
                'use_connext_debs_default': 'true',
            })

        # create a nightly Debug packaging job on Windows
        if os_name == 'windows':
            create_job(
                os_name, 'packaging_' + os_name + '_debug',
                'packaging_job.xml.em', {
                    'build_discard': {
                        'days_to_keep': 370,
                        'num_to_keep': 370,
                    },
                    'cmake_build_type': 'Debug',
                    'mixed_overlay_pkgs': 'ros1_bridge',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                    'ignore_rmw_default':
                    {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'}
                    if os_name in ['linux-aarch64', 'linux-armhf'] else set(),
                    'use_connext_debs_default': 'true',
                })

        # configure nightly triggered job
        job_name = 'nightly_' + job_os_name + '_debug'
        if os_name == 'windows':
            job_name = job_name[:15]
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Debug',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
            })

        # configure nightly job for testing with address sanitizer on linux
        if os_name == 'linux':
            asan_build_args = data['build_args_default'].replace('--cmake-args',
                '--cmake-args -DOSRF_TESTING_TOOLS_CPP_DISABLE_MEMORY_TOOLS=ON') + \
                ' --mixin asan-gcc --packages-up-to rcpputils'

            create_job(
                os_name, 'nightly_{}_address_sanitizer'.format(os_name),
                'ci_job.xml.em', {
                    'cmake_build_type':
                    'Debug',
                    'time_trigger_spec':
                    PERIODIC_JOB_SPEC,
                    'mailer_recipients':
                    DEFAULT_MAIL_RECIPIENTS + ' [email protected]',
                    'build_args_default':
                    asan_build_args,
                    'test_args_default':
                    ('--event-handlers console_direct+ --executor sequential '
                     '--retest-until-pass 10 --packages-up-to rcpputils'),
                })

        # configure nightly job for compiling with clang+libcxx on linux
        if os_name == 'linux':
            # Set the logging implementation to noop because log4cxx will not link properly when using libcxx.
            clang_libcxx_build_args = data['build_args_default'].replace('--cmake-args',
                '--cmake-args -DRCL_LOGGING_IMPLEMENTATION=rcl_logging_noop') + \
                ' --mixin clang-libcxx'
            create_job(
                os_name,
                'nightly_' + os_name + '_clang_libcxx',
                'ci_job.xml.em',
                {
                    'cmake_build_type':
                    'Debug',
                    'compile_with_clang_default':
                    'true',
                    'time_trigger_spec':
                    PERIODIC_JOB_SPEC,
                    'mailer_recipients':
                    DEFAULT_MAIL_RECIPIENTS + ' [email protected]',
                    'build_args_default':
                    clang_libcxx_build_args,
                    # Only running tests from the lowest-level C package to ensure "working" binaries are generated.
                    # We do not want to test more than this because we observe issues with the clang libcxx standard
                    # library that we don't plan to tackle for now. The important part of this nightly is to make sure
                    # the code compiles without emitting thread-safety related warnings.
                    'test_args_default':
                    '--event-handlers console_direct+ --executor sequential --packages-select rcutils',
                })

        # configure nightly job for testing rmw/rcl based packages with thread sanitizer on linux
        if os_name == 'linux':
            tsan_build_args = data['build_args_default'].replace('--cmake-args',
                '--cmake-args -DOSRF_TESTING_TOOLS_CPP_DISABLE_MEMORY_TOOLS=ON') + \
                ' --mixin tsan --packages-up-to rcpputils rcutils'

            create_job(
                os_name, 'nightly_' + os_name + '_thread_sanitizer',
                'ci_job.xml.em', {
                    'cmake_build_type':
                    'Debug',
                    'time_trigger_spec':
                    PERIODIC_JOB_SPEC,
                    'mailer_recipients':
                    DEFAULT_MAIL_RECIPIENTS + ' [email protected]',
                    'build_args_default':
                    tsan_build_args,
                    'test_args_default':
                    ('--event-handlers console_direct+ --executor sequential '
                     '--retest-until-pass 10 --packages-select rcpputils rcutils'
                     ),
                })

        # configure a manually triggered version of the coverage job
        if os_name == 'linux':
            create_job(
                os_name, 'ci_' + os_name + '_coverage', 'ci_job.xml.em', {
                    'cmake_build_type':
                    'Debug',
                    'enable_c_coverage_default':
                    'true',
                    'build_args_default':
                    data['build_args_default'] +
                    ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp',
                    'test_args_default':
                    data['test_args_default'] +
                    ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp',
                })
            create_job(
                os_name, 'test_' + os_name + '_coverage', 'ci_job.xml.em', {
                    'cmake_build_type':
                    'Debug',
                    'enable_c_coverage_default':
                    'true',
                    'build_args_default':
                    data['build_args_default'] +
                    ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp',
                    'test_args_default':
                    data['test_args_default'] +
                    ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp',
                })

        # configure nightly coverage job on x86 Linux only
        if os_name == 'linux':
            create_job(
                os_name, 'nightly_' + os_name + '_coverage', 'ci_job.xml.em', {
                    'cmake_build_type': 'Debug',
                    'enable_c_coverage_default': 'true',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                })

        # configure nightly triggered job using opensplice
        job_name = 'nightly_' + job_os_name + '_extra_rmw' + '_release'
        if os_name == 'windows':
            job_name = job_name[:25]
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Release',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'ignore_rmw_default':
                {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'},
            })

        # configure nightly triggered job
        job_name = 'nightly_' + job_os_name + '_release'
        if os_name == 'windows':
            job_name = job_name[:15]
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Release',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
            })

        # configure nightly triggered job with repeated testing
        job_name = 'nightly_' + job_os_name + '_repeated'
        if os_name == 'windows':
            job_name = job_name[:15]
        test_args_default = os_configs.get(os_name,
                                           data).get('test_args_default',
                                                     data['test_args_default'])
        test_args_default = test_args_default.replace(
            '--retest-until-pass', '--retest-until-fail'
        ) + " --ctest-args -LE linter --pytest-args -m 'not linter'"
        if job_os_name == 'linux-aarch64':
            # skip tests known to be flaky: https://github.com/ros2/rviz/issues/368
            test_args_default += ' --packages-skip rviz_common rviz_default_plugins rviz_rendering rviz_rendering_tests'
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'None',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'test_args_default': test_args_default,
            })

        # configure turtlebot jobs on Linux only for now
        if os_name in ['linux', 'linux-aarch64']:
            create_job(
                os_name, 'ci_turtlebot-demo_' + os_name, 'ci_job.xml.em', {
                    'cmake_build_type':
                    'None',
                    'turtlebot_demo':
                    True,
                    'supplemental_repos_url':
                    'https://raw.githubusercontent.com/ros2/turtlebot2_demo/master/turtlebot2_demo.repos',
                })
            create_job(
                os_name, 'nightly_turtlebot-demo_' + os_name + '_release',
                'ci_job.xml.em', {
                    'disabled': True,
                    'cmake_build_type': 'Release',
                    'turtlebot_demo': True,
                    'supplemental_repos_url':
                    'https://raw.githubusercontent.com/ros2/turtlebot2_demo/master/turtlebot2_demo.repos',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                })

    # configure the launch job
    os_specific_data = collections.OrderedDict()
    for os_name in sorted(os_configs.keys() - launcher_exclude):
        os_specific_data[os_name] = dict(data)
        os_specific_data[os_name].update(os_configs[os_name])
        os_specific_data[os_name]['job_name'] = 'ci_' + os_name
    job_data = dict(data)
    job_data['ci_scripts_default_branch'] = args.ci_scripts_default_branch
    job_data['label_expression'] = 'master'
    job_data['os_specific_data'] = os_specific_data
    job_data['cmake_build_type'] = 'None'
    job_config = expand_template('ci_launcher_job.xml.em', job_data)
    configure_job(jenkins, 'ci_launcher', job_config, **jenkins_kwargs)
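
Several nightly jobs above truncate their names on Windows (job_name[:15] or job_name[:25]) because the job name becomes part of the Jenkins workspace path and those paths need to stay short on Windows. A minimal sketch of that pattern; the helper name and the default limit are illustrative, not part of the scripts:

def shorten_for_windows(job_name, limit=15):
    """Truncate a job name so the Jenkins workspace path stays short."""
    return job_name[:limit]

print(shorten_for_windows('nightly_win_debug'))                  # nightly_win_deb
print(shorten_for_windows('nightly_win_extra_rmw_release', 25))  # nightly_win_extra_rmw_rel
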
Example #40
0
def configure_ci_job(config_url,
                     rosdistro_name,
                     ci_build_name,
                     os_name,
                     os_code_name,
                     arch,
                     config=None,
                     build_file=None,
                     index=None,
                     dist_file=None,
                     jenkins=None,
                     views=None,
                     is_disabled=False,
                     groovy_script=None,
                     build_targets=None,
                     dry_run=False,
                     underlay_source_paths=None,
                     trigger_timer=None):
    """
    Configure a single Jenkins CI job.

    This includes the following steps:
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the ci/run_ci_job.py script
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_ci_build_files(config, rosdistro_name)
        build_file = build_files[ci_build_name]
    # Overwrite build_file.targets if build_targets is specified
    if build_targets is not None:
        build_file.targets = build_targets

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    if os_name not in build_file.targets.keys():
        raise JobValidationError("Invalid OS name '%s' " % os_name +
                                 'choose one of the following: ' +
                                 ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' %
            ', '.join(sorted(build_file.targets[os_name][os_code_name])))

    underlay_source_jobs = [
        get_ci_job_name(rosdistro_name, os_name, os_code_name, arch,
                        underlay_job)
        for underlay_job in build_file.underlay_from_ci_jobs
    ]
    underlay_source_paths = (underlay_source_paths or []) + \
        ['$UNDERLAY%d_JOB_SPACE' % (index + 1) for index in range(len(underlay_source_jobs))]

    trigger_jobs = [
        get_ci_job_name(rosdistro_name, os_name, os_code_name, arch,
                        trigger_job)
        for trigger_job in build_file.jenkins_job_upstream_triggers
    ]

    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        view_name = get_ci_view_name(rosdistro_name)
        configure_ci_view(jenkins, view_name, dry_run=dry_run)

    job_name = get_ci_job_name(rosdistro_name, os_name, os_code_name, arch,
                               ci_build_name)

    job_config = _get_ci_job_config(index,
                                    rosdistro_name,
                                    build_file,
                                    os_name,
                                    os_code_name,
                                    arch,
                                    build_file.repos_files,
                                    build_file.repository_names,
                                    underlay_source_jobs,
                                    underlay_source_paths,
                                    trigger_timer,
                                    trigger_jobs,
                                    is_disabled=is_disabled)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)

    return job_name, job_config
def configure_reconfigure_jobs_job(
        jenkins, group_name, args, config, build_file):
    job_config = get_reconfigure_jobs_job_config(args, config, build_file)
    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(jenkins, job_name, job_config)
def configure_trigger_jobs_job(
        jenkins, group_name, build_file):
    job_config = get_trigger_jobs_job_config(group_name, build_file)
    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(jenkins, job_name, job_config)
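
A short usage sketch for configure_ci_job() as defined above: the config URL, distro name and target triple below are placeholders, and the call assumes the module's imports and helpers (get_config_index, _get_ci_job_config, ...) are available and the buildfarm config is reachable. With dry_run=True the job XML is generated and diffed but Jenkins is not modified:

job_name, job_config = configure_ci_job(
    'https://example.com/ros_buildfarm_config/index.yaml',  # config_url (placeholder)
    'rolling',   # rosdistro_name
    'default',   # ci_build_name, must exist in the CI build files
    'ubuntu', 'focal', 'amd64',
    dry_run=True)  # generate and diff the job config without pushing it
print(job_name)
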
Example #43
0
def main(argv=None):
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Creates the ros2 jobs on Jenkins")
    parser.add_argument(
        '--jenkins-url',
        '-u',
        default='https://ci.ros2.org',
        help="Url of the jenkins server to which the job should be added")
    parser.add_argument(
        '--ci-scripts-repository',
        default='[email protected]:ros2/ci.git',
        help="repository from which ci scripts should be cloned")
    parser.add_argument(
        '--ci-scripts-default-branch',
        default='master',
        help=
        "default branch of the ci repository to get ci scripts from (this is a job parameter)"
    )
    parser.add_argument(
        '--commit',
        action='store_true',
        help='Actually modify the Jenkins jobs instead of only doing a dry run',
    )
    args = parser.parse_args(argv)

    data = {
        'ci_scripts_repository': args.ci_scripts_repository,
        'ci_scripts_default_branch': args.ci_scripts_default_branch,
        'default_repos_url': DEFAULT_REPOS_URL,
        'supplemental_repos_url': '',
        'time_trigger_spec': '',
        'mailer_recipients': '',
        'use_connext_default': 'true',
        'disable_connext_static_default': 'false',
        'disable_connext_dynamic_default': 'true',
        'use_connext_debs_default': 'false',
        'use_fastrtps_default': 'true',
        'use_opensplice_default': 'false',
        'use_isolated_default': 'true',
        'build_args_default':
        '--event-handler console_cohesion+ --cmake-args -DSECURITY=ON',
        'test_args_default':
        '--event-handler console_direct+ --executor sequential --retest-until-pass 10',
        'enable_c_coverage_default': 'false',
        'dont_notify_every_unstable_build': 'false',
        'turtlebot_demo': False,
        'build_timeout_mins': 0,
        'ubuntu_distro': 'bionic',
    }

    jenkins = connect(args.jenkins_url)

    os_configs = {
        'linux': {
            'label_expression': 'linux',
            'shell_type': 'Shell',
        },
        'osx': {
            'label_expression':
            'macos',
            'shell_type':
            'Shell',
            # the current OS X agent can't handle git@github URLs
            'ci_scripts_repository':
            args.ci_scripts_repository.replace('[email protected]:',
                                               'https://github.com/'),
        },
        'windows': {
            'label_expression': 'windows',
            'shell_type': 'BatchFile',
            'use_isolated_default': 'false',
        },
        'linux-aarch64': {
            'label_expression': 'linux_aarch64',
            'shell_type': 'Shell',
            'use_connext_default': 'false',
        },
    }

    jenkins_kwargs = {}
    if not args.commit:
        jenkins_kwargs['dry_run'] = True

    def create_job(os_name, job_name, template_file, additional_dict):
        job_data = dict(data)
        job_data['os_name'] = os_name
        job_data.update(os_configs[os_name])
        job_data.update(additional_dict)
        job_config = expand_template(template_file, job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

    # configure os specific jobs
    for os_name in sorted(os_configs.keys()):
        # We need to keep the paths short on Windows, so on that platform make
        # the os_name shorter just for the jobs
        job_os_name = os_name
        if os_name == 'windows':
            job_os_name = 'win'

        # configure manually triggered job
        create_job(os_name, 'ci_' + os_name, 'ci_job.xml.em', {
            'cmake_build_type': 'None',
        })
        # configure test jobs for experimenting with job config changes
        # Keep parameters the same as the manually triggered job above.
        create_job(os_name, 'test_ci_' + os_name, 'ci_job.xml.em', {
            'cmake_build_type': 'None',
        })

        # configure a manual version of the packaging job
        create_job(
            os_name, 'ci_packaging_' + os_name, 'packaging_job.xml.em', {
                'cmake_build_type': 'RelWithDebInfo',
                'test_bridge_default': 'true',
                'use_fastrtps_default': 'true',
                'use_opensplice_default': 'true',
                'use_connext_default':
                'false' if os_name == 'linux-aarch64' else 'true',
                'use_connext_debs_default': 'true',
            })

        # configure packaging job
        create_job(
            os_name, 'packaging_' + os_name, 'packaging_job.xml.em', {
                'cmake_build_type': 'RelWithDebInfo',
                'test_bridge_default': 'true',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'use_fastrtps_default': 'true',
                'use_opensplice_default': 'true',
                'use_connext_default':
                'false' if os_name == 'linux-aarch64' else 'true',
                'use_connext_debs_default': 'true',
            })

        # create a nightly Debug packaging job on Windows
        if os_name == 'windows':
            create_job(
                os_name, 'packaging_' + os_name + '_debug',
                'packaging_job.xml.em', {
                    'cmake_build_type':
                    'Debug',
                    'test_bridge_default':
                    'true',
                    'time_trigger_spec':
                    PERIODIC_JOB_SPEC,
                    'mailer_recipients':
                    DEFAULT_MAIL_RECIPIENTS,
                    'use_fastrtps_default':
                    'true',
                    'use_opensplice_default':
                    'true',
                    'use_connext_default':
                    'false' if os_name == 'linux-aarch64' else 'true',
                    'use_connext_debs_default':
                    'true',
                })

        # configure nightly triggered job
        job_name = 'nightly_' + job_os_name + '_debug'
        if os_name == 'windows':
            job_name = job_name[:15]
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Debug',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
            })

        # configure a manually triggered version of the coverage job
        if os_name == 'linux':
            create_job(os_name, 'ci_' + os_name + '_coverage', 'ci_job.xml.em',
                       {
                           'cmake_build_type': 'Debug',
                           'enable_c_coverage_default': 'true',
                       })
            create_job(os_name, 'test_' + os_name + '_coverage',
                       'ci_job.xml.em', {
                           'cmake_build_type': 'Debug',
                           'enable_c_coverage_default': 'true',
                       })

        # configure nightly coverage job on x86 Linux only
        if os_name == 'linux':
            create_job(
                os_name, 'nightly_' + os_name + '_coverage', 'ci_job.xml.em', {
                    'cmake_build_type': 'Debug',
                    'enable_c_coverage_default': 'true',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                })

        # configure nightly triggered job using opensplice
        job_name = 'nightly_' + job_os_name + '_ospl' + '_release'
        if os_name == 'windows':
            job_name = job_name[:20]
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Release',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'use_connext_default': 'false',
                'use_fastrtps_default': 'true',
                'use_opensplice_default': 'true',
            })

        # configure nightly triggered job
        job_name = 'nightly_' + job_os_name + '_release'
        if os_name == 'windows':
            job_name = job_name[:15]
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Release',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
            })

        # configure nightly triggered job with repeated testing
        job_name = 'nightly_' + job_os_name + '_repeated'
        if os_name == 'windows':
            job_name = job_name[:15]
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type':
                'None',
                'time_trigger_spec':
                PERIODIC_JOB_SPEC,
                'mailer_recipients':
                DEFAULT_MAIL_RECIPIENTS,
                'test_args_default':
                '--event-handler console_direct+ --executor sequential --retest-until-fail 10 --ctest-args -LE linter --pytest-args -m "not linter"',
            })

        # configure turtlebot jobs on Linux only for now
        if os_name in ['linux', 'linux-aarch64']:
            create_job(
                os_name, 'ci_turtlebot-demo_' + os_name, 'ci_job.xml.em', {
                    'cmake_build_type':
                    'None',
                    'turtlebot_demo':
                    True,
                    'supplemental_repos_url':
                    'https://raw.githubusercontent.com/ros2/turtlebot2_demo/master/turtlebot2_demo.repos',
                })
            create_job(
                os_name, 'nightly_turtlebot-demo_' + os_name + '_release',
                'ci_job.xml.em', {
                    'cmake_build_type': 'Release',
                    'turtlebot_demo': True,
                    'supplemental_repos_url':
                    'https://raw.githubusercontent.com/ros2/turtlebot2_demo/master/turtlebot2_demo.repos',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                })

    for os_name in ['linux', 'linux-aarch64']:
        # configure a nightly triggered job for xenial using all RMW implementations
        ubuntu_distro = 'xenial'
        job_name = 'nightly_{0}_{1}_release'.format(ubuntu_distro, os_name)
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Release',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'use_connext_default':
                'false' if os_name == 'linux-aarch64' else 'true',
                'use_fastrtps_default': 'true',
                'use_opensplice_default': 'true',
                'ubuntu_distro': ubuntu_distro,
            })

        ubuntu_distro = 'xenial'
        # configure a nightly xenial packaging job
        job_name = 'packaging_{0}_{1}'.format(ubuntu_distro, os_name)
        create_job(
            os_name, job_name, 'packaging_job.xml.em', {
                'cmake_build_type': 'RelWithDebInfo',
                'test_bridge_default': 'true',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'ubuntu_distro': ubuntu_distro,
                'use_fastrtps_default': 'true',
                'use_opensplice_default': 'true',
                'use_connext_default':
                    # compare strings with ==, not identity
                    'false' if os_name == 'linux-aarch64' else 'true',
                'use_connext_debs_default': 'true',
            })

    # configure the launch job
    os_specific_data = collections.OrderedDict()
    for os_name in sorted(os_configs.keys()):
        os_specific_data[os_name] = dict(data)
        os_specific_data[os_name].update(os_configs[os_name])
        os_specific_data[os_name]['job_name'] = 'ci_' + os_name
    job_data = dict(data)
    job_data['ci_scripts_default_branch'] = args.ci_scripts_default_branch
    job_data['label_expression'] = 'master'
    job_data['os_specific_data'] = os_specific_data
    job_data['cmake_build_type'] = 'None'
    job_config = expand_template('ci_launcher_job.xml.em', job_data)
    configure_job(jenkins, 'ci_launcher', job_config, **jenkins_kwargs)
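
The 'ci_launcher' job above uses the same expand-and-configure pattern as every other job in this example: build a data dict, expand an EmPy template with expand_template, and push the result with configure_job. A minimal sketch of that pattern, assuming a reachable Jenkins master, a hypothetical template 'example_job.xml.em' that only needs the keys shown, and placeholder values throughout:

from ros_buildfarm.jenkins import connect, configure_job
from ros_buildfarm.templates import expand_template

# All values below are placeholders for illustration only.
jenkins = connect('https://ci.example.org')  # hypothetical Jenkins URL
job_data = {
    'time_trigger_spec': '',   # placeholder: no timer spec
    'mailer_recipients': '',   # placeholder: no mail recipients
}
# 'example_job.xml.em' is a hypothetical template accepting exactly these keys.
job_config = expand_template('example_job.xml.em', job_data)
# dry_run=True keeps configure_job from modifying the Jenkins server.
configure_job(jenkins, 'ci_example', job_config, dry_run=True)
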
def configure_devel_job(config_url,
                        rosdistro_name,
                        source_build_name,
                        repo_name,
                        os_name,
                        os_code_name,
                        arch,
                        pull_request=False,
                        config=None,
                        build_file=None,
                        index=None,
                        dist_file=None,
                        dist_cache=None,
                        jenkins=None,
                        views=None,
                        is_disabled=False,
                        groovy_script=None,
                        source_repository=None,
                        build_targets=None,
                        dry_run=False):
    """
    Configure a single Jenkins devel job.

    This includes the following steps:
    - clone the source repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the release/run_devel_job.py script
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_source_build_files(config, rosdistro_name)
        build_file = build_files[source_build_name]
    # Overwrite build_file.targets if build_targets is specified
    if build_targets is not None:
        build_file.targets = build_targets

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    repo_names = dist_file.repositories.keys()

    if repo_name is not None:
        if repo_name not in repo_names:
            raise JobValidationError("Invalid repository name '%s' " %
                                     repo_name +
                                     'choose one of the following: %s' %
                                     ', '.join(sorted(repo_names)))

        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            raise JobValidationError("Repository '%s' has no source section" %
                                     repo_name)
        if not repo.source_repository.version:
            raise JobValidationError("Repository '%s' has no source version" %
                                     repo_name)
        source_repository = repo.source_repository

    if os_name not in build_file.targets.keys():
        raise JobValidationError("Invalid OS name '%s' " % os_name +
                                 'choose one of the following: ' +
                                 ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' %
            ', '.join(sorted(build_file.targets[os_name][os_code_name])))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        view_name = get_devel_view_name(rosdistro_name,
                                        source_build_name,
                                        pull_request=pull_request)
        configure_devel_view(jenkins, view_name, dry_run=dry_run)

    job_name = get_devel_job_name(rosdistro_name, source_build_name, repo_name,
                                  os_name, os_code_name, arch, pull_request)

    job_config = _get_devel_job_config(config,
                                       rosdistro_name,
                                       source_build_name,
                                       build_file,
                                       os_name,
                                       os_code_name,
                                       arch,
                                       source_repository,
                                       repo_name,
                                       pull_request,
                                       job_name,
                                       dist_cache=dist_cache,
                                       is_disabled=is_disabled)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)

    return job_name, job_config
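
A hedged usage sketch of configure_devel_job as defined above; every value is a placeholder (the config URL, distro, build and repository names would have to exist in a real buildfarm configuration), and dry_run=True keeps the call from changing anything on the Jenkins master:

# Hypothetical invocation with placeholder arguments.
job_name, job_config = configure_devel_job(
    'https://example.org/buildfarm_config/index.yaml',  # config_url (placeholder)
    'melodic',        # rosdistro_name
    'default',        # source_build_name
    'example_repo',   # repo_name; must appear in the distribution file
    'ubuntu', 'bionic', 'amd64',
    dry_run=True)
print(job_name)
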
def configure_release_job(config_url,
                          rosdistro_name,
                          release_build_name,
                          pkg_name,
                          os_name,
                          os_code_name,
                          config=None,
                          build_file=None,
                          index=None,
                          dist_file=None,
                          dist_cache=None,
                          jenkins=None,
                          views=None,
                          generate_import_package_job=True,
                          generate_sync_packages_jobs=True,
                          is_disabled=False,
                          other_build_files_same_platform=None,
                          groovy_script=None,
                          filter_arches=None,
                          dry_run=False):
    """
    Configure a Jenkins release job.

    The following jobs are created for each package:
    - M source jobs, one for each OS code name
    - M * N binary jobs, one for each combination of OS code name and arch
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    pkg_names = dist_file.release_packages.keys()

    if pkg_name not in pkg_names:
        raise JobValidationError("Invalid package name '%s' " % pkg_name +
                                 'choose one of the following: ' +
                                 ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError("Repository '%s' has no release section" %
                                 repo_name)

    if not repo.release_repository.version:
        raise JobValidationError("Repository '%s' has no release version" %
                                 repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError("Invalid OS name '%s' " % os_name +
                                 'choose one of the following: ' +
                                 ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        targets = []
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
        configure_release_views(jenkins,
                                rosdistro_name,
                                release_build_name,
                                targets,
                                dry_run=dry_run)

    if generate_import_package_job:
        configure_import_package_job(config_url,
                                     rosdistro_name,
                                     release_build_name,
                                     config=config,
                                     build_file=build_file,
                                     jenkins=jenkins,
                                     dry_run=dry_run)

    if generate_sync_packages_jobs:
        configure_sync_packages_to_main_job(config_url,
                                            rosdistro_name,
                                            release_build_name,
                                            config=config,
                                            build_file=build_file,
                                            jenkins=jenkins,
                                            dry_run=dry_run)
        for arch in build_file.targets[os_name][os_code_name]:
            configure_sync_packages_to_testing_job(config_url,
                                                   rosdistro_name,
                                                   release_build_name,
                                                   os_code_name,
                                                   arch,
                                                   config=config,
                                                   build_file=build_file,
                                                   jenkins=jenkins,
                                                   dry_run=dry_run)

    source_job_names = []
    binary_job_names = []
    job_configs = {}

    # sourcedeb job
    # since sourcedeb jobs are potentially being shared across multiple build
    # files the configuration has to take all of them into account in order to
    # generate a job which all build files agree on
    source_job_name = get_sourcedeb_job_name(rosdistro_name,
                                             release_build_name, pkg_name,
                                             os_name, os_code_name)

    # while the package is disabled in the current build file
    # it might be used by sibling build files
    is_source_disabled = is_disabled
    if is_source_disabled and other_build_files_same_platform:
        # check if sourcedeb job is used by any other build file with the same platform
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                is_source_disabled = False
                break

    job_config = _get_sourcedeb_job_config(
        config_url,
        rosdistro_name,
        release_build_name,
        config,
        build_file,
        os_name,
        os_code_name,
        pkg_name,
        repo_name,
        repo.release_repository,
        dist_cache=dist_cache,
        is_disabled=is_source_disabled,
        other_build_files_same_platform=other_build_files_same_platform)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, source_job_name, job_config, dry_run=dry_run)
    source_job_names.append(source_job_name)
    job_configs[source_job_name] = job_config

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(pkg_name, dist_cache,
                                                    pkg_names)
        # if dependencies are not yet available in rosdistro cache
        # skip binary jobs
        if dependency_names is None:
            print(("Skipping binary jobs for package '%s' because it is not " +
                   "yet in the rosdistro cache") % pkg_name,
                  file=sys.stderr)
            return source_job_names, binary_job_names, job_configs

    # binarydeb jobs
    for arch in build_file.targets[os_name][os_code_name]:
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(rosdistro_name, release_build_name,
                                          pkg_name, os_name, os_code_name,
                                          arch)

        upstream_job_names = [source_job_name] + [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name, dependency_name, os_name,
                os_code_name, arch) for dependency_name in dependency_names
        ]

        job_config = _get_binarydeb_job_config(
            config_url,
            rosdistro_name,
            release_build_name,
            config,
            build_file,
            os_name,
            os_code_name,
            arch,
            pkg_name,
            repo_name,
            repo.release_repository,
            dist_cache=dist_cache,
            upstream_job_names=upstream_job_names,
            is_disabled=is_disabled)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config, dry_run=dry_run)
        binary_job_names.append(job_name)
        job_configs[job_name] = job_config

    return source_job_names, binary_job_names, job_configs
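
And a corresponding hedged sketch for configure_release_job; again all names are placeholders that would have to exist in the rosdistro index referenced by the configuration, and dry_run=True avoids modifying the Jenkins master:

# Hypothetical invocation with placeholder arguments.
source_jobs, binary_jobs, job_configs = configure_release_job(
    'https://example.org/buildfarm_config/index.yaml',  # config_url (placeholder)
    'melodic',           # rosdistro_name
    'default',           # release_build_name
    'example_package',   # pkg_name; must appear in the distribution file
    'ubuntu', 'bionic',
    dry_run=True)
print(source_jobs, binary_jobs)
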