def main(argv=None):
    """Generate the 'doc' management jobs on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    :returns: 1 when the doc build file is not of rosdoc type, None otherwise
    """
    # Resolve the default at call time: a `argv=sys.argv[1:]` default would be
    # captured once at import time and go stale if sys.argv changes.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'doc' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_doc_build_files(config, args.rosdistro_name)
    build_file = build_files[args.doc_build_name]

    # only rosdoc-type build files are handled by this script
    if build_file.documentation_type != DOC_TYPE_ROSDOC:
        print(("The doc build file '%s' has the wrong documentation type to " +
               "be used with this script") % args.doc_build_name,
              file=sys.stderr)
        return 1

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins, dry_run=args.dry_run)
    group_name = get_doc_view_name(args.rosdistro_name, args.doc_build_name)
    configure_reconfigure_jobs_job(
        jenkins, group_name, args, config, build_file, dry_run=args.dry_run)
    configure_trigger_jobs_job(
        jenkins, group_name, build_file, dry_run=args.dry_run)
def main(argv=None):
    """Generate the 'upload_main' and 'upload_testing' jobs on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'upload_main' and 'upload_testing' jobs.")
    add_argument_config_url(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    template_name = 'release/trigger_upload_repo_job.xml.em'

    config = get_index(args.config_url)
    jenkins = connect(config.jenkins_url)
    for repo in ['main', 'testing']:
        job_name = 'upload_%s' % repo
        # the testing job must not block while upstream jobs are building
        block_when_upstream_building = \
            'false' if repo == 'testing' else 'true'
        job_config = expand_template(
            template_name, {
                'block_when_upstream_building': block_when_upstream_building,
                'repo': repo,
                'upstream_job_names': get_upstream_job_names(config, repo),
                'recipients': config.notify_emails})
        configure_job(jenkins, job_name, job_config, dry_run=args.dry_run)
def main(argv=None):
    """Generate the 'release' management jobs on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file)

    jenkins = connect(config.jenkins_url)
    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    group_name = get_release_view_name(
        args.rosdistro_name, args.release_build_name)

    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(jenkins, job_name, reconfigure_jobs_job_config, view=view)

    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(jenkins, job_name, trigger_jobs_job_config, view=view)

    # the import job is shared across build files, hence no group prefix
    job_name = 'import_upstream'
    configure_job(jenkins, job_name, import_upstream_job_config, view=view)
def configure_sync_packages_to_main_job(
        config_url, rosdistro_name, release_build_name,
        config=None, build_file=None, jenkins=None, dry_run=False):
    """Configure the Jenkins job syncing packages to the main repository.

    ``config``, ``build_file`` and ``jenkins`` are resolved from
    ``config_url`` when not supplied.  Pass ``jenkins=False`` to only
    compute the job name and configuration without contacting Jenkins.

    :returns: tuple of (job_name, job_config)
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)

    job_name = get_sync_packages_to_main_job_name(rosdistro_name)
    job_config = _get_sync_packages_to_main_job_config(
        rosdistro_name, build_file)
    # A plain truthiness test would misfire: jenkinsapi.jenkins.Jenkins
    # evaluates to false when its job count is zero, so compare against the
    # explicit False sentinel.  The former `isinstance(jenkins, object)`
    # guard was vacuous (always true) and has been removed.
    if jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
    return (job_name, job_config)
def configure_sync_packages_to_main_job(
        config_url, rosdistro_name, release_build_name,
        config=None, build_file=None, jenkins=None, dry_run=False):
    """Configure the Jenkins job syncing packages to the main repository.

    ``config``, ``build_file`` and ``jenkins`` are resolved from
    ``config_url`` when not supplied.  Pass ``jenkins=False`` to only
    compute the job name and configuration without contacting Jenkins.

    :returns: tuple of (job_name, job_config)
    :raises JobValidationError: if the build file targets OS names with
      more than one package format
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)

    # all targeted OS names must map to a single package format (deb / rpm)
    package_formats = set(
        package_format_mapping[os_name]
        for os_name in build_file.targets.keys())
    # NOTE: `assert` is stripped under `python -O`; raise explicitly instead
    if len(package_formats) != 1:
        raise JobValidationError(
            'The build file targets multiple package formats: %s' %
            ', '.join(sorted(package_formats)))
    package_format = package_formats.pop()

    job_name = get_sync_packages_to_main_job_name(
        rosdistro_name, package_format)
    job_config = _get_sync_packages_to_main_job_config(
        rosdistro_name, build_file, package_format)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero,
    # so compare against the explicit False sentinel (the former
    # `isinstance(jenkins, object)` guard was vacuous and removed)
    if jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
    return (job_name, job_config)
def main(argv=None):
    """Generate the 'doc' management jobs on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    :returns: 1 when the doc build file is not of rosdoc type, None otherwise
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'doc' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_doc_build_files(config, args.rosdistro_name)
    build_file = build_files[args.doc_build_name]

    # only rosdoc-type build files are handled by this script
    if build_file.documentation_type != DOC_TYPE_ROSDOC:
        print(("The doc build file '%s' has the wrong documentation type to " +
               "be used with this script") % args.doc_build_name,
              file=sys.stderr)
        return 1

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins)
    group_name = get_doc_view_name(
        args.rosdistro_name, args.doc_build_name)
    configure_reconfigure_jobs_job(
        jenkins, group_name, args, config, build_file)
    configure_trigger_jobs_job(jenkins, group_name, build_file)
def configure_sync_packages_to_testing_job(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        config=None, build_file=None, jenkins=None):
    """Configure the Jenkins job syncing packages to the testing repository.

    ``config``, ``build_file`` and ``jenkins`` are resolved from
    ``config_url`` when not supplied.  Pass ``jenkins=False`` to skip the
    remote configuration entirely.
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        jenkins = connect(config.jenkins_url)

    job_name = get_sync_packages_to_testing_job_name(
        rosdistro_name, os_code_name, arch)
    job_config = _get_sync_packages_to_testing_job_config(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        config, build_file)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero,
    # so compare against the explicit False sentinel (the former
    # `isinstance(jenkins, object)` guard was vacuous and removed)
    if jenkins is not False:
        configure_management_view(jenkins)
        configure_job(jenkins, job_name, job_config)
def main(argv=None):
    """Generate the 'devel' management jobs on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'devel' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins, dry_run=args.dry_run)
    group_name = get_devel_view_name(
        args.rosdistro_name, args.source_build_name)
    configure_reconfigure_jobs_job(
        jenkins, group_name, args, config, build_file, dry_run=args.dry_run)
    configure_trigger_jobs_job(
        jenkins, group_name, build_file, dry_run=args.dry_run)
def main(argv=None):
    """Generate the 'release' management jobs on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    # all targeted OS names must map to a single package format (deb / rpm)
    package_formats = set(
        package_format_mapping[os_name]
        for os_name in build_file.targets.keys())
    assert len(package_formats) == 1
    package_format = package_formats.pop()

    group_name = get_release_job_prefix(
        args.rosdistro_name, args.release_build_name)

    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    trigger_missed_jobs_job_config = get_trigger_missed_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file, package_format)
    trigger_broken_with_non_broken_upstream_job_config = \
        _get_trigger_broken_with_non_broken_upstream_job_config(
            args.rosdistro_name, args.release_build_name, build_file)

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(
        jenkins, job_name, reconfigure_jobs_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(
        jenkins, job_name, trigger_jobs_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'trigger-missed-jobs')
    configure_job(
        jenkins, job_name, trigger_missed_jobs_job_config,
        dry_run=args.dry_run)

    # the deb import job keeps its historic unsuffixed name
    job_name = 'import_upstream%s' % \
        ('' if package_format == 'deb' else '_' + package_format)
    configure_job(
        jenkins, job_name, import_upstream_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % \
        (group_name, 'trigger-broken-with-non-broken-upstream')
    configure_job(
        jenkins, job_name,
        trigger_broken_with_non_broken_upstream_job_config,
        dry_run=args.dry_run)
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name,
        append_timestamp=False):
    """Configure all Jenkins release jobs for a build file.

    Configures the shared import-package job, the release view, and one
    set of release jobs per (package, OS target) combination.  Packages
    and repositories without a release section or release version are
    skipped with a message.

    :param config_url: URL of the buildfarm configuration index
    :param rosdistro_name: name of the ROS distribution
    :param release_build_name: name of the release build file
    :param append_timestamp: forwarded to each `configure_release_job` call
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # the distribution cache is only needed for maintainer notification
    # or ABI-incompatibility handling
    dist_cache = None
    if build_file.notify_maintainers or build_file.abi_incompatibility_assumed:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            targets.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in targets:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    # the import job is shared by all packages, so configure it once here
    # and pass generate_import_package_job=False below
    configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins)

    view_name = get_release_view_name(rosdistro_name, release_build_name)
    view = configure_release_view(jenkins, view_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name))
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name))
            continue

        for os_name, os_code_name in targets:
            configure_release_job(
                config_url, rosdistro_name, release_build_name,
                pkg_name, os_name, os_code_name,
                append_timestamp=append_timestamp,
                config=config, build_file=build_file,
                index=index, dist_file=dist_file, dist_cache=dist_cache,
                jenkins=jenkins, view=view,
                generate_import_package_job=False)
def main(argv=None):
    """Generate the 'release' management jobs on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    group_name = get_release_job_prefix(
        args.rosdistro_name, args.release_build_name)

    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    trigger_missed_jobs_job_config = get_trigger_missed_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file)
    trigger_broken_with_non_broken_upstream_job_config = \
        _get_trigger_broken_with_non_broken_upstream_job_config(
            args.rosdistro_name, args.release_build_name, build_file)

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(
        jenkins, job_name, reconfigure_jobs_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(
        jenkins, job_name, trigger_jobs_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'trigger-missed-jobs')
    configure_job(
        jenkins, job_name, trigger_missed_jobs_job_config,
        dry_run=args.dry_run)

    # the import job is shared across build files, hence no group prefix
    job_name = 'import_upstream'
    configure_job(
        jenkins, job_name, import_upstream_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % \
        (group_name, 'trigger-broken-with-non-broken-upstream')
    configure_job(
        jenkins, job_name,
        trigger_broken_with_non_broken_upstream_job_config,
        dry_run=args.dry_run)
def main(argv=None):
    """Generate the 'check_slaves' job on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'check_slaves' job on Jenkins")
    add_argument_config_url(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(config.notify_emails)

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins)

    job_name = 'check_slaves'
    configure_job(jenkins, job_name, job_config)
def main(argv=None):
    """Generate the 'dashboard' job on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'dashboard' job on Jenkins")
    add_argument_config_url(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(config.notify_emails)

    jenkins = connect(config.jenkins_url)
    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    job_name = 'dashboard'
    configure_job(jenkins, job_name, job_config, view=view)
def configure_devel_jobs(
        config_url, rosdistro_name, source_build_name):
    """Configure all Jenkins devel jobs for a source build file.

    Creates one job per (repository, OS target) combination; repositories
    without a source section or source version are skipped with a message.
    """
    config = get_config_index(config_url)
    build_files = get_source_build_files(config, rosdistro_name)
    build_file = build_files[source_build_name]

    index = get_index(config.rosdistro_index_url)

    # the distribution cache is only needed for maintainer notification
    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # collect all (os_name, os_code_name, arch) targets of the build file
    targets = [
        (os_name, os_code_name, arch)
        for os_name in build_file.targets.keys()
        for os_code_name in build_file.targets[os_name].keys()
        for arch in build_file.targets[os_name][os_code_name]]
    print('The build file contains the following targets:')
    for target in targets:
        print('  -', target[0], target[1], target[2])

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    view_name = get_devel_view_name(rosdistro_name, source_build_name)
    view = configure_devel_view(jenkins, view_name)

    filtered_repo_names = build_file.filter_repositories(
        dist_file.repositories.keys())
    for repo_name in sorted(filtered_repo_names):
        repo = dist_file.repositories[repo_name]
        # skip repositories which cannot be built from source
        if not repo.source_repository:
            print("Skipping repository '%s': no source section" % repo_name)
            continue
        if not repo.source_repository.version:
            print("Skipping repository '%s': no source version" % repo_name)
            continue

        for os_name, os_code_name, arch in targets:
            configure_devel_job(
                config_url, rosdistro_name, source_build_name,
                repo_name, os_name, os_code_name, arch,
                config=config, build_file=build_file,
                index=index, dist_file=dist_file, dist_cache=dist_cache,
                jenkins=jenkins, view=view)
def main(argv=None):
    """Generate the 'rosdistro-cache' job on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        # fixed copy-pasted description: this script configures the
        # rosdistro-cache job, not the dashboard job
        description="Generate the 'rosdistro-cache' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins)

    job_name = '%s_rosdistro-cache' % args.rosdistro_name
    configure_job(jenkins, job_name, job_config)
def main(argv=None):
    """Generate the 'bloom_status' job on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'bloom_status' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins, dry_run=args.dry_run)

    job_name = 'bloom_status'
    configure_job(jenkins, job_name, job_config, dry_run=args.dry_run)
def main(argv=None):
    """Generate the 'repos_status_page' job on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'repos_status_page' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins)

    prefix = get_release_job_prefix(args.rosdistro_name)
    job_name = '%s_repos-status-page' % prefix
    configure_job(jenkins, job_name, job_config)
def main(argv=None):
    """Generate the 'failing_jobs' job on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'failing_jobs' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args.rosdistro_name)

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins, dry_run=args.dry_run)

    job_name = '%s_failing-jobs' % args.rosdistro_name
    configure_job(jenkins, job_name, job_config, dry_run=args.dry_run)
def main(argv=None):
    """Generate the 'release_status_page' job on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'release_status_page' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_debian_repository_urls(parser)
    add_argument_os_code_name_and_arch_tuples(parser)
    add_argument_output_name(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)
    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    job_name = '%s_repos-status-page' % args.output_name
    configure_job(jenkins, job_name, job_config, view=view)
def configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=None, build_file=None, jenkins=None, dry_run=False):
    """Configure the Jenkins job importing packages into the repository.

    ``config``, ``build_file`` and ``jenkins`` are resolved from
    ``config_url`` when not supplied.  Pass ``jenkins=False`` to only
    compute the job name and configuration without contacting Jenkins.

    :returns: tuple of (job_name, job_config)
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        jenkins = connect(config.jenkins_url)

    job_name = get_import_package_job_name(rosdistro_name)
    job_config = _get_import_package_job_config(build_file)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero,
    # so compare against the explicit False sentinel (the former
    # `isinstance(jenkins, object)` guard was vacuous and removed)
    if jenkins is not False:
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
    return (job_name, job_config)
def main(argv=None):
    """Generate the 'release_status_page' job on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'release_status_page' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)
    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    job_name = '%s_%s_release-status-page' % \
        (args.rosdistro_name, args.release_build_name)
    configure_job(jenkins, job_name, job_config, view=view)
def configure_doc_independent_job(
        config_url, doc_build_name, config=None, build_file=None):
    """Configure the Jenkins job building rosdistro-independent docs.

    ``config`` and ``build_file`` are resolved from ``config_url`` when
    not supplied.
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_global_doc_build_files(config)
        build_file = build_files[doc_build_name]

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    job_name = 'doc_%s' % doc_build_name
    job_config = _get_doc_independent_job_config(
        config, config_url, job_name, build_file)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero,
    # so compare against the explicit False sentinel (the former
    # `isinstance(jenkins, object)` guard was vacuous and removed)
    if jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config)
def main(argv=None):
    """Generate the 'blocked_source_entries_page' job on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'blocked_source_entries_page' job on Jenkins"
    )
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    job_config = get_job_config(args, config)

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins, dry_run=args.dry_run)

    prefix = get_release_job_prefix(args.rosdistro_name)
    job_name = '%s_blocked-source-entries-page' % prefix
    configure_job(jenkins, job_name, job_config, dry_run=args.dry_run)
def main(argv=None):
    """Generate the 'release' management jobs on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    group_name = get_release_job_prefix(
        args.rosdistro_name, args.release_build_name)

    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file)
    trigger_broken_with_non_broken_upstream_job_config = \
        _get_trigger_broken_with_non_broken_upstream_job_config(
            args.rosdistro_name, args.release_build_name, build_file)

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins)

    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(jenkins, job_name, reconfigure_jobs_job_config)

    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(jenkins, job_name, trigger_jobs_job_config)

    # the import job is shared across build files, hence no group prefix
    job_name = 'import_upstream'
    configure_job(jenkins, job_name, import_upstream_job_config)

    job_name = '%s_%s' % \
        (group_name, 'trigger-broken-with-non-broken-upstream')
    configure_job(
        jenkins, job_name,
        trigger_broken_with_non_broken_upstream_job_config)
def configure_sync_packages_to_main_job(
        config_url, rosdistro_name, release_build_name,
        config=None, build_file=None, jenkins=None):
    """Configure the Jenkins job syncing packages to the main repository.

    ``config``, ``build_file`` and ``jenkins`` are resolved from
    ``config_url`` when not supplied.  Pass ``jenkins=False`` to skip the
    remote configuration entirely.
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        jenkins = connect(config.jenkins_url)

    job_name = get_sync_packages_to_main_job_name(
        rosdistro_name)
    job_config = _get_sync_packages_to_main_job_config(
        rosdistro_name, build_file)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero,
    # so compare against the explicit False sentinel (the former
    # `isinstance(jenkins, object)` guard was vacuous and removed)
    if jenkins is not False:
        configure_management_view(jenkins)
        configure_job(jenkins, job_name, job_config)
def main(argv=None):
    """Generate the 'CI' management jobs on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'CI' management jobs on Jenkins")
    # Positional
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_ci_build_files(config, args.rosdistro_name)

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins, dry_run=args.dry_run)
    group_name = get_ci_view_name(args.rosdistro_name)
    configure_reconfigure_jobs_job(
        jenkins, group_name, args, config, build_files, dry_run=args.dry_run)
def configure_doc_metadata_job(
        config_url, rosdistro_name, doc_build_name,
        config=None, build_file=None, dry_run=False):
    """Configure the Jenkins job generating doc metadata.

    ``config`` and ``build_file`` are resolved from ``config_url`` when
    not supplied.
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_doc_build_files(config, rosdistro_name)
        build_file = build_files[doc_build_name]

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    job_name = get_doc_view_name(rosdistro_name, doc_build_name)
    job_config = _get_doc_metadata_job_config(
        config, config_url, rosdistro_name, doc_build_name, build_file)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero,
    # so compare against the explicit False sentinel (the former
    # `isinstance(jenkins, object)` guard was vacuous and removed)
    if jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
def configure_doc_independent_job(
        config_url, doc_build_name, config=None, build_file=None,
        dry_run=False):
    """Configure the Jenkins job building rosdistro-independent docs.

    ``config`` and ``build_file`` are resolved from ``config_url`` when
    not supplied.
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_global_doc_build_files(config)
        build_file = build_files[doc_build_name]

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    job_name = 'doc_%s' % doc_build_name
    job_config = _get_doc_independent_job_config(
        config, config_url, job_name, build_file)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero,
    # so compare against the explicit False sentinel (the former
    # `isinstance(jenkins, object)` guard was vacuous and removed)
    if jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
def main(argv=None):
    """Generate the 'devel' management jobs on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'devel' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins)
    group_name = get_devel_view_name(
        args.rosdistro_name, args.source_build_name)
    configure_reconfigure_jobs_job(
        jenkins, group_name, args, config, build_file)
    configure_trigger_jobs_job(jenkins, group_name, build_file)
def configure_doc_metadata_job(
        config_url, rosdistro_name, doc_build_name,
        config=None, build_file=None):
    """Configure the Jenkins job generating doc metadata.

    ``config`` and ``build_file`` are resolved from ``config_url`` when
    not supplied.
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_doc_build_files(config, rosdistro_name)
        build_file = build_files[doc_build_name]

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    job_name = get_doc_view_name(rosdistro_name, doc_build_name)
    job_config = _get_doc_metadata_job_config(
        config, config_url, rosdistro_name, doc_build_name, build_file)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero,
    # so compare against the explicit False sentinel (the former
    # `isinstance(jenkins, object)` guard was vacuous and removed)
    if jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config)
def configure_sync_packages_to_testing_job(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        config=None, build_file=None, jenkins=None):
    """Configure the Jenkins job syncing packages to the testing repository.

    ``config``, ``build_file`` and ``jenkins`` are resolved from
    ``config_url`` when not supplied.  Pass ``jenkins=False`` to skip the
    remote job configuration (the view is still configured).
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        jenkins = connect(config.jenkins_url)

    job_name = get_sync_packages_to_testing_job_name(
        rosdistro_name, release_build_name, os_code_name, arch)
    job_config = _get_sync_packages_to_testing_job_config(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        config, build_file)

    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero,
    # so compare against the explicit False sentinel (the former
    # `isinstance(jenkins, object)` guard was vacuous and removed)
    if jenkins is not False:
        configure_job(jenkins, job_name, job_config, view)
def main(argv=None):
    """Generate the 'CI' management jobs on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'CI' management jobs on Jenkins")
    # Positional
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_ci_build_files(config, args.rosdistro_name)

    jenkins = connect(config.jenkins_url)
    configure_management_view(jenkins, dry_run=args.dry_run)
    group_name = get_ci_view_name(args.rosdistro_name)
    configure_reconfigure_jobs_job(
        jenkins, group_name, args, config, build_files, dry_run=args.dry_run)
def main(argv=None):
    """Generate the 'upload_main' and 'upload_testing' jobs on Jenkins.

    :param argv: command line arguments; defaults to ``sys.argv[1:]``
    """
    # Resolve the default at call time instead of capturing sys.argv at import.
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Generate the 'upload_main' and 'upload_testing' jobs.")
    add_argument_config_url(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    template_name = 'release/trigger_upload_repo_job.xml.em'

    config = get_index(args.config_url)
    jenkins = connect(config.jenkins_url)
    for repo in ['main', 'testing']:
        job_name = 'upload_%s' % repo
        # the testing job must not block while upstream jobs are building
        block_when_upstream_building = \
            'false' if repo == 'testing' else 'true'
        job_config = expand_template(template_name, {
            'block_when_upstream_building': block_when_upstream_building,
            'repo': repo,
            'upstream_job_names': get_upstream_job_names(config, repo),
            'recipients': config.notify_emails})
        configure_job(jenkins, job_name, job_config, dry_run=args.dry_run)
def configure_release_job(config_url, rosdistro_name, release_build_name,
                          pkg_name, os_name, os_code_name,
                          config=None, build_file=None,
                          index=None, dist_file=None, dist_cache=None,
                          jenkins=None, views=None,
                          generate_import_package_job=True,
                          generate_sync_packages_jobs=True,
                          is_disabled=False,
                          other_build_files_same_platform=None,
                          groovy_script=None,
                          filter_arches=None,
                          dry_run=False):
    """
    Configure a Jenkins release job.

    The following jobs are created for each package:
    - M source jobs, one for each OS node name
    - M * N binary jobs, one for each combination of OS code name and arch

    Missing collaborators (config, build_file, index, dist_file,
    dist_cache, jenkins) are resolved from config_url.  Pass
    jenkins=False to only compute job names and configurations without
    contacting Jenkins.

    :param is_disabled: whether the package is disabled in this build file
    :param other_build_files_same_platform: sibling build files targeting
      the same platform which may share the sourcedeb job
    :param filter_arches: when set, only configure binary jobs for these
      architectures
    :returns: tuple of (source_job_names, binary_job_names, job_configs)
    :raises JobValidationError: for unknown package/OS names or missing
      release information
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    pkg_names = dist_file.release_packages.keys()

    if pkg_name not in pkg_names:
        raise JobValidationError(
            "Invalid package name '%s' " % pkg_name +
            'choose one of the following: ' + ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError(
            "Repository '%s' has no release section" % repo_name)

    if not repo.release_repository.version:
        raise JobValidationError(
            "Repository '%s' has no release version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    # the distribution cache is only needed for maintainer notification
    # or ABI-incompatibility handling
    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        # one view per target plus the source view
        targets = []
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
        configure_release_views(
            jenkins, rosdistro_name, release_build_name, targets,
            dry_run=dry_run)

    if generate_import_package_job:
        configure_import_package_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins,
            dry_run=dry_run)

    if generate_sync_packages_jobs:
        configure_sync_packages_to_main_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins,
            dry_run=dry_run)
        for arch in build_file.targets[os_name][os_code_name]:
            configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins,
                dry_run=dry_run)

    source_job_names = []
    binary_job_names = []
    job_configs = {}

    # sourcedeb job
    # since sourcedeb jobs are potentially being shared across multiple build
    # files the configuration has to take all of them into account in order to
    # generate a job which all build files agree on
    source_job_name = get_sourcedeb_job_name(
        rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)

    # while the package is disabled in the current build file
    # it might be used by sibling build files
    is_source_disabled = is_disabled
    if is_source_disabled and other_build_files_same_platform:
        # check if sourcedeb job is used by any other build file with the
        # same platform
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                is_source_disabled = False
                break

    job_config = _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name,
        pkg_name, repo_name, repo.release_repository,
        dist_cache=dist_cache, is_disabled=is_source_disabled,
        other_build_files_same_platform=other_build_files_same_platform)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, source_job_name, job_config, dry_run=dry_run)
    source_job_names.append(source_job_name)
    job_configs[source_job_name] = job_config

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(
            pkg_name, dist_cache, pkg_names)
        # if dependencies are not yet available in rosdistro cache
        # skip binary jobs
        if dependency_names is None:
            print(("Skipping binary jobs for package '%s' because it is not " +
                   "yet in the rosdistro cache") % pkg_name, file=sys.stderr)
            return source_job_names, binary_job_names, job_configs

    # binarydeb jobs
    for arch in build_file.targets[os_name][os_code_name]:
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)

        # binary jobs of direct dependencies are upstream of this job
        upstream_job_names = [source_job_name] + [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name,
                dependency_name, os_name, os_code_name, arch)
            for dependency_name in dependency_names]

        job_config = _get_binarydeb_job_config(
            config_url, rosdistro_name, release_build_name,
            config, build_file, os_name, os_code_name, arch,
            pkg_name, repo_name, repo.release_repository,
            dist_cache=dist_cache, upstream_job_names=upstream_job_names,
            is_disabled=is_disabled)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config, dry_run=dry_run)
        binary_job_names.append(job_name)
        job_configs[job_name] = job_config

    return source_job_names, binary_job_names, job_configs
def main(argv=None):
    """
    Create or update the ros2 CI, packaging, and nightly jobs on Jenkins.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)

    Unless ``--commit`` is passed, all Jenkins operations are dry runs.
    """
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Creates the ros2 jobs on Jenkins")
    parser.add_argument(
        '--jenkins-url', '-u', default='http://ci.ros2.org',
        help="Url of the jenkins server to which the job should be added")
    parser.add_argument(
        '--ci-scripts-repository', default='[email protected]:ros2/ci.git',
        help="repository from which ci scripts should be cloned")
    parser.add_argument(
        '--ci-scripts-default-branch', default='master',
        help="default branch of the ci repository to get ci scripts from "
             "(this is a job parameter)")
    parser.add_argument(
        '--commit', action='store_true',
        # fixed typo: "Jenkis" -> "Jenkins"
        help='Actually modify the Jenkins jobs instead of only doing a dry run',
    )
    args = parser.parse_args(argv)

    # Defaults shared by every job; per-OS and per-job sections below
    # override entries in copies of this dict.
    data = {
        'ci_scripts_repository': args.ci_scripts_repository,
        'ci_scripts_default_branch': args.ci_scripts_default_branch,
        'default_repos_url': DEFAULT_REPOS_URL,
        'supplemental_repos_url': '',
        'time_trigger_spec': '',
        'mailer_recipients': '',
        'use_connext_default': 'true',
        'disable_connext_static_default': 'false',
        'disable_connext_dynamic_default': 'true',
        'use_osrf_connext_debs_default': 'false',
        'use_fastrtps_default': 'true',
        'use_opensplice_default': 'false',
        'ament_build_args_default': '--parallel --cmake-args -DSECURITY=ON --',
        'ament_test_args_default': '--retest-until-pass 10',
        'enable_c_coverage_default': 'false',
        'dont_notify_every_unstable_build': 'false',
        'turtlebot_demo': False,
        'build_timeout_mins': 0,
    }

    jenkins = connect(args.jenkins_url)

    # Per-OS overrides applied on top of `data` for each job.
    os_configs = {
        'linux': {
            'label_expression': 'linux',
            'shell_type': 'Shell',
        },
        'osx': {
            'label_expression': 'osx_slave',
            'shell_type': 'Shell',
            # the current OS X slave can't handle git@github urls
            'ci_scripts_repository': args.ci_scripts_repository.replace(
                '[email protected]:', 'https://github.com/'),
        },
        'windows': {
            'label_expression': 'windows_slave',
            'shell_type': 'BatchFile',
        },
        'linux-aarch64': {
            'label_expression': 'linux_aarch64',
            'shell_type': 'Shell',
            'use_connext_default': 'false',
        },
    }

    jenkins_kwargs = {}
    if not args.commit:
        jenkins_kwargs['dry_run'] = True

    # configure os specific jobs
    for os_name in sorted(os_configs.keys()):
        job_data = dict(data)
        job_data['os_name'] = os_name
        job_data.update(os_configs[os_name])

        # configure manual triggered job
        job_name = 'ci_' + os_name
        job_data['cmake_build_type'] = 'None'
        job_config = expand_template('ci_job.xml.em', job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

        # configure a manual version of the packaging job
        job_name = 'ci_packaging_' + os_name
        job_data['cmake_build_type'] = 'RelWithDebInfo'
        job_data['test_bridge_default'] = 'true'
        job_config = expand_template('packaging_job.xml.em', job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

        # all following jobs are triggered nightly with email notification
        job_data['time_trigger_spec'] = '30 7 * * *'
        # for now, skip emailing about Windows failures
        job_data['mailer_recipients'] = '*****@*****.**'

        # configure packaging job
        job_name = 'packaging_' + os_name
        job_data['cmake_build_type'] = 'RelWithDebInfo'
        job_data['test_bridge_default'] = 'true'
        job_config = expand_template('packaging_job.xml.em', job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

        # keeping the paths on Windows shorter
        os_name = os_name.replace('windows', 'win')

        # configure nightly triggered job
        job_name = 'nightly_' + os_name + '_debug'
        if os_name == 'win':
            # drop the '_debug'-suffix characters to keep Windows paths short
            job_name = job_name[0:-2]
        job_data['cmake_build_type'] = 'Debug'
        job_config = expand_template('ci_job.xml.em', job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

        # configure nightly coverage job on x86 Linux only
        if os_name == 'linux':
            job_name = 'nightly_' + os_name + '_coverage'
            job_data['cmake_build_type'] = 'Debug'
            job_data['enable_c_coverage_default'] = 'true'
            job_config = expand_template('ci_job.xml.em', job_data)
            configure_job(jenkins, job_name, job_config, **jenkins_kwargs)
            # reset so the remaining jobs configured in this iteration
            # do not inherit coverage instrumentation
            job_data['enable_c_coverage_default'] = 'false'

        # configure nightly triggered job
        job_name = 'nightly_' + os_name + '_release'
        if os_name == 'win':
            job_name = job_name[0:-4]
        job_data['cmake_build_type'] = 'Release'
        job_config = expand_template('ci_job.xml.em', job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

        # configure nightly triggered job with repeated testing
        job_name = 'nightly_' + os_name + '_repeated'
        if os_name == 'win':
            job_name = job_name[0:-5]
        job_data['time_trigger_spec'] = '30 7 * * *'
        job_data['cmake_build_type'] = 'None'
        job_data['ament_test_args_default'] = \
            '--retest-until-fail 20 --ctest-args -LE linter --'
        job_config = expand_template('ci_job.xml.em', job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

    # configure the launch job
    os_specific_data = collections.OrderedDict()
    for os_name in sorted(os_configs.keys()):
        os_specific_data[os_name] = dict(data)
        os_specific_data[os_name].update(os_configs[os_name])
        os_specific_data[os_name]['job_name'] = 'ci_' + os_name
    job_data = dict(data)
    job_data['ci_scripts_default_branch'] = args.ci_scripts_default_branch
    job_data['label_expression'] = 'master'
    job_data['os_specific_data'] = os_specific_data
    job_data['cmake_build_type'] = 'None'
    job_config = expand_template('ci_launcher_job.xml.em', job_data)
    configure_job(jenkins, 'ci_launcher', job_config, **jenkins_kwargs)

    # Run the turtlebot demo job on Linux (x86_64 and aarch64) only for now.
    for os_name in ['linux', 'linux-aarch64']:
        turtlebot_job_data = dict(data)
        turtlebot_job_data['os_name'] = os_name
        turtlebot_job_data.update(os_configs[os_name])
        turtlebot_job_data['turtlebot_demo'] = True
        # Use a turtlebot2_demo-specific repos file by default.
        turtlebot_job_data['supplemental_repos_url'] = \
            'https://raw.githubusercontent.com/ros2/turtlebot2_demo/master/turtlebot2_demo.repos'
        turtlebot_job_data['cmake_build_type'] = 'None'
        job_config = expand_template('ci_job.xml.em', turtlebot_job_data)
        configure_job(
            jenkins, 'ci_turtlebot-demo_%s' % (os_name), job_config,
            **jenkins_kwargs)
def main(argv=None):
    """
    Create or update the ros2 CI, packaging, nightly and launcher jobs.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)

    Unless ``--commit`` is passed, all Jenkins operations are dry runs.
    ``--select-jobs-regexp`` limits configuration to jobs whose name
    matches the given regular expression.
    """
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Creates the ros2 jobs on Jenkins")
    parser.add_argument(
        '--jenkins-url', '-u', default='https://ci.ros2.org',
        help="Url of the jenkins server to which the job should be added")
    parser.add_argument(
        '--ci-scripts-repository', default='[email protected]:ros2/ci.git',
        help="repository from which ci scripts should be cloned"
    )
    parser.add_argument(
        '--ci-scripts-default-branch', default='master',
        help="default branch of the ci repository to get ci scripts from "
             "(this is a job parameter)"
    )
    parser.add_argument(
        '--commit', action='store_true',
        help='Actually modify the Jenkins jobs instead of only doing a dry run',
    )
    parser.add_argument(
        '--select-jobs-regexp', default='',
        help='Limit the job creation to those that match the given '
             'regular expression'
    )
    parser.add_argument(
        '--context-lines', type=nonnegative_int, default=0,
        help='Set the number of diff context lines when showing differences '
             'between old and new jobs'
    )
    args = parser.parse_args(argv)

    # Defaults shared by every job; per-OS configs and per-job dicts
    # override entries in copies of this dict.
    data = {
        'build_discard': {
            'days_to_keep': 1000,
            'num_to_keep': 3000},
        'ci_scripts_repository': args.ci_scripts_repository,
        'ci_scripts_default_branch': args.ci_scripts_default_branch,
        'default_repos_url': DEFAULT_REPOS_URL,
        'supplemental_repos_url': '',
        'time_trigger_spec': '',
        'mailer_recipients': '',
        'ignore_rmw_default': {
            'rmw_connext_dynamic_cpp',
            'rmw_fastrtps_dynamic_cpp',
            'rmw_opensplice_cpp'},
        'use_connext_debs_default': 'false',
        'use_isolated_default': 'true',
        'colcon_mixin_url':
            'https://raw.githubusercontent.com/colcon/colcon-mixin-repository/master/index.yaml',
        'build_args_default':
            '--event-handlers console_cohesion+ console_package_list+ '
            '--cmake-args -DINSTALL_EXAMPLES=OFF -DSECURITY=ON',
        'test_args_default':
            '--event-handlers console_direct+ --executor sequential '
            '--retest-until-pass 2 --ctest-args -LE xfail '
            '--pytest-args -m "not xfail"',
        'compile_with_clang_default': 'false',
        'enable_coverage_default': 'false',
        'dont_notify_every_unstable_build': 'false',
        'build_timeout_mins': 0,
        'ubuntu_distro': 'focal',
        'ros_distro': 'rolling',
    }

    jenkins = connect(args.jenkins_url)

    # Per-OS overrides applied on top of `data` for each job.
    os_configs = {
        'linux': {
            'label_expression': 'linux',
            'shell_type': 'Shell',
        },
        'osx': {
            'label_expression': 'macos',
            'shell_type': 'Shell',
            # the current OS X agent can't handle git@github urls
            'ci_scripts_repository': args.ci_scripts_repository.replace(
                '[email protected]:', 'https://github.com/'),
        },
        'windows-metal': {
            'label_expression': 'windows',
            'shell_type': 'BatchFile',
            'use_isolated_default': 'false',
        },
        'windows': {
            'label_expression': 'windows-container',
            'shell_type': 'BatchFile',
            'use_isolated_default': 'false',
        },
        'linux-aarch64': {
            'label_expression': 'linux_aarch64',
            'shell_type': 'Shell',
            'ignore_rmw_default': data['ignore_rmw_default'] | {
                'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'},
        },
        'linux-armhf': {
            'label_expression': 'linux_armhf',
            'shell_type': 'Shell',
            'ignore_rmw_default': data['ignore_rmw_default'] | {
                'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'},
            'build_args_default': data['build_args_default'].replace(
                '--cmake-args',
                '--cmake-args -DCMAKE_CXX_FLAGS=-Wno-psabi '
                '-DCMAKE_C_FLAGS=-Wno-psabi -DDISABLE_SANITIZERS=ON'),
        },
        'linux-centos': {
            'label_expression': 'linux',
            'shell_type': 'Shell',
            'build_args_default':
                '--packages-skip-by-dep image_tools ros1_bridge '
                '--packages-skip image_tools ros1_bridge ' +
                data['build_args_default'].replace(
                    '--cmake-args',
                    '--cmake-args -DCMAKE_POLICY_DEFAULT_CMP0072=NEW '
                    '-DPYTHON_VERSION=3.6 -DDISABLE_SANITIZERS=ON'),
            'test_args_default':
                '--packages-skip-by-dep image_tools ros1_bridge '
                '--packages-skip image_tools ros1_bridge ' +
                data['test_args_default'],
        },
    }
    # Overrides applied after the per-job dict, i.e. they win over
    # everything else for the given OS.
    os_config_overrides = {
        'linux-centos': {
            'mixed_overlay_pkgs': '',
            'ignore_rmw_default': {
                'rmw_connext_cpp', 'rmw_connext_dynamic_cpp',
                'rmw_opensplice_cpp'},
            'use_connext_debs_default': 'false',
        },
    }
    # OSes that the ci_launcher job must not fan out to.
    launcher_exclude = {
        'linux-armhf',
        'linux-centos',
        'windows-metal',
    }

    jenkins_kwargs = {}
    jenkins_kwargs['context_lines'] = args.context_lines
    if not args.commit:
        jenkins_kwargs['dry_run'] = True

    if args.select_jobs_regexp:
        args.pattern_select_jobs_regexp = re.compile(args.select_jobs_regexp)

    def create_job(os_name, job_name, template_file, additional_dict):
        # skip only when a select regexp was given and does not match
        if args.select_jobs_regexp and \
                not args.pattern_select_jobs_regexp.match(job_name):
            return
        job_data = dict(data)
        job_data['os_name'] = os_name
        job_data.update(os_configs[os_name])
        job_data.update(additional_dict)
        job_data.update(os_config_overrides.get(os_name, {}))
        job_config = expand_template(template_file, job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

    # configure os specific jobs
    for os_name in sorted(os_configs.keys()):
        # This short name is preserved for historic reasons, but long-paths
        # have been enabled on windows containers and their hosts
        job_os_name = os_name
        if os_name == 'windows':
            job_os_name = 'win'

        # configure manual triggered job
        create_job(os_name, 'ci_' + os_name, 'ci_job.xml.em', {
            'cmake_build_type': 'None',
        })
        # configure test jobs for experimenting with job config changes
        # Keep parameters the same as the manual triggered job above.
        create_job(os_name, 'test_ci_' + os_name, 'ci_job.xml.em', {
            'cmake_build_type': 'None',
        })

        if os_name == 'windows-metal':
            # Don't create nightlies or packaging jobs for bare-metal Windows
            continue

        packaging_label_expression = os_configs[os_name]['label_expression']
        if os_name == 'osx':
            packaging_label_expression = 'macos && mojave'

        # configure a manual version of the packaging job
        ignore_rmw_default_packaging = {'rmw_opensplice_cpp'}
        if os_name in ['linux-aarch64', 'linux-armhf']:
            ignore_rmw_default_packaging |= {
                'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'}
        create_job(os_name, 'ci_packaging_' + os_name, 'packaging_job.xml.em', {
            'build_discard': {
                'days_to_keep': 180,
                'num_to_keep': 100,
            },
            'cmake_build_type': 'RelWithDebInfo',
            'label_expression': packaging_label_expression,
            'mixed_overlay_pkgs': 'ros1_bridge',
            'ignore_rmw_default': ignore_rmw_default_packaging,
            'use_connext_debs_default': 'true',
        })

        # configure packaging job
        create_job(os_name, 'packaging_' + os_name, 'packaging_job.xml.em', {
            'build_discard': {
                'days_to_keep': 370,
                'num_to_keep': 370,
            },
            'cmake_build_type': 'RelWithDebInfo',
            'disabled': os_name == 'linux-armhf',
            'label_expression': packaging_label_expression,
            'mixed_overlay_pkgs': 'ros1_bridge',
            'time_trigger_spec': PERIODIC_JOB_SPEC,
            'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
            'ignore_rmw_default': ignore_rmw_default_packaging,
            'use_connext_debs_default': 'true',
        })

        # create a nightly Debug packaging job on Windows
        if os_name == 'windows':
            create_job(
                os_name, 'packaging_' + os_name + '_debug',
                'packaging_job.xml.em', {
                    'build_discard': {
                        'days_to_keep': 370,
                        'num_to_keep': 370,
                    },
                    'cmake_build_type': 'Debug',
                    'mixed_overlay_pkgs': 'ros1_bridge',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                    'ignore_rmw_default': ignore_rmw_default_packaging,
                    'use_connext_debs_default': 'true',
                })

        # configure nightly triggered job
        if os_name != 'linux-armhf':
            job_name = 'nightly_' + job_os_name + '_debug'
            if os_name == 'windows':
                # keep Windows job names (and thus paths) short
                job_name = job_name[:15]
            create_job(os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Debug',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
            })

        # configure nightly job for testing with address sanitizer on linux
        if os_name == 'linux':
            asan_build_args = data['build_args_default'].replace(
                '--cmake-args',
                '--cmake-args -DOSRF_TESTING_TOOLS_CPP_DISABLE_MEMORY_TOOLS=ON') + \
                ' --mixin asan-gcc --packages-up-to rcpputils'
            create_job(
                os_name, 'nightly_{}_address_sanitizer'.format(os_name),
                'ci_job.xml.em', {
                    'cmake_build_type': 'Debug',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients':
                        DEFAULT_MAIL_RECIPIENTS + ' [email protected]',
                    'build_args_default': asan_build_args,
                    'test_args_default':
                        data['test_args_default'] + ' --packages-up-to rcpputils',
                })

        # configure nightly job for compiling with clang+libcxx on linux
        if os_name == 'linux':
            # Set the logging implementation to noop because log4cxx will not
            # link properly when using libcxx.
            clang_libcxx_build_args = data['build_args_default'].replace(
                '--cmake-args',
                '--cmake-args -DRCL_LOGGING_IMPLEMENTATION=rcl_logging_noop') + \
                ' --mixin clang-libcxx --packages-skip intra_process_demo'
            create_job(
                os_name, 'nightly_' + os_name + '_clang_libcxx',
                'ci_job.xml.em', {
                    'cmake_build_type': 'Debug',
                    'compile_with_clang_default': 'true',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients':
                        DEFAULT_MAIL_RECIPIENTS + ' [email protected]',
                    'build_args_default': clang_libcxx_build_args,
                    # Only running test from the lowest-level C package to
                    # ensure "working" binaries are generated. We do not want
                    # to test more than this as we observe issues with the
                    # clang libcxx standard library we don't plan to tackle
                    # for now. The important part of this nightly is to make
                    # sure the code compiles without emitting thread-safety
                    # related warnings.
                    'test_args_default':
                        data['test_args_default'].replace(
                            ' --retest-until-pass 2', '') +
                        ' --packages-select rcutils'
                })

        # configure nightly job for testing rmw/rcl based packages with
        # thread sanitizer on linux
        if os_name == 'linux':
            tsan_build_args = data['build_args_default'].replace(
                '--cmake-args',
                '--cmake-args -DOSRF_TESTING_TOOLS_CPP_DISABLE_MEMORY_TOOLS=ON') + \
                ' --mixin tsan --packages-up-to rcpputils rcutils'
            create_job(
                os_name, 'nightly_' + os_name + '_thread_sanitizer',
                'ci_job.xml.em', {
                    'cmake_build_type': 'Debug',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients':
                        DEFAULT_MAIL_RECIPIENTS + ' [email protected]',
                    'build_args_default': tsan_build_args,
                    'test_args_default':
                        data['test_args_default'] +
                        ' --packages-select rcpputils rcutils',
                })

        # configure a manually triggered version of the coverage job
        # Proposed list of packages to maximize coverage while testing quality
        # level packages. The list is composed by the list of quality level
        # packages plus packages of ros2.repos that are used by the quality
        # level packages during tests.
        # out of the list since ignored by colcon: shape_msgs, stereo_msgs,
        # rmw_connext, rmw_cyclonedds
        quality_level_pkgs = [
            'action_msgs',
            'ament_index_cpp',
            'builtin_interfaces',
            'class_loader',
            'composition_interfaces',
            'console_bridge_vendor',
            'diagnostic_msgs',
            'fastcdr',
            'fastrtps',
            'foonathan_memory_vendor',
            'geometry_msgs',
            'libstatistics_collector',
            'libyaml_vendor',
            'lifecycle_msgs',
            'nav_msgs',
            'rcl',
            'rcl_action',
            'rcl_interfaces',
            'rcl_lifecycle',
            'rcl_logging_spdlog',
            'rcl_yaml_param_parser',
            'rclcpp',
            'rclcpp_action',
            'rclcpp_components',
            'rclcpp_lifecycle',
            'rcpputils',
            'rcutils',
            'rmw',
            'rmw_dds_common',
            'rmw_fastrtps_cpp',
            'rmw_fastrtps_shared_cpp',
            'rmw_implementation',
            'rosgraph_msgs',
            'rosidl_default_runtime',
            'rosidl_runtime_c',
            'rosidl_runtime_cpp',
            'rosidl_typesupport_c',
            'rosidl_typesupport_cpp',
            'rosidl_typesupport_fastrtps_c',
            'rosidl_typesupport_fastrtps_cpp',
            'rosidl_typesupport_interface',
            'spdlog_vendor',
            'statistics_msgs',
            'std_msgs',
            'std_srvs',
            'tracetools',
            'trajectory_msgs',
            'unique_identifier_msgs',
            'visualization_msgs',
        ]
        # out of the list since ignored by colcon: ros1_bridge
        testing_pkgs_for_quality_level = [
            'interactive_markers',
            'launch_testing_ros',
            'message_filters',
            'ros2action',
            'ros2component',
            'ros2doctor',
            'ros2interface',
            'ros2lifecycle',
            'ros2lifecycle_test_fixtures',
            'ros2param',
            'ros2topic',
            'rosbag2_compression',
            'rosbag2_converter_default_plugins',
            'rosbag2_cpp',
            'rosbag2_storage',
            'rosbag2_storage_default_plugins',
            'rosbag2_test_common',
            'rosbag2_tests',
            'rosbag2_transport',
            'rosidl_generator_c',
            'rosidl_generator_cpp',
            'rosidl_generator_py',
            'rosidl_runtime_py',
            'rosidl_typesupport_connext_c',
            'rosidl_typesupport_connext_cpp',
            'rosidl_typesupport_introspection_c',
            'rosidl_typesupport_introspection_cpp',
            'test_cli',
            'test_cli_remapping',
            'test_communication',
            'test_launch_ros',
            'test_msgs',
            'test_quality_of_service',
            'test_rclcpp',
            'test_security',
            'test_tf2',
            'tf2',
            'tf2_bullet',
            'tf2_eigen',
            'tf2_geometry_msgs',
            'tf2_kdl',
            'tf2_msgs',
            'tf2_py',
            'tf2_ros',
            'tf2_sensor_msgs',
            'tracetools_test',
        ]
        if os_name == 'linux':
            create_job(os_name, 'ci_' + os_name + '_coverage', 'ci_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 100,
                    'num_to_keep': 100,
                },
                'cmake_build_type': 'Debug',
                'enable_coverage_default': 'true',
                'build_args_default':
                    data['build_args_default'] +
                    ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp ' +
                    '--packages-up-to ' +
                    ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
                'test_args_default':
                    data['test_args_default'] +
                    ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp ' +
                    '--packages-up-to ' +
                    ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
            })
            create_job(os_name, 'test_' + os_name + '_coverage', 'ci_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 100,
                    'num_to_keep': 100,
                },
                'cmake_build_type': 'Debug',
                'enable_coverage_default': 'true',
                'build_args_default':
                    data['build_args_default'] +
                    ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp ' +
                    '--packages-up-to ' +
                    ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
                'test_args_default':
                    data['test_args_default'] +
                    ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp ' +
                    '--packages-up-to ' +
                    ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
            })

        # configure nightly coverage job on x86 Linux only
        if os_name == 'linux':
            create_job(os_name, 'nightly_' + os_name + '_coverage', 'ci_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 100,
                    'num_to_keep': 100,
                },
                'cmake_build_type': 'Debug',
                'enable_coverage_default': 'true',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'build_args_default':
                    data['build_args_default'] + ' --packages-up-to ' +
                    ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
                'test_args_default':
                    data['test_args_default'] + ' --packages-up-to ' +
                    ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
            })
            # Add a coverage job targeting Foxy.
            create_job(os_name, 'nightly_' + os_name + '_foxy_coverage', 'ci_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 100,
                    'num_to_keep': 100,
                },
                'cmake_build_type': 'Debug',
                'default_repos_url':
                    'https://raw.githubusercontent.com/ros2/ros2/foxy/ros2.repos',
                'enable_coverage_default': 'true',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'ros_distro': 'foxy',
                'ubuntu_distro': 'focal',
                'build_args_default':
                    data['build_args_default'] + ' --packages-up-to ' +
                    ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
                'test_args_default':
                    data['test_args_default'] + ' --packages-up-to ' +
                    ' '.join(quality_level_pkgs + testing_pkgs_for_quality_level),
            })

        # configure nightly triggered job using FastRTPS dynamic
        if os_name != 'linux-armhf':
            job_name = 'nightly_' + job_os_name + '_extra_rmw' + '_release'
            if os_name == 'windows':
                job_name = job_name[:25]
            create_job(os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Release',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'ignore_rmw_default': {
                    'rmw_connext_cpp',
                    'rmw_connext_dynamic_cpp',
                    'rmw_opensplice_cpp'},
            })

        # configure nightly triggered job
        if os_name != 'linux-armhf':
            job_name = 'nightly_' + job_os_name + '_release'
            if os_name == 'windows':
                job_name = job_name[:15]
            create_job(os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Release',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
            })

        # configure nightly triggered job with repeated testing
        if os_name != 'linux-armhf':
            job_name = 'nightly_' + job_os_name + '_repeated'
            if os_name == 'windows':
                job_name = job_name[:15]
            test_args_default = os_configs.get(os_name, data).get(
                'test_args_default', data['test_args_default'])
            test_args_default = test_args_default.replace(
                '--retest-until-pass', '--retest-until-fail')
            test_args_default = test_args_default.replace(
                '--ctest-args -LE xfail', '--ctest-args -LE "(linter|xfail)"')
            test_args_default = test_args_default.replace(
                '--pytest-args -m "not xfail"',
                '--pytest-args -m "not linter and not xfail"')
            if job_os_name == 'linux-aarch64':
                # skipping known to be flaky tests
                # https://github.com/ros2/rviz/issues/368
                test_args_default += \
                    ' --packages-skip rviz_common rviz_default_plugins' \
                    ' rviz_rendering rviz_rendering_tests'
            create_job(os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'None',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'test_args_default': test_args_default,
            })

        # configure nightly triggered job for excluded test
        if os_name != 'linux-armhf':
            job_name = 'nightly_' + job_os_name + '_xfail'
            test_args_default = os_configs.get(os_name, data).get(
                'test_args_default', data['test_args_default'])
            test_args_default = test_args_default.replace(
                '--ctest-args -LE xfail', '--ctest-args -L xfail')
            test_args_default = test_args_default.replace(
                '--pytest-args -m "not xfail"',
                '--pytest-args -m xfail --runxfail')
            create_job(os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'None',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'test_args_default': test_args_default,
            })

    # configure the launch job
    launcher_job_name = 'ci_launcher'
    # BUGFIX: mirror create_job()'s filter semantics — configure the launcher
    # unless a select regexp was given and does not match.  Previously the job
    # was only created when a regexp was given AND matched, so a default run
    # (no --select-jobs-regexp) silently skipped ci_launcher.
    if not args.select_jobs_regexp or \
            args.pattern_select_jobs_regexp.match(launcher_job_name):
        os_specific_data = collections.OrderedDict()
        for os_name in sorted(os_configs.keys() - launcher_exclude):
            os_specific_data[os_name] = dict(data)
            os_specific_data[os_name].update(os_configs[os_name])
            os_specific_data[os_name]['job_name'] = 'ci_' + os_name
        job_data = dict(data)
        job_data['ci_scripts_default_branch'] = args.ci_scripts_default_branch
        job_data['label_expression'] = 'master'
        job_data['os_specific_data'] = os_specific_data
        job_data['cmake_build_type'] = 'None'
        job_config = expand_template('ci_launcher_job.xml.em', job_data)
        configure_job(jenkins, launcher_job_name, job_config, **jenkins_kwargs)
def configure_devel_jobs(
        config_url, rosdistro_name, source_build_name, groovy_script=None,
        dry_run=False, whitelist_repository_names=None):
    """
    Configure all Jenkins devel jobs.

    L{configure_release_job} will be invoked for source repository and target
    which matches the build file criteria.

    :param config_url: URL of the buildfarm configuration index
    :param rosdistro_name: name of the ROS distribution
    :param source_build_name: name of the source build file to use
    :param groovy_script: when given, no Jenkins connection is made and a
      groovy reconfiguration script is written instead
    :param dry_run: pass-through flag; no Jenkins state is modified when True
    :param whitelist_repository_names: when non-empty, only these repositories
      are configured and obsolete-job removal is skipped
    """
    config = get_config_index(config_url)
    build_files = get_source_build_files(config, rosdistro_name)
    build_file = build_files[source_build_name]

    index = get_index(config.rosdistro_index_url)

    # the distribution cache is only needed to resolve maintainer notification
    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print(' -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    devel_view_name = get_devel_view_name(
        rosdistro_name, source_build_name, pull_request=False)
    pull_request_view_name = get_devel_view_name(
        rosdistro_name, source_build_name, pull_request=True)

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    from ros_buildfarm.jenkins import connect
    # `jenkins is False` signals "groovy mode" to downstream helpers
    jenkins = connect(config.jenkins_url) if groovy_script is None else False

    view_configs = {}
    views = {}
    if build_file.test_commits_force is not False:
        views[devel_view_name] = configure_devel_view(
            jenkins, devel_view_name, dry_run=dry_run)
    if build_file.test_pull_requests_force is not False:
        views[pull_request_view_name] = configure_devel_view(
            jenkins, pull_request_view_name, dry_run=dry_run)
    if not jenkins:
        # collect the view configs for the groovy script instead
        view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(view_configs),
    }

    repo_names = dist_file.repositories.keys()
    filtered_repo_names = build_file.filter_repositories(repo_names)

    devel_job_names = []
    pull_request_job_names = []
    job_configs = OrderedDict()
    for repo_name in sorted(repo_names):
        if whitelist_repository_names:
            if repo_name not in whitelist_repository_names:
                print(
                    "Skipping repository '%s' not in explicitly passed list" %
                    repo_name, file=sys.stderr)
                continue
        is_disabled = repo_name not in filtered_repo_names
        if is_disabled and build_file.skip_ignored_repositories:
            print("Skipping ignored repository '%s'" % repo_name,
                  file=sys.stderr)
            continue

        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            print("Skipping repository '%s': no source section" % repo_name)
            continue
        if not repo.source_repository.version:
            print("Skipping repository '%s': no source version" % repo_name)
            continue

        job_types = []
        # check for testing commits
        # precedence: build-file force > repository setting > build-file default
        if build_file.test_commits_force is False:
            print(("Skipping repository '%s': 'test_commits' is forced to " +
                   "false in the build file") % repo_name)
        elif repo.source_repository.test_commits is False:
            print(("Skipping repository '%s': 'test_commits' of the " +
                   "repository set to false") % repo_name)
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            print(("Skipping repository '%s': 'test_commits' defaults to " +
                   "false in the build file") % repo_name)
        else:
            job_types.append('commit')

        if not is_disabled:
            # check for testing pull requests (same precedence as above)
            if build_file.test_pull_requests_force is False:
                # print(("Skipping repository '%s': 'test_pull_requests' " +
                #        "is forced to false in the build file") % repo_name)
                pass
            elif repo.source_repository.test_pull_requests is False:
                # print(("Skipping repository '%s': 'test_pull_requests' of " +
                #        "the repository set to false") % repo_name)
                pass
            elif repo.source_repository.test_pull_requests is None and \
                    not build_file.test_pull_requests_default:
                # print(("Skipping repository '%s': 'test_pull_requests' " +
                #        "defaults to false in the build file") % repo_name)
                pass
            else:
                print("Pull request job for repository '%s'" % repo_name)
                job_types.append('pull_request')

        for job_type in job_types:
            pull_request = job_type == 'pull_request'
            for os_name, os_code_name, arch in targets:
                try:
                    job_name, job_config = configure_devel_job(
                        config_url, rosdistro_name, source_build_name,
                        repo_name, os_name, os_code_name, arch, pull_request,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache, jenkins=jenkins, views=views,
                        is_disabled=is_disabled,
                        groovy_script=groovy_script,
                        dry_run=dry_run)
                    if not pull_request:
                        devel_job_names.append(job_name)
                    else:
                        pull_request_job_names.append(job_name)
                    if groovy_script is not None:
                        print("Configuration for job '%s'" % job_name)
                        job_configs[job_name] = job_config
                except JobValidationError as e:
                    # skip invalid target/repo combinations but keep going
                    print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    devel_job_prefix = '%s__' % devel_view_name
    pull_request_job_prefix = '%s__' % pull_request_view_name
    if not whitelist_repository_names:
        # only remove obsolete jobs when the full set was configured;
        # with a whitelist the job lists are incomplete
        groovy_data['job_prefixes_and_names']['devel'] = \
            (devel_job_prefix, devel_job_names)
        groovy_data['job_prefixes_and_names']['pull_request'] = \
            (pull_request_job_prefix, pull_request_job_names)

        if groovy_script is None:
            # delete obsolete jobs in these views
            from ros_buildfarm.jenkins import remove_jobs
            print('Removing obsolete devel jobs')
            remove_jobs(
                jenkins, devel_job_prefix, devel_job_names, dry_run=dry_run)
            print('Removing obsolete pull request jobs')
            remove_jobs(
                jenkins, pull_request_job_prefix, pull_request_job_names,
                dry_run=dry_run)
    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d views and %d jobs" %
            (groovy_script, len(view_configs), len(job_configs)))
        content = expand_template(
            'snippet/reconfigure_jobs.groovy.em', groovy_data)
        write_groovy_script_and_configs(
            groovy_script, content, job_configs, view_configs=view_configs)
def configure_doc_jobs(
        config_url, rosdistro_name, doc_build_name, groovy_script=None):
    """
    Configure all Jenkins doc jobs.

    L{configure_doc_job} will be invoked for doc repository and target
    which matches the build file criteria.

    :param config_url: URL of the buildfarm configuration index
    :param rosdistro_name: name of the ROS distribution
    :param doc_build_name: name of the doc build file to use
    :param groovy_script: when given, job configuration is written to a
      groovy reconfiguration script instead of being applied via the
      Jenkins API
    """
    config = get_config_index(config_url)
    build_files = get_doc_build_files(config, rosdistro_name)
    build_file = build_files[doc_build_name]

    index = get_index(config.rosdistro_index_url)

    # the distribution cache is only needed to resolve maintainer notification
    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print(' -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    doc_view_name = get_doc_view_name(rosdistro_name, doc_build_name)

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    views = []
    views.append(configure_doc_view(jenkins, doc_view_name))

    if groovy_script is not None:
        # all further configuration will be handled by the groovy script
        # (`jenkins is False` signals "groovy mode" to downstream helpers)
        jenkins = False

    repo_names = dist_file.repositories.keys()
    filtered_repo_names = build_file.filter_repositories(repo_names)

    job_names = []
    job_configs = {}
    for repo_name in sorted(repo_names):
        is_disabled = repo_name not in filtered_repo_names
        if is_disabled and build_file.skip_ignored_repositories:
            print("Skipping ignored repository '%s'" % repo_name,
                  file=sys.stderr)
            continue

        repo = dist_file.repositories[repo_name]
        if not repo.doc_repository:
            print("Skipping repository '%s': no doc section" % repo_name)
            continue
        if not repo.doc_repository.version:
            print("Skipping repository '%s': no doc version" % repo_name)
            continue

        for os_name, os_code_name, arch in targets:
            try:
                job_name, job_config = configure_doc_job(
                    config_url, rosdistro_name, doc_build_name,
                    repo_name, os_name, os_code_name, arch,
                    config=config, build_file=build_file,
                    index=index, dist_file=dist_file,
                    dist_cache=dist_cache, jenkins=jenkins, views=views,
                    is_disabled=is_disabled,
                    groovy_script=groovy_script)
                job_names.append(job_name)
                if groovy_script is not None:
                    print("Configuration for job '%s'" % job_name)
                    job_configs[job_name] = job_config
            except JobValidationError as e:
                # skip invalid target/repo combinations but keep going
                print(e.message, file=sys.stderr)

    job_prefix = '%s__' % doc_view_name
    if groovy_script is None:
        # delete obsolete jobs in this view
        from ros_buildfarm.jenkins import remove_jobs
        print('Removing obsolete doc jobs')
        remove_jobs(jenkins, job_prefix, job_names)
    else:
        print(
            "Writing groovy script '%s' to reconfigure %d jobs" %
            (groovy_script, len(job_configs)))
        data = {
            'expected_num_jobs': len(job_configs),
            'job_prefixes_and_names': {
                'doc': (job_prefix, job_names),
            }
        }
        content = expand_template('snippet/reconfigure_jobs.groovy.em', data)
        write_groovy_script_and_configs(
            groovy_script, content, job_configs)
def configure_release_jobs(config_url, rosdistro_name, release_build_name,
                           groovy_script=None):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally a job to import Debian packages into the Debian repository
    is created.

    :param config_url: URL of the buildfarm configuration index
    :param rosdistro_name: name of the ROS distribution
    :param release_build_name: name of the release build file to use
    :param groovy_script: optional path; when not None job configurations
        are collected and written to disk together with a groovy script
        instead of being applied through the Jenkins API
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    platforms = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            platforms.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in platforms:
        print(' - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    pkg_names = dist_file.release_packages.keys()
    filtered_pkg_names = build_file.filter_packages(pkg_names)
    explicitly_ignored_pkg_names = set(pkg_names) - set(filtered_pkg_names)
    if explicitly_ignored_pkg_names:
        print(('The following packages are being %s because of ' +
               'white-/blacklisting:') %
              ('ignored' if build_file.skip_ignored_packages else 'disabled'))
        for pkg_name in sorted(explicitly_ignored_pkg_names):
            print(' -', pkg_name)

    dist_cache = None
    if build_file.notify_maintainers or \
            build_file.abi_incompatibility_assumed or \
            explicitly_ignored_pkg_names:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if explicitly_ignored_pkg_names:
        # get direct dependencies from distro cache for each package
        direct_dependencies = {}
        for pkg_name in pkg_names:
            direct_dependencies[pkg_name] = _get_direct_dependencies(
                pkg_name, dist_cache, pkg_names) or set([])

        # find recursive downstream deps for all explicitly ignored packages
        # via a fixed-point iteration: keep widening the ignored set until
        # no new downstream packages are found
        ignored_pkg_names = set(explicitly_ignored_pkg_names)
        while True:
            implicitly_ignored_pkg_names = _get_downstream_package_names(
                ignored_pkg_names, direct_dependencies)
            if implicitly_ignored_pkg_names - ignored_pkg_names:
                ignored_pkg_names |= implicitly_ignored_pkg_names
                continue
            break
        implicitly_ignored_pkg_names = \
            ignored_pkg_names - explicitly_ignored_pkg_names

        if implicitly_ignored_pkg_names:
            print(('The following packages are being %s because their ' +
                   'dependencies are being ignored:') %
                  ('ignored' if build_file.skip_ignored_packages
                   else 'disabled'))
            for pkg_name in sorted(implicitly_ignored_pkg_names):
                print(' -', pkg_name)
            filtered_pkg_names = \
                set(filtered_pkg_names) - implicitly_ignored_pkg_names

    jenkins = connect(config.jenkins_url)

    configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins)

    configure_sync_packages_to_main_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins)
    for os_name, os_code_name in platforms:
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins)

    targets = []
    for os_name, os_code_name in platforms:
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
    views = configure_release_views(
        jenkins, rosdistro_name, release_build_name, targets)

    if groovy_script is not None:
        # all further configuration will be handled by the groovy script
        jenkins = False

    all_source_job_names = []
    all_binary_job_names = []
    all_job_configs = {}
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]

        is_disabled = pkg_name not in filtered_pkg_names
        if is_disabled and build_file.skip_ignored_packages:
            print("Skipping ignored package '%s' in repository '%s'" %
                  (pkg_name, repo_name), file=sys.stderr)
            continue

        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name), file=sys.stderr)
            continue

        for os_name, os_code_name in platforms:
            try:
                source_job_names, binary_job_names, job_configs = \
                    configure_release_job(
                        config_url, rosdistro_name, release_build_name,
                        pkg_name, os_name, os_code_name,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins, views=views,
                        generate_import_package_job=False,
                        generate_sync_packages_jobs=False,
                        is_disabled=is_disabled,
                        groovy_script=groovy_script)
                all_source_job_names += source_job_names
                all_binary_job_names += binary_job_names
                if groovy_script is not None:
                    print('Configuration for jobs: ' +
                          ', '.join(source_job_names + binary_job_names))
                    all_job_configs.update(job_configs)
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    groovy_data = {
        'expected_num_jobs': len(all_job_configs),
        'job_prefixes_and_names': {},
    }

    # delete obsolete binary jobs
    for os_name, os_code_name in platforms:
        for arch in build_file.targets[os_name][os_code_name]:
            binary_view = get_release_binary_view_name(
                rosdistro_name, release_build_name,
                os_name, os_code_name, arch)
            binary_job_prefix = '%s__' % binary_view
            excluded_job_names = set([
                j for j in all_binary_job_names
                if j.startswith(binary_job_prefix)
            ])
            if groovy_script is None:
                print("Removing obsolete binary jobs with prefix '%s'" %
                      binary_job_prefix)
                remove_jobs(jenkins, binary_job_prefix, excluded_job_names)
            else:
                binary_key = 'binary_%s_%s_%s' % (os_name, os_code_name, arch)
                groovy_data['job_prefixes_and_names'][binary_key] = \
                    (binary_job_prefix, excluded_job_names)

    # delete obsolete source jobs
    # requires knowledge about all other release build files
    # since source jobs are shared between release build files
    # with matching platforms
    for os_name, os_code_name in platforms:
        other_source_job_names = []
        # get source job names for all other release build files
        for other_release_build_name in [
                k for k in build_files.keys() if k != release_build_name
        ]:
            other_build_file = build_files[other_release_build_name]
            other_dist_file = get_distribution_file(index, rosdistro_name,
                                                    other_build_file)
            if not other_dist_file:
                continue

            if os_name not in other_build_file.targets or \
                    os_code_name not in other_build_file.targets[os_name]:
                continue

            if other_build_file.skip_ignored_packages:
                filtered_pkg_names = other_build_file.filter_packages(
                    pkg_names)
            else:
                filtered_pkg_names = pkg_names
            for pkg_name in sorted(filtered_pkg_names):
                pkg = other_dist_file.release_packages[pkg_name]
                repo_name = pkg.repository_name
                repo = other_dist_file.repositories[repo_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue

                other_job_name = get_sourcedeb_job_name(
                    rosdistro_name, other_release_build_name,
                    pkg_name, os_name, os_code_name)
                other_source_job_names.append(other_job_name)

        source_view_prefix = get_release_source_view_name(
            rosdistro_name, os_name, os_code_name)
        source_job_prefix = '%s__' % source_view_prefix
        excluded_job_names = set([
            j for j in (all_source_job_names + other_source_job_names)
            if j.startswith(source_job_prefix)
        ])
        if groovy_script is None:
            print("Removing obsolete source jobs with prefix '%s'" %
                  source_job_prefix)
            remove_jobs(jenkins, source_job_prefix, excluded_job_names)
        else:
            source_key = 'source_%s_%s' % (os_name, os_code_name)
            groovy_data['job_prefixes_and_names'][source_key] = (
                source_job_prefix, excluded_job_names)

    if groovy_script is not None:
        print("Writing groovy script '%s' to reconfigure %d jobs" %
              (groovy_script, len(all_job_configs)))
        content = expand_template('snippet/reconfigure_jobs.groovy.em',
                                  groovy_data)
        write_groovy_script_and_configs(groovy_script, content,
                                        all_job_configs)
def configure_ci_job(
        config_url, rosdistro_name, ci_build_name,
        os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None,
        jenkins=None, views=None,
        is_disabled=False,
        groovy_script=None,
        build_targets=None,
        dry_run=False,
        underlay_source_paths=None,
        trigger_timer=None):
    """
    Configure a single Jenkins CI job.

    This includes the following steps:
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the ci/run_ci_job.py script

    :param build_targets: optional replacement for the build file's targets;
        NOTE(review): this mutates the (possibly shared) build_file object
        in place — confirm callers do not reuse it afterwards
    :param dry_run: when True no job is actually created/updated on Jenkins
    :return: tuple of the job name and its XML configuration
    :raises JobValidationError: if no distribution file matches or the
        OS name / OS code name / architecture is not in the build file
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_ci_build_files(config, rosdistro_name)
        build_file = build_files[ci_build_name]
    # Overwrite build_file.targets if build_targets is specified
    if build_targets is not None:
        build_file.targets = build_targets

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    if os_name not in build_file.targets.keys():
        raise JobValidationError("Invalid OS name '%s' " % os_name +
                                 'choose one of the following: ' +
                                 ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' % ', '.join(sorted(
                build_file.targets[os_name][os_code_name])))

    underlay_source_jobs = [
        get_ci_job_name(rosdistro_name, os_name,
                        os_code_name, arch, underlay_job)
        for underlay_job in build_file.underlay_from_ci_jobs
    ]
    # one workspace env var per underlay job, numbered from 1;
    # NOTE: the comprehension variable 'index' shadows the rosdistro index
    # parameter only inside the comprehension (Python 3 comprehension scope)
    underlay_source_paths = (underlay_source_paths or []) + \
        ['$UNDERLAY%d_JOB_SPACE' % (index + 1)
         for index in range(len(underlay_source_jobs))]

    trigger_jobs = [
        get_ci_job_name(rosdistro_name, os_name,
                        os_code_name, arch, trigger_job)
        for trigger_job in build_file.jenkins_job_upstream_triggers
    ]

    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        view_name = get_ci_view_name(rosdistro_name)
        configure_ci_view(jenkins, view_name, dry_run=dry_run)

    job_name = get_ci_job_name(rosdistro_name, os_name,
                               os_code_name, arch, ci_build_name)

    job_config = _get_ci_job_config(
        index, rosdistro_name, build_file, os_name,
        os_code_name, arch, build_file.repos_files,
        build_file.repository_names, underlay_source_jobs,
        underlay_source_paths, trigger_timer, trigger_jobs,
        is_disabled=is_disabled)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)

    return job_name, job_config
def configure_release_job_with_validation(
        config_url, rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name, append_timestamp=False,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None,
        generate_import_package_job=True,
        filter_arches=None):
    """
    Configure the source and binary Jenkins release jobs for one package.

    Validates the package / OS / OS code name against the build file and
    distribution file, then configures one sourcedeb job and one binarydeb
    job per target architecture.

    :param filter_arches: optional whitelist of architectures; binarydeb
        jobs for other architectures are skipped
    :raises JobValidationError: if any of the validation checks fail
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    if pkg_name not in pkg_names:
        raise JobValidationError(
            "Invalid package name '%s' " % pkg_name +
            'choose one of the following: ' + ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError(
            "Repository '%s' has no release section" % repo_name)

    if not repo.release_repository.version:
        raise JobValidationError(
            "Repository '%s' has no release version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    # the distribution cache is needed to notify maintainers and to compute
    # upstream dependencies when ABI incompatibility is assumed
    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_release_view_name(rosdistro_name, release_build_name)
        configure_release_view(jenkins, view_name)

    if generate_import_package_job:
        configure_import_package_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins)

    # sourcedeb job
    job_name = get_sourcedeb_job_name(
        rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)

    job_config = _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name,
        _get_target_arches(
            build_file, os_name, os_code_name, print_skipped=False),
        repo.release_repository, pkg_name,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config)

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(
            pkg_name, dist_cache, pkg_names)
        # if the package is not in the rosdistro cache no binarydeb jobs
        # can be configured; silently skip them
        if dependency_names is None:
            return

    # binarydeb jobs
    for arch in _get_target_arches(build_file, os_name, os_code_name):
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)

        # each binarydeb job waits for the binarydeb jobs of its direct
        # dependencies to avoid building against a stale ABI
        upstream_job_names = [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name,
                dependency_name, os_name, os_code_name, arch)
            for dependency_name in dependency_names]

        job_config = _get_binarydeb_job_config(
            config_url, rosdistro_name, release_build_name,
            config, build_file, os_name, os_code_name, arch,
            repo.release_repository, pkg_name, append_timestamp,
            repo_name, dist_cache=dist_cache,
            upstream_job_names=upstream_job_names)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config)
def configure_release_job(
        config_url, rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        generate_import_package_job=True,
        generate_sync_packages_jobs=True,
        is_disabled=False, other_build_files_same_platform=None,
        groovy_script=None,
        filter_arches=None,
        dry_run=False):
    """
    Configure a Jenkins release job.

    The following jobs are created for each package:
    - M source jobs, one for each OS node name
    - M * N binary jobs, one for each combination of OS code name and arch

    :param other_build_files_same_platform: release build files other than
        the current one that target the same platform; used to decide
        whether a shared sourcedeb job must stay enabled
    :param groovy_script: when not None, jobs are only collected and
        returned instead of being configured through the Jenkins API
    :return: tuple of (source job names, binary job names, job configs)
    :raises JobValidationError: if any validation of the package, OS name
        or OS code name against the build/distribution file fails
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    pkg_names = dist_file.release_packages.keys()

    if pkg_name not in pkg_names:
        raise JobValidationError(
            "Invalid package name '%s' " % pkg_name +
            'choose one of the following: ' + ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError(
            "Repository '%s' has no release section" % repo_name)

    if not repo.release_repository.version:
        raise JobValidationError(
            "Repository '%s' has no release version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if views is None:
        targets = []
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
        configure_release_views(
            jenkins, rosdistro_name, release_build_name, targets,
            dry_run=dry_run)

    if generate_import_package_job:
        configure_import_package_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins,
            dry_run=dry_run)

    if generate_sync_packages_jobs:
        configure_sync_packages_to_main_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins,
            dry_run=dry_run)
        for arch in build_file.targets[os_name][os_code_name]:
            configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins,
                dry_run=dry_run)

    source_job_names = []
    binary_job_names = []
    job_configs = {}

    # sourcedeb job
    # since sourcedeb jobs are potentially being shared across multiple build
    # files the configuration has to take all of them into account in order to
    # generate a job which all build files agree on
    source_job_name = get_sourcedeb_job_name(
        rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)

    # while the package is disabled in the current build file
    # it might be used by sibling build files
    is_source_disabled = is_disabled
    if is_source_disabled and other_build_files_same_platform:
        # check if sourcedeb job is used by any other
        # build file with the same platform
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                is_source_disabled = False
                break

    job_config = _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name,
        pkg_name, repo_name, repo.release_repository,
        dist_cache=dist_cache, is_disabled=is_source_disabled,
        other_build_files_same_platform=other_build_files_same_platform)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, source_job_name, job_config, dry_run=dry_run)
    source_job_names.append(source_job_name)
    job_configs[source_job_name] = job_config

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(
            pkg_name, dist_cache, pkg_names)
        # if dependencies are not yet available in rosdistro cache
        # skip binary jobs
        if dependency_names is None:
            print(("Skipping binary jobs for package '%s' because it is not " +
                   "yet in the rosdistro cache") % pkg_name, file=sys.stderr)
            return source_job_names, binary_job_names, job_configs

    # binarydeb jobs
    for arch in build_file.targets[os_name][os_code_name]:
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)

        # the sourcedeb job and the binarydeb jobs of all direct
        # dependencies are upstream of this binarydeb job
        upstream_job_names = [source_job_name] + [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name,
                dependency_name, os_name, os_code_name, arch)
            for dependency_name in dependency_names]

        job_config = _get_binarydeb_job_config(
            config_url, rosdistro_name, release_build_name,
            config, build_file, os_name, os_code_name, arch,
            pkg_name, repo_name, repo.release_repository,
            dist_cache=dist_cache, upstream_job_names=upstream_job_names,
            is_disabled=is_disabled)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config, dry_run=dry_run)
        binary_job_names.append(job_name)
        job_configs[job_name] = job_config

    return source_job_names, binary_job_names, job_configs
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name, groovy_script=None,
        dry_run=False, whitelist_package_names=None):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally a job to import Debian packages into the Debian repository
    is created.

    :param groovy_script: optional path; when not None all view and job
        configurations are written to disk together with a groovy script
        instead of being applied through the Jenkins API
    :param dry_run: when True nothing is changed on the Jenkins master
    :param whitelist_package_names: optional explicit list of package names;
        when given only these packages are configured and no obsolete jobs
        are removed
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    platforms = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            platforms.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in platforms:
        print(' - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    pkg_names = dist_file.release_packages.keys()
    filtered_pkg_names = build_file.filter_packages(pkg_names)
    explicitly_ignored_pkg_names = set(pkg_names) - set(filtered_pkg_names)
    if explicitly_ignored_pkg_names:
        print(('The following packages are being %s because of ' +
               'white-/blacklisting:') %
              ('ignored' if build_file.skip_ignored_packages else 'disabled'))
        for pkg_name in sorted(explicitly_ignored_pkg_names):
            print(' -', pkg_name)

    dist_cache = None
    if build_file.notify_maintainers or \
            build_file.abi_incompatibility_assumed or \
            explicitly_ignored_pkg_names:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if explicitly_ignored_pkg_names:
        # get direct dependencies from distro cache for each package
        direct_dependencies = {}
        for pkg_name in pkg_names:
            direct_dependencies[pkg_name] = _get_direct_dependencies(
                pkg_name, dist_cache, pkg_names) or set([])

        # find recursive downstream deps for all explicitly ignored packages
        # via a fixed-point iteration: keep widening the ignored set until
        # no new downstream packages are found
        ignored_pkg_names = set(explicitly_ignored_pkg_names)
        while True:
            implicitly_ignored_pkg_names = _get_downstream_package_names(
                ignored_pkg_names, direct_dependencies)
            if implicitly_ignored_pkg_names - ignored_pkg_names:
                ignored_pkg_names |= implicitly_ignored_pkg_names
                continue
            break
        implicitly_ignored_pkg_names = \
            ignored_pkg_names - explicitly_ignored_pkg_names

        if implicitly_ignored_pkg_names:
            print(('The following packages are being %s because their ' +
                   'dependencies are being ignored:') %
                  ('ignored' if build_file.skip_ignored_packages
                   else 'disabled'))
            for pkg_name in sorted(implicitly_ignored_pkg_names):
                print(' -', pkg_name)
            filtered_pkg_names = \
                set(filtered_pkg_names) - implicitly_ignored_pkg_names

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    jenkins = connect(config.jenkins_url) if groovy_script is None else False

    all_view_configs = {}
    all_job_configs = {}

    job_name, job_config = configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins,
        dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    job_name, job_config = configure_sync_packages_to_main_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins,
        dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    for os_name, os_code_name in platforms:
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            job_name, job_config = configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins,
                dry_run=dry_run)
            if not jenkins:
                all_job_configs[job_name] = job_config

    targets = []
    for os_name, os_code_name in platforms:
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
    views = configure_release_views(
        jenkins, rosdistro_name, release_build_name, targets,
        dry_run=dry_run)
    if not jenkins:
        all_view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(views),
    }

    # sibling build files are needed to decide whether shared sourcedeb
    # jobs must stay enabled
    other_build_files = [v for k, v in build_files.items()
                         if k != release_build_name]

    all_source_job_names = []
    all_binary_job_names = []
    for pkg_name in sorted(pkg_names):
        if whitelist_package_names:
            if pkg_name not in whitelist_package_names:
                print("Skipping package '%s' not in the explicitly passed "
                      "list" % pkg_name, file=sys.stderr)
                continue
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]

        is_disabled = pkg_name not in filtered_pkg_names
        if is_disabled and build_file.skip_ignored_packages:
            print("Skipping ignored package '%s' in repository '%s'" %
                  (pkg_name, repo_name), file=sys.stderr)
            continue

        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name), file=sys.stderr)
            continue

        for os_name, os_code_name in platforms:
            other_build_files_same_platform = []
            for other_build_file in other_build_files:
                if os_name not in other_build_file.targets:
                    continue
                if os_code_name not in other_build_file.targets[os_name]:
                    continue
                other_build_files_same_platform.append(other_build_file)

            try:
                source_job_names, binary_job_names, job_configs = \
                    configure_release_job(
                        config_url, rosdistro_name, release_build_name,
                        pkg_name, os_name, os_code_name,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins, views=views,
                        generate_import_package_job=False,
                        generate_sync_packages_jobs=False,
                        is_disabled=is_disabled,
                        other_build_files_same_platform=(
                            other_build_files_same_platform),
                        groovy_script=groovy_script,
                        dry_run=dry_run)
                all_source_job_names += source_job_names
                all_binary_job_names += binary_job_names
                if groovy_script is not None:
                    print('Configuration for jobs: ' +
                          ', '.join(source_job_names + binary_job_names))
                    all_job_configs.update(job_configs)
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(all_job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    # with an explicit list of packages we don't delete obsolete jobs
    if not whitelist_package_names:
        # delete obsolete binary jobs
        for os_name, os_code_name in platforms:
            for arch in build_file.targets[os_name][os_code_name]:
                binary_view = get_release_binary_view_name(
                    rosdistro_name, release_build_name,
                    os_name, os_code_name, arch)
                binary_job_prefix = '%s__' % binary_view
                excluded_job_names = set([
                    j for j in all_binary_job_names
                    if j.startswith(binary_job_prefix)])
                if groovy_script is None:
                    print("Removing obsolete binary jobs with prefix '%s'" %
                          binary_job_prefix)
                    remove_jobs(
                        jenkins, binary_job_prefix, excluded_job_names,
                        dry_run=dry_run)
                else:
                    binary_key = 'binary_%s_%s_%s' % \
                        (os_name, os_code_name, arch)
                    groovy_data['job_prefixes_and_names'][binary_key] = \
                        (binary_job_prefix, excluded_job_names)

        # delete obsolete source jobs
        # requires knowledge about all other release build files
        # since source jobs are shared between them
        for os_name, os_code_name in platforms:
            other_source_job_names = []
            # get source job names for all other release build files
            for other_release_build_name in [
                    k for k in build_files.keys()
                    if k != release_build_name]:
                other_build_file = build_files[other_release_build_name]
                other_dist_file = get_distribution_file(
                    index, rosdistro_name, other_build_file)
                if not other_dist_file:
                    continue

                if os_name not in other_build_file.targets or \
                        os_code_name not in \
                        other_build_file.targets[os_name]:
                    continue

                if other_build_file.skip_ignored_packages:
                    filtered_pkg_names = other_build_file.filter_packages(
                        pkg_names)
                else:
                    filtered_pkg_names = pkg_names
                for pkg_name in sorted(filtered_pkg_names):
                    pkg = other_dist_file.release_packages[pkg_name]
                    repo_name = pkg.repository_name
                    repo = other_dist_file.repositories[repo_name]
                    if not repo.release_repository:
                        continue
                    if not repo.release_repository.version:
                        continue

                    other_job_name = get_sourcedeb_job_name(
                        rosdistro_name, other_release_build_name,
                        pkg_name, os_name, os_code_name)
                    other_source_job_names.append(other_job_name)

            source_view_prefix = get_release_source_view_name(
                rosdistro_name, os_name, os_code_name)
            source_job_prefix = '%s__' % source_view_prefix
            excluded_job_names = set([
                j for j in (all_source_job_names + other_source_job_names)
                if j.startswith(source_job_prefix)])
            if groovy_script is None:
                print("Removing obsolete source jobs with prefix '%s'" %
                      source_job_prefix)
                remove_jobs(
                    jenkins, source_job_prefix, excluded_job_names,
                    dry_run=dry_run)
            else:
                source_key = 'source_%s_%s' % (os_name, os_code_name)
                groovy_data['job_prefixes_and_names'][source_key] = (
                    source_job_prefix, excluded_job_names)

    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d views and %d jobs" %
            (groovy_script, len(all_view_configs), len(all_job_configs)))
        content = expand_template(
            'snippet/reconfigure_jobs.groovy.em', groovy_data)
        write_groovy_script_and_configs(
            groovy_script, content, all_job_configs,
            view_configs=all_view_configs)
def configure_devel_job(
        config_url, rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None):
    """
    Configure a single Jenkins devel job for one repository and target.

    Missing context objects (config, build file, rosdistro index,
    distribution file/cache, Jenkins connection, view) are loaded lazily.
    On a failed validation a human-readable error string is returned;
    on success the job is configured and nothing is returned.
    """
    # lazily resolve every piece of context the caller did not supply
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        source_build_files = get_source_build_files(config, rosdistro_name)
        build_file = source_build_files[source_build_name]
    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    # validate the repository against the (filtered) distribution file
    allowed_repo_names = build_file.filter_repositories(
        dist_file.repositories.keys())
    if repo_name not in allowed_repo_names:
        choices = ', '.join(sorted(allowed_repo_names))
        return "Invalid repository name '%s' " % repo_name + \
            'choose one of the following: ' + choices

    repo = dist_file.repositories[repo_name]
    if not repo.source_repository:
        return "Repository '%s' has no source section" % repo_name
    if not repo.source_repository.version:
        return "Repository '%s' has no source version" % repo_name

    # validate the requested target against the build file
    if os_name not in build_file.targets.keys():
        return "Invalid OS name '%s' " % os_name + \
            'choose one of the following: ' + \
            ', '.join(sorted(build_file.targets.keys()))
    if os_code_name not in build_file.targets[os_name].keys():
        return "Invalid OS code name '%s' " % os_code_name + \
            'choose one of the following: ' + \
            ', '.join(sorted(build_file.targets[os_name].keys()))
    if arch not in build_file.targets[os_name][os_code_name]:
        return "Invalid architecture '%s' " % arch + \
            'choose one of the following: ' + \
            ', '.join(sorted(
                build_file.targets[os_name][os_code_name]))

    # the distribution cache is only needed to notify maintainers
    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        devel_view_name = get_devel_view_name(
            rosdistro_name, source_build_name)
        configure_devel_view(jenkins, devel_view_name)

    devel_job_name = get_devel_job_name(
        rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch)
    devel_job_config = _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, repo.source_repository,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, devel_job_name, devel_job_config)
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name, groovy_script=None,
        dry_run=False, whitelist_package_names=None):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally a job to import Debian packages into the Debian repository is
    created.

    :param groovy_script: when given, no live Jenkins connection is made;
        instead all job/view configurations are collected and written out as
        a groovy reconfiguration script plus config files
    :param dry_run: forwarded to all job/view configuration helpers
    :param whitelist_package_names: when given, only these packages are
        configured and obsolete-job cleanup is skipped entirely
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets: flatten the build file's os_name -> os_code_name matrix
    platforms = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            platforms.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in platforms:
        print(' - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return
    pkg_names = dist_file.release_packages.keys()
    filtered_pkg_names = build_file.filter_packages(pkg_names)
    # Packages removed by the build file's white-/blacklist.
    explicitly_ignored_pkg_names = set(pkg_names) - set(filtered_pkg_names)
    if explicitly_ignored_pkg_names:
        print(('The following packages are being %s because of ' +
               'white-/blacklisting:') %
              ('ignored' if build_file.skip_ignored_packages else 'disabled'))
        for pkg_name in sorted(explicitly_ignored_pkg_names):
            print(' -', pkg_name)

    dist_cache = get_distribution_cache(index, rosdistro_name)

    if explicitly_ignored_pkg_names:
        # get direct dependencies from distro cache for each package
        direct_dependencies = {}
        for pkg_name in pkg_names:
            direct_dependencies[pkg_name] = _get_direct_dependencies(
                pkg_name, dist_cache, pkg_names) or set([])

        # find recursive downstream deps for all explicitly ignored packages
        # (fixed-point iteration: keep expanding until no new names appear)
        ignored_pkg_names = set(explicitly_ignored_pkg_names)
        while True:
            implicitly_ignored_pkg_names = _get_downstream_package_names(
                ignored_pkg_names, direct_dependencies)
            if implicitly_ignored_pkg_names - ignored_pkg_names:
                ignored_pkg_names |= implicitly_ignored_pkg_names
                continue
            break
        implicitly_ignored_pkg_names = \
            ignored_pkg_names - explicitly_ignored_pkg_names

        if implicitly_ignored_pkg_names:
            print(('The following packages are being %s because their ' +
                   'dependencies are being ignored:') %
                  ('ignored' if build_file.skip_ignored_packages
                   else 'disabled'))
            for pkg_name in sorted(implicitly_ignored_pkg_names):
                print(' -', pkg_name)
            filtered_pkg_names = \
                set(filtered_pkg_names) - implicitly_ignored_pkg_names

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    # (jenkins == False is the sentinel for "collect configs for groovy")
    jenkins = False
    if groovy_script is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)

    all_view_configs = {}
    all_job_configs = OrderedDict()

    job_name, job_config = configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins,
        dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    job_name, job_config = configure_sync_packages_to_main_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins,
        dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    for os_name, os_code_name in platforms:
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            job_name, job_config = configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins,
                dry_run=dry_run)
            if not jenkins:
                all_job_configs[job_name] = job_config

    # Views: one 'source' pseudo-arch view plus one per real architecture.
    targets = []
    for os_name, os_code_name in platforms:
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
    views = configure_release_views(
        jenkins, rosdistro_name, release_build_name, targets,
        dry_run=dry_run)
    if not jenkins:
        all_view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(views),
    }

    # binary jobs must be generated in topological order
    from catkin_pkg.package import parse_package_string
    from ros_buildfarm.common import topological_order_packages
    pkgs = {}
    for pkg_name in pkg_names:
        if pkg_name not in dist_cache.release_package_xmls:
            print("Skipping package '%s': no released package.xml in cache" %
                  (pkg_name), file=sys.stderr)
            continue
        pkg_xml = dist_cache.release_package_xmls[pkg_name]
        pkg = parse_package_string(pkg_xml)
        pkgs[pkg_name] = pkg
    ordered_pkg_tuples = topological_order_packages(pkgs)

    # Needed below to detect platforms shared with other release build files.
    other_build_files = [
        v for k, v in build_files.items() if k != release_build_name]

    all_source_job_names = []
    all_binary_job_names = []
    for pkg_name in [p.name for _, p in ordered_pkg_tuples]:
        if whitelist_package_names:
            if pkg_name not in whitelist_package_names:
                print(
                    "Skipping package '%s' not in the explicitly passed list" %
                    pkg_name, file=sys.stderr)
                continue

        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        # Ignored packages either get skipped or configured as disabled jobs.
        is_disabled = pkg_name not in filtered_pkg_names
        if is_disabled and build_file.skip_ignored_packages:
            print("Skipping ignored package '%s' in repository '%s'" %
                  (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name), file=sys.stderr)
            continue

        for os_name, os_code_name in platforms:
            other_build_files_same_platform = []
            for other_build_file in other_build_files:
                if os_name not in other_build_file.targets:
                    continue
                if os_code_name not in other_build_file.targets[os_name]:
                    continue
                other_build_files_same_platform.append(other_build_file)

            try:
                source_job_names, binary_job_names, job_configs = \
                    configure_release_job(
                        config_url, rosdistro_name, release_build_name,
                        pkg_name, os_name, os_code_name,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins, views=views,
                        generate_import_package_job=False,
                        generate_sync_packages_jobs=False,
                        is_disabled=is_disabled,
                        other_build_files_same_platform=other_build_files_same_platform,
                        groovy_script=groovy_script,
                        dry_run=dry_run)
                all_source_job_names += source_job_names
                all_binary_job_names += binary_job_names
                if groovy_script is not None:
                    print('Configuration for jobs: ' +
                          ', '.join(source_job_names + binary_job_names))
                    for source_job_name in source_job_names:
                        all_job_configs[source_job_name] = job_configs[
                            source_job_name]
                    for binary_job_name in binary_job_names:
                        all_job_configs[binary_job_name] = job_configs[
                            binary_job_name]
            except JobValidationError as e:
                # Invalid combinations are reported but don't abort the batch.
                print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(all_job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    # with an explicit list of packages we don't delete obsolete jobs
    if not whitelist_package_names:
        # delete obsolete binary jobs
        for os_name, os_code_name in platforms:
            for arch in build_file.targets[os_name][os_code_name]:
                binary_view = get_release_binary_view_name(
                    rosdistro_name, release_build_name,
                    os_name, os_code_name, arch)
                binary_job_prefix = '%s__' % binary_view
                # Jobs we just (re)configured must survive the cleanup.
                excluded_job_names = set([
                    j for j in all_binary_job_names
                    if j.startswith(binary_job_prefix)
                ])
                if groovy_script is None:
                    print("Removing obsolete binary jobs with prefix '%s'" %
                          binary_job_prefix)
                    from ros_buildfarm.jenkins import remove_jobs
                    remove_jobs(
                        jenkins, binary_job_prefix, excluded_job_names,
                        dry_run=dry_run)
                else:
                    binary_key = 'binary_%s_%s_%s' % \
                        (os_name, os_code_name, arch)
                    groovy_data['job_prefixes_and_names'][binary_key] = \
                        (binary_job_prefix, excluded_job_names)

        # delete obsolete source jobs
        # requires knowledge about all other release build files
        for os_name, os_code_name in platforms:
            other_source_job_names = []
            # get source job names for all other release build files
            for other_release_build_name in [
                    k for k in build_files.keys()
                    if k != release_build_name]:
                other_build_file = build_files[other_release_build_name]
                other_dist_file = get_distribution_file(
                    index, rosdistro_name, other_build_file)
                if not other_dist_file:
                    continue
                if os_name not in other_build_file.targets or \
                        os_code_name not in other_build_file.targets[os_name]:
                    continue
                if other_build_file.skip_ignored_packages:
                    filtered_pkg_names = other_build_file.filter_packages(
                        pkg_names)
                else:
                    filtered_pkg_names = pkg_names
                for pkg_name in sorted(filtered_pkg_names):
                    pkg = other_dist_file.release_packages[pkg_name]
                    repo_name = pkg.repository_name
                    repo = other_dist_file.repositories[repo_name]
                    if not repo.release_repository:
                        continue
                    if not repo.release_repository.version:
                        continue
                    other_job_name = get_sourcedeb_job_name(
                        rosdistro_name, other_release_build_name,
                        pkg_name, os_name, os_code_name)
                    other_source_job_names.append(other_job_name)

            source_view_prefix = get_release_source_view_name(
                rosdistro_name, os_name, os_code_name)
            source_job_prefix = '%s__' % source_view_prefix
            excluded_job_names = set([
                j for j in (all_source_job_names + other_source_job_names)
                if j.startswith(source_job_prefix)
            ])
            if groovy_script is None:
                print("Removing obsolete source jobs with prefix '%s'" %
                      source_job_prefix)
                from ros_buildfarm.jenkins import remove_jobs
                remove_jobs(
                    jenkins, source_job_prefix, excluded_job_names,
                    dry_run=dry_run)
            else:
                source_key = 'source_%s_%s' % (os_name, os_code_name)
                groovy_data['job_prefixes_and_names'][source_key] = (
                    source_job_prefix, excluded_job_names)

    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d views and %d jobs" %
            (groovy_script, len(all_view_configs), len(all_job_configs)))
        content = expand_template(
            'snippet/reconfigure_jobs.groovy.em', groovy_data)
        write_groovy_script_and_configs(
            groovy_script, content, all_job_configs,
            view_configs=all_view_configs)
def configure_devel_job(
        config_url, rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None):
    """
    Configure a single Jenkins devel job.

    This includes the following steps:
    - clone the source repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the release/run_devel_job.py script

    Invalid repository/target combinations raise JobValidationError.
    The optional keyword arguments let batch callers pass pre-fetched
    resources (config, build file, index, distribution file/cache,
    Jenkins connection, view) to avoid repeated lookups.

    :return: the configured job's name
    :raises JobValidationError: on invalid repo, target, or missing
        source section/version
    """
    # Lazily resolve everything the caller did not pre-fetch.
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_source_build_files(config, rosdistro_name)
        build_file = build_files[source_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    # Only repositories passing the build file's white-/blacklist are valid.
    repo_names = dist_file.repositories.keys()
    repo_names = build_file.filter_repositories(repo_names)

    if repo_name not in repo_names:
        raise JobValidationError(
            "Invalid repository name '%s' " % repo_name +
            'choose one of the following: %s' % ', '.join(sorted(repo_names)))

    repo = dist_file.repositories[repo_name]

    if not repo.source_repository:
        raise JobValidationError(
            "Repository '%s' has no source section" % repo_name)
    if not repo.source_repository.version:
        raise JobValidationError(
            "Repository '%s' has no source version" % repo_name)

    # Validate the requested target against the build file's target matrix.
    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' % ', '.join(sorted(
                build_file.targets[os_name][os_code_name])))

    # The distribution cache is only needed to notify maintainers.
    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_devel_view_name(rosdistro_name, source_build_name)
        configure_devel_view(jenkins, view_name)

    job_name = get_devel_job_name(
        rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch)

    job_config = _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, repo.source_repository,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero,
    # so compare identity against the False sentinel instead of truthiness.
    # (The previous `isinstance(jenkins, object)` guard was removed: every
    # Python value is an instance of object, so it was always True.)
    if jenkins is not False:
        configure_job(jenkins, job_name, job_config)

    return job_name
def _configure_ci_jobs(
        config, build_files, config_url, rosdistro_name, ci_build_name,
        groovy_script=None, dry_run=False):
    """
    Configure all Jenkins CI jobs for a specific CI build name.

    One CI job is configured per (os_name, os_code_name, arch) target in
    the build file.  When ``groovy_script`` is given, no live Jenkins
    connection is made; the collected job/view configurations are written
    out as a groovy reconfiguration script instead.
    """
    build_file = build_files[ci_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets: flatten the os_name -> os_code_name -> arch matrix
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print("The build file '%s' contains the following targets:" %
          ci_build_name)
    for os_name, os_code_name, arch in targets:
        print(' -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    ci_view_name = get_ci_view_name(rosdistro_name)

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    # (jenkins == False is the sentinel for "collect configs for groovy")
    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url) if groovy_script is None else False

    view_configs = {}
    views = {
        ci_view_name: configure_ci_view(
            jenkins, ci_view_name, dry_run=dry_run)
    }
    if not jenkins:
        view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(view_configs),
    }

    ci_job_names = []
    job_configs = OrderedDict()

    is_disabled = False
    for os_name, os_code_name, arch in targets:
        try:
            job_name, job_config = configure_ci_job(
                config_url, rosdistro_name, ci_build_name,
                os_name, os_code_name, arch,
                config=config, build_file=build_file,
                index=index, dist_file=dist_file,
                jenkins=jenkins, views=views,
                is_disabled=is_disabled,
                groovy_script=groovy_script,
                dry_run=dry_run,
                trigger_timer=build_file.jenkins_job_schedule)
            ci_job_names.append(job_name)
            if groovy_script is not None:
                print("Configuration for job '%s'" % job_name)
                job_configs[job_name] = job_config
        except JobValidationError as e:
            # Invalid targets are reported but don't abort the batch.
            print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d jobs" %
            (groovy_script, len(job_configs)))
        content = expand_template(
            'snippet/reconfigure_jobs.groovy.em', groovy_data)
        write_groovy_script_and_configs(
            groovy_script, content, job_configs, view_configs)
def main(argv=sys.argv[1:]):
    """
    Generate all management jobs on the Jenkins master.

    Iterates over every (or a selected subset of) ROS distribution in the
    buildfarm index and (re)generates cache, status-page, maintenance and
    documentation jobs.  Without ``--commit`` everything runs as a dry run.
    """
    # NOTE(review): the default `argv=sys.argv[1:]` is evaluated once at
    # import time; this matches the convention used by the other main()
    # entry points in this codebase.
    parser = argparse.ArgumentParser(
        description='Generate all jobs on Jenkins')
    add_argument_config_url(parser)
    parser.add_argument(
        '--ros-distro-names', nargs='*', metavar='ROS_DISTRO_NAME',
        default=[],
        help='The list of ROS distribution names if not generating all')
    parser.add_argument(
        '--skip-rosdistro-cache-job', action='store_true',
        help='Skip generating the rosdistro-cache jobs')
    parser.add_argument(
        '--commit', action='store_true',
        help='Apply the changes to Jenkins instead of only showing them')
    args = parser.parse_args(argv)

    if args.commit:
        print('The following changes will be applied to the Jenkins server.')
    else:
        print('This is a dry run. The Jenkins configuration is not changed.')
    print('')

    config = get_index(args.config_url)
    ros_distro_names = sorted(config.distributions.keys())

    # Reject distro names not present in the buildfarm index up front.
    invalid_ros_distro_name = [
        n for n in args.ros_distro_names if n not in ros_distro_names]
    if invalid_ros_distro_name:
        parser.error(
            'The following ROS distribution names are not part of the ' +
            'buildfarm index: ' + ', '.join(sorted(invalid_ros_distro_name)))

    # try to connect to Jenkins master
    jenkins = connect(config.jenkins_url)

    configure_view(
        jenkins, 'Queue', filter_queue=False, dry_run=not args.commit)

    generate_check_agents_job(args.config_url, dry_run=not args.commit)

    # Distro-independent jobs are only generated when no subset was selected.
    if not args.ros_distro_names:
        generate_dashboard_job(args.config_url, dry_run=not args.commit)
        for doc_build_name in sorted(config.doc_builds.keys()):
            generate_doc_independent_job(
                args.config_url, doc_build_name, dry_run=not args.commit)

    selected_ros_distro_names = [
        n for n in ros_distro_names
        if not args.ros_distro_names or n in args.ros_distro_names]
    for ros_distro_name in selected_ros_distro_names:
        print(ros_distro_name)

        if not args.skip_rosdistro_cache_job:
            generate_rosdistro_cache_job(
                args.config_url, ros_distro_name, dry_run=not args.commit)

        generate_failing_jobs_job(
            args.config_url, ros_distro_name, dry_run=not args.commit)

        release_build_files = get_release_build_files(config, ros_distro_name)
        for release_build_name in release_build_files.keys():
            generate_release_status_page_job(
                args.config_url, ros_distro_name, release_build_name,
                dry_run=not args.commit)
            generate_release_maintenance_jobs(
                args.config_url, ros_distro_name, release_build_name,
                dry_run=not args.commit)

        source_build_files = get_source_build_files(config, ros_distro_name)
        for source_build_name in source_build_files.keys():
            generate_devel_maintenance_jobs(
                args.config_url, ros_distro_name, source_build_name,
                dry_run=not args.commit)

        # Doc build files dispatch on their documentation type.
        doc_build_files = get_doc_build_files(config, ros_distro_name)
        for doc_build_name, doc_build_file in doc_build_files.items():
            if doc_build_file.documentation_type == DOC_TYPE_ROSDOC:
                generate_doc_maintenance_jobs(
                    args.config_url, ros_distro_name, doc_build_name,
                    dry_run=not args.commit)
            elif doc_build_file.documentation_type == DOC_TYPE_MANIFEST:
                generate_doc_metadata_job(
                    args.config_url, ros_distro_name, doc_build_name,
                    dry_run=not args.commit)
            else:
                assert False, ("Unknown documentation type '%s' in doc " +
                               "build file '%s'") % \
                    (doc_build_file.documentation_type, doc_build_name)

        generate_repos_status_page_jobs(
            args.config_url, ros_distro_name, dry_run=not args.commit)

        index = ros_distro_names.index(ros_distro_name)
        if index > 0:
            # generate compare pages for this rosdistro against all older ones
            generate_release_compare_page_job(
                args.config_url, ros_distro_name, ros_distro_names[:index],
                dry_run=not args.commit)

        generate_blocked_releases_page_job(
            args.config_url, ros_distro_name, dry_run=not args.commit)
def main(argv=None):
    """
    Create the ros2 CI, packaging, nightly and launcher jobs on Jenkins.

    A shared ``data`` dict supplies job-template defaults; ``os_configs``
    and ``os_config_overrides`` layer per-OS settings on top.  Without
    ``--commit`` all jobs are configured in dry-run mode.
    """
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Creates the ros2 jobs on Jenkins")
    parser.add_argument(
        '--jenkins-url', '-u', default='https://ci.ros2.org',
        help="Url of the jenkins server to which the job should be added")
    parser.add_argument(
        '--ci-scripts-repository', default='[email protected]:ros2/ci.git',
        help="repository from which ci scripts should be cloned")
    parser.add_argument(
        '--ci-scripts-default-branch', default='master',
        help="default branch of the ci repository to get ci scripts from (this is a job parameter)")
    parser.add_argument(
        '--commit', action='store_true',
        help='Actually modify the Jenkins jobs instead of only doing a dry run',
    )
    args = parser.parse_args(argv)

    # Template defaults shared by every generated job; individual jobs
    # override entries via os_configs / create_job's additional_dict.
    data = {
        'build_discard': {
            'days_to_keep': 1000,
            'num_to_keep': 3000
        },
        'ci_scripts_repository': args.ci_scripts_repository,
        'ci_scripts_default_branch': args.ci_scripts_default_branch,
        'default_repos_url': DEFAULT_REPOS_URL,
        'supplemental_repos_url': '',
        'time_trigger_spec': '',
        'mailer_recipients': '',
        'ignore_rmw_default': {
            'rmw_connext_dynamic_cpp',
            'rmw_cyclonedds_cpp',
            'rmw_fastrtps_dynamic_cpp',
            'rmw_opensplice_cpp'},
        'use_connext_debs_default': 'false',
        'use_isolated_default': 'true',
        'colcon_mixin_url': 'https://raw.githubusercontent.com/colcon/colcon-mixin-repository/master/index.yaml',
        'build_args_default': '--event-handlers console_cohesion+ console_package_list+ --cmake-args -DINSTALL_EXAMPLES=OFF -DSECURITY=ON',
        'test_args_default': '--event-handlers console_direct+ --executor sequential --retest-until-pass 10',
        'compile_with_clang_default': 'false',
        'enable_c_coverage_default': 'false',
        'dont_notify_every_unstable_build': 'false',
        'turtlebot_demo': False,
        'build_timeout_mins': 0,
        'ubuntu_distro': 'bionic',
    }

    jenkins = connect(args.jenkins_url)

    # Per-OS settings layered over `data` (applied in create_job below).
    os_configs = {
        'linux': {
            'label_expression': 'linux',
            'shell_type': 'Shell',
        },
        'osx': {
            'label_expression': 'macos',
            'shell_type': 'Shell',
            # the current OS X agent can't handle git@github urls
            'ci_scripts_repository': args.ci_scripts_repository.replace(
                '[email protected]:', 'https://github.com/'),
        },
        'windows': {
            'label_expression': 'windows',
            'shell_type': 'BatchFile',
            'use_isolated_default': 'false',
        },
        'linux-aarch64': {
            'label_expression': 'linux_aarch64',
            'shell_type': 'Shell',
            'ignore_rmw_default': data['ignore_rmw_default'] | {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'},
        },
        'linux-armhf': {
            'label_expression': 'linux_armhf',
            'shell_type': 'Shell',
            'ignore_rmw_default': data['ignore_rmw_default'] | {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'},
            'build_args_default': data['build_args_default'].replace(
                '--cmake-args',
                '--cmake-args -DCMAKE_CXX_FLAGS=-Wno-psabi -DCMAKE_C_FLAGS=-Wno-psabi -DDISABLE_SANITIZERS=ON'),
        },
        'linux-centos': {
            'label_expression': 'linux',
            'shell_type': 'Shell',
            'build_args_default': '--packages-skip-by-dep image_tools ros1_bridge --packages-skip image_tools ros1_bridge ' + data['build_args_default'].replace(
                '--cmake-args',
                '--cmake-args -DCMAKE_POLICY_DEFAULT_CMP0072=NEW -DPYTHON_VERSION=3.6 -DDISABLE_SANITIZERS=ON'),
            'test_args_default': '--packages-skip-by-dep image_tools ros1_bridge --packages-skip image_tools ros1_bridge ' + data['test_args_default'],
        },
    }

    # Applied last in create_job, so these win over additional_dict too.
    os_config_overrides = {
        'linux-centos': {
            'mixed_overlay_pkgs': '',
            'ignore_rmw_default': {
                'rmw_connext_cpp', 'rmw_connext_dynamic_cpp',
                'rmw_opensplice_cpp'},
            'use_connext_debs_default': 'false',
        },
    }

    # OSes excluded from the aggregated ci_launcher job at the bottom.
    launcher_exclude = {
        'linux-armhf',
        'linux-centos',
    }

    jenkins_kwargs = {}
    if not args.commit:
        jenkins_kwargs['dry_run'] = True

    def create_job(os_name, job_name, template_file, additional_dict):
        # Merge order: data < os_configs < additional_dict < overrides.
        job_data = dict(data)
        job_data['os_name'] = os_name
        job_data.update(os_configs[os_name])
        job_data.update(additional_dict)
        job_data.update(os_config_overrides.get(os_name, {}))
        job_config = expand_template(template_file, job_data)
        configure_job(jenkins, job_name, job_config, **jenkins_kwargs)

    # configure os specific jobs
    for os_name in sorted(os_configs.keys()):
        # We need the keep the paths short on Windows, so on that platform make
        # the os_name shorter just for the jobs
        job_os_name = os_name
        if os_name == 'windows':
            job_os_name = 'win'

        # configure manual triggered job
        create_job(os_name, 'ci_' + os_name, 'ci_job.xml.em', {
            'cmake_build_type': 'None',
        })
        # configure test jobs for experimenting with job config changes
        # Keep parameters the same as the manual triggered job above.
        create_job(os_name, 'test_ci_' + os_name, 'ci_job.xml.em', {
            'cmake_build_type': 'None',
        })
        # configure a manual version of the packaging job
        create_job(
            os_name, 'ci_packaging_' + os_name, 'packaging_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 180,
                    'num_to_keep': 100,
                },
                'cmake_build_type': 'RelWithDebInfo',
                'mixed_overlay_pkgs': 'ros1_bridge',
                'ignore_rmw_default':
                    {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'}
                    if os_name in ['linux-aarch64', 'linux-armhf'] else set(),
                'use_connext_debs_default': 'true',
            })
        # configure packaging job
        create_job(
            os_name, 'packaging_' + os_name, 'packaging_job.xml.em', {
                'build_discard': {
                    'days_to_keep': 370,
                    'num_to_keep': 370,
                },
                'cmake_build_type': 'RelWithDebInfo',
                'mixed_overlay_pkgs': 'ros1_bridge',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'ignore_rmw_default':
                    {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'}
                    if os_name in ['linux-aarch64', 'linux-armhf'] else set(),
                'use_connext_debs_default': 'true',
            })
        # create a nightly Debug packaging job on Windows
        if os_name == 'windows':
            create_job(
                os_name, 'packaging_' + os_name + '_debug',
                'packaging_job.xml.em', {
                    'build_discard': {
                        'days_to_keep': 370,
                        'num_to_keep': 370,
                    },
                    'cmake_build_type': 'Debug',
                    'mixed_overlay_pkgs': 'ros1_bridge',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                    'ignore_rmw_default':
                        {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'}
                        if os_name in ['linux-aarch64', 'linux-armhf']
                        else set(),
                    'use_connext_debs_default': 'true',
                })
        # configure nightly triggered job
        job_name = 'nightly_' + job_os_name + '_debug'
        if os_name == 'windows':
            # keep Windows job names (and thus workspace paths) short
            job_name = job_name[:15]
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Debug',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
            })
        # configure nightly job for testing with address sanitizer on linux
        if os_name == 'linux':
            asan_build_args = data['build_args_default'].replace(
                '--cmake-args',
                '--cmake-args -DOSRF_TESTING_TOOLS_CPP_DISABLE_MEMORY_TOOLS=ON') + \
                ' --mixin asan-gcc --packages-up-to rcpputils'
            create_job(
                os_name, 'nightly_{}_address_sanitizer'.format(os_name),
                'ci_job.xml.em', {
                    'cmake_build_type': 'Debug',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients':
                        DEFAULT_MAIL_RECIPIENTS + ' [email protected]',
                    'build_args_default': asan_build_args,
                    'test_args_default': (
                        '--event-handlers console_direct+ --executor sequential '
                        '--retest-until-pass 10 --packages-up-to rcpputils'),
                })
        # configure nightly job for compiling with clang+libcxx on linux
        if os_name == 'linux':
            # Set the logging implementation to noop because log4cxx will not link properly when using libcxx.
            clang_libcxx_build_args = data['build_args_default'].replace(
                '--cmake-args',
                '--cmake-args -DRCL_LOGGING_IMPLEMENTATION=rcl_logging_noop') + \
                ' --mixin clang-libcxx'
            create_job(
                os_name, 'nightly_' + os_name + '_clang_libcxx',
                'ci_job.xml.em', {
                    'cmake_build_type': 'Debug',
                    'compile_with_clang_default': 'true',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients':
                        DEFAULT_MAIL_RECIPIENTS + ' [email protected]',
                    'build_args_default': clang_libcxx_build_args,
                    # Only running test from the lowest-level C package to ensure "working" binaries are generated.
                    # We do not want to test more than this as we observe issues with the clang libcxx standard library
                    # we don't plan to tackle for now. The important part of this nightly is to make sure the code compiles
                    # without emitting thread-safety related warnings.
                    'test_args_default':
                        '--event-handlers console_direct+ --executor sequential --packages-select rcutils',
                })
        # configure nightly job for testing rmw/rcl based packages with thread sanitizer on linux
        if os_name == 'linux':
            tsan_build_args = data['build_args_default'].replace(
                '--cmake-args',
                '--cmake-args -DOSRF_TESTING_TOOLS_CPP_DISABLE_MEMORY_TOOLS=ON') + \
                ' --mixin tsan --packages-up-to rcpputils rcutils'
            create_job(
                os_name, 'nightly_' + os_name + '_thread_sanitizer',
                'ci_job.xml.em', {
                    'cmake_build_type': 'Debug',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients':
                        DEFAULT_MAIL_RECIPIENTS + ' [email protected]',
                    'build_args_default': tsan_build_args,
                    'test_args_default': (
                        '--event-handlers console_direct+ --executor sequential '
                        '--retest-until-pass 10 --packages-select rcpputils rcutils'),
                })
        # configure a manually triggered version of the coverage job
        if os_name == 'linux':
            create_job(
                os_name, 'ci_' + os_name + '_coverage', 'ci_job.xml.em', {
                    'cmake_build_type': 'Debug',
                    'enable_c_coverage_default': 'true',
                    'build_args_default':
                        data['build_args_default'] +
                        ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp',
                    'test_args_default':
                        data['test_args_default'] +
                        ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp',
                })
            create_job(
                os_name, 'test_' + os_name + '_coverage', 'ci_job.xml.em', {
                    'cmake_build_type': 'Debug',
                    'enable_c_coverage_default': 'true',
                    'build_args_default':
                        data['build_args_default'] +
                        ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp',
                    'test_args_default':
                        data['test_args_default'] +
                        ' --packages-skip qt_gui_cpp --packages-skip-by-dep qt_gui_cpp',
                })
        # configure nightly coverage job on x86 Linux only
        if os_name == 'linux':
            create_job(
                os_name, 'nightly_' + os_name + '_coverage', 'ci_job.xml.em', {
                    'cmake_build_type': 'Debug',
                    'enable_c_coverage_default': 'true',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                })
        # configure nightly triggered job using opensplice
        job_name = 'nightly_' + job_os_name + '_extra_rmw' + '_release'
        if os_name == 'windows':
            job_name = job_name[:25]
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Release',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'ignore_rmw_default': {'rmw_connext_cpp', 'rmw_connext_dynamic_cpp'},
            })
        # configure nightly triggered job
        job_name = 'nightly_' + job_os_name + '_release'
        if os_name == 'windows':
            job_name = job_name[:15]
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'Release',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
            })
        # configure nightly triggered job with repeated testing
        job_name = 'nightly_' + job_os_name + '_repeated'
        if os_name == 'windows':
            job_name = job_name[:15]
        test_args_default = os_configs.get(os_name, data).get(
            'test_args_default', data['test_args_default'])
        test_args_default = test_args_default.replace(
            '--retest-until-pass', '--retest-until-fail'
        ) + " --ctest-args -LE linter --pytest-args -m 'not linter'"
        if job_os_name == 'linux-aarch64':
            # skipping known to be flaky tests https://github.com/ros2/rviz/issues/368
            test_args_default += ' --packages-skip rviz_common rviz_default_plugins rviz_rendering rviz_rendering_tests'
        create_job(
            os_name, job_name, 'ci_job.xml.em', {
                'cmake_build_type': 'None',
                'time_trigger_spec': PERIODIC_JOB_SPEC,
                'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                'test_args_default': test_args_default,
            })
        # configure turtlebot jobs on Linux only for now
        if os_name in ['linux', 'linux-aarch64']:
            create_job(
                os_name, 'ci_turtlebot-demo_' + os_name, 'ci_job.xml.em', {
                    'cmake_build_type': 'None',
                    'turtlebot_demo': True,
                    'supplemental_repos_url': 'https://raw.githubusercontent.com/ros2/turtlebot2_demo/master/turtlebot2_demo.repos',
                })
            create_job(
                os_name, 'nightly_turtlebot-demo_' + os_name + '_release',
                'ci_job.xml.em', {
                    'disabled': True,
                    'cmake_build_type': 'Release',
                    'turtlebot_demo': True,
                    'supplemental_repos_url': 'https://raw.githubusercontent.com/ros2/turtlebot2_demo/master/turtlebot2_demo.repos',
                    'time_trigger_spec': PERIODIC_JOB_SPEC,
                    'mailer_recipients': DEFAULT_MAIL_RECIPIENTS,
                })

    # configure the launch job
    os_specific_data = collections.OrderedDict()
    for os_name in sorted(os_configs.keys() - launcher_exclude):
        os_specific_data[os_name] = dict(data)
        os_specific_data[os_name].update(os_configs[os_name])
        os_specific_data[os_name]['job_name'] = 'ci_' + os_name
    job_data = dict(data)
    job_data['ci_scripts_default_branch'] = args.ci_scripts_default_branch
    job_data['label_expression'] = 'master'
    job_data['os_specific_data'] = os_specific_data
    job_data['cmake_build_type'] = 'None'
    job_config = expand_template('ci_launcher_job.xml.em', job_data)
    configure_job(jenkins, 'ci_launcher', job_config, **jenkins_kwargs)
def configure_doc_job(
        config_url, rosdistro_name, doc_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        is_disabled=False,
        groovy_script=None,
        doc_repository=None):
    """
    Configure a single Jenkins doc job.

    This includes the following steps:
    - clone the doc repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the run_doc_job.py script

    The ``config``, ``build_file``, ``index``, ``dist_file``, ``dist_cache``,
    ``jenkins`` and ``views`` parameters let a batch caller pass already
    fetched data so it is not re-fetched for every job; each is looked up
    lazily here only when ``None``.

    :param repo_name: name of the repository to document; when not ``None``
      it is validated against the distribution file and its doc section
      overrides the ``doc_repository`` argument
    :param is_disabled: forwarded to the job config generation
    :param groovy_script: accepted for signature compatibility with batch
      callers; not used directly in this function body
    :return: tuple of the Jenkins job name and its XML configuration
    :raises JobValidationError: if the distribution file, repository,
      OS name, OS code name or architecture is invalid
    """
    # Lazily resolve configuration and rosdistro data not supplied by caller.
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_doc_build_files(config, rosdistro_name)
        build_file = build_files[doc_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    repo_names = dist_file.repositories.keys()

    # Validate the requested repository and require a doc section/version.
    if repo_name is not None:
        if repo_name not in repo_names:
            raise JobValidationError(
                "Invalid repository name '%s' " % repo_name +
                'choose one of the following: %s' %
                ', '.join(sorted(repo_names)))

        repo = dist_file.repositories[repo_name]
        if not repo.doc_repository:
            raise JobValidationError(
                "Repository '%s' has no doc section" % repo_name)
        if not repo.doc_repository.version:
            raise JobValidationError(
                "Repository '%s' has no doc version" % repo_name)
        # The repository's doc section takes precedence over the argument.
        doc_repository = repo.doc_repository

    # Validate the requested target against the build file's target matrix.
    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' % ', '.join(sorted(
                build_file.targets[os_name][os_code_name])))

    # The distribution cache is only needed when maintainers get notified.
    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        # No views passed in by a batch caller: ensure the doc view exists.
        view_name = get_doc_view_name(
            rosdistro_name, doc_build_name)
        configure_doc_view(jenkins, view_name)

    job_name = get_doc_job_name(
        rosdistro_name, doc_build_name,
        repo_name, os_name, os_code_name, arch)

    job_config = _get_doc_job_config(
        config, config_url, rosdistro_name, doc_build_name,
        build_file, os_name, os_code_name, arch, doc_repository,
        repo_name, dist_cache=dist_cache, is_disabled=is_disabled)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    # NOTE: a batch caller may pass jenkins=False to skip the actual
    # reconfiguration and only collect the generated job config.
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config)

    return job_name, job_config
def configure_devel_jobs(
        config_url, rosdistro_name, source_build_name,
        groovy_script=None):
    """
    Configure all Jenkins devel jobs.

    L{configure_release_job} will be invoked for source repository and target
    which matches the build file criteria.

    For every repository in the distribution file that has a source section
    (and passes the build file's test_commits / test_pull_requests settings)
    a 'commit' and/or 'pull_request' job is configured for every target
    (os_name, os_code_name, arch) listed in the build file.

    :param groovy_script: when not ``None``, jobs are not configured via the
      Jenkins API; instead a groovy reconfiguration script and the job
      configs are written to this path
    """
    config = get_config_index(config_url)
    build_files = get_source_build_files(config, rosdistro_name)
    build_file = build_files[source_build_name]

    index = get_index(config.rosdistro_index_url)

    # The distribution cache is only fetched when maintainers get notified.
    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print(' -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    # Separate views for commit-triggered and pull-request-triggered jobs.
    devel_view_name = get_devel_view_name(
        rosdistro_name, source_build_name, pull_request=False)
    pull_request_view_name = get_devel_view_name(
        rosdistro_name, source_build_name, pull_request=True)

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    views = []
    if build_file.test_commits_force is not False:
        views.append(configure_devel_view(jenkins, devel_view_name))
    if build_file.test_pull_requests_force is not False:
        views.append(configure_devel_view(jenkins, pull_request_view_name))

    if groovy_script is not None:
        # all further configuration will be handled by the groovy script
        jenkins = False

    repo_names = dist_file.repositories.keys()
    filtered_repo_names = build_file.filter_repositories(repo_names)

    devel_job_names = []
    pull_request_job_names = []
    job_configs = {}
    for repo_name in sorted(repo_names):
        # Repositories filtered out by the build file are configured as
        # disabled jobs unless the build file says to skip them entirely.
        is_disabled = repo_name not in filtered_repo_names
        if is_disabled and build_file.skip_ignored_repositories:
            print("Skipping ignored repository '%s'" % repo_name,
                  file=sys.stderr)
            continue

        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            print("Skipping repository '%s': no source section" % repo_name)
            continue
        if not repo.source_repository.version:
            print("Skipping repository '%s': no source version" % repo_name)
            continue

        job_types = []
        # check for testing commits
        # Precedence: build file force flag > per-repository flag >
        # build file default.
        if build_file.test_commits_force is False:
            print(("Skipping repository '%s': 'test_commits' is forced to " +
                   "false in the build file") % repo_name)
        elif repo.source_repository.test_commits is False:
            print(("Skipping repository '%s': 'test_commits' of the " +
                   "repository set to false") % repo_name)
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            print(("Skipping repository '%s': 'test_commits' defaults to " +
                   "false in the build file") % repo_name)
        else:
            job_types.append('commit')

        if not is_disabled:
            # check for testing pull requests
            # Same precedence as for commits; skip reasons are intentionally
            # not printed here (commented out).
            if build_file.test_pull_requests_force is False:
                # print(("Skipping repository '%s': 'test_pull_requests' " +
                #        "is forced to false in the build file") % repo_name)
                pass
            elif repo.source_repository.test_pull_requests is False:
                # print(("Skipping repository '%s': 'test_pull_requests' of " +
                #        "the repository set to false") % repo_name)
                pass
            elif repo.source_repository.test_pull_requests is None and \
                    not build_file.test_pull_requests_default:
                # print(("Skipping repository '%s': 'test_pull_requests' " +
                #        "defaults to false in the build file") % repo_name)
                pass
            else:
                print("Pull request job for repository '%s'" % repo_name)
                job_types.append('pull_request')

        # Configure one job per (job type, target) combination.
        for job_type in job_types:
            pull_request = job_type == 'pull_request'
            for os_name, os_code_name, arch in targets:
                try:
                    job_name, job_config = configure_devel_job(
                        config_url, rosdistro_name, source_build_name,
                        repo_name, os_name, os_code_name, arch, pull_request,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache, jenkins=jenkins, views=views,
                        is_disabled=is_disabled,
                        groovy_script=groovy_script)
                    if not pull_request:
                        devel_job_names.append(job_name)
                    else:
                        pull_request_job_names.append(job_name)
                    if groovy_script is not None:
                        print("Configuration for job '%s'" % job_name)
                        job_configs[job_name] = job_config
                except JobValidationError as e:
                    # Invalid single jobs are reported but do not abort the
                    # configuration of the remaining jobs.
                    print(e.message, file=sys.stderr)

    devel_job_prefix = '%s__' % devel_view_name
    pull_request_job_prefix = '%s__' % pull_request_view_name
    if groovy_script is None:
        # delete obsolete jobs in these views
        from ros_buildfarm.jenkins import remove_jobs
        print('Removing obsolete devel jobs')
        remove_jobs(jenkins, devel_job_prefix, devel_job_names)
        print('Removing obsolete pull request jobs')
        remove_jobs(
            jenkins, pull_request_job_prefix, pull_request_job_names)
    else:
        # Write the collected job configs plus a groovy script which applies
        # them (and removes obsolete jobs) when run on the Jenkins master.
        print(
            "Writing groovy script '%s' to reconfigure %d jobs" %
            (groovy_script, len(job_configs)))
        data = {
            'expected_num_jobs': len(job_configs),
            'job_prefixes_and_names': {
                'devel': (devel_job_prefix, devel_job_names),
                'pull_request': (
                    pull_request_job_prefix, pull_request_job_names),
            }
        }
        content = expand_template('snippet/reconfigure_jobs.groovy.em', data)
        write_groovy_script_and_configs(
            groovy_script, content, job_configs)
def trigger_release_jobs(
        config_url, rosdistro_name, release_build_name,
        missing_only, source_only, cache_dir):
    """
    Trigger the sourcedeb/binarydeb release jobs on Jenkins.

    Only Ubuntu targets are considered, and for binary jobs only the amd64
    architecture (see the TODO below).

    :param missing_only: when true, only trigger jobs whose artifact in the
      target Debian repository is missing or outdated
    :param source_only: when true, only source targets are collected
    :param cache_dir: cache directory passed to ``get_debian_repo_data``
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    Target = namedtuple('Target', 'os_name os_code_name arch')
    targets = []
    for os_name in sorted(build_file.targets.keys()):
        # Only Ubuntu targets are supported by this function.
        if os_name != 'ubuntu':
            continue
        for os_code_name in sorted(build_file.targets[os_name].keys()):
            targets.append(Target('ubuntu', os_code_name, 'source'))
            if source_only:
                continue
            for arch in sorted(
                    build_file.targets[os_name][os_code_name].keys()):
                # TODO support for non amd64 arch missing
                if arch not in ['amd64']:
                    print('Skipping arch:', arch)
                    continue
                targets.append(Target('ubuntu', os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print(' - %s %s %s' % ('ubuntu', os_code_name, arch))

    dist_file = get_distribution_file(index, rosdistro_name)

    # Repository state is only fetched when skipping up-to-date artifacts.
    repo_data = None
    if missing_only:
        repo_data = get_debian_repo_data(
            build_file.target_repository, targets, cache_dir)

    jenkins = connect(config.jenkins_url)
    jenkins_queue = jenkins.get_queue()

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    triggered_jobs = []
    skipped_jobs = []
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print((" Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name))
            continue
        if not repo.release_repository.version:
            print((" Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name))
            continue
        pkg_version = repo.release_repository.version

        debian_package_name = get_debian_package_name(rosdistro_name, pkg_name)

        for target in targets:
            # Default to the source job name; binary targets override below.
            job_name = get_sourcedeb_job_name(
                rosdistro_name, release_build_name,
                pkg_name, target.os_name, target.os_code_name)
            if target.arch != 'source':
                # binary job can be skipped if source job was triggered
                if job_name in triggered_jobs:
                    print((" Skipping binary jobs of '%s' since the source " +
                           "job was triggered") % job_name)
                    continue
                job_name = get_binarydeb_job_name(
                    rosdistro_name, release_build_name,
                    pkg_name, target.os_name, target.os_code_name,
                    target.arch)

            if repo_data:
                # check if artifact is missing
                repo_index = repo_data[target]
                if debian_package_name in repo_index:
                    version = repo_index[debian_package_name]
                    version = _strip_version_suffix(version)
                    if version == pkg_version:
                        print((" Skipping job '%s' since the artifact is " +
                               "already up-to-date") % job_name)
                        continue

            success = invoke_job(jenkins, job_name, queue=jenkins_queue)
            if success:
                triggered_jobs.append(job_name)
            else:
                skipped_jobs.append(job_name)

    print('Triggered %d jobs, skipped %d jobs.' %
          (len(triggered_jobs), len(skipped_jobs)))
def main(argv=sys.argv[1:]):
    """
    Generate all jobs on the Jenkins master.

    By default this is a dry run; pass ``--commit`` to actually apply the
    changes.  The set of ROS distributions can be restricted with
    ``--ros-distro-names``; when it is restricted, the distro-independent
    jobs (dashboard, doc-independent jobs) are not generated.

    :return: implicitly ``None``; argument errors exit via ``parser.error``
    """
    parser = argparse.ArgumentParser(
        description='Generate all jobs on Jenkins')
    add_argument_config_url(parser)
    parser.add_argument(
        '--ros-distro-names',
        nargs='*', metavar='ROS_DISTRO_NAME',
        default=[],
        help='The list of ROS distribution names if not generating all')
    parser.add_argument(
        '--skip-rosdistro-cache-job',
        action='store_true',
        help='Skip generating the rosdistro-cache jobs')
    parser.add_argument(
        '--commit',
        action='store_true',
        help='Apply the changes to Jenkins instead of only showing them')
    args = parser.parse_args(argv)

    if args.commit:
        print('The following changes will be applied to the Jenkins server.')
    else:
        print('This is a dry run. The Jenkins configuration is not changed.')
    print('')

    config = get_index(args.config_url)
    ros_distro_names = sorted(config.distributions.keys())

    # Reject distro names which are not in the buildfarm index.
    invalid_ros_distro_name = [
        n for n in args.ros_distro_names if n not in ros_distro_names]
    if invalid_ros_distro_name:
        parser.error(
            'The following ROS distribution names are not part of the ' +
            'buildfarm index: ' + ', '.join(sorted(invalid_ros_distro_name)))

    # try to connect to Jenkins master
    jenkins = connect(config.jenkins_url)

    configure_view(
        jenkins, 'Queue', filter_queue=False, dry_run=not args.commit)

    generate_check_slaves_job(args.config_url, dry_run=not args.commit)
    # Distro-independent jobs are only generated when no distro filter is set.
    if not args.ros_distro_names:
        generate_dashboard_job(args.config_url, dry_run=not args.commit)
        for doc_build_name in sorted(config.doc_builds.keys()):
            generate_doc_independent_job(
                args.config_url, doc_build_name, dry_run=not args.commit)

    selected_ros_distro_names = [
        n for n in ros_distro_names
        if not args.ros_distro_names or n in args.ros_distro_names]
    for ros_distro_name in selected_ros_distro_names:
        print(ros_distro_name)

        if not args.skip_rosdistro_cache_job:
            generate_rosdistro_cache_job(
                args.config_url, ros_distro_name, dry_run=not args.commit)

        # Per release build file: status page and maintenance jobs.
        release_build_files = get_release_build_files(config, ros_distro_name)
        for release_build_name in release_build_files.keys():
            generate_release_status_page_job(
                args.config_url, ros_distro_name, release_build_name,
                dry_run=not args.commit)
            generate_release_maintenance_jobs(
                args.config_url, ros_distro_name, release_build_name,
                dry_run=not args.commit)

        source_build_files = get_source_build_files(config, ros_distro_name)
        for source_build_name in source_build_files.keys():
            generate_devel_maintenance_jobs(
                args.config_url, ros_distro_name, source_build_name,
                dry_run=not args.commit)

        # Doc build files are dispatched by their documentation type.
        doc_build_files = get_doc_build_files(config, ros_distro_name)
        for doc_build_name, doc_build_file in doc_build_files.items():
            if doc_build_file.documentation_type == DOC_TYPE_ROSDOC:
                generate_doc_maintenance_jobs(
                    args.config_url, ros_distro_name, doc_build_name,
                    dry_run=not args.commit)
            elif doc_build_file.documentation_type == DOC_TYPE_MANIFEST:
                generate_doc_metadata_job(
                    args.config_url, ros_distro_name, doc_build_name,
                    dry_run=not args.commit)
            else:
                assert False, ("Unknown documentation type '%s' in doc " +
                               "build file '%s'") % \
                    (doc_build_file.documentation_type, doc_build_name)

        generate_repos_status_page_jobs(
            args.config_url, ros_distro_name, dry_run=not args.commit)

        # Relies on ros_distro_names being sorted: every distro after the
        # first gets compare pages against all older (preceding) distros.
        index = ros_distro_names.index(ros_distro_name)
        if index > 0:
            # Generate comparison pages for this rosdistro against all older ones.
            generate_release_compare_page_job(
                args.config_url, ros_distro_name, ros_distro_names[:index],
                dry_run=not args.commit)
def configure_devel_job(
        config_url, rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch,
        pull_request=False,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        is_disabled=False,
        groovy_script=None,
        source_repository=None,
        build_targets=None,
        dry_run=False):
    """
    Configure a single Jenkins devel job.

    This includes the following steps:
    - clone the source repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the release/run_devel_job.py script

    The ``config``, ``build_file``, ``index``, ``dist_file``, ``dist_cache``,
    ``jenkins`` and ``views`` parameters let a batch caller pass already
    fetched data so it is not re-fetched for every job; each is looked up
    lazily here only when ``None``.

    :param pull_request: configure the pull request flavor of the job
      instead of the commit-triggered one
    :param build_targets: when not ``None``, replaces ``build_file.targets``
      (note: this mutates the passed-in build_file object)
    :param dry_run: forwarded to the view/job configuration helpers
    :return: tuple of the Jenkins job name and its XML configuration
    :raises JobValidationError: if the distribution file, repository,
      OS name, OS code name or architecture is invalid
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_source_build_files(config, rosdistro_name)
        build_file = build_files[source_build_name]
    # Overwrite build_file.targets if build_targets is specified
    if build_targets is not None:
        build_file.targets = build_targets

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    repo_names = dist_file.repositories.keys()

    # Validate the requested repository and require a source section/version.
    if repo_name is not None:
        if repo_name not in repo_names:
            raise JobValidationError(
                "Invalid repository name '%s' " % repo_name +
                'choose one of the following: %s' %
                ', '.join(sorted(repo_names)))
        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            raise JobValidationError(
                "Repository '%s' has no source section" % repo_name)
        if not repo.source_repository.version:
            raise JobValidationError(
                "Repository '%s' has no source version" % repo_name)
        # The repository's source section takes precedence over the argument.
        source_repository = repo.source_repository

    # Validate the requested target against the build file's target matrix.
    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' % ', '.join(sorted(
                build_file.targets[os_name][os_code_name])))

    # The distribution cache is only needed when maintainers get notified.
    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        # No views passed in by a batch caller: ensure the devel view exists.
        view_name = get_devel_view_name(
            rosdistro_name, source_build_name, pull_request=pull_request)
        configure_devel_view(jenkins, view_name, dry_run=dry_run)

    job_name = get_devel_job_name(
        rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch, pull_request)

    job_config = _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, source_repository,
        repo_name, pull_request, job_name,
        dist_cache=dist_cache, is_disabled=is_disabled)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    # NOTE: a batch caller may pass jenkins=False to skip the actual
    # reconfiguration and only collect the generated job config.
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)

    return job_name, job_config
def configure_doc_jobs(
        config_url, rosdistro_name, doc_build_name, groovy_script=None,
        dry_run=False, whitelist_repository_names=None):
    """
    Configure all Jenkins doc jobs.

    L{configure_doc_job} will be invoked for doc repository and target
    which matches the build file criteria.

    :param groovy_script: when not ``None``, views and jobs are not
      configured via the Jenkins API; instead a groovy reconfiguration
      script, the job configs and the view configs are written to this path
    :param dry_run: forwarded to the view/job configuration helpers
    :param whitelist_repository_names: when truthy, only repositories in
      this collection are configured (and obsolete-job removal is skipped)
    """
    config = get_config_index(config_url)
    build_files = get_doc_build_files(config, rosdistro_name)
    build_file = build_files[doc_build_name]

    index = get_index(config.rosdistro_index_url)

    # The distribution cache is only fetched when maintainers get notified.
    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print(' -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    doc_view_name = get_doc_view_name(rosdistro_name, doc_build_name)

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url) if groovy_script is None else False

    view_configs = {}
    views = {}
    views[doc_view_name] = configure_doc_view(
        jenkins, doc_view_name, dry_run=dry_run)
    # In groovy-script mode (jenkins is False) collect the view configs so
    # they can be written alongside the script.
    if not jenkins:
        view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(view_configs),
    }

    repo_names = dist_file.repositories.keys()
    filtered_repo_names = build_file.filter_repositories(repo_names)

    job_names = []
    job_configs = OrderedDict()
    for repo_name in sorted(repo_names):
        # An explicit whitelist restricts which repositories get configured.
        if whitelist_repository_names:
            if repo_name not in whitelist_repository_names:
                print(
                    "Skipping repository '%s' not in explicitly passed list" %
                    repo_name, file=sys.stderr)
                continue
        # Repositories filtered out by the build file are configured as
        # disabled jobs unless the build file says to skip them entirely.
        is_disabled = repo_name not in filtered_repo_names
        if is_disabled and build_file.skip_ignored_repositories:
            print("Skipping ignored repository '%s'" % repo_name,
                  file=sys.stderr)
            continue

        repo = dist_file.repositories[repo_name]
        if not repo.doc_repository:
            print("Skipping repository '%s': no doc section" % repo_name)
            continue
        if not repo.doc_repository.version:
            print("Skipping repository '%s': no doc version" % repo_name)
            continue

        for os_name, os_code_name, arch in targets:
            try:
                job_name, job_config = configure_doc_job(
                    config_url, rosdistro_name, doc_build_name,
                    repo_name, os_name, os_code_name, arch,
                    config=config, build_file=build_file,
                    index=index, dist_file=dist_file,
                    dist_cache=dist_cache, jenkins=jenkins, views=views,
                    is_disabled=is_disabled,
                    groovy_script=groovy_script,
                    dry_run=dry_run)
                job_names.append(job_name)
                if groovy_script is not None:
                    print("Configuration for job '%s'" % job_name)
                    job_configs[job_name] = job_config
            except JobValidationError as e:
                # Invalid single jobs are reported but do not abort the
                # configuration of the remaining jobs.
                print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    job_prefix = '%s__' % doc_view_name
    # Obsolete jobs are only removed when configuring the full set of
    # repositories; a whitelist run would otherwise delete unrelated jobs.
    if not whitelist_repository_names:
        groovy_data['job_prefixes_and_names']['doc'] = (job_prefix, job_names)

        if groovy_script is None:
            # delete obsolete jobs in this view
            from ros_buildfarm.jenkins import remove_jobs
            print('Removing obsolete doc jobs')
            remove_jobs(jenkins, job_prefix, job_names, dry_run=dry_run)
    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d views and %d jobs" %
            (groovy_script, len(view_configs), len(job_configs)))
        content = expand_template(
            'snippet/reconfigure_jobs.groovy.em', groovy_data)
        write_groovy_script_and_configs(
            groovy_script, content, job_configs, view_configs=view_configs)
def trigger_release_jobs(
        config_url, rosdistro_name, release_build_name,
        missing_only, source_only, cache_dir,
        cause=None, groovy_script=None, not_failed_only=False):
    """
    Trigger the source/binary package release jobs on Jenkins.

    :param missing_only: when true, only trigger jobs whose artifact in the
      target package repository is missing or outdated
    :param source_only: when true, only source targets are collected
    :param cache_dir: cache directory passed to ``get_package_repo_data``
    :param cause: build cause forwarded to ``invoke_job``
    :param groovy_script: when not ``None``, jobs are not triggered via the
      Jenkins API; instead a groovy script triggering them is written to
      this path
    :param not_failed_only: forwarded into the groovy script data
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    targets = []
    for os_name in sorted(build_file.targets.keys()):
        for os_code_name in sorted(build_file.targets[os_name].keys()):
            targets.append(Target(os_name, os_code_name, 'source'))
            if source_only:
                continue
            for arch in sorted(
                    build_file.targets[os_name][os_code_name].keys()):
                targets.append(Target(os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print(' - %s %s %s' % (os_name, os_code_name, arch))

    dist_file = get_cached_distribution(index, rosdistro_name)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    # Repository state is only fetched when skipping up-to-date artifacts.
    repo_data = None
    if missing_only:
        repo_data = get_package_repo_data(
            build_file.target_repository, targets, cache_dir)

    # No Jenkins connection is needed in groovy-script mode.
    if groovy_script is None:
        jenkins = connect(config.jenkins_url)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    triggered_jobs = []
    skipped_jobs = []
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print((" Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name))
            continue
        if not repo.release_repository.version:
            print((" Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name))
            continue
        pkg_version = repo.release_repository.version

        debian_package_name = get_os_package_name(rosdistro_name, pkg_name)

        for target in targets:
            # Default to the source job name; binary targets override below.
            job_name = get_sourcedeb_job_name(
                rosdistro_name, release_build_name,
                pkg_name, target.os_name, target.os_code_name)
            if target.arch != 'source':
                # binary job can be skipped if source job was triggered
                if job_name in triggered_jobs:
                    print((" Skipping binary jobs of '%s' since the source " +
                           "job was triggered") % job_name)
                    continue
                job_name = get_binarydeb_job_name(
                    rosdistro_name, release_build_name,
                    pkg_name, target.os_name, target.os_code_name,
                    target.arch)

            if repo_data:
                # check if artifact is missing
                repo_index = repo_data[target]
                if debian_package_name in repo_index:
                    version = repo_index[debian_package_name]
                    version = _strip_version_suffix(version)
                    if version == pkg_version:
                        print((" Skipping job '%s' since the artifact is " +
                               "already up-to-date") % job_name)
                        continue

            if groovy_script is None:
                success = invoke_job(jenkins, job_name, cause=cause)
            else:
                # In groovy-script mode every candidate job is collected.
                success = True
            if success:
                triggered_jobs.append(job_name)
            else:
                skipped_jobs.append(job_name)

    if groovy_script is None:
        print('Triggered %d jobs, skipped %d jobs.' %
              (len(triggered_jobs), len(skipped_jobs)))
    else:
        print("Writing groovy script '%s' to trigger %d jobs" %
              (groovy_script, len(triggered_jobs)))
        data = {
            'job_names': triggered_jobs,
            'not_failed_only': not_failed_only,
        }
        content = expand_template('release/trigger_jobs.groovy.em', data)
        with open(groovy_script, 'w') as h:
            h.write(content)
def trigger_release_jobs(
        config_url, rosdistro_name, release_build_name,
        missing_only, source_only, cache_dir,
        cause=None, groovy_script=None, not_failed_only=False):
    """
    Trigger the sourcedeb/binarydeb release jobs on Jenkins.

    :param missing_only: when true, only trigger jobs whose artifact in the
      target Debian repository is missing or outdated
    :param source_only: when true, only source targets are collected
    :param cache_dir: cache directory passed to ``get_debian_repo_data``
    :param cause: build cause forwarded to ``invoke_job``
    :param groovy_script: when not ``None``, jobs are not triggered via the
      Jenkins API; instead a groovy script triggering them is written to
      this path
    :param not_failed_only: forwarded into the groovy script data
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    targets = []
    for os_name in sorted(build_file.targets.keys()):
        for os_code_name in sorted(build_file.targets[os_name].keys()):
            targets.append(Target(os_name, os_code_name, 'source'))
            if source_only:
                continue
            for arch in sorted(
                    build_file.targets[os_name][os_code_name].keys()):
                targets.append(Target(os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print(' - %s %s %s' % (os_name, os_code_name, arch))

    dist_file = get_cached_distribution(index, rosdistro_name)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    # Repository state is only fetched when skipping up-to-date artifacts.
    repo_data = None
    if missing_only:
        repo_data = get_debian_repo_data(
            build_file.target_repository, targets, cache_dir)

    # No Jenkins connection is needed in groovy-script mode.
    if groovy_script is None:
        jenkins = connect(config.jenkins_url)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    triggered_jobs = []
    skipped_jobs = []
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print((" Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name))
            continue
        if not repo.release_repository.version:
            print((" Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name))
            continue
        pkg_version = repo.release_repository.version

        debian_package_name = get_debian_package_name(rosdistro_name, pkg_name)

        for target in targets:
            # Default to the source job name; binary targets override below.
            job_name = get_sourcedeb_job_name(
                rosdistro_name, release_build_name,
                pkg_name, target.os_name, target.os_code_name)
            if target.arch != 'source':
                # binary job can be skipped if source job was triggered
                if job_name in triggered_jobs:
                    print((" Skipping binary jobs of '%s' since the source " +
                           "job was triggered") % job_name)
                    continue
                job_name = get_binarydeb_job_name(
                    rosdistro_name, release_build_name,
                    pkg_name, target.os_name, target.os_code_name,
                    target.arch)

            if repo_data:
                # check if artifact is missing
                repo_index = repo_data[target]
                if debian_package_name in repo_index:
                    version = repo_index[debian_package_name]
                    version = _strip_version_suffix(version)
                    if version == pkg_version:
                        print((" Skipping job '%s' since the artifact is " +
                               "already up-to-date") % job_name)
                        continue

            if groovy_script is None:
                success = invoke_job(jenkins, job_name, cause=cause)
            else:
                # In groovy-script mode every candidate job is collected.
                success = True
            if success:
                triggered_jobs.append(job_name)
            else:
                skipped_jobs.append(job_name)

    if groovy_script is None:
        print('Triggered %d jobs, skipped %d jobs.' %
              (len(triggered_jobs), len(skipped_jobs)))
    else:
        print("Writing groovy script '%s' to trigger %d jobs" %
              (groovy_script, len(triggered_jobs)))
        data = {
            'job_names': triggered_jobs,
            'not_failed_only': not_failed_only,
        }
        content = expand_template('release/trigger_jobs.groovy.em', data)
        with open(groovy_script, 'w') as h:
            h.write(content)