def configure_release_views(jenkins, rosdistro_name, release_build_name, targets, dry_run=False):
    """Create a Jenkins dashboard view for every release target.

    Returns a dict mapping each view name to the configured view.
    """
    from ros_buildfarm.jenkins import configure_view
    configured = {}
    for os_name, os_code_name, arch in targets:
        name = get_release_view_name(
            rosdistro_name, release_build_name, os_name, os_code_name, arch)
        # job names of 'source' targets carry no architecture component
        if arch == 'source':
            regex = '%s__.+__%s_%s__source' % (name, os_name, os_code_name)
        else:
            regex = '%s__.+__%s_%s_%s__binary' % (
                name, os_name, os_code_name, arch)
        configured[name] = configure_view(
            jenkins, name,
            include_regex=regex,
            template_name='dashboard_view_all_jobs.xml.em',
            dry_run=dry_run)
    return configured
def main(argv=sys.argv[1:]):
    """Generate the 'release' management jobs on the Jenkins master."""
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    # generate all job configurations up front
    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file)

    jenkins = connect(config.jenkins_url)
    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    group_name = get_release_view_name(
        args.rosdistro_name, args.release_build_name)

    # group-scoped management jobs
    configure_job(
        jenkins, '%s_%s' % (group_name, 'reconfigure-jobs'),
        reconfigure_jobs_job_config, view=view)
    configure_job(
        jenkins, '%s_%s' % (group_name, 'trigger-jobs'),
        trigger_jobs_job_config, view=view)
    # the import job is shared across groups, hence no group prefix
    configure_job(
        jenkins, 'import_upstream', import_upstream_job_config, view=view)
def configure_release_views(
        jenkins, rosdistro_name, release_build_name, targets):
    """Configure the per-target dashboard views for a release build.

    When more than one binary target exists an additional view aggregating
    all binary jobs is configured first.  Returns the list of views.
    """
    views = []

    binary_targets = [t for t in targets if t[2] != 'source']
    # generate view aggregating all binary views
    if len(binary_targets) > 1:
        view_prefix = get_release_binary_view_prefix(
            rosdistro_name, release_build_name)
        views.append(configure_view(
            jenkins, view_prefix,
            include_regex='%s_.+__.+' % view_prefix,
            template_name='dashboard_view_all_jobs.xml.em'))

    for os_name, os_code_name, arch in targets:
        view_name = get_release_view_name(
            rosdistro_name, release_build_name, os_name, os_code_name, arch)
        # 'source' jobs omit the architecture from their name suffix
        if arch == 'source':
            include_regex = '%s__.+__%s_%s__source' % (
                view_name, os_name, os_code_name)
        else:
            include_regex = '%s__.+__%s_%s_%s__binary' % (
                view_name, os_name, os_code_name, arch)
        views.append(configure_view(
            jenkins, view_name,
            include_regex=include_regex,
            template_name='dashboard_view_all_jobs.xml.em'))

    return views
def configure_release_views(jenkins, rosdistro_name, release_build_name, targets):
    """Set up one dashboard view per target plus an aggregate binary view.

    The aggregate view is only added when at least two non-source targets
    are present.  Returns all configured views as a list.
    """
    result = []

    # generate view aggregating all binary views
    non_source = sum(1 for target in targets if target[2] != 'source')
    if non_source > 1:
        prefix = get_release_binary_view_prefix(
            rosdistro_name, release_build_name)
        result.append(configure_view(
            jenkins, prefix,
            include_regex='%s_.+__.+' % prefix,
            template_name='dashboard_view_all_jobs.xml.em'))

    for os_name, os_code_name, arch in targets:
        name = get_release_view_name(
            rosdistro_name, release_build_name, os_name, os_code_name, arch)
        # source targets carry no architecture in their job name suffix
        regex = (
            '%s__.+__%s_%s__source' % (name, os_name, os_code_name)
            if arch == 'source'
            else '%s__.+__%s_%s_%s__binary' % (
                name, os_name, os_code_name, arch))
        result.append(configure_view(
            jenkins, name,
            include_regex=regex,
            template_name='dashboard_view_all_jobs.xml.em'))

    return result
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name,
        append_timestamp=False):
    """
    Configure all Jenkins release jobs.

    Invokes the per-package job configuration for every released package and
    target which matches the build file criteria.  Additionally a job to
    import Debian packages into the Debian repository is created.
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # the distribution cache is only needed for maintainer notification
    # or when ABI incompatibility is assumed
    dist_cache = None
    if build_file.notify_maintainers or build_file.abi_incompatibility_assumed:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            targets.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in targets:
        print(' - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins)

    view_name = get_release_view_name(rosdistro_name, release_build_name)
    view = configure_release_view(jenkins, view_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            # skip messages go to stderr, consistent with the other
            # release job generator in this file
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name), file=sys.stderr)
            continue

        for os_name, os_code_name in targets:
            configure_release_job(
                config_url, rosdistro_name, release_build_name,
                pkg_name, os_name, os_code_name,
                append_timestamp=append_timestamp,
                config=config, build_file=build_file,
                index=index, dist_file=dist_file,
                dist_cache=dist_cache, jenkins=jenkins, view=view,
                generate_import_package_job=False)
def configure_release_views(
        jenkins, rosdistro_name, release_build_name, targets, dry_run=False):
    """Configure a dashboard view for each (os, os_code_name, arch) target.

    Returns a mapping from view name to the configured view object.
    """
    def _job_include_regex(view_name, os_name, os_code_name, arch):
        # 'source' jobs have no architecture component in their names
        if arch == 'source':
            return '%s__.+__%s_%s__source' % (
                view_name, os_name, os_code_name)
        return '%s__.+__%s_%s_%s__binary' % (
            view_name, os_name, os_code_name, arch)

    views = {}
    for target in targets:
        os_name, os_code_name, arch = target
        view_name = get_release_view_name(
            rosdistro_name, release_build_name, os_name, os_code_name, arch)
        views[view_name] = configure_view(
            jenkins, view_name,
            include_regex=_job_include_regex(
                view_name, os_name, os_code_name, arch),
            template_name='dashboard_view_all_jobs.xml.em',
            dry_run=dry_run)
    return views
def configure_release_job_with_validation(
        config_url, rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name, append_timestamp=False,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None,
        generate_import_package_job=True,
        filter_arches=None):
    """Configure the sourcedeb and binarydeb jobs of one package/target.

    Validates the package, OS name and OS code name against the build file
    and raises JobValidationError for invalid input.  Passing
    ``jenkins=False`` generates and validates all job configurations
    without reconfiguring any job on the master.
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    if pkg_name not in pkg_names:
        raise JobValidationError(
            "Invalid package name '%s' " % pkg_name +
            'choose one of the following: ' + ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError(
            "Repository '%s' has no release section" % repo_name)

    if not repo.release_repository.version:
        raise JobValidationError(
            "Repository '%s' has no release version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_release_view_name(rosdistro_name, release_build_name)
        configure_release_view(jenkins, view_name)

    if generate_import_package_job:
        configure_import_package_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins)

    # sourcedeb job
    job_name = get_sourcedeb_job_name(
        rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)
    job_config = _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name,
        _get_target_arches(
            build_file, os_name, os_code_name, print_skipped=False),
        repo.release_repository, pkg_name,
        repo_name, dist_cache=dist_cache)
    # a falsy jenkins object is still valid: jenkinsapi.jenkins.Jenkins
    # evaluates to false if the job count is zero, so only an explicit
    # False (dry run) skips the reconfiguration
    if jenkins is not False:
        configure_job(jenkins, job_name, job_config)

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(
            pkg_name, dist_cache, pkg_names)
        # None indicates the package could not be resolved in the cache
        if dependency_names is None:
            return

    # binarydeb jobs
    for arch in _get_target_arches(build_file, os_name, os_code_name):
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)

        upstream_job_names = [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name,
                dependency_name, os_name, os_code_name, arch)
            for dependency_name in dependency_names]

        job_config = _get_binarydeb_job_config(
            config_url, rosdistro_name, release_build_name,
            config, build_file, os_name, os_code_name, arch,
            repo.release_repository, pkg_name, append_timestamp,
            repo_name, dist_cache=dist_cache,
            upstream_job_names=upstream_job_names)
        # see comment above: only an explicit False skips reconfiguration
        if jenkins is not False:
            configure_job(jenkins, job_name, job_config)
def get_import_package_job_name(rosdistro_name, release_build_name):
    """Return the name of the job importing packages into the repository."""
    prefix = get_release_view_name(rosdistro_name, release_build_name)
    return '%s_import_package' % prefix
def get_binarydeb_job_name(rosdistro_name, release_build_name, pkg_name,
                           os_name, os_code_name, arch):
    """Return the binarydeb job name for a package on a specific target."""
    prefix = get_release_view_name(rosdistro_name, release_build_name)
    return '%s__%s__%s_%s_%s__binary' % (
        prefix, pkg_name, os_name, os_code_name, arch)
def get_sourcedeb_job_name(rosdistro_name, release_build_name, pkg_name,
                           os_name, os_code_name):
    """Return the sourcedeb job name for a package on a specific target."""
    prefix = get_release_view_name(rosdistro_name, release_build_name)
    return '%s__%s__%s_%s__source' % (
        prefix, pkg_name, os_name, os_code_name)
def get_sync_packages_to_testing_job_name(
        rosdistro_name, release_build_name, os_code_name, arch):
    """Return the job name syncing packages to the testing repository."""
    prefix = get_release_view_name(rosdistro_name, release_build_name)
    return '%s_sync-packages-to-testing_%s_%s' % (
        prefix, os_code_name, arch)
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name,
        append_timestamp=False):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally a job to import Debian packages into the Debian repository is
    created.
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # the distribution cache is only fetched when the build file requires it
    # for maintainer notification or ABI-incompatibility handling
    dist_cache = None
    if build_file.notify_maintainers or build_file.abi_incompatibility_assumed:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets: all (os_name, os_code_name) pairs listed in the build file
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            targets.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in targets:
        print(' - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    # the import job is created once here instead of per package below
    configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins)

    # sync-to-testing jobs are only generated for 'ubuntu' targets,
    # one per architecture of each code name
    for os_name, os_code_name in targets:
        if os_name != 'ubuntu':
            continue
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins)

    view_name = get_release_view_name(rosdistro_name, release_build_name)
    view = configure_release_view(jenkins, view_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    # collect the names of all configured jobs so obsolete ones can be
    # removed afterwards
    all_job_names = []
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name), file=sys.stderr)
            continue

        for os_name, os_code_name in targets:
            try:
                job_names = configure_release_job(
                    config_url, rosdistro_name, release_build_name,
                    pkg_name, os_name, os_code_name,
                    append_timestamp=append_timestamp,
                    config=config, build_file=build_file,
                    index=index, dist_file=dist_file,
                    dist_cache=dist_cache, jenkins=jenkins, view=view,
                    generate_import_package_job=False,
                    generate_sync_packages_to_testing_job=False)
                all_job_names += job_names
            except JobValidationError as e:
                # invalid packages/targets are reported but do not abort
                # the configuration of the remaining jobs
                print(e.message, file=sys.stderr)

    # delete obsolete jobs in this view
    remove_jobs(jenkins, '%s__' % view_name, all_job_names)