def configure_release_jobs(config_url,
                           rosdistro_name,
                           release_build_name,
                           groovy_script=None,
                           dry_run=False,
                           whitelist_package_names=None):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally, a job to import Debian packages into the Debian repository
    is created.
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    platforms = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            platforms.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in platforms:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    pkg_names = dist_file.release_packages.keys()
    filtered_pkg_names = build_file.filter_packages(pkg_names)
    explicitly_ignored_pkg_names = set(pkg_names) - set(filtered_pkg_names)
    if explicitly_ignored_pkg_names:
        print(('The following packages are being %s because of ' +
               'white-/blacklisting:') %
              ('ignored' if build_file.skip_ignored_packages else 'disabled'))
        for pkg_name in sorted(explicitly_ignored_pkg_names):
            print('  -', pkg_name)

    dist_cache = get_distribution_cache(index, rosdistro_name)

    if explicitly_ignored_pkg_names:
        # get direct dependencies from distro cache for each package
        direct_dependencies = {}
        for pkg_name in pkg_names:
            direct_dependencies[pkg_name] = _get_direct_dependencies(
                pkg_name, dist_cache, pkg_names) or set([])

        # find recursive downstream deps for all explicitly ignored packages
        ignored_pkg_names = set(explicitly_ignored_pkg_names)
        while True:
            implicitly_ignored_pkg_names = _get_downstream_package_names(
                ignored_pkg_names, direct_dependencies)
            if implicitly_ignored_pkg_names - ignored_pkg_names:
                ignored_pkg_names |= implicitly_ignored_pkg_names
                continue
            break
        implicitly_ignored_pkg_names = \
            ignored_pkg_names - explicitly_ignored_pkg_names

        if implicitly_ignored_pkg_names:
            print(('The following packages are being %s because their ' +
                   'dependencies are being ignored:') %
                  ('ignored'
                   if build_file.skip_ignored_packages else 'disabled'))
            for pkg_name in sorted(implicitly_ignored_pkg_names):
                print('  -', pkg_name)
            filtered_pkg_names = \
                set(filtered_pkg_names) - implicitly_ignored_pkg_names

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    jenkins = False
    if groovy_script is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)

    all_view_configs = {}
    all_job_configs = OrderedDict()

    job_name, job_config = configure_import_package_job(config_url,
                                                        rosdistro_name,
                                                        release_build_name,
                                                        config=config,
                                                        build_file=build_file,
                                                        jenkins=jenkins,
                                                        dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    job_name, job_config = configure_sync_packages_to_main_job(
        config_url,
        rosdistro_name,
        release_build_name,
        config=config,
        build_file=build_file,
        jenkins=jenkins,
        dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    for os_name, os_code_name in platforms:
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            job_name, job_config = configure_sync_packages_to_testing_job(
                config_url,
                rosdistro_name,
                release_build_name,
                os_code_name,
                arch,
                config=config,
                build_file=build_file,
                jenkins=jenkins,
                dry_run=dry_run)
            if not jenkins:
                all_job_configs[job_name] = job_config

    targets = []
    for os_name, os_code_name in platforms:
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
    views = configure_release_views(jenkins,
                                    rosdistro_name,
                                    release_build_name,
                                    targets,
                                    dry_run=dry_run)
    if not jenkins:
        all_view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(views),
    }

    # binary jobs must be generated in topological order
    from catkin_pkg.package import parse_package_string
    from ros_buildfarm.common import topological_order_packages
    pkgs = {}
    for pkg_name in pkg_names:
        if pkg_name not in dist_cache.release_package_xmls:
            print("Skipping package '%s': no released package.xml in cache" %
                  (pkg_name),
                  file=sys.stderr)
            continue
        pkg_xml = dist_cache.release_package_xmls[pkg_name]
        pkg = parse_package_string(pkg_xml)
        pkgs[pkg_name] = pkg
    ordered_pkg_tuples = topological_order_packages(pkgs)

    other_build_files = [
        v for k, v in build_files.items() if k != release_build_name
    ]

    all_source_job_names = []
    all_binary_job_names = []
    for pkg_name in [p.name for _, p in ordered_pkg_tuples]:
        if whitelist_package_names:
            if pkg_name not in whitelist_package_names:
                print(
                    "Skipping package '%s' not in the explicitly passed list" %
                    pkg_name,
                    file=sys.stderr)
                continue

        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        is_disabled = pkg_name not in filtered_pkg_names
        if is_disabled and build_file.skip_ignored_packages:
            print("Skipping ignored package '%s' in repository '%s'" %
                  (pkg_name, repo_name),
                  file=sys.stderr)
            continue
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name),
                  file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name),
                  file=sys.stderr)
            continue

        for os_name, os_code_name in platforms:
            other_build_files_same_platform = []
            for other_build_file in other_build_files:
                if os_name not in other_build_file.targets:
                    continue
                if os_code_name not in other_build_file.targets[os_name]:
                    continue
                other_build_files_same_platform.append(other_build_file)

            try:
                source_job_names, binary_job_names, job_configs = \
                    configure_release_job(
                        config_url, rosdistro_name, release_build_name,
                        pkg_name, os_name, os_code_name,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins, views=views,
                        generate_import_package_job=False,
                        generate_sync_packages_jobs=False,
                        is_disabled=is_disabled,
                        other_build_files_same_platform=other_build_files_same_platform,
                        groovy_script=groovy_script,
                        dry_run=dry_run)
                all_source_job_names += source_job_names
                all_binary_job_names += binary_job_names
                if groovy_script is not None:
                    print('Configuration for jobs: ' +
                          ', '.join(source_job_names + binary_job_names))
                    for source_job_name in source_job_names:
                        all_job_configs[source_job_name] = job_configs[
                            source_job_name]
                    for binary_job_name in binary_job_names:
                        all_job_configs[binary_job_name] = job_configs[
                            binary_job_name]
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(all_job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    # with an explicit list of packages we don't delete obsolete jobs
    if not whitelist_package_names:
        # delete obsolete binary jobs
        for os_name, os_code_name in platforms:
            for arch in build_file.targets[os_name][os_code_name]:
                binary_view = get_release_binary_view_name(
                    rosdistro_name, release_build_name, os_name, os_code_name,
                    arch)
                binary_job_prefix = '%s__' % binary_view

                excluded_job_names = set([
                    j for j in all_binary_job_names
                    if j.startswith(binary_job_prefix)
                ])
                if groovy_script is None:
                    print("Removing obsolete binary jobs with prefix '%s'" %
                          binary_job_prefix)
                    from ros_buildfarm.jenkins import remove_jobs
                    remove_jobs(jenkins,
                                binary_job_prefix,
                                excluded_job_names,
                                dry_run=dry_run)
                else:
                    binary_key = 'binary_%s_%s_%s' % \
                        (os_name, os_code_name, arch)
                    groovy_data['job_prefixes_and_names'][binary_key] = \
                        (binary_job_prefix, excluded_job_names)

        # delete obsolete source jobs
        # requires knowledge about all other release build files
        for os_name, os_code_name in platforms:
            other_source_job_names = []
            # get source job names for all other release build files
            for other_release_build_name in [
                    k for k in build_files.keys() if k != release_build_name
            ]:
                other_build_file = build_files[other_release_build_name]
                other_dist_file = get_distribution_file(
                    index, rosdistro_name, other_build_file)
                if not other_dist_file:
                    continue

                if os_name not in other_build_file.targets or \
                        os_code_name not in other_build_file.targets[os_name]:
                    continue

                if other_build_file.skip_ignored_packages:
                    filtered_pkg_names = other_build_file.filter_packages(
                        pkg_names)
                else:
                    filtered_pkg_names = pkg_names
                for pkg_name in sorted(filtered_pkg_names):
                    pkg = other_dist_file.release_packages[pkg_name]
                    repo_name = pkg.repository_name
                    repo = other_dist_file.repositories[repo_name]
                    if not repo.release_repository:
                        continue
                    if not repo.release_repository.version:
                        continue

                    other_job_name = get_sourcedeb_job_name(
                        rosdistro_name, other_release_build_name, pkg_name,
                        os_name, os_code_name)
                    other_source_job_names.append(other_job_name)

            source_view_prefix = get_release_source_view_name(
                rosdistro_name, os_name, os_code_name)
            source_job_prefix = '%s__' % source_view_prefix
            excluded_job_names = set([
                j for j in (all_source_job_names + other_source_job_names)
                if j.startswith(source_job_prefix)
            ])
            if groovy_script is None:
                print("Removing obsolete source jobs with prefix '%s'" %
                      source_job_prefix)
                from ros_buildfarm.jenkins import remove_jobs
                remove_jobs(jenkins,
                            source_job_prefix,
                            excluded_job_names,
                            dry_run=dry_run)
            else:
                source_key = 'source_%s_%s' % (os_name, os_code_name)
                groovy_data['job_prefixes_and_names'][source_key] = (
                    source_job_prefix, excluded_job_names)

    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d views and %d jobs" %
            (groovy_script, len(all_view_configs), len(all_job_configs)))
        content = expand_template('snippet/reconfigure_jobs.groovy.em',
                                  groovy_data)
        write_groovy_script_and_configs(groovy_script,
                                        content,
                                        all_job_configs,
                                        view_configs=all_view_configs)
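
A minimal invocation sketch for configure_release_jobs; the config URL,
distro name, build name, and script path below are placeholder assumptions,
not real buildfarm endpoints.

# Hedged usage sketch -- every value here is an invented example.
configure_release_jobs(
    'https://example.com/buildfarm_config/index.yaml',  # config_url
    'melodic',                                # rosdistro_name
    'default',                                # release_build_name
    groovy_script='/tmp/reconfigure_jobs.groovy',  # collect configs into a
                                                   # script instead of calling
                                                   # the Jenkins API directly
    dry_run=True)
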
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(description="Generate a 'release' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    parser.add_argument('--skip-install',
                        action='store_true',
                        help='Skip trying to install binarydeb')
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):
        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeFile(self, *args, **kwargs):
            template_path = kwargs['file'].name
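            # the binarydeb job template is expanded after the sourcedeb one,
            # so a '--' marker recorded here separates the collected source
            # scripts from the binary scripts (split on it further down)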
            if template_path.endswith('/release/binarydeb_job.xml.em'):
                self.scripts.append('--')
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    configure_release_job(args.config_url,
                          args.rosdistro_name,
                          args.release_build_name,
                          args.package_name,
                          args.os_name,
                          args.os_code_name,
                          jenkins=False,
                          views=[],
                          generate_import_package_job=False,
                          generate_sync_packages_jobs=False,
                          filter_arches=args.arch)

    templates.template_hooks = None

    source_job_name = get_sourcedeb_job_name(args.rosdistro_name,
                                             args.release_build_name,
                                             args.package_name, args.os_name,
                                             args.os_code_name)

    binary_job_name = get_binarydeb_job_name(args.rosdistro_name,
                                             args.release_build_name,
                                             args.package_name, args.os_name,
                                             args.os_code_name, args.arch)

    separator_index = hook.scripts.index('--')
    source_scripts = hook.scripts[:separator_index]
    binary_scripts = hook.scripts[separator_index + 1:]

    # inject additional argument to skip fetching sourcedeb from repo
    script_name = '/run_binarydeb_job.py '
    additional_argument = '--skip-download-sourcedeb '
    for i, script in enumerate(binary_scripts):
        offset = script.find(script_name)
        if offset != -1:
            offset += len(script_name)
            script = script[:offset] + additional_argument + script[offset:]
            binary_scripts[i] = script
            break

    # remove rm command for sourcedeb location
    rm_command = 'rm -fr $WORKSPACE/binarydeb'
    for i, script in enumerate(binary_scripts):
        offset = script.find(rm_command)
        if offset != -1:
            script = script[:offset] + script[offset + len(rm_command):]
            binary_scripts[i] = script
            break

    if args.skip_install:
        # remove install step
        script_name = '/create_binarydeb_install_task_generator.py '
        for i, script in enumerate(binary_scripts):
            offset = script.find(script_name)
            if offset != -1:
                del binary_scripts[i]
                break

    value = expand_template('release/release_script.sh.em', {
        'source_job_name': source_job_name,
        'binary_job_name': binary_job_name,
        'source_scripts': source_scripts,
        'binary_scripts': binary_scripts
    },
                            options={BANGPATH_OPT: False})
    value = value.replace('python3', sys.executable)
    print(value)
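
The list surgery in main() above is easier to see on a toy input; the shell
snippets below are invented for illustration.

# Toy version of the '--' split and the --skip-download-sourcedeb injection.
scripts = [
    'echo building sourcedeb',                  # recorded by the source job
    '--',                                       # marker from binarydeb template
    'python3 /tmp/run_binarydeb_job.py --foo',  # recorded by the binary job
]
separator_index = scripts.index('--')
source_scripts = scripts[:separator_index]
binary_scripts = scripts[separator_index + 1:]

script_name = '/run_binarydeb_job.py '
offset = binary_scripts[0].find(script_name) + len(script_name)
binary_scripts[0] = (binary_scripts[0][:offset] +
                     '--skip-download-sourcedeb ' + binary_scripts[0][offset:])
assert binary_scripts[0] == \
    'python3 /tmp/run_binarydeb_job.py --skip-download-sourcedeb --foo'
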
def configure_release_job(config_url,
                          rosdistro_name,
                          release_build_name,
                          pkg_name,
                          os_name,
                          os_code_name,
                          config=None,
                          build_file=None,
                          index=None,
                          dist_file=None,
                          dist_cache=None,
                          jenkins=None,
                          views=None,
                          generate_import_package_job=True,
                          generate_sync_packages_jobs=True,
                          is_disabled=False,
                          other_build_files_same_platform=None,
                          groovy_script=None,
                          filter_arches=None,
                          dry_run=False):
    """
    Configure a Jenkins release job.

    The following jobs are created for each package:
    - M source jobs, one for each OS code name
    - M * N binary jobs, one for each combination of OS code name and arch
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    pkg_names = dist_file.release_packages.keys()

    if pkg_name not in pkg_names:
        raise JobValidationError("Invalid package name '%s' " % pkg_name +
                                 'choose one of the following: ' +
                                 ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError("Repository '%s' has no release section" %
                                 repo_name)

    if not repo.release_repository.version:
        raise JobValidationError("Repository '%s' has no release version" %
                                 repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError("Invalid OS name '%s' " % os_name +
                                 'choose one of the following: ' +
                                 ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        targets = []
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
        configure_release_views(jenkins,
                                rosdistro_name,
                                release_build_name,
                                targets,
                                dry_run=dry_run)

    if generate_import_package_job:
        configure_import_package_job(config_url,
                                     rosdistro_name,
                                     release_build_name,
                                     config=config,
                                     build_file=build_file,
                                     jenkins=jenkins,
                                     dry_run=dry_run)

    if generate_sync_packages_jobs:
        configure_sync_packages_to_main_job(config_url,
                                            rosdistro_name,
                                            release_build_name,
                                            config=config,
                                            build_file=build_file,
                                            jenkins=jenkins,
                                            dry_run=dry_run)
        for arch in build_file.targets[os_name][os_code_name]:
            configure_sync_packages_to_testing_job(config_url,
                                                   rosdistro_name,
                                                   release_build_name,
                                                   os_code_name,
                                                   arch,
                                                   config=config,
                                                   build_file=build_file,
                                                   jenkins=jenkins,
                                                   dry_run=dry_run)

    source_job_names = []
    binary_job_names = []
    job_configs = {}

    # sourcedeb job
    # since sourcedeb jobs are potentially being shared across multiple build
    # files the configuration has to take all of them into account in order to
    # generate a job which all build files agree on
    source_job_name = get_sourcedeb_job_name(rosdistro_name,
                                             release_build_name, pkg_name,
                                             os_name, os_code_name)

    # while the package is disabled in the current build file
    # it might be used by sibling build files
    is_source_disabled = is_disabled
    if is_source_disabled and other_build_files_same_platform:
        # check if sourcedeb job is used by any other build file with the same platform
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                is_source_disabled = False
                break

    job_config = _get_sourcedeb_job_config(
        config_url,
        rosdistro_name,
        release_build_name,
        config,
        build_file,
        os_name,
        os_code_name,
        pkg_name,
        repo_name,
        repo.release_repository,
        dist_cache=dist_cache,
        is_disabled=is_source_disabled,
        other_build_files_same_platform=other_build_files_same_platform)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, source_job_name, job_config, dry_run=dry_run)
    source_job_names.append(source_job_name)
    job_configs[source_job_name] = job_config

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(pkg_name, dist_cache,
                                                    pkg_names)
        # if dependencies are not yet available in rosdistro cache
        # skip binary jobs
        if dependency_names is None:
            print(("Skipping binary jobs for package '%s' because it is not " +
                   "yet in the rosdistro cache") % pkg_name,
                  file=sys.stderr)
            return source_job_names, binary_job_names, job_configs

    # binarydeb jobs
    for arch in build_file.targets[os_name][os_code_name]:
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(rosdistro_name, release_build_name,
                                          pkg_name, os_name, os_code_name,
                                          arch)

        upstream_job_names = [source_job_name] + [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name, dependency_name, os_name,
                os_code_name, arch) for dependency_name in dependency_names
        ]

        job_config = _get_binarydeb_job_config(
            config_url,
            rosdistro_name,
            release_build_name,
            config,
            build_file,
            os_name,
            os_code_name,
            arch,
            pkg_name,
            repo_name,
            repo.release_repository,
            dist_cache=dist_cache,
            upstream_job_names=upstream_job_names,
            is_disabled=is_disabled)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config, dry_run=dry_run)
        binary_job_names.append(job_name)
        job_configs[job_name] = job_config

    return source_job_names, binary_job_names, job_configs
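
For a single package and target, configure_release_job can be driven the same
way main() drives it; a hedged sketch, with placeholder config URL, distro,
build, package, and platform names:

# Sketch: build the job configs for one package/target pair without
# touching a Jenkins master (all names are invented examples).
source_jobs, binary_jobs, job_configs = configure_release_job(
    'https://example.com/buildfarm_config/index.yaml',
    'melodic', 'default',            # rosdistro and release build name
    'roscpp', 'ubuntu', 'bionic',    # package and target platform
    jenkins=False,                   # skip the Jenkins API entirely
    views=[],                        # assume the views already exist
    generate_import_package_job=False,
    generate_sync_packages_jobs=False,
    filter_arches=['amd64'])         # only generate one arch's binary job
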
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'release' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    parser.add_argument(
        '--skip-install',
        action='store_true',
        help='Skip trying to install binarydeb')
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeFile(self, *args, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/release/binarydeb_job.xml.em'):
                self.scripts.append('--')
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    configure_release_job(
        args.config_url, args.rosdistro_name, args.release_build_name,
        args.package_name, args.os_name, args.os_code_name,
        jenkins=False, views=[], generate_import_package_job=False,
        generate_sync_packages_jobs=False, filter_arches=args.arch)

    templates.template_hooks = None

    source_job_name = get_sourcedeb_job_name(
        args.rosdistro_name, args.release_build_name,
        args.package_name, args.os_name, args.os_code_name)

    binary_job_name = get_binarydeb_job_name(
        args.rosdistro_name, args.release_build_name,
        args.package_name, args.os_name, args.os_code_name, args.arch)

    separator_index = hook.scripts.index('--')
    source_scripts = hook.scripts[:separator_index]
    binary_scripts = hook.scripts[separator_index + 1:]

    # inject additional argument to skip fetching sourcedeb from repo
    script_name = '/run_binarydeb_job.py '
    additional_argument = '--skip-download-sourcedeb '
    for i, script in enumerate(binary_scripts):
        offset = script.find(script_name)
        if offset != -1:
            offset += len(script_name)
            script = script[:offset] + additional_argument + script[offset:]
            binary_scripts[i] = script
            break

    # remove rm command for sourcedeb location
    rm_command = 'rm -fr $WORKSPACE/binarydeb'
    for i, script in enumerate(binary_scripts):
        offset = script.find(rm_command)
        if offset != -1:
            script = script[:offset] + script[offset + len(rm_command):]
            binary_scripts[i] = script
            break

    if args.skip_install:
        # remove install step
        script_name = '/create_binarydeb_install_task_generator.py '
        for i, script in enumerate(binary_scripts):
            offset = script.find(script_name)
            if offset != -1:
                del binary_scripts[i]
                break

    value = expand_template(
        'release/release_script.sh.em', {
            'source_job_name': source_job_name,
            'binary_job_name': binary_job_name,
            'source_scripts': source_scripts,
            'binary_scripts': binary_scripts},
        options={BANGPATH_OPT: False})
    value = value.replace('python3', sys.executable)
    print(value)

Example #5

def configure_release_job(
        config_url, rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        generate_import_package_job=True,
        generate_sync_packages_jobs=True,
        is_disabled=False, other_build_files_same_platform=None,
        groovy_script=None,
        filter_arches=None,
        dry_run=False):
    """
    Configure a Jenkins release job.

    The following jobs are created for each package:
    - M source jobs, one for each OS code name
    - M * N binary jobs, one for each combination of OS code name and arch
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    pkg_names = dist_file.release_packages.keys()

    if pkg_name not in pkg_names:
        raise JobValidationError(
            "Invalid package name '%s' " % pkg_name +
            'choose one of the following: ' + ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError(
            "Repository '%s' has no release section" % repo_name)

    if not repo.release_repository.version:
        raise JobValidationError(
            "Repository '%s' has no release version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if views is None:
        targets = []
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
        configure_release_views(
            jenkins, rosdistro_name, release_build_name, targets,
            dry_run=dry_run)

    if generate_import_package_job:
        configure_import_package_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins,
            dry_run=dry_run)

    if generate_sync_packages_jobs:
        configure_sync_packages_to_main_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins,
            dry_run=dry_run)
        for arch in build_file.targets[os_name][os_code_name]:
            configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins,
                dry_run=dry_run)

    source_job_names = []
    binary_job_names = []
    job_configs = {}

    # sourcedeb job
    # since sourcedeb jobs are potentially being shared across multiple build
    # files the configuration has to take all of them into account in order to
    # generate a job which all build files agree on
    source_job_name = get_sourcedeb_job_name(
        rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)

    # while the package is disabled in the current build file
    # it might be used by sibling build files
    is_source_disabled = is_disabled
    if is_source_disabled and other_build_files_same_platform:
        # check if sourcedeb job is used by any other build file with the same platform
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                is_source_disabled = False
                break

    job_config = _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name,
        pkg_name, repo_name, repo.release_repository, dist_cache=dist_cache,
        is_disabled=is_source_disabled,
        other_build_files_same_platform=other_build_files_same_platform)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, source_job_name, job_config, dry_run=dry_run)
    source_job_names.append(source_job_name)
    job_configs[source_job_name] = job_config

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(
            pkg_name, dist_cache, pkg_names)
        # if dependencies are not yet available in rosdistro cache
        # skip binary jobs
        if dependency_names is None:
            print(("Skipping binary jobs for package '%s' because it is not " +
                   "yet in the rosdistro cache") % pkg_name, file=sys.stderr)
            return source_job_names, binary_job_names, job_configs

    # binarydeb jobs
    for arch in build_file.targets[os_name][os_code_name]:
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)

        upstream_job_names = [source_job_name] + [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name,
                dependency_name, os_name, os_code_name, arch)
            for dependency_name in dependency_names]

        job_config = _get_binarydeb_job_config(
            config_url, rosdistro_name, release_build_name,
            config, build_file, os_name, os_code_name, arch,
            pkg_name, repo_name, repo.release_repository,
            dist_cache=dist_cache, upstream_job_names=upstream_job_names,
            is_disabled=is_disabled)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config, dry_run=dry_run)
        binary_job_names.append(job_name)
        job_configs[job_name] = job_config

    return source_job_names, binary_job_names, job_configs
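
The upstream wiring above is plain name fan-out: each binarydeb job is
triggered by the package's sourcedeb job plus the binarydeb jobs of its
direct dependencies. A hedged illustration with invented job names:

# Invented job names; the real ones come from get_sourcedeb_job_name and
# get_binarydeb_job_name.
source_job_name = 'Msrc_uB__foo__ubuntu_bionic__source'
dependency_binary_jobs = [
    'Mbin_uB64__foo_msgs__ubuntu_bionic_amd64__binary',
    'Mbin_uB64__foo_utils__ubuntu_bionic_amd64__binary',
]
upstream_job_names = [source_job_name] + dependency_binary_jobs
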

Example #6

def configure_release_jobs(
        config_url, rosdistro_name, release_build_name, groovy_script=None,
        dry_run=False, whitelist_package_names=None):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally, a job to import Debian packages into the Debian repository
    is created.
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    platforms = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            platforms.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in platforms:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    pkg_names = dist_file.release_packages.keys()
    filtered_pkg_names = build_file.filter_packages(pkg_names)
    explicitly_ignored_pkg_names = set(pkg_names) - set(filtered_pkg_names)
    if explicitly_ignored_pkg_names:
        print(('The following packages are being %s because of ' +
               'white-/blacklisting:') %
              ('ignored' if build_file.skip_ignored_packages else 'disabled'))
        for pkg_name in sorted(explicitly_ignored_pkg_names):
            print('  -', pkg_name)

    dist_cache = None
    if build_file.notify_maintainers or \
            build_file.abi_incompatibility_assumed or \
            explicitly_ignored_pkg_names:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    if explicitly_ignored_pkg_names:
        # get direct dependencies from distro cache for each package
        direct_dependencies = {}
        for pkg_name in pkg_names:
            direct_dependencies[pkg_name] = _get_direct_dependencies(
                pkg_name, dist_cache, pkg_names) or set([])

        # find recursive downstream deps for all explicitly ignored packages
        ignored_pkg_names = set(explicitly_ignored_pkg_names)
        while True:
            implicitly_ignored_pkg_names = _get_downstream_package_names(
                ignored_pkg_names, direct_dependencies)
            if implicitly_ignored_pkg_names - ignored_pkg_names:
                ignored_pkg_names |= implicitly_ignored_pkg_names
                continue
            break
        implicitly_ignored_pkg_names = \
            ignored_pkg_names - explicitly_ignored_pkg_names

        if implicitly_ignored_pkg_names:
            print(('The following packages are being %s because their ' +
                   'dependencies are being ignored:') %
                  ('ignored'
                   if build_file.skip_ignored_packages else 'disabled'))
            for pkg_name in sorted(implicitly_ignored_pkg_names):
                print('  -', pkg_name)
            filtered_pkg_names = \
                set(filtered_pkg_names) - implicitly_ignored_pkg_names

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    jenkins = connect(config.jenkins_url) if groovy_script is None else False

    all_view_configs = {}
    all_job_configs = {}

    job_name, job_config = configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins, dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    job_name, job_config = configure_sync_packages_to_main_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins, dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    for os_name, os_code_name in platforms:
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            job_name, job_config = configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins,
                dry_run=dry_run)
            if not jenkins:
                all_job_configs[job_name] = job_config

    targets = []
    for os_name, os_code_name in platforms:
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
    views = configure_release_views(
        jenkins, rosdistro_name, release_build_name, targets,
        dry_run=dry_run)
    if not jenkins:
        all_view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(views),
    }

    other_build_files = [v for k, v in build_files.items() if k != release_build_name]

    all_source_job_names = []
    all_binary_job_names = []
    for pkg_name in sorted(pkg_names):
        if whitelist_package_names:
            if pkg_name not in whitelist_package_names:
                print("Skipping package '%s' not in the explicitly passed list" %
                      pkg_name, file=sys.stderr)
                continue

        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        is_disabled = pkg_name not in filtered_pkg_names
        if is_disabled and build_file.skip_ignored_packages:
            print("Skipping ignored package '%s' in repository '%s'" %
                  (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name), file=sys.stderr)
            continue

        for os_name, os_code_name in platforms:
            other_build_files_same_platform = []
            for other_build_file in other_build_files:
                if os_name not in other_build_file.targets:
                    continue
                if os_code_name not in other_build_file.targets[os_name]:
                    continue
                other_build_files_same_platform.append(other_build_file)

            try:
                source_job_names, binary_job_names, job_configs = \
                    configure_release_job(
                        config_url, rosdistro_name, release_build_name,
                        pkg_name, os_name, os_code_name,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins, views=views,
                        generate_import_package_job=False,
                        generate_sync_packages_jobs=False,
                        is_disabled=is_disabled,
                        other_build_files_same_platform=other_build_files_same_platform,
                        groovy_script=groovy_script,
                        dry_run=dry_run)
                all_source_job_names += source_job_names
                all_binary_job_names += binary_job_names
                if groovy_script is not None:
                    print('Configuration for jobs: ' +
                          ', '.join(source_job_names + binary_job_names))
                    all_job_configs.update(job_configs)
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(all_job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    # with an explicit list of packages we don't delete obsolete jobs
    if not whitelist_package_names:
        # delete obsolete binary jobs
        for os_name, os_code_name in platforms:
            for arch in build_file.targets[os_name][os_code_name]:
                binary_view = get_release_binary_view_name(
                    rosdistro_name, release_build_name,
                    os_name, os_code_name, arch)
                binary_job_prefix = '%s__' % binary_view

                excluded_job_names = set([
                    j for j in all_binary_job_names
                    if j.startswith(binary_job_prefix)])
                if groovy_script is None:
                    print("Removing obsolete binary jobs with prefix '%s'" %
                          binary_job_prefix)
                    remove_jobs(
                        jenkins, binary_job_prefix, excluded_job_names,
                        dry_run=dry_run)
                else:
                    binary_key = 'binary_%s_%s_%s' % \
                        (os_name, os_code_name, arch)
                    groovy_data['job_prefixes_and_names'][binary_key] = \
                        (binary_job_prefix, excluded_job_names)

        # delete obsolete source jobs
        # requires knowledge about all other release build files
        for os_name, os_code_name in platforms:
            other_source_job_names = []
            # get source job names for all other release build files
            for other_release_build_name in [
                    k for k in build_files.keys() if k != release_build_name]:
                other_build_file = build_files[other_release_build_name]
                other_dist_file = get_distribution_file(
                    index, rosdistro_name, other_build_file)
                if not other_dist_file:
                    continue

                if os_name not in other_build_file.targets or \
                        os_code_name not in other_build_file.targets[os_name]:
                    continue

                if other_build_file.skip_ignored_packages:
                    filtered_pkg_names = other_build_file.filter_packages(
                        pkg_names)
                else:
                    filtered_pkg_names = pkg_names
                for pkg_name in sorted(filtered_pkg_names):
                    pkg = other_dist_file.release_packages[pkg_name]
                    repo_name = pkg.repository_name
                    repo = other_dist_file.repositories[repo_name]
                    if not repo.release_repository:
                        continue
                    if not repo.release_repository.version:
                        continue

                    other_job_name = get_sourcedeb_job_name(
                        rosdistro_name, other_release_build_name,
                        pkg_name, os_name, os_code_name)
                    other_source_job_names.append(other_job_name)

            source_view_prefix = get_release_source_view_name(
                rosdistro_name, os_name, os_code_name)
            source_job_prefix = '%s__' % source_view_prefix
            excluded_job_names = set([
                j for j in (all_source_job_names + other_source_job_names)
                if j.startswith(source_job_prefix)])
            if groovy_script is None:
                print("Removing obsolete source jobs with prefix '%s'" %
                      source_job_prefix)
                remove_jobs(
                    jenkins, source_job_prefix, excluded_job_names,
                    dry_run=dry_run)
            else:
                source_key = 'source_%s_%s' % (os_name, os_code_name)
                groovy_data['job_prefixes_and_names'][source_key] = (
                    source_job_prefix, excluded_job_names)

    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d views and %d jobs" %
            (groovy_script, len(all_view_configs), len(all_job_configs)))
        content = expand_template(
            'snippet/reconfigure_jobs.groovy.em', groovy_data)
        write_groovy_script_and_configs(
            groovy_script, content, all_job_configs,
            view_configs=all_view_configs)
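
The groovy script path receives all of its input through the groovy_data
dictionary assembled above; a hedged picture of its final shape, with
invented values:

# Shape of groovy_data as handed to snippet/reconfigure_jobs.groovy.em
# (keys mirror the code above, values are invented).
groovy_data = {
    'dry_run': False,
    'expected_num_views': 3,    # len(views)
    'expected_num_jobs': 42,    # len(all_job_configs)
    'job_prefixes_and_names': {
        # per-target job prefix and the set of job names to keep
        'binary_ubuntu_bionic_amd64': (
            'Mbin_uB64__',
            {'Mbin_uB64__foo__ubuntu_bionic_amd64__binary'}),
        'source_ubuntu_bionic': (
            'Msrc_uB__',
            {'Msrc_uB__foo__ubuntu_bionic__source'}),
    },
}
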

Example #7

def configure_release_jobs(config_url,
                           rosdistro_name,
                           release_build_name,
                           groovy_script=None):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally, a job to import Debian packages into the Debian repository
    is created.
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    platforms = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            platforms.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in platforms:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    pkg_names = dist_file.release_packages.keys()
    filtered_pkg_names = build_file.filter_packages(pkg_names)
    explicitly_ignored_pkg_names = set(pkg_names) - set(filtered_pkg_names)
    if explicitly_ignored_pkg_names:
        print(('The following packages are being %s because of ' +
               'white-/blacklisting:') %
              ('ignored' if build_file.skip_ignored_packages else 'disabled'))
        for pkg_name in sorted(explicitly_ignored_pkg_names):
            print('  -', pkg_name)

    dist_cache = None
    if build_file.notify_maintainers or \
            build_file.abi_incompatibility_assumed or \
            explicitly_ignored_pkg_names:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    if explicitly_ignored_pkg_names:
        # get direct dependencies from distro cache for each package
        direct_dependencies = {}
        for pkg_name in pkg_names:
            direct_dependencies[pkg_name] = _get_direct_dependencies(
                pkg_name, dist_cache, pkg_names) or set([])

        # find recursive downstream deps for all explicitly ignored packages
        ignored_pkg_names = set(explicitly_ignored_pkg_names)
        while True:
            implicitly_ignored_pkg_names = _get_downstream_package_names(
                ignored_pkg_names, direct_dependencies)
            if implicitly_ignored_pkg_names - ignored_pkg_names:
                ignored_pkg_names |= implicitly_ignored_pkg_names
                continue
            break
        implicitly_ignored_pkg_names = \
            ignored_pkg_names - explicitly_ignored_pkg_names

        if implicitly_ignored_pkg_names:
            print(('The following packages are being %s because their ' +
                   'dependencies are being ignored:') %
                  ('ignored'
                   if build_file.skip_ignored_packages else 'disabled'))
            for pkg_name in sorted(implicitly_ignored_pkg_names):
                print('  -', pkg_name)
            filtered_pkg_names = \
                set(filtered_pkg_names) - implicitly_ignored_pkg_names

    jenkins = connect(config.jenkins_url)

    configure_import_package_job(config_url,
                                 rosdistro_name,
                                 release_build_name,
                                 config=config,
                                 build_file=build_file,
                                 jenkins=jenkins)

    configure_sync_packages_to_main_job(config_url,
                                        rosdistro_name,
                                        release_build_name,
                                        config=config,
                                        build_file=build_file,
                                        jenkins=jenkins)
    for os_name, os_code_name in platforms:
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            configure_sync_packages_to_testing_job(config_url,
                                                   rosdistro_name,
                                                   release_build_name,
                                                   os_code_name,
                                                   arch,
                                                   config=config,
                                                   build_file=build_file,
                                                   jenkins=jenkins)

    targets = []
    for os_name, os_code_name in platforms:
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
    views = configure_release_views(jenkins, rosdistro_name,
                                    release_build_name, targets)

    if groovy_script is not None:
        # all further configuration will be handled by the groovy script
        jenkins = False

    all_source_job_names = []
    all_binary_job_names = []
    all_job_configs = {}
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        is_disabled = pkg_name not in filtered_pkg_names
        if is_disabled and build_file.skip_ignored_packages:
            print("Skipping ignored package '%s' in repository '%s'" %
                  (pkg_name, repo_name),
                  file=sys.stderr)
            continue
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name),
                  file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name),
                  file=sys.stderr)
            continue

        for os_name, os_code_name in platforms:
            try:
                source_job_names, binary_job_names, job_configs = \
                    configure_release_job(
                        config_url, rosdistro_name, release_build_name,
                        pkg_name, os_name, os_code_name,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins, views=views,
                        generate_import_package_job=False,
                        generate_sync_packages_jobs=False,
                        is_disabled=is_disabled,
                        groovy_script=groovy_script)
                all_source_job_names += source_job_names
                all_binary_job_names += binary_job_names
                if groovy_script is not None:
                    print('Configuration for jobs: ' +
                          ', '.join(source_job_names + binary_job_names))
                    all_job_configs.update(job_configs)
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    groovy_data = {
        'expected_num_jobs': len(all_job_configs),
        'job_prefixes_and_names': {},
    }

    # delete obsolete binary jobs
    for os_name, os_code_name in platforms:
        for arch in build_file.targets[os_name][os_code_name]:
            binary_view = get_release_binary_view_name(rosdistro_name,
                                                       release_build_name,
                                                       os_name, os_code_name,
                                                       arch)
            binary_job_prefix = '%s__' % binary_view

            excluded_job_names = set([
                j for j in all_binary_job_names
                if j.startswith(binary_job_prefix)
            ])
            if groovy_script is None:
                print("Removing obsolete binary jobs with prefix '%s'" %
                      binary_job_prefix)
                remove_jobs(jenkins, binary_job_prefix, excluded_job_names)
            else:
                binary_key = 'binary_%s_%s_%s' % (os_name, os_code_name, arch)
                groovy_data['job_prefixes_and_names'][binary_key] = \
                    (binary_job_prefix, excluded_job_names)

    # delete obsolete source jobs
    # requires knowledge about all other release build files
    for os_name, os_code_name in platforms:
        other_source_job_names = []
        # get source job names for all other release build files
        for other_release_build_name in [
                k for k in build_files.keys() if k != release_build_name
        ]:
            other_build_file = build_files[other_release_build_name]
            other_dist_file = get_distribution_file(index, rosdistro_name,
                                                    other_build_file)
            if not other_dist_file:
                continue

            if os_name not in other_build_file.targets or \
                    os_code_name not in other_build_file.targets[os_name]:
                continue

            if other_build_file.skip_ignored_packages:
                filtered_pkg_names = other_build_file.filter_packages(
                    pkg_names)
            else:
                filtered_pkg_names = pkg_names
            for pkg_name in sorted(filtered_pkg_names):
                pkg = other_dist_file.release_packages[pkg_name]
                repo_name = pkg.repository_name
                repo = other_dist_file.repositories[repo_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue

                other_job_name = get_sourcedeb_job_name(
                    rosdistro_name, other_release_build_name, pkg_name,
                    os_name, os_code_name)
                other_source_job_names.append(other_job_name)

        source_view_prefix = get_release_source_view_name(
            rosdistro_name, os_name, os_code_name)
        source_job_prefix = '%s__' % source_view_prefix
        excluded_job_names = set([
            j for j in (all_source_job_names + other_source_job_names)
            if j.startswith(source_job_prefix)
        ])
        if groovy_script is None:
            print("Removing obsolete source jobs with prefix '%s'" %
                  source_job_prefix)
            remove_jobs(jenkins, source_job_prefix, excluded_job_names)
        else:
            source_key = 'source_%s_%s' % (os_name, os_code_name)
            groovy_data['job_prefixes_and_names'][source_key] = (
                source_job_prefix, excluded_job_names)

    if groovy_script is not None:
        print("Writing groovy script '%s' to reconfigure %d jobs" %
              (groovy_script, len(all_job_configs)))
        content = expand_template('snippet/reconfigure_jobs.groovy.em',
                                  groovy_data)
        write_groovy_script_and_configs(groovy_script, content,
                                        all_job_configs)
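
All of the variants above widen the ignored set with the same fixed-point
loop. A self-contained toy, with a local stand-in for
_get_downstream_package_names (assumed to return every package whose direct
dependencies intersect the given set):

# Toy ignore-propagation: ignoring 'a' cascades to everything downstream.
def downstream(ignored, direct_dependencies):
    return set(pkg for pkg, deps in direct_dependencies.items()
               if deps & ignored)

direct_dependencies = {'a': set(), 'b': {'a'}, 'c': {'b'}, 'd': set()}
ignored_pkg_names = {'a'}
while True:
    implicitly_ignored = downstream(ignored_pkg_names, direct_dependencies)
    if implicitly_ignored - ignored_pkg_names:
        ignored_pkg_names |= implicitly_ignored
        continue
    break
assert ignored_pkg_names == {'a', 'b', 'c'}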