Example #1
import subprocess

# get_os_package_name comes from the surrounding module of this excerpt.
def get_sourcerpm(rosdistro_index_url,
                  rosdistro_name,
                  package_name,
                  sourcepkg_dir,
                  skip_download_sourcepkg=False):
    # ensure that no source subfolder exists
    rpm_package_name = get_os_package_name(rosdistro_name, package_name)
    if not skip_download_sourcepkg:
        # get expected package version from rosdistro
        from rosdistro import get_distribution_cache
        from rosdistro import get_index
        index = get_index(rosdistro_index_url)
        dist_cache = get_distribution_cache(index, rosdistro_name)
        dist_file = dist_cache.distribution_file
        assert package_name in dist_file.release_packages
        pkg = dist_file.release_packages[package_name]
        repo = dist_file.repositories[pkg.repository_name]
        package_version = repo.release_repository.version

        cmd = [
            'mock', '--resultdir',
            '%s' % sourcepkg_dir, '--no-cleanup-after', '--verbose', '--root',
            'ros_buildfarm', '--dnf-cmd', '--', 'download', '--source',
            '--disablerepo', '*', '--enablerepo',
            'ros-buildfarm-target-source',
            '%s-%s.*' % (rpm_package_name, package_version)
        ]

        print("Invoking '%s'" % ' '.join(cmd))
        subprocess.check_call(cmd, cwd=sourcepkg_dir)
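
A minimal usage sketch, assuming the function above is in scope; the distro name, package name and output directory are placeholders:

import os

index_url = 'https://raw.githubusercontent.com/ros/rosdistro/master/index-v4.yaml'
sourcepkg_dir = os.path.abspath('sourcepkg')
os.makedirs(sourcepkg_dir, exist_ok=True)
get_sourcerpm(index_url, 'noetic', 'std_msgs', sourcepkg_dir)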
Example #2
from rosdistro import get_distribution_cache

# get_package_condition_context and get_direct_dependencies come from the
# surrounding module of this excerpt.
def _get_and_parse_distribution_cache(index, rosdistro_name, pkg_names):
    from catkin_pkg.package import parse_package_string
    from catkin_pkg.package import Dependency
    dist_cache = get_distribution_cache(index, rosdistro_name)
    pkg_names = set(['ros_workspace']).union(pkg_names)
    cached_pkgs = {
        pkg_name: parse_package_string(pkg_xml)
        for pkg_name, pkg_xml in dist_cache.release_package_xmls.items()
        if pkg_name in pkg_names
    }

    condition_context = get_package_condition_context(index, rosdistro_name)
    for pkg in cached_pkgs.values():
        pkg.evaluate_conditions(condition_context)
    for pkg in cached_pkgs.values():
        for group_depend in pkg.group_depends:
            if group_depend.evaluated_condition is not False:
                group_depend.extract_group_members(cached_pkgs.values())

    # for ROS 2 distributions bloom injects a dependency on ros_workspace
    # into almost all packages (except ros_workspace's own dependencies),
    # so the same dependency needs to be injected here
    distribution_type = index.distributions[rosdistro_name].get(
        'distribution_type')
    if distribution_type == 'ros2' and 'ros_workspace' in cached_pkgs:
        no_ros_workspace_dep = set(['ros_workspace']).union(
            get_direct_dependencies('ros_workspace', cached_pkgs, pkg_names))

        for pkg_name, pkg in cached_pkgs.items():
            if pkg_name not in no_ros_workspace_dep:
                pkg.exec_depends.append(Dependency('ros_workspace'))

    return cached_pkgs
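
A usage sketch, assuming the excerpt's module-level helpers are importable; the distro name and package names are placeholders:

from rosdistro import get_index, get_index_url

index = get_index(get_index_url())
cached_pkgs = _get_and_parse_distribution_cache(index, 'foxy', {'rclcpp', 'rcutils'})
for pkg_name, pkg in sorted(cached_pkgs.items()):
    print(pkg_name, [d.name for d in pkg.exec_depends])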
Example #3
def __init__(self, name, ros_distro):
    SrcAptBase.__init__(self, name)
    self.ros_distro = self.detect_ros_distribution(ros_distro)
    self.rosdistro_index = rosdistro.get_index(rosdistro.get_index_url())
    self.cache = rosdistro.get_distribution_cache(self.rosdistro_index,
                                                  self.ros_distro)
    self.distro_file = self.cache.distribution_file
    # More logic may be needed for new ROS distributions
    # ROS 1 - https://www.ros.org/reps/rep-0003.html
    # ROS 2 - http://design.ros2.org/articles/changes.html
    if self.ros_distro == 'melodic':
        self.compilation_flags.append('--std=c++14')
    else:
        self.compilation_flags.append('--std=c++17')
        # needed for gazebo_ros_pkgs
        self.compilation_flags.append('-DBOOST_HAS_PTHREADS=1')
        # gtest_vendor is ROS 2 specific
        self.compilation_flags.append('-I' +
            join('/opt/ros/', self.ros_distro, 'src', 'gtest_vendor', 'include'))
        # flag to avoid problems in rcutils
        # https://github.com/osrf/auto-abi-checker/issues/17
        self.compilation_flags.append('-DRCUTILS__STDATOMIC_HELPER_H_')
        # flags for rmw_connext packages
        self.compilation_flags.append('-DRTI_UNIX')
        for rti_path in glob.glob('/opt/rti.com/rti_connext_dds-*'):
            self.compilation_flags.append('-I' + rti_path + '/include/')
            self.compilation_flags.append('-I' + rti_path + '/include/ndds')
    # Need to add /opt/ros includes to compile ROS software
    self.compilation_flags.append('-I' +
        join('/opt/ros/', self.ros_distro, 'include'))
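
For illustration, a sketch of how the collected flags might be consumed; `checker` stands for an instance of this class and `foo.cpp` is a placeholder source file:

import subprocess

# Hypothetical: feed the accumulated flags to a compiler syntax check.
cmd = ['g++', '-fsyntax-only', 'foo.cpp'] + checker.compilation_flags
subprocess.check_call(cmd)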
Example #4
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name,
        append_timestamp=False):
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers or build_file.abi_incompatibility_assumed:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            targets.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in targets:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins)

    view_name = get_release_view_name(rosdistro_name, release_build_name)
    view = configure_release_view(jenkins, view_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name))
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name))
            continue

        for os_name, os_code_name in targets:
            configure_release_job(
                config_url, rosdistro_name, release_build_name,
                pkg_name, os_name, os_code_name,
                append_timestamp=append_timestamp,
                config=config, build_file=build_file,
                index=index, dist_file=dist_file, dist_cache=dist_cache,
                jenkins=jenkins, view=view,
                generate_import_package_job=False)
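
A hypothetical top-level call; the buildfarm config URL and build name are placeholders, not values prescribed by ros_buildfarm:

configure_release_jobs(
    'https://example.com/buildfarm_config/index.yaml',
    'melodic', 'default')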
Example #5
    def __init__(self, distro):
        self.distro = distro

        try:
            index = get_index(get_index_url())
            self._distribution_file = get_distribution_cache(index, distro).distribution_file
        except Exception:
            logger.error("Could not load rosdistro distribution cache")
            self._distribution_file = None
Example #6
def configure_devel_jobs(
        config_url, rosdistro_name, source_build_name):
    config = get_config_index(config_url)
    build_files = get_source_build_files(config, rosdistro_name)
    build_file = build_files[source_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print('  -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    view_name = get_devel_view_name(rosdistro_name, source_build_name)
    view = configure_devel_view(jenkins, view_name)

    repo_names = dist_file.repositories.keys()
    repo_names = build_file.filter_repositories(repo_names)

    for repo_name in sorted(repo_names):
        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            print("Skipping repository '%s': no source section" % repo_name)
            continue
        if not repo.source_repository.version:
            print("Skipping repository '%s': no source version" % repo_name)
            continue

        for os_name, os_code_name, arch in targets:
            configure_devel_job(
                config_url, rosdistro_name, source_build_name,
                repo_name, os_name, os_code_name, arch,
                config=config, build_file=build_file,
                index=index, dist_file=dist_file, dist_cache=dist_cache,
                jenkins=jenkins, view=view)
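
The nested loops near the top of this example flatten build_file.targets into (os_name, os_code_name, arch) tuples; the same pattern as a self-contained comprehension over placeholder data:

build_targets = {'ubuntu': {'bionic': ['amd64', 'arm64']}}  # placeholder data
targets = [
    (os_name, os_code_name, arch)
    for os_name, per_os in build_targets.items()
    for os_code_name, arches in per_os.items()
    for arch in arches
]
print(targets)  # [('ubuntu', 'bionic', 'amd64'), ('ubuntu', 'bionic', 'arm64')]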
Example #7
def get_proposed_packages_repo_names(pkg_name_list):
    index = get_index(
        'https://raw.githubusercontent.com/ros/rosdistro/master/index.yaml')
    dist_cache = get_distribution_cache(index, 'foxy')
    dist_file = dist_cache.distribution_file

    repositories = []
    for pkg_name in pkg_name_list:
        if not pkg_name:
            continue
        try:
            repo_name = dist_file.release_packages[pkg_name].repository_name
            release_repo = dist_file.repositories[
                repo_name].source_repository.get_url_parts()[1]
        except KeyError:
            if pkg_name.startswith('rosbag2_'):
                release_repo = 'ros2/rosbag2'
            elif pkg_name.startswith('test_launch_ros'):
                release_repo = 'ros2/launch_ros'
            elif pkg_name.startswith('test_'):
                release_repo = 'ros2/system_tests'
            elif pkg_name.startswith('tf2_'):
                release_repo = 'ros2/geometry2'
            elif pkg_name.startswith('libstatistics_collector'
                                     ) or pkg_name.startswith('statistics_'):
                release_repo = 'ros-tooling/libstatistics_collector'
            elif pkg_name.startswith('rmw_dds_common'):
                release_repo = 'ros2/rmw_dds_common'
            elif pkg_name.startswith('rosidl_runtime_c'):
                release_repo = 'ros2/rosidl'
            elif pkg_name.startswith('rmw_cyclonedds'):
                release_repo = 'ros2/rmw_cyclonedds'
            elif pkg_name.startswith('rmw_connext'):
                release_repo = 'ros2/rmw_connext'
            elif pkg_name.startswith('rmw_fastcdr'):
                release_repo = 'eProsima/Fast-CDR'
            else:
                raise Exception(
                    "package " + pkg_name +
                    " not found in foxy release. Might need to add code manually"
                )
        repositories.append(release_repo)
    return list(dict.fromkeys(repositories))
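
A hypothetical call; dict.fromkeys() in the return statement deduplicates the repository list while preserving first-appearance order. The output shown is illustrative:

repos = get_proposed_packages_repo_names(['rclcpp', 'rcutils', 'tf2_ros'])
print(repos)  # e.g. ['ros2/rclcpp', 'ros2/rcutils', 'ros2/geometry2']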
Example #8
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser)
    add_argument_output_dir(parser, required=True)

    group = parser.add_argument_group(
        'Repositories in underlay workspace',
        description='The repositories in the underlay workspace will be ' +
                    'built and installed as well as built and tested. ' +
                    'Dependencies will be provided by binary packages.')
    group.add_argument(
        'source_repos',
        nargs='*',
        default=[],
        metavar='REPO_NAME',
        help="A name of a 'repository' from the distribution file")
    group.add_argument(
        '--custom-branch',
        nargs='*',
        type=_repository_name_and_branch,
        default=[],
        metavar='REPO_NAME:BRANCH_OR_TAG_NAME',
        help="A name of a 'repository' from the distribution file followed " +
             'by a colon and a branch / tag name')
    group.add_argument(
        '--custom-repo',
        nargs='*',
        type=_repository_name_and_type_and_url_and_branch,
        default=[],
        metavar='REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME',
        help='The name, type, url and branch / tag name of a repository, '
             'e.g. "common_tutorials:git:https://github.com/ros/common_tutorials:pullrequest-1"')

    add_overlay_arguments(parser)

    args = parser.parse_args(argv)

    print('Fetching buildfarm configuration...')
    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    print('Fetching rosdistro cache...')
    # Targets defined by the source build file are a subset of the targets
    # defined by the release build files. To increase the number of supported
    # pre-release targets, we combine the targets of all release build files
    # and use that combination when configuring the devel job.
    release_build_files = get_release_build_files(config, args.rosdistro_name)
    release_targets_combined = {}
    if release_build_files:
        release_targets_combined[args.os_name] = {}
        for build_name, rel_obj in release_build_files.items():
            if args.os_name not in rel_obj.targets:
                continue
            for dist_name, targets in rel_obj.targets[args.os_name].items():
                if dist_name not in release_targets_combined[args.os_name]:
                    release_targets_combined[args.os_name][dist_name] = {}
                release_targets_combined[args.os_name][dist_name].update(targets)

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for underlay workspace
    repositories = {}
    for repo_name in args.source_repos:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            repositories[repo_name] = \
                dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        if not repositories[repo_name]:
            print(("The repository '%s' has no source entry in the " +
                   "distribution file") % repo_name, file=sys.stderr)
            return 1

    for repo_name, custom_version in args.custom_branch:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            source_repo = dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        if not source_repo:
            print(("The repository '%s' has no source entry in the " +
                   "distribution file") % repo_name, file=sys.stderr)
            return 1
        source_repo = deepcopy(source_repo)
        source_repo.version = custom_version
        repositories[repo_name] = source_repo

    for repo_name, repo_type, repo_url, version in args.custom_repo:
        if repo_name in repositories and repositories[repo_name]:
            print("custom_repos option overriding '%s' to pull via '%s' "
                  "from '%s' with version '%s'. " %
                  (repo_name, repo_type, repo_url, version),
                  file=sys.stderr)
        source_repo = RepositorySpecification(
            repo_name, {
                'type': repo_type,
                'url': repo_url,
                'version': version,
            })
        repositories[repo_name] = source_repo

    scms = [(repositories[k], 'ws/src/%s' % k)
            for k in sorted(repositories.keys())]

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeInclude(self, *_, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    # use any source repo to pass to devel job template
    if index.distributions[args.rosdistro_name].get('distribution_type', 'ros1') == 'ros1':
        package_name = 'catkin'
    elif index.distributions[args.rosdistro_name].get('distribution_type', 'ros1') == 'ros2':
        package_name = 'ros_workspace'
    else:
        assert False, 'Unsupported ROS version ' + \
            str(index.distributions[args.rosdistro_name].get('distribution_type', None))
    source_repository = deepcopy(
        dist_file.repositories[package_name].source_repository)
    if not source_repository:
        print(("The repository '%s' does not have a source entry in the distribution " +
               'file. We cannot generate a prerelease without a source entry.') % package_name,
              file=sys.stderr)
        return 1
    source_repository.name = 'prerelease'
    print('Evaluating job templates...')
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        None, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        index=index, dist_file=dist_file, dist_cache=dist_cache,
        jenkins=False, views=False,
        source_repository=source_repository,
        build_targets=release_targets_combined)

    templates.template_hooks = None

    # derive scripts for overlay workspace from underlay
    overlay_scripts = []
    for script in hook.scripts:
        # skip cloning of ros_buildfarm repository
        if 'git clone' in script and '.git ros_buildfarm' in script:
            continue
        # skip build-and-install step
        if 'build and install' in script:
            continue

        # add prerelease overlay flag
        run_devel_job = '/run_devel_job.py'
        if run_devel_job in script:
            script = script.replace(
                run_devel_job, run_devel_job + ' --prerelease-overlay')

        # replace mounted workspace volume with overlay and underlay
        # used by:
        # - create_devel_task_generator.py, which needs to find packages in
        #   both the underlay and the overlay workspace
        # - build_and_test.py, which needs to source the environment of
        #   the underlay before building the overlay
        mount_volume = '-v $WORKSPACE/ws:/tmp/ws'
        if mount_volume in script:
            script = script.replace(
                mount_volume, mount_volume + ':ro ' + '-v $WORKSPACE/' +
                'ws_overlay:/tmp/ws_overlay')

        # relocate all docker files
        docker_path = '$WORKSPACE/docker_'
        if docker_path in script:
            script = script.replace(
                docker_path, docker_path + 'overlay_')

        # rename all docker images
        name_suffix = '_prerelease'
        if name_suffix in script:
            script = script.replace(
                name_suffix, name_suffix + '_overlay')

        overlay_scripts.append(script)

    from ros_buildfarm import __file__ as ros_buildfarm_file
    data = deepcopy(args.__dict__)
    data.update({
        'scms': scms,
        'scripts': hook.scripts,
        'overlay_scripts': overlay_scripts,
        'ros_buildfarm_python_path': os.path.dirname(
            os.path.dirname(os.path.abspath(ros_buildfarm_file))),
        'python_executable': sys.executable,
        'prerelease_script_path': os.path.dirname(os.path.abspath(__file__)),
        'build_tool': args.build_tool or build_file.build_tool})

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # generate multiple scripts
    for script_name in [
            'prerelease',
            'prerelease_build_overlay',
            'prerelease_build_underlay',
            'prerelease_clone_overlay',
            'prerelease_clone_underlay']:
        content = expand_template(
            'prerelease/%s_script.sh.em' % script_name, data,
            options={BANGPATH_OPT: False})
        script_file = os.path.join(args.output_dir, script_name + '.sh')
        with open(script_file, 'w') as h:
            h.write(content)
        os.chmod(script_file, os.stat(script_file).st_mode | stat.S_IEXEC)

    print('')
    print('Generated prerelease script - to execute it run:')
    if os.path.abspath(args.output_dir) != os.path.abspath(os.curdir):
        print('  cd %s' % args.output_dir)
    print('  ./prerelease.sh')
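
The chmod call near the end adds the owner-execute bit on top of each script's existing mode; the same idiom in isolation, with a placeholder file name:

import os
import stat

script_file = 'prerelease.sh'  # placeholder
os.chmod(script_file, os.stat(script_file).st_mode | stat.S_IEXEC)  # like 'chmod u+x'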
Example #9
def configure_doc_job(
        config_url, rosdistro_name, doc_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        is_disabled=False,
        groovy_script=None,
        doc_repository=None):
    """
    Configure a single Jenkins doc job.

    This includes the following steps:
    - clone the doc repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the run_doc_job.py script
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_doc_build_files(config, rosdistro_name)
        build_file = build_files[doc_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    repo_names = dist_file.repositories.keys()

    if repo_name is not None:
        if repo_name not in repo_names:
            raise JobValidationError(
                "Invalid repository name '%s' " % repo_name +
                'choose one of the following: %s' %
                ', '.join(sorted(repo_names)))

        repo = dist_file.repositories[repo_name]
        if not repo.doc_repository:
            raise JobValidationError(
                "Repository '%s' has no doc section" % repo_name)
        if not repo.doc_repository.version:
            raise JobValidationError(
                "Repository '%s' has no doc version" % repo_name)
        doc_repository = repo.doc_repository

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' % ', '.join(sorted(
                build_file.targets[os_name][os_code_name])))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        view_name = get_doc_view_name(
            rosdistro_name, doc_build_name)
        configure_doc_view(jenkins, view_name)

    job_name = get_doc_job_name(
        rosdistro_name, doc_build_name,
        repo_name, os_name, os_code_name, arch)

    job_config = _get_doc_job_config(
        config, config_url, rosdistro_name, doc_build_name,
        build_file, os_name, os_code_name, arch, doc_repository,
        repo_name, dist_cache=dist_cache, is_disabled=is_disabled)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config)

    return job_name, job_config
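
A hypothetical invocation; passing jenkins=False and views=[] skips both the server connection and the view setup, so only the job config is generated. All argument values are placeholders:

job_name, job_config = configure_doc_job(
    'https://example.com/buildfarm_config/index.yaml', 'melodic', 'default',
    'geometry2', 'ubuntu', 'bionic', 'amd64',
    jenkins=False, views=[])
print(job_name)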
Example #10
def configure_devel_job(
        config_url, rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None):
    """
    Configure a single Jenkins devel job.

    This includes the following steps:
    - clone the source repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the release/run_devel_job.py script
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_source_build_files(config, rosdistro_name)
        build_file = build_files[source_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    repo_names = dist_file.repositories.keys()
    repo_names = build_file.filter_repositories(repo_names)

    if repo_name not in repo_names:
        raise JobValidationError(
            "Invalid repository name '%s' " % repo_name +
            'choose one of the following: %s' % ', '.join(sorted(repo_names)))

    repo = dist_file.repositories[repo_name]

    if not repo.source_repository:
        raise JobValidationError(
            "Repository '%s' has no source section" % repo_name)
    if not repo.source_repository.version:
        raise JobValidationError(
            "Repository '%s' has no source version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' % ', '.join(sorted(
                build_file.targets[os_name][os_code_name])))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_devel_view_name(rosdistro_name, source_build_name)
        configure_devel_view(jenkins, view_name)

    job_name = get_devel_job_name(
        rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch)

    job_config = _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, repo.source_repository,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config)

    return job_name
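
Invalid inputs surface as JobValidationError, which callers such as Example #14 catch per job and report; a sketch, with placeholder arguments:

import sys

try:
    configure_devel_job(
        'https://example.com/buildfarm_config/index.yaml', 'melodic', 'default',
        'no_such_repo', 'ubuntu', 'bionic', 'amd64')
except JobValidationError as e:
    print(e.message, file=sys.stderr)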
Example #11
def configure_release_job(config_url,
                          rosdistro_name,
                          release_build_name,
                          pkg_name,
                          os_name,
                          os_code_name,
                          config=None,
                          build_file=None,
                          index=None,
                          dist_file=None,
                          dist_cache=None,
                          jenkins=None,
                          views=None,
                          generate_import_package_job=True,
                          generate_sync_packages_jobs=True,
                          is_disabled=False,
                          other_build_files_same_platform=None,
                          groovy_script=None,
                          filter_arches=None,
                          dry_run=False):
    """
    Configure a Jenkins release job.

    The following jobs are created for each package:
    - M source jobs, one for each OS node name
    - M * N binary jobs, one for each combination of OS code name and arch
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    pkg_names = dist_file.release_packages.keys()

    if pkg_name not in pkg_names:
        raise JobValidationError("Invalid package name '%s' " % pkg_name +
                                 'choose one of the following: ' +
                                 ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError("Repository '%s' has no release section" %
                                 repo_name)

    if not repo.release_repository.version:
        raise JobValidationError("Repository '%s' has no release version" %
                                 repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError("Invalid OS name '%s' " % os_name +
                                 'choose one of the following: ' +
                                 ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        targets = []
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
        configure_release_views(jenkins,
                                rosdistro_name,
                                release_build_name,
                                targets,
                                dry_run=dry_run)

    if generate_import_package_job:
        configure_import_package_job(config_url,
                                     rosdistro_name,
                                     release_build_name,
                                     config=config,
                                     build_file=build_file,
                                     jenkins=jenkins,
                                     dry_run=dry_run)

    if generate_sync_packages_jobs:
        configure_sync_packages_to_main_job(config_url,
                                            rosdistro_name,
                                            release_build_name,
                                            config=config,
                                            build_file=build_file,
                                            jenkins=jenkins,
                                            dry_run=dry_run)
        for arch in build_file.targets[os_name][os_code_name]:
            configure_sync_packages_to_testing_job(config_url,
                                                   rosdistro_name,
                                                   release_build_name,
                                                   os_code_name,
                                                   arch,
                                                   config=config,
                                                   build_file=build_file,
                                                   jenkins=jenkins,
                                                   dry_run=dry_run)

    source_job_names = []
    binary_job_names = []
    job_configs = {}

    # sourcedeb job
    # since sourcedeb jobs are potentially being shared across multiple build
    # files the configuration has to take all of them into account in order to
    # generate a job which all build files agree on
    source_job_name = get_sourcedeb_job_name(rosdistro_name,
                                             release_build_name, pkg_name,
                                             os_name, os_code_name)

    # while the package is disabled in the current build file
    # it might be used by sibling build files
    is_source_disabled = is_disabled
    if is_source_disabled and other_build_files_same_platform:
        # check if sourcedeb job is used by any other build file with the same platform
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                is_source_disabled = False
                break

    job_config = _get_sourcedeb_job_config(
        config_url,
        rosdistro_name,
        release_build_name,
        config,
        build_file,
        os_name,
        os_code_name,
        pkg_name,
        repo_name,
        repo.release_repository,
        dist_cache=dist_cache,
        is_disabled=is_source_disabled,
        other_build_files_same_platform=other_build_files_same_platform)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, source_job_name, job_config, dry_run=dry_run)
    source_job_names.append(source_job_name)
    job_configs[source_job_name] = job_config

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(pkg_name, dist_cache,
                                                    pkg_names)
        # if dependencies are not yet available in rosdistro cache
        # skip binary jobs
        if dependency_names is None:
            print(("Skipping binary jobs for package '%s' because it is not " +
                   "yet in the rosdistro cache") % pkg_name,
                  file=sys.stderr)
            return source_job_names, binary_job_names, job_configs

    # binarydeb jobs
    for arch in build_file.targets[os_name][os_code_name]:
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(rosdistro_name, release_build_name,
                                          pkg_name, os_name, os_code_name,
                                          arch)

        upstream_job_names = [source_job_name] + [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name, dependency_name, os_name,
                os_code_name, arch) for dependency_name in dependency_names
        ]

        job_config = _get_binarydeb_job_config(
            config_url,
            rosdistro_name,
            release_build_name,
            config,
            build_file,
            os_name,
            os_code_name,
            arch,
            pkg_name,
            repo_name,
            repo.release_repository,
            dist_cache=dist_cache,
            upstream_job_names=upstream_job_names,
            is_disabled=is_disabled)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config, dry_run=dry_run)
        binary_job_names.append(job_name)
        job_configs[job_name] = job_config

    return source_job_names, binary_job_names, job_configs
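
A hypothetical call; the returned job names and configs let a caller aggregate results across packages, as Example #13 does. All argument values are placeholders:

source_jobs, binary_jobs, job_configs = configure_release_job(
    'https://example.com/buildfarm_config/index.yaml', 'melodic', 'default',
    'roscpp', 'ubuntu', 'bionic',
    jenkins=False, views=[], dry_run=True)
print(source_jobs + binary_jobs)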
Example #12
    print('No previous distribution found.', file=sys.stderr)
    exit(-1)

if args.comparison:
    valid_comparison_keys = valid_distro_keys[:]
    valid_comparison_keys.remove(distro_key)
    if args.comparison not in valid_comparison_keys:
        print('Invalid rosdistro [%s] selected for comparison to [%s].' % (args.comparison, distro_key),
              file=sys.stderr)
        print('Valid rosdistros are %s.' % valid_comparison_keys, file=sys.stderr)
        exit(-1)
    prev_distro_key = args.comparison
else:
    prev_distro_key = valid_distro_keys[i - 1]

cache = rosdistro.get_distribution_cache(index, distro_key)
distro_file = cache.distribution_file

prev_cache = rosdistro.get_distribution_cache(index, prev_distro_key)
prev_distribution = rosdistro.get_cached_distribution(
    index, prev_distro_key, cache=prev_cache)

prev_distro_file = prev_cache.distribution_file

dependency_walker = DependencyWalker(prev_distribution)

if repo_names_argument is None:
    # Check missing dependencies for packages that were in the previous
    # distribution but have not yet been released in the current one.
    # Filter out repos without a version or a release repository.
    repo_names_argument = prev_distro_file.repositories.keys()
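
This excerpt sets up a comparison between two distributions; the typical next step, sketched here with the names from the excerpt, is the set difference between the previous and current release packages:

prev_pkgs = set(prev_distro_file.release_packages.keys())
curr_pkgs = set(distro_file.release_packages.keys())
not_yet_released = sorted(prev_pkgs - curr_pkgs)
print('%d packages not yet released in %s' % (len(not_yet_released), distro_key))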
Example #13
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name, groovy_script=None,
        dry_run=False, whitelist_package_names=None):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally a job to import Debian packages into the Debian repository is
    created.
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    platforms = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            platforms.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in platforms:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    pkg_names = dist_file.release_packages.keys()
    filtered_pkg_names = build_file.filter_packages(pkg_names)
    explicitly_ignored_pkg_names = set(pkg_names) - set(filtered_pkg_names)
    if explicitly_ignored_pkg_names:
        print(('The following packages are being %s because of ' +
               'white-/blacklisting:') %
              ('ignored' if build_file.skip_ignored_packages else 'disabled'))
        for pkg_name in sorted(explicitly_ignored_pkg_names):
            print('  -', pkg_name)

    dist_cache = None
    if build_file.notify_maintainers or \
            build_file.abi_incompatibility_assumed or \
            explicitly_ignored_pkg_names:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    if explicitly_ignored_pkg_names:
        # get direct dependencies from distro cache for each package
        direct_dependencies = {}
        for pkg_name in pkg_names:
            direct_dependencies[pkg_name] = _get_direct_dependencies(
                pkg_name, dist_cache, pkg_names) or set([])

        # find recursive downstream deps for all explicitly ignored packages
        ignored_pkg_names = set(explicitly_ignored_pkg_names)
        while True:
            implicitly_ignored_pkg_names = _get_downstream_package_names(
                ignored_pkg_names, direct_dependencies)
            if implicitly_ignored_pkg_names - ignored_pkg_names:
                ignored_pkg_names |= implicitly_ignored_pkg_names
                continue
            break
        implicitly_ignored_pkg_names = \
            ignored_pkg_names - explicitly_ignored_pkg_names

        if implicitly_ignored_pkg_names:
            print(('The following packages are being %s because their ' +
                   'dependencies are being ignored:') % ('ignored'
                  if build_file.skip_ignored_packages else 'disabled'))
            for pkg_name in sorted(implicitly_ignored_pkg_names):
                print('  -', pkg_name)
            filtered_pkg_names = \
                set(filtered_pkg_names) - implicitly_ignored_pkg_names

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    jenkins = connect(config.jenkins_url) if groovy_script is None else False

    all_view_configs = {}
    all_job_configs = {}

    job_name, job_config = configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins, dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    job_name, job_config = configure_sync_packages_to_main_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins, dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    for os_name, os_code_name in platforms:
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            job_name, job_config = configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins,
                dry_run=dry_run)
            if not jenkins:
                all_job_configs[job_name] = job_config

    targets = []
    for os_name, os_code_name in platforms:
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
    views = configure_release_views(
        jenkins, rosdistro_name, release_build_name, targets,
        dry_run=dry_run)
    if not jenkins:
        all_view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(views),
    }

    other_build_files = [v for k, v in build_files.items() if k != release_build_name]

    all_source_job_names = []
    all_binary_job_names = []
    for pkg_name in sorted(pkg_names):
        if whitelist_package_names:
            if pkg_name not in whitelist_package_names:
                print("Skipping package '%s' not in the explicitly passed list" %
                      pkg_name, file=sys.stderr)
                continue

        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        is_disabled = pkg_name not in filtered_pkg_names
        if is_disabled and build_file.skip_ignored_packages:
            print("Skipping ignored package '%s' in repository '%s'" %
                  (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name), file=sys.stderr)
            continue

        for os_name, os_code_name in platforms:
            other_build_files_same_platform = []
            for other_build_file in other_build_files:
                if os_name not in other_build_file.targets:
                    continue
                if os_code_name not in other_build_file.targets[os_name]:
                    continue
                other_build_files_same_platform.append(other_build_file)

            try:
                source_job_names, binary_job_names, job_configs = \
                    configure_release_job(
                        config_url, rosdistro_name, release_build_name,
                        pkg_name, os_name, os_code_name,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins, views=views,
                        generate_import_package_job=False,
                        generate_sync_packages_jobs=False,
                        is_disabled=is_disabled,
                        other_build_files_same_platform=other_build_files_same_platform,
                        groovy_script=groovy_script,
                        dry_run=dry_run)
                all_source_job_names += source_job_names
                all_binary_job_names += binary_job_names
                if groovy_script is not None:
                    print('Configuration for jobs: ' +
                          ', '.join(source_job_names + binary_job_names))
                    all_job_configs.update(job_configs)
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(all_job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    # with an explicit list of packages we don't delete obsolete jobs
    if not whitelist_package_names:
        # delete obsolete binary jobs
        for os_name, os_code_name in platforms:
            for arch in build_file.targets[os_name][os_code_name]:
                binary_view = get_release_binary_view_name(
                    rosdistro_name, release_build_name,
                    os_name, os_code_name, arch)
                binary_job_prefix = '%s__' % binary_view

                excluded_job_names = set([
                    j for j in all_binary_job_names
                    if j.startswith(binary_job_prefix)])
                if groovy_script is None:
                    print("Removing obsolete binary jobs with prefix '%s'" %
                          binary_job_prefix)
                    remove_jobs(
                        jenkins, binary_job_prefix, excluded_job_names,
                        dry_run=dry_run)
                else:
                    binary_key = 'binary_%s_%s_%s' % \
                        (os_name, os_code_name, arch)
                    groovy_data['job_prefixes_and_names'][binary_key] = \
                        (binary_job_prefix, excluded_job_names)

        # delete obsolete source jobs
        # requires knowledge about all other release build files
        for os_name, os_code_name in platforms:
            other_source_job_names = []
            # get source job names for all other release build files
            for other_release_build_name in [
                    k for k in build_files.keys() if k != release_build_name]:
                other_build_file = build_files[other_release_build_name]
                other_dist_file = get_distribution_file(
                    index, rosdistro_name, other_build_file)
                if not other_dist_file:
                    continue

                if os_name not in other_build_file.targets or \
                        os_code_name not in other_build_file.targets[os_name]:
                    continue

                if other_build_file.skip_ignored_packages:
                    filtered_pkg_names = other_build_file.filter_packages(
                        pkg_names)
                else:
                    filtered_pkg_names = pkg_names
                for pkg_name in sorted(filtered_pkg_names):
                    pkg = other_dist_file.release_packages[pkg_name]
                    repo_name = pkg.repository_name
                    repo = other_dist_file.repositories[repo_name]
                    if not repo.release_repository:
                        continue
                    if not repo.release_repository.version:
                        continue

                    other_job_name = get_sourcedeb_job_name(
                        rosdistro_name, other_release_build_name,
                        pkg_name, os_name, os_code_name)
                    other_source_job_names.append(other_job_name)

            source_view_prefix = get_release_source_view_name(
                rosdistro_name, os_name, os_code_name)
            source_job_prefix = '%s__' % source_view_prefix
            excluded_job_names = set([
                j for j in (all_source_job_names + other_source_job_names)
                if j.startswith(source_job_prefix)])
            if groovy_script is None:
                print("Removing obsolete source jobs with prefix '%s'" %
                      source_job_prefix)
                remove_jobs(
                    jenkins, source_job_prefix, excluded_job_names,
                    dry_run=dry_run)
            else:
                source_key = 'source_%s_%s' % (os_name, os_code_name)
                groovy_data['job_prefixes_and_names'][source_key] = (
                    source_job_prefix, excluded_job_names)

    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d views and %d jobs" %
            (groovy_script, len(all_view_configs), len(all_job_configs)))
        content = expand_template(
            'snippet/reconfigure_jobs.groovy.em', groovy_data)
        write_groovy_script_and_configs(
            groovy_script, content, all_job_configs,
            view_configs=all_view_configs)
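
The `while True` loop over _get_downstream_package_names above computes a transitive closure of downstream dependencies; a self-contained sketch of that fixed-point iteration, with a plausible stand-in for the helper (its real source is not shown here):

def downstream_package_names(pkg_names, direct_dependencies):
    # Packages whose direct dependencies intersect the given set.
    return set(
        pkg for pkg, deps in direct_dependencies.items()
        if deps & pkg_names)

# Placeholder dependency graph: each package maps to its direct dependencies.
direct_dependencies = {'a': set(), 'b': {'a'}, 'c': {'b'}, 'd': set()}
ignored = {'a'}
while True:
    more = downstream_package_names(ignored, direct_dependencies)
    if more - ignored:
        ignored |= more
        continue
    break
print(sorted(ignored))  # ['a', 'b', 'c']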
Example #14
def configure_devel_jobs(
        config_url, rosdistro_name, source_build_name, groovy_script=None):
    """
    Configure all Jenkins devel jobs.

    L{configure_devel_job} will be invoked for every source repository and
    target which matches the build file criteria.
    """
    config = get_config_index(config_url)
    build_files = get_source_build_files(config, rosdistro_name)
    build_file = build_files[source_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print('  -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    devel_view_name = get_devel_view_name(
        rosdistro_name, source_build_name, pull_request=False)
    pull_request_view_name = get_devel_view_name(
        rosdistro_name, source_build_name, pull_request=True)

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    views = []
    if build_file.test_commits_force is not False:
        views.append(configure_devel_view(jenkins, devel_view_name))
    if build_file.test_pull_requests_force is not False:
        views.append(configure_devel_view(jenkins, pull_request_view_name))

    if groovy_script is not None:
        # all further configuration will be handled by the groovy script
        jenkins = False

    repo_names = dist_file.repositories.keys()
    filtered_repo_names = build_file.filter_repositories(repo_names)

    devel_job_names = []
    pull_request_job_names = []
    job_configs = {}
    for repo_name in sorted(repo_names):
        is_disabled = repo_name not in filtered_repo_names
        if is_disabled and build_file.skip_ignored_repositories:
            print("Skipping ignored repository '%s'" % repo_name,
                  file=sys.stderr)
            continue

        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            print("Skipping repository '%s': no source section" % repo_name)
            continue
        if not repo.source_repository.version:
            print("Skipping repository '%s': no source version" % repo_name)
            continue

        job_types = []
        # check for testing commits
        if build_file.test_commits_force is False:
            print(("Skipping repository '%s': 'test_commits' is forced to " +
                   "false in the build file") % repo_name)
        elif repo.source_repository.test_commits is False:
            print(("Skipping repository '%s': 'test_commits' of the " +
                   "repository set to false") % repo_name)
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            print(("Skipping repository '%s': 'test_commits' defaults to " +
                   "false in the build file") % repo_name)
        else:
            job_types.append('commit')

        if not is_disabled:
            # check for testing pull requests
            if build_file.test_pull_requests_force is False:
                # print(("Skipping repository '%s': 'test_pull_requests' " +
                #        "is forced to false in the build file") % repo_name)
                pass
            elif repo.source_repository.test_pull_requests is False:
                # print(("Skipping repository '%s': 'test_pull_requests' of " +
                #        "the repository set to false") % repo_name)
                pass
            elif repo.source_repository.test_pull_requests is None and \
                    not build_file.test_pull_requests_default:
                # print(("Skipping repository '%s': 'test_pull_requests' " +
                #        "defaults to false in the build file") % repo_name)
                pass
            else:
                print("Pull request job for repository '%s'" % repo_name)
                job_types.append('pull_request')

        for job_type in job_types:
            pull_request = job_type == 'pull_request'
            for os_name, os_code_name, arch in targets:
                try:
                    job_name, job_config = configure_devel_job(
                        config_url, rosdistro_name, source_build_name,
                        repo_name, os_name, os_code_name, arch, pull_request,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache, jenkins=jenkins, views=views,
                        is_disabled=is_disabled,
                        groovy_script=groovy_script)
                    if not pull_request:
                        devel_job_names.append(job_name)
                    else:
                        pull_request_job_names.append(job_name)
                    if groovy_script is not None:
                        print("Configuration for job '%s'" % job_name)
                        job_configs[job_name] = job_config
                except JobValidationError as e:
                    print(e.message, file=sys.stderr)

    devel_job_prefix = '%s__' % devel_view_name
    pull_request_job_prefix = '%s__' % pull_request_view_name
    if groovy_script is None:
        # delete obsolete jobs in these views
        from ros_buildfarm.jenkins import remove_jobs
        print('Removing obsolete devel jobs')
        remove_jobs(jenkins, devel_job_prefix, devel_job_names)
        print('Removing obsolete pull request jobs')
        remove_jobs(
            jenkins, pull_request_job_prefix, pull_request_job_names)
    else:
        print("Writing groovy script '%s' to reconfigure %d jobs" %
              (groovy_script, len(job_configs)))
        data = {
            'expected_num_jobs': len(job_configs),
            'job_prefixes_and_names': {
                'devel': (devel_job_prefix, devel_job_names),
                'pull_request': (
                    pull_request_job_prefix, pull_request_job_names),
            }
        }
        content = expand_template('snippet/reconfigure_jobs.groovy.em', data)
        write_groovy_script_and_configs(
            groovy_script, content, job_configs)
Example #15
0
def configure_release_jobs(config_url,
                           rosdistro_name,
                           release_build_name,
                           groovy_script=None):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally, a job to import Debian packages into the Debian repository
    is created.
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    platforms = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            platforms.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in platforms:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))
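    # hypothetical output:
    #   The build file contains the following targets:
    #     - ubuntu focal: amd64, arm64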

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    pkg_names = dist_file.release_packages.keys()
    filtered_pkg_names = build_file.filter_packages(pkg_names)
    explicitly_ignored_pkg_names = set(pkg_names) - set(filtered_pkg_names)
    if explicitly_ignored_pkg_names:
        print(('The following packages are being %s because of ' +
               'white-/blacklisting:') %
              ('ignored' if build_file.skip_ignored_packages else 'disabled'))
        for pkg_name in sorted(explicitly_ignored_pkg_names):
            print('  -', pkg_name)

    dist_cache = None
    if build_file.notify_maintainers or \
            build_file.abi_incompatibility_assumed or \
            explicitly_ignored_pkg_names:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    if explicitly_ignored_pkg_names:
        # get direct dependencies from distro cache for each package
        direct_dependencies = {}
        for pkg_name in pkg_names:
            direct_dependencies[pkg_name] = _get_direct_dependencies(
                pkg_name, dist_cache, pkg_names) or set([])

        # find recursive downstream deps for all explicitly ignored packages
        ignored_pkg_names = set(explicitly_ignored_pkg_names)
        while True:
            implicitly_ignored_pkg_names = _get_downstream_package_names(
                ignored_pkg_names, direct_dependencies)
            if implicitly_ignored_pkg_names - ignored_pkg_names:
                ignored_pkg_names |= implicitly_ignored_pkg_names
                continue
            break
        implicitly_ignored_pkg_names = \
            ignored_pkg_names - explicitly_ignored_pkg_names
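        # worked example (hypothetical): if 'foo' is explicitly ignored and
        # 'bar' depends on 'foo', the first pass adds 'bar'; the next pass
        # adds anything depending on 'bar', until the set stops growing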

        if implicitly_ignored_pkg_names:
            print(('The following packages are being %s because their ' +
                   'dependencies are being ignored:') %
                  ('ignored'
                   if build_file.skip_ignored_packages else 'disabled'))
            for pkg_name in sorted(implicitly_ignored_pkg_names):
                print('  -', pkg_name)
            filtered_pkg_names = \
                set(filtered_pkg_names) - implicitly_ignored_pkg_names

    jenkins = connect(config.jenkins_url)

    configure_import_package_job(config_url,
                                 rosdistro_name,
                                 release_build_name,
                                 config=config,
                                 build_file=build_file,
                                 jenkins=jenkins)

    configure_sync_packages_to_main_job(config_url,
                                        rosdistro_name,
                                        release_build_name,
                                        config=config,
                                        build_file=build_file,
                                        jenkins=jenkins)
    for os_name, os_code_name in platforms:
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            configure_sync_packages_to_testing_job(config_url,
                                                   rosdistro_name,
                                                   release_build_name,
                                                   os_code_name,
                                                   arch,
                                                   config=config,
                                                   build_file=build_file,
                                                   jenkins=jenkins)

    targets = []
    for os_name, os_code_name in platforms:
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
    views = configure_release_views(jenkins, rosdistro_name,
                                    release_build_name, targets)

    if groovy_script is not None:
        # all further configuration will be handled by the groovy script
        jenkins = False

    all_source_job_names = []
    all_binary_job_names = []
    all_job_configs = {}
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        is_disabled = pkg_name not in filtered_pkg_names
        if is_disabled and build_file.skip_ignored_packages:
            print("Skipping ignored package '%s' in repository '%s'" %
                  (pkg_name, repo_name),
                  file=sys.stderr)
            continue
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name),
                  file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name),
                  file=sys.stderr)
            continue

        for os_name, os_code_name in platforms:
            try:
                source_job_names, binary_job_names, job_configs = \
                    configure_release_job(
                        config_url, rosdistro_name, release_build_name,
                        pkg_name, os_name, os_code_name,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins, views=views,
                        generate_import_package_job=False,
                        generate_sync_packages_jobs=False,
                        is_disabled=is_disabled,
                        groovy_script=groovy_script)
                all_source_job_names += source_job_names
                all_binary_job_names += binary_job_names
                if groovy_script is not None:
                    print('Configuration for jobs: ' +
                          ', '.join(source_job_names + binary_job_names))
                    all_job_configs.update(job_configs)
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    groovy_data = {
        'expected_num_jobs': len(all_job_configs),
        'job_prefixes_and_names': {},
    }

    # delete obsolete binary jobs
    for os_name, os_code_name in platforms:
        for arch in build_file.targets[os_name][os_code_name]:
            binary_view = get_release_binary_view_name(rosdistro_name,
                                                       release_build_name,
                                                       os_name, os_code_name,
                                                       arch)
            binary_job_prefix = '%s__' % binary_view

            excluded_job_names = set([
                j for j in all_binary_job_names
                if j.startswith(binary_job_prefix)
            ])
            if groovy_script is None:
                print("Removing obsolete binary jobs with prefix '%s'" %
                      binary_job_prefix)
                remove_jobs(jenkins, binary_job_prefix, excluded_job_names)
            else:
                binary_key = 'binary_%s_%s_%s' % (os_name, os_code_name, arch)
                groovy_data['job_prefixes_and_names'][binary_key] = \
                    (binary_job_prefix, excluded_job_names)

    # delete obsolete source jobs
    # requires knowledge about all other release build files
    for os_name, os_code_name in platforms:
        other_source_job_names = []
        # get source job names for all other release build files
        for other_release_build_name in [
                k for k in build_files.keys() if k != release_build_name
        ]:
            other_build_file = build_files[other_release_build_name]
            other_dist_file = get_distribution_file(index, rosdistro_name,
                                                    other_build_file)
            if not other_dist_file:
                continue

            if os_name not in other_build_file.targets or \
                    os_code_name not in other_build_file.targets[os_name]:
                continue

            if other_build_file.skip_ignored_packages:
                filtered_pkg_names = other_build_file.filter_packages(
                    pkg_names)
            else:
                filtered_pkg_names = pkg_names
            for pkg_name in sorted(filtered_pkg_names):
                pkg = other_dist_file.release_packages[pkg_name]
                repo_name = pkg.repository_name
                repo = other_dist_file.repositories[repo_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue

                other_job_name = get_sourcedeb_job_name(
                    rosdistro_name, other_release_build_name, pkg_name,
                    os_name, os_code_name)
                other_source_job_names.append(other_job_name)

        source_view_prefix = get_release_source_view_name(
            rosdistro_name, os_name, os_code_name)
        source_job_prefix = '%s__' % source_view_prefix
        excluded_job_names = set([
            j for j in (all_source_job_names + other_source_job_names)
            if j.startswith(source_job_prefix)
        ])
        if groovy_script is None:
            print("Removing obsolete source jobs with prefix '%s'" %
                  source_job_prefix)
            remove_jobs(jenkins, source_job_prefix, excluded_job_names)
        else:
            source_key = 'source_%s_%s' % (os_name, os_code_name)
            groovy_data['job_prefixes_and_names'][source_key] = (
                source_job_prefix, excluded_job_names)

    if groovy_script is not None:
        print("Writing groovy script '%s' to reconfigure %d jobs" %
              (groovy_script, len(all_job_configs)))
        content = expand_template('snippet/reconfigure_jobs.groovy.em',
                                  groovy_data)
        write_groovy_script_and_configs(groovy_script, content,
                                        all_job_configs)
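
A minimal invocation sketch (the config URL and build name are hypothetical; when groovy_script is set, the function writes a reconfiguration script instead of modifying Jenkins directly):

configure_release_jobs(
    'https://example.com/buildfarm_config/index.yaml',  # hypothetical URL
    'noetic',      # rosdistro name
    'default',     # release build name
    groovy_script='/tmp/reconfigure_jobs.groovy')
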
Example #16
0
def configure_release_job_with_validation(
        config_url, rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name, append_timestamp=False,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None,
        generate_import_package_job=True,
        filter_arches=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    if pkg_name not in pkg_names:
        raise JobValidationError(
            "Invalid package name '%s' " % pkg_name +
            'choose one of the following: ' + ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError(
            "Repository '%s' has no release section" % repo_name)

    if not repo.release_repository.version:
        raise JobValidationError(
            "Repository '%s' has no release version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_release_view_name(rosdistro_name, release_build_name)
        configure_release_view(jenkins, view_name)

    if generate_import_package_job:
        configure_import_package_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins)

    # sourcedeb job
    job_name = get_sourcedeb_job_name(
        rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)

    job_config = _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name, _get_target_arches(
            build_file, os_name, os_code_name, print_skipped=False),
        repo.release_repository, pkg_name,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config)

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(
            pkg_name, dist_cache, pkg_names)
        if dependency_names is None:
            return

    # binarydeb jobs
    for arch in _get_target_arches(build_file, os_name, os_code_name):
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)

        upstream_job_names = [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name,
                dependency_name, os_name, os_code_name, arch)
            for dependency_name in dependency_names]

        job_config = _get_binarydeb_job_config(
            config_url, rosdistro_name, release_build_name,
            config, build_file, os_name, os_code_name, arch,
            repo.release_repository, pkg_name, append_timestamp,
            repo_name, dist_cache=dist_cache,
            upstream_job_names=upstream_job_names)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config)
Example #17
0
def test_get_release_cache():
    url = 'file://' + FILES_DIR + '/index_v2.yaml'
    i = get_index(url)
    get_distribution_cache(i, 'foo')
def get_sourcedeb(
        rosdistro_index_url, rosdistro_name, package_name, sourcedeb_dir,
        skip_download_sourcedeb=False):
    # ensure that no source subfolder exists
    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert not subfolders, \
        ("Sourcedeb directory '%s' must not have any " +
         "subfolders starting with '%s-'") % (sourcedeb_dir, package_name)

    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    if not skip_download_sourcedeb:
        # get expected package version from rosdistro
        from rosdistro import get_distribution_cache
        from rosdistro import get_index
        index = get_index(rosdistro_index_url)
        dist_cache = get_distribution_cache(index, rosdistro_name)
        dist_file = dist_cache.distribution_file
        assert package_name in dist_file.release_packages
        pkg = dist_file.release_packages[package_name]
        repo = dist_file.repositories[pkg.repository_name]
        package_version = repo.release_repository.version

        # get the exact sourcedeb version
        showsrc_output = subprocess.check_output([
            'apt-cache', 'showsrc', debian_package_name]).decode()
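        # a matching stanza line looks like (hypothetical version suffix):
        #   Version: 1.2.3-1focal.20240101.000000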
        line_prefix = 'Version: '
        debian_package_versions = [
            l[len(line_prefix):] for l in showsrc_output.splitlines()
            if l.startswith(line_prefix + package_version)]
        assert len(debian_package_versions) == 1, \
            "Failed to find sourcedeb with version '%s', only found: %s" % \
            (package_version, ', '.join(debian_package_versions))

        # download sourcedeb
        apt_script = os.path.join(
            os.path.dirname(__file__), 'wrapper', 'apt.py')
        cmd = [
            sys.executable, apt_script,
            'source', '--download-only', '--only-source',
            debian_package_name + '=' + debian_package_versions[0]]
        print("Invoking '%s'" % ' '.join(cmd))
        subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # extract sourcedeb
    filenames = _get_package_dsc_filename(sourcedeb_dir, debian_package_name)
    assert len(filenames) == 1, filenames
    dsc_filename = filenames[0]
    cmd = ['dpkg-source', '-x', dsc_filename]
    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # ensure that one source subfolder exists
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(source_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
Example #19
0
def _get_blocked_releases_info(config_url, rosdistro_name, repo_names=None):
    import rosdistro
    from rosdistro.dependency_walker import DependencyWalker
    from catkin_pkg.package import InvalidPackage, parse_package_string

    prev_rosdistro_name = None

    config = get_config_index(config_url)

    index = rosdistro.get_index(config.rosdistro_index_url)
    valid_rosdistro_names = list(index.distributions.keys())
    valid_rosdistro_names.sort()
    if rosdistro_name is None:
        rosdistro_name = valid_rosdistro_names[-1]
    print('Checking packages for "%s" distribution' % rosdistro_name)

    # Find the previous distribution to the current one
    try:
        i = valid_rosdistro_names.index(rosdistro_name)
    except ValueError:
        print('Distribution key not found in list of valid distributions.',
              file=sys.stderr)
        exit(-1)
    if i == 0:
        print('No previous distribution found.', file=sys.stderr)
        exit(-1)
    prev_rosdistro_name = valid_rosdistro_names[i - 1]
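    # e.g. (hypothetical): with valid names ['kinetic', 'lunar', 'melodic']
    # and rosdistro_name 'melodic', the previous distribution is 'lunar'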

    cache = rosdistro.get_distribution_cache(index, rosdistro_name)
    distro_file = cache.distribution_file

    prev_cache = rosdistro.get_distribution_cache(index, prev_rosdistro_name)
    prev_distribution = rosdistro.get_cached_distribution(index,
                                                          prev_rosdistro_name,
                                                          cache=prev_cache)

    prev_distro_file = prev_cache.distribution_file

    dependency_walker = DependencyWalker(prev_distribution)

    if repo_names is None:
        # Check missing dependencies for packages that were in the previous
        # distribution that have not yet been released in the current distribution
        # Filter repos without a version or a release repository
        keys = prev_distro_file.repositories.keys()
        prev_repo_names = set(repo for repo in keys
                              if _is_released(repo, prev_distro_file))
        repo_names = prev_repo_names
        ignored_inputs = []
    else:
        prev_repo_names = set(repo for repo in repo_names
                              if _is_released(repo, prev_distro_file))
        ignored_inputs = list(set(repo_names).difference(prev_repo_names))
        if len(ignored_inputs) > 0:
            print(
                'Ignoring inputs for which repository info was not found in '
                'the previous distribution '
                '(did you list a package instead of a repository?):')
            print('\n'.join(
                sorted('\t{0}'.format(repo) for repo in ignored_inputs)))

    keys = distro_file.repositories.keys()
    current_repo_names = set(repo for repo in keys
                             if _is_released(repo, distro_file))

    # Get a list of currently released packages
    current_package_names = set(
        pkg for repo in current_repo_names for pkg in
        distro_file.repositories[repo].release_repository.package_names)

    released_repos = prev_repo_names.intersection(current_repo_names)

    unreleased_repos = list(prev_repo_names.difference(current_repo_names))

    if len(unreleased_repos) == 0:
        print('All inputs already released in {0}.'.format(rosdistro_name))

    repos_info = defaultdict(dict)
    unprocessed_repos = prev_repo_names
    while unprocessed_repos:
        print('Processing repos:\n%s' %
              '\n'.join(['- %s' % r for r in sorted(unprocessed_repos)]))
        # set containing repos that come up while processing others
        new_repos_to_process = set()

        for repo_name in unprocessed_repos:
            repos_info[repo_name]['released'] = repo_name in released_repos

            if repo_name in released_repos:
                repo = distro_file.repositories[repo_name]
                version = repo.release_repository.version
                repos_info[repo_name]['version'] = version

            else:
                # Gather info on which required repos have not been released yet
                # Assume dependencies will be the same as in the previous distribution and find
                # which ones have been released
                repo = prev_distro_file.repositories[repo_name]
                release_repo = repo.release_repository
                package_dependencies = set()
                packages = release_repo.package_names
                # Accumulate all dependencies for those packages
                for package in packages:
                    try:
                        package_dependencies |= dependency_walker.get_recursive_depends(
                            package, ['build', 'buildtool', 'run', 'test'],
                            ros_packages_only=True,
                            limit_depth=1)
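                        # limit_depth=1 restricts the walk to direct
                        # dependencies (assumed semantics of
                        # DependencyWalker.get_recursive_depends)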
                    except AssertionError as e:
                        print(e, file=sys.stderr)

                # For all package dependencies, check if they are released yet
                unreleased_pkgs = package_dependencies.difference(
                    current_package_names)
                # Remove the packages which this repo provides
                unreleased_pkgs = unreleased_pkgs.difference(packages)

                # Get maintainer info and repo of unreleased packages
                maintainers = {}
                repos_blocked_by = set()
                for pkg_name in unreleased_pkgs:
                    unreleased_repo_name = \
                        prev_distro_file.release_packages[pkg_name].repository_name
                    repos_blocked_by.add(unreleased_repo_name)
                    pkg_xml = prev_distribution.get_release_package_xml(
                        pkg_name)
                    if pkg_xml is not None:
                        try:
                            pkg = parse_package_string(pkg_xml)
                        except InvalidPackage:
                            pass
                        else:
                            pkg_maintainers = {
                                m.name: m.email
                                for m in pkg.maintainers
                            }
                            if unreleased_repo_name not in maintainers:
                                maintainers[unreleased_repo_name] = {}
                            maintainers[unreleased_repo_name].update(
                                pkg_maintainers)
                if maintainers:
                    repos_info[repo_name]['maintainers'] = maintainers

                repos_info[repo_name]['repos_blocked_by'] = {}
                for blocking_repo_name in repos_blocked_by:
                    # Get url of blocking repos
                    repo_url = None
                    blocking_repo = prev_distro_file.repositories[
                        blocking_repo_name]
                    if blocking_repo.source_repository:
                        repo_url = blocking_repo.source_repository.url
                    elif blocking_repo.doc_repository:
                        repo_url = blocking_repo.doc_repository.url
                    repos_info[repo_name]['repos_blocked_by'].update(
                        {blocking_repo_name: repo_url})

                    # Mark blocking relationship in other direction
                    if blocking_repo_name not in repos_info:
                        new_repos_to_process.add(blocking_repo_name)
                        repos_info[blocking_repo_name] = {}
                    if 'repos_blocking' not in repos_info[blocking_repo_name]:
                        repos_info[blocking_repo_name]['repos_blocking'] = set(
                            [])
                    repos_info[blocking_repo_name]['repos_blocking'].add(
                        repo_name)

            # Get url of repo
            repo_url = None
            if repo.source_repository:
                repo_url = repo.source_repository.url
            elif repo.doc_repository:
                repo_url = repo.doc_repository.url
            if repo_url:
                repos_info[repo_name]['url'] = repo_url

            # this repo has been fully processed now
            new_repos_to_process.discard(repo_name)

        for repo_name in repos_info.keys():
            # Recursively get all repos being blocked by this repo
            recursive_blocks = set([])
            repos_to_check = set([repo_name])
            while repos_to_check:
                next_repo_to_check = repos_to_check.pop()
                blocks = repos_info[next_repo_to_check].get(
                    'repos_blocking', set([]))
                new_blocks = blocks - recursive_blocks
                repos_to_check |= new_blocks
                recursive_blocks |= new_blocks
            if recursive_blocks:
                repos_info[repo_name][
                    'recursive_repos_blocking'] = recursive_blocks
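            # e.g. (hypothetical): if repo A blocks B and B blocks C, the
            # traversal yields recursive_repos_blocking == {'B', 'C'} for A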
        unprocessed_repos = new_repos_to_process

    return repos_info
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_output_dir(parser, required=True)

    group = parser.add_argument_group(
        'Repositories in underlay workspace',
        description='The repositories in the underlay workspace will be ' +
                    'built and installed as well as built and tested. ' +
                    'Dependencies will be provided by binary packages.')
    group.add_argument(
        'source_repos',
        nargs='*',
        default=[],
        metavar='REPO_NAME',
        help="A name of a 'repository' from the distribution file")
    group.add_argument(
        '--custom-branch',
        nargs='*',
        type=_repository_name_and_branch,
        default=[],
        metavar='REPO_NAME:BRANCH_OR_TAG_NAME',
        help="A name of a 'repository' from the distribution file followed " +
             'by a colon and a branch / tag name')
    group.add_argument(
        '--custom-repo',
        nargs='*',
        type=_repository_name_and_type_and_url_and_branch,
        default=[],
        metavar='REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME',
        help='The name, type, url and branch / tag name of a repository')

    add_overlay_arguments(parser)

    args = parser.parse_args(argv)

    print('Fetching buildfarm configuration...')
    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    print('Fetching rosdistro cache...')
    # Targets defined by source build file are subset of targets
    # defined by release build files. To increase the number of supported
    # pre-release targets, we combine all targets defined by all release
    # build files and use that when configuring the devel job.
    release_build_files = get_release_build_files(config, args.rosdistro_name)
    release_targets_combined = {}
    if release_build_files:
        release_targets_combined[args.os_name] = {}
        for build_name, rel_obj in release_build_files.items():
            if args.os_name not in rel_obj.targets:
                continue
            for dist_name, targets in rel_obj.targets[args.os_name].items():
                if dist_name not in release_targets_combined[args.os_name]:
                    release_targets_combined[args.os_name][dist_name] = {}
                release_targets_combined[args.os_name][dist_name].update(targets)
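    # the combined mapping mirrors a build file 'targets' section, e.g.
    # (hypothetical shape): {'ubuntu': {'focal': {'amd64': {}, 'arm64': {}}}}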

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for underlay workspace
    repositories = {}
    for repo_name in args.source_repos:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            repositories[repo_name] = \
                dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1

    for repo_name, custom_version in args.custom_branch:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            source_repo = dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        source_repo = deepcopy(source_repo)
        source_repo.version = custom_version
        repositories[repo_name] = source_repo

    for repo_name, repo_type, repo_url, version in args.custom_repo:
        if repo_name in repositories and repositories[repo_name]:
            print("custom_repos option overriding '%s' to pull via '%s' "
                  "from '%s' with version '%s'. " %
                  (repo_name, repo_type, repo_url, version),
                  file=sys.stderr)
        source_repo = RepositorySpecification(
            repo_name, {
                'type': repo_type,
                'url': repo_url,
                'version': version,
            })
        repositories[repo_name] = source_repo

    scms = [(repositories[k], 'catkin_workspace/src/%s' % k)
            for k in sorted(repositories.keys())]
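    # each entry pairs a repository specification with its checkout path
    # inside the underlay workspace, e.g. (hypothetical):
    #   (<source repo 'foo'>, 'catkin_workspace/src/foo')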

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeInclude(self, *args, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]
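    # from here on, every include of snippet/builder_shell.xml.em during
    # template expansion is captured, so the generated shell scripts can be
    # replayed outside of Jenkins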

    # use an arbitrary source repo to pass to the devel job template
    repo_name = list(repositories.keys())[0]
    source_repository = deepcopy(repositories[repo_name])
    if not source_repository:
        print(("The repository '%s' does not have a source entry in the " +
               'distribution file. We cannot generate a prerelease without ' +
               'a source entry.') % repo_name,
              file=sys.stderr)
        return 1
    source_repository.name = 'prerelease'
    print('Evaluating job templates...')
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        None, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        index=index, dist_file=dist_file, dist_cache=dist_cache,
        jenkins=False, views=False,
        source_repository=source_repository,
        build_targets=release_targets_combined)

    templates.template_hooks = None

    # derive scripts for overlay workspace from underlay
    overlay_scripts = []
    for script in hook.scripts:
        # skip cloning of ros_buildfarm repository
        if 'git clone' in script and '.git ros_buildfarm' in script:
            continue
        # skip build-and-install step
        if 'build and install' in script:
            continue

        # add prerelease overlay flag
        run_devel_job = '/run_devel_job.py'
        if run_devel_job in script:
            script = script.replace(
                run_devel_job, run_devel_job + ' --prerelease-overlay')

        # replace mounted workspace volume with overlay and underlay
        # used by:
        # - create_devel_task_generator.py needs to find packages in both
        #   the underlay as well as the overlay workspace
        # - catkin_make_isolated_and_test.py needs to source the environment
        #   of the underlay before building the overlay
        mount_volume = '-v $WORKSPACE/catkin_workspace:/tmp/catkin_workspace'
        if mount_volume in script:
            script = script.replace(
                mount_volume, mount_volume + ':ro ' + '-v $WORKSPACE/' +
                'catkin_workspace_overlay:/tmp/catkin_workspace_overlay')

        # relocate all docker files
        docker_path = '$WORKSPACE/docker_'
        if docker_path in script:
            script = script.replace(
                docker_path, docker_path + 'overlay_')

        # rename all docker images
        name_suffix = '_prerelease'
        if name_suffix in script:
            script = script.replace(
                name_suffix, name_suffix + '_overlay')

        overlay_scripts.append(script)

    from ros_buildfarm import __file__ as ros_buildfarm_file
    data = deepcopy(args.__dict__)
    data.update({
        'scms': scms,
        'scripts': hook.scripts,
        'overlay_scripts': overlay_scripts,
        'ros_buildfarm_python_path': os.path.dirname(
            os.path.dirname(os.path.abspath(ros_buildfarm_file))),
        'python_executable': sys.executable,
        'prerelease_script_path': os.path.dirname(os.path.abspath(__file__))})

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # generate multiple scripts
    for script_name in [
            'prerelease',
            'prerelease_build_overlay',
            'prerelease_build_underlay',
            'prerelease_clone_overlay',
            'prerelease_clone_underlay']:
        content = expand_template(
            'prerelease/%s_script.sh.em' % script_name, data,
            options={BANGPATH_OPT: False})
        script_file = os.path.join(args.output_dir, script_name + '.sh')
        with open(script_file, 'w') as h:
            h.write(content)
        os.chmod(script_file, os.stat(script_file).st_mode | stat.S_IEXEC)

    print('')
    print('Generated prerelease script - to execute it run:')
    if os.path.abspath(args.output_dir) != os.path.abspath(os.curdir):
        print('  cd %s' % args.output_dir)
    print('  ./prerelease.sh')
def main(argv=sys.argv[1:]):
    global templates
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease overlay' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_overlay_arguments(parser)
    parser.add_argument(
        '--underlay-packages', nargs='+',
        help='Names of packages on which the overlay builds '
             '(by default package names come from packages found in '
             "'catkin_workspace/src')"
    )
    parser.add_argument(
        '--json', action='store_true',
        help='Output overlay information as JSON instead of a shell script'
    )

    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for overlay workspace
    underlay_package_names = args.underlay_packages
    if underlay_package_names is None:
        packages = find_packages('catkin_workspace/src')
        underlay_package_names = [pkg.name for pkg in packages.values()]
    print('Underlay workspace contains %d packages:%s' %
          (len(underlay_package_names),
           ''.join(['\n- %s' % pkg_name
                    for pkg_name in sorted(underlay_package_names)])),
          file=sys.stderr)

    overlay_package_names = get_overlay_package_names(
        args.pkg, args.exclude_pkg, args.level,
        underlay_package_names, dist_cache.release_package_xmls, output=True)
    print('Overlay workspace will contain %d packages:%s' %
          (len(overlay_package_names),
           ''.join(['\n- %s' % pkg_name
                    for pkg_name in sorted(overlay_package_names)])),
          file=sys.stderr)

    repositories = {}
    for pkg_name in overlay_package_names:
        repositories[pkg_name] = \
            get_repository_specification_for_released_package(
                dist_file, pkg_name)
    scms = [
        (repositories[k], 'catkin_workspace_overlay/src/%s' % k)
        for k in sorted(repositories.keys())]

    if not args.json:
        value = expand_template(
            'prerelease/prerelease_overlay_script.sh.em', {
                'scms': scms},
            options={BANGPATH_OPT: False})
        print(value)
    else:
        print(json.dumps([vars(r) for r, p in scms], sort_keys=True, indent=2))
Example #22
0
if args.comparison:
    valid_comparison_keys = valid_distro_keys[:]
    valid_comparison_keys.remove(distro_key)
    if args.comparison not in valid_comparison_keys:
        print('Invalid rosdistro [%s] selected for comparison to [%s].' %
              (args.comparison, distro_key),
              file=sys.stderr)
        print('Valid rosdistros are %s.' % valid_comparison_keys,
              file=sys.stderr)
        exit(-1)
    prev_distro_key = args.comparison
else:
    prev_distro_key = valid_distro_keys[i - 1]

cache = rosdistro.get_distribution_cache(index, distro_key)
distro_file = cache.distribution_file

prev_cache = rosdistro.get_distribution_cache(index, prev_distro_key)
prev_distribution = rosdistro.get_cached_distribution(index,
                                                      prev_distro_key,
                                                      cache=prev_cache)

prev_distro_file = prev_cache.distribution_file

dependency_walker = DependencyWalker(prev_distribution)

if repo_names_argument is None:
    # Check missing dependencies for packages that were in the previous
    # distribution that have not yet been released in the current distribution
    # Filter repos without a version or a release repository
Example #23
0
def get_blocking_info(distro_key, repo_names, depth):
    prev_distro_key = None

    index = rosdistro.get_index(rosdistro.get_index_url())
    valid_distro_keys = sorted(index.distributions.keys())
    if distro_key is None:
        distro_key = valid_distro_keys[-1]
    print('Checking packages for "%s" distribution' % distro_key)

    # Find the previous distribution to the current one
    try:
        i = valid_distro_keys.index(distro_key)
    except ValueError:
        print('Distribution key not found in list of valid distributions.')
        exit(-1)
    if i == 0:
        print('No previous distribution found.')
        exit(-1)
    prev_distro_key = valid_distro_keys[i - 1]

    cache = rosdistro.get_distribution_cache(index, distro_key)
    distro_file = cache.distribution_file

    prev_cache = rosdistro.get_distribution_cache(index, prev_distro_key)
    prev_distribution = rosdistro.get_cached_distribution(
        index, prev_distro_key, cache=prev_cache)

    prev_distro_file = prev_cache.distribution_file

    dependency_walker = DependencyWalker(prev_distribution)

    if repo_names is None:
        # Check missing dependencies for packages that were in the previous
        # distribution that have not yet been released in the current distribution
        # Filter repos without a version or a release repository
        keys = prev_distro_file.repositories.keys()
        prev_repo_names = set(
            repo for repo in keys if is_released(repo, prev_distro_file))
        repo_names = prev_repo_names
        ignored_inputs = []
    else:
        prev_repo_names = set(
            repo for repo in repo_names if is_released(repo, prev_distro_file))
        ignored_inputs = list(set(repo_names).difference(prev_repo_names))
        if len(ignored_inputs) > 0:
            print('Ignoring inputs for which repository info was not found in '
                  'the previous distribution '
                  '(did you list a package instead of a repository?):')
            print('\n'.join(
                sorted('\t{0}'.format(repo) for repo in ignored_inputs)))

    keys = distro_file.repositories.keys()
    current_repo_names = set(
        repo for repo in keys if is_released(repo, distro_file))

    released_repos = prev_repo_names.intersection(current_repo_names)

    unreleased_repos = list(prev_repo_names.difference(current_repo_names))

    # Get a list of currently released packages
    current_package_names = set(
        pkg for repo in current_repo_names
        for pkg in distro_file.repositories[repo].release_repository.package_names)

    # Construct a dictionary where keys are repository names and values are a list
    # of the repos blocking/blocked by that repo
    blocked_repos = {}
    blocking_repos = {}
    unblocked_blocking_repos = set()

    if len(unreleased_repos) == 0:
        print('All inputs already released in {0}.'.format(
            distro_key))

    # Process repo dependencies
    unblocked_repos = set()
    total_blocking_repos = set()

    for repository_name in unreleased_repos:
        repo = prev_distro_file.repositories[repository_name]
        release_repo = repo.release_repository
        package_dependencies = set()
        packages = release_repo.package_names
        # Accumulate all dependencies for those packages
        for package in packages:
            recursive_dependencies = dependency_walker.get_recursive_depends(
                package, ['build', 'run', 'buildtool'], ros_packages_only=True,
                limit_depth=depth)
            package_dependencies = package_dependencies.union(
                recursive_dependencies)

        # For all package dependencies, check if they are released yet
        unreleased_pkgs = package_dependencies.difference(
            current_package_names)
        # remove the packages which this repo provides.
        unreleased_pkgs = unreleased_pkgs.difference(packages)
        # Now get the repositories for these packages.
        blocking_repos_for_this_repo = set(
            prev_distro_file.release_packages[pkg].repository_name
            for pkg in unreleased_pkgs)
        if len(blocking_repos_for_this_repo) == 0:
            unblocked_repos.add(repository_name)
        else:
            # Get the repository for the unreleased packages
            blocked_repos[repository_name] = blocking_repos_for_this_repo
            total_blocking_repos |= blocking_repos_for_this_repo

            for blocking_repo in blocking_repos_for_this_repo:
                blocking_repos.setdefault(
                    blocking_repo, set()).add(repository_name)

    unblocked_blocking_repos_names = total_blocking_repos.intersection(
        unblocked_repos)
    unblocked_blocking_repos = {
        repo: blocking for repo, blocking in blocking_repos.items()
        if repo in unblocked_blocking_repos_names
    }
    unblocked_leaf_repos = unblocked_repos.difference(
        unblocked_blocking_repos_names)

    # Double-check repositories that we think are leaf repos
    for repo in unblocked_leaf_repos:
        # Check only one level of depends_on for every package in this repo
        repo_packages = \
            prev_distro_file.repositories[repo].release_repository.package_names
        depends_on = set()
        for package in repo_packages:
            depends_on |= dependency_walker.get_depends_on(package, 'build') | \
                dependency_walker.get_depends_on(package, 'run') | \
                dependency_walker.get_depends_on(package, 'buildtool')
        if len(depends_on) != 0:
            # There are packages that depend on this "leaf", but we didn't find
            # them initially because they weren't related to our inputs
            for package in depends_on:
                depends_on_repo = \
                    prev_distro_file.release_packages[package].repository_name
                unblocked_blocking_repos.setdefault(
                    repo, set()).add(depends_on_repo)

    unblocked_unblocking_repos = unblocked_leaf_repos.difference(
        unblocked_blocking_repos.keys())

    if len(repo_names) != (
            len(ignored_inputs) + len(released_repos) + len(blocked_repos) +
            len(unblocked_blocking_repos) + len(unblocked_unblocking_repos)):
        raise Exception('Somewhere a repo has not been accounted for')
    return (released_repos, blocked_repos, unblocked_blocking_repos,
            unblocked_unblocking_repos)
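
A minimal usage sketch (hypothetical arguments; the third parameter is forwarded to DependencyWalker.get_recursive_depends as limit_depth):

released, blocked, unblocked_blocking, unblocked_unblocking = \
    get_blocking_info('melodic', None, 1)
print('%d repos released, %d repos blocked' % (len(released), len(blocked)))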

# needs pygithub, yaml, rosdistro

from github import Github
import yaml
import os

import catkin_pkg.package
import rosdistro

maintainers = set()

ind = rosdistro.get_index(rosdistro.get_index_url())
for d in ind.distributions:
    distribution_cache = rosdistro.get_distribution_cache(ind, d)
    for p, x in distribution_cache.release_package_xmls.items():
        pkg = catkin_pkg.package.parse_package_string(x)
        for m in pkg.maintainers:
            # print(m.email, m.name)
            maintainers.add(m)

print('Found %s maintainers in %s' % (len(maintainers), ind.distributions))


g = Github()  # can add an OAuth token for higher rate limits

limit = g.get_rate_limit()
print('remaining API queries', limit.rate.remaining, 'until', limit.rate.reset)

Example #25
0
def get_sourcedeb(rosdistro_index_url,
                  rosdistro_name,
                  package_name,
                  sourcedeb_dir,
                  skip_download_sourcedeb=False):
    # ensure that no source subfolder exists
    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert not subfolders, \
        ("Sourcedeb directory '%s' must not have any " +
         "subfolders starting with '%s-'") % (sourcedeb_dir, package_name)

    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    if not skip_download_sourcedeb:
        # get expected package version from rosdistro
        from rosdistro import get_distribution_cache
        from rosdistro import get_index
        index = get_index(rosdistro_index_url)
        dist_cache = get_distribution_cache(index, rosdistro_name)
        dist_file = dist_cache.distribution_file
        assert package_name in dist_file.release_packages
        pkg = dist_file.release_packages[package_name]
        repo = dist_file.repositories[pkg.repository_name]
        package_version = repo.release_repository.version

        # get the exact sourcedeb version
        showsrc_output = subprocess.check_output(
            ['apt-cache', 'showsrc', debian_package_name]).decode()
        line_prefix = 'Version: '
        debian_package_versions = [
            l[len(line_prefix):] for l in showsrc_output.splitlines()
            if l.startswith(line_prefix + package_version)
        ]
        assert len(debian_package_versions) == 1, \
            "Failed to find sourcedeb with version '%s', only found: %s" % \
            (package_version, ', '.join(debian_package_versions))

        # download sourcedeb
        apt_script = os.path.join(os.path.dirname(__file__), 'wrapper',
                                  'apt.py')
        cmd = [
            sys.executable, apt_script, 'source', '--download-only',
            '--only-source',
            debian_package_name + '=' + debian_package_versions[0]
        ]
        print("Invoking '%s'" % ' '.join(cmd))
        subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # extract sourcedeb
    filenames = _get_package_dsc_filename(sourcedeb_dir, debian_package_name)
    assert len(filenames) == 1, filenames
    dsc_filename = filenames[0]
    cmd = ['dpkg-source', '-x', dsc_filename]
    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # ensure that one source subfolder exists
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(source_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
Example #26
0
def configure_release_job(
        config_url, rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        generate_import_package_job=True,
        generate_sync_packages_jobs=True,
        is_disabled=False, other_build_files_same_platform=None,
        groovy_script=None,
        filter_arches=None,
        dry_run=False):
    """
    Configure a Jenkins release job.

    The following jobs are created for each package:
    - M source jobs, one for each OS code name
    - M * N binary jobs, one for each combination of OS code name and arch
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    pkg_names = dist_file.release_packages.keys()

    if pkg_name not in pkg_names:
        raise JobValidationError(
            "Invalid package name '%s' " % pkg_name +
            'choose one of the following: ' + ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError(
            "Repository '%s' has no release section" % repo_name)

    if not repo.release_repository.version:
        raise JobValidationError(
            "Repository '%s' has no release version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if views is None:
        targets = []
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
        configure_release_views(
            jenkins, rosdistro_name, release_build_name, targets,
            dry_run=dry_run)

    if generate_import_package_job:
        configure_import_package_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins,
            dry_run=dry_run)

    if generate_sync_packages_jobs:
        configure_sync_packages_to_main_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins,
            dry_run=dry_run)
        for arch in build_file.targets[os_name][os_code_name]:
            configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins,
                dry_run=dry_run)

    source_job_names = []
    binary_job_names = []
    job_configs = {}

    # sourcedeb job
    # since sourcedeb jobs are potentially shared across multiple build
    # files, the configuration has to take all of them into account in order
    # to generate a job which all build files agree on
    source_job_name = get_sourcedeb_job_name(
        rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)

    # while the package is disabled in the current build file
    # it might be used by sibling build files
    is_source_disabled = is_disabled
    if is_source_disabled and other_build_files_same_platform:
        # check if sourcedeb job is used by any other build file with the same platform
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                is_source_disabled = False
                break

    job_config = _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name,
        pkg_name, repo_name, repo.release_repository, dist_cache=dist_cache,
        is_disabled=is_source_disabled,
        other_build_files_same_platform=other_build_files_same_platform)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, source_job_name, job_config, dry_run=dry_run)
    source_job_names.append(source_job_name)
    job_configs[source_job_name] = job_config

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(
            pkg_name, dist_cache, pkg_names)
        # if dependencies are not yet available in rosdistro cache
        # skip binary jobs
        if dependency_names is None:
            print(("Skipping binary jobs for package '%s' because it is not " +
                   "yet in the rosdistro cache") % pkg_name, file=sys.stderr)
            return source_job_names, binary_job_names, job_configs

    # binarydeb jobs
    for arch in build_file.targets[os_name][os_code_name]:
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)

        upstream_job_names = [source_job_name] + [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name,
                dependency_name, os_name, os_code_name, arch)
            for dependency_name in dependency_names]

        job_config = _get_binarydeb_job_config(
            config_url, rosdistro_name, release_build_name,
            config, build_file, os_name, os_code_name, arch,
            pkg_name, repo_name, repo.release_repository,
            dist_cache=dist_cache, upstream_job_names=upstream_job_names,
            is_disabled=is_disabled)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config, dry_run=dry_run)
        binary_job_names.append(job_name)
        job_configs[job_name] = job_config

    return source_job_names, binary_job_names, job_configs
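A minimal usage sketch for configure_release_job, mirroring the call pattern used later in this listing; the config URL, distro, build name, package and platform are placeholder assumptions, and dry_run=True keeps Jenkins untouched:

source_jobs, binary_jobs, job_configs = configure_release_job(
    'https://example.com/buildfarm_config/index.yaml',  # placeholder URL
    'noetic', 'default',
    'example_pkg', 'ubuntu', 'focal',
    dry_run=True)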
Example #27
def configure_devel_job(config_url,
                        rosdistro_name,
                        source_build_name,
                        repo_name,
                        os_name,
                        os_code_name,
                        arch,
                        pull_request=False,
                        config=None,
                        build_file=None,
                        index=None,
                        dist_file=None,
                        dist_cache=None,
                        jenkins=None,
                        views=None,
                        is_disabled=False,
                        groovy_script=None,
                        source_repository=None,
                        build_targets=None,
                        dry_run=False):
    """
    Configure a single Jenkins devel job.

    This includes the following steps:
    - clone the source repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the release/run_devel_job.py script
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_source_build_files(config, rosdistro_name)
        build_file = build_files[source_build_name]
    # Overwrite build_file.targets if build_targets is specified
    if build_targets is not None:
        build_file.targets = build_targets

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    repo_names = dist_file.repositories.keys()

    if repo_name is not None:
        if repo_name not in repo_names:
            raise JobValidationError("Invalid repository name '%s' " %
                                     repo_name +
                                     'choose one of the following: %s' %
                                     ', '.join(sorted(repo_names)))

        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            raise JobValidationError("Repository '%s' has no source section" %
                                     repo_name)
        if not repo.source_repository.version:
            raise JobValidationError("Repository '%s' has no source version" %
                                     repo_name)
        source_repository = repo.source_repository

    if os_name not in build_file.targets.keys():
        raise JobValidationError("Invalid OS name '%s' " % os_name +
                                 'choose one of the following: ' +
                                 ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' %
            ', '.join(sorted(build_file.targets[os_name][os_code_name])))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        view_name = get_devel_view_name(rosdistro_name,
                                        source_build_name,
                                        pull_request=pull_request)
        configure_devel_view(jenkins, view_name, dry_run=dry_run)

    job_name = get_devel_job_name(rosdistro_name, source_build_name, repo_name,
                                  os_name, os_code_name, arch, pull_request)

    job_config = _get_devel_job_config(config,
                                       rosdistro_name,
                                       source_build_name,
                                       build_file,
                                       os_name,
                                       os_code_name,
                                       arch,
                                       source_repository,
                                       repo_name,
                                       pull_request,
                                       job_name,
                                       dist_cache=dist_cache,
                                       is_disabled=is_disabled)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)

    return job_name, job_config
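A minimal usage sketch for configure_devel_job under the same placeholder assumptions; it returns the job name and its configuration:

job_name, job_config = configure_devel_job(
    'https://example.com/buildfarm_config/index.yaml',  # placeholder URL
    'noetic', 'default',
    'example_repo', 'ubuntu', 'focal', 'amd64',
    dry_run=True)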
def main(argv=sys.argv[1:]):
    global templates
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease overlay' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_overlay_arguments(parser)
    parser.add_argument(
        '--underlay-packages',
        nargs='+',
        help='Names of packages on which the overlay builds '
        '(by default package names come from packages found in '
        "'ws/src')")
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        '--json',
        action='store_true',
        help='Output overlay information as JSON instead of a shell script')
    group.add_argument('--vcstool',
                       action='store_true',
                       help='Output overlay information as vcstool repos file')

    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for overlay workspace
    underlay_package_names = args.underlay_packages
    if underlay_package_names is None:
        packages = find_packages('ws/src')
        underlay_package_names = [pkg.name for pkg in packages.values()]
    print('Underlay workspace contains %d packages:%s' %
          (len(underlay_package_names), ''.join([
              '\n- %s' % pkg_name
              for pkg_name in sorted(underlay_package_names)
          ])),
          file=sys.stderr)

    overlay_package_names = get_overlay_package_names(
        args.pkg,
        args.exclude_pkg,
        args.level,
        underlay_package_names,
        dist_cache.release_package_xmls,
        output=True)
    print('Overlay workspace will contain %d packages:%s' %
          (len(overlay_package_names), ''.join([
              '\n- %s' % pkg_name for pkg_name in sorted(overlay_package_names)
          ])),
          file=sys.stderr)

    repositories = {}
    for pkg_name in overlay_package_names:
        repositories[pkg_name] = \
            get_repository_specification_for_released_package(
                dist_file, pkg_name)
    scms = [(repositories[k], 'ws_overlay/src/%s' % k)
            for k in sorted(repositories.keys())]

    if args.json:
        print(json.dumps([vars(r) for r, p in scms], sort_keys=True, indent=2))
    elif args.vcstool:
        print('repositories:')
        for r, p in scms:
            print('  %s:' % p)
            print('    type: ' + r.type)
            print('    url: ' + r.url)
            print('    version: ' + r.version)
    else:
        value = expand_template('prerelease/prerelease_overlay_script.sh.em',
                                {'scms': scms},
                                options={BANGPATH_OPT: False})
        print(value)
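For reference, the --vcstool branch above prints a repos file of roughly this shape (entry name, url and version are illustrative):

repositories:
  ws_overlay/src/example_pkg:
    type: git
    url: https://example.com/example_pkg.git
    version: 1.2.3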
Example #29
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name,
        append_timestamp=False):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally a job to import Debian packages into the Debian repository is
    created.
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers or build_file.abi_incompatibility_assumed:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            targets.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in targets:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins)

    for os_name, os_code_name in targets:
        if os_name != 'ubuntu':
            continue
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins)

    view_name = get_release_view_name(rosdistro_name, release_build_name)
    view = configure_release_view(jenkins, view_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    all_job_names = []
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name), file=sys.stderr)
            continue

        for os_name, os_code_name in targets:
            try:
                job_names = configure_release_job(
                    config_url, rosdistro_name, release_build_name,
                    pkg_name, os_name, os_code_name,
                    append_timestamp=append_timestamp,
                    config=config, build_file=build_file,
                    index=index, dist_file=dist_file, dist_cache=dist_cache,
                    jenkins=jenkins, view=view,
                    generate_import_package_job=False,
                    generate_sync_packages_to_testing_job=False)
                all_job_names += job_names
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    # delete obsolete jobs in this view
    remove_jobs(jenkins, '%s__' % view_name, all_job_names)
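A hedged usage sketch for this variant of configure_release_jobs (all arguments are placeholders); it configures source and binary jobs for every released package and target in the build file:

configure_release_jobs(
    'https://example.com/buildfarm_config/index.yaml',  # placeholder URL
    'indigo', 'default')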
Example #30
def configure_release_jobs(config_url,
                           rosdistro_name,
                           release_build_name,
                           groovy_script=None,
                           dry_run=False,
                           whitelist_package_names=None):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally a job to import Debian packages into the Debian repository is
    created.
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    platforms = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            platforms.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in platforms:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    pkg_names = dist_file.release_packages.keys()
    filtered_pkg_names = build_file.filter_packages(pkg_names)
    explicitly_ignored_pkg_names = set(pkg_names) - set(filtered_pkg_names)
    if explicitly_ignored_pkg_names:
        print(('The following packages are being %s because of ' +
               'white-/blacklisting:') %
              ('ignored' if build_file.skip_ignored_packages else 'disabled'))
        for pkg_name in sorted(explicitly_ignored_pkg_names):
            print('  -', pkg_name)

    dist_cache = get_distribution_cache(index, rosdistro_name)

    if explicitly_ignored_pkg_names:
        # get direct dependencies from distro cache for each package
        direct_dependencies = {}
        for pkg_name in pkg_names:
            direct_dependencies[pkg_name] = _get_direct_dependencies(
                pkg_name, dist_cache, pkg_names) or set([])

        # find recursive downstream deps for all explicitly ignored packages
        ignored_pkg_names = set(explicitly_ignored_pkg_names)
        while True:
            implicitly_ignored_pkg_names = _get_downstream_package_names(
                ignored_pkg_names, direct_dependencies)
            if implicitly_ignored_pkg_names - ignored_pkg_names:
                ignored_pkg_names |= implicitly_ignored_pkg_names
                continue
            break
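        # the loop above is a fixed-point iteration: ignored_pkg_names only
        # grows and is bounded by the finite package set, so it terminates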
        implicitly_ignored_pkg_names = \
            ignored_pkg_names - explicitly_ignored_pkg_names

        if implicitly_ignored_pkg_names:
            print(('The following packages are being %s because their ' +
                   'dependencies are being ignored:') %
                  ('ignored'
                   if build_file.skip_ignored_packages else 'disabled'))
            for pkg_name in sorted(implicitly_ignored_pkg_names):
                print('  -', pkg_name)
            filtered_pkg_names = \
                set(filtered_pkg_names) - implicitly_ignored_pkg_names

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    jenkins = False
    if groovy_script is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)

    all_view_configs = {}
    all_job_configs = OrderedDict()

    job_name, job_config = configure_import_package_job(config_url,
                                                        rosdistro_name,
                                                        release_build_name,
                                                        config=config,
                                                        build_file=build_file,
                                                        jenkins=jenkins,
                                                        dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    job_name, job_config = configure_sync_packages_to_main_job(
        config_url,
        rosdistro_name,
        release_build_name,
        config=config,
        build_file=build_file,
        jenkins=jenkins,
        dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    for os_name, os_code_name in platforms:
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            job_name, job_config = configure_sync_packages_to_testing_job(
                config_url,
                rosdistro_name,
                release_build_name,
                os_code_name,
                arch,
                config=config,
                build_file=build_file,
                jenkins=jenkins,
                dry_run=dry_run)
            if not jenkins:
                all_job_configs[job_name] = job_config

    targets = []
    for os_name, os_code_name in platforms:
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
    views = configure_release_views(jenkins,
                                    rosdistro_name,
                                    release_build_name,
                                    targets,
                                    dry_run=dry_run)
    if not jenkins:
        all_view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(views),
    }

    # binary jobs must be generated in topological order
    from catkin_pkg.package import parse_package_string
    from ros_buildfarm.common import topological_order_packages
    pkgs = {}
    for pkg_name in pkg_names:
        if pkg_name not in dist_cache.release_package_xmls:
            print("Skipping package '%s': no released package.xml in cache" %
                  (pkg_name),
                  file=sys.stderr)
            continue
        pkg_xml = dist_cache.release_package_xmls[pkg_name]
        pkg = parse_package_string(pkg_xml)
        pkgs[pkg_name] = pkg
    ordered_pkg_tuples = topological_order_packages(pkgs)

    other_build_files = [
        v for k, v in build_files.items() if k != release_build_name
    ]

    all_source_job_names = []
    all_binary_job_names = []
    for pkg_name in [p.name for _, p in ordered_pkg_tuples]:
        if whitelist_package_names:
            if pkg_name not in whitelist_package_names:
                print(
                    "Skipping package '%s' not in the explicitly passed list" %
                    pkg_name,
                    file=sys.stderr)
                continue

        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        is_disabled = pkg_name not in filtered_pkg_names
        if is_disabled and build_file.skip_ignored_packages:
            print("Skipping ignored package '%s' in repository '%s'" %
                  (pkg_name, repo_name),
                  file=sys.stderr)
            continue
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name),
                  file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name),
                  file=sys.stderr)
            continue

        for os_name, os_code_name in platforms:
            other_build_files_same_platform = []
            for other_build_file in other_build_files:
                if os_name not in other_build_file.targets:
                    continue
                if os_code_name not in other_build_file.targets[os_name]:
                    continue
                other_build_files_same_platform.append(other_build_file)

            try:
                source_job_names, binary_job_names, job_configs = \
                    configure_release_job(
                        config_url, rosdistro_name, release_build_name,
                        pkg_name, os_name, os_code_name,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins, views=views,
                        generate_import_package_job=False,
                        generate_sync_packages_jobs=False,
                        is_disabled=is_disabled,
                        other_build_files_same_platform=other_build_files_same_platform,
                        groovy_script=groovy_script,
                        dry_run=dry_run)
                all_source_job_names += source_job_names
                all_binary_job_names += binary_job_names
                if groovy_script is not None:
                    print('Configuration for jobs: ' +
                          ', '.join(source_job_names + binary_job_names))
                    for source_job_name in source_job_names:
                        all_job_configs[source_job_name] = job_configs[
                            source_job_name]
                    for binary_job_name in binary_job_names:
                        all_job_configs[binary_job_name] = job_configs[
                            binary_job_name]
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(all_job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    # with an explicit list of packages we don't delete obsolete jobs
    if not whitelist_package_names:
        # delete obsolete binary jobs
        for os_name, os_code_name in platforms:
            for arch in build_file.targets[os_name][os_code_name]:
                binary_view = get_release_binary_view_name(
                    rosdistro_name, release_build_name, os_name, os_code_name,
                    arch)
                binary_job_prefix = '%s__' % binary_view

                excluded_job_names = set([
                    j for j in all_binary_job_names
                    if j.startswith(binary_job_prefix)
                ])
                if groovy_script is None:
                    print("Removing obsolete binary jobs with prefix '%s'" %
                          binary_job_prefix)
                    from ros_buildfarm.jenkins import remove_jobs
                    remove_jobs(jenkins,
                                binary_job_prefix,
                                excluded_job_names,
                                dry_run=dry_run)
                else:
                    binary_key = 'binary_%s_%s_%s' % \
                        (os_name, os_code_name, arch)
                    groovy_data['job_prefixes_and_names'][binary_key] = \
                        (binary_job_prefix, excluded_job_names)

        # delete obsolete source jobs
        # requires knowledge about all other release build files
        for os_name, os_code_name in platforms:
            other_source_job_names = []
            # get source job names for all other release build files
            for other_release_build_name in [
                    k for k in build_files.keys() if k != release_build_name
            ]:
                other_build_file = build_files[other_release_build_name]
                other_dist_file = get_distribution_file(
                    index, rosdistro_name, other_build_file)
                if not other_dist_file:
                    continue

                if os_name not in other_build_file.targets or \
                        os_code_name not in other_build_file.targets[os_name]:
                    continue

                if other_build_file.skip_ignored_packages:
                    filtered_pkg_names = other_build_file.filter_packages(
                        pkg_names)
                else:
                    filtered_pkg_names = pkg_names
                for pkg_name in sorted(filtered_pkg_names):
                    pkg = other_dist_file.release_packages[pkg_name]
                    repo_name = pkg.repository_name
                    repo = other_dist_file.repositories[repo_name]
                    if not repo.release_repository:
                        continue
                    if not repo.release_repository.version:
                        continue

                    other_job_name = get_sourcedeb_job_name(
                        rosdistro_name, other_release_build_name, pkg_name,
                        os_name, os_code_name)
                    other_source_job_names.append(other_job_name)

            source_view_prefix = get_release_source_view_name(
                rosdistro_name, os_name, os_code_name)
            source_job_prefix = '%s__' % source_view_prefix
            excluded_job_names = set([
                j for j in (all_source_job_names + other_source_job_names)
                if j.startswith(source_job_prefix)
            ])
            if groovy_script is None:
                print("Removing obsolete source jobs with prefix '%s'" %
                      source_job_prefix)
                from ros_buildfarm.jenkins import remove_jobs
                remove_jobs(jenkins,
                            source_job_prefix,
                            excluded_job_names,
                            dry_run=dry_run)
            else:
                source_key = 'source_%s_%s' % (os_name, os_code_name)
                groovy_data['job_prefixes_and_names'][source_key] = (
                    source_job_prefix, excluded_job_names)

    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d views and %d jobs" %
            (groovy_script, len(all_view_configs), len(all_job_configs)))
        content = expand_template('snippet/reconfigure_jobs.groovy.em',
                                  groovy_data)
        write_groovy_script_and_configs(groovy_script,
                                        content,
                                        all_job_configs,
                                        view_configs=all_view_configs)
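A hedged usage sketch for this variant (placeholder arguments); when groovy_script is given, no Jenkins connection is made and all view and job configurations are written out for a later reconfiguration run:

configure_release_jobs(
    'https://example.com/buildfarm_config/index.yaml',  # placeholder URL
    'noetic', 'default',
    groovy_script='/tmp/reconfigure_release_jobs.groovy',  # placeholder path
    dry_run=True)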
Example #31
def configure_doc_jobs(config_url,
                       rosdistro_name,
                       doc_build_name,
                       groovy_script=None,
                       dry_run=False,
                       whitelist_repository_names=None):
    """
    Configure all Jenkins doc jobs.

    L{configure_doc_job} will be invoked for every doc repository and target
    which matches the build file criteria.
    """
    config = get_config_index(config_url)
    build_files = get_doc_build_files(config, rosdistro_name)
    build_file = build_files[doc_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print('  -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    doc_view_name = get_doc_view_name(rosdistro_name, doc_build_name)

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url) if groovy_script is None else False

    view_configs = {}
    views = {}
    views[doc_view_name] = configure_doc_view(jenkins,
                                              doc_view_name,
                                              dry_run=dry_run)
    if not jenkins:
        view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(view_configs),
    }

    repo_names = dist_file.repositories.keys()
    filtered_repo_names = build_file.filter_repositories(repo_names)

    job_names = []
    job_configs = OrderedDict()
    for repo_name in sorted(repo_names):
        if whitelist_repository_names:
            if repo_name not in whitelist_repository_names:
                print(
                    "Skipping repository '%s' not in explicitly passed list" %
                    repo_name,
                    file=sys.stderr)
                continue
        is_disabled = repo_name not in filtered_repo_names
        if is_disabled and build_file.skip_ignored_repositories:
            print("Skipping ignored repository '%s'" % repo_name,
                  file=sys.stderr)
            continue

        repo = dist_file.repositories[repo_name]
        if not repo.doc_repository:
            print("Skipping repository '%s': no doc section" % repo_name)
            continue
        if not repo.doc_repository.version:
            print("Skipping repository '%s': no doc version" % repo_name)
            continue

        for os_name, os_code_name, arch in targets:
            try:
                job_name, job_config = configure_doc_job(
                    config_url,
                    rosdistro_name,
                    doc_build_name,
                    repo_name,
                    os_name,
                    os_code_name,
                    arch,
                    config=config,
                    build_file=build_file,
                    index=index,
                    dist_file=dist_file,
                    dist_cache=dist_cache,
                    jenkins=jenkins,
                    views=views,
                    is_disabled=is_disabled,
                    groovy_script=groovy_script,
                    dry_run=dry_run)
                job_names.append(job_name)
                if groovy_script is not None:
                    print("Configuration for job '%s'" % job_name)
                    job_configs[job_name] = job_config
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    job_prefix = '%s__' % doc_view_name
    if not whitelist_repository_names:
        groovy_data['job_prefixes_and_names']['doc'] = (job_prefix, job_names)

        if groovy_script is None:
            # delete obsolete jobs in this view
            from ros_buildfarm.jenkins import remove_jobs
            print('Removing obsolete doc jobs')
            remove_jobs(jenkins, job_prefix, job_names, dry_run=dry_run)
    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d views and %d jobs" %
            (groovy_script, len(view_configs), len(job_configs)))
        content = expand_template('snippet/reconfigure_jobs.groovy.em',
                                  groovy_data)
        write_groovy_script_and_configs(groovy_script,
                                        content,
                                        job_configs,
                                        view_configs=view_configs)
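A hedged usage sketch (placeholder arguments); passing a whitelist restricts configuration to the named repositories and skips the deletion of obsolete jobs:

configure_doc_jobs(
    'https://example.com/buildfarm_config/index.yaml',  # placeholder URL
    'noetic', 'default',
    whitelist_repository_names=['example_repo'],
    dry_run=True)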
Example #32
def _get_blocked_releases_info(config_url, rosdistro_name, repo_names=None):
    import rosdistro
    from rosdistro.dependency_walker import DependencyWalker

    config = get_config_index(config_url)

    index = rosdistro.get_index(config.rosdistro_index_url)

    print('Checking packages for "%s" distribution' % rosdistro_name)

    # Find the previous distribution to the current one
    try:
        prev_rosdistro_name = _prev_rosdistro(index, rosdistro_name)
    except ValueError as e:
        print(e.args[0], file=sys.stderr)
        exit(-1)

    cache = rosdistro.get_distribution_cache(index, rosdistro_name)
    distro_file = cache.distribution_file

    prev_cache = rosdistro.get_distribution_cache(index, prev_rosdistro_name)
    prev_distribution = rosdistro.get_cached_distribution(index,
                                                          prev_rosdistro_name,
                                                          cache=prev_cache)

    prev_distro_file = prev_cache.distribution_file

    dependency_walker = DependencyWalker(prev_distribution)

    # Check missing dependencies for packages that were in the previous
    # distribution that have not yet been released in the current distribution
    # Filter repos without a version or a release repository
    prev_repo_names = set(_released_repos(prev_distro_file))

    if repo_names is not None:
        ignored_inputs = prev_repo_names.difference(repo_names)
        prev_repo_names.intersection_update(repo_names)
        repo_names = prev_repo_names

        if len(ignored_inputs) > 0:
            print(
                'Ignoring inputs for which repository info was not found in the previous distribution '
                '(did you list a package instead of a repository?):')
            print('\n'.join(
                sorted('\t{0}'.format(repo) for repo in ignored_inputs)))

    current_repo_names = set(_released_repos(distro_file))

    # Get a list of currently released packages
    current_package_names = set(
        _released_packages(distro_file, current_repo_names))

    released_repos = prev_repo_names.intersection(current_repo_names)

    if prev_repo_names.issubset(current_repo_names):
        print('All inputs already released in {0}.'.format(rosdistro_name))

    repos_info = defaultdict(dict)
    unprocessed_repos = prev_repo_names
    while unprocessed_repos:
        print('Processing repos:\n%s' %
              '\n'.join(['- %s' % r for r in sorted(unprocessed_repos)]))
        # set containing repos that come up while processing others
        new_repos_to_process = set()

        for repo_name in unprocessed_repos:
            repos_info[repo_name]['released'] = repo_name in released_repos

            if repo_name in released_repos:
                repo = distro_file.repositories[repo_name]
                version = repo.release_repository.version
                repos_info[repo_name]['version'] = version

            else:
                # Gather info on which required repos have not been released yet
                # Assume dependencies will be the same as in the previous distribution and find
                # which ones have been released
                repo = prev_distro_file.repositories[repo_name]
                release_repo = repo.release_repository
                package_dependencies = set()
                packages = release_repo.package_names
                # Accumulate all dependencies for those packages
                for package in packages:
                    package_dependencies.update(
                        _package_dependencies(dependency_walker, package))

                # For all package dependencies, check if they are released yet
                unreleased_pkgs = package_dependencies.difference(
                    current_package_names)
                # Remove the packages which this repo provides
                unreleased_pkgs = unreleased_pkgs.difference(packages)

                # Get maintainer info and repo of unreleased packages
                maintainers = defaultdict(dict)
                repos_blocked_by = set()
                for pkg_name in unreleased_pkgs:
                    unreleased_repo_name = \
                        prev_distro_file.release_packages[pkg_name].repository_name
                    repos_blocked_by.add(unreleased_repo_name)
                    maintainers[unreleased_repo_name].update(
                        dict(_maintainers(prev_distribution, pkg_name)))
                if maintainers:
                    repos_info[repo_name]['maintainers'] = maintainers

                repos_info[repo_name]['repos_blocked_by'] = {}
                for blocking_repo_name in repos_blocked_by:
                    # Get url of blocking repos
                    repo_url = _repo_url(prev_distribution, blocking_repo_name)
                    repos_info[repo_name]['repos_blocked_by'].update(
                        {blocking_repo_name: repo_url})

                    # Mark blocking relationship in other direction
                    if blocking_repo_name not in repos_info:
                        new_repos_to_process.add(blocking_repo_name)
                        repos_info[blocking_repo_name] = {}
                    if 'repos_blocking' not in repos_info[blocking_repo_name]:
                        repos_info[blocking_repo_name]['repos_blocking'] = \
                            set()
                    repos_info[blocking_repo_name]['repos_blocking'].add(
                        repo_name)

            # Get url of repo
            repo_url = _repo_url(prev_distribution, repo_name)
            if repo_url:
                repos_info[repo_name]['url'] = repo_url

            # this repo has been fully processed now
            new_repos_to_process.discard(repo_name)

        for repo_name in repos_info.keys():
            # Recursively get all repos being blocked by this repo
            recursive_blocks = set([])
            repos_to_check = set([repo_name])
            while repos_to_check:
                next_repo_to_check = repos_to_check.pop()
                blocks = repos_info[next_repo_to_check].get(
                    'repos_blocking', set([]))
                new_blocks = blocks - recursive_blocks
                repos_to_check |= new_blocks
                recursive_blocks |= new_blocks
            if recursive_blocks:
                repos_info[repo_name][
                    'recursive_repos_blocking'] = recursive_blocks
        unprocessed_repos = new_repos_to_process

    return repos_info
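A hedged usage sketch (placeholder arguments). Each entry of the returned dict may carry the keys 'released', 'version', 'url', 'maintainers', 'repos_blocked_by', 'repos_blocking' and 'recursive_repos_blocking', populated by the code above:

repos_info = _get_blocked_releases_info(
    'https://example.com/buildfarm_config/index.yaml',  # placeholder URL
    'noetic',
    repo_names=['example_repo'])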
Example #33
def run_audit(config_url, rosdistro_name, cache_dir):
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    dist_cache = get_distribution_cache(index, rosdistro_name)
    build_files = get_release_build_files(config, rosdistro_name)
    missing_packages = {}
    for bf_name, bf_value in build_files.items():
        missing_packages[bf_name] = copy.deepcopy(bf_value.targets)
        for target in bf_value.get_targets_list():
            all_pkgs, missing_pkgs = partition_packages(
                config_url,
                rosdistro_name,
                bf_name,
                target,
                cache_dir,
                deduplicate_dependencies=True,
                dist_cache=dist_cache)
            missing_packages[bf_name][target] = missing_pkgs
            if 'all' in missing_packages[bf_name]:
                missing_packages[bf_name]['all'] &= missing_pkgs
            else:
                missing_packages[bf_name]['all'] = missing_pkgs

            if 'all' in missing_packages:
                missing_packages['all'] &= missing_pkgs
            else:
                missing_packages['all'] = missing_pkgs

    recommended_actions = len(missing_packages['all'])
    print('# Sync preparation report for %s' % rosdistro_name)
    print('Prepared for configuration: %s' % config_url)
    print('Prepared for rosdistro index: %s' % config.rosdistro_index_url)
    print('\n\n')

    if missing_packages['all']:
        print('## Packages failing on all platforms\n\n'
              'These releases are recommended to be rolled back:\n')
        for mp in sorted(missing_packages['all']):
            print(' - %s ' % mp)
        print('\n\n')
    else:
        print('## No packages detected failing on all platforms\n\n')

    def get_package_repository_link(dist_file, pkg_name):
        """Return the best guess of the url for filing a ticket against the package."""
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if repo.source_repository and repo.source_repository.url:
            return repo.source_repository.url
        if repo.release_repository and repo.release_repository.url:
            return repo.release_repository.url
        return None

    for bf_name in build_files.keys():
        print('## Audit of buildfile %s\n\n' % bf_name)
        # TODO(tfoote) use rosdistro API to print the release build config for editing
        recommended_blacklists = sorted(missing_packages[bf_name]['all'] -
                                        missing_packages['all'])
        recommended_actions += len(recommended_blacklists)
        if not recommended_blacklists:
            print(
                'Congratulations! '
                'No packages are failing to build on all targets for this buildfile.\n\n'
            )
            continue
        print(
            'Attention! '
            'The following packages are failing to build on all targets for this buildfile. '
            'It is recommended to blacklist them in the buildfile.\n\n')
        for rb in recommended_blacklists:
            print(' - %s:' % rb)
            jenkins_urls = get_jenkins_job_urls(
                rosdistro_name, config.jenkins_url, bf_name,
                build_files[bf_name].get_targets_list())
            url = get_package_repository_link(dist_file, rb)
            print('   - Suggested ticket location [%s](%s)' % (url, url))
            print('')
            print('   Title:')
            print('')
            print('       %s in %s fails to build on %s targets' %
                  (rb, rosdistro_name, bf_name))
            print('')
            print('   Body:')
            print('')
            print(
                '       The package %s in %s has been detected as not building'
                % (rb, rosdistro_name) +
                ' on all platforms in the buildfile %s.' % (bf_name) +
                ' The release manager for %s will consider disabling' %
                (rosdistro_name) +
                ' this build if it continues to fail to build.')
            print('       - jenkins_urls:')
            for target, ju in jenkins_urls.items():
                target_str = ' '.join(target)
                url = ju.format(pkg=rb)
                print('          - [%s](%s)' % (target_str, url))
                # TODO(tfoote) embed build status when buildfarm has https
                # print('    - %s [![Build Status](%s)](%s)' % (' '.join([x for x in target]),
                #       ju.format(pkg = rb) + '/badge/icon', ju.format(pkg = rb)))
            print(
                '       This is being filed because this package is about to be blacklisted.'
                ' If this ticket is resolved please review whether it can be removed from'
                ' the blacklist that should cross reference here.')
            print('')

    return recommended_actions
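A hedged usage sketch (placeholder arguments); the function prints the sync preparation report to stdout and returns the number of recommended actions:

actions = run_audit(
    'https://example.com/buildfarm_config/index.yaml',  # placeholder URL
    'noetic',
    '/tmp/package_cache')  # placeholder cache directory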
Example #34
def configure_doc_jobs(
        config_url, rosdistro_name, doc_build_name, groovy_script=None):
    """
    Configure all Jenkins doc jobs.

    L{configure_doc_job} will be invoked for every doc repository and target
    which matches the build file criteria.
    """
    config = get_config_index(config_url)
    build_files = get_doc_build_files(config, rosdistro_name)
    build_file = build_files[doc_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print('  -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    doc_view_name = get_doc_view_name(rosdistro_name, doc_build_name)

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    views = []
    views.append(configure_doc_view(jenkins, doc_view_name))

    if groovy_script is not None:
        # all further configuration will be handled by the groovy script
        jenkins = False

    repo_names = dist_file.repositories.keys()
    filtered_repo_names = build_file.filter_repositories(repo_names)

    job_names = []
    job_configs = {}
    for repo_name in sorted(repo_names):
        is_disabled = repo_name not in filtered_repo_names
        if is_disabled and build_file.skip_ignored_repositories:
            print("Skipping ignored repository '%s'" % repo_name,
                  file=sys.stderr)
            continue

        repo = dist_file.repositories[repo_name]
        if not repo.doc_repository:
            print("Skipping repository '%s': no doc section" % repo_name)
            continue
        if not repo.doc_repository.version:
            print("Skipping repository '%s': no doc version" % repo_name)
            continue

        for os_name, os_code_name, arch in targets:
            try:
                job_name, job_config = configure_doc_job(
                    config_url, rosdistro_name, doc_build_name,
                    repo_name, os_name, os_code_name, arch,
                    config=config, build_file=build_file,
                    index=index, dist_file=dist_file,
                    dist_cache=dist_cache, jenkins=jenkins, views=views,
                    is_disabled=is_disabled,
                    groovy_script=groovy_script)
                job_names.append(job_name)
                if groovy_script is not None:
                    print("Configuration for job '%s'" % job_name)
                    job_configs[job_name] = job_config
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    job_prefix = '%s__' % doc_view_name
    if groovy_script is None:
        # delete obsolete jobs in this view
        from ros_buildfarm.jenkins import remove_jobs
        print('Removing obsolete doc jobs')
        remove_jobs(jenkins, job_prefix, job_names)
    else:
        print("Writing groovy script '%s' to reconfigure %d jobs" %
              (groovy_script, len(job_configs)))
        data = {
            'expected_num_jobs': len(job_configs),
            'job_prefixes_and_names': {
                'doc': (job_prefix, job_names),
            }
        }
        content = expand_template('snippet/reconfigure_jobs.groovy.em', data)
        write_groovy_script_and_configs(
            groovy_script, content, job_configs)
Example #35
def configure_devel_jobs(
        config_url, rosdistro_name, source_build_name):
    """
    Configure all Jenkins devel jobs.

    L{configure_devel_job} will be invoked for every source repository and
    target which matches the build file criteria.
    """
    config = get_config_index(config_url)
    build_files = get_source_build_files(config, rosdistro_name)
    build_file = build_files[source_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print('  -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    view_name = get_devel_view_name(rosdistro_name, source_build_name)
    view = configure_devel_view(jenkins, view_name)

    repo_names = dist_file.repositories.keys()
    repo_names = build_file.filter_repositories(repo_names)

    job_names = []
    for repo_name in sorted(repo_names):
        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            print("Skipping repository '%s': no source section" % repo_name)
            continue
        if not repo.source_repository.version:
            print("Skipping repository '%s': no source version" % repo_name)
            continue

        for os_name, os_code_name, arch in targets:
            try:
                job_name = configure_devel_job(
                    config_url, rosdistro_name, source_build_name,
                    repo_name, os_name, os_code_name, arch,
                    config=config, build_file=build_file,
                    index=index, dist_file=dist_file, dist_cache=dist_cache,
                    jenkins=jenkins, view=view)
                job_names.append(job_name)
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    # delete obsolete jobs in this view
    remove_jobs(jenkins, '%s__' % view_name, job_names)
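A hedged usage sketch (placeholder arguments); this configures one devel job per matching source repository and target, then removes obsolete jobs from the view:

configure_devel_jobs(
    'https://example.com/buildfarm_config/index.yaml',  # placeholder URL
    'indigo', 'default')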
def main(argv=sys.argv[1:]):
    global templates
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_output_dir(parser, required=True)

    group = parser.add_argument_group(
        'Repositories in underlay workspace',
        description='The repositories in the underlay workspace will be ' +
                    'built and installed as well as built and tested. ' +
                    'Dependencies will be provided by binary packages.')
    group.add_argument(
        'source_repos',
        nargs='*',
        default=[],
        metavar='REPO_NAME',
        help="A name of a 'repository' from the distribution file")
    group.add_argument(
        '--custom-branch',
        nargs='*',
        type=_repository_name_and_branch,
        default=[],
        metavar='REPO_NAME:BRANCH_OR_TAG_NAME',
        help="A name of a 'repository' from the distribution file followed " +
             'by a colon and a branch / tag name')
    group.add_argument(
        '--custom-repo',
        nargs='*',
        type=_repository_name_and_type_and_url_and_branch,
        default=[],
        metavar='REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME',
        help='The name, type, url and branch / tag name of a repository')

    add_overlay_arguments(parser)

    args = parser.parse_args(argv)

    print('Fetching buildfarm configuration...')
    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    print('Fetching rosdistro cache...')
    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for underlay workspace
    repositories = {}
    for repo_name in args.source_repos:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            repositories[repo_name] = \
                dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1

    for repo_name, custom_version in args.custom_branch:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            source_repo = dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        source_repo = deepcopy(source_repo)
        source_repo.version = custom_version
        repositories[repo_name] = source_repo

    for repo_name, repo_type, repo_url, version in args.custom_repo:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        source_repo = RepositorySpecification(
            repo_name, {
                'type': repo_type,
                'url': repo_url,
                'version': version,
            })
        repositories[repo_name] = source_repo

    scms = [(repositories[k], 'catkin_workspace/src/%s' % k)
            for k in sorted(repositories.keys())]

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeInclude(self, *args, **kwargs):
            template_path = kwargs['file'].name
            print(template_path, file=sys.stderr)
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    templates.template_hooks = [hook]

    # use an arbitrary source repo (the first one) to pass to the devel job
    # template
    source_repository = deepcopy(list(repositories.values())[0])
    source_repository.name = 'prerelease'
    print('Evaluating job templates...')
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        None, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        index=index, dist_file=dist_file, dist_cache=dist_cache,
        jenkins=False, views=False,
        source_repository=source_repository)

    templates.template_hooks = None

    # derive scripts for overlay workspace from underlay
    overlay_scripts = []
    for script in hook.scripts:
        # skip cloning of ros_buildfarm repository
        if 'git clone' in script and '.git ros_buildfarm' in script:
            continue
        # skip build-and-install step
        if 'build and install' in script:
            continue

        # add prerelease overlay flag
        run_devel_job = '/run_devel_job.py'
        if run_devel_job in script:
            script = script.replace(
                run_devel_job, run_devel_job + ' --prerelease-overlay')

        # replace the mounted workspace volume with overlay and underlay
        # used by:
        # - create_devel_task_generator.py, which needs to find packages in
        #   both the underlay and the overlay workspace
        # - catkin_make_isolated_and_test.py, which needs to source the
        #   environment of the underlay before building the overlay
        mount_volume = '-v $WORKSPACE/catkin_workspace:/tmp/catkin_workspace'
        if mount_volume in script:
            script = script.replace(
                mount_volume, mount_volume + ':ro ' + '-v $WORKSPACE/' +
                'catkin_workspace_overlay:/tmp/catkin_workspace_overlay')

        # relocate all docker files
        docker_path = '$WORKSPACE/docker_'
        if docker_path in script:
            script = script.replace(
                docker_path, docker_path + 'overlay_')

        # rename all docker images
        name_suffix = '_prerelease'
        if name_suffix in script:
            script = script.replace(
                name_suffix, name_suffix + '_overlay')

        overlay_scripts.append(script)

    from ros_buildfarm import __file__ as ros_buildfarm_file
    data = deepcopy(args.__dict__)
    data.update({
        'scms': scms,
        'scripts': hook.scripts,
        'overlay_scripts': overlay_scripts,
        'ros_buildfarm_python_path': os.path.dirname(
            os.path.dirname(os.path.abspath(ros_buildfarm_file))),
        'python_executable': sys.executable,
        'prerelease_script_path': os.path.dirname(os.path.abspath(__file__))})

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # generate multiple scripts
    for script_name in [
            'prerelease',
            'prerelease_build_overlay',
            'prerelease_build_underlay',
            'prerelease_clone_overlay',
            'prerelease_clone_underlay']:
        content = expand_template(
            'prerelease/%s_script.sh.em' % script_name, data,
            options={BANGPATH_OPT: False})
        script_file = os.path.join(args.output_dir, script_name + '.sh')
        with open(script_file, 'w') as h:
            h.write(content)
        os.chmod(script_file, os.stat(script_file).st_mode | stat.S_IEXEC)

    print('')
    print('Generated prerelease script - to execute it run:')
    if os.path.abspath(args.output_dir) != os.path.abspath(os.curdir):
        print('  cd %s' % args.output_dir)
    print('  ./prerelease.sh')
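
The --custom-branch and --custom-repo options above rely on the parser
helpers _repository_name_and_branch and
_repository_name_and_type_and_url_and_branch, which are not shown in this
excerpt. A minimal sketch of what such argparse type callables might look
like (a hypothetical reconstruction, not the original helpers):

import argparse


def _repository_name_and_branch(value):
    # expect REPO_NAME:BRANCH_OR_TAG_NAME
    parts = value.split(':', 1)
    if len(parts) != 2 or not all(parts):
        raise argparse.ArgumentTypeError(
            "expected 'REPO_NAME:BRANCH_OR_TAG_NAME', got '%s'" % value)
    return tuple(parts)


def _repository_name_and_type_and_url_and_branch(value):
    # expect REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME
    name, repo_type, rest = (value.split(':', 2) + [''] * 3)[:3]
    # split the branch off from the right so colons in the URL survive
    url, _, branch = rest.rpartition(':')
    if not all((name, repo_type, url, branch)):
        raise argparse.ArgumentTypeError(
            "expected 'REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME', "
            "got '%s'" % value)
    return name, repo_type, url, branch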
Example #37
def configure_devel_job(
        config_url, rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_source_build_files(config, rosdistro_name)
        build_file = build_files[source_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    repo_names = dist_file.repositories.keys()
    repo_names = build_file.filter_repositories(repo_names)

    if repo_name not in repo_names:
        return "Invalid repository name '%s' " % repo_name + \
            'choose one of the following: ' + \
            ', '.join(sorted(repo_names))

    repo = dist_file.repositories[repo_name]

    if not repo.source_repository:
        return "Repository '%s' has no source section" % repo_name
    if not repo.source_repository.version:
        return "Repository '%s' has no source version" % repo_name

    if os_name not in build_file.targets.keys():
        return "Invalid OS name '%s' " % os_name + \
            'choose one of the following: ' + \
            ', '.join(sorted(build_file.targets.keys()))
    if os_code_name not in build_file.targets[os_name].keys():
        return "Invalid OS code name '%s' " % os_code_name + \
            'choose one of the following: ' + \
            ', '.join(sorted(build_file.targets[os_name].keys()))
    if arch not in build_file.targets[os_name][os_code_name]:
        return "Invalid architecture '%s' " % arch + \
            'choose one of the following: ' + \
            ', '.join(sorted(
                build_file.targets[os_name][os_code_name]))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_devel_view_name(rosdistro_name, source_build_name)
        configure_devel_view(jenkins, view_name)

    job_name = get_devel_job_name(
        rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch)

    job_config = _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, repo.source_repository,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if the job count is zero,
    # so compare against False explicitly instead of relying on truthiness
    if jenkins is not False:
        configure_job(jenkins, job_name, job_config)
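
configure_devel_job reports problems by returning an error string instead of
raising, so callers are expected to check its return value. A minimal usage
sketch, assuming a hypothetical config URL and target (none of these values
come from the example above):

import sys

error = configure_devel_job(
    'https://example.com/buildfarm_config/index.yaml',  # hypothetical URL
    'melodic', 'default',                               # distro / build name
    'ros_comm', 'ubuntu', 'bionic', 'amd64')            # repo and target
if error:
    print(error, file=sys.stderr)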
Example #38
def configure_devel_jobs(config_url,
                         rosdistro_name,
                         source_build_name,
                         groovy_script=None,
                         dry_run=False,
                         whitelist_repository_names=None):
    """
    Configure all Jenkins devel jobs.

    L{configure_release_job} will be invoked for source repository and target
    which matches the build file criteria.
    """
    config = get_config_index(config_url)
    build_files = get_source_build_files(config, rosdistro_name)
    build_file = build_files[source_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print('  -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    devel_view_name = get_devel_view_name(rosdistro_name,
                                          source_build_name,
                                          pull_request=False)
    pull_request_view_name = get_devel_view_name(rosdistro_name,
                                                 source_build_name,
                                                 pull_request=True)

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url) if groovy_script is None else False

    view_configs = {}
    views = {}
    if build_file.test_commits_force is not False:
        views[devel_view_name] = configure_devel_view(jenkins,
                                                      devel_view_name,
                                                      dry_run=dry_run)
    if build_file.test_pull_requests_force is not False:
        views[pull_request_view_name] = configure_devel_view(
            jenkins, pull_request_view_name, dry_run=dry_run)
    if not jenkins:
        view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(view_configs),
    }

    repo_names = dist_file.repositories.keys()
    filtered_repo_names = build_file.filter_repositories(repo_names)

    devel_job_names = []
    pull_request_job_names = []
    job_configs = OrderedDict()
    for repo_name in sorted(repo_names):
        if whitelist_repository_names:
            if repo_name not in whitelist_repository_names:
                print(
                    "Skipping repository '%s' not in explicitly passed list" %
                    repo_name,
                    file=sys.stderr)
                continue

        is_disabled = repo_name not in filtered_repo_names
        if is_disabled and build_file.skip_ignored_repositories:
            print("Skipping ignored repository '%s'" % repo_name,
                  file=sys.stderr)
            continue

        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            print("Skipping repository '%s': no source section" % repo_name)
            continue
        if not repo.source_repository.version:
            print("Skipping repository '%s': no source version" % repo_name)
            continue

        job_types = []
        # check for testing commits
        if build_file.test_commits_force is False:
            print(("Skipping repository '%s': 'test_commits' is forced to " +
                   "false in the build file") % repo_name)
        elif repo.source_repository.test_commits is False:
            print(("Skipping repository '%s': 'test_commits' of the " +
                   "repository is set to false") % repo_name)
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            print(("Skipping repository '%s': 'test_commits' defaults to " +
                   "false in the build file") % repo_name)
        else:
            job_types.append('commit')

        if not is_disabled:
            # check for testing pull requests
            if build_file.test_pull_requests_force is False:
                # print(("Skipping repository '%s': 'test_pull_requests' " +
                #        "is forced to false in the build file") % repo_name)
                pass
            elif repo.source_repository.test_pull_requests is False:
                # print(("Skipping repository '%s': 'test_pull_requests' of " +
                #        "the repository set to false") % repo_name)
                pass
            elif repo.source_repository.test_pull_requests is None and \
                    not build_file.test_pull_requests_default:
                # print(("Skipping repository '%s': 'test_pull_requests' " +
                #        "defaults to false in the build file") % repo_name)
                pass
            else:
                print("Pull request job for repository '%s'" % repo_name)
                job_types.append('pull_request')

        for job_type in job_types:
            pull_request = job_type == 'pull_request'
            for os_name, os_code_name, arch in targets:
                try:
                    job_name, job_config = configure_devel_job(
                        config_url,
                        rosdistro_name,
                        source_build_name,
                        repo_name,
                        os_name,
                        os_code_name,
                        arch,
                        pull_request,
                        config=config,
                        build_file=build_file,
                        index=index,
                        dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins,
                        views=views,
                        is_disabled=is_disabled,
                        groovy_script=groovy_script,
                        dry_run=dry_run)
                    if not pull_request:
                        devel_job_names.append(job_name)
                    else:
                        pull_request_job_names.append(job_name)
                    if groovy_script is not None:
                        print("Configuration for job '%s'" % job_name)
                        job_configs[job_name] = job_config
                except JobValidationError as e:
                    print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    devel_job_prefix = '%s__' % devel_view_name
    pull_request_job_prefix = '%s__' % pull_request_view_name
    if not whitelist_repository_names:
        groovy_data['job_prefixes_and_names']['devel'] = \
            (devel_job_prefix, devel_job_names)
        groovy_data['job_prefixes_and_names']['pull_request'] = \
            (pull_request_job_prefix, pull_request_job_names)

        if groovy_script is None:
            # delete obsolete jobs in these views
            from ros_buildfarm.jenkins import remove_jobs
            print('Removing obsolete devel jobs')
            remove_jobs(jenkins,
                        devel_job_prefix,
                        devel_job_names,
                        dry_run=dry_run)
            print('Removing obsolete pull request jobs')
            remove_jobs(jenkins,
                        pull_request_job_prefix,
                        pull_request_job_names,
                        dry_run=dry_run)
    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d views and %d jobs" %
            (groovy_script, len(view_configs), len(job_configs)))
        content = expand_template('snippet/reconfigure_jobs.groovy.em',
                                  groovy_data)
        write_groovy_script_and_configs(groovy_script,
                                        content,
                                        job_configs,
                                        view_configs=view_configs)
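
When a groovy_script path is given, configure_devel_jobs skips the Jenkins
connection entirely and writes every view and job configuration to disk for
later replay through the generated groovy script. A minimal dry-run sketch,
assuming a hypothetical config URL and output path:

configure_devel_jobs(
    'https://example.com/buildfarm_config/index.yaml',  # hypothetical URL
    'melodic', 'default',
    groovy_script='/tmp/reconfigure_devel_jobs.groovy',  # hypothetical path
    dry_run=True)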
Example #39
def _get_blocked_releases_info(config_url, rosdistro_name, repo_names=None):
    import rosdistro
    from rosdistro.dependency_walker import DependencyWalker
    from catkin_pkg.package import InvalidPackage, parse_package_string

    prev_rosdistro_name = None

    config = get_config_index(config_url)

    index = rosdistro.get_index(config.rosdistro_index_url)
    valid_rosdistro_names = list(index.distributions.keys())
    valid_rosdistro_names.sort()
    if rosdistro_name is None:
        rosdistro_name = valid_rosdistro_names[-1]
    print('Checking packages for "%s" distribution' % rosdistro_name)

    # skip distributions with a different type if the information is available
    distro_type = index.distributions[rosdistro_name].get('distribution_type')
    if distro_type is not None:
        valid_rosdistro_names = [
            n for n in valid_rosdistro_names
            if distro_type == index.distributions[n].get('distribution_type')]

    # Find the previous distribution to the current one
    try:
        i = valid_rosdistro_names.index(rosdistro_name)
    except ValueError:
        print('Distribution key not found in list of valid distributions.', file=sys.stderr)
        sys.exit(-1)
    if i == 0:
        print('No previous distribution found.', file=sys.stderr)
        sys.exit(-1)
    prev_rosdistro_name = valid_rosdistro_names[i - 1]

    cache = rosdistro.get_distribution_cache(index, rosdistro_name)
    distro_file = cache.distribution_file

    prev_cache = rosdistro.get_distribution_cache(index, prev_rosdistro_name)
    prev_distribution = rosdistro.get_cached_distribution(
        index, prev_rosdistro_name, cache=prev_cache)

    prev_distro_file = prev_cache.distribution_file

    dependency_walker = DependencyWalker(prev_distribution)

    if repo_names is None:
        # Check missing dependencies for packages that were in the previous
        # distribution but have not yet been released in the current one
        # Filter out repos without a version or a release repository
        keys = prev_distro_file.repositories.keys()
        prev_repo_names = set(
            repo for repo in keys if _is_released(repo, prev_distro_file))
        repo_names = prev_repo_names
        ignored_inputs = []
    else:
        prev_repo_names = set(
            repo for repo in repo_names if _is_released(repo, prev_distro_file))
        ignored_inputs = list(set(repo_names).difference(prev_repo_names))
        if len(ignored_inputs) > 0:
            print(
                'Ignoring inputs for which repository info was not found in '
                'the previous distribution '
                '(did you list a package instead of a repository?):')
            print('\n'.join(
                sorted('\t{0}'.format(repo) for repo in ignored_inputs)))

    keys = distro_file.repositories.keys()
    current_repo_names = set(
        repo for repo in keys if _is_released(repo, distro_file))

    # Get a list of currently released packages
    current_package_names = set(
        pkg for repo in current_repo_names
        for pkg in distro_file.repositories[repo].release_repository.package_names)

    released_repos = prev_repo_names.intersection(
        current_repo_names)

    unreleased_repos = list(prev_repo_names.difference(
        current_repo_names))

    if len(unreleased_repos) == 0:
        print('All inputs already released in {0}.'.format(
            rosdistro_name))

    repos_info = defaultdict(dict)
    unprocessed_repos = prev_repo_names
    while unprocessed_repos:
        print('Processing repos:\n%s' %
              '\n'.join(['- %s' % r for r in sorted(unprocessed_repos)]))
        new_repos_to_process = set()  # set containing repos that come up while processing others

        for repo_name in unprocessed_repos:
            repos_info[repo_name]['released'] = repo_name in released_repos

            if repo_name in released_repos:
                repo = distro_file.repositories[repo_name]
                version = repo.release_repository.version
                repos_info[repo_name]['version'] = version

            else:
                # Gather info on which required repos have not been released yet
                # Assume dependencies will be the same as in the previous distribution and find
                # which ones have been released
                repo = prev_distro_file.repositories[repo_name]
                release_repo = repo.release_repository
                package_dependencies = set()
                packages = release_repo.package_names
                # Accumulate all dependencies for those packages
                for package in packages:
                    try:
                        package_dependencies |= dependency_walker.get_recursive_depends(
                            package, ['build', 'buildtool', 'run', 'test'], ros_packages_only=True,
                            limit_depth=1)
                    except AssertionError as e:
                        print(e, file=sys.stderr)

                # For all package dependencies, check if they are released yet
                unreleased_pkgs = package_dependencies.difference(
                    current_package_names)
                # Remove the packages which this repo provides
                unreleased_pkgs = unreleased_pkgs.difference(packages)

                # Get maintainer info and repo of unreleased packages
                maintainers = {}
                repos_blocked_by = set()
                for pkg_name in unreleased_pkgs:
                    unreleased_repo_name = \
                        prev_distro_file.release_packages[pkg_name].repository_name
                    repos_blocked_by.add(unreleased_repo_name)
                    pkg_xml = prev_distribution.get_release_package_xml(pkg_name)
                    if pkg_xml is not None:
                        try:
                            pkg = parse_package_string(pkg_xml)
                        except InvalidPackage:
                            pass
                        else:
                            pkg_maintainers = {m.name: m.email for m in pkg.maintainers}
                            if unreleased_repo_name not in maintainers:
                                maintainers[unreleased_repo_name] = {}
                            maintainers[unreleased_repo_name].update(pkg_maintainers)
                if maintainers:
                    repos_info[repo_name]['maintainers'] = maintainers

                repos_info[repo_name]['repos_blocked_by'] = {}
                for blocking_repo_name in repos_blocked_by:
                    # Get url of blocking repos
                    repo_url = None
                    blocking_repo = prev_distro_file.repositories[blocking_repo_name]
                    if blocking_repo.source_repository:
                        repo_url = blocking_repo.source_repository.url
                    elif blocking_repo.doc_repository:
                        repo_url = blocking_repo.doc_repository.url
                    repos_info[repo_name]['repos_blocked_by'].update(
                        {blocking_repo_name: repo_url})

                    # Mark blocking relationship in other direction
                    if blocking_repo_name not in repos_info:
                        new_repos_to_process.add(blocking_repo_name)
                        repos_info[blocking_repo_name] = {}
                    if 'repos_blocking' not in repos_info[blocking_repo_name]:
                        repos_info[blocking_repo_name]['repos_blocking'] = set()
                    repos_info[blocking_repo_name]['repos_blocking'].add(repo_name)

            # Get url of repo
            repo_url = None
            if repo.source_repository:
                repo_url = repo.source_repository.url
            elif repo.doc_repository:
                repo_url = repo.doc_repository.url
            if repo_url:
                repos_info[repo_name]['url'] = repo_url

            new_repos_to_process.discard(repo_name)  # this repo has been fully processed now

        for repo_name in repos_info.keys():
            # Recursively get all repos being blocked by this repo
            recursive_blocks = set()
            repos_to_check = {repo_name}
            while repos_to_check:
                next_repo_to_check = repos_to_check.pop()
                blocks = repos_info[next_repo_to_check].get('repos_blocking', set())
                new_blocks = blocks - recursive_blocks
                repos_to_check |= new_blocks
                recursive_blocks |= new_blocks
            if recursive_blocks:
                repos_info[repo_name]['recursive_repos_blocking'] = recursive_blocks
        unprocessed_repos = new_repos_to_process

    return repos_info
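
The returned repos_info maps each repository name to a dict with keys such as
'released', 'version', 'repos_blocked_by' and 'recursive_repos_blocking'. A
minimal consumption sketch, assuming a hypothetical config URL:

repos_info = _get_blocked_releases_info(
    'https://example.com/buildfarm_config/index.yaml',  # hypothetical URL
    'melodic')
for repo_name in sorted(repos_info):
    info = repos_info[repo_name]
    if info.get('released'):
        continue
    blockers = ', '.join(sorted(info.get('repos_blocked_by', {})))
    print('%s is blocked by: %s' % (repo_name, blockers or 'nothing'))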