def get_job_config(args, config):
    template_name = 'misc/rosdistro_cache_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    reconfigure_job_names = []
    build_files = get_release_build_files(config, args.rosdistro_name)
    for release_build_name in sorted(build_files.keys()):
        group_name = get_release_job_prefix(
            args.rosdistro_name, release_build_name)
        job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
        reconfigure_job_names.append(job_name)

    job_data = copy.deepcopy(args.__dict__)
    job_data.update({
        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,

        'repository_args': repository_args,

        'reconfigure_job_names': reconfigure_job_names,

        'notification_emails':
        config.distributions[args.rosdistro_name]['notification_emails'],

        'git_ssh_credential_id': config.git_ssh_credential_id,
    })
    job_config = expand_template(template_name, job_data)
    return job_config
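
The function above layers farm-specific template values on top of the parsed command-line arguments before expanding the EmPy job template. A minimal sketch of that merge pattern with a stub argparse.Namespace (the field values and job name below are placeholders, not real ros_buildfarm data):

import argparse
import copy

# Stub of the parsed arguments; the real jobs carry many more fields.
args = argparse.Namespace(
    config_url='https://example.com/index.yaml',
    rosdistro_name='noetic')

job_data = copy.deepcopy(args.__dict__)
job_data.update({
    # farm-specific keys layered on top of the CLI arguments
    'reconfigure_job_names': ['Nrel_reconfigure-jobs'],
})
print(sorted(job_data))
# ['config_url', 'reconfigure_job_names', 'rosdistro_name']
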
Example #2
def get_targets_by_repo(config, ros_distro_name):
    # collect all target repositories (building) and their targets
    # from all release build files
    target_dicts_by_repo = {}
    release_build_files = get_release_build_files(config, ros_distro_name)
    for release_build_file in release_build_files.values():
        target_repository = release_build_file.target_repository
        merged_os_names = target_dicts_by_repo.setdefault(
            target_repository, {})
        for os_name in release_build_file.targets.keys():
            os_code_names = release_build_file.targets[os_name]
            merged_os_code_names = merged_os_names.setdefault(os_name, {})
            for os_code_name in os_code_names.keys():
                arches = os_code_names[os_code_name]
                merged_arches = merged_os_code_names.setdefault(
                    os_code_name, {})
                for arch in arches.keys():
                    merged_arches.setdefault(arch, {})

    # flatten each os_code_name and arch into a single colon separated string
    targets_by_repo = {}
    for target_repository in target_dicts_by_repo.keys():
        targets_by_repo[target_repository] = []
        targets = target_dicts_by_repo[target_repository]
        # TODO support other OS names
        if 'ubuntu' in targets:
            ubuntu_targets = targets['ubuntu']
            for os_code_name in sorted(ubuntu_targets.keys()):
                target = '%s:source' % os_code_name
                targets_by_repo[target_repository].append(target)
                for arch in sorted(ubuntu_targets[os_code_name].keys()):
                    target = '%s:%s' % (os_code_name, arch)
                    targets_by_repo[target_repository].append(target)
    return targets_by_repo
def get_targets_by_repo(config, ros_distro_name):
    # collect all target repositories (building) and their targets
    # from all release build files
    target_dicts_by_repo = {}
    release_build_files = get_release_build_files(config, ros_distro_name)
    for release_build_file in release_build_files.values():
        target_repository = release_build_file.target_repository
        merged_os_names = target_dicts_by_repo.setdefault(
            target_repository, {})
        for os_name in release_build_file.targets.keys():
            os_code_names = release_build_file.targets[os_name]
            merged_os_code_names = merged_os_names.setdefault(os_name, {})
            for os_code_name in os_code_names.keys():
                arches = os_code_names[os_code_name]
                merged_arches = merged_os_code_names.setdefault(
                    os_code_name, {})
                for arch in arches.keys():
                    merged_arches.setdefault(arch, {})

    # flatten each os_code_name and arch into a single colon separated string
    targets_by_repo = {}
    for target_repository in target_dicts_by_repo.keys():
        targets_by_repo[target_repository] = []
        targets = target_dicts_by_repo[target_repository]
        # TODO support other OS names
        for os_name in ['debian', 'ubuntu']:
            if os_name not in targets:
                continue
            for os_code_name in sorted(targets[os_name].keys()):
                target = '%s:source' % os_code_name
                targets_by_repo[target_repository].append(target)
                for arch in sorted(targets[os_name][os_code_name].keys()):
                    target = '%s:%s' % (os_code_name, arch)
                    targets_by_repo[target_repository].append(target)
    return targets_by_repo
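
Both variants above merge the nested target dictionaries with dict.setdefault and then flatten them into 'os_code_name:arch' strings, plus one 'os_code_name:source' entry per code name. A self-contained sketch of that merge-and-flatten pattern with invented data:

# targets from two hypothetical release build files,
# keyed by os_name -> os_code_name -> arch
build_file_targets = [
    {'ubuntu': {'focal': {'amd64': {}, 'arm64': {}}}},
    {'ubuntu': {'focal': {'amd64': {}}, 'jammy': {'amd64': {}}}},
]

merged = {}
for targets in build_file_targets:
    for os_name, code_names in targets.items():
        merged_code_names = merged.setdefault(os_name, {})
        for code_name, arches in code_names.items():
            merged_arches = merged_code_names.setdefault(code_name, {})
            for arch in arches:
                merged_arches.setdefault(arch, {})

flattened = []
for code_name in sorted(merged.get('ubuntu', {})):
    flattened.append('%s:source' % code_name)
    flattened.extend('%s:%s' % (code_name, arch)
                     for arch in sorted(merged['ubuntu'][code_name]))
print(flattened)
# ['focal:source', 'focal:amd64', 'focal:arm64', 'jammy:source', 'jammy:amd64']
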
Example #4
def configure_sync_packages_to_testing_job(config_url,
                                           rosdistro_name,
                                           release_build_name,
                                           os_code_name,
                                           arch,
                                           config=None,
                                           build_file=None,
                                           jenkins=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        jenkins = connect(config.jenkins_url)

    job_name = get_sync_packages_to_testing_job_name(rosdistro_name,
                                                     os_code_name, arch)
    job_config = _get_sync_packages_to_testing_job_config(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        config, build_file)

    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_management_view(jenkins)
        configure_job(jenkins, job_name, job_config)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file)

    jenkins = connect(config.jenkins_url)

    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    group_name = get_release_view_name(
        args.rosdistro_name, args.release_build_name)

    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(jenkins, job_name, reconfigure_jobs_job_config, view=view)

    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(jenkins, job_name, trigger_jobs_job_config, view=view)

    job_name = 'import_upstream'
    configure_job(jenkins, job_name, import_upstream_job_config, view=view)
def get_upstream_job_names(config, repo):
    distributions = config.distributions.keys()
    if repo == 'main':
        upstream_job_names = ['{0}_sync-packages-to-{1}'.format(
            get_release_job_prefix(rosdistro), repo) for rosdistro in distributions]
    elif repo == 'testing':
        upstream_job_names = []
        for rosdistro in distributions:
            architectures_by_code_name = {}
            build_files = get_release_build_files(config, rosdistro)
            for build_file in build_files.values():
                for os_name in build_file.targets.keys():
                    for code_name, architectures in build_file.targets[os_name].items():
                        architectures_by_code_name[code_name] = \
                            architectures_by_code_name.get(code_name, set()) | \
                            set(architectures.keys())

            for code_name, archs in architectures_by_code_name.items():
                for arch in archs:
                    upstream_job_names.append(
                        '{prefix}_sync-packages-to-{repo}_{code_name}_{arch}'.format(
                            prefix=get_release_job_prefix(rosdistro),
                            repo=repo,
                            code_name=code_name,
                            arch=arch))
    else:
        raise JobValidationError(
            "Unknown upstream jobs for job 'upload_{}'.".format(repo))
    return ','.join(sorted(upstream_job_names))
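
For the 'testing' repository the helper unions the architectures of every build file per code name (dict.get with a set default) and then derives one sync job name per code name and architecture. A sketch of that accumulation with invented data ('Nrel' stands in for the result of get_release_job_prefix):

build_file_targets = [
    {'ubuntu': {'focal': {'amd64': {}, 'armhf': {}}}},
    {'ubuntu': {'focal': {'arm64': {}}}},
]

architectures_by_code_name = {}
for targets in build_file_targets:
    for code_names in targets.values():
        for code_name, architectures in code_names.items():
            architectures_by_code_name[code_name] = \
                architectures_by_code_name.get(code_name, set()) | \
                set(architectures.keys())

job_names = sorted(
    'Nrel_sync-packages-to-testing_%s_%s' % (code_name, arch)
    for code_name, archs in architectures_by_code_name.items()
    for arch in archs)
print(','.join(job_names))
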
Example #7
def configure_sync_packages_to_main_job(config_url,
                                        rosdistro_name,
                                        release_build_name,
                                        config=None,
                                        build_file=None,
                                        jenkins=None,
                                        dry_run=False):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)

    package_formats = set(package_format_mapping[os_name]
                          for os_name in build_file.targets.keys())
    assert len(package_formats) == 1
    package_format = package_formats.pop()

    job_name = get_sync_packages_to_main_job_name(rosdistro_name,
                                                  package_format)
    job_config = _get_sync_packages_to_main_job_config(rosdistro_name,
                                                       build_file,
                                                       package_format)

    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
    return (job_name, job_config)
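
The assertion above encodes the invariant that all target OS names of one release build file share a single package format. A sketch of that check, assuming a mapping similar to ros_buildfarm's package_format_mapping (the entries below are an assumption, not the real mapping):

# assumed subset of package_format_mapping
package_format_mapping = {'debian': 'deb', 'ubuntu': 'deb', 'rhel': 'rpm'}

targets = {'ubuntu': {}, 'debian': {}}  # hypothetical build file targets
package_formats = set(
    package_format_mapping[os_name] for os_name in targets.keys())
assert len(package_formats) == 1, 'mixed package formats in one build file'
package_format = package_formats.pop()
print(package_format)  # deb
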
def get_upstream_job_names(config, repo):
    distributions = config.distributions.keys()
    if repo == 'main':
        upstream_job_names = [
            '{0}_sync-packages-to-{1}'.format(
                get_release_job_prefix(rosdistro), repo)
            for rosdistro in distributions
        ]
    elif repo == 'testing':
        upstream_job_names = []
        for rosdistro in distributions:
            architectures_by_code_name = {}
            build_files = get_release_build_files(config, rosdistro)
            for build_file in build_files.values():
                for os_name in build_file.targets.keys():
                    for code_name, architectures in build_file.targets[
                            os_name].items():
                        architectures_by_code_name[code_name] = \
                            architectures_by_code_name.get(code_name, set()) | \
                            set(architectures.keys())

            for code_name, archs in architectures_by_code_name.items():
                for arch in archs:
                    upstream_job_names.append(
                        '{prefix}_sync-packages-to-{repo}_{code_name}_{arch}'.
                        format(prefix=get_release_job_prefix(rosdistro),
                               repo=repo,
                               code_name=code_name,
                               arch=arch))
    else:
        raise JobValidationError(
            "Unknown upstream jobs for job 'upload_{}'.".format(repo))
    upstream_job_names.append('import_upstream')
    return ','.join(sorted(upstream_job_names))
Example #9
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name,
        append_timestamp=False):
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers or build_file.abi_incompatibility_assumed:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            targets.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in targets:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins)

    view_name = get_release_view_name(rosdistro_name, release_build_name)
    view = configure_release_view(jenkins, view_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name))
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name))
            continue

        for os_name, os_code_name in targets:
            configure_release_job(
                config_url, rosdistro_name, release_build_name,
                pkg_name, os_name, os_code_name,
                append_timestamp=append_timestamp,
                config=config, build_file=build_file,
                index=index, dist_file=dist_file, dist_cache=dist_cache,
                jenkins=jenkins, view=view,
                generate_import_package_job=False)
Example #10
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    package_formats = set(
        package_format_mapping[os_name] for os_name in build_file.targets.keys())
    assert len(package_formats) == 1
    package_format = package_formats.pop()

    group_name = get_release_job_prefix(
        args.rosdistro_name, args.release_build_name)

    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    trigger_missed_jobs_job_config = get_trigger_missed_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file, package_format)
    trigger_broken_with_non_broken_upstream_job_config = \
        _get_trigger_broken_with_non_broken_upstream_job_config(
            args.rosdistro_name, args.release_build_name, build_file)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(
        jenkins, job_name, reconfigure_jobs_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(
        jenkins, job_name, trigger_jobs_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'trigger-missed-jobs')
    configure_job(
        jenkins, job_name, trigger_missed_jobs_job_config,
        dry_run=args.dry_run)

    job_name = 'import_upstream%s' % ('' if package_format == 'deb' else '_' + package_format)
    configure_job(
        jenkins, job_name, import_upstream_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % \
        (group_name, 'trigger-broken-with-non-broken-upstream')
    configure_job(
        jenkins, job_name, trigger_broken_with_non_broken_upstream_job_config,
        dry_run=args.dry_run)
Example #11
def partition_packages(config_url,
                       rosdistro_name,
                       release_build_name,
                       target,
                       cache_dir,
                       deduplicate_dependencies=False,
                       dist_cache=None):
    """Check all packages in the rosdistro and compare to the debian packages repository.

    Return the set of all packages and the set of missing ones.
    """
    # fetch debian package list
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = rosdistro_get_distribution_file(index, rosdistro_name)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    # Check the status of the apt repositories
    repo_index = get_package_repo_data(build_file.target_repository, [target],
                                       cache_dir)[target]

    # for each release package which matches the release build file
    # check if a binary package exists
    binary_packages = set()
    all_pkg_names = dist_file.release_packages.keys()

    # Remove packages without versions declared.
    def get_package_version(dist_file, pkg_name):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        return repo.release_repository.version

    all_pkg_names = [
        p for p in all_pkg_names if get_package_version(dist_file, p)
    ]

    distribution = get_cached_distribution(index,
                                           rosdistro_name,
                                           cache=dist_cache)
    pkg_names = filter_buildfile_packages_recursively(all_pkg_names,
                                                      build_file, distribution)
    for pkg_name in sorted(pkg_names):
        debian_pkg_name = get_os_package_name(rosdistro_name, pkg_name)
        if debian_pkg_name in repo_index:
            binary_packages.add(pkg_name)

    # check that all elements from whitelist are present
    missing_binary_packages = set(pkg_names) - binary_packages

    if deduplicate_dependencies:
        # Do not list missing packages that are dependencies of other missing ones
        cached_pkgs = get_package_manifests(distribution)
        missing_binary_packages = filter_blocked_dependent_package_names(
            cached_pkgs, missing_binary_packages)

    return binary_packages, missing_binary_packages
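
With deduplicate_dependencies enabled, the report omits missing packages whose absence is already explained by another missing package, leaving only the root causes. The real filtering is done by filter_blocked_dependent_package_names; a self-contained sketch of the idea with invented dependency data:

# hypothetical direct dependencies among the missing packages
depends = {
    'pkg_a': set(),
    'pkg_b': {'pkg_a'},  # pkg_b is missing because pkg_a is missing
    'pkg_c': {'pkg_b'},
}
missing = {'pkg_a', 'pkg_b', 'pkg_c'}

# keep only packages whose absence no other missing dependency explains
roots = {p for p in missing if not (depends.get(p, set()) & missing)}
print(sorted(roots))  # ['pkg_a']
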
def check_sync_criteria(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        cache_dir):
    # fetch debian package list
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    Target = namedtuple('Target', 'os_name os_code_name arch')
    target = Target('ubuntu', os_code_name, arch)

    repo_index = get_debian_repo_index(
        build_file.target_repository, target, cache_dir)

    # for each release package which matches the release build file
    # check if a binary package exists
    binary_packages = {}
    all_pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(all_pkg_names)
    for pkg_name in sorted(pkg_names):
        debian_pkg_name = get_debian_package_name(rosdistro_name, pkg_name)
        binary_packages[pkg_name] = debian_pkg_name in repo_index

    # check that all elements from whitelist are present
    if build_file.sync_packages:
        missing_binary_packages = [
            pkg_name
            for pkg_name in build_file.sync_packages
            if not binary_packages.get(pkg_name)]
        if missing_binary_packages:
            print('The following binary packages are missing to sync:',
                  file=sys.stderr)
            for pkg_name in sorted(missing_binary_packages):
                print('-', pkg_name, file=sys.stderr)
            return False
        print('All required binary packages are available:')
        for pkg_name in sorted(build_file.sync_packages):
            print('-', pkg_name)

    # check that count is satisfied
    if build_file.sync_package_count is not None:
        binary_package_count = len([
            pkg_name
            for pkg_name, has_binary_package in binary_packages.items()
            if has_binary_package])
        if binary_package_count < build_file.sync_package_count:
            print('Only %d binary packages available ' % binary_package_count +
                  '(at least %d are required to sync)' %
                  build_file.sync_package_count, file=sys.stderr)
            return False
        print('%d binary packages available ' % binary_package_count +
              '(greater than or equal to the configured sync limit of %d)' %
              build_file.sync_package_count)

    return True
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    group_name = get_release_job_prefix(
        args.rosdistro_name, args.release_build_name)

    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    trigger_missed_jobs_job_config = get_trigger_missed_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file)
    trigger_broken_with_non_broken_upstream_job_config = \
        _get_trigger_broken_with_non_broken_upstream_job_config(
            args.rosdistro_name, args.release_build_name, build_file)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(
        jenkins, job_name, reconfigure_jobs_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(
        jenkins, job_name, trigger_jobs_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % (group_name, 'trigger-missed-jobs')
    configure_job(
        jenkins, job_name, trigger_missed_jobs_job_config,
        dry_run=args.dry_run)

    job_name = 'import_upstream'
    configure_job(
        jenkins, job_name, import_upstream_job_config, dry_run=args.dry_run)

    job_name = '%s_%s' % \
        (group_name, 'trigger-broken-with-non-broken-upstream')
    configure_job(
        jenkins, job_name, trigger_broken_with_non_broken_upstream_job_config,
        dry_run=args.dry_run)
def get_sync_targets(config, repo):
    targets = set()
    distributions = config.distributions.keys()
    for rosdistro in distributions:
        build_files = get_release_build_files(config, rosdistro)
        for build_file in build_files.values():
            for os_name in build_file.targets.keys():
                if os_name in ['debian', 'ubuntu']:
                    targets.add(repo)
                else:
                    targets.add(os_name + '-' + repo)
    return targets
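
Deb-based OS names (debian, ubuntu) share the plain repository name, while any other OS yields an os-prefixed target. A runnable sketch of that naming rule with made-up inputs:

def sync_targets(os_names, repo):
    targets = set()
    for os_name in os_names:
        if os_name in ['debian', 'ubuntu']:
            targets.add(repo)
        else:
            targets.add(os_name + '-' + repo)
    return targets

print(sorted(sync_targets(['ubuntu', 'rhel'], 'testing')))
# ['rhel-testing', 'testing']
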
Example #15
def get_job_config(args, config):
    template_name = 'misc/rosdistro_cache_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    reconfigure_job_names = []
    build_files = get_release_build_files(config, args.rosdistro_name)
    for release_build_name in sorted(build_files.keys()):
        group_name = get_release_job_prefix(args.rosdistro_name,
                                            release_build_name)
        job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
        reconfigure_job_names.append(job_name)

    reconfigure_doc_job_names = []
    build_files = get_doc_build_files(config, args.rosdistro_name)
    for doc_build_name in sorted(build_files.keys()):
        group_name = get_doc_view_name(args.rosdistro_name, doc_build_name)
        job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
        reconfigure_doc_job_names.append(job_name)

    reconfigure_source_job_names = []
    build_files = get_source_build_files(config, args.rosdistro_name)
    for source_build_name in sorted(build_files.keys()):
        group_name = get_devel_view_name(args.rosdistro_name,
                                         source_build_name)
        job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
        reconfigure_source_job_names.append(job_name)

    job_data = copy.deepcopy(args.__dict__)
    job_data.update({
        'ros_buildfarm_repository':
        get_repository(),
        'script_generating_key_files':
        script_generating_key_files,
        'rosdistro_index_url':
        config.rosdistro_index_url,
        'repository_args':
        repository_args,
        'reconfigure_job_names':
        reconfigure_job_names,
        'reconfigure_doc_job_names':
        reconfigure_doc_job_names,
        'reconfigure_source_job_names':
        reconfigure_source_job_names,
        'notification_emails':
        config.distributions[args.rosdistro_name]['notification_emails'],
        'git_ssh_credential_id':
        config.git_ssh_credential_id,
    })
    job_config = expand_template(template_name, job_data)
    return job_config
Example #16
def configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=None, build_file=None, jenkins=None, dry_run=False):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        jenkins = connect(config.jenkins_url)

    job_name = get_import_package_job_name(rosdistro_name)
    job_config = _get_import_package_job_config(build_file)

    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
    return (job_name, job_config)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'release' management jobs on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_release_build_files(config, args.rosdistro_name)
    build_file = build_files[args.release_build_name]

    group_name = get_release_job_prefix(args.rosdistro_name,
                                        args.release_build_name)

    reconfigure_jobs_job_config = get_reconfigure_jobs_job_config(
        args, config, build_file)
    trigger_jobs_job_config = get_trigger_jobs_job_config(
        args, config, build_file)
    import_upstream_job_config = get_import_upstream_job_config(
        args, config, build_file)
    trigger_broken_with_non_broken_upstream_job_config = \
        _get_trigger_broken_with_non_broken_upstream_job_config(
            args.rosdistro_name, args.release_build_name, build_file)

    jenkins = connect(config.jenkins_url)

    configure_management_view(jenkins)

    job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
    configure_job(jenkins, job_name, reconfigure_jobs_job_config)

    job_name = '%s_%s' % (group_name, 'trigger-jobs')
    configure_job(jenkins, job_name, trigger_jobs_job_config)

    job_name = 'import_upstream'
    configure_job(jenkins, job_name, import_upstream_job_config)

    job_name = '%s_%s' % \
        (group_name, 'trigger-broken-with-non-broken-upstream')
    configure_job(jenkins, job_name,
                  trigger_broken_with_non_broken_upstream_job_config)
Example #18
def configure_sync_packages_to_main_job(
        config_url, rosdistro_name, release_build_name,
        config=None, build_file=None, jenkins=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        jenkins = connect(config.jenkins_url)

    job_name = get_sync_packages_to_main_job_name(
        rosdistro_name)
    job_config = _get_sync_packages_to_main_job_config(
        rosdistro_name, build_file)

    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_management_view(jenkins)
        configure_job(jenkins, job_name, job_config)
Example #19
def configure_sync_packages_to_testing_job(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        config=None, build_file=None, jenkins=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]
    if jenkins is None:
        jenkins = connect(config.jenkins_url)

    job_name = get_sync_packages_to_testing_job_name(
        rosdistro_name, release_build_name, os_code_name, arch)
    job_config = _get_sync_packages_to_testing_job_config(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        config, build_file)
    view = configure_view(jenkins, JENKINS_MANAGEMENT_VIEW)

    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config, view)
Example #20
def run_audit(config_url, rosdistro_name, cache_dir):
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    dist_cache = get_distribution_cache(index, rosdistro_name)
    build_files = get_release_build_files(config, rosdistro_name)
    missing_packages = {}
    for bf_name, bf_value in build_files.items():
        missing_packages[bf_name] = copy.deepcopy(bf_value.targets)
        for target in bf_value.get_targets_list():
            all_pkgs, missing_pkgs = partition_packages(
                config_url,
                rosdistro_name,
                bf_name,
                target,
                cache_dir,
                deduplicate_dependencies=True,
                dist_cache=dist_cache)
            missing_packages[bf_name][target] = missing_pkgs
            # intersect into copies so the in-place '&=' updates below do
            # not mutate the per-target sets stored above
            if 'all' in missing_packages[bf_name]:
                missing_packages[bf_name]['all'] &= missing_pkgs
            else:
                missing_packages[bf_name]['all'] = set(missing_pkgs)

            if 'all' in missing_packages:
                missing_packages['all'] &= missing_pkgs
            else:
                missing_packages['all'] = set(missing_pkgs)

    recommended_actions = len(missing_packages['all'])
    print('# Sync preparation report for %s' % rosdistro_name)
    print('Prepared for configuration: %s' % config_url)
    print('Prepared for rosdistro index: %s' % config.rosdistro_index_url)
    print('\n\n')

    if missing_packages['all']:
        print('## Packages failing on all platforms\n\n'
              'These releases are recommended to be rolled back:\n')
        for mp in sorted(missing_packages['all']):
            print(' - %s ' % mp)
        print('\n\n')
    else:
        print('## No packages detected failing on all platforms\n\n')

    def get_package_repository_link(dist_file, pkg_name):
        """Return the best guess of the url for filing a ticket against the package."""
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if repo.source_repository and repo.source_repository.url:
            return repo.source_repository.url
        if repo.release_repository and repo.release_repository.url:
            return repo.release_repository.url
        return None

    for bf_name in build_files.keys():
        print('## Audit of buildfile %s\n\n' % bf_name)
        # TODO(tfoote) use rosdistro API to print the release build config for editing
        recommended_blacklists = sorted(missing_packages[bf_name]['all'] -
                                        missing_packages['all'])
        recommended_actions += len(recommended_blacklists)
        if not recommended_blacklists:
            print(
                'Congratulations! '
                'No packages are failing to build on all targets for this buildfile.\n\n'
            )
            continue
        print(
            'Attention! '
            'The following packages are failing to build on all targets for this buildfile. '
            'It is recommended to blacklist them in the buildfile.\n\n')
        for rb in recommended_blacklists:
            print(' - %s:' % rb)
            jenkins_urls = get_jenkins_job_urls(
                rosdistro_name, config.jenkins_url, bf_name,
                build_files[bf_name].get_targets_list())
            url = get_package_repository_link(dist_file, rb)
            print('   - Suggested ticket location [%s](%s)' % (url, url))
            print('')
            print('   Title:')
            print('')
            print('       %s in %s fails to build on %s targets' %
                  (rb, rosdistro_name, bf_name))
            print('')
            print('   Body:')
            print('')
            print(
                '       The package %s in %s has been detected as not building'
                % (rb, rosdistro_name) +
                ' on all platforms in the buildfile %s.' % (bf_name) +
                ' The release manager for %s will consider disabling' %
                (rosdistro_name) +
                ' this build if it continues to fail to build.')
            print('       - jenkins_urls:')
            for target, ju in jenkins_urls.items():
                target_str = ' '.join([x for x in target])
                url = ju.format(pkg=rb)
                print('          - [%s](%s)' % (target_str, url))
                # TODO(tfoote) embed build status when buildfarm has https
                # print('    - %s [![Build Status](%s)](%s)' % (' '.join([x for x in target]),
                #       ju.format(pkg = rb) + '/badge/icon', ju.format(pkg = rb)))
            print(
                '       This is being filed because this package is about to be blacklisted.'
                ' If this ticket is resolved please review whether it can be removed from'
                ' the blacklist that should cross reference here.')
            print('')

    return recommended_actions
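
The audit keeps narrowing the 'all' entries by set intersection, so after the loop they contain only the packages missing on every target; those are the rollback candidates counted in recommended_actions. A sketch of that accumulation with invented data:

# missing packages per (build file, target), invented for illustration
missing_per_target = [
    {'pkg_a', 'pkg_b'},
    {'pkg_a', 'pkg_c'},
    {'pkg_a'},
]

missing_everywhere = None
for missing in missing_per_target:
    if missing_everywhere is None:
        missing_everywhere = set(missing)
    else:
        missing_everywhere &= missing
print(sorted(missing_everywhere))  # ['pkg_a'] -> rollback candidates
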
Example #21
def configure_release_jobs(config_url,
                           rosdistro_name,
                           release_build_name,
                           groovy_script=None):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally, a job to import Debian packages into the Debian repository is
    created.
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    platforms = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            platforms.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in platforms:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    pkg_names = dist_file.release_packages.keys()
    filtered_pkg_names = build_file.filter_packages(pkg_names)
    explicitly_ignored_pkg_names = set(pkg_names) - set(filtered_pkg_names)
    if explicitly_ignored_pkg_names:
        print(('The following packages are being %s because of ' +
               'white-/blacklisting:') %
              ('ignored' if build_file.skip_ignored_packages else 'disabled'))
        for pkg_name in sorted(explicitly_ignored_pkg_names):
            print('  -', pkg_name)

    dist_cache = None
    if build_file.notify_maintainers or \
            build_file.abi_incompatibility_assumed or \
            explicitly_ignored_pkg_names:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    if explicitly_ignored_pkg_names:
        # get direct dependencies from distro cache for each package
        direct_dependencies = {}
        for pkg_name in pkg_names:
            direct_dependencies[pkg_name] = _get_direct_dependencies(
                pkg_name, dist_cache, pkg_names) or set([])

        # find recursive downstream deps for all explicitly ignored packages
        ignored_pkg_names = set(explicitly_ignored_pkg_names)
        while True:
            implicitly_ignored_pkg_names = _get_downstream_package_names(
                ignored_pkg_names, direct_dependencies)
            if implicitly_ignored_pkg_names - ignored_pkg_names:
                ignored_pkg_names |= implicitly_ignored_pkg_names
                continue
            break
        implicitly_ignored_pkg_names = \
            ignored_pkg_names - explicitly_ignored_pkg_names

        if implicitly_ignored_pkg_names:
            print(('The following packages are being %s because their ' +
                   'dependencies are being ignored:') %
                  ('ignored'
                   if build_file.skip_ignored_packages else 'disabled'))
            for pkg_name in sorted(implicitly_ignored_pkg_names):
                print('  -', pkg_name)
            filtered_pkg_names = \
                set(filtered_pkg_names) - implicitly_ignored_pkg_names

    jenkins = connect(config.jenkins_url)

    configure_import_package_job(config_url,
                                 rosdistro_name,
                                 release_build_name,
                                 config=config,
                                 build_file=build_file,
                                 jenkins=jenkins)

    configure_sync_packages_to_main_job(config_url,
                                        rosdistro_name,
                                        release_build_name,
                                        config=config,
                                        build_file=build_file,
                                        jenkins=jenkins)
    for os_name, os_code_name in platforms:
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            configure_sync_packages_to_testing_job(config_url,
                                                   rosdistro_name,
                                                   release_build_name,
                                                   os_code_name,
                                                   arch,
                                                   config=config,
                                                   build_file=build_file,
                                                   jenkins=jenkins)

    targets = []
    for os_name, os_code_name in platforms:
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
    views = configure_release_views(jenkins, rosdistro_name,
                                    release_build_name, targets)

    if groovy_script is not None:
        # all further configuration will be handled by the groovy script
        jenkins = False

    all_source_job_names = []
    all_binary_job_names = []
    all_job_configs = {}
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        is_disabled = pkg_name not in filtered_pkg_names
        if is_disabled and build_file.skip_ignored_packages:
            print("Skipping ignored package '%s' in repository '%s'" %
                  (pkg_name, repo_name),
                  file=sys.stderr)
            continue
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name),
                  file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name),
                  file=sys.stderr)
            continue

        for os_name, os_code_name in platforms:
            try:
                source_job_names, binary_job_names, job_configs = \
                    configure_release_job(
                        config_url, rosdistro_name, release_build_name,
                        pkg_name, os_name, os_code_name,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins, views=views,
                        generate_import_package_job=False,
                        generate_sync_packages_jobs=False,
                        is_disabled=is_disabled,
                        groovy_script=groovy_script)
                all_source_job_names += source_job_names
                all_binary_job_names += binary_job_names
                if groovy_script is not None:
                    print('Configuration for jobs: ' +
                          ', '.join(source_job_names + binary_job_names))
                    all_job_configs.update(job_configs)
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    groovy_data = {
        'expected_num_jobs': len(all_job_configs),
        'job_prefixes_and_names': {},
    }

    # delete obsolete binary jobs
    for os_name, os_code_name in platforms:
        for arch in build_file.targets[os_name][os_code_name]:
            binary_view = get_release_binary_view_name(rosdistro_name,
                                                       release_build_name,
                                                       os_name, os_code_name,
                                                       arch)
            binary_job_prefix = '%s__' % binary_view

            excluded_job_names = set([
                j for j in all_binary_job_names
                if j.startswith(binary_job_prefix)
            ])
            if groovy_script is None:
                print("Removing obsolete binary jobs with prefix '%s'" %
                      binary_job_prefix)
                remove_jobs(jenkins, binary_job_prefix, excluded_job_names)
            else:
                binary_key = 'binary_%s_%s_%s' % (os_name, os_code_name, arch)
                groovy_data['job_prefixes_and_names'][binary_key] = \
                    (binary_job_prefix, excluded_job_names)

    # delete obsolete source jobs
    # requires knowledge about all other release build files
    for os_name, os_code_name in platforms:
        other_source_job_names = []
        # get source job names for all other release build files
        for other_release_build_name in [
                k for k in build_files.keys() if k != release_build_name
        ]:
            other_build_file = build_files[other_release_build_name]
            other_dist_file = get_distribution_file(index, rosdistro_name,
                                                    other_build_file)
            if not other_dist_file:
                continue

            if os_name not in other_build_file.targets or \
                    os_code_name not in other_build_file.targets[os_name]:
                continue

            if other_build_file.skip_ignored_packages:
                filtered_pkg_names = other_build_file.filter_packages(
                    pkg_names)
            else:
                filtered_pkg_names = pkg_names
            for pkg_name in sorted(filtered_pkg_names):
                pkg = other_dist_file.release_packages[pkg_name]
                repo_name = pkg.repository_name
                repo = other_dist_file.repositories[repo_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue

                other_job_name = get_sourcedeb_job_name(
                    rosdistro_name, other_release_build_name, pkg_name,
                    os_name, os_code_name)
                other_source_job_names.append(other_job_name)

        source_view_prefix = get_release_source_view_name(
            rosdistro_name, os_name, os_code_name)
        source_job_prefix = '%s__' % source_view_prefix
        excluded_job_names = set([
            j for j in (all_source_job_names + other_source_job_names)
            if j.startswith(source_job_prefix)
        ])
        if groovy_script is None:
            print("Removing obsolete source jobs with prefix '%s'" %
                  source_job_prefix)
            remove_jobs(jenkins, source_job_prefix, excluded_job_names)
        else:
            source_key = 'source_%s_%s' % (os_name, os_code_name)
            groovy_data['job_prefixes_and_names'][source_key] = (
                source_job_prefix, excluded_job_names)

    if groovy_script is not None:
        print("Writing groovy script '%s' to reconfigure %d jobs" %
              (groovy_script, len(all_job_configs)))
        content = expand_template('snippet/reconfigure_jobs.groovy.em',
                                  groovy_data)
        write_groovy_script_and_configs(groovy_script, content,
                                        all_job_configs)
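
The while loop in configure_release_jobs computes a fixpoint: any package downstream of an explicitly ignored package becomes implicitly ignored, transitively. A self-contained sketch of that closure with invented dependency data:

# direct_dependencies[pkg] = the packages pkg depends on (invented)
direct_dependencies = {
    'base': set(),
    'mid': {'base'},
    'leaf': {'mid'},
    'other': set(),
}

def downstream(ignored, deps):
    # packages that directly depend on any ignored package
    return {p for p, d in deps.items() if d & ignored}

ignored = {'base'}  # explicitly ignored
while True:
    implicit = downstream(ignored, direct_dependencies)
    if implicit - ignored:
        ignored |= implicit
        continue
    break
print(sorted(ignored - {'base'}))  # ['leaf', 'mid'] implicitly ignored
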
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument('--workspace-root',
                        required=True,
                        help='The root path of the workspace to compile')
    parser.add_argument('--rosdoc-lite-dir',
                        required=True,
                        help='The root path of the rosdoc_lite repository')
    parser.add_argument('--catkin-sphinx-dir',
                        required=True,
                        help='The root path of the catkin-sphinx repository')
    parser.add_argument('--rosdoc-index-dir',
                        required=True,
                        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument('--os-name',
                        required=True,
                        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument('--os-code-name',
                        required=True,
                        help="The OS code name (e.g. 'xenial')")
    parser.add_argument('--arch',
                        required=True,
                        help="The architecture (e.g. 'amd64')")
    add_argument_build_tool(parser, required=True)
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)
    index = get_index(config.rosdistro_index_url)

    condition_context = get_package_condition_context(index,
                                                      args.rosdistro_name)

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        for pkg in pkgs.values():
            pkg.evaluate_conditions(condition_context)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(args.workspace_root, 'src',
                                args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating update manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api',
                    pkg_name, 'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)

                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(
                    rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(args.output_dir, 'api', pkg_name,
                                   'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'updated rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependendencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(
                set([d.name for d in depends
                     if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependendencies for metapackage '%s'" %
                      pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(
                    set([
                        d.name for d in depends
                        if d.name in valid_package_names
                    ]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(pkg.name, ros_dependency_names)
        rosdoc_index.write_modified_data(args.output_dir,
                                         ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(args.output_dir,
                                                      'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(
                        os.path.join(pkg_changelog_doc_path, 'changelog.html'),
                        'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)
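    # (topological order matters here: the tag list file written below for
    #  each package pulls in the location files of its dependencies, so
    #  those dependencies need to have been processed first)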

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes
            ]
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(
                prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(args.output_dir, 'rosdoc_tags',
                               '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourselves;
            # bad things happen when we do
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url':
                '../../../api/%s/html' % pkg.name,
                'location':
                'file://%s' %
                os.path.join(args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package':
                pkg.name,
            }

            # fetch generator specific output folders from rosdoc_lite
            if pkg.name in rosdoc_config_files:
                output_folders = get_generator_output_folders(
                    rosdoc_config_files[pkg.name], pkg.name)
                if 'doxygen' in output_folders:
                    data['docs_url'] += '/' + output_folders['doxygen']

            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    doc_build_files = get_doc_build_files(config, args.rosdistro_name)
    doc_build_file = doc_build_files[args.doc_build_name]

    # create manifest.yaml files from repository / package meta information;
    # they will be merged later with the manifest.yaml files generated by
    # rosdoc_lite
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(
                rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(
                rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = '%s/%s/api/%s/html' % \
                (doc_build_file.canonical_base_url, args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(config.jenkins_url,
                                              args.rosdistro_name,
                                              args.doc_build_name,
                                              args.repository_name,
                                              args.os_name, args.os_code_name,
                                              args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(config.jenkins_url,
                                                build_files,
                                                args.rosdistro_name,
                                                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(config.jenkins_url,
                                                    build_files,
                                                    args.rosdistro_name,
                                                    pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(args.output_dir, 'manifests', pkg.name,
                               'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope('SUBSECTION',
               'overwrite CMakeLists.txt files to only generate messages'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type'
            ]
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" % pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope('SUBSECTION', 'determine dependencies and generate Dockerfile'):
        # initialize rosdep view
        context = initialize_resolver(args.rosdistro_name, args.os_name,
                                      args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            # since catkin is not a run dependency but provides the setup files
            get_os_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_os_package_name(args.rosdistro_name, 'genmsg'),
        ]

        if '3' == str(condition_context['ROS_PYTHON_VERSION']):
            # the following are required by rosdoc_lite
            debian_pkg_names.extend([
                'python3-catkin-pkg-modules', 'python3-kitchen',
                'python3-rospkg-modules', 'python3-sphinx', 'python3-yaml'
            ])
        else:
            if '2' != str(condition_context['ROS_PYTHON_VERSION']):
                print('Unknown ROS_PYTHON_VERSION, falling back to Python 2:',
                      condition_context)
            # the following are required by rosdoc_lite
            debian_pkg_names.extend([
                'python-catkin-pkg-modules', 'python-epydoc', 'python-kitchen',
                'python-rospkg', 'python-sphinx', 'python-yaml'
            ])

        if args.build_tool == 'colcon':
            debian_pkg_names.append('python3-colcon-ros')
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_os_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print('  -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(pkgs.values(), 'build, run and doc',
                                   _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use unreleased dependencies
            print(
                '# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in failing build'
            )
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        # generate Dockerfile
        data = {
            'os_name':
            args.os_name,
            'os_code_name':
            args.os_code_name,
            'arch':
            args.arch,
            'build_tool':
            doc_build_file.build_tool,
            'distribution_repository_urls':
            args.distribution_repository_urls,
            'distribution_repository_keys':
            get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),
            'environment_variables': [
                'ROS_PYTHON_VERSION={}'.format(
                    condition_context['ROS_PYTHON_VERSION'])
            ],
            'rosdistro_name':
            args.rosdistro_name,
            'uid':
            get_user_id(),
            'dependencies':
            debian_pkg_names,
            'dependency_versions':
            debian_pkg_versions,
            'install_lists': [],
            'canonical_base_url':
            doc_build_file.canonical_base_url,
            'ordered_pkg_tuples':
            ordered_pkg_tuples,
            'rosdoc_config_files':
            rosdoc_config_files,
        }
        create_dockerfile('doc/doc_task.Dockerfile.em', data,
                          args.dockerfile_dir)
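
The per-package version lookup above can be distilled into a standalone helper. This is a minimal sketch, reusing the get_binary_package_versions(apt_cache, pkg_names) helper called in the example; resolving one name at a time means a single unreleased dependency is merely skipped instead of failing the whole doc build:

def resolve_versions_tolerantly(apt_cache, debian_pkg_names):
    # look up versions one package at a time so a missing apt package
    # only drops that name instead of aborting the whole lookup
    versions = {}
    missing = []
    for name in debian_pkg_names:
        try:
            versions.update(get_binary_package_versions(apt_cache, [name]))
        except KeyError:
            missing.append(name)
    return versions, missing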
Example #23
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name, groovy_script=None,
        dry_run=False, whitelist_package_names=None):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally a job to import Debian packages into the Debian repository is
    created.
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    platforms = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            platforms.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in platforms:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    pkg_names = dist_file.release_packages.keys()
    filtered_pkg_names = build_file.filter_packages(pkg_names)
    explicitly_ignored_pkg_names = set(pkg_names) - set(filtered_pkg_names)
    if explicitly_ignored_pkg_names:
        print(('The following packages are being %s because of ' +
               'white-/blacklisting:') %
              ('ignored' if build_file.skip_ignored_packages else 'disabled'))
        for pkg_name in sorted(explicitly_ignored_pkg_names):
            print('  -', pkg_name)

    dist_cache = None
    if build_file.notify_maintainers or \
            build_file.abi_incompatibility_assumed or \
            explicitly_ignored_pkg_names:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    if explicitly_ignored_pkg_names:
        # get direct dependencies from distro cache for each package
        direct_dependencies = {}
        for pkg_name in pkg_names:
            direct_dependencies[pkg_name] = _get_direct_dependencies(
                pkg_name, dist_cache, pkg_names) or set([])

        # find recursive downstream deps for all explicitly ignored packages
        ignored_pkg_names = set(explicitly_ignored_pkg_names)
        while True:
            implicitly_ignored_pkg_names = _get_downstream_package_names(
                ignored_pkg_names, direct_dependencies)
            if implicitly_ignored_pkg_names - ignored_pkg_names:
                ignored_pkg_names |= implicitly_ignored_pkg_names
                continue
            break
        implicitly_ignored_pkg_names = \
            ignored_pkg_names - explicitly_ignored_pkg_names

        if implicitly_ignored_pkg_names:
            print(('The following packages are being %s because their ' +
                   'dependencies are being ignored:') % ('ignored'
                  if build_file.skip_ignored_packages else 'disabled'))
            for pkg_name in sorted(implicitly_ignored_pkg_names):
                print('  -', pkg_name)
            filtered_pkg_names = \
                set(filtered_pkg_names) - implicitly_ignored_pkg_names

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    jenkins = connect(config.jenkins_url) if groovy_script is None else False

    all_view_configs = {}
    all_job_configs = {}

    job_name, job_config = configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins, dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    job_name, job_config = configure_sync_packages_to_main_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins, dry_run=dry_run)
    if not jenkins:
        all_job_configs[job_name] = job_config

    for os_name, os_code_name in platforms:
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            job_name, job_config = configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins,
                dry_run=dry_run)
            if not jenkins:
                all_job_configs[job_name] = job_config

    targets = []
    for os_name, os_code_name in platforms:
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
    views = configure_release_views(
        jenkins, rosdistro_name, release_build_name, targets,
        dry_run=dry_run)
    if not jenkins:
        all_view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(views),
    }

    other_build_files = [
        v for k, v in build_files.items() if k != release_build_name]

    all_source_job_names = []
    all_binary_job_names = []
    for pkg_name in sorted(pkg_names):
        if whitelist_package_names:
            if pkg_name not in whitelist_package_names:
                print("Skipping package '%s' not in the explicitly passed list" %
                      pkg_name, file=sys.stderr)
                continue

        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        is_disabled = pkg_name not in filtered_pkg_names
        if is_disabled and build_file.skip_ignored_packages:
            print("Skipping ignored package '%s' in repository '%s'" %
                  (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name), file=sys.stderr)
            continue

        for os_name, os_code_name in platforms:
            other_build_files_same_platform = []
            for other_build_file in other_build_files:
                if os_name not in other_build_file.targets:
                    continue
                if os_code_name not in other_build_file.targets[os_name]:
                    continue
                other_build_files_same_platform.append(other_build_file)

            try:
                source_job_names, binary_job_names, job_configs = \
                    configure_release_job(
                        config_url, rosdistro_name, release_build_name,
                        pkg_name, os_name, os_code_name,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins, views=views,
                        generate_import_package_job=False,
                        generate_sync_packages_jobs=False,
                        is_disabled=is_disabled,
                        other_build_files_same_platform=other_build_files_same_platform,
                        groovy_script=groovy_script,
                        dry_run=dry_run)
                all_source_job_names += source_job_names
                all_binary_job_names += binary_job_names
                if groovy_script is not None:
                    print('Configuration for jobs: ' +
                          ', '.join(source_job_names + binary_job_names))
                    all_job_configs.update(job_configs)
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(all_job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    # with an explicit list of packages we don't delete obsolete jobs
    if not whitelist_package_names:
        # delete obsolete binary jobs
        for os_name, os_code_name in platforms:
            for arch in build_file.targets[os_name][os_code_name]:
                binary_view = get_release_binary_view_name(
                    rosdistro_name, release_build_name,
                    os_name, os_code_name, arch)
                binary_job_prefix = '%s__' % binary_view

                excluded_job_names = set([
                    j for j in all_binary_job_names
                    if j.startswith(binary_job_prefix)])
                if groovy_script is None:
                    print("Removing obsolete binary jobs with prefix '%s'" %
                          binary_job_prefix)
                    remove_jobs(
                        jenkins, binary_job_prefix, excluded_job_names,
                        dry_run=dry_run)
                else:
                    binary_key = 'binary_%s_%s_%s' % \
                        (os_name, os_code_name, arch)
                    groovy_data['job_prefixes_and_names'][binary_key] = \
                        (binary_job_prefix, excluded_job_names)

        # delete obsolete source jobs
        # requires knowledge about all other release build files
        for os_name, os_code_name in platforms:
            other_source_job_names = []
            # get source job names for all other release build files
            for other_release_build_name in [
                    k for k in build_files.keys() if k != release_build_name]:
                other_build_file = build_files[other_release_build_name]
                other_dist_file = get_distribution_file(
                    index, rosdistro_name, other_build_file)
                if not other_dist_file:
                    continue

                if os_name not in other_build_file.targets or \
                        os_code_name not in other_build_file.targets[os_name]:
                    continue

                if other_build_file.skip_ignored_packages:
                    filtered_pkg_names = other_build_file.filter_packages(
                        pkg_names)
                else:
                    filtered_pkg_names = pkg_names
                for pkg_name in sorted(filtered_pkg_names):
                    pkg = other_dist_file.release_packages[pkg_name]
                    repo_name = pkg.repository_name
                    repo = other_dist_file.repositories[repo_name]
                    if not repo.release_repository:
                        continue
                    if not repo.release_repository.version:
                        continue

                    other_job_name = get_sourcedeb_job_name(
                        rosdistro_name, other_release_build_name,
                        pkg_name, os_name, os_code_name)
                    other_source_job_names.append(other_job_name)

            source_view_prefix = get_release_source_view_name(
                rosdistro_name, os_name, os_code_name)
            source_job_prefix = '%s__' % source_view_prefix
            excluded_job_names = set([
                j for j in (all_source_job_names + other_source_job_names)
                if j.startswith(source_job_prefix)])
            if groovy_script is None:
                print("Removing obsolete source jobs with prefix '%s'" %
                      source_job_prefix)
                remove_jobs(
                    jenkins, source_job_prefix, excluded_job_names,
                    dry_run=dry_run)
            else:
                source_key = 'source_%s_%s' % (os_name, os_code_name)
                groovy_data['job_prefixes_and_names'][source_key] = (
                    source_job_prefix, excluded_job_names)

    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d views and %d jobs" %
            (groovy_script, len(all_view_configs), len(all_job_configs)))
        content = expand_template(
            'snippet/reconfigure_jobs.groovy.em', groovy_data)
        write_groovy_script_and_configs(
            groovy_script, content, all_job_configs,
            view_configs=all_view_configs)
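
The implicit-ignore computation above is a fixpoint over direct dependencies. A minimal standalone sketch of the same idea, with _get_downstream_package_names approximated by a set intersection and hypothetical package names in the usage comment:

def transitive_downstream(ignored_pkg_names, direct_dependencies):
    # grow the ignored set with every package that directly depends on
    # an already ignored package until the set stops changing
    closure = set(ignored_pkg_names)
    while True:
        downstream = set([
            pkg for pkg, deps in direct_dependencies.items()
            if deps & closure])
        if downstream - closure:
            closure |= downstream
            continue
        return closure

# e.g. with hypothetical packages where 'b' depends on 'a' and 'c' on 'b':
# transitive_downstream({'a'}, {'a': set(), 'b': {'a'}, 'c': {'b'}})
# returns {'a', 'b', 'c'}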
Example #24
def configure_release_job(
        config_url, rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        generate_import_package_job=True,
        generate_sync_packages_jobs=True,
        is_disabled=False, other_build_files_same_platform=None,
        groovy_script=None,
        filter_arches=None,
        dry_run=False):
    """
    Configure a Jenkins release job.

    The following jobs are created for each package:
    - M source jobs, one for each OS code name
    - M * N binary jobs, one for each combination of OS code name and arch
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    pkg_names = dist_file.release_packages.keys()

    if pkg_name not in pkg_names:
        raise JobValidationError(
            "Invalid package name '%s' " % pkg_name +
            'choose one of the following: ' + ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError(
            "Repository '%s' has no release section" % repo_name)

    if not repo.release_repository.version:
        raise JobValidationError(
            "Repository '%s' has no release version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if views is None:
        targets = []
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
        configure_release_views(
            jenkins, rosdistro_name, release_build_name, targets,
            dry_run=dry_run)

    if generate_import_package_job:
        configure_import_package_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins,
            dry_run=dry_run)

    if generate_sync_packages_jobs:
        configure_sync_packages_to_main_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins,
            dry_run=dry_run)
        for arch in build_file.targets[os_name][os_code_name]:
            configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins,
                dry_run=dry_run)

    source_job_names = []
    binary_job_names = []
    job_configs = {}

    # sourcedeb job
    # since sourcedeb jobs are potentially being shared across multiple build
    # files the configuration has to take all of them into account in order to
    # generate a job which all build files agree on
    source_job_name = get_sourcedeb_job_name(
        rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)

    # even though the package is disabled in the current build file,
    # it might still be used by sibling build files
    is_source_disabled = is_disabled
    if is_source_disabled and other_build_files_same_platform:
        # check if sourcedeb job is used by any other build file with the same platform
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                is_source_disabled = False
                break

    job_config = _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name,
        pkg_name, repo_name, repo.release_repository, dist_cache=dist_cache,
        is_disabled=is_source_disabled,
        other_build_files_same_platform=other_build_files_same_platform)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, source_job_name, job_config, dry_run=dry_run)
    source_job_names.append(source_job_name)
    job_configs[source_job_name] = job_config

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(
            pkg_name, dist_cache, pkg_names)
        # if dependencies are not yet available in rosdistro cache
        # skip binary jobs
        if dependency_names is None:
            print(("Skipping binary jobs for package '%s' because it is not " +
                   "yet in the rosdistro cache") % pkg_name, file=sys.stderr)
            return source_job_names, binary_job_names, job_configs

    # binarydeb jobs
    for arch in build_file.targets[os_name][os_code_name]:
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)

        upstream_job_names = [source_job_name] + [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name,
                dependency_name, os_name, os_code_name, arch)
            for dependency_name in dependency_names]

        job_config = _get_binarydeb_job_config(
            config_url, rosdistro_name, release_build_name,
            config, build_file, os_name, os_code_name, arch,
            pkg_name, repo_name, repo.release_repository,
            dist_cache=dist_cache, upstream_job_names=upstream_job_names,
            is_disabled=is_disabled)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config, dry_run=dry_run)
        binary_job_names.append(job_name)
        job_configs[job_name] = job_config

    return source_job_names, binary_job_names, job_configs
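
To make the "M source jobs, M * N binary jobs" fan-out from the docstring concrete, here is a rough sketch that enumerates the job names created across all platforms, reusing the get_sourcedeb_job_name / get_binarydeb_job_name helpers from above and assuming the usual os_name -> os_code_name -> arches nesting of targets:

def expected_release_job_names(
        rosdistro_name, release_build_name, pkg_name, targets):
    # one source job per (os_name, os_code_name), one binary job per arch
    job_names = []
    for os_name, os_code_names in targets.items():
        for os_code_name, arches in os_code_names.items():
            job_names.append(get_sourcedeb_job_name(
                rosdistro_name, release_build_name,
                pkg_name, os_name, os_code_name))
            for arch in arches:
                job_names.append(get_binarydeb_job_name(
                    rosdistro_name, release_build_name,
                    pkg_name, os_name, os_code_name, arch))
    return job_names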
Example #25
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_output_dir(parser, required=True)

    group = parser.add_argument_group(
        'Repositories in underlay workspace',
        description='The repositories in the underlay workspace will be ' +
                    'built and installed as well as built and tested. ' +
                    'Dependencies will be provided by binary packages.')
    group.add_argument(
        'source_repos',
        nargs='*',
        default=[],
        metavar='REPO_NAME',
        help="A name of a 'repository' from the distribution file")
    group.add_argument(
        '--custom-branch',
        nargs='*',
        type=_repository_name_and_branch,
        default=[],
        metavar='REPO_NAME:BRANCH_OR_TAG_NAME',
        help="A name of a 'repository' from the distribution file followed " +
             'by a colon and a branch / tag name')
    group.add_argument(
        '--custom-repo',
        nargs='*',
        type=_repository_name_and_type_and_url_and_branch,
        default=[],
        metavar='REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME',
        help='The name, type, url and branch / tag name of a repository')

    add_overlay_arguments(parser)

    args = parser.parse_args(argv)

    print('Fetching buildfarm configuration...')
    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    print('Fetching rosdistro cache...')
    # Targets defined by the source build file are a subset of the targets
    # defined by the release build files. To increase the number of supported
    # pre-release targets, we combine all targets defined by all release
    # build files and use that when configuring the devel job.
    release_build_files = get_release_build_files(config, args.rosdistro_name)
    release_targets_combined = {}
    if release_build_files:
        release_targets_combined[args.os_name] = {}
        for build_name, rel_obj in release_build_files.items():
            if args.os_name not in rel_obj.targets:
                continue
            for dist_name, targets in rel_obj.targets[args.os_name].items():
                if dist_name not in release_targets_combined[args.os_name]:
                    release_targets_combined[args.os_name][dist_name] = {}
                release_targets_combined[args.os_name][dist_name].update(targets)

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for underlay workspace
    repositories = {}
    for repo_name in args.source_repos:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            repositories[repo_name] = \
                dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1

    for repo_name, custom_version in args.custom_branch:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            source_repo = dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        source_repo = deepcopy(source_repo)
        source_repo.version = custom_version
        repositories[repo_name] = source_repo

    for repo_name, repo_type, repo_url, version in args.custom_repo:
        if repo_name in repositories and repositories[repo_name]:
            print("custom_repos option overriding '%s' to pull via '%s' "
                  "from '%s' with version '%s'. " %
                  (repo_name, repo_type, repo_url, version),
                  file=sys.stderr)
        source_repo = RepositorySpecification(
            repo_name, {
                'type': repo_type,
                'url': repo_url,
                'version': version,
            })
        repositories[repo_name] = source_repo

    scms = [(repositories[k], 'catkin_workspace/src/%s' % k)
            for k in sorted(repositories.keys())]

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeInclude(self, *args, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    # use an arbitrary source repo to pass to the devel job template
    repo_name = sorted(repositories.keys())[0]
    source_repository = deepcopy(repositories[repo_name])
    if not source_repository:
        print(("The repository '%s' does not have a source entry in the distribution " +
               'file. We cannot generate a prerelease without a source entry.') % repo_name,
              file=sys.stderr)
        return 1
    source_repository.name = 'prerelease'
    print('Evaluating job templates...')
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        None, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        index=index, dist_file=dist_file, dist_cache=dist_cache,
        jenkins=False, views=False,
        source_repository=source_repository,
        build_targets=release_targets_combined)

    templates.template_hooks = None

    # derive scripts for overlay workspace from underlay
    overlay_scripts = []
    for script in hook.scripts:
        # skip cloning of ros_buildfarm repository
        if 'git clone' in script and '.git ros_buildfarm' in script:
            continue
        # skip build-and-install step
        if 'build and install' in script:
            continue

        # add prerelease overlay flag
        run_devel_job = '/run_devel_job.py'
        if run_devel_job in script:
            script = script.replace(
                run_devel_job, run_devel_job + ' --prerelease-overlay')

        # replace mounted workspace volume with overlay and underlay
        # used by:
        # - create_devel_task_generator.py needs to find packages in both
        # the underlay as well as the overlay workspace
        # - catkin_make_isolated_and_test.py needs to source the environment of
        # the underlay before building the overlay
        mount_volume = '-v $WORKSPACE/catkin_workspace:/tmp/catkin_workspace'
        if mount_volume in script:
            script = script.replace(
                mount_volume, mount_volume + ':ro ' + '-v $WORKSPACE/' +
                'catkin_workspace_overlay:/tmp/catkin_workspace_overlay')

        # relocate all docker files
        docker_path = '$WORKSPACE/docker_'
        if docker_path in script:
            script = script.replace(
                docker_path, docker_path + 'overlay_')

        # rename all docker images
        name_suffix = '_prerelease'
        if name_suffix in script:
            script = script.replace(
                name_suffix, name_suffix + '_overlay')

        overlay_scripts.append(script)

    from ros_buildfarm import __file__ as ros_buildfarm_file
    data = deepcopy(args.__dict__)
    data.update({
        'scms': scms,
        'scripts': hook.scripts,
        'overlay_scripts': overlay_scripts,
        'ros_buildfarm_python_path': os.path.dirname(
            os.path.dirname(os.path.abspath(ros_buildfarm_file))),
        'python_executable': sys.executable,
        'prerelease_script_path': os.path.dirname(os.path.abspath(__file__))})

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # generate multiple scripts
    for script_name in [
            'prerelease',
            'prerelease_build_overlay',
            'prerelease_build_underlay',
            'prerelease_clone_overlay',
            'prerelease_clone_underlay']:
        content = expand_template(
            'prerelease/%s_script.sh.em' % script_name, data,
            options={BANGPATH_OPT: False})
        script_file = os.path.join(args.output_dir, script_name + '.sh')
        with open(script_file, 'w') as h:
            h.write(content)
        os.chmod(script_file, os.stat(script_file).st_mode | stat.S_IEXEC)

    print('')
    print('Generated prerelease script - to execute it run:')
    if os.path.abspath(args.output_dir) != os.path.abspath(os.curdir):
        print('  cd %s' % args.output_dir)
    print('  ./prerelease.sh')
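
The IncludeHook above works because ros_buildfarm.templates calls each registered hook's beforeInclude for every template include while the devel job config is expanded. A hypothetical variant that groups the collected scripts by template path instead of keeping a flat list:

class CollectScriptsByTemplate(Hook):

    def __init__(self):
        Hook.__init__(self)
        self.scripts_by_template = {}

    def beforeInclude(self, *args, **kwargs):
        # same callback signature as IncludeHook above; 'script' is only
        # defined for builder_shell snippets, hence the .get()
        template_path = kwargs['file'].name
        script = kwargs['locals'].get('script')
        if script is not None:
            self.scripts_by_template.setdefault(
                template_path, []).append(script)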
Example #26
def configure_release_job_with_validation(
        config_url, rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name, append_timestamp=False,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None,
        generate_import_package_job=True,
        filter_arches=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    if pkg_name not in pkg_names:
        raise JobValidationError(
            "Invalid package name '%s' " % pkg_name +
            'choose one of the following: ' + ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError(
            "Repository '%s' has no release section" % repo_name)

    if not repo.release_repository.version:
        raise JobValidationError(
            "Repository '%s' has no release version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_release_view_name(rosdistro_name, release_build_name)
        configure_release_view(jenkins, view_name)

    if generate_import_package_job:
        configure_import_package_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins)

    # sourcedeb job
    job_name = get_sourcedeb_job_name(
        rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)

    job_config = _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name, _get_target_arches(
            build_file, os_name, os_code_name, print_skipped=False),
        repo.release_repository, pkg_name,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config)

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(
            pkg_name, dist_cache, pkg_names)
        if dependency_names is None:
            return

    # binarydeb jobs
    for arch in _get_target_arches(build_file, os_name, os_code_name):
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)

        upstream_job_names = [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name,
                dependency_name, os_name, os_code_name, arch)
            for dependency_name in dependency_names]

        job_config = _get_binarydeb_job_config(
            config_url, rosdistro_name, release_build_name,
            config, build_file, os_name, os_code_name, arch,
            repo.release_repository, pkg_name, append_timestamp,
            repo_name, dist_cache=dist_cache,
            upstream_job_names=upstream_job_names)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config)
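
Callers typically wrap this function the same way configure_release_jobs wraps configure_release_job above: a JobValidationError for one package is reported and skipped instead of aborting the whole run. A minimal usage sketch:

try:
    configure_release_job_with_validation(
        config_url, rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)
except JobValidationError as e:
    print(e.message, file=sys.stderr)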
Example #27
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate `manifest.yaml` from released package manifests")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    add_argument_output_dir(parser, required=True)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)
    build_files = get_doc_build_files(config, args.rosdistro_name)
    build_file = build_files[args.doc_build_name]

    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    index = get_index(config.rosdistro_index_url)
    distribution = get_cached_distribution(index, args.rosdistro_name)

    # get rosdistro distribution cache
    # iterate over all released repositories
    # which don't have a doc entry
    # extract information from package.xml and generate manifest.yaml

    repo_names = get_repo_names_with_release_but_no_doc(distribution)
    pkg_names = get_package_names(distribution, repo_names)

    filtered_pkg_names = build_file.filter_packages(pkg_names)

    print("Generate 'manifest.yaml' files for the following packages:")
    api_path = os.path.join(args.output_dir, 'api')
    for pkg_name in sorted(filtered_pkg_names):
        print('- %s' % pkg_name)
        try:
            data = get_metadata(distribution, pkg_name)
        except Exception:
            print('Could not extract metadata:', file=sys.stderr)
            traceback.print_exc(file=sys.stderr)
            continue

        # add devel job urls
        rel_pkg = distribution.release_packages[pkg_name]
        repo_name = rel_pkg.repository_name
        repo = distribution.repositories[repo_name]
        if repo.source_repository and repo.source_repository.version:
            build_files = {}
            for build_name in source_build_files.keys():
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name, repo_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

        # add release job urls
        build_files = {}
        for build_name in release_build_files.keys():
            build_files[build_name] = release_build_files[build_name]
        release_job_urls = get_release_job_urls(
            config.jenkins_url, build_files, args.rosdistro_name, pkg_name)
        if release_job_urls:
            data['release_jobs'] = release_job_urls

        manifest_yaml = os.path.join(api_path, pkg_name, 'manifest.yaml')
        write_manifest_yaml(manifest_yaml, data)

    return 0
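
write_manifest_yaml is not shown in this listing. A plausible minimal implementation (an assumption, not the verbatim helper) would follow the same pattern as the other YAML writers in this file:

import os
import yaml

def write_manifest_yaml(manifest_yaml, data):
    # hypothetical reimplementation: ensure the destination directory
    # exists, then dump the collected metadata as YAML
    dst_dir = os.path.dirname(manifest_yaml)
    if not os.path.exists(dst_dir):
        os.makedirs(dst_dir)
    with open(manifest_yaml, 'w') as h:
        yaml.dump(data, h, default_flow_style=False)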
Example #28
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser)
    add_argument_output_dir(parser, required=True)

    group = parser.add_argument_group(
        'Repositories in underlay workspace',
        description='The repositories in the underlay workspace will be ' +
                    'built and installed as well as built and tested. ' +
                    'Dependencies will be provided by binary packages.')
    group.add_argument(
        'source_repos',
        nargs='*',
        default=[],
        metavar='REPO_NAME',
        help="A name of a 'repository' from the distribution file")
    group.add_argument(
        '--custom-branch',
        nargs='*',
        type=_repository_name_and_branch,
        default=[],
        metavar='REPO_NAME:BRANCH_OR_TAG_NAME',
        help="A name of a 'repository' from the distribution file followed " +
             'by a colon and a branch / tag name')
    group.add_argument(
        '--custom-repo',
        nargs='*',
        type=_repository_name_and_type_and_url_and_branch,
        default=[],
        metavar='REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME',
        help='The name, type, url and branch / tag name of a repository, '
             'e.g. "common_tutorials:git:https://github.com/ros/common_tutorials:pullrequest-1"')

    add_overlay_arguments(parser)

    args = parser.parse_args(argv)

    print('Fetching buildfarm configuration...')
    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    print('Fetching rosdistro cache...')
    # Targets defined by the source build file are a subset of the targets
    # defined by the release build files. To increase the number of supported
    # pre-release targets, we combine all targets defined by all release
    # build files and use that when configuring the devel job.
    release_build_files = get_release_build_files(config, args.rosdistro_name)
    release_targets_combined = {}
    if release_build_files:
        release_targets_combined[args.os_name] = {}
        for build_name, rel_obj in release_build_files.items():
            if args.os_name not in rel_obj.targets:
                continue
            for dist_name, targets in rel_obj.targets[args.os_name].items():
                if dist_name not in release_targets_combined[args.os_name]:
                    release_targets_combined[args.os_name][dist_name] = {}
                release_targets_combined[args.os_name][dist_name].update(targets)

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for underlay workspace
    repositories = {}
    for repo_name in args.source_repos:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            repositories[repo_name] = \
                dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        if not repositories[repo_name]:
            print(("The repository '%s' has no source entry in the " +
                   "distribution file") % repo_name, file=sys.stderr)
            return 1

    for repo_name, custom_version in args.custom_branch:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            source_repo = dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        if not source_repo:
            print(("The repository '%s' has no source entry in the " +
                   "distribution file") % repo_name, file=sys.stderr)
            return 1
        source_repo = deepcopy(source_repo)
        source_repo.version = custom_version
        repositories[repo_name] = source_repo

    for repo_name, repo_type, repo_url, version in args.custom_repo:
        if repo_name in repositories and repositories[repo_name]:
            print("custom_repos option overriding '%s' to pull via '%s' "
                  "from '%s' with version '%s'. " %
                  (repo_name, repo_type, repo_url, version),
                  file=sys.stderr)
        source_repo = RepositorySpecification(
            repo_name, {
                'type': repo_type,
                'url': repo_url,
                'version': version,
            })
        repositories[repo_name] = source_repo

    scms = [(repositories[k], 'ws/src/%s' % k)
            for k in sorted(repositories.keys())]
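    # each entry pairs a repository specification with its checkout path
    # inside the underlay workspace (ws/src/<repo_name>)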

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeInclude(self, *_, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]
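    # from here on every builder_shell.xml.em snippet expanded while
    # configuring the devel job below is captured in hook.scripts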

    # use any source repo to pass to devel job template
    distribution_type = index.distributions[args.rosdistro_name].get(
        'distribution_type', 'ros1')
    if distribution_type == 'ros1':
        package_name = 'catkin'
    elif distribution_type == 'ros2':
        package_name = 'ros_workspace'
    else:
        assert False, \
            'Unsupported distribution type ' + str(distribution_type)
    source_repository = deepcopy(
        dist_file.repositories[package_name].source_repository)
    if not source_repository:
        print(("The repository '%s' does not have a source entry in the distribution " +
               'file. We cannot generate a prerelease without a source entry.') % package_name,
              file=sys.stderr)
        return 1
    source_repository.name = 'prerelease'
    print('Evaluating job templates...')
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        None, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        index=index, dist_file=dist_file, dist_cache=dist_cache,
        jenkins=False, views=False,
        source_repository=source_repository,
        build_targets=release_targets_combined)

    templates.template_hooks = None

    # derive scripts for overlay workspace from underlay
    overlay_scripts = []
    for script in hook.scripts:
        # skip cloning of ros_buildfarm repository
        if 'git clone' in script and '.git ros_buildfarm' in script:
            continue
        # skip build-and-install step
        if 'build and install' in script:
            continue

        # add prerelease overlay flag
        run_devel_job = '/run_devel_job.py'
        if run_devel_job in script:
            script = script.replace(
                run_devel_job, run_devel_job + ' --prerelease-overlay')

        # replace the mounted workspace volume with overlay and underlay
        # used by:
        # - create_devel_task_generator.py, which needs to find packages in
        #   both the underlay and the overlay workspace
        # - build_and_test.py, which needs to source the environment of
        #   the underlay before building the overlay
        mount_volume = '-v $WORKSPACE/ws:/tmp/ws'
        if mount_volume in script:
            script = script.replace(
                mount_volume, mount_volume + ':ro ' + '-v $WORKSPACE/' +
                'ws_overlay:/tmp/ws_overlay')
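        # the underlay is now mounted read-only while a second volume
        # provides the writable overlay workspace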

        # relocate all docker files
        docker_path = '$WORKSPACE/docker_'
        if docker_path in script:
            script = script.replace(
                docker_path, docker_path + 'overlay_')

        # rename all docker images
        name_suffix = '_prerelease'
        if name_suffix in script:
            script = script.replace(
                name_suffix, name_suffix + '_overlay')

        overlay_scripts.append(script)

    from ros_buildfarm import __file__ as ros_buildfarm_file
    data = deepcopy(args.__dict__)
    data.update({
        'scms': scms,
        'scripts': hook.scripts,
        'overlay_scripts': overlay_scripts,
        'ros_buildfarm_python_path': os.path.dirname(
            os.path.dirname(os.path.abspath(ros_buildfarm_file))),
        'python_executable': sys.executable,
        'prerelease_script_path': os.path.dirname(os.path.abspath(__file__)),
        'build_tool': args.build_tool or build_file.build_tool})

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # generate multiple scripts
    for script_name in [
            'prerelease',
            'prerelease_build_overlay',
            'prerelease_build_underlay',
            'prerelease_clone_overlay',
            'prerelease_clone_underlay']:
        content = expand_template(
            'prerelease/%s_script.sh.em' % script_name, data,
            options={BANGPATH_OPT: False})
        script_file = os.path.join(args.output_dir, script_name + '.sh')
        with open(script_file, 'w') as h:
            h.write(content)
        os.chmod(script_file, os.stat(script_file).st_mode | stat.S_IEXEC)
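        # stat.S_IEXEC adds the owner execute bit so the generated script
        # can be run directly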

    print('')
    print('Generated prerelease script - to execute it run:')
    if os.path.abspath(args.output_dir) != os.path.abspath(os.curdir):
        print('  cd %s' % args.output_dir)
    print('  ./prerelease.sh')
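
# The generated prerelease.sh is the entry point; the accompanying
# prerelease_clone_* and prerelease_build_* scripts cover the individual
# underlay and overlay steps and can presumably be run on their own.
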
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--rosdoc-lite-dir',
        required=True,
        help='The root path of the rosdoc_lite repository')
    parser.add_argument(
        '--catkin-sphinx-dir',
        required=True,
        help='The root path of the catkin-sphinx repository')
    parser.add_argument(
        '--rosdoc-index-dir',
        required=True,
        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'trusty')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(
            args.workspace_root, 'src', args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes
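        # the dict comparison covers every tracked hash, so a change in the
        # source repository, in any tooling repository, or a bumped
        # ros_buildfarm counter retriggers the documentation generation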

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating updated manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api', pkg_name,
                    'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping, no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)

                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(
                    args.output_dir, 'api', pkg_name, 'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())
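    # dependencies are later filtered against this set: packages from the
    # local workspace plus all packages released into the distribution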

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'update rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependendencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(set([
                d.name for d in depends if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependendencies for metapackage '%s'" %
                      pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(set([
                    d.name for d in depends if d.name in valid_package_names]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(
                pkg.name, ros_dependency_names)
        rosdoc_index.write_modified_data(
            args.output_dir, ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(
                    args.output_dir, 'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(os.path.join(
                        pkg_changelog_doc_path, 'changelog.html'), 'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(
                args.output_dir, 'rosdoc_tags', '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tag file to ourselves;
            # bad things happen when we do
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url': '../../../api/%s/html' % pkg.name,
                'location': 'file://%s' % os.path.join(
                    args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package': pkg.name,
            }
            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = 'http://docs.ros.org/%s/api/%s/html' % \
                (args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(
                config.jenkins_url, args.rosdistro_name, args.doc_build_name,
                args.repository_name, args.os_name, args.os_code_name,
                args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name,
                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name, pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(
                args.output_dir, 'manifests', pkg.name, 'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope(
        'SUBSECTION',
        'overwrite CMakeLists.txt files to only generate messages'
    ):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type']
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" %
                  pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope(
        'SUBSECTION',
        'determine dependencies and generate Dockerfile'
    ):
        # initialize rosdep view
        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            'python-catkin-pkg',
            'python-epydoc',
            'python-kitchen',
            'python-rospkg',
            'python-sphinx',
            'python-yaml',
            # since catkin is not a run dependency but provides the setup files
            get_debian_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_debian_package_name(args.rosdistro_name, 'genmsg'),
        ]
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_debian_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print('  -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(
            pkgs.values(), 'build, run and doc', _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use dependencies that have not been released
            print('# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in failing build')
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        build_files = get_doc_build_files(config, args.rosdistro_name)
        build_file = build_files[args.doc_build_name]

        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes]
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        # generate Dockerfile
        data = {
            'os_name': args.os_name,
            'os_code_name': args.os_code_name,
            'arch': args.arch,

            'distribution_repository_urls': args.distribution_repository_urls,
            'distribution_repository_keys': get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),

            'rosdistro_name': args.rosdistro_name,

            'uid': get_user_id(),

            'dependencies': debian_pkg_names,
            'dependency_versions': debian_pkg_versions,

            'canonical_base_url': build_file.canonical_base_url,

            'ordered_pkg_tuples': ordered_pkg_tuples,
            'rosdoc_config_files': rosdoc_config_files,
        }
        create_dockerfile(
            'doc/doc_task.Dockerfile.em', data, args.dockerfile_dir)
Example #30
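# A minimal invocation sketch for the function below; every argument value
# here is an illustrative assumption, not real build farm configuration:
#
#   source_jobs, binary_jobs, job_configs = configure_release_job(
#       'https://example.com/buildfarm/index.yaml', 'melodic', 'default',
#       'roscpp', 'ubuntu', 'bionic', dry_run=True)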
def configure_release_job(config_url,
                          rosdistro_name,
                          release_build_name,
                          pkg_name,
                          os_name,
                          os_code_name,
                          config=None,
                          build_file=None,
                          index=None,
                          dist_file=None,
                          dist_cache=None,
                          jenkins=None,
                          views=None,
                          generate_import_package_job=True,
                          generate_sync_packages_jobs=True,
                          is_disabled=False,
                          other_build_files_same_platform=None,
                          groovy_script=None,
                          filter_arches=None,
                          dry_run=False):
    """
    Configure a Jenkins release job.

    The following jobs are created for each package:
    - M source jobs, one for each OS code name
    - M * N binary jobs, one for each combination of OS code name and arch
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    pkg_names = dist_file.release_packages.keys()

    if pkg_name not in pkg_names:
        raise JobValidationError("Invalid package name '%s' " % pkg_name +
                                 'choose one of the following: ' +
                                 ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError("Repository '%s' has no release section" %
                                 repo_name)

    if not repo.release_repository.version:
        raise JobValidationError("Repository '%s' has no release version" %
                                 repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError("Invalid OS name '%s' " % os_name +
                                 'choose one of the following: ' +
                                 ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        targets = []
        targets.append((os_name, os_code_name, 'source'))
        for arch in build_file.targets[os_name][os_code_name]:
            targets.append((os_name, os_code_name, arch))
        configure_release_views(jenkins,
                                rosdistro_name,
                                release_build_name,
                                targets,
                                dry_run=dry_run)

    if generate_import_package_job:
        configure_import_package_job(config_url,
                                     rosdistro_name,
                                     release_build_name,
                                     config=config,
                                     build_file=build_file,
                                     jenkins=jenkins,
                                     dry_run=dry_run)

    if generate_sync_packages_jobs:
        configure_sync_packages_to_main_job(config_url,
                                            rosdistro_name,
                                            release_build_name,
                                            config=config,
                                            build_file=build_file,
                                            jenkins=jenkins,
                                            dry_run=dry_run)
        for arch in build_file.targets[os_name][os_code_name]:
            configure_sync_packages_to_testing_job(config_url,
                                                   rosdistro_name,
                                                   release_build_name,
                                                   os_code_name,
                                                   arch,
                                                   config=config,
                                                   build_file=build_file,
                                                   jenkins=jenkins,
                                                   dry_run=dry_run)

    source_job_names = []
    binary_job_names = []
    job_configs = {}

    # sourcedeb job
    # since sourcedeb jobs are potentially shared across multiple build
    # files, the configuration has to take all of them into account in order
    # to generate a job which all build files agree on
    source_job_name = get_sourcedeb_job_name(rosdistro_name,
                                             release_build_name, pkg_name,
                                             os_name, os_code_name)

    # even though the package is disabled in the current build file
    # it might still be used by sibling build files
    is_source_disabled = is_disabled
    if is_source_disabled and other_build_files_same_platform:
        # check if sourcedeb job is used by any other build file with the same platform
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                is_source_disabled = False
                break

    job_config = _get_sourcedeb_job_config(
        config_url,
        rosdistro_name,
        release_build_name,
        config,
        build_file,
        os_name,
        os_code_name,
        pkg_name,
        repo_name,
        repo.release_repository,
        dist_cache=dist_cache,
        is_disabled=is_source_disabled,
        other_build_files_same_platform=other_build_files_same_platform)
    # jenkinsapi.jenkins.Jenkins evaluates to false if the job count is zero,
    # so compare against False explicitly instead of using truthiness
    if jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, source_job_name, job_config, dry_run=dry_run)
    source_job_names.append(source_job_name)
    job_configs[source_job_name] = job_config

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(pkg_name, dist_cache,
                                                    pkg_names)
        # if dependencies are not yet available in rosdistro cache
        # skip binary jobs
        if dependency_names is None:
            print(("Skipping binary jobs for package '%s' because it is not " +
                   "yet in the rosdistro cache") % pkg_name,
                  file=sys.stderr)
            return source_job_names, binary_job_names, job_configs

    # binarydeb jobs
    for arch in build_file.targets[os_name][os_code_name]:
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(rosdistro_name, release_build_name,
                                          pkg_name, os_name, os_code_name,
                                          arch)

        upstream_job_names = [source_job_name] + [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name, dependency_name, os_name,
                os_code_name, arch) for dependency_name in dependency_names
        ]
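        # the binarydeb job is configured downstream of its sourcedeb job and
        # of the binarydeb jobs of all direct dependencies on the same target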

        job_config = _get_binarydeb_job_config(
            config_url,
            rosdistro_name,
            release_build_name,
            config,
            build_file,
            os_name,
            os_code_name,
            arch,
            pkg_name,
            repo_name,
            repo.release_repository,
            dist_cache=dist_cache,
            upstream_job_names=upstream_job_names,
            is_disabled=is_disabled)
        # jenkinsapi.jenkins.Jenkins evaluates to false if the job count is
        # zero, so compare against False explicitly
        if jenkins is not False:
            configure_job(jenkins, job_name, job_config, dry_run=dry_run)
        binary_job_names.append(job_name)
        job_configs[job_name] = job_config

    return source_job_names, binary_job_names, job_configs
Example #31
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Generate all jobs on Jenkins')
    add_argument_config_url(parser)
    parser.add_argument(
        '--ros-distro-names',
        nargs='*',
        metavar='ROS_DISTRO_NAME',
        default=[],
        help='The list of ROS distribution names if not generating all')
    parser.add_argument(
        '--skip-rosdistro-cache-job',
        action='store_true',
        help='Skip generating the rosdistro-cache jobs')
    parser.add_argument(
        '--commit',
        action='store_true',
        help='Apply the changes to Jenkins instead of only showing them')
    args = parser.parse_args(argv)

    if args.commit:
        print('The following changes will be applied to the Jenkins server.')
    else:
        print('This is a dry run. The Jenkins configuration is not changed.')
    print('')

    config = get_index(args.config_url)
    ros_distro_names = sorted(config.distributions.keys())

    invalid_ros_distro_name = [
        n for n in args.ros_distro_names if n not in ros_distro_names]
    if invalid_ros_distro_name:
        parser.error(
            'The following ROS distribution names are not part of the ' +
            'buildfarm index: ' + ', '.join(sorted(invalid_ros_distro_name)))

    # try to connect to Jenkins master
    jenkins = connect(config.jenkins_url)

    configure_view(
        jenkins, 'Queue', filter_queue=False, dry_run=not args.commit)

    generate_check_slaves_job(args.config_url, dry_run=not args.commit)

    if not args.ros_distro_names:
        generate_dashboard_job(args.config_url, dry_run=not args.commit)

        for doc_build_name in sorted(config.doc_builds.keys()):
            generate_doc_independent_job(
                args.config_url, doc_build_name, dry_run=not args.commit)

    selected_ros_distro_names = [
        n for n in ros_distro_names
        if not args.ros_distro_names or n in args.ros_distro_names]
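    # an empty --ros-distro-names argument selects every distribution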

    for ros_distro_name in selected_ros_distro_names:
        print(ros_distro_name)

        if not args.skip_rosdistro_cache_job:
            generate_rosdistro_cache_job(
                args.config_url, ros_distro_name, dry_run=not args.commit)

        release_build_files = get_release_build_files(config, ros_distro_name)
        for release_build_name in release_build_files.keys():
            generate_release_status_page_job(
                args.config_url, ros_distro_name, release_build_name,
                dry_run=not args.commit)
            generate_release_maintenance_jobs(
                args.config_url, ros_distro_name, release_build_name,
                dry_run=not args.commit)

        source_build_files = get_source_build_files(config, ros_distro_name)
        for source_build_name in source_build_files.keys():
            generate_devel_maintenance_jobs(
                args.config_url, ros_distro_name, source_build_name,
                dry_run=not args.commit)

        doc_build_files = get_doc_build_files(config, ros_distro_name)
        for doc_build_name, doc_build_file in doc_build_files.items():
            if doc_build_file.documentation_type == DOC_TYPE_ROSDOC:
                generate_doc_maintenance_jobs(
                    args.config_url, ros_distro_name, doc_build_name,
                    dry_run=not args.commit)
            elif doc_build_file.documentation_type == DOC_TYPE_MANIFEST:
                generate_doc_metadata_job(
                    args.config_url, ros_distro_name, doc_build_name,
                    dry_run=not args.commit)
            else:
                assert False, ("Unknown documentation type '%s' in doc " +
                               "build file '%s'") % \
                    (doc_build_file.documentation_type, doc_build_name)

        generate_repos_status_page_jobs(
            args.config_url, ros_distro_name, dry_run=not args.commit)
        index = ros_distro_names.index(ros_distro_name)
        if index > 0:
            # Generate comparison pages for this rosdistro against all older ones.
            generate_release_compare_page_job(
                args.config_url, ros_distro_name, ros_distro_names[:index],
                dry_run=not args.commit)
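
# Hypothetical command line usage of this entry point (the script name is an
# assumption):
#   generate_all_jobs.py <config_url>             # dry run, only print changes
#   generate_all_jobs.py <config_url> --commit    # apply the changes to Jenkins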
Example #32
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name,
        append_timestamp=False):
    """
    Configure all Jenkins release jobs.

    L{configure_release_job} will be invoked for every released package and
    target which matches the build file criteria.

    Additionally a job to import Debian packages into the Debian repository is
    created.
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers or build_file.abi_incompatibility_assumed:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            targets.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in targets:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins)

    for os_name, os_code_name in targets:
        if os_name != 'ubuntu':
            continue
        for arch in sorted(build_file.targets[os_name][os_code_name]):
            configure_sync_packages_to_testing_job(
                config_url, rosdistro_name, release_build_name,
                os_code_name, arch,
                config=config, build_file=build_file, jenkins=jenkins)

    view_name = get_release_view_name(rosdistro_name, release_build_name)
    view = configure_release_view(jenkins, view_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)
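    # filter_packages narrows the released packages to those selected by the
    # build file (e.g. its package white- and blacklists)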

    all_job_names = []
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name), file=sys.stderr)
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name), file=sys.stderr)
            continue

        for os_name, os_code_name in targets:
            try:
                job_names = configure_release_job(
                    config_url, rosdistro_name, release_build_name,
                    pkg_name, os_name, os_code_name,
                    append_timestamp=append_timestamp,
                    config=config, build_file=build_file,
                    index=index, dist_file=dist_file, dist_cache=dist_cache,
                    jenkins=jenkins, view=view,
                    generate_import_package_job=False,
                    generate_sync_packages_to_testing_job=False)
                all_job_names += job_names
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    # delete obsolete jobs in this view
    remove_jobs(jenkins, '%s__' % view_name, all_job_names)
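    # jobs in this view share the '<view_name>__' prefix; anything with that
    # prefix that is not listed in all_job_names is removed as obsolete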
Example #33
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Generate all jobs on Jenkins')
    add_argument_config_url(parser)
    parser.add_argument(
        '--ros-distro-names',
        nargs='*',
        metavar='ROS_DISTRO_NAME',
        default=[],
        help='The list of ROS distribution names if not generating all')
    parser.add_argument(
        '--skip-rosdistro-cache-job',
        action='store_true',
        help='Skip generating the rosdistro-cache jobs')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    ros_distro_names = config.distributions.keys()

    invalid_ros_distro_name = [
        n for n in args.ros_distro_names if n not in ros_distro_names]
    if invalid_ros_distro_name:
        parser.error(
            'The following ROS distribution names are not part of the ' +
            'buildfarm index: ' + ', '.join(sorted(invalid_ros_distro_name)))

    # try to connect to Jenkins master
    connect(config.jenkins_url)

    generate_check_slaves_job(args.config_url)

    if not args.ros_distro_names:
        generate_dashboard_job(args.config_url)

        for doc_build_name in sorted(config.doc_builds.keys()):
            generate_doc_independent_job(args.config_url, doc_build_name)

    selected_ros_distro_names = [
        n for n in ros_distro_names
        if not args.ros_distro_names or n in args.ros_distro_names]

    for ros_distro_name in sorted(selected_ros_distro_names):
        print(ros_distro_name)

        if not args.skip_rosdistro_cache_job:
            generate_rosdistro_cache_job(args.config_url, ros_distro_name)

        release_build_files = get_release_build_files(config, ros_distro_name)
        for release_build_name in release_build_files.keys():
            generate_release_status_page_job(
                args.config_url, ros_distro_name, release_build_name)
            generate_release_maintenance_jobs(
                args.config_url, ros_distro_name, release_build_name)

        source_build_files = get_source_build_files(config, ros_distro_name)
        for source_build_name in source_build_files.keys():
            generate_devel_maintenance_jobs(
                args.config_url, ros_distro_name, source_build_name)

        doc_build_files = get_doc_build_files(config, ros_distro_name)
        for doc_build_name, doc_build_file in doc_build_files.items():
            if doc_build_file.documentation_type == DOC_TYPE_ROSDOC:
                generate_doc_maintenance_jobs(
                    args.config_url, ros_distro_name, doc_build_name)
            elif doc_build_file.documentation_type == DOC_TYPE_MANIFEST:
                generate_doc_metadata_job(
                    args.config_url, ros_distro_name, doc_build_name)
            else:
                assert False, ("Unknown documentation type '%s' in doc " +
                               "build file '%s'") % \
                    (doc_build_file.documentation_type, doc_build_name)

        generate_repos_status_page_jobs(
            args.config_url, ros_distro_name)