Example #1
def build_binarydeb(rosdistro_name, package_name, sourcedeb_dir):
    # ensure that one source subfolder exists
    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    source, version = dpkg_parsechangelog(source_dir, ['Source', 'Version'])
    # output package version for job description
    print("Package '%s' version: %s" % (debian_package_name, version))

    cmd = ['apt-src', 'import', source, '--here', '--version', version]
    subprocess.check_call(cmd, cwd=source_dir)

    cmd = ['apt-src', 'build', source]
    print("Invoking '%s' in '%s'" % (' '.join(cmd), source_dir))
    try:
        subprocess.check_call(cmd, cwd=source_dir)
    except subprocess.CalledProcessError:
        traceback.print_exc()
        sys.exit("""
--------------------------------------------------------------------------------------------------
`{0}` failed.
This is usually because of an error building the package.
The traceback from this failure (just above) is printed for completeness, but you can ignore it.
You should look above `E: Building failed` in the build log for the actual cause of the failure.
--------------------------------------------------------------------------------------------------
""".format(' '.join(cmd)))
def append_build_timestamp(rosdistro_name, package_name, sourcedeb_dir):
    # ensure that one source subfolder exists
    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    source, version, distribution, urgency = dpkg_parsechangelog(
        source_dir, ['Source', 'Version', 'Distribution', 'Urgency'])
    cmd = [
        'debchange',
        '-v',
        '%s.%s' % (version, strftime('%Y%m%d.%H%M%S', gmtime()))
        # Backwards compatibility for #460
        if rosdistro_name not in (
            'indigo', 'jade', 'kinetic', 'lunar', 'ardent')
        else '%s-%s' % (version, strftime('%Y%m%d-%H%M%S%z')),
        '-p',  # preserve directory name
        '-D', distribution,
        '-u', urgency,
        '-m',  # keep maintainer details
        'Append timestamp when binarydeb was built.',
    ]
    print("Invoking '%s' in '%s'" % (' '.join(cmd), source_dir))
    subprocess.check_call(cmd, cwd=source_dir)
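Both functions in this example call a dpkg_parsechangelog helper that is not shown on this page. A minimal sketch of what it might look like, assuming it shells out to the standard dpkg-parsechangelog tool and returns the requested changelog fields in order (the helper name and call shape come from the code above; the implementation itself is an assumption):

import subprocess


def dpkg_parsechangelog(source_dir, fields):
    # Hypothetical sketch: run dpkg-parsechangelog in the source tree
    # and pick the requested fields out of its 'Key: value' output.
    output = subprocess.check_output(
        ['dpkg-parsechangelog'], cwd=source_dir).decode()
    values = {}
    for line in output.splitlines():
        if ': ' in line:
            key, value = line.split(': ', 1)
            values[key] = value
    return [values[field] for field in fields]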
Example #3
def append_build_timestamp(rosdistro_name, package_name, sourcedeb_dir):
    # ensure that one source subfolder exists
    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    source, version, distribution, urgency = dpkg_parsechangelog(
        source_dir, ['Source', 'Version', 'Distribution', 'Urgency'])
    cmd = [
        'debchange',
        '-v',
        '%s.%s' % (version, strftime('%Y%m%d.%H%M%S', gmtime()))
        # Backwards compatibility for #460
        if rosdistro_name not in ('indigo', 'jade', 'kinetic', 'lunar')
        else '%s-%s' % (version, strftime('%Y%m%d-%H%M%S%z')),
        '-p',  # preserve directory name
        '-D',
        distribution,
        '-u',
        urgency,
        '-m',  # keep maintainer details
        'Append timestamp when binarydeb was built.',
    ]
    print("Invoking '%s' in '%s'" % (' '.join(cmd), source_dir))
    subprocess.check_call(cmd, cwd=source_dir)
def build_binarydeb(rosdistro_name, package_name, sourcedeb_dir):
    # ensure that one source subfolder exists
    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    source, version = dpkg_parsechangelog(
        source_dir, ['Source', 'Version'])
    # output package version for job description
    print("Package '%s' version: %s" % (debian_package_name, version))

    cmd = ['apt-src', 'import', source, '--here', '--version', version]
    subprocess.check_call(cmd, cwd=source_dir)

    cmd = ['apt-src', 'build', source]
    print("Invoking '%s' in '%s'" % (' '.join(cmd), source_dir))
    try:
        subprocess.check_call(cmd, cwd=source_dir)
    except subprocess.CalledProcessError:
        traceback.print_exc()
        sys.exit("""
--------------------------------------------------------------------------------------------------
`{0}` failed.
This is usually because of an error building the package.
The traceback from this failure (just above) is printed for completeness, but you can ignore it.
You should look above `E: Building failed` in the build log for the actual cause of the failure.
--------------------------------------------------------------------------------------------------
""".format(' '.join(cmd)))
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for building the binarydeb")
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_binarydeb_dir(parser)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    debian_package_name = get_debian_package_name(
        args.rosdistro_name, args.package_name)

    # get expected package version from rosdistro
    index = get_index(args.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.package_name in dist_file.release_packages
    pkg = dist_file.release_packages[args.package_name]
    repo = dist_file.repositories[pkg.repository_name]
    package_version = repo.release_repository.version

    debian_package_version = package_version

    # find PKGBUILD dependencies
    pkgbuild_proc = subprocess.Popen(
        ["/bin/bash", "-c",
         "source PKGBUILD; "
         "echo $(printf \"'%s' \" \"${makedepends[@]}\") "
         "$(printf \"'%s' \" \"${depends[@]}\")"],
        stdout=subprocess.PIPE)
    pkgbuild_out, _ = pkgbuild_proc.communicate()
    archlinux_pkg_names = pkgbuild_out.decode('ascii').split(" ")

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'uid': get_user_id(),

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'dependencies': archlinux_pkg_names,

        'rosdistro_name': args.rosdistro_name,
        'package_name': args.package_name,
        'binarydeb_dir': args.binarydeb_dir,
    }
    create_dockerfile(
        'release/binary_archlinux_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print('  -v %s:/tmp/binary_archlinux' % args.binarydeb_dir)
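Note that the bash one-liner above wraps every dependency name in single quotes, so after split(" ") each entry still carries those quote characters (and the list may contain empty strings). A hedged cleanup step, not present in the original, could normalize the list before it is passed to the Dockerfile template:

archlinux_pkg_names = [
    name.strip("'") for name in archlinux_pkg_names if name.strip("'")]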
Example #6
def _get_binarydeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name, arch,
        release_repo_spec, pkg_name, append_timestamp,
        repo_name, dist_cache=None, upstream_job_names=None):
    template_name = 'release/binarydeb_job.xml.em'
    now = datetime.utcnow()
    now_str = now.strftime('%Y-%m-%dT%H:%M:%SZ')

    apt_mirror_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config, build_file)

    binarydeb_files = [
        'binarydeb/*.changes',
        'binarydeb/*.deb',
    ]

    maintainer_emails = get_maintainer_emails(dist_cache, repo_name) \
        if build_file.notify_maintainers \
        else set([])

    job_data = {
        'template_name': template_name,
        'now_str': now_str,

        'job_priority': build_file.jenkins_job_priority,

        'upstream_projects': upstream_job_names,

        'release_repo_spec': release_repo_spec,

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'pkg_name': pkg_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'apt_mirror_args': apt_mirror_args,

        'append_timestamp': append_timestamp,

        'binarydeb_files': binarydeb_files,

        'import_package_job_name': get_import_package_job_name(
            rosdistro_name, release_build_name),
        'debian_package_name': get_debian_package_name(
            rosdistro_name, pkg_name),

        'notify_emails': set(config.notify_emails + build_file.notify_emails),
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,

        'timeout_minutes': build_file.jenkins_binarydeb_job_timeout,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
def _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name,
        pkg_name, repo_name, release_repository, dist_cache=None,
        is_disabled=False):
    template_name = 'release/sourcedeb_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    sourcedeb_files = [
        'sourcedeb/*.debian.tar.gz',
        'sourcedeb/*.debian.tar.xz',
        'sourcedeb/*.dsc',
        'sourcedeb/*.orig.tar.gz',
        'sourcedeb/*_source.changes',
    ]

    maintainer_emails = get_maintainer_emails(dist_cache, repo_name) \
        if build_file.notify_maintainers \
        else set([])

    job_data = {
        'github_url': get_github_project_url(release_repository.url),

        'job_priority': build_file.jenkins_source_job_priority,
        'node_label': build_file.jenkins_source_job_label,

        'disabled': is_disabled,

        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'pkg_name': pkg_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'repository_args': repository_args,

        'sourcedeb_files': sourcedeb_files,

        'import_package_job_name': get_import_package_job_name(rosdistro_name),
        'debian_package_name': get_debian_package_name(
            rosdistro_name, pkg_name),

        'notify_emails': build_file.notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,

        'timeout_minutes': build_file.jenkins_source_job_timeout,

        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
def check_sync_criteria(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        cache_dir):
    # fetch debian package list
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    Target = namedtuple('Target', 'os_name os_code_name arch')
    target = Target('ubuntu', os_code_name, arch)

    repo_index = get_debian_repo_index(
        build_file.target_repository, target, cache_dir)

    # for each release package which matches the release build file
    # check if a binary package exists
    binary_packages = {}
    all_pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(all_pkg_names)
    for pkg_name in sorted(pkg_names):
        debian_pkg_name = get_debian_package_name(rosdistro_name, pkg_name)
        binary_packages[pkg_name] = debian_pkg_name in repo_index

    # check that all elements from whitelist are present
    if build_file.sync_packages:
        missing_binary_packages = [
            pkg_name for pkg_name in build_file.sync_packages
            if pkg_name not in binary_packages or
            not binary_packages[pkg_name]]
        if missing_binary_packages:
            print('The following binary packages are missing to sync:',
                  file=sys.stderr)
            for pkg_name in sorted(missing_binary_packages):
                print('-', pkg_name, file=sys.stderr)
            return False
        print('All required binary packages are available:')
        for pkg_name in sorted(build_file.sync_packages):
            print('-', pkg_name)

    # check that count is satisfied
    if build_file.sync_package_count is not None:
        binary_package_count = len([
            pkg_name
            for pkg_name, has_binary_package in binary_packages.items()
            if has_binary_package])
        if binary_package_count < build_file.sync_package_count:
            print('Only %d binary packages available ' % binary_package_count +
                  '(at least %d are required to sync)' %
                  build_file.sync_package_count, file=sys.stderr)
            return False
        print('%d binary packages available ' % binary_package_count +
              '(greater than or equal to the configured sync limit of %d)' %
              build_file.sync_package_count)

    return True
Example #9
def check_sync_criteria(config_url, rosdistro_name, release_build_name,
                        os_code_name, arch, cache_dir):
    # fetch debian package list
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    target = Target('ubuntu', os_code_name, arch)

    repo_index = get_debian_repo_index(build_file.target_repository, target,
                                       cache_dir)

    # for each release package which matches the release build file
    # check if a binary package exists
    binary_packages = {}
    all_pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(all_pkg_names)
    for pkg_name in sorted(pkg_names):
        debian_pkg_name = get_debian_package_name(rosdistro_name, pkg_name)
        binary_packages[pkg_name] = debian_pkg_name in repo_index

    # check that all elements from whitelist are present
    if build_file.sync_packages:
        missing_binary_packages = [
            pkg_name for pkg_name in build_file.sync_packages
            if pkg_name not in binary_packages or not binary_packages[pkg_name]
        ]
        if missing_binary_packages:
            print('The following binary packages are missing to sync:',
                  file=sys.stderr)
            for pkg_name in sorted(missing_binary_packages):
                print('-', pkg_name, file=sys.stderr)
            return False
        print('All required binary packages are available:')
        for pkg_name in sorted(build_file.sync_packages):
            print('-', pkg_name)

    # check that count is satisfied
    if build_file.sync_package_count is not None:
        binary_package_count = len([
            pkg_name
            for pkg_name, has_binary_package in binary_packages.items()
            if has_binary_package
        ])
        if binary_package_count < build_file.sync_package_count:
            print('Only %d binary packages available ' % binary_package_count +
                  '(at least %d are required to sync)' %
                  build_file.sync_package_count,
                  file=sys.stderr)
            return False
        print('%d binary packages available ' % binary_package_count +
              '(greater than or equal to the configured sync limit of %d)' %
              build_file.sync_package_count)

    return True
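Unlike the previous variant, this version uses Target without defining it locally, so it presumably relies on a module-level definition along the lines of the one in the earlier example (an assumption, shown here for completeness):

from collections import namedtuple

Target = namedtuple('Target', 'os_name os_code_name arch')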
Example #10
def _get_sourcedeb_job_config(config_url,
                              rosdistro_name,
                              release_build_name,
                              config,
                              build_file,
                              os_name,
                              os_code_name,
                              pkg_name,
                              repo_name,
                              release_repository,
                              dist_cache=None,
                              is_disabled=False):
    template_name = 'release/sourcedeb_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    sourcedeb_files = [
        'sourcedeb/*.debian.tar.gz',
        'sourcedeb/*.debian.tar.xz',
        'sourcedeb/*.dsc',
        'sourcedeb/*.orig.tar.gz',
        'sourcedeb/*_source.changes',
    ]

    maintainer_emails = get_maintainer_emails(dist_cache, repo_name) \
        if build_file.notify_maintainers \
        else set([])

    job_data = {
        'github_url': get_github_project_url(release_repository.url),
        'job_priority': build_file.jenkins_source_job_priority,
        'node_label': build_file.jenkins_source_job_label,
        'disabled': is_disabled,
        'ros_buildfarm_repository': get_repository(),
        'script_generating_key_files': script_generating_key_files,
        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'pkg_name': pkg_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'repository_args': repository_args,
        'sourcedeb_files': sourcedeb_files,
        'import_package_job_name': get_import_package_job_name(rosdistro_name),
        'debian_package_name': get_debian_package_name(rosdistro_name,
                                                       pkg_name),
        'notify_emails': build_file.notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,
        'timeout_minutes': build_file.jenkins_source_job_timeout,
        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example #11
def get_sourcedeb(
        rosdistro_name, package_name, sourcedeb_dir,
        skip_download_sourcedeb=False):
    # ensure that no source subfolder exists
    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert not subfolders, \
        ("Sourcedeb directory '%s' must not have any " +
         "subfolders starting with '%s-'") % (sourcedeb_dir, package_name)

    if not skip_download_sourcedeb:
        # download sourcedeb
        cmd = ['apt-get', 'source', debian_package_name, '--download-only']
        print("Invoking '%s'" % ' '.join(cmd))
        subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # extract sourcedeb
    filenames = _get_package_dsc_filename(sourcedeb_dir, debian_package_name)
    assert len(filenames) == 1, filenames
    dsc_filename = filenames[0]
    cmd = ['dpkg-source', '-x', dsc_filename]
    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # ensure that one source subfolder exists
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(source_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
Example #12
def get_ros_package_names(rosdistro_name, ros_packages, dist_file):
    """Return list of ros_package_name strings with latest versions"""

    ros_package_names = []
    for pkg_name in sorted(ros_packages):
        pkg_name = pkg_name.replace("-", "_")
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]

        version = repo.release_repository.version
        debian_package_name = get_debian_package_name(rosdistro_name, pkg_name)

        ros_package_name = debian_package_name + '=' + version + '*'
        ros_package_names.append(ros_package_name)

    return ros_package_names
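The pin strings assembled here depend on get_debian_package_name, which is not shown on this page. As a rough sketch, the usual buildfarm convention prefixes the distro name and replaces underscores with dashes; the exact implementation below is an assumption:

def get_debian_package_name(rosdistro_name, ros_package_name):
    # Hypothetical sketch of the naming convention:
    # ('kinetic', 'roscpp') -> 'ros-kinetic-roscpp'
    return 'ros-%s-%s' % (
        rosdistro_name, ros_package_name.replace('_', '-'))

Under that convention, a package released at version 1.2.3-0 would yield a pin string such as 'ros-kinetic-roscpp=1.2.3-0*'.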
Example #14
def build_binarydeb(rosdistro_name, package_name, sourcedeb_dir):
    # ensure that one source subfolder exists
    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    source, version = dpkg_parsechangelog(
        source_dir, ['Source', 'Version'])
    # output package version for job description
    print("Package '%s' version: %s" % (debian_package_name, version))

    cmd = ['apt-src', 'import', source, '--here', '--version', version]
    subprocess.check_call(cmd, cwd=source_dir)

    cmd = ['apt-src', 'build', source]
    print("Invoking '%s' in '%s'" % (' '.join(cmd), source_dir))
    subprocess.check_call(cmd, cwd=source_dir)
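Nearly every example on this page also leans on a _get_package_subfolders helper. A minimal sketch, assuming it returns the subdirectories of the given base path whose names start with the Debian package name followed by a dash (the matching rule is an assumption):

import os


def _get_package_subfolders(basepath, debian_package_name):
    # Hypothetical sketch: list unpacked source trees such as
    # '<debian_package_name>-<version>' below basepath.
    subfolders = []
    for filename in os.listdir(basepath):
        path = os.path.join(basepath, filename)
        if os.path.isdir(path) and \
                filename.startswith(debian_package_name + '-'):
            subfolders.append(path)
    return subfolders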
Example #15
def build_binary_archlinux(rosdistro_name, package_name, sourcedeb_dir):
    # ensure that one source subfolder exists
    archlinux_package_name = get_debian_package_name(rosdistro_name,
                                                     package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, archlinux_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    cmd = ['makepkg']

    try:
        subprocess.check_call(cmd, cwd=source_dir)
    except subprocess.CalledProcessError:
        traceback.print_exc()
        sys.exit("""
--------------------------------------------------------------------------------------------------
`{0}` failed.
This is usually because of an error building the package.
The traceback from this failure (just above) is printed for completeness, but you can ignore it.
You should look above `E: Building failed` in the build log for the actual cause of the failure.
--------------------------------------------------------------------------------------------------
""".format(' '.join(cmd)))
Example #16
def _get_sourcedeb_job_config(config_url,
                              rosdistro_name,
                              release_build_name,
                              config,
                              build_file,
                              os_name,
                              os_code_name,
                              pkg_name,
                              repo_name,
                              release_repository,
                              dist_cache=None,
                              is_disabled=False,
                              other_build_files_same_platform=None):
    template_name = 'release/sourcedeb_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    sourcedeb_files = [
        'sourcedeb/*.debian.tar.gz',
        'sourcedeb/*.debian.tar.xz',
        'sourcedeb/*.dsc',
        'sourcedeb/*.orig.tar.gz',
        'sourcedeb/*_source.buildinfo',
        'sourcedeb/*_source.changes',
    ]

    # collect notify emails from all build files with the job enabled
    notify_emails = set(build_file.notify_emails)
    if other_build_files_same_platform:
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                notify_emails.update(other_build_file.notify_emails)

    # notify maintainers if any build file (with the job enabled) requests it
    notify_maintainers = build_file.notify_maintainers
    if other_build_files_same_platform:
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                if other_build_file.notify_maintainers:
                    notify_maintainers = True

    maintainer_emails = _get_maintainer_emails(dist_cache, pkg_name) \
        if notify_maintainers \
        else set([])

    job_data = {
        'github_url': get_github_project_url(release_repository.url),
        'job_priority': build_file.jenkins_source_job_priority,
        'node_label': get_node_label(
            build_file.jenkins_source_job_label,
            get_default_node_label('%s_%s' % (rosdistro_name, 'sourcedeb'))),
        'disabled': is_disabled,
        'ros_buildfarm_repository': get_repository(),
        'script_generating_key_files': script_generating_key_files,
        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'pkg_name': pkg_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': get_system_architecture(),
        'repository_args': repository_args,
        'sourcedeb_files': sourcedeb_files,
        'import_package_job_name': get_import_package_job_name(rosdistro_name),
        'debian_package_name': get_debian_package_name(
            rosdistro_name, pkg_name),
        'notify_emails': notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': notify_maintainers,
        'timeout_minutes': build_file.jenkins_source_job_timeout,
        'credential_id': build_file.upload_credential_id,
        'git_ssh_credential_id': config.git_ssh_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example #17
def _get_source_tag(rosdistro_name, pkg_name, pkg_version, os_name,
                    os_code_name):
    assert os_name in ['debian', 'ubuntu']
    return 'debian/%s_%s_%s' % \
        (get_debian_package_name(rosdistro_name, pkg_name),
         pkg_version, os_code_name)
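As an illustration (all values assumed, using the ros-<distro>-<name> convention sketched earlier):

tag = _get_source_tag('kinetic', 'roscpp', '1.12.14-0', 'ubuntu', 'xenial')
# -> 'debian/ros-kinetic-roscpp_1.12.14-0_xenial'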
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the devel job")
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    parser.add_argument(
        '--workspace-root',
        nargs='+',
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'xenial')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_dockerfile_dir(parser)
    parser.add_argument(
        '--testing',
        action='store_true',
        help='If set, the workspace is built with tests enabled and the '
             'tests are run instead of installing')
    args = parser.parse_args(argv)

    # get direct build dependencies
    pkgs = {}
    for workspace_root in args.workspace_root:
        source_space = os.path.join(workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs.update(find_packages(source_space))

    pkg_names = [pkg.name for pkg in pkgs.values()]
    print("Found the following packages:")
    for pkg_name in sorted(pkg_names):
        print('  -', pkg_name)

    maintainer_emails = set([])
    for pkg in pkgs.values():
        for m in pkg.maintainers:
            maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))

    context = initialize_resolver(
        args.rosdistro_name, args.os_name, args.os_code_name)

    apt_cache = Cache()

    debian_pkg_names = [
        'build-essential',
        'python3',
    ]
    if 'catkin' not in pkg_names:
        debian_pkg_names.append(
            get_debian_package_name(args.rosdistro_name, 'catkin'))
    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print('  -', debian_pkg_name)

    debian_pkg_versions = {}

    # get build dependencies and map them to binary packages
    build_depends = get_dependencies(
        pkgs.values(), 'build', _get_build_and_recursive_run_dependencies)
    debian_pkg_names_building = resolve_names(build_depends, **context)
    debian_pkg_names_building -= set(debian_pkg_names)
    debian_pkg_names += order_dependencies(debian_pkg_names_building)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names))

    # get run and test dependencies and map them to binary packages
    run_and_test_depends = get_dependencies(
        pkgs.values(), 'run and test', _get_run_and_test_dependencies)
    debian_pkg_names_testing = resolve_names(
        run_and_test_depends, **context)
    # all additional run/test dependencies
    # are added after the build dependencies
    # in order to reuse existing images in the docker container
    debian_pkg_names_testing -= set(debian_pkg_names)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names_testing))
    if args.testing:
        debian_pkg_names += order_dependencies(debian_pkg_names_testing)

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        'uid': get_user_id(),

        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,

        'testing': args.testing,
        'prerelease_overlay': len(args.workspace_root) > 1,
    }
    create_dockerfile(
        'devel/devel_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print('  -v %s:/tmp/catkin_workspace' % args.workspace_root[-1])
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--rosdoc-lite-dir',
        required=True,
        help='The root path of the rosdoc_lite repository')
    parser.add_argument(
        '--catkin-sphinx-dir',
        required=True,
        help='The root path of the catkin-sphinx repository')
    parser.add_argument(
        '--rosdoc-index-dir',
        required=True,
        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'trusty')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(
            args.workspace_root, 'src', args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating update manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api', pkg_name,
                    'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.load(h)
                data = copy.deepcopy(remote_data)

                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(
                    args.output_dir, 'api', pkg_name, 'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'updated rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependendencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(set([
                d.name for d in depends if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependendencies for metapackage '%s'" %
                      pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(set([
                    d.name for d in depends if d.name in valid_package_names]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(
                pkg.name, ros_dependency_names)
        rosdoc_index.write_modified_data(
            args.output_dir, ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(
                    args.output_dir, 'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(os.path.join(
                        pkg_changelog_doc_path, 'changelog.html'), 'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(
                args.output_dir, 'rosdoc_tags', '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourself
            # bad things happen when we do this
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url': '../../../api/%s/html' % pkg.name,
                'location': 'file://%s' % os.path.join(
                    args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package': pkg.name,
            }
            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = 'http://docs.ros.org/%s/api/%s/html' % \
                (args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(
                config.jenkins_url, args.rosdistro_name, args.doc_build_name,
                args.repository_name, args.os_name, args.os_code_name,
                args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name,
                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name, pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(
                args.output_dir, 'manifests', pkg.name, 'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope(
        'SUBSECTION',
        'overwrite CMakeLists.txt files to only generate messages'
    ):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type']
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" %
                  pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope(
        'SUBSECTION',
        'determine dependencies and generate Dockerfile'
    ):
        # initialize rosdep view
        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            'python-catkin-pkg',
            'python-epydoc',
            'python-kitchen',
            'python-rospkg',
            'python-sphinx',
            'python-yaml',
            # since catkin is not a run dependency but provides the setup files
            get_debian_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_debian_package_name(args.rosdistro_name, 'genmsg'),
        ]
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_debian_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print('  -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(
            pkgs.values(), 'build, run and doc', _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use unreleased dependencies
            print('# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in failing build')
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        build_files = get_doc_build_files(config, args.rosdistro_name)
        build_file = build_files[args.doc_build_name]

        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes]
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        # generate Dockerfile
        data = {
            'os_name': args.os_name,
            'os_code_name': args.os_code_name,
            'arch': args.arch,

            'distribution_repository_urls': args.distribution_repository_urls,
            'distribution_repository_keys': get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),

            'rosdistro_name': args.rosdistro_name,

            'uid': get_user_id(),

            'dependencies': debian_pkg_names,
            'dependency_versions': debian_pkg_versions,

            'canonical_base_url': build_file.canonical_base_url,

            'ordered_pkg_tuples': ordered_pkg_tuples,
            'rosdoc_config_files': rosdoc_config_files,
        }
        create_dockerfile(
            'doc/doc_task.Dockerfile.em', data, args.dockerfile_dir)
Example #20
def _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name, binary_arches,
        release_repo_spec, pkg_name,
        repo_name, dist_cache=None):
    template_name = 'release/sourcedeb_job.xml.em'
    now = datetime.utcnow()
    now_str = now.strftime('%Y-%m-%dT%H:%M:%SZ')

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config, build_file)

    sourcedeb_files = [
        'sourcedeb/*.debian.tar.gz',
        'sourcedeb/*.dsc',
        'sourcedeb/*.orig.tar.gz',
        'sourcedeb/*_source.changes',
    ]

    binary_job_names = [
        get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)
        for arch in binary_arches]

    maintainer_emails = get_maintainer_emails(dist_cache, repo_name) \
        if build_file.notify_maintainers \
        else set([])

    job_data = {
        'template_name': template_name,
        'now_str': now_str,

        'job_priority': build_file.jenkins_job_priority,

        'release_repo_spec': release_repo_spec,

        'script_generating_key_files': script_generating_key_files,

        'ros_buildfarm_repo': config.ros_buildfarm_repo,

        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'pkg_name': pkg_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'repository_args': repository_args,

        'sourcedeb_files': sourcedeb_files,

        'import_package_job_name': get_import_package_job_name(
            rosdistro_name, release_build_name),
        'debian_package_name': get_debian_package_name(
            rosdistro_name, pkg_name),

        'child_projects': binary_job_names,

        'notify_emails': set(config.notify_emails + build_file.notify_emails),
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,

        'timeout_minutes': build_file.jenkins_sourcedeb_job_timeout,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example #21
def _get_source_tag(rosdistro_name, pkg_name, pkg_version, os_name, os_code_name):
    assert os_name == "ubuntu"
    return "debian/%s_%s_%s" % (get_debian_package_name(rosdistro_name, pkg_name), pkg_version, os_code_name)
def get_sourcedeb(
        rosdistro_index_url, rosdistro_name, package_name, sourcedeb_dir,
        skip_download_sourcedeb=False):
    # ensure that no source subfolder exists
    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert not subfolders, \
        ("Sourcedeb directory '%s' must not have any " +
         "subfolders starting with '%s-'") % (sourcedeb_dir, package_name)

    if not skip_download_sourcedeb:
        # get expected package version from rosdistro
        from rosdistro import get_distribution_cache
        from rosdistro import get_index
        index = get_index(rosdistro_index_url)
        dist_cache = get_distribution_cache(index, rosdistro_name)
        dist_file = dist_cache.distribution_file
        assert package_name in dist_file.release_packages
        pkg = dist_file.release_packages[package_name]
        repo = dist_file.repositories[pkg.repository_name]
        package_version = repo.release_repository.version

        # get the exact sourcedeb version
        showsrc_output = subprocess.check_output([
            'apt-cache', 'showsrc', debian_package_name]).decode()
        line_prefix = 'Version: '
        debian_package_versions = [
            l[len(line_prefix):] for l in showsrc_output.splitlines()
            if l.startswith(line_prefix + package_version)]
        assert len(debian_package_versions) == 1, \
            "Failed to find sourcedeb with version '%s', only found: %s" % \
            (package_version, ', '.join(debian_package_versions))

        # download sourcedeb
        apt_script = os.path.join(
            os.path.dirname(__file__), 'wrapper', 'apt.py')
        cmd = [
            sys.executable, apt_script,
            'source', '--download-only', '--only-source',
            debian_package_name + '=' + debian_package_versions[0]]
        print("Invoking '%s'" % ' '.join(cmd))
        subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # extract sourcedeb
    filenames = _get_package_dsc_filename(sourcedeb_dir, debian_package_name)
    assert len(filenames) == 1, filenames
    dsc_filename = filenames[0]
    cmd = ['dpkg-source', '-x', dsc_filename]
    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # ensure that one source subfolder exists
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(source_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
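For illustration only, the 'apt-cache showsrc' filtering used above applied to hypothetical output (package and version values are invented): the list comprehension keeps only Version lines starting with the version expected from rosdistro.

showsrc_output = (
    'Package: ros-kinetic-roscpp\n'
    'Version: 1.12.7-0xenial-20170522-211136-0800\n'
    'Binary: ros-kinetic-roscpp\n'
    'Version: 1.12.6-0xenial-20170419-100512-0800\n')
package_version = '1.12.7-0'
line_prefix = 'Version: '
debian_package_versions = [
    l[len(line_prefix):] for l in showsrc_output.splitlines()
    if l.startswith(line_prefix + package_version)]
assert debian_package_versions == ['1.12.7-0xenial-20170522-211136-0800']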
Example #23
def _get_binarydeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name, arch,
        pkg_name, repo_name, release_repository,
        dist_cache=None, upstream_job_names=None,
        is_disabled=False):
    template_name = 'release/binarydeb_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)
    repository_args.append('--target-repository ' +
                           build_file.target_repository)

    binarydeb_files = [
        'binarydeb/*.changes',
        'binarydeb/*.deb',
    ]

    sync_to_testing_job_name = [get_sync_packages_to_testing_job_name(
        rosdistro_name, os_code_name, arch)]

    maintainer_emails = _get_maintainer_emails(dist_cache, pkg_name) \
        if build_file.notify_maintainers \
        else set([])

    job_data = {
        'github_url': get_github_project_url(release_repository.url),

        'job_priority': build_file.jenkins_binary_job_priority,
        'node_label': get_node_label(
            build_file.jenkins_binary_job_label,
            get_default_node_label('%s_%s_%s' % (
                rosdistro_name, 'binarydeb', release_build_name))),

        'disabled': is_disabled,

        'upstream_projects': upstream_job_names,

        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'pkg_name': pkg_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'repository_args': repository_args,

        'append_timestamp': build_file.abi_incompatibility_assumed,

        'binarydeb_files': binarydeb_files,

        'import_package_job_name': get_import_package_job_name(rosdistro_name),
        'debian_package_name': get_debian_package_name(
            rosdistro_name, pkg_name),

        'child_projects': sync_to_testing_job_name,

        'notify_emails': build_file.notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,

        'timeout_minutes': build_file.jenkins_binary_job_timeout,

        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example #24
def _get_source_tag(
        rosdistro_name, pkg_name, pkg_version, os_name, os_code_name):
    assert os_name == 'ubuntu'
    return 'debian/%s_%s_%s' % \
        (get_debian_package_name(rosdistro_name, pkg_name),
         pkg_version, os_code_name)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the devel job")
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced')
    parser.add_argument('--workspace-root',
                        nargs='+',
                        help='The root path of the workspace to compile')
    parser.add_argument('--os-name',
                        required=True,
                        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument('--os-code-name',
                        required=True,
                        help="The OS code name (e.g. 'trusty')")
    parser.add_argument('--arch',
                        required=True,
                        help="The architecture (e.g. 'amd64')")
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_dockerfile_dir(parser)
    parser.add_argument(
        '--testing',
        action='store_true',
        help='If set, build the workspace with tests enabled and run the '
        'tests instead of installing')
    args = parser.parse_args(argv)

    # get direct build dependencies
    pkgs = {}
    for workspace_root in args.workspace_root:
        source_space = os.path.join(workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs.update(find_packages(source_space))

    pkg_names = [pkg.name for pkg in pkgs.values()]
    print("Found the following packages:")
    for pkg_name in sorted(pkg_names):
        print('  -', pkg_name)

    maintainer_emails = set([])
    for pkg in pkgs.values():
        for m in pkg.maintainers:
            maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))

    context = initialize_resolver(args.rosdistro_name, args.os_name,
                                  args.os_code_name)

    apt_cache = Cache()

    debian_pkg_names = [
        'build-essential',
        'python3',
    ]
    if 'catkin' not in pkg_names:
        debian_pkg_names.append(
            get_debian_package_name(args.rosdistro_name, 'catkin'))
    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print('  -', debian_pkg_name)

    debian_pkg_versions = {}

    # get build dependencies and map them to binary packages
    build_depends = get_dependencies(
        pkgs.values(), 'build', _get_build_and_recursive_run_dependencies)
    debian_pkg_names_building = resolve_names(build_depends, **context)
    debian_pkg_names_building -= set(debian_pkg_names)
    debian_pkg_names += order_dependencies(debian_pkg_names_building)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names))

    # get run and test dependencies and map them to binary packages
    run_and_test_depends = get_dependencies(pkgs.values(), 'run and test',
                                            _get_run_and_test_dependencies)
    debian_pkg_names_testing = resolve_names(run_and_test_depends, **context)
    # all additional run/test dependencies
    # are added after the build dependencies
    # in order to reuse existing images in the docker container
    debian_pkg_names_testing -= set(debian_pkg_names)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names_testing))
    if args.testing:
        debian_pkg_names += order_dependencies(debian_pkg_names_testing)

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        'uid': get_user_id(),

        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,

        'testing': args.testing,
        'prerelease_overlay': len(args.workspace_root) > 1,
    }
    create_dockerfile('devel/devel_task.Dockerfile.em', data,
                      args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print('  -v %s:/tmp/catkin_workspace' % args.workspace_root[-1])
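A hypothetical direct invocation of the main() above; every value is a placeholder, and the flag spellings added by the add_argument_* helpers are assumed to mirror their names:

main([
    '--rosdistro-name', 'kinetic',
    '--workspace-root', '/tmp/ws',
    '--os-name', 'ubuntu',
    '--os-code-name', 'xenial',
    '--arch', 'amd64',
    '--dockerfile-dir', '/tmp/dockerfile_dir',  # assumed flag spelling
])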
Example #26
def get_sourcedeb(
        rosdistro_index_url, rosdistro_name, package_name, sourcedeb_dir,
        skip_download_sourcedeb=False):
    # ensure that no source subfolder exists
    debian_package_name = get_debian_package_name(rosdistro_name, package_name)
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert not subfolders, \
        ("Sourcedeb directory '%s' must not have any " +
         "subfolders starting with '%s-'") % (sourcedeb_dir, package_name)

    if not skip_download_sourcedeb:
        # get expected package version from rosdistro
        from rosdistro import get_distribution_cache
        from rosdistro import get_index
        index = get_index(rosdistro_index_url)
        dist_cache = get_distribution_cache(index, rosdistro_name)
        dist_file = dist_cache.distribution_file
        assert package_name in dist_file.release_packages
        pkg = dist_file.release_packages[package_name]
        repo = dist_file.repositories[pkg.repository_name]
        package_version = repo.release_repository.version

        # get the exact sourcedeb version
        showsrc_output = subprocess.check_output(
            ['apt-cache', 'showsrc', debian_package_name]).decode()
        line_prefix = 'Version: '
        debian_package_versions = [
            l[len(line_prefix):] for l in showsrc_output.splitlines()
            if l.startswith(line_prefix + package_version)
        ]
        assert len(debian_package_versions) == 1, \
            "Failed to find sourcedeb with version '%s', only found: %s" % \
            (package_version, ', '.join(debian_package_versions))

        # download sourcedeb
        apt_script = os.path.join(os.path.dirname(__file__), 'wrapper',
                                  'apt.py')
        cmd = [
            sys.executable, apt_script, 'source', '--download-only',
            '--only-source',
            debian_package_name + '=' + debian_package_versions[0]
        ]
        print("Invoking '%s'" % ' '.join(cmd))
        subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # extract sourcedeb
    filenames = _get_package_dsc_filename(sourcedeb_dir, debian_package_name)
    assert len(filenames) == 1, filenames
    dsc_filename = filenames[0]
    cmd = ['dpkg-source', '-x', dsc_filename]
    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd, cwd=sourcedeb_dir)

    # ensure that one source subfolder exists
    subfolders = _get_package_subfolders(sourcedeb_dir, debian_package_name)
    assert len(subfolders) == 1, subfolders
    source_dir = subfolders[0]

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(source_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument('--workspace-root',
                        required=True,
                        help='The root path of the workspace to compile')
    parser.add_argument('--rosdoc-lite-dir',
                        required=True,
                        help='The root path of the rosdoc_lite repository')
    parser.add_argument('--catkin-sphinx-dir',
                        required=True,
                        help='The root path of the catkin-sphinx repository')
    parser.add_argument('--rosdoc-index-dir',
                        required=True,
                        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument('--os-name',
                        required=True,
                        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument('--os-code-name',
                        required=True,
                        help="The OS code name (e.g. 'xenial')")
    parser.add_argument('--arch',
                        required=True,
                        help="The architecture (e.g. 'amd64')")
    add_argument_build_tool(parser, required=True)
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    condition_context = {
        'ROS_DISTRO': args.rosdistro_name,
        'ROS_PYTHON_VERSION': 2,
        'ROS_VERSION': 1,
    }

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        for pkg in pkgs.values():
            pkg.evaluate_conditions(condition_context)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(args.workspace_root, 'src',
                                args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating update manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api',
                    pkg_name, 'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)

                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(
                    rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(args.output_dir, 'api', pkg_name,
                                   'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'updated rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependendencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(
                set([d.name for d in depends
                     if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependendencies for metapackage '%s'" %
                      pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(
                    set([
                        d.name for d in depends
                        if d.name in valid_package_names
                    ]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(pkg.name, ros_dependency_names)
        rosdoc_index.write_modified_data(args.output_dir,
                                         ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(args.output_dir,
                                                      'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(
                        os.path.join(pkg_changelog_doc_path, 'changelog.html'),
                        'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes
            ]
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(
                prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(args.output_dir, 'rosdoc_tags',
                               '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourself
            # bad things happen when we do this
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url': '../../../api/%s/html' % pkg.name,
                'location': 'file://%s' % os.path.join(
                    args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package': pkg.name,
            }

            # fetch generator specific output folders from rosdoc_lite
            if pkg.name in rosdoc_config_files:
                output_folders = get_generator_output_folders(
                    rosdoc_config_files[pkg.name], pkg.name)
                if 'doxygen' in output_folders:
                    data['docs_url'] += '/' + output_folders['doxygen']

            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    doc_build_files = get_doc_build_files(config, args.rosdistro_name)
    doc_build_file = doc_build_files[args.doc_build_name]

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(
                rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(
                rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = '%s/%s/api/%s/html' % \
                (doc_build_file.canonical_base_url, args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(config.jenkins_url,
                                              args.rosdistro_name,
                                              args.doc_build_name,
                                              args.repository_name,
                                              args.os_name, args.os_code_name,
                                              args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(config.jenkins_url,
                                                build_files,
                                                args.rosdistro_name,
                                                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(config.jenkins_url,
                                                    build_files,
                                                    args.rosdistro_name,
                                                    pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(args.output_dir, 'manifests', pkg.name,
                               'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope('SUBSECTION',
               'overwrite CMakeLists.txt files to only generate messages'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type'
            ]
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" % pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope('SUBSECTION', 'determine dependencies and generate Dockerfile'):
        # initialize rosdep view
        context = initialize_resolver(args.rosdistro_name, args.os_name,
                                      args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            'python-catkin-pkg-modules',
            'python-epydoc',
            'python-kitchen',
            'python-rospkg',
            'python-sphinx',
            'python-yaml',
            # since catkin is not a run dependency but provides the setup files
            get_debian_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_debian_package_name(args.rosdistro_name, 'genmsg'),
        ]
        if args.build_tool == 'colcon':
            debian_pkg_names.append('python3-colcon-ros')
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_debian_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print('  -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(pkgs.values(), 'build, run and doc',
                                   _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use dependencies that have not been released
            print('# BEGIN SUBSECTION: MISSING DEPENDENCIES '
                  'might result in failing build')
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        # generate Dockerfile
        data = {
            'os_name': args.os_name,
            'os_code_name': args.os_code_name,
            'arch': args.arch,

            'build_tool': doc_build_file.build_tool,

            'distribution_repository_urls': args.distribution_repository_urls,
            'distribution_repository_keys': get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),

            'rosdistro_name': args.rosdistro_name,

            'uid': get_user_id(),

            'dependencies': debian_pkg_names,
            'dependency_versions': debian_pkg_versions,
            'install_lists': [],

            'canonical_base_url': doc_build_file.canonical_base_url,

            'ordered_pkg_tuples': ordered_pkg_tuples,
            'rosdoc_config_files': rosdoc_config_files,
        }
        create_dockerfile('doc/doc_task.Dockerfile.em', data,
                          args.dockerfile_dir)
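The CHANGELOG.rst handling above can be exercised in isolation. A self-contained sketch with a made-up changelog (docutils must be installed), including the same regex that strips docutils system messages from the generated html:

import re

from docutils.core import publish_string

rst_code = (
    '1.0.0 (2020-01-01)\n'
    '------------------\n'
    '* first release\n')
html_code = publish_string(rst_code, writer_name='html').decode()
# strip system messages, as in the doc job above
open_tag = re.escape('<div class="first system-message">')
close_tag = re.escape('</div>')
html_code = re.sub(
    '(' + open_tag + '.+?' + close_tag + ')', '', html_code, flags=re.DOTALL)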
Example #28
def _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name,
        pkg_name, repo_name, release_repository, dist_cache=None,
        is_disabled=False, other_build_files_same_platform=None):
    template_name = 'release/sourcedeb_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    sourcedeb_files = [
        'sourcedeb/*.debian.tar.gz',
        'sourcedeb/*.debian.tar.xz',
        'sourcedeb/*.dsc',
        'sourcedeb/*.orig.tar.gz',
        'sourcedeb/*_source.changes',
    ]

    # collect notify emails from all build files with the job enabled
    notify_emails = set(build_file.notify_emails)
    if other_build_files_same_platform:
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                notify_emails.update(other_build_file.notify_emails)

    # notify maintainers if any build file (with the job enabled) requests it
    notify_maintainers = build_file.notify_maintainers
    if other_build_files_same_platform:
        for other_build_file in other_build_files_same_platform:
            if other_build_file.filter_packages([pkg_name]):
                if other_build_file.notify_maintainers:
                    notify_maintainers = True

    maintainer_emails = get_maintainer_emails(dist_cache, repo_name) \
        if notify_maintainers \
        else set([])

    job_data = {
        'github_url': get_github_project_url(release_repository.url),

        'job_priority': build_file.jenkins_source_job_priority,
        'node_label': build_file.jenkins_source_job_label,

        'disabled': is_disabled,

        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'pkg_name': pkg_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'repository_args': repository_args,

        'sourcedeb_files': sourcedeb_files,

        'import_package_job_name': get_import_package_job_name(rosdistro_name),
        'debian_package_name': get_debian_package_name(
            rosdistro_name, pkg_name),

        'notify_emails': notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': notify_maintainers,

        'timeout_minutes': build_file.jenkins_source_job_timeout,

        'credential_id': build_file.upload_credential_id,

        'git_ssh_credential_id': config.git_ssh_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example #29
def _get_binarydeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name, arch,
        pkg_name, repo_name, release_repository,
        dist_cache=None, upstream_job_names=None,
        is_disabled=False):
    template_name = 'release/binarydeb_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    binarydeb_files = [
        'binarydeb/*.changes',
        'binarydeb/*.deb',
    ]

    sync_to_testing_job_name = [get_sync_packages_to_testing_job_name(
        rosdistro_name, os_code_name, arch)]

    maintainer_emails = get_maintainer_emails(dist_cache, repo_name) \
        if build_file.notify_maintainers \
        else set([])

    job_data = {
        'github_url': get_github_project_url(release_repository.url),

        'job_priority': build_file.jenkins_binary_job_priority,
        'node_label': build_file.jenkins_binary_job_label,

        'disabled': is_disabled,

        'upstream_projects': upstream_job_names,

        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'pkg_name': pkg_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'repository_args': repository_args,

        'append_timestamp': build_file.abi_incompatibility_assumed,

        'binarydeb_files': binarydeb_files,

        'import_package_job_name': get_import_package_job_name(rosdistro_name),
        'debian_package_name': get_debian_package_name(
            rosdistro_name, pkg_name),

        'child_projects': sync_to_testing_job_name,

        'notify_emails': build_file.notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,

        'timeout_minutes': build_file.jenkins_binary_job_timeout,

        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for building the binarydeb")
    add_argument_rosdistro_index_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_binarydeb_dir(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    args = parser.parse_args(argv)

    debian_package_name = get_debian_package_name(
        args.rosdistro_name, args.package_name)

    # get expected package version from rosdistro
    index = get_index(args.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.package_name in dist_file.release_packages
    pkg = dist_file.release_packages[args.package_name]
    repo = dist_file.repositories[pkg.repository_name]
    package_version = repo.release_repository.version

    debian_package_version = package_version

    # build_binarydeb dependencies
    debian_pkg_names = ['apt-src']

    # add build dependencies from .dsc file
    dsc_file = get_dsc_file(
        args.binarydeb_dir, debian_package_name, debian_package_version)
    debian_pkg_names += sorted(get_build_depends(dsc_file))

    # get versions for build dependencies
    apt_cache = Cache()
    debian_pkg_versions = get_binary_package_versions(
        apt_cache, debian_pkg_names)

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'uid': get_user_id(),

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'build_environment_variables': args.env_vars,

        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,
        'install_lists': [],

        'rosdistro_name': args.rosdistro_name,
        'package_name': args.package_name,
        'binarydeb_dir': args.binarydeb_dir,
    }
    create_dockerfile(
        'release/binarydeb_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print('  -v %s:/tmp/binarydeb' % args.binarydeb_dir)
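get_build_depends() above extracts the build dependencies from the downloaded .dsc file. A possible implementation using the python-debian package (an assumption; the buildfarm may well parse the file differently):

from debian.deb822 import Dsc


def get_build_depends_sketch(dsc_path):
    # hypothetical stand-in for get_build_depends(), for illustration only
    with open(dsc_path) as h:
        dsc = Dsc(h)
    # 'Build-Depends' is a comma-separated relationship field; keep only
    # the package names, dropping any version constraints
    return set(
        entry.strip().split(' ')[0]
        for entry in dsc.get('Build-Depends', '').split(',')
        if entry.strip())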
Example #31
def get_sources(
        rosdistro_index_url, rosdistro_name, pkg_name, os_name, os_code_name,
        sources_dir, debian_repository_urls):
    from rosdistro import get_cached_distribution
    from rosdistro import get_index
    index = get_index(rosdistro_index_url)
    dist_file = get_cached_distribution(index, rosdistro_name)
    if pkg_name not in dist_file.release_packages:
        return 'Not a released package name: %s' % pkg_name

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]
    if not repo.release_repository.version:
        return "Repository '%s' has no release version" % repo_name

    pkg_version = repo.release_repository.version
    tag = _get_source_tag(
        rosdistro_name, pkg_name, pkg_version, os_name, os_code_name)

    cmd = [
        'git', 'clone',
        '--branch', tag,
        # fetch all branches and tags but no history
        '--depth', '1', '--no-single-branch',
        repo.release_repository.url, sources_dir]

    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd)

    # ensure that the package version is correct
    source_version = dpkg_parsechangelog(sources_dir, ['Version'])[0]
    if not source_version.startswith(pkg_version) or \
            (len(source_version) > len(pkg_version) and
             source_version[len(pkg_version)] in '0123456789'):
        raise RuntimeError(
            ('The cloned package version from the GBP (%s) does not match ' +
             'the expected package version from the distribution file (%s)') %
            (source_version, pkg_version))

    # If a tarball already exists reuse it
    origtgz_version = pkg_version.split('-')[0]
    debian_package_name = get_debian_package_name(rosdistro_name, pkg_name)
    filename = '%s_%s.orig.tar.gz' % (debian_package_name, origtgz_version)

    URL_TEMPLATE = '%s/pool/main/%s/%s/%s'
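    # the Debian pool layout shards sources by the first letter of the
    # source package name, e.g. pool/main/r/ros-kinetic-roscpp/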
    prefix = debian_package_name[0]
    for repo in debian_repository_urls:
        url = URL_TEMPLATE % (repo, prefix, debian_package_name, filename)

        output_file = os.path.join(sources_dir, '..', filename)
        try:
            urlretrieve(url, output_file)
            print("Downloaded original tarball '%s' to '%s'" %
                  (url, output_file))
            break
        except HTTPError:
            print("No tarball found at '%s'" % url)

    # output package version for job description
    print("Package '%s' version: %s" % (pkg_name, source_version))

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(sources_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
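The version guard in get_sources() deserves a note: a plain startswith() check would accept '1.2.34' as a match for '1.2.3', which is why the character following the expected prefix must not be a digit. An isolated sketch of the same logic (versions are invented):

def version_matches(source_version, pkg_version):
    # accept only a prefix match that is not the start of a longer
    # numeric component
    if not source_version.startswith(pkg_version):
        return False
    rest = source_version[len(pkg_version):]
    return not (rest and rest[0] in '0123456789')

assert version_matches('1.2.3-0xenial', '1.2.3-0')
assert not version_matches('1.2.34-0xenial', '1.2.3')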