Example #1
def test_get_index_from_http_with_query_parameters():
    import subprocess
    import sys
    import time
    url = 'http://localhost:9876/index_v3.yaml?raw&at=master'
    # start an HTTP server and wait
    if sys.version_info < (3, 0, 0):
        proc = subprocess.Popen(
            [sys.executable, '-m', 'SimpleHTTPServer', '9876'], cwd=FILES_DIR)
    else:
        proc = subprocess.Popen([sys.executable, '-m', 'http.server', '9876'],
                                cwd=FILES_DIR)
    time.sleep(0.5)
    try:
        i = get_index(url)
        assert len(i.distributions.keys()) == 1
        assert 'foo' in i.distributions.keys()

        # test that every url carries the same query string
        for dist_urls in i.distributions['foo'].values():
            if not isinstance(dist_urls, list):
                dist_urls = [dist_urls]
            for dist_url in dist_urls:
                assert dist_url.endswith('?raw&at=master')
        dist_files = get_distribution_files(i, 'foo')
        assert len(dist_files) == 2
        get_distribution_file(i, 'foo')
    finally:
        proc.terminate()
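The fixed time.sleep(0.5) races against server startup on a slow machine. Below is a minimal Python 3 sketch of a more robust wait that polls the port using only the standard library; wait_for_port is an invented helper, not part of the original test:

import socket
import time

def wait_for_port(port, host='localhost', timeout=2.0):
    # poll until the TCP port accepts connections or the timeout expires
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            socket.create_connection((host, port), timeout=0.1).close()
            return True
        except OSError:
            time.sleep(0.05)
    return False

# in the test, replace time.sleep(0.5) with:
# assert wait_for_port(9876), 'HTTP server did not start in time'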
Example #2
def test_get_index_from_http_with_query_parameters():
    import subprocess
    import sys
    import time
    url = 'http://localhost:9876/index_v3.yaml?raw&at=master'
    # start an HTTP server and wait
    if sys.version_info < (3, 0, 0):
        proc = subprocess.Popen([sys.executable, '-m', 'SimpleHTTPServer', '9876'],
                                cwd=FILES_DIR)
    else:
        proc = subprocess.Popen([sys.executable, '-m', 'http.server', '9876'],
                                cwd=FILES_DIR)
    time.sleep(0.5)
    try:
        i = get_index(url)
        assert len(i.distributions.keys()) == 1
        assert 'foo' in i.distributions.keys()

        # test that every url carries the same query string
        for key, dist_urls in i.distributions['foo'].items():
            if key in ('distribution_status', 'distribution_type'):
                continue
            if not isinstance(dist_urls, list):
                dist_urls = [dist_urls]
            for dist_url in dist_urls:
                assert dist_url.endswith('?raw&at=master')
        dist_files = get_distribution_files(i, 'foo')
        assert len(dist_files) == 2
        get_distribution_file(i, 'foo')
    finally:
        proc.terminate()
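Example #2 differs from Example #1 only in skipping the 'distribution_status' and 'distribution_type' keys, which newer index formats store as plain strings alongside the distribution file URLs (see Example #7 below), so the query-string assertion must not be applied to them.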
Example #3
def test_get_index_v3():
    url = 'file://' + FILES_DIR + '/index_v3.yaml'
    i = get_index(url)
    assert len(i.distributions.keys()) == 1
    assert 'foo' in i.distributions.keys()

    dist_files = get_distribution_files(i, 'foo')
    assert len(dist_files) == 2
    get_distribution_file(i, 'foo')
Example #4
def test_get_index_v3():
    url = 'file://' + FILES_DIR + '/index_v3.yaml'
    i = get_index(url)
    assert len(i.distributions.keys()) == 1
    assert 'foo' in i.distributions.keys()

    dist_files = get_distribution_files(i, 'foo')
    assert len(dist_files) == 2
    get_distribution_file(i, 'foo')
Example #5
def test_get_index_v3_invalid():
    url = 'file://' + FILES_DIR + '/index_v3_invalid.yaml'
    i = get_index(url)

    dist_files = get_distribution_files(i, 'foo')
    assert len(dist_files) == 2
    try:
        get_distribution_file(i, 'foo')
        assert False
    except AssertionError:
        pass
Example #6
def test_get_index_v3_invalid():
    url = 'file://' + FILES_DIR + '/index_v3_invalid.yaml'
    i = get_index(url)

    dist_files = get_distribution_files(i, 'foo')
    assert len(dist_files) == 2
    try:
        get_distribution_file(i, 'foo')
        assert False
    except AssertionError:
        pass
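These two tests presumably rely on index_v3_invalid.yaml listing more than one distribution file for 'foo': get_distribution_files still returns both entries, while the singular get_distribution_file raises an AssertionError, which the try/except converts into the expected test outcome.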
Example #7
def test_get_index_v4():
    url = 'file://' + FILES_DIR + '/index_v4.yaml'
    i = get_index(url)
    assert len(i.distributions.keys()) == 1
    assert 'foo' in i.distributions.keys()

    assert i.distributions['foo']['distribution_status'] == 'active'
    assert i.distributions['foo']['distribution_type'] == 'ros1'

    dist_files = get_distribution_files(i, 'foo')
    assert len(dist_files) == 2
    get_distribution_file(i, 'foo')
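Compared with the v3 tests above, the v4 index adds the 'distribution_status' and 'distribution_type' entries, exposed as plain strings on i.distributions['foo'].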
Example #8
def test_get_index_v4():
    url = 'file://' + FILES_DIR + '/index_v4.yaml'
    i = get_index(url)
    assert len(i.distributions.keys()) == 1
    assert 'foo' in i.distributions.keys()

    assert i.distributions['foo']['distribution_status'] == 'active'
    assert i.distributions['foo']['distribution_type'] == 'ros1'

    dist_files = get_distribution_files(i, 'foo')
    assert len(dist_files) == 2
    get_distribution_file(i, 'foo')
Example #9
def main(index_url, rosdistro_name):
    index = get_index(index_url)
    try:
        distribution_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    success = True
    for repo_name in sorted(distribution_file.repositories.keys()):
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distribution_file.repositories[repo_name]
        repos = [repo.release_repository, repo.source_repository, repo.doc_repository]
        for repo in [r for r in repos if r]:
            if repo.url.startswith('file://'):
                print()
                print("Repository '%s' with url '%s' must not be a local 'file://' url" % (repo_name, repo.url), file=sys.stderr)
                success = False
            if repo.type == 'git':
                prefixes = ['http://github.com/', 'git@github.com:']
                for prefix in prefixes:
                    if repo.url.startswith(prefix):
                        print()
                        print("Repository '%s' with url '%s' must use 'https://github.com/%s' instead" % (repo_name, repo.url, repo.url[len(prefix):]), file=sys.stderr)
                        success = False
    print()

    return success
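A hedged usage sketch for this checker; the index URL and distro name below are illustrative values, not taken from the original:

import sys

if __name__ == '__main__':
    # exit non-zero when any repository URL violates the rules above
    ok = main('https://raw.githubusercontent.com/ros/rosdistro/master/index-v4.yaml',
              'noetic')
    sys.exit(0 if ok else 1)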
Example #10
def call_abi_checker(workspace_root, ros_version, env):
    import rosdistro

    condition_context = {}
    condition_context['ROS_DISTRO'] = env['ROS_DISTRO']
    condition_context['ROS_VERSION'] = ros_version
    condition_context['ROS_PYTHON_VERSION'] = \
        (env or os.environ).get('ROS_PYTHON_VERSION')
    pkgs = get_packages_in_workspaces(workspace_root, condition_context)
    pkg_names = [pkg.name for pkg in pkgs.values()]
    assert pkg_names, 'No packages found in the workspace'

    # Filter packages in source space that have been released
    index = rosdistro.get_index(rosdistro.get_index_url())
    dist_file = rosdistro.get_distribution_file(index, env['ROS_DISTRO'])
    pkg_names_released = [
        pkg_name for pkg_name in pkg_names
        if pkg_name in dist_file.release_packages
    ]

    assert len(
        workspace_root
    ) == 1, 'auto-abi tool needs the implementation of multiple local-dir'
    # ROS_DISTRO is set in the env object
    cmd = [
        'auto-abi.py ' + '--orig-type ros-pkg --orig ' +
        ",".join(pkg_names_released) + ' ' + '--new-type ros-ws --new ' +
        os.path.join(workspace_root[0], 'install_isolated') + ' ' +
        '--report-dir ' + workspace_root[0] + ' ' + '--no-fail-if-empty ' +
        '--display-exec-time'
    ]
    print("Invoking '%s'" % (cmd))
    return subprocess.call(cmd, shell=True, stderr=subprocess.STDOUT, env=env)
Example #11
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for building the binarydeb")
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_binarydeb_dir(parser)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    debian_package_name = get_debian_package_name(
        args.rosdistro_name, args.package_name)

    # get expected package version from rosdistro
    index = get_index(args.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.package_name in dist_file.release_packages
    pkg = dist_file.release_packages[args.package_name]
    repo = dist_file.repositories[pkg.repository_name]
    package_version = repo.release_repository.version

    debian_package_version = package_version

    # find PKGBUILD dependencies
    pkgbuild_proc = subprocess.Popen(["/bin/bash","-c","source  PKGBUILD ;  echo $(printf \"'%s' \" \"${makedepends[@]}\") $(printf \"'%s' \" \"${depends[@]}\")"], stdout=subprocess.PIPE)
    pkgbuild_out, _ = pkgbuild_proc.communicate()
    # decode the captured stdout, not the Popen object
    archlinux_pkg_names = pkgbuild_out.decode('ascii').split(" ")

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'uid': get_user_id(),

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'dependencies': archlinux_pkg_names,

        'rosdistro_name': args.rosdistro_name,
        'package_name': args.package_name,
        'binarydeb_dir': args.binarydeb_dir,
    }
    create_dockerfile(
        'release/binary_archlinux_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print('  -v %s:/tmp/binary_archlinux' % args.binarydeb_dir)
Example #12
def main(index_url, rosdistro_name):
    index = get_index(index_url)
    try:
        distribution_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    success = True
    for repo_name in sorted(distribution_file.repositories.keys()):
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distribution_file.repositories[repo_name]
        repos = [repo.release_repository, repo.source_repository, repo.doc_repository]
        for repo in [r for r in repos if r]:
            if repo.url.startswith('file://'):
                print()
                print("Repository '%s' with url '%s' must not be a local 'file://' url" % (repo_name, repo.url), file=sys.stderr)
                success = False
            if repo.type == 'git':
                prefixes = ['http://github.com/', 'git@github.com:']
                for prefix in prefixes:
                    if repo.url.startswith(prefix):
                        print()
                        print("Repository '%s' with url '%s' must use 'https://github.com/%s' instead" % (repo_name, repo.url, repo.url[len(prefix):]), file=sys.stderr)
                        success = False
                for prefix in prefixes + ['https://github.com/']:
                    if repo.url.startswith(prefix) and not repo.url.endswith('.git'):
                        print()
                        print("Repository '%s' with url '%s' should end with `.git` but does not." % (repo_name, repo.url))
                        success = False
    print()

    return success
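This variant extends Example #9 with one extra rule: any GitHub URL, including an already-correct 'https://github.com/' one, should also end in '.git'. Note that this last diagnostic is printed to stdout rather than stderr.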
Example #13
def main(repo_type, rosdistro_name):
    index = get_index(get_index_url())
    try:
        distribution_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    for repo_name in sorted(distribution_file.repositories.keys()):
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distribution_file.repositories[repo_name]
        if repo_type == 'doc':
            repo = repo.doc_repository
        if repo_type == 'source':
            repo = repo.source_repository
        if not repo:
            continue
        try:
            if (repo.type == 'git'):
                check_git_repo(repo.url, repo.version)
            elif (repo.type == 'hg'):
                check_hg_repo(repo.url, repo.version)
            elif (repo.type == 'svn'):
                check_svn_repo(repo.url, repo.version)
            else:
                print()
                print("Unknown type '%s' for repository '%s'" % (repo.type, repo.name), file=sys.stderr)
        except RuntimeError as e:
            print()
            print("Could not fetch repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
    print()

    return True
Example #14
def main(repo_type, rosdistro_name):
    index = get_index(get_index_url())
    try:
        distribution_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    for repo_name in sorted(distribution_file.repositories.keys()):
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distribution_file.repositories[repo_name]
        if repo_type == 'doc':
            repo = repo.doc_repository
        if repo_type == 'source':
            repo = repo.source_repository
        if not repo:
            continue
        try:
            if (repo.type == 'git'):
                check_git_repo(repo.url, repo.version)
            elif (repo.type == 'hg'):
                check_hg_repo(repo.url, repo.version)
            elif (repo.type == 'svn'):
                check_svn_repo(repo.url, repo.version)
            else:
                print()
                print("Unknown type '%s' for repository '%s'" % (repo.type, repo.name), file=sys.stderr)
        except RuntimeError as e:
            print()
            print("Could not fetch repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
    print()

    return True
Example #15
class RepoForm(FlaskForm):
    architectures = ['amd64', 'arm64', 'armhf', 'i386', 'source']
    selected_arch = MultiCheckboxField(
        'Architectures',
        choices=[(arch, arch) for arch in architectures],
        validators=[DataRequired('Select at least one valid architecture')],
    )

    from rosdistro import get_distribution_file, get_index, get_index_url

    index = get_index(get_index_url())
    ros_distributions = index.distributions.keys()
    distributions_combo_list = list()
    list_index = 0
    for item in ros_distributions:
        distribution_file = get_distribution_file(index, item)
        for ubuntu in distribution_file.release_platforms['ubuntu']:
            distributions_combo_list.append(
                (list_index, dict({
                    'ros': item,
                    'ubuntu': ubuntu
                })))
            list_index += 1
    distributions_combo_list = sorted(distributions_combo_list,
                                      key=lambda v: v[1]['ros'])
    selected_distros = MultiCheckboxField(
        'Distributions',
        choices=[(str(dist[0]), dist[1]['ros'] + ' - ' + dist[1]['ubuntu'])
                 for dist in distributions_combo_list],
        validators=[DataRequired('Select at least one valid distribution')],
    )
    submit = SubmitField('Next')
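Note that everything in this class body, including the get_index network call and the loop over get_distribution_file, executes once at import time when RepoForm is defined; the resulting choices are baked into the form.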
Example #16
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name,
        append_timestamp=False):
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers or build_file.abi_incompatibility_assumed:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            targets.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in targets:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins)

    view_name = get_release_view_name(rosdistro_name, release_build_name)
    view = configure_release_view(jenkins, view_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name))
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name))
            continue

        for os_name, os_code_name in targets:
            configure_release_job(
                config_url, rosdistro_name, release_build_name,
                pkg_name, os_name, os_code_name,
                append_timestamp=append_timestamp,
                config=config, build_file=build_file,
                index=index, dist_file=dist_file, dist_cache=dist_cache,
                jenkins=jenkins, view=view,
                generate_import_package_job=False)
Example #17
def main(repo_type, rosdistro_name, check_for_wet_packages=False):
    index = get_index(get_index_url())
    try:
        distribution_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    for repo_name in sorted(distribution_file.repositories.keys()):
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distribution_file.repositories[repo_name]
        if repo_type == 'doc':
            repo = repo.doc_repository
        if repo_type == 'source':
            repo = repo.source_repository
        if not repo:
            continue
        try:
            if (repo.type == 'git'):
                check_git_repo(repo.url, repo.version)
            elif (repo.type == 'hg'):
                check_hg_repo(repo.url, repo.version)
            elif (repo.type == 'svn'):
                check_svn_repo(repo.url, repo.version)
            else:
                print()
                print("Unknown type '%s' for repository '%s'" % (repo.type, repo.name), file=sys.stderr)
                continue
        except RuntimeError as e:
            print()
            print("Could not fetch repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
            continue

        if check_for_wet_packages:
            path = tempfile.mkdtemp()
            try:
                if repo.type == 'git':
                    clone_git_repo(repo.url, repo.version, path)
                elif repo.type == 'hg':
                    clone_hg_repo(repo.url, repo.version, path)
                elif repo.type == 'svn':
                    checkout_svn_repo(repo.url, repo.version, path)
            except RuntimeError as e:
                print()
                print("Could not clone repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
                continue
            else:
                package_paths = find_package_paths(path)
                if not package_paths:
                    print()
                    print("Repository '%s' (%s [%s]) does not contain any wet packages" % (repo.name, repo.url, repo.version), file=sys.stderr)
                    continue
            finally:
                shutil.rmtree(path)

    print()

    return True
Example #18
def get_sources(rosdistro_index_url, rosdistro_name, pkg_name, os_name, os_code_name, sources_dir):
    from rosdistro import get_distribution_file
    from rosdistro import get_index

    index = get_index(rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    if pkg_name not in dist_file.release_packages:
        return "Not a released package name: %s" % pkg_name

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]
    if not repo.release_repository.version:
        return "Repository '%s' has no release version" % repo_name

    pkg_version = repo.release_repository.version
    tag = _get_source_tag(rosdistro_name, pkg_name, pkg_version, os_name, os_code_name)

    cmd = [
        "git",
        "clone",
        "--branch",
        tag,
        # fetch all branches and tags but no history
        "--depth",
        "1",
        "--no-single-branch",
        repo.release_repository.url,
        sources_dir,
    ]

    print("Invoking '%s'" % " ".join(cmd))
    subprocess.check_call(cmd)

    # ensure that the package version is correct
    source_version = dpkg_parsechangelog(sources_dir, ["Version"])[0]
    if not source_version.startswith(pkg_version) or (
        len(source_version) > len(pkg_version) and source_version[len(pkg_version)] in "0123456789"
    ):
        raise RuntimeError(
            (
                "The cloned package version from the GBP (%s) does not match "
                + "the expected package version from the distribution file (%s)"
            )
            % (source_version, pkg_version)
        )

    # output package version for job description
    print("Package '%s' version: %s" % (pkg_name, source_version))

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package

    pkg = parse_package(sources_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print("Package maintainer emails: %s" % " ".join(sorted(maintainer_emails)))
Example #19
def test_verify_files_parsable():
    url = 'file://' + FILES_DIR + '/index_v2.yaml'
    index = get_index(url)
    distribution_file = get_distribution_file(index, 'foo')
    data = yaml_from_distribution_file(distribution_file)
    with open(os.path.join(FILES_DIR, 'foo', 'distribution.yaml'), 'r') as f:
        expected = f.read()
    assert data == expected, get_diff(expected, data)
Example #20
def get_all_distribution_files(url=None):
    if not url:
        url = rosdistro.get_index_url()
    distribution_files = []
    i = rosdistro.get_index(url)
    for d in i.distributions:
        distribution_files.append(rosdistro.get_distribution_file(i, d))
    return distribution_files
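A hedged usage sketch; it assumes network access to the index returned by rosdistro.get_index_url():

# count released packages per distribution file
for dist_file in get_all_distribution_files():
    print('%d release packages' % len(dist_file.release_packages))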
Example #21
def test_get_distribution_file():
    url = 'file://' + FILES_DIR + '/index_v2.yaml'
    i = get_index(url)
    dist_file = get_distribution_file(i, 'foo')
    _validate_dist_file(dist_file)

    dist_files = get_distribution_files(i, 'foo')
    assert len(dist_files) == 1
Example #22
def get_all_distribution_files(url=None):
    if not url:
        url = rosdistro.get_index_url()
    distribution_files = []
    i = rosdistro.get_index(url)
    for d in i.distributions:
        distribution_files.append(rosdistro.get_distribution_file(i, d))
    return distribution_files
Example #23
def test_verify_files_parsable():
    url = 'file://' + FILES_DIR + '/index_v2.yaml'
    index = get_index(url)
    distribution_file = get_distribution_file(index, 'foo')
    data = yaml_from_distribution_file(distribution_file)
    with open(os.path.join(FILES_DIR, 'foo', 'distribution.yaml'), 'r') as f:
        expected = f.read()
    assert data == expected, get_diff(expected, data)
Example #24
def test_get_distribution_file():
    url = 'file://' + FILES_DIR + '/index_v2.yaml'
    i = get_index(url)
    dist_file = get_distribution_file(i, 'foo')
    _validate_dist_file(dist_file)

    dist_files = get_distribution_files(i, 'foo')
    assert len(dist_files) == 1
Example #25
def check_sync_criteria(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        cache_dir):
    # fetch debian package list
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    Target = namedtuple('Target', 'os_name os_code_name arch')
    target = Target('ubuntu', os_code_name, arch)

    repo_index = get_debian_repo_index(
        build_file.target_repository, target, cache_dir)

    # for each release package which matches the release build file
    # check if a binary package exists
    binary_packages = {}
    all_pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(all_pkg_names)
    for pkg_name in sorted(pkg_names):
        debian_pkg_name = get_debian_package_name(rosdistro_name, pkg_name)
        binary_packages[pkg_name] = debian_pkg_name in repo_index

    # check that all elements from whitelist are present
    if build_file.sync_packages:
        missing_binary_packages = [
            pkg_name for pkg_name in build_file.sync_packages
            if not binary_packages.get(pkg_name)]
        if missing_binary_packages:
            print('The following binary packages are missing to sync:',
                  file=sys.stderr)
            for pkg_name in sorted(missing_binary_packages):
                print('-', pkg_name, file=sys.stderr)
            return False
        print('All required binary packages are available:')
        for pkg_name in sorted(build_file.sync_packages):
            print('-', pkg_name)

    # check that count is satisfied
    if build_file.sync_package_count is not None:
        binary_package_count = len([
            pkg_name
            for pkg_name, has_binary_package in binary_packages.items()
            if has_binary_package])
        if binary_package_count < build_file.sync_package_count:
            print('Only %d binary packages available ' % binary_package_count +
                  '(at least %d are required to sync)' %
                  build_file.sync_package_count, file=sys.stderr)
            return False
        print('%d binary packages available ' % binary_package_count +
              '(greater than or equal to the configured sync limit of %d)' %
              build_file.sync_package_count)

    return True
Example #26
def check_sync_criteria(config_url, rosdistro_name, release_build_name,
                        os_code_name, arch, cache_dir):
    # fetch debian package list
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    target = Target('ubuntu', os_code_name, arch)

    repo_index = get_debian_repo_index(build_file.target_repository, target,
                                       cache_dir)

    # for each release package which matches the release build file
    # check if a binary package exists
    binary_packages = {}
    all_pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(all_pkg_names)
    for pkg_name in sorted(pkg_names):
        debian_pkg_name = get_debian_package_name(rosdistro_name, pkg_name)
        binary_packages[pkg_name] = debian_pkg_name in repo_index

    # check that all elements from whitelist are present
    if build_file.sync_packages:
        missing_binary_packages = [
            pkg_name for pkg_name in build_file.sync_packages
            if pkg_name not in binary_packages or not binary_packages[pkg_name]
        ]
        if missing_binary_packages:
            print('The following binary packages are missing to sync:',
                  file=sys.stderr)
            for pkg_name in sorted(missing_binary_packages):
                print('-', pkg_name, file=sys.stderr)
            return False
        print('All required binary packages are available:')
        for pkg_name in sorted(build_file.sync_packages):
            print('-', pkg_name)

    # check that count is satisfied
    if build_file.sync_package_count is not None:
        binary_package_count = len([
            pkg_name
            for pkg_name, has_binary_package in binary_packages.items()
            if has_binary_package
        ])
        if binary_package_count < build_file.sync_package_count:
            print('Only %d binary packages available ' % binary_package_count +
                  '(at least %d are required to sync)' %
                  build_file.sync_package_count,
                  file=sys.stderr)
            return False
        print('%d binary packages available ' % binary_package_count +
              '(greater than or equal to the configured sync limit of %d)' %
              build_file.sync_package_count)

    return True
Example #27
def get_sources(rosdistro_index_url, rosdistro_name, pkg_name, os_name,
                os_code_name, sources_dir):
    from rosdistro import get_distribution_file
    from rosdistro import get_index
    index = get_index(rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    if pkg_name not in dist_file.release_packages:
        return 'Not a released package name: %s' % pkg_name

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]
    if not repo.release_repository.version:
        return "Repository '%s' has no release version" % repo_name

    pkg_version = repo.release_repository.version
    tag = _get_source_tag(rosdistro_name, pkg_name, pkg_version, os_name,
                          os_code_name)

    cmd = [
        'git',
        'clone',
        '--branch',
        tag,
        # fetch all branches and tags but no history
        '--depth',
        '1',
        '--no-single-branch',
        repo.release_repository.url,
        sources_dir
    ]

    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd)

    # ensure that the package version is correct
    source_version = dpkg_parsechangelog(sources_dir, ['Version'])[0]
    if not source_version.startswith(pkg_version) or \
            (len(source_version) > len(pkg_version) and
             source_version[len(pkg_version)] in '0123456789'):
        raise RuntimeError(
            ('The cloned package version from the GBP (%s) does not match ' +
             'the expected package version from the distribution file (%s)') %
            (source_version, pkg_version))

    # output package version for job description
    print("Package '%s' version: %s" % (pkg_name, source_version))

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(sources_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
Example #28
def get_rosdistro_counts(index_path):
    i = rosdistro.get_index(index_path)
    results = []
    for d in valid_distros:
        try:
            d_file = rosdistro.get_distribution_file(i, d)
            count = len(d_file.release_packages)
            results.append(count)
        except:
            results.append(0)
    return results
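Both counting examples assume a module-level valid_distros list defined elsewhere; the bare except records a count of 0 for any distro that is missing from the given index or whose distribution file fails to load.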
Example #29
def get_distro(self, distro):
    if self.__distribution is None:
        try:
            index = get_index(get_index_url())
            self.__distribution = get_distribution_file(
                index,
                distro
            )
        except:
            print("failed to get data about repo %s in distribution %s"
                  % (self.repo_name, self.distro_name))
            raise
    return self.__distribution
Example #30
def configure_devel_jobs(
        config_url, rosdistro_name, source_build_name):
    config = get_config_index(config_url)
    build_files = get_source_build_files(config, rosdistro_name)
    build_file = build_files[source_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print('  -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    view_name = get_devel_view_name(rosdistro_name, source_build_name)
    view = configure_devel_view(jenkins, view_name)

    repo_names = dist_file.repositories.keys()
    repo_names = build_file.filter_repositories(repo_names)

    for repo_name in sorted(repo_names):
        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            print("Skipping repository '%s': no source section" % repo_name)
            continue
        if not repo.source_repository.version:
            print("Skipping repository '%s': no source version" % repo_name)
            continue

        for os_name, os_code_name, arch in targets:
            configure_devel_job(
                config_url, rosdistro_name, source_build_name,
                repo_name, os_name, os_code_name, arch,
                config=config, build_file=build_file,
                index=index, dist_file=dist_file, dist_cache=dist_cache,
                jenkins=jenkins, view=view)
Example #31
def handle_arguments(self, args):
    self.interactive = args.interactive
    self.debian_inc = args.debian_inc
    self.os_name = args.os_name
    self.distros = args.distros
    if self.distros in [None, []]:
        index = rosdistro.get_index(rosdistro.get_index_url())
        distribution_file = rosdistro.get_distribution_file(
            index, self.rosdistro)
        if self.os_name not in distribution_file.release_platforms:
            if args.os_not_required:
                warning(
                    "No platforms defined for os '{0}' in release file for the "
                    "'{1}' distro. This os was not required; continuing without error."
                    .format(self.os_name, self.rosdistro))
                sys.exit(0)
            error(
                "No platforms defined for os '{0}' in release file for the '{1}' distro."
                .format(self.os_name, self.rosdistro),
                exit=True)
        self.distros = distribution_file.release_platforms[self.os_name]
    self.install_prefix = args.install_prefix
    if args.install_prefix is None:
        self.install_prefix = self.default_install_prefix
    self.prefix = args.prefix
    self.branches = match_branches_with_prefix(self.prefix,
                                               get_branches,
                                               prune=not args.match_all)
    if len(self.branches) == 0:
        error("No packages found, check your --prefix or --src arguments.",
              exit=True)
    self.packages = {}
    self.tag_names = {}
    self.names = []
    self.branch_args = []
    self.debian_branches = []
    for branch in self.branches:
        package = get_package_from_branch(branch)
        if package is None:
            # This is an ignored package
            continue
        self.packages[package.name] = package
        self.names.append(package.name)
        args = self.generate_branching_arguments(package, branch)
        # First branch is debian/[<rosdistro>/]<package>
        self.debian_branches.append(args[0][0])
        self.branch_args.extend(args)
Example #32
def __init__(self, repo, distro_name, track, bump):
    self.repo_name = repo
    self.track = track
    self.distro_name = distro_name
    self.bump = bump
    self.pretend = False

    try:
        self.index = get_index(get_index_url())
        self.distribution = get_distribution_file(
            self.index,
            self.distro_name
        )
        self.repo = self.distribution.repositories[self.repo_name]
    except:
        print("failed to get data about repo %s in distribution %s"
              % (self.repo_name, self.distro_name))
        raise
Example #33
def handle_arguments(self, args):
    self.interactive = args.interactive
    self.debian_inc = args.debian_inc
    self.os_name = args.os_name
    self.distros = args.distros
    if self.distros in [None, []]:
        index = rosdistro.get_index(rosdistro.get_index_url())
        distribution_file = rosdistro.get_distribution_file(index, self.rosdistro)
        if self.os_name not in distribution_file.release_platforms:
            if args.os_not_required:
                warning("No platforms defined for os '{0}' in release file for the "
                        "'{1}' distro. This os was not required; continuing without error."
                        .format(self.os_name, self.rosdistro))
                sys.exit(0)
            error("No platforms defined for os '{0}' in release file for the '{1}' distro."
                  .format(self.os_name, self.rosdistro), exit=True)
        self.distros = distribution_file.release_platforms[self.os_name]
    self.install_prefix = args.install_prefix
    if args.install_prefix is None:
        self.install_prefix = self.default_install_prefix
    self.prefix = args.prefix
    self.branches = match_branches_with_prefix(self.prefix, get_branches, prune=not args.match_all)
    if len(self.branches) == 0:
        error(
            "No packages found, check your --prefix or --src arguments.",
            exit=True
        )
    self.packages = {}
    self.tag_names = {}
    self.names = []
    self.branch_args = []
    self.debian_branches = []
    for branch in self.branches:
        package = get_package_from_branch(branch)
        if package is None:
            # This is an ignored package
            continue
        self.packages[package.name] = package
        self.names.append(package.name)
        args = self.generate_branching_arguments(package, branch)
        # First branch is debian/[<rosdistro>/]<package>
        self.debian_branches.append(args[0][0])
        self.branch_args.extend(args)
Example #34
def get_rosdistro_counts(index_path):
    index_uri = os.path.join(index_path, 'index.yaml')
    if not os.path.exists(index_uri):
        print('failed to find %s falling back to v4' % index_uri)
        index_uri = os.path.join(index_path, 'index-v4.yaml')
        if not os.path.exists(index_uri):
            print('Could not find index at this path either %s %s' % (index_path, index_uri))
            subprocess.call('ls %s' % index_path, shell=True)
            return []
    index_uri = 'file://' + index_uri
    i = rosdistro.get_index(index_uri)
    results = []
    for d in valid_distros:
        try:
            d_file = rosdistro.get_distribution_file(i, d)
            count = len(d_file.release_packages)
            results.append(count)
        except:
            results.append(0)
    return results
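This variant resolves a local checkout rather than a URL: it prefers index.yaml, falls back to index-v4.yaml, and prepends 'file://' because rosdistro.get_index expects a URL.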
Example #35
def run_audit(config_url, rosdistro_name, cache_dir):
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    dist_cache = get_distribution_cache(index, rosdistro_name)
    build_files = get_release_build_files(config, rosdistro_name)
    missing_packages = {}
    for bf_name, bf_value in build_files.items():
        missing_packages[bf_name] = copy.deepcopy(bf_value.targets)
        for target in bf_value.get_targets_list():
            all_pkgs, missing_pkgs = partition_packages(
                config_url,
                rosdistro_name,
                bf_name,
                target,
                cache_dir,
                deduplicate_dependencies=True,
                dist_cache=dist_cache)
            missing_packages[bf_name][target] = missing_pkgs
            if 'all' in missing_packages[bf_name]:
                missing_packages[bf_name]['all'] &= missing_pkgs
            else:
                missing_packages[bf_name]['all'] = missing_pkgs

            if 'all' in missing_packages:
                missing_packages['all'] &= missing_pkgs
            else:
                missing_packages['all'] = missing_pkgs

    recommended_actions = len(missing_packages['all'])
    print('# Sync preparation report for %s' % rosdistro_name)
    print('Prepared for configuration: %s' % config_url)
    print('Prepared for rosdistro index: %s' % config.rosdistro_index_url)
    print('\n\n')

    if missing_packages['all']:
        print('## Packages failing on all platforms\n\n'
              'These releases are recommended to be rolled back:\n')
        for mp in sorted(missing_packages['all']):
            print(' - %s ' % mp)
        print('\n\n')
    else:
        print('## No packages detected failing on all platforms\n\n')

    def get_package_repository_link(dist_file, pkg_name):
        """Return the best guess of the url for filing a ticket against the package."""
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if repo.source_repository and repo.source_repository.url:
            return repo.source_repository.url
        if repo.release_repository and repo.release_repository.url:
            return repo.release_repository.url
        return None

    for bf_name in build_files.keys():
        print('## Audit of buildfile %s\n\n' % bf_name)
        # TODO(tfoote) use rosdistro API to print the release build config for editing
        recommended_blacklists = sorted(missing_packages[bf_name]['all'] -
                                        missing_packages['all'])
        recommended_actions += len(recommended_blacklists)
        if not recommended_blacklists:
            print(
                'Congratulations! '
                'No packages are failing to build on all targets for this buildfile.\n\n'
            )
            continue
        print(
            'Attention! '
            'The following packages are failing to build on all targets for this buildfile. '
            'It is recommended to blacklist them in the buildfile.\n\n')
        for rb in recommended_blacklists:
            print(' - %s:' % rb)
            jenkins_urls = get_jenkins_job_urls(
                rosdistro_name, config.jenkins_url, bf_name,
                build_files[bf_name].get_targets_list())
            url = get_package_repository_link(dist_file, rb)
            print('   - Suggested ticket location [%s](%s)' % (url, url))
            print('')
            print('   Title:')
            print('')
            print('       %s in %s fails to build on %s targets' %
                  (rb, rosdistro_name, bf_name))
            print('')
            print('   Body:')
            print('')
            print(
                '       The package %s in %s has been detected as not building'
                % (rb, rosdistro_name) +
                ' on all platforms in the buildfile %s.' % (bf_name) +
                ' The release manager for %s will consider disabling' %
                (rosdistro_name) +
                ' this build if it continues to fail to build.')
            print('       - jenkins_urls:')
            for target, ju in jenkins_urls.items():
                target_str = ' '.join([x for x in target])
                url = ju.format(pkg=rb)
                print('          - [%s](%s)' % (target_str, url))
                # TODO(tfoote) embed build status when buildfarm has https
                # print('    - %s [![Build Status](%s)](%s)' % (' '.join([x for x in target]),
                #       ju.format(pkg = rb) + '/badge/icon', ju.format(pkg = rb)))
            print(
                '       This is being filed because this package is about to be blacklisted.'
                ' If this ticket is resolved please review whether it can be removed from'
                ' the blacklist that should cross reference here.')
            print('')

    return recommended_actions
Example #36
def get_package_list(self):
    distr_file = rosdistro.get_distribution_file(self._index,
                                                 self._distro_name)
    return distr_file.release_packages.keys()
Example #37
def test_get_distribution_file():
    url = 'file://' + FILES_DIR + '/index.yaml'
    i = get_index(url)
    dist_file = get_distribution_file(i, 'foo')
    _validate_dist_file(dist_file)
Example #38
keys = [
    'ros_core',
    'ros_base',
    'robot',
    'viz',
    'desktop',
    'perception',
    'simulators',
    'desktop_full',
]

# Get packages which make up each layer of the variants
mp_sets = {}
index = get_index(get_index_url())
hydro = get_cached_distribution(index, 'hydro')
indigo = get_cached_distribution(index, 'indigo')
dist_file = get_distribution_file(index, 'hydro')
indigo_dist_file = get_distribution_file(index, 'indigo')
dw = DependencyWalker(hydro)
for mp in keys:
    # print("Fetching deps for: ", mp)
    deps = list(set(metapackages[mp].run_depends))
    mp_sets[mp] = set([])
    for dep in deps:
        mp_sets[mp].update(set([dep.name]))
        if dep.name in keys:
            continue
        # print(" ", dep.name)
        previous_pkgs = set([])
        for mp_, mp_set in mp_sets.items():
            if mp == mp_:
                continue
Example #39
def trigger_release_jobs(
        config_url, rosdistro_name, release_build_name,
        missing_only, source_only, cache_dir):
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    Target = namedtuple('Target', 'os_name os_code_name arch')
    targets = []
    for os_name in sorted(build_file.targets.keys()):
        if os_name != 'ubuntu':
            continue
        for os_code_name in sorted(build_file.targets[os_name].keys()):
            targets.append(Target('ubuntu', os_code_name, 'source'))
            if source_only:
                continue
            for arch in sorted(
                    build_file.targets[os_name][os_code_name].keys()):
                # TODO support for non amd64 arch missing
                if arch not in ['amd64']:
                    print('Skipping arch:', arch)
                    continue
                targets.append(Target('ubuntu', os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print('  - %s %s %s' % ('ubuntu', os_code_name, arch))

    dist_file = get_distribution_file(index, rosdistro_name)

    repo_data = None
    if missing_only:
        repo_data = get_debian_repo_data(
            build_file.target_repository, targets, cache_dir)

    jenkins = connect(config.jenkins_url)
    jenkins_queue = jenkins.get_queue()

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    triggered_jobs = []
    skipped_jobs = []
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print(("  Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name))
            continue
        if not repo.release_repository.version:
            print(("  Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name))
            continue
        pkg_version = repo.release_repository.version

        debian_package_name = get_debian_package_name(rosdistro_name, pkg_name)

        for target in targets:
            job_name = get_sourcedeb_job_name(
                rosdistro_name, release_build_name,
                pkg_name, target.os_name, target.os_code_name)
            if target.arch != 'source':
                # binary job can be skipped if source job was triggered
                if job_name in triggered_jobs:
                    print(("  Skipping binary jobs of '%s' since the source " +
                           "job was triggered") % job_name)
                    continue
                job_name = get_binarydeb_job_name(
                    rosdistro_name, release_build_name,
                    pkg_name, target.os_name, target.os_code_name, target.arch)

            if repo_data:
                # check if artifact is missing
                repo_index = repo_data[target]
                if debian_package_name in repo_index:
                    version = repo_index[debian_package_name]
                    version = _strip_version_suffix(version)
                    if version == pkg_version:
                        print(("  Skipping job '%s' since the artifact is " +
                               "already up-to-date") % job_name)
                        continue

            success = invoke_job(jenkins, job_name, queue=jenkins_queue)
            if success:
                triggered_jobs.append(job_name)
            else:
                skipped_jobs.append(job_name)

    print('Triggered %d jobs, skipped %d jobs.' %
          (len(triggered_jobs), len(skipped_jobs)))
Example #40
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument('--workspace-root',
                        required=True,
                        help='The root path of the workspace to compile')
    parser.add_argument('--rosdoc-lite-dir',
                        required=True,
                        help='The root path of the rosdoc_lite repository')
    parser.add_argument('--catkin-sphinx-dir',
                        required=True,
                        help='The root path of the catkin-sphinx repository')
    parser.add_argument('--rosdoc-index-dir',
                        required=True,
                        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument('--os-name',
                        required=True,
                        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument('--os-code-name',
                        required=True,
                        help="The OS code name (e.g. 'xenial')")
    parser.add_argument('--arch',
                        required=True,
                        help="The architecture (e.g. 'amd64')")
    add_argument_build_tool(parser, required=True)
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)
    index = get_index(config.rosdistro_index_url)

    condition_context = get_package_condition_context(index,
                                                      args.rosdistro_name)

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        for pkg in pkgs.values():
            pkg.evaluate_conditions(condition_context)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(args.workspace_root, 'src',
                                args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating update manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api',
                    pkg_name, 'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping, no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)

                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(
                    rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(args.output_dir, 'api', pkg_name,
                                   'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'update rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependendencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(
                set([d.name for d in depends
                     if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependendencies for metapackage '%s'" %
                      pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(
                    set([
                        d.name for d in depends
                        if d.name in valid_package_names
                    ]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(pkg.name, ros_dependency_names)
        rosdoc_index.write_modified_data(args.output_dir,
                                         ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(args.output_dir,
                                                      'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(
                        os.path.join(pkg_changelog_doc_path, 'changelog.html'),
                        'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes
            ]
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(
                prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(args.output_dir, 'rosdoc_tags',
                               '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourself
            # bad things happen when we do this
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url': '../../../api/%s/html' % pkg.name,
                'location': 'file://%s' % os.path.join(
                    args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package': pkg.name,
            }

            # fetch generator specific output folders from rosdoc_lite
            if pkg.name in rosdoc_config_files:
                output_folders = get_generator_output_folders(
                    rosdoc_config_files[pkg.name], pkg.name)
                if 'doxygen' in output_folders:
                    data['docs_url'] += '/' + output_folders['doxygen']

            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    doc_build_files = get_doc_build_files(config, args.rosdistro_name)
    doc_build_file = doc_build_files[args.doc_build_name]

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(
                rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(
                rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = '%s/%s/api/%s/html' % \
                (doc_build_file.canonical_base_url, args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(config.jenkins_url,
                                              args.rosdistro_name,
                                              args.doc_build_name,
                                              args.repository_name,
                                              args.os_name, args.os_code_name,
                                              args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(config.jenkins_url,
                                                build_files,
                                                args.rosdistro_name,
                                                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(config.jenkins_url,
                                                    build_files,
                                                    args.rosdistro_name,
                                                    pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(args.output_dir, 'manifests', pkg.name,
                               'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope('SUBSECTION',
               'overwrite CMakeLists.txt files to only generate messages'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type'
            ]
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" % pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope('SUBSECTION', 'determine dependencies and generate Dockerfile'):
        # initialize rosdep view
        context = initialize_resolver(args.rosdistro_name, args.os_name,
                                      args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            # since catkin is not a run dependency but provides the setup files
            get_os_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_os_package_name(args.rosdistro_name, 'genmsg'),
        ]

        if '3' == str(condition_context['ROS_PYTHON_VERSION']):
            # the following are required by rosdoc_lite
            debian_pkg_names.extend([
                'python3-catkin-pkg-modules', 'python3-kitchen',
                'python3-rospkg-modules', 'python3-sphinx', 'python3-yaml'
            ])
        else:
            if '2' != str(condition_context['ROS_PYTHON_VERSION']):
                print('Unknown python version, using Python 2',
                      condition_context)
            # the following are required by rosdoc_lite
            debian_pkg_names.extend([
                'python-catkin-pkg-modules', 'python-epydoc', 'python-kitchen',
                'python-rospkg', 'python-sphinx', 'python-yaml'
            ])

        if args.build_tool == 'colcon':
            debian_pkg_names.append('python3-colcon-ros')
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_os_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print('  -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(pkgs.values(), 'build, run and doc',
                                   _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use unreleased dependencies
            print(
                '# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in failing build'
            )
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        # generate Dockerfile
        data = {
            'os_name': args.os_name,
            'os_code_name': args.os_code_name,
            'arch': args.arch,
            'build_tool': doc_build_file.build_tool,
            'distribution_repository_urls': args.distribution_repository_urls,
            'distribution_repository_keys': get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),
            'environment_variables': [
                'ROS_PYTHON_VERSION={}'.format(
                    condition_context['ROS_PYTHON_VERSION'])
            ],
            'rosdistro_name': args.rosdistro_name,
            'uid': get_user_id(),
            'dependencies': debian_pkg_names,
            'dependency_versions': debian_pkg_versions,
            'install_lists': [],
            'canonical_base_url': doc_build_file.canonical_base_url,
            'ordered_pkg_tuples': ordered_pkg_tuples,
            'rosdoc_config_files': rosdoc_config_files,
        }
        create_dockerfile('doc/doc_task.Dockerfile.em', data,
                          args.dockerfile_dir)
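
The skip logic at the top of this script boils down to a dict equality check: every tracked input (the buildfarm scripts, rosdoc_lite, catkin-sphinx, and the source repository itself) contributes one hash, and documentation is regenerated as soon as any value differs from the stored one. A minimal, self-contained sketch of that gate, with invented keys and hash values:

# Toy stand-in for the RosdocIndex hash comparison above; the keys and
# hash values are invented for illustration.
stored_hashes = {'ros_buildfarm': 2, 'rosdoc_lite': 'abc123', 'repo': 'f00d'}

current_hashes = dict(stored_hashes)
assert current_hashes == stored_hashes  # nothing changed -> skip generation

current_hashes['rosdoc_lite'] = 'def456'  # a tooling repository moved
assert current_hashes != stored_hashes  # any difference -> regenerate docs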
Example #41
def configure_release_job_with_validation(
        config_url, rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name, append_timestamp=False,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None,
        generate_import_package_job=True,
        filter_arches=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_release_build_files(config, rosdistro_name)
        build_file = build_files[release_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    if pkg_name not in pkg_names:
        raise JobValidationError(
            "Invalid package name '%s' " % pkg_name +
            'choose one of the following: ' + ', '.join(sorted(pkg_names)))

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]

    if not repo.release_repository:
        raise JobValidationError(
            "Repository '%s' has no release section" % repo_name)

    if not repo.release_repository.version:
        raise JobValidationError(
            "Repository '%s' has no release version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))

    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))

    if dist_cache is None and \
            (build_file.notify_maintainers or
             build_file.abi_incompatibility_assumed):
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_release_view_name(rosdistro_name, release_build_name)
        configure_release_view(jenkins, view_name)

    if generate_import_package_job:
        configure_import_package_job(
            config_url, rosdistro_name, release_build_name,
            config=config, build_file=build_file, jenkins=jenkins)

    # sourcedeb job
    job_name = get_sourcedeb_job_name(
        rosdistro_name, release_build_name,
        pkg_name, os_name, os_code_name)

    job_config = _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name, _get_target_arches(
            build_file, os_name, os_code_name, print_skipped=False),
        repo.release_repository, pkg_name,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config)

    dependency_names = []
    if build_file.abi_incompatibility_assumed:
        dependency_names = _get_direct_dependencies(
            pkg_name, dist_cache, pkg_names)
        if dependency_names is None:
            return

    # binarydeb jobs
    for arch in _get_target_arches(build_file, os_name, os_code_name):
        if filter_arches and arch not in filter_arches:
            continue

        job_name = get_binarydeb_job_name(
            rosdistro_name, release_build_name,
            pkg_name, os_name, os_code_name, arch)

        upstream_job_names = [
            get_binarydeb_job_name(
                rosdistro_name, release_build_name,
                dependency_name, os_name, os_code_name, arch)
            for dependency_name in dependency_names]

        job_config = _get_binarydeb_job_config(
            config_url, rosdistro_name, release_build_name,
            config, build_file, os_name, os_code_name, arch,
            repo.release_repository, pkg_name, append_timestamp,
            repo_name, dist_cache=dist_cache,
            upstream_job_names=upstream_job_names)
        # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
        if isinstance(jenkins, object) and jenkins is not False:
            configure_job(jenkins, job_name, job_config)
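
A hypothetical invocation of the function above; the config URL, distro, build, and package names are placeholders, and the call assumes the function and JobValidationError are importable from their ros_buildfarm modules:

# Hypothetical usage; every literal below is a placeholder.
try:
    configure_release_job_with_validation(
        'https://example.com/buildfarm_config/index.yaml', 'indigo',
        'default', 'roscpp', 'ubuntu', 'trusty',
        filter_arches=['amd64'])
except JobValidationError as e:
    print('Not configuring release job:', e)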
Example #42
def configure_devel_job(
        config_url, rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None):
    """
    Configure a single Jenkins devel job.

    This includes the following steps:
    - clone the source repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the release/run_devel_job.py script
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_source_build_files(config, rosdistro_name)
        build_file = build_files[source_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    repo_names = dist_file.repositories.keys()
    repo_names = build_file.filter_repositories(repo_names)

    if repo_name not in repo_names:
        raise JobValidationError(
            "Invalid repository name '%s' " % repo_name +
            'choose one of the following: %s' % ', '.join(sorted(repo_names)))

    repo = dist_file.repositories[repo_name]

    if not repo.source_repository:
        raise JobValidationError(
            "Repository '%s' has no source section" % repo_name)
    if not repo.source_repository.version:
        raise JobValidationError(
            "Repository '%s' has no source version" % repo_name)

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' % ', '.join(sorted(
                build_file.targets[os_name][os_code_name])))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_devel_view_name(rosdistro_name, source_build_name)
        configure_devel_view(jenkins, view_name)

    job_name = get_devel_job_name(
        rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch)

    job_config = _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, repo.source_repository,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config)

    return job_name
Example #43
keys = [
    'ros_core',
    'ros_base',
    'robot',
    'viz',
    'desktop',
    'perception',
    'simulators',
    'desktop_full',
]

# Get packages which make up each layer of the variants
mp_sets = {}
index = get_index(get_index_url())
indigo = get_cached_distribution(index, 'indigo')
jade = get_cached_distribution(index, 'jade')
dist_file = get_distribution_file(index, 'indigo')
jade_dist_file = get_distribution_file(index, 'jade')
dw = DependencyWalker(indigo)
for mp in keys:
    # print("Fetching deps for: ", mp)
    deps = list(set(metapackages[mp].run_depends))
    mp_sets[mp] = set([])
    for dep in deps:
        mp_sets[mp].update(set([dep.name]))
        if dep.name in keys:
            continue
        # print(" ", dep.name)
        previous_pkgs = set([])
        for mp_, mp_set in mp_sets.items():
            if mp == mp_:
                continue
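
The (truncated) loop above computes, for each variant, which packages it adds on top of the variants before it. The core of that is plain set arithmetic, sketched here with invented package names:

# Invented data; shows only the set arithmetic, not the DependencyWalker.
mp_sets = {
    'ros_core': {'roscpp', 'rospy', 'std_msgs'},
    'ros_base': {'roscpp', 'rospy', 'std_msgs', 'actionlib', 'tf2'},
}
added_by_ros_base = mp_sets['ros_base'] - mp_sets['ros_core']
print(sorted(added_by_ros_base))  # ['actionlib', 'tf2']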
Example #44
def get_target_distros(rosdistro):
    print("Fetching targets")
    index = get_index(get_index_url())
    dist_file = get_distribution_file(index, rosdistro)
    return dist_file.release_platforms
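
Hedged usage of the helper above: release_platforms is the mapping of OS names to supported OS code names declared in the distribution file (the values shown are illustrative):

targets = get_target_distros('indigo')
for os_name, os_code_names in targets.items():
    print(os_name, os_code_names)  # e.g. ubuntu ['saucy', 'trusty']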
Example #45
def main(argv=sys.argv[1:]):
    """Create Dockerfiles for images from platform and image yaml data"""

    # Create the top-level parser
    parser = DockerfileArgParser(
        description="Generate the 'Dockerfile's for the base docker images")
    parser.set()
    args = parser.parse_args(argv)

    # If paths were given explicitly
    if args.subparser_name == 'explicit':
        platform_path = args.platform
        images_path = args.images
        output_path = args.output

    # Else just use the given directory path
    elif args.subparser_name == 'dir':
        platform_path = 'platform.yaml'
        images_path = 'images.yaml.em'
        platform_path = os.path.join(args.directory, platform_path)
        images_path = os.path.join(args.directory, images_path)
        output_path = args.directory

    # Read platform params
    with open(platform_path, 'r') as f:
        # use safe_load instead of load
        platform = yaml.safe_load(f)['platform']

    # Read image params using platform params
    images_yaml = StringIO()
    try:
        interpreter = Interpreter(output=images_yaml)
        interpreter.file(open(images_path, 'r'), locals=platform)
        images_yaml = images_yaml.getvalue()
    except Exception as e:
        print("Error processing %s" % images_path)
        raise
    finally:
        interpreter.shutdown()
        interpreter = None
    # Use ordered dict
    images = OrderedLoad(images_yaml, yaml.SafeLoader)['images']

    # Fetch rosdistro data
    index_url = get_index_url()
    index = get_index(index_url)
    dist_file = get_distribution_file(index, platform['rosdistro_name'])

    # For each image tag
    for image in images:

        # Get data for image
        data = dict(images[image])
        data['tag_name'] = image

        # Add platform params
        data.update(platform)

        # Get debian package names for ros
        if 'ros_packages' in data:
            data['ros_packages'] = get_ros_package_names(
                data['rosdistro_name'], data['ros_packages'], dist_file)

        # Get path to save Docker file
        dockerfile_dir = os.path.join(output_path, image)
        if not os.path.exists(dockerfile_dir):
            os.makedirs(dockerfile_dir)
        data['dockerfile_dir'] = dockerfile_dir

        # generate Dockerfile
        create_dockerfile(data)
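
The images.yaml.em expansion above relies on empy's Interpreter. A minimal sketch of the same pattern, assuming the em package is installed and using an inline template instead of a file:

from io import StringIO

import em

output = StringIO()
interpreter = em.Interpreter(output=output)
try:
    # expand an inline template with the same locals mechanism as above
    interpreter.string('os: @(os_name)\n', locals={'os_name': 'ubuntu'})
finally:
    interpreter.shutdown()
print(output.getvalue())  # os: ubuntu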
Example #46
def get_distribution_file(distro):
    global _rosdistro_distribution_files
    if distro not in _rosdistro_distribution_files:
        _rosdistro_distribution_files[distro] = rosdistro.get_distribution_file(get_index(), distro)
    return _rosdistro_distribution_files[distro]
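
The helper above memoizes distribution files in a module-level dict so repeated lookups hit rosdistro only once per distro. The same pattern in isolation, with a stand-in loader instead of rosdistro:

# Stand-in loader; the module-level caching pattern is the point.
_cache = {}

def _load(distro):
    print('fetching %s ...' % distro)
    return {'name': distro}

def get_cached(distro):
    if distro not in _cache:
        _cache[distro] = _load(distro)
    return _cache[distro]

get_cached('indigo')  # prints 'fetching indigo ...'
get_cached('indigo')  # answered from the cache, no print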
Example #47
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for building the binarydeb")
    add_argument_rosdistro_index_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_binarydeb_dir(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    args = parser.parse_args(argv)

    debian_package_name = get_debian_package_name(
        args.rosdistro_name, args.package_name)

    # get expected package version from rosdistro
    index = get_index(args.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.package_name in dist_file.release_packages
    pkg = dist_file.release_packages[args.package_name]
    repo = dist_file.repositories[pkg.repository_name]
    package_version = repo.release_repository.version

    debian_package_version = package_version

    # build_binarydeb dependencies
    debian_pkg_names = ['apt-src']

    # add build dependencies from .dsc file
    dsc_file = get_dsc_file(
        args.binarydeb_dir, debian_package_name, debian_package_version)
    debian_pkg_names += sorted(get_build_depends(dsc_file))

    # get versions for build dependencies
    apt_cache = Cache()
    debian_pkg_versions = get_binary_package_versions(
        apt_cache, debian_pkg_names)

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'uid': get_user_id(),

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'build_environment_variables': args.env_vars,

        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,
        'install_lists': [],

        'rosdistro_name': args.rosdistro_name,
        'package_name': args.package_name,
        'binarydeb_dir': args.binarydeb_dir,
    }
    create_dockerfile(
        'release/binarydeb_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print('  -v %s:/tmp/binarydeb' % args.binarydeb_dir)
Example #48
def get_release_file(distro):
    _check_cache()
    if distro not in _RDCache.release_files:
        dist_file = rosdistro.get_distribution_file(get_index(), distro)
        _RDCache.release_files[distro] = ReleaseFile(dist_file)
    return _RDCache.release_files[distro]
Example #49
def test_get_distribution_file():
    url = 'file://' + FILES_DIR + '/index.yaml'
    i = get_index(url)
    dist_file = get_distribution_file(i, 'foo')
    _validate_dist_file(dist_file)
Example #50
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--rosdoc-lite-dir',
        required=True,
        help='The root path of the rosdoc_lite repository')
    parser.add_argument(
        '--catkin-sphinx-dir',
        required=True,
        help='The root path of the catkin-sphinx repository')
    parser.add_argument(
        '--rosdoc-index-dir',
        required=True,
        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'trusty')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(
            args.workspace_root, 'src', args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating updated manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api', pkg_name,
                    'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping, no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)

                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(
                    args.output_dir, 'api', pkg_name, 'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'update rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependendencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(set([
                d.name for d in depends if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependendencies for metapackage '%s'" %
                      pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(set([
                    d.name for d in depends if d.name in valid_package_names]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(
                pkg.name, ros_dependency_names)
        rosdoc_index.write_modified_data(
            args.output_dir, ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(
                    args.output_dir, 'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(os.path.join(
                        pkg_changelog_doc_path, 'changelog.html'), 'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(
                args.output_dir, 'rosdoc_tags', '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourself
            # bad things happen when we do this
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url': '../../../api/%s/html' % pkg.name,
                'location': 'file://%s' % os.path.join(
                    args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package': pkg.name,
            }
            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = 'http://docs.ros.org/%s/api/%s/html' % \
                (args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(
                config.jenkins_url, args.rosdistro_name, args.doc_build_name,
                args.repository_name, args.os_name, args.os_code_name,
                args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name,
                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name, pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(
                args.output_dir, 'manifests', pkg.name, 'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope(
        'SUBSECTION',
        'overwrite CMakeLists.txt files to only generate messages'
    ):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type']
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" %
                  pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope(
        'SUBSECTION',
        'determine dependencies and generate Dockerfile'
    ):
        # initialize rosdep view
        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            'python-catkin-pkg',
            'python-epydoc',
            'python-kitchen',
            'python-rospkg',
            'python-sphinx',
            'python-yaml',
            # since catkin is not a run dependency but provides the setup files
            get_debian_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_debian_package_name(args.rosdistro_name, 'genmsg'),
        ]
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_debian_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print('  -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(
            pkgs.values(), 'build, run and doc', _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use unreleased dependencies
            print('# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in failing build')
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        build_files = get_doc_build_files(config, args.rosdistro_name)
        build_file = build_files[args.doc_build_name]

        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes]
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        # generate Dockerfile
        data = {
            'os_name': args.os_name,
            'os_code_name': args.os_code_name,
            'arch': args.arch,

            'distribution_repository_urls': args.distribution_repository_urls,
            'distribution_repository_keys': get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),

            'rosdistro_name': args.rosdistro_name,

            'uid': get_user_id(),

            'dependencies': debian_pkg_names,
            'dependency_versions': debian_pkg_versions,

            'canonical_base_url': build_file.canonical_base_url,

            'ordered_pkg_tuples': ordered_pkg_tuples,
            'rosdoc_config_files': rosdoc_config_files,
        }
        create_dockerfile(
            'doc/doc_task.Dockerfile.em', data, args.dockerfile_dir)
Example #51
# There is a possibility that the source_ref has a different distribution file
# layout. Check that they match.
source_ref_index_yaml = yaml.safe_load(show(args.source_ref, 'index-v4.yaml'))
if source_ref_index_yaml['distributions'][args.source]['distribution'] != \
        index_yaml['distributions'][args.source]['distribution']:
    raise RuntimeError(
        'The distribution file layout has changed between the source ref '
        'and now.')

source_distribution_filename = index_yaml['distributions'][args.source]['distribution'][0]
dest_distribution_filename = index_yaml['distributions'][args.dest]['distribution'][0]

# Fetch the source distribution file from the exact point in the repository history requested.
source_distfile_data = yaml.safe_load(show(args.source_ref, source_distribution_filename))
source_distribution = DistributionFile(args.source, source_distfile_data)

# Prepare the destination distribution for new bloom releases from the source distribution.
dest_distribution = get_distribution_file(index, args.dest)
new_repositories = []
repositories_to_retry = []
for repo_name, repo_data in sorted(source_distribution.repositories.items()):
    if repo_name not in dest_distribution.repositories:
        dest_repo_data = copy.deepcopy(repo_data)
        if dest_repo_data.release_repository:
            new_repositories.append(repo_name)
            release_tag = dest_repo_data.release_repository.tags['release']
            release_tag = release_tag.replace(args.source, args.dest)
            dest_repo_data.release_repository.tags['release'] = release_tag
        dest_distribution.repositories[repo_name] = dest_repo_data
    elif dest_distribution.repositories[repo_name].release_repository is not None and \
            dest_distribution.repositories[repo_name].release_repository.version is None:
        dest_distribution.repositories[repo_name].release_repository.version = \
            repo_data.release_repository.version
        repositories_to_retry.append(repo_name)
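
The release-tag rewrite in the snippet above is a plain string substitution of the source distro name for the destination one. A toy example with invented values:

# Invented tag; mirrors the release_tag.replace(...) call above.
source, dest = 'melodic', 'noetic'
release_tag = 'release/melodic/{package}/{version}'
release_tag = release_tag.replace(source, dest)
assert release_tag == 'release/noetic/{package}/{version}'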
Example #52
def _get_rosdistro_release(distro):
    index = rosdistro.get_index(rosdistro.get_index_url())
    return rosdistro.get_distribution_file(index, distro)
Example #53
def _get_rosdistro_release(distro):
    index = rosdistro.get_index(rosdistro.get_index_url())
    return rosdistro.get_distribution_file(index, distro)
Example #54
def get_release_file(distro):
    _check_cache()
    if distro not in _RDCache.release_files:
        dist_file = rosdistro.get_distribution_file(get_index(), distro)
        _RDCache.release_files[distro] = ReleaseFile(dist_file)
    return _RDCache.release_files[distro]
Example #55
def configure_devel_job(
        config_url, rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, view=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_source_build_files(config, rosdistro_name)
        build_file = build_files[source_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name)

    repo_names = dist_file.repositories.keys()
    repo_names = build_file.filter_repositories(repo_names)

    if repo_name not in repo_names:
        return "Invalid repository name '%s' " % repo_name + \
            'choose one of the following: ' + \
            ', '.join(sorted(repo_names))

    repo = dist_file.repositories[repo_name]

    if not repo.source_repository:
        return "Repository '%s' has no source section" % repo_name
    if not repo.source_repository.version:
        return "Repository '%s' has no source version" % repo_name

    if os_name not in build_file.targets.keys():
        return "Invalid OS name '%s' " % os_name + \
            'choose one of the following: ' + \
            ', '.join(sorted(build_file.targets.keys()))
    if os_code_name not in build_file.targets[os_name].keys():
        return "Invalid OS code name '%s' " % os_code_name + \
            'choose one of the following: ' + \
            ', '.join(sorted(build_file.targets[os_name].keys()))
    if arch not in build_file.targets[os_name][os_code_name]:
        return "Invalid architecture '%s' " % arch + \
            'choose one of the following: ' + \
            ', '.join(sorted(
                build_file.targets[os_name][os_code_name]))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        jenkins = connect(config.jenkins_url)
    if view is None:
        view_name = get_devel_view_name(rosdistro_name, source_build_name)
        configure_devel_view(jenkins, view_name)

    job_name = get_devel_job_name(
        rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch)

    job_config = _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, repo.source_repository,
        repo_name, dist_cache=dist_cache)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        configure_job(jenkins, job_name, job_config)
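
Unlike the earlier variant that raises JobValidationError, this version reports validation failures by returning an error string. Hypothetical usage, with placeholder arguments:

# Placeholder arguments throughout; a string result signals a
# validation failure, None signals success.
result = configure_devel_job(
    'https://example.com/buildfarm_config/index.yaml', 'indigo',
    'default', 'no_such_repo', 'ubuntu', 'trusty', 'amd64')
if result is not None:
    print(result)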