Esempio n. 1
0
def call_abi_checker(workspace_root, ros_version, env):
    """Run the auto-abi.py checker against the released packages of a workspace.

    :param workspace_root: list with exactly one workspace directory
    :param ros_version: value for the ROS_VERSION condition context
    :param env: environment mapping used for the subprocess; must contain
        ROS_DISTRO
    :return: exit code of the auto-abi.py invocation
    """
    import rosdistro

    # auto-abi.py can only handle a single workspace directory; fail fast
    # before doing any expensive work.
    assert len(
        workspace_root
    ) == 1, 'auto-abi tool needs the implementation of multiple local-dir'

    condition_context = {}
    condition_context['ROS_DISTRO'] = env['ROS_DISTRO']
    condition_context['ROS_VERSION'] = ros_version
    condition_context['ROS_PYTHON_VERSION'] = \
        (env or os.environ).get('ROS_PYTHON_VERSION')
    pkgs = get_packages_in_workspaces(workspace_root, condition_context)
    pkg_names = [pkg.name for pkg in pkgs.values()]
    assert pkg_names, 'No packages found in the workspace'

    # Filter packages in source space down to those that have been released
    index = rosdistro.get_index(rosdistro.get_index_url())
    dist_file = rosdistro.get_distribution_file(index, env['ROS_DISTRO'])
    pkg_names_released = [
        pkg_name for pkg_name in pkg_names
        if pkg_name in dist_file.release_packages
    ]

    # ROS_DISTRO is set in the env object
    # BUG fix: with shell=True the command must be a single string; passing a
    # list makes only its first element the shell command line, which is
    # fragile and non-portable.
    cmd = (
        'auto-abi.py '
        '--orig-type ros-pkg --orig ' + ",".join(pkg_names_released) + ' '
        '--new-type ros-ws --new ' +
        os.path.join(workspace_root[0], 'install_isolated') + ' '
        '--report-dir ' + workspace_root[0] + ' '
        '--no-fail-if-empty '
        '--display-exec-time'
    )
    print("Invoking '%s'" % cmd)
    return subprocess.call(cmd, shell=True, stderr=subprocess.STDOUT, env=env)
def main(argv=sys.argv[1:]):
    """Generate a 'Dockerfile' for building the Arch Linux binary package
    and print the volumes that must be mounted when running the container.
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for building the binarydeb")
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_binarydeb_dir(parser)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    debian_package_name = get_debian_package_name(
        args.rosdistro_name, args.package_name)

    # get expected package version from rosdistro
    index = get_index(args.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.package_name in dist_file.release_packages
    pkg = dist_file.release_packages[args.package_name]
    repo = dist_file.repositories[pkg.repository_name]
    package_version = repo.release_repository.version

    debian_package_version = package_version

    # find PKGBUILD dependencies: let bash source the PKGBUILD and echo the
    # quoted contents of the makedepends and depends arrays
    pkgbuild_proc = subprocess.Popen(["/bin/bash","-c","source  PKGBUILD ;  echo $(printf \"'%s' \" \"${makedepends[@]}\") $(printf \"'%s' \" \"${depends[@]}\")"], stdout=subprocess.PIPE)
    pkgbuild_out,_ = pkgbuild_proc.communicate()
    # BUG fix: decode the captured stdout bytes, not the Popen object
    # (Popen has no decode(); the original raised AttributeError here).
    archlinux_pkg_names = pkgbuild_out.decode('ascii').split(" ")

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'uid': get_user_id(),

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'dependencies': archlinux_pkg_names,

        'rosdistro_name': args.rosdistro_name,
        'package_name': args.package_name,
        'binarydeb_dir': args.binarydeb_dir,
    }
    create_dockerfile(
        'release/binary_archlinux_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print('  -v %s:/tmp/binary_archlinux' % args.binarydeb_dir)
def main(index_url, rosdistro_name):
    """Validate repository urls in a distribution file.

    Flags local 'file://' urls and non-https GitHub urls; returns True when
    every repository passes, False otherwise.
    """
    index = get_index(index_url)
    try:
        dist_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    success = True
    for repo_name in sorted(dist_file.repositories.keys()):
        # progress indicator, one dot per repository
        sys.stdout.write('.')
        sys.stdout.flush()
        entry = dist_file.repositories[repo_name]
        sections = (entry.release_repository,
                    entry.source_repository,
                    entry.doc_repository)
        for repo in filter(None, sections):
            if repo.url.startswith('file://'):
                print()
                print("Repository '%s' with url '%s' must not be a local 'file://' url" % (repo_name, repo.url), file=sys.stderr)
                success = False
            if repo.type != 'git':
                continue
            for prefix in ('http://github.com/', '[email protected]:'):
                if repo.url.startswith(prefix):
                    print()
                    print("Repository '%s' with url '%s' must use 'https://github.com/%s' instead" % (repo_name, repo.url, repo.url[len(prefix):]), file=sys.stderr)
                    success = False
    print()

    return success
Esempio n. 4
0
def test_build_caches():
    """Build the distribution cache for every non-EOL distro and verify the
    released packages can be ordered topologically (no circular deps)."""
    with Fold():
        print(
            """Checking if the 'package.xml' files for all packages are fetchable.
If this fails you can run 'rosdistro_build_cache index.yaml' to perform the same check locally.
""")
        index = 'file://' + os.path.abspath(INDEX_YAML)
        index = get_index(index)
        dist_names = sorted(index.distributions.keys())
        dist_names = [n for n in dist_names if n not in eol_distro_names]

        errors = []
        caches = OrderedDict()
        for dist_name in dist_names:
            with Fold():
                try:
                    cache = generate_distribution_cache(index, dist_name)
                except RuntimeError as e:
                    # BUG fix: the assignment below previously ran even after
                    # a failure, storing an unbound (NameError on the first
                    # iteration) or stale 'cache' for this distro.
                    errors.append(str(e))
                    continue
                caches[dist_name] = cache

        # also check topological order to prevent circular dependencies
        for dist_name, cache in caches.items():
            pkgs = {}
            for pkg_name, pkg_xml in cache.release_package_xmls.items():
                pkgs[pkg_name] = parse_package_string(pkg_xml)
            try:
                topological_order_packages(pkgs)
            except RuntimeError as e:
                errors.append('%s: %s' % (dist_name, e))

        if errors:
            raise RuntimeError('\n'.join(errors))
Esempio n. 5
0
def test_get_doc_build_files():
    """There is exactly one doc build file for 'foo' with the expected timeout."""
    index = get_index('file://' + FILES_DIR + '/index_v2.yaml')
    doc_build_files = get_doc_build_files(index, 'foo')
    assert len(doc_build_files) == 1
    assert doc_build_files[0].jenkins_job_timeout == 23
Esempio n. 6
0
class RepoForm(FlaskForm):
    """Flask-WTF form for selecting target architectures and ROS/Ubuntu
    distribution combinations.

    NOTE(review): the rosdistro index is downloaded in the class body below,
    i.e. at import time; a network failure would prevent importing this
    module -- confirm this is acceptable.
    """

    # Debian-style architecture identifiers offered as checkboxes.
    architectures = ['amd64', 'arm64', 'armhf', 'i386', 'source']
    selected_arch = MultiCheckboxField(
        'Architectures',
        choices=[(arch, arch) for arch in architectures],
        validators=[DataRequired('Select at least one valid architecture')],
    )

    from rosdistro import get_distribution_file, get_index, get_index_url

    # Build (index, {'ros': ..., 'ubuntu': ...}) pairs from every ubuntu
    # release platform of every distribution in the rosdistro index.
    index = get_index(get_index_url())
    ros_distributions = index.distributions.keys()
    distributions_combo_list = list()
    list_index = 0
    for item in ros_distributions:
        distribution_file = get_distribution_file(index, item)
        for ubuntu in distribution_file.release_platforms['ubuntu']:
            distributions_combo_list.append(
                (list_index, dict({
                    'ros': item,
                    'ubuntu': ubuntu
                })))
            list_index += 1
    # Sort by ROS distribution name for a stable, readable choice list.
    distributions_combo_list = sorted(distributions_combo_list,
                                      key=lambda v: v[1]['ros'])
    selected_distros = MultiCheckboxField(
        'Distributions',
        choices=[(str(dist[0]), dist[1]['ros'] + ' - ' + dist[1]['ubuntu'])
                 for dist in distributions_combo_list],
        validators=[DataRequired('Select at least one valid distribution')],
    )
    submit = SubmitField('Next')
Esempio n. 7
0
 def __init__(self, name, ros_distro):
     """Set up the source checker for a ROS distribution.

     :param name: name forwarded to the SrcAptBase base class
     :param ros_distro: ROS distribution name, normalized via
         detect_ros_distribution
     """
     SrcAptBase.__init__(self, name)
     self.ros_distro = self.detect_ros_distribution(ros_distro)
     # Distribution metadata from the rosdistro index, used later to
     # resolve this distro's released packages.
     self.rosdistro_index = rosdistro.get_index(rosdistro.get_index_url())
     self.cache = rosdistro.get_distribution_cache(self.rosdistro_index,
                                                   self.ros_distro)
     self.distro_file = self.cache.distribution_file
     # More logic could be needed with new ros distributions
     # ROS1 - https://www.ros.org/reps/rep-0003.html
     # ROS2 - http://design.ros2.org/articles/changes.html
     if self.ros_distro == 'melodic':
         self.compilation_flags.append('--std=c++14')
     else:
         # All non-melodic distros get the ROS 2 oriented flags below.
         self.compilation_flags.append('--std=c++17')
         # needed for gazebo_ros_pkgs
         self.compilation_flags.append('-DBOOST_HAS_PTHREADS=1')
         # gtest-vendor is ROS2
         self.compilation_flags.append('-I' +
             join('/opt/ros/', self.ros_distro, 'src', 'gtest_vendor', 'include'))
         # flag to avoid problems in rcutils
         # https://github.com/osrf/auto-abi-checker/issues/17
         self.compilation_flags.append('-DRCUTILS__STDATOMIC_HELPER_H_')
         # flags for rmw_connext packages
         self.compilation_flags.append('-DRTI_UNIX')
         # add include paths for every RTI Connext DDS version found on disk
         for rti_path in glob.glob('/opt/rti.com/rti_connext_dds-*'):
             self.compilation_flags.append('-I' + rti_path + '/include/')
             self.compilation_flags.append('-I' + rti_path + '/include/ndds')
     # Needs to add /opt/ros includes to compile ROS software
     self.compilation_flags.append('-I' +
         join('/opt/ros/', self.ros_distro, 'include'))
def test_get_release_builds():
    """The 'foo' release has one build file with the expected targets and
    layered target configuration."""
    url = 'file://' + FILES_DIR + '/index_v2.yaml'
    index = get_index(url)
    release = get_release(index, 'foo')
    builds = get_release_builds(index, release)
    assert len(builds) == 1
    build = builds[0]
    assert build.jenkins_sourcedeb_job_timeout == 5
    assert build.jenkins_binarydeb_job_timeout == 42

    assert set(build.get_target_os_names()) == {'ubuntu'}
    assert set(build.get_target_os_code_names('ubuntu')) == \
        {'precise', 'quantal', 'raring'}
    assert set(build.get_target_arches('ubuntu', 'precise')) == \
        {'amd64', 'i386'}

    # global configuration
    config = build.get_target_configuration()
    assert len(config.keys()) == 2
    assert set(config.keys()) == {'apt_target_repository', 'foo'}
    assert config['apt_target_repository'] == 'http://repo.example.com/'
    assert config['foo'] == 'bar'

    # per-os-code-name configuration
    config = build.get_target_configuration('ubuntu', 'precise')
    assert 'foo' in config.keys()
    assert config['foo'] == 'bar'
    assert 'ping' in config.keys()
    assert config['ping'] == 'pong'

    # per-arch configuration overrides the code-name level value
    config = build.get_target_configuration('ubuntu', 'precise', 'amd64')
    assert 'foo' in config.keys()
    assert config['foo'] == 'baz'
Esempio n. 9
0
def _check_platform_helper() -> Tuple[str, dict, dict]:
    """
    Check ROS_DISTRO environment variables and distribution installed.

    :return: string of distro name, dict of distribution info, dict of release platforms info

    NOTE(review): every error path below returns None (bare ``return``)
    rather than the annotated tuple, and ``distro_data`` may end up as ''
    (a str, not a dict) -- callers must tolerate both; consider fixing the
    annotation to Optional.
    """
    distro_name = os.environ.get('ROS_DISTRO')
    if not distro_name:
        doctor_error('ROS_DISTRO is not set.')
        return
    # normalize the name before looking it up in the index
    distro_name = distro_name.lower()
    u = rosdistro.get_index_url()
    if not u:
        doctor_error(
            'Unable to access ROSDISTRO_INDEX_URL or DEFAULT_INDEX_URL. '
            'Check network setting to make sure machine is connected to internet.'
        )
        return
    i = rosdistro.get_index(u)
    distro_info = i.distributions.get(distro_name)
    if not distro_info:
        doctor_warn(f'Distribution name {distro_name} is not found')
        return
    try:
        distro_data = rosdistro.get_distribution(i, distro_name).get_data()
    except AttributeError:
        # get_distribution may yield no usable object; degrade to an empty
        # value instead of crashing.
        distro_data = ''
    return distro_name, distro_info, distro_data
Esempio n. 10
0
def main(index_url, rosdistro_name):
    """Check repository urls in a distribution file: no local 'file://'
    urls, canonical https GitHub urls, and GitHub urls ending in '.git'.

    :return: True if all repositories pass, False otherwise
    """
    index = get_index(index_url)
    try:
        distribution_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    success = True
    for repo_name in sorted(distribution_file.repositories.keys()):
        # progress indicator, one dot per repository
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distribution_file.repositories[repo_name]
        repos = [repo.release_repository, repo.source_repository, repo.doc_repository]
        for repo in [r for r in repos if r]:
            if repo.url.startswith('file://'):
                print()
                print("Repository '%s' with url '%s' must not be a local 'file://' url" % (repo_name, repo.url), file=sys.stderr)
                success = False
            if repo.type == 'git':
                prefixes = ['http://github.com/', '[email protected]:']
                for prefix in prefixes:
                    if repo.url.startswith(prefix):
                        print()
                        print("Repository '%s' with url '%s' must use 'https://github.com/%s' instead" % (repo_name, repo.url, repo.url[len(prefix):]), file=sys.stderr)
                        success = False
                for prefix in prefixes + ['https://github.com/']:
                    if repo.url.startswith(prefix) and not repo.url.endswith('.git'):
                        print()
                        # BUG fix: this diagnostic went to stdout while every
                        # other diagnostic goes to stderr; keep it consistent.
                        print("Repository '%s' with url '%s' should end with `.git` but does not." % (repo_name, repo.url), file=sys.stderr)
                        success = False
    print()

    return success
Esempio n. 11
0
def get_rosdistro(quiet):
    """Return a Rosdistro wrapper for the distribution named by ROS_DISTRO.

    The downloaded distribution is memoized in the module-level
    ``_rosdistro_cache`` so repeated calls do not hit the network again.
    When ROS_DISTRO is unset or the download fails, the wrapper is built
    with ``dist=None``.

    :param quiet: when true, suppress progress/error output on stderr
    """
    global _rosdistro_cache
    dist = None
    if "ROS_DISTRO" in os.environ:
        distro_id = os.environ["ROS_DISTRO"]
        if distro_id not in _rosdistro_cache:
            try:
                from rosdistro import get_index, get_index_url, get_cached_distribution
                url = get_index_url()
                if not quiet:
                    sys.stderr.write(
                        "catkin_lint: downloading %s package index from %s\n" %
                        (distro_id, url))
                index = get_index(url)
                dist = get_cached_distribution(index,
                                               distro_id,
                                               allow_lazy_load=True)
            except Exception as err:
                # Best effort: a failure is reported (unless quiet) and the
                # None result is cached so we do not retry on every call.
                if not quiet:
                    sys.stderr.write(
                        "catkin_lint: cannot initialize rosdistro: %s\n" %
                        str(err))
            _rosdistro_cache[distro_id] = dist
        dist = _rosdistro_cache[distro_id]
    return Rosdistro(dist=dist, quiet=quiet)
def main(repo_type, rosdistro_name):
    """Fetch-check every repository of the given type ('doc' or 'source').

    Individual fetch failures are only reported, not fatal.

    :return: True when the check ran; False when the distro file could not
        be loaded or repo_type is unknown
    """
    index = get_index(get_index_url())
    if repo_type == 'doc':
        try:
            distro_file = get_doc_file(index, rosdistro_name)
        except RuntimeError as e:
            print("Could not load doc file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
            return False
    elif repo_type == 'source':
        try:
            distro_file = get_source_file(index, rosdistro_name)
        except RuntimeError as e:
            print("Could not load source file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
            return False
    else:
        # BUG fix: any other repo_type previously fell through and raised
        # NameError on the unbound 'distro_file' below.
        print("Unknown repo_type '%s'" % repo_type, file=sys.stderr)
        return False

    for repo_name in sorted(distro_file.repositories.keys()):
        # progress indicator, one dot per repository
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distro_file.repositories[repo_name]
        try:
            if (repo.type == 'git'):
                check_git_repo(repo.url, repo.version)
            elif (repo.type == 'hg'):
                check_hg_repo(repo.url, repo.version)
            elif (repo.type == 'svn'):
                check_svn_repo(repo.url, repo.version)
            else:
                print()
                print("Unknown type '%s' for repository '%s'" % (repo.type, repo.name), file=sys.stderr)
        except RuntimeError as e:
            print()
            print("Could not fetch repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
    print()

    return True
Esempio n. 13
0
def test_get_index_from_http_with_query_parameters():
    """An index fetched over http keeps its query string on all nested urls."""
    import subprocess
    import sys
    import time
    url = 'http://localhost:9876/index_v3.yaml?raw&at=master'
    # serve FILES_DIR over http in a child process
    server_module = 'SimpleHTTPServer' \
        if sys.version_info < (3, 0, 0) else 'http.server'
    proc = subprocess.Popen(
        [sys.executable, '-m', server_module, '9876'], cwd=FILES_DIR)
    time.sleep(0.5)
    try:
        index = get_index(url)
        assert len(index.distributions.keys()) == 1
        assert 'foo' in index.distributions.keys()

        # test if every url has the same queries
        for dist_urls in index.distributions['foo'].values():
            urls = dist_urls if isinstance(dist_urls, list) else [dist_urls]
            for dist_url in urls:
                assert dist_url.endswith('?raw&at=master')
        assert len(get_distribution_files(index, 'foo')) == 2
        get_distribution_file(index, 'foo')
    finally:
        proc.terminate()
Esempio n. 14
0
def get_sourcerpm(rosdistro_index_url,
                  rosdistro_name,
                  package_name,
                  sourcepkg_dir,
                  skip_download_sourcepkg=False):
    """Download the source RPM of a released package via mock into sourcepkg_dir.

    :param rosdistro_index_url: url of the rosdistro index to query
    :param rosdistro_name: name of the ROS distribution
    :param package_name: ROS package name, mapped to the OS package name
    :param sourcepkg_dir: directory used as the mock result dir and as cwd
    :param skip_download_sourcepkg: when True, skip the rosdistro query and
        the download entirely
    :raises subprocess.CalledProcessError: if the mock invocation fails
    """
    # ensure that no source subfolder exists
    # NOTE(review): the comment above states an invariant that no code here
    # enforces -- presumably guaranteed by the caller; confirm.
    rpm_package_name = get_os_package_name(rosdistro_name, package_name)
    if not skip_download_sourcepkg:
        # get expected package version from rosdistro
        from rosdistro import get_distribution_cache
        from rosdistro import get_index
        index = get_index(rosdistro_index_url)
        dist_cache = get_distribution_cache(index, rosdistro_name)
        dist_file = dist_cache.distribution_file
        assert package_name in dist_file.release_packages
        pkg = dist_file.release_packages[package_name]
        repo = dist_file.repositories[pkg.repository_name]
        package_version = repo.release_repository.version

        # let dnf, run inside the mock chroot, download the source package
        # matching the exact released version from the buildfarm source repo
        cmd = [
            'mock', '--resultdir',
            '%s' % sourcepkg_dir, '--no-cleanup-after', '--verbose', '--root',
            'ros_buildfarm', '--dnf-cmd', '--', 'download', '--source',
            '--disablerepo', '*', '--enablerepo',
            'ros-buildfarm-target-source',
            '%s-%s.*' % (rpm_package_name, package_version)
        ]

        print("Invoking '%s'" % ' '.join(cmd))
        subprocess.check_call(cmd, cwd=sourcepkg_dir)
Esempio n. 15
0
def test_get_release_builds():
    """Release build file of 'foo': targets and layered configuration."""
    index = get_index('file://' + FILES_DIR + '/index_v2.yaml')
    release = get_release(index, 'foo')
    builds = get_release_builds(index, release)
    assert len(builds) == 1
    build = builds[0]
    assert build.jenkins_sourcedeb_job_timeout == 5
    assert build.jenkins_binarydeb_job_timeout == 42

    assert set(build.get_target_os_names()) == {'ubuntu'}
    assert set(build.get_target_os_code_names('ubuntu')) == \
        {'precise', 'quantal', 'raring'}
    assert set(build.get_target_arches('ubuntu', 'precise')) == \
        {'amd64', 'i386'}

    # global configuration
    cfg = build.get_target_configuration()
    assert len(cfg.keys()) == 2
    assert set(cfg.keys()) == {'apt_target_repository', 'foo'}
    assert cfg['apt_target_repository'] == 'http://repo.example.com/'
    assert cfg['foo'] == 'bar'

    # per-os-code-name configuration
    cfg = build.get_target_configuration('ubuntu', 'precise')
    assert 'foo' in cfg.keys()
    assert cfg['foo'] == 'bar'
    assert 'ping' in cfg.keys()
    assert cfg['ping'] == 'pong'

    # per-arch configuration overrides the code-name level value
    cfg = build.get_target_configuration('ubuntu', 'precise', 'amd64')
    assert 'foo' in cfg.keys()
    assert cfg['foo'] == 'baz'
Esempio n. 16
0
def test_get_index_from_http_with_query_parameters():
    """Urls in an http-served index keep the query string; the status/type
    metadata entries are exempt."""
    import subprocess
    import sys
    import time
    url = 'http://localhost:9876/index_v3.yaml?raw&at=master'
    # serve FILES_DIR over http in a child process
    server_module = 'SimpleHTTPServer' \
        if sys.version_info < (3, 0, 0) else 'http.server'
    proc = subprocess.Popen(
        [sys.executable, '-m', server_module, '9876'], cwd=FILES_DIR)
    time.sleep(0.5)
    try:
        index = get_index(url)
        assert len(index.distributions.keys()) == 1
        assert 'foo' in index.distributions.keys()

        # test if every url has the same queries
        for key, dist_urls in index.distributions['foo'].items():
            if key in ('distribution_status', 'distribution_type'):
                continue
            urls = dist_urls if isinstance(dist_urls, list) else [dist_urls]
            for dist_url in urls:
                assert dist_url.endswith('?raw&at=master')
        assert len(get_distribution_files(index, 'foo')) == 2
        get_distribution_file(index, 'foo')
    finally:
        proc.terminate()
Esempio n. 17
0
def test_get_doc_build_files():
    """There is exactly one doc build file for 'foo' with the expected timeout."""
    index = get_index('file://' + FILES_DIR + '/index.yaml')
    doc_build_files = get_doc_build_files(index, 'foo')
    assert len(doc_build_files) == 1
    assert doc_build_files[0].jenkins_job_timeout == 23
def test_build_caches():
    """Build the distribution cache for every non-EOL distro and verify the
    released packages can be ordered topologically (no circular deps)."""
    with Fold():
        print("""Checking if the 'package.xml' files for all packages are fetchable.
If this fails you can run 'rosdistro_build_cache index.yaml' to perform the same check locally.
""")
        index = 'file://' + os.path.abspath(INDEX_YAML)
        index = get_index(index)
        dist_names = sorted(index.distributions.keys())
        dist_names = [n for n in dist_names if n not in eol_distro_names]

        errors = []
        caches = OrderedDict()
        for dist_name in dist_names:
            with Fold():
                try:
                    cache = generate_distribution_cache(index, dist_name)
                except RuntimeError as e:
                    # BUG fix: the assignment below previously ran even after
                    # a failure, storing an unbound (NameError on the first
                    # iteration) or stale 'cache' for this distro.
                    errors.append(str(e))
                    continue
                caches[dist_name] = cache

        # also check topological order to prevent circular dependencies
        for dist_name, cache in caches.items():
            pkgs = {}
            print("Parsing manifest files for '%s'" % dist_name)
            for pkg_name, pkg_xml in cache.release_package_xmls.items():
                pkgs[pkg_name] = parse_package_string(pkg_xml)
            print("Order all packages in '%s' topologically" % dist_name)
            try:
                topological_order_packages(pkgs)
            except RuntimeError as e:
                errors.append('%s: %s' % (dist_name, e))

        if errors:
            raise RuntimeError('\n'.join(errors))
Esempio n. 19
0
def main(repo_type, rosdistro_name):
    """Fetch-check the doc or source repository of every entry in the
    distribution file; fetch failures are reported but not fatal."""
    index = get_index(get_index_url())
    try:
        distribution_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    checkers = {
        'git': check_git_repo,
        'hg': check_hg_repo,
        'svn': check_svn_repo,
    }
    for repo_name in sorted(distribution_file.repositories.keys()):
        # progress indicator, one dot per repository
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distribution_file.repositories[repo_name]
        if repo_type == 'doc':
            repo = repo.doc_repository
        if repo_type == 'source':
            repo = repo.source_repository
        if not repo:
            continue
        try:
            checker = checkers.get(repo.type)
            if checker is not None:
                checker(repo.url, repo.version)
            else:
                print()
                print("Unknown type '%s' for repository '%s'" % (repo.type, repo.name), file=sys.stderr)
        except RuntimeError as e:
            print()
            print("Could not fetch repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
    print()

    return True
Esempio n. 20
0
def get_sources(rosdistro_index_url, rosdistro_name, pkg_name, os_name, os_code_name, sources_dir):
    """Clone the release (GBP) sources of a released package into sources_dir.

    :param rosdistro_index_url: url of the rosdistro index to query
    :param rosdistro_name: name of the ROS distribution
    :param pkg_name: name of the released package
    :param os_name: target OS name used to compute the source tag
    :param os_code_name: target OS code name used to compute the source tag
    :param sources_dir: destination directory for the clone
    :return: an error message string for expected failures (unreleased
        package / missing release version), otherwise None
    :raises RuntimeError: if the cloned changelog version does not match the
        version from the distribution file
    """
    from rosdistro import get_distribution_file
    from rosdistro import get_index

    index = get_index(rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    if pkg_name not in dist_file.release_packages:
        return "Not a released package name: %s" % pkg_name

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]
    if not repo.release_repository.version:
        return "Repository '%s' has no release version" % repo_name

    pkg_version = repo.release_repository.version
    tag = _get_source_tag(rosdistro_name, pkg_name, pkg_version, os_name, os_code_name)

    cmd = [
        "git",
        "clone",
        "--branch",
        tag,
        # fetch all branches and tags but no history
        "--depth",
        "1",
        "--no-single-branch",
        repo.release_repository.url,
        sources_dir,
    ]

    print("Invoking '%s'" % " ".join(cmd))
    subprocess.check_call(cmd)

    # ensure that the package version is correct
    # accept a packaging suffix after the version (e.g. '1.2.3-1') but
    # reject a longer version that merely shares the prefix (e.g. '1.2.30')
    source_version = dpkg_parsechangelog(sources_dir, ["Version"])[0]
    if not source_version.startswith(pkg_version) or (
        len(source_version) > len(pkg_version) and source_version[len(pkg_version)] in "0123456789"
    ):
        raise RuntimeError(
            (
                "The cloned package version from the GBP (%s) does not match "
                + "the expected package version from the distribution file (%s)"
            )
            % (source_version, pkg_version)
        )

    # output package version for job description
    print("Package '%s' version: %s" % (pkg_name, source_version))

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package

    pkg = parse_package(sources_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print("Package maintainer emails: %s" % " ".join(sorted(maintainer_emails)))
Esempio n. 21
0
def configure_release_jobs(
        config_url, rosdistro_name, release_build_name,
        append_timestamp=False):
    """Configure all Jenkins release jobs for one release build file.

    For every released, versioned package in the distribution (filtered by
    the build file) a release job is configured per (os_name, os_code_name)
    target; an import-package job and the release view are set up once.

    :param config_url: url of the ros_buildfarm config index
    :param rosdistro_name: name of the ROS distribution
    :param release_build_name: key of the release build file to use
    :param append_timestamp: forwarded to configure_release_job
    """
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # the distribution cache is only needed for maintainer notification or
    # when ABI incompatibility is assumed
    dist_cache = None
    if build_file.notify_maintainers or build_file.abi_incompatibility_assumed:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            targets.append((os_name, os_code_name))
    print('The build file contains the following targets:')
    for os_name, os_code_name in targets:
        print('  - %s %s: %s' % (os_name, os_code_name, ', '.join(
            build_file.targets[os_name][os_code_name])))

    dist_file = get_distribution_file(index, rosdistro_name)

    jenkins = connect(config.jenkins_url)

    configure_import_package_job(
        config_url, rosdistro_name, release_build_name,
        config=config, build_file=build_file, jenkins=jenkins)

    view_name = get_release_view_name(rosdistro_name, release_build_name)
    view = configure_release_view(jenkins, view_name)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        # skip packages whose repository has no release section or no
        # released version yet
        if not repo.release_repository:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name))
            continue
        if not repo.release_repository.version:
            print(("Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name))
            continue

        for os_name, os_code_name in targets:
            configure_release_job(
                config_url, rosdistro_name, release_build_name,
                pkg_name, os_name, os_code_name,
                append_timestamp=append_timestamp,
                config=config, build_file=build_file,
                index=index, dist_file=dist_file, dist_cache=dist_cache,
                jenkins=jenkins, view=view,
                generate_import_package_job=False)
Esempio n. 22
0
def get_manifest_from_rosdistro(package_name, distro_name):
    """
    Get the rosdistro repository data and package information.

    @param package_name: name of package or repository to get manifest information for.
    It gives package symbols precedence over repository names.
    @type  package_name: str
    @param distro_name: name of ROS distribution
    @type  distro_name: str

    @return: (manifest data, 'package'|'metapackage'|'repository', None), or
    None for an unknown release or a repository without a source section.
    @rtype: ({str: str}, str, str)
    @raise IOError: if data cannot be loaded
    """
    data = {}
    type_ = None
    index = get_index(get_index_url())
    try:
        distribution_cache = get_cached_distribution(index, distro_name)
    except RuntimeError as runerr:
        # BUG fix: Python 3 exceptions have no '.message' attribute
        # (accessing it raised AttributeError); inspect str(runerr) instead.
        if str(runerr).startswith("Unknown release"):
            return None
        raise

    if package_name in distribution_cache.release_packages:
        # released package: take the metadata from its package.xml
        pkg = distribution_cache.release_packages[package_name]
        pkg_xml = distribution_cache.get_release_package_xml(package_name)
        pkg_manifest = parse_package_string(pkg_xml)
        data['description'] = pkg_manifest.description
        website_url = [u.url for u in pkg_manifest.urls if u.type == 'website']
        if website_url:
            data['url'] = website_url[0]
        repo_name = pkg.repository_name
        meta_export = [exp for exp in pkg_manifest.exports if exp.tagname == 'metapackage']
        if meta_export:
            type_ = 'metapackage'
        else:
            type_ = 'package'
    else:
        # fall back to treating the given name as a repository name
        repo_name = package_name
        type_ = 'repository'
    data['repo_name'] = repo_name
    if repo_name in distribution_cache.repositories:
        repo = distribution_cache.repositories[repo_name].release_repository
        if repo:
            data['packages'] = repo.package_names

    # a source repository entry is required to provide the vcs information
    if repo_name in distribution_cache.repositories:
        repo = distribution_cache.repositories[repo_name].source_repository
        if not repo:
            return None
        data['vcs'] = repo.type
        data['vcs_uri'] = repo.url
        data['vcs_version'] = repo.version
    else:
        return None

    return (data, type_, None)
Esempio n. 23
0
def main(repo_type, rosdistro_name, check_for_wet_packages=False):
    """Validate every doc/source repository entry of a rosdistro distribution.

    For each repository in the distribution file, verify the remote is
    fetchable (git/hg/svn). When *check_for_wet_packages* is True, also
    clone each repository into a temporary directory and verify it contains
    at least one catkin ('wet') package.

    :param repo_type: 'doc' or 'source' - which repository entry to check
    :param rosdistro_name: name of the ROS distribution
    :param check_for_wet_packages: additionally clone and look for packages
    :returns: False when the distribution file cannot be loaded; True
      otherwise (per-repository failures are reported on stderr but do
      not abort the scan)
    """
    index = get_index(get_index_url())
    try:
        distribution_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    for repo_name in sorted(distribution_file.repositories.keys()):
        # Progress indicator: one dot per repository scanned.
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distribution_file.repositories[repo_name]
        if repo_type == 'doc':
            repo = repo.doc_repository
        if repo_type == 'source':
            repo = repo.source_repository
        # Repository has no entry of the requested type - nothing to check.
        if not repo:
            continue
        try:
            if (repo.type == 'git'):
                check_git_repo(repo.url, repo.version)
            elif (repo.type == 'hg'):
                check_hg_repo(repo.url, repo.version)
            elif (repo.type == 'svn'):
                check_svn_repo(repo.url, repo.version)
            else:
                print()
                print("Unknown type '%s' for repository '%s'" % (repo.type, repo.name), file=sys.stderr)
                continue
        except RuntimeError as e:
            print()
            print("Could not fetch repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
            continue

        if check_for_wet_packages:
            path = tempfile.mkdtemp()
            try:
                if repo.type == 'git':
                    clone_git_repo(repo.url, repo.version, path)
                elif repo.type == 'hg':
                    clone_hg_repo(repo.url, repo.version, path)
                elif repo.type == 'svn':
                    checkout_svn_repo(repo.url, repo.version, path)
            except RuntimeError as e:
                print()
                print("Could not clone repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
                continue
            else:
                # Clone succeeded: the checkout must contain at least one package.
                package_paths = find_package_paths(path)
                if not package_paths:
                    print()
                    print("Repository '%s' (%s [%s]) does not contain any wet packages" % (repo.name, repo.url, repo.version), file=sys.stderr)
                    continue
            finally:
                # Always remove the temporary clone, even on 'continue' above.
                shutil.rmtree(path)

    print()

    return True
Esempio n. 24
0
def test_verify_files_parsable():
    """Round-tripping the 'foo' distribution file must reproduce the YAML on disk."""
    index_url = 'file://' + FILES_DIR + '/index_v2.yaml'
    dist = get_distribution_file(get_index(index_url), 'foo')
    serialized = yaml_from_distribution_file(dist)
    with open(os.path.join(FILES_DIR, 'foo', 'distribution.yaml'), 'r') as fh:
        on_disk = fh.read()
    assert serialized == on_disk, get_diff(on_disk, serialized)
Esempio n. 25
0
def test_get_index_v2():
    """A v2 index has exactly one distribution ('foo') without status/type metadata."""
    index = get_index('file://' + FILES_DIR + '/index_v2.yaml')
    distros = index.distributions
    assert len(distros.keys()) == 1
    assert 'foo' in distros.keys()
    foo_entry = distros['foo']
    assert 'distribution_status' not in foo_entry
    assert 'distribution_type' not in foo_entry
Esempio n. 26
0
def get_all_distribution_files(url=None):
    """Load the distribution file of every distro listed in the rosdistro index.

    :param url: index URL; defaults to the standard rosdistro index
    :returns: list of distribution file objects
    """
    index = rosdistro.get_index(url if url else rosdistro.get_index_url())
    return [
        rosdistro.get_distribution_file(index, distro_name)
        for distro_name in index.distributions
    ]
Esempio n. 27
0
def test_verify_files_parsable():
    """Serializing the parsed 'foo' release file must reproduce the YAML on disk."""
    index = get_index('file://' + FILES_DIR + '/index.yaml')
    release_file = get_release_file(index, 'foo')
    serialized = yaml_from_release_file(release_file)
    with open(os.path.join(FILES_DIR, 'foo', 'release.yaml'), 'r') as fh:
        expected = fh.read()
    assert serialized == expected, get_diff(expected, serialized)
Esempio n. 28
0
 def execute(self, rosdistro_path, distro):
     """Load the internal rosdistro index and the distribution named *distro*.

     Caches on the instance: the rosdistro root path, the parsed index,
     the distribution object, and the URL of the last distribution file
     entry for that distro.
     """
     self.rosdistro_path = rosdistro_path
     # Build a file:// URI pointing at the checked-out rosdistro index.
     internal_index_path = (rosdistro_path / 'rosdistro' /
                            'index.yaml').resolve().as_uri()
     self.internal_index = get_index(internal_index_path)
     self.internal_distro = get_distribution(self.internal_index, distro)
     # NOTE(review): '[-1]' picks the last listed distribution file entry -
     # presumably the most specific overlay; confirm against the index layout.
     self.internal_distro_file = self.internal_index.distributions[distro][
         'distribution'][-1]
def test_get_distribution_file():
    """The v2 index yields exactly one valid distribution file for 'foo'."""
    index = get_index('file://' + FILES_DIR + '/index_v2.yaml')
    merged = get_distribution_file(index, 'foo')
    _validate_dist_file(merged)
    assert len(get_distribution_files(index, 'foo')) == 1
Esempio n. 30
0
def test_get_index_v2():
    """The v2 index lists only 'foo' and carries no status/type metadata."""
    url = 'file://' + FILES_DIR + '/index_v2.yaml'
    index = get_index(url)
    assert len(index.distributions.keys()) == 1
    assert 'foo' in index.distributions.keys()
    # v2 indexes predate the status/type metadata introduced in v4.
    for absent_key in ('distribution_status', 'distribution_type'):
        assert absent_key not in index.distributions['foo']
def get_all_distribution_files(url=None):
    """Return distribution files for all distros in the index at *url*.

    Falls back to the default rosdistro index URL when *url* is falsy.
    """
    index = rosdistro.get_index(url or rosdistro.get_index_url())
    files = []
    for distro_name in index.distributions:
        files.append(rosdistro.get_distribution_file(index, distro_name))
    return files
Esempio n. 32
0
def test_verify_files_parsable():
    """Round-trip of 'foo' distribution.yaml through the parser must be lossless."""
    with open(os.path.join(FILES_DIR, 'foo', 'distribution.yaml'), 'r') as fh:
        expected = fh.read()
    parsed = get_distribution_file(
        get_index('file://' + FILES_DIR + '/index_v2.yaml'), 'foo')
    actual = yaml_from_distribution_file(parsed)
    assert actual == expected, get_diff(expected, actual)
def test_get_distribution_file():
    """Parsing the v2 index yields a single, valid distribution file for 'foo'."""
    url = 'file://' + FILES_DIR + '/index_v2.yaml'
    index = get_index(url)
    _validate_dist_file(get_distribution_file(index, 'foo'))
    file_list = get_distribution_files(index, 'foo')
    assert len(file_list) == 1
Esempio n. 34
0
def partition_packages(config_url,
                       rosdistro_name,
                       release_build_name,
                       target,
                       cache_dir,
                       deduplicate_dependencies=False,
                       dist_cache=None):
    """Check all packages in the rosdistro and compare to the debian packages repository.

    Return the set of all packages and the set of missing ones.
    """
    # Resolve the rosdistro configuration and the matching release build file.
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = rosdistro_get_distribution_file(index, rosdistro_name)
    build_file = get_release_build_files(config, rosdistro_name)[release_build_name]

    # Snapshot of the apt repository contents for the requested target.
    repo_index = get_package_repo_data(build_file.target_repository, [target],
                                       cache_dir)[target]

    def _released_version(pkg_name):
        # A package only counts when its repository declares a release version.
        repo_name = dist_file.release_packages[pkg_name].repository_name
        return dist_file.repositories[repo_name].release_repository.version

    versioned_pkg_names = [
        name for name in dist_file.release_packages.keys()
        if _released_version(name)
    ]

    distribution = get_cached_distribution(index,
                                           rosdistro_name,
                                           cache=dist_cache)
    pkg_names = filter_buildfile_packages_recursively(versioned_pkg_names,
                                                      build_file, distribution)

    # A package is "binary" when its debian name shows up in the apt index.
    binary_packages = {
        name for name in pkg_names
        if get_os_package_name(rosdistro_name, name) in repo_index
    }

    missing_binary_packages = set(pkg_names) - binary_packages

    if deduplicate_dependencies:
        # Do not list missing packages that are dependencies of other missing ones
        cached_pkgs = get_package_manifests(distribution)
        missing_binary_packages = filter_blocked_dependent_package_names(
            cached_pkgs, missing_binary_packages)

    return binary_packages, missing_binary_packages
Esempio n. 35
0
def test_get_index_v3():
    """A v3 index exposes two distribution files that merge into one distro."""
    index = get_index('file://' + FILES_DIR + '/index_v3.yaml')
    assert len(index.distributions.keys()) == 1
    assert 'foo' in index.distributions.keys()

    assert len(get_distribution_files(index, 'foo')) == 2
    # Merging the two files must succeed without raising.
    get_distribution_file(index, 'foo')
def check_sync_criteria(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        cache_dir):
    """Decide whether a sync may proceed for the given build/OS/arch.

    Checks two criteria from the release build file: every whitelisted
    sync package must have a binary in the target debian repository, and
    the total number of available binaries must reach the configured
    minimum.

    :returns: True when all configured criteria are satisfied
    """
    # fetch debian package list
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    Target = namedtuple('Target', 'os_name os_code_name arch')
    target = Target('ubuntu', os_code_name, arch)

    repo_index = get_debian_repo_index(
        build_file.target_repository, target, cache_dir)

    # for each release package which matches the release build file
    # check if a binary package exists
    binary_packages = {}
    all_pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(all_pkg_names)
    for pkg_name in sorted(pkg_names):
        debian_pkg_name = get_debian_package_name(rosdistro_name, pkg_name)
        binary_packages[pkg_name] = debian_pkg_name in repo_index

    # check that all elements from whitelist are present
    if build_file.sync_packages:
        # BUG FIX: the original computed len([... if has_binary_package]),
        # i.e. an *integer count of packages that HAVE binaries*, and then
        # called sorted() on that int (TypeError). Collect the whitelist
        # packages that are actually missing a binary instead.
        missing_binary_packages = [
            pkg_name for pkg_name in build_file.sync_packages
            if not binary_packages.get(pkg_name)
        ]
        if missing_binary_packages:
            print('The following binary packages are missing to sync:',
                  file=sys.stderr)
            for pkg_name in sorted(missing_binary_packages):
                print('-', pkg_name, file=sys.stderr)
            return False
        print('All required binary packages are available:')
        for pkg_name in sorted(build_file.sync_packages):
            print('-', pkg_name)

    # check that count is satisfied
    if build_file.sync_package_count is not None:
        binary_package_count = len([
            pkg_name
            for pkg_name, has_binary_package in binary_packages.items()
            if has_binary_package])
        if binary_package_count < build_file.sync_package_count:
            print('Only %d binary packages available ' % binary_package_count +
                  '(at least %d are required to sync)' %
                  build_file.sync_package_count, file=sys.stderr)
            return False
        print('%d binary packages available ' % binary_package_count +
              '(more or equal then the configured sync limit of %d)' %
              build_file.sync_package_count)

    return True
    def __init__(self, distro):
        """Cache the rosdistro distribution file for *distro*.

        On any fetch/parse failure the distribution file is set to None,
        so callers must handle a missing file.
        """
        self.distro = distro

        try:
            index = get_index(get_index_url())
            self._distribution_file = get_distribution_cache(index, distro).distribution_file
        except Exception:
            # BUG FIX: was a bare 'except:', which also swallowed
            # SystemExit/KeyboardInterrupt. Exception is broad enough for
            # the expected network/parse failures.
            logger.error("Could not load rosdistro distribution cache")
            self._distribution_file = None
Esempio n. 38
0
def main():
    """Generate Buck build files pinning 'roscpp' and its recursive dependencies.

    Builds a distribution cache for 'indigo', resolves a rosinstall entry
    for every package, downloads each archive to compute its sha1, then
    writes ros/rosdistro/BUCK (remote_file rules) and ros/rosdistro/DEFS
    (the list of archive targets).
    """
    rosdistro_index = rosdistro.get_index(ROSDISTRO_URL)

    cache = generate_distribution_cache(rosdistro_index, 'indigo')
    cached_distro = rosdistro.get_cached_distribution(rosdistro_index,
                                                      'indigo',
                                                      cache=cache)

    # Start from roscpp and pull in everything it transitively depends on.
    root_packages = {'roscpp'}

    package_names = root_packages.union(
        get_recursive_dependencies(cached_distro, root_packages))

    print(f'Found {len(package_names)} packages.')

    rosinstall_data = generate_rosinstall(cached_distro,
                                          package_names,
                                          flat=True,
                                          tar=True)

    remote_files = []

    for rosinstall_pkg in rosinstall_data:
        name = rosinstall_pkg['tar']['local-name']
        # NOTE(review): assumes every tarball URL has a '.zip' sibling
        # (true for GitHub archive URLs) - confirm for other hosts.
        url = rosinstall_pkg['tar']['uri'].replace('.tar.gz', '.zip')
        print(name, url)

        # Fetch tarball to get its sha1sum
        r = requests.get(url)
        r.raise_for_status()
        sha1sum = hashlib.sha1(r.content).hexdigest()

        remote_files.append({
            'name': name,
            'url': url,
            'sha1': sha1sum,
        })

    sh.mkdir('-p', 'ros/rosdistro')

    # Save BUCK file with remote_file rules
    with open('ros/rosdistro/BUCK', 'w') as out_f:
        for rf in remote_files:
            s = f"""remote_file(
  name = '{rf['name']}.zip',
  url = '{rf['url']}',
  sha1 = '{rf['sha1']}',
  visibility = ['PUBLIC'],
)
"""
            out_f.write(s)

    # Save DEFS file with the list of tarballs
    with open('ros/rosdistro/DEFS', 'w') as out_f:
        out_f.write("rosdistro_tarballs = [\n{}\n]".format('\n'.join([
            f"  '//ros/rosdistro:{rf['name']}.zip'," for rf in remote_files
        ])))
def test_get_index_v3():
    """'foo' is the only distro in the v3 index and its two files merge cleanly."""
    url = 'file://' + FILES_DIR + '/index_v3.yaml'
    index = get_index(url)
    distro_names = list(index.distributions.keys())
    assert len(distro_names) == 1
    assert 'foo' in distro_names

    files = get_distribution_files(index, 'foo')
    assert len(files) == 2
    get_distribution_file(index, 'foo')
def check_sync_criteria(config_url, rosdistro_name, release_build_name,
                        os_code_name, arch, cache_dir):
    """Decide whether a sync may proceed.

    Verifies that every whitelisted sync package has a binary in the
    target debian repository and that the overall binary count reaches
    the configured minimum. Returns True when all criteria hold.
    """
    # Resolve configuration, distribution file and the release build file.
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, rosdistro_name)
    build_file = get_release_build_files(config, rosdistro_name)[release_build_name]

    target = Target('ubuntu', os_code_name, arch)

    repo_index = get_debian_repo_index(build_file.target_repository, target,
                                       cache_dir)

    # Map each release package matching the build file to whether its
    # debian binary exists in the repository index.
    binary_packages = {}
    candidate_names = build_file.filter_packages(dist_file.release_packages.keys())
    for pkg_name in sorted(candidate_names):
        deb_name = get_debian_package_name(rosdistro_name, pkg_name)
        binary_packages[pkg_name] = deb_name in repo_index

    # Whitelist criterion: every sync package needs an available binary.
    if build_file.sync_packages:
        missing_binary_packages = [
            pkg_name for pkg_name in build_file.sync_packages
            if pkg_name not in binary_packages or not binary_packages[pkg_name]
        ]
        if missing_binary_packages:
            print('The following binary packages are missing to sync:',
                  file=sys.stderr)
            for pkg_name in sorted(missing_binary_packages):
                print('-', pkg_name, file=sys.stderr)
            return False
        print('All required binary packages are available:')
        for pkg_name in sorted(build_file.sync_packages):
            print('-', pkg_name)

    # Count criterion: enough binaries available overall.
    if build_file.sync_package_count is not None:
        binary_package_count = sum(
            1 for has_binary in binary_packages.values() if has_binary)
        if binary_package_count < build_file.sync_package_count:
            print('Only %d binary packages available ' % binary_package_count +
                  '(at least %d are required to sync)' %
                  build_file.sync_package_count,
                  file=sys.stderr)
            return False
        print('%d binary packages available ' % binary_package_count +
              '(more or equal then the configured sync limit of %d)' %
              build_file.sync_package_count)

    return True
Esempio n. 41
0
def main(argv=sys.argv[1:]):
    """Create a workspace from vcs .repos files and/or rosdistro repository names.

    Repository names are resolved against the rosdistro distribution file
    and written to an intermediate .repos file; then all .repos files are
    imported into <workspace-root>/src and an exact export is produced.
    """
    parser = argparse.ArgumentParser(
        description='Create a workspace from vcs repos files.')
    add_argument_rosdistro_name(parser)
    add_argument_repos_file_urls(parser)
    add_argument_repository_names(parser, optional=True)
    add_argument_test_branch(parser)
    parser.add_argument('--workspace-root',
                        help='The path of the desired workspace',
                        required=True)
    args = parser.parse_args(argv)

    # At least one source of repositories is required.
    assert args.repos_file_urls or args.repository_names

    ensure_workspace_exists(args.workspace_root)

    repos_files = []
    if args.repository_names:
        with Scope('SUBSECTION', 'get repository information from rosdistro'):
            index = get_index(get_index_url())
            dist = get_distribution(index, args.rosdistro_name)
            data = {}
            for repo_name in args.repository_names:
                repo = dist.repositories[repo_name]
                src_repo = repo.source_repository
                repo_data = {
                    'type': src_repo.type,
                    'url': src_repo.url,
                }
                if src_repo.version is not None:
                    repo_data['version'] = src_repo.version
                data[repo_name] = repo_data
            # Persist the resolved repositories as a .repos file so the
            # import step below can treat all inputs uniformly.
            repos_file = os.path.join(args.workspace_root,
                                      'repositories-from-rosdistro.repos')
            with open(repos_file, 'w') as h:
                h.write(
                    yaml.safe_dump({'repositories': data},
                                   default_flow_style=False))
            repos_files.append(repos_file)

    with Scope('SUBSECTION', 'fetch repos files(s)'):
        for repos_file_url in args.repos_file_urls:
            repos_file = os.path.join(args.workspace_root,
                                      os.path.basename(repos_file_url))
            print('Fetching \'%s\' to \'%s\'' % (repos_file_url, repos_file))
            urlretrieve(repos_file_url, repos_file)
            repos_files += [repos_file]

    with Scope('SUBSECTION', 'import repositories'):
        source_space = os.path.join(args.workspace_root, 'src')
        for repos_file in repos_files:
            print('Importing repositories from \'%s\'' % (repos_file))
            import_repositories(source_space, repos_file, args.test_branch)

    with Scope('SUBSECTION', 'vcs export --exact'):
        # if a repo has been rebased against the default branch vcs can't detect the remote
        export_repositories(args.workspace_root, check=not args.test_branch)
Esempio n. 42
0
def get_sources(rosdistro_index_url, rosdistro_name, pkg_name, os_name,
                os_code_name, sources_dir):
    """Clone the GBP source tag of a released package into *sources_dir*.

    :returns: an error message string on failure, None on success
    :raises RuntimeError: when the cloned changelog version does not match
      the version recorded in the distribution file
    """
    from rosdistro import get_cached_distribution
    from rosdistro import get_index
    index = get_index(rosdistro_index_url)
    dist_file = get_cached_distribution(index, rosdistro_name)
    if pkg_name not in dist_file.release_packages:
        return 'Not a released package name: %s' % pkg_name

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]
    if not repo.release_repository.version:
        return "Repository '%s' has no release version" % repo_name

    pkg_version = repo.release_repository.version
    tag = _get_source_tag(rosdistro_name, pkg_name, pkg_version, os_name,
                          os_code_name)

    cmd = [
        'git',
        'clone',
        '--branch',
        tag,
        # fetch all branches and tags but no history
        '--depth',
        '1',
        '--no-single-branch',
        repo.release_repository.url,
        sources_dir
    ]

    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd)

    # ensure that the package version is correct
    # Accept pkg_version plus a non-digit suffix (e.g. a debian increment);
    # a digit immediately after the prefix would mean a different upstream
    # version that merely shares a textual prefix.
    source_version = dpkg_parsechangelog(sources_dir, ['Version'])[0]
    if not source_version.startswith(pkg_version) or \
            (len(source_version) > len(pkg_version) and
             source_version[len(pkg_version)] in '0123456789'):
        raise RuntimeError(
            ('The cloned package version from the GBP (%s) does not match ' +
             'the expected package version from the distribution file (%s)') %
            (source_version, pkg_version))

    # output package version for job description
    print("Package '%s' version: %s" % (pkg_name, source_version))

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(sources_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
Esempio n. 43
0
def get_all_distribution_filenames(url=None):
    """Return absolute local paths of every distribution file in the index.

    :param url: index URL; defaults to the standard rosdistro index
    """
    index = rosdistro.get_index(url or rosdistro.get_index_url())
    return [
        os.path.abspath(urlparse(entry['distribution']).path)
        for entry in index.distributions.values()
    ]
Esempio n. 44
0
def init_environment():
    """Initialise the module-level rosdep/rosdistro state used by later queries.

    Reads the globals os_name, os_version and rdistro; populates ctx,
    os_installers, default_os_installer, rindex, dist_data, rcache and
    rview for the rest of the module.
    """
    global os_name, os_version, rdistro, ctx, os_installers, default_os_installer, dist_data, rindex, rcache, rview

    ctx = create_default_installer_context()
    os_installers = ctx.get_os_installer_keys(os_name)
    default_os_installer = ctx.get_default_os_installer_key(os_name)
    rindex = get_index(get_index_url())
    dist_data = _get_dist_file_data(rindex, rdistro, 'distribution')
    rcache = get_distribution(rindex, rdistro)
    # NOTE(review): the trailing False presumably disables an update/cache
    # behaviour - confirm against the get_catkin_view signature.
    rview = get_catkin_view(rdistro, os_name, os_version, False)
Esempio n. 45
0
def get_eol_distribution_filenames(url=None):
    """Return local paths of distribution files for end-of-life distros only."""
    index = rosdistro.get_index(url or rosdistro.get_index_url())
    filenames = []
    for name, entry in index.distributions.items():
        if name not in EOL_DISTROS:
            continue
        filenames.append(
            os.path.abspath(urlparse(entry['distribution']).path))
    return filenames
def test_build_caches():
    """Build distribution caches for all non-EOL distros in the local index."""
    print("""
Checking if the 'package.xml' files for all packages are fetchable.
If this fails you can run 'rosdistro_build_cache index.yaml' to perform the same check locally.
""")
    index = get_index('file://' + os.path.abspath(INDEX_YAML))
    names = [
        name for name in sorted(index.distributions.keys())
        if name not in eol_distro_names
    ]
    generate_distribution_caches(INDEX_YAML, dist_names=names)
def test_get_index_v3_invalid():
    """The invalid v3 index has two dist files for 'foo'; merging them must fail."""
    url = 'file://' + FILES_DIR + '/index_v3_invalid.yaml'
    i = get_index(url)

    dist_files = get_distribution_files(i, 'foo')
    assert len(dist_files) == 2
    # BUG FIX: the original 'try: f(); assert False / except AssertionError:
    # pass' could never fail - the 'assert False' itself raised the very
    # AssertionError being caught. try/except/else makes a non-raising call
    # fail the test.
    try:
        get_distribution_file(i, 'foo')
    except AssertionError:
        pass
    else:
        raise AssertionError(
            'get_distribution_file() should raise for an invalid index')
Esempio n. 48
0
def test_rosdistro_urls():
    """Every distribution.yaml in the local index must use known-good URLs."""
    index_url = 'file://' + FILES_DIR + '/index.yaml'
    all_ok = True
    for distro_name in get_index(index_url).distributions.keys():
        print("""
Checking if distribution.yaml contains valid urls for known hosting services.
If this fails you can run 'scripts/check_rosdistro_urls.py file://`pwd`/%s %s' to perform the same check locally.
""" % ('index.yaml', distro_name))
        all_ok &= check_rosdistro_urls(index_url, distro_name)
    assert all_ok
Esempio n. 49
0
def test_get_index_v3_invalid():
    """Two dist files exist for 'foo' in the invalid index; merging must raise."""
    url = 'file://' + FILES_DIR + '/index_v3_invalid.yaml'
    i = get_index(url)

    dist_files = get_distribution_files(i, 'foo')
    assert len(dist_files) == 2
    # BUG FIX: previously 'assert False' inside the try raised the same
    # AssertionError the except clause swallowed, so the test always passed.
    raised = False
    try:
        get_distribution_file(i, 'foo')
    except AssertionError:
        raised = True
    assert raised, 'get_distribution_file() should raise for an invalid index'
Esempio n. 50
0
def get_index():
    """Return the rosdistro index, fetching and caching it on first use.

    Aborts via error(..., exit=True) when the index version is outside the
    supported range (only version 2 is accepted).
    """
    global _rosdistro_index
    if _rosdistro_index is None:
        _rosdistro_index = rosdistro.get_index(rosdistro.get_index_url())
        version = _rosdistro_index.version
        if version == 1:
            error("This version of bloom does not support rosdistro version "
                  "'{0}', please use an older version of bloom."
                  .format(version), exit=True)
        if version > 2:
            error("This version of bloom does not support rosdistro version "
                  "'{0}', please update bloom.".format(version), exit=True)
    return _rosdistro_index
def test_rosdistro_urls():
    """Collect the distros whose distribution.yaml has bad URLs and report them all."""
    index_url = 'file://' + FILES_DIR + '/index.yaml'
    broken = []
    for name in get_index(index_url).distributions.keys():
        print("""
Checking if distribution.yaml contains valid urls for known hosting services.
If this fails you can run 'scripts/check_rosdistro_urls.py file://`pwd`/%s %s' to perform the same check locally.
""" % ('index.yaml', name))
        if not check_rosdistro_urls(index_url, name):
            broken.append(name)
    assert not broken, "There were problems with urls in the 'distribution.yaml' file for these distros: %s" % broken
Esempio n. 52
0
def test_get_index_v4():
    """A v4 index carries status/type metadata and two mergeable dist files."""
    index = get_index('file://' + FILES_DIR + '/index_v4.yaml')
    assert len(index.distributions.keys()) == 1
    assert 'foo' in index.distributions.keys()

    metadata = index.distributions['foo']
    assert metadata['distribution_status'] == 'active'
    assert metadata['distribution_type'] == 'ros1'

    assert len(get_distribution_files(index, 'foo')) == 2
    # Merging must succeed without raising.
    get_distribution_file(index, 'foo')
 def get_distro(self, distro):
     """Return the distribution file for *distro*, fetching it once and caching it.

     Prints a diagnostic and re-raises on failure.
     """
     if self.__distribution is None:
         try:
             index = get_index(get_index_url())
             self.__distribution = get_distribution_file(
                 index,
                 distro
             )
         except Exception:
             # BUG FIX: was a Python 2 'print' statement (SyntaxError on
             # Python 3) inside a bare 'except:'. Use the print() function
             # and catch Exception so SystemExit/KeyboardInterrupt pass
             # through untouched; the error is still re-raised.
             print("failed to get data about repo %s in distribution %s" % (self.repo_name, self.distro_name))
             raise
     return self.__distribution
Esempio n. 54
0
def scrape_for_release_message_packages(track):
    """Return name/version dicts for hydro packages that depend on message generation.

    :param track: accepted for interface compatibility but not used here
    :returns: list of {'name': ..., 'version': ...} dicts
    """
    url = rosdistro.get_index_url()
    index = rosdistro.get_index(url)
    cache = rosdistro.get_release_cache(index, 'hydro')
    packages = []
    # BUG FIX: dict.iteritems() does not exist on Python 3; items() works
    # on both Python 2 and 3.
    for package_name, package_string in cache.package_xmls.items():
        package = catkin_pkg.package.parse_package_string(package_string)
        if has_build_depend_on_message_generation(package):
            packages.append({'name': package_name, 'version': package.version})
    return packages
def generate_deb_status_table(package, rosdistro_from, rosdistro_to):
    """Print a Markdown table of build.ros.org build-status badges for *package*.

    Covers every distro whose first letter lies between *rosdistro_from*
    and *rosdistro_to* (inclusive); one row per architecture, one column
    per distro/OS combination.
    """
    DISTROS = collections.OrderedDict()
    rosdistro_index = get_index(get_index_url())
    for distro in sorted(rosdistro_index.distributions.keys()):
        distribution_files = get_distribution_files(rosdistro_index, distro)
        if len(distribution_files) > 1:
            # BUG FIX: typo 'entories' corrected to 'entries'.
            sys.stderr.write('distribution_files has multiple entries {}\n'.format(distribution_files))
            sys.exit(1)
        DISTROS[distro] = distribution_files[0].release_platforms['ubuntu']

    table = []
    for bit, arch in zip(['v8', 'hf', '32', '64'],
                         ['arm64', 'armhf', 'i386', 'amd64']):
        if not table:  # first row
            headers = ['Package']
        row = ['{} ({})'.format(package, arch)]
        for distro, os_list in DISTROS.items():
            # Keep only distros inside the requested alphabetical range.
            if not (ord(rosdistro_from) <= ord(distro[0]) <=
                    ord(rosdistro_to)):
                continue

            # BUG FIX: loop variable renamed from 'os' to 'os_name' so the
            # stdlib 'os' module is no longer shadowed inside this function.
            for os_name in os_list:
                if arch.startswith('arm'):
                    if os_name == 'xenial':
                        os_arch = 'ux{bit}_u'.format(bit=bit)
                    else:
                        os_arch = 'arm_u'
                else:
                    os_arch = 'u'

                if not table:  # first row
                    headers.append(
                        '{} ({})'.format(distro.capitalize(),
                                         os_name.capitalize()))

                url = 'http://build.ros.org/job/{prefix_ros}bin_{os_arch}{prefix_os}{bit}__{package}__ubuntu_{os}_{arch}__binary'  # NOQA
                url = url.format(
                    bit=bit,
                    arch=arch,
                    os_arch=os_arch,
                    prefix_os=os_name[0].upper(),
                    prefix_ros=distro[0].upper(),
                    package=package,
                    os=os_name,
                )
                template_md = '[![Build Status]({url}/badge/icon)]({url})'
                row.append(template_md.format(url=url))
        table.append(row)

    print(tabulate.tabulate(table, headers=headers, tablefmt='pipe'))
Esempio n. 56
0
def test_build_caches():
    with Fold():
        print('Checking that the index.yaml and index-v4.yaml files contain '
              'the same information expect additional metadata in the v4.')
        index_v3 = get_index('file://' + os.path.abspath(INDEX_V3_YAML))
        index_v4 = get_index('file://' + os.path.abspath(INDEX_V4_YAML))

        dist_names_v3 = list(sorted(index_v3.distributions.keys()))
        dist_names_v4 = list(sorted(index_v4.distributions.keys()))
        assert dist_names_v3 == dist_names_v4, \
            'Different set of distribution names'

        for dist_name in dist_names_v3:
            dist_v3_data = index_v3.distributions[dist_name]
            dist_v4_data = index_v4.distributions[dist_name]

            for key, value in dist_v3_data.items():
                assert key in dist_v4_data, \
                    "For distribution '%s' index.yaml contains the key '%s' " \
                    "but v4 doesn't contain it" % (dist_name, key)
                assert dist_v4_data[key] == value, \
                    "For distribution '%s' both yaml files contains the key " \
                    "'%s' but with different values" % (dist_name, key)