Example #1
def test_build_caches():
    with Fold():
        print(
            """Checking if the 'package.xml' files for all packages are fetchable.
If this fails you can run 'rosdistro_build_cache index.yaml' to perform the same check locally.
""")
        index = 'file://' + os.path.abspath(INDEX_YAML)
        index = get_index(index)
        dist_names = sorted(index.distributions.keys())
        dist_names = [n for n in dist_names if n not in eol_distro_names]

        errors = []
        caches = OrderedDict()
        for dist_name in dist_names:
            with Fold():
                try:
                    cache = generate_distribution_cache(index, dist_name)
                except RuntimeError as e:
                    errors.append(str(e))
                else:
                    # only record the cache when generation succeeded, so a failed
                    # distribution cannot store a stale or unbound 'cache'
                    caches[dist_name] = cache

        # also check topological order to prevent circular dependencies
        for dist_name, cache in caches.items():
            pkgs = {}
            for pkg_name, pkg_xml in cache.release_package_xmls.items():
                pkgs[pkg_name] = parse_package_string(pkg_xml)
            try:
                topological_order_packages(pkgs)
            except RuntimeError as e:
                errors.append('%s: %s' % (dist_name, e))

        if errors:
            raise RuntimeError('\n'.join(errors))
Example #2
def test_build_caches():
    with Fold():
        print("""Checking if the 'package.xml' files for all packages are fetchable.
If this fails you can run 'rosdistro_build_cache index.yaml' to perform the same check locally.
""")
        index = 'file://' + os.path.abspath(INDEX_YAML)
        index = get_index(index)
        dist_names = sorted(index.distributions.keys())
        dist_names = [n for n in dist_names if n not in eol_distro_names]

        errors = []
        caches = OrderedDict()
        for dist_name in dist_names:
            with Fold():
                try:
                    cache = generate_distribution_cache(index, dist_name)
                except RuntimeError as e:
                    errors.append(str(e))
                else:
                    # only record the cache when generation succeeded, so a failed
                    # distribution cannot store a stale or unbound 'cache'
                    caches[dist_name] = cache

        # also check topological order to prevent circular dependencies
        for dist_name, cache in caches.items():
            pkgs = {}
            print("Parsing manifest files for '%s'" % dist_name)
            for pkg_name, pkg_xml in cache.release_package_xmls.items():
                pkgs[pkg_name] = parse_package_string(pkg_xml)
            print("Order all packages in '%s' topologically" % dist_name)
            try:
                topological_order_packages(pkgs)
            except RuntimeError as e:
                errors.append('%s: %s' % (dist_name, e))

        if errors:
            raise RuntimeError('\n'.join(errors))
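The test_build_caches snippets above are shown without their module preamble. Below is a minimal sketch of the imports and helpers they appear to rely on; the Fold stub, the INDEX_YAML value, and the eol_distro_names list are assumptions about the surrounding test module, and the module paths for generate_distribution_cache and friends follow the usual rosdistro/catkin_pkg layout rather than anything stated in the examples.

import os
from collections import OrderedDict

from catkin_pkg.package import parse_package_string
from catkin_pkg.topological_order import topological_order_packages
from rosdistro import get_index
from rosdistro.distribution_cache_generator import generate_distribution_cache

# Assumptions about the surrounding test module:
INDEX_YAML = 'index.yaml'   # rosdistro index file under test
eol_distro_names = []       # end-of-life distributions to skip


class Fold(object):
    # No-op stand-in for the log-folding helper used in the snippets;
    # the real helper only groups output in CI logs.
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        return False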
Example #3
def main():
    rosdistro_index = rosdistro.get_index(ROSDISTRO_URL)

    cache = generate_distribution_cache(rosdistro_index, 'indigo')
    cached_distro = rosdistro.get_cached_distribution(rosdistro_index,
                                                      'indigo',
                                                      cache=cache)

    root_packages = {'roscpp'}

    package_names = root_packages.union(
        get_recursive_dependencies(cached_distro, root_packages))

    print(f'Found {len(package_names)} packages.')

    rosinstall_data = generate_rosinstall(cached_distro,
                                          package_names,
                                          flat=True,
                                          tar=True)

    remote_files = []

    for rosinstall_pkg in rosinstall_data:
        name = rosinstall_pkg['tar']['local-name']
        url = rosinstall_pkg['tar']['uri'].replace('.tar.gz', '.zip')
        print(name, url)

        # Fetch the archive (now a .zip) to compute its sha1sum
        r = requests.get(url)
        r.raise_for_status()
        sha1sum = hashlib.sha1(r.content).hexdigest()

        remote_files.append({
            'name': name,
            'url': url,
            'sha1': sha1sum,
        })

    sh.mkdir('-p', 'ros/rosdistro')

    # Save BUCK file with remote_file rules
    with open('ros/rosdistro/BUCK', 'w') as out_f:
        for rf in remote_files:
            s = f"""remote_file(
  name = '{rf['name']}.zip',
  url = '{rf['url']}',
  sha1 = '{rf['sha1']}',
  visibility = ['PUBLIC'],
)
"""
            out_f.write(s)

    # Save DEFS file with the list of tarballs
    with open('ros/rosdistro/DEFS', 'w') as out_f:
        out_f.write("rosdistro_tarballs = [\n{}\n]".format('\n'.join([
            f"  '//ros/rosdistro:{rf['name']}.zip'," for rf in remote_files
        ])))
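Example #3's main() likewise omits its imports and constants. The sketch below lists plausible origins for the helpers it calls; ROSDISTRO_URL and the rosinstall_generator module path are assumptions, not something the example itself confirms.

import hashlib

import requests
import rosdistro
import sh  # the 'sh' subprocess wrapper, used here for 'mkdir -p'
from rosdistro.distribution_cache_generator import generate_distribution_cache
from rosinstall_generator.distro import generate_rosinstall, get_recursive_dependencies

# Assumption: fall back to the default public rosdistro index URL.
ROSDISTRO_URL = rosdistro.get_index_url()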
Example #4
def test_build_caches():
    with Fold():
        print("""Checking if the 'package.xml' files for all packages are fetchable.
If this fails you can run 'rosdistro_build_cache index.yaml' to perform the same check locally.
""")
        index = 'file://' + os.path.abspath(INDEX_YAML)
        index = get_index(index)
        dist_names = sorted(index.distributions.keys())
        dist_names = [n for n in dist_names if n not in eol_distro_names]

        errors = []
        for dist_name in dist_names:
            with Fold():
                try:
                    generate_distribution_cache(index, dist_name)
                except RuntimeError as e:
                    errors.append(str(e))
        if errors:
            raise RuntimeError('\n'.join(errors))
Example #5
def test_build_caches():
    with Fold():
        print(
            """Checking if the 'package.xml' files for all packages are fetchable.
If this fails you can run 'rosdistro_build_cache index.yaml' to perform the same check locally.
""")
        index = 'file://' + os.path.abspath(INDEX_YAML)
        index = get_index(index)
        dist_names = sorted(index.distributions.keys())
        dist_names = [n for n in dist_names if n not in eol_distro_names]

        errors = []
        caches = OrderedDict()
        for dist_name in dist_names:
            with Fold():
                try:
                    cache = generate_distribution_cache(index, dist_name)
                except RuntimeError as e:
                    errors.append(str(e))
                else:
                    caches[dist_name] = cache

        # also check topological order to prevent circular dependencies
        for dist_name, cache in caches.items():
            pkgs = {}
            print("Parsing manifest files for '%s'" % dist_name)
            for pkg_name, pkg_xml in cache.release_package_xmls.items():
                # Collect parsing warnings and fail if version conventions are not respected
                warnings = []
                pkgs[pkg_name] = parse_package_string(pkg_xml,
                                                      warnings=warnings)
                for warning in warnings:
                    if 'version conventions' in warning:
                        errors.append('%s: %s' % (pkg_name, warning))
                    else:
                        print('%s: WARNING: %s' % (pkg_name, warning))
            print("Order all packages in '%s' topologically" % dist_name)
            try:
                topological_order_packages(pkgs)
            except RuntimeError as e:
                errors.append('%s: %s' % (dist_name, e))

        if errors:
            raise RuntimeError('\n'.join(errors))