def generate_html(index, distro_names, start_time, template_file, resource_path):
    headers = ['Repo', 'Maintainer'] + [d[0].upper() + d[1:].lower() for d in distro_names]
    distros = [get_cached_distribution(index, d) for d in distro_names]
    repos = {}

    # collect repository names across all distros and flatten into one list
    repo_names = [d.repositories.keys() for d in distros]
    repo_names = [x for y in repo_names for x in y]

    for repo_name in repo_names:
        row = format_row(repo_name, distros)
        if row:
            repos[repo_name] = row

    rows = []
    for repo_name in sorted(repos.keys()):
        rows.append(repos[repo_name])
    # from here on the template only needs the repository names
    repos = repos.keys()

    resource_hashes = get_resource_hashes()

    output = StringIO()
    interpreter = em.Interpreter(output=output)
    try:
        # expand the empy template; it reads its variables from locals()
        with open(template_file) as f:
            interpreter.file(f, locals=locals())
        return output.getvalue()
    finally:
        interpreter.shutdown()
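
For context, a minimal empy sketch (assuming em above is the empy templating package, as the em.Interpreter usage suggests; the template string and variable names here are illustrative):

import em
from io import StringIO

buf = StringIO()
interp = em.Interpreter(output=buf)
try:
    # '@(...)' expressions in an empy template are evaluated against the
    # supplied locals, just as generate_html hands its locals() to the template.
    interp.string("Columns: @(', '.join(headers))\n",
                  locals={'headers': ['Repo', 'Maintainer']})
finally:
    interp.shutdown()
print(buf.getvalue())  # Columns: Repo, Maintainer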
Example 2
def get_rosdistro(quiet):
    global _rosdistro_cache
    dist = None
    if "ROS_DISTRO" in os.environ:
        distro_id = os.environ["ROS_DISTRO"]
        if distro_id not in _rosdistro_cache:
            try:
                from rosdistro import get_index, get_index_url, get_cached_distribution
                url = get_index_url()
                if not quiet:
                    sys.stderr.write(
                        "catkin_lint: downloading %s package index from %s\n" %
                        (distro_id, url))
                index = get_index(url)
                dist = get_cached_distribution(index,
                                               distro_id,
                                               allow_lazy_load=True)
            except Exception as err:
                if not quiet:
                    sys.stderr.write(
                        "catkin_lint: cannot initialize rosdistro: %s\n" %
                        str(err))
            _rosdistro_cache[distro_id] = dist
        dist = _rosdistro_cache[distro_id]
    return Rosdistro(dist=dist, quiet=quiet)
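
A short usage sketch (hedged: Rosdistro and _rosdistro_cache are defined elsewhere in the same module, and 'noetic' is only an illustrative distribution name):

import os

os.environ.setdefault("ROS_DISTRO", "noetic")  # illustrative value
first = get_rosdistro(quiet=False)   # downloads the index and fills the cache
second = get_rosdistro(quiet=False)  # served from _rosdistro_cache, no download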
Example 3
def get_manifest_from_rosdistro(package_name, distro_name):
    """
    Get the rosdistro repository data and package information.

    @param package_name: name of the package or repository to get manifest
    information for; package names take precedence over repository names.
    @type  package_name: str
    @param distro_name: name of ROS distribution
    @type  distro_name: str

    @return: (manifest data, type, None), where type is 'package',
    'metapackage', or 'repository'; None if the data cannot be resolved.
    @rtype: ({str: str}, str, None)
    @raise IOError: if data cannot be loaded
    """
    data = {}
    type_ = None
    index = get_index(get_index_url())
    try:
        distribution_cache = get_cached_distribution(index, distro_name)
    except RuntimeError as runerr:
        if str(runerr).startswith("Unknown release"):
            return None
        raise

    if package_name in distribution_cache.release_packages:
        pkg = distribution_cache.release_packages[package_name]
        pkg_xml = distribution_cache.get_release_package_xml(package_name)
        pkg_manifest = parse_package_string(pkg_xml)
        data['description'] = pkg_manifest.description
        website_url = [u.url for u in pkg_manifest.urls if u.type == 'website']
        if website_url:
            data['url'] = website_url[0]
        repo_name = pkg.repository_name
        meta_export = [exp for exp in pkg_manifest.exports if exp.tagname == 'metapackage']
        if meta_export:
            type_ = 'metapackage'
        else:
            type_ = 'package'
    else:
        repo_name = package_name
        type_ = 'repository'
    data['repo_name'] = repo_name
    if repo_name not in distribution_cache.repositories:
        return None

    release_repo = distribution_cache.repositories[repo_name].release_repository
    if release_repo:
        data['packages'] = release_repo.package_names

    source_repo = distribution_cache.repositories[repo_name].source_repository
    if not source_repo:
        return None
    data['vcs'] = source_repo.type
    data['vcs_uri'] = source_repo.url
    data['vcs_version'] = source_repo.version

    return (data, type_, None)
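
A hedged call sketch (the package and distro names are placeholders; the function returns None whenever the data cannot be resolved):

result = get_manifest_from_rosdistro('roscpp', 'noetic')
if result is not None:
    data, type_, _ = result
    # type_ is 'package', 'metapackage', or 'repository'
    print(type_, data['repo_name'], data.get('vcs_uri'))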
Example 4
def partition_packages(config_url,
                       rosdistro_name,
                       release_build_name,
                       target,
                       cache_dir,
                       deduplicate_dependencies=False,
                       dist_cache=None):
    """Check all packages in the rosdistro and compare to the debian packages repository.

    Return the set of all packages and the set of missing ones.
    """
    # fetch debian package list
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    dist_file = rosdistro_get_distribution_file(index, rosdistro_name)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    # check the current status of the apt repos
    repo_index = get_package_repo_data(build_file.target_repository, [target],
                                       cache_dir)[target]

    # for each release package which matches the release build file
    # check if a binary package exists
    binary_packages = set()
    all_pkg_names = dist_file.release_packages.keys()

    # Remove packages without versions declared.
    def get_package_version(dist_file, pkg_name):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        return repo.release_repository.version

    all_pkg_names = [
        p for p in all_pkg_names if get_package_version(dist_file, p)
    ]

    distribution = get_cached_distribution(index,
                                           rosdistro_name,
                                           cache=dist_cache)
    pkg_names = filter_buildfile_packages_recursively(all_pkg_names,
                                                      build_file, distribution)
    for pkg_name in sorted(pkg_names):
        debian_pkg_name = get_os_package_name(rosdistro_name, pkg_name)
        if debian_pkg_name in repo_index:
            binary_packages.add(pkg_name)

    # determine which of the filtered packages have no binary package yet
    missing_binary_packages = set(pkg_names) - binary_packages

    if deduplicate_dependencies:
        # Do not list missing packages that are dependencies of other missing ones
        cached_pkgs = get_package_manifests(distribution)
        missing_binary_packages = filter_blocked_dependent_package_names(
            cached_pkgs, missing_binary_packages)

    return binary_packages, missing_binary_packages
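
An invocation sketch, with every argument value a placeholder (the Target tuple mirrors the (os_name, os_code_name, arch) pattern used in the other examples here):

target = Target('ubuntu', 'focal', 'amd64')  # placeholder target
built, missing = partition_packages(
    'https://example.com/ros_buildfarm_config/index.yaml',  # placeholder URL
    'noetic', 'default', target,
    cache_dir='/tmp/repo_cache',
    deduplicate_dependencies=True)
print('%d packages have binaries, %d are missing' % (len(built), len(missing)))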
Example 5
def main():
    rosdistro_index = rosdistro.get_index(ROSDISTRO_URL)

    cache = generate_distribution_cache(rosdistro_index, 'indigo')
    cached_distro = rosdistro.get_cached_distribution(rosdistro_index,
                                                      'indigo',
                                                      cache=cache)

    root_packages = {'roscpp'}

    package_names = root_packages.union(
        get_recursive_dependencies(cached_distro, root_packages))

    print(f'Found {len(package_names)} packages.')

    rosinstall_data = generate_rosinstall(cached_distro,
                                          package_names,
                                          flat=True,
                                          tar=True)

    remote_files = []

    for rosinstall_pkg in rosinstall_data:
        name = rosinstall_pkg['tar']['local-name']
        url = rosinstall_pkg['tar']['uri'].replace('.tar.gz', '.zip')
        print(name, url)

        # Fetch tarball to get its sha1sum
        r = requests.get(url)
        r.raise_for_status()
        sha1sum = hashlib.sha1(r.content).hexdigest()

        remote_files.append({
            'name': name,
            'url': url,
            'sha1': sha1sum,
        })

    sh.mkdir('-p', 'ros/rosdistro')

    # Save BUCK file with remote_file rules
    with open('ros/rosdistro/BUCK', 'w') as out_f:
        for rf in remote_files:
            s = f"""remote_file(
  name = '{rf['name']}.zip',
  url = '{rf['url']}',
  sha1 = '{rf['sha1']}',
  visibility = ['PUBLIC'],
)
"""
            out_f.write(s)

    # Save DEFS file with the list of tarballs
    with open('ros/rosdistro/DEFS', 'w') as out_f:
        out_f.write("rosdistro_tarballs = [\n{}\n]".format('\n'.join([
            f"  '//ros/rosdistro:{rf['name']}.zip'," for rf in remote_files
        ])))
Example 6
def get_sources(rosdistro_index_url, rosdistro_name, pkg_name, os_name,
                os_code_name, sources_dir):
    from rosdistro import get_cached_distribution
    from rosdistro import get_index
    index = get_index(rosdistro_index_url)
    dist_file = get_cached_distribution(index, rosdistro_name)
    if pkg_name not in dist_file.release_packages:
        return 'Not a released package name: %s' % pkg_name

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]
    if not repo.release_repository.version:
        return "Repository '%s' has no release version" % repo_name

    pkg_version = repo.release_repository.version
    tag = _get_source_tag(rosdistro_name, pkg_name, pkg_version, os_name,
                          os_code_name)

    cmd = [
        'git',
        'clone',
        '--branch',
        tag,
        # fetch all branches and tags but no history
        '--depth',
        '1',
        '--no-single-branch',
        repo.release_repository.url,
        sources_dir
    ]

    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd)

    # ensure that the package version is correct
    source_version = dpkg_parsechangelog(sources_dir, ['Version'])[0]
    if not source_version.startswith(pkg_version) or \
            (len(source_version) > len(pkg_version) and
             source_version[len(pkg_version)] in '0123456789'):
        raise RuntimeError(
            ('The cloned package version from the GBP (%s) does not match ' +
             'the expected package version from the distribution file (%s)') %
            (source_version, pkg_version))

    # output package version for job description
    print("Package '%s' version: %s" % (pkg_name, source_version))

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(sources_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
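
The version check above guards against a subtle prefix match: string-wise, '1.2.30-1' starts with '1.2.3' even though it is a different upstream version. A small standalone illustration of the same rule:

def version_matches(source_version, pkg_version):
    # Reject a changelog version that is a longer number merely sharing a
    # prefix with the expected version (e.g. 1.2.30 vs 1.2.3), while still
    # accepting a Debian revision suffix such as 1.2.3-1.
    if not source_version.startswith(pkg_version):
        return False
    rest = source_version[len(pkg_version):]
    return not (rest and rest[0] in '0123456789')

assert version_matches('1.2.3-1', '1.2.3')
assert not version_matches('1.2.30-1', '1.2.3')
assert not version_matches('1.2.4-1', '1.2.3')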
Example 7
def get_sources(
        rosdistro_index_url, rosdistro_name, pkg_name, os_name, os_code_name,
        sources_dir):
    from rosdistro import get_cached_distribution
    from rosdistro import get_index
    index = get_index(rosdistro_index_url)
    dist_file = get_cached_distribution(index, rosdistro_name)
    if pkg_name not in dist_file.release_packages:
        return 'Not a released package name: %s' % pkg_name

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]
    if not repo.release_repository.version:
        return "Repository '%s' has no release version" % repo_name

    pkg_version = repo.release_repository.version
    tag = _get_source_tag(
        rosdistro_name, pkg_name, pkg_version, os_name, os_code_name)

    cmd = [
        'git', 'clone',
        '--branch', tag,
        # fetch all branches and tags but no history
        '--depth', '1', '--no-single-branch',
        repo.release_repository.url, sources_dir]

    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd)

    # ensure that the package version is correct
    source_version = dpkg_parsechangelog(sources_dir, ['Version'])[0]
    if not source_version.startswith(pkg_version) or \
            (len(source_version) > len(pkg_version) and
             source_version[len(pkg_version)] in '0123456789'):
        raise RuntimeError(
            ('The cloned package version from the GBP (%s) does not match ' +
             'the expected package version from the distribution file (%s)') %
            (source_version, pkg_version))

    # output package version for job description
    print("Package '%s' version: %s" % (pkg_name, source_version))

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(sources_dir)
    maintainer_emails = set([])
    for m in pkg.maintainers:
        maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
Example 8
def build_release_compare_page(config_url,
                               rosdistro_names,
                               output_dir,
                               copy_resources=False):
    from rosdistro import get_cached_distribution
    from rosdistro import get_index

    start_time = time.time()

    config = get_config_index(config_url)

    index = get_index(config.rosdistro_index_url)

    # get all input data
    distros = [get_cached_distribution(index, d) for d in rosdistro_names]

    pkg_names = [d.release_packages.keys() for d in distros]
    pkg_names = [x for y in pkg_names for x in y]

    pkgs_data = {}
    for pkg_name in pkg_names:
        pkg_data = _compare_package_version(distros, pkg_name)
        if pkg_data:
            pkgs_data[pkg_name] = pkg_data

    template_name = 'status/release_compare_page.html.em'
    data = {
        'title': 'ROS packages in %s' % ' '.join(
            [x.capitalize() for x in rosdistro_names]),
        'start_time': start_time,
        'start_time_local_str': time.strftime(
            '%Y-%m-%d %H:%M:%S %z', time.localtime(start_time)),
        'resource_hashes': get_resource_hashes(),
        'rosdistro_names': rosdistro_names,
        'pkgs_data': pkgs_data,
    }
    html = expand_template(template_name, data)
    output_filename = os.path.join(
        output_dir, 'compare_%s.html' % '_'.join(rosdistro_names))
    print("Generating compare page: '%s'" % output_filename)
    with open(output_filename, 'w') as h:
        h.write(html)

    additional_resources(output_dir, copy_resources=copy_resources)
Example 9
def get_rosdistro(quiet): # pragma: no cover
    dist = None
    if "ROS_DISTRO" in os.environ:
        distro_id = os.environ["ROS_DISTRO"]
        try:
            from rosdistro import get_index, get_index_url, get_cached_distribution
            url = get_index_url()
            if not quiet:
                sys.stderr.write("catkin_lint: downloading %s package index from %s\n" % (distro_id, url))
            index = get_index(url)
            dist = get_cached_distribution(index, distro_id, allow_lazy_load=True)
        except Exception as err:
            if not quiet:
                sys.stderr.write("catkin_lint: cannot initialize rosdistro: %s\n" % str(err))
    return Rosdistro(dist=dist, quiet=quiet)
Example 10

def build_release_compare_page(
        config_url, rosdistro_names,
        output_dir, copy_resources=False):
    from rosdistro import get_cached_distribution
    from rosdistro import get_index

    start_time = time.time()

    config = get_config_index(config_url)

    index = get_index(config.rosdistro_index_url)

    # get all input data
    distros = [get_cached_distribution(index, d) for d in rosdistro_names]

    pkg_names = [d.release_packages.keys() for d in distros]
    pkg_names = [x for y in pkg_names for x in y]

    pkgs_data = {}
    for pkg_name in pkg_names:
        pkg_data = _compare_package_version(distros, pkg_name)
        if pkg_data:
            pkgs_data[pkg_name] = pkg_data

    template_name = 'status/release_compare_page.html.em'
    data = {
        'title': 'ROS packages in %s' % ' '.join(
            [x.capitalize() for x in rosdistro_names]),
        'start_time': start_time,
        'start_time_local_str': time.strftime(
            '%Y-%m-%d %H:%M:%S %z', time.localtime(start_time)),
        'resource_hashes': get_resource_hashes(),
        'rosdistro_names': rosdistro_names,
        'pkgs_data': pkgs_data,
    }
    html = expand_template(template_name, data)
    output_filename = os.path.join(
        output_dir, 'compare_%s.html' % '_'.join(rosdistro_names))
    print("Generating compare page: '%s'" % output_filename)
    with open(output_filename, 'w') as h:
        h.write(html)

    additional_resources(output_dir, copy_resources=copy_resources)
Example 11
    def __init__(self, rosdistro_name):
        self._rosdistro = rosdistro_name
        self._targets = None
        self._index = get_index(get_index_url())
        if self._rosdistro not in self._index.distributions:
            print("Unknown distribution '%s'" % self._rosdistro, file=sys.stderr)
            sys.exit(1)
        self._dist = get_cached_distribution(self._index, self._rosdistro)
        self._build_files = get_release_build_files(self._index, self._rosdistro)

        self._repoinfo = {}
        self._package_in_repo = {}
        for name in self._dist.repositories.keys():
            repo = self._dist.repositories[name].release_repository
            if not repo:
                continue
            self._repoinfo[name] = RepoMetadata(name, repo.url, repo.version)
            self._repoinfo[name].packages = {}
            for pkg_name in repo.package_names:
                pkg = self._dist.release_packages[pkg_name]
                self._repoinfo[name].packages[pkg_name] = pkg.subfolder
                self._package_in_repo[pkg_name] = name
Example 12
    def __init__(self, distro_name, python_version=None):
        index = get_index(get_index_url())
        self._distro = get_cached_distribution(index, distro_name)
        self.distro_name = distro_name
        # set up ROS environments
        if python_version is None:
            python_version = index.distributions[distro_name]["python_version"]
        os.environ["ROS_PYTHON_VERSION"] = "{0}".format(python_version)
        os.environ["ROS_DISTRO"] = "{0}".format(distro_name)
        if "ROS_ROOT" in os.environ:
            os.environ.pop("ROS_ROOT")
        if "ROS_PACKAGE_PATH" in os.environ:
            os.environ.pop("ROS_PACKAGE_PATH")
        self._walker = DependencyWalker(self._distro,
                                        evaluate_condition_context=os.environ)

        # cache distribution type
        self._distribution_type = index.distributions[distro_name][
            "distribution_type"]
        self._python_version = index.distributions[distro_name][
            "python_version"]
        self.build_packages = set()

        os.environ["ROS_VERSION"] = "1" if self.check_ros1() else "2"
Example 13
    valid_comparison_keys = valid_distro_keys[:]
    valid_comparison_keys.remove(distro_key)
    if args.comparison not in valid_comparison_keys:
        print('Invalid rosdistro [%s] selected for comparison to [%s].' % (args.comparison, distro_key),
              file=sys.stderr)
        print('Valid rosdistros are %s.' % valid_comparison_keys, file=sys.stderr)
        exit(-1)
    prev_distro_key = args.comparison
else:
    prev_distro_key = valid_distro_keys[i - 1]

cache = rosdistro.get_distribution_cache(index, distro_key)
distro_file = cache.distribution_file

prev_cache = rosdistro.get_distribution_cache(index, prev_distro_key)
prev_distribution = rosdistro.get_cached_distribution(
    index, prev_distro_key, cache=prev_cache)

prev_distro_file = prev_cache.distribution_file

dependency_walker = DependencyWalker(prev_distribution)

if repo_names_argument is None:
    # Check missing dependencies for packages that were in the previous
    # distribution that have not yet been released in the current distribution
    # Filter repos without a version or a release repository
    repo_names_argument = prev_distro_file.repositories.keys()

prev_repo_names = set(
    repo for repo in repo_names_argument if is_released(repo, prev_distro_file))

keys = distro_file.repositories.keys()
Example 14
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate `manifest.yaml` from released package manifests")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    add_argument_output_dir(parser, required=True)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)
    build_files = get_doc_build_files(config, args.rosdistro_name)
    build_file = build_files[args.doc_build_name]

    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    index = get_index(config.rosdistro_index_url)
    distribution = get_cached_distribution(index, args.rosdistro_name)

    # get rosdistro distribution cache
    # iterate over all released repositories
    # which don't have a doc entry
    # extract information from package.xml and generate manifest.yaml

    repo_names = get_repo_names_with_release_but_no_doc(distribution)
    pkg_names = get_package_names(distribution, repo_names)

    filtered_pkg_names = build_file.filter_packages(pkg_names)

    print("Generate 'manifest.yaml' files for the following packages:")
    api_path = os.path.join(args.output_dir, 'api')
    for pkg_name in sorted(filtered_pkg_names):
        print('- %s' % pkg_name)
        try:
            data = get_metadata(distribution, pkg_name)
        except Exception:
            print('Could not extract meta data:', file=sys.stderr)
            traceback.print_exc(file=sys.stderr)
            continue

        # add devel job urls
        rel_pkg = distribution.release_packages[pkg_name]
        repo_name = rel_pkg.repository_name
        repo = distribution.repositories[repo_name]
        if repo.source_repository and repo.source_repository.version:
            build_files = {}
            for build_name in source_build_files.keys():
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name, repo_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

        # add release job urls
        build_files = {}
        for build_name in release_build_files.keys():
            build_files[build_name] = release_build_files[build_name]
        release_job_urls = get_release_job_urls(
            config.jenkins_url, build_files, args.rosdistro_name, pkg_name)
        if release_job_urls:
            data['release_jobs'] = release_job_urls

        manifest_yaml = os.path.join(api_path, pkg_name, 'manifest.yaml')
        write_manifest_yaml(manifest_yaml, data)

    return 0
Example 15

keys = [
    'ros_core',
    'ros_base',
    'robot',
    'viz',
    'desktop',
    'perception',
    'simulators',
    'desktop_full',
]

# Get packages which make up each layer of the variants
mp_sets = {}
index = get_index(get_index_url())
hydro = get_cached_distribution(index, 'hydro')
indigo = get_cached_distribution(index, 'indigo')
dist_file = get_distribution_file(index, 'hydro')
indigo_dist_file = get_distribution_file(index, 'indigo')
dw = DependencyWalker(hydro)
for mp in keys:
    # print("Fetching deps for: ", mp)
    deps = list(set(metapackages[mp].run_depends))
    mp_sets[mp] = set([])
    for dep in deps:
        mp_sets[mp].update(set([dep.name]))
        if dep.name in keys:
            continue
        # print(" ", dep.name)
        previous_pkgs = set([])
        for mp_, mp_set in mp_sets.items():
Example 16

def _get_blocked_releases_info(config_url, rosdistro_name, repo_names=None):
    import rosdistro
    from rosdistro.dependency_walker import DependencyWalker
    from catkin_pkg.package import InvalidPackage, parse_package_string

    prev_rosdistro_name = None

    config = get_config_index(config_url)

    index = rosdistro.get_index(config.rosdistro_index_url)
    valid_rosdistro_names = list(index.distributions.keys())
    valid_rosdistro_names.sort()
    if rosdistro_name is None:
        rosdistro_name = valid_rosdistro_names[-1]
    print('Checking packages for "%s" distribution' % rosdistro_name)

    # skip distributions with a different type if the information is available
    distro_type = index.distributions[rosdistro_name].get('distribution_type')
    if distro_type is not None:
        valid_rosdistro_names = [
            n for n in valid_rosdistro_names
            if distro_type == index.distributions[n].get('distribution_type')]

    # Find the previous distribution to the current one
    try:
        i = valid_rosdistro_names.index(rosdistro_name)
    except ValueError:
        print('Distribution key not found in list of valid distributions.', file=sys.stderr)
        exit(-1)
    if i == 0:
        print('No previous distribution found.', file=sys.stderr)
        exit(-1)
    prev_rosdistro_name = valid_rosdistro_names[i - 1]

    cache = rosdistro.get_distribution_cache(index, rosdistro_name)
    distro_file = cache.distribution_file

    prev_cache = rosdistro.get_distribution_cache(index, prev_rosdistro_name)
    prev_distribution = rosdistro.get_cached_distribution(
        index, prev_rosdistro_name, cache=prev_cache)

    prev_distro_file = prev_cache.distribution_file

    dependency_walker = DependencyWalker(prev_distribution)

    if repo_names is None:
        # Check missing dependencies for packages that were in the previous
        # distribution that have not yet been released in the current distribution
        # Filter repos without a version or a release repository
        keys = prev_distro_file.repositories.keys()
        prev_repo_names = set(
            repo for repo in keys if _is_released(repo, prev_distro_file))
        repo_names = prev_repo_names
        ignored_inputs = []
    else:
        prev_repo_names = set(
            repo for repo in repo_names if _is_released(repo, prev_distro_file))
        ignored_inputs = list(set(repo_names).difference(prev_repo_names))
        if len(ignored_inputs) > 0:
            print(
                'Ignoring inputs for which repository info not found in previous distribution '
                '(did you list a package instead of a repository?):')
            print('\n'.join(
                sorted('\t{0}'.format(repo) for repo in ignored_inputs)))

    keys = distro_file.repositories.keys()
    current_repo_names = set(
        repo for repo in keys if _is_released(repo, distro_file))

    # Get a list of currently released packages
    current_package_names = set(
        pkg for repo in current_repo_names
        for pkg in distro_file.repositories[repo].release_repository.package_names)

    released_repos = prev_repo_names.intersection(
        current_repo_names)

    unreleased_repos = list(prev_repo_names.difference(
        current_repo_names))

    if len(unreleased_repos) == 0:
        print('All inputs already released in {0}.'.format(
            rosdistro_name))

    repos_info = defaultdict(dict)
    unprocessed_repos = prev_repo_names
    while unprocessed_repos:
        print('Processing repos:\n%s' %
              '\n'.join(['- %s' % r for r in sorted(unprocessed_repos)]))
        new_repos_to_process = set()  # set containing repos that come up while processing others

        for repo_name in unprocessed_repos:
            repos_info[repo_name]['released'] = repo_name in released_repos

            if repo_name in released_repos:
                repo = distro_file.repositories[repo_name]
                version = repo.release_repository.version
                repos_info[repo_name]['version'] = version

            else:
                # Gather info on which required repos have not been released yet
                # Assume dependencies will be the same as in the previous distribution and find
                # which ones have been released
                repo = prev_distro_file.repositories[repo_name]
                release_repo = repo.release_repository
                package_dependencies = set()
                packages = release_repo.package_names
                # Accumulate all dependencies for those packages
                for package in packages:
                    try:
                        package_dependencies |= dependency_walker.get_recursive_depends(
                            package, ['build', 'buildtool', 'run', 'test'], ros_packages_only=True,
                            limit_depth=1)
                    except AssertionError as e:
                        print(e, file=sys.stderr)

                # For all package dependencies, check if they are released yet
                unreleased_pkgs = package_dependencies.difference(
                    current_package_names)
                # Remove the packages which this repo provides
                unreleased_pkgs = unreleased_pkgs.difference(packages)

                # Get maintainer info and repo of unreleased packages
                maintainers = {}
                repos_blocked_by = set()
                for pkg_name in unreleased_pkgs:
                    unreleased_repo_name = \
                        prev_distro_file.release_packages[pkg_name].repository_name
                    repos_blocked_by.add(unreleased_repo_name)
                    pkg_xml = prev_distribution.get_release_package_xml(pkg_name)
                    if pkg_xml is not None:
                        try:
                            pkg = parse_package_string(pkg_xml)
                        except InvalidPackage:
                            pass
                        else:
                            pkg_maintainers = {m.name: m.email for m in pkg.maintainers}
                            if unreleased_repo_name not in maintainers:
                                maintainers[unreleased_repo_name] = {}
                            maintainers[unreleased_repo_name].update(pkg_maintainers)
                if maintainers:
                    repos_info[repo_name]['maintainers'] = maintainers

                repos_info[repo_name]['repos_blocked_by'] = {}
                for blocking_repo_name in repos_blocked_by:
                    # Get url of blocking repos
                    repo_url = None
                    blocking_repo = prev_distro_file.repositories[blocking_repo_name]
                    if blocking_repo.source_repository:
                        repo_url = blocking_repo.source_repository.url
                    elif blocking_repo.doc_repository:
                        repo_url = blocking_repo.doc_repository.url
                    repos_info[repo_name]['repos_blocked_by'].update(
                        {blocking_repo_name: repo_url})

                    # Mark blocking relationship in other direction
                    if blocking_repo_name not in repos_info:
                        new_repos_to_process.add(blocking_repo_name)
                        repos_info[blocking_repo_name] = {}
                    if 'repos_blocking' not in repos_info[blocking_repo_name]:
                        repos_info[blocking_repo_name]['repos_blocking'] = set([])
                    repos_info[blocking_repo_name]['repos_blocking'].add(repo_name)

            # Get url of repo
            repo_url = None
            if repo.source_repository:
                repo_url = repo.source_repository.url
            elif repo.doc_repository:
                repo_url = repo.doc_repository.url
            if repo_url:
                repos_info[repo_name]['url'] = repo_url

            new_repos_to_process.discard(repo_name)  # this repo has been fully processed now

        for repo_name in repos_info.keys():
            # Recursively get all repos being blocked by this repo
            recursive_blocks = set([])
            repos_to_check = set([repo_name])
            while repos_to_check:
                next_repo_to_check = repos_to_check.pop()
                blocks = repos_info[next_repo_to_check].get('repos_blocking', set([]))
                new_blocks = blocks - recursive_blocks
                repos_to_check |= new_blocks
                recursive_blocks |= new_blocks
            if recursive_blocks:
                repos_info[repo_name]['recursive_repos_blocking'] = recursive_blocks
        unprocessed_repos = new_repos_to_process

    return repos_info
Example 17
        data['column_label'] = column_label.format(**data)
        data['view_url'] = JENKINS_HOST + '/view/%s/' % \
            view_name.format(**data)

        if is_source:
            job_name = 'ros-{rosdistro}-{{pkg}}_sourcedeb'
        else:
            data['arch'] = arch
            job_name = 'ros-{rosdistro}-{{pkg}}_binarydeb_{distro}_{arch}'
        data['job_url'] = ('{view_url}job/%s/' % job_name).format(**data)

        return data

    if args.rosdistro != 'fuerte':
        index = get_index(get_index_url())
        cached_distribution = get_cached_distribution(index, args.rosdistro)
    else:
        cached_distribution = None

    print('Transforming .csv into .html file...')
    template_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'resources', 'status_page.html.em')
    with open(csv_file, 'r') as f:
        html = transform_csv_to_html(f, metadata_builder, args.rosdistro,
                                     start_time, template_file, args.resources, cached_distribution)
    html_file = os.path.join(args.basedir, '%s.html' % args.rosdistro)
    with open(html_file, 'w') as f:
        f.write(html)

    print('Symlinking js and css...')
    for res in ['js', 'css']:
        dst = os.path.join(args.basedir, res)
Example 18
def build_release_status_page(config_url,
                              rosdistro_name,
                              release_build_name,
                              cache_dir,
                              output_dir,
                              copy_resources=False):
    from rosdistro import get_cached_distribution
    from rosdistro import get_index

    start_time = time.time()

    config = get_config_index(config_url)
    release_build_files = get_release_build_files(config, rosdistro_name)
    build_file = release_build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    targets = []
    for os_name in sorted(build_file.targets.keys()):
        if os_name not in ['debian', 'fedora', 'rhel', 'ubuntu']:
            continue
        for os_code_name in sorted(build_file.targets[os_name].keys()):
            targets.append(Target(os_name, os_code_name, 'source'))
            for arch in sorted(build_file.targets[os_name][os_code_name]):
                targets.append(Target(os_name, os_code_name, arch))
    if not targets:
        print('The build file contains no supported targets', file=sys.stderr)
        return
    print('The build file contains the following targets:')
    for _, os_code_name, arch in targets:
        print('  - %s %s' % (os_code_name, arch))

    # get all input data
    dist = get_cached_distribution(index, rosdistro_name)

    rosdistro_info = get_rosdistro_info(dist, build_file)

    # derive testing and main urls from building url
    building_repo_url = build_file.target_repository
    base_url = os.path.dirname(building_repo_url)
    testing_repo_url = os.path.join(base_url, 'testing')
    main_repo_url = os.path.join(base_url, 'main')

    building_repo_data = get_package_repo_data(building_repo_url, targets,
                                               cache_dir)
    testing_repo_data = get_package_repo_data(testing_repo_url, targets,
                                              cache_dir)
    main_repo_data = get_package_repo_data(main_repo_url, targets, cache_dir)

    repos_data = [building_repo_data, testing_repo_data, main_repo_data]

    # compute derived attributes
    package_descriptors = get_rosdistro_package_descriptors(
        rosdistro_info, rosdistro_name)

    affected_by_sync = get_affected_by_sync(package_descriptors, targets,
                                            testing_repo_data, main_repo_data)

    regressions = get_regressions(package_descriptors, targets,
                                  building_repo_data, testing_repo_data,
                                  main_repo_data)

    version_status = get_version_status(package_descriptors,
                                        targets,
                                        repos_data,
                                        strip_version=True)

    homogeneous = get_homogeneous(package_descriptors, targets, repos_data)

    package_counts = get_package_counts(package_descriptors, targets,
                                        repos_data)

    jenkins_job_urls = get_jenkins_job_urls(rosdistro_name, config.jenkins_url,
                                            release_build_name, targets)

    # generate output
    repo_urls = [building_repo_url, testing_repo_url, main_repo_url]
    repo_names = get_url_names(repo_urls)

    ordered_pkgs = []
    for pkg_name in sorted(rosdistro_info.keys()):
        ordered_pkgs.append(rosdistro_info[pkg_name])

    template_name = 'status/release_status_page.html.em'
    data = {
        'title': 'ROS packages for %s' % rosdistro_name.capitalize(),
        'start_time': start_time,
        'start_time_local_str': time.strftime(
            '%Y-%m-%d %H:%M:%S %z', time.localtime(start_time)),
        'resource_hashes': get_resource_hashes(),
        'repo_names': repo_names,
        'repo_urls': repo_urls,
        'has_repository_column': True,
        'has_status_column': True,
        'has_maintainer_column': True,
        'ordered_pkgs': ordered_pkgs,
        'targets': targets,
        'short_arches': {t.arch: get_short_arch(t.arch) for t in targets},
        'short_code_names': {
            t.os_code_name: get_short_os_code_name(t.os_code_name)
            for t in targets},
        'repos_data': repos_data,
        'affected_by_sync': affected_by_sync,
        'homogeneous': homogeneous,
        'jenkins_job_urls': jenkins_job_urls,
        'package_counts': package_counts,
        'regressions': regressions,
        'version_status': version_status,
    }
    html = expand_template(template_name, data)
    output_filename = os.path.join(
        output_dir, 'ros_%s_%s.html' % (rosdistro_name, release_build_name))
    print("Generating status page '%s':" % output_filename)
    with open(output_filename, 'w') as h:
        h.write(html)

    additional_resources(output_dir, copy_resources=copy_resources)

    yaml_folder = os.path.join(output_dir, 'yaml')
    if not os.path.exists(yaml_folder):
        os.mkdir(yaml_folder)

    yaml_filename = os.path.join(
        yaml_folder, 'ros_%s_%s.yaml' % (rosdistro_name, release_build_name))
    write_yaml(yaml_filename, ordered_pkgs, repos_data)
Example 19
def _get_blocked_source_entries_info(config_url, rosdistro_name):
    from rosdistro import get_cached_distribution
    from rosdistro import get_index
    from rosdistro.dependency_walker import DependencyWalker

    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)

    print('Getting blocked source entries for', rosdistro_name)

    try:
        prev_rosdistro_name = _prev_rosdistro(index, rosdistro_name)
    except ValueError as e:
        print(e.args[0], file=sys.stderr)
        exit(-1)

    print('Comparing', rosdistro_name, 'with', prev_rosdistro_name)

    dist = get_cached_distribution(index, rosdistro_name)
    prev_dist = get_cached_distribution(index, prev_rosdistro_name)

    prev_walker = DependencyWalker(prev_dist)

    prev_released_repos = set(_released_repos(prev_dist))
    source_entry_repos = set(_source_entry_repos(dist))
    missing_repos = prev_released_repos.difference(source_entry_repos)

    # Assume repos will provide the same packages as previous distro
    missing_packages = set(_released_packages(prev_dist, missing_repos))

    repos_info = defaultdict(dict)

    # Give all repos some basic info
    for repo_name in prev_released_repos.union(source_entry_repos).union(
            missing_repos):
        repos_info[repo_name]['url'] = ''
        repos_info[repo_name]['repos_blocking'] = set()
        repos_info[repo_name]['recursive_repos_blocking'] = set()
        repos_info[repo_name]['released'] = False
        repos_info[repo_name]['version'] = 'no'
        repos_info[repo_name]['repos_blocked_by'] = {}
        repos_info[repo_name]['maintainers'] = defaultdict(dict)

    for repo_name in prev_released_repos:
        repos_info[repo_name]['url'] = _repo_url(prev_dist, repo_name)

    # has a source entry? Call that 'released' with a 'version' to reuse _format_repo_table_row
    for repo_name in source_entry_repos:
        repos_info[repo_name]['released'] = True
        repos_info[repo_name]['version'] = 'yes'

    # Determine which repos directly block the missing ones
    for repo_name in missing_repos:
        package_dependencies = set()
        for pkg in _released_packages(prev_dist, (repo_name, )):
            package_dependencies.update(_package_dependencies(
                prev_walker, pkg))

        for dep in package_dependencies:
            if dep in missing_packages:
                blocking_repo = prev_dist.release_packages[dep].repository_name
                if blocking_repo == repo_name:
                    # ignore packages in the same repo
                    continue
                repos_info[repo_name]['repos_blocked_by'][blocking_repo] = \
                    _repo_url(prev_dist, blocking_repo)
                repos_info[repo_name]['maintainers'][blocking_repo].update(
                    dict(_maintainers(prev_dist, dep)))

                # Mark blocking relationship in other direction
                repos_info[blocking_repo]['repos_blocking'].add(repo_name)

    # Compute which repos block another recursively
    for repo_name in repos_info.keys():
        checked_repos = set()
        repos_to_check = set([repo_name])
        while repos_to_check:
            next_repo = repos_to_check.pop()
            new_repos_blocking = repos_info[next_repo]['repos_blocking']
            repos_info[repo_name]['recursive_repos_blocking'].update(
                new_repos_blocking)
            checked_repos.add(next_repo)
            repos_to_check.update(repos_info[next_repo]
                                  ['repos_blocking'].difference(checked_repos))

    return repos_info
Example 20

#!/usr/bin/env python

import subprocess
import yaml

from catkin_pkg.package import parse_package_string
import rosdistro

print('<html><body>')

i = rosdistro.get_index(rosdistro.get_index_url())
for distro in reversed(sorted(i.distributions.keys())):
    d = rosdistro.get_cached_distribution(i, distro)
    f = d._distribution_file
    print('<h1>%s</h1>' % distro)
    print('<table>')
    for repo_name in sorted(f.repositories.keys()):
        repo = f.repositories[repo_name]
        if not repo.release_repository:
            continue
        if not repo.doc_repository and not repo.source_repository:
            continue

        release_repo = repo.release_repository
        if release_repo.version is None:
            continue
        url = release_repo.url

        strands_prefix = 'https://github.com/strands-project-releases'
        if not url.startswith(strands_prefix):
            continue
Example 21
import os
import sys

from rosdistro import get_cached_distribution
from rosdistro import get_index
from rosdistro import get_index_url

import yaml

repos = {}

index = get_index(get_index_url())
for name, d in index.distributions.items():
    if d['distribution_status'] not in ('active', 'rolling'):
        continue
    d = get_cached_distribution(index, name)
    for r in d.repositories.values():
        if r.source_repository is None:
            continue
        sr = r.source_repository
        if sr.type != 'git':
            continue
        prefix = 'https://github.com/'
        suffix = '.git'
        if not sr.url.startswith(prefix) or not sr.url.endswith(suffix):
            continue
        url = sr.url[len(prefix):-len(suffix)]

        print('*', end='')
        sys.stdout.flush()
Example 22

def get_distro(distro_name):
    index = get_index(get_index_url())
    return get_cached_distribution(index, distro_name)
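
A usage sketch for this helper (the attribute names follow the distribution objects used throughout these examples; 'noetic' is illustrative):

distro = get_distro('noetic')
for name in sorted(distro.repositories.keys()):
    release_repo = distro.repositories[name].release_repository
    if release_repo and release_repo.version:
        print(name, release_repo.version)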
Example 23
def trigger_release_jobs(config_url,
                         rosdistro_name,
                         release_build_name,
                         missing_only,
                         source_only,
                         cache_dir,
                         cause=None,
                         groovy_script=None,
                         not_failed_only=False):
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    targets = []
    for os_name in sorted(build_file.targets.keys()):
        for os_code_name in sorted(build_file.targets[os_name].keys()):
            targets.append(Target(os_name, os_code_name, 'source'))
            if source_only:
                continue
            for arch in sorted(
                    build_file.targets[os_name][os_code_name].keys()):
                targets.append(Target(os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print('  - %s %s %s' % (os_name, os_code_name, arch))

    dist_file = get_cached_distribution(index, rosdistro_name)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    repo_data = None
    if missing_only:
        repo_data = get_package_repo_data(build_file.target_repository,
                                          targets, cache_dir)

    if groovy_script is None:
        jenkins = connect(config.jenkins_url)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    triggered_jobs = []
    skipped_jobs = []
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print(("  Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name))
            continue
        if not repo.release_repository.version:
            print(("  Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name))
            continue
        pkg_version = repo.release_repository.version

        debian_package_name = get_os_package_name(rosdistro_name, pkg_name)

        for target in targets:
            job_name = get_sourcedeb_job_name(rosdistro_name,
                                              release_build_name, pkg_name,
                                              target.os_name,
                                              target.os_code_name)
            if target.arch != 'source':
                # binary job can be skipped if source job was triggered
                if job_name in triggered_jobs:
                    print(("  Skipping binary jobs of '%s' since the source " +
                           "job was triggered") % job_name)
                    continue
                job_name = get_binarydeb_job_name(rosdistro_name,
                                                  release_build_name, pkg_name,
                                                  target.os_name,
                                                  target.os_code_name,
                                                  target.arch)

            if repo_data:
                # check if artifact is missing
                repo_index = repo_data[target]
                if debian_package_name in repo_index:
                    version = repo_index[debian_package_name]
                    version = _strip_version_suffix(version)
                    if version == pkg_version:
                        print(("  Skipping job '%s' since the artifact is " +
                               "already up-to-date") % job_name)
                        continue

            if groovy_script is None:
                success = invoke_job(jenkins, job_name, cause=cause)
            else:
                success = True
            if success:
                triggered_jobs.append(job_name)
            else:
                skipped_jobs.append(job_name)

    if groovy_script is None:
        print('Triggered %d jobs, skipped %d jobs.' %
              (len(triggered_jobs), len(skipped_jobs)))
    else:
        print("Writing groovy script '%s' to trigger %d jobs" %
              (groovy_script, len(triggered_jobs)))
        data = {
            'job_names': triggered_jobs,
            'not_failed_only': not_failed_only,
        }
        content = expand_template('release/trigger_jobs.groovy.em', data)
        with open(groovy_script, 'w') as h:
            h.write(content)
Example 24
    def get_distro(self):
        """Get a rosdistro object from the distro name configured in this object."""
        self._index = rosdistro.get_index(rosdistro.get_index_url())
        return rosdistro.get_cached_distribution(self._index,
                                                 self._distro_name)
Example 25
def _get_blocked_releases_info(config_url, rosdistro_name, repo_names=None):
    import rosdistro
    from rosdistro.dependency_walker import DependencyWalker
    from catkin_pkg.package import InvalidPackage, parse_package_string

    prev_rosdistro_name = None

    config = get_config_index(config_url)

    index = rosdistro.get_index(config.rosdistro_index_url)
    valid_rosdistro_names = list(index.distributions.keys())
    valid_rosdistro_names.sort()
    if rosdistro_name is None:
        rosdistro_name = valid_rosdistro_names[-1]
    print('Checking packages for "%s" distribution' % rosdistro_name)

    # Find the previous distribution to the current one
    try:
        i = valid_rosdistro_names.index(rosdistro_name)
    except ValueError:
        print('Distribution key not found in list of valid distributions.',
              file=sys.stderr)
        exit(-1)
    if i == 0:
        print('No previous distribution found.', file=sys.stderr)
        exit(-1)
    prev_rosdistro_name = valid_rosdistro_names[i - 1]

    cache = rosdistro.get_distribution_cache(index, rosdistro_name)
    distro_file = cache.distribution_file

    prev_cache = rosdistro.get_distribution_cache(index, prev_rosdistro_name)
    prev_distribution = rosdistro.get_cached_distribution(index,
                                                          prev_rosdistro_name,
                                                          cache=prev_cache)

    prev_distro_file = prev_cache.distribution_file

    dependency_walker = DependencyWalker(prev_distribution)

    if repo_names is None:
        # Check missing dependencies for packages that were in the previous
        # distribution that have not yet been released in the current distribution
        # Filter repos without a version or a release repository
        keys = prev_distro_file.repositories.keys()
        prev_repo_names = set(repo for repo in keys
                              if _is_released(repo, prev_distro_file))
        repo_names = prev_repo_names
        ignored_inputs = []
    else:
        prev_repo_names = set(repo for repo in repo_names
                              if _is_released(repo, prev_distro_file))
        ignored_inputs = list(set(repo_names).difference(prev_repo_names))
        if len(ignored_inputs) > 0:
            print(
                'Ignoring inputs for which repository info not found in previous distribution '
                '(did you list a package instead of a repository?):')
            print('\n'.join(
                sorted('\t{0}'.format(repo) for repo in ignored_inputs)))

    keys = distro_file.repositories.keys()
    current_repo_names = set(repo for repo in keys
                             if _is_released(repo, distro_file))

    # Get a list of currently released packages
    current_package_names = set(
        pkg for repo in current_repo_names for pkg in
        distro_file.repositories[repo].release_repository.package_names)

    released_repos = prev_repo_names.intersection(current_repo_names)

    unreleased_repos = list(prev_repo_names.difference(current_repo_names))

    if len(unreleased_repos) == 0:
        print('All inputs already released in {0}.'.format(rosdistro_name))

    repos_info = defaultdict(dict)
    unprocessed_repos = prev_repo_names
    while unprocessed_repos:
        print('Processing repos:\n%s' %
              '\n'.join(['- %s' % r for r in sorted(unprocessed_repos)]))
        new_repos_to_process = set()  # repos that come up while processing others

        for repo_name in unprocessed_repos:
            repos_info[repo_name]['released'] = repo_name in released_repos

            if repo_name in released_repos:
                repo = distro_file.repositories[repo_name]
                version = repo.release_repository.version
                repos_info[repo_name]['version'] = version

            else:
                # Gather info on which required repos have not been released yet
                # Assume dependencies will be the same as in the previous distribution and find
                # which ones have been released
                repo = prev_distro_file.repositories[repo_name]
                release_repo = repo.release_repository
                package_dependencies = set()
                packages = release_repo.package_names
                # Accumulate all dependencies for those packages
                for package in packages:
                    try:
                        package_dependencies |= dependency_walker.get_recursive_depends(
                            package, ['build', 'buildtool', 'run', 'test'],
                            ros_packages_only=True,
                            limit_depth=1)
                    except AssertionError as e:
                        print(e, file=sys.stderr)

                # For all package dependencies, check if they are released yet
                unreleased_pkgs = package_dependencies.difference(
                    current_package_names)
                # Remove the packages which this repo provides
                unreleased_pkgs = unreleased_pkgs.difference(packages)

                # Get maintainer info and repo of unreleased packages
                maintainers = {}
                repos_blocked_by = set()
                for pkg_name in unreleased_pkgs:
                    unreleased_repo_name = \
                        prev_distro_file.release_packages[pkg_name].repository_name
                    repos_blocked_by.add(unreleased_repo_name)
                    pkg_xml = prev_distribution.get_release_package_xml(
                        pkg_name)
                    if pkg_xml is not None:
                        try:
                            pkg = parse_package_string(pkg_xml)
                        except InvalidPackage:
                            pass
                        else:
                            pkg_maintainers = {
                                m.name: m.email
                                for m in pkg.maintainers
                            }
                            if unreleased_repo_name not in maintainers:
                                maintainers[unreleased_repo_name] = {}
                            maintainers[unreleased_repo_name].update(
                                pkg_maintainers)
                if maintainers:
                    repos_info[repo_name]['maintainers'] = maintainers

                repos_info[repo_name]['repos_blocked_by'] = {}
                for blocking_repo_name in repos_blocked_by:
                    # Get url of blocking repos
                    repo_url = None
                    blocking_repo = prev_distro_file.repositories[
                        blocking_repo_name]
                    if blocking_repo.source_repository:
                        repo_url = blocking_repo.source_repository.url
                    elif blocking_repo.doc_repository:
                        repo_url = blocking_repo.doc_repository.url
                    repos_info[repo_name]['repos_blocked_by'].update(
                        {blocking_repo_name: repo_url})

                    # Mark blocking relationship in other direction
                    if blocking_repo_name not in repos_info:
                        new_repos_to_process.add(blocking_repo_name)
                        repos_info[blocking_repo_name] = {}
                    if 'repos_blocking' not in repos_info[blocking_repo_name]:
                        repos_info[blocking_repo_name]['repos_blocking'] = set()
                    repos_info[blocking_repo_name]['repos_blocking'].add(
                        repo_name)

            # Get url of repo
            repo_url = None
            if repo.source_repository:
                repo_url = repo.source_repository.url
            elif repo.doc_repository:
                repo_url = repo.doc_repository.url
            if repo_url:
                repos_info[repo_name]['url'] = repo_url

            # this repo has been fully processed now
            new_repos_to_process.discard(repo_name)

        for repo_name in repos_info.keys():
            # Recursively get all repos being blocked by this repo
            recursive_blocks = set()
            repos_to_check = {repo_name}
            while repos_to_check:
                next_repo_to_check = repos_to_check.pop()
                blocks = repos_info[next_repo_to_check].get(
                    'repos_blocking', set())
                new_blocks = blocks - recursive_blocks
                repos_to_check |= new_blocks
                recursive_blocks |= new_blocks
            if recursive_blocks:
                repos_info[repo_name]['recursive_repos_blocking'] = \
                    recursive_blocks
        unprocessed_repos = new_repos_to_process

    return repos_info
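
# A minimal sketch of how the repos_info mapping returned above might be
# consumed; report_blockers is a hypothetical helper and relies only on keys
# the code above actually sets.
def report_blockers(repos_info):
    # Print every unreleased repo together with the repos blocking it
    for name in sorted(repos_info):
        info = repos_info[name]
        if info.get('released'):
            continue
        blockers = sorted(info.get('repos_blocked_by', {}))
        blocking = info.get('recursive_repos_blocking', set())
        print('%s blocked by: %s (transitively blocks %d repos)' %
              (name, ', '.join(blockers) or 'nothing', len(blocking)))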
Esempio n. 26
        data['column_label'] = column_label.format(**data)
        data['view_url'] = JENKINS_HOST + '/view/%s/' % \
            view_name.format(**data)

        if is_source:
            job_name = 'ros-{rosdistro}-{{pkg}}_sourcedeb'
        else:
            data['arch'] = arch
            job_name = 'ros-{rosdistro}-{{pkg}}_binarydeb_{distro}_{arch}'
        data['job_url'] = ('{view_url}job/%s/' % job_name).format(**data)

        return data

    if args.rosdistro != 'fuerte':
        index = get_index(get_index_url())
        cached_distribution = get_cached_distribution(index, args.rosdistro)
    else:
        cached_distribution = None

    print('Transforming .csv into .html file...')
    template_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                 '..', 'resources', 'status_page.html.em')
    with open(csv_file, 'r') as f:
        html = transform_csv_to_html(f, metadata_builder, args.rosdistro,
                                     start_time, template_file, args.resources,
                                     cached_distribution)
    html_file = os.path.join(args.basedir, '%s.html' % args.rosdistro)
    with open(html_file, 'w') as f:
        f.write(html)

    print('Symlinking js and css...')
Esempio n. 27
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate `manifest.yaml` from released package manifests")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    add_argument_output_dir(parser, required=True)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)
    build_files = get_doc_build_files(config, args.rosdistro_name)
    build_file = build_files[args.doc_build_name]

    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    index = get_index(config.rosdistro_index_url)
    distribution = get_cached_distribution(index, args.rosdistro_name)

    # get rosdistro distribution cache
    # iterate over all released repositories
    # which don't have a doc entry
    # extract information from package.xml and generate manifest.yaml

    repo_names = get_repo_names_with_release_but_no_doc(distribution)
    pkg_names = get_package_names(distribution, repo_names)

    filtered_pkg_names = build_file.filter_packages(pkg_names)

    print("Generate 'manifest.yaml' files for the following packages:")
    api_path = os.path.join(args.output_dir, 'api')
    for pkg_name in sorted(filtered_pkg_names):
        print('- %s' % pkg_name)
        try:
            data = get_metadata(distribution, pkg_name)
        except Exception:
            print('Could not extract meta data:', file=sys.stderr)
            traceback.print_exc(file=sys.stderr)
            continue

        # add devel job urls
        rel_pkg = distribution.release_packages[pkg_name]
        repo_name = rel_pkg.repository_name
        repo = distribution.repositories[repo_name]
        if repo.source_repository and repo.source_repository.version:
            build_files = dict(source_build_files)
            devel_job_urls = get_devel_job_urls(config.jenkins_url,
                                                build_files,
                                                args.rosdistro_name, repo_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

        # add release job urls
        build_files = dict(release_build_files)
        release_job_urls = get_release_job_urls(config.jenkins_url,
                                                build_files,
                                                args.rosdistro_name, pkg_name)
        if release_job_urls:
            data['release_jobs'] = release_job_urls

        manifest_yaml = os.path.join(api_path, pkg_name, 'manifest.yaml')
        write_manifest_yaml(manifest_yaml, data)

    return 0
Esempio n. 28
keys = [
    'ros_core',
    'ros_base',
    'robot',
    'viz',
    'desktop',
    'perception',
    'simulators',
    'desktop_full',
]

# Get the packages which make up each layer of the variants
mp_sets = {}
index = get_index(get_index_url())
indigo = get_cached_distribution(index, 'indigo')
jade = get_cached_distribution(index, 'jade')
dist_file = get_distribution_file(index, 'indigo')
jade_dist_file = get_distribution_file(index, 'jade')
dw = DependencyWalker(indigo)
for mp in keys:
    # print("Fetching deps for: ", mp)
    deps = list(set(metapackages[mp].run_depends))
    mp_sets[mp] = set()
    for dep in deps:
        mp_sets[mp].add(dep.name)
        if dep.name in keys:
            continue
        # print(" ", dep.name)
        previous_pkgs = set()
        for mp_, mp_set in mp_sets.items():
Esempio n. 29
        print('Invalid rosdistro [%s] selected for comparison to [%s].' %
              (args.comparison, distro_key),
              file=sys.stderr)
        print('Valid rosdistros are %s.' % valid_comparison_keys,
              file=sys.stderr)
        exit(-1)
    prev_distro_key = args.comparison
else:
    prev_distro_key = valid_distro_keys[i - 1]

cache = rosdistro.get_distribution_cache(index, distro_key)
distro_file = cache.distribution_file

prev_cache = rosdistro.get_distribution_cache(index, prev_distro_key)
prev_distribution = rosdistro.get_cached_distribution(index,
                                                      prev_distro_key,
                                                      cache=prev_cache)

prev_distro_file = prev_cache.distribution_file

dependency_walker = DependencyWalker(prev_distribution)

if repo_names_argument is None:
    # Check missing dependencies for packages that were in the previous
    # distribution that have not yet been released in the current distribution
    # Filter repos without a version or a release repository
    repo_names_argument = prev_distro_file.repositories.keys()

prev_repo_names = set(repo for repo in repo_names_argument
                      if is_released(repo, prev_distro_file))
Esempio n. 30
def get_distro(distro_name):
    index = get_index(get_index_url())
    return get_cached_distribution(index, distro_name)
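
# A short usage sketch for the helper above; 'indigo' is only an example
# distribution name.
distro = get_distro('indigo')
print(sorted(distro.repositories.keys())[:5])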
Esempio n. 31
def _get_blocked_releases_info(config_url, rosdistro_name, repo_names=None):
    import rosdistro
    from rosdistro.dependency_walker import DependencyWalker

    config = get_config_index(config_url)

    index = rosdistro.get_index(config.rosdistro_index_url)

    print('Checking packages for "%s" distribution' % rosdistro_name)

    # Find the previous distribution to the current one
    try:
        prev_rosdistro_name = _prev_rosdistro(index, rosdistro_name)
    except ValueError as e:
        print(e.args[0], file=sys.stderr)
        exit(-1)

    cache = rosdistro.get_distribution_cache(index, rosdistro_name)
    distro_file = cache.distribution_file

    prev_cache = rosdistro.get_distribution_cache(index, prev_rosdistro_name)
    prev_distribution = rosdistro.get_cached_distribution(index,
                                                          prev_rosdistro_name,
                                                          cache=prev_cache)

    prev_distro_file = prev_cache.distribution_file

    dependency_walker = DependencyWalker(prev_distribution)

    # Check missing dependencies for packages that were in the previous
    # distribution that have not yet been released in the current distribution
    # Filter repos without a version or a release repository
    prev_repo_names = set(_released_repos(prev_distro_file))

    if repo_names is not None:
        ignored_inputs = prev_repo_names.difference(repo_names)
        prev_repo_names.intersection_update(repo_names)
        repo_names = prev_repo_names

        if len(ignored_inputs) > 0:
            print(
                'Ignoring inputs for which repository info not found in previous distribution '
                '(did you list a package instead of a repository?):')
            print('\n'.join(
                sorted('\t{0}'.format(repo) for repo in ignored_inputs)))

    current_repo_names = set(_released_repos(distro_file))

    # Get a list of currently released packages
    current_package_names = set(
        _released_packages(distro_file, current_repo_names))

    released_repos = prev_repo_names.intersection(current_repo_names)

    if prev_repo_names.issubset(current_repo_names):
        print('All inputs already released in {0}.'.format(rosdistro_name))

    repos_info = defaultdict(dict)
    unprocessed_repos = prev_repo_names
    while unprocessed_repos:
        print('Processing repos:\n%s' %
              '\n'.join(['- %s' % r for r in sorted(unprocessed_repos)]))
        new_repos_to_process = set()  # repos that come up while processing others

        for repo_name in unprocessed_repos:
            repos_info[repo_name]['released'] = repo_name in released_repos

            if repo_name in released_repos:
                repo = distro_file.repositories[repo_name]
                version = repo.release_repository.version
                repos_info[repo_name]['version'] = version

            else:
                # Gather info on which required repos have not been released yet
                # Assume dependencies will be the same as in the previous distribution and find
                # which ones have been released
                repo = prev_distro_file.repositories[repo_name]
                release_repo = repo.release_repository
                package_dependencies = set()
                packages = release_repo.package_names
                # Accumulate all dependencies for those packages
                for package in packages:
                    package_dependencies.update(
                        _package_dependencies(dependency_walker, package))

                # For all package dependencies, check if they are released yet
                unreleased_pkgs = package_dependencies.difference(
                    current_package_names)
                # Remove the packages which this repo provides
                unreleased_pkgs = unreleased_pkgs.difference(packages)

                # Get maintainer info and repo of unreleased packages
                maintainers = defaultdict(dict)
                repos_blocked_by = set()
                for pkg_name in unreleased_pkgs:
                    unreleased_repo_name = \
                        prev_distro_file.release_packages[pkg_name].repository_name
                    repos_blocked_by.add(unreleased_repo_name)
                    maintainers[unreleased_repo_name].update(
                        dict(_maintainers(prev_distribution, pkg_name)))
                if maintainers:
                    repos_info[repo_name]['maintainers'] = maintainers

                repos_info[repo_name]['repos_blocked_by'] = {}
                for blocking_repo_name in repos_blocked_by:
                    # Get url of blocking repos
                    repo_url = _repo_url(prev_distribution, blocking_repo_name)
                    repos_info[repo_name]['repos_blocked_by'].update(
                        {blocking_repo_name: repo_url})

                    # Mark blocking relationship in other direction
                    if blocking_repo_name not in repos_info:
                        new_repos_to_process.add(blocking_repo_name)
                    repos_info[blocking_repo_name].setdefault(
                        'repos_blocking', set()).add(repo_name)

            # Get url of repo
            repo_url = _repo_url(prev_distribution, repo_name)
            if repo_url:
                repos_info[repo_name]['url'] = repo_url

            # this repo has been fully processed now
            new_repos_to_process.discard(repo_name)

        for repo_name in repos_info.keys():
            # Recursively get all repos being blocked by this repo
            recursive_blocks = set()
            repos_to_check = {repo_name}
            while repos_to_check:
                next_repo_to_check = repos_to_check.pop()
                blocks = repos_info[next_repo_to_check].get(
                    'repos_blocking', set())
                new_blocks = blocks - recursive_blocks
                repos_to_check |= new_blocks
                recursive_blocks |= new_blocks
            if recursive_blocks:
                repos_info[repo_name]['recursive_repos_blocking'] = \
                    recursive_blocks
        unprocessed_repos = new_repos_to_process

    return repos_info
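
# The underscore-prefixed helpers used above (_prev_rosdistro, _released_repos,
# _released_packages, _package_dependencies, _maintainers, _repo_url) are
# defined elsewhere in this project; below is a sketch of two of them,
# reconstructed from how they are called and from the inline logic of the
# earlier variant of this function (treat the bodies as assumptions).
def _released_repos(distro_file):
    # Yield names of repositories that have a release entry with a version
    for repo_name, repo in distro_file.repositories.items():
        release_repo = repo.release_repository
        if release_repo and release_repo.version:
            yield repo_name


def _repo_url(distribution, repo_name):
    # Prefer the source repository URL, fall back to the doc repository
    repo = distribution.repositories[repo_name]
    if repo.source_repository:
        return repo.source_repository.url
    if repo.doc_repository:
        return repo.doc_repository.url
    return None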
Esempio n. 32
def get_blocking_info(distro_key, repo_names, depth):
    prev_distro_key = None

    index = rosdistro.get_index(rosdistro.get_index_url())
    valid_distro_keys = sorted(index.distributions.keys())
    if distro_key is None:
        distro_key = valid_distro_keys[-1]
    print('Checking packages for "%s" distribution' % distro_key)

    # Find the previous distribution to the current one
    try:
        i = valid_distro_keys.index(distro_key)
    except ValueError:
        print('Distribution key not found in list of valid distributions.')
        exit(-1)
    if i == 0:
        print('No previous distribution found.')
        exit(-1)
    prev_distro_key = valid_distro_keys[i - 1]

    cache = rosdistro.get_distribution_cache(index, distro_key)
    distro_file = cache.distribution_file

    prev_cache = rosdistro.get_distribution_cache(index, prev_distro_key)
    prev_distribution = rosdistro.get_cached_distribution(
        index, prev_distro_key, cache=prev_cache)

    prev_distro_file = prev_cache.distribution_file

    dependency_walker = DependencyWalker(prev_distribution)

    if repo_names is None:
        # Check missing dependencies for packages that were in the previous
        # distribution that have not yet been released in the current distribution
        # Filter repos without a version or a release repository
        keys = prev_distro_file.repositories.keys()
        prev_repo_names = set(
            repo for repo in keys if is_released(repo, prev_distro_file))
        repo_names = prev_repo_names
        ignored_inputs = []
    else:
        prev_repo_names = set(
            repo for repo in repo_names if is_released(repo, prev_distro_file))
        ignored_inputs = list(set(repo_names).difference(prev_repo_names))
        if len(ignored_inputs) > 0:
            print('Ignoring inputs for which repository info not found in '
                  'previous distribution (did you list a package instead of '
                  'a repository?):')
            print('\n'.join(
                sorted('\t{0}'.format(repo) for repo in ignored_inputs)))

    keys = distro_file.repositories.keys()
    current_repo_names = set(
        repo for repo in keys if is_released(repo, distro_file))

    released_repos = prev_repo_names.intersection(current_repo_names)

    unreleased_repos = list(prev_repo_names.difference(current_repo_names))

    # Get a list of currently released packages
    current_package_names = set(
        pkg for repo in current_repo_names
        for pkg in distro_file.repositories[repo].release_repository.package_names)

    # Construct a dictionary where keys are repository names and values are a list
    # of the repos blocking/blocked by that repo
    blocked_repos = {}
    blocking_repos = {}
    unblocked_blocking_repos = set()

    if len(unreleased_repos) == 0:
        print('All inputs already released in {0}.'.format(
            distro_key))

    # Process repo dependencies
    unblocked_repos = set()
    total_blocking_repos = set()

    for repository_name in unreleased_repos:
        repo = prev_distro_file.repositories[repository_name]
        release_repo = repo.release_repository
        package_dependencies = set()
        packages = release_repo.package_names
        # Accumulate all dependencies for those packages
        for package in packages:
            recursive_dependencies = dependency_walker.get_recursive_depends(
                package, ['build', 'run', 'buildtool'], ros_packages_only=True,
                limit_depth=depth)
            package_dependencies = package_dependencies.union(
                recursive_dependencies)

        # For all package dependencies, check if they are released yet
        unreleased_pkgs = package_dependencies.difference(
            current_package_names)
        # remove the packages which this repo provides.
        unreleased_pkgs = unreleased_pkgs.difference(packages)
        # Now get the repositories for these packages.
        blocking_repos_for_this_repo = set(
            prev_distro_file.release_packages[pkg].repository_name
            for pkg in unreleased_pkgs)
        if len(blocking_repos_for_this_repo) == 0:
            unblocked_repos.add(repository_name)
        else:
            # Get the repository for the unreleased packages
            blocked_repos[repository_name] = blocking_repos_for_this_repo
            total_blocking_repos |= blocking_repos_for_this_repo

            for blocking_repo in blocking_repos_for_this_repo:
                blocking_repos.setdefault(
                    blocking_repo, set()).add(repository_name)

    unblocked_blocking_repos_names = total_blocking_repos.intersection(
        unblocked_repos)
    unblocked_blocking_repos = {
        repo: blocking for repo, blocking in blocking_repos.items()
        if repo in unblocked_blocking_repos_names
    }
    unblocked_leaf_repos = unblocked_repos.difference(unblocked_blocking_repos_names)

    # Double-check repositories that we think are leaf repos
    for repo in unblocked_leaf_repos:
        # Check only one level of depends_on for each package this repo provides
        packages = prev_distro_file.repositories[repo].release_repository.package_names
        for package in packages:
            depends_on = dependency_walker.get_depends_on(package, 'build') | \
                dependency_walker.get_depends_on(package, 'run') | \
                dependency_walker.get_depends_on(package, 'buildtool')
            # There may be packages that depend on this "leaf", but we didn't
            # find them initially because they weren't related to our inputs
            for dependent_pkg in depends_on:
                depends_on_repo = \
                    prev_distro_file.release_packages[dependent_pkg].repository_name
                unblocked_blocking_repos.setdefault(
                    repo, set()).add(depends_on_repo)

    unblocked_unblocking_repos = unblocked_leaf_repos.difference(
        unblocked_blocking_repos.keys())

    if len(repo_names) != (len(ignored_inputs) + len(released_repos) +
                           len(blocked_repos) + len(unblocked_blocking_repos) +
                           len(unblocked_unblocking_repos)):
        raise Exception('Somewhere a repo has not been accounted for')
    return (released_repos, blocked_repos, unblocked_blocking_repos,
            unblocked_unblocking_repos)
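
# A hypothetical call to the function above; the distro key and depth are just
# examples.
released, blocked, unblocked_blocking, unblocked_unblocking = \
    get_blocking_info('jade', None, 3)
print('%d repos released, %d still blocked' % (len(released), len(blocked)))
for repo, blockers in sorted(blocked.items()):
    print('  %s is waiting on: %s' % (repo, ', '.join(sorted(blockers))))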
Esempio n. 33
def get_manifest_from_rosdistro(package_name, distro_name):
    """
    Get the rosdistro repository data and package information.

    @param package_name: name of package or repository to get manifest information for.
    It gives package symbols precedence over repository names.
    @type  package_name: str
    @param distro_name: name of ROS distribution
    @type  distro_name: str

    @return: (manifest data, 'package'|'metapackage'|'repository', None).
    @rtype: ({str: str}, str, None)
    @raise IOError: if data cannot be loaded
    """
    data = {}
    type_ = None
    index = get_index(get_index_url())
    try:
        distribution_cache = get_cached_distribution(index, distro_name)
    except RuntimeError as runerr:
        if str(runerr).startswith("Unknown release"):
            return None
        raise

    if package_name in distribution_cache.release_packages:
        pkg = distribution_cache.release_packages[package_name]
        #print('pkg', pkg.name)
        pkg_xml = distribution_cache.get_release_package_xml(package_name)
        pkg_manifest = parse_package_string(pkg_xml)
        data['description'] = pkg_manifest.description
        website_url = [u.url for u in pkg_manifest.urls if u.type == 'website']
        if website_url:
            data['url'] = website_url[0]
        repo_name = pkg.repository_name
        meta_export = [
            exp for exp in pkg_manifest.exports if exp.tagname == 'metapackage'
        ]
        if meta_export:
            type_ = 'metapackage'
        else:
            type_ = 'package'
    else:
        repo_name = package_name
        type_ = 'repository'
    data['repo_name'] = repo_name
    if repo_name not in distribution_cache.repositories:
        return None
    repo = distribution_cache.repositories[repo_name]
    if repo.release_repository:
        data['packages'] = repo.release_repository.package_names

    if not repo.source_repository:
        return None
    data['vcs'] = repo.source_repository.type
    data['vcs_uri'] = repo.source_repository.url
    data['vcs_version'] = repo.source_repository.version

    return (data, type_, None)
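
# A minimal, hypothetical use of the function above; the package and distro
# names are only examples.
result = get_manifest_from_rosdistro('roscpp', 'indigo')
if result is not None:
    data, type_, _ = result
    print('%s is a %s hosted at %s' % ('roscpp', type_, data.get('vcs_uri')))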
Esempio n. 34
def build_release_status_page(
        config_url, rosdistro_name, release_build_name,
        cache_dir, output_dir, copy_resources=False):
    from rosdistro import get_cached_distribution
    from rosdistro import get_index

    start_time = time.time()

    config = get_config_index(config_url)
    release_build_files = get_release_build_files(config, rosdistro_name)
    build_file = release_build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    targets = []
    for os_name in sorted(build_file.targets.keys()):
        if os_name not in ['debian', 'ubuntu']:
            continue
        for os_code_name in sorted(build_file.targets[os_name].keys()):
            targets.append(Target(os_name, os_code_name, 'source'))
            for arch in sorted(build_file.targets[os_name][os_code_name]):
                targets.append(Target(os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for _, os_code_name, arch in targets:
        print('  - %s %s' % (os_code_name, arch))

    # get all input data
    dist = get_cached_distribution(index, rosdistro_name)

    rosdistro_info = get_rosdistro_info(dist, build_file)

    # derive testing and main urls from building url
    building_repo_url = build_file.target_repository
    base_url = os.path.dirname(building_repo_url)
    testing_repo_url = os.path.join(base_url, 'testing')
    main_repo_url = os.path.join(base_url, 'main')

    building_repo_data = get_debian_repo_data(
        building_repo_url, targets, cache_dir)
    testing_repo_data = get_debian_repo_data(
        testing_repo_url, targets, cache_dir)
    main_repo_data = get_debian_repo_data(main_repo_url, targets, cache_dir)

    repos_data = [building_repo_data, testing_repo_data, main_repo_data]

    # compute derived attributes
    package_descriptors = get_rosdistro_package_descriptors(
        rosdistro_info, rosdistro_name)

    affected_by_sync = get_affected_by_sync(
        package_descriptors, targets, testing_repo_data, main_repo_data)

    regressions = get_regressions(
        package_descriptors, targets,
        building_repo_data, testing_repo_data, main_repo_data)

    version_status = get_version_status(
        package_descriptors, targets, repos_data, strip_version=True)

    homogeneous = get_homogeneous(package_descriptors, targets, repos_data)

    package_counts = get_package_counts(
        package_descriptors, targets, repos_data)

    jenkins_job_urls = get_jenkins_job_urls(
        rosdistro_name, config.jenkins_url, release_build_name, targets)

    # generate output
    repo_urls = [building_repo_url, testing_repo_url, main_repo_url]
    repo_names = get_url_names(repo_urls)

    ordered_pkgs = []
    for pkg_name in sorted(rosdistro_info.keys()):
        ordered_pkgs.append(rosdistro_info[pkg_name])

    template_name = 'status/release_status_page.html.em'
    data = {
        'title': 'ROS packages for %s' % rosdistro_name.capitalize(),
        'start_time': start_time,
        'start_time_local_str': time.strftime('%Y-%m-%d %H:%M:%S %z', time.localtime(start_time)),

        'resource_hashes': get_resource_hashes(),

        'repo_names': repo_names,
        'repo_urls': repo_urls,

        'has_repository_column': True,
        'has_status_column': True,
        'has_maintainer_column': True,

        'ordered_pkgs': ordered_pkgs,
        'targets': targets,
        'short_arches': {t.arch: get_short_arch(t.arch) for t in targets},
        'repos_data': repos_data,

        'affected_by_sync': affected_by_sync,
        'homogeneous': homogeneous,
        'jenkins_job_urls': jenkins_job_urls,
        'package_counts': package_counts,
        'regressions': regressions,
        'version_status': version_status,
    }
    html = expand_template(template_name, data)
    output_filename = os.path.join(
        output_dir, 'ros_%s_%s.html' % (rosdistro_name, release_build_name))
    print("Generating status page '%s':" % output_filename)
    with open(output_filename, 'w') as h:
        h.write(html)

    additional_resources(output_dir, copy_resources=copy_resources)

    yaml_folder = os.path.join(output_dir, 'yaml')
    if not os.path.exists(yaml_folder):
        os.mkdir(yaml_folder)

    yaml_filename = os.path.join(
        yaml_folder, 'ros_%s_%s.yaml' % (rosdistro_name, release_build_name))
    write_yaml(yaml_filename, ordered_pkgs, repos_data)
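
# A hypothetical invocation of the page builder above; the config URL mirrors
# the public ROS buildfarm configuration layout, and all names are examples.
build_release_status_page(
    'https://raw.githubusercontent.com/ros-infrastructure/'
    'ros_buildfarm_config/production/index.yaml',
    'indigo', 'default',
    cache_dir='/tmp/debian_repo_cache',
    output_dir='/tmp/status_pages',
    copy_resources=True)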
Esempio n. 35
def build_release_status_page(
        config_url, rosdistro_name, release_build_name,
        building_repo_url, testing_repo_url, main_repo_url,
        cache_dir, output_dir):
    start_time = time.localtime()

    config = get_config_index(config_url)
    release_build_files = get_release_build_files(config, rosdistro_name)
    build_file = release_build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    targets = []
    for os_name in sorted(build_file.targets.keys()):
        if os_name != 'ubuntu':
            continue
        for os_code_name in sorted(build_file.targets[os_name].keys()):
            targets.append(Target(os_code_name, 'source'))
            for arch in sorted(build_file.targets[os_name][os_code_name]):
                targets.append(Target(os_code_name, arch))
    print('The build file contains the following targets:')
    for os_code_name, arch in targets:
        print('  - %s %s' % (os_code_name, arch))

    # get all input data
    dist = get_cached_distribution(index, rosdistro_name)

    rosdistro_info = get_rosdistro_info(dist, build_file)

    building_repo_data = get_debian_repo_data(
        building_repo_url, targets, cache_dir)
    testing_repo_data = get_debian_repo_data(
        testing_repo_url, targets, cache_dir)
    main_repo_data = get_debian_repo_data(main_repo_url, targets, cache_dir)

    repos_data = [building_repo_data, testing_repo_data, main_repo_data]

    # compute derived attributes
    package_descriptors = get_rosdistro_package_descriptors(
        rosdistro_info, rosdistro_name)

    affected_by_sync = get_affected_by_sync(
        package_descriptors, targets, testing_repo_data, main_repo_data)

    regressions = get_regressions(
        package_descriptors, targets,
        building_repo_data, testing_repo_data, main_repo_data)

    version_status = get_version_status(
        package_descriptors, targets, repos_data, strip_version=True)

    homogeneous = get_homogeneous(package_descriptors, targets, repos_data)

    package_counts = get_package_counts(
        package_descriptors, targets, repos_data)

    jenkins_job_urls = get_jenkins_job_urls(
        rosdistro_name, config.jenkins_url, release_build_name, targets)

    # generate output
    repo_urls = [building_repo_url, testing_repo_url, main_repo_url]
    repo_names = get_url_names(repo_urls)

    ordered_pkgs = []
    for pkg_name in sorted(rosdistro_info.keys()):
        ordered_pkgs.append(rosdistro_info[pkg_name])

    template_name = 'status/release_status_page.html.em'
    data = {
        'title': 'ROS %s - release status' % rosdistro_name.capitalize(),
        'start_time': time.strftime('%Y-%m-%d %H:%M:%S %Z', start_time),

        'resource_hashes': get_resource_hashes(),

        'repo_names': repo_names,
        'repo_urls': repo_urls,

        'has_repository_column': True,
        'has_status_column': True,
        'has_maintainer_column': True,

        'ordered_pkgs': ordered_pkgs,
        'targets': targets,
        'target_prefix': rosdistro_name[0].upper(),
        'repos_data': repos_data,

        'affected_by_sync': affected_by_sync,
        'homogeneous': homogeneous,
        'jenkins_job_urls': jenkins_job_urls,
        'package_counts': package_counts,
        'regressions': regressions,
        'version_status': version_status,
    }
    html = expand_template(template_name, data)
    output_filename = os.path.join(
        output_dir, '%s_%s.html' % (rosdistro_name, release_build_name))
    print("Generating status page '%s':" % output_filename)
    with open(output_filename, 'w') as h:
        h.write(html)

    additional_resources(output_dir)
Esempio n. 36
def build_sourcerpm(
        rosdistro_index_url, rosdistro_name, pkg_name, os_name, os_code_name,
        sources_dir):
    from rosdistro import get_cached_distribution
    from rosdistro import get_index
    index = get_index(rosdistro_index_url)
    dist_file = get_cached_distribution(index, rosdistro_name)
    if pkg_name not in dist_file.release_packages:
        return 'Not a released package name: %s' % pkg_name

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]
    if not repo.release_repository.version:
        return "Repository '%s' has no release version" % repo_name

    pkg_version = repo.release_repository.version
    tag = _get_source_tag(
        rosdistro_name, pkg_name, pkg_version, os_name, os_code_name)
    os_pkg_name = get_os_package_name(rosdistro_name, pkg_name)
    release_repository_url = repo.release_repository.url

    clone_cmd = [
        'git', 'clone',
        '--branch', tag,
        # fetch all branches and tags but no history
        '--depth', '1', '--no-single-branch',
        release_repository_url, os_pkg_name]

    cmd = [
        'mock',
        '--scm-option', 'git_get=%s' % ' '.join(clone_cmd),
        '--scm-option', 'package=%s' % os_pkg_name,
        '--scm-option', 'branch=%s' % tag,
        '--scm-option', 'spec=rpm/%s.spec' % os_pkg_name,
        '--scm-enable',
        '--enable-network',
        '--disable-plugin', 'root_cache',
        '--resultdir', '%s' % sources_dir,
        '--no-cleanup-after',
        '--postinstall',
        '--verbose',
        '--root', 'ros_buildfarm',
        '--buildsrpm']

    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd)

    mock_root_path = subprocess.check_output(
        ['mock', '--root', 'ros_buildfarm', '--print-root-path']).decode('utf-8').strip()
    mock_sources_path = os.path.join(mock_root_path, 'builddir', 'build', 'SOURCES')

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(mock_sources_path)
    maintainer_emails = {m.email for m in pkg.maintainers}
    if maintainer_emails:
        print('Package maintainer emails: %s' % (
            ' '.join(sorted(maintainer_emails))))
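
# A hypothetical call to the helper above; the index URL, distro, package and
# target names are only examples.
import sys

error = build_sourcerpm(
    'https://raw.githubusercontent.com/ros/rosdistro/master/index-v4.yaml',
    'noetic', 'roscpp', 'rhel', '8', '/tmp/sources')
if error:
    print(error, file=sys.stderr)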
Esempio n. 37
def get_sources(rosdistro_index_url, rosdistro_name, pkg_name, os_name,
                os_code_name, sources_dir, debian_repository_urls):
    from rosdistro import get_cached_distribution
    from rosdistro import get_index
    index = get_index(rosdistro_index_url)
    dist_file = get_cached_distribution(index, rosdistro_name)
    if pkg_name not in dist_file.release_packages:
        return 'Not a released package name: %s' % pkg_name

    pkg = dist_file.release_packages[pkg_name]
    repo_name = pkg.repository_name
    repo = dist_file.repositories[repo_name]
    if not repo.release_repository.version:
        return "Repository '%s' has no release version" % repo_name

    pkg_version = repo.release_repository.version
    tag = _get_source_tag(rosdistro_name, pkg_name, pkg_version, os_name,
                          os_code_name)

    cmd = [
        'git',
        'clone',
        '--branch',
        tag,
        # fetch all branches and tags but no history
        '--depth',
        '1',
        '--no-single-branch',
        repo.release_repository.url,
        sources_dir
    ]

    print("Invoking '%s'" % ' '.join(cmd))
    subprocess.check_call(cmd)

    # ensure that the package version is correct
    source_version = dpkg_parsechangelog(sources_dir, ['Version'])[0]
    if not source_version.startswith(pkg_version) or \
            (len(source_version) > len(pkg_version) and
             source_version[len(pkg_version)] in '0123456789'):
        raise RuntimeError(
            ('The cloned package version from the GBP (%s) does not match ' +
             'the expected package version from the distribution file (%s)') %
            (source_version, pkg_version))

    # If a tarball already exists reuse it
    origtgz_version = pkg_version.split('-')[0]
    debian_package_name = get_os_package_name(rosdistro_name, pkg_name)
    filename = '%s_%s.orig.tar.gz' % (debian_package_name, origtgz_version)

    URL_TEMPLATE = '%s/pool/main/%s/%s/%s'
    prefix = debian_package_name[0]
    for repo in debian_repository_urls:
        url = URL_TEMPLATE % (repo, prefix, debian_package_name, filename)

        output_file = os.path.join(sources_dir, '..', filename)
        try:
            urlretrieve(url, output_file)
            print("Downloaded original tarball '%s' to '%s'" %
                  (url, output_file))
            break
        except HTTPError:
            print("No tarball found at '%s'" % url)

    # output package version for job description
    print("Package '%s' version: %s" % (pkg_name, source_version))

    # output package maintainers for job notification
    from catkin_pkg.package import parse_package
    pkg = parse_package(sources_dir)
    maintainer_emails = {m.email for m in pkg.maintainers}
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))
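
# A hypothetical call to the helper above; URLs and names are only examples.
import sys

error = get_sources(
    'https://raw.githubusercontent.com/ros/rosdistro/master/index-v4.yaml',
    'indigo', 'roscpp', 'ubuntu', 'trusty',
    '/tmp/sourcedeb/roscpp',
    ['http://repositories.ros.org/ubuntu/building'])
if error:
    print(error, file=sys.stderr)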
Esempio n. 38
def trigger_release_jobs(
        config_url, rosdistro_name, release_build_name,
        missing_only, source_only, cache_dir, cause=None, groovy_script=None,
        not_failed_only=False):
    config = get_config_index(config_url)
    build_files = get_release_build_files(config, rosdistro_name)
    build_file = build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    targets = []
    for os_name in sorted(build_file.targets.keys()):
        for os_code_name in sorted(build_file.targets[os_name].keys()):
            targets.append(Target(os_name, os_code_name, 'source'))
            if source_only:
                continue
            for arch in sorted(
                    build_file.targets[os_name][os_code_name].keys()):
                targets.append(Target(os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print('  - %s %s %s' % (os_name, os_code_name, arch))

    dist_file = get_cached_distribution(index, rosdistro_name)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    repo_data = None
    if missing_only:
        repo_data = get_debian_repo_data(
            build_file.target_repository, targets, cache_dir)

    if groovy_script is None:
        jenkins = connect(config.jenkins_url)

    pkg_names = dist_file.release_packages.keys()
    pkg_names = build_file.filter_packages(pkg_names)

    triggered_jobs = []
    skipped_jobs = []
    for pkg_name in sorted(pkg_names):
        pkg = dist_file.release_packages[pkg_name]
        repo_name = pkg.repository_name
        repo = dist_file.repositories[repo_name]
        if not repo.release_repository:
            print(("  Skipping package '%s' in repository '%s': no release " +
                   "section") % (pkg_name, repo_name))
            continue
        if not repo.release_repository.version:
            print(("  Skipping package '%s' in repository '%s': no release " +
                   "version") % (pkg_name, repo_name))
            continue
        pkg_version = repo.release_repository.version

        debian_package_name = get_debian_package_name(rosdistro_name, pkg_name)

        for target in targets:
            job_name = get_sourcedeb_job_name(
                rosdistro_name, release_build_name,
                pkg_name, target.os_name, target.os_code_name)
            if target.arch != 'source':
                # binary job can be skipped if source job was triggered
                if job_name in triggered_jobs:
                    print(("  Skipping binary jobs of '%s' since the source " +
                           "job was triggered") % job_name)
                    continue
                job_name = get_binarydeb_job_name(
                    rosdistro_name, release_build_name,
                    pkg_name, target.os_name, target.os_code_name, target.arch)

            if repo_data:
                # check if artifact is missing
                repo_index = repo_data[target]
                if debian_package_name in repo_index:
                    version = repo_index[debian_package_name]
                    version = _strip_version_suffix(version)
                    if version == pkg_version:
                        print(("  Skipping job '%s' since the artifact is " +
                               "already up-to-date") % job_name)
                        continue

            if groovy_script is None:
                success = invoke_job(jenkins, job_name, cause=cause)
            else:
                success = True
            if success:
                triggered_jobs.append(job_name)
            else:
                skipped_jobs.append(job_name)

    if groovy_script is None:
        print('Triggered %d jobs, skipped %d jobs.' %
              (len(triggered_jobs), len(skipped_jobs)))
    else:
        print("Writing groovy script '%s' to trigger %d jobs" %
              (groovy_script, len(triggered_jobs)))
        data = {
            'job_names': triggered_jobs,
            'not_failed_only': not_failed_only,
        }
        content = expand_template('release/trigger_jobs.groovy.em', data)
        with open(groovy_script, 'w') as h:
            h.write(content)
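
# A hypothetical invocation: write a groovy script that re-triggers only the
# jobs whose artifacts are missing; the config URL and names are examples.
trigger_release_jobs(
    'https://raw.githubusercontent.com/ros-infrastructure/'
    'ros_buildfarm_config/production/index.yaml',
    'indigo', 'default',
    missing_only=True, source_only=False,
    cache_dir='/tmp/debian_repo_cache',
    groovy_script='/tmp/trigger_jobs.groovy')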