Ejemplo n.º 1
0
    def test_parse_package_string(self):
        """parse_package_string() must accept both string flavors of the interpreter."""
        xml = _get_package_xml(
            os.path.join(test_data_dir, 'valid_package.xml'))[0]

        # First feed a native string.
        assert isinstance(xml, str)
        parse_package_string(xml)

        # Then the non-native type: unicode on Python 2, bytes on Python 3.
        is_py2 = sys.version_info[0] == 2
        xml = xml.decode('utf-8') if is_py2 else xml.encode('utf-8')
        if is_py2:
            assert not isinstance(xml, str)
        else:
            assert isinstance(xml, bytes)
        parse_package_string(xml)
Ejemplo n.º 2
0
    def test_parse_package_string(self):
        """Check that parse_package_string() accepts both string flavors.

        The manifest is parsed once as a native str and once as the
        non-native type (unicode on Python 2, bytes on Python 3).
        """
        filename = os.path.join(test_data_dir, 'valid_package.xml')
        xml = _get_package_xml(filename)[0]

        assert isinstance(xml, str)
        parse_package_string(xml)

        # Convert to the alternate string type for the current interpreter.
        if sys.version_info[0] == 2:
            xml = xml.decode('utf-8')
            assert not isinstance(xml, str)
        else:
            xml = xml.encode('utf-8')
            assert isinstance(xml, bytes)
        parse_package_string(xml)
Ejemplo n.º 3
0
def _get_and_parse_distribution_cache(index, rosdistro_name, pkg_names):
    """Fetch the distribution cache and parse the manifests of *pkg_names*.

    :param index: rosdistro index object
    :param rosdistro_name: name of the ROS distribution
    :param pkg_names: iterable of package names to keep from the cache
    :returns: dict mapping package name to its parsed catkin Package object
    """
    from catkin_pkg.package import parse_package_string
    from catkin_pkg.package import Dependency
    dist_cache = get_distribution_cache(index, rosdistro_name)
    # 'ros_workspace' is always included (needed for the ros2 injection below).
    pkg_names = set(['ros_workspace']).union(pkg_names)
    cached_pkgs = {
        pkg_name: parse_package_string(pkg_xml)
        for pkg_name, pkg_xml in dist_cache.release_package_xmls.items()
        if pkg_name in pkg_names
    }

    # Evaluate conditional dependencies before resolving group members,
    # since group extraction honors the evaluated conditions.
    condition_context = get_package_condition_context(index, rosdistro_name)
    for pkg in cached_pkgs.values():
        pkg.evaluate_conditions(condition_context)
    for pkg in cached_pkgs.values():
        for group_depend in pkg.group_depends:
            if group_depend.evaluated_condition is not False:
                group_depend.extract_group_members(cached_pkgs.values())

    # for ROS 2 distributions bloom injects a dependency on ros_workspace
    # into almost all packages (except its dependencies)
    # therefore the same dependency needs to be injected here
    distribution_type = index.distributions[rosdistro_name].get(
        'distribution_type')
    if distribution_type == 'ros2' and 'ros_workspace' in cached_pkgs:
        # Packages exempt from the injection: ros_workspace itself plus its
        # own direct dependencies (to avoid a circular dependency).
        no_ros_workspace_dep = set(['ros_workspace']).union(
            get_direct_dependencies('ros_workspace', cached_pkgs, pkg_names))

        for pkg_name, pkg in cached_pkgs.items():
            if pkg_name not in no_ros_workspace_dep:
                pkg.exec_depends.append(Dependency('ros_workspace'))

    return cached_pkgs
Ejemplo n.º 4
0
def git_source_manifest_provider(repo):
    """Build a SourceRepositoryCache from the package.xml files in *repo*.

    Clones repo.url at repo.version into a temporary directory, records the
    HEAD hash, and caches every package.xml found in the working tree.

    :param repo: repository descriptor with ``url`` and ``version`` attributes
    :returns: populated SourceRepositoryCache
    :raises RuntimeError: if the clone fails or a package.xml is invalid
    """
    try:
        with _temp_git_clone(repo.url, repo.version) as git_repo_path:
            # Include the git hash in our cache dictionary.
            git_hash = Git(git_repo_path).command('rev-parse',
                                                  'HEAD')['output']
            cache = SourceRepositoryCache.from_ref(git_hash)

            # Find package.xml files inside the repo.
            for package_path in find_package_paths(git_repo_path):
                if package_path == '.':
                    package_path = ''
                with open(
                        os.path.join(git_repo_path, package_path,
                                     'package.xml'), 'r') as f:
                    package_xml = f.read()
                try:
                    name = parse_package_string(package_xml).name
                except InvalidPackage as e:
                    # Include the parser's message so the failure is
                    # actionable, not just the repository URL.
                    raise RuntimeError(
                        'Unable to parse package.xml file found in %s: %s' %
                        (repo.url, e))
                cache.add(name, package_path, package_xml)

    except Exception as e:
        # Chain the original exception for debuggability.
        raise RuntimeError(
            'Unable to fetch source package.xml files: %s' % e) from e

    return cache
Ejemplo n.º 5
0
 def download_manifest(self, name):
     """Fetch and parse the release package.xml for *name*.

     Raises KeyError when no distribution is loaded; returns None when the
     distribution has no manifest for the package.
     """
     if self.dist is None:
         raise KeyError()
     if not self.quiet:
         sys.stderr.write("catkin_lint: downloading package manifest for '%s'\n" % name)
     xml = self.dist.get_release_package_xml(name)
     return parse_package_string(xml) if xml is not None else None
Ejemplo n.º 6
0
def test_build_caches():
    """Generate the distribution cache for every non-EOL distro and verify
    that all packages can be ordered topologically (no circular deps)."""
    with Fold():
        print(
            """Checking if the 'package.xml' files for all packages are fetchable.
If this fails you can run 'rosdistro_build_cache index.yaml' to perform the same check locally.
""")
        index = 'file://' + os.path.abspath(INDEX_YAML)
        index = get_index(index)
        dist_names = sorted(index.distributions.keys())
        dist_names = [n for n in dist_names if n not in eol_distro_names]

        errors = []
        caches = OrderedDict()
        for dist_name in dist_names:
            with Fold():
                try:
                    # Store only on success: assigning outside the try left
                    # 'cache' unbound on the first failure (NameError) or
                    # reused the previous distro's cache afterwards.
                    caches[dist_name] = generate_distribution_cache(
                        index, dist_name)
                except RuntimeError as e:
                    errors.append(str(e))

        # also check topological order to prevent circular dependencies
        for dist_name, cache in caches.items():
            pkgs = {}
            for pkg_name, pkg_xml in cache.release_package_xmls.items():
                pkgs[pkg_name] = parse_package_string(pkg_xml)
            try:
                topological_order_packages(pkgs)
            except RuntimeError as e:
                errors.append('%s: %s' % (dist_name, e))

        if errors:
            raise RuntimeError('\n'.join(errors))
Ejemplo n.º 7
0
def inject_status_and_maintainer(cached_release, header, counts, rows):
    """Insert 'Status' and 'Maintainer' columns at index 3 of the table.

    Mutates *header*, *counts* and every row in *rows* in place.  Status and
    maintainer cells are filled only for 'wet' packages; other rows get an
    'unknown' status marker.
    """
    from catkin_pkg.package import InvalidPackage, parse_package_string
    header[3:3] = ['Status', 'Maintainer']
    counts[3:3] = [[], []]
    for row in rows:
        status_cell = ''
        maintainer_cell = ''
        if row[2] == 'wet':
            pkg_name = row[0].split(' ')[0]
            pkg = cached_release.packages[pkg_name]
            repo = cached_release.repositories[pkg.repository_name]
            # Package-level status/description takes precedence over the
            # repository-level one.
            status = 'unknown'
            if pkg.status is not None:
                status = pkg.status
            elif repo.status is not None:
                status = repo.status
            status_description = ''
            if pkg.status_description is not None:
                status_description = pkg.status_description
            elif repo.status_description is not None:
                status_description = repo.status_description
            status_cell = '<div class="%s"%s>%s</div>' % (status, ' title="%s"' % status_description if status_description else '', status)
            pkg_xml = cached_release.get_package_xml(pkg_name)
            if pkg_xml is not None:
                try:
                    pkg = parse_package_string(pkg_xml)
                    maintainer_cell = ',<br />'.join(['<a href="mailto:%s">%s</a>' % (m.email, m.name) for m in pkg.maintainers])
                except InvalidPackage:
                    # Exception detail is not surfaced in the table cell.
                    maintainer_cell = 'invalid package.xml'
            else:
                maintainer_cell = '?'
        else:
            status_cell = '<div class="unknown">--</div>'
        row[3:3] = [status_cell, maintainer_cell]
Ejemplo n.º 8
0
    def get_dependencies(self):
        if not self.depends1:
            url = self.url
            url = url.replace(
                '.git',
                '/release/%s/%s/package.xml' % (self.name, self.version))
            url = url.replace('git://', 'https://raw.')
            print url
            retries = 5
            while not self.depends1 and retries > 0:
                package_xml = urllib.urlopen(url).read()
                append_pymodules_if_needed()
                from catkin_pkg import package
                try:
                    pkg = package.parse_package_string(package_xml)
                except package.InvalidPackage as e:
                    print "!!!! Failed to download package.xml for package %s at url %s" % (
                        self.name, url)
                    time.sleep(5.0)

                res = {}
                res['build'] = [d.name for d in pkg.build_depends]
                res['test'] = [d.name for d in pkg.test_depends]
                self.depends1 = res

        return self.depends1
def test_build_caches():
    """Generate the distribution cache for every non-EOL distro and verify
    that all packages can be ordered topologically (no circular deps)."""
    with Fold():
        print("""Checking if the 'package.xml' files for all packages are fetchable.
If this fails you can run 'rosdistro_build_cache index.yaml' to perform the same check locally.
""")
        index = 'file://' + os.path.abspath(INDEX_YAML)
        index = get_index(index)
        dist_names = sorted(index.distributions.keys())
        dist_names = [n for n in dist_names if n not in eol_distro_names]

        errors = []
        caches = OrderedDict()
        for dist_name in dist_names:
            with Fold():
                try:
                    # Store only on success: assigning outside the try left
                    # 'cache' unbound on the first failure (NameError) or
                    # reused the previous distro's cache afterwards.
                    caches[dist_name] = generate_distribution_cache(
                        index, dist_name)
                except RuntimeError as e:
                    errors.append(str(e))

        # also check topological order to prevent circular dependencies
        for dist_name, cache in caches.items():
            pkgs = {}
            print("Parsing manifest files for '%s'" % dist_name)
            for pkg_name, pkg_xml in cache.release_package_xmls.items():
                pkgs[pkg_name] = parse_package_string(pkg_xml)
            print("Order all packages in '%s' topologically" % dist_name)
            try:
                topological_order_packages(pkgs)
            except RuntimeError as e:
                errors.append('%s: %s' % (dist_name, e))

        if errors:
            raise RuntimeError('\n'.join(errors))
Ejemplo n.º 10
0
 def download_manifest(self, name):
     """Download and parse the release package manifest for *name*.

     Raises KeyError when no distribution is loaded; returns None when the
     distribution has no package.xml for the package.
     """
     if self.dist is None: raise KeyError()
     if not self.quiet:
         sys.stderr.write("catkin_lint: downloading package manifest for '%s'\n" % name)
     package_xml = self.dist.get_release_package_xml(name)
     if package_xml is None: return None
     return parse_package_string(package_xml)
Ejemplo n.º 11
0
 def update_single_project(yaml_p, s, server_cache):
     """Build a GitlabProject from the Gitlab API dict *yaml_p*.

     Reuses cached package data when the project's last-activity timestamp
     is unchanged; otherwise crawls the project for package manifests.

     NOTE(review): reads 'server_name', 'url', 'force_update', 'verbose',
     'crawl_depth' and 'timeout' from the enclosing scope — presumably this
     is a nested helper; verify against the surrounding function.

     :param yaml_p: project dict as returned by the Gitlab projects API
     :param s: requests session used for API calls
     :param server_cache: previously cached server state to diff against
     :returns: populated GitlabProject
     """
     # Look up the previously cached version of this project, if any.
     cached_p = next(
         (q for q in server_cache.projects if q.id == yaml_p["id"]), None)
     p = GitlabProject(server=server_name,
                       name=yaml_p["name_with_namespace"],
                       id=yaml_p["id"],
                       website=yaml_p["web_url"],
                       url={
                           "ssh": yaml_p["ssh_url_to_repo"],
                           "http": yaml_p["http_url_to_repo"]
                       },
                       master_branch=yaml_p.get("default_branch", "master"),
                       packages=None,
                       last_modified=date_parse(yaml_p["last_activity_at"]),
                       workspace_path=None,
                       server_path=yaml_p["path_with_namespace"])
     # Unchanged since last crawl: reuse cached packages, re-linking them
     # to the freshly built project object.
     if not force_update and cached_p is not None and cached_p.last_modified == p.last_modified:
         p.packages = cached_p.packages
         for prj in p.packages:
             prj.project = p
     else:
         if verbose:
             msg("@{cf}Updating@|: %s\n" % p.website)
         manifests = crawl_project_for_packages(s,
                                                url,
                                                p.id,
                                                "",
                                                depth=crawl_depth,
                                                timeout=timeout)
         # Index previously fetched manifests by blob id so unchanged files
         # are not downloaded again.
         old_manifests = {}
         if cached_p is not None:
             for old_p in cached_p.packages:
                 old_manifests[old_p.manifest_blob] = old_p.manifest_xml
         p.packages = []
         for path, blob in manifests:
             if blob not in old_manifests:
                 # New or changed blob: fetch the raw file content.
                 r = s.get(urljoin(
                     url, "api/v4/projects/%s/repository/blobs/%s/raw" %
                     (p.id, blob)),
                           timeout=timeout)
                 r.raise_for_status()
                 xml_data = r.content
             else:
                 xml_data = old_manifests[blob]
             filename = os.path.join(path, PACKAGE_MANIFEST_FILENAME)
             try:
                 manifest = parse_package_string(xml_data, filename)
                 if verbose:
                     msg("@{cf}Updated@|:  @{yf}%s@| [%s]\n" %
                         (manifest.name, p.name))
                 p.packages.append(
                     GitlabPackage(manifest=manifest,
                                   project=p,
                                   project_path=path,
                                   manifest_blob=blob,
                                   manifest_xml=xml_data))
             except InvalidPackage as e:
                 # Skip broken manifests but keep processing the rest.
                 warning("invalid package manifest '%s': %s\n" %
                         (filename, str(e)))
     return p
Ejemplo n.º 12
0
def inject_status_and_maintainer(cached_distribution, header, counts, rows):
    """Insert 'Status' and 'Maintainer' columns at index 4 of the table.

    Mutates *header*, *counts* and every row of *rows* in place, and also
    linkifies the package (row[0]) and repository (row[1]) cells.
    """
    from catkin_pkg.package import InvalidPackage, parse_package_string
    header[4:4] = ['Status', 'Maintainer']
    counts[4:4] = [[], []]
    for row in rows:
        status_cell = ''
        maintainer_cell = '<a>?</a>'
        # Use website url if defined, otherwise default to ros wiki
        pkg_name = row[0].split(' ')[0]
        url = 'http://wiki.ros.org/%s' % pkg_name
        repo_name = row[1]
        repo_url = None
        repo_version = None
        if row[3] == 'wet' and cached_distribution:
            pkg = cached_distribution.release_packages[pkg_name]
            repo = cached_distribution.repositories[pkg.repository_name]
            # Package-level status/description takes precedence over the
            # repository-level one.
            status = 'unknown'
            if pkg.status is not None:
                status = pkg.status
            elif repo.status is not None:
                status = repo.status
            status_description = ''
            if pkg.status_description is not None:
                status_description = pkg.status_description
            elif repo.status_description is not None:
                status_description = repo.status_description
            status_cell = '<a class="%s"%s/>' % (status, ' title="%s"' %
                                                 status_description
                                                 if status_description else '')
            pkg_xml = cached_distribution.get_release_package_xml(pkg_name)
            if pkg_xml is not None:
                try:
                    pkg = parse_package_string(pkg_xml)
                    maintainer_cell = ''.join([
                        '<a href="mailto:%s">%s</a>' % (m.email, m.name)
                        for m in pkg.maintainers
                    ])
                    # Package objects are not subscriptable; iterate the
                    # 'urls' attribute and keep the URL string, not the
                    # Url object itself.
                    for u in pkg.urls:
                        if u.type == 'website':
                            url = u.url
                            break
                except InvalidPackage:
                    maintainer_cell = '<a><b>bad package.xml</b></a>'
            if repo.source_repository:
                repo_url = repo.source_repository.url
                repo_version = repo.source_repository.version
            elif repo.doc_repository:
                repo_url = repo.doc_repository.url
                repo_version = repo.doc_repository.version
        else:
            status_cell = '<a class="unknown"/>'
        row[0] = row[0].replace(pkg_name,
                                '<a href="%s">%s</a>' % (url, pkg_name), 1)
        if repo_url:
            # Link GitHub repos directly to the tree of the released branch.
            if repo_url.startswith(
                    'https://github.com/') and repo_url.endswith(
                        '.git') and repo_version:
                repo_url = '%s/tree/%s' % (repo_url[:-4], repo_version)
            row[1] = '<a href="%s">%s</a>' % (repo_url, repo_name)
        row[4:4] = [status_cell, maintainer_cell]
Ejemplo n.º 13
0
 def __init__(self, pkg_xml, evaluate_condition_context=None):
     """Extract upstream metadata fields from a package.xml string.

     :param pkg_xml: package manifest XML as a string
     :param evaluate_condition_context: optional dict used to evaluate
       conditional dependencies before the manifest is read
     """
     self.upstream_email = None
     self.upstream_name = None
     # Fallback homepage when the manifest declares no URLs.
     self.homepage = 'https://wiki.ros.org'
     pkg = parse_package_string(pkg_xml)
     if evaluate_condition_context:
         pkg.evaluate_conditions(evaluate_condition_context)
     self.upstream_license = pkg.licenses
     self.description = pkg.description
     # Prefer an explicit 'website' URL, else fall back to the first URL.
     if 'website' in [url.type for url in pkg.urls]:
         self.homepage = [
             url.url for url in pkg.urls if url.type == 'website'
         ][0]
     elif len(pkg.urls) > 0:
         self.homepage = [
             url.url for url in pkg.urls
         ][0]
     self.longdescription = pkg.description
     # NOTE(review): assumes at least one maintainer — the package.xml
     # format requires one, but a manifest without it would raise
     # IndexError here; confirm upstream validation guarantees this.
     self.upstream_email = [
         author.email for author in pkg.maintainers
     ][0]
     self.upstream_name = [
         author.name for author in pkg.maintainers
     ][0]
     # Authors are optional; use empty strings when absent.
     self.author_email = [
         author.email for author in pkg.authors
     ][0] if pkg.authors else ''
     self.author_name = [
         author.name for author in pkg.authors
     ][0] if pkg.authors else ''
     self.member_of_groups = [
         group.name for group in pkg.member_of_groups
     ]
     self.build_type = pkg.get_build_type()
Ejemplo n.º 14
0
def _compare_package_version(distros, pkg_name):
    """Build one comparison-table row for *pkg_name* across *distros*.

    :param distros: iterable of distribution objects to compare
    :param pkg_name: package to look up in each distribution
    :returns: list of HTML cell strings, or None if the package has no
      released version in any of the distributions
    """
    from catkin_pkg.package import InvalidPackage, parse_package_string
    row = CompareRow(pkg_name)
    for distro in distros:
        repo_url = None
        version = None
        branch = None
        if pkg_name in distro.release_packages:
            pkg = distro.release_packages[pkg_name]
            row.repo_name = pkg.repository_name
            repo = distro.repositories[pkg.repository_name]

            rel_repo = repo.release_repository
            if rel_repo:
                version = rel_repo.version
                pkg_xml = distro.get_release_package_xml(pkg_name)
                if pkg_xml is not None:
                    try:
                        pkg = parse_package_string(pkg_xml)
                        for m in pkg.maintainers:
                            row.maintainers[m.name] = '<a href="mailto:%s">%s</a>' % \
                                (m.email, m.name)
                    except InvalidPackage:
                        # 'zzz' key sorts the error entry after real names.
                        row.maintainers['zzz'] = '<b>invalid package.xml in %s</b>' % \
                            distro.name

                if repo.source_repository:
                    repo_url = repo.source_repository.url
                elif repo.doc_repository:
                    repo_url = repo.doc_repository.url

            # Prefer the source branch, falling back to the doc repository.
            source_repo = repo.source_repository
            if source_repo:
                branch = source_repo.version
            else:
                # Fixed copy-paste bug: this read source_repository again,
                # which is falsy here, so the doc branch was never used.
                doc_repo = repo.doc_repository
                if doc_repo:
                    branch = doc_repo.version

        row.repo_urls.append(repo_url)
        row.versions.append(version)
        row.branches.append(branch)

    # skip if no versions available
    if not [v for v in row.versions if v]:
        return None

    data = [row.pkg_name, row.get_repo_name_with_link(), row.get_maintainers()] + \
        [v if v else '' for v in row.versions]

    labels = row.get_labels(distros)
    if len(labels) > 0:
        data[1] += ' <span class="ht">%s</span>' % ' '.join(labels)

    # div-wrap all cells for layout reasons
    for i, value in enumerate(data):
        data[i] = '<div>%s</div>' % value

    return data
Ejemplo n.º 15
0
def _compare_package_version(distros, pkg_name):
    """Build one comparison-table row for *pkg_name* across *distros*.

    :param distros: iterable of distribution objects to compare
    :param pkg_name: package to look up in each distribution
    :returns: list of HTML cell strings, or None if the package has no
      released version in any of the distributions
    """
    from catkin_pkg.package import InvalidPackage, parse_package_string
    row = CompareRow(pkg_name)
    for distro in distros:
        repo_url = None
        version = None
        branch = None
        if pkg_name in distro.release_packages:
            pkg = distro.release_packages[pkg_name]
            row.repo_name = pkg.repository_name
            repo = distro.repositories[pkg.repository_name]

            rel_repo = repo.release_repository
            if rel_repo:
                version = rel_repo.version
                pkg_xml = distro.get_release_package_xml(pkg_name)
                if pkg_xml is not None:
                    try:
                        pkg = parse_package_string(pkg_xml)
                        for m in pkg.maintainers:
                            row.maintainers[m.name] = '<a href="mailto:%s">%s</a>' % \
                                (m.email, m.name)
                    except InvalidPackage:
                        # 'zzz' key sorts the error entry after real names.
                        row.maintainers['zzz'] = '<b>invalid package.xml in %s</b>' % \
                            distro.name

                if repo.source_repository:
                    repo_url = repo.source_repository.url
                elif repo.doc_repository:
                    repo_url = repo.doc_repository.url

            # Prefer the source branch, falling back to the doc repository.
            source_repo = repo.source_repository
            if source_repo:
                branch = source_repo.version
            else:
                # Fixed copy-paste bug: this read source_repository again,
                # which is falsy here, so the doc branch was never used.
                doc_repo = repo.doc_repository
                if doc_repo:
                    branch = doc_repo.version

        row.repo_urls.append(repo_url)
        row.versions.append(version)
        row.branches.append(branch)

    # skip if no versions available
    if not [v for v in row.versions if v]:
        return None

    data = [row.pkg_name, row.get_repo_name_with_link(), row.get_maintainers()] + \
        [v if v else '' for v in row.versions]

    labels = row.get_labels(distros)
    if len(labels) > 0:
        data[1] += ' <span class="ht">%s</span>' % ' '.join(labels)

    # div-wrap all cells for layout reasons
    for i, value in enumerate(data):
        data[i] = '<div>%s</div>' % value

    return data
Ejemplo n.º 16
0
def get_manifest_from_rosdistro(package_name, distro_name):
    """
    Get the rosdistro repository data and package information.

    @param package_name: name of package or repository to get manifest information for.
    It gives package symbols precedence over repository names.
    @type  package_name: str
    @param distro_name: name of ROS distribution
    @type  distro_name: str

    @return: (manifest data, 'package'|'repository'|'metapackage', None),
    or None if the distro is unknown or no source repository exists.
    @rtype: ({str: str}, str, str)
    @raise IOError: if data cannot be loaded
    """
    data = {}
    type_ = None
    index = get_index(get_index_url())
    try:
        distribution_cache = get_cached_distribution(index, distro_name)
    except RuntimeError as runerr:
        # Python 3 exceptions have no '.message' attribute; use str().
        if str(runerr).startswith('Unknown release'):
            return None
        raise

    if package_name in distribution_cache.release_packages:
        pkg = distribution_cache.release_packages[package_name]
        pkg_xml = distribution_cache.get_release_package_xml(package_name)
        pkg_manifest = parse_package_string(pkg_xml)
        data['description'] = pkg_manifest.description
        website_url = [u.url for u in pkg_manifest.urls if u.type == 'website']
        if website_url:
            data['url'] = website_url[0]
        repo_name = pkg.repository_name
        # A <metapackage/> export tag marks the package as a metapackage.
        meta_export = [exp for exp in pkg_manifest.exports if exp.tagname == 'metapackage']
        if meta_export:
            type_ = 'metapackage'
        else:
            type_ = 'package'
    else:
        # Fall back to treating the name as a repository name.
        repo_name = package_name
        type_ = 'repository'
    data['repo_name'] = repo_name
    if repo_name in distribution_cache.repositories:
        repo = distribution_cache.repositories[repo_name].release_repository
        if repo:
            data['packages'] = repo.package_names

    if repo_name in distribution_cache.repositories:
        repo = distribution_cache.repositories[repo_name].source_repository
        if not repo:
            return None
        data['vcs'] = repo.type
        data['vcs_uri'] = repo.url
        data['vcs_version'] = repo.version
    else:
        return None

    return (data, type_, None)
Ejemplo n.º 17
0
    def test_parse_package_string(self):
        """Verify parse_package_string() accepts valid input in both string
        flavors and rejects malformed manifests with InvalidPackage."""
        filename = os.path.join(test_data_dir, 'valid_package.xml')
        xml = _get_package_xml(filename)[0]

        assert isinstance(xml, str)
        parse_package_string(xml)

        # Re-parse using the non-native string type for this interpreter
        # (unicode on Python 2, bytes on Python 3).
        if sys.version_info[0] == 2:
            xml = xml.decode('utf-8')
            assert not isinstance(xml, str)
        else:
            xml = xml.encode('utf-8')
            assert isinstance(xml, bytes)
        parse_package_string(xml)

        # An unterminated attribute quote must be rejected.
        xml_string = """
<package>
  <name>valid_package</name>
  <version>0.1.0</version>
  <description>valid_package description</description>
  <maintainer email="*****@*****.**>Forgotten end quote</maintainer>
  <license>BSD</license>
</package>
"""
        self.assertRaises(InvalidPackage, parse_package_string, xml_string)

        # An unescaped '<' inside element text must be rejected.
        xml_string = """
<package>
  <name>valid_package</name>
  <version>0.1.0</version>
  <description>Invalid < character in description</description>
  <maintainer email="*****@*****.**">user</maintainer>
  <license>BSD</license>
</package>
"""
        self.assertRaises(InvalidPackage, parse_package_string, xml_string)
        # Trailing content after the root element must be rejected.
        xml_string = """
<package>
  <name>valid_package</name>
  <version>0.1.0</version>
  <description>valid_package description</description>
  <maintainer email="*****@*****.**">user</maintainer>
  <license>BSD</license>
</package><extra>Unwanted junk</extra>
"""
        self.assertRaises(InvalidPackage, parse_package_string, xml_string)
Ejemplo n.º 18
0
def _get_doc_job_config(
        config, config_url, rosdistro_name, doc_build_name,
        build_file, os_name, os_code_name, arch, doc_repo_spec,
        repo_name, dist_cache=None, is_disabled=False):
    """Expand the Jenkins doc-job template for one repository/platform combo.

    :param config: build-farm configuration (provides rosdistro_index_url)
    :param config_url: URL of the build-farm configuration
    :param rosdistro_name: name of the ROS distribution
    :param doc_build_name: name of the doc build file
    :param build_file: doc build file with notification and Jenkins settings
    :param os_name: target OS name
    :param os_code_name: target OS code name
    :param arch: target architecture
    :param doc_repo_spec: spec of the repository to document
    :param repo_name: repository name used to look up released packages
    :param dist_cache: optional distribution cache used to collect
      maintainer e-mails from released package manifests
    :param is_disabled: whether the generated job is disabled
    :returns: the expanded job configuration XML as a string
    """
    template_name = 'doc/doc_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    maintainer_emails = set([])
    if build_file.notify_maintainers and dist_cache and repo_name:
        # add maintainers listed in latest release to recipients
        repo = dist_cache.distribution_file.repositories[repo_name]
        if repo.release_repository:
            for pkg_name in repo.release_repository.package_names:
                # Skip packages without a cached manifest.
                if pkg_name not in dist_cache.release_package_xmls:
                    continue
                pkg_xml = dist_cache.release_package_xmls[pkg_name]
                pkg = parse_package_string(pkg_xml)
                for m in pkg.maintainers:
                    maintainer_emails.add(m.email)

    job_data = {
        'github_url': get_github_project_url(doc_repo_spec.url),

        'job_priority': build_file.jenkins_job_priority,
        'node_label': build_file.jenkins_job_label,

        'doc_repo_spec': doc_repo_spec,

        'disabled': is_disabled,

        'github_orgunit': git_github_orgunit(doc_repo_spec.url),

        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'config_url': config_url,
        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'doc_build_name': doc_build_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'repository_args': repository_args,

        'notify_emails': build_file.notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,
        'notify_committers': build_file.notify_committers,

        'timeout_minutes': build_file.jenkins_job_timeout,

        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Ejemplo n.º 19
0
def inject_status_and_maintainer(cached_distribution, header, counts, rows):
    """Insert 'Status' and 'Maintainer' columns at index 4 of the table.

    Mutates *header*, *counts* and every row of *rows* in place, and also
    linkifies the package (row[0]) and repository (row[1]) cells.
    """
    from catkin_pkg.package import InvalidPackage, parse_package_string

    header[4:4] = ["Status", "Maintainer"]
    counts[4:4] = [[], []]
    for row in rows:
        status_cell = ""
        maintainer_cell = "<a>?</a>"
        # Use website url if defined, otherwise default to ros wiki
        pkg_name = row[0].split(" ")[0]
        url = "http://wiki.ros.org/%s" % pkg_name
        repo_name = row[1]
        repo_url = None
        repo_version = None
        if row[3] == "wet" and cached_distribution:
            pkg = cached_distribution.release_packages[pkg_name]
            repo = cached_distribution.repositories[pkg.repository_name]
            # Package-level status/description takes precedence over the
            # repository-level one.
            status = "unknown"
            if pkg.status is not None:
                status = pkg.status
            elif repo.status is not None:
                status = repo.status
            status_description = ""
            if pkg.status_description is not None:
                status_description = pkg.status_description
            elif repo.status_description is not None:
                status_description = repo.status_description
            status_cell = '<a class="%s"%s/>' % (
                status,
                ' title="%s"' % status_description if status_description else "",
            )
            pkg_xml = cached_distribution.get_release_package_xml(pkg_name)
            if pkg_xml is not None:
                try:
                    pkg = parse_package_string(pkg_xml)
                    maintainer_cell = "".join(
                        ['<a href="mailto:%s">%s</a>' % (m.email, m.name) for m in pkg.maintainers]
                    )
                    # Package objects are not subscriptable; iterate the
                    # 'urls' attribute and keep the URL string, not the
                    # Url object itself.
                    for u in pkg.urls:
                        if u.type == "website":
                            url = u.url
                            break
                except InvalidPackage:
                    maintainer_cell = "<a><b>bad package.xml</b></a>"
            if repo.source_repository:
                repo_url = repo.source_repository.url
                repo_version = repo.source_repository.version
            elif repo.doc_repository:
                repo_url = repo.doc_repository.url
                repo_version = repo.doc_repository.version
        else:
            status_cell = '<a class="unknown"/>'
        row[0] = row[0].replace(pkg_name, '<a href="%s">%s</a>' % (url, pkg_name), 1)
        if repo_url:
            # Link GitHub repos directly to the tree of the released branch.
            if repo_url.startswith("https://github.com/") and repo_url.endswith(".git") and repo_version:
                repo_url = "%s/tree/%s" % (repo_url[:-4], repo_version)
            row[1] = '<a href="%s">%s</a>' % (repo_url, repo_name)
        row[4:4] = [status_cell, maintainer_cell]
Ejemplo n.º 20
0
 def _get_package(self, pkg_name):
     """Parse and memoize the package manifest for *pkg_name*.

     Re-raises InvalidPackage with the package name prefixed for context.
     """
     try:
         return self._packages[pkg_name]
     except KeyError:
         pass
     xml = self._release_instance.get_package_xml(pkg_name)
     try:
         parsed = parse_package_string(xml)
     except InvalidPackage as err:
         raise InvalidPackage(pkg_name + ': %s' % str(err))
     self._packages[pkg_name] = parsed
     return parsed
Ejemplo n.º 21
0
 def _get_package(self, pkg_name):
     """Return the parsed manifest for *pkg_name*, caching the result.

     :raises InvalidPackage: with the package name prefixed when the
       manifest cannot be parsed
     """
     if pkg_name not in self._packages:
         pkg_xml = self._release_instance.get_package_xml(pkg_name)
         try:
             pkg = parse_package_string(pkg_xml)
         except InvalidPackage as e:
             # Prefix the package name so the caller knows which one failed.
             raise InvalidPackage(pkg_name + ': %s' % str(e))
         self._packages[pkg_name] = pkg
     return self._packages[pkg_name]
Ejemplo n.º 22
0
def _get_doc_job_config(config,
                        config_url,
                        rosdistro_name,
                        doc_build_name,
                        build_file,
                        os_name,
                        os_code_name,
                        arch,
                        doc_repo_spec,
                        repo_name,
                        dist_cache=None,
                        is_disabled=False):
    """Expand the doc job template into a Jenkins job configuration string.

    When maintainer notification is enabled, maintainer e-mails are gathered
    from the cached release manifests of the repository's packages.
    """
    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    maintainer_emails = set()
    if build_file.notify_maintainers and dist_cache and repo_name:
        # add maintainers listed in latest release to recipients
        release_repo = \
            dist_cache.distribution_file.repositories[repo_name].release_repository
        if release_repo:
            for pkg_name in release_repo.package_names:
                if pkg_name in dist_cache.release_package_xmls:
                    pkg = parse_package_string(
                        dist_cache.release_package_xmls[pkg_name])
                    maintainer_emails.update(m.email for m in pkg.maintainers)

    # data passed to the empy template
    job_data = {
        'github_url': get_github_project_url(doc_repo_spec.url),
        'job_priority': build_file.jenkins_job_priority,
        'node_label': build_file.jenkins_job_label,
        'doc_repo_spec': doc_repo_spec,
        'disabled': is_disabled,
        'github_orgunit': git_github_orgunit(doc_repo_spec.url),
        'ros_buildfarm_repository': get_repository(),
        'script_generating_key_files': script_generating_key_files,
        'config_url': config_url,
        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'doc_build_name': doc_build_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'repository_args': repository_args,
        'notify_emails': build_file.notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,
        'notify_committers': build_file.notify_committers,
        'timeout_minutes': build_file.jenkins_job_timeout,
        'credential_id': build_file.upload_credential_id,
    }
    return expand_template('doc/doc_job.xml.em', job_data)
Ejemplo n.º 23
0
def _get_maintainer_emails(dist_cache, pkg_name):
    """Collect maintainer e-mail addresses from the cached release manifest."""
    # add maintainers listed in latest release to recipients
    if not dist_cache or pkg_name not in dist_cache.release_package_xmls:
        return set()
    from catkin_pkg.package import parse_package_string
    manifest = parse_package_string(dist_cache.release_package_xmls[pkg_name])
    return {m.email for m in manifest.maintainers}
Ejemplo n.º 24
0
def _get_maintainer_emails(dist_cache, pkg_name):
    """Return the set of maintainer e-mails from the cached release package.xml, if any."""
    emails = set()
    # add maintainers listed in latest release to recipients
    if dist_cache and pkg_name in dist_cache.release_package_xmls:
        from catkin_pkg.package import parse_package_string
        pkg = parse_package_string(dist_cache.release_package_xmls[pkg_name])
        emails.update(m.email for m in pkg.maintainers)
    return emails
Ejemplo n.º 25
0
def github_source_manifest_provider(repo):
    """Build a SourceRepositoryCache for *repo* via the GitHub trees API, without cloning."""
    server, path = repo.get_url_parts()
    if not server.endswith('github.com'):
        logger.debug('Skip non-github url "%s"' % repo.url)
        raise RuntimeError('can not handle non github urls')

    tree_url = 'https://api.github.com/repos/%s/git/trees/%s?recursive=1' % (
        path, repo.version)
    req = Request(tree_url)
    if GITHUB_USER and GITHUB_PASSWORD:
        logger.debug(
            '- using http basic auth from supplied environment variables.')
        credential_pair = '%s:%s' % (GITHUB_USER, GITHUB_PASSWORD)
        authheader = 'Basic %s' % base64.b64encode(
            credential_pair.encode()).decode()
        req.add_header('Authorization', authheader)
    try:
        tree_json = json.loads(_get_url_contents(req))
        logger.debug('- load repo tree from %s' % tree_url)
    except URLError as e:
        raise RuntimeError('Unable to fetch JSON tree from %s: %s' %
                           (tree_url, e))

    if tree_json['truncated']:
        raise RuntimeError('JSON tree is truncated, must perform full clone.')

    # directories that directly contain a package.xml
    package_xml_paths = set(
        os.path.dirname(entry['path']) for entry in tree_json['tree']
        if entry['path'].split('/')[-1] == 'package.xml')

    # Filter out ones that are inside other packages (eg, part of tests)
    def package_xml_in_parent(candidate):
        if candidate == '':
            return True
        parent = candidate
        while True:
            parent = os.path.dirname(parent)
            if parent in package_xml_paths:
                return False
            if parent == '':
                return True

    top_level_paths = [
        p for p in package_xml_paths if package_xml_in_parent(p)]

    cache = SourceRepositoryCache.from_ref(tree_json['sha'])
    for package_xml_path in top_level_paths:
        url = 'https://raw.githubusercontent.com/%s/%s/%s' % \
            (path, cache.ref(),
             package_xml_path + '/package.xml' if package_xml_path else 'package.xml')
        logger.debug('- load package.xml from %s' % url)
        package_xml = _get_url_contents(url)
        cache.add(parse_package_string(package_xml).name,
                  package_xml_path, package_xml)

    return cache
Ejemplo n.º 26
0
def get_packages(workspace, rd_obj, skip_update=False):
    """Parse the package.xml of every package known to *rd_obj*.

    Invalid manifests are reported and skipped; the remaining parsed
    packages are returned keyed by their manifest name.
    """
    packages = {}
    for pkg_name in sorted(rd_obj.get_package_checkout_info()):
        xml_string = rd_obj.get_package_xml(pkg_name)
        try:
            pkg = parse_package_string(xml_string)
        except InvalidPackage as ex:
            print("package.xml for '%s' is invalid. Error: %s" % (pkg_name, ex))
        else:
            packages[pkg.name] = pkg
    return packages
Ejemplo n.º 27
0
def get_package_dependencies(package_xml):
    """Map dependency categories to dependency names parsed from *package_xml*."""
    # make catkin_pkg importable on systems where it lives in pymodules
    pymodules = os.path.abspath("/usr/lib/pymodules/python2.7")
    if pymodules not in sys.path:
        sys.path.append(pymodules)
    from catkin_pkg import package as catkin_pkg

    pkg = catkin_pkg.parse_package_string(package_xml)
    return {
        'build': [d.name for d in pkg.build_depends],
        'buildtool': [d.name for d in pkg.buildtool_depends],
        'test': [d.name for d in pkg.test_depends],
        'run': [d.name for d in pkg.run_depends],
    }
Ejemplo n.º 28
0
def get_package_dependencies(package_xml):
    """Return a dict of build/buildtool/test/run dependency names for *package_xml*."""
    # ensure catkin_pkg from the pymodules location is importable
    if not os.path.abspath("/usr/lib/pymodules/python2.7") in sys.path:
        sys.path.append("/usr/lib/pymodules/python2.7")
    from catkin_pkg import package as catkin_pkg

    pkg = catkin_pkg.parse_package_string(package_xml)
    categories = {'build': pkg.build_depends,
                  'buildtool': pkg.buildtool_depends,
                  'test': pkg.test_depends,
                  'run': pkg.run_depends}
    return {key: [dep.name for dep in deps]
            for key, deps in categories.items()}
Ejemplo n.º 29
0
 def _get_package(self, pkg_name):
     """Fetch, parse, and cache the source manifest for *pkg_name*."""
     if pkg_name in self._packages:
         return self._packages[pkg_name]
     src_pkg = self._distribution_instance.source_packages[pkg_name]
     repo = self._distribution_instance.repositories[src_pkg.repository_name].source_repository
     assert repo is not None, "Package '%s' in repository '%s' is missing a source entry." % (pkg_name, repo.name)
     pkg_xml = self._distribution_instance.get_source_package_xml(pkg_name)
     try:
         parsed = parse_package_string(pkg_xml)
     except InvalidPackage as e:
         # prefix the package name so the failure is attributable
         raise InvalidPackage(pkg_name + ': %s' % str(e))
     self._packages[pkg_name] = parsed
     return parsed
Ejemplo n.º 30
0
def _maintainers(distro, pkg_name):
    """Yield ``(name, email)`` for each maintainer of *pkg_name*.

    Yields nothing when the manifest is missing or cannot be parsed.
    """
    from catkin_pkg.package import InvalidPackage, parse_package_string
    pkg_xml = distro.get_release_package_xml(pkg_name)
    if pkg_xml is None:
        return
    try:
        pkg = parse_package_string(pkg_xml)
    except InvalidPackage:
        return
    for maintainer in pkg.maintainers:
        yield maintainer.name, maintainer.email
Ejemplo n.º 31
0
def get_package_manifests(dist):
    """Parse the release package.xml of every package in *dist*.

    Packages whose manifest is missing or fails to parse are silently
    skipped.

    :param dist: a rosdistro distribution object
    :returns: dict mapping package name to parsed manifest
    """
    # hoisted out of the loop: the import is loop-invariant and was
    # previously re-executed for every package
    from catkin_pkg.package import InvalidPackage, parse_package_string
    cached_pkgs = {}
    for pkg_name in dist.release_packages.keys():
        pkg_xml = dist.get_release_package_xml(pkg_name)
        if pkg_xml is None:
            continue
        try:
            pkg_manifest = parse_package_string(pkg_xml)
        except InvalidPackage:
            # skip unparseable manifests instead of aborting the whole scan
            continue
        cached_pkgs[pkg_name] = pkg_manifest
    return cached_pkgs
def get_packages(workspace, rd_obj, skip_update=False):
    """Return a dict of parsed package manifests keyed by package name."""
    checkout_info = rd_obj.get_package_checkout_info()
    packages = {}
    for pkg_name in sorted(checkout_info):
        try:
            parsed = parse_package_string(rd_obj.get_package_xml(pkg_name))
        except InvalidPackage as ex:
            print("package.xml for '%s' is invalid.  Error: %s" %
                  (pkg_name, ex))
        else:
            packages[parsed.name] = parsed
    return packages
Ejemplo n.º 33
0
 def _get_package(self, pkg_name):
     """Fetch, parse, and cache the release manifest for *pkg_name*."""
     if pkg_name in self._packages:
         return self._packages[pkg_name]
     rel_pkg = self._distribution_instance.release_packages[pkg_name]
     repo = self._distribution_instance.repositories[rel_pkg.repository_name].release_repository
     assert repo is not None and repo.version is not None, "Package '%s' in repository '%s' has no version set" % (pkg_name, repo.name)
     assert 'release' in repo.tags, "Package '%s' in repository '%s' has no 'release' tag set" % (pkg_name, repo.name)
     pkg_xml = self._distribution_instance.get_release_package_xml(pkg_name)
     try:
         parsed = parse_package_string(pkg_xml)
     except InvalidPackage as e:
         # prefix the package name so the failure is attributable
         raise InvalidPackage(pkg_name + ': %s' % str(e))
     self._packages[pkg_name] = parsed
     return parsed
Ejemplo n.º 34
0
def _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, source_repo_spec,
        repo_name, dist_cache=None):
    """Expand the devel job template into a Jenkins job configuration string.

    When maintainer notification is enabled, maintainer e-mails are gathered
    from the cached release manifests of the repository's packages.
    """
    template_name = 'devel/devel_job.xml.em'
    now = datetime.utcnow()
    now_str = now.strftime('%Y-%m-%dT%H:%M:%SZ')

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config, build_file)

    maintainer_emails = set([])
    if build_file.notify_maintainers and dist_cache:
        # add maintainers listed in latest release to recipients
        # NOTE(review): repo_name is looked up without a membership check --
        # presumably callers guarantee it exists in the distribution file;
        # confirm, otherwise this raises KeyError
        repo = dist_cache.distribution_file.repositories[repo_name]
        if repo.release_repository:
            for pkg_name in repo.release_repository.package_names:
                if pkg_name not in dist_cache.release_package_xmls:
                    continue
                pkg_xml = dist_cache.release_package_xmls[pkg_name]
                pkg = parse_package_string(pkg_xml)
                for m in pkg.maintainers:
                    maintainer_emails.add(m.email)

    # data passed to the empy template
    job_data = {
        'template_name': template_name,
        'now_str': now_str,

        'job_priority': build_file.jenkins_job_priority,

        'source_repo_spec': source_repo_spec,

        'script_generating_key_files': script_generating_key_files,

        'ros_buildfarm_repo': config.ros_buildfarm_repo,

        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'source_build_name': source_build_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'repository_args': repository_args,

        'notify_emails': set(config.notify_emails + build_file.notify_emails),
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,
        'notify_committers': build_file.notify_committers,

        'timeout_minutes': build_file.jenkins_job_timeout,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Ejemplo n.º 35
0
def _get_direct_dependencies(pkg_name, dist_cache, pkg_names):
    """Return *pkg_name*'s direct dependency names restricted to *pkg_names*.

    Returns None when no cached manifest exists for the package.
    """
    from catkin_pkg.package import parse_package_string
    if pkg_name not in dist_cache.release_package_xmls:
        return None
    pkg = parse_package_string(dist_cache.release_package_xmls[pkg_name])
    all_depends = (pkg.buildtool_depends + pkg.build_depends +
                   pkg.buildtool_export_depends + pkg.build_export_depends +
                   pkg.exec_depends + pkg.test_depends)
    return {dep.name for dep in all_depends if dep.name in pkg_names}
Ejemplo n.º 36
0
def generate_distribution_cache(index,
                                dist_name,
                                preclean=False,
                                ignore_local=False,
                                debug=False):
    """Fetch all release package manifests for *dist_name* and validate them.

    Every released package's package.xml must be fetchable, parseable, and
    its version must match the repository version (ignoring the debian
    increment).

    :returns: the populated distribution cache
    :raises RuntimeError: listing every package that failed validation
    """
    dist, cache = _get_cached_distribution(index,
                                           dist_name,
                                           preclean=preclean,
                                           ignore_local=ignore_local)
    # fetch all manifests
    print('- fetch missing manifests')
    errors = []
    for pkg_name in sorted(dist.release_packages.keys()):
        repo = dist.repositories[
            dist.release_packages[pkg_name].repository_name].release_repository
        if repo.version is None:
            if debug:
                print('  - skip "%s" since it has no version' % pkg_name)
            continue
        if debug:
            print('  - fetch "%s"' % pkg_name)
        else:
            # progress dots when not in debug mode
            sys.stdout.write('.')
            sys.stdout.flush()
        # check that package.xml is fetchable
        package_xml = dist.get_release_package_xml(pkg_name)
        if not package_xml:
            errors.append('%s: missing package.xml file for package "%s"' %
                          (dist_name, pkg_name))
            continue
        # check that package.xml is parseable
        try:
            pkg = parse_package_string(package_xml)
        except InvalidPackage as e:
            errors.append('%s: invalid package.xml file for package "%s": %s' %
                          (dist_name, pkg_name, e))
            continue
        # check that version numbers match (at least without deb inc)
        # raw string so \d is a regex digit class rather than an invalid
        # (deprecated) string escape sequence
        if not re.match(r'^%s-\d+$' % re.escape(pkg.version), repo.version):
            errors.append(
                '%s: different version in package.xml (%s) for package "%s" than for the repository (%s) (after removing the debian increment)'
                % (dist_name, pkg.version, pkg_name, repo.version))

    if not debug:
        print('')

    if errors:
        raise RuntimeError('\n'.join(errors))

    return cache
Ejemplo n.º 37
0
def github_source_manifest_provider(repo):
    """Build a SourceRepositoryCache for *repo* via the GitHub trees API.

    Fetches the full recursive git tree for the pinned version, locates
    every top-level package.xml, downloads each one and registers it in
    the cache.

    :raises RuntimeError: for non-GitHub URLs, fetch failures, or a
      truncated tree
    """
    server, path = repo.get_url_parts()
    if not server.endswith('github.com'):
        logger.debug('Skip non-github url "%s"' % repo.url)
        raise RuntimeError('can not handle non github urls')

    tree_url = 'https://api.github.com/repos/%s/git/trees/%s?recursive=1' % (path, repo.version)
    req = Request(tree_url)
    if GITHUB_USER and GITHUB_PASSWORD:
        logger.debug('- using http basic auth from supplied environment variables.')
        # base64.b64encode requires a bytes-like argument on Python 3;
        # encode the credential pair and decode the result back to str
        credential_pair = '%s:%s' % (GITHUB_USER, GITHUB_PASSWORD)
        authheader = 'Basic %s' % base64.b64encode(
            credential_pair.encode()).decode()
        req.add_header('Authorization', authheader)
    try:
        tree_json = json.loads(_get_url_contents(req))
        logger.debug('- load repo tree from %s' % tree_url)
    except URLError as e:
        raise RuntimeError('Unable to fetch JSON tree from %s: %s' % (tree_url, e))

    if tree_json['truncated']:
        raise RuntimeError('JSON tree is truncated, must perform full clone.')

    # directories that directly contain a package.xml
    package_xml_paths = set()
    for obj in tree_json['tree']:
        if obj['path'].split('/')[-1] == 'package.xml':
            package_xml_paths.add(os.path.dirname(obj['path']))

    # Filter out ones that are inside other packages (eg, part of tests)
    def package_xml_in_parent(path):
        if path == '':
            return True
        parent = path
        while True:
            parent = os.path.dirname(parent)
            if parent in package_xml_paths:
                return False
            if parent == '':
                return True
    package_xml_paths = list(filter(package_xml_in_parent, package_xml_paths))

    cache = SourceRepositoryCache.from_ref(tree_json['sha'])
    for package_xml_path in package_xml_paths:
        url = 'https://raw.githubusercontent.com/%s/%s/%s' % \
            (path, cache.ref(), package_xml_path + '/package.xml' if package_xml_path else 'package.xml')
        logger.debug('- load package.xml from %s' % url)
        package_xml = _get_url_contents(url)
        name = parse_package_string(package_xml).name
        cache.add(name, package_xml_path, package_xml)

    return cache
Ejemplo n.º 38
0
def get_metadata(distribution, pkg_name):
    """Collect display metadata for *pkg_name* from its release manifest.

    Combines package-level and repository-level maintainer status with
    the parsed package.xml content into a flat dict.
    """
    rel_pkg = distribution.release_packages[pkg_name]
    repo_name = rel_pkg.repository_name
    repository = distribution.repositories[repo_name]

    xml = distribution.get_release_package_xml(pkg_name)
    pkg = parse_package_string(xml)

    data = {}
    data['repo_name'] = repo_name
    data['timestamp'] = time.time()

    pkg_status = None
    pkg_status_description = None
    # package level status information
    if pkg.name in repository.status_per_package:
        pkg_status_data = repository.status_per_package[pkg.name]
        pkg_status = pkg_status_data.get('status', None)
        pkg_status_description = pkg_status_data.get('status_description',
                                                     None)
    # repository level status information
    # (used only as a fallback when the package level did not provide it)
    if pkg_status is None:
        pkg_status = repository.status
    if pkg_status_description is None:
        pkg_status_description = repository.status_description
    if pkg_status is not None:
        data['maintainer_status'] = pkg_status
    if pkg_status_description is not None:
        data['maintainer_status_description'] = pkg_status_description

    data['description'] = pkg.description
    data['maintainers'] = ', '.join([str(m) for m in pkg.maintainers])
    data['license'] = ', '.join(pkg.licenses)

    # only the first URL tagged as 'website' is exposed
    website_urls = [u.url for u in pkg.urls if u.type == 'website']
    if website_urls:
        data['url'] = website_urls[0]

    data['authors'] = ', '.join([str(a) for a in pkg.authors])

    depends = pkg.build_depends + pkg.buildtool_depends + pkg.run_depends
    data['depends'] = sorted(set([dep.name for dep in depends]))

    # a metapackage is identified by a 'metapackage' export tag
    is_metapackage = 'metapackage' in [e.tagname for e in pkg.exports]
    data['package_type'] = 'metapackage' if is_metapackage else 'package'
    if is_metapackage:
        data['packages'] = sorted([dep.name for dep in pkg.run_depends])

    return data
Ejemplo n.º 39
0
def get_metadata(distribution, pkg_name):
    """Collect display metadata for *pkg_name* from its release manifest.

    Merges package-level and repository-level maintainer status and the
    parsed package.xml content into a flat dict.
    """
    repo_name = distribution.release_packages[pkg_name].repository_name
    repository = distribution.repositories[repo_name]

    pkg = parse_package_string(
        distribution.get_release_package_xml(pkg_name))

    data = {}
    data['repo_name'] = repo_name
    data['timestamp'] = time.time()

    # package level status takes precedence over repository level status
    status = None
    status_description = None
    if pkg.name in repository.status_per_package:
        per_pkg = repository.status_per_package[pkg.name]
        status = per_pkg.get('status', None)
        status_description = per_pkg.get(
            'status_description', None)
    if status is None:
        status = repository.status
    if status_description is None:
        status_description = repository.status_description
    if status is not None:
        data['maintainer_status'] = status
    if status_description is not None:
        data['maintainer_status_description'] = status_description

    data['description'] = pkg.description
    data['maintainers'] = ', '.join(str(m) for m in pkg.maintainers)
    data['license'] = ', '.join(pkg.licenses)

    # expose only the first URL tagged as 'website'
    for u in pkg.urls:
        if u.type == 'website':
            data['url'] = u.url
            break

    data['authors'] = ', '.join(str(a) for a in pkg.authors)

    all_depends = pkg.build_depends + pkg.buildtool_depends + pkg.run_depends
    data['depends'] = sorted({dep.name for dep in all_depends})

    # a metapackage is identified by a 'metapackage' export tag
    if 'metapackage' in [e.tagname for e in pkg.exports]:
        data['package_type'] = 'metapackage'
        data['packages'] = sorted(dep.name for dep in pkg.run_depends)
    else:
        data['package_type'] = 'package'

    return data
Ejemplo n.º 40
0
def get_maintainer_emails(dist_cache, repo_name):
    """Collect maintainer e-mails from the latest release of every package in *repo_name*."""
    emails = set()
    if not dist_cache or repo_name not in dist_cache.distribution_file.repositories:
        return emails
    from catkin_pkg.package import parse_package_string
    # add maintainers listed in latest release to recipients
    release_repo = \
        dist_cache.distribution_file.repositories[repo_name].release_repository
    if release_repo:
        for pkg_name in release_repo.package_names:
            if pkg_name not in dist_cache.release_package_xmls:
                continue
            pkg = parse_package_string(
                dist_cache.release_package_xmls[pkg_name])
            emails.update(m.email for m in pkg.maintainers)
    return emails
Ejemplo n.º 41
0
 def _get_package(self, pkg_name):
     """Return the cached source manifest for *pkg_name*, parsing it on demand."""
     if pkg_name not in self._packages:
         dist = self._distribution_instance
         repo = dist.repositories[
             dist.source_packages[pkg_name].repository_name].source_repository
         assert repo is not None, "Package '%s' in repository '%s' is missing a source entry." % (
             pkg_name, repo.name)
         try:
             self._packages[pkg_name] = parse_package_string(
                 dist.get_source_package_xml(pkg_name))
         except InvalidPackage as e:
             # prefix the package name so the failure is attributable
             raise InvalidPackage(pkg_name + ': %s' % str(e))
     return self._packages[pkg_name]
Ejemplo n.º 42
0
def get_maintainer_emails(dist_cache, repo_name):
    """Return the set of maintainer e-mail addresses for repository *repo_name*."""
    if not (dist_cache and repo_name in dist_cache.distribution_file.repositories):
        return set()
    from catkin_pkg.package import parse_package_string
    # add maintainers listed in latest release to recipients
    repo = dist_cache.distribution_file.repositories[repo_name]
    if not repo.release_repository:
        return set()
    maintainer_emails = set()
    for pkg_name in repo.release_repository.package_names:
        if pkg_name in dist_cache.release_package_xmls:
            manifest = parse_package_string(
                dist_cache.release_package_xmls[pkg_name])
            for maintainer in manifest.maintainers:
                maintainer_emails.add(maintainer.email)
    return maintainer_emails
Ejemplo n.º 43
0
def _get_direct_dependencies(pkg_name, dist_cache, pkg_names):
    """Return the names of *pkg_name*'s direct dependencies within *pkg_names*.

    Returns None when no cached manifest is available for the package.
    """
    from catkin_pkg.package import parse_package_string
    if pkg_name not in dist_cache.release_package_xmls:
        return None
    pkg = parse_package_string(dist_cache.release_package_xmls[pkg_name])
    dependency_groups = (
        pkg.buildtool_depends, pkg.build_depends,
        pkg.buildtool_export_depends, pkg.build_export_depends,
        pkg.exec_depends, pkg.test_depends)
    return {dep.name
            for group in dependency_groups
            for dep in group
            if dep.name in pkg_names}
Ejemplo n.º 44
0
def _get_direct_dependencies(pkg_name, dist_cache, pkg_names):
    """Return *pkg_name*'s direct dependency names restricted to *pkg_names* (None if uncached)."""
    from catkin_pkg.package import parse_package_string
    if pkg_name not in dist_cache.release_package_xmls:
        return None
    pkg = parse_package_string(dist_cache.release_package_xmls[pkg_name])
    # test dependencies are treated as build dependencies by bloom
    # so we need them here to ensure that all dependencies are available
    # before starting a build
    candidates = (pkg.buildtool_depends + pkg.build_depends +
                  pkg.buildtool_export_depends + pkg.build_export_depends +
                  pkg.exec_depends + pkg.test_depends)
    depends = set()
    for dep in candidates:
        if dep.name in pkg_names:
            depends.add(dep.name)
    return depends
Ejemplo n.º 45
0
def get_packages(workspace, rd_obj, skip_update=False):
    """Check out and parse the package.xml of every package in *rd_obj*.

    Uses a VcsFileCache rooted at *workspace*; each repository URL is only
    updated once per run.  Raises RuntimeError listing all packages whose
    package.xml could not be fetched or parsed.
    """
    packages = {}

    vcs_cache = VcsFileCache(workspace, skip_update=skip_update)

    errors = []
    urls_updated = set([])
    checkout_info = rd_obj.get_package_checkout_info()
    for pkg_name in sorted(checkout_info.keys()):
        pkg_info = checkout_info[pkg_name]
        url = pkg_info['url']
        print("Get '%s' from '%s' from tag '%s'" % (pkg_name, url, pkg_info['full_version']))
        # skip re-updating a repository URL that was already updated this run
        url_updated_before = url in urls_updated
        urls_updated.add(url)
        vcs_cache._skip_update = skip_update or url_updated_before
        try:
            try:
                pkg_string = vcs_cache.get_file_contents('git',
                                                         url,
                                                         pkg_info['full_version'],
                                                         'package.xml')
            except VcsError as ex:
                # fall back to the plain version tag if the full tag is missing
                print("  trying tag '%s'" % pkg_info['version'])
                pkg_string = vcs_cache.get_file_contents('git',
                                                         url,
                                                         pkg_info['version'],
                                                         'package.xml')

            try:
                p = parse_package_string(pkg_string)
                packages[p.name] = p
            except InvalidPackage as ex:
                print("package.xml for '%s' is invalid.  Error: %s" % (pkg_name, ex))
                errors.append(pkg_name)
        except VcsError as ex:
            print("Failed to get package.xml for '%s'.  Error: %s" % (pkg_name, ex))
            errors.append(pkg_name)

        if not vcs_cache._skip_update:
            # throttle to avoid hammering the hosting service
            print("Sleeping for github slowdown")
            time.sleep(1)

    if errors:
        raise RuntimeError('Could not fetch stacks: %s' % ', '.join(errors))

    return packages
Ejemplo n.º 46
0
 def _get_package(self, pkg_name):
     """Return the cached release manifest for *pkg_name*, parsing it on demand."""
     if pkg_name not in self._packages:
         dist = self._distribution_instance
         repo = dist.repositories[
             dist.release_packages[pkg_name].repository_name].release_repository
         assert repo is not None and repo.version is not None, \
             "Package '%s' in repository '%s' has no version set" % (
                 pkg_name, repo.name)
         assert 'release' in repo.tags, \
             "Package '%s' in repository '%s' has no 'release' tag set" % (
                 pkg_name, repo.name)
         try:
             self._packages[pkg_name] = parse_package_string(
                 dist.get_release_package_xml(pkg_name))
         except InvalidPackage as e:
             # prefix the package name so the failure is attributable
             raise InvalidPackage(pkg_name + ': %s' % str(e))
     return self._packages[pkg_name]
Ejemplo n.º 47
0
def get_packages(workspace, rd_obj, skip_update=False):
    """Check out and parse the package.xml of every package in *rd_obj*.

    Uses a VcsFileCache rooted at *workspace*; each repository URL is only
    updated once per run.  Raises RuntimeError listing all packages whose
    package.xml could not be fetched or parsed.
    """
    packages = {}

    vcs_cache = VcsFileCache(workspace, skip_update=skip_update)

    errors = []
    urls_updated = set([])
    checkout_info = rd_obj.get_package_checkout_info()
    for pkg_name in sorted(checkout_info.keys()):
        pkg_info = checkout_info[pkg_name]
        url = pkg_info['url']
        print("Get '%s' from '%s' from tag '%s'" %
              (pkg_name, url, pkg_info['full_version']))
        # skip re-updating a repository URL that was already updated this run
        url_updated_before = url in urls_updated
        urls_updated.add(url)
        vcs_cache._skip_update = skip_update or url_updated_before
        try:
            try:
                pkg_string = vcs_cache.get_file_contents(
                    'git', url, pkg_info['full_version'], 'package.xml')
            except VcsError as ex:
                # fall back to the plain version tag if the full tag is missing
                print("  trying tag '%s'" % pkg_info['version'])
                pkg_string = vcs_cache.get_file_contents(
                    'git', url, pkg_info['version'], 'package.xml')

            try:
                p = parse_package_string(pkg_string)
                packages[p.name] = p
            except InvalidPackage as ex:
                print("package.xml for '%s' is invalid.  Error: %s" %
                      (pkg_name, ex))
                errors.append(pkg_name)
        except VcsError as ex:
            print("Failed to get package.xml for '%s'.  Error: %s" %
                  (pkg_name, ex))
            errors.append(pkg_name)

        if not vcs_cache._skip_update:
            # throttle to avoid hammering the hosting service
            print("Sleeping for github slowdown")
            time.sleep(1)

    if errors:
        raise RuntimeError('Could not fetch stacks: %s' % ', '.join(errors))

    return packages
def test_build_caches():
    """Verify every non-EOL distribution cache builds and is topologically sortable."""
    with Fold():
        print(
            """Checking if the 'package.xml' files for all packages are fetchable.
If this fails you can run 'rosdistro_build_cache index.yaml' to perform the same check locally.
""")
        index = 'file://' + os.path.abspath(INDEX_YAML)
        index = get_index(index)
        dist_names = sorted(index.distributions.keys())
        # end-of-life distributions are not validated anymore
        dist_names = [n for n in dist_names if n not in eol_distro_names]

        errors = []
        caches = OrderedDict()
        for dist_name in dist_names:
            with Fold():
                try:
                    cache = generate_distribution_cache(index, dist_name)
                except RuntimeError as e:
                    errors.append(str(e))
                else:
                    caches[dist_name] = cache

        # also check topological order to prevent circular dependencies
        for dist_name, cache in caches.items():
            pkgs = {}
            print("Parsing manifest files for '%s'" % dist_name)
            for pkg_name, pkg_xml in cache.release_package_xmls.items():
                # Collect parsing warnings and fail if version convention are not respected
                warnings = []
                pkgs[pkg_name] = parse_package_string(pkg_xml,
                                                      warnings=warnings)
                for warning in warnings:
                    if 'version conventions' in warning:
                        errors.append('%s: %s' % (pkg_name, warning))
                    else:
                        print('%s: WARNING: %s' % (pkg_name, warning))
            print("Order all packages in '%s' topologically" % dist_name)
            try:
                topological_order_packages(pkgs)
            except RuntimeError as e:
                errors.append('%s: %s' % (dist_name, e))

        # accumulate all failures and report them together at the end
        if errors:
            raise RuntimeError('\n'.join(errors))
Ejemplo n.º 49
0
 def update_single_project(yaml_p, s, server_cache):
     """Build a GitlabProject from the API payload *yaml_p*, reusing cached packages when unchanged.

     *s* is the HTTP session; *server_cache* holds previously crawled projects.
     NOTE(review): relies on enclosing-scope variables (server_name,
     force_update, verbose, url, crawl_depth, timeout) -- presumably this is
     a nested helper; confirm against the enclosing definition.
     """
     cached_p = next((q for q in server_cache.projects if q.id == yaml_p["id"]), None)
     p = GitlabProject(
         server=server_name,
         name=yaml_p["name_with_namespace"],
         id=yaml_p["id"],
         website=yaml_p["web_url"],
         url={"ssh": yaml_p["ssh_url_to_repo"], "http": yaml_p["http_url_to_repo"]},
         master_branch=yaml_p.get("default_branch", "master"),
         packages=None,
         last_modified=date_parse(yaml_p["last_activity_at"]),
         workspace_path=None,
         server_path=yaml_p["path_with_namespace"]
     )
     # unchanged since last crawl: reuse the cached package list
     if not force_update and cached_p is not None and cached_p.last_modified == p.last_modified:
         p.packages = cached_p.packages
         for prj in p.packages:
             prj.project = p
     else:
         if verbose:
             msg("@{cf}Updating@|: %s\n" % p.website)
         manifests = crawl_project_for_packages(s, url, p.id, "", depth=crawl_depth, timeout=timeout)
         # reuse previously downloaded manifest XML keyed by blob id
         old_manifests = {}
         if cached_p is not None:
             for old_p in cached_p.packages:
                 old_manifests[old_p.manifest_blob] = old_p.manifest_xml
         p.packages = []
         for path, blob in manifests:
             if blob not in old_manifests:
                 r = s.get(urljoin(url, "api/v4/projects/%s/repository/blobs/%s/raw" % (p.id, blob)), timeout=timeout)
                 r.raise_for_status()
                 xml_data = r.content
             else:
                 xml_data = old_manifests[blob]
             filename = os.path.join(path, PACKAGE_MANIFEST_FILENAME)
             try:
                 manifest = parse_package_string(xml_data, filename)
                 if verbose:
                     msg("@{cf}Updated@|:  @{yf}%s@| [%s]\n" % (manifest.name, p.name))
                 p.packages.append(GitlabPackage(manifest=manifest, project=p, project_path=path, manifest_blob=blob, manifest_xml=xml_data))
             except InvalidPackage as e:
                 # invalid manifests are reported but do not abort the crawl
                 warning("invalid package manifest '%s': %s\n" % (filename, str(e)))
     return p
Ejemplo n.º 50
0
def _get_direct_dependencies(pkg_name, dist_cache, pkg_names):
    """Return the direct dependencies of *pkg_name* restricted to *pkg_names*.

    Reads the cached release manifest from *dist_cache* and collects the names
    of all build-related, exec and test dependencies which are themselves part
    of *pkg_names*.

    :returns: a set of dependency names, or None if no manifest is cached
    """
    from catkin_pkg.package import parse_package_string
    manifests = dist_cache.release_package_xmls
    if pkg_name not in manifests:
        return None
    pkg = parse_package_string(manifests[pkg_name])
    # Test dependencies are treated as build dependencies by bloom, so they
    # are included here to ensure everything is available before a build.
    dependency_groups = (
        pkg.buildtool_depends,
        pkg.build_depends,
        pkg.buildtool_export_depends,
        pkg.build_export_depends,
        pkg.exec_depends,
        pkg.test_depends,
    )
    return {
        dep.name
        for group in dependency_groups
        for dep in group
        if dep.name in pkg_names
    }
Ejemplo n.º 51
0
def main(argv=sys.argv[1:]):
    """
    Extract the information from package.xml and make them accessible to CMake.

    Parse the given package.xml file and
    print CMake code defining several variables containing the content.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(
        description='Parse package.xml file and print CMake code defining '
                    'several variables',
    )
    parser.add_argument(
        'package_xml',
        type=argparse.FileType('r', encoding='utf-8'),
        help='The path to a package.xml file',
    )
    parser.add_argument(
        'outfile',
        nargs='?',
        help='The filename where the output should be written to',
    )
    args = parser.parse_args(argv)

    try:
        package = parse_package_string(
            args.package_xml.read(), filename=args.package_xml.name)
    except Exception:
        # Report which file failed, then re-raise with a bare `raise` to
        # preserve the original exception and its full traceback
        # (`raise e` would restart the traceback at this frame).
        print("Error parsing '%s':" % args.package_xml.name, file=sys.stderr)
        raise
    finally:
        # Close the file handle argparse opened, whether or not parsing worked.
        args.package_xml.close()

    lines = generate_cmake_code(package)
    if args.outfile:
        with open(args.outfile, 'w', encoding='utf-8') as f:
            for line in lines:
                f.write('%s\n' % line)
    else:
        for line in lines:
            print(line)
Ejemplo n.º 52
0
def main(argv=sys.argv[1:]):
    """
    Extract the information from package.xml and make them accessible to CMake.

    Parse the given package.xml file and
    print CMake code defining several variables containing the content.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(
        description='Parse package.xml file and print CMake code defining '
                    'several variables',
    )
    parser.add_argument(
        'package_xml',
        type=argparse.FileType('r', encoding='utf-8'),
        help='The path to a package.xml file',
    )
    parser.add_argument(
        'outfile',
        nargs='?',
        help='The filename where the output should be written to',
    )
    args = parser.parse_args(argv)

    try:
        package = parse_package_string(
            args.package_xml.read(), filename=args.package_xml.name)
    except Exception:
        # Report which file failed, then re-raise with a bare `raise` to
        # preserve the original exception and its full traceback
        # (`raise e` would restart the traceback at this frame).
        print("Error parsing '%s':" % args.package_xml.name, file=sys.stderr)
        raise
    finally:
        # Close the file handle argparse opened, whether or not parsing worked.
        args.package_xml.close()

    lines = generate_cmake_code(package)
    if args.outfile:
        with open(args.outfile, 'w', encoding='utf-8') as f:
            for line in lines:
                f.write('%s\n' % line)
    else:
        for line in lines:
            print(line)
def get_packages_dependencies(package_names, distro):
    """Compute the dependency closure needed to build and run the named packages.

    Performs a breadth-first traversal over the distro's cached manifests,
    following buildtool, build and run dependencies that are themselves part
    of the distro.

    :param package_names: iterable of package names to start from
    :param distro: distribution cache exposing a ``package_xmls`` dict
    :returns: dict mapping each visited package name to the set of package
        names it requires to build
    :raises KeyError: if a package is not present in the distro cache
    """
    from collections import deque

    package_dependencies = {}
    packages_to_process = deque(package_names)
    while packages_to_process:
        pkg_name = packages_to_process.popleft()
        if pkg_name in package_dependencies:
            continue
        if pkg_name not in distro.package_xmls:
            # Bug fix: the original raised a plain string, which is a
            # TypeError in Python 3 ("exceptions must derive from
            # BaseException"); raise a real exception instead.
            raise KeyError(
                "Can't find package %s in the distro cache" % pkg_name)
        pkg = parse_package_string(distro.package_xmls[pkg_name])

        deps = {
            p.name
            for p in (pkg.buildtool_depends + pkg.build_depends + pkg.run_depends)
            if p.name in distro.package_xmls
        }
        package_dependencies[pkg_name] = deps
        packages_to_process.extend(deps)

    return package_dependencies
Ejemplo n.º 54
0
 def __init__(self, pkg_xml):
     """Extract packaging metadata from a package.xml manifest string.

     Populates license, description, homepage, maintainer/author contact,
     group membership and build type from the parsed manifest.

     :param pkg_xml: the package.xml content as a string
     """
     self.upstream_email = None
     self.upstream_name = None
     # Default homepage, used when the manifest declares no URLs at all.
     self.homepage = 'https://wiki.ros.org'
     pkg = parse_package_string(pkg_xml)
     # NOTE(review): pkg.licenses is a list; the singular attribute name
     # suggests one license — confirm downstream consumers expect a list.
     self.upstream_license = pkg.licenses
     self.description = pkg.description
     # Prefer the first URL explicitly typed 'website'; otherwise fall back
     # to the first URL of any type; otherwise keep the default above.
     if 'website' in [url.type for url in pkg.urls]:
         self.homepage = [
             url.url for url in pkg.urls if url.type == 'website'
         ][0]
     elif len(pkg.urls) > 0:
         self.homepage = [
             url.url for url in pkg.urls
         ][0]
     self.longdescription = pkg.description
     # First maintainer wins; assumes the manifest has at least one
     # maintainer (package.xml requires it) — an empty list would raise
     # IndexError here.
     self.upstream_email = [
         author.email for author in pkg.maintainers
     ][0]
     self.upstream_name = [
         author.name for author in pkg.maintainers
     ][0]
     # Authors are optional, so guard with a conditional expression.
     self.author_email = [
         author.email for author in pkg.authors
     ][0] if pkg.authors else ''
     self.author_name = [
         author.name for author in pkg.authors
     ][0] if pkg.authors else ''
     self.member_of_groups = [
         group.name for group in pkg.member_of_groups
     ]
     # Scrape the <build_type> export by stripping XML tags from its string
     # form. NOTE(review): relies on str(export) rendering the element's
     # markup — confirm against the catkin_pkg Export type.
     tag_remover = re.compile('<.*?>')
     build_type = [
         re.sub(tag_remover, '', str(e))
         for e in pkg.exports if 'build_type' in str(e)
     ]
     # Packages without an explicit build_type export default to catkin.
     self.build_type = 'catkin'
     if build_type:
         self.build_type = build_type[0]
def generate_release_cache(index, dist_name, preclean=False, debug=False):
    """Build (or refresh) the release cache for a distribution.

    Fetches the package.xml of every versioned package, validates that it is
    present, parseable, and that its version matches the repository version
    (ignoring the debian increment).

    :param index: the rosdistro index
    :param dist_name: name of the distribution
    :param preclean: if True, start from a clean cache
    :param debug: if True, print per-package progress instead of dots
    :returns: the populated cache
    :raises RuntimeError: listing every package that failed validation
    """
    dist, cache = _get_cached_release(index, dist_name, preclean)
    # fetch all manifests
    print('- fetch missing manifests')
    errors = []
    for pkg_name in sorted(dist.packages.keys()):
        repo = dist.repositories[dist.packages[pkg_name].repository_name]
        if repo.version is None:
            if debug:
                print('  - skip "%s" since it has no version' % pkg_name)
            continue
        if debug:
            print('  - fetch "%s"' % pkg_name)
        else:
            # Compact progress indicator when not in debug mode.
            sys.stdout.write('.')
            sys.stdout.flush()
        # check that package.xml is fetchable
        package_xml = dist.get_package_xml(pkg_name)
        if not package_xml:
            errors.append('%s: missing package.xml file for package "%s"' % (dist_name, pkg_name))
            continue
        # check that package.xml is parseable
        try:
            pkg = parse_package_string(package_xml)
        except InvalidPackage:
            errors.append('%s: invalid package.xml file for package "%s"' % (dist_name, pkg_name))
            continue
        # Check that version numbers match, allowing a trailing debian
        # increment such as "-0bionic". Bug fix: the pattern is now a raw
        # string (the plain string produced invalid-escape warnings for \d)
        # and uses [A-Za-z] instead of [A-z], which accidentally matched
        # the punctuation characters between 'Z' and 'a' in ASCII.
        if not re.match(r'^%s(-[\dA-Za-z~+.]+)?$' % re.escape(pkg.version), repo.version):
            errors.append('%s: different version in package.xml (%s) for package "%s" than for the repository (%s) (after removing the debian increment)' % (dist_name, pkg.version, pkg_name, repo.version))

    if not debug:
        print('')

    if errors:
        raise RuntimeError('\n'.join(errors))

    return cache
Ejemplo n.º 56
0
    def get_dependencies(self):
        if not self.depends1:
            url = self.url
            url = url.replace('.git', '/release/%s/%s/package.xml'%(self.name, self.version))
            url = url.replace('git://', 'https://raw.')
            print url
            retries = 5
            while not self.depends1 and retries > 0:
                package_xml = urllib.urlopen(url).read()
                append_pymodules_if_needed()
                from catkin_pkg import package
                try:
                    pkg = package.parse_package_string(package_xml)
                except package.InvalidPackage as e:
                    print "!!!! Failed to download package.xml for package %s at url %s"%(self.name, url)
                    time.sleep(5.0)

                res = {}
                res['build'] = [d.name for d in pkg.build_depends]
                res['test'] = [d.name for d in pkg.test_depends]
                self.depends1 = res

        return self.depends1
Ejemplo n.º 57
0
def git_source_manifest_provider(repo):
    """Build a source manifest cache from a shallow clone of *repo*.

    :param repo: repository descriptor with ``url`` and ``version`` attributes
    :returns: dict mapping package name -> [package_path, package_xml], plus
        the key '_ref' holding the checked-out git commit hash
    :raises RuntimeError: if cloning, reading or parsing a manifest fails
    """
    try:
        with _temp_git_clone(repo.url, repo.version) as git_repo_path:
            # Include the git hash in our cache dictionary.
            result = Git(git_repo_path).command('rev-parse', 'HEAD')
            cache = {'_ref': result['output']}

            # Find package.xml files inside the repo.
            for package_path in find_package_paths(git_repo_path):
                if package_path == '.':
                    package_path = ''
                manifest_path = os.path.join(
                    git_repo_path, package_path, 'package.xml')
                with open(manifest_path, 'r') as f:
                    package_xml = f.read()
                try:
                    name = parse_package_string(package_xml).name
                except InvalidPackage:
                    raise RuntimeError('Unable to parse package.xml file found in %s' % repo.url)
                cache[name] = [package_path, package_xml]

    except Exception as e:
        # Chain the original exception so the root cause (network, git,
        # parse error) remains visible in the traceback.
        raise RuntimeError('Unable to fetch source package.xml files: %s' % e) from e

    return cache
Ejemplo n.º 58
0
def _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, source_repo_spec,
        repo_name, pull_request, job_name, dist_cache=None,
        is_disabled=False):
    """Expand the devel job template into a Jenkins job configuration.

    :param pull_request: selects the pull-request job priority and template
        behaviour instead of the commit job one
    :param dist_cache: optional distribution cache used to look up package
        maintainers for notification
    :returns: the expanded job configuration XML as a string
    """
    template_name = 'devel/devel_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    # Collect maintainer e-mail addresses of all released packages in this
    # repository so they can be notified about build results.
    maintainer_emails = set()
    if build_file.notify_maintainers and dist_cache and repo_name and \
            repo_name in dist_cache.distribution_file.repositories:
        # add maintainers listed in latest release to recipients
        repo = dist_cache.distribution_file.repositories[repo_name]
        if repo.release_repository:
            for pkg_name in repo.release_repository.package_names:
                if pkg_name not in dist_cache.release_package_xmls:
                    continue
                pkg_xml = dist_cache.release_package_xmls[pkg_name]
                pkg = parse_package_string(pkg_xml)
                for m in pkg.maintainers:
                    maintainer_emails.add(m.email)

    # Commit and pull-request jobs use different Jenkins priorities.
    job_priority = \
        build_file.jenkins_commit_job_priority \
        if not pull_request \
        else build_file.jenkins_pull_request_job_priority

    job_data = {
        'github_url': get_github_project_url(source_repo_spec.url),

        'job_priority': job_priority,
        'node_label': build_file.jenkins_job_label,

        'pull_request': pull_request,

        'source_repo_spec': source_repo_spec,

        'disabled': is_disabled,

        # this should not be necessary
        'job_name': job_name,

        'github_orgunit': git_github_orgunit(source_repo_spec.url),

        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'source_build_name': source_build_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'repository_args': repository_args,

        'notify_compiler_warnings': build_file.notify_compiler_warnings,
        'notify_emails': build_file.notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,
        'notify_committers': build_file.notify_committers,

        'timeout_minutes': build_file.jenkins_job_timeout,

        'git_ssh_credential_id': config.git_ssh_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
# Group each metapackage set's dependencies by their containing repository and
# resolve a wiki-formatted maintainer string for every dependency.
mp_repo_sets = {}
for key in keys:
    repo_map = {}
    mp_repo_sets[key] = repo_map
    for dep in mp_sets[key]:
        if dep not in repos_by_package:
            continue
        repo = repos_by_package[dep]
        dep_map = repo_map.setdefault(repo, {})
        maintainer = ''
        try:
            pkg_xml = indigo.get_release_package_xml(dep)
        except KeyError:
            pkg_xml = None
        if pkg_xml:
            try:
                pkg = parse_package_string(pkg_xml)
                # Obfuscate the e-mail addresses for the wiki markup.
                entries = [
                    '<<MailTo(%s, %s)>>' % (
                        m.email.replace('@', ' AT ').replace('.', ' DOT '),
                        m.name)
                    for m in pkg.maintainers
                ]
                maintainer = '<<BR>>'.join(entries)
                # maintainer = '<<BR>>'.join(['[[mailto:%s|%s]]' % (m.email, m.name) for m in pkg.maintainers])
            except InvalidPackage:
                maintainer = ''
        dep_map[dep] = maintainer

# Render a MoinMoin wiki table per metapackage key, listing repositories and
# their maintainers; rows that are themselves keys are marked as variants.
for key in keys:
    print("== " + key + " ==")
    print()
    print("||<tablewidth=\"100%\">'''Repository'''||'''Maintenance Status'''||'''Maintainers'''||")
    for repo in sorted(list(mp_repo_sets[key])):
        if repo in keys:
            print("||<bgcolor=\"#eee\">" + repo + "||<bgcolor=\"#eee\">''Variant''|| ||")
    repo_colors = {}
    # NOTE(review): the body of the following loop appears truncated in this
    # snippet — as written the 'for' statement has no suite; confirm against
    # the original source.
    for repo in sorted(list(mp_repo_sets[key])):
def get_rosdistro_info(dist, build_file):
    """Collect per-package status information for a rosdistro distribution.

    :param dist: the distribution object (release packages, repositories,
        cached package.xml files)
    :param build_file: build file used to filter the package set
    :returns: dict mapping package name -> populated ``RosPackage``
    """
    all_pkg_names = dist.release_packages.keys()
    pkg_names = build_file.filter_packages(all_pkg_names)

    packages = {}
    for pkg_name in pkg_names:
        # package name
        ros_pkg = RosPackage(pkg_name)
        ros_pkg.debian_name = get_debian_package_name(dist.name, pkg_name)

        pkg = dist.release_packages[pkg_name]
        repo = dist.repositories[pkg.repository_name].release_repository
        # package version; skip unreleased packages entirely
        if not repo.version:
            continue
        ros_pkg.version = repo.version

        # repository name and url: prefer the source or doc repository URL
        # over the release repository, linking to the branch on GitHub
        ros_pkg.repository_name = pkg.repository_name
        repo_url = repo.url
        other_repos = [
            dist.repositories[pkg.repository_name].source_repository,
            dist.repositories[pkg.repository_name].doc_repository]
        for other_repo in other_repos:
            if other_repo:
                repo_url = other_repo.url
                if repo_url.startswith('https://github.com/') and \
                        repo_url.endswith('.git'):
                    if other_repo.version:
                        repo_url = '%s/tree/%s' % \
                            (repo_url[:-4], other_repo.version)
                break
        ros_pkg.repository_url = repo_url

        # package status and description
        ros_pkg.status = 'unknown'
        ros_pkg.status_description = ''
        if dist.repositories[pkg.repository_name].status:
            ros_pkg.status = dist.repositories[pkg.repository_name].status
        if dist.repositories[pkg.repository_name].status_description:
            ros_pkg.status_description = \
                dist.repositories[pkg.repository_name].status_description

        # maintainers and package url from manifest
        ros_pkg.maintainers = []
        ros_pkg.url = None
        pkg_xml = dist.get_release_package_xml(pkg_name)
        if pkg_xml is not None:
            from catkin_pkg.package import InvalidPackage, parse_package_string
            try:
                pkg_manifest = parse_package_string(pkg_xml)
                for m in pkg_manifest.maintainers:
                    ros_pkg.maintainers.append(
                        MaintainerDescriptor(m.name, m.email))
                # Bug fix: use attribute access like `.maintainers` above;
                # catkin_pkg's Package is not subscriptable, so
                # pkg_manifest['urls'] would raise TypeError, which the
                # `except InvalidPackage` below would not catch.
                for u in pkg_manifest.urls:
                    if u.type == 'website':
                        ros_pkg.url = u.url
                        break
            except InvalidPackage:
                # Invalid manifests simply leave maintainers/url unset.
                pass

        packages[pkg_name] = ros_pkg
    return packages