Example #1
def add_argument_rosdistro_index_url(parser, required=False):
    help_msg = 'The URL to the ROS distro index'
    if not required:
        from rosdistro import get_index_url
        parser.add_argument(
            '--rosdistro-index-url',
            default=get_index_url(),
            help=("%s (default: '%s', based on the environment variable " +
                  "ROSDISTRO_INDEX_URL") % (help_msg, get_index_url()))
    else:
        parser.add_argument('--rosdistro-index-url',
                            required=True,
                            help=help_msg)
Example #2
def add_argument_rosdistro_index_url(parser, required=False):
    help_msg = 'The URL to the ROS distro index'
    if not required:
        from rosdistro import get_index_url
        parser.add_argument(
            '--rosdistro-index-url',
            default=get_index_url(),
            help=("%s (default: '%s', based on the environment variable " +
                  "ROSDISTRO_INDEX_URL") % (help_msg, get_index_url()))
    else:
        parser.add_argument(
            '--rosdistro-index-url',
            required=True,
            help=help_msg)
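The helper above defaults to rosdistro.get_index_url(), which returns the value of the ROSDISTRO_INDEX_URL environment variable when it is set and otherwise falls back to the library's default index URL. A minimal sketch of that behavior (standard rosdistro API only; the custom file:// URL is a made-up placeholder):

import os
from rosdistro import get_index_url

# With no override, the library's default rosdistro index URL is returned.
os.environ.pop('ROSDISTRO_INDEX_URL', None)
print(get_index_url())

# With ROSDISTRO_INDEX_URL set, that value is returned instead.
os.environ['ROSDISTRO_INDEX_URL'] = 'file:///tmp/rosdistro/index-v4.yaml'
print(get_index_url())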
Example #3
def _check_platform_helper() -> Tuple[str, dict, dict]:
    """
    Check ROS_DISTRO environment variables and distribution installed.

    :return: string of distro name, dict of distribution info, dict of release platforms info
    """
    distro_name = os.environ.get('ROS_DISTRO')
    if not distro_name:
        doctor_error('ROS_DISTRO is not set.')
        return
    distro_name = distro_name.lower()
    u = rosdistro.get_index_url()
    if not u:
        doctor_error(
            'Unable to access ROSDISTRO_INDEX_URL or DEFAULT_INDEX_URL. '
            'Check network setting to make sure machine is connected to internet.'
        )
        return
    i = rosdistro.get_index(u)
    distro_info = i.distributions.get(distro_name)
    if not distro_info:
        doctor_warn(f'Distribution name {distro_name} is not found')
        return
    try:
        distro_data = rosdistro.get_distribution(i, distro_name).get_data()
    except AttributeError:
        distro_data = ''
    return distro_name, distro_info, distro_data
Example #4
def get_rosdistro(quiet):
    global _rosdistro_cache
    dist = None
    if "ROS_DISTRO" in os.environ:
        distro_id = os.environ["ROS_DISTRO"]
        if distro_id not in _rosdistro_cache:
            try:
                from rosdistro import get_index, get_index_url, get_cached_distribution
                url = get_index_url()
                if not quiet:
                    sys.stderr.write(
                        "catkin_lint: downloading %s package index from %s\n" %
                        (distro_id, url))
                index = get_index(url)
                dist = get_cached_distribution(index,
                                               distro_id,
                                               allow_lazy_load=True)
            except Exception as err:
                if not quiet:
                    sys.stderr.write(
                        "catkin_lint: cannot initialize rosdistro: %s\n" %
                        str(err))
            _rosdistro_cache[distro_id] = dist
        dist = _rosdistro_cache[distro_id]
    return Rosdistro(dist=dist, quiet=quiet)
Example #5
def get_index_url():
    global _rosdistro_index_commit
    index_url = rosdistro.get_index_url()
    pr = urlparse(index_url)
    if pr.netloc == 'raw.github.com':
        # Try to determine what the commit hash was
        tokens = [x for x in pr.path.split('/') if x]
        if len(tokens) <= 3:
            debug("Failed to get commit for rosdistro index file: index url")
            debug(tokens)
            return index_url
        owner = tokens[0]
        repo = tokens[1]
        branch = tokens[2]
        gh = get_github_interface(quiet=True)
        if gh is None:
            # Failed to get it with auth, try without auth (may fail)
            gh = Github(username=None, auth=None)
        try:
            data = gh.get_branch(owner, repo, branch)
        except GithubException:
            debug(traceback.format_exc())
            debug("Failed to get commit for rosdistro index file: api")
            return index_url
        _rosdistro_index_commit = data.get('commit', {}).get('sha', None)
        if _rosdistro_index_commit is not None:
            info("ROS Distro index file associate with commit '{0}'"
                 .format(_rosdistro_index_commit))
        else:
            debug("Failed to get commit for rosdistro index file: json")
    return index_url
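The owner/repo/branch extraction above only needs the path component of the index URL. A standard-library sketch of the same tokenizing, using the public ros/rosdistro index URL as the example input:

from urllib.parse import urlparse

index_url = 'https://raw.githubusercontent.com/ros/rosdistro/master/index-v4.yaml'
pr = urlparse(index_url)
# Drop empty path segments, then take owner, repository and branch.
tokens = [x for x in pr.path.split('/') if x]
owner, repo, branch = tokens[:3]
print(owner, repo, branch)  # -> ros rosdistro master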
Example #6
 def __init__(self, name, ros_distro):
     SrcAptBase.__init__(self, name)
     self.ros_distro = self.detect_ros_distribution(ros_distro)
     self.rosdistro_index = rosdistro.get_index(rosdistro.get_index_url())
     self.cache = rosdistro.get_distribution_cache(self.rosdistro_index,
                                                   self.ros_distro)
     self.distro_file = self.cache.distribution_file
     # More logic could be needed with new ros distributions
     # ROS1 - https://www.ros.org/reps/rep-0003.html
     # ROS2 - http://design.ros2.org/articles/changes.html
     if self.ros_distro == 'melodic':
         self.compilation_flags.append('--std=c++14')
     else:
         self.compilation_flags.append('--std=c++17')
         # needed for gazebo_ros_pkgs
         self.compilation_flags.append('-DBOOST_HAS_PTHREADS=1')
         # gtest-vendor is ROS2
         self.compilation_flags.append('-I' +
             join('/opt/ros/', self.ros_distro, 'src', 'gtest_vendor', 'include'))
         # flag to avoid problems in rcutils
         # https://github.com/osrf/auto-abi-checker/issues/17
         self.compilation_flags.append('-DRCUTILS__STDATOMIC_HELPER_H_')
         # flags for rmw_connext packages
         self.compilation_flags.append('-DRTI_UNIX')
         for rti_path in glob.glob('/opt/rti.com/rti_connext_dds-*'):
             self.compilation_flags.append('-I' + rti_path + '/include/')
             self.compilation_flags.append('-I' + rti_path + '/include/ndds')
     # Needs to add /opt/ros includes to compile ROS software
     self.compilation_flags.append('-I' +
         join('/opt/ros/', self.ros_distro, 'include'))
Example #7
def main(repo_type, rosdistro_name):
    index = get_index(get_index_url())
    if repo_type == 'doc':
        try:
            distro_file = get_doc_file(index, rosdistro_name)
        except RuntimeError as e:
            print("Could not load doc file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
            return False
    if repo_type == 'source':
        try:
            distro_file = get_source_file(index, rosdistro_name)
        except RuntimeError as e:
            print("Could not load source file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
            return False

    for repo_name in sorted(distro_file.repositories.keys()):
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distro_file.repositories[repo_name]
        try:
            if (repo.type == 'git'):
                check_git_repo(repo.url, repo.version)
            elif (repo.type == 'hg'):
                check_hg_repo(repo.url, repo.version)
            elif (repo.type == 'svn'):
                check_svn_repo(repo.url, repo.version)
            else:
                print()
                print("Unknown type '%s' for repository '%s'" % (repo.type, repo.name), file=sys.stderr)
        except RuntimeError as e:
            print()
            print("Could not fetch repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
    print()

    return True
Example #8
class RepoForm(FlaskForm):
    architectures = ['amd64', 'arm64', 'armhf', 'i386', 'source']
    selected_arch = MultiCheckboxField(
        'Architectures',
        choices=[(arch, arch) for arch in architectures],
        validators=[DataRequired('Select at least one valid architecture')],
    )

    from rosdistro import get_distribution_file, get_index, get_index_url

    index = get_index(get_index_url())
    ros_distributions = index.distributions.keys()
    distributions_combo_list = list()
    list_index = 0
    for item in ros_distributions:
        distribution_file = get_distribution_file(index, item)
        for ubuntu in distribution_file.release_platforms['ubuntu']:
            distributions_combo_list.append(
                (list_index, dict({
                    'ros': item,
                    'ubuntu': ubuntu
                })))
            list_index += 1
    distributions_combo_list = sorted(distributions_combo_list,
                                      key=lambda v: v[1]['ros'])
    selected_distros = MultiCheckboxField(
        'Distributions',
        choices=[(str(dist[0]), dist[1]['ros'] + ' - ' + dist[1]['ubuntu'])
                 for dist in distributions_combo_list],
        validators=[DataRequired('Select at least one valid distribution')],
    )
    submit = SubmitField('Next')
Example #9
def main(repo_type, rosdistro_name):
    index = get_index(get_index_url())
    try:
        distribution_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    for repo_name in sorted(distribution_file.repositories.keys()):
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distribution_file.repositories[repo_name]
        if repo_type == 'doc':
            repo = repo.doc_repository
        if repo_type == 'source':
            repo = repo.source_repository
        if not repo:
            continue
        try:
            if (repo.type == 'git'):
                check_git_repo(repo.url, repo.version)
            elif (repo.type == 'hg'):
                check_hg_repo(repo.url, repo.version)
            elif (repo.type == 'svn'):
                check_svn_repo(repo.url, repo.version)
            else:
                print()
                print("Unknown type '%s' for repository '%s'" % (repo.type, repo.name), file=sys.stderr)
        except RuntimeError as e:
            print()
            print("Could not fetch repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
    print()

    return True
Example #10
def call_abi_checker(workspace_root, ros_version, env):
    import rosdistro

    condition_context = {}
    condition_context['ROS_DISTRO'] = env['ROS_DISTRO']
    condition_context['ROS_VERSION'] = ros_version
    condition_context['ROS_PYTHON_VERSION'] = \
        (env or os.environ).get('ROS_PYTHON_VERSION')
    pkgs = get_packages_in_workspaces(workspace_root, condition_context)
    pkg_names = [pkg.name for pkg in pkgs.values()]
    assert pkg_names, 'No packages found in the workspace'

    # Filter packages in source space that has been released
    index = rosdistro.get_index(rosdistro.get_index_url())
    dist_file = rosdistro.get_distribution_file(index, env['ROS_DISTRO'])
    pkg_names_released = [
        pkg_name for pkg_name in pkg_names
        if pkg_name in dist_file.release_packages
    ]

    assert len(
        workspace_root
    ) == 1, 'auto-abi tool needs the implementation of multiple local-dir'
    # ROS_DISTRO is set in the env object
    cmd = [
        'auto-abi.py ' + '--orig-type ros-pkg --orig ' +
        ",".join(pkg_names_released) + ' ' + '--new-type ros-ws --new ' +
        os.path.join(workspace_root[0], 'install_isolated') + ' ' +
        '--report-dir ' + workspace_root[0] + ' ' + '--no-fail-if-empty ' +
        '--display-exec-time'
    ]
    print("Invoking '%s'" % (cmd))
    return subprocess.call(cmd, shell=True, stderr=subprocess.STDOUT, env=env)
Example #11
def main(repo_type, rosdistro_name, check_for_wet_packages=False):
    index = get_index(get_index_url())
    try:
        distribution_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    for repo_name in sorted(distribution_file.repositories.keys()):
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distribution_file.repositories[repo_name]
        if repo_type == 'doc':
            repo = repo.doc_repository
        if repo_type == 'source':
            repo = repo.source_repository
        if not repo:
            continue
        try:
            if (repo.type == 'git'):
                check_git_repo(repo.url, repo.version)
            elif (repo.type == 'hg'):
                check_hg_repo(repo.url, repo.version)
            elif (repo.type == 'svn'):
                check_svn_repo(repo.url, repo.version)
            else:
                print()
                print("Unknown type '%s' for repository '%s'" % (repo.type, repo.name), file=sys.stderr)
                continue
        except RuntimeError as e:
            print()
            print("Could not fetch repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
            continue

        if check_for_wet_packages:
            path = tempfile.mkdtemp()
            try:
                if repo.type == 'git':
                    clone_git_repo(repo.url, repo.version, path)
                elif repo.type == 'hg':
                    clone_hg_repo(repo.url, repo.version, path)
                elif repo.type == 'svn':
                    checkout_svn_repo(repo.url, repo.version, path)
            except RuntimeError as e:
                print()
                print("Could not clone repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
                continue
            else:
                package_paths = find_package_paths(path)
                if not package_paths:
                    print()
                    print("Repository '%s' (%s [%s]) does not contain any wet packages" % (repo.name, repo.url, repo.version), file=sys.stderr)
                    continue
            finally:
                shutil.rmtree(path)

    print()

    return True
Example #12
def get_manifest_from_rosdistro(package_name, distro_name):
    """
    Get the rosdistro repository data and package information.

    @param package_name: name of package or repository to get manifest information for.
    It gives package symbols precedence over repository names.
    @type  package_name: str
    @param distro_name: name of ROS distribution
    @type  distro_name: str

    @return: (manifest data, 'package'|'repository').
    @rtype: ({str: str}, str, str)
    @raise IOError: if data cannot be loaded
    """
    data = {}
    type_ = None
    index = get_index(get_index_url())
    try:
        distribution_cache = get_cached_distribution(index, distro_name)
    except RuntimeError as runerr:
        if (runerr.message.startswith("Unknown release")):
            return None
        raise

    if package_name in distribution_cache.release_packages:
        pkg = distribution_cache.release_packages[package_name]
        #print('pkg', pkg.name)
        pkg_xml = distribution_cache.get_release_package_xml(package_name)
        pkg_manifest = parse_package_string(pkg_xml)
        data['description'] = pkg_manifest.description
        website_url = [u.url for u in pkg_manifest.urls if u.type == 'website']
        if website_url:
            data['url'] = website_url[0]
        repo_name = pkg.repository_name
        meta_export = [exp for exp in pkg_manifest.exports if exp.tagname == 'metapackage']
        if meta_export:
            type_ = 'metapackage'
        else:
            type_ = 'package'
    else:
        repo_name = package_name
        type_ = 'repository'
    data['repo_name'] = repo_name
    if repo_name in distribution_cache.repositories:
        repo = distribution_cache.repositories[repo_name].release_repository
        if repo:
            data['packages'] = repo.package_names

    if repo_name in distribution_cache.repositories:
        repo = distribution_cache.repositories[repo_name].source_repository
        if not repo:
            return None
        data['vcs'] = repo.type
        data['vcs_uri'] = repo.url
        data['vcs_version'] = repo.version
    else:
        return None

    return (data, type_, None)
Example #13
def get_all_distribution_files(url=None):
    if not url:
        url = rosdistro.get_index_url()
    distribution_files = []
    i = rosdistro.get_index(url)
    for d in i.distributions:
        distribution_files.append(rosdistro.get_distribution_file(i, d))
    return distribution_files
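A hypothetical follow-on use of the helper above (assumes the function and its rosdistro import are in scope): each returned distribution file exposes release_platforms, a dict keyed by OS name, as several later examples rely on.

for dist_file in get_all_distribution_files():
    # 'ubuntu' is assumed here; other OS names may also be present.
    print(dist_file.release_platforms.get('ubuntu'))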
Example #15
def get_all_distribution_filenames(url=None):
    if not url:
        url = rosdistro.get_index_url()
    distribution_filenames = []
    i = rosdistro.get_index(url)
    for d in i.distributions.values():
        dpath = os.path.abspath(urlparse(d['distribution']).path)
        distribution_filenames.append(dpath)
    return distribution_filenames
Example #16
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Create a workspace from vcs repos files.')
    add_argument_rosdistro_name(parser)
    add_argument_repos_file_urls(parser)
    add_argument_repository_names(parser, optional=True)
    add_argument_test_branch(parser)
    parser.add_argument('--workspace-root',
                        help='The path of the desired workspace',
                        required=True)
    args = parser.parse_args(argv)

    assert args.repos_file_urls or args.repository_names

    ensure_workspace_exists(args.workspace_root)

    repos_files = []
    if args.repository_names:
        with Scope('SUBSECTION', 'get repository information from rosdistro'):
            index = get_index(get_index_url())
            dist = get_distribution(index, args.rosdistro_name)
            data = {}
            for repo_name in args.repository_names:
                repo = dist.repositories[repo_name]
                src_repo = repo.source_repository
                repo_data = {
                    'type': src_repo.type,
                    'url': src_repo.url,
                }
                if src_repo.version is not None:
                    repo_data['version'] = src_repo.version
                data[repo_name] = repo_data
            repos_file = os.path.join(args.workspace_root,
                                      'repositories-from-rosdistro.repos')
            with open(repos_file, 'w') as h:
                h.write(
                    yaml.safe_dump({'repositories': data},
                                   default_flow_style=False))
            repos_files.append(repos_file)

    with Scope('SUBSECTION', 'fetch repos files(s)'):
        for repos_file_url in args.repos_file_urls:
            repos_file = os.path.join(args.workspace_root,
                                      os.path.basename(repos_file_url))
            print('Fetching \'%s\' to \'%s\'' % (repos_file_url, repos_file))
            urlretrieve(repos_file_url, repos_file)
            repos_files += [repos_file]

    with Scope('SUBSECTION', 'import repositories'):
        source_space = os.path.join(args.workspace_root, 'src')
        for repos_file in repos_files:
            print('Importing repositories from \'%s\'' % (repos_file))
            import_repositories(source_space, repos_file, args.test_branch)

    with Scope('SUBSECTION', 'vcs export --exact'):
        # if a repo has been rebased against the default branch vcs can't detect the remote
        export_repositories(args.workspace_root, check=not args.test_branch)
Example #17
    def __init__(self, distro):
        self.distro = distro

        try:
            index = get_index(get_index_url())
            self._distribution_file = get_distribution_cache(index, distro).distribution_file
        except:
            logger.error("Could not load rosdistro distribution cache")
            self._distribution_file = None
Example #18
def get_eol_distribution_filenames(url=None):
    if not url:
        url = rosdistro.get_index_url()
    distribution_filenames = []
    i = rosdistro.get_index(url)
    for d_name, d in i.distributions.items():
        if d_name in EOL_DISTROS:
            dpath = os.path.abspath(urlparse(d['distribution']).path)
            distribution_filenames.append(dpath)
    return distribution_filenames
Example #19
def init_environment():
    global os_name, os_version, rdistro, ctx, os_installers, default_os_installer, dist_data, rindex, rcache, rview

    ctx = create_default_installer_context()
    os_installers = ctx.get_os_installer_keys(os_name)
    default_os_installer = ctx.get_default_os_installer_key(os_name)
    rindex = get_index(get_index_url())
    dist_data = _get_dist_file_data(rindex, rdistro, 'distribution')
    rcache = get_distribution(rindex, rdistro)
    rview = get_catkin_view(rdistro, os_name, os_version, False)
Example #20
def scrape_for_release_message_packages(track):
    url = rosdistro.get_index_url()
    index = rosdistro.get_index(url)
    cache = rosdistro.get_release_cache(index, 'hydro')
    packages = []
    for package_name, package_string in cache.package_xmls.iteritems():
        package = catkin_pkg.package.parse_package_string(package_string)
        #print("  Name: %s" % package_name)
        #print("  Buildtool Depends %s" % package.build)
        if has_build_depend_on_message_generation(package):
            packages.append({'name': package_name, 'version': package.version})
    return packages
Example #21
 def get_distro(self, distro):
     if self.__distribution is None:
         try:
             index = get_index(get_index_url())
             self.__distribution = get_distribution_file(
                 index,
                 distro
             )
         except:
             print "failed to get data about repo %s in distribution %s" % (self.repo_name, self.distro_name)
             raise
     return self.__distribution
Example #22
def generate_deb_status_table(package, rosdistro_from, rosdistro_to):
    DISTROS = collections.OrderedDict()
    rosdistro_index = get_index(get_index_url())
    for distro in sorted(rosdistro_index.distributions.keys()):
        distribution_files = get_distribution_files(rosdistro_index, distro)
        if len(distribution_files) > 1:
            sys.stderr.write(
                'distribution_files has multiple entories {}\n'.format(
                    distribution_files))
            sys.exit(1)
        platform = distribution_files[0].release_platforms['ubuntu']
        DISTROS[distro] = platform
        #print('DISTROS[{}] = {}'.format(distro, platform))

    table = []
    for bit, arch in zip(['v8', 'hf', '32', '64'],
                         ['arm64', 'armhf', 'i386', 'amd64']):
        if not table:  # first row
            headers = ['Package']
        row = ['{} ({})'.format(package, arch)]
        for distro, os_list in DISTROS.items():
            if not (ord(rosdistro_from) <= ord(distro[0]) <=
                    ord(rosdistro_to)):
                continue

            for os in os_list:
                if arch.startswith('arm'):
                    if os == 'xenial':
                        os_arch = 'ux{bit}_u'.format(bit=bit)
                    else:
                        os_arch = 'arm_u'
                else:
                    os_arch = 'u'

                if not table:  # first row
                    headers.append('{} ({})'.format(distro.capitalize(),
                                                    os.capitalize()))

                url = 'http://build.ros.org/job/{prefix_ros}bin_{os_arch}{prefix_os}{bit}__{package}__ubuntu_{os}_{arch}__binary'  # NOQA
                url = url.format(
                    bit=bit,
                    arch=arch,
                    os_arch=os_arch,
                    prefix_os=os[0].upper(),
                    prefix_ros=distro[0].upper(),
                    package=package,
                    os=os,
                )
                template_md = '[![Build Status]({url}/badge/icon)]({url})'
                row.append(template_md.format(url=url))
        table.append(row)

    print(tabulate.tabulate(table, headers=headers, tablefmt='pipe'))
Example #23
def scrape_for_release_message_packages(track):
    url = rosdistro.get_index_url()
    index = rosdistro.get_index(url)
    cache = rosdistro.get_release_cache(index, 'kinetic')
    packages = []
    for package_name, package_string in cache.package_xmls.items():
        package = catkin_pkg.package.parse_package_string(package_string)
        #print("  Name: %s" % package_name)
        #print("  Buildtool Depends %s" % package.build)
        if catkin.has_build_depend_on_message_generation(package):
            packages.append({'name': package_name, 'version': package.version})
    return packages
Example #24
def get_index():
    global _rosdistro_index
    if _rosdistro_index is None:
        _rosdistro_index = rosdistro.get_index(rosdistro.get_index_url())
        if _rosdistro_index.version == 1:
            error("This version of bloom does not support rosdistro version "
                  "'{0}', please use an older version of bloom."
                  .format(_rosdistro_index.version), exit=True)
        if _rosdistro_index.version > 2:
            error("This version of bloom does not support rosdistro version "
                  "'{0}', please update bloom.".format(_rosdistro_index.version), exit=True)
    return _rosdistro_index
Example #25
def generate_deb_status_table(package, rosdistro_from, rosdistro_to):
    DISTROS = collections.OrderedDict()
    rosdistro_index = get_index(get_index_url())
    for distro in sorted(rosdistro_index.distributions.keys()):
        distribution_files = get_distribution_files(rosdistro_index, distro)
        if len(distribution_files) > 1:
            sys.stderr.write('distribution_files has multiple entories {}\n'.format(distribution_files))
            sys.exit(1)
        platform = distribution_files[0].release_platforms['ubuntu']
        DISTROS[distro] = platform
        #print('DISTROS[{}] = {}'.format(distro, platform))

    table = []
    for bit, arch in zip(['v8', 'hf', '32', '64'],
                         ['arm64', 'armhf', 'i386', 'amd64']):
        if not table:  # first row
            headers = ['Package']
        row = ['{} ({})'.format(package, arch)]
        for distro, os_list in DISTROS.items():
            if not (ord(rosdistro_from) <= ord(distro[0]) <=
                    ord(rosdistro_to)):
                continue

            for os in os_list:
                if arch.startswith('arm'):
                    if os == 'xenial':
                        os_arch = 'ux{bit}_u'.format(bit=bit)
                    else:
                        os_arch = 'arm_u'
                else:
                    os_arch = 'u'

                if not table:  # first row
                    headers.append(
                        '{} ({})'.format(distro.capitalize(), os.capitalize()))

                url = 'http://build.ros.org/job/{prefix_ros}bin_{os_arch}{prefix_os}{bit}__{package}__ubuntu_{os}_{arch}__binary'  # NOQA
                url = url.format(
                    bit=bit,
                    arch=arch,
                    os_arch=os_arch,
                    prefix_os=os[0].upper(),
                    prefix_ros=distro[0].upper(),
                    package=package,
                    os=os,
                )
                template_md = '[![Build Status]({url}/badge/icon)]({url})'
                row.append(template_md.format(url=url))
        table.append(row)

    print(tabulate.tabulate(table, headers=headers, tablefmt='pipe'))
Example #26
def get_rosdistro(distroname):
    index_url = get_index_url()
    index = get_index(index_url)

    if index_url == DEFAULT_INDEX_URL:
        logger.error(
            'ROSDISTRO_INDEX_URL is set to the default (did you forget to source a workspace?)'
        )
        exit(1)

    # load rosdistro with patched SourceRepositorySpecification class
    with patch.object(repository, 'SourceRepositorySpecification',
                      SourceRepositorySpecificationMock):
        return get_distribution(index, distroname)
Example #27
def get_rosdistro(quiet): # pragma: no cover
    dist = None
    if "ROS_DISTRO" in os.environ:
        distro_id = os.environ["ROS_DISTRO"]
        try:
            from rosdistro import get_index, get_index_url, get_cached_distribution
            url = get_index_url()
            if not quiet:
                sys.stderr.write("catkin_lint: downloading %s package index from %s\n" % (distro_id, url))
            index = get_index(url)
            dist = get_cached_distribution(index, distro_id, allow_lazy_load=True)
        except Exception as err:
            if not quiet:
                sys.stderr.write("catkin_lint: cannot initialize rosdistro: %s\n" % str(err))
    return Rosdistro(dist=dist, quiet=quiet)
Example #28
    def generate_distro_cache(basepath, distro, skip_keys=[]):
        distro_cache_dir = '{0}/files/{1}/'.format(basepath, distro)
        distro_cache_path = '{0}cache.yaml'.format(distro_cache_dir)
        try:
            index = get_index(get_index_url())
            yaml_str = get_distribution_cache_string(index, distro)
            make_dir(distro_cache_dir)
            with open(distro_cache_path, 'w') as distro_cache_file:
                distro_cache_file.write('# {}/cache.yaml\n'.format(distro))
                distro_cache_file.write(yaml_str)
                ok('Wrote {0}'.format(distro_cache_path))
        except OSError as e:
            err('Failed to write distro cache {} to disk! {}'.format(
                distro_cache_path, e))
            raise e
        # Generate a diff'able cache file
        distro_cache_diff_path = '{}cache.diffme'.format(distro_cache_dir)
        try:

            def replace_all_patterns(d, text):
                for k, v in d.items():
                    text = re.sub(k, v, text, flags=re.M)
                return text

            replacement_table = {
                r"{([^ }][^ }]*)}": r'[[\1]]',
                r"{": r"{\n",
                r"}": r"\n}",
                r"\[\[": r"{",
                r"\]\]": r"}",
                r", ": r",\n",
                r"^    ": r"-----\n",
                r"<version>[^<]*</version>": r"",
                r"><": r">\n<",
                r"^  ": r"-----\n",
                r"^(source_repo_package_xmls:)": r"-----\n\1",
            }
            with open(distro_cache_diff_path, 'w') as distro_cache_diff_file:
                distro_cache_diff_file.write(
                    '# {}/cache.diffme\n'.format(distro))
                yaml_str = replace_all_patterns(replacement_table, yaml_str)
                distro_cache_diff_file.write(yaml_str)
                ok('Wrote {0}'.format(distro_cache_diff_path))
        except OSError as e:
            err('Failed to write diffme distro cache {} to disk! {}'.format(
                distro_cache_diff_path, e))
            raise e
Example #29
 def handle_arguments(self, args):
     self.interactive = args.interactive
     self.debian_inc = args.debian_inc
     self.os_name = args.os_name
     self.distros = args.distros
     if self.distros in [None, []]:
         index = rosdistro.get_index(rosdistro.get_index_url())
         distribution_file = rosdistro.get_distribution_file(
             index, self.rosdistro)
         if self.os_name not in distribution_file.release_platforms:
             if args.os_not_required:
                 warning(
                     "No platforms defined for os '{0}' in release file for the "
                     "'{1}' distro. This os was not required; continuing without error."
                     .format(self.os_name, self.rosdistro))
                 sys.exit(0)
             error(
                 "No platforms defined for os '{0}' in release file for the '{1}' distro."
                 .format(self.os_name, self.rosdistro),
                 exit=True)
         self.distros = distribution_file.release_platforms[self.os_name]
     self.install_prefix = args.install_prefix
     if args.install_prefix is None:
         self.install_prefix = self.default_install_prefix
     self.prefix = args.prefix
     self.branches = match_branches_with_prefix(self.prefix,
                                                get_branches,
                                                prune=not args.match_all)
     if len(self.branches) == 0:
         error("No packages found, check your --prefix or --src arguments.",
               exit=True)
     self.packages = {}
     self.tag_names = {}
     self.names = []
     self.branch_args = []
     self.debian_branches = []
     for branch in self.branches:
         package = get_package_from_branch(branch)
         if package is None:
             # This is an ignored package
             continue
         self.packages[package.name] = package
         self.names.append(package.name)
         args = self.generate_branching_arguments(package, branch)
         # First branch is debian/[<rosdistro>/]<package>
         self.debian_branches.append(args[0][0])
         self.branch_args.extend(args)
Example #30
    def __init__(self, repo, distro_name, track, bump):
        self.repo_name = repo
        self.track = track
        self.distro_name = distro_name
        self.bump = bump
        self.pretend = False

        try:
            self.index = get_index(get_index_url())
            self.distribution = get_distribution_file(
                self.index,
                self.distro_name
            )
            self.repo = self.distribution.repositories[self.repo_name]
        except:
            print "failed to get data about repo %s in distribution %s" % (self.repo_name, self.distro_name)
            raise
Example #31
def get_rosdistro():
    if 'ROS_DISTRO' in os.environ:
        distroname = os.environ['ROS_DISTRO']
    else:
        raise AssertionError('ROS_DISTRO is not defined in environment')

    index_url = get_index_url()
    index = get_index(index_url)

    if index_url == DEFAULT_INDEX_URL:
        logger.error(
            'ROSDISTRO_INDEX_URL is set to the default (did you forget to source a workspace?)'
        )
        exit(1)

    # load rosdistro with patched SourceRepositorySpecification class
    with patch.object(repository, 'SourceRepositorySpecification',
                      SourceRepositorySpecificationMock):
        return get_distribution(index, distroname)
Example #32
 def handle_arguments(self, args):
     self.interactive = args.interactive
     self.debian_inc = args.debian_inc
     self.os_name = args.os_name
     self.distros = args.distros
     if self.distros in [None, []]:
         index = rosdistro.get_index(rosdistro.get_index_url())
         distribution_file = rosdistro.get_distribution_file(index, self.rosdistro)
         if self.os_name not in distribution_file.release_platforms:
             if args.os_not_required:
                 warning("No platforms defined for os '{0}' in release file for the "
                         "'{1}' distro. This os was not required; continuing without error."
                         .format(self.os_name, self.rosdistro))
                 sys.exit(0)
             error("No platforms defined for os '{0}' in release file for the '{1}' distro."
                   .format(self.os_name, self.rosdistro), exit=True)
         self.distros = distribution_file.release_platforms[self.os_name]
     self.install_prefix = args.install_prefix
     if args.install_prefix is None:
         self.install_prefix = self.default_install_prefix
     self.prefix = args.prefix
     self.branches = match_branches_with_prefix(self.prefix, get_branches, prune=not args.match_all)
     if len(self.branches) == 0:
         error(
             "No packages found, check your --prefix or --src arguments.",
             exit=True
         )
     self.packages = {}
     self.tag_names = {}
     self.names = []
     self.branch_args = []
     self.debian_branches = []
     for branch in self.branches:
         package = get_package_from_branch(branch)
         if package is None:
             # This is an ignored package
             continue
         self.packages[package.name] = package
         self.names.append(package.name)
         args = self.generate_branching_arguments(package, branch)
         # First branch is debian/[<rosdistro>/]<package>
         self.debian_branches.append(args[0][0])
         self.branch_args.extend(args)
Example #33
    def __init__(self, rosdistro_name):
        self._rosdistro = rosdistro_name
        self._targets = None
        self._index = get_index(get_index_url())
        if self._rosdistro not in self._index.distributions:
            print ("Unknown distribution '%s'" % self._rosdistro, file=sys.stderr)
            sys.exit(1)
        self._dist = get_cached_release(self._index, self._rosdistro)
        self._build_files = get_release_build_files(self._index, self._rosdistro)

        self._repoinfo = {}
        self._package_in_repo = {}
        for name, repo in self._dist.repositories.iteritems():
            self._repoinfo[name] = RepoMetadata(name, repo.url, repo.version)
            self._repoinfo[name].packages = {}
            for pkg_name in repo.package_names:
                pkg = self._dist.packages[pkg_name]
                self._repoinfo[name].packages[pkg_name] = pkg.subfolder
                self._package_in_repo[pkg_name] = name
Example #34
def get_index_url():
    global _rosdistro_index_commit, _rosdistro_index_original_branch
    index_url = rosdistro.get_index_url()
    pr = urlparse(index_url)
    if pr.netloc in ['raw.github.com', 'raw.githubusercontent.com']:
        # Try to determine what the commit hash was
        tokens = [x for x in pr.path.split('/') if x]
        if len(tokens) <= 3:
            debug("Failed to get commit for rosdistro index file: index url")
            debug(tokens)
            return index_url
        owner = tokens[0]
        repo = tokens[1]
        branch = tokens[2]
        gh = get_github_interface(quiet=True)
        if gh is None:
            # Failed to get it with auth, try without auth (may fail)
            gh = Github(username=None, auth=None)
        try:
            data = gh.get_branch(owner, repo, branch)
        except GithubException:
            debug(traceback.format_exc())
            debug("Failed to get commit for rosdistro index file: api")
            return index_url
        _rosdistro_index_commit = data.get('commit', {}).get('sha', None)
        if _rosdistro_index_commit is not None:
            info("ROS Distro index file associate with commit '{0}'".format(
                _rosdistro_index_commit))
            # Also mutate the index_url to use the commit (rather than the moving branch name)
            base_info = get_gh_info(index_url)
            base_branch = base_info['branch']
            rosdistro_index_commit = _rosdistro_index_commit  # Copy global into local for substitution
            middle = "{org}/{repo}".format(**base_info)
            index_url = index_url.replace(
                "{pr.netloc}/{middle}/{base_branch}/".format(**locals()),
                "{pr.netloc}/{middle}/{rosdistro_index_commit}/".format(
                    **locals()))
            info("New ROS Distro index url: '{0}'".format(index_url))
            _rosdistro_index_original_branch = base_branch
        else:
            debug("Failed to get commit for rosdistro index file: json")
    return index_url
Example #35
def get_distro_package_versions() -> dict:
    """
    Return repos info using rosdistro API.

    :return: dictionary of rosdistro package name and version
    """
    distro_name = os.environ.get('ROS_DISTRO')
    if not distro_name:
        doctor_error('ROS_DISTRO is not set.')
        return
    distro_name = distro_name.lower()
    url = rosdistro.get_index_url()
    if not url:
        doctor_error(
            'Unable to access ROSDISTRO_INDEX_URL or DEFAULT_INDEX_URL. '
            'Check network setting to make sure machine is connected to internet.'
        )
        return
    i = rosdistro.get_index(url)
    distro_info = rosdistro.get_distribution(i, distro_name)
    if not distro_info:
        doctor_warn(f'Distribution name {distro_name} is not found')
        return
    try:
        repos_info = distro_info.get_data().get('repositories')
    except AttributeError:
        doctor_warn('No repository information found.')
        return
    distro_package_vers = {}
    for package_name, info in repos_info.items():
        try:
            release = info['release']
            ver = release.get('version')
            if 'packages' in release:
                # Metapackage
                for package in release['packages']:
                    distro_package_vers[package] = ver
            else:
                distro_package_vers[package_name] = ver
        except KeyError:
            pass
    return distro_package_vers
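A hypothetical invocation of the helper above (assumes ROS_DISTRO is set and the index is reachable; rclcpp/rclpy are just illustrative package names):

versions = get_distro_package_versions() or {}
for pkg in ('rclcpp', 'rclpy'):
    # Look up the version pinned in the distribution, if the package is released.
    print(pkg, versions.get(pkg, 'not released in this distro'))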
Example #36
def load_configuration(ros_distro, repo_name):
    if ros_distro == 'fuerte':
        return load_configuration_fuerte(ros_distro, repo_name)
    from rosdistro import get_doc_file, get_index, get_index_url
    index = get_index(get_index_url())
    doc_file = get_doc_file(index, ros_distro)
    repo_data = _get_repo_data(doc_file, repo_name)
    doc_conf = [repo_data]

    repo = doc_file.repositories[repo_name]
    depends = getattr(repo, 'depends', [])
    depends_conf = []
    for dep_name in depends:
        try:
            repo_data = _get_repo_data(doc_file, dep_name)
        except BuildException:
            raise BuildException('Could not find a dependent repository "%s" of "%s" in doc file' % (dep_name, repo_name))
        depends_conf.append(repo_data)

    return (doc_conf, depends_conf)
Example #37
def check_platform_helper():
    """Check ROS_DISTRO related environment variables and distribution name."""
    distro_name = os.environ.get('ROS_DISTRO')
    if not distro_name:
        sys.stderr.write('WARNING: ROS_DISTRO is not set.\n')
        return
    else:
        distro_name = distro_name.lower()
    u = rosdistro.get_index_url()
    if not u:
        sys.stderr.write('WARNING: Unable to access ROSDISTRO_INDEX_URL '
                         'or DEFAULT_INDEX_URL.\n')
        return
    i = rosdistro.get_index(u)
    distro_info = i.distributions.get(distro_name)
    if not distro_info:
        sys.stderr.write("WARNING: Distribution name '%s' is not found\n" %
                         distro_name)
        return
    distro_data = rosdistro.get_distribution(i, distro_name).get_data()
    return distro_name, distro_info, distro_data
Example #38
def get_distro_package_versions() -> dict:
    """
    Return repos info using rosdistro API.

    :return: dictionary of rosdistro package name and version
    """
    distro_name = os.environ.get('ROS_DISTRO')
    distro_name = distro_name.lower()
    url = rosdistro.get_index_url()
    i = rosdistro.get_index(url)
    distro_data = rosdistro.get_distribution(i, distro_name).get_data()
    repos_info = distro_data.get('repositories')
    distro_package_vers = {}
    for _, info in repos_info.items():
        try:
            release = info['release']
            packages = release['packages']
            ver = release.get('version')
            for p in packages:
                distro_package_vers[p] = ver
        except KeyError:
            pass
    return distro_package_vers
Example #39
def _check_platform_helper() -> Tuple[str, dict, dict]:
    """
    Check ROS_DISTRO environment variables and distribution installed.

    :return: string of distro name, dict of distribution info, dict of release platforms info
    """
    distro_name = os.environ.get('ROS_DISTRO')
    if not distro_name:
        doctor_warn('ROS_DISTRO is not set.')
        return
    else:
        distro_name = distro_name.lower()
    u = rosdistro.get_index_url()
    if not u:
        doctor_warn(
            'Unable to access ROSDISTRO_INDEX_URL or DEFAULT_INDEX_URL.')
        return
    i = rosdistro.get_index(u)
    distro_info = i.distributions.get(distro_name)
    if not distro_info:
        doctor_warn("Distribution name '%s' is not found" % distro_name)
        return
    distro_data = rosdistro.get_distribution(i, distro_name).get_data()
    return distro_name, distro_info, distro_data
Example #40
    def __init__(self, distro_name, python_version=None):
        index = get_index(get_index_url())
        self._distro = get_cached_distribution(index, distro_name)
        self.distro_name = distro_name
        # set up ROS environments
        if python_version is None:
            python_version = index.distributions[distro_name]["python_version"]
        os.environ["ROS_PYTHON_VERSION"] = "{0}".format(python_version)
        os.environ["ROS_DISTRO"] = "{0}".format(distro_name)
        if "ROS_ROOT" in os.environ:
            os.environ.pop("ROS_ROOT")
        if "ROS_PACKAGE_PATH" in os.environ:
            os.environ.pop("ROS_PACKAGE_PATH")
        self._walker = DependencyWalker(self._distro,
                                        evaluate_condition_context=os.environ)

        # cache distribution type
        self._distribution_type = index.distributions[distro_name][
            "distribution_type"]
        self._python_version = index.distributions[distro_name][
            "python_version"]
        self.build_packages = set()

        os.environ["ROS_VERSION"] = "1" if self.check_ros1() else "2"
Example #41
def document_repo(
    workspace,
    docspace,
    ros_distro,
    repo,
    platform,
    arch,
    homepage,
    no_chroot,
    skip_garbage,
    doc_conf,
    depends_conf,
    tags_db,
):
    doc_job = "doc-%s-%s" % (ros_distro, repo)

    # Get the list of repositories that should have documentation run on them
    # These are all of the repos that are not in the depends rosinsall file
    repos_to_doc = get_repositories_from_rosinstall(doc_conf)

    repo_path = os.path.realpath("%s" % (docspace))
    print("Repo path %s" % repo_path)

    # Walk through the installed repositories and find old-style packages, new-stye packages, and stacks
    stacks, manifest_packages, catkin_packages, repo_map = build_repo_structure(repo_path, doc_conf, depends_conf)
    if ros_distro == "indigo":
        if stacks or manifest_packages:
            print("Ignoring dry packages and stacks in '%s'" % ros_distro)
            stacks = {}
            manifest_packages = {}
        if not catkin_packages:
            raise BuildException("No catkin packages found")
    print("Running documentation generation on\npackages: %s" % (manifest_packages.keys() + catkin_packages.keys()))
    # print "Catkin packages: %s" % catkin_packages
    # print "Manifest packages: %s" % manifest_packages
    # print "Stacks: %s" % stacks

    # Get any non local apt dependencies
    ros_dep = rosdep.RosDepResolver(ros_distro, no_chroot=no_chroot)
    import rosdistro

    if ros_distro == "electric":
        apt = rosdistro.AptDistro(platform, arch, shadow=False)
    else:
        apt = rosdistro.AptDistro(platform, arch, shadow=True)
    apt_deps = get_apt_deps(apt, ros_dep, ros_distro, catkin_packages, stacks, manifest_packages)
    print("Apt dependencies: %s" % apt_deps)

    # Get rosdistro release file if there are catkin packages to get status
    if catkin_packages and ros_distro not in ["electric", "fuerte"]:
        print("Fetch rosdistro files for: %s" % ros_distro)
        index = rosdistro.get_index(rosdistro.get_index_url())
        rosdistro_release_file = rosdistro.get_release_file(index, ros_distro)
        rosdistro_source_file = rosdistro.get_source_file(index, ros_distro)
    else:
        rosdistro_release_file = None
        rosdistro_source_file = None

    # Build a local dependency graph to be used for build order
    local_dep_graph = build_local_dependency_graph(catkin_packages, manifest_packages)

    doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
    if os.path.exists(doc_path):
        shutil.rmtree(doc_path)

    # Write stack manifest files for all stacks, we can just do this off the
    # stack.xml files
    write_stack_manifests(stacks, docspace, ros_distro, repo_map, tags_db, doc_job, homepage)

    # Need to make sure to re-order packages to be run in dependency order
    build_order = get_dependency_build_order(local_dep_graph)
    print("Build order that honors deps:\n%s" % build_order)

    # We'll need the full list of apt_deps to get tag files
    full_apt_deps = get_full_apt_deps(apt_deps, apt)

    if not no_chroot:
        print("Installing all dependencies for %s" % repo)

        # XXX this is a really ugly hack to make the hydro doc job for ros_comm pass
        # otherwise roslisp pulls in the rosgraph_msgs package as a Debian dependency
        # which then break catkin_basic since it include the msgs CMake multiple files
        # resulting in duplicate target names (https://github.com/ros/ros_comm/issues/471)
        if repo == "ros_comm" and "ros-hydro-roslisp" in apt_deps:
            apt_deps.remove("ros-hydro-roslisp")

        if apt_deps:
            call("apt-get install %s --yes" % (" ".join(apt_deps)))
        print("Done installing dependencies")

    # Set up the list of things that need to be sourced to run rosdoc_lite
    # TODO: Hack for electric
    if ros_distro == "electric":
        # lucid doesn't have /usr/local on the path by default... weird
        sources = ["export PATH=/usr/local/sbin:/usr/local/bin:$PATH"]
        sources.append("source /opt/ros/fuerte/setup.bash")
        sources.append("export ROS_PACKAGE_PATH=/opt/ros/electric/stacks:$ROS_PACKAGE_PATH")
    else:
        sources = ["source /opt/ros/%s/setup.bash" % ros_distro]

    # We assume that there will be no build errors to start
    build_errors = []

    # Everything that is after fuerte supports catkin workspaces, so everything
    # that has packages with package.xml files
    local_install_path = os.path.join(docspace, "local_installs")
    if os.path.exists(local_install_path):
        shutil.rmtree(local_install_path)

    # Make sure to create some subfolders under the local install path
    def makedirs(path):
        if not os.path.exists(path):
            os.makedirs(path)

    makedirs(os.path.join(local_install_path, "bin"))
    makedirs(os.path.join(local_install_path, "lib/python2.7/dist-packages"))
    makedirs(os.path.join(local_install_path, "share"))

    if catkin_packages and not "rosdoc_lite" in catkin_packages.keys() and not "catkin" in catkin_packages.keys():
        source, errs = build_repo_messages(catkin_packages, docspace, ros_distro, local_install_path)
        build_errors.extend(errs)
        if source:
            sources.append(source)

    # For fuerte catkin, we need to check if we should build catkin stacks
    source, errs = build_repo_messages_catkin_stacks(stacks, ros_distro, local_install_path)
    build_errors.extend(errs)
    sources.append(source)

    # For all our manifest packages (dry or fuerte catkin) we want to build
    # messages. Note, for fuerte catkin, we have to build all the code and
    # install locally to get message generation
    source, errs = build_repo_messages_manifest(manifest_packages, build_order, ros_distro)
    build_errors.extend(errs)
    sources.append(source)

    # We want to pull all the tagfiles available once from the server
    tags_location = os.path.join(workspace, ros_distro)
    if os.path.exists(tags_location):
        shutil.rmtree(tags_location)
    command = [
        "bash",
        "-c",
        'rsync -e "ssh -o StrictHostKeyChecking=no" -qrz [email protected]:/home/rosbot/docs/%s/tags %s'
        % (ros_distro, tags_location),
    ]
    call_with_list(command)

    repo_tags = document_packages(
        manifest_packages,
        catkin_packages,
        build_order,
        repos_to_doc,
        sources,
        tags_db,
        full_apt_deps,
        ros_dep,
        repo_map,
        repo_path,
        docspace,
        ros_distro,
        homepage,
        doc_job,
        tags_location,
        doc_path,
        rosdistro_release_file,
        rosdistro_source_file,
    )

    # Copy the files to the appropriate place
    folders = sorted(set(stacks.keys() + manifest_packages.keys() + catkin_packages.keys()))
    if folders:
        dsts = ["%s/api/%s" % (doc_path, f) for f in folders]
        for dst in dsts:
            with open(os.path.join(dst, "stamp"), "w"):
                pass
        command = [
            "bash",
            "-c",
            'rsync -e "ssh -o StrictHostKeyChecking=no" -qr --delete %s [email protected]:/home/rosbot/docs/%s/api'
            % (" ".join(dsts), ros_distro),
        ]
        call_with_list(command)
    folders = ["%s/changelogs" % doc_path, "%s/tags" % doc_path]
    folders = [f for f in folders if os.path.exists(f)]
    if folders:
        command = [
            "bash",
            "-c",
            'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s [email protected]:/home/rosbot/docs/%s'
            % (" ".join(folders), ros_distro),
        ]
        call_with_list(command)

    if not skip_garbage:
        # Remove the autogenerated doc files since they take up a lot of space if left on the server
        shutil.rmtree(tags_location)
        shutil.rmtree(doc_path)

    # Write the new tags to the database if there are any to write
    for name, tags in repo_tags.iteritems():
        # Get the apt name of the current stack/repo
        if ros_dep.has_ros(name):
            deb_name = ros_dep.to_apt(name)[0]
        else:
            deb_name = "ros-%s-%s" % (ros_distro, name.replace("_", "-"))

        # We only want to write tags for packages that have a valid deb name
        # For others, the only way to get cross referencing is to document everything
        # together with a rosinstall file
        if apt.has_package(deb_name):
            tags_db.set_tags(deb_name, tags)

    # Make sure to write changes to tag files and deps
    # We don't want to write hashes on an unsuccessful build
    excludes = ["rosinstall_hashes"] if build_errors else []
    tags_db.commit_db(excludes)
    tags_db.delete_tag_index_repo()

    # Tell jenkins that we've succeeded
    print("Preparing xml test results")
    try:
        os.makedirs(os.path.join(workspace, "test_results"))
        print("Created test results directory")
    except Exception:
        pass

    if build_errors:
        import yaml

        copy_test_results(
            workspace,
            docspace,
            """Failed to generate messages by calling cmake for %s.
Look in the console for cmake failures, search for "CMake Error"

Also, are you sure that the rosinstall files are pulling from the right branch for %s? Check the repos below,
you can update information the %s.rosinstall and %s-depends.rosinstall files by submitting a pull request at
https://github.com/ros/rosdistro/%s

Documentation rosinstall:\n%s

Depends rosinstall:\n%s"""
            % (
                build_errors,
                ros_distro,
                repo,
                repo,
                ros_distro,
                yaml.safe_dump(doc_conf, default_flow_style=False),
                yaml.safe_dump(depends_conf, default_flow_style=False),
            ),
            "message_generation_failure",
        )
    else:
        copy_test_results(workspace, docspace)
Example #42
def get_relative_release_file_path(distro):
    release_file_url = urlparse(get_release_file_url(distro))
    index_file_url = urlparse(rosdistro.get_index_url())
    return os.path.relpath(release_file_url.path, os.path.commonprefix([index_file_url.path, release_file_url.path]))
Example #43
def get_target_distros(rosdistro):
    print("Fetching targets")
    index = get_index(get_index_url())
    rel_file = get_release_file(index, rosdistro)
    return rel_file.platforms['fedora']
Example #44
def _test_repositories(ros_distro, repo_list, version_list, workspace, test_depends_on,
                       repo_sourcespace, dependson_sourcespace, repo_buildspace, dependson_buildspace,
                       sudo=False, no_chroot=False):
    from catkin_pkg.package import InvalidPackage, parse_package_string
    from rosdistro import get_cached_release, get_index, get_index_url, get_source_file
    from rosdistro.dependency_walker import DependencyWalker
    from rosdistro.manifest_provider import get_release_tag

    index = get_index(get_index_url())
    print "Parsing rosdistro file for %s" % ros_distro
    release = get_cached_release(index, ros_distro)
    print "Parsing devel file for %s" % ros_distro
    source_file = get_source_file(index, ros_distro)

    # Create rosdep object
    print "Create rosdep object"
    rosdep_resolver = rosdep.RosDepResolver(ros_distro, sudo, no_chroot)

    # download the repo_list from source
    print "Creating rosinstall file for repo list"
    rosinstall = ""
    for repo_name, version in zip(repo_list, version_list):
        if version == 'devel':
            if repo_name not in source_file.repositories:
                raise BuildException("Repository %s does not exist in Devel Distro" % repo_name)
            print "Using devel distro file to download repositories"
            rosinstall += _generate_rosinstall_for_repo(source_file.repositories[repo_name])
        else:
            if repo_name not in release.repositories:
                raise BuildException("Repository %s does not exist in Ros Distro" % repo_name)
            repo = release.repositories[repo_name]
            if version not in ['latest', 'master']:
                assert repo.version is not None, 'Repository "%s" does not have a version set' % repo_name
            assert 'release' in repo.tags, 'Repository "%s" does not have a "release" tag set' % repo_name
            for pkg_name in repo.package_names:
                release_tag = get_release_tag(repo, pkg_name)
                if version in ['latest', 'master']:
                    release_tag = '/'.join(release_tag.split('/')[:-1])
                print 'Using tag "%s" of release distro file to download package "%s" from repo "%s"' % (version, pkg_name, repo_name)
                rosinstall += _generate_rosinstall_for_repo(release.repositories[repo_name], version=release_tag)
    print "rosinstall file for all repositories: \n %s" % rosinstall
    with open(os.path.join(workspace, "repo.rosinstall"), 'w') as f:
        f.write(rosinstall)
    print "Install repo list from source"
    os.makedirs(repo_sourcespace)
    call("rosinstall %s %s/repo.rosinstall --catkin" % (repo_sourcespace, workspace))

    # get the repositories build dependencies
    print "Get build dependencies of repo list"
    repo_build_dependencies = get_dependencies(repo_sourcespace, build_depends=True, test_depends=False)
    # ensure that catkin gets installed for non-catkin packages, so that catkin_make_isolated is available
    if 'catkin' not in repo_build_dependencies:
        repo_build_dependencies.append('catkin')
    print "Install build dependencies of repo list: %s" % (', '.join(repo_build_dependencies))
    apt_get_install(repo_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file for repositories that use catkin
    print "Removing the CMakeLists.txt file generated by rosinstall"
    os.remove(os.path.join(repo_sourcespace, 'CMakeLists.txt'))
    print "Create a new CMakeLists.txt file using catkin"

    # get environment
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)

    # check if source workspace contains only package built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(repo_sourcespace)

    # make build folder and change into it
    os.makedirs(repo_buildspace)
    os.chdir(repo_buildspace)

    # make test results dir
    test_results_dir = os.path.join(workspace, 'test_results')
    if os.path.exists(test_results_dir):
        shutil.rmtree(test_results_dir)
    os.makedirs(test_results_dir)

    if not non_catkin_pkgs:
        print "Build catkin workspace"
        call("catkin_init_workspace %s" % repo_sourcespace, ros_env)
        repos_test_results_dir = os.path.join(test_results_dir, 'repos')
        call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s" % (repo_sourcespace, repos_test_results_dir), ros_env)
        #ros_env_repo = get_ros_env(os.path.join(repo_buildspace, 'devel/setup.bash'))

        # build repositories and tests
        print "Build repo list"
        call("make", ros_env)
        call("make tests", ros_env)

        # get the repositories test and run dependencies
        print "Get test and run dependencies of repo list"
        repo_test_dependencies = get_dependencies(repo_sourcespace, build_depends=False, test_depends=True)
        print "Install test and run dependencies of repo list: %s" % (', '.join(repo_test_dependencies))
        apt_get_install(repo_test_dependencies, rosdep_resolver, sudo)

        # run tests
        print "Test repo list"
        call("make run_tests", ros_env)

    else:
        print "Build workspace with non-catkin packages in isolation"
        # work around catkin_make_isolated issue (at least with version 0.5.65 of catkin)
        os.makedirs(os.path.join(repo_buildspace, 'devel_isolated'))
        call('catkin_make_isolated --source %s --install-space install_isolated --install' % repo_sourcespace, ros_env)
        setup_file = os.path.join(repo_buildspace, 'install_isolated', 'setup.sh')
        ros_env = get_ros_env(setup_file)

    # don't do depends-on on things not in release
    not_in_release = set(repo_list) - set(release.repositories.keys())
    if not_in_release:
        print "Removed [%s] repositories which are not in the " %\
            ', '.join(sorted(not_in_release)), \
            "release file for depends-on testing"
        repo_list = list(set(repo_list) - not_in_release)

    # see if we need to do more work or not
    if not test_depends_on:
        print "We're not testing the depends-on repositories"
        ensure_test_results(test_results_dir)
        return

    # get repo_list depends-on list
    print "Get list of wet repositories that build-depend on repo list: %s" % ', '.join(repo_list)
    walker = DependencyWalker(release)
    depends_on = set([])
    try:
        for repo_name in repo_list:
            print('repo_name', repo_name)
            repo = release.repositories[repo_name]
            for pkg_name in repo.package_names:
                print('pkg_name', pkg_name)
                depends_on |= walker.get_recursive_depends_on(pkg_name, ['buildtool', 'build'], ignore_pkgs=depends_on)
                print('depends_on', depends_on)
    except RuntimeError as e:
        print "Exception %s: If you are not in the rosdistro and only in the devel builds, there will be no depends-on" % e
        depends_on = set([])

    print "Build depends_on list of pkg list: %s" % (', '.join(depends_on))
    if len(depends_on) == 0:
        print "No wet packages depend on our repo list. Test finished here"
        ensure_test_results(test_results_dir)
        return

    # install depends_on packages from source from release repositories
    rosinstall = ''
    non_catkin_pkgs = []
    for pkg_name in depends_on:
        repo = release.repositories[release.packages[pkg_name].repository_name]
        if repo.version is None:
            continue
        pkg_xml = release.get_package_xml(pkg_name)
        if pkg_xml is None:
            raise BuildException('Could not retrieve package.xml for package "%s" from rosdistro cache' % pkg_name)
        try:
            pkg = parse_package_string(pkg_xml)
        except InvalidPackage as e:
            raise BuildException('package.xml for package "%s" from rosdistro cache is invalid: %s' % (pkg_name, e))
        if _is_non_catkin_package(pkg):
            non_catkin_pkgs.append(pkg.name)
        rosinstall += _generate_rosinstall_for_pkg(repo, pkg_name)

    if non_catkin_pkgs:
        print 'Non-catkin packages depend on our repo list (%s). Skipping depends_on packages here' % ', '.join(sorted(non_catkin_pkgs))
        create_test_result(test_results_dir, failure='Non-catkin packages depend on the repos (%s). Skip building and testing depends_on packages.' % ', '.join(sorted(non_catkin_pkgs)))
        return

    print "Rosinstall for depends_on:\n %s" % rosinstall
    with open(workspace + "/depends_on.rosinstall", 'w') as f:
        f.write(rosinstall)
    print "Created rosinstall file for depends on"

    # install all repository and system dependencies of the depends_on list
    print "Install all depends_on from source: %s" % (', '.join(depends_on))
    os.makedirs(dependson_sourcespace)
    call("rosinstall --catkin %s %s/depends_on.rosinstall" % (dependson_sourcespace, workspace))

    # check if depends_on workspace contains only package built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(dependson_sourcespace)
    if non_catkin_pkgs:
        print 'Non-catkin packages depend on our repo list (%s). Skipping depends_on packages here' % ', '.join(sorted(non_catkin_pkgs))
        create_test_result(test_results_dir, failure='Non-catkin packages depend on the repos (%s). Skip building and testing depends_on packages.' % ', '.join(sorted(non_catkin_pkgs)))
        return

    # get build and test dependencies of depends_on list
    dependson_build_dependencies = []
    for d in get_dependencies(dependson_sourcespace, build_depends=True, test_depends=False):
        print "  Checking dependency %s" % d
        if d in dependson_build_dependencies:
            print "    Already in dependson_build_dependencies"
        if d in depends_on:
            print "    Is a direct dependency of the repo list, and is installed from source"
        if d in repo_list:
            print "    Is one of the repositories tested"
        if d not in dependson_build_dependencies and d not in depends_on and d not in repo_list:
            dependson_build_dependencies.append(d)
    print "Build dependencies of depends_on list are %s" % (', '.join(dependson_build_dependencies))
    dependson_test_dependencies = []
    for d in get_dependencies(dependson_sourcespace, build_depends=False, test_depends=True):
        if d not in dependson_test_dependencies and d not in depends_on and d not in repo_list:
            dependson_test_dependencies.append(d)
    print "Test dependencies of depends_on list are %s" % (', '.join(dependson_test_dependencies))

    # install build dependencies
    print "Install all build dependencies of the depends_on list"
    apt_get_install(dependson_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file again
    print "Removing the CMakeLists.txt file generated by rosinstall"
    os.remove(os.path.join(dependson_sourcespace, 'CMakeLists.txt'))
    os.makedirs(dependson_buildspace)
    os.chdir(dependson_buildspace)
    print "Create a new CMakeLists.txt file using catkin"
    call("catkin_init_workspace %s" % dependson_sourcespace, ros_env)
    depends_on_test_results_dir = os.path.join(test_results_dir, 'depends_on')
    call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s" % (dependson_sourcespace, depends_on_test_results_dir), ros_env)
    #ros_env_depends_on = get_ros_env(os.path.join(dependson_buildspace, 'devel/setup.bash'))

    # build repositories
    print "Build depends-on packages"
    call("make", ros_env)

    # install test dependencies
    print "Install all test dependencies of the depends_on list"
    apt_get_install(dependson_test_dependencies, rosdep_resolver, sudo)

    # test repositories
    print "Test depends-on packages"
    call("make run_tests", ros_env)
    ensure_test_results(test_results_dir)
Example #45
0
def get_relative_distribution_file_path(distro):
    distribution_file_url = urlparse(get_distribution_file_url(distro))
    index_file_url = urlparse(rosdistro.get_index_url())
    return os.path.relpath(distribution_file_url.path,
                           os.path.commonprefix([index_file_url.path, distribution_file_url.path]))
Example #46
0
def get_manifest_from_rosdistro(package_name, distro_name):
    """
    Get the rosdistro repository data and package information.

    @param package_name: name of package or repository to get manifest information for.
    It gives package symbols precedence over repository names.
    @type  package_name: str
    @param distro_name: name of ROS distribution
    @type  distro_name: str

    @return: (manifest data, 'package'|'repository').
    @rtype: ({str: str}, str, str)
    @raise IOError: if data cannot be loaded
    """
    data = {}
    type_ = None
    index = get_index(get_index_url())
    try:
        distribution_cache = get_cached_distribution(index, distro_name)
    except RuntimeError as runerr:
        if str(runerr).startswith("Unknown release"):
            return None
        raise

    if package_name in distribution_cache.release_packages:
        pkg = distribution_cache.release_packages[package_name]
        #print('pkg', pkg.name)
        pkg_xml = distribution_cache.get_release_package_xml(package_name)
        pkg_manifest = parse_package_string(pkg_xml)
        data['description'] = pkg_manifest.description
        website_url = [u.url for u in pkg_manifest.urls if u.type == 'website']
        if website_url:
            data['url'] = website_url[0]
        repo_name = pkg.repository_name
        meta_export = [
            exp for exp in pkg_manifest.exports if exp.tagname == 'metapackage'
        ]
        if meta_export:
            type_ = 'metapackage'
        else:
            type_ = 'package'
    else:
        repo_name = package_name
        type_ = 'repository'
    data['repo_name'] = repo_name
    if repo_name in distribution_cache.repositories:
        repo = distribution_cache.repositories[repo_name].release_repository
        if repo:
            data['packages'] = repo.package_names

    if repo_name in distribution_cache.repositories:
        repo = distribution_cache.repositories[repo_name].source_repository
        if not repo:
            return None
        data['vcs'] = repo.type
        data['vcs_uri'] = repo.url
        data['vcs_version'] = repo.version
    else:
        return None

    return (data, type_, None)
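
A hedged usage sketch; 'roscpp' and 'indigo' are placeholders, and the call needs network access to the rosdistro index and cache:

result = get_manifest_from_rosdistro('roscpp', 'indigo')
if result is None:
    print('not found in the rosdistro index or cache')
else:
    data, type_, _ = result
    print(type_)                # 'package', 'metapackage' or 'repository'
    print(data.get('vcs_uri'))  # source repository URL, if present
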
Example #47
0
def get_distro(distro_name):
    index = get_index(get_index_url())
    return get_cached_distribution(index, distro_name)
def test_get_index_url():
    get_index_url()
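
The smoke test above only checks that the call does not raise. A slightly stronger variant, assuming the same module-level import of get_index_url, might assert that a non-empty URL comes back (a sketch, not the original test suite):

def test_get_index_url_not_empty():
    url = get_index_url()
    assert url
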
Example #49
0
def get_index():
    global _rosdistro_index
    if _rosdistro_index is None:
        _rosdistro_index = rosdistro.get_index(rosdistro.get_index_url())
    return _rosdistro_index
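
On Python 3 the same one-time fetch can be expressed without a module global, for example with functools.lru_cache (an alternative sketch, not the original code):

import functools

import rosdistro


@functools.lru_cache(maxsize=1)
def get_index_cached():
    # The index is downloaded and parsed on the first call; later calls reuse it.
    return rosdistro.get_index(rosdistro.get_index_url())
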
Example #50
0
def document_repo(workspace, docspace, ros_distro, repo,
                  platform, arch, homepage, no_chroot, skip_garbage,
                  doc_conf, depends_conf, tags_db):
    doc_job = "doc-%s-%s" % (ros_distro, repo)

    #Get the list of repositories that should have documentation run on them
    #These are all of the repos that are not in the depends rosinstall file
    repos_to_doc = get_repositories_from_rosinstall(doc_conf)

    repo_path = os.path.realpath("%s" % (docspace))
    print("Repo path %s" % repo_path)

    #Walk through the installed repositories and find old-style packages, new-style packages, and stacks
    stacks, manifest_packages, catkin_packages, repo_map = build_repo_structure(repo_path, doc_conf, depends_conf)
    if ros_distro == 'indigo':
        if stacks or manifest_packages:
            print("Ignoring dry packages and stacks in '%s'" % ros_distro)
            stacks = {}
            manifest_packages = {}
        if not catkin_packages:
            raise BuildException('No catkin packages found')
    print("Running documentation generation on\npackages: %s" % (manifest_packages.keys() + catkin_packages.keys()))
    #print "Catkin packages: %s" % catkin_packages
    #print "Manifest packages: %s" % manifest_packages
    #print "Stacks: %s" % stacks

    #Get any non local apt dependencies
    ros_dep = rosdep.RosDepResolver(ros_distro, no_chroot=no_chroot)
    import rosdistro
    if ros_distro == 'electric':
        apt = rosdistro.AptDistro(platform, arch, shadow=False)
    else:
        apt = rosdistro.AptDistro(platform, arch, shadow=True)
    apt_deps = get_apt_deps(apt, ros_dep, ros_distro, catkin_packages, stacks, manifest_packages)
    print("Apt dependencies: %s" % apt_deps)

    #Get rosdistro release file if there are catkin packages to get status
    if catkin_packages and ros_distro not in ['electric', 'fuerte']:
        print("Fetch rosdistro files for: %s" % ros_distro)
        index = rosdistro.get_index(rosdistro.get_index_url())
        rosdistro_release_file = rosdistro.get_release_file(index, ros_distro)
        rosdistro_source_file = rosdistro.get_source_file(index, ros_distro)
    else:
        rosdistro_release_file = None
        rosdistro_source_file = None

    #Build a local dependency graph to be used for build order
    local_dep_graph = build_local_dependency_graph(catkin_packages, manifest_packages)

    doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
    if os.path.exists(doc_path):
        shutil.rmtree(doc_path)

    #Write stack manifest files for all stacks, we can just do this off the
    #stack.xml files
    write_stack_manifests(stacks, docspace, ros_distro, repo_map, tags_db, doc_job, homepage)

    #Need to make sure to re-order packages to be run in dependency order
    build_order = get_dependency_build_order(local_dep_graph)
    print("Build order that honors deps:\n%s" % build_order)

    #We'll need the full list of apt_deps to get tag files
    full_apt_deps = get_full_apt_deps(apt_deps, apt)

    if not no_chroot:
        print("Installing all dependencies for %s" % repo)

        # XXX this is a really ugly hack to make the hydro doc job for ros_comm pass
        # otherwise roslisp pulls in the rosgraph_msgs package as a Debian dependency
        # which then breaks catkin_basic since it includes the msgs CMake files multiple times
        # resulting in duplicate target names (https://github.com/ros/ros_comm/issues/471)
        if repo == 'ros_comm' and 'ros-hydro-roslisp' in apt_deps:
            apt_deps.remove('ros-hydro-roslisp')

        if apt_deps:
            call("apt-get install %s --yes" % (' '.join(apt_deps)))
        print("Done installing dependencies")

    #Set up the list of things that need to be sourced to run rosdoc_lite
    #TODO: Hack for electric
    if ros_distro == 'electric':
        #lucid doesn't have /usr/local on the path by default... weird
        sources = ['export PATH=/usr/local/sbin:/usr/local/bin:$PATH']
        sources.append('source /opt/ros/fuerte/setup.bash')
        sources.append('export ROS_PACKAGE_PATH=/opt/ros/electric/stacks:$ROS_PACKAGE_PATH')
    else:
        sources = ['source /opt/ros/%s/setup.bash' % ros_distro]

    #We assume that there will be no build errors to start
    build_errors = []

    #Everything that is after fuerte supports catkin workspaces, so everything
    #that has packages with package.xml files
    local_install_path = os.path.join(docspace, 'local_installs')
    if os.path.exists(local_install_path):
        shutil.rmtree(local_install_path)

    #Make sure to create some subfolders under the local install path
    def makedirs(path):
        if not os.path.exists(path):
            os.makedirs(path)

    makedirs(os.path.join(local_install_path, 'bin'))
    makedirs(os.path.join(local_install_path, 'lib/python2.7/dist-packages'))
    makedirs(os.path.join(local_install_path, 'share'))

    if catkin_packages \
       and 'rosdoc_lite' not in catkin_packages and 'catkin' not in catkin_packages:
        source, errs = build_repo_messages(catkin_packages, docspace, ros_distro, local_install_path)
        build_errors.extend(errs)
        if source:
            sources.append(source)

    #For fuerte catkin, we need to check if we should build catkin stacks
    source, errs = build_repo_messages_catkin_stacks(stacks, ros_distro, local_install_path)
    build_errors.extend(errs)
    sources.append(source)

    #For all our manifest packages (dry or fuerte catkin) we want to build
    #messages. Note, for fuerte catkin, we have to build all the code and
    #install locally to get message generation
    source, errs = build_repo_messages_manifest(manifest_packages, build_order, ros_distro)
    build_errors.extend(errs)
    sources.append(source)

    #We want to pull all the tagfiles available once from the server
    tags_location = os.path.join(workspace, ros_distro)
    if os.path.exists(tags_location):
        shutil.rmtree(tags_location)
    command = ['bash', '-c',
               'rsync -e "ssh -o StrictHostKeyChecking=no" -qrz [email protected]:/home/rosbot/docs/%s/tags %s' % (ros_distro, tags_location)]
    call_with_list(command)

    repo_tags = document_packages(manifest_packages, catkin_packages, build_order,
                                  repos_to_doc, sources, tags_db, full_apt_deps,
                                  ros_dep, repo_map, repo_path, docspace, ros_distro,
                                  homepage, doc_job, tags_location, doc_path,
                                  rosdistro_release_file, rosdistro_source_file)

    #Copy the files to the appropriate place
    folders = sorted(set(stacks.keys() + manifest_packages.keys() + catkin_packages.keys()))
    if folders:
        dsts = ['%s/api/%s' % (doc_path, f) for f in folders]
        for dst in dsts:
            with open(os.path.join(dst, 'stamp'), 'w'):
                pass
        command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr --delete %s [email protected]:/home/rosbot/docs/%s/api' % (' '.join(dsts), ros_distro)]
        call_with_list(command)
    folders = ['%s/changelogs' % doc_path, '%s/tags' % doc_path]
    folders = [f for f in folders if os.path.exists(f)]
    if folders:
        command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s [email protected]:/home/rosbot/docs/%s' % (' '.join(folders), ros_distro)]
        call_with_list(command)

    if not skip_garbage:
        #Remove the autogenerated doc files since they take up a lot of space if left on the server
        shutil.rmtree(tags_location)
        shutil.rmtree(doc_path)

    #Write the new tags to the database if there are any to write
    for name, tags in repo_tags.iteritems():
        #Get the apt name of the current stack/repo
        if ros_dep.has_ros(name):
            deb_name = ros_dep.to_apt(name)[0]
        else:
            deb_name = "ros-%s-%s" % (ros_distro, name.replace('_', '-'))

        #We only want to write tags for packages that have a valid deb name
        #For others, the only way to get cross referencing is to document everything
        #together with a rosinstall file
        if apt.has_package(deb_name):
            tags_db.set_tags(deb_name, tags)

    #Make sure to write changes to tag files and deps
    #We don't want to write hashes on an unsuccessful build
    excludes = ['rosinstall_hashes'] if build_errors else []
    tags_db.commit_db(excludes)
    tags_db.delete_tag_index_repo()

    #Tell jenkins that we've succeeded
    print("Preparing xml test results")
    try:
        os.makedirs(os.path.join(workspace, 'test_results'))
        print("Created test results directory")
    except Exception:
        pass

    if build_errors:
        import yaml
        copy_test_results(workspace, docspace,
                          """Failed to generate messages by calling cmake for %s.
Look in the console for cmake failures, search for "CMake Error"

Also, are you sure that the rosinstall files are pulling from the right branch for %s? Check the repos below,
you can update information in the %s.rosinstall and %s-depends.rosinstall files by submitting a pull request at
https://github.com/ros/rosdistro/%s

Documentation rosinstall:\n%s

Depends rosinstall:\n%s""" % (build_errors,
                              ros_distro,
                              repo,
                              repo,
                              ros_distro,
                              yaml.safe_dump(doc_conf, default_flow_style=False),
                              yaml.safe_dump(depends_conf, default_flow_style=False)),
                          "message_generation_failure")
    else:
        copy_test_results(workspace, docspace)
Example #51
0
def _get_rosdistro_release(distro):
    index = rosdistro.get_index(rosdistro.get_index_url())
    return rosdistro.get_distribution_file(index, distro)
metapackages = dict([(p.name, p) for pth, p in find_packages(path).items()])

keys = [
    'ros_core',
    'ros_base',
    'robot',
    'viz',
    'desktop',
    'perception',
    'simulators',
    'desktop_full',
]

# Get packages which make up each layer of the variants
mp_sets = {}
index = get_index(get_index_url())
hydro = get_cached_distribution(index, 'hydro')
indigo = get_cached_distribution(index, 'indigo')
dist_file = get_distribution_file(index, 'hydro')
indigo_dist_file = get_distribution_file(index, 'indigo')
dw = DependencyWalker(hydro)
for mp in keys:
    # print("Fetching deps for: ", mp)
    deps = list(set(metapackages[mp].run_depends))
    mp_sets[mp] = set([])
    for dep in deps:
        mp_sets[mp].update(set([dep.name]))
        if dep.name in keys:
            continue
        # print(" ", dep.name)
        previous_pkgs = set([])
Example #53
0
def _check_cache():
    if _RDCache.index_url != rosdistro.get_index_url():
        _RDCache.index_url = rosdistro.get_index_url()
        _RDCache.index = None
        _RDCache.release_files = {}
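
_RDCache itself is not part of the snippet; a minimal stand-in consistent with how it is used above might look like this (an assumption, kept deliberately simple):

class _RDCache(object):
    # Module-level cache: the index URL it was built from, the parsed index,
    # and the release files already fetched per distro.
    index_url = None
    index = None
    release_files = {}
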
Example #54
0
    metavar='depth', type=int,
    help='Maximum depth to crawl the dependency tree')

parser.add_argument(
    '--comparison-rosdistro',
    metavar='ROS_DISTRO',
    dest='comparison',
    help='The rosdistro with which to compare')

args = parser.parse_args()

distro_key = args.rosdistro
repo_names_argument = args.repositories
prev_distro_key = None

index = rosdistro.get_index(rosdistro.get_index_url())
valid_distro_keys = sorted(index.distributions.keys())
if distro_key is None:
    distro_key = valid_distro_keys[-1]

# Find the previous distribution to the current one
try:
    i = valid_distro_keys.index(distro_key)
except ValueError:
    print('Distribution key (%s) not found in list of valid distributions.' % distro_key, file=sys.stderr)
    print('Valid rosdistros are %s.' % valid_distro_keys, file=sys.stderr)
    exit(-1)
if i == 0 and not args.comparison:
    print('No previous distribution found.', file=sys.stderr)
    exit(-1)
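
The snippet stops right after the validation; presumably the previous distribution key is then selected along these lines (a sketch, not the original script):

if args.comparison:
    prev_distro_key = args.comparison
else:
    prev_distro_key = valid_distro_keys[i - 1]
print('Comparing %s against %s' % (distro_key, prev_distro_key))
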
Example #55
0
def _get_rosdistro_release(distro):
    index = rosdistro.get_index(rosdistro.get_index_url())
    return rosdistro.get_release_file(index, distro)
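
Hypothetical usage of the helper above, assuming network access; 'indigo' is only an example distro key:

release_file = _get_rosdistro_release('indigo')
print(sorted(release_file.repositories.keys()))  # repositories declared for the distro
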
def _test_repositories(ros_distro, repo_list, version_list, workspace, test_depends_on,
                       repo_sourcespace, dependson_sourcespace, repo_buildspace, dependson_buildspace,
                       sudo=False, no_chroot=False):
    from catkin_pkg.package import InvalidPackage, parse_package_string
    from rosdistro import get_cached_release, get_index, get_index_url, get_source_file
    from rosdistro.dependency_walker import DependencyWalker
    from rosdistro.manifest_provider import get_release_tag

    index = get_index(get_index_url())
    print "Parsing rosdistro file for %s" % ros_distro
    release = get_cached_release(index, ros_distro)
    print "Parsing devel file for %s" % ros_distro
    source_file = get_source_file(index, ros_distro)

    # Create rosdep object
    print "Create rosdep object"
    rosdep_resolver = rosdep.RosDepResolver(ros_distro, sudo, no_chroot)


    # download the repo_list from source
    print "Creating rosinstall file for repo list"
    rosinstall = ""
    for repo_name, version in zip(repo_list, version_list):
        if version == 'devel':
            if repo_name not in source_file.repositories:
                raise BuildException("Repository %s does not exist in Devel Distro" % repo_name)
            print "Using devel distro file to download repositories"
            rosinstall += _generate_rosinstall_for_repo(source_file.repositories[repo_name])
        else:
            if repo_name not in release.repositories:
                raise BuildException("Repository %s does not exist in Ros Distro" % repo_name)
            repo = release.repositories[repo_name]
            if version not in ['latest', 'master']:
                assert repo.version is not None, 'Repository "%s" does not have a version set' % repo_name
            assert 'release' in repo.tags, 'Repository "%s" does not have a "release" tag set' % repo_name
            for pkg_name in repo.package_names:
                release_tag = get_release_tag(repo, pkg_name)
                if version in ['latest', 'master']:
                    release_tag = '/'.join(release_tag.split('/')[:-1])
                print 'Using tag "%s" of release distro file to download package "%s" from repo "%s"' % (version, pkg_name, repo_name)
                rosinstall += _generate_rosinstall_for_repo(release.repositories[repo_name], version=release_tag)
    print "rosinstall file for all repositories: \n %s" % rosinstall
    with open(os.path.join(workspace, "repo.rosinstall"), 'w') as f:
        f.write(rosinstall)
    print "Install repo list from source"
    os.makedirs(repo_sourcespace)
    call("rosinstall %s %s/repo.rosinstall --catkin" % (repo_sourcespace, workspace))

    # get the repositories build dependencies
    print "Get build dependencies of repo list"
    repo_build_dependencies = get_dependencies(repo_sourcespace, build_depends=True, run_depends=False)
    # ensure that catkin gets installed for non-catkin packages, so that catkin_make_isolated is available
    if 'catkin' not in repo_build_dependencies:
        repo_build_dependencies.append('catkin')
    print "Install build dependencies of repo list: %s" % (', '.join(repo_build_dependencies))
    apt_get_install(repo_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file for repositories that use catkin
    print "Removing the CMakeLists.txt file generated by rosinstall"
    os.remove(os.path.join(repo_sourcespace, 'CMakeLists.txt'))
    print "Create a new CMakeLists.txt file using catkin"

    # get environment
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)

    # check if source workspace contains only package built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(repo_sourcespace)

    # make build folder and change into it
    os.makedirs(repo_buildspace)
    os.chdir(repo_buildspace)

    # make test results dir
    test_results_dir = os.path.join(workspace, 'test_results')
    if os.path.exists(test_results_dir):
        shutil.rmtree(test_results_dir)
    os.makedirs(test_results_dir)

    res = 0
    if not non_catkin_pkgs:
        print "Build catkin workspace"
        call("catkin_init_workspace %s" % repo_sourcespace, ros_env)
        repos_test_results_dir = os.path.join(test_results_dir, 'repos')
        helper = subprocess.Popen(('cmake %s -DCMAKE_TOOLCHAIN_FILE=/opt/ros/groovy/share/ros/core/rosbuild/rostoolchain.cmake -DCATKIN_TEST_RESULTS_DIR=%s' % (repo_sourcespace, repos_test_results_dir)).split(' '), env=ros_env)
        helper.communicate()
        res = 0
        if helper.returncode != 0:
            res = helper.returncode
        ros_env_repo = get_ros_env(os.path.join(repo_buildspace, 'devel/setup.bash'))
    
        # build repositories
        print "Build repo list"
        print "CMAKE_PREFIX_PATH: %s"%ros_env['CMAKE_PREFIX_PATH']
        call("make", ros_env)
        
        # Concatenate filelists
        print '-----------------  Concatenate filelists -----------------  '
        filelist = os.path.join(repo_buildspace, 'filelist.lst')
        helper = subprocess.Popen(('%s/jenkins_scripts/code_quality/concatenate_filelists.py --dir %s --filelist %s' % (workspace, repo_buildspace, filelist)).split(' '), env=os.environ)
        helper.communicate()
        print '////////////////// concatenate filelists done ////////////////// \n\n'

        # Run CMA
        print '-----------------  Run CMA analysis -----------------  '
        cmaf = repo_sourcespace  # repo_buildspace
        helper = subprocess.Popen(('pal QACPP -cmaf %s -list %s' % (cmaf, filelist)).split(' '), env=os.environ)
        helper.communicate()
        print '////////////////// cma analysis done ////////////////// \n\n'

        # Export metrics to yaml and csv files
        # get uri infos
        #uri= distro.get_repositories()[repo_list[0]].url
        repo_name = repo_list[0]
        repo_data = release.get_data()['repositories'][repo_name]
        print "repo_data", repo_data 
        uri = repo_data['url']
        uri_info = 'master' #repo_data['version']
        vcs_type = 'git' # repo_data['type']

        print '-----------------  Export metrics to yaml and csv files ----------------- '
        helper = subprocess.Popen(('%s/jenkins_scripts/code_quality/wet/export_metrics_to_yaml_wet.py --path %s --path_src %s --doc metrics --csv csv --config %s/jenkins_scripts/code_quality/export_config.yaml --distro %s --stack %s --uri %s --uri_info %s --vcs_type %s' % (workspace, repo_buildspace, repo_sourcespace, workspace, ros_distro, repo_name, uri, uri_info, vcs_type)).split(' '), env=os.environ)
        helper.communicate()
        print '////////////////// export metrics to yaml and csv files done ////////////////// \n\n'
 
        # Push results to server
        print '-----------------  Push results to server -----------------  '
        helper = subprocess.Popen(('%s/jenkins_scripts/code_quality/wet/push_results_to_server_wet.py --path %s --doc metrics --path_src %s --meta_package %s' % (workspace, repo_buildspace, repo_sourcespace, repo_list)).split(' '), env=os.environ)
        helper.communicate()
        print '////////////////// push results to server done ////////////////// \n\n'


        # Upload results to QAVerify
        print ' -----------------  upload results to QAVerify -----------------  '
        shutil.rmtree(os.path.join(workspace, 'snapshots_path'), ignore_errors=True)
        os.makedirs(os.path.join(workspace, 'snapshots_path'))
        snapshots_path = '%s/snapshots_path' % workspace
        project_name = repo_list[0] + '-' + ros_distro
        helper = subprocess.Popen(('%s/jenkins_scripts/code_quality/wet/upload_to_QAVerify_wet.py --path %s --snapshot %s --project %s --stack_name %s' % (workspace, repo_buildspace, snapshots_path, project_name, repo_list[0])).split(' '), env=os.environ)
        helper.communicate()
        print '////////////////// upload results to QAVerify done ////////////////// \n\n'
        if os.path.exists(snapshots_path):
            shutil.rmtree(snapshots_path)

    else:
        print "Build workspace with non-catkin packages in isolation"
        # work around catkin_make_isolated issue (at least with version 0.5.65 of catkin)
        os.makedirs(os.path.join(repo_buildspace, 'devel_isolated'))
        call('catkin_make_isolated --source %s --install-space install_isolated --install' % repo_sourcespace, ros_env)
        setup_file = os.path.join(repo_buildspace, 'install_isolated', 'setup.sh')
        ros_env = get_ros_env(setup_file)

    if res != 0:
        print "helper return code is: %s" % helper.returncode
        raise Exception("analysis_wet.py failed. Check out the console output above for details.")
    
    # create dummy test results
    env = dict()
    env['INSTALL_DIR'] = os.getenv('INSTALL_DIR', '')
    test_results_path = workspace + '/test_results'
    if os.path.exists(test_results_path):
        shutil.rmtree(test_results_path)
    os.makedirs(test_results_path)
    test_file = os.path.join(test_results_path, 'test_file.xml')
    with open(test_file, 'w') as f:
        f.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        f.write('<testsuite tests="1" failures="0" time="1" errors="0" name="dummy test">\n')
        f.write('  <testcase name="dummy report" classname="Results" /> \n')
        f.write('</testsuite> \n')
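
A quick sanity check of the dummy file written above (a usage sketch, assuming the code above has already run):

import xml.etree.ElementTree as ET

suite = ET.parse(test_file).getroot()
print('%s tests=%s failures=%s' % (suite.tag, suite.get('tests'), suite.get('failures')))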