def main(repo_type, rosdistro_name, check_for_wet_packages=False):
    index = get_index(get_index_url())
    try:
        distribution_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    for repo_name in sorted(distribution_file.repositories.keys()):
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = distribution_file.repositories[repo_name]
        if repo_type == 'doc':
            repo = repo.doc_repository
        if repo_type == 'source':
            repo = repo.source_repository
        if not repo:
            continue
        try:
            if repo.type == 'git':
                check_git_repo(repo.url, repo.version)
            elif repo.type == 'hg':
                check_hg_repo(repo.url, repo.version)
            elif repo.type == 'svn':
                check_svn_repo(repo.url, repo.version)
            else:
                print()
                print("Unknown type '%s' for repository '%s'" % (repo.type, repo.name), file=sys.stderr)
                continue
        except RuntimeError as e:
            print()
            print("Could not fetch repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
            continue

        if check_for_wet_packages:
            path = tempfile.mkdtemp()
            try:
                if repo.type == 'git':
                    clone_git_repo(repo.url, repo.version, path)
                elif repo.type == 'hg':
                    clone_hg_repo(repo.url, repo.version, path)
                elif repo.type == 'svn':
                    checkout_svn_repo(repo.url, repo.version, path)
            except RuntimeError as e:
                print()
                print("Could not clone repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
                continue
            else:
                package_paths = find_package_paths(path)
                if not package_paths:
                    print()
                    print("Repository '%s' (%s [%s]) does not contain any wet packages" % (repo.name, repo.url, repo.version), file=sys.stderr)
                    continue
            finally:
                shutil.rmtree(path)

    print()
    return True
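# Hedged usage sketch (not part of the original source): a minimal command-line
# entry point that could drive main() above. The argument names, help strings,
# and exit-code convention are assumptions for illustration only.
if __name__ == '__main__':
    import argparse
    import sys

    parser = argparse.ArgumentParser(description='Check the doc or source repositories of a ROS distribution')
    parser.add_argument('repo_type', choices=['doc', 'source'], help='which repository entry of each distribution repo to check')
    parser.add_argument('rosdistro_name', help='name of the ROS distribution')
    parser.add_argument('--check-for-wet-packages', action='store_true',
                        help='also clone each repository and verify it contains catkin (wet) packages')
    args = parser.parse_args()

    success = main(args.repo_type, args.rosdistro_name,
                   check_for_wet_packages=args.check_for_wet_packages)
    sys.exit(0 if success else 1)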
def test_package_paths_with_hidden_directories():
    _create_pkg_in_dir('.test1')
    _create_pkg_in_dir('.test2')
    _create_pkg_in_dir('test3')  # not hidden
    _create_pkg_in_dir('.test4')
    res = find_package_paths('.')
    assert res == ['test3']
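# Hedged sketch (assumption, not the original fixture): one plausible
# implementation of the _create_pkg_in_dir helper used by the test above.
# It creates a directory with a minimal package.xml so that
# find_package_paths() treats it as a package; the manifest content shown
# here is illustrative and may differ from the real test helper.
import os


def _create_pkg_in_dir(name):
    os.makedirs(name, exist_ok=True)
    manifest = (
        '<?xml version="1.0"?>\n'
        '<package format="2">\n'
        '  <name>%s</name>\n'
        '  <version>0.0.0</version>\n'
        '  <description>test package</description>\n'
        '  <maintainer email="nobody@example.com">nobody</maintainer>\n'
        '  <license>BSD</license>\n'
        '</package>\n' % name.lstrip('.')
    )
    with open(os.path.join(name, 'package.xml'), 'w') as f:
        f.write(manifest)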
def topological_order(source_root_dir, whitelisted=None, blacklisted=None):
    paths = find_package_paths(source_root_dir)
    # print('paths = %s' % paths, file=sys.stderr)

    # fetch all meta data
    prefix = os.path.abspath(source_root_dir) + os.sep
    package_data_list = []
    for path in paths:
        data = PackageData(os.path.join(source_root_dir, path))
        # make path relative to root dir
        if data.path.startswith(prefix):
            data.path = data.path[len(prefix):]
        package_data_list.append(data)
    return _topological_order_packages(package_data_list, whitelisted, blacklisted)
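# Side note (hedged, not in the original source): the prefix stripping above is
# just "make this absolute path relative to the source root". A stdlib-only
# illustration of the same step, with a hypothetical helper name:
import os


def _relative_to_root(abs_path, source_root_dir):
    prefix = os.path.abspath(source_root_dir) + os.sep
    if abs_path.startswith(prefix):
        return abs_path[len(prefix):]
    return abs_path

# e.g. _relative_to_root('/ws/src/foo', '/ws/src') -> 'foo'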
def get_repo_packages(repo_folder):
    append_pymodules_if_needed()
    from catkin_pkg import packages as catkin_packages

    paths = []
    # find wet packages
    paths.extend([os.path.abspath(os.path.join(repo_folder, pkg_path))
                  for pkg_path in catkin_packages.find_package_paths(repo_folder)])
    # remove any duplicates
    paths = list(set(paths))

    packages = {}
    for path in paths:
        pkg_info = catkin_packages.parse_package(path)
        packages[pkg_info.name] = path
    return packages
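# Hedged usage sketch (not in the original source): printing the package-name
# to package-path mapping returned by get_repo_packages(). The wrapper function
# name is hypothetical.
def print_repo_packages(repo_folder):
    for pkg_name, pkg_path in sorted(get_repo_packages(repo_folder).items()):
        print('%s -> %s' % (pkg_name, pkg_path))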
def git_source_manifest_provider(repo):
    try:
        with _temp_git_clone(repo.url, repo.version) as git_repo_path:
            # Include the git hash in our cache dictionary.
            result = Git(git_repo_path).command('rev-parse', 'HEAD')
            cache = {'_ref': result['output']}

            # Find package.xml files inside the repo.
            for package_path in find_package_paths(git_repo_path):
                if package_path == '.':
                    package_path = ''
                with open(os.path.join(git_repo_path, package_path, 'package.xml'), 'r') as f:
                    package_xml = f.read()
                try:
                    name = parse_package_string(package_xml).name
                except InvalidPackage:
                    raise RuntimeError('Unable to parse package.xml file found in %s' % repo.url)
                cache[name] = [package_path, package_xml]
    except Exception as e:
        raise RuntimeError('Unable to fetch source package.xml files: %s' % e)

    return cache
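# Hedged usage sketch (assumption): git_source_manifest_provider() only reads
# repo.url and repo.version, so a SimpleNamespace stands in for the real
# repository object here. The URL, branch, and helper name are illustrative.
from types import SimpleNamespace


def _example_source_cache():
    repo = SimpleNamespace(url='https://github.com/ros/ros_tutorials.git', version='noetic-devel')
    cache = git_source_manifest_provider(repo)
    # '_ref' holds the output of 'git rev-parse HEAD'; every other key is a
    # package name mapped to [path_within_repo, package_xml_contents].
    for key in sorted(cache):
        print(key)
    return cache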