def git_source_manifest_provider(repo):
    """Build a source cache of package.xml files from a git repository.

    Clones ``repo`` at the requested version into a temporary checkout,
    keys the cache on the resolved commit hash, and registers every
    package.xml discovered in the working tree.

    :raises RuntimeError: if the clone fails or a package.xml cannot be parsed.
    """
    try:
        with _temp_git_clone(repo.url, repo.version) as checkout_dir:
            # The resolved commit hash becomes the cache reference.
            head = Git(checkout_dir).command('rev-parse', 'HEAD')['output']
            cache = SourceRepositoryCache.from_ref(head)

            # Register each package.xml found in the checkout.
            for pkg_rel_path in find_package_paths(checkout_dir):
                if pkg_rel_path == '.':
                    pkg_rel_path = ''
                manifest_file = os.path.join(
                    checkout_dir, pkg_rel_path, 'package.xml')
                with open(manifest_file, 'r') as handle:
                    manifest_xml = handle.read()
                try:
                    pkg_name = parse_package_string(manifest_xml).name
                except InvalidPackage:
                    raise RuntimeError(
                        'Unable to parse package.xml file found in %s' % repo.url)
                cache.add(pkg_name, pkg_rel_path, manifest_xml)
    except Exception as e:
        raise RuntimeError('Unable to fetch source package.xml files: %s' % e)

    return cache
def main(repo_type, rosdistro_name, check_for_wet_packages=False):
    """Validate every doc/source repository entry of a rosdistro distribution.

    Prints one dot per repository as progress, reports problems on stderr,
    and returns False only when the distribution file itself cannot be loaded.

    :param repo_type: 'doc' or 'source' — which sub-repository entry to check
    :param rosdistro_name: name of the ROS distribution to look up
    :param check_for_wet_packages: also clone each repo and require at least
        one catkin package inside it
    """
    index = get_index(get_index_url())
    try:
        distribution_file = get_distribution_file(index, rosdistro_name)
    except RuntimeError as e:
        print("Could not load distribution file for distro '%s': %s" % (rosdistro_name, e), file=sys.stderr)
        return False

    repositories = distribution_file.repositories
    for repo_name in sorted(repositories):
        # Progress indicator: one dot per repository.
        sys.stdout.write('.')
        sys.stdout.flush()
        repo = repositories[repo_name]
        # Narrow down to the requested sub-repository entry.
        if repo_type == 'doc':
            repo = repo.doc_repository
        if repo_type == 'source':
            repo = repo.source_repository
        if not repo:
            continue

        # First pass: probe the remote without cloning.
        try:
            if repo.type == 'git':
                check_git_repo(repo.url, repo.version)
            elif repo.type == 'hg':
                check_hg_repo(repo.url, repo.version)
            elif repo.type == 'svn':
                check_svn_repo(repo.url, repo.version)
            else:
                print()
                print("Unknown type '%s' for repository '%s'" % (repo.type, repo.name), file=sys.stderr)
                continue
        except RuntimeError as e:
            print()
            print("Could not fetch repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
            continue

        if not check_for_wet_packages:
            continue

        # Second pass: clone for real and verify at least one catkin package.
        path = tempfile.mkdtemp()
        try:
            if repo.type == 'git':
                clone_git_repo(repo.url, repo.version, path)
            elif repo.type == 'hg':
                clone_hg_repo(repo.url, repo.version, path)
            elif repo.type == 'svn':
                checkout_svn_repo(repo.url, repo.version, path)
        except RuntimeError as e:
            print()
            print("Could not clone repository '%s': %s (%s) [%s]" % (repo.name, repo.url, repo.version, e), file=sys.stderr)
            continue
        else:
            package_paths = find_package_paths(path)
            if not package_paths:
                print()
                print("Repository '%s' (%s [%s]) does not contain any wet packages" % (repo.name, repo.url, repo.version), file=sys.stderr)
                continue
        finally:
            # Always remove the temporary clone, even on continue.
            shutil.rmtree(path)

    print()
    return True
def test_package_paths_with_hidden_directories():
    """Packages inside dot-prefixed directories must be skipped by discovery."""
    for hidden_name in ('.test1', '.test2'):
        _create_pkg_in_dir(hidden_name)
    _create_pkg_in_dir('test3')  # the only non-hidden package
    _create_pkg_in_dir('.test4')

    found = find_package_paths('.')
    assert found == ['test3']
def topological_order(source_root_dir, whitelisted=None, blacklisted=None):
    """Return the packages under ``source_root_dir`` in topological order.

    Collects PackageData for every discovered package, rewrites each path to
    be relative to the root directory, and delegates the actual ordering.
    """
    root_prefix = os.path.abspath(source_root_dir) + os.sep

    # Fetch all meta data for the discovered packages.
    package_data_list = []
    for rel_path in find_package_paths(source_root_dir):
        data = PackageData(os.path.join(source_root_dir, rel_path))
        # Make the stored path relative to the root dir.
        if data.path.startswith(root_prefix):
            data.path = data.path[len(root_prefix):]
        package_data_list.append(data)

    return _topological_order_packages(package_data_list, whitelisted, blacklisted)
def topological_order(source_root_dir, whitelisted=None, blacklisted=None):
    """Topologically order all packages found below ``source_root_dir``.

    Gathers package meta data, normalizes each package path to be relative
    to the source root, then hands the list to the ordering helper.
    """
    discovered = find_package_paths(source_root_dir)

    prefix = os.path.abspath(source_root_dir) + os.sep
    collected = []
    for pkg_path in discovered:
        meta = PackageData(os.path.join(source_root_dir, pkg_path))
        # Strip the absolute prefix so the path is relative to the root dir.
        if meta.path.startswith(prefix):
            meta.path = meta.path[len(prefix):]
        collected.append(meta)

    return _topological_order_packages(collected, whitelisted, blacklisted)
def get_repo_packages(repo_folder):
    """Map each wet (catkin) package name in ``repo_folder`` to its absolute path."""
    append_pymodules_if_needed()
    from catkin_pkg import packages as catkin_packages

    # Absolute paths of all wet packages; the set removes any duplicates.
    unique_paths = {
        os.path.abspath(os.path.join(repo_folder, pkg_path))
        for pkg_path in catkin_packages.find_package_paths(repo_folder)
    }

    return {
        catkin_packages.parse_package(path).name: path
        for path in unique_paths
    }
def find_packages(basepath, use_cache=True):
    """Locate and parse all package manifests under *basepath*.

    Returns a dict mapping each package path (relative to *basepath*) to its
    parsed manifest.  When *use_cache* is true, manifests are reused from a
    cache keyed both by real package directory (mtime-checked) and by
    (distro, package name); the cache is persisted only if it changed.
    """
    global _cache
    if use_cache:
        _load_cache()
    # Cache entries are partitioned per ROS distribution.
    distro_id = os.environ.get("ROS_DISTRO", None)
    packages = {}
    package_paths = find_package_paths(basepath)
    cache_updated = False
    for path in package_paths:
        # realpath so symlinked package dirs share one cache entry.
        pkg_dir = os.path.realpath(os.path.join(basepath, path))
        if use_cache:
            last_modified = os.path.getmtime(
                os.path.join(pkg_dir, PACKAGE_MANIFEST_FILENAME))
            path_ts = _cache.local_paths[
                pkg_dir].timestamp if pkg_dir in _cache.local_paths else 0
            if last_modified > path_ts:
                # Manifest changed on disk since it was cached: re-parse it.
                manifest = parse_package(pkg_dir)
                _cache.local_paths[pkg_dir] = CacheItem(
                    manifest, last_modified)
                cache_updated = True
            else:
                manifest = _cache.local_paths[pkg_dir].data
            if distro_id not in _cache.packages:
                _cache.packages[distro_id] = {}
            # Also refresh the per-distro, per-package-name cache entry.
            manifest_ts = _cache.packages[distro_id][
                manifest.name].timestamp if manifest.name in _cache.packages[
                    distro_id] else 0
            if last_modified > manifest_ts:
                _cache.packages[distro_id][manifest.name] = CacheItem(
                    PackageCacheData(path=pkg_dir, manifest=manifest),
                    last_modified)
                cache_updated = True
        else:
            manifest = parse_package(pkg_dir)
        packages[path] = manifest
    if cache_updated:
        _store_cache()
    # Resolve conditional dependencies against the current environment
    # (hasattr guard: only newer catkin_pkg manifests support this).
    for package in packages.values():
        if hasattr(package, "evaluate_conditions"):
            package.evaluate_conditions(os.environ)
    return packages
def tar_source_manifest_provider(repo):
    """Build a source cache of package.xml files from a remote tarball.

    Downloads the archive at ``repo.url`` (with optional basic-auth or
    private-token headers from module-level environment configuration),
    extracts only the package.xml members into a temp dir, and caches each
    parsed package name with its path and raw XML.

    :raises RuntimeError: on any download, extraction, or parse failure.
    """
    assert repo.type == 'tar'
    try:
        request = Request(repo.url)
        if _TAR_USER and _TAR_PASSWORD:
            logger.debug('- using http basic auth from supplied environment variables.')
            credential_pair = '%s:%s' % (_TAR_USER, _TAR_PASSWORD)
            authheader = 'Basic %s' % base64.b64encode(credential_pair.encode()).decode()
            request.add_header('Authorization', authheader)
        elif _TAR_PASSWORD:
            # Token-only auth, e.g. for private hosting that accepts this header.
            logger.debug('- using private token auth from supplied environment variables.')
            request.add_header('Private-Token', _TAR_PASSWORD)
        response = urlopen(request)
        # Whole archive is buffered in memory before extraction.
        with tarfile.open(fileobj=io.BytesIO(response.read())) as tar:
            tmpdir = tempfile.mkdtemp()
            try:
                # Extract just the package.xmls
                # NOTE(review): extractall on a downloaded (untrusted) tar can be
                # vulnerable to path traversal (CVE-2007-4559); presumably
                # _package_xml_members filters member paths — confirm, or use an
                # extraction filter where the Python version supports it.
                tar.extractall(path=tmpdir, members=_package_xml_members(tar))
                cache = SourceRepositoryCache.from_ref(None)
                for package_path in find_package_paths(tmpdir):
                    if package_path == '.':
                        package_path = ''
                    with open(os.path.join(tmpdir, package_path, 'package.xml'), 'r') as f:
                        package_xml = f.read()
                    try:
                        name = parse_package_string(package_xml).name
                    except InvalidPackage:
                        raise RuntimeError('Unable to parse package.xml file found in %s' % repo.url)
                    cache.add(name, package_path, package_xml)
                return cache
            finally:
                # Always clean up the temporary extraction directory.
                rmtree(tmpdir)
    except Exception as e:
        raise RuntimeError('Unable to fetch source package.xml files: %s' % e)
def git_source_manifest_provider(repo):
    """Fetch package.xml data for every package in a git repository.

    Returns a dict keyed by package name whose values are
    ``[package_path, package_xml]``, plus a ``'_ref'`` entry holding the
    commit hash of the checked-out version.

    :raises RuntimeError: if the clone fails or a package.xml cannot be parsed.
    """
    try:
        with _temp_git_clone(repo.url, repo.version) as clone_dir:
            # Record the resolved git hash under the reserved '_ref' key.
            rev = Git(clone_dir).command('rev-parse', 'HEAD')
            cache = {'_ref': rev['output']}

            # Collect every package.xml in the working tree.
            for rel_path in find_package_paths(clone_dir):
                if rel_path == '.':
                    rel_path = ''
                xml_path = os.path.join(clone_dir, rel_path, 'package.xml')
                with open(xml_path, 'r') as handle:
                    xml_text = handle.read()
                try:
                    pkg_name = parse_package_string(xml_text).name
                except InvalidPackage:
                    raise RuntimeError('Unable to parse package.xml file found in %s' % repo.url)
                cache[pkg_name] = [rel_path, xml_text]
    except Exception as e:
        raise RuntimeError('Unable to fetch source package.xml files: %s' % e)

    return cache
def run_cppcheck(args):
    """Run cppcheck over every package of the enclosing catkin workspace.

    Logs an error and does nothing when the current directory is not inside
    a catkin workspace.
    """
    cwd = os.getcwd()

    # Find root of catkin workspace; bail out early if there is none.
    workspace = find_enclosing_workspace(cwd)
    if not workspace:
        logging.error(
            'No catkin workspace found. Is "{}" contained in a workspace?'.
            format(cwd))
        return

    # Absolute paths of all packages in the workspace.
    absolute_paths = [
        os.path.join(workspace, rel_path)
        for rel_path in find_package_paths(workspace)
    ]

    # Run cppcheck on the catkin package paths.
    cppcheck.check(
        absolute_paths, args.enable, quiet=args.quiet, verbose=args.verbose)