def get_package_list_for_remote_repo(ros_distro, repo, version, vcs, url, branch):
    """Clone a remote repository and return the package names it contains.

    For ``version == 'latest'`` the combined ``upstream`` branch of a release
    repository is used, and packages listed in the distro's ``.ignored`` file
    (tracked on ``master``) are filtered out of the result.
    """
    with temporary_directory() as tmp_dir:
        ignored = []
        # A release repository's "latest" state lives on the combined
        # upstream branch rather than on the requested branch.
        if version == 'latest':
            branch = 'upstream'
        logger.info("Cloning '{0}' from '{1}' @ '{2}' with '{3}'...".format(
            repo, url, branch, vcs))
        client = vcstools.get_vcs_client(vcs, tmp_dir)
        if not client.checkout(url, version=branch, shallow=True):
            raise RuntimeError(
                "Failed to checkout branch '{0}' from '{1}'"
                .format(branch, url))
        # Collect the names of all packages in the checked-out tree.
        names = [pkg.name for _, pkg in find_packages(tmp_dir).items()]
        if version == 'latest':
            # The ignore list is tracked on the master branch of the repo.
            client.update(version='master')
            ignore_file = ros_distro + '.ignored'
            if os.path.exists(ignore_file):
                with open(ignore_file, 'r') as f:
                    ignored = [l.strip() for l in f.read().split() if l]
        return [p for p in names if p not in ignored]
def test_vcs_checkout():
    """Checking out URL1 into a fresh temp dir must create a .git directory."""
    workdir = mkdtemp(prefix="kez-test-")
    git_dir = pathjoin(workdir, '.git')
    assert not pathexists(git_dir)
    get_vcs_client("git", workdir).checkout(URL1)
    assert pathexists(git_dir)
    shutil.rmtree(workdir)
def test_vcs_checkout():
    """A git checkout of URL1 should populate the target directory with .git."""
    checkout_dir = mkdtemp(prefix="kez-test-")
    marker = pathjoin(checkout_dir, '.git')
    assert not pathexists(marker)
    client = get_vcs_client("git", checkout_dir)
    client.checkout(URL1)
    assert pathexists(marker)
    shutil.rmtree(checkout_dir)
def repo_clone(https_url, vcs_type):
    """Clone the repository at ``https_url`` under ``repos_local_path``.

    The checkout lands in a subdirectory named after the repository
    (the last path component of the URL).

    Keyword arguments:
    https_url -- a string of repository url.
    vcs_type -- a string of a type of a repository, such as: git, svn, hg.
    """
    reponame = https_url.rsplit('/', 1)[1]
    get_vcs_client(vcs_type, repos_local_path + reponame).checkout(https_url)
def checkout_rosinstall(rosinstall_data, verbose=False):
    """Check out every entry of a rosinstall document.

    :param rosinstall_data: yaml dict in rosinstall format
    :param verbose: if True, print each entry's scm type, path, uri and version
    :raises: rosinstall.common.MultiProjectException for invalid yaml
    """
    for fragment in rosinstall_data:
        spec = get_path_spec_from_yaml(fragment)
        if verbose:
            print(spec.get_scmtype(), spec.get_path(),
                  spec.get_uri(), spec.get_version())
        client = vcstools.get_vcs_client(spec.get_scmtype(), spec.get_path())
        client.checkout(spec.get_uri(), spec.get_version())
def test_get_vcs_client(self):
    """A registered VCS type is constructed by get_vcs_client; unknown types raise."""
    try:
        # Swap in an empty registry so the test is isolated from real types.
        backup = vcstools.vcs_abstraction._VCS_TYPES
        vcstools.vcs_abstraction._VCS_TYPES = {}
        self.assertEqual([], get_registered_vcs_types())
        mock_class = Mock()
        mock_instance = Mock()
        mock_class.return_value = mock_instance  # stands in for the constructor
        register_vcs('foo', mock_class)
        self.assertEqual(mock_instance, get_vcs_client('foo', 'foopath'))
        self.assertRaises(ValueError, get_vcs_client, 'bar', 'barpath')
    finally:
        # Always restore the real registry, even if an assertion failed.
        vcstools.vcs_abstraction._VCS_TYPES = backup
def status(verbose):
    """Print VCS status for each checked-out patched repository under target_path.

    For every directory in ``target_path`` that matches a distro repository
    with patched packages, compare the local checkout's URL and version
    against the distro's rosinstall data, then print the working-tree status
    (including untracked files).
    """
    distro = get_rosdistro()
    repositories = [
        r for r in distro.repositories.values()
        if r.source_repository and r.source_repository.patched_packages
    ]
    for path in os.listdir(target_path):
        name = os.path.basename(path)
        repo = [r for r in repositories if r.name == name]
        if len(repo) != 1:
            logger.warning('skipping unknown repo: %s', name)
            continue
        repo = repo[0]
        config = generate_rosinstall_for_repos(
            {'DOESNOTMATTER': repo}, version_tag=False, tar=False)[0]
        assert len(config) == 1
        repo_type, attributes = next(iter(config.items()))
        # BUG FIX: 'version' was never reset, so when an entry lacked a
        # 'version' key the comparison below used a stale value from a
        # previous iteration (or raised NameError on the first one).
        version = None
        try:
            url = attributes['uri']
            if 'version' in attributes:
                version = attributes['version']
        # BUG FIX: dict subscripting raises KeyError, which the former
        # 'except AttributeError' clause could never catch.
        except (AttributeError, KeyError) as e:
            logger.warning("Repository '%s' does not provide the necessary "
                           'information: %s' % (path, e))
            continue
        client = get_vcs_client(repo_type, os.path.join(target_path, path))
        if client.get_url() != url:
            logger.error(
                "local vcs url is different from the distro's url:\n\tlocal: %s\n\tdistro: %s",
                client.get_url(), url)
        current_version = client.get_current_version_label()
        # Only show the checked-out label when it differs from the distro's.
        if current_version == version:
            print('=== %s (%s) ===' % (name, repo_type))
        else:
            print('=== %s (%s) === @ %s' % (name, repo_type, current_version))
        print(client.get_status(untracked=True))
def deploy(args):
    "(re)build and deploy the application"
    build_root = '/tmp/build'
    build_dest = '/tmp/build-dest'
    os.mkdir(build_root)
    os.mkdir(build_dest)
    vcs_client = vcstools.get_vcs_client('git', build_root)
    # checkout() returns falsy on failure, so short-circuit into fail().
    vcs_client.checkout('source-url', version='1.2.3') or fail('source checkout failed')
    # TODO: build environment
    # TODO: build user
    deploy_script = os.path.join(build_root, 'nla-deploy.sh')
    if os.path.exists(deploy_script):
        subprocess.call(['bash', 'nla-deploy.sh', build_dest, 'bogus'],
                        cwd=build_root)
    elif os.path.exists(os.path.join(build_root, 'pom.xml')):
        subprocess.call(['mvn', 'package'], cwd=build_root)
    else:
        print('jvmctl: No build script found (add a pom.xml or nla-deploy.sh)',
              file=sys.stderr)
def run(self):
    # Worker loop: drain checkout/update specs from the input queue until it
    # is empty, recording failures in self.failed and the overall outcome in
    # self.success.
    try:
        while True:
            # Non-blocking get: raises queue.Empty when the queue is drained,
            # which is the loop's normal exit path (handled below).
            spec = self._qin.get(False)
            co_path = os.path.join(self._workdir, spec['path'])
            c = vcstools.get_vcs_client(spec['type'], co_path)
            repo_exists = c.detect_presence()
            # 'force-clean' wipes an existing checkout so it is re-fetched
            # from scratch.
            if repo_exists and spec.get('force-clean', False):
                self._log.info("{type}: Force clean path: {0}".format(
                    co_path, **spec))
                shutil.rmtree(co_path)
                repo_exists = False
            if repo_exists:
                op = "Updating"
                self._log.debug(
                    "{type}: Updating {0} (url={fetch})".format(
                        co_path, **spec))
                ret = c.update(version=spec.get('revision', ''),
                               force_fetch=True)
            else:
                op = "Fetching"
                self._log.debug(
                    "{type}: Fetching url={fetch} -> {0}".format(
                        co_path, **spec))
                ret = c.checkout(spec['fetch'],
                                 version=spec.get('revision', ''),
                                 shallow=spec.get('shallow', False))
            # The work item is acknowledged before result bookkeeping so a
            # producer join()ing the queue is not held up by it.
            self._qin.task_done()
            if ret:
                # Record the concrete revision that was checked out/updated.
                spec['co_revision'] = c.get_version()
                self._log.debug(
                    "{type}: {0} path '{path}' to {co_revision} succeeded".
                    format(op, **spec))
            if not ret:
                self._log.debug(
                    "{type}: {0} path '{path}' failed: {1}".format(
                        op, ret, **spec))
                self.failed.append(spec)
                self.success = False
    except queue.Empty:
        # Normal termination: only flip success to True if no failure has
        # already pinned it to False (tri-state: None = untouched).
        if self.success is None:
            self.success = True
        self._log.debug("Stopping work: Input queue is empty")
        return None
def update_folder(target_path, folder_mapping, verbose):
    """Update (or freshly check out) every repository in ``folder_mapping``.

    Generates rosinstall entries for the mapping, then for each entry either
    pulls the existing checkout under ``target_path`` or checks it out anew.
    Exits the process with status 1 when an update or checkout fails.
    """
    # generate rosinstall file
    config = generate_rosinstall_for_repos(folder_mapping, version_tag=False,
                                           tar=False)
    for i, item in enumerate(config):
        assert len(item) == 1
        repo_type, attributes = next(iter(item.items()))
        try:
            path = attributes['local-name']
        # BUG FIX: a missing key raises KeyError, which the former
        # 'except AttributeError' clause could never catch.
        except (AttributeError, KeyError) as e:
            logger.warning('Repository #%d does not provide the necessary '
                           'information: %s' % (i, e))
            continue
        # BUG FIX: 'version' was unbound (or stale from a previous iteration)
        # when the entry had no 'version' key, breaking the checkout below.
        version = None
        try:
            url = attributes['uri']
            if 'version' in attributes:
                version = attributes['version']
        except (AttributeError, KeyError) as e:
            logger.warning("Repository '%s' does not provide the necessary "
                           'information: %s' % (path, e))
            continue
        client = get_vcs_client(repo_type, os.path.join(target_path, path))
        if client.detect_presence():
            # TODO: backup folder and checkout the new version
            if client.get_url() != url:
                logger.error(
                    "local vcs url is different from the distro's url:\n\tlocal: %s\n\tdistro: %s",
                    client.get_url(), url)
            # skip version because we only want to pull
            if not client.update(verbose=verbose):
                logger.error("Could not update %s repo: %s", repo_type, path)
                exit(1)
        else:
            # BUG FIX: a bare 'assert' is stripped under 'python -O';
            # fail loudly and consistently with the update branch instead.
            if not client.checkout(url, version=version, verbose=verbose):
                logger.error("Could not checkout %s repo: %s", repo_type, path)
                exit(1)
def main():
    """Check out the dependency closure of the requested packages/repositories.

    Reads a distribution file mapping repositories to packages, checks out
    each needed repository into the workspace, crawls package.xml
    dependencies (optionally stripping optional ones), and finally drops
    CATKIN_IGNORE markers into installed-but-unreferenced packages.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--workspace', type=str, default='.')
    parser.add_argument('distribution_file', type=str)
    parser.add_argument('--package', type=str, action='append',
                        dest='target_packages', default=[])
    parser.add_argument('--repository', type=str, action='append',
                        dest='target_repositories', default=[])
    parser.add_argument('--required-only', action='store_true',
                        dest='required_dependencies_only')
    args = parser.parse_args()

    if not os.path.exists(args.workspace):
        os.makedirs(args.workspace)
    if not os.path.isdir(args.workspace):
        raise ValueError('Workspace "{:s}" is not a directory.'.format(
            args.workspace))

    # Load the distribution file.
    with open(args.distribution_file, 'rb') as distribution_file:
        distribution_raw = yaml.load(distribution_file, Loader=yaml.FullLoader)

    packages_raw = distribution_raw.get('repositories')
    if packages_raw is None:
        raise ValueError('Distribution is missing the "repositories" key.')

    repositories = {
        name: Repository(name, options)
        for name, options in packages_raw.items()
    }

    # Build a map from package name to the repository that contains it, based
    # solely on the information in the distribution file.
    distribution_package_map = dict()
    for repository in repositories.values():
        for package_name in repository.packages:
            existing_repository = distribution_package_map.get(package_name)
            if existing_repository is not None:
                raise ValueError(
                    'Duplicate package "{:s}" in repositories "{:s}" and'
                    ' "{:s}".'.format(package_name, existing_repository.name,
                                      repository.name))
            distribution_package_map[package_name] = Package(
                package_name, repository)

    # Aggregate a map of packages that we know about.
    package_map = dict(distribution_package_map)
    done_packages = set()  # installed and processed
    installed_packages = set()  # installed, but not processed yet

    pending_packages = set(args.target_packages)
    for repository_name in args.target_repositories:
        repository = repositories.get(repository_name)
        if repository is None:
            raise ValueError(
                'There is no repository named "{:s}".'.format(repository_name))
        pending_packages.update(repository.packages)

    while pending_packages:
        package_name = pending_packages.pop()
        # BUG FIX: dependency edges re-add already-processed packages to the
        # pending set; without this guard a dependency cycle loops forever.
        if package_name in done_packages:
            continue
        print('Processing package "{:s}"'.format(package_name))

        package = package_map.get(package_name)
        if package is None:
            raise ValueError(
                'Package "{:s}" is not in the distribution.'.format(
                    package_name))

        # Checkout the repository (only on first encounter: the mark-installed
        # loop below rejects a second visit as a duplicate installation).
        repository = package.repository
        if repository.location is None:
            repository.location = os.path.join(args.workspace,
                                               repository.name)
            print(' Checking out "{:s}" repository => {:s}'.format(
                repository.name, repository.location))
            client = vcstools.get_vcs_client(repository.vcs_type,
                                             repository.location)
            if client.detect_presence():
                detected_url = client.get_url()
                if not client.url_matches(detected_url, repository.vcs_uri):
                    # BUG FIX: this message formatted 'repository_name', a
                    # stale (or, with no --repository args, undefined) loop
                    # variable; use the repository's own name.
                    raise ValueError(
                        'Directory "{:s}" already contains a VCS repository with'
                        ' URL "{:s}". This does not match the requested URL'
                        ' "{:s}".'.format(repository.name, detected_url,
                                          repository.vcs_uri))
                client.update(version=repository.vcs_version)
            else:
                client.checkout(repository.vcs_uri,
                                version=repository.vcs_version)

            # Search for packages in the repository.
            repository_package_map = dict()
            rospkg.list_by_path(manifest_name='package.xml',
                                path=repository.location,
                                cache=repository_package_map)

            if package.name not in repository_package_map:
                raise ValueError(
                    'Repository "{:s}" checked out from the "{:s}" repository'
                    ' "{:s}" does not contain the package "{:s}".'.format(
                        repository.name, repository.vcs_type,
                        repository.vcs_uri, package.name))

            # Mark all of these packages as installed.
            for package_name, location in repository_package_map.items():
                installed_package = package_map.get(package_name)
                if installed_package is None:
                    installed_package = Package(package_name, repository)
                    package_map[package_name] = installed_package
                elif (installed_package.repository != repository
                        or installed_package.location is not None):
                    raise ValueError(
                        'Repository "{:s} installed duplicate package "{:s}"'
                        ' in directory "{:s}". This package was already installed'
                        ' by repository "{:s}" in directory "{:s}".'.format(
                            repository.name, package_name, location,
                            installed_package.repository.name,
                            installed_package.location))
                installed_package.location = location
                print(' Found package "{:s}" => {:s}'.format(
                    installed_package.name, installed_package.location))
            installed_packages.update(repository_package_map.keys())

        # Crawl dependencies.
        package_xml_path = os.path.join(package.location, 'package.xml')
        package_manifest = parse_package(package_xml_path)

        all_depends = set()
        for dependency_type in DEPENDENCY_TYPES:
            for dependency in getattr(package_manifest, dependency_type):
                all_depends.add(dependency.name)

        # Remove optional dependencies
        if args.required_dependencies_only:
            optional_depends = set()
            for export in package_manifest.exports:
                if export.tagname != 'optional':
                    continue
                dependency_name = export.content
                if dependency_name not in all_depends:
                    raise ValueError(
                        'Optional dependency "{:s}" not found in package "{:s}".'
                        .format(dependency_name, package.name))
                optional_depends.add(dependency_name)

            # Note: this rewriting procedure assumes that only one <depend> tag
            # is on each line.
            depend_re = re.compile(r'<depend>(.*?)</depend>')

            def is_optional_dependency(line):
                depend_matches = re.findall(depend_re, line)
                return len(depend_matches
                           ) == 1 and depend_matches[0] in optional_depends

            with open(package_xml_path) as f:
                lines = f.readlines()
            with open(package_xml_path, 'w') as f:
                f.writelines([
                    line for line in lines if not is_optional_dependency(line)
                ])

            all_depends -= optional_depends

        # Only keep the dependencies that we know about.
        def annotate_package_name(package_name):
            # '*' = processed, '^' = installed but not yet processed.
            if package_name in done_packages:
                return package_name + '*'
            elif package_name in installed_packages:
                return package_name + '^'
            else:
                return package_name

        known_depends = all_depends.intersection(
            distribution_package_map.keys())
        if known_depends:
            print(' Depends on:',
                  ' '.join(sorted(map(annotate_package_name, known_depends))))

        done_packages.add(package.name)
        pending_packages.update(known_depends)

    # Print a summary and generate CATKIN_IGNORE files for installed packages
    # that we do not explicitly depend on.
    for package_name in installed_packages:
        package = package_map[package_name]
        if package_name not in done_packages:
            catkin_ignore_path = os.path.join(package.location,
                                              'CATKIN_IGNORE')
            with open(catkin_ignore_path, 'wb'):
                pass
            suffix = ' [IGNORED]'
        else:
            suffix = ''
        print('Package "{:s}" => {:s}{:s}'.format(package.name,
                                                  package.location, suffix))
def _get_vcs_client(self):
    """Build a vcstools client for this project's VCS type at the checkout path."""
    vcs_type = self._project.vcs
    return get_vcs_client(vcs_type, self._checkout)
def main():
    """Check out the requested packages plus their known dependency closure.

    Python 2 variant: reads a distribution file mapping repositories to
    packages, checks out each needed repository into the workspace, and
    crawls package.xml dependencies breadth-first.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--workspace', type=str, default='.')
    parser.add_argument('distribution_file', type=str)
    parser.add_argument('package_names', type=str, nargs='+')
    args = parser.parse_args()

    # Load the distribution file.
    # NOTE(review): yaml.load without an explicit Loader can execute
    # arbitrary tags; prefer yaml.safe_load if the file is not fully trusted.
    with open(args.distribution_file, 'rb') as distribution_file:
        distribution_raw = yaml.load(distribution_file)

    packages_raw = distribution_raw.get('repositories')
    if packages_raw is None:
        raise ValueError('Distribution is missing the "repositories" key.')

    repositories = {
        name: Repository(name, options)
        for name, options in packages_raw.iteritems()
    }

    # Build a map from package name to the repository that contains it, based
    # solely on the information in the distribution file.
    distribution_package_map = dict()
    for repository in repositories.itervalues():
        for package_name in repository.packages:
            existing_repository = distribution_package_map.get(package_name)
            if existing_repository is not None:
                raise ValueError(
                    'Duplicate package "{:s}" in repositories "{:s}" and'
                    ' "{:s}".'.format(
                        package_name, existing_repository.name,
                        repository.name))
            distribution_package_map[package_name] = Package(
                package_name, repository)

    # Aggregate a map of packages that we know about.
    package_map = dict(distribution_package_map)
    done_packages = set()  # installed and processed
    installed_packages = set()  # installed, but not processed yet

    pending_packages = set(args.package_names)
    while pending_packages:
        package_name = pending_packages.pop()
        # BUG FIX: dependency edges re-add already-processed packages to the
        # pending set; without this guard a dependency cycle loops forever.
        if package_name in done_packages:
            continue
        print('Processing package "{:s}"'.format(package_name))

        package = package_map.get(package_name)
        if package is None:
            raise ValueError(
                'Package "{:s}" is not in the distribution.'.format(
                    package_name))

        # Checkout the repository (only on first encounter: the mark-installed
        # loop below rejects a second visit as a duplicate installation).
        repository = package.repository
        if repository.location is None:
            repository.location = os.path.join(args.workspace,
                                               repository.name)
            print(' Checking out "{:s}" repository => {:s}'.format(
                repository.name, repository.location))
            client = vcstools.get_vcs_client(
                repository.vcs_type, repository.location)
            if client.detect_presence():
                detected_url = client.get_url()
                if not client.url_matches(detected_url, repository.vcs_uri):
                    # BUG FIX: this message formatted 'repository_name',
                    # which is never defined in this scope; use the
                    # repository's own name.
                    raise ValueError(
                        'Directory "{:s}" already contains a VCS repository with'
                        ' URL "{:s}". This does not match the requested URL'
                        ' "{:s}".'.format(repository.name, detected_url,
                                          repository.vcs_uri))
                client.update(version=repository.vcs_version)
            else:
                client.checkout(repository.vcs_uri,
                                version=repository.vcs_version)

            # Search for packages in the repository.
            repository_package_map = dict()
            rospkg.list_by_path(
                manifest_name='package.xml',
                path=repository.location,
                cache=repository_package_map)

            if package.name not in repository_package_map:
                raise ValueError(
                    'Repository "{:s}" checked out from the "{:s}" repository'
                    ' "{:s}" does not contain the package "{:s}".'.format(
                        repository.name, repository.vcs_type,
                        repository.vcs_uri, package.name))

            # Mark all of these packages as installed.
            for package_name, location in repository_package_map.iteritems():
                installed_package = package_map.get(package_name)
                if installed_package is None:
                    installed_package = Package(package_name, repository)
                    package_map[package_name] = installed_package
                elif (installed_package.repository != repository
                        or installed_package.location is not None):
                    raise ValueError(
                        'Repository "{:s} installed duplicate package "{:s}"'
                        ' in directory "{:s}". This package was already installed'
                        ' by repository "{:s}" in directory "{:s}".'.format(
                            repository.name, package_name, location,
                            installed_package.repository.name,
                            installed_package.location))
                installed_package.location = location
                print(' Found package "{:s}" => {:s}'.format(
                    installed_package.name, installed_package.location))
            installed_packages.update(repository_package_map.iterkeys())

        # Crawl dependencies.
        package_xml_path = os.path.join(package.location, 'package.xml')
        package_manifest = parse_package(package_xml_path)

        all_depends = set()
        for dependency_type in DEPENDENCY_TYPES:
            for dependency in getattr(package_manifest, dependency_type):
                all_depends.add(dependency.name)

        # Only keep the dependencies that we know about.
        def annotate_package_name(package_name):
            # '*' = processed, '^' = installed but not yet processed.
            if package_name in done_packages:
                return package_name + '*'
            elif package_name in installed_packages:
                return package_name + '^'
            else:
                return package_name

        known_depends = all_depends.intersection(
            distribution_package_map.iterkeys())
        if known_depends:
            print(' Depends on:', ' '.join(
                sorted(map(annotate_package_name, known_depends))))

        # BUG FIX: done_packages was read (for the '*' annotation and the
        # termination guard) but never populated.
        done_packages.add(package.name)
        pending_packages.update(known_depends)
from vcstools import get_vcs_client

CURRENT_DIR = path.dirname(path.abspath(__file__))
REPO_CONF_FILE = path.join(CURRENT_DIR, 'modules.json')
MODULES_DIR = path.join(CURRENT_DIR, 'modules')

# Default branch/revision to track when a module's config omits 'version'.
DEFAULT_VERSION = defaultdict(lambda: 'master', {
    'hg': 'default',
})

# BUG FIX: the config file was opened without a context manager and would
# leak the handle if json.load() raised.
with open(REPO_CONF_FILE, 'r') as conf_file:
    conf_data = json.load(conf_file)

# Clone missing modules, update existing ones.
for module, conf in conf_data.iteritems():
    module_path = path.join(MODULES_DIR, module)
    module_ver = conf.get('version', DEFAULT_VERSION[conf['vcs']])
    client = get_vcs_client(conf['vcs'], module_path)
    if not client.path_exists():
        print()
        # BUG FIX: typo "Clonning" in the progress message.
        print("Cloning into '%s'..." % (module,))
        client.checkout(conf['url'], version=module_ver)
    else:
        print()
        print("Updating '%s'..." % (module,))
        client.update(version=module_ver)
def main():
    """Check out the requested packages plus their known dependency closure.

    Python 2 variant: reads a distribution file mapping repositories to
    packages, checks out each needed repository into the workspace, and
    crawls package.xml dependencies breadth-first.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--workspace', type=str, default='.')
    parser.add_argument('distribution_file', type=str)
    parser.add_argument('package_names', type=str, nargs='+')
    args = parser.parse_args()

    # Load the distribution file.
    # NOTE(review): yaml.load without an explicit Loader can execute
    # arbitrary tags; prefer yaml.safe_load if the file is not fully trusted.
    with open(args.distribution_file, 'rb') as distribution_file:
        distribution_raw = yaml.load(distribution_file)

    packages_raw = distribution_raw.get('repositories')
    if packages_raw is None:
        raise ValueError('Distribution is missing the "repositories" key.')

    repositories = {
        name: Repository(name, options)
        for name, options in packages_raw.iteritems()
    }

    # Build a map from package name to the repository that contains it, based
    # solely on the information in the distribution file.
    distribution_package_map = dict()
    for repository in repositories.itervalues():
        for package_name in repository.packages:
            existing_repository = distribution_package_map.get(package_name)
            if existing_repository is not None:
                raise ValueError(
                    'Duplicate package "{:s}" in repositories "{:s}" and'
                    ' "{:s}".'.format(package_name, existing_repository.name,
                                      repository.name))
            distribution_package_map[package_name] = Package(
                package_name, repository)

    # Aggregate a map of packages that we know about.
    package_map = dict(distribution_package_map)
    done_packages = set()  # installed and processed
    installed_packages = set()  # installed, but not processed yet

    pending_packages = set(args.package_names)
    while pending_packages:
        package_name = pending_packages.pop()
        # BUG FIX: dependency edges re-add already-processed packages to the
        # pending set; without this guard a dependency cycle loops forever.
        if package_name in done_packages:
            continue
        print('Processing package "{:s}"'.format(package_name))

        package = package_map.get(package_name)
        if package is None:
            raise ValueError(
                'Package "{:s}" is not in the distribution.'.format(
                    package_name))

        # Checkout the repository (only on first encounter: the mark-installed
        # loop below rejects a second visit as a duplicate installation).
        repository = package.repository
        if repository.location is None:
            repository.location = os.path.join(args.workspace,
                                               repository.name)
            print(' Checking out "{:s}" repository => {:s}'.format(
                repository.name, repository.location))
            client = vcstools.get_vcs_client(repository.vcs_type,
                                             repository.location)
            if client.detect_presence():
                detected_url = client.get_url()
                if not client.url_matches(detected_url, repository.vcs_uri):
                    # BUG FIX: this message formatted 'repository_name',
                    # which is never defined in this scope; use the
                    # repository's own name.
                    raise ValueError(
                        'Directory "{:s}" already contains a VCS repository with'
                        ' URL "{:s}". This does not match the requested URL'
                        ' "{:s}".'.format(repository.name, detected_url,
                                          repository.vcs_uri))
                client.update(version=repository.vcs_version)
            else:
                client.checkout(repository.vcs_uri,
                                version=repository.vcs_version)

            # Search for packages in the repository.
            repository_package_map = dict()
            rospkg.list_by_path(manifest_name='package.xml',
                                path=repository.location,
                                cache=repository_package_map)

            if package.name not in repository_package_map:
                raise ValueError(
                    'Repository "{:s}" checked out from the "{:s}" repository'
                    ' "{:s}" does not contain the package "{:s}".'.format(
                        repository.name, repository.vcs_type,
                        repository.vcs_uri, package.name))

            # Mark all of these packages as installed.
            for package_name, location in repository_package_map.iteritems():
                installed_package = package_map.get(package_name)
                if installed_package is None:
                    installed_package = Package(package_name, repository)
                    package_map[package_name] = installed_package
                elif (installed_package.repository != repository
                        or installed_package.location is not None):
                    raise ValueError(
                        'Repository "{:s} installed duplicate package "{:s}"'
                        ' in directory "{:s}". This package was already installed'
                        ' by repository "{:s}" in directory "{:s}".'.format(
                            repository.name, package_name, location,
                            installed_package.repository.name,
                            installed_package.location))
                installed_package.location = location
                print(' Found package "{:s}" => {:s}'.format(
                    installed_package.name, installed_package.location))
            installed_packages.update(repository_package_map.iterkeys())

        # Crawl dependencies.
        package_xml_path = os.path.join(package.location, 'package.xml')
        package_manifest = parse_package(package_xml_path)

        all_depends = set()
        for dependency_type in DEPENDENCY_TYPES:
            for dependency in getattr(package_manifest, dependency_type):
                all_depends.add(dependency.name)

        # Only keep the dependencies that we know about.
        def annotate_package_name(package_name):
            # '*' = processed, '^' = installed but not yet processed.
            if package_name in done_packages:
                return package_name + '*'
            elif package_name in installed_packages:
                return package_name + '^'
            else:
                return package_name

        known_depends = all_depends.intersection(
            distribution_package_map.iterkeys())
        if known_depends:
            print(' Depends on:',
                  ' '.join(sorted(map(annotate_package_name, known_depends))))

        # BUG FIX: done_packages was read (for the '*' annotation and the
        # termination guard) but never populated.
        done_packages.add(package.name)
        pending_packages.update(known_depends)