def get_dependencies(self, package_folder):
    """Find and parse package.xml file and return a dict of dependencies.

    Args:
        package_folder (str): A folder to search package.xml in.

    Returns:
        dict: A dictionary with a dependency for each package name, or
            None when no package.xml could be found.
    """
    path_to_xml = Parser.__get_package_xml_path(package_folder)
    if not path_to_xml:
        log.critical(" 'package.xml' not found for package [%s].",
                     self.pkg_name)
        return None
    xmldoc = minidom.parse(path_to_xml)
    # Collect dependencies from every relevant tag of the xml file.
    all_deps = []
    for tag in Parser.TAGS:
        tag_deps = Parser.__node_to_list(xmldoc, tag)
        all_deps.extend(Parser.__fix_dependencies(tag_deps, self.pkg_name))
    msg = " {}: Found {} valid dependencies".format(
        Tools.decorate(self.pkg_name), len(all_deps))
    self.printer.print_msg(msg)
    log.debug(" Dependencies: %s", all_deps)
    deps_with_urls = self.__init_dep_dict(all_deps)
    return self.__update_explicit_values(xmldoc, deps_with_urls)
def __clone_dependency(self, pkg_name, url, dep_path, branch):
    """Clone a single dependency. Return a future to the clone process."""
    if self.use_preprint:
        # Show a live "cloning" status line while the clone is running.
        status_line = " {}: {}".format(Tools.decorate(pkg_name),
                                       Downloader.CLONING_TAG)
        self.printer.add_msg(pkg_name, status_line)
    return GitBridge.clone(pkg_name, url, dep_path, branch)
def test_populate_urls_with_name(self):
    """Test populating urls with names."""
    raw_urls = ['blah{package}', '{package}', 'blah']
    result = Tools.populate_urls_with_name(urls=raw_urls, pkg_name='NAME')
    self.assertIn('blahNAME', result)
    self.assertIn('NAME', result)
    # The plain string without a placeholder must not survive unchanged.
    self.assertNotIn('blah', result)
def update_packages(self, selected_packages):
    """Update all the folders to match the remote. Considers the branch.

    Args:
        selected_packages (str[]): List of packages picked by the user.

    Returns:
        status_msgs (list(tuple)): A list of tuples (pkg_name, tag).
    """
    log.info(" Pulling packages:")
    packages = self.filter_packages(selected_packages)
    status_msgs = []
    futures_list = []
    # Submit one pick_tag task per package to the thread pool.
    for ws_folder, package in packages.items():
        # FIX: removed dead `picked_tag = None` assignment — the value is
        # always produced by future.result() in the loop below.
        folder = path.join(self.ws_path, ws_folder)
        futures_list.append(
            self.thread_pool.submit(self.pick_tag, folder, package))
    for future in futures.as_completed(futures_list):
        package, picked_tag = future.result()
        # change logger for warning if something is wrong
        if self.colored:
            picked_tag = Updater.colorize_tag(picked_tag)
        # now show the results to the user
        status_msgs.append((package.name, picked_tag))
        msg = " {}: {}".format(Tools.decorate(package.name), picked_tag)
        self.printer.purge_msg(package.name, msg)
    return status_msgs
def set_default_urls_if_needed(self, default_urls):
    """Set default urls if no url set before."""
    if not default_urls:
        # Nothing to populate from.
        return
    if self.url:
        # An explicit url always wins over the defaults.
        log.info(
            " Package [%s]: Skip default urls. Explicit one defined: %s",
            self.name, self.url)
        return
    self.default_urls = Tools.populate_urls_with_name(urls=default_urls,
                                                      pkg_name=self.name)
def test_update_deps(self):
    """Test that we can update the dictionary.

    An entry present in both dicts must take the values of the new one.
    """
    old_dict = {'test': Dependency(name='test')}
    new_dict = {
        'test2': Dependency(name='test2'),
        'test': Dependency(name='test', branch='blah')
    }
    updated_dict = Tools.update_deps_dict(old_dict, new_dict)
    # FIX: use assertIn for clearer failure messages; removed the useless
    # trailing `pass` statement.
    self.assertIn('test', updated_dict)
    self.assertIn('test2', updated_dict)
    self.assertEqual('blah', updated_dict['test'].branch)
def __clone_dependencies(self, checked_deps):
    """Clone dependencies.

    Args:
        checked_deps (dict): Dict {name: dep} with valid dependencies.

    Returns:
        int: Error code. 0 if all fine, Result from git error otherwise.
    """
    if not checked_deps:
        # exit early if there is nothing to download
        return Downloader.NO_ERROR
    log.info(" Cloning valid dependencies:")
    error_code = Downloader.NO_ERROR
    # Fan all clone jobs out to the thread pool first.
    clone_futures = []
    for name, dependency in checked_deps.items():
        log.debug(" prepare clone: url: %s, branch: %s",
                  dependency.url, dependency.branch)
        branch = dependency.branch if dependency.branch else "master"
        if name in self.available_pkgs:
            # Already present in the workspace; report and skip cloning.
            msg = " {}: {}".format(Tools.decorate(name),
                                   GitBridge.EXISTS_TAG)
            self.printer.purge_msg(name, msg)
            continue
        dep_path = path.join(self.ws_path, name)
        clone_futures.append(
            self.thread_pool.submit(self.__clone_dependency, name,
                                    dependency.url, dep_path, branch))
    # All futures submitted. Wait for each and report its outcome.
    for done in futures.as_completed(clone_futures):
        pkg_name, clone_result = done.result()
        msg = " {}: {}".format(Tools.decorate(pkg_name), clone_result)
        self.printer.purge_msg(pkg_name, msg)
        if clone_result == GitBridge.ERROR_TAG:
            error_code = 1
    return error_code
def __check_dependencies(self, dep_dict):
    """Check dependencies for validity.

    Packages that the user chose to ignore, as well as those whose
    repositories cannot be found, are filtered out.

    Args:
        dep_dict (dict): A dictionary {name: dep} with dependencies.

    Returns:
        dict: Only valid dependencies from the input dict.
    """
    valid_deps = {}
    if not dep_dict:
        # exit early if there are no new dependencies
        return valid_deps
    check_futures = []
    log.info(" Checking merged dependencies:")
    for dependency in dep_dict.values():
        log.debug(" Check dependency: %s", dependency)
        if dependency.name in self.ignore_pkgs:
            log.debug(" Skipping ignored package '%s'", dependency.name)
            continue
        check_futures.append(
            self.thread_pool.submit(self.__check_dependency, dependency))
    for done in futures.as_completed(check_futures):
        dependency, repo_found = done.result()
        if repo_found:
            tag = Downloader.FOUND_TAG + dependency.url
        else:
            tag = Downloader.NOT_FOUND_TAG
        msg = " {}: {}".format(Tools.decorate(dependency.name), tag)
        self.printer.purge_msg(dependency.name, msg)
        if repo_found:
            valid_deps[dependency.name] = dependency
    return valid_deps
def pick_tag(self, folder, package):
    """Pick result tag for a folder."""
    if self.use_preprint:
        running_msg = " {}: {}".format(Tools.decorate(package.name),
                                       Updater.RUNNING_TAG)
        self.printer.add_msg(package.name, running_msg)
    output, branch, has_changes = GitBridge.status(folder)
    if has_changes:
        # Local modifications present: do not touch this folder.
        return package, Updater.CHANGES_TAG
    try:
        pull_output = GitBridge.pull(folder, branch)
        return package, Updater.tag_from_output(pull_output)
    except subprocess.CalledProcessError as e:
        log.debug(" git pull returned error: %s", e)
        return package, Updater.ERROR_TAG
def main(opts):
    """Run the script.

    Args:
        opts (dict): Options populated by an arg parser.

    Returns:
        int: Return code
    """
    # Configure verbosity first, so every later step logs consistently.
    if opts.verbose:
        log.setLevel(logging.getLevelName("DEBUG"))
        log.debug(" Enabling DEBUG output.")
    else:
        log.setLevel(logging.getLevelName("INFO"))
    use_preprint = not opts.no_status
    if use_preprint:
        log.info(" Will print status messages while cloning.")
    else:
        log.info(" Not printing status messages while cloning.")
    log.info(" Using %s threads.", opts.num_threads)
    context = Context.load(opts.workspace, opts.profile, opts, append=True)
    if opts.default_url != Tools.PACKAGE_TAG:
        opts.default_urls += "," + opts.default_url
    # Prepare the set of default urls
    default_urls = Tools.prepare_default_urls(opts.default_urls)
    if not opts.workspace:
        log.critical(" Workspace undefined! Abort!")
        return 1
    if opts.verb == 'fetch' or opts.subverb == 'fetch':
        return fetch(packages=opts.packages,
                     workspace=opts.workspace,
                     context=context,
                     default_urls=default_urls,
                     use_preprint=use_preprint,
                     num_threads=opts.num_threads,
                     pull_after_fetch=opts.update)
    if opts.subverb == 'update':
        return update(packages=opts.packages,
                      workspace=opts.workspace,
                      context=context,
                      use_preprint=use_preprint,
                      num_threads=opts.num_threads)
def __update_explicit_values(self, xmldoc, dep_dict):
    """Specify explicit values instead of default ones.

    A user can define explicit values for each package in the <export>
    tag. Values to be specified: url, branch. This function reads the
    appropriate part of `package.xml` file and replaces the default
    values with the ones it finds there.

    Args:
        xmldoc (minidom): Current xml object
        dep_dict (dict): A dict {name: dep} with default deps.

    Returns:
        dict: A dict with final dependencies parsed from <export> tags
    """
    for url_tag in Parser.URL_TAGS:
        urls_node = xmldoc.getElementsByTagName(url_tag)
        for item in urls_node:
            target = Parser.__get_attr('target', item)
            if not target:
                log.warning(" skip xml item: '%s'", item)
                continue
            log.debug(" read target:'%s'", target)
            url = Parser.__get_attr('url', item)
            if url and target == 'all':
                # The target is 'all' so this denotes a default url.
                prepared_url = Tools.prepare_default_url(url)
                if prepared_url:
                    self.default_urls.add(prepared_url)
                else:
                    log.error("Url: '%s' is wrongly formatted.", url)
                # We are done reading this entry, skip to next now.
                continue
            # FIX: guard against <export> entries whose target is not a
            # parsed dependency (previously raised KeyError on
            # dep_dict[target], including 'all' entries with no url).
            if target not in dep_dict:
                log.warning(" target '%s' is not a known dependency."
                            " Skip it.", target)
                continue
            if url:
                # Here we assume url is a full explicit url to package.
                dep_dict[target].url = url
                log.debug(" target url:'%s'", url)
            branch = Parser.__get_attr('branch', item)
            if branch:
                dep_dict[target].branch = branch
                log.debug(" target branch:'%s'", branch)
            log.debug(" updated dependency: %s", dep_dict[target])
    # Update the default urls for all dependencies
    for dep in dep_dict.values():
        dep.set_default_urls_if_needed(self.default_urls)
    return dep_dict
def test_prepare_default_urls(self):
    """Test formatting the default dir."""
    urls = [
        "git@path",                     # 0
        "git@path2/",                   # 1
        "https://path",                 # 2
        "https://path2/",               # 3
        "git@some_path.git",            # 4
        "git@some_path/{package}.git",  # 5
        "{package}"                     # 6
    ]
    prepared_urls = Tools.prepare_default_urls(','.join(urls))
    expected_present = [
        urls[0] + '/{package}' + '.git',
        urls[1] + '{package}' + '.git',
        urls[2] + '/{package}',
        urls[3] + '{package}',
        urls[5],
        urls[6],
    ]
    for expected in expected_present:
        self.assertIn(expected, prepared_urls)
    # Url 4 must not appear unchanged in the prepared output.
    self.assertNotIn(urls[4], prepared_urls)
def fetch(packages, workspace, context, default_urls, use_preprint,
          num_threads, pull_after_fetch):
    """Fetch dependencies of a package.

    Args:
        packages (list): A list of packages provided by the user.
        workspace (str): Path to a workspace (without src/ in the end).
        context (Context): Current context. Needed to find current packages.
        default_urls (set(str)): A set of urls where we search for packages.
        use_preprint (bool): Show status messages while cloning.
        num_threads (int): Number of threads used for cloning.
        pull_after_fetch (bool): Also update the packages after fetching.

    Returns:
        int: Return code. 0 if success. Git error code otherwise.
    """
    fetch_all = False
    if not packages:
        fetch_all = True
    ws_path = path.join(workspace, 'src')
    ignore_pkgs = Tools.list_all_ros_pkgs()
    already_fetched = set()
    packages = set(packages)
    global_error_code = Downloader.NO_ERROR
    # loop until there are no new dependencies left to download
    while True:
        log.info(" Searching for dependencies.")
        deps_to_fetch = {}
        workspace_packages = find_packages(context.source_space_abs,
                                           exclude_subspaces=True,
                                           warnings=[])
        available_pkgs = [pkg.name for _, pkg in workspace_packages.items()]
        initial_cloned_pkgs = len(already_fetched)
        for package_path, package in workspace_packages.items():
            if package.name in already_fetched:
                continue
            if fetch_all or (package.name in packages):
                parser = Parser(default_urls=default_urls,
                                pkg_name=package.name)
                package_folder = path.join(ws_path, package_path)
                deps_to_fetch = Tools.update_deps_dict(
                    deps_to_fetch, parser.get_dependencies(package_folder))
                if deps_to_fetch is None:
                    sys.exit(1)
                already_fetched.add(package.name)
                for new_dep_name in deps_to_fetch.keys():
                    # make sure we don't stop until we analyzed all
                    # dependencies as we have just added these repositories
                    # we must analyze their dependencies too even if we wanted
                    # to download dependencies for one project only.
                    packages.add(new_dep_name)
                # FIX: merge default urls per parsed package. This was
                # previously done once after the loop, which referenced
                # `parser` while possibly unbound (NameError when nothing
                # was parsed) and only kept the last parser's urls.
                default_urls.update(parser.default_urls)
        try:
            downloader = Downloader(ws_path=ws_path,
                                    available_pkgs=available_pkgs,
                                    ignore_pkgs=ignore_pkgs,
                                    use_preprint=use_preprint,
                                    num_threads=num_threads)
        except ValueError as e:
            log.critical(" Encountered error. Abort.")
            # FIX: exceptions have no `.message` attribute on Python 3;
            # format the exception object itself instead.
            log.critical(" Error message: %s", e)
            return 1
        error_code = downloader.download_dependencies(deps_to_fetch)
        if len(already_fetched) == initial_cloned_pkgs:
            log.info(" No new dependencies. Done.")
            break
        if error_code != 0:
            global_error_code = error_code
        log.info(" New packages available. Process their dependencies now.")
    if pull_after_fetch:
        updater = Updater(ws_path=ws_path,
                          packages=workspace_packages,
                          use_preprint=use_preprint,
                          num_threads=num_threads)
        updater.update_packages(packages)
    return global_error_code
def test_default_ros_pkgs(self):
    """Test that we actually remove the default ros packages from list."""
    pkgs = Tools.list_all_ros_pkgs()
    diff = pkgs.symmetric_difference(Tools.default_ros_packages)
    # FIX: removed leftover debug `print(diff)`; assertLess reports the
    # offending value on failure anyway.
    self.assertLess(len(diff), 100)
def test_decorate(self):
    """Test how we decorate something."""
    expected = '[blah]'.ljust(25)
    self.assertEqual(expected, Tools.decorate('blah'))
def __check_dependency(self, dependency):
    """Check that the repository of a single dependency can be reached."""
    if self.use_preprint:
        # Show a live "checking" status line while the check runs.
        status_msg = " {}: {}".format(Tools.decorate(dependency.name),
                                      Downloader.CHECKING_TAG)
        self.printer.add_msg(dependency.name, status_msg)
    return GitBridge.repository_exists(dependency)