def test_filter_packages_none(self):
    """Test that filtering with no selection keeps all packages.

    Passing ``None`` as the selection must return the full package dict.
    """
    ws_path = self.test_dir
    all_packages = {
        "folder_1": "package_dummy_1",
        "folder_2": "package_dummy_2",
        "folder_3": "package_dummy_3",
    }
    selected_packages = None
    updater = Updater(ws_path, all_packages)
    filtered_packages = updater.filter_packages(selected_packages)
    # assertEquals is a deprecated alias removed in Python 3.12;
    # use the canonical assertEqual instead.
    self.assertEqual(all_packages, filtered_packages)
def test_filter_packages(self):
    """Test filtering of packages down to an explicit selection.

    Only the packages whose names appear in the selection list must
    survive the filter, keyed by their original folders.
    """
    ws_path = self.test_dir
    mock_list, all_packages = generate_mock_packages(3)
    selected_packages = [mock_list[0].name, mock_list[2].name]
    updater = Updater(ws_path, all_packages)
    filtered_packages = updater.filter_packages(selected_packages)
    expected_packages = {
        "folder_0": mock_list[0],
        "folder_2": mock_list[2],
    }
    # assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(expected_packages, filtered_packages)
def test_update_full_simple(self):
    """Test updater end to end on a single real repository.

    Clones this project's own repo over HTTPS and checks that updating
    it right away reports a single up-to-date status entry.
    NOTE(review): requires network access — confirm this is acceptable
    for the test environment.
    """
    http_url = "https://github.com/niosus/catkin_tools_fetch"
    GitBridge.clone("fetch", http_url, self.test_dir)
    # A mock package whose `name` property is what the updater reads.
    pkg = MagicMock()
    type(pkg).name = PropertyMock(return_value="pkg")
    packages = {".": pkg}
    updater = Updater(self.test_dir, packages, "abort")
    selected_packages = [pkg.name]
    status_msgs = updater.update_packages(selected_packages)
    # assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(len(status_msgs), 1)
    self.assertEqual(status_msgs[0][0], "pkg")
    self.assertEqual(status_msgs[0][1],
                     colored(Updater.UP_TO_DATE_TAG, "green"))
def test_init(self):
    """Test initialization of the updater.

    The constructor must store the workspace path and the package dict
    verbatim on the instance.
    """
    ws_path = self.test_dir
    packages = {"test_folder": "package_dummy"}
    updater = Updater(ws_path, packages)
    # assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(ws_path, updater.ws_path)
    self.assertEqual(packages, updater.packages)
def test_merge_success(self):
    """Check that a clean pull output maps to the up-to-date tag."""
    pull_output = ("From github.com:niosus/catkin_tools_fetch\n"
                   " * branch master -> FETCH_HEAD\n"
                   "Already up-to-date.\n")
    self.assertEqual(Updater.tag_from_output(pull_output),
                     Updater.UP_TO_DATE_TAG)
def test_tag_from_output(self):
    """Clone a real repo, pull it, and check the tag parsed from the output.

    The clone output is irrelevant here; only the subsequent pull output
    is fed to the tag parser.
    """
    repo_url = "https://github.com/niosus/catkin_tools_fetch"
    GitBridge.clone("catkin_tools_fetch", repo_url, self.test_dir)
    pull_output = GitBridge.pull(self.test_dir, "master")
    self.assertEqual(Updater.tag_from_output(pull_output),
                     Updater.UP_TO_DATE_TAG)
def update(packages, workspace, context, use_preprint, num_threads):
    """Update packages from the available remotes.

    Args:
        packages (list): A list of packages provided by the user.
        workspace (str): Path to a workspace (without src/ in the end).
        context (Context): Current context. Needed to find current packages.
        use_preprint (bool): Show status messages while cloning.
        num_threads (int): Number of threads the updater runs in parallel.

    Returns:
        int: Always 0. Per-package update failures are reported by the
            updater itself and do not change the return code.
    """
    # The actual sources live under <workspace>/src.
    ws_path = path.join(workspace, 'src')
    # Discover every package currently present in the source space.
    workspace_packages = find_packages(context.source_space_abs,
                                       exclude_subspaces=True,
                                       warnings=[])
    updater = Updater(ws_path=ws_path,
                      packages=workspace_packages,
                      use_preprint=use_preprint,
                      num_threads=num_threads)
    updater.update_packages(packages)
    return 0
def fetch(packages, workspace, context, default_urls, use_preprint,
          num_threads, pull_after_fetch):
    """Fetch dependencies of a package.

    Iterates until no new dependencies appear: each newly cloned package
    may declare further dependencies that must be fetched in turn.

    Args:
        packages (list): A list of packages provided by the user. An empty
            list means "fetch dependencies of every workspace package".
        workspace (str): Path to a workspace (without src/ in the end).
        context (Context): Current context. Needed to find current packages.
        default_urls (set(str)): A set of urls where we search for packages.
            Updated in place with urls discovered while parsing.
        use_preprint (bool): Show status messages while cloning.
        num_threads (int): Number of threads used to download in parallel.
        pull_after_fetch (bool): Also update (pull) all workspace packages
            once fetching is done.

    Returns:
        int: Return code. 0 if success. Git error code otherwise.
    """
    fetch_all = not packages
    ws_path = path.join(workspace, 'src')
    ignore_pkgs = Tools.list_all_ros_pkgs()
    already_fetched = set()
    packages = set(packages)
    global_error_code = Downloader.NO_ERROR
    # Loop until there are no new dependencies left to download.
    while True:
        log.info(" Searching for dependencies.")
        deps_to_fetch = {}
        workspace_packages = find_packages(context.source_space_abs,
                                           exclude_subspaces=True,
                                           warnings=[])
        available_pkgs = [pkg.name for pkg in workspace_packages.values()]
        initial_cloned_pkgs = len(already_fetched)
        for package_path, package in workspace_packages.items():
            if package.name in already_fetched:
                continue
            if fetch_all or (package.name in packages):
                parser = Parser(default_urls=default_urls,
                                pkg_name=package.name)
                package_folder = path.join(ws_path, package_path)
                deps_to_fetch = Tools.update_deps_dict(
                    deps_to_fetch, parser.get_dependencies(package_folder))
                if deps_to_fetch is None:
                    sys.exit(1)
                already_fetched.add(package.name)
                # Make sure we don't stop until we analyzed all
                # dependencies: as we have just added these repositories we
                # must analyze their dependencies too, even if we wanted to
                # download dependencies for one project only.
                for new_dep_name in deps_to_fetch:
                    packages.add(new_dep_name)
                # Update default urls to use the new version further on.
                default_urls.update(parser.default_urls)
        try:
            downloader = Downloader(ws_path=ws_path,
                                    available_pkgs=available_pkgs,
                                    ignore_pkgs=ignore_pkgs,
                                    use_preprint=use_preprint,
                                    num_threads=num_threads)
        except ValueError as e:
            log.critical(" Encountered error. Abort.")
            # BUGFIX: exceptions have no `.message` attribute on Python 3
            # (it was removed with PEP 352); str(e) works everywhere.
            log.critical(" Error message: %s", str(e))
            return 1
        error_code = downloader.download_dependencies(deps_to_fetch)
        if len(already_fetched) == initial_cloned_pkgs:
            log.info(" No new dependencies. Done.")
            break
        if error_code != 0:
            global_error_code = error_code
        log.info(" New packages available. Process their dependencies now.")
    if pull_after_fetch:
        updater = Updater(ws_path=ws_path,
                          packages=workspace_packages,
                          use_preprint=use_preprint,
                          num_threads=num_threads)
        updater.update_packages(packages)
    return global_error_code
def test_merge_fail(self):
    """Check that a conflicting pull output yields the conflict tag."""
    pull_output = (
        "CONFLICT (content): Merge conflict in <fileName>\n"
        "Automatic merge failed; fix conflicts and then commit the result.")
    self.assertEqual(Updater.tag_from_output(pull_output),
                     Updater.CONFLICT_TAG)