def test_sort__all_related(self):
    """Chain lutris -> python-requests -> python-chardet must be returned
    in reverse dependency order (deepest dependency first)."""
    pkgs = {
        'lutris': {'d': {'unzip', 'python-requests'},
                   'p': {'lutris': 'lutris'},
                   'r': 'community'},
        'python-requests': {'d': {'python-urllib3', 'python-chardet'},
                            'p': {'python-requests': 'python-requests'},
                            'r': 'extra'},
        'python-chardet': {'d': {'python-setuptools'},
                           'p': {'python-chardet': 'python-chardet'},
                           'r': 'extra'},
    }

    res = sorting.sort(pkgs.keys(), pkgs)

    self.assertIsInstance(res, list)
    self.assertEqual(len(pkgs), len(res))
    for entry in res:
        self.assertIn(entry[0], pkgs)

    for idx, expected in enumerate(('python-chardet', 'python-requests', 'lutris')):
        self.assertEqual(expected, res[idx][0])
def test_sort__aur_not_related(self):
    """Unrelated packages: no specific order is required, but every input
    package must appear in the result exactly as many times as given."""
    pkgs = {
        'google-chrome': {'d': {'alsa-lib', 'gtk3', 'libcups'},
                          'p': {'google-chrome': 'google-chrome'},
                          'r': 'extra'},
        'git-cola': {'d': {'git', 'python-pyqt5', 'icu qt5-svg'},
                     'p': {'git-cola': 'git-cola'},
                     'r': 'extra'},
        'kazam': {'d': {'python', 'python-cairo'},
                  'p': {'kazam': 'kazam'},
                  'r': 'extra'},
    }

    res = sorting.sort(pkgs.keys(), pkgs)

    self.assertIsInstance(res, list)
    self.assertEqual(len(pkgs), len(res))
    for entry in res:
        self.assertIn(entry[0], pkgs)
def test_sort__two_relying_on_the_same_package(self):
    """
    dep order:
        abc -> ghi
        jkl -> ghi
        ghi -> def
        def -> mno

    expected: def, ghi, (abc | jkl)
    """
    pkgs = {
        'def': {'d': {'mno'}, 'p': {'def': 'def'}, 'r': 'extra'},
        'abc': {'d': {'ghi', 'xpto'}, 'p': {'abc': 'abc'}, 'r': 'extra'},
        'ghi': {'d': {'def'}, 'p': {'ghi': 'ghi'}, 'r': 'extra'},
        'jkl': {'d': {'ghi'}, 'p': {'jkl': 'jkl'}, 'r': 'extra'},
    }

    # run several times to check the result is deterministic
    for _ in range(5):
        res = sorting.sort(pkgs.keys(), pkgs)

        self.assertIsInstance(res, list)
        self.assertEqual(len(pkgs), len(res))
        for entry in res:
            self.assertIn(entry[0], pkgs)

        self.assertEqual(res[0][0], 'def')
        self.assertEqual(res[1][0], 'ghi')
        # the last two positions hold 'abc' and 'jkl' in either order
        self.assertNotEqual(res[2][0], res[3][0])
        self.assertIn(res[2][0], {'abc', 'jkl'})
        self.assertIn(res[3][0], {'abc', 'jkl'})
def test_sort__one_of_three_related(self):
    """Only 'abc' depends on 'ghi': 'ghi' must be positioned before 'abc'."""
    pkgs = {
        'def': {'d': {'jkl'}, 'p': {'def': 'def'}, 'r': 'extra'},
        'abc': {'d': {'ghi', 'xpto'}, 'p': {'abc': 'abc'}, 'r': 'extra'},
        'ghi': {'d': None, 'p': {'ghi': 'ghi'}, 'r': 'extra'},
    }

    # run several times to check the result is deterministic
    for _ in range(5):
        res = sorting.sort(pkgs.keys(), pkgs)

        self.assertIsInstance(res, list)
        self.assertEqual(len(pkgs), len(res))
        for entry in res:
            self.assertIn(entry[0], pkgs)

        # each of 'ghi' and 'abc' appears exactly once, and 'ghi' first
        ghi_entries = [e for e in res if e[0] == 'ghi']
        self.assertEqual(1, len(ghi_entries))
        abc_entries = [e for e in res if e[0] == 'abc']
        self.assertEqual(1, len(abc_entries))
        self.assertGreater(res.index(abc_entries[0]), res.index(ghi_entries[0]))
def test_sort__dep_provided_as_a_different_name(self):
    """
    dep order:
        abc -> fed
        def (provides 'fed')
        ghi -> abc

    expected: def, abc, ghi
    """
    pkgs = {
        'def': {'d': None, 'p': {'def': 'def', 'fed': 'def'}, 'r': 'extra'},
        'abc': {'d': {'fed'}, 'p': {'abc': 'abc'}, 'r': 'extra'},
        'ghi': {'d': {'abc'}, 'p': {'ghi': 'ghi'}, 'r': 'extra'},
    }

    # run several times to check the result is deterministic
    for _ in range(5):
        res = sorting.sort(pkgs.keys(), pkgs)

        self.assertIsInstance(res, list)
        self.assertEqual(len(pkgs), len(res))
        for entry in res:
            self.assertIn(entry[0], pkgs)

        for idx, expected in enumerate(('def', 'abc', 'ghi')):
            self.assertEqual(res[idx][0], expected)
def test_sorting__pkg_should_be_after_its_latest_dependency(self):
    """'abc' depends on 'def' and 'ghi'; it must come after whichever of
    its dependencies is sorted last ('ghi', which depends on 'xpto')."""
    pkgs = {
        'abc': {'d': {'def', 'ghi'}, 'p': {'abc': 'abc'}, 'r': 'community'},
        'ghi': {'d': {'xpto'}, 'p': {'ghi': 'ghi'}, 'r': 'extra'},
        'xpto': {'d': {'zzz'}, 'p': {'xpto': 'xpto'}, 'r': 'extra'},
        'def': {'d': None, 'p': {'def': 'def'}, 'r': 'extra'},
    }

    # run several times to check the result is deterministic
    for _ in range(5):
        res = sorting.sort(pkgs.keys(), pkgs)

        self.assertIsInstance(res, list)
        self.assertEqual(len(pkgs), len(res))
        for entry in res:
            self.assertIn(entry[0], pkgs)

        for idx, expected in enumerate(('def', 'xpto', 'ghi', 'abc')):
            self.assertEqual(expected, res[idx][0])
def test_sort__aur_pkgs_should_be_always_in_the_end(self):
    """
    dep order:
        abc -> ghi
        ghi -> xxx
        def (AUR, no deps)

    expected: ghi, abc, def

    The AUR package ('def') must be placed last even though it has no
    dependencies at all.
    """
    # NOTE: the previous docstring was copy-pasted from
    # test_sort__dep_provided_as_a_different_name and described a
    # different scenario ("abc -> fed ... expected: def, abc, ghi"),
    # contradicting the assertions below. It now matches the data.
    pkgs = {
        'def': {'d': None, 'p': {'def': 'def'}, 'r': 'aur'},
        'abc': {'d': {'ghi'}, 'p': {'abc': 'abc'}, 'r': 'extra'},
        'ghi': {'d': {'xxx'}, 'p': {'ghi': 'ghi'}, 'r': 'extra'},
    }

    # run several times to check the result is deterministic
    for _ in range(5):
        sorted_list = sorting.sort(pkgs.keys(), pkgs)

        self.assertIsInstance(sorted_list, list)
        self.assertEqual(len(pkgs), len(sorted_list))
        for pkg in sorted_list:
            self.assertIn(pkg[0], pkgs)

        self.assertEqual(sorted_list[0][0], 'ghi')
        self.assertEqual(sorted_list[1][0], 'abc')
        self.assertEqual(sorted_list[2][0], 'def')
def test_sort__with_cycle(self):
    """
    cyclic deps: abc -> def -> ghi -> jkl -> abc

    The sorter must terminate and still return every package exactly
    once; no specific order can be asserted for a cycle.
    """
    pkgs = {
        'def': {'d': {'ghi'}, 'p': {'def': 'def'}, 'r': 'extra'},
        'abc': {'d': {'def'}, 'p': {'abc': 'abc'}, 'r': 'extra'},
        'ghi': {'d': {'jkl'}, 'p': {'ghi': 'ghi'}, 'r': 'extra'},
        'jkl': {'d': {'abc'}, 'p': {'jkl': 'jkl'}, 'r': 'extra'},
    }

    res = sorting.sort(pkgs.keys(), pkgs)

    self.assertIsInstance(res, list)
    self.assertEqual(len(pkgs), len(res))
    for entry in res:
        self.assertIn(entry[0], pkgs)
def test_sort__two_of_three_related(self):
    """
    dep order: abc -> ghi -> def

    expected: def, ghi, abc
    """
    pkgs = {
        'def': {'d': {'jkl'}, 'p': {'def': 'def'}, 'r': 'extra'},
        'abc': {'d': {'ghi', 'xpto'}, 'p': {'abc': 'abc'}, 'r': 'extra'},
        'ghi': {'d': {'def'}, 'p': {'ghi': 'ghi'}, 'r': 'extra'},
    }

    # run several times to check the result is deterministic
    for _ in range(5):
        res = sorting.sort(pkgs.keys(), pkgs)

        self.assertIsInstance(res, list)
        self.assertEqual(len(pkgs), len(res))
        for entry in res:
            self.assertIn(entry[0], pkgs)

        for idx, expected in enumerate(('def', 'ghi', 'abc')):
            self.assertEqual(res[idx][0], expected)
def test_sort__all_packages_no_deps(self):
    """Packages without dependencies (empty set or None) are still all
    returned by the sorter."""
    pkgs = {
        'xpto': {'d': set(), 'p': {'xpto': 'xpto'}, 'r': 'extra'},
        'abc': {'d': None, 'p': {'abc': 'abc'}, 'r': 'extra'},
    }

    res = sorting.sort(pkgs.keys(), pkgs)

    self.assertIsInstance(res, list)
    self.assertEqual(len(pkgs), len(res))
    for entry in res:
        self.assertIn(entry[0], pkgs)
def summarize(self, pkgs: List[ArchPackage], root_password: str, arch_config: dict) -> UpgradeRequirements:
    """Build the upgrade requirements summary for the given packages.

    Splits the packages between repository and AUR updates, loads their
    update data (AUR SRCINFO data is fetched in background threads),
    resolves provided names / conflicts / extra packages to install, and
    finally maps everything into an UpgradeRequirements instance.

    :param pkgs: packages selected for upgrade
    :param root_password: root password forwarded through the context
    :param arch_config: current Arch settings forwarded through the context
    :return: the filled UpgradeRequirements, or None when the user cancels
             the operation or a required package cannot be found
    """
    res = UpgradeRequirements([], [], [], [])

    remote_provided_map = pacman.map_provided(remote=True)
    remote_repo_map = pacman.map_repositories()

    # shared mutable state passed to every helper below
    context = UpdateRequirementsContext(to_update={}, repo_to_update={}, aur_to_update={}, repo_to_install={},
                                        aur_to_install={}, to_install={}, pkgs_data={}, cannot_upgrade={},
                                        to_remove={}, installed_names=set(), provided_map={}, aur_index=set(),
                                        arch_config=arch_config, root_password=root_password,
                                        remote_provided_map=remote_provided_map, remote_repo_map=remote_repo_map)
    self.__fill_aur_index(context)

    aur_data = {}
    aur_srcinfo_threads = []
    for p in pkgs:
        context.to_update[p.name] = p
        if p.repository == 'aur':
            context.aur_to_update[p.name] = p
            # each AUR package's update data is fetched concurrently
            t = Thread(target=self._fill_aur_pkg_update_data, args=(p, aur_data), daemon=True)
            t.start()
            aur_srcinfo_threads.append(t)
        else:
            context.repo_to_update[p.name] = p

    if context.aur_to_update:
        # wait until all AUR data threads have finished before reading aur_data
        for t in aur_srcinfo_threads:
            t.join()

    self.logger.info("Filling updates data")

    if context.repo_to_update:
        context.pkgs_data.update(pacman.map_updates_data(context.repo_to_update.keys()))

    if aur_data:
        context.pkgs_data.update(aur_data)

    self.__fill_provided_map(context)

    if context.pkgs_data:
        self._fill_conflicts(context)

    try:
        # a falsy result means the user declined one of the required choices
        if not self._fill_to_install(context):
            self.logger.info("The operation was cancelled by the user")
            return
    except PackageNotFoundException as e:
        self.logger.error("Package '{}' not found".format(e.name))
        return

    if context.to_update:
        installed_sizes = pacman.get_installed_size(list(context.to_update.keys()))

        sorted_pkgs = []

        if context.repo_to_update:  # only sorting by name ( pacman already knows the best order to perform the upgrade )
            sorted_pkgs.extend(context.repo_to_update.values())
            sorted_pkgs.sort(key=lambda pkg: pkg.name)

        if context.aur_to_update:  # adding AUR packages in the end, in dependency order
            sorted_aur = sorting.sort(context.aur_to_update.keys(), context.pkgs_data, context.provided_map)

            for aur_pkg in sorted_aur:
                sorted_pkgs.append(context.aur_to_update[aur_pkg[0]])

        res.to_upgrade = [self._map_requirement(pkg, context, installed_sizes) for pkg in sorted_pkgs]

    if context.to_remove:
        res.to_remove = [p for p in context.to_remove.values()]

    if context.cannot_upgrade:
        res.cannot_upgrade = [d for d in context.cannot_upgrade.values()]

    if context.to_install:
        res.to_install = [self._map_requirement(p, context) for p in context.to_install.values()]

    return res
def fill_providers_deps(self, missing_deps: List[Tuple[str, str]],
                        provided_map: Dict[str, Set[str]],
                        remote_repo_map: Dict[str, str],
                        already_checked: Set[str],
                        remote_provided_map: Dict[str, Set[str]],
                        deps_data: Dict[str, dict],
                        aur_idx: Iterable[str],
                        sort: bool,
                        watcher: ProcessWatcher,
                        automatch_providers: bool) -> List[Tuple[str, str]]:
    """Resolve dependencies marked '__several__' by asking the user to pick a provider.

    Mutates `missing_deps`, `provided_map`, `already_checked` and `deps_data`
    in place, and recurses until no '__several__' entries remain.

    :param missing_deps: (name, repository) pairs; repository '__several__' flags multi-provider deps
    :param provided_map: names currently treated as provided/installed
    :param remote_repo_map: package -> repository mapping
    :param already_checked: dependency names already processed
    :param remote_provided_map: remote provided-name -> providers mapping
    :param deps_data: accumulated dependency data
    :param aur_idx: known AUR package names
    :param sort: whether to return the deps topologically sorted
    :param watcher: UI process watcher used for the provider prompt
    :param automatch_providers: forwarded to map_missing_deps
    :return: all deps (sorted when requested) or None if the user declined the providers options
    """
    # only deps flagged '__several__' need a provider decision
    deps_providers = map_providers({data[0] for data in missing_deps if data[1] == '__several__'}, remote_provided_map)

    if deps_providers:
        all_providers = set()

        for providers in deps_providers.values():
            all_providers.update(providers)

        providers_repos = pacman.map_repositories(all_providers)

        selected_providers = confirmation.request_providers(deps_providers, providers_repos, watcher, self.i18n)

        if not selected_providers:
            # user declined the provider selection -> abort (returns None)
            return
        else:
            providers_data = pacman.map_updates_data(selected_providers)  # adding the chosen providers to re-check the missing deps
            provided_map.update(pacman.map_provided(remote=True, pkgs=selected_providers))  # adding the providers as "installed" packages

            providers_deps = self.map_missing_deps(pkgs_data=providers_data,
                                                   provided_map=provided_map,
                                                   aur_index=aur_idx,
                                                   deps_checked=already_checked,
                                                   deps_data=deps_data,
                                                   sort=False,
                                                   remote_provided_map=remote_provided_map,
                                                   remote_repo_map=remote_repo_map,
                                                   watcher=watcher,
                                                   choose_providers=True,
                                                   automatch_providers=automatch_providers)

            # cleaning the already mapped providers deps:
            to_remove = []
            for idx, dep in enumerate(missing_deps):
                if dep[1] == '__several__':
                    to_remove.append(idx)

            # NOTE(review): the loop variable shadows the 'to_remove' list;
            # the 'idx' offset compensates for earlier deletions shifting indices
            for idx, to_remove in enumerate(to_remove):
                del missing_deps[to_remove - idx]

            missing_deps.extend(((p, providers_repos.get(p, 'aur')) for p in selected_providers))

            for dep in providers_deps:
                if dep not in missing_deps and dep[1] != '__several__':
                    missing_deps.append(dep)

            deps_data.update(providers_data)

            # recurse: the newly added providers may themselves have '__several__' deps;
            # a falsy result means the user declined at a deeper level -> abort
            if not self.fill_providers_deps(missing_deps=missing_deps, provided_map=provided_map,
                                            remote_repo_map=remote_repo_map, already_checked=already_checked,
                                            aur_idx=aur_idx, remote_provided_map=remote_provided_map,
                                            deps_data=deps_data, sort=False, watcher=watcher,
                                            automatch_providers=automatch_providers):
                return

    if sort:
        missing_to_sort = {d[0] for d in missing_deps if d[1] != '__several__'}
        return sorting.sort(missing_to_sort, deps_data, provided_map)

    return missing_deps
def map_missing_deps(self, pkgs_data: Dict[str, dict],
                     provided_map: Dict[str, Set[str]],
                     remote_provided_map: Dict[str, Set[str]],
                     remote_repo_map: Dict[str, str],
                     aur_index: Iterable[str],
                     deps_checked: Set[str],
                     deps_data: Dict[str, dict],
                     sort: bool,
                     watcher: ProcessWatcher,
                     choose_providers: bool = True,
                     automatch_providers: bool = False) -> List[Tuple[str, str]]:
    """Map every dependency of `pkgs_data` that is not already provided/installed.

    Recurses over sub-dependencies and, when `choose_providers` is True,
    delegates multi-provider deps to fill_providers_deps. Mutates
    `deps_checked` and `deps_data` in place.

    :param pkgs_data: package name -> data ('d' = deps, 'p' = provided names)
    :param provided_map: names currently treated as provided/installed
    :param remote_provided_map: remote provided-name -> providers mapping
    :param remote_repo_map: package -> repository mapping
    :param aur_index: known AUR package names
    :param deps_checked: dependency names already processed (updated in place)
    :param deps_data: accumulated dependency data (updated in place)
    :param sort: whether to topologically sort the result
    :param watcher: UI process watcher
    :param choose_providers: whether to resolve '__several__' deps via the user
    :param automatch_providers: forwarded to _fill_missing_dep
    :return: (name, repository) pairs for all missing deps
    """
    sorted_deps = []  # it will hold the proper order to install the missing dependencies
    missing_deps, repo_missing, aur_missing = set(), set(), set()

    deps_checked.update(pkgs_data.keys())

    for p, data in pkgs_data.items():
        if data['d']:
            for dep in data['d']:
                if dep in pkgs_data:
                    continue

                if dep not in provided_map:
                    # dep may carry a version expression (e.g. 'pkg>=1.0'):
                    # split it into (name, operator, version)
                    dep_split = self.re_dep_operator.split(dep)
                    dep_name = dep_split[0].strip()

                    if dep_name not in deps_checked:
                        deps_checked.add(dep_name)

                        if dep_name not in provided_map:
                            self._fill_missing_dep(dep_name=dep_name, dep_exp=dep, aur_index=aur_index,
                                                   missing_deps=missing_deps,
                                                   remote_provided_map=remote_provided_map,
                                                   remote_repo_map=remote_repo_map,
                                                   repo_deps=repo_missing, aur_deps=aur_missing,
                                                   watcher=watcher, deps_data=deps_data,
                                                   automatch_providers=automatch_providers)
                        else:
                            # the name is provided: check whether the provided
                            # version satisfies the required version expression
                            version_pattern = '{}='.format(dep_name)
                            version_found = [p for p in provided_map if p.startswith(version_pattern)]

                            if version_found:
                                version_found = version_found[0].split('=')[1]
                                version_informed = dep_split[2].strip()

                                # strip epoch / release parts not present in the required version
                                if ':' not in version_informed:
                                    version_found = version_found.split(':')[-1]

                                if '-' not in version_informed:
                                    version_found = version_found.split('-')[0]

                                try:
                                    version_found = parse_version(version_found)
                                    version_informed = parse_version(version_informed)

                                    op = dep_split[1] if dep_split[1] != '=' else '=='
                                    # NOTE(review): 'op' comes from re_dep_operator, so the
                                    # eval input is constrained — but eval on a formatted
                                    # string is fragile; a comparison dispatch table would
                                    # be safer. Bare 'except' below also hides real errors.
                                    match = eval('version_found {} version_informed'.format(op))
                                except:
                                    match = False
                                    traceback.print_exc()

                                if not match:
                                    self._fill_missing_dep(dep_name=dep_name, dep_exp=dep, aur_index=aur_index,
                                                           missing_deps=missing_deps,
                                                           remote_provided_map=remote_provided_map,
                                                           remote_repo_map=remote_repo_map,
                                                           repo_deps=repo_missing, aur_deps=aur_missing,
                                                           watcher=watcher, deps_data=deps_data,
                                                           automatch_providers=automatch_providers)
                            else:
                                # no versioned provider entry -> treat as missing
                                self._fill_missing_dep(dep_name=dep_name, dep_exp=dep, aur_index=aur_index,
                                                       missing_deps=missing_deps,
                                                       remote_provided_map=remote_provided_map,
                                                       remote_repo_map=remote_repo_map,
                                                       repo_deps=repo_missing, aur_deps=aur_missing,
                                                       watcher=watcher, deps_data=deps_data,
                                                       automatch_providers=automatch_providers)

    if missing_deps:
        if repo_missing:
            with_single_providers = []

            for d in missing_deps:
                if d[0] in repo_missing and d[0] not in deps_data:
                    if d[1] == '__several__':
                        # multi-provider deps get placeholder data until a provider is chosen
                        deps_data[d[0]] = {'d': None, 'p': d[0], 'r': d[1]}
                    else:
                        with_single_providers.append(d[0])

            if with_single_providers:
                data = pacman.map_updates_data(with_single_providers)

                if data:
                    deps_data.update(data)

        if aur_missing:
            # AUR dependency data is fetched concurrently, one thread per package
            aur_threads = []
            for pkgname in aur_missing:
                t = Thread(target=self.__fill_aur_update_data, args=(pkgname, deps_data), daemon=True)
                t.start()
                aur_threads.append(t)

            for t in aur_threads:
                t.join()

        # recurse over the deps just mapped to find their own missing deps
        missing_subdeps = self.map_missing_deps(pkgs_data=deps_data, provided_map=provided_map,
                                                aur_index=aur_index, deps_checked=deps_checked,
                                                sort=False, deps_data=deps_data, watcher=watcher,
                                                remote_provided_map=remote_provided_map,
                                                remote_repo_map=remote_repo_map,
                                                automatch_providers=automatch_providers,
                                                choose_providers=False)

        if missing_subdeps:
            missing_deps.update(missing_subdeps)

    if sort:
        sorted_deps.extend(sorting.sort(deps_data.keys(), deps_data))
    else:
        sorted_deps.extend(((dep[0], dep[1]) for dep in missing_deps))

    if sorted_deps and choose_providers:
        return self.fill_providers_deps(missing_deps=sorted_deps, provided_map=provided_map,
                                        remote_provided_map=remote_provided_map,
                                        remote_repo_map=remote_repo_map, watcher=watcher,
                                        sort=sort, already_checked=deps_checked,
                                        aur_idx=aur_index, deps_data=deps_data,
                                        automatch_providers=automatch_providers)

    return sorted_deps
def fill_providers_deps(self, missing_deps: List[Tuple[str, str]],
                        provided_map: Dict[str, Set[str]],
                        remote_repo_map: Dict[str, str],
                        already_checked: Set[str],
                        remote_provided_map: Dict[str, Set[str]],
                        deps_data: Dict[str, dict],
                        aur_idx: Iterable[str],
                        sort: bool,
                        watcher: ProcessWatcher,
                        automatch_providers: bool,
                        prefer_repository_provider: bool) -> Optional[List[Tuple[str, str]]]:
    """Resolve dependencies marked '__several__' by asking the user to pick a provider.

    Unlike the repo-only variant, this version also handles AUR providers
    (their data is loaded through aur_client.gen_updates_data). Mutates
    `missing_deps`, `provided_map`, `already_checked` and `deps_data` in
    place, and recurses until no '__several__' entries remain.

    :param missing_deps: (name, repository) pairs; repository '__several__' flags multi-provider deps
    :param provided_map: names currently treated as provided/installed
    :param remote_repo_map: package -> repository mapping
    :param already_checked: dependency names already processed
    :param remote_provided_map: remote provided-name -> providers mapping
    :param deps_data: accumulated dependency data
    :param aur_idx: known AUR package names
    :param sort: whether to return the deps topologically sorted
    :param watcher: UI process watcher used for the provider prompt
    :param automatch_providers: forwarded to map_missing_deps
    :param prefer_repository_provider: forwarded to map_missing_deps
    :return: all deps sorted or None if the user declined the providers options
    """
    # only deps flagged '__several__' need a provider decision
    deps_providers = map_providers({data[0] for data in missing_deps if data[1] == '__several__'}, remote_provided_map)

    if deps_providers:
        providers_repos = {}
        repos_providers = set()

        # split the candidate providers between AUR and the official repositories
        for providers in deps_providers.values():
            for provider in providers:
                if remote_repo_map.get(provider) == 'aur':
                    providers_repos[provider] = 'aur'
                else:
                    repos_providers.add(provider)

        providers_repos.update(pacman.map_repositories(repos_providers))

        selected_providers = confirmation.request_providers(deps_providers, providers_repos, watcher, self.i18n)

        if not selected_providers:
            # user declined the provider selection -> abort (returns None)
            return
        else:
            # adding the chosen providers for re-checking the missing dependencies
            repo_selected, aur_selected = set(), set()

            for provider in selected_providers:
                if provider in repos_providers:
                    repo_selected.add(provider)
                else:
                    aur_selected.add(provider)

            providers_data = dict()

            if repo_selected:
                providers_data.update(pacman.map_updates_data(repo_selected))
                # adding the providers as "installed" packages
                provided_map.update(pacman.map_provided(remote=True, pkgs=repo_selected))

            if aur_selected:
                for pkgname, pkgdata in self.aur_client.gen_updates_data(aur_selected):
                    providers_data[pkgname] = pkgdata

                    for provider in pkgdata['p']:  # adding the providers as "installed" packages
                        currently_provided = provided_map.get(provider, set())
                        provided_map[provider] = currently_provided
                        currently_provided.add(pkgname)

            providers_deps = self.map_missing_deps(pkgs_data=providers_data,
                                                   provided_map=provided_map,
                                                   aur_index=aur_idx,
                                                   deps_checked=already_checked,
                                                   deps_data=deps_data,
                                                   sort=False,
                                                   remote_provided_map=remote_provided_map,
                                                   remote_repo_map=remote_repo_map,
                                                   watcher=watcher,
                                                   choose_providers=True,
                                                   automatch_providers=automatch_providers,
                                                   prefer_repository_provider=prefer_repository_provider)

            if providers_deps is None:  # it means the user called off the installation process
                return

            # cleaning the already mapped providers deps:
            to_remove = []
            for idx, dep in enumerate(missing_deps):
                if dep[1] == '__several__':
                    to_remove.append(idx)

            # NOTE(review): the loop variable shadows the 'to_remove' list;
            # the 'idx' offset compensates for earlier deletions shifting indices
            for idx, to_remove in enumerate(to_remove):
                del missing_deps[to_remove - idx]

            missing_deps.extend(((p, providers_repos.get(p, 'aur')) for p in selected_providers))

            for dep in providers_deps:
                if dep not in missing_deps and dep[1] != '__several__':
                    missing_deps.append(dep)

            deps_data.update(providers_data)

            # recurse: the newly added providers may themselves have '__several__' deps;
            # a falsy result means the user declined at a deeper level -> abort
            if not self.fill_providers_deps(missing_deps=missing_deps, provided_map=provided_map,
                                            remote_repo_map=remote_repo_map, already_checked=already_checked,
                                            aur_idx=aur_idx, remote_provided_map=remote_provided_map,
                                            deps_data=deps_data, sort=False, watcher=watcher,
                                            automatch_providers=automatch_providers,
                                            prefer_repository_provider=prefer_repository_provider):
                return

    if sort:
        missing_to_sort = {d[0] for d in missing_deps if d[1] != '__several__'}
        return sorting.sort(missing_to_sort, deps_data, provided_map)

    return missing_deps
def map_missing_deps(self, pkgs_data: Dict[str, dict],
                     provided_map: Dict[str, Set[str]],
                     remote_provided_map: Dict[str, Set[str]],
                     remote_repo_map: Dict[str, str],
                     aur_index: Iterable[str],
                     deps_checked: Set[str],
                     deps_data: Dict[str, dict],
                     sort: bool,
                     watcher: ProcessWatcher,
                     choose_providers: bool = True,
                     automatch_providers: bool = False,
                     prefer_repository_provider: bool = False) -> Optional[List[Tuple[str, str]]]:
    """Map every dependency of `pkgs_data` that is not already provided/installed.

    This variant delegates version checks to match_required_version and
    single-provider data loading to _fill_single_providers_data, tracks
    the dependent package, and supports preferring repository providers.
    Mutates `deps_checked` and `deps_data` in place.

    :param pkgs_data: package name -> data ('d' = deps, 'p' = provided names)
    :param provided_map: names currently treated as provided/installed
    :param remote_provided_map: remote provided-name -> providers mapping
    :param remote_repo_map: package -> repository mapping
    :param aur_index: known AUR package names
    :param deps_checked: dependency names already processed (updated in place)
    :param deps_data: accumulated dependency data (updated in place)
    :param sort: whether to topologically sort the result
    :param watcher: UI process watcher
    :param choose_providers: whether to resolve '__several__' deps via the user
    :param automatch_providers: forwarded to _fill_missing_dep
    :param prefer_repository_provider: forwarded to _fill_missing_dep
    :return: (name, repository) pairs, or None when the user declines a provider choice
    """
    sorted_deps = []  # it will hold the proper order to install the missing dependencies
    missing_deps, repo_missing, aur_missing = set(), set(), set()

    deps_checked.update(pkgs_data.keys())

    for p, data in pkgs_data.items():
        if data['d']:
            for dep in data['d']:
                if dep in pkgs_data:
                    continue

                if dep not in provided_map:
                    # dep may carry a version expression (e.g. 'pkg>=1.0'):
                    # split it into (name, operator, version)
                    dep_split = self.re_dep_operator.split(dep)
                    dep_name = dep_split[0].strip()

                    if dep_name not in deps_checked:
                        deps_checked.add(dep_name)

                        if dep_name not in provided_map:
                            self._fill_missing_dep(dep_name=dep_name, dep_exp=dep, aur_index=aur_index,
                                                   missing_deps=missing_deps,
                                                   remote_provided_map=remote_provided_map,
                                                   remote_repo_map=remote_repo_map,
                                                   repo_deps=repo_missing, aur_deps=aur_missing,
                                                   watcher=watcher, deps_data=deps_data,
                                                   automatch_providers=automatch_providers,
                                                   prefer_repository_provider=prefer_repository_provider,
                                                   dependent=p)
                        else:
                            # the name is provided: check whether the provided
                            # version satisfies the required version expression
                            version_pattern = '{}='.format(dep_name)
                            version_found = [p for p in provided_map if p.startswith(version_pattern)]

                            if version_found:
                                version_found = version_found[0].split('=')[1]
                                version_required = dep_split[2]
                                op = dep_split[1].strip()

                                if not match_required_version(version_found, op, version_required):
                                    self._fill_missing_dep(dep_name=dep_name, dep_exp=dep, aur_index=aur_index,
                                                           missing_deps=missing_deps,
                                                           remote_provided_map=remote_provided_map,
                                                           remote_repo_map=remote_repo_map,
                                                           repo_deps=repo_missing, aur_deps=aur_missing,
                                                           watcher=watcher, deps_data=deps_data,
                                                           automatch_providers=automatch_providers,
                                                           prefer_repository_provider=prefer_repository_provider,
                                                           dependent=p)
                            else:
                                # no versioned provider entry -> treat as missing
                                self._fill_missing_dep(dep_name=dep_name, dep_exp=dep, aur_index=aur_index,
                                                       missing_deps=missing_deps,
                                                       remote_provided_map=remote_provided_map,
                                                       remote_repo_map=remote_repo_map,
                                                       repo_deps=repo_missing, aur_deps=aur_missing,
                                                       watcher=watcher, deps_data=deps_data,
                                                       automatch_providers=automatch_providers,
                                                       prefer_repository_provider=prefer_repository_provider,
                                                       dependent=p)

    if missing_deps:
        self._fill_single_providers_data(missing_deps, repo_missing, aur_missing, deps_data)

        # recurse over the deps just mapped to find their own missing deps;
        # a copy of deps_data is passed since the recursion mutates it
        missing_subdeps = self.map_missing_deps(pkgs_data={**deps_data}, provided_map=provided_map,
                                                aur_index=aur_index, deps_checked=deps_checked,
                                                sort=False, deps_data=deps_data, watcher=watcher,
                                                remote_provided_map=remote_provided_map,
                                                remote_repo_map=remote_repo_map,
                                                automatch_providers=automatch_providers,
                                                choose_providers=False,
                                                prefer_repository_provider=prefer_repository_provider)

        if missing_subdeps:
            missing_deps.update(missing_subdeps)

    if sort:
        sorted_deps.extend(sorting.sort(deps_data.keys(), deps_data))
    else:
        sorted_deps.extend(((dep[0], dep[1]) for dep in missing_deps))

    if sorted_deps and choose_providers:
        return self.fill_providers_deps(missing_deps=sorted_deps, provided_map=provided_map,
                                        remote_provided_map=remote_provided_map,
                                        remote_repo_map=remote_repo_map, watcher=watcher,
                                        sort=sort, already_checked=deps_checked,
                                        aur_idx=aur_index, deps_data=deps_data,
                                        automatch_providers=automatch_providers,
                                        prefer_repository_provider=prefer_repository_provider)

    return sorted_deps