def test__sort_deps__one_of_three_related(self):
    """'abc' depends on 'ghi', so 'ghi' must be sorted before 'abc'."""
    deps = {
        ArchPackage(name='abc', package_base='abc'): {'ghi', 'xpto'},
        ArchPackage(name='def', package_base='def'): {'jkl'},
        # fixed: '{}' is an empty dict literal, not an empty set — use set()
        # to stay consistent with the other dependency values
        ArchPackage(name='ghi', package_base='ghi'): set()
    }
    name_map = {d.name: d for d in deps}

    for _ in range(5):  # testing n times to see if the same result is produced
        sorted_list = ArchManager._sort_deps(deps, name_map)
        self.assertIsInstance(sorted_list, list)
        self.assertEqual(len(deps), len(sorted_list))

        for pkg in sorted_list:
            self.assertIn(pkg, deps)

        ghi = [p for p in sorted_list if p.name == 'ghi']
        self.assertEqual(1, len(ghi))
        ghi_idx = sorted_list.index(ghi[0])

        abc = [p for p in sorted_list if p.name == 'abc']
        self.assertEqual(1, len(abc))
        abc_idx = sorted_list.index(abc[0])

        self.assertGreater(abc_idx, ghi_idx)
def install(self, pkg: ArchPackage, root_password: str, watcher: ProcessWatcher, skip_optdeps: bool = False) -> bool:
    """Install an AUR package, optionally generating makepkg optimizations first.

    Loads the local configuration on demand (and drops it afterwards when it
    was loaded here). Returns True when the installation succeeded.
    """
    config_loaded_here = not self.local_config

    if config_loaded_here:
        self.local_config = read_config()

    # optimizations only need generating while the custom makepkg file is absent
    if self.local_config['optimize'] and not os.path.exists(CUSTOM_MAKEPKG_FILE):
        watcher.change_substatus(self.i18n['arch.makepkg.optimizing'])
        ArchCompilationOptimizer(self.context.logger).optimize()

    res = self._install_from_aur(pkg.name, pkg.maintainer, root_password,
                                 ProcessHandler(watcher), dependency=False,
                                 skip_optdeps=skip_optdeps)

    if res and os.path.exists(pkg.get_disk_data_path()):
        with open(pkg.get_disk_data_path()) as f:
            cached = f.read()

        if cached:
            pkg.fill_cached_data(json.loads(cached))

    if config_loaded_here:
        self.local_config = None

    return res
def test__sort_deps__two_relying_on_the_same_package(self):
    """
    dep order:
        abc -> ghi
        jkl -> ghi
        ghi -> def
        def -> mno

    expected: def, ghi, (abc | jkl)
    """
    deps = {
        ArchPackage(name='abc', package_base='abc'): {'ghi', 'xpto'},
        ArchPackage(name='def', package_base='def'): {'mno'},
        ArchPackage(name='ghi', package_base='ghi'): {'def'},
        ArchPackage(name='jkl', package_base='jkl'): {'ghi'}
    }
    name_map = {d.name: d for d in deps}

    # repeat to make sure the ordering is deterministic
    for _ in range(5):
        res = ArchManager._sort_deps(deps, name_map)

        self.assertIsInstance(res, list)
        self.assertEqual(len(deps), len(res))

        for p in res:
            self.assertIn(p, deps)

        names = [p.name for p in res]
        self.assertEqual(names[0], 'def')
        self.assertEqual(names[1], 'ghi')
        self.assertNotEqual(names[2], names[3])
        self.assertIn(names[2], {'abc', 'jkl'})
        self.assertIn(names[3], {'abc', 'jkl'})
def test__sort_deps__a_declared_dep_provided_as_a_different_name(self):
    """
    dep order:
        abc -> fed
        def (fed)
        ghi -> abc

    expected: def, abc, ghi
    """
    def_pkg = ArchPackage(name='def', package_base='def')

    deps = {
        ArchPackage(name='abc', package_base='abc'): {'fed'},
        def_pkg: {},
        ArchPackage(name='ghi', package_base='ghi'): {'abc'}
    }

    name_map = {d.name: d for d in deps}
    name_map['fed'] = def_pkg  # 'def' also provides the name 'fed'

    # repeat to make sure the ordering is deterministic
    for _ in range(5):
        res = ArchManager._sort_deps(deps, name_map)

        self.assertIsInstance(res, list)
        self.assertEqual(len(deps), len(res))

        for p in res:
            self.assertIn(p, deps)

        names = [p.name for p in res]
        self.assertEqual(names[0], 'def')
        self.assertEqual(names[1], 'abc')
        self.assertEqual(names[2], 'ghi')
def write(pkg: ArchPackage):
    """Persist the package's cacheable data as JSON on its disk cache path.

    Creates the cache directory (including parents) when it does not exist yet.
    """
    data = pkg.get_data_to_cache()
    Path(pkg.get_disk_cache_path()).mkdir(parents=True, exist_ok=True)

    # 'w' suffices (nothing is read back, so 'w+' was unnecessary);
    # json.dump streams straight to the file instead of building a string first
    with open(pkg.get_disk_data_path(), 'w') as f:
        json.dump(data, f)
def test_check_update__pkg_no_last_modified_and_latest_version_higher_than_version(self):
    """When the package has no 'last_modified' timestamp, the version strings
    alone must flag the update."""
    mapper = AURDataMapper(i18n=Mock(), logger=Mock(), http_client=Mock())

    pkg = ArchPackage(name='test')
    pkg.last_modified = None
    pkg.version, pkg.latest_version = '1.0.0', '1.1.0'

    self.assertTrue(mapper.check_update(pkg=pkg, last_modified=1608143812))
def test_check_update__none_last_modified_and_version_equal_latest_version(self):
    """No update must be reported when the API sends no 'last_modified' and the
    versions are identical."""
    mapper = AURDataMapper(i18n=Mock(), logger=Mock(), http_client=Mock())

    pkg = ArchPackage(name='test')
    pkg.last_modified = 1608143812
    pkg.version = '1.0.0'
    pkg.latest_version = pkg.version

    self.assertFalse(mapper.check_update(pkg=pkg, last_modified=None))
def fill_package_build(self, pkg: ArchPackage):
    """Fill 'pkg.pkgbuild' from the local disk cache or, failing that, via HTTP."""
    cached = pkg.get_cached_pkgbuild_path()

    if pkg.installed and os.path.exists(cached):
        with open(cached) as f:
            pkg.pkgbuild = f.read()
        return

    res = self.http_client.get(pkg.get_pkg_build_url())

    if res and res.status_code == 200 and res.text:
        pkg.pkgbuild = res.text
def _fill_mirror_pkgs(self, mirrors: dict, apps: list):
    # TODO
    for name, data in mirrors.items():
        version = data.get('version')

        app = ArchPackage(name=name,
                          version=version,
                          latest_version=version,
                          description=data.get('description'))
        app.installed = True
        app.mirror = ''  # TODO
        app.update = False  # TODO

        apps.append(app)
def test__sort_deps__all_packages_no_deps(self):
    """Packages without declared dependencies (empty set or None) must all survive sorting."""
    deps = {
        ArchPackage(name='xpto', package_base='xpto'): set(),
        ArchPackage(name='abc', package_base='abc'): None
    }

    res = ArchManager._sort_deps(deps, {p.name: p for p in deps})

    self.assertIsInstance(res, list)
    self.assertEqual(len(deps), len(res))

    for p in res:
        self.assertIn(p, deps)
def test_check_update__pkg_last_modified_less_than_last_modified_and_version_higher_than_latest_version(self):
    """A newer 'last_modified' must win over a (higher) installed version string."""
    mapper = AURDataMapper(i18n=Mock(), logger=Mock(), http_client=Mock())

    pkg = ArchPackage(name='test')
    pkg.last_modified = 1608143812
    pkg.version, pkg.latest_version = '2.0.0', '1.0.0'

    # in this case, last modified is more relevant than the string version
    self.assertTrue(mapper.check_update(pkg=pkg, last_modified=pkg.last_modified + 100))
def _fill_conflicts(self, context: UpdateRequirementsContext, blacklist: Iterable[str] = None):
    """Fill 'context.to_remove' with packages conflicting with the upgrade transaction.

    Registers both the direct ("root") conflicts and, via pacman, the installed
    packages that would have to be removed along with them. Packages already
    mapped or present in 'blacklist' are skipped. Also drops every removed
    package from the upgrade lists and attaches its freed disk size when known.
    """
    self.logger.info("Checking conflicts")
    root_conflict = self._filter_and_map_conflicts(context)
    # packages depending on the root conflicts, which pacman would remove as well
    sub_conflict = pacman.get_dependencies_to_remove(root_conflict.keys(), context.root_password) if root_conflict else None

    to_remove_map = {}

    if sub_conflict:
        for dep, source in sub_conflict.items():
            if dep not in to_remove_map and (not blacklist or dep not in blacklist):
                req = ArchPackage(name=dep, installed=True, i18n=self.i18n)
                to_remove_map[dep] = req
                reason = "{} '{}'".format(self.i18n['arch.info.depends on'].capitalize(), source)
                context.to_remove[dep] = UpgradeRequirement(req, reason)

    if root_conflict:
        for dep, source in root_conflict.items():
            if dep not in to_remove_map and (not blacklist or dep not in blacklist):
                req = ArchPackage(name=dep, installed=True, i18n=self.i18n)
                to_remove_map[dep] = req
                reason = "{} '{}'".format(self.i18n['arch.info.conflicts with'].capitalize(), source)
                context.to_remove[dep] = UpgradeRequirement(req, reason)

    if to_remove_map:
        for name in to_remove_map.keys():  # upgrading lists
            # a package queued for removal must not be upgraded in the same transaction
            if name in context.pkgs_data:
                del context.pkgs_data[name]

            if name in context.aur_to_update:
                del context.aur_to_update[name]

            if name in context.repo_to_update:
                del context.repo_to_update[name]

        # attach the freed disk size to each removal requirement (when pacman reports it)
        removed_size = pacman.get_installed_size([*to_remove_map.keys()])

        if removed_size:
            for name, size in removed_size.items():
                if size is not None:
                    req = context.to_remove.get(name)

                    if req:
                        req.extra_size = size
def map_api_data(self, apidata: dict, installed: dict) -> ArchPackage:
    """Map an AUR API entry to an ArchPackage, merging locally installed data when present."""
    name = apidata.get('Name')
    local_data = installed.get(name)

    app = ArchPackage(name=name, installed=bool(local_data), mirror='aur')
    app.status = PackageStatus.LOADING_DATA

    if local_data:
        app.version = local_data.get('version')
        app.description = local_data.get('description')

    self.fill_api_data(app, apidata, fill_version=not local_data)
    return app
def fill_icon_path(app: ArchPackage, icon_paths: List[str], only_exact_match: bool):
    """Set 'app.icon_path' to the first path matching the app's icon (or name).

    When no exact match exists and 'only_exact_match' is False, fall back to any
    icon pacman lists for the package.
    """
    base = app.icon_path if app.icon_path else app.name
    pattern = re.compile(r'.+/{}\.(png|svg)$'.format(base), re.IGNORECASE)

    match = next((p for p in icon_paths if pattern.match(p)), None)

    if match:
        app.icon_path = match
        return

    if not only_exact_match:
        pkg_icons_path = pacman.list_icon_paths({app.name})

        if pkg_icons_path:
            app.set_icon(pkg_icons_path)
def test__sort_deps__not_related_packages(self):
    """Unrelated packages must all be present in the result (any order is fine)."""
    deps = {
        ArchPackage(name='google-chrome', package_base='google-chrome'): {'alsa-lib', 'gtk3', 'libcups'},
        ArchPackage(name='git-cola', package_base='git-cola'): {'git', 'python-pyqt5', 'icu qt5-svg'},
        ArchPackage(name='kazam', package_base='kazam'): {'python', 'python-cairo'}
    }

    res = ArchManager._sort_deps(deps, {p.name: p for p in deps})

    self.assertIsInstance(res, list)
    self.assertEqual(len(deps), len(res))

    for p in res:
        self.assertIn(p, deps)
def _map_and_add_package(self, pkg_data: Tuple[str, str], idx: int, output: dict):
    """Resolve the version of a (name, repository) pair and store an ArchPackage at output[idx].

    pkg_data: tuple (package name, repository name). For 'aur' the version comes
    from the package's SRCINFO; otherwise pacman is queried.
    """
    name, repository = pkg_data
    version = None

    if repository == 'aur':
        try:
            info = self.aur_client.get_src_info(name)

            if info:
                version = info.get('pkgver')

                if not version:
                    self.logger.warning("No version declared in SRCINFO of '{}'".format(name))
            else:
                self.logger.warning("Could not retrieve the SRCINFO for '{}'".format(name))
        except Exception:
            # fixed: bare 'except:' also swallowed SystemExit/KeyboardInterrupt
            self.logger.warning("Could not retrieve the SRCINFO for '{}'".format(name))
    else:
        version = pacman.get_version_for_not_installed(name)

    output[idx] = ArchPackage(name=name, version=version, latest_version=version,
                              repository=repository, i18n=self.i18n)
def fill_icon_path(pkg: ArchPackage, icon_paths: List[str], only_exact_match: bool):
    """Set 'pkg.icon_path' to the first path matching the package's icon (or cleaned name).

    When no exact match exists and 'only_exact_match' is False, fall back to any
    icon pacman lists for the package.
    """
    base = pkg.icon_path if pkg.icon_path else RE_CLEAN_NAME.sub('', pkg.name)
    pattern = re.compile(r'.+/{}\.(png|svg|xpm)$'.format(base), re.IGNORECASE)

    for candidate in icon_paths:
        if pattern.match(candidate):
            pkg.icon_path = candidate
            return

    if not only_exact_match:
        pkg_icons_path = pacman.list_icon_paths({pkg.name})

        if pkg_icons_path:
            pkg.set_icon(pkg_icons_path)
def map_api_data(self, apidata: dict, pkgs_installed: Optional[dict], categories: Dict[str, List[str]]) -> ArchPackage:
    """Build an ArchPackage from an AUR RPC response entry.

    Merges locally installed data when available and tags orphan/out-of-date
    packages with extra categories.
    """
    name = apidata.get('Name')
    local_data = pkgs_installed.get(name) if pkgs_installed else None

    app = ArchPackage(name=name, installed=bool(local_data), repository='aur', i18n=self.i18n)
    app.status = PackageStatus.LOADING_DATA

    if categories:
        app.categories = categories.get(app.name)

    if local_data:
        app.version = local_data.get('version')
        app.description = local_data.get('description')

    self.fill_api_data(app, apidata, fill_version=not local_data)

    if app.orphan or app.out_of_date:
        if app.categories is None:
            app.categories = []

        if app.orphan:
            app.categories.append('orphan')

        if app.out_of_date:
            app.categories.append('out_of_date')

    return app
def set_icon_path(app: ArchPackage, icon_name: str = None):
    """Point 'app.icon_path' at the first installed icon whose basename matches
    'icon_name' (without extension) or, by default, the app's name."""
    installed_icons = pacman.list_icon_paths({app.name})

    if not installed_icons:
        return

    base = icon_name.split('.')[0] if icon_name else app.name
    exact_match = re.compile(r'.+/{}\..+$'.format(base))

    for icon_path in installed_icons:
        if exact_match.match(icon_path):
            app.icon_path = icon_path
            break
def test__sort_deps__with_cycle(self):
    """
    dep order: abc -> def -> ghi -> jkl -> abc (a cycle must neither hang nor drop packages)
    """
    names = ('abc', 'def', 'ghi', 'jkl')
    next_in_cycle = dict(zip(names, names[1:] + names[:1]))

    deps = {ArchPackage(name=n, package_base=n): {next_in_cycle[n]} for n in names}

    res = ArchManager._sort_deps(deps, {p.name: p for p in deps})

    self.assertIsInstance(res, list)
    self.assertEqual(len(deps), len(res))

    for p in res:
        self.assertIn(p, deps)
def _uninstall(self, pkg_name: str, root_password: str, handler: ProcessHandler) -> bool:
    """Remove an installed package via pacman and drop its disk cache directory (if any)."""
    proc = SystemProcess(new_root_subprocess(['pacman', '-R', pkg_name, '--noconfirm'], root_password))
    res = handler.handle(proc)

    if res:
        # the cache lives under a single repository dir ('aur' or 'mirror'), so stop at the first hit
        for repo in ('aur', 'mirror'):
            cache_dir = ArchPackage.disk_cache_path(pkg_name, repo)

            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
                break

    return res
def _fill_aur_pkgs(self, not_signed: dict, pkgs: list, disk_loader: DiskCacheLoader, internet_available: bool):
    """Append an ArchPackage to 'pkgs' for every installed AUR package in 'not_signed'.

    When the internet is available the AUR RPC API supplies the metadata;
    otherwise (or if the connection drops) only the locally available pacman
    data is used. 'not_signed' maps package name -> local data dict.
    """
    downgrade_enabled = git.is_enabled()

    if internet_available:
        try:
            pkgsinfo = self.aur_client.get_info(not_signed.keys())

            if pkgsinfo:
                for pkgdata in pkgsinfo:
                    pkg = self.mapper.map_api_data(pkgdata, not_signed, self.categories)
                    pkg.downgrade_enabled = downgrade_enabled

                    if disk_loader:
                        disk_loader.fill(pkg)

                    pkg.status = PackageStatus.READY
                    pkgs.append(pkg)

                # API data mapped: nothing left to do
                return
        except requests.exceptions.ConnectionError:
            self.logger.warning('Could not retrieve installed AUR packages API data. It seems the internet connection is off.')

    # offline fallback: map only what is locally known about each package
    self.logger.info("Reading only local AUR packages data")

    for name, data in not_signed.items():
        pkg = ArchPackage(name=name, version=data.get('version'),
                          latest_version=data.get('version'),
                          description=data.get('description'),
                          installed=True, mirror='aur')

        pkg.categories = self.categories.get(pkg.name)
        pkg.downgrade_enabled = downgrade_enabled

        if disk_loader:
            disk_loader.fill(pkg)

        pkg.status = PackageStatus.READY
        pkgs.append(pkg)
def _fill_to_install(self, context: UpdateRequirementsContext) -> bool:
    """Discover and register the packages the upgrade transaction additionally needs.

    Fills 'context.to_install' (and the aur/repo sub-maps) with the missing
    dependencies reported by the dependency analyser, merges their data into
    'context.pkgs_data' and re-checks conflicts.

    Returns False when the user aborted the process, True otherwise.
    """
    ti = time.time()
    self.logger.info("Discovering updates missing packages")
    deps_data, deps_checked = {}, set()
    deps = self.deps_analyser.map_missing_deps(pkgs_data=context.pkgs_data,
                                               provided_map=context.provided_map,
                                               aur_index=context.aur_index,
                                               deps_checked=deps_checked,
                                               sort=True,
                                               deps_data=deps_data,
                                               remote_provided_map=context.remote_provided_map,
                                               remote_repo_map=context.remote_repo_map,
                                               watcher=self.watcher,
                                               automatch_providers=context.arch_config['automatch_providers'])

    if deps is None:
        tf = time.time()
        self.logger.info("It took {0:.2f} seconds to retrieve required upgrade packages".format(tf - ti))
        return False  # the user called the process off

    if deps:
        # filtering selected packages: anything already chosen for update is not a "missing" dep
        selected_names = {p for p in context.to_update}
        deps = [dep for dep in deps if dep[0] not in selected_names]

        if deps:
            sorted_pkgs = {}
            aur_to_install_data = {}
            all_to_install_data = {}

            # each dep is a (name, repository) tuple; deps_data holds its version under 'v'
            for idx, dep in enumerate(deps):
                data = deps_data[dep[0]]
                pkg = ArchPackage(name=dep[0], version=data['v'], latest_version=data['v'],
                                  repository=dep[1], i18n=self.i18n)
                sorted_pkgs[idx] = pkg
                context.to_install[dep[0]] = pkg

                if pkg.repository == 'aur':
                    context.aur_to_install[pkg.name] = pkg
                    aur_to_install_data[pkg.name] = data
                else:
                    context.repo_to_install[pkg.name] = pkg

            # repository packages get their full data from pacman in one batch call
            if context.repo_to_install:
                all_to_install_data.update(pacman.map_updates_data(context.repo_to_install.keys()))

            if aur_to_install_data:
                all_to_install_data.update(aur_to_install_data)

            if all_to_install_data:
                context.pkgs_data.update(all_to_install_data)
                # new packages may introduce new conflicts; skip what is already queued for removal
                self._fill_conflicts(context, context.to_remove.keys())

    tf = time.time()
    self.logger.info("It took {0:.2f} seconds to retrieve required upgrade packages".format(tf - ti))
    return True
def _add_to_remove(self, pkgs_to_sync: Set[str], names: Dict[str, Set[str]], context: UpdateRequirementsContext, to_ignore: Set[str] = None):
    """Queue the given 'names' for removal on the context and recurse into their dependents.

    names: package name -> set of names it depends on (used to build the reason text).
    to_ignore: names that must never be queued (also prevents infinite recursion).
    """
    blacklist = to_ignore if to_ignore else set()
    blacklist.update(names)

    # name to remove -> packages in 'pkgs_to_sync' that depend on it
    dependents = {}

    for pname in pkgs_to_sync:
        if pname not in blacklist:
            data = context.pkgs_data.get(pname)

            if data:
                deps = data.get('d')

                if deps:
                    for n in names:
                        if n in deps:
                            all_deps = dependents.get(n, set())
                            # fixed: set.update(pname) added every CHARACTER of the
                            # name to the set; set.add adds the name itself
                            all_deps.add(pname)
                            dependents[n] = all_deps
            else:
                # fixed: the '.format(pname)' call was missing, so the raw '{}'
                # placeholder ended up in the log
                self.logger.warning("Package '{}' to sync could not be removed from the transaction "
                                    "context because its data was not loaded".format(pname))

    for n in names:
        if n in context.pkgs_data:
            if n not in context.to_remove:
                depends_on = names.get(n)

                if depends_on:
                    reason = "{} '{}'".format(self.i18n['arch.info.depends on'].capitalize(),
                                              ', '.join(depends_on))
                else:
                    reason = '?'

                context.to_remove[n] = UpgradeRequirement(pkg=ArchPackage(name=n, installed=True, i18n=self.i18n),
                                                          reason=reason)

                # anything depending on a removed package must be removed as well
                all_deps = dependents.get(n)

                if all_deps:
                    self._add_to_remove(pkgs_to_sync, {dep: {n} for dep in all_deps}, context, blacklist)
        else:
            # fixed: the '.format(n)' call was missing here as well
            self.logger.warning("Package '{}' could not be removed from the transaction context "
                                "because its data was not loaded".format(n))
def fill_last_modified(self, pkg: ArchPackage, api_data: dict):
    """Set 'pkg.last_modified' from the API payload's 'LastModified' field.

    Only integer values are accepted; anything else (including a missing field)
    is logged as a warning and leaves the package untouched.
    """
    last_modified = api_data.get('LastModified')

    # isinstance already rejects None, so the extra 'is not None' check was redundant
    if isinstance(last_modified, int):
        pkg.last_modified = last_modified
        self.logger.info("'last_modified' field ({}) set to package '{}'".format(last_modified, pkg.name))
    else:
        self.logger.warning("Could not set the 'last_modified' field ({}) to package '{}'".format(last_modified, pkg.name))
def install(self, pkg: ArchPackage, root_password: str, watcher: ProcessWatcher, skip_optdeps: bool = False) -> bool:
    """Install the given AUR package and refresh it with any cached disk data.

    Returns True when the installation succeeded.
    """
    installed = self._install_from_aur(pkg.name, pkg.maintainer, root_password,
                                       ProcessHandler(watcher), dependency=False,
                                       skip_optdeps=skip_optdeps)

    if installed and os.path.exists(pkg.get_disk_data_path()):
        with open(pkg.get_disk_data_path()) as f:
            raw = f.read()

        if raw:
            pkg.fill_cached_data(json.loads(raw))

    return installed
def get_info(self, pkg: ArchPackage) -> dict:
    """Return a dict with the package's details for display.

    For installed packages the data comes from pacman (the PKGBUILD is fetched
    concurrently); otherwise the cached AUR metadata plus the SRCINFO are used.
    The numeric key prefixes define the display order of the fields.
    """
    if pkg.installed:
        # read the PKGBUILD in the background while pacman is queried
        t = Thread(target=self.mapper.fill_package_build, args=(pkg, ))
        t.start()

        info = pacman.get_info_dict(pkg.name)

        t.join()

        if pkg.pkgbuild:
            info['13_pkg_build'] = pkg.pkgbuild

        info['14_installed_files'] = pacman.list_installed_files(pkg.name)

        return info
    else:
        # NOTE(review): '03_description' and '03_version' share the '03' prefix —
        # looks like an ordering typo; the keys are still distinct, but confirm
        # the intended display order
        info = {
            '01_id': pkg.id,
            '02_name': pkg.name,
            '03_description': pkg.description,
            '03_version': pkg.version,
            '04_popularity': pkg.popularity,
            '05_votes': pkg.votes,
            '06_package_base': pkg.package_base,
            '07_maintainer': pkg.maintainer,
            '08_first_submitted': pkg.first_submitted,
            '09_last_modified': pkg.last_modified,
            '10_url': pkg.url_download
        }

        srcinfo = self.aur_client.get_src_info(pkg.name)

        if srcinfo:
            if srcinfo.get('makedepends'):
                info['12_makedepends'] = srcinfo['makedepends']

            if srcinfo.get('depends'):
                info['13_dependson'] = srcinfo['depends']

            if srcinfo.get('optdepends'):
                info['14_optdepends'] = srcinfo['optdepends']

            if srcinfo.get('checkdepends'):
                info['15_checkdepends'] = srcinfo['checkdepends']

        if pkg.pkgbuild:
            info['00_pkg_build'] = pkg.pkgbuild
        else:
            info['11_pkg_build_url'] = pkg.get_pkg_build_url()

        return info
def test__sort_deps__two_of_three_related(self):
    """
    dep order = abc -> ghi -> def

    expected: def, ghi, abc
    """
    deps = {
        ArchPackage(name='abc', package_base='abc'): {'ghi', 'xpto'},
        ArchPackage(name='def', package_base='def'): {'jkl'},
        ArchPackage(name='ghi', package_base='ghi'): {'def'}
    }
    name_map = {d.name: d for d in deps}

    # repeat to make sure the ordering is deterministic
    for _ in range(5):
        res = ArchManager._sort_deps(deps, name_map)

        self.assertIsInstance(res, list)
        self.assertEqual(len(deps), len(res))

        for p in res:
            self.assertIn(p, deps)

        self.assertEqual([p.name for p in res], ['def', 'ghi', 'abc'])
def run(self):
    """Pre-cache installed Arch packages' data to disk, reporting progress via the task manager.

    Skips packages whose disk cache already exists. Does nothing when neither
    AUR nor repository packages are enabled.
    """
    if not any([self.aur, self.repositories]):
        return

    ti = time.time()
    self.task_man.register_task(self.task_id, self.i18n['arch.task.disk_cache'], get_icon_path())

    self.logger.info('Pre-caching installed Arch packages data to disk')
    installed = pacman.map_installed(repositories=self.repositories, aur=self.aur)

    self.task_man.update_progress(self.task_id, 0, self.i18n['arch.task.disk_cache.reading'])

    # drop packages that already have a disk cache entry
    for k in ('signed', 'not_signed'):
        installed[k] = {p for p in installed[k] if not os.path.exists(ArchPackage.disk_cache_path(p))}

    saved = 0
    pkgs = {*installed['signed'], *installed['not_signed']}
    # package name -> repository; 'not_signed' packages come from the AUR
    repo_map = {}

    if installed['not_signed']:
        repo_map.update({p: 'aur' for p in installed['not_signed']})

    if installed['signed']:
        repo_map.update(pacman.map_repositories(installed['signed']))

    # progress accounting: each package counts once for "prepared" and once for "indexed"
    self.to_index = len(pkgs)
    self.progress = self.to_index * 2
    self.update_prepared(None, add=False)

    saved += disk.save_several(pkgs, repo_map, when_prepared=self.update_prepared, after_written=self.update_indexed)
    self.task_man.update_progress(self.task_id, 100, None)
    self.task_man.finish_task(self.task_id)

    tf = time.time()
    time_msg = 'Took {0:.2f} seconds'.format(tf - ti)
    self.logger.info('Pre-cached data of {} Arch packages to the disk. {}'.format(saved, time_msg))
def _fill_conflicts(self, context: UpdateRequirementsContext, blacklist: Iterable[str] = None):
    """Register every root conflict as a removal requirement on the context.

    Conflicts already queued for removal or present in 'blacklist' are skipped.
    """
    self.logger.info("Checking conflicts")
    root_conflict = self._filter_and_map_conflicts(context)

    if not root_conflict:
        return

    for dep, source in root_conflict.items():
        already_queued = dep in context.to_remove
        blacklisted = bool(blacklist) and dep in blacklist

        if not already_queued and not blacklisted:
            req = ArchPackage(name=dep, installed=True, i18n=self.i18n)
            reason = "{} '{}'".format(self.i18n['arch.info.conflicts with'].capitalize(), source)
            context.to_remove[dep] = UpgradeRequirement(req, reason)