def search(self, words: str, disk_loader: DiskCacheLoader, limit: int = -1, is_url: bool = False) -> SearchResult:
    if is_url:
        return SearchResult([], [], 0)

    downgrade_enabled = git.is_enabled()
    res = SearchResult([], [], 0)

    # read the installed packages in the background while the AUR API is queried
    installed = {}
    read_installed = Thread(target=lambda: installed.update(pacman.list_and_map_installed()), daemon=True)
    read_installed.start()

    mapped_words = SEARCH_OPTIMIZED_MAP.get(words)
    api_res = self.aur_client.search(mapped_words if mapped_words else words)

    if api_res and api_res.get('results'):
        read_installed.join()

        for pkgdata in api_res['results']:
            self._upgrade_search_result(pkgdata, installed, downgrade_enabled, res, disk_loader)
    else:
        # if there are no results from the API (it could be because there were too many), try the local names index:
        aur_index = self.aur_client.read_local_index()

        if aur_index:
            self.logger.info("Querying through the local AUR index")
            to_query = set()

            for norm_name, real_name in aur_index.items():
                if words in norm_name:
                    to_query.add(real_name)

                if len(to_query) == 25:
                    break

            pkgsinfo = self.aur_client.get_info(to_query)

            if pkgsinfo:
                read_installed.join()

                for pkgdata in pkgsinfo:
                    self._upgrade_search_result(pkgdata, installed, downgrade_enabled, res, disk_loader)

    res.total = len(res.installed) + len(res.new)
    return res
def list_warnings(self) -> List[str]:
    warnings = []

    if self.arch_distro:
        if not pacman.is_enabled():
            warnings.append(self.i18n['arch.warning.disabled'].format(bold('pacman')))

        if not self._is_wget_available():
            warnings.append(self.i18n['arch.warning.disabled'].format(bold('wget')))

        if not git.is_enabled():
            warnings.append(self.i18n['arch.warning.git'].format(bold('git')))

    return warnings
def search(self, words: str, disk_loader: DiskCacheLoader, limit: int = -1) -> SearchResult:
    self.comp_optimizer.join()

    downgrade_enabled = git.is_enabled()
    res = SearchResult([], [], 0)

    # read the installed packages in the background while the AUR API is queried
    installed = {}
    read_installed = Thread(target=lambda: installed.update(pacman.list_and_map_installed()))
    read_installed.start()

    api_res = self.aur_client.search(words)

    if api_res and api_res.get('results'):
        read_installed.join()

        for pkgdata in api_res['results']:
            self._upgrade_search_result(pkgdata, installed, downgrade_enabled, res, disk_loader)
    else:
        # if there are no results from the API (it could be because there were too many), try the names index:
        if self.names_index:
            to_query = set()

            for norm_name, real_name in self.names_index.items():
                if words in norm_name:
                    to_query.add(real_name)

                if len(to_query) == 25:
                    break

            pkgsinfo = self.aur_client.get_info(to_query)

            if pkgsinfo:
                read_installed.join()

                for pkgdata in pkgsinfo:
                    self._upgrade_search_result(pkgdata, installed, downgrade_enabled, res, disk_loader)

    res.total = len(res.installed) + len(res.new)
    return res
def _fill_aur_pkgs(self, not_signed: dict, pkgs: list, disk_loader: DiskCacheLoader, internet_available: bool):
    downgrade_enabled = git.is_enabled()

    if internet_available:
        try:
            pkgsinfo = self.aur_client.get_info(not_signed.keys())

            if pkgsinfo:
                for pkgdata in pkgsinfo:
                    pkg = self.mapper.map_api_data(pkgdata, not_signed, self.categories)
                    pkg.downgrade_enabled = downgrade_enabled

                    if disk_loader:
                        disk_loader.fill(pkg)
                        pkg.status = PackageStatus.READY

                    pkgs.append(pkg)

            return
        except requests.exceptions.ConnectionError:
            self.logger.warning('Could not retrieve installed AUR packages API data. It seems the internet connection is off.')
            self.logger.info("Reading only local AUR packages data")

    # offline fallback: build the packages only from the locally available data
    for name, data in not_signed.items():
        pkg = ArchPackage(name=name, version=data.get('version'), latest_version=data.get('version'),
                          description=data.get('description'), installed=True, mirror='aur')

        pkg.categories = self.categories.get(pkg.name)
        pkg.downgrade_enabled = downgrade_enabled

        if disk_loader:
            disk_loader.fill(pkg)
            pkg.status = PackageStatus.READY

        pkgs.append(pkg)