def _map_to_model(self, app_json: dict, installed: bool, disk_loader: DiskCacheLoader, internet: bool = True) -> FlatpakApplication:
    """Build a FlatpakApplication from the raw JSON payload.

    Fresh cached API data is applied directly; otherwise (for non-runtime apps)
    the disk cache is preloaded and an async API load is scheduled.
    """
    app = FlatpakApplication(**app_json, i18n=self.i18n)
    app.installed = installed

    cached = self.api_cache.get(app_json['id'])
    expiration = cached.get('expires_at') if cached else None
    expired = cached and expiration and expiration <= datetime.utcnow()

    if cached and not expired:
        # fresh cached API data: no need to reach the network
        app.fill_cached_data(cached)
        app.status = PackageStatus.READY
    elif not app.runtime:
        if disk_loader:
            disk_loader.fill(app)  # preloading cached disk data

        if internet:
            FlatpakAsyncDataLoader(app=app,
                                   api_cache=self.api_cache,
                                   manager=self,
                                   context=self.context,
                                   category_cache=self.category_cache).start()

    return app
def map_json(self, app_json: dict, installed: bool, disk_loader: DiskCacheLoader, internet: bool = True) -> SnapApplication:
    """Translate a raw snap JSON payload into a SnapApplication model.

    Applies fresh cached API data when present; otherwise, for applications,
    preloads the disk cache and schedules an async API load.
    """
    app = SnapApplication(publisher=app_json.get('publisher'),
                          rev=app_json.get('rev'),
                          notes=app_json.get('notes'),
                          app_type=app_json.get('type'),
                          id=app_json.get('name'),
                          name=app_json.get('name'),
                          version=app_json.get('version'),
                          latest_version=app_json.get('version'),
                          description=app_json.get('description', app_json.get('summary')))

    if app.publisher:
        # drop the star marker that may decorate the publisher name
        app.publisher = app.publisher.replace('*', '')

    app.installed = installed

    cached = self.api_cache.get(app_json['name'])
    stale = cached and cached.get('expires_at') and cached['expires_at'] <= datetime.utcnow()

    if (cached and not stale) or not app.is_application():
        app.fill_cached_data(cached)
    else:
        if disk_loader and app.installed:
            disk_loader.fill(app)

        if internet:
            SnapAsyncDataLoader(app=app, api_cache=self.api_cache, manager=self, context=self.context).start()

    return app
def map_json(self, app_json: dict, installed: bool, disk_loader: DiskCacheLoader, internet: bool = True) -> SnapApplication:
    """Translate a raw snap JSON payload into a SnapApplication model.

    Tags runtimes with the 'runtime' category, resolves the verified-publisher
    marker and wires cached / asynchronous API data.
    """
    app = SnapApplication(publisher=app_json.get('publisher'),
                          rev=app_json.get('rev'),
                          notes=app_json.get('notes'),
                          has_apps_field=app_json.get('apps_field', False),
                          id=app_json.get('name'),
                          name=app_json.get('name'),
                          version=app_json.get('version'),
                          latest_version=app_json.get('version'),
                          description=app_json.get('description', app_json.get('summary')),
                          verified_publisher=app_json.get('developer_validation', '') == 'verified',
                          extra_actions=self.custom_actions)

    publisher = app.publisher
    if publisher and publisher.endswith('*'):
        # a trailing '*' also marks a verified publisher
        app.verified_publisher = True
        app.publisher = publisher.replace('*', '')

    known_categories = self.categories.get(app.name.lower())
    if known_categories:
        app.categories = known_categories

    app.installed = installed

    if not app.is_application():
        # non-applications are always tagged as runtimes
        if app.categories is None:
            app.categories = []

        if 'runtime' not in app.categories:
            app.categories.append('runtime')

    cached = self.api_cache.get(app_json['name'])
    stale = cached and cached.get('expires_at') and cached['expires_at'] <= datetime.utcnow()

    if (cached and not stale) or not app.is_application():
        app.fill_cached_data(cached)
    else:
        if disk_loader and app.installed:
            disk_loader.fill(app)

        if internet:
            SnapAsyncDataLoader(app=app, api_cache=self.api_cache, manager=self, context=self.context).start()

    return app
def read_installed(self, disk_loader: DiskCacheLoader = None, limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, internet_available: bool = None) -> SearchResult:
    """Read the installed packages from every capable manager.

    Marks update-ignored packages with the 'updates_ignored' category and
    returns the aggregated result sorted by lower-cased package name.
    """
    ti = time.time()
    self._wait_to_be_ready()

    res = SearchResult([], None, 0)
    disk_loader = None  # the received loader is ignored: a fresh one is created on demand
    net_available = self.context.is_internet_available()

    if not pkg_types:  # any type
        to_read = [man for man in self.managers if self._can_work(man)]
    else:
        to_read = []
        for t in pkg_types:
            man = self.map.get(t)
            # bug fix: the original never registered already-used managers, so a
            # manager mapped by several requested types was queried repeatedly
            # (duplicating its packages in the result)
            if man and man not in to_read and self._can_work(man):
                to_read.append(man)

    for man in to_read:
        if not disk_loader:
            disk_loader = self.disk_loader_factory.new()
            disk_loader.start()

        mti = time.time()
        man_res = man.read_installed(disk_loader=disk_loader, pkg_types=None, internet_available=net_available)
        mtf = time.time()
        self.logger.info(man.__class__.__name__ + " took {0:.2f} seconds".format(mtf - mti))

        res.installed.extend(man_res.installed)
        res.total += man_res.total

    if disk_loader:
        disk_loader.stop_working()
        disk_loader.join()

    if res.installed:
        for p in res.installed:
            if p.is_update_ignored():
                if p.categories is None:
                    p.categories = ['updates_ignored']
                elif 'updates_ignored' not in p.categories:
                    p.categories.append('updates_ignored')

        res.installed.sort(key=self._get_package_lower_name)

    tf = time.time()
    self.logger.info('Took {0:.2f} seconds'.format(tf - ti))
    return res
def _upgrade_search_result(self, apidata: dict, installed_pkgs: dict, downgrade_enabled: bool, res: SearchResult, disk_loader: DiskCacheLoader): app = self.mapper.map_api_data(apidata, installed_pkgs['not_signed']) app.downgrade_enabled = downgrade_enabled if app.installed: res.installed.append(app) if disk_loader: disk_loader.fill(app) else: res.new.append(app) Thread(target=self.mapper.fill_package_build, args=(app, )).start()
def read_installed(self, disk_loader: DiskCacheLoader = None, limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, net_check: bool = None) -> SearchResult:
    """Read the installed packages from every capable manager.

    The internet check runs on a background thread and is only joined when a
    manager is actually about to be queried.
    """
    ti = time.time()
    self._wait_to_be_ready()

    net_check = {}  # the received argument is ignored: the check result is filled here
    thread_internet_check = self._get_internet_check(net_check)

    res = SearchResult([], None, 0)
    disk_loader = None  # a fresh loader is created on demand

    if not pkg_types:  # any type
        to_read = [man for man in self.managers if self._can_work(man)]
    else:
        to_read = []
        for t in pkg_types:
            man = self.map.get(t)
            # bug fix: the original never registered already-used managers, so a
            # manager mapped by several requested types was queried repeatedly
            # (duplicating its packages in the result)
            if man and man not in to_read and self._can_work(man):
                to_read.append(man)

    for man in to_read:
        if not disk_loader:
            disk_loader = self.disk_loader_factory.new()
            disk_loader.start()

        # bug fix: Thread.isAlive() was removed in Python 3.9 -> use is_alive()
        if thread_internet_check.is_alive():
            thread_internet_check.join()

        mti = time.time()
        man_res = man.read_installed(disk_loader=disk_loader, pkg_types=None, internet_available=net_check['available'])
        mtf = time.time()
        self.logger.info(man.__class__.__name__ + " took {0:.2f} seconds".format(mtf - mti))

        res.installed.extend(man_res.installed)
        res.total += man_res.total

    if disk_loader:
        disk_loader.stop_working()
        disk_loader.join()

    tf = time.time()
    self.logger.info('Took {0:.2f} seconds'.format(tf - ti))
    return res
def _fill_aur_pkgs(self, not_signed: dict, pkgs: list, disk_loader: DiskCacheLoader, internet_available: bool):
    """Fill 'pkgs' with models for the installed AUR packages in 'not_signed'.

    Tries the AUR API first when the internet is available; on a connection
    error (or offline) falls back to the locally available package data.
    """
    downgrade_enabled = git.is_enabled()

    if internet_available:
        try:
            api_info = self.aur_client.get_info(not_signed.keys())

            if api_info:
                for api_pkg in api_info:
                    pkg = self.mapper.map_api_data(api_pkg, not_signed, self.categories)
                    pkg.downgrade_enabled = downgrade_enabled

                    if disk_loader:
                        disk_loader.fill(pkg)
                        pkg.status = PackageStatus.READY

                    pkgs.append(pkg)

            return
        except requests.exceptions.ConnectionError:
            self.logger.warning('Could not retrieve installed AUR packages API data. It seems the internet connection is off.')
            self.logger.info("Reading only local AUR packages data")

    # offline fallback: build the models from the local data only
    for name, local_data in not_signed.items():
        pkg = ArchPackage(name=name,
                          version=local_data.get('version'),
                          latest_version=local_data.get('version'),
                          description=local_data.get('description'),
                          installed=True,
                          mirror='aur')

        pkg.categories = self.categories.get(pkg.name)
        pkg.downgrade_enabled = downgrade_enabled

        if disk_loader:
            disk_loader.fill(pkg)
            pkg.status = PackageStatus.READY

        pkgs.append(pkg)
def install(self, app: SoftwarePackage, root_password: str, disk_loader: DiskCacheLoader, handler: ProcessWatcher) -> TransactionResult: man = self._get_manager_for(app) if man: ti = time.time() disk_loader = self.disk_loader_factory.new() disk_loader.start() try: self.logger.info('Installing {}'.format(app)) res = man.install(app, root_password, disk_loader, handler) disk_loader.stop_working() disk_loader.join() self._update_post_transaction_status(res) return res except: traceback.print_exc() return TransactionResult(success=False, installed=[], removed=[]) finally: tf = time.time() self.logger.info('Installation of {}'.format(app) + 'took {0:.2f} minutes'.format((tf - ti) / 60))
def uninstall(self, pkg: SoftwarePackage, root_password: Optional[str], handler: ProcessWatcher, disk_loader: DiskCacheLoader = None) -> TransactionResult: man = self._get_manager_for(pkg) if man: ti = time.time() disk_loader = self.disk_loader_factory.new() disk_loader.start() self.logger.info(f"Uninstalling {pkg.name}") try: res = man.uninstall(pkg, root_password, handler, disk_loader) disk_loader.stop_working() disk_loader.join() self._update_post_transaction_status(res) return res except: traceback.print_exc() return TransactionResult(success=False, installed=[], removed=[]) finally: tf = time.time() self.logger.info( f'Uninstallation of {pkg} took {(tf - ti) / 60:.2f} minutes' )
def search(self, word: str, disk_loader: DiskCacheLoader = None, limit: int = -1) -> SearchResult:
    """Search 'word' across all managers in parallel and return the sorted,
    aggregated result.

    Raises NoInternetException when the internet is unavailable.
    """
    ti = time.time()
    self._wait_to_be_ready()

    res = SearchResult([], [], 0)

    if not internet.is_available(self.context.http_client, self.context.logger):
        raise NoInternetException()

    norm_word = word.strip().lower()

    disk_loader = self.disk_loader_factory.new()
    disk_loader.start()

    searchers = []
    for man in self.managers:
        searcher = Thread(target=self._search, args=(norm_word, man, disk_loader, res))
        searcher.start()
        searchers.append(searcher)

    for searcher in searchers:
        searcher.join()

    if disk_loader:
        disk_loader.stop_working()
        disk_loader.join()

    res.installed = self._sort(res.installed, norm_word)
    res.new = self._sort(res.new, norm_word)
    res.total = len(res.installed) + len(res.new)

    tf = time.time()
    self.logger.info('Took {0:.2f} seconds'.format(tf - ti))
    return res
def search(self, words: str, disk_loader: DiskCacheLoader = None, limit: int = -1, is_url: bool = False) -> SearchResult:
    """Search the sanitized query across all managers in parallel.

    An empty query (after sanitization) yields an empty result.
    Raises NoInternetException when the internet is unavailable.
    """
    ti = time.time()
    self._wait_to_be_ready()

    res = SearchResult.empty()

    if not self.context.is_internet_available():
        raise NoInternetException()

    norm_query = sanitize_command_input(words).lower()
    self.logger.info(f"Search query: {norm_query}")

    if norm_query:
        # the query is treated as a URL when it matches the URL pattern
        is_url = bool(RE_IS_URL.match(norm_query))

        disk_loader = self.disk_loader_factory.new()
        disk_loader.start()

        searchers = []
        for man in self.managers:
            searcher = Thread(target=self._search, args=(norm_query, is_url, man, disk_loader, res))
            searcher.start()
            searchers.append(searcher)

        for searcher in searchers:
            searcher.join()

        if disk_loader:
            disk_loader.stop_working()
            disk_loader.join()

    res.update_total()
    tf = time.time()
    self.logger.info(f'Took {tf - ti:.8f} seconds')
    return res
def search(self, words: str, disk_loader: DiskCacheLoader = None, limit: int = -1, is_url: bool = False) -> SearchResult:
    """Search the normalized query across all managers in parallel and return
    the sorted, aggregated result.

    Raises NoInternetException when the internet is unavailable.
    """
    ti = time.time()
    self._wait_to_be_ready()

    res = SearchResult.empty()

    if not self.context.is_internet_available():
        raise NoInternetException()

    query = words.strip().lower()
    url_match = RE_IS_URL.match(query)  # managers receive the match object itself

    disk_loader = self.disk_loader_factory.new()
    disk_loader.start()

    searchers = []
    for man in self.managers:
        searcher = Thread(target=self._search, args=(query, url_match, man, disk_loader, res))
        searcher.start()
        searchers.append(searcher)

    for searcher in searchers:
        searcher.join()

    if disk_loader:
        disk_loader.stop_working()
        disk_loader.join()

    res.installed = self._sort(res.installed, query)
    res.new = self._sort(res.new, query)

    res.update_total()
    tf = time.time()
    self.logger.info('Took {0:.8f} seconds'.format(tf - ti))
    return res