def search(self, words: str, disk_loader: DiskCacheLoader, limit: int = -1) -> SearchResult:
    """Search the AppImage database by name or description.

    Results already installed are moved from 'res.new' to 'res.installed'.

    :param words: the query string (interpolated into the SQL template).
    :param disk_loader: loader used to fill cached data of installed apps.
    :param limit: forwarded to read_installed (-1 means no limit).
    :return: a SearchResult holding the 'new' and 'installed' matches.
    """
    res = SearchResult([], [], 0)
    found_map = {}
    connection = self._get_db_connection(DB_APPS_PATH)

    if connection:
        try:
            cursor = connection.cursor()
            # NOTE(review): 'words' is interpolated straight into the SQL template —
            # consider a parameterized query to avoid injection/escaping issues
            cursor.execute(query.SEARCH_APPS_BY_NAME_OR_DESCRIPTION.format(words, words))

            for idx, row in enumerate(cursor.fetchall()):
                app = AppImage(*row)
                res.new.append(app)
                found_map[self._gen_app_key(app)] = {'app': app, 'idx': idx}
        finally:
            self._close_connection(DB_APPS_PATH, connection)

    if res.new:
        installed = self.read_installed(disk_loader, limit, only_apps=False, pkg_types=None,
                                        internet_available=True).installed

        if installed:
            # collect the matched keys first: deleting from 'res.new' by the
            # pre-recorded index while looping would shift the remaining
            # indexes after the first deletion and remove the wrong elements
            matched_keys = set()

            for iapp in installed:
                key = self._gen_app_key(iapp)

                if key in found_map:
                    matched_keys.add(key)
                    res.installed.append(iapp)

            if matched_keys:
                res.new = [app for app in res.new if self._gen_app_key(app) not in matched_keys]

    res.total = len(res.installed) + len(res.new)
    return res
def search(self, words: str, disk_loader: DiskCacheLoader, limit: int = -1, is_url: bool = False) -> SearchResult:
    """Search the AppImage 'apps' database by name or description.

    Already-installed matches are returned in 'installed' (with any unfilled
    attributes copied from the database row); the rest in 'new'. Installed
    apps whose name/description contain the query are also included even if
    they are not in the database.

    :param words: the query string.
    :param disk_loader: loader used to fill cached data of installed apps.
    :param limit: forwarded to read_installed (-1 means no limit).
    :param is_url: URL queries are not supported -> empty result.
    :return: the search result.
    """
    if is_url:
        return SearchResult.empty()

    apps_conn = self._get_db_connection(DATABASE_APPS_FILE)

    if not apps_conn:
        return SearchResult.empty()

    not_installed, found_map = [], {}

    try:
        cursor = apps_conn.cursor()
        cursor.execute(query.SEARCH_APPS_BY_NAME_OR_DESCRIPTION.format(words, words))

        for idx, r in enumerate(cursor.fetchall()):
            app = AppImage(*r, i18n=self.i18n)
            not_installed.append(app)
            found_map[self._gen_app_key(app)] = {'app': app, 'idx': idx}
    except Exception:  # best-effort: log and keep whatever was read so far
        self.logger.error("An exception happened while querying the 'apps' database")
        traceback.print_exc()

    try:
        installed = self.read_installed(connection=apps_conn, disk_loader=disk_loader, limit=limit,
                                        only_apps=False, pkg_types=None, internet_available=True).installed
    except Exception:
        installed = None

    installed_found = []

    if installed:
        lower_words = words.lower()
        # collect matched keys and filter 'not_installed' afterwards: deleting
        # by the pre-recorded index while looping shifts the remaining indexes
        # after the first deletion and removes the wrong elements
        matched_keys = set()

        for appim in installed:
            found = False

            if not_installed and found_map:
                key = self._gen_app_key(appim)
                new_found = found_map.get(key)

                if new_found:
                    if not appim.imported:
                        # complete missing attributes with the database data
                        for attr in self.search_unfilled_attrs:
                            if getattr(appim, attr) is None:
                                setattr(appim, attr, getattr(new_found['app'], attr))

                    matched_keys.add(key)
                    installed_found.append(appim)
                    found = True

            if not found and (lower_words in appim.name.lower() or (appim.description and lower_words in appim.description.lower())):
                installed_found.append(appim)

        if matched_keys:
            not_installed = [app for app in not_installed if self._gen_app_key(app) not in matched_keys]

    try:
        apps_conn.close()
    except Exception:
        self.logger.error(f"An exception happened when trying to close the connection to database file '{DATABASE_APPS_FILE}'")
        traceback.print_exc()

    return SearchResult(new=not_installed, installed=installed_found, total=len(not_installed) + len(installed_found))
def search(self, word: str, disk_loader: DiskCacheLoader = None, limit: int = -1) -> SearchResult:
    """Fan the query out to every registered manager in parallel.

    :param word: the raw query; it is stripped and lower-cased before use.
    :param disk_loader: ignored — a fresh loader is always created here.
    :param limit: unused by this implementation.
    :raises NoInternetException: when no internet connection is available.
    :return: the aggregated, sorted result from all managers.
    """
    ti = time.time()
    self._wait_to_be_ready()

    res = SearchResult([], [], 0)

    # guard clause: searching requires the network
    if not internet.is_available(self.context.http_client, self.context.logger):
        raise NoInternetException()

    norm_word = word.strip().lower()

    disk_loader = self.disk_loader_factory.new()
    disk_loader.start()

    # one worker thread per manager, all writing into the shared 'res'
    workers = []
    for man in self.managers:
        worker = Thread(target=self._search, args=(norm_word, man, disk_loader, res))
        worker.start()
        workers.append(worker)

    for worker in workers:
        worker.join()

    if disk_loader:
        disk_loader.stop_working()
        disk_loader.join()

    res.installed = self._sort(res.installed, norm_word)
    res.new = self._sort(res.new, norm_word)
    res.total = len(res.installed) + len(res.new)

    tf = time.time()
    self.logger.info('Took {0:.2f} seconds'.format(tf - ti))
    return res
def search(self, words: str, disk_loader: DiskCacheLoader, limit: int = -1, is_url: bool = False) -> SearchResult:
    """Query the snapd daemon for snaps matching 'words'.

    :param words: the query string forwarded to snapd.
    :param disk_loader: loader used when reading the installed snaps.
    :param limit: unused by this implementation.
    :param is_url: URL queries are not supported -> empty result.
    :return: matches split between 'installed' and 'new'.
    """
    # nothing to do for URLs or when snap/snapd is unavailable
    if is_url or not (snap.is_installed() or snapd.is_running()):
        return SearchResult([], [], 0)

    client = SnapdClient(self.logger)
    found = client.query(words)

    res = SearchResult([], [], 0)

    if found:
        installed = self.read_installed(disk_loader).installed

        for app_json in found:
            match = None

            if installed:
                app_id = app_json.get('id')
                match = next((i for i in installed if i.id == app_id), None)

            if match:
                res.installed.append(match)
            else:
                res.new.append(self._map_to_app(app_json, installed=False))

    res.total = len(res.installed) + len(res.new)
    return res
def search(self, words: str, disk_loader: DiskCacheLoader, limit: int = -1, is_url: bool = False) -> SearchResult:
    """Search the configured Flatpak remotes for 'words'.

    :param words: the query string.
    :param disk_loader: loader used when reading installed packages.
    :param limit: unused by this implementation.
    :param is_url: URL queries are not supported -> empty result.
    :return: matches split between 'installed' and 'new'.
    """
    if is_url:
        return SearchResult([], [], 0)

    res = SearchResult([], [], 0)
    remote_level = self._get_search_remote()
    found = flatpak.search(flatpak.get_version(), words, remote_level)

    if found:
        matched_ids = set()
        installed = self.read_installed(disk_loader=disk_loader, internet_available=True).installed

        if installed:
            # mark every remote match that is already installed
            for remote_app in found:
                for local_app in installed:
                    if remote_app['id'] == local_app.id:
                        res.installed.append(local_app)
                        matched_ids.add(remote_app['id'])

        if len(found) > len(matched_ids):
            for remote_app in found:
                if remote_app['id'] not in matched_ids:
                    res.new.append(self._map_to_model(remote_app, False, disk_loader))

    res.total = len(res.installed) + len(res.new)
    return res
def search(self, words: str, disk_loader: DiskCacheLoader, limit: int = -1) -> SearchResult:
    """Search snaps matching 'words' through the 'snap' CLI.

    :param words: the query string.
    :param disk_loader: loader passed to the JSON -> model mapping.
    :param limit: unused by this implementation.
    :return: matches split between 'installed' and 'new'; empty when snapd
             is not running.
    """
    # guard clause: nothing can be queried without the snapd daemon
    if not snap.is_snapd_running():
        return SearchResult([], [], 0)

    installed = self.read_installed(disk_loader).installed
    res = SearchResult([], [], 0)

    for app_json in snap.search(words):
        match = None

        if installed:
            app_name = app_json.get('name')
            match = next((i for i in installed if i.id == app_name), None)

        if match:
            res.installed.append(match)
        else:
            res.new.append(self.map_json(app_json, installed=False, disk_loader=disk_loader))

    res.total = len(res.installed) + len(res.new)
    return res
def search(self, words: str, disk_loader: DiskCacheLoader, limit: int = -1) -> SearchResult:
    """Search the Flatpak remotes for 'words'.

    :param words: the query string.
    :param disk_loader: loader used when reading installed packages.
    :param limit: unused by this implementation.
    :return: matches split between 'installed' and 'new'.
    """
    res = SearchResult([], [], 0)
    found = flatpak.search(flatpak.get_version(), words)

    if found:
        matched_ids = set()
        installed = self.read_installed(disk_loader=disk_loader).installed

        if installed:
            # mark every remote match that is already installed
            for remote_app in found:
                for local_app in installed:
                    if remote_app['id'] == local_app.id:
                        res.installed.append(local_app)
                        matched_ids.add(remote_app['id'])

        if len(found) > len(matched_ids):
            for remote_app in found:
                if remote_app['id'] not in matched_ids:
                    res.new.append(self._map_to_model(remote_app, False, disk_loader))

    res.total = len(res.installed) + len(res.new)
    return res
def read_installed(self, disk_loader: DiskCacheLoader, limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, internet_available: bool = None, connection: sqlite3.Connection = None) -> SearchResult:
    """Read the AppImages installed on disk and check updates against the database.

    Each installed AppImage has a 'data.json' under its own directory inside
    INSTALLATION_PATH. The database is then queried by name to determine if a
    newer version is available.

    :param disk_loader: unused by this implementation.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param pkg_types: unused by this implementation.
    :param internet_available: unused by this implementation.
    :param connection: optional open database connection; when provided it is
                       NOT closed by this method.
    :return: a SearchResult with only 'installed' filled.
    """
    res = SearchResult([], [], 0)

    if os.path.exists(INSTALLATION_PATH):
        # NOTE(review): parses 'ls' output to find data files — a glob would be
        # more robust against unusual file names; verify before changing
        installed = run_cmd('ls {}*/data.json'.format(INSTALLATION_PATH), print_error=False)

        if installed:
            names = set()

            for path in installed.split('\n'):
                if path:
                    with open(path) as f:
                        app = AppImage(installed=True, **json.loads(f.read()))
                    app.icon_url = app.icon_path
                    res.installed.append(app)
                    # quoted lower-cased names used to build the SQL 'IN' clause
                    names.add("'{}'".format(app.name.lower()))

            if res.installed:
                # reuse the caller's connection when given, otherwise open one
                con = self._get_db_connection(DB_APPS_PATH) if not connection else connection

                if con:
                    try:
                        cursor = con.cursor()
                        cursor.execute(query.FIND_APPS_BY_NAME.format(','.join(names)))

                        # tup: (name, github, latest_version, download_url)
                        for tup in cursor.fetchall():
                            for app in res.installed:
                                if app.name.lower() == tup[0].lower() and (not app.github or app.github.lower() == tup[1].lower()):
                                    # NOTE(review): lexicographic string comparison of
                                    # versions — '1.10' < '1.9' here; confirm whether a
                                    # proper version parse is needed
                                    app.update = tup[2] > app.version

                                    if app.update:
                                        app.latest_version = tup[2]
                                        app.url_download_latest_version = tup[3]

                                    break
                    except:
                        traceback.print_exc()
                    finally:
                        # only close connections opened by this method
                        if not connection:
                            self._close_connection(DB_APPS_PATH, con)

    res.total = len(res.installed)
    return res
def search(self, words: str, disk_loader: DiskCacheLoader, limit: int = -1, is_url: bool = False) -> SearchResult:
    """Search AUR packages matching 'words'.

    Queries the AUR RPC API first; when it returns nothing (e.g. too many
    matches), falls back to the local names index (up to 25 names).

    :param words: the query string.
    :param disk_loader: loader forwarded to the result mapping.
    :param limit: unused by this implementation.
    :param is_url: URL queries are not supported -> empty result.
    :return: matches split between 'installed' and 'new'.
    """
    if is_url:
        return SearchResult([], [], 0)

    downgrade_enabled = git.is_enabled()
    res = SearchResult([], [], 0)

    # read the installed packages concurrently with the API call
    installed_pkgs = {}
    installed_reader = Thread(target=lambda: installed_pkgs.update(pacman.list_and_map_installed()), daemon=True)
    installed_reader.start()

    optimized_query = SEARCH_OPTIMIZED_MAP.get(words)
    api_res = self.aur_client.search(optimized_query or words)

    if api_res and api_res.get('results'):
        installed_reader.join()

        for pkgdata in api_res['results']:
            self._upgrade_search_result(pkgdata, installed_pkgs, downgrade_enabled, res, disk_loader)
    else:
        # no API results (possibly too many matches): try the local names index
        aur_index = self.aur_client.read_local_index()

        if aur_index:
            self.logger.info("Querying through the local AUR index")
            to_query = set()

            for norm_name, real_name in aur_index.items():
                if words in norm_name:
                    to_query.add(real_name)

                    if len(to_query) == 25:  # cap the fallback query size
                        break

            pkgsinfo = self.aur_client.get_info(to_query)

            if pkgsinfo:
                installed_reader.join()

                for pkgdata in pkgsinfo:
                    self._upgrade_search_result(pkgdata, installed_pkgs, downgrade_enabled, res, disk_loader)

    res.total = len(res.installed) + len(res.new)
    return res
def read_installed(self, disk_loader: Optional[DiskCacheLoader], pkg_types: Optional[Set[Type[SoftwarePackage]]], internet_available: bool, limit: int = -1, only_apps: bool = False, names: Optional[Iterable[str]] = None) -> SearchResult:
    """Read the installed packages through aptitude.

    The manager configuration and the ignored-updates list are loaded in
    background threads and joined lazily when the first package is processed.

    :param disk_loader: unused by this implementation.
    :param pkg_types: unused by this implementation.
    :param internet_available: unused by this implementation.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param names: unused by this implementation.
    :return: a SearchResult with only 'installed' filled ('total' stays 0).
    """
    config_data = dict()
    config_worker = Thread(target=self._fill_config, args=(config_data,))
    config_worker.start()

    ignored_names = set()
    ignored_worker = Thread(target=self._fill_ignored_updates, args=(ignored_names,))
    ignored_worker.start()

    background_workers = (config_worker, ignored_worker)

    result = SearchResult(installed=[], new=None, total=0)

    for pkg in self.aptitude.read_installed():
        # make sure both async loads finished before binding their data
        for worker in background_workers:
            if worker.is_alive():
                worker.join()

        pkg.bind_app(self.apps_index.get(pkg.name))
        pkg.global_purge = bool(config_data.get('remove.purge', False))
        pkg.updates_ignored = bool(ignored_names and pkg.name in ignored_names)
        result.installed.append(pkg)

    return result
def read_installed(self, disk_loader: DiskCacheLoader, limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, internet_available: bool = None) -> SearchResult:
    """Read the installed Flatpak applications and flag available updates.

    Updates are fetched in a background thread (only when internet is
    available) while the installed list is read.

    :param disk_loader: loader forwarded to the JSON -> model mapping.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param pkg_types: unused by this implementation.
    :param internet_available: controls whether updates are checked at all.
    :return: a SearchResult with only 'installed' filled.
    """
    version = flatpak.get_version()

    # '_add_updates' appends the update set as the single element of 'updates'
    updates = []

    if internet_available:
        thread_updates = Thread(target=self._add_updates, args=(version, updates))
        thread_updates.start()
    else:
        thread_updates = None

    installed = flatpak.list_installed(version)
    models = []

    if installed:
        if thread_updates:
            thread_updates.join()

        for app_json in installed:
            model = self._map_to_model(app_json=app_json, installed=True, disk_loader=disk_loader, internet=internet_available)

            # NOTE(review): 'version >= "1.5.0"' is a lexicographic string
            # comparison — e.g. '1.10.0' < '1.5.0'; confirm whether a proper
            # version parse is needed here
            if version >= '1.5.0':
                # update flag is None when no update data was fetched
                model.update = '{}/{}'.format(app_json['id'], app_json['branch']) in updates[0] if updates else None
            else:
                model.update = app_json['ref'] in updates[0] if updates else None

            models.append(model)

    return SearchResult(models, None, len(models))
def read_installed(self, disk_loader: DiskCacheLoader = None, limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, internet_available: bool = None) -> SearchResult:
    """Aggregate the installed packages from every working manager.

    :param disk_loader: ignored — a fresh loader is lazily created here.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param pkg_types: when provided, only the managers mapped to these types
                      are queried; otherwise all managers are.
    :param internet_available: ignored — availability is checked here.
    :return: the merged result, sorted by lower-cased package name.
    """
    ti = time.time()
    self._wait_to_be_ready()

    res = SearchResult([], None, 0)
    disk_loader = None
    net_available = self.context.is_internet_available()

    if not pkg_types:  # any type
        for man in self.managers:
            if self._can_work(man):
                # the disk loader is only created once a working manager is found
                if not disk_loader:
                    disk_loader = self.disk_loader_factory.new()
                    disk_loader.start()

                mti = time.time()
                man_res = man.read_installed(disk_loader=disk_loader, pkg_types=None, internet_available=net_available)
                mtf = time.time()
                self.logger.info(man.__class__.__name__ + " took {0:.2f} seconds".format(mtf - mti))

                res.installed.extend(man_res.installed)
                res.total += man_res.total
    else:
        # several types can map to the same manager: query each manager once
        man_already_used = []

        for t in pkg_types:
            man = self.map.get(t)

            if man and (man not in man_already_used) and self._can_work(man):
                if not disk_loader:
                    disk_loader = self.disk_loader_factory.new()
                    disk_loader.start()

                mti = time.time()
                man_res = man.read_installed(disk_loader=disk_loader, pkg_types=None, internet_available=net_available)
                mtf = time.time()
                self.logger.info(man.__class__.__name__ + " took {0:.2f} seconds".format(mtf - mti))

                res.installed.extend(man_res.installed)
                res.total += man_res.total

    if disk_loader:
        disk_loader.stop_working()
        disk_loader.join()

    if res.installed:
        # tag packages whose updates are ignored via a pseudo-category
        for p in res.installed:
            if p.is_update_ignored():
                if p.categories is None:
                    p.categories = ['updates_ignored']
                elif 'updates_ignored' not in p.categories:
                    p.categories.append('updates_ignored')

        res.installed.sort(key=self._get_package_lower_name)

    tf = time.time()
    self.logger.info('Took {0:.2f} seconds'.format(tf - ti))
    return res
def search(self, words: str, disk_loader: DiskCacheLoader, limit: int = -1) -> SearchResult:
    """Search AUR packages matching 'words'.

    Queries the AUR RPC API first; when it returns nothing (e.g. too many
    matches), falls back to the local names index (up to 25 names).

    :param words: the query string.
    :param disk_loader: loader forwarded to the result mapping.
    :param limit: unused by this implementation.
    :return: matches split between 'installed' and 'new'.
    """
    self.comp_optimizer.join()

    downgrade_enabled = git.is_enabled()
    res = SearchResult([], [], 0)

    # read the installed packages concurrently with the API call
    installed = {}
    read_installed = Thread(target=lambda: installed.update(pacman.list_and_map_installed()))
    read_installed.start()

    api_res = self.aur_client.search(words)

    if api_res and api_res.get('results'):
        read_installed.join()

        for pkgdata in api_res['results']:
            self._upgrade_search_result(pkgdata, installed, downgrade_enabled, res, disk_loader)
    else:
        # if there are no results from the API (it could be because there were too many), tries the names index:
        if self.names_index:
            to_query = set()

            for norm_name, real_name in self.names_index.items():
                if words in norm_name:
                    to_query.add(real_name)

                    if len(to_query) == 25:  # cap the fallback query size
                        break

            pkgsinfo = self.aur_client.get_info(to_query)

            if pkgsinfo:
                read_installed.join()

                for pkgdata in pkgsinfo:
                    # fix: pass the same arguments as the API branch —
                    # 'downgrade_enabled' and 'disk_loader' were missing
                    self._upgrade_search_result(pkgdata, installed, downgrade_enabled, res, disk_loader)

    res.total = len(res.installed) + len(res.new)
    return res
def read_installed(self, disk_loader: DiskCacheLoader, limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, internet_available: bool = None) -> SearchResult:
    """Read the installed snaps.

    :param disk_loader: loader forwarded to the JSON -> model mapping.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param pkg_types: unused by this implementation.
    :param internet_available: forwarded to the JSON -> model mapping.
    :return: a SearchResult with only 'installed' filled; empty when snapd
             is not running.
    """
    # guard clause: nothing can be read without the snapd daemon
    if not snap.is_snapd_running():
        return SearchResult([], None, 0)

    # the category data must be fully downloaded before mapping
    self.categories_downloader.join()

    pkgs = []
    for app_json in snap.read_installed(self.ubuntu_distro):
        pkgs.append(self.map_json(app_json, installed=True, disk_loader=disk_loader, internet=internet_available))

    return SearchResult(pkgs, None, len(pkgs))
def read_installed(self, disk_loader: DiskCacheLoader, limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, internet_available: bool = None) -> SearchResult:
    """Read the installed snaps.

    :param disk_loader: loader forwarded to the JSON -> model mapping.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param pkg_types: unused by this implementation.
    :param internet_available: forwarded to the JSON -> model mapping.
    :return: a SearchResult with only 'installed' filled; empty when snapd
             is not running or the info path is unavailable.
    """
    info_path = self.get_info_path()

    # guard clause: requires both the snapd daemon and a valid info path
    if not (snap.is_snapd_running() and info_path):
        return SearchResult([], None, 0)

    pkgs = []
    for app_json in snap.read_installed(info_path):
        pkgs.append(self.map_json(app_json, installed=True, disk_loader=disk_loader, internet=internet_available))

    return SearchResult(pkgs, None, len(pkgs))
def read_installed(self, disk_loader: DiskCacheLoader = None, limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, net_check: bool = None) -> SearchResult:
    """Aggregate the installed packages from every working manager.

    Internet availability is checked in a background thread while the
    managers are iterated.

    :param disk_loader: ignored — a fresh loader is lazily created here.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param pkg_types: when provided, only the managers mapped to these types
                      are queried; otherwise all managers are.
    :param net_check: ignored — rebound to the internal check result dict.
    :return: the merged result from all queried managers.
    """
    ti = time.time()
    self._wait_to_be_ready()

    net_check = {}
    thread_internet_check = self._get_internet_check(net_check)

    res = SearchResult([], None, 0)
    disk_loader = None

    if not pkg_types:  # any type
        for man in self.managers:
            if self._can_work(man):
                # the disk loader is only created once a working manager is found
                if not disk_loader:
                    disk_loader = self.disk_loader_factory.new()
                    disk_loader.start()

                # fix: Thread.isAlive() was removed in Python 3.9 -> is_alive()
                if thread_internet_check.is_alive():
                    thread_internet_check.join()

                mti = time.time()
                man_res = man.read_installed(disk_loader=disk_loader, pkg_types=None, internet_available=net_check['available'])
                mtf = time.time()
                self.logger.info(man.__class__.__name__ + " took {0:.2f} seconds".format(mtf - mti))

                res.installed.extend(man_res.installed)
                res.total += man_res.total
    else:
        # several types can map to the same manager: query each manager once
        man_already_used = []

        for t in pkg_types:
            man = self.map.get(t)

            if man and (man not in man_already_used) and self._can_work(man):
                if not disk_loader:
                    disk_loader = self.disk_loader_factory.new()
                    disk_loader.start()

                thread_internet_check.join()

                mti = time.time()
                man_res = man.read_installed(disk_loader=disk_loader, pkg_types=None, internet_available=net_check['available'])
                mtf = time.time()
                self.logger.info(man.__class__.__name__ + " took {0:.2f} seconds".format(mtf - mti))

                res.installed.extend(man_res.installed)
                res.total += man_res.total

    if disk_loader:
        disk_loader.stop_working()
        disk_loader.join()

    tf = time.time()
    self.logger.info('Took {0:.2f} seconds'.format(tf - ti))
    return res
def read_installed(self, disk_loader: DiskCacheLoader, limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, internet_available: bool = None) -> SearchResult:
    """Read all installed snaps through the snapd socket API.

    :param disk_loader: loader forwarded to the JSON -> model mapping.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param pkg_types: unused by this implementation.
    :param internet_available: unused by this implementation.
    :return: a SearchResult with only 'installed' filled; empty when
             snap/snapd is unavailable.
    """
    # guard clause: requires snap to be installed and snapd to be running
    if not (snap.is_installed() and snapd.is_running()):
        return SearchResult([], None, 0)

    client = SnapdClient(self.logger)

    # snaps that expose applications (vs. pure runtimes/libraries)
    app_names = {a['snap'] for a in client.list_only_apps()}

    pkgs = []
    for appjson in client.list_all_snaps():
        pkgs.append(self._map_to_app(app_json=appjson,
                                     installed=True,
                                     disk_loader=disk_loader,
                                     is_application=app_names and appjson['name'] in app_names))

    return SearchResult(pkgs, None, len(pkgs))
def read_installed(self, disk_loader: DiskCacheLoader, limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, internet_available: bool = None) -> SearchResult:
    """Read the installed Flatpak applications and flag full/partial updates.

    Updates are fetched in a background thread (only when internet is
    available) while the installed list is read. On Flatpak >= 1.4 the
    update data distinguishes 'full' and 'partial' updates; partial updates
    generate extra partial models.

    :param disk_loader: loader forwarded to the JSON -> model mapping.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param pkg_types: unused by this implementation.
    :param internet_available: controls whether updates are checked at all.
    :return: a SearchResult with only 'installed' filled.
    """
    version = flatpak.get_version()

    # '_add_updates' appends the update map as the single element of 'updates'
    updates = []

    if internet_available:
        thread_updates = Thread(target=self._add_updates, args=(version, updates))
        thread_updates.start()
    else:
        thread_updates = None

    installed = flatpak.list_installed(version)
    models = []

    if installed:
        update_map = None

        if thread_updates:
            thread_updates.join()
            update_map = updates[0]

        for app_json in installed:
            model = self._map_to_model(app_json=app_json, installed=True, disk_loader=disk_loader, internet=internet_available)
            model.update = None
            models.append(model)

            if update_map and (update_map['full'] or update_map['partial']):
                # NOTE(review): 'version >= "1.4.0"' is a lexicographic string
                # comparison — e.g. '1.10.0' < '1.4.0'; confirm whether a
                # proper version parse is needed here
                if version >= '1.4.0':
                    # update ids have the form 'id/branch/installation'
                    update_id = '{}/{}/{}'.format(app_json['id'], app_json['branch'], app_json['installation'])

                    if update_map['full'] and update_id in update_map['full']:
                        model.update = True

                    if update_map['partial']:
                        for partial in update_map['partial']:
                            partial_data = partial.split('/')

                            # a partial update belongs to this model when its id is
                            # contained in the partial's id and branch/installation match
                            if app_json['id'] in partial_data[0] and\
                               app_json['branch'] == partial_data[1] and\
                               app_json['installation'] == partial_data[2]:
                                partial_model = model.gen_partial(partial.split('/')[0])
                                partial_model.update = True
                                models.append(partial_model)
                else:
                    # older Flatpak: update ids have the form 'installation/ref'
                    model.update = '{}/{}'.format(app_json['installation'], app_json['ref']) in update_map['full']

    if models:
        # mark models whose updates the user chose to ignore
        ignored = self._read_ignored_updates()

        if ignored:
            for model in models:
                if model.get_update_ignore_key() in ignored:
                    model.updates_ignored = True

    return SearchResult(models, None, len(models))
def read_installed(self, disk_loader: DiskCacheLoader, limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, internet_available: bool = None) -> SearchResult:
    """Read the installed AUR packages (pacman's 'not signed' set).

    :param disk_loader: loader forwarded to the package filling routine.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param pkg_types: unused by this implementation.
    :param internet_available: forwarded to the package filling routine.
    :return: a SearchResult with only 'installed' filled.
    """
    pkgs = []
    pacman_output = pacman.list_and_map_installed()

    if pacman_output and pacman_output['not_signed']:
        # the disk cache must be fully updated before filling the packages
        self.dcache_updater.join()
        self._fill_aur_pkgs(pacman_output['not_signed'], pkgs, disk_loader, internet_available)

    return SearchResult(pkgs, None, len(pkgs))
def read_installed(self, disk_loader: DiskCacheLoader, limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, internet_available: bool = True) -> SearchResult:
    """Read the installed web applications from their data files on disk.

    :param disk_loader: unused by this implementation.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param pkg_types: unused by this implementation.
    :param internet_available: unused by this implementation.
    :return: a SearchResult with only 'installed' filled.
    """
    res = SearchResult([], [], 0)

    if os.path.exists(INSTALLED_PATH):
        # each installed web app keeps a '*data.yml' inside its own directory
        for data_path in glob.glob('{}/*/*data.yml'.format(INSTALLED_PATH)):
            with open(data_path, 'r') as f:
                app_data = yaml.safe_load(f.read())

            res.installed.append(WebApplication(installed=True, **app_data))
            res.total += 1

    return res
def search(self, words: str, disk_loader: DiskCacheLoader = None, limit: int = -1, is_url: bool = False) -> SearchResult:
    """Fan the query out to every registered manager in parallel.

    The query is sanitized and lower-cased; whether it looks like a URL is
    detected here and forwarded to each manager.

    :param words: the raw query string.
    :param disk_loader: ignored — a fresh loader is created here.
    :param limit: unused by this implementation.
    :param is_url: ignored — recomputed from the sanitized query.
    :raises NoInternetException: when no internet connection is available.
    :return: the aggregated (unsorted) result from all managers.
    """
    ti = time.time()
    self._wait_to_be_ready()

    res = SearchResult.empty()

    # guard clause: searching requires the network
    if not self.context.is_internet_available():
        raise NoInternetException()

    norm_query = sanitize_command_input(words).lower()
    self.logger.info(f"Search query: {norm_query}")

    if norm_query:
        is_url = bool(RE_IS_URL.match(norm_query))

        disk_loader = self.disk_loader_factory.new()
        disk_loader.start()

        # one worker thread per manager, all writing into the shared 'res'
        workers = []
        for man in self.managers:
            worker = Thread(target=self._search, args=(norm_query, is_url, man, disk_loader, res))
            worker.start()
            workers.append(worker)

        for worker in workers:
            worker.join()

        if disk_loader:
            disk_loader.stop_working()
            disk_loader.join()

        # results are intentionally returned unsorted

    res.update_total()

    tf = time.time()
    self.logger.info(f'Took {tf - ti:.8f} seconds')
    return res
def search(self, words: str, disk_loader: Optional[DiskCacheLoader], limit: int, is_url: bool) -> SearchResult:
    """Search packages through aptitude.

    The manager configuration is loaded in a background thread and joined
    lazily when the first result is processed.

    :param words: the query string forwarded to aptitude.
    :param disk_loader: unused by this implementation.
    :param limit: unused by this implementation.
    :param is_url: URL queries are not supported -> empty result.
    :return: matches split between 'installed' and 'new'.
    """
    config_data = dict()
    config_worker = Thread(target=self._fill_config, args=(config_data,))
    config_worker.start()

    res = SearchResult.empty()

    if is_url:
        return res

    for pkg in self.aptitude.search(words):
        # make sure the async config load finished before reading it
        if config_worker.is_alive():
            config_worker.join()

        pkg.global_purge = bool(config_data.get('remove.purge', False))

        if pkg.installed:
            pkg.bind_app(self.apps_index.get(pkg.name))
            res.installed.append(pkg)
        else:
            res.new.append(pkg)

    return res
def search(self, words: str, disk_loader: DiskCacheLoader = None, limit: int = -1, is_url: bool = False) -> SearchResult:
    """Fan the query out to every registered manager in parallel.

    :param words: the raw query; it is stripped and lower-cased before use.
    :param disk_loader: ignored — a fresh loader is always created here.
    :param limit: unused by this implementation.
    :param is_url: ignored — recomputed from the normalized query.
    :raises NoInternetException: when no internet connection is available.
    :return: the aggregated, sorted result from all managers.
    """
    ti = time.time()
    self._wait_to_be_ready()

    res = SearchResult.empty()

    # guard clause: searching requires the network
    if not self.context.is_internet_available():
        raise NoInternetException()

    norm_word = words.strip().lower()
    url_words = RE_IS_URL.match(norm_word)

    disk_loader = self.disk_loader_factory.new()
    disk_loader.start()

    # one worker thread per manager, all writing into the shared 'res'
    workers = []
    for man in self.managers:
        worker = Thread(target=self._search, args=(norm_word, url_words, man, disk_loader, res))
        worker.start()
        workers.append(worker)

    for worker in workers:
        worker.join()

    if disk_loader:
        disk_loader.stop_working()
        disk_loader.join()

    res.installed = self._sort(res.installed, norm_word)
    res.new = self._sort(res.new, norm_word)

    res.update_total()

    tf = time.time()
    self.logger.info('Took {0:.8f} seconds'.format(tf - ti))
    return res
def search(self, words: str, disk_loader: DiskCacheLoader, limit: int = -1, is_url: bool = False) -> SearchResult:
    """Search web applications by URL or by name.

    URL queries scrape the target page (name/description/icon) unless an
    installed app already matches the URL. Name queries match installed apps
    and the local suggestions index/cache.

    :param words: the query string (a URL when 'is_url' is True).
    :param disk_loader: forwarded to read_installed.
    :param limit: forwarded to read_installed.
    :param is_url: whether 'words' should be treated as a URL.
    :return: matches split between 'installed' and 'new'.
    """
    # environment settings (electron version, ...) are loaded asynchronously
    local_config = {}
    thread_config = Thread(target=self._fill_config_async, args=(local_config, ))
    thread_config.start()

    res = SearchResult([], [], 0)
    installed = self.read_installed(disk_loader=disk_loader, limit=limit).installed

    if is_url:
        url = words[0:-1] if words.endswith('/') else words  # strip trailing slash
        url_no_protocol = self._strip_url_protocol(url)

        # protocol-agnostic comparison against the installed apps' URLs
        installed_matches = [app for app in installed if self._strip_url_protocol(app.url) == url_no_protocol]

        if installed_matches:
            res.installed.extend(installed_matches)
        else:
            # not installed: scrape the page to build a new WebApplication
            soup_map = self._map_url(url)

            if soup_map:
                soup, response = soup_map[0], soup_map[1]

                final_url = response.url  # the URL after redirects

                if final_url.endswith('/'):
                    final_url = final_url[0:-1]

                name = self._get_app_name(url_no_protocol, soup)
                desc = self._get_app_description(final_url, soup)
                icon_url = self._get_app_icon_url(final_url, soup)

                app = WebApplication(url=final_url, name=name, description=desc, icon_url=icon_url)

                if self.env_settings.get('electron') and self.env_settings['electron'].get('version'):
                    app.version = self.env_settings['electron']['version']
                    app.latest_version = app.version

                res.new = [app]
    else:
        lower_words = words.lower().strip()
        installed_matches = [app for app in installed if lower_words in app.name.lower()]

        index = self._read_search_index()

        if index:
            split_words = lower_words.split(' ')
            # NOTE(review): ''.join(lower_words) re-joins the characters of the
            # string (a no-op copy) — presumably ''.join(split_words) was
            # intended to build the space-less variant; verify
            singleword = ''.join(lower_words)
            query_list = [*split_words, singleword]

            # collect every suggestion key matched by any query variant
            index_match_keys = set()

            for key in index:
                for query in query_list:
                    if query in key:
                        index_match_keys.update(index[key])

            if not index_match_keys:
                self.logger.info("Query '{}' was not found in the suggestion's index".format(words))
                res.installed.extend(installed_matches)
            else:
                if not os.path.exists(SUGGESTIONS_CACHE_FILE):
                    # if the suggestions cache was not found, it will not be possible to retrieve the matched apps
                    # so only the installed matches will be returned
                    self.logger.warning("Suggestion cached file {} was not found".format(SUGGESTIONS_CACHE_FILE))
                    res.installed.extend(installed_matches)
                else:
                    with open(SUGGESTIONS_CACHE_FILE) as f:
                        cached_suggestions = yaml.safe_load(f.read())

                        if not cached_suggestions:
                            # if no suggestion is found, it will not be possible to retrieve the matched apps
                            # so only the installed matches will be returned
                            self.logger.warning("No suggestion found in {}".format(SUGGESTIONS_CACHE_FILE))
                            res.installed.extend(installed_matches)
                        else:
                            matched_suggestions = [cached_suggestions[key] for key in index_match_keys if cached_suggestions.get(key)]

                            if not matched_suggestions:
                                self.logger.warning("No suggestion found for the search index keys: {}".format(index_match_keys))
                                res.installed.extend(installed_matches)
                            else:
                                matched_suggestions.sort(key=lambda s: s.get('priority', 0), reverse=True)

                                if installed_matches:
                                    # checking if any of the installed matches is one of the matched suggestions
                                    for sug in matched_suggestions:
                                        found = [i for i in installed_matches if i.url == sug.get('url')]

                                        if found:
                                            res.installed.extend(found)
                                        else:
                                            res.new.append(self._map_suggestion(sug).package)
                                else:
                                    for sug in matched_suggestions:
                                        res.new.append(self._map_suggestion(sug).package)

    res.total += len(res.installed)
    res.total += len(res.new)

    if res.new:
        # stamp the electron version from the async-loaded config on new apps
        thread_config.join()

        if local_config['environment']['electron']['version']:
            for app in res.new:
                app.version = str(local_config['environment']['electron']['version'])
                app.latest_version = app.version

    return res
def test_search__must_return_empty_result_when_url(self, read_installed: Mock):
    """URL queries must short-circuit: no installed read, empty result."""
    query = 'i'

    result = self.controller.search(words=query, disk_loader=None, limit=-1, is_url=True)

    read_installed.assert_not_called()
    self.assertEqual(SearchResult.empty(), result)
def read_installed(self, disk_loader: Optional[DiskCacheLoader], limit: int = -1, only_apps: bool = False, pkg_types: Set[Type[SoftwarePackage]] = None, internet_available: bool = None, wait_async_data: bool = False) -> SearchResult:
    """Read the installed Flatpak applications, updates and required runtimes.

    Updates and (on Flatpak >= 1.12) required runtime updates are fetched in
    background threads while the installed list is read. Updates that do not
    match any installed model generate extra "update component" models.

    :param disk_loader: loader forwarded to the JSON -> model mapping.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param pkg_types: unused by this implementation.
    :param internet_available: controls whether updates are checked at all.
    :param wait_async_data: when True, waits for every async data loader
                            before returning.
    :return: a SearchResult with only 'installed' filled.
    """
    version = flatpak.get_version()

    updates, required_runtimes = list(), dict()
    thread_updates, thread_runtimes = None, None

    if internet_available:
        thread_updates = Thread(target=self._add_updates, args=(version, updates))
        thread_updates.start()

        if version >= VERSION_1_12:
            thread_runtimes = Thread(target=self._fill_required_runtime_updates, args=(required_runtimes,))
            thread_runtimes.start()

    installed = flatpak.list_installed(version)

    # '_add_updates' appends the update map as the single element of 'updates'
    update_map = None
    if thread_updates:
        thread_updates.join()
        update_map = updates[0]

    # keyed by update id so updates can be matched back to their models
    models = {}
    data_loaders: Optional[List[FlatpakAsyncDataLoader]] = [] if wait_async_data else None

    if installed:
        for app_json in installed:
            model, loader = self._map_to_model(app_json=app_json, installed=True, disk_loader=disk_loader, internet=internet_available)
            model.update = False
            models[model.get_update_id(version)] = model

            if loader and data_loaders is not None:
                data_loaders.append(loader)

    if update_map:
        for update_id in update_map['full']:
            model_with_update = models.get(update_id)

            if model_with_update:
                model_with_update.update = True
            else:  # it is a new component that must be installed
                # update_id format: 'id/branch/installation[/origin]'
                update_id_split = update_id.split('/')
                new_app = FlatpakApplication(id=update_id_split[0],
                                             branch=update_id_split[1],
                                             installation=update_id_split[2],
                                             name=update_id_split[0].split('.')[-1].strip(),
                                             version=update_id_split[1],
                                             arch='x86_64' if self.context.is_system_x86_64() else 'x86',
                                             origin=update_id_split[3] if len(update_id_split) == 4 else None)
                new_app.update_component = True  # mark as "update component"
                new_app.installed = True  # faking the "installed" status to be displayed as an update
                new_app.update = True
                new_app.update_ref()
                models[update_id] = new_app

        if version >= VERSION_1_2:
            for partial_update_id in update_map['partial']:
                partial_data = partial_update_id.split('/')

                for model in models.values():
                    if model.installation == partial_data[2] and model.branch == partial_data[1]:
                        if model.id == partial_data[0]:
                            model.update = True
                            break
                        elif model.id in partial_data[0]:
                            partial_model = model.gen_partial(partial_data[0])
                            partial_model.update = True
                            # safe only because of the immediate break below:
                            # inserting while iterating 'models.values()' would
                            # otherwise raise at the next iteration step
                            models[partial_update_id] = partial_model
                            break

    if thread_runtimes:
        thread_runtimes.join()

        if required_runtimes:
            for installation in ('system', 'user'):
                installation_runtimes = required_runtimes.get(installation)

                if installation_runtimes:
                    for ref, origin in installation_runtimes:
                        ref_split = ref.split('/')
                        # NOTE(review): the key f'{installation}.' does not include
                        # the ref, so several required runtimes of the same
                        # installation overwrite each other — verify intended
                        models[f'{installation}.'] = FlatpakApplication(id=ref_split[1],
                                                                        ref=ref,
                                                                        origin=origin,
                                                                        name=ref_split[1],
                                                                        version=ref_split[-1],
                                                                        latest_version=ref_split[-1],
                                                                        runtime=True,
                                                                        installation=installation,
                                                                        installed=False,
                                                                        update_component=True,
                                                                        update=True)

    if models:
        # mark models whose updates the user chose to ignore
        ignored = self._read_ignored_updates()

        if ignored:
            for model in models.values():
                if model.get_update_ignore_key() in ignored:
                    model.updates_ignored = True

    if data_loaders:
        for loader in data_loaders:
            loader.join()

    return SearchResult([*models.values()], None, len(models))
def read_installed(self, disk_loader: Optional[DiskCacheLoader], limit: int = -1, only_apps: bool = False, pkg_types: Optional[Set[Type[SoftwarePackage]]] = None, internet_available: bool = None, connection: sqlite3.Connection = None) -> SearchResult:
    """Read the AppImages installed on disk and check updates against the database.

    Each installed AppImage has a 'data.json' under its own directory inside
    INSTALLATION_DIR. The database is then queried by name to determine if a
    newer version is available ('continuous' builds are handled specially).

    :param disk_loader: unused by this implementation.
    :param limit: unused by this implementation.
    :param only_apps: unused by this implementation.
    :param pkg_types: unused by this implementation.
    :param internet_available: unused by this implementation.
    :param connection: optional open database connection; when provided it is
                       NOT closed by this method.
    :return: a SearchResult with only 'installed' filled.
    """
    installed_apps = []
    res = SearchResult(installed_apps, [], 0)

    if os.path.exists(INSTALLATION_DIR):
        installed = glob.glob(f'{INSTALLATION_DIR}/*/data.json')

        if installed:
            names = set()

            for path in installed:
                if path:
                    with open(path) as f:
                        app = AppImage(installed=True, i18n=self.i18n, **json.loads(f.read()))
                    app.icon_url = app.icon_path
                    installed_apps.append(app)
                    # quoted lower-cased names used to build the SQL 'IN' clause
                    names.add(f"'{app.name.lower()}'")

            if installed_apps:
                # reuse the caller's connection when given, otherwise open one
                apps_con = self._get_db_connection(DATABASE_APPS_FILE) if not connection else connection

                if apps_con:
                    try:
                        cursor = apps_con.cursor()
                        cursor.execute(query.FIND_APPS_BY_NAME.format(','.join(names)))

                        # tup: (name, github, latest_version, download_url)
                        for tup in cursor.fetchall():
                            for app in installed_apps:
                                if app.name.lower() == tup[0].lower() and (not app.github or app.github.lower() == tup[1].lower()):
                                    # 'continuous' builds cannot be compared numerically
                                    continuous_version = app.version == 'continuous'
                                    continuous_update = tup[2] == 'continuous'

                                    if tup[3]:  # only flag an update when there is a download URL
                                        if continuous_version and not continuous_update:
                                            app.update = True
                                        elif continuous_update and not continuous_version:
                                            app.update = False
                                        else:
                                            try:
                                                app.update = parse_version(tup[2]) > parse_version(app.version) if tup[2] else False
                                            except:
                                                # unparseable version: assume no update
                                                app.update = False
                                                traceback.print_exc()

                                        if app.update:
                                            app.latest_version = tup[2]
                                            app.url_download_latest_version = tup[3]

                                    break
                    except:
                        self.logger.error(f"An exception happened while querying the database file '{DATABASE_APPS_FILE}'")
                        traceback.print_exc()
                    finally:
                        if not connection:  # the connection can only be closed if it was opened within this method
                            apps_con.close()

            # mark apps whose updates the user chose to ignore
            ignored_updates = self._read_ignored_updates()

            if ignored_updates:
                for app in installed_apps:
                    if app.supports_ignored_updates() and app.name in ignored_updates:
                        app.updates_ignored = True

    res.total = len(res.installed)
    return res
def search(self, words: str, disk_loader: DiskCacheLoader, limit: int = -1, is_url: bool = False) -> SearchResult:
    """Search the AppImage 'apps' database by name or description.

    Already-installed matches are moved from 'new' to 'installed'.

    :param words: the query string.
    :param disk_loader: loader used to fill cached data of installed apps.
    :param limit: forwarded to read_installed (-1 means no limit).
    :param is_url: URL queries are not supported -> empty result.
    :return: the search result.
    """
    if is_url:
        return SearchResult.empty()

    apps_conn = self._get_db_connection(DATABASE_APPS_FILE)

    if not apps_conn:
        return SearchResult.empty()

    not_installed, found_map = [], {}

    try:
        cursor = apps_conn.cursor()
        cursor.execute(
            query.SEARCH_APPS_BY_NAME_OR_DESCRIPTION.format(words, words))

        for idx, r in enumerate(cursor.fetchall()):
            app = AppImage(*r, i18n=self.i18n, custom_actions=self.custom_app_actions)
            not_installed.append(app)
            found_map[self._gen_app_key(app)] = {'app': app, 'idx': idx}
    except Exception:
        self.logger.error(
            "An exception happened while querying the 'apps' database")
        traceback.print_exc()
        apps_conn.close()
        return SearchResult.empty()

    installed_found = []

    if not_installed:
        installed = self.read_installed(disk_loader=disk_loader, limit=limit, only_apps=False,
                                        pkg_types=None, connection=apps_conn,
                                        internet_available=True).installed

        if installed:
            # collect matched keys and filter 'not_installed' afterwards:
            # deleting by the pre-recorded index while looping shifts the
            # remaining indexes after the first deletion and removes the
            # wrong elements
            matched_keys = set()

            for appim in installed:
                key = self._gen_app_key(appim)

                if key in found_map:
                    matched_keys.add(key)
                    installed_found.append(appim)

            if matched_keys:
                not_installed = [app for app in not_installed if self._gen_app_key(app) not in matched_keys]

    try:
        apps_conn.close()
    except Exception:
        self.logger.error(
            "An exception happened when trying to close the connection to database file '{}'"
            .format(DATABASE_APPS_FILE))
        traceback.print_exc()

    return SearchResult(new=not_installed, installed=installed_found,
                        total=len(not_installed) + len(installed_found))