def should_update(self) -> bool:
    """Decide whether the cached AUR index must be regenerated.

    Returns True when AUR support is enabled and the on-disk index is
    missing, has no timestamp, or is older than the configured expiration
    ('aur_idx_exp', in hours). Returns False when AUR is not supported.
    On any parsing problem the error is printed and True is returned so a
    fresh index is built (fail-safe behavior).
    """
    if not aur.is_supported(self.config):
        return False

    try:
        exp_hours = int(self.config['aur_idx_exp'])
    except (KeyError, TypeError, ValueError):
        # missing or malformed expiration setting -> force an update
        # (narrowed from a bare 'except:' that also trapped SystemExit/KeyboardInterrupt)
        traceback.print_exc()
        return True

    if exp_hours <= 0:  # non-positive expiration means "always update"
        return True

    if not os.path.exists(AUR_INDEX_FILE):
        return True

    if not os.path.exists(AUR_INDEX_TS_FILE):
        return True

    with open(AUR_INDEX_TS_FILE) as f:
        timestamp_str = f.read()

    try:
        index_timestamp = datetime.fromtimestamp(float(timestamp_str))
        # NOTE(review): fromtimestamp() yields naive *local* time while
        # utcnow() is naive UTC — this only lines up if the timestamp file
        # is written accordingly or the host clock is UTC. Confirm against
        # the code that writes AUR_INDEX_TS_FILE before changing.
        return (index_timestamp + timedelta(hours=exp_hours)) <= datetime.utcnow()
    except (ValueError, OverflowError, OSError):
        # unparsable or out-of-range timestamp -> force an update
        traceback.print_exc()
        return True
def run(self):
    """Apply (or remove) the AUR compilation optimizations.

    Waits for the configuration-creation task, then either runs
    self.optimize() when 'optimize' is enabled and AUR is supported, or
    removes the custom makepkg.conf and marks the task as disabled.
    Progress is reported through self.taskman for self.task_id.
    """
    ti = time.time()

    if self.create_config:
        self.taskman.update_progress(self.task_id, 0,
                                     self.i18n['task.waiting_task'].format(bold(self.create_config.task_name)))
        self.create_config.join()

    # NOTE(review): self.create_config is guarded above but dereferenced
    # unconditionally below — confirm it can never be None at this point.
    self.taskman.update_progress(self.task_id, 1, None)

    if self.create_config.config['optimize'] and aur.is_supported(self.create_config.config):
        try:
            self.optimize()
        except Exception:  # narrowed from bare 'except:' so Ctrl+C/SystemExit still propagate
            self.logger.error("Unexpected exception")
            traceback.print_exc()
            self.taskman.update_progress(self.task_id, 100, None)
    else:
        self.logger.info("AUR packages compilation optimizations are disabled")

        if os.path.exists(CUSTOM_MAKEPKG_FILE):
            try:
                self.logger.info("Removing custom 'makepkg.conf' -> '{}'".format(CUSTOM_MAKEPKG_FILE))
                os.remove(CUSTOM_MAKEPKG_FILE)
            except Exception:  # best-effort removal: log and continue
                self.logger.error("Unexpected exception")
                traceback.print_exc()

        self.taskman.update_progress(self.task_id, 100, self.i18n['arch.task.disabled'])

    tf = time.time()
    self.taskman.finish_task(self.task_id)
    self.logger.info('Finished. Took {0:.2f} seconds'.format(tf - ti))
def run(self) -> None:
    """Synchronize the pacman package databases ('pacman -Syy').

    Waits for the configuration and mirror-refresh tasks, skips the sync
    when disabled or already up to date, and otherwise streams the pacman
    output to the task manager while estimating progress from the number
    of databases being downloaded. Sets self.synchronized on success.
    """
    self.taskman.update_progress(self.task_id, 0,
                                 self.i18n['task.waiting_task'].format(bold(self.create_config.task_name)))
    self.create_config.join()

    self.taskman.update_progress(self.task_id, 0,
                                 self.i18n['task.waiting_task'].format(bold(self.refresh_mirrors.task_name)))
    self.refresh_mirrors.join()

    self.taskman.update_progress(self.task_id, 1, self.i18n['arch.task.checking_settings'])

    arch_config = self.create_config.config
    aur_supported = aur.is_supported(arch_config)

    if not self.is_enabled(arch_config, aur_supported):
        self.taskman.update_progress(self.task_id, 100, self.i18n['arch.task.disabled'])
        self.taskman.finish_task(self.task_id)
        return

    # renamed from 'shoud_sync' (typo); a fresh mirror refresh always forces a sync
    should_sync = self.refresh_mirrors.refreshed or database.should_sync(arch_config, aur_supported, None, self.logger)

    if not should_sync:
        self.taskman.update_progress(self.task_id, 100, self.i18n['arch.sync_databases.substatus.synchronized'])
        self.taskman.finish_task(self.task_id)
        self.synchronized = True
        return

    self.logger.info("Synchronizing databases")
    self.taskman.register_task(self.task_id, self.i18n['arch.sync_databases.substatus'], get_icon_path())

    progress = 10
    dbs = pacman.get_databases()
    self.taskman.update_progress(self.task_id, progress, None)

    if dbs:
        inc = 90 / len(dbs)  # progress share per database
        try:
            p = new_root_subprocess(['pacman', '-Syy'], self.root_password)

            dbs_read, last_db = 0, None

            for o in p.stdout:
                line = o.decode().strip()

                if line:
                    # BUGFIX: was 'self.task_man' (AttributeError at runtime);
                    # every other call in this method uses 'self.taskman'
                    self.taskman.update_output(self.task_id, line)

                    if line.startswith('downloading'):
                        db = line.split(' ')[1].strip()

                        if last_db is None or last_db != db:
                            # a new database started downloading: jump to its share
                            last_db = db
                            dbs_read += 1
                            progress = dbs_read * inc
                        else:
                            # same database still downloading: creep forward
                            progress += 0.25

                        self.taskman.update_progress(self.task_id, progress,
                                                     self.i18n['arch.task.sync_sb.status'].format(db))

            for o in p.stderr:
                line = o.decode().strip()

                if line:
                    # BUGFIX: was 'self.task_man' here as well
                    self.taskman.update_output(self.task_id, line)

            p.wait()

            if p.returncode == 0:
                database.register_sync(self.logger)
                self.synchronized = True
            else:
                self.logger.error("Could not synchronize database")
        except Exception:  # narrowed from bare 'except:'
            self.logger.info("Error while synchronizing databases")
            traceback.print_exc()

    self.taskman.update_progress(self.task_id, 100, None)
    self.taskman.finish_task(self.task_id)
    self.logger.info("Finished")
def run(self):
    """Refresh (and optionally speed-sort) the pacman mirror list.

    Waits for the configuration task, exits early when the feature is
    disabled or the mirror cache is still fresh, then refreshes the
    mirrors as root. When 'mirrors_sort_limit' is a non-negative number,
    the mirrors are additionally sorted by speed (best effort). Sets
    self.refreshed on success and reports progress via self.taskman.
    """
    ti = time.time()
    self.taskman.update_progress(self.task_id, 0,
                                 self.i18n['task.waiting_task'].format(bold(self.create_config.task_name)))
    self.create_config.join()

    arch_config = self.create_config.config
    aur_supported = aur.is_supported(arch_config)

    self.taskman.update_progress(self.task_id, 1, self.i18n['arch.task.checking_settings'])

    if not self.is_enabled(arch_config, aur_supported):
        self.taskman.update_progress(self.task_id, 100, self.i18n['arch.task.disabled'])
        self.taskman.finish_task(self.task_id)
        return

    if not mirrors.should_sync(self.logger):
        self.taskman.update_progress(self.task_id, 100, self.i18n['arch.task.mirrors.cached'])
        self.taskman.finish_task(self.task_id)
        return

    sort_limit = arch_config['mirrors_sort_limit']
    self.logger.info("Refreshing mirrors")

    handler = ProcessHandler()
    try:
        self.taskman.update_progress(self.task_id, 10, '')
        # second tuple element (process output) is unused here
        success, _ = handler.handle_simple(pacman.refresh_mirrors(self.root_password),
                                           output_handler=self._notify_output)

        if success:
            if sort_limit is not None and sort_limit >= 0:
                self.taskman.update_progress(self.task_id, 50,
                                             self.i18n['arch.custom_action.refresh_mirrors.status.updating'])
                try:
                    handler.handle_simple(pacman.sort_fastest_mirrors(self.root_password, sort_limit),
                                          output_handler=self._notify_output)
                except Exception:  # narrowed from bare 'except:'; sorting is best effort
                    self.logger.error("Could not sort mirrors by speed")
                    traceback.print_exc()

            mirrors.register_sync(self.logger)
            self.refreshed = True
        else:
            self.logger.error("It was not possible to refresh mirrors")
    except Exception:  # narrowed from bare 'except:'
        self.logger.error("It was not possible to refresh mirrors")
        traceback.print_exc()

    self.taskman.update_progress(self.task_id, 100, None)
    self.taskman.finish_task(self.task_id)
    tf = time.time()
    self.logger.info("Finished. Took {0:.2f} seconds".format(tf - ti))
def run(self):
    """Pre-cache installed Arch package data to disk.

    Waits for the configuration task, determines which installed packages
    have no cache directory yet, optionally waits for the AUR indexer,
    reads the missing packages through the controller and writes their
    data to disk. Progress is reported via self.taskman / self._update_progress.
    """
    ti = time.time()
    self.taskman.update_progress(self.task_id, 0,
                                 # NOTE(review): unlike sibling tasks, the task name
                                 # is not wrapped in bold() here — confirm if intentional
                                 self.i18n['task.waiting_task'].format(self.create_config.task_name))
    self.create_config.join()

    config = self.create_config.config
    aur_supported, repositories = aur.is_supported(config), config['repositories']

    self.taskman.update_progress(self.task_id, 1, None)

    if not any([aur_supported, repositories]):
        self.taskman.update_progress(self.task_id, 100, self.i18n['arch.task.disabled'])
        self.taskman.finish_task(self.task_id)
        return

    self.logger.info("Checking already cached package data")
    self._update_progress(1, self.i18n['arch.task.disk_cache.checking'])

    cache_dirs = [fpath for fpath in glob.glob('{}/*'.format(self.installed_cache_dir))
                  if os.path.isdir(fpath)]

    not_cached_names = None  # None -> no cache yet: read_installed() loads everything

    self._update_progress(15, self.i18n['arch.task.disk_cache.checking'])

    if cache_dirs:  # if there are cache data
        installed_names = pacman.list_installed_names()
        # basename is clearer and safer than splitting on '/'
        cached_pkgs = {os.path.basename(cache_dir) for cache_dir in cache_dirs}
        not_cached_names = installed_names.difference(cached_pkgs)

        self._update_progress(20, self.i18n['arch.task.disk_cache.checking'])

        if not not_cached_names:  # every installed package already cached
            self.taskman.update_progress(self.task_id, 100, '')
            self.taskman.finish_task(self.task_id)
            tf = time.time()
            time_msg = '{0:.2f} seconds'.format(tf - ti)
            self.logger.info('Finished: no package data to cache ({})'.format(time_msg))
            return

    self.logger.info('Pre-caching installed Arch packages data to disk')

    if aur_supported and self.aur_indexer:
        # the AUR index must be ready before AUR package data can be cached
        self.taskman.update_progress(self.task_id, 20,
                                     self.i18n['arch.task.disk_cache.waiting_aur_index'].format(
                                         bold(self.i18n['arch.task.aur.index.status'])))
        self.aur_indexer.join()

    self._update_progress(21, self.i18n['arch.task.disk_cache.checking'])
    installed = self.controller.read_installed(disk_loader=None,
                                               internet_available=self.internet_available,
                                               only_apps=False,
                                               pkg_types=None,
                                               limit=-1,
                                               names=not_cached_names,
                                               wait_disk_cache=False).installed

    self._update_progress(35, self.i18n['arch.task.disk_cache.checking'])

    # only packages from enabled sources that have no disk cache yet
    pkgs = {p.name: p for p in installed
            if ((aur_supported and p.repository == 'aur') or (repositories and p.repository != 'aur'))
            and not os.path.exists(p.get_disk_cache_path())}

    self.to_index = len(pkgs)

    # overwrite == True because the verification already happened
    self._update_progress(40, self.i18n['arch.task.disk_cache.reading_files'])
    # direct assignment: the previous 'saved = 0; saved += ...' accumulator was pointless
    saved = disk.write_several(pkgs=pkgs,
                               after_desktop_files=self._notify_reading_files,
                               after_written=self.update_indexed,
                               overwrite=True)
    self.taskman.update_progress(self.task_id, 100, None)
    self.taskman.finish_task(self.task_id)

    tf = time.time()
    time_msg = '{0:.2f} seconds'.format(tf - ti)
    self.logger.info('Finished: pre-cached data of {} Arch packages to the disk ({})'.format(saved, time_msg))