def _download_callback(self, document_list):
    """
    Callback called by download_<something>() once data is arrived
    from web service. document_list can be None!
    """
    # has_more stays 0 when no data (or no further pages) is available
    has_more = 0
    if document_list is not None:
        has_more = document_list.has_more()
        # stash more data? advance the offset so the next external
        # download() call fetches the following page
        if has_more:
            self._offset += len(document_list)
    if const_debug_enabled():
        const_debug_write(
            __name__,
            "MetadataDownloader._download_callback: %s, more: %s" % (
                document_list, has_more))
        if document_list is not None:
            const_debug_write(
                __name__,
                "MetadataDownloader._download_callback: "
                "has_more: %s, offset: %s" % (
                    document_list.has_more(), document_list.offset()))
    self._callback(self, self._app, document_list, has_more)
def _approve_edelta_unlocked(self, url, checksum, installed_url,
                             installed_checksum, installed_download_path):
    """
    Approve Entropy package delta support for given url, checking if
    a previously fetched package is available.

    @return: edelta URL to download and previously downloaded package path
        or None if edelta is not available
    @rtype: tuple of strings or None
    """
    try:
        approved = entropy.tools.compare_md5(
            installed_download_path, installed_checksum)
    except (OSError, IOError) as err:
        # local package file is missing or unreadable: no delta possible
        const_debug_write(
            __name__,
            "_approve_edelta_unlocked, error: %s" % (err,))
        return
    if not approved:
        return

    # the delta file name embeds both checksums, making it unique per pair
    hash_tag = installed_checksum + checksum
    edelta_file_name = entropy.tools.generate_entropy_delta_file_name(
        os.path.basename(installed_url),
        os.path.basename(url),
        hash_tag)
    return os.path.join(
        os.path.dirname(url),
        etpConst['packagesdeltasubdir'],
        edelta_file_name)
def _pkg_get_search(self):
    """
    Run the registered search callback and map its matches to package
    items, pre-loading every other package group first.

    @return: list of package items already known to the cache; matches
        that would create brand new items are skipped (and logged)
    """
    if self._search_callback is None:
        return []
    func, arg = self._search_callback
    if const.debug:
        t1 = time.time()
        const_debug_write(__name__, "_pkg_get_search: begin")
    matches = func(arg)
    if const.debug:
        const_debug_write(__name__, "_pkg_get_search: end in %s" % (
            time.time() - t1,))
    # load pkgs: warm up every group except "search" itself so that
    # get_package_item() below can resolve matches from cache
    for key in self._get_calls_dict().keys():
        if key == "search":
            # myself
            continue
        self.get_raw_groups(key)
    pkgs = []
    for match in matches:
        yp, new = self.get_package_item(match)
        if new:
            # wtf! a match that was not seen while warming the groups;
            # log loudly and skip it
            sys.stderr.write("WTF! %s is new %s\n" % (match, yp,))
            sys.stderr.flush()
            continue
        pkgs.append(yp)
    return pkgs
def _download_callback(self, document_list):
    """
    Callback called by download_<something>() once data is arrived
    from web service. document_list can be None!
    """
    got_data = document_list is not None
    has_more = document_list.has_more() if got_data else 0
    # stash more data? bump the offset so that the next (externally
    # triggered) download() call requests the following page
    if got_data and has_more:
        self._offset += len(document_list)
    if const_debug_enabled():
        const_debug_write(
            __name__,
            "MetadataDownloader._download_callback: %s, more: %s" % (
                document_list, has_more))
        if got_data:
            const_debug_write(
                __name__,
                "MetadataDownloader._download_callback: "
                "has_more: %s, offset: %s" % (
                    document_list.has_more(), document_list.offset()))
    self._callback(self, self._app, document_list, has_more)
def download(self):
    """
    Start downloading URL given at construction time.

    @return: download status, which can be either one of:
        UrlFetcher.GENERIC_FETCH_ERROR means error.
        UrlFetcher.TIMEOUT_FETCH_ERROR means timeout error.
        UrlFetcher.GENERIC_FETCH_WARN means warning, downloaded fine
        but unable to calculate the md5 hash.
        Otherwise returns md5 hash.
    @rtype: string
    @todo: improve return data
    """
    # pick the protocol-specific downloader registered for this URL
    protocol = UrlFetcher._get_url_protocol(self.__url)
    downloader = self.__supported_uris.get(protocol)
    const_debug_write(__name__,
        "UrlFetcher.download(%s), save: %s, checksum: %s, resume: %s, "
        "show speed: %s, abort func: %s, thread stop func: %s, "
        "disallow redir: %s, speed limit: %s, timeout: %s" % (
            self.__url, self.__path_to_save, self.__checksum,
            self.__resume, self.__show_speed, self.__abort_check_func,
            self.__thread_stop_func, self.__disallow_redirect,
            self.__speedlimit, self.__timeout)
    )
    if downloader is None:
        # return error, protocol not supported
        self._update_speed()
        self.__status = UrlFetcher.GENERIC_FETCH_ERROR
        return self.__status
    self._init_vars()
    status = downloader()
    if self.__show_speed:
        # final UI refresh with the last computed statistics
        self.update()
    return status
def _show_license(self, uri, license_apps):
    """
    Show selected License to User.

    Writes the license text to a temporary .txt file and opens it with
    the desktop URL handler (xdg-open); the temp file is intentionally
    not removed because the opener is asynchronous.

    @param uri: license identifier to look up in the repositories
    @param license_apps: applications the license applies to
    """
    tmp_fd, tmp_path = None, None
    try:
        license_text = None
        # get the first repo with valid license text
        repos = set([x.get_details().channelname for \
            x in license_apps])
        if not repos:
            return
        with self._entropy.rwsem().reader():
            for repo_id in repos:
                repo = self._entropy.open_repository(repo_id)
                license_text = repo.retrieveLicenseText(uri)
                if license_text is not None:
                    break
        if license_text is not None:
            tmp_fd, tmp_path = const_mkstemp(suffix=".txt")
            # decode using the configured encoding first, fall back to
            # a best-effort default conversion
            try:
                license_text = const_convert_to_unicode(
                    license_text, enctype=etpConst['conf_encoding'])
            except UnicodeDecodeError:
                license_text = const_convert_to_unicode(
                    license_text)
            with entropy.tools.codecs_fdopen(
                tmp_fd, "w", etpConst['conf_encoding']) as tmp_f:
                tmp_f.write("License: %s\n" % (
                    uri,))
                apps = self._licenses.get(uri, [])
                if apps:
                    tmp_f.write("Applications:\n")
                for app in apps:
                    tmp_f.write("\t%s\n" % (app.name,))
                if apps:
                    tmp_f.write("\n")
                tmp_f.write("-" * 79 + "\n")
                tmp_f.write(license_text)
                tmp_f.flush()
        else:
            const_debug_write(
                __name__,
                "LicensesNotificationBox._show_license: "
                "not available"
            )
    finally:
        if tmp_fd is not None:
            try:
                os.close(tmp_fd)
            except OSError:
                pass
        # leaks, but xdg-open is async
        if tmp_path is not None:
            open_url(tmp_path)
def rsync_stats_extractor(output_line):
    """
    Parse one rsync progress line (bytes, percent, speed, ETA) and
    update the fetcher's transfer statistics/UI accordingly.
    """
    const_debug_write(__name__, "rsync_stats_extractor(%s): %s" % (
        self.__th_id, output_line,))
    data = output_line.split()
    if len(data) != 4:
        # it's just garbage here
        self._update_speed()
        return
    bytes_read, pct, speed_kbsec, eta = data
    # tolerate malformed numeric fields, defaulting to 0
    try:
        bytes_read = int(bytes_read)
    except ValueError:
        bytes_read = 0
    try:
        average = int(pct.strip("%"))
    except ValueError:
        average = 0
    # update progress info
    # _rsync_commit
    self.__downloadedsize = bytes_read
    if average > 100:
        average = 100
    self.__average = average
    self._update_speed()
    if self.__show_speed:
        self.handle_statistics(self.__th_id, self.__downloadedsize,
            self.__remotesize, self.__average, self.__oldaverage,
            self.__updatestep, self.__show_speed, self.__datatransfer,
            self.__time_remaining, self.__time_remaining_secs
        )
        self.update()
    self.__oldaverage = self.__average
def pop(self, *args, **kwargs):
    """Logged list.pop(): record the call, then delegate to list."""
    message = "%s pop called: %s, %s" % (self, args, kwargs,)
    const_debug_write(__name__, message)
    return list.pop(self, *args, **kwargs)
def _change_view_state(self, state, lock=False, _ignore_lock=False,
                       payload=None):
    """
    Change Rigo Application UI state.
    You can pass a custom widget that will be shown in case
    of static view state.

    @param state: target UI state identifier
    @keyword lock: if True, lock the UI in the new state
    @keyword _ignore_lock: internal, bypass an existing state lock
    @keyword payload: optional state payload (unused here)
    @return: True if the state was changed, False if the UI is locked
    @raise AttributeError: if state is not a known view state
    """
    with self._state_mutex:
        if self._current_state_lock and not _ignore_lock:
            const_debug_write(__name__,
                "cannot change view state, UI locked")
            return False
        txc = self._state_transitions.get(state)
        if txc is None:
            raise AttributeError("wrong view state")
        enter_st, exit_st = txc
        current_enter_st, current_exit_st = \
            self._state_transitions.get(
                self._current_state)
        # exit from current state
        current_exit_st()
        # enter the new state
        enter_st()
        self._current_state = state
        if lock:
            self._current_state_lock = True
        # reflect the state in the window title
        state_meta = self._state_metadata[state]
        self._window.set_title(escape_markup(state_meta["title"]))
        return True
def _change_view_state(self, state, lock=False, _ignore_lock=False,
                       payload=None):
    """
    Change Rigo Application UI state.
    You can pass a custom widget that will be shown in case
    of static view state.

    @param state: target UI state identifier
    @keyword lock: if True, keep the UI locked in the new state
    @keyword _ignore_lock: internal, bypass an existing state lock
    @keyword payload: optional state payload (unused here)
    @return: True on success, False when the UI is locked
    @raise AttributeError: if state is not a known view state
    """
    with self._state_mutex:
        if self._current_state_lock and not _ignore_lock:
            const_debug_write(
                __name__,
                "cannot change view state, UI locked")
            return False
        txc = self._state_transitions.get(state)
        if txc is None:
            raise AttributeError("wrong view state")
        enter_st, exit_st = txc
        current_enter_st, current_exit_st = \
            self._state_transitions.get(
                self._current_state)
        # exit from current state
        current_exit_st()
        # enter the new state
        enter_st()
        self._current_state = state
        if lock:
            self._current_state_lock = True
        # reflect the new state in the window title
        state_meta = self._state_metadata[state]
        self._window.set_title(escape_markup(
            state_meta["title"]))
        return True
def merge(self, source):
    """
    Merge proposed source configuration file. "source" must be a key
    of this dictionary, if not, True is returned. If file pointed at
    source doesn't exist or merge fails, False is returned.

    @param source: proposed configuration file path (dictionary key)
    @return: True on success or nothing-to-do, False on merge failure
    """
    obj = self.pop(source, None)
    if obj is None:
        return True
    root = ConfigurationFiles.root()
    source_file = root + source
    dest_file = root + obj['destination']
    # preserve a backup of the destination before overwriting it
    self._backup(dest_file)
    source_file = self._encode_path(source_file)
    dest_file = self._encode_path(dest_file)
    try:
        rename_keep_permissions(source_file, dest_file)
    except OSError as err:
        const_debug_write(
            __name__, "merge, OSError: "
            "%s, locals: %s" % (repr(err), locals()))
        return False
    return True
def applet_doubleclick(self):
    """
    Handle a double-click on the applet icon, showing the notice
    window only when the applet is in a settled state.
    """
    # idiom fix: "not self.current_state in [...]" -> "not in";
    # tuple instead of throwaway list
    if self.current_state not in ("OKAY", "ERROR", "CRITICAL"):
        const_debug_write(
            "applet_doubleclick",
            "not ready to show notice window: %s." % self.current_state)
        return
    self.trigger_notice_window()
def _backup(self, dest_path):
    """
    Execute a backup of the given path if User enabled
    the feature through Entropy Client configuration.

    The backup is written next to the original file as
    "._entropy_backup.<N>_<basename>", picking the first free N.

    @param dest_path: path of the file to back up
    """
    client_settings = self._entropy.ClientSettings()
    files_backup = client_settings["misc"]["filesbackup"]
    if not files_backup:
        return
    dest_path = self._encode_path(dest_path)
    if not os.path.isfile(dest_path):
        return
    backup_pfx = self._encode_path("._entropy_backup.")
    sep = self._encode_path("_")
    dirname, basename = os.path.split(dest_path)
    bcount = 0
    bcount_str = self._encode_path("%d" % (bcount,))
    backup_path = os.path.join(dirname,
        backup_pfx + bcount_str + sep + basename)
    # probe for the first non-existing backup slot
    while os.path.lexists(backup_path):
        bcount += 1
        bcount_str = self._encode_path("%d" % (bcount,))
        backup_path = os.path.join(dirname,
            backup_pfx + bcount_str + sep + basename)
    # I don't know if copy2 likes bytes()
    # time will tell!
    try:
        shutil.copy2(dest_path, backup_path)
    except OSError as err:
        const_debug_write(__name__, "_backup, OSError: "
            "%s, locals: %s" % (repr(err), locals()))
    except IOError as err:
        const_debug_write(__name__, "_backup, IOError: "
            "%s, locals: %s" % (repr(err), locals()))
def merge(self, source):
    """
    Merge proposed source configuration file. "source" must be a key
    of this dictionary, if not, True is returned. If file pointed at
    source doesn't exist or merge fails, False is returned.

    @param source: proposed configuration file path (dictionary key)
    @return: True on success or nothing-to-do, False on merge failure
    """
    obj = self.pop(source, None)
    if obj is None:
        return True
    root = ConfigurationFiles.root()
    source_file = root + source
    dest_file = root + obj['destination']
    # take a backup of the destination before it gets replaced
    self._backup(dest_file)
    source_file = self._encode_path(source_file)
    dest_file = self._encode_path(dest_file)
    try:
        rename_keep_permissions(
            source_file, dest_file)
    except OSError as err:
        const_debug_write(
            __name__, "merge, OSError: "
            "%s, locals: %s" % (
                repr(err), locals()))
        return False
    return True
def authenticate_sync(self, pid, action_id):
    """
    Authenticate current User asking Administrator passwords.
    Return True if authenticated, False if not.
    """
    authority = Polkit.Authority.get()
    subject = Polkit.UnixProcess.new(pid)
    result = authority.check_authorization_sync(
        subject, action_id, None,
        Polkit.CheckAuthorizationFlags.ALLOW_USER_INTERACTION,
        None)
    authenticated = False
    try:
        # a successful challenge counts as authenticated as well
        if result.get_is_authorized() or result.get_is_challenge():
            authenticated = True
    except GObject.GError as err:
        const_debug_write(
            __name__,
            "_polkit_auth_callback: error: %s" % (err,))
    return authenticated
def __delslice__(self, i, j):
    """Logged list.__delslice__() (Python 2 only): record, then delegate."""
    message = "%s __delslice__ called: %s|%s" % (self, i, j,)
    const_debug_write(__name__, message)
    return list.__delslice__(self, i, j)
def _approve_edelta_unlocked(self, url, checksum, installed_url,
                             installed_checksum, installed_download_path):
    """
    Approve Entropy package delta support for given url, checking if
    a previously fetched package is available.

    @return: edelta URL to download and previously downloaded package
        path or None if edelta is not available
    @rtype: tuple of strings or None
    """
    # the locally available package must match its recorded md5,
    # otherwise a delta cannot be applied on top of it
    edelta_local_approved = False
    try:
        edelta_local_approved = entropy.tools.compare_md5(
            installed_download_path, installed_checksum)
    except (OSError, IOError) as err:
        const_debug_write(
            __name__,
            "_approve_edelta_unlocked, error: %s" % (err,))
        return
    if not edelta_local_approved:
        return

    edelta_file_name = entropy.tools.generate_entropy_delta_file_name(
        os.path.basename(installed_url),
        os.path.basename(url),
        installed_checksum + checksum)
    return os.path.join(
        os.path.dirname(url),
        etpConst['packagesdeltasubdir'],
        edelta_file_name)
def upgrade(self, simulate=False):
    """
    Launch a System Upgrade activity.

    @keyword simulate: if True, ask the daemon to only simulate
    """
    const_debug_write(__name__, "upgrade")
    self._service.upgrade_system(simulate=simulate)
    const_debug_write(__name__, "upgrade: upgrade_system() sent")
def lock(self, remote_path):
    """
    Try to acquire an exclusive remote lock by atomically creating
    remote_path on the server via a remote flock-guarded shell snippet.

    @param remote_path: relative path to lock (validated, see below)
    @return: True if remote_path was created (lock acquired)
    @raise ValueError: if remote_path does not match valid_lock_path
    """
    # we trust dir but not remote_path, because we do
    # shell code below.
    reg = EntropySshUriHandler.valid_lock_path
    if not reg.match(remote_path):
        raise ValueError("illegal lock path")
    remote_ptr = os.path.join(self.__dir, remote_path)
    remote_ptr_lock = os.path.join(
        self.__dir,
        os.path.dirname(remote_path),
        "." + os.path.basename(remote_path))
    remote_ptr_lock += ".lock"
    const_debug_write(__name__, "lock(): remote_ptr: %s, lock: %s" % (
        remote_ptr, remote_ptr_lock,))
    args, remote_str = self._setup_fs_args()
    # flock(1) holds fd 9 on the .lock file; remote_ptr is only created
    # when the flock is obtained and the target does not already exist
    lock_cmd = '( flock -x -n 9; if [ "${?}" != "0" ]; ' + \
        'then echo -n "FAIL"; else if [ -f ' + remote_ptr + ' ]; then ' + \
        'echo -n "FAIL"; else touch ' + remote_ptr + ' && ' + \
        'rm ' + remote_ptr_lock + ' && echo -n "OK"; fi; fi ) 9> ' \
        + remote_ptr_lock
    args += [remote_str, lock_cmd]
    exec_rc, output, error = self._exec_cmd(args)
    const_debug_write(__name__,
        "lock(), outcome: lock: %s, rc: %s, out: %s, err: %s" % (
            remote_ptr_lock, exec_rc, output, error,))
    return output == "OK"
def _parse_progress_line(self, line):
    """
    Parse one progress line from the transfer tool and render a
    colorized status line; unparseable lines are echoed as-is.
    Expected fields: name, percent, size, speed, ETA.
    """
    line_data = line.strip().split()
    if len(line_data) < 5:
        const_debug_write(__name__,
            "_parse_progress_line: cannot parse: %s" % (line_data,))
        # mmh... not possible to properly parse data
        self.output(line.strip(), back = True)
        return
    const_debug_write(__name__,
        "_parse_progress_line: parsing: %s" % (line_data,))
    file_name = line_data[0]
    percent = line_data[1]
    tx_speed = line_data[3]
    tx_size = line_data[2]
    eta = line_data[4]
    # create text
    mytxt = _("Transfer status")
    current_txt = "<-> (%s) %s: " % (teal(file_name), brown(mytxt),) + \
        darkgreen(tx_size) + " " + \
        brown("[") + str(percent) + brown("]") + \
        " " + eta + " " + tx_speed
    self.output(current_txt, back = True, header = " ")
def insert(self, pos, item):
    """Logged list.insert(): record the call, then delegate to list."""
    message = "%s insert called: pos:%s => %s" % (self, pos, item,)
    const_debug_write(__name__, message)
    return list.insert(self, pos, item)
def __setitem__(self, key, value):
    """Logged list.__setitem__(): record the call, then delegate."""
    message = "%s __setitem__ called: %s => %s" % (self, key, value,)
    const_debug_write(__name__, message)
    return list.__setitem__(self, key, value)
def _first_check(self):
    """Schedule the initial updates check shortly after startup."""

    def _do_check():
        # one-shot timer callback: returning False stops rescheduling
        self.send_check_updates_signal(startup_check = True)
        return False

    if not self._dbus_service_available:
        return
    const_debug_write("_first_check", "spawning check.")
    QTimer.singleShot(10000, _do_check)
def __fork_cmd(self, args, environ, update_output_callback):
    """
    Fork args inside a pty, feeding '\\r'-terminated output chunks to
    update_output_callback, and return the child exit status.

    @param args: command argument vector
    @param environ: environment for the child process
    @param update_output_callback: called with each output chunk
    @return: child return code (1 if no pty could be allocated)
    """
    def _line_reader(std_r):
        # accumulate characters until '\r', then hand them to the callback
        read_buf = ""
        try:
            char = std_r.read(1)
            while char:
                if (char == "\r") and read_buf:
                    update_output_callback(read_buf)
                    read_buf = ""
                elif (char != "\r"):
                    read_buf += char
                char = std_r.read(1)
        except IOError:
            return

    try:
        pid, fd = pty.fork()
    except OSError as err:
        const_debug_write(__name__, "__fork_cmd(%s): status: %s" % (
            args, err,))
        # out of pty devices
        return 1

    if pid == 0:
        proc = subprocess.Popen(args, env = environ)
        rc = proc.wait()
        os._exit(rc)
    elif pid == -1:
        raise SystemError("cannot forkpty()")
    else:
        dead = False
        return_code = 1
        # BUGFIX: was "srd_r = None" (typo) while the finally clause
        # read the undefined name "std_err" -> NameError at cleanup
        std_r = None
        try:
            std_r = os.fdopen(fd, "r")
            while not dead:
                try:
                    dead, return_code = os.waitpid(pid, os.WNOHANG)
                except OSError as e:
                    if e.errno != errno.ECHILD:
                        raise
                    dead = True
                # wait a bit
                time.sleep(0.2)
                _line_reader(std_r)
                if self.__abort_check_func != None:
                    self.__abort_check_func()
                if self.__thread_stop_func != None:
                    self.__thread_stop_func()
        finally:
            if std_r is not None:
                std_r.close()
        return return_code
def get_raw_groups(self, flt):
    """Return the package group for *flt*, generating it on demand."""
    debug = const.debug
    if debug:
        t1 = time.time()
    self.populate_single_group(flt)
    if debug:
        const_debug_write(
            __name__,
            "get_raw_groups: generated group content for %s in %s" % (
                flt, time.time() - t1,))
    return self._packages[flt]
def __fork_cmd(self, args, environ, update_output_callback):
    """
    Fork args inside a pty, feeding '\\r'-terminated output chunks to
    update_output_callback, and return the child exit status.

    @param args: command argument vector
    @param environ: environment for the child process
    @param update_output_callback: called with each output chunk
    @return: child return code (1 if no pty could be allocated)
    """
    def _line_reader(std_r):
        # accumulate characters until '\r', then hand them to the callback
        read_buf = ""
        try:
            char = std_r.read(1)
            while char:
                if (char == "\r") and read_buf:
                    update_output_callback(read_buf)
                    read_buf = ""
                elif (char != "\r"):
                    read_buf += char
                char = std_r.read(1)
        except IOError:
            return

    try:
        pid, fd = pty.fork()
    except OSError as err:
        const_debug_write(__name__, "__fork_cmd(%s): status: %s" % (
            args, err,))
        # out of pty devices
        return 1

    if pid == 0:
        proc = subprocess.Popen(args, env = environ)
        rc = proc.wait()
        os._exit(rc)
    elif pid == -1:
        raise SystemError("cannot forkpty()")
    else:
        dead = False
        return_code = 1
        # BUGFIX: was "srd_r = None" (typo); if os.fdopen() raised, the
        # finally clause referenced the then-unbound name std_r
        std_r = None
        try:
            std_r = os.fdopen(fd, "r")
            while not dead:
                try:
                    dead, return_code = os.waitpid(pid, os.WNOHANG)
                except OSError as e:
                    if e.errno != errno.ECHILD:
                        raise
                    dead = True
                # wait a bit
                time.sleep(0.2)
                _line_reader(std_r)
                if self.__abort_check_func != None:
                    self.__abort_check_func()
                if self.__thread_stop_func != None:
                    self.__thread_stop_func()
        finally:
            if std_r is not None:
                std_r.close()
        return return_code
def __setslice__(self, i, j, sequence):
    """Logged list.__setslice__() (Python 2 only): record, then delegate."""
    message = "%s __setslice__ called: i:%s,j:%s,seq:%s" % (
        self, i, j, sequence,)
    const_debug_write(__name__, message)
    return list.__setslice__(self, i, j, sequence)
def _show_license(self, uri, license_apps):
    """
    Show selected License to User.

    Dumps the license text into a temporary .txt file and opens it via
    the desktop handler; the temp file is deliberately not deleted
    because the opener is asynchronous.

    @param uri: license identifier to look up in the repositories
    @param license_apps: applications the license applies to
    """
    tmp_fd, tmp_path = None, None
    try:
        license_text = None
        # get the first repo with valid license text
        repos = set([x.get_details().channelname for \
            x in license_apps])
        if not repos:
            return
        with self._entropy.rwsem().reader():
            for repo_id in repos:
                repo = self._entropy.open_repository(repo_id)
                license_text = repo.retrieveLicenseText(uri)
                if license_text is not None:
                    break
        if license_text is not None:
            tmp_fd, tmp_path = const_mkstemp(suffix=".txt")
            # prefer the configured encoding, fall back to defaults
            try:
                license_text = const_convert_to_unicode(
                    license_text, enctype=etpConst['conf_encoding'])
            except UnicodeDecodeError:
                license_text = const_convert_to_unicode(license_text)
            with entropy.tools.codecs_fdopen(
                tmp_fd, "w", etpConst['conf_encoding']) as tmp_f:
                tmp_f.write("License: %s\n" % (uri,))
                apps = self._licenses.get(uri, [])
                if apps:
                    tmp_f.write("Applications:\n")
                for app in apps:
                    tmp_f.write("\t%s\n" % (app.name,))
                if apps:
                    tmp_f.write("\n")
                tmp_f.write("-" * 79 + "\n")
                tmp_f.write(license_text)
                tmp_f.flush()
        else:
            const_debug_write(
                __name__,
                "LicensesNotificationBox._show_license: "
                "not available")
    finally:
        if tmp_fd is not None:
            try:
                os.close(tmp_fd)
            except OSError:
                pass
        # leaks, but xdg-open is async
        if tmp_path is not None:
            open_url(tmp_path)
def upgrade(self, simulate=False):
    """Request a System Upgrade activity from the daemon service."""
    const_debug_write(__name__, "upgrade")
    service = self._service
    service.upgrade_system(simulate=simulate)
    const_debug_write(__name__, "upgrade: upgrade_system() sent")
def populate_single_group(self, mask, force = False):
    """
    Generate and cache the package group for *mask*, unless a valid
    cached copy already exists and force is False.
    """
    cached = (mask in self._packages and
        mask not in self._non_cached_groups)
    if cached and not force:
        return
    debug = const.debug
    if debug:
        t1 = time.time()
    self._packages[mask] = self._get_groups(mask)
    if debug:
        const_debug_write(__name__,
            "populate_single_group: generated group content for %s in %s" % (
                mask, time.time() - t1,))
def install_package(self, package_path, simulate=False):
    """
    Install Entropy Package file.

    @param package_path: path to the Entropy package file
    @keyword simulate: if True, only simulate the install
    """
    const_debug_write(__name__, "install_package: %s" % (package_path,))
    self._service.package_install_request(package_path, simulate=simulate)
    const_debug_write(
        __name__,
        "install_package: package_install_request() sent for: %s" % (
            package_path,))
def do_show():
    """Show a tray balloon message, if the platform supports it."""
    if not self._window.supportsMessages():
        const_debug_write("show_alert", "messages not supported.")
        return
    # map the urgency level onto the Qt balloon icon
    if urgency == "critical":
        icon_id = QSystemTrayIcon.Critical
    else:
        icon_id = QSystemTrayIcon.Information
    self._window.showMessage(title, text, icon_id)
    self.last_alert = (title, text)
def _shutdown_signal(self):
    """
    Discard RigoDaemon bus object if shutdown() arrived.
    """
    # keep the bus mutex held for good: the process is replaced below
    self.__entropy_bus_mutex.acquire()
    const_debug_write(__name__,
        "shutdown() arrived, reloading in 2 seconds")
    time.sleep(2)
    callback = self._unlock_callback
    if callback is not None:
        callback()
    # replace the current process image, this call never returns
    os.execvp("magneto", sys.argv)
def _shutdown_signal(self):
    """
    Discard RigoDaemon bus object if shutdown() arrived.
    """
    # the mutex is never released: execvp() replaces this process
    self.__entropy_bus_mutex.acquire()
    const_debug_write(
        __name__,
        "shutdown() arrived, reloading in 2 seconds")
    time.sleep(2)
    if self._unlock_callback is not None:
        self._unlock_callback()
    # re-exec ourselves, never returns
    os.execvp("magneto", sys.argv)
def download(self):
    """
    Start downloading URL given at construction time.

    @return: download status, which can be either one of:
        UrlFetcher.GENERIC_FETCH_ERROR means error.
        UrlFetcher.TIMEOUT_FETCH_ERROR means timeout error.
        UrlFetcher.GENERIC_FETCH_WARN means warning, downloaded fine
        but unable to calculate the md5 hash.
        Otherwise returns md5 hash.
    @rtype: string
    @todo: improve return data
    """
    # pick the protocol-specific downloader registered for this URL
    protocol = UrlFetcher._get_url_protocol(self.__url)
    downloader = self.__supported_uris.get(protocol)
    const_debug_write(
        __name__,
        "UrlFetcher.download(%s), save: %s, checksum: %s, resume: %s, "
        "show speed: %s, abort func: %s, thread stop func: %s, "
        "disallow redir: %s, speed limit: %s, timeout: %s, "
        "download context method: %s, pre download hook: %s, "
        "post download hook: %s" % (
            self.__url, self.__path_to_save, self.__checksum,
            self.__resume, self.__show_speed, self.__abort_check_func,
            self.__thread_stop_func, self.__disallow_redirect,
            self.__speedlimit, self.__timeout,
            self.__download_context_func,
            self.__pre_download_hook, self.__post_download_hook)
    )
    if downloader is None:
        # return error, protocol not supported
        self._update_speed()
        self.__status = UrlFetcher.GENERIC_FETCH_ERROR
        return self.__status
    self._init_vars()
    with self.__download_context_func(self.__path_to_save):
        if self.__pre_download_hook:
            # the pre hook may short-circuit the whole download by
            # returning a non-None status
            status = self.__pre_download_hook(
                self.__path_to_save, self.__th_id)
            if status is not None:
                return status
        status = downloader()
        if self.__show_speed:
            self.update()
        if self.__post_download_hook:
            # post hook is informational only, status is not altered
            self.__post_download_hook(
                self.__path_to_save, status, self.__th_id)
        return status
def _polkit_auth_callback(authority, res, loop):
    """Finish the async Polkit authorization check and report the result."""
    authenticated = False
    try:
        result = authority.check_authorization_finish(res)
        # an interactive challenge counts as authenticated too
        if result.get_is_authorized() or result.get_is_challenge():
            authenticated = True
    except GObject.GError as err:
        const_debug_write(__name__,
            "_polkit_auth_callback: error: %s" % (err,))
    finally:
        # always inform the caller, even on error
        authentication_callback(authenticated)
def _setup_differential_download_internal(self, tmp_download_path,
                                          download_path,
                                          installed_download_path):
    """
    _setup_differential_download() assuming that the installed
    packages repository lock is held.

    Copy the installed package file (data, ownership, timestamps) to
    tmp_download_path, then atomically rename it onto download_path.

    @return: True on success, False on any filesystem error
    """
    try:
        shutil.copyfile(installed_download_path, tmp_download_path)
    except (OSError, IOError, shutil.Error) as err:
        const_debug_write(
            __name__,
            "_setup_differential_download2(%s), %s copyfile error: %s" % (
                installed_download_path, tmp_download_path, err))
        return False

    try:
        st = os.stat(installed_download_path)
        # BUGFIX: chown the temporary file we just created, not
        # download_path: the latter only comes into existence with the
        # rename below, so chowning it either failed (ENOENT) or its
        # effect was immediately clobbered by the rename.
        os.chown(tmp_download_path, st[stat.ST_UID], st[stat.ST_GID])
    except (OSError, IOError) as err:
        const_debug_write(
            __name__,
            "_setup_differential_download2(%s), chown error: %s" % (
                installed_download_path, err))
        return False

    try:
        shutil.copystat(installed_download_path, tmp_download_path)
    except (OSError, IOError, shutil.Error) as err:
        const_debug_write(
            __name__,
            "_setup_differential_download2(%s), %s copystat error: %s" % (
                installed_download_path, tmp_download_path, err))
        return False

    try:
        os.rename(tmp_download_path, download_path)
    except (OSError, IOError) as err:
        const_debug_write(
            __name__,
            "_setup_differential_download2(%s), %s rename error: %s" % (
                installed_download_path, tmp_download_path, err))
        return False

    const_debug_write(
        __name__,
        "_setup_differential_download2(%s) copied to %s" % (
            installed_download_path, download_path))
    return True
def _run_post_update_repository_hook(self, repository_id):
    """
    Run the repository's post-update shell hook, if configured and
    readable.

    @param repository_id: identifier of the just-updated repository
    @return: hook exit status (0 also when no hook is configured/found)
    """
    my_repos = self._settings['repositories']
    branch = my_repos['branch']
    avail_data = my_repos['available']
    repo_data = avail_data[repository_id]
    post_update_script = repo_data['post_repo_update_script']
    if post_update_script is None:
        const_debug_write(__name__,
            "_run_post_update_repository_hook: not available")
        return 0
    if not const_file_readable(post_update_script):
        # not found!
        const_debug_write(__name__,
            "_run_post_update_repository_hook: not found")
        return 0
    args = ["/bin/sh", post_update_script, repository_id,
        etpConst['systemroot'] + os.path.sep, branch]
    const_debug_write(__name__,
        "_run_post_update_repository_hook: run: %s" % (args,))
    proc = subprocess.Popen(args, stdin = sys.stdin,
        stdout = sys.stdout, stderr = sys.stderr)
    # it is possible to ignore errors because
    # if it's a critical thing, upstream dev just have to fix
    # the script and will be automagically re-run
    br_rc = proc.wait()
    const_debug_write(__name__,
        "_run_post_update_repository_hook: rc: %s" % (br_rc,))
    return br_rc
def _setup_differential_download_internal(self, tmp_download_path,
                                          download_path,
                                          installed_download_path):
    """
    _setup_differential_download() assuming that the installed
    packages repository lock is held.

    Replicates the installed package file (content, ownership and
    timestamps) at tmp_download_path, then renames it to download_path.

    @return: True on success, False on any filesystem error
    """
    try:
        shutil.copyfile(installed_download_path, tmp_download_path)
    except (OSError, IOError, shutil.Error) as err:
        const_debug_write(
            __name__,
            "_setup_differential_download2(%s), %s copyfile error: %s"
            % (installed_download_path, tmp_download_path, err))
        return False

    try:
        st = os.stat(installed_download_path)
        # BUGFIX: the ownership must be applied to tmp_download_path,
        # not download_path, which does not exist until the rename
        # below (so the old chown either failed or was overwritten).
        os.chown(tmp_download_path, st[stat.ST_UID], st[stat.ST_GID])
    except (OSError, IOError) as err:
        const_debug_write(
            __name__,
            "_setup_differential_download2(%s), chown error: %s"
            % (installed_download_path, err))
        return False

    try:
        shutil.copystat(installed_download_path, tmp_download_path)
    except (OSError, IOError, shutil.Error) as err:
        const_debug_write(
            __name__,
            "_setup_differential_download2(%s), %s copystat error: %s"
            % (installed_download_path, tmp_download_path, err))
        return False

    try:
        os.rename(tmp_download_path, download_path)
    except (OSError, IOError) as err:
        const_debug_write(
            __name__,
            "_setup_differential_download2(%s), %s rename error: %s"
            % (installed_download_path, tmp_download_path, err))
        return False

    const_debug_write(
        __name__,
        "_setup_differential_download2(%s) copied to %s"
        % (installed_download_path, download_path))
    return True
def _polkit_auth_callback(authority, res, loop):
    """Completion callback for the async Polkit authorization check."""
    outcome = False
    try:
        result = authority.check_authorization_finish(res)
        if result.get_is_authorized():
            outcome = True
        elif result.get_is_challenge():
            # challenge passed: treat as authorized
            outcome = True
    except GObject.GError as err:
        const_debug_write(
            __name__,
            "_polkit_auth_callback: error: %s" % (err,))
    finally:
        # unconditionally report the outcome back
        authentication_callback(outcome)
def show_notice_window(self):
    """
    Build and show the notice window listing the available package
    updates, with a special warning when sys-apps/entropy itself is
    outdated.
    """
    if self.notice_window_shown:
        const_debug_write("show_notice_window",
            "Notice window already shown.")
        return
    if not self.package_updates:
        const_debug_write("show_notice_window", "No computed updates.")
        return

    entropy_ver = None
    packages = []
    for atom in self.package_updates:
        key = entropy.dep.dep_getkey(atom)
        avail_rev = entropy.dep.dep_get_entropy_revision(atom)
        avail_tag = entropy.dep.dep_gettag(atom)
        my_pkg = entropy.dep.remove_entropy_revision(atom)
        my_pkg = entropy.dep.remove_tag(my_pkg)
        pkgcat, pkgname, pkgver, pkgrev = entropy.dep.catpkgsplit(my_pkg)
        ver = pkgver
        if pkgrev != "r0":
            ver += "-%s" % (pkgrev,)
        if avail_tag:
            ver += "#%s" % (avail_tag,)
        if avail_rev:
            # BUGFIX: append the entropy revision here; the original
            # appended avail_tag again (copy/paste typo), leaving
            # avail_rev computed but unused
            ver += "~%s" % (avail_rev,)
        if key == "sys-apps/entropy":
            entropy_ver = ver
        packages.append("%s (%s)" % (key, ver,))

    critical_msg = ""
    if entropy_ver is not None:
        critical_msg = "%s <b>sys-apps/entropy</b> %s, %s <b>%s</b>. %s." % (
            _("Your system currently has an outdated version of"),
            _("installed"),
            _("the latest available version is"),
            entropy_ver,
            _("It is recommended that you upgrade to "
              "the latest before updating any other packages"))

    self._notice_window.populate(packages, critical_msg)
    self._notice_window.show()
    self.notice_window_shown = True
def _set_cached(self, cache_key, data):
    """
    Save a cache item to disk.

    @param cache_key: cache item identifier
    @param data: serializable payload to store
    @return: the cacher's save() result, or None if the write failed
    """
    with self._cache_dir_lock:
        try:
            return self._cacher.save(cache_key, data,
                cache_dir = WebService.CACHE_DIR)
        except IOError as err:
            # IOError is raised when cache cannot be written to disk
            if const_debug_enabled():
                const_debug_write(__name__,
                    "WebService._set_cached(%s) = cache store error: %s" % (
                        cache_key, repr(err),))
def logger(self):
    """
    Return the Entropy Client Logger instance.
    """
    with self._real_logger_lock:
        # lazy construction, guarded by the lock
        if self._real_logger is None:
            real_logger = LogFile(
                level = self._settings['system']['log_level'],
                filename = etpConst['entropylogfile'],
                header = "[client]")
            const_debug_write(__name__, "Logger loaded")
            self._real_logger = real_logger
        return self._real_logger
def logger(self):
    """Lazily create and return the Entropy Client Logger instance."""
    with self._real_logger_lock:
        if self._real_logger is not None:
            return self._real_logger
        # first access: build the log file handler
        real_logger = LogFile(
            level=self._settings['system']['log_level'],
            filename=etpConst['entropylogfile'],
            header="[client]")
        const_debug_write(__name__, "Logger loaded")
        self._real_logger = real_logger
        return self._real_logger
def _settings(self):
    """
    Return a SystemSettings object instance.
    """
    with self._real_settings_lock:
        # lazy construction, guarded by the lock
        if self._real_settings is None:
            self._real_settings = SystemSettings()
            const_debug_write(__name__, "SystemSettings loaded")
            # add our SystemSettings plugin
            # Make sure we connect Entropy Client plugin
            # AFTER client db init
            self._real_settings.add_plugin(self._settings_client_plugin)
        return self._real_settings
def show_notice_window(self):
    """
    Build and show the notice window listing the available package
    updates, warning specially when sys-apps/entropy is outdated.
    """
    if self.notice_window_shown:
        const_debug_write("show_notice_window",
            "Notice window already shown.")
        return
    if not self.package_updates:
        const_debug_write("show_notice_window", "No computed updates.")
        return

    entropy_ver = None
    packages = []
    for atom in self.package_updates:
        key = entropy.dep.dep_getkey(atom)
        avail_rev = entropy.dep.dep_get_entropy_revision(atom)
        avail_tag = entropy.dep.dep_gettag(atom)
        my_pkg = entropy.dep.remove_entropy_revision(atom)
        my_pkg = entropy.dep.remove_tag(my_pkg)
        pkgcat, pkgname, pkgver, pkgrev = entropy.dep.catpkgsplit(my_pkg)
        ver = pkgver
        if pkgrev != "r0":
            ver += "-%s" % (pkgrev,)
        if avail_tag:
            ver += "#%s" % (avail_tag,)
        if avail_rev:
            # BUGFIX: the revision, not the tag, belongs after "~"
            # (avail_rev was computed but never used before)
            ver += "~%s" % (avail_rev,)
        if key == "sys-apps/entropy":
            entropy_ver = ver
        packages.append("%s (%s)" % (key, ver,))

    critical_msg = ""
    if entropy_ver is not None:
        critical_msg = "%s <b>sys-apps/entropy</b> %s, %s <b>%s</b>. %s." % (
            _("Your system currently has an outdated version of"),
            _("installed"),
            _("the latest available version is"),
            entropy_ver,
            _("It is recommended that you upgrade to "
              "the latest before updating any other packages")
        )

    self._notice_window.populate(packages, critical_msg)
    self._notice_window.show()
    self.notice_window_shown = True
def _settings(self):
    """Lazily construct and return the SystemSettings singleton."""
    with self._real_settings_lock:
        if self._real_settings is not None:
            return self._real_settings
        self._real_settings = SystemSettings()
        const_debug_write(__name__, "SystemSettings loaded")
        # add our SystemSettings plugin
        # Make sure we connect Entropy Client plugin
        # AFTER client db init
        self._real_settings.add_plugin(
            self._settings_client_plugin)
        return self._real_settings
def _signal_ugc(self, entropy_client, package_keys):
    """
    Signal UGC activity.

    @param entropy_client: Entropy Client instance
    @param package_keys: mapping of repository_id -> package keys list
    """
    for repository_id, pkgkeys in package_keys.items():
        try:
            webserv = get_entropy_webservice(entropy_client,
                repository_id, tx_cb = False)
        except WebService.UnsupportedService:
            continue
        try:
            # BUGFIX: submit this repository's package keys; the
            # original passed sorted(package_keys), i.e. the dict's
            # repository identifiers, leaving pkgkeys unused
            webserv.add_downloads(sorted(pkgkeys),
                clear_available_cache = True)
        except WebService.WebServiceException as err:
            const_debug_write(__name__, repr(err))
            continue
def install_package(self, package_path, simulate=False):
    """
    Install Entropy Package file.

    @param package_path: path to the Entropy package file
    @keyword simulate: if True, only simulate the install
    """
    const_debug_write(__name__, "install_package: %s" % (package_path,))
    service = self._service
    service.package_install_request(package_path, simulate=simulate)
    const_debug_write(
        __name__,
        "install_package: package_install_request() sent for: %s" % (
            package_path,))
def _proxy_call(self, *args, **kwargs):
    """
    Reimplemented from SQLCursorWrapper.
    Raise RestartTransaction if MySQL fails to execute the
    query due to a detected deadlock.
    """
    try:
        return super(MySQLCursorWrapper, self)._proxy_call(*args, **kwargs)
    except ProgrammingError as err:
        deadlock_errnos = (
            self._errno["ER_LOCK_WAIT_TIMEOUT"],
            self._errno["ER_LOCK_DEADLOCK"],
        )
        # anything that is not a lock timeout/deadlock propagates as-is
        if err.args[0].errno not in deadlock_errnos:
            raise
        const_debug_write(__name__,
            "deadlock detected, asking to restart transaction")
        # rollback, is it needed?
        self._conn_wr.rollback()
        raise RestartTransaction(err.args[0])
def lock(self, remote_path):
    """
    Try to acquire a remote lock for remote_path over FTP.

    @param remote_path: remote path to lock
    @return: True if the lock was acquired, False otherwise
    """
    # The only atomic operation on FTP seems to be mkdir()
    # But there is no actual guarantee because it really depends
    # on the server implementation.
    # FTP is very old, got to live with it.
    self.__connect_if_not()

    # hidden sibling directory used as a transient mutex:
    # ".<name>.lock" next to remote_path
    remote_path_lock = os.path.join(
        os.path.dirname(remote_path),
        "." + os.path.basename(remote_path) + ".lock")
    remote_ptr = os.path.join(self.__ftpdir, remote_path)
    remote_ptr_lock = os.path.join(self.__ftpdir, remote_path_lock)

    const_debug_write(
        __name__,
        "lock(): remote_ptr: %s, lock: %s" % (
            remote_ptr, remote_ptr_lock,
        ))

    try:
        # mkdir() acts as the atomic "test and set"; failure means
        # somebody else holds the transient mutex
        self._mkdir(remote_ptr_lock)
    except self.ftplib.error_perm as e:
        return False

    # now we can create the lock file reliably
    tmp_fd, tmp_path = None, None
    try:
        tmp_fd, tmp_path = const_mkstemp(prefix="entropy.txc.ftp.lock")
        # check if remote_ptr is already there
        if self._is_path_available(remote_ptr):
            return False
        with open(tmp_path, "rb") as f:
            # upload the (empty) lock file to remote_ptr itself
            rc = self.__ftpconn.storbinary("STOR " + remote_ptr, f)
            # "226" is the FTP transfer-complete reply code
            done = rc.find("226") != -1
            if not done:
                # wtf?
                return False
            return True
    finally:
        if tmp_fd is not None:
            os.close(tmp_fd)
        if tmp_path is not None:
            os.remove(tmp_path)
        # and always remove the directory created with _mkdir()
        # we hope that, if we were able to create it, we're also
        # able to remove it.
        self._rmdir(remote_ptr_lock)
def __simulate_orphaned_apps(self, text):
    """
    Query the installed packages repository for packages matching the
    given text and emit the "unsupported applications" signal with the
    results, from the GLib main loop.
    """
    const_debug_write(__name__, "__simulate_orphaned_apps: "
                      "%s" % (text, ))

    with self._entropy.rwsem().reader():
        repo = self._entropy.installed_repository()
        with repo.direct():
            matched_pkg_ids = repo.searchPackages(text, just_id=True)
            manual_ids, _match_rc = repo.atomMatch(text, multiMatch=True)

    # emit from the main loop, not from this worker context
    def _emit_signal():
        self._service._unsupported_applications_signal(
            list(manual_ids), matched_pkg_ids)
    GLib.idle_add(_emit_signal)

    const_debug_write(__name__, "__simulate_orphaned_apps: completed")
def lock(self, remote_path):
    """
    Try to acquire a remote lock for remote_path over FTP.

    @param remote_path: remote path to lock
    @return: True if the lock was acquired, False otherwise
    """
    # The only atomic operation on FTP seems to be mkdir()
    # But there is no actual guarantee because it really depends
    # on the server implementation.
    # FTP is very old, got to live with it.
    self.__connect_if_not()

    # hidden sibling directory used as a transient mutex:
    # ".<name>.lock" next to remote_path
    remote_path_lock = os.path.join(
        os.path.dirname(remote_path),
        "." + os.path.basename(remote_path) + ".lock")
    remote_ptr = os.path.join(self.__ftpdir, remote_path)
    remote_ptr_lock = os.path.join(self.__ftpdir, remote_path_lock)

    const_debug_write(__name__, "lock(): remote_ptr: %s, lock: %s" % (
        remote_ptr, remote_ptr_lock,))

    try:
        # mkdir() acts as the atomic "test and set"; failure means
        # somebody else holds the transient mutex
        self._mkdir(remote_ptr_lock)
    except self.ftplib.error_perm as e:
        return False

    # now we can create the lock file reliably
    tmp_fd, tmp_path = None, None
    try:
        tmp_fd, tmp_path = const_mkstemp(prefix="entropy.txc.ftp.lock")
        # check if remote_ptr is already there
        if self._is_path_available(remote_ptr):
            return False
        with open(tmp_path, "rb") as f:
            # upload the (empty) lock file to remote_ptr itself
            rc = self.__ftpconn.storbinary(
                "STOR " + remote_ptr, f)
            # "226" is the FTP transfer-complete reply code
            done = rc.find("226") != -1
            if not done:
                # wtf?
                return False
            return True
    finally:
        if tmp_fd is not None:
            os.close(tmp_fd)
        if tmp_path is not None:
            os.remove(tmp_path)
        # and always remove the directory created with _mkdir()
        # we hope that, if we were able to create it, we're also
        # able to remove it.
        self._rmdir(remote_ptr_lock)
def _signal_ugc(self, entropy_client, package_keys):
    """
    Signal UGC activity (package download counters) to each repository's
    web service, best-effort: unsupported services and web service
    errors are skipped.

    @param entropy_client: Entropy Client instance
    @param package_keys: dict mapping repository_id -> iterable of
        package keys downloaded from that repository
    """
    for repository_id, pkgkeys in package_keys.items():
        try:
            webserv = get_entropy_webservice(entropy_client,
                repository_id, tx_cb=False)
        except WebService.UnsupportedService:
            continue
        try:
            # BUG FIX: send this repository's package keys (pkgkeys),
            # not the dict of repository ids (package_keys).
            webserv.add_downloads(sorted(pkgkeys),
                clear_available_cache=True)
        except WebService.WebServiceException as err:
            const_debug_write(__name__, repr(err))
            continue