def get_noticeboard(self, repository_id):
    """
    Return noticeboard RSS metadata (dict form) for the given repository
    identifier. This method is fault tolerant, except for an invalid
    repository_id: if the repository notice board file is broken or not
    found, an empty dict is returned.

    @param repository_id: repository identifier
    @type repository_id: string
    @return: repository metadata
    @rtype: dict
    @raise KeyError: if given repository identifier is not available
    """
    repositories = self._settings['repositories']['available']
    board_path = repositories[repository_id]['local_notice_board']

    # missing or unreadable board file -> fault-tolerant empty result
    if not const_file_readable(board_path):
        return {}

    # parse the RSS document; a broken file yields None entries
    rss_doc = RSS(board_path, '', '')
    entries, _entries_len = rss_doc.get_entries()
    if entries is None:
        return {}
    return entries
def _run_post_update_repository_hook(self, repository_id):
    """
    Execute the repository's post-update hook script, if configured.

    Script failures are tolerated on purpose: if the hook is critical,
    upstream just has to fix the script and it will be re-run
    automatically at the next update.

    @param repository_id: repository identifier
    @type repository_id: string
    @return: hook exit status, or 0 when no hook is configured/readable
    @rtype: int
    """
    repositories = self._settings['repositories']
    branch = repositories['branch']
    repo_meta = repositories['available'][repository_id]
    hook_script = repo_meta['post_repo_update_script']

    if hook_script is None:
        const_debug_write(__name__,
            "_run_post_update_repository_hook: not available")
        return 0

    if not const_file_readable(hook_script):
        # not found!
        const_debug_write(__name__,
            "_run_post_update_repository_hook: not found")
        return 0

    cmd = ["/bin/sh", hook_script, repository_id,
           etpConst['systemroot'] + os.path.sep, branch]
    const_debug_write(__name__,
        "_run_post_update_repository_hook: run: %s" % (cmd,))

    # it is possible to ignore errors because
    # if it's a critical thing, upstream dev just have to fix
    # the script and will be automagically re-run
    process = subprocess.Popen(cmd, stdin=sys.stdin,
        stdout=sys.stdout, stderr=sys.stderr)
    exit_status = process.wait()
    const_debug_write(__name__,
        "_run_post_update_repository_hook: rc: %s" % (exit_status,))
    return exit_status
def _inject(self, entropy_server):
    """
    Actual Eit inject code. Validate every provided package path,
    then add the packages to the current repository and run a
    dependency test on success.
    """
    valid_exts = entropy_server.Spm_class().binary_packages_extensions()

    package_paths = []
    for candidate in self._packages:
        candidate = os.path.realpath(candidate)
        if not const_file_readable(candidate):
            entropy_server.output(
                "%s: %s" % (
                    purple(candidate),
                    teal(_("no such file or directory"))),
                importance=1, level="error")
            return 1
        # only accept paths carrying a known binary package extension
        if not any(candidate.endswith("." + ext) for ext in valid_exts):
            entropy_server.output(
                "%s: %s" % (
                    purple(candidate),
                    teal(_("unsupported extension"))),
                importance=1, level="error")
            return 1
        package_paths.append(candidate)

    if not package_paths:
        entropy_server.output(
            teal(_("no valid package paths")),
            importance=1, level="error")
        return 1

    # in this case, no split package files are provided
    repository_id = entropy_server.repository()
    pkg_tuples = [([path], True,) for path in package_paths]
    package_ids = entropy_server.add_packages_to_repository(
        repository_id, pkg_tuples, ask=self._ask,
        reset_revision=self._reset_revision)

    if package_ids:
        # checking dependencies and print issues
        entropy_server.extended_dependencies_test([repository_id])
    entropy_server.commit_repositories()

    if package_ids:
        return 0
    return 1
def __setup_urllib_resume_support(self):
    """
    Open the local download file, resuming a previous partial download
    when resume support is enabled and a readable partial file exists;
    otherwise truncate and start from scratch.
    """
    # resume support: a readable partial file must already exist
    if const_file_readable(self.__path_to_save) and self.__resume:
        self.__urllib_open_local_file("ab")
        # seek to EOF so new data is appended after the partial content
        self.__localfile.seek(0, os.SEEK_END)
        resume_position = int(self.__localfile.tell())
        self.__startingposition = resume_position
        self.__last_downloadedsize = resume_position
    else:
        self.__urllib_open_local_file("wb")
def _log(self, entropy_server):
    """
    Show the repository compressed ChangeLog through bzless.

    @return: pager exit status, or 1 when the ChangeLog is unavailable
    @rtype: int
    """
    repository_id = entropy_server.repository()
    changelog_path = \
        entropy_server._get_local_repository_compressed_changelog_file(
            repository_id)

    if not const_file_readable(changelog_path):
        entropy_server.output(_("log is not available"),
            importance=1, level="error")
        return 1

    pager = subprocess.Popen(["bzless", changelog_path])
    return pager.wait()
def _log(self, entropy_server):
    """
    Display the repository's compressed ChangeLog using the bzless
    pager.

    @return: pager exit status, or 1 when no readable ChangeLog exists
    @rtype: int
    """
    changelog_path = \
        entropy_server._get_local_repository_compressed_changelog_file(
            entropy_server.repository())

    if not const_file_readable(changelog_path):
        entropy_server.output(
            _("log is not available"),
            importance=1, level="error")
        return 1

    viewer = subprocess.Popen(["bzless", changelog_path])
    return viewer.wait()
def _inject(self, entropy_server):
    """
    Actual Eit inject code. Each package path must exist and carry a
    supported binary package extension; valid packages are added to
    the repository and a dependency test is run.
    """
    spm_class = entropy_server.Spm_class()
    supported_exts = spm_class.binary_packages_extensions()

    pkg_files = []
    for raw_path in self._packages:
        real_path = os.path.realpath(raw_path)
        if not const_file_readable(real_path):
            entropy_server.output(
                "%s: %s" % (
                    purple(real_path),
                    teal(_("no such file or directory"))),
                importance=1, level="error")
            return 1
        # for/else: the else branch fires only when no extension matched
        for ext in supported_exts:
            if real_path.endswith("." + ext):
                pkg_files.append(real_path)
                break
        else:
            entropy_server.output(
                "%s: %s" % (
                    purple(real_path),
                    teal(_("unsupported extension"))),
                importance=1, level="error")
            return 1

    if not pkg_files:
        entropy_server.output(
            teal(_("no valid package paths")),
            importance=1, level="error")
        return 1

    # in this case, no split package files are provided
    repository_id = entropy_server.repository()
    wrapped_files = [([pkg], True,) for pkg in pkg_files]
    package_ids = entropy_server.add_packages_to_repository(
        repository_id, wrapped_files, ask=self._ask,
        reset_revision=self._reset_revision)

    if package_ids:
        # checking dependencies and print issues
        entropy_server.extended_dependencies_test([repository_id])
    entropy_server.commit_repositories()

    return 0 if package_ids else 1
def _userdata(self):
    """
    Return the noticeboard user metadata. This dictionary contains
    misc noticeboard information for the repository, like (at the
    moment) items read status.

    @return: repository user metadata
    @rtype: dict
    @raise KeyError: if given repository identifier is not available
    """
    repositories = self._settings['repositories']['available']
    board_path = repositories[self._repository_id]['local_notice_board']
    if not const_file_readable(board_path):
        # not found
        return {}
    stored = dump_loadobj(board_path, complete_path=True)
    return stored or {}
def _userdata(self):
    """
    Return the noticeboard user metadata dictionary, which holds misc
    noticeboard information for the repository (currently the items
    read status).

    @return: repository user metadata
    @rtype: dict
    @raise KeyError: if given repository identifier is not available
    """
    repo_entry = \
        self._settings['repositories']['available'][self._repository_id]
    board_file = repo_entry['local_notice_board']
    if not const_file_readable(board_file):
        return {}  # not found
    # a broken/empty dump falls back to an empty dict
    return dump_loadobj(board_file, complete_path=True) or {}
def get_noticeboard_userdata(self, repository_id):
    """
    Return noticeboard user metadata dict for given repository
    identifier. This dictionary contains misc noticeboard information
    for given repository, like (at the moment) items read status.

    @param repository_id: repository identifier
    @type repository_id: string
    @return: repository user metadata
    @rtype: dict
    @raise KeyError: if given repository identifier is not available
    """
    repositories = self._settings['repositories']['available']
    userdata_path = \
        repositories[repository_id]['local_notice_board_userdata']
    if not const_file_readable(userdata_path):
        return {}  # not found
    # load metadata, tolerating a missing/broken dump file
    return dump_loadobj(userdata_path, complete_path=True) or {}
def data(self):
    """
    Return the noticeboard RSS metadata. This method is fault
    tolerant, except for invalid repository_id given: if the
    repository notice board file is broken or not found, an empty
    dict is returned.

    @return: repository metadata
    @rtype: dict
    @raise KeyError: if given repository identifier is not available
    """
    repositories = self._settings['repositories']['available']
    board_path = repositories[self._repository_id]['local_notice_board']
    if not const_file_readable(board_path):
        return {}  # not found
    feed = RSS(board_path, '', '')
    entries, _length = feed.get_entries()
    return {} if entries is None else entries
def path_cb(s):
    """Return whether *s* refers to a readable file path."""
    readable = const_file_readable(s)
    return readable
def misc_parser(self, sys_settings_instance):
    """
    Parses Entropy client system configuration file.

    Builds a dict of client settings seeded from etpConst defaults,
    then overrides entries with values parsed from the client
    configuration file (when readable). Finally honours the
    ETP_SPLITDEBUG environment variable and strips "gpg" from the
    package hashes when the GPG feature is disabled.

    @return dict data
    """
    # defaults, taken from etpConst; overridden below by the conf file
    data = {
        'filesbackup': etpConst['filesbackup'],
        'forcedupdates': etpConst['forcedupdates'],
        'packagehashes': etpConst['packagehashes'],
        'gpg': etpConst['client_gpg'],
        'ignore_spm_downgrades': False,
        'splitdebug': etpConst['splitdebug'],
        'splitdebug_dirs': etpConst['splitdebug_dirs'],
        'multifetch': 1,
        'collisionprotect': etpConst['collisionprotect'],
        'configprotect': set(),
        'configprotectmask': set(),
        'configprotectskip': set(),
        'autoprune_days': None,  # disabled by default
        'edelta_support': False,  # disabled by default
    }

    cli_conf = ClientSystemSettingsPlugin.client_conf_path()
    # no readable configuration file: return the defaults as-is
    if not const_file_readable(cli_conf):
        return data

    # per-setting parser closures; each mutates `data` in place and
    # silently ignores values that fail to parse

    def _filesbackup(setting):
        bool_setting = entropy.tools.setting_to_bool(setting)
        if bool_setting is not None:
            data['filesbackup'] = bool_setting

    def _forcedupdates(setting):
        bool_setting = entropy.tools.setting_to_bool(setting)
        if bool_setting is not None:
            data['forcedupdates'] = bool_setting

    def _autoprune(setting):
        # clamp to [0, 365]; out-of-range values are ignored
        int_setting = entropy.tools.setting_to_int(setting, 0, 365)
        if int_setting is not None:
            data['autoprune_days'] = int_setting

    def _packagesdelta(setting):
        bool_setting = entropy.tools.setting_to_bool(setting)
        if bool_setting is not None:
            data['edelta_support'] = bool_setting

    def _packagehashes(setting):
        # keep only hash names known to etpConst, sorted for stability
        setting = setting.lower().split()
        hashes = set()
        for opt in setting:
            if opt in etpConst['packagehashes']:
                hashes.add(opt)
        if hashes:
            data['packagehashes'] = tuple(sorted(hashes))

    def _multifetch(setting):
        # the setting may be either an int (parallelism level) or a
        # boolean; an int outside 2..10 falls back to 10, a true
        # boolean maps to 3 (and wins, being applied last)
        int_setting = entropy.tools.setting_to_int(setting, None, None)
        bool_setting = entropy.tools.setting_to_bool(setting)
        if int_setting is not None:
            if int_setting not in range(2, 11):
                int_setting = 10
            data['multifetch'] = int_setting
        if bool_setting is not None:
            if bool_setting:
                data['multifetch'] = 3

    def _gpg(setting):
        bool_setting = entropy.tools.setting_to_bool(setting)
        if bool_setting is not None:
            data['gpg'] = bool_setting

    def _spm_downgrades(setting):
        bool_setting = entropy.tools.setting_to_bool(setting)
        if bool_setting is not None:
            data['ignore_spm_downgrades'] = bool_setting

    def _splitdebug(setting):
        bool_setting = entropy.tools.setting_to_bool(setting)
        if bool_setting is not None:
            data['splitdebug'] = bool_setting

    def _collisionprotect(setting):
        int_setting = entropy.tools.setting_to_int(setting, 0, 2)
        if int_setting is not None:
            data['collisionprotect'] = int_setting

    def _configprotect(setting):
        for opt in setting.split():
            data['configprotect'].add(const_convert_to_unicode(opt))

    def _configprotectmask(setting):
        for opt in setting.split():
            data['configprotectmask'].add(const_convert_to_unicode(opt))

    def _configprotectskip(setting):
        # skip paths are made absolute against the system root
        for opt in setting.split():
            data['configprotectskip'].add(
                etpConst['systemroot'] + const_convert_to_unicode(opt))

    # configuration key -> parser closure; several keys have a legacy
    # spelling kept for backward compatibility
    settings_map = {
        # backward compatibility
        'filesbackup': _filesbackup,
        'files-backup': _filesbackup,
        # backward compatibility
        'forcedupdates': _forcedupdates,
        'forced-updates': _forcedupdates,
        'packages-autoprune-days': _autoprune,
        'packages-delta': _packagesdelta,
        # backward compatibility
        'packagehashes': _packagehashes,
        'package-hashes': _packagehashes,
        'multifetch': _multifetch,
        'gpg': _gpg,
        'ignore-spm-downgrades': _spm_downgrades,
        'splitdebug': _splitdebug,
        # backward compatibility
        'collisionprotect': _collisionprotect,
        'collision-protect': _collisionprotect,
        # backward compatibility
        'configprotect': _configprotect,
        'config-protect': _configprotect,
        # backward compatibility
        'configprotectmask': _configprotectmask,
        'config-protect-mask': _configprotectmask,
        # backward compatibility
        'configprotectskip': _configprotectskip,
        'config-protect-skip': _configprotectskip,
    }

    # read the conf file, dropping blank lines and '#' comments
    enc = etpConst['conf_encoding']
    with codecs.open(cli_conf, "r", encoding=enc) as client_f:
        clientconf = [x.strip() for x in client_f.readlines() if \
            x.strip() and not x.strip().startswith("#")]

    for line in clientconf:
        key, value = entropy.tools.extract_setting(line)
        if key is None:
            continue
        func = settings_map.get(key)
        if func is None:
            continue
        func(value)

    # completely disable GPG feature
    if not data['gpg'] and ("gpg" in data['packagehashes']):
        data['packagehashes'] = tuple((x for x in data['packagehashes'] \
            if x != "gpg"))

    # support ETP_SPLITDEBUG
    split_debug = os.getenv("ETP_SPLITDEBUG")
    if split_debug is not None:
        _splitdebug(split_debug)

    return data
def __setup_repos_files(self, system_settings):
    """
    This function collects available repositories configuration
    files by filling internal dict() __repos_files.

    @param system_settings: SystemSettings instance
    @type system_settings: instance of SystemSettings
    @return: None
    @rtype: None
    """
    self.__repos_files = {
        'repos_license_whitelist': {},
        'repos_mask': {},
        'repos_system_mask': {},
        'repos_critical_updates': {},
        'repos_keywords': {},
    }

    avail_data = system_settings['repositories']['available']
    # (__repos_files key, repository database file name) pairs
    file_table = (
        ('repos_mask', etpConst['etpdatabasemaskfile']),
        ('repos_license_whitelist',
            etpConst['etpdatabaselicwhitelistfile']),
        ('repos_system_mask', etpConst['etpdatabasesytemmaskfile']),
        ('repos_critical_updates', etpConst['etpdatabasecriticalfile']),
        ('repos_keywords', etpConst['etpdatabasekeywordsfile']),
    )

    for repoid in system_settings['repositories']['order']:
        repo_data = avail_data[repoid]
        # skip repositories flagged as temporary
        if "__temporary__" in repo_data:
            continue
        # record only the configuration files actually readable
        for files_key, file_name in file_table:
            conf_path = os.path.join(repo_data['dbpath'], file_name)
            if const_file_readable(conf_path):
                self.__repos_files[files_key][repoid] = conf_path