def _do_trigger_call_ext_generic(self):
    """
    Execute the package trigger payload in an external sandbox.

    The payload in self._pkgdata['trigger'] is written to a temporary
    file; the first 128 bytes are sniffed for the configured shell
    interpreter shebang to decide between the shell and the Python
    sandbox. Returns whatever the sandbox run() returns.
    """
    # shebang expected at the top of shell-based triggers
    entropy_sh = "#!%s" % (etpConst['trigger_sh_interpreter'],)
    entropy_sh = const_convert_to_rawstring(entropy_sh)

    tmp_fd, tmp_path = const_mkstemp(prefix="_do_trigger_call_ext_generic")
    with os.fdopen(tmp_fd, "ab+") as tr_f:
        tr_f.write(const_convert_to_rawstring(self._pkgdata['trigger']))
        tr_f.flush()
        tr_f.seek(0)
        # read just enough to detect the interpreter line
        interpreter = tr_f.read(128)
        tr_f.seek(0)
        shell_intr = False
        if interpreter.startswith(entropy_sh):
            shell_intr = True

    try:
        if shell_intr:
            exc = self._EntropyShSandbox(self._entropy)
        else:
            exc = self._EntropyPySandbox(self._entropy)
        return exc.run(self._phase, self._pkgdata, tmp_path)
    finally:
        # NOTE(review): tmp_path is only removed on the shell path;
        # presumably the Python sandbox owns removal of its script
        # file — confirm, otherwise this leaks a temp file.
        if shell_intr:
            try:
                os.remove(tmp_path)
            except OSError:
                pass
def _do_trigger_call_ext_generic(self):
    """Run the package trigger payload through an external sandbox."""
    shebang = const_convert_to_rawstring(
        "#!%s" % (etpConst['trigger_sh_interpreter'],))

    script_fd, script_path = const_mkstemp(
        prefix="_do_trigger_call_ext_generic")
    with os.fdopen(script_fd, "ab+") as script_f:
        script_f.write(
            const_convert_to_rawstring(self._pkgdata['trigger']))
        script_f.flush()
        script_f.seek(0)
        # the first 128 bytes are enough to spot the shebang line
        header = script_f.read(128)
        script_f.seek(0)
        is_shell = header.startswith(shebang)

    try:
        if is_shell:
            sandbox = self._EntropyShSandbox(self._entropy)
        else:
            sandbox = self._EntropyPySandbox(self._entropy)
        return sandbox.run(self._phase, self._pkgdata, script_path)
    finally:
        if is_shell:
            try:
                os.remove(script_path)
            except OSError:
                pass
def _cast_to_str(value):
    """
    Cast a form value to a raw string suitable for HTTP encoding.

    None becomes an empty raw string, numbers are stringified,
    lists/tuples are repr()-ed, anything else is returned unchanged.
    """
    if value is None:
        return const_convert_to_rawstring("")
    # const_get_int() yields the platform integer types; the previous
    # bare "long" reference raised NameError on Python 3. This matches
    # the copy embedded in _encode_multipart_form.
    elif isinstance(value, const_get_int() + (float, )):
        return const_convert_to_rawstring(value)
    elif isinstance(value, (list, tuple)):
        return repr(value)
    return value
def _cast_to_str(value):
    """
    Cast a form value to a raw string suitable for HTTP encoding.

    None becomes an empty raw string, numbers are stringified,
    lists/tuples are repr()-ed, anything else is returned unchanged.
    """
    if value is None:
        return const_convert_to_rawstring("")
    # const_get_int() yields the platform integer types; the previous
    # bare "long" reference raised NameError on Python 3. This matches
    # the copy embedded in _encode_multipart_form.
    elif isinstance(value, const_get_int() + (float, )):
        return const_convert_to_rawstring(value)
    elif isinstance(value, (list, tuple)):
        return repr(value)
    return value
def _encode_path(self, path):
    """
    Encode path using proper encoding for use with os functions.
    """
    try:
        return const_convert_to_rawstring(
            path, from_enctype=etpConst["conf_encoding"])
    except UnicodeEncodeError:
        # the configured encoding cannot represent this path,
        # fall back to the filesystem encoding
        return const_convert_to_rawstring(
            path, from_enctype=sys.getfilesystemencoding())
def _hash(self, notices):
    """ Hash a list of Notice objects """
    digest = hashlib.md5()
    # seed with an empty raw string (keeps py2/py3 behaviour aligned)
    digest.update(const_convert_to_rawstring(""))
    for entry in notices:
        digest.update(const_convert_to_rawstring(entry.hash()))
    return digest.hexdigest()
def _encode_path(self, path):
    """
    Encode path using proper encoding for use with os functions.

    Tries the configured Entropy encoding first, falling back to the
    filesystem encoding when the path cannot be encoded with it.
    """
    try:
        path = const_convert_to_rawstring(
            path, from_enctype=etpConst['conf_encoding'])
    except (UnicodeEncodeError, ):
        # path contains characters outside the configured encoding
        path = const_convert_to_rawstring(
            path, from_enctype=sys.getfilesystemencoding())
    return path
def _get_config_protect(self, entropy_repository, package_id, mask=False,
                        _metadata=None):
    """
    Return configuration protection (or mask) metadata for the given
    package. This method should not be used as source for storing
    metadata into repositories since the returned objects may not be
    decoded in utf-8. Data returned by this method is expected to be
    used only by internal functions.
    """
    misc_data = self._entropy.ClientSettings()['misc']

    if mask:
        paths = entropy_repository.retrieveProtectMask(package_id).split()
        misc_key = "configprotectmask"
    else:
        paths = entropy_repository.retrieveProtect(package_id).split()
        misc_key = "configprotect"

    if _metadata is None:
        _metadata = self._meta
    root = self._get_system_root(_metadata)
    # prefix every repository-provided path with the system root, then
    # merge in the client-configured protection paths
    config = set(("%s%s" % (root, path) for path in paths))
    config.update(misc_data[misc_key])

    # os.* methods in Python 2.x do not expect unicode strings
    # This set of data is only used by _handle_config_protect atm.
    if not const_is_python3():
        config = set((const_convert_to_rawstring(x) for x in config))

    return config
def _get_critical_update_cache_hash(self, repo_hash):
    """
    Get critical package updates cache hash that can be used to
    retrieve the on-disk cached object.
    """
    inst_repo = self.installed_repository()

    enabled_repos = self._filter_available_repositories()
    repo_order = [x for x in self._settings['repositories']['order']
                  if x in enabled_repos]

    # "v2" is a manual cache-format version marker: bump it to
    # invalidate previously written cache objects
    c_hash = "%s|%s|%s|%s|%s|%s|%s|v2" % (
        repo_hash,
        enabled_repos,
        inst_repo.checksum(),
        self._all_repositories_hash(),
        ";".join(sorted(self._settings['repositories']['available'])),
        repo_order,
        # needed when users do bogus things like editing config files
        # manually (branch setting)
        self._settings['repositories']['branch'],
    )
    sha = hashlib.sha1()
    sha.update(const_convert_to_rawstring(repr(c_hash)))
    return sha.hexdigest()
def _get_updates_cache_hash(self, repo_hash, empty_deps,
                            ignore_spm_downgrades):
    """
    Get package updates cache hash that can be used to retrieve
    the on-disk cached object.
    """
    enabled_repos = self._filter_available_repositories()
    repo_order = [x for x in self._settings['repositories']['order']
                  if x in enabled_repos]
    inst_repo = self.installed_repository()

    # "v3" is a manual cache-format version marker: bump it to
    # invalidate previously written cache objects
    cache_s = "%s|%s|%s|%s|%s|%s|%s|%s|%s|v3" % (
        repo_hash,
        empty_deps,
        enabled_repos,
        inst_repo.checksum(),
        self._all_repositories_hash(),
        ";".join(sorted(self._settings['repositories']['available'])),
        repo_order,
        ignore_spm_downgrades,
        # needed when users do bogus things like editing config files
        # manually (branch setting)
        self._settings['repositories']['branch'],
    )
    sha = hashlib.sha1()
    sha.update(const_convert_to_rawstring(cache_s))
    # the returned key is prefixed with the cacher namespace id
    return "%s%s" % (
        EntropyCacher.CACHE_IDS['world_update'],
        sha.hexdigest(),)
def _get_config_protect(self, entropy_repository, package_id, mask = False,
                        _metadata = None):
    """
    Return configuration protection (or mask) metadata for the given
    package. This method should not be used as source for storing
    metadata into repositories since the returned objects may not be
    decoded in utf-8. Data returned by this method is expected to be
    used only by internal functions.
    """
    misc_data = self._entropy.ClientSettings()['misc']

    if mask:
        paths = entropy_repository.retrieveProtectMask(package_id).split()
        misc_key = "configprotectmask"
    else:
        paths = entropy_repository.retrieveProtect(package_id).split()
        misc_key = "configprotect"

    if _metadata is None:
        _metadata = self._meta
    root = self._get_system_root(_metadata)
    # repository paths are relative to root; client-side settings are
    # merged in afterwards
    config = set(("%s%s" % (root, path) for path in paths))
    config.update(misc_data[misc_key])

    # os.* methods in Python 2.x do not expect unicode strings
    # This set of data is only used by _handle_config_protect atm.
    if not const_is_python3():
        config = set((const_convert_to_rawstring(x) for x in config))

    return config
def open_editor(path):
    """
    Open given path with the default editor and return
    a subprocess.Popen object, or None if no suitable
    .desktop handler can be resolved.
    """
    mime = _query_filetype(path)
    if mime is None:
        return None
    plain_mime = "text/plain"

    desktop_name = _get_default_desktop(mime)
    if not desktop_name:
        # fall back to the generic text editor
        if mime != plain_mime:
            desktop_name = _get_default_desktop(plain_mime)
        if not desktop_name:
            return None

    # locate the .desktop file across XDG data directories
    xdg_data_dirs = os.getenv(
        "XDG_DATA_DIRS", "/usr/share").split(":")
    desktop_path = None
    for xdg_data_dir in xdg_data_dirs:
        _desktop_path = os.path.join(
            xdg_data_dir, "applications", desktop_name)
        if os.path.exists(_desktop_path) and \
                os.path.isfile(_desktop_path):
            desktop_path = _desktop_path
            break
    if desktop_path is None:
        return None

    parser = configparser.RawConfigParser()
    read_files = parser.read([desktop_path])
    if desktop_path not in read_files:
        return None

    exec_string = parser.get("Desktop Entry", "Exec")
    # replace %F with %f
    # replace %u with %f
    # replace %U with %f
    # as per:
    # http://standards.freedesktop.org/desktop-entry-spec/latest
    # and also handle %% (which is the escape for single %)
    # NOTE(review): despite the comment above, %% is not actually
    # unescaped here, and only space-prefixed field codes are
    # rewritten — confirm whether that is acceptable.
    exec_string = exec_string.replace(" %F", " %f")
    exec_string = exec_string.replace(" %U", " %f")
    exec_string = exec_string.replace(" %u", " %f")
    if " %f" in exec_string:
        exec_string = exec_string.replace(
            " %f", " \"" + path + "\"")
    else:
        exec_string += " \""
        exec_string += path
        exec_string += "\""
    # NOTE(review): a path containing a double quote would break the
    # quoting above when shlex.split() runs.

    # shlex split() wants raw string
    exec_string = const_convert_to_rawstring(exec_string)
    args = shlex.split(exec_string)
    proc = subprocess.Popen(args)
    return proc
def open_editor(path):
    """
    Open given path with the default editor and return
    a subprocess.Popen object, or None when no .desktop
    handler for the file's mime type can be found.
    """
    mime = _query_filetype(path)
    if mime is None:
        return None
    plain_mime = "text/plain"

    desktop_name = _get_default_desktop(mime)
    if not desktop_name:
        # no handler for this mime type, retry with text/plain
        if mime != plain_mime:
            desktop_name = _get_default_desktop(plain_mime)
        if not desktop_name:
            return None

    # search the .desktop file in every XDG data directory
    xdg_data_dirs = os.getenv(
        "XDG_DATA_DIRS", "/usr/share").split(":")
    desktop_path = None
    for xdg_data_dir in xdg_data_dirs:
        _desktop_path = os.path.join(
            xdg_data_dir, "applications", desktop_name)
        if os.path.exists(_desktop_path) and \
                os.path.isfile(_desktop_path):
            desktop_path = _desktop_path
            break
    if desktop_path is None:
        return None

    parser = configparser.RawConfigParser()
    read_files = parser.read([desktop_path])
    if desktop_path not in read_files:
        return None

    exec_string = parser.get("Desktop Entry", "Exec")
    # replace %F with %f
    # replace %u with %f
    # replace %U with %f
    # as per:
    # http://standards.freedesktop.org/desktop-entry-spec/latest
    # and also handle %% (which is the escape for single %)
    # NOTE(review): %% is not actually handled below, and only
    # space-prefixed field codes are rewritten — verify intent.
    exec_string = exec_string.replace(" %F", " %f")
    exec_string = exec_string.replace(" %U", " %f")
    exec_string = exec_string.replace(" %u", " %f")
    if " %f" in exec_string:
        exec_string = exec_string.replace(
            " %f", " \"" + path + "\"")
    else:
        exec_string += " \""
        exec_string += path
        exec_string += "\""
    # NOTE(review): paths containing a double quote break this
    # hand-rolled quoting once shlex.split() parses it.

    # shlex split() wants raw string
    exec_string = const_convert_to_rawstring(exec_string)
    args = shlex.split(exec_string)
    proc = subprocess.Popen(args)
    return proc
def _load(self):
    """
    Load configuration file updates reading from disk.

    Walks every config-protected path looking for pending
    ._cfgNNNN_<name> proposal files and records them via
    _load_maybe_add().
    """
    name_cache = set()
    client_conf_protect = self._get_config_protect()
    # NOTE: with Python 3.x we can remove const_convert...
    # and avoid using _encode_path.
    cfg_pfx = const_convert_to_rawstring("._cfg")
    underscore = const_convert_to_rawstring("_")

    for path in client_conf_protect:
        path = self._encode_path(path)

        # is it a file?
        scanfile = False
        if os.path.isfile(path):
            # find inside basename
            path = os.path.dirname(path)
            scanfile = True

        for currentdir, _subdirs, files in os.walk(path):
            for item in files:

                if scanfile:
                    if path != item:
                        continue

                if not item.startswith(cfg_pfx):
                    continue

                # further check then
                # chars 5..8 must be the numeric counter
                number = item[5:9]
                try:
                    int(number)
                except ValueError:
                    continue # not a valid etc-update file
                # slice comparison keeps this working on Python 3,
                # where indexing a bytes object yields an int
                if item[9:10] != underscore: # no valid format provided
                    continue

                filepath = os.path.join(currentdir, item)
                if filepath in name_cache:
                    continue # skip, already done
                name_cache.add(filepath)

                self._load_maybe_add(currentdir, item, filepath, number)
def _get_hash(self):
    """
    Return the noticeboard data and metadata hash string.
    """
    nb_data = self.data()
    sha = hashlib.sha1()
    sha.update(const_convert_to_rawstring("--"))
    for key in ("description", "pubDate", "title", "link", "id",):
        try:
            elem = nb_data[key]
        except KeyError:
            continue
        sha.update(
            const_convert_to_rawstring("{%s=%s}" % (key, elem)))
    return sha.hexdigest()
def prepare_markup(text):
    """
    Convert text to raw bytestring to make GTK3 happy.
    """
    if not const_isunicode(text):
        return text
    return const_convert_to_rawstring(
        text, from_enctype=etpConst['conf_encoding'])
def prepare_markup(text):
    """
    Convert text to raw bytestring to make GTK3 happy.

    Non-unicode input is returned unchanged.
    """
    if const_isunicode(text):
        return \
            const_convert_to_rawstring(
                text, from_enctype=etpConst['conf_encoding'])
    return text
def _argparse_easygoing_valid_entropy_path(string):
    """
    argparse type= validator: accept the value only if it points
    to an existing regular file.
    """
    if os.path.isfile(string) and os.path.exists(string):
        return string

    # see bug 3873, requires raw string
    raw_msg = const_convert_to_rawstring(
        "%s: %s" % (_("not a valid Entropy package file"), string),
        from_enctype="utf-8")
    raise argparse.ArgumentTypeError(raw_msg)
def _argparse_easygoing_valid_entropy_path(string):
    """
    argparse type= validator: return the value when it is an
    existing regular file, otherwise raise ArgumentTypeError.
    """
    if os.path.isfile(string) and os.path.exists(string):
        return string

    # see bug 3873, requires raw string
    msg = "%s: %s" % (
        _("not a valid Entropy package file"),
        string)
    msg = const_convert_to_rawstring(
        msg, from_enctype="utf-8")
    raise argparse.ArgumentTypeError(msg)
def hash(self):
    """ Return a stringy hash """
    digest = hashlib.md5()
    parts = (
        self.repository() + "|",
        "%s|" % (self.notice_id(),),
        self.date() + "|",
        self.description() + "|",
        self.title() + "|",
        self.link() + "|",
        self.guid(),
    )
    for part in parts:
        digest.update(const_convert_to_rawstring(part))
    return digest.hexdigest()
def __repositories_hash(self, repositories):
    """
    Return a SHA1 hex digest computed over the mtimes of the given
    repositories. Unavailable repositories are skipped; corrupted
    ones emit a warning and are left out of the digest.
    """
    sha = hashlib.sha1()
    sha.update(const_convert_to_rawstring("0"))
    for repo in repositories:
        try:
            dbconn = self.open_repository(repo)
        except (RepositoryError):
            continue # repo not available
        try:
            sha.update(const_convert_to_rawstring(repr(dbconn.mtime())))
        except (OperationalError, DatabaseError, OSError, IOError):
            txt = _("Repository") + " " + const_convert_to_unicode(repo) \
                + " " + _("is corrupted") + ". " + \
                _("Cannot calculate the checksum")
            self.output(
                purple(txt),
                importance = 1,
                level = "warning"
            )
    return sha.hexdigest()
def xpak(rootdir, outfile=None):
    """(rootdir,outfile) -- creates an xpak segment of the directory
    'rootdir' and under the name 'outfile' if it is specified. Otherwise
    it returns the xpak segment."""
    mylist = []
    _addtolist(mylist, const_convert_to_rawstring(rootdir))
    mylist.sort()

    mydata = {}
    for x in mylist:
        x_path = os.path.join(const_convert_to_rawstring(rootdir), x)
        with open(x_path, "rb") as a:
            mydata[x] = a.read()

    xpak_segment = xpak_mem(mydata)
    if outfile:
        # "with" guarantees the descriptor is closed even when
        # write() raises (the previous manual close leaked it)
        with open(outfile, "wb") as outf:
            outf.write(xpak_segment)
    else:
        return xpak_segment
def xpak(rootdir, outfile=None):
    """(rootdir,outfile) -- creates an xpak segment of the directory
    'rootdir' and under the name 'outfile' if it is specified. Otherwise
    it returns the xpak segment."""
    mylist = []
    _addtolist(mylist, const_convert_to_rawstring(rootdir))
    mylist.sort()

    mydata = {}
    for x in mylist:
        x_path = os.path.join(const_convert_to_rawstring(rootdir), x)
        with open(x_path, "rb") as a:
            mydata[x] = a.read()

    xpak_segment = xpak_mem(mydata)
    if outfile:
        # "with" guarantees the descriptor is closed even when
        # write() raises (the previous manual close leaked it)
        with open(outfile, "wb") as outf:
            outf.write(xpak_segment)
    else:
        return xpak_segment
def _encode_multipart_form(self, params, file_params, boundary):
    """
    Encode parameters and files into a valid HTTP multipart form data.
    NOTE: this method loads the whole file in RAM, HTTP post doesn't
    work well for big files anyway.

    Returns (open file object, path) of the encoded body; the caller
    owns both and must close/remove them.
    """
    def _cast_to_str(value):
        # normalize a form value to a raw string
        if value is None:
            return const_convert_to_rawstring("")
        elif isinstance(value, const_get_int() + (float, )):
            return const_convert_to_rawstring(value)
        elif isinstance(value, (list, tuple)):
            return repr(value)
        return value

    tmp_fd, tmp_path = const_mkstemp(prefix="_encode_multipart_form")
    tmp_f = os.fdopen(tmp_fd, "ab+")
    tmp_f.truncate(0)
    crlf = const_convert_to_rawstring('\r\n')
    dashes = const_convert_to_rawstring("--")
    raw_boundary = const_convert_to_rawstring(boundary)

    # plain form fields first
    for key, value in params.items():
        tmp_f.write(dashes + raw_boundary + crlf)
        tmp_f.write(
            const_convert_to_rawstring(
                "Content-Disposition: form-data; name=\"%s\"" % (key,)))
        tmp_f.write(crlf + crlf + _cast_to_str(value) + crlf)

    # then file attachments, streamed in 64k chunks
    for key, (f_name, f_obj) in file_params.items():
        tmp_f.write(dashes + raw_boundary + crlf)
        tmp_f.write(
            const_convert_to_rawstring(
                "Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"" % (
                    key, f_name,)))
        tmp_f.write(crlf)
        tmp_f.write(
            const_convert_to_rawstring(
                "Content-Type: application/octet-stream") + crlf)
        tmp_f.write(
            const_convert_to_rawstring("Content-Transfer-Encoding: binary")
            + crlf + crlf)
        f_obj.seek(0)
        while True:
            chunk = f_obj.read(65536)
            if not chunk:
                break
            tmp_f.write(chunk)
        tmp_f.write(crlf)

    # closing boundary
    tmp_f.write(dashes + raw_boundary + dashes + crlf + crlf)
    tmp_f.flush()
    return tmp_f, tmp_path
def packages_configuration_hash(self):
    """
    Return a SHA1 hash of the current packages configuration.
    This includes masking, unmasking, keywording of all the
    configured repositories. The result is memoized inside the
    repositories settings dict.
    """
    sha = hashlib.sha1()
    # NOTE: this first update happens before the cache short-circuit
    # below; harmless, since the cached digest is returned as-is.
    sha.update(const_convert_to_rawstring("-begin-"))

    settings = self._helper.ClientSettings()
    repo_settings = settings['repositories']

    cache_key = "__packages_configuration_hash__"
    cached = repo_settings.get(cache_key)
    if cached is not None:
        return cached

    sha.update(const_convert_to_rawstring("-begin-mask-"))
    for repository_id in sorted(repo_settings['mask'].keys()):
        packages = repo_settings['mask'][repository_id]
        cache_s = "mask:%s:{%s}|" % (
            repository_id, ",".join(sorted(packages)),
        )
        sha.update(const_convert_to_rawstring(cache_s))
    sha.update(const_convert_to_rawstring("-end-mask-"))

    sha.update(const_convert_to_rawstring("-begin-keywords-"))
    for repository_id in sorted(repo_settings['repos_keywords'].keys()):
        data = repo_settings['repos_keywords'][repository_id]
        packages = data['packages']
        for package in sorted(packages.keys()):
            keywords = packages[package]
            cache_s = "repos_keywords:%s:%s:{%s}|" % (
                repository_id, package, sorted(keywords),
            )
            sha.update(const_convert_to_rawstring(cache_s))
    sha.update(const_convert_to_rawstring("-end-keywords-"))

    sha.update(const_convert_to_rawstring("-end-"))

    outcome = sha.hexdigest()
    repo_settings[cache_key] = outcome
    return outcome
def _get_config_protect_skip(self):
    """
    Return the configuration protection path set.
    """
    misc_settings = self._entropy.ClientSettings()['misc']
    protectskip = misc_settings['configprotectskip']
    if const_is_python3():
        return protectskip
    # Python 2: os.* functions expect raw (byte) strings
    return set(
        const_convert_to_rawstring(
            x, from_enctype=etpConst['conf_encoding'])
        for x in misc_settings['configprotectskip'])
def _argparse_is_valid_directory(self, string):
    """
    To be used with argparse add_argument() type parameter for
    validating directory paths.
    """
    # cope with broken symlinks
    if os.path.isdir(string) and os.path.exists(string):
        return string

    msg = "%s: %s" % (_("not a valid directory"), string)
    # see bug 3873, requires raw string
    raise argparse.ArgumentTypeError(
        const_convert_to_rawstring(msg, from_enctype="utf-8"))
def _try_auth_login(self):
    """
    Helper method used to attempt the login procedure against
    users database.
    This method automatically handles the conversion to raw string
    (from unicode) if necessary.

    @return: tuple composed by (username, user identifier), where
        username is the raw-encoded login name passed to the auth
        backend
    @rtype: tuple
    @raise AttributeError: if credentials are invalid.
    """
    username, password = request.params.get("username"), \
        request.params.get("password")
    if not (username and password):
        raise AttributeError("credentials not available")

    # explicitly use utf8_bin format for username
    username = const_convert_to_rawstring(
        username, from_enctype='raw_unicode_escape')
    # the password is instead encoded as a plain utf-8 raw string
    password = const_convert_to_rawstring(password, from_enctype='utf-8')
    user_id = self._auth.login(username, password)
    return username, user_id
def _get_config_protect_skip(self):
    """
    Return the configuration protection path set.
    """
    misc_settings = self._entropy.ClientSettings()['misc']
    protectskip = misc_settings['configprotectskip']
    # Python 2: os.* functions expect raw (byte) strings, so
    # re-encode every path there
    if not const_is_python3():
        protectskip = set((
            const_convert_to_rawstring(
                x, from_enctype = etpConst['conf_encoding'])
            for x in misc_settings['configprotectskip']))
    return protectskip
def _argparse_is_valid_directory(self, string):
    """
    To be used with argparse add_argument() type parameter for
    validating directory paths.
    """
    if os.path.isdir(string) and os.path.exists(string):
        # cope with broken symlinks
        return string
    msg = "%s: %s" % (_("not a valid directory"), string)
    # see bug 3873, requires raw string
    msg = const_convert_to_rawstring(
        msg, from_enctype="utf-8")
    raise argparse.ArgumentTypeError(msg)
def _try_auth_login(self):
    """
    Helper method used to attempt the login procedure against
    users database.
    This method automatically handles the conversion to raw string
    (from unicode) if necessary.

    @return: tuple composed by (username, user identifier), where
        username is the raw-encoded login name passed to the auth
        backend
    @rtype: tuple
    @raise AttributeError: if credentials are invalid.
    """
    username, password = request.params.get("username"), \
        request.params.get("password")
    if not (username and password):
        raise AttributeError("credentials not available")

    # explicitly use utf8_bin format for username
    username = const_convert_to_rawstring(username,
        from_enctype = 'raw_unicode_escape')
    # the password is instead encoded as a plain utf-8 raw string
    password = const_convert_to_rawstring(password,
        from_enctype = 'utf-8')
    user_id = self._auth.login(username, password)
    return username, user_id
def _get_available_packages_hash(self):
    """
    Get available packages cache hash.
    """
    # client digest not needed, cache is kept updated
    cache_s = "%s|%s|%s" % (
        self._repositories_hash(),
        self._filter_available_repositories(),
        # needed when users do bogus things like editing config files
        # manually (branch setting)
        self._settings['repositories']['branch'])

    sha = hashlib.sha1()
    sha.update(const_convert_to_rawstring(repr(cache_s)))
    return sha.hexdigest()
def _get_hash(self):
    """
    Return the noticeboard data and metadata hash string.

    Missing metadata keys are simply skipped, so the digest only
    covers the fields actually present.
    """
    nb_data = self.data()
    sha = hashlib.sha1()
    sha.update(const_convert_to_rawstring("--"))
    for key in (
            "description", "pubDate", "title", "link", "id",
    ):
        if key not in nb_data:
            continue
        elem = nb_data[key]
        elem_str = "{%s=%s}" % (key, elem)
        sha.update(const_convert_to_rawstring(elem_str))
    return sha.hexdigest()
def feed_child(self, txt):
    """
    Feed text to the terminal child, working around Vte API quirks.
    """
    # Workaround vte.Terminal bug not passing to .feed proper message RAW
    # size. feed() supports UTF-8 but then, string length is wrongly passed
    # by python, because it does not consider the fact that UTF-8 chars can
    # be 16bits long.
    raw_txt_len = len(txt)
    if const_isunicode(txt):
        # use the encoded byte length, not the character count
        raw_txt_len = len(txt.encode(etpConst['conf_encoding']))
    try:
        return Vte.Terminal.feed(self, txt, raw_txt_len)
    except TypeError:
        # Vte.Terminal 0.32.x: feed() takes the text only, and wants
        # a raw string
        txt = const_convert_to_rawstring(
            txt, from_enctype=etpConst['conf_encoding'])
        return Vte.Terminal.feed(self, txt)
def _load(self):
    """
    Load configuration file updates reading from disk.

    Walks every config-protected path looking for pending
    ._cfgNNNN_<name> proposal files and records them via
    _load_maybe_add().
    """
    name_cache = set()
    client_conf_protect = self._get_config_protect()
    # NOTE: with Python 3.x we can remove const_convert...
    # and avoid using _encode_path.
    cfg_pfx = const_convert_to_rawstring("._cfg")
    underscore = const_convert_to_rawstring("_")

    for path in client_conf_protect:
        path = self._encode_path(path)

        # is it a file?
        scanfile = False
        if os.path.isfile(path):
            # find inside basename
            path = os.path.dirname(path)
            scanfile = True

        for currentdir, _subdirs, files in os.walk(path):
            for item in files:

                if scanfile:
                    if path != item:
                        continue

                if not item.startswith(cfg_pfx):
                    continue

                # further check then
                number = item[5:9]
                try:
                    int(number)
                except ValueError:
                    continue # not a valid etc-update file
                # bugfix: item is a raw (bytes) string here; on
                # Python 3, item[9] yields an int which can never
                # equal the str "_", so every candidate was
                # rejected. Compare byte slices instead.
                if item[9:10] != underscore: # no valid format provided
                    continue

                filepath = os.path.join(currentdir, item)
                if filepath in name_cache:
                    continue # skip, already done
                name_cache.add(filepath)

                self._load_maybe_add(
                    currentdir, item, filepath, number)
def data_send_available(self):
    """
    Return whether data send is correctly working. A temporary file
    with random content is sent to the service, that would need to
    calculate its md5 hash. For security reason, data will be accepted
    remotely if, and only if its size is < 256 bytes.
    """
    md5 = hashlib.md5()
    # Build the probe payload (bytes 0x00..0xff). On Python 3,
    # concatenating chr(x) (a str) to a raw/bytes string raises
    # TypeError, so build it natively as bytes there; the resulting
    # byte sequence is identical to the Python 2 loop.
    if const_is_python3():
        test_str = bytes(range(256))
    else:
        test_str = const_convert_to_rawstring("")
        for x in range(256):
            test_str += chr(x)
    md5.update(test_str)
    expected_hash = md5.hexdigest()
    func_name = "data_send_available"

    tmp_fd, tmp_path = const_mkstemp(prefix="data_send_available")
    try:
        with os.fdopen(tmp_fd, "ab+") as tmp_f:
            tmp_f.write(test_str)
            tmp_f.seek(0)
            params = {
                "test_param": "hello",
            }
            file_params = {
                "test_file": ("test_file.txt", tmp_f),
            }
            remote_hash = self._method_getter(func_name, params,
                cache=False, require_credentials=False,
                file_params=file_params)
    finally:
        os.remove(tmp_path)

    const_debug_write(
        __name__,
        "WebService.%s, expected: %s, got: %s" % (
            func_name, repr(expected_hash), repr(remote_hash), ))
    return expected_hash == remote_hash
def data_send_available(self):
    """
    Return whether data send is correctly working. A temporary file
    with random content is sent to the service, that would need to
    calculate its md5 hash. For security reason, data will be accepted
    remotely if, and only if its size is < 256 bytes.
    """
    md5 = hashlib.md5()
    # Build the probe payload (bytes 0x00..0xff). On Python 3,
    # concatenating chr(x) (a str) to a raw/bytes string raises
    # TypeError, so build it natively as bytes there; the resulting
    # byte sequence is identical to the Python 2 loop.
    if const_is_python3():
        test_str = bytes(range(256))
    else:
        test_str = const_convert_to_rawstring("")
        for x in range(256):
            test_str += chr(x)
    md5.update(test_str)
    expected_hash = md5.hexdigest()
    func_name = "data_send_available"

    tmp_fd, tmp_path = const_mkstemp(prefix="data_send_available")
    try:
        with os.fdopen(tmp_fd, "ab+") as tmp_f:
            tmp_f.write(test_str)
            tmp_f.seek(0)
            params = {
                "test_param": "hello",
            }
            file_params = {
                "test_file": ("test_file.txt", tmp_f),
            }
            remote_hash = self._method_getter(func_name, params,
                cache = False, require_credentials = False,
                file_params = file_params)
    finally:
        os.remove(tmp_path)

    const_debug_write(__name__,
        "WebService.%s, expected: %s, got: %s" % (
            func_name, repr(expected_hash), repr(remote_hash),))
    return expected_hash == remote_hash
def _remove_content_from_system_loop(
        self, inst_repo, remove_atom, remove_content, remove_config,
        affected_directories, affected_infofiles, directories,
        directories_cache, preserved_mgr, not_removed_due_to_collisions,
        colliding_path_messages, automerge_metadata, col_protect,
        protect, mask, protectskip, sys_root):
    """
    Body of the _remove_content_from_system() method.

    Iterates remove_content (tuples of package id, path, file type),
    removing files from the live system while honoring collision
    protection, configuration protection/masking and preserved
    libraries. Directories are not removed here: they are collected
    into the "directories" set (tagged "dir" or "link") and into the
    caller-provided bookkeeping sets for later processing/Triggers.
    """
    info_dirs = self._get_info_directories()

    # collect all the library paths to be preserved
    # in the final removal loop.
    preserved_lib_paths = set()
    if self.PRESERVED_LIBS_ENABLED:
        for _pkg_id, item, _ftype in remove_content:
            # determine without sys_root
            paths = self._handle_preserved_lib(
                item, remove_atom, preserved_mgr)
            if paths is not None:
                preserved_lib_paths.update(paths)

    for _pkg_id, item, _ftype in remove_content:

        if not item:
            continue # empty element??

        sys_root_item = sys_root + item
        sys_root_item_encoded = sys_root_item
        if not const_is_python3():
            # this is coming from the db, and it's pure utf-8
            sys_root_item_encoded = const_convert_to_rawstring(
                sys_root_item,
                from_enctype=etpConst['conf_raw_encoding'])

        # collision check: do not touch files still owned by other
        # installed packages
        if col_protect > 0:
            if inst_repo.isFileAvailable(item) \
                    and os.path.isfile(sys_root_item_encoded):
                # in this way we filter out directories
                colliding_path_messages.add(sys_root_item)
                not_removed_due_to_collisions.add(item)
                continue

        protected = False
        in_mask = False
        if not remove_config:
            protected_item_test = sys_root_item
            (in_mask, protected, _x,
             do_continue) = self._handle_config_protect(
                protect, mask, protectskip, None, protected_item_test,
                do_allocation_check=False, do_quiet=True)
            if do_continue:
                protected = True

        # when files have not been modified by the user
        # and they are inside a config protect directory
        # we could even remove them directly
        if in_mask:
            oldprot_md5 = automerge_metadata.get(item)
            if oldprot_md5:
                try:
                    in_system_md5 = entropy.tools.md5sum(
                        protected_item_test)
                except (OSError, IOError) as err:
                    if err.errno != errno.ENOENT:
                        raise
                    # file vanished: use a sentinel that can never
                    # match a real md5 digest
                    in_system_md5 = "?"

                if oldprot_md5 == in_system_md5:
                    prot_msg = _("Removing config file, never modified")
                    mytxt = "%s: %s" % (
                        darkgreen(prot_msg),
                        blue(item),
                    )
                    self._entropy.output(mytxt, importance=1,
                        level="info", header=red(" ## "))
                    protected = False
                    do_continue = False

        # Is file or directory a protected item?
        if protected:
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['verbose_loglevel_id'],
                "[remove] Protecting config file: %s" % (sys_root_item,))
            mytxt = "[%s] %s: %s" % (
                red(_("remove")),
                brown(_("Protecting config file")),
                sys_root_item,
            )
            self._entropy.output(mytxt, importance=1,
                level="warning", header=red(" ## "))
            continue

        try:
            os.lstat(sys_root_item_encoded)
        except OSError as err:
            if err.errno in (errno.ENOENT, errno.ENOTDIR):
                continue # skip file, does not exist
            raise
        except UnicodeEncodeError:
            msg = _("This package contains a badly encoded file !!!")
            mytxt = brown(msg)
            self._entropy.output(red("QA: ") + mytxt, importance=1,
                level="warning", header=darkred(" ## "))
            continue # file has a really bad encoding

        if os.path.isdir(sys_root_item_encoded) and \
                os.path.islink(sys_root_item_encoded):
            # S_ISDIR returns False for directory symlinks,
            # so using os.path.isdir valid directory symlink
            if sys_root_item not in directories_cache:
                # collect for Trigger
                affected_directories.add(item)
                directories.add((sys_root_item, "link"))
                directories_cache.add(sys_root_item)
            continue

        if os.path.isdir(sys_root_item_encoded):
            # plain directory
            if sys_root_item not in directories_cache:
                # collect for Trigger
                affected_directories.add(item)
                directories.add((sys_root_item, "dir"))
                directories_cache.add(sys_root_item)
            continue

        # files, symlinks or not
        # just a file or symlink or broken
        # directory symlink (remove now)

        # skip file removal if item is a preserved library.
        if item in preserved_lib_paths:
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['normal_loglevel_id'],
                "[remove] skipping removal of: %s" % (sys_root_item,))
            continue

        try:
            os.remove(sys_root_item_encoded)
        except OSError as err:
            # removal failure is logged but never fatal
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['normal_loglevel_id'],
                "[remove] Unable to remove %s, error: %s" % (
                    sys_root_item, err,))
            continue

        # collect for Trigger
        dir_name = os.path.dirname(item)
        affected_directories.add(dir_name)

        # account for info files, if any
        if dir_name in info_dirs:
            for _ext in self._INFO_EXTS:
                if item.endswith(_ext):
                    affected_infofiles.add(item)
                    break

        # add its parent directory
        dirobj = const_convert_to_unicode(
            os.path.dirname(sys_root_item_encoded))
        if dirobj not in directories_cache:
            if os.path.isdir(dirobj) and os.path.islink(dirobj):
                directories.add((dirobj, "link"))
            elif os.path.isdir(dirobj):
                directories.add((dirobj, "dir"))
            directories_cache.add(dirobj)
def __env_setup(self, stage, pkgdata):
    """
    Build the environment dictionary passed to external trigger
    scripts: a copy of os.environ extended with package metadata
    (CATEGORY, PN, PV, ...) encoded as raw utf-8 strings.
    """
    # mandatory variables
    category = pkgdata.get('category')
    category = const_convert_to_rawstring(category, from_enctype = "utf-8")

    pn = pkgdata.get('name')
    pn = const_convert_to_rawstring(pn, from_enctype = "utf-8")

    pv_utf = pkgdata.get('version')
    pv = const_convert_to_rawstring(pv_utf, from_enctype = "utf-8")

    pr = entropy.dep.dep_get_spm_revision(pv_utf)
    pvr = pv
    # NOTE(review): two suspicious points here. (1) appending only
    # when pr == "r0" looks inverted w.r.t. the Portage PVR
    # convention (PVR = PV when revision is r0) — confirm intent.
    # (2) on Python 3, pvr is bytes at this point, so "+=" with a
    # str would raise TypeError — verify this code path.
    if pr == "r0":
        pvr += "-%s" % (pr,)
    pvr = const_convert_to_rawstring(pvr, from_enctype = "utf-8")
    pr = const_convert_to_rawstring(pr, from_enctype = "utf-8")

    pet = pkgdata.get('versiontag')
    pet = const_convert_to_rawstring(pet, from_enctype = "utf-8")

    per = pkgdata.get('revision')
    per = const_convert_to_rawstring(per, from_enctype = "utf-8")

    etp_branch = pkgdata.get('branch')
    etp_branch = const_convert_to_rawstring(etp_branch,
        from_enctype = "utf-8")

    slot = pkgdata.get('slot')
    slot = const_convert_to_rawstring(slot, from_enctype = "utf-8")

    pkgatom = pkgdata.get('atom')
    pkgkey = entropy.dep.dep_getkey(pkgatom)
    # everything after "category/name-": version+revision+tag
    pvrte = pkgatom[len(pkgkey)+1:]
    pvrte = const_convert_to_rawstring(pvrte, from_enctype = "utf-8")

    etpapi = pkgdata.get('etpapi')
    etpapi = const_convert_to_rawstring(etpapi, from_enctype = "utf-8")

    p = pkgatom
    p = const_convert_to_rawstring(p, from_enctype = "utf-8")

    chost, cflags, cxxflags = pkgdata.get('chost'), \
        pkgdata.get('cflags'), pkgdata.get('cxxflags')
    # these may be missing from the metadata, default to empty
    if chost is None:
        chost = ""
    if cflags is None:
        cflags = ""
    if cxxflags is None:
        cxxflags = ""
    chost = const_convert_to_rawstring(chost, from_enctype = "utf-8")
    cflags = const_convert_to_rawstring(cflags, from_enctype = "utf-8")
    cxxflags = const_convert_to_rawstring(cxxflags, from_enctype = "utf-8")

    # Not mandatory variables
    unpackdir = pkgdata.get('unpackdir', '')
    imagedir = pkgdata.get('imagedir', '')
    # directories the sandbox is allowed to write to
    sb_dirs = [unpackdir, imagedir]
    sb_write = const_convert_to_rawstring(':'.join(sb_dirs),
        from_enctype = "utf-8")

    myenv = {
        "ETP_API": etpSys['api'],
        "ETP_STAGE": stage, # entropy trigger stage
        "ETP_PHASE": self.__get_sh_stage(stage), # entropy trigger phase
        "ETP_BRANCH": etp_branch,
        "CATEGORY": category, # package category
        "PN": pn, # package name
        "PV": pv, # package version
        "PR": pr, # package revision (portage)
        "PVR": pvr, # package version+revision
        # package version+revision+entropy tag+entropy rev
        "PVRTE": pvrte,
        "PER": per, # package entropy revision
        "PET": pet, # package entropy tag
        "SLOT": slot, # package slot
        "PAPI": etpapi, # package entropy api
        "P": p, # complete package atom
        "WORKDIR": unpackdir, # temporary package workdir
        "B": unpackdir, # unpacked binary package directory?
        # package unpack destination (before merging to live)
        "D": imagedir,
        # entropy temporary directory
        "ENTROPY_TMPDIR": etpConst['entropyunpackdir'],
        "CFLAGS": cflags, # compile flags
        "CXXFLAGS": cxxflags, # compile flags
        "CHOST": chost, # *nix CHOST
        "ROOT": etpConst['systemroot'],
        "SANDBOX_WRITE": sb_write,
    }
    sysenv = os.environ.copy()
    sysenv.update(myenv)
    return sysenv
def allocate_protected_file(package_file_path, destination_file_path):
    """
    Allocate a configuration protected file. This method returns a new
    destination_file_path value that is used by Entropy Client code to
    merge file at package_file_path to live system.
    This method offers basic support for Entropy ability to protect user
    configuration files against overwrites. Any subclass can hook code
    here in order to trigger extra actions on every acknowledged path
    modification.

    @param package_file_path: a valid file path pointing to the file
        that Entropy Client is going to move to destination_file_path
    @type package_file_path: string
    @param destination_file_path: the default destination path for given
        package_file_path. It points to the live system.
    @type destination_file_path: string
    @return: Tuple (of length 2) composed by (1) a new destination file
        path. Please note that it can be the same of the one passed
        (destination_file_path) if no protection is taken (for eg. when
        md5 of proposed_file_path and destination_file_path is the same)
        and (2) a bool informing if the function actually protected the
        destination file. Unfortunately, the bool bit is still required
        in order to provide a valid new destination_file_path in any
        case.
    @rtype: tuple
    """
    # On Python 2, os functions want raw (byte) string paths
    pkg_path_os = package_file_path
    dest_path_os = destination_file_path
    if not const_is_python3():
        pkg_path_os = const_convert_to_rawstring(package_file_path)
        dest_path_os = const_convert_to_rawstring(destination_file_path)

    if os.path.isfile(dest_path_os) and \
        os.path.isfile(pkg_path_os):
        old = entropy.tools.md5sum(package_file_path)
        new = entropy.tools.md5sum(destination_file_path)
        if old == new:
            # same content already live: no protection needed
            return destination_file_path, False

    dest_dirname = os.path.dirname(destination_file_path)
    dest_basename = os.path.basename(destination_file_path)

    counter = -1
    newfile = ""
    newfile_os = newfile
    previousfile = ""
    previousfile_os = previousfile
    while True:
        counter += 1
        # Zero-pad both counters to 4 digits. The previous manual
        # padding loop prepended (4 - len(current)) zeros to the
        # previous counter as well, yielding "009" instead of "0009"
        # when the counter crossed a digit boundary (10, 100, 1000),
        # so the real previous ._cfgNNNN_ file was never matched.
        # str.zfill(4) produces identical values for 0-9 and fixes the
        # boundary cases; values with >4 digits pass through unchanged
        # exactly as before.
        txtcounter = str(counter).zfill(4)
        oldtxtcounter = str(counter - 1).zfill(4)
        # candidate protected path: ._cfgNNNN_<basename>
        newfile = os.path.join(dest_dirname,
            "._cfg%s_%s" % (txtcounter, dest_basename,))
        if counter > 0:
            previousfile = os.path.join(dest_dirname,
                "._cfg%s_%s" % (oldtxtcounter, dest_basename,))
        else:
            previousfile = os.path.join(dest_dirname,
                "._cfg0000_%s" % (dest_basename,))
        newfile_os = newfile
        if not const_is_python3():
            newfile_os = const_convert_to_rawstring(newfile)
        previousfile_os = previousfile
        if not const_is_python3():
            previousfile_os = const_convert_to_rawstring(previousfile)
        if not os.path.lexists(newfile_os):
            # found a free ._cfgNNNN_ slot
            break

    if not newfile:
        newfile = os.path.join(dest_dirname,
            "._cfg0000_%s" % (dest_basename,))
    else:
        if os.path.exists(previousfile_os):
            # compare package_file_path with previousfile
            new = entropy.tools.md5sum(package_file_path)
            old = entropy.tools.md5sum(previousfile)
            if new == old:
                return previousfile, False

            # compare old and new, if they match,
            # suggest previousfile directly
            new = entropy.tools.md5sum(destination_file_path)
            old = entropy.tools.md5sum(previousfile)
            if new == old:
                return previousfile, False

    return newfile, True
def __env_setup(self, stage, pkgdata):
    """
    Build the environment dictionary passed to package trigger scripts.

    Package metadata is exported through Portage-style variables
    (CATEGORY, PN, PV, PR, PVR, ...) plus Entropy-specific ones
    (ETP_*, PVRTE, PER, PET, PAPI). Every exported value is converted
    to a raw (byte) string before being merged into a copy of
    os.environ.

    @param stage: entropy trigger stage identifier
    @type stage: string
    @param pkgdata: package metadata
    @type pkgdata: dict
    @return: copy of os.environ updated with the trigger variables
    @rtype: dict
    """
    # mandatory variables
    category = pkgdata.get('category')
    category = const_convert_to_rawstring(category, from_enctype="utf-8")

    pn = pkgdata.get('name')
    pn = const_convert_to_rawstring(pn, from_enctype="utf-8")

    pv_utf = pkgdata.get('version')
    pv = const_convert_to_rawstring(pv_utf, from_enctype="utf-8")

    pr = entropy.dep.dep_get_spm_revision(pv_utf)
    # Build PVR on the unicode version string and convert it once at
    # the end: "pvr = pv" (bytes) followed by "pvr += '-%s' % (pr, )"
    # (str) raises TypeError on Python 3.
    pvr = pv_utf
    # NOTE(review): the suffix is appended only when the revision is
    # "r0", presumably because versions carrying a real revision
    # already embed "-rN" -- confirm against dep_get_spm_revision()
    # semantics.
    if pr == "r0":
        pvr += "-%s" % (pr, )
    pvr = const_convert_to_rawstring(pvr, from_enctype="utf-8")
    pr = const_convert_to_rawstring(pr, from_enctype="utf-8")

    pet = pkgdata.get('versiontag')
    pet = const_convert_to_rawstring(pet, from_enctype="utf-8")

    per = pkgdata.get('revision')
    per = const_convert_to_rawstring(per, from_enctype="utf-8")

    etp_branch = pkgdata.get('branch')
    etp_branch = const_convert_to_rawstring(etp_branch,
                                            from_enctype="utf-8")

    slot = pkgdata.get('slot')
    slot = const_convert_to_rawstring(slot, from_enctype="utf-8")

    pkgatom = pkgdata.get('atom')
    pkgkey = entropy.dep.dep_getkey(pkgatom)
    # strip the "category/name-" prefix: version+revision+tag+etp rev
    pvrte = pkgatom[len(pkgkey) + 1:]
    pvrte = const_convert_to_rawstring(pvrte, from_enctype="utf-8")

    etpapi = pkgdata.get('etpapi')
    etpapi = const_convert_to_rawstring(etpapi, from_enctype="utf-8")

    p = pkgatom
    p = const_convert_to_rawstring(p, from_enctype="utf-8")

    chost, cflags, cxxflags = pkgdata.get('chost'), \
        pkgdata.get('cflags'), pkgdata.get('cxxflags')
    # compiler metadata may be missing: default to empty strings
    if chost is None:
        chost = ""
    if cflags is None:
        cflags = ""
    if cxxflags is None:
        cxxflags = ""
    chost = const_convert_to_rawstring(chost, from_enctype="utf-8")
    cflags = const_convert_to_rawstring(cflags, from_enctype="utf-8")
    cxxflags = const_convert_to_rawstring(cxxflags, from_enctype="utf-8")

    # Not mandatory variables
    unpackdir = pkgdata.get('unpackdir', '')
    imagedir = pkgdata.get('imagedir', '')
    sb_dirs = [unpackdir, imagedir]
    # colon-separated list of paths the sandbox may write to
    sb_write = const_convert_to_rawstring(':'.join(sb_dirs),
                                          from_enctype="utf-8")

    myenv = {
        "ETP_API": etpSys['api'],
        "ETP_STAGE": stage,  # entropy trigger stage
        "ETP_PHASE": self.__get_sh_stage(stage),  # entropy trigger phase
        "ETP_BRANCH": etp_branch,
        "CATEGORY": category,  # package category
        "PN": pn,  # package name
        "PV": pv,  # package version
        "PR": pr,  # package revision (portage)
        "PVR": pvr,  # package version+revision
        # package version+revision+entropy tag+entropy rev
        "PVRTE": pvrte,
        "PER": per,  # package entropy revision
        "PET": pet,  # package entropy tag
        "SLOT": slot,  # package slot
        "PAPI": etpapi,  # package entropy api
        "P": p,  # complete package atom
        "WORKDIR": unpackdir,  # temporary package workdir
        "B": unpackdir,  # unpacked binary package directory?
        # package unpack destination (before merging to live)
        "D": imagedir,
        # entropy temporary directory
        "ENTROPY_TMPDIR": etpConst['entropyunpackdir'],
        "CFLAGS": cflags,  # compile flags
        "CXXFLAGS": cxxflags,  # compile flags
        "CHOST": chost,  # *nix CHOST
        "ROOT": etpConst['systemroot'],
        "SANDBOX_WRITE": sb_write,
    }
    sysenv = os.environ.copy()
    sysenv.update(myenv)
    return sysenv
def handle_exception(exc_class, exc_instance, exc_tb):
    """
    Top-level unhandled-exception hook.

    Prints a human-friendly crash report, writes traceback data to a
    temporary file and optionally submits it to the UGC error report
    service. Terminates the process via os._exit() with a status code
    describing the failure class; returns without exiting only for
    SystemExit and for broken-pipe IOErrors.

    @param exc_class: exception class object
    @param exc_instance: exception instance
    @param exc_tb: traceback object
    """
    # restore original exception handler, to avoid loops
    uninstall_exception_handler()

    _text = TextInterface()

    # corrupted installed-packages repository: dedicated exit status
    if exc_class is SystemDatabaseError:
        _text.output(
            darkred(_("Installed packages repository corrupted. "
                "Please re-generate it")),
            importance=1,
            level="error")
        os._exit(101)

    # "expected" operational failures: print and bail out, no report
    generic_exc_classes = (OnlineMirrorError, RepositoryError,
        PermissionDenied, FileNotFound, SPMError, SystemError)
    if exc_class in generic_exc_classes:
        _text.output(
            "%s: %s" % (exc_instance, darkred(_("Cannot continue")),),
            importance=1,
            level="error")
        os._exit(1)

    # normal interpreter shutdown: let it propagate
    if exc_class is SystemExit:
        return

    if issubclass(exc_class, IOError): # in Python 3.3+ it's BrokenPipeError
        if exc_instance.errno == errno.EPIPE:
            # reader went away (e.g. piped into head): silently ignore
            return

    if exc_class is KeyboardInterrupt:
        os._exit(1)

    t_back = entropy.tools.get_traceback(tb_obj = exc_tb)
    if const_debug_enabled():
        # debug mode: restore real std streams and drop into pdb
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
        sys.stdin = sys.__stdin__
        entropy.tools.print_exception(tb_data = exc_tb)
        pdb.set_trace()

    # resource exhaustion gets a blunt message and its own exit status
    if exc_class in (IOError, OSError):
        if exc_instance.errno == errno.ENOSPC:
            print_generic(t_back)
            _text.output(
                "%s: %s" % (
                    exc_instance,
                    darkred(_("Your hard drive is full! Your fault!")),),
                importance=1,
                level="error")
            os._exit(5)
        elif exc_instance.errno == errno.ENOMEM:
            print_generic(t_back)
            _text.output(
                "%s: %s" % (
                    exc_instance,
                    darkred(_("No more memory dude! Your fault!")),),
                importance=1,
                level="error")
            os._exit(5)

    _text.output(
        darkred(_("Hi. My name is Bug Reporter. "
            "I am sorry to inform you that the program crashed. "
            "Well, you know, shit happens.")),
        importance=1,
        level="error")
    _text.output(
        darkred(_("But there's something you could "
            "do to help me to be a better application.")),
        importance=1,
        level="error")
    _text.output(
        darkred(
            _("-- BUT, DO NOT SUBMIT THE SAME REPORT MORE THAN ONCE --")),
        importance=1,
        level="error")
    _text.output(
        darkred(
            _("Now I am showing you what happened. "
              "Don't panic, I'm here to help you.")),
        importance=1,
        level="error")

    entropy.tools.print_exception(tb_data = exc_tb)
    exception_data = entropy.tools.print_exception(silent = True,
        tb_data = exc_tb, all_frame_data = True)
    exception_tback_raw = const_convert_to_rawstring(t_back)

    # dump the crash report to a temporary file so the user can mail it
    error_fd, error_file = None, None
    try:
        error_fd, error_file = const_mkstemp(
            prefix="entropy.error.report.",
            suffix=".txt")
        with os.fdopen(error_fd, "wb") as ferror:
            ferror.write(
                const_convert_to_rawstring(
                    "\nRevision: %s\n\n" % (
                        etpConst['entropyversion'],))
                )
            ferror.write(
                exception_tback_raw)
            ferror.write(
                const_convert_to_rawstring("\n\n"))
            ferror.write(
                const_convert_to_rawstring(''.join(exception_data)))
            ferror.write(
                const_convert_to_rawstring("\n"))
    except (OSError, IOError) as err:
        _text.output(
            "%s: %s" % (
                err, darkred(
                    _("Oh well, I cannot even write to TMPDIR. "
                      "So, please copy the error and "
                      "mail [email protected]."))),
            importance=1,
            level="error")
        os._exit(1)
    finally:
        # NOTE(review): the with-block above already closes error_fd;
        # this extra close then hits an already-closed descriptor
        # (EBADF swallowed below). Presumably it only matters when
        # os.fdopen() itself fails -- confirm and consider clearing
        # error_fd once ownership is transferred to the file object.
        if error_fd is not None:
            try:
                os.close(error_fd)
            except OSError:
                pass

    _text.output("", level="error")

    # ask for permission before submitting anything
    ask_msg = _("Erm... Can I send the error, "
        "along with some other information\nabout your "
        "hardware to my creators so they can fix me? "
        "(Your IP will be logged)")
    rc = _text.ask_question(ask_msg)
    if rc == _("No"):
        _text.output(
            darkgreen(_("Ok, ok ok ok... Sorry!")),
            level="error")
        os._exit(2)

    _text.output(
        darkgreen(
            _("If you want to be contacted back "
              "(and actively supported), also answer "
              "the questions below:")
            ),
        level="error")

    try:
        name = readtext(_("Your Full name:"))
        email = readtext(_("Your E-Mail address:"))
        description = readtext(_("What you were doing:"))
    except EOFError:
        os._exit(2)

    # build the UGC error report; any setup failure downgrades to the
    # "mail the file yourself" path below
    try:
        from entropy.client.interfaces.qa import UGCErrorReport
        from entropy.core.settings.base import SystemSettings
        _settings = SystemSettings()
        repository_id = _settings['repositories']['default_repository']
        error = UGCErrorReport(repository_id)
    except (OnlineMirrorError, AttributeError, ImportError,):
        error = None

    result = None
    if error is not None:
        error.prepare(exception_tback_raw, name, email,
            '\n'.join([x for x in exception_data]),
            description)
        result = error.submit()

    if result:
        _text.output(
            darkgreen(
                _("Thank you very much. The error has been "
                  "reported and hopefully, the problem will "
                  "be solved as soon as possible.")),
            level="error")
    else:
        _text.output(
            darkred(_("Ugh. Cannot send the report. "
                "Please mail the file below "
                "to [email protected].")),
            level="error")
        _text.output("", level="error")
        _text.output("==> %s" % (error_file,), level="error")
        _text.output("", level="error")
def handle_exception(exc_class, exc_instance, exc_tb):
    """
    Top-level unhandled-exception hook.

    Prints a human-friendly crash report, writes traceback data to a
    temporary file and optionally submits it to the UGC error report
    service. Terminates the process via os._exit() with a status code
    describing the failure class; returns without exiting only for
    SystemExit and for broken-pipe IOErrors.

    @param exc_class: exception class object
    @param exc_instance: exception instance
    @param exc_tb: traceback object
    """
    # restore original exception handler, to avoid loops
    uninstall_exception_handler()

    _text = TextInterface()

    # corrupted installed-packages repository: dedicated exit status
    if exc_class is SystemDatabaseError:
        _text.output(darkred(
            _("Installed packages repository corrupted. "
              "Please re-generate it")),
            importance=1, level="error")
        os._exit(101)

    # "expected" operational failures: print and bail out, no report
    generic_exc_classes = (OnlineMirrorError, RepositoryError,
                          PermissionDenied, FileNotFound, SPMError,
                          SystemError)
    if exc_class in generic_exc_classes:
        _text.output("%s: %s" % (
            exc_instance, darkred(_("Cannot continue")), ),
            importance=1, level="error")
        os._exit(1)

    # normal interpreter shutdown: let it propagate
    if exc_class is SystemExit:
        return

    if issubclass(exc_class, IOError):  # in Python 3.3+ it's BrokenPipeError
        if exc_instance.errno == errno.EPIPE:
            # reader went away (e.g. piped into head): silently ignore
            return

    if exc_class is KeyboardInterrupt:
        os._exit(1)

    t_back = entropy.tools.get_traceback(tb_obj=exc_tb)
    if const_debug_enabled():
        # debug mode: restore real std streams and drop into pdb
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
        sys.stdin = sys.__stdin__
        entropy.tools.print_exception(tb_data=exc_tb)
        pdb.set_trace()

    # resource exhaustion gets a blunt message and its own exit status
    if exc_class in (IOError, OSError):
        if exc_instance.errno == errno.ENOSPC:
            print_generic(t_back)
            _text.output("%s: %s" % (
                exc_instance,
                darkred(_("Your hard drive is full! Your fault!")), ),
                importance=1, level="error")
            os._exit(5)
        elif exc_instance.errno == errno.ENOMEM:
            print_generic(t_back)
            _text.output("%s: %s" % (
                exc_instance,
                darkred(_("No more memory dude! Your fault!")), ),
                importance=1, level="error")
            os._exit(5)

    _text.output(darkred(
        _("Hi. My name is Bug Reporter. "
          "I am sorry to inform you that the program crashed. "
          "Well, you know, shit happens.")),
        importance=1, level="error")
    _text.output(darkred(
        _("But there's something you could "
          "do to help me to be a better application.")),
        importance=1, level="error")
    _text.output(darkred(
        _("-- BUT, DO NOT SUBMIT THE SAME REPORT MORE THAN ONCE --")),
        importance=1, level="error")
    _text.output(darkred(
        _("Now I am showing you what happened. "
          "Don't panic, I'm here to help you.")),
        importance=1, level="error")

    entropy.tools.print_exception(tb_data=exc_tb)
    exception_data = entropy.tools.print_exception(silent=True,
        tb_data=exc_tb, all_frame_data=True)
    exception_tback_raw = const_convert_to_rawstring(t_back)

    # dump the crash report to a temporary file so the user can mail it
    error_fd, error_file = None, None
    try:
        error_fd, error_file = const_mkstemp(prefix="entropy.error.report.",
                                             suffix=".txt")
        with os.fdopen(error_fd, "wb") as ferror:
            ferror.write(
                const_convert_to_rawstring("\nRevision: %s\n\n" % (
                    etpConst['entropyversion'], )))
            ferror.write(exception_tback_raw)
            ferror.write(const_convert_to_rawstring("\n\n"))
            ferror.write(const_convert_to_rawstring(''.join(exception_data)))
            ferror.write(const_convert_to_rawstring("\n"))
    except (OSError, IOError) as err:
        _text.output("%s: %s" % (err, darkred(
            _("Oh well, I cannot even write to TMPDIR. "
              "So, please copy the error and "
              "mail [email protected]."))),
            importance=1, level="error")
        os._exit(1)
    finally:
        # NOTE(review): the with-block above already closes error_fd;
        # this extra close then hits an already-closed descriptor
        # (EBADF swallowed below). Presumably it only matters when
        # os.fdopen() itself fails -- confirm and consider clearing
        # error_fd once ownership is transferred to the file object.
        if error_fd is not None:
            try:
                os.close(error_fd)
            except OSError:
                pass

    _text.output("", level="error")

    # ask for permission before submitting anything
    ask_msg = _("Erm... Can I send the error, "
                "along with some other information\nabout your "
                "hardware to my creators so they can fix me? "
                "(Your IP will be logged)")
    rc = _text.ask_question(ask_msg)
    if rc == _("No"):
        _text.output(darkgreen(_("Ok, ok ok ok... Sorry!")),
                     level="error")
        os._exit(2)

    _text.output(darkgreen(
        _("If you want to be contacted back "
          "(and actively supported), also answer "
          "the questions below:")),
        level="error")

    try:
        name = readtext(_("Your Full name:"))
        email = readtext(_("Your E-Mail address:"))
        description = readtext(_("What you were doing:"))
    except EOFError:
        os._exit(2)

    # build the UGC error report; any setup failure downgrades to the
    # "mail the file yourself" path below
    try:
        from entropy.client.interfaces.qa import UGCErrorReport
        from entropy.core.settings.base import SystemSettings
        _settings = SystemSettings()
        repository_id = _settings['repositories']['default_repository']
        error = UGCErrorReport(repository_id)
    except (
            OnlineMirrorError,
            AttributeError,
            ImportError, ):
        error = None

    result = None
    if error is not None:
        error.prepare(exception_tback_raw, name, email,
                      '\n'.join([x for x in exception_data]),
                      description)
        result = error.submit()

    if result:
        _text.output(darkgreen(
            _("Thank you very much. The error has been "
              "reported and hopefully, the problem will "
              "be solved as soon as possible.")),
            level="error")
    else:
        _text.output(darkred(
            _("Ugh. Cannot send the report. "
              "Please mail the file below "
              "to [email protected].")),
            level="error")
        _text.output("", level="error")
        _text.output("==> %s" % (error_file, ), level="error")
        _text.output("", level="error")
def _create(self, entropy_server):
    """
    Actual Eit key create code.

    Generates a GPG keypair for the current repository, after asking
    the user for e-mail, expiration and passphrase through the
    interactive input box.

    @param entropy_server: Entropy Server interface instance
    @return: 0 on success, 1 on abort or when GPG support is
        unavailable
    @rtype: int
    """
    repo_sec = self._get_gpg(entropy_server)
    if repo_sec is None:
        # GPG support not available
        return 1
    repo = entropy_server.repository()
    entropy_server.output("%s: %s" % (
        blue(_("Creating keys for repository")),
        purple(repo),))

    # warn (and let the user abort) if a keypair already exists
    if repo_sec.is_keypair_available(repo):
        entropy_server.output("%s: %s" % (
            blue(_("Another key already exists for repository")),
            purple(repo),
            ),
            level = "warning"
        )
        answer = entropy_server.ask_question(
            _("Would you like to continue?"))
        if answer == _("No"):
            return 1

    # input_box validators: echo, integer-only, accept-anything
    def mycb(sstr):
        return sstr

    def mycb_int(sstr):
        try:
            int(sstr)
        except ValueError:
            return False
        return True

    def mycb_ok(sstr):
        return True

    input_data = [
        ('name_email', purple(_("Insert e-mail")), mycb, False),
        ('expiration',
            purple(_("Insert expiration days (0=no expiration)")),
            mycb_int, False),
        ('pass',
            purple(_("Insert passphrase (empty=no passphrase)")),
            mycb_ok, False),
    ]
    data = entropy_server.input_box(
        blue("%s") % (_("Repository GPG keypair creation"),),
        input_data, cancel_button = True)
    if not data:
        # user cancelled the dialog
        return 1
    elif not isinstance(data, dict):
        return 1

    # empty passphrase means "no passphrase"
    if not data['pass']:
        data['pass'] = None
    else:
        data['pass'] = const_convert_to_rawstring(data['pass'])
    key_fp = repo_sec.create_keypair(repo, passphrase = data['pass'],
        name_email = data['name_email'],
        expiration_days = int(data['expiration']))

    entropy_server.output("%s: %s" % (
        darkgreen(_("Produced GPG key with fingerprint")),
        bold(key_fp),
        ),
        level = "info"
    )
    entropy_server.output("%s: %s" % (
        darkgreen(_("Now you should sign all the packages in it")),
        blue(repo),
        ),
        level = "warning"
    )
    entropy_server.output(
        darkgreen(
            _("Generate a revoke key and store it in a safe place")),
        level = "warning"
    )
    # show the exact gpg command line for generating a revocation cert
    entropy_server.output(
        "# gpg --homedir '%s' --armor --output revoke.asc --gen-revoke '%s'" % (
            Repository.GPG_HOME, key_fp),
        level = "info"
    )
    entropy_server.output("%s" % (
        darkgreen(
            _("You may want to send your keys to a key server")),
        ),
        level = "info"
    )

    # remove signatures from repository database: packages must be
    # re-signed with the newly created key
    dbconn = entropy_server.open_server_repository(
        repo, read_only = False)
    dbconn.dropGpgSignatures()
    return 0
def _generic_post_handler(self, function_name, params, file_params,
    timeout):
    """
    Given a function name and the request data (dict format), do the actual
    HTTP request and return the response object to caller.
    WARNING: params and file_params dict keys must be ASCII string only.

    @param function_name: name of the function that called this method
    @type function_name: string
    @param params: POST parameters
    @type params: dict
    @param file_params: mapping composed by file names as key and tuple
        composed by (file_name, file object) as values
    @type file_params: dict
    @param timeout: socket timeout
    @type timeout: float
    @return: tuple composed by the server response string or None
        (in case of empty response) and the HTTPResponse object
        (useful for checking response status)
    @rtype: tuple
    """
    if timeout is None:
        timeout = self._default_timeout_secs
    multipart_boundary = "---entropy.services,boundary---"
    request_path = self._request_path.rstrip("/") + "/" + function_name
    const_debug_write(__name__,
        "WebService _generic_post_handler, calling: %s at %s -- %s,"
        " tx_callback: %s, timeout: %s" % (self._request_host, request_path,
            params, self._transfer_callback, timeout,))
    connection = None
    try:
        if self._request_protocol == "http":
            connection = httplib.HTTPConnection(self._request_host,
                timeout=timeout)
        elif self._request_protocol == "https":
            connection = httplib.HTTPSConnection(self._request_host,
                timeout=timeout)
        else:
            raise WebService.RequestError("invalid request protocol",
                method=function_name)

        headers = {
            "Accept": "text/plain",
            "User-Agent": self._generate_user_agent(function_name),
        }

        if file_params is None:
            file_params = {}
        # autodetect file parameters in params
        for k in list(params.keys()):
            if isinstance(params[k], (tuple, list)) \
                and (len(params[k]) == 2):
                f_name, f_obj = params[k]
                # NOTE(review): "file" is the Python 2 builtin type; on
                # Python 3 this raises NameError when a 2-element
                # tuple/list parameter reaches this branch -- confirm
                # whether this code path is Python 2 only.
                if isinstance(f_obj, file):
                    file_params[k] = params[k]
                    del params[k]
            elif const_isunicode(params[k]):
                # convert to raw string
                params[k] = const_convert_to_rawstring(params[k],
                    from_enctype="utf-8")
            elif not const_isstring(params[k]):
                # invalid ?
                if params[k] is None:
                    # will be converted to ""
                    continue
                int_types = const_get_int()
                supported_types = (float, list, tuple) + int_types
                if not isinstance(params[k], supported_types):
                    raise WebService.UnsupportedParameters(
                        "%s is unsupported type %s" % (k,
                            type(params[k])))
                list_types = (list, tuple)
                if isinstance(params[k], list_types):
                    # not supporting nested lists
                    non_str = [x for x in params[k] if not \
                        const_isstring(x)]
                    if non_str:
                        raise WebService.UnsupportedParameters(
                            "%s is unsupported type %s" % (k,
                                type(params[k])))

        body = None
        if not file_params:
            # plain form-encoded POST
            headers["Content-Type"] = "application/x-www-form-urlencoded"
            encoded_params = urllib_parse.urlencode(params)
            data_size = len(encoded_params)
            if self._transfer_callback is not None:
                self._transfer_callback(0, data_size, False)

            if data_size < 65536:
                # small payload: hand the whole body to request()
                try:
                    connection.request("POST", request_path,
                        encoded_params, headers)
                except socket.error as err:
                    raise WebService.RequestError(err,
                        method=function_name)
            else:
                # large payload: stream it in 64K chunks so the
                # transfer callback can report progress
                try:
                    connection.request("POST", request_path, None,
                        headers)
                except socket.error as err:
                    raise WebService.RequestError(err,
                        method=function_name)
                sio = StringIO(encoded_params)
                data_size = len(encoded_params)
                while True:
                    chunk = sio.read(65535)
                    if not chunk:
                        break
                    try:
                        connection.send(chunk)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method=function_name)
                    if self._transfer_callback is not None:
                        self._transfer_callback(sio.tell(),
                            data_size, False)
            # for both ways, send a signal through the callback
            if self._transfer_callback is not None:
                self._transfer_callback(data_size, data_size, False)
        else:
            # file upload: multipart/form-data body built on disk by
            # _encode_multipart_form(), streamed in 64K chunks
            headers["Content-Type"] = "multipart/form-data; boundary=" + \
                multipart_boundary
            body_file, body_fpath = self._encode_multipart_form(params,
                file_params, multipart_boundary)
            try:
                data_size = body_file.tell()
                headers["Content-Length"] = str(data_size)
                body_file.seek(0)
                if self._transfer_callback is not None:
                    self._transfer_callback(0, data_size, False)

                try:
                    connection.request("POST", request_path, None,
                        headers)
                except socket.error as err:
                    raise WebService.RequestError(err,
                        method=function_name)

                while True:
                    chunk = body_file.read(65535)
                    if not chunk:
                        break
                    try:
                        connection.send(chunk)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method=function_name)
                    if self._transfer_callback is not None:
                        self._transfer_callback(body_file.tell(),
                            data_size, False)

                if self._transfer_callback is not None:
                    self._transfer_callback(data_size, data_size, False)
            finally:
                # the multipart body lives in a temporary file
                body_file.close()
                os.remove(body_fpath)

        try:
            response = connection.getresponse()
        except socket.error as err:
            raise WebService.RequestError(err, method=function_name)
        const_debug_write(__name__, "WebService.%s(%s), "
            "response header: %s" % (function_name, params,
                response.getheaders(),))
        total_length = response.getheader("Content-Length", "-1")
        try:
            total_length = int(total_length)
        except ValueError:
            # missing or malformed header: unknown length
            total_length = -1
        outcome = const_convert_to_rawstring("")
        current_len = 0
        if self._transfer_callback is not None:
            self._transfer_callback(current_len, total_length, True)

        # read the response body in 64K chunks, reporting progress
        while True:
            try:
                chunk = response.read(65536)
            except socket.error as err:
                raise WebService.RequestError(err, method=function_name)
            if not chunk:
                break
            outcome += chunk
            current_len += len(chunk)
            if self._transfer_callback is not None:
                self._transfer_callback(current_len, total_length, True)

        if self._transfer_callback is not None:
            self._transfer_callback(total_length, total_length, True)

        if const_is_python3():
            # callers expect a unicode string on Python 3
            outcome = const_convert_to_unicode(outcome)
        if not outcome:
            return None, response
        return outcome, response
    except httplib.HTTPException as err:
        raise WebService.RequestError(err, method=function_name)
    finally:
        if connection is not None:
            connection.close()
def _create(self, entropy_server):
    """
    Actual Eit key create code: interactively generate a GPG keypair
    for the current repository and drop stale package signatures.

    @param entropy_server: Entropy Server interface instance
    @return: 0 on success, 1 on abort or when GPG support is
        unavailable
    @rtype: int
    """
    repo_sec = self._get_gpg(entropy_server)
    if repo_sec is None:
        # GPG support not available
        return 1

    repo = entropy_server.repository()
    out = entropy_server.output
    out("%s: %s" % (
        blue(_("Creating keys for repository")),
        purple(repo), ))

    # warn (and let the user abort) if a keypair already exists
    if repo_sec.is_keypair_available(repo):
        out("%s: %s" % (
            blue(_("Another key already exists for repository")),
            purple(repo), ), level="warning")
        if entropy_server.ask_question(
                _("Would you like to continue?")) == _("No"):
            return 1

    # input_box validators: echo, integer-only, accept-anything
    def _echo_cb(sstr):
        return sstr

    def _int_cb(sstr):
        try:
            int(sstr)
        except ValueError:
            return False
        return True

    def _any_cb(sstr):
        return True

    fields = [
        ('name_email', purple(_("Insert e-mail")), _echo_cb, False),
        ('expiration',
         purple(_("Insert expiration days (0=no expiration)")),
         _int_cb, False),
        ('pass',
         purple(_("Insert passphrase (empty=no passphrase)")),
         _any_cb, False),
    ]
    data = entropy_server.input_box(
        blue("%s") % (_("Repository GPG keypair creation"), ),
        fields, cancel_button=True)
    if not data:
        # user cancelled the dialog
        return 1
    if not isinstance(data, dict):
        return 1

    # empty passphrase means "no passphrase"
    if not data['pass']:
        data['pass'] = None
    else:
        data['pass'] = const_convert_to_rawstring(data['pass'])

    key_fp = repo_sec.create_keypair(
        repo,
        passphrase=data['pass'],
        name_email=data['name_email'],
        expiration_days=int(data['expiration']))

    out("%s: %s" % (
        darkgreen(_("Produced GPG key with fingerprint")),
        bold(key_fp), ), level="info")
    out("%s: %s" % (
        darkgreen(_("Now you should sign all the packages in it")),
        blue(repo), ), level="warning")
    out(darkgreen(
        _("Generate a revoke key and store it in a safe place")),
        level="warning")
    # show the exact gpg command line for generating a revocation cert
    out("# gpg --homedir '%s' --armor --output revoke.asc --gen-revoke '%s'" % (
        Repository.GPG_HOME, key_fp), level="info")
    out("%s" % (
        darkgreen(_("You may want to send your keys to a key server")), ),
        level="info")

    # drop stale signatures: packages must be re-signed with the new key
    dbconn = entropy_server.open_server_repository(repo, read_only=False)
    dbconn.dropGpgSignatures()
    return 0
def allocate_protected_file(package_file_path, destination_file_path):
    """
    Allocate a configuration protected file. This method returns a new
    destination_file_path value that is used by Entropy Client code to
    merge file at package_file_path to live system.
    This method offers basic support for Entropy ability to protect user
    configuration files against overwrites. Any subclass can hook code
    here in order to trigger extra actions on every acknowledged path
    modification.

    @param package_file_path: a valid file path pointing to the file
        that Entropy Client is going to move to destination_file_path
    @type package_file_path: string
    @param destination_file_path: the default destination path for given
        package_file_path. It points to the live system.
    @type destination_file_path: string
    @return: Tuple (of length 2) composed by (1) a new destination file
        path. Please note that it can be the same of the one passed
        (destination_file_path) if no protection is taken (for eg. when
        md5 of proposed_file_path and destination_file_path is the same)
        and (2) a bool informing if the function actually protected the
        destination file. Unfortunately, the bool bit is still required
        in order to provide a valid new destination_file_path in any
        case.
    @rtype: tuple
    """
    # On Python 2, os functions want raw (byte) string paths
    pkg_path_os = package_file_path
    dest_path_os = destination_file_path
    if not const_is_python3():
        pkg_path_os = const_convert_to_rawstring(package_file_path)
        dest_path_os = const_convert_to_rawstring(destination_file_path)

    if os.path.isfile(dest_path_os) and \
            os.path.isfile(pkg_path_os):
        old = entropy.tools.md5sum(package_file_path)
        new = entropy.tools.md5sum(destination_file_path)
        if old == new:
            # same content already live: no protection needed
            return destination_file_path, False

    dest_dirname = os.path.dirname(destination_file_path)
    dest_basename = os.path.basename(destination_file_path)

    counter = -1
    newfile = ""
    newfile_os = newfile
    previousfile = ""
    previousfile_os = previousfile
    while True:
        counter += 1
        # Zero-pad both counters to 4 digits. The previous manual
        # padding loop prepended (4 - len(current)) zeros to the
        # previous counter as well, yielding "009" instead of "0009"
        # when the counter crossed a digit boundary (10, 100, 1000),
        # so the real previous ._cfgNNNN_ file was never matched.
        # str.zfill(4) produces identical values for 0-9 and fixes the
        # boundary cases; values with >4 digits pass through unchanged
        # exactly as before.
        txtcounter = str(counter).zfill(4)
        oldtxtcounter = str(counter - 1).zfill(4)
        # candidate protected path: ._cfgNNNN_<basename>
        newfile = os.path.join(
            dest_dirname, "._cfg%s_%s" % (
                txtcounter, dest_basename, ))
        if counter > 0:
            previousfile = os.path.join(
                dest_dirname, "._cfg%s_%s" % (
                    oldtxtcounter, dest_basename, ))
        else:
            previousfile = os.path.join(dest_dirname,
                                        "._cfg0000_%s" % (dest_basename, ))
        newfile_os = newfile
        if not const_is_python3():
            newfile_os = const_convert_to_rawstring(newfile)
        previousfile_os = previousfile
        if not const_is_python3():
            previousfile_os = const_convert_to_rawstring(previousfile)
        if not os.path.lexists(newfile_os):
            # found a free ._cfgNNNN_ slot
            break

    if not newfile:
        newfile = os.path.join(dest_dirname,
                               "._cfg0000_%s" % (dest_basename, ))
    else:
        if os.path.exists(previousfile_os):
            # compare package_file_path with previousfile
            new = entropy.tools.md5sum(package_file_path)
            old = entropy.tools.md5sum(previousfile)
            if new == old:
                return previousfile, False

            # compare old and new, if they match,
            # suggest previousfile directly
            new = entropy.tools.md5sum(destination_file_path)
            old = entropy.tools.md5sum(previousfile)
            if new == old:
                return previousfile, False

    return newfile, True
def _handle_config_protect(self, protect, mask, protectskip, fromfile,
    tofile, do_allocation_check = True, do_quiet = False):
    """
    Handle configuration file protection. This method contains the logic
    for determining if a file should be protected from overwrite.

    @param protect: collection of protected paths (files or directories)
    @param mask: collection of paths excluded from protection
    @param protectskip: paths whose installation/removal must be skipped
        entirely (from client.conf)
    @param fromfile: source (package) file path, or None
    @param tofile: destination (live system) file path
    @param do_allocation_check: when True, also allocate the
        ._cfgNNNN_ destination via the SPM class
    @param do_quiet: suppress user-facing output and logging
    @return: tuple (in_mask, protected, tofile, do_continue); tofile
        may have been rewritten to the allocated ._cfgNNNN_ path
    @rtype: tuple
    """
    protected = False
    do_continue = False
    in_mask = False

    # On Python 2, os functions want raw (byte) string paths
    tofile_os = tofile
    fromfile_os = fromfile
    if not const_is_python3():
        tofile_os = const_convert_to_rawstring(tofile)
        fromfile_os = const_convert_to_rawstring(fromfile)

    # protected when the path itself, its directory, or any ancestor
    # directory up to the filesystem root appears in "protect"
    if tofile in protect:
        protected = True
        in_mask = True
    elif os.path.dirname(tofile) in protect:
        protected = True
        in_mask = True
    else:
        tofile_testdir = os.path.dirname(tofile)
        old_tofile_testdir = None
        # os.path.dirname() converges at the root, ending the walk
        while tofile_testdir != old_tofile_testdir:
            if tofile_testdir in protect:
                protected = True
                in_mask = True
                break
            old_tofile_testdir = tofile_testdir
            tofile_testdir = os.path.dirname(tofile_testdir)

    if protected: # check if perhaps, file is masked, so unprotected
        # same ancestor walk against the "mask" list, which overrides
        # protection
        if tofile in mask:
            protected = False
            in_mask = False
        elif os.path.dirname(tofile) in mask:
            protected = False
            in_mask = False
        else:
            tofile_testdir = os.path.dirname(tofile)
            old_tofile_testdir = None
            while tofile_testdir != old_tofile_testdir:
                if tofile_testdir in mask:
                    protected = False
                    in_mask = False
                    break
                old_tofile_testdir = tofile_testdir
                tofile_testdir = os.path.dirname(tofile_testdir)

    if not os.path.lexists(tofile_os):
        protected = False # file doesn't exist

    # check if it's a text file; only text (configuration-like) files
    # are protected
    if protected:
        protected = entropy.tools.istextfile(tofile)
        in_mask = protected

    if fromfile is not None:
        # lexists and not exists and islink == dangling symlink
        if protected and os.path.lexists(fromfile_os) and (
            not os.path.exists(fromfile_os)) and (
            os.path.islink(fromfile_os)):
            # broken symlink, don't protect
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['normal_loglevel_id'],
                "WARNING!!! Failed to handle file protection for: " \
                "%s, broken symlink in package" % (
                    tofile,
                )
            )
            msg = _("Cannot protect broken symlink")
            mytxt = "%s:" % (
                purple(msg),
            )
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "warning",
                header = brown(" ## ")
            )
            self._entropy.output(
                tofile,
                level = "warning",
                header = brown(" ## ")
            )
            protected = False

    if not protected:
        return in_mask, protected, tofile, do_continue

    ##                  ##
    # file is protected  #
    ##__________________##

    # check if protection is disabled for this element
    if tofile in protectskip:
        self._entropy.logger.log(
            "[Package]",
            etpConst['logging']['normal_loglevel_id'],
            "Skipping config file installation/removal, " \
            "as stated in client.conf: %s" % (tofile,)
        )
        if not do_quiet:
            mytxt = "%s: %s" % (
                _("Skipping file installation/removal"),
                tofile,
            )
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "warning",
                header = darkred(" ## ")
            )
        do_continue = True
        return in_mask, protected, tofile, do_continue

    ##                      ##
    # file is protected (2)  #
    ##______________________##

    # allocate the ._cfgNNNN_ destination, unless an identical copy
    # is already in place
    prot_status = True
    if do_allocation_check:
        spm_class = self._entropy.Spm_class()
        tofile, prot_status = spm_class.allocate_protected_file(fromfile,
            tofile)

    if not prot_status:
        # a protected file with the same content
        # is already in place, so not going to protect
        # the same file twice
        protected = False
        return in_mask, protected, tofile, do_continue

    ##                      ##
    # file is protected (3)  #
    ##______________________##

    # strip the 10-char "._cfgNNNN_" prefix for user-facing messages
    oldtofile = tofile
    if oldtofile.find("._cfg") != -1:
        oldtofile = os.path.join(os.path.dirname(oldtofile),
            os.path.basename(oldtofile)[10:])
    if not do_quiet:
        self._entropy.logger.log(
            "[Package]",
            etpConst['logging']['normal_loglevel_id'],
            "Protecting config file: %s" % (oldtofile,)
        )
        mytxt = red("%s: %s") % (_("Protecting config file"), oldtofile,)
        self._entropy.output(
            mytxt,
            importance = 1,
            level = "warning",
            header = darkred(" ## ")
        )

    return in_mask, protected, tofile, do_continue
def _handle_config_protect(self, protect, mask, protectskip, fromfile,
                           tofile, do_allocation_check=True,
                           do_quiet=False):
    """
    Handle configuration file protection. This method contains the logic
    for determining if a file should be protected from overwrite.

    Decision pipeline: (1) protect-list match (exact path, parent, or any
    ancestor directory); (2) mask-list match revokes protection; (3) the
    target must exist and be a text file; (4) a broken incoming symlink is
    never protected; (5) protectskip disables protection per client.conf;
    (6) optionally the SPM allocates a "._cfg"-style target path.

    @param protect: protected paths (files or directory prefixes)
    @param mask: explicitly unprotected paths, overriding protect
    @param protectskip: paths with protection disabled via client.conf
    @param fromfile: incoming (package) file path, may be None
    @param tofile: destination path being checked
    @param do_allocation_check: ask SPM to allocate a protected target
    @param do_quiet: suppress user-facing output
    @return: (in_mask, protected, tofile, do_continue); tofile may have
        been re-allocated by the SPM
    """
    protected = False
    do_continue = False
    in_mask = False

    # Python 2 needs raw (bytes) paths for os.* calls; the unicode
    # originals are still used for the membership tests below.
    tofile_os = tofile
    fromfile_os = fromfile
    if not const_is_python3():
        tofile_os = const_convert_to_rawstring(tofile)
        fromfile_os = const_convert_to_rawstring(fromfile)

    if tofile in protect:
        protected = True
        in_mask = True
    elif os.path.dirname(tofile) in protect:
        protected = True
        in_mask = True
    else:
        # walk ancestors; dirname() hits a fixed point at the root,
        # ending the loop
        tofile_testdir = os.path.dirname(tofile)
        old_tofile_testdir = None
        while tofile_testdir != old_tofile_testdir:
            if tofile_testdir in protect:
                protected = True
                in_mask = True
                break
            old_tofile_testdir = tofile_testdir
            tofile_testdir = os.path.dirname(tofile_testdir)

    if protected:
        # check if perhaps, file is masked, so unprotected
        if tofile in mask:
            protected = False
            in_mask = False
        elif os.path.dirname(tofile) in mask:
            protected = False
            in_mask = False
        else:
            # same ancestor walk, against the mask list
            tofile_testdir = os.path.dirname(tofile)
            old_tofile_testdir = None
            while tofile_testdir != old_tofile_testdir:
                if tofile_testdir in mask:
                    protected = False
                    in_mask = False
                    break
                old_tofile_testdir = tofile_testdir
                tofile_testdir = os.path.dirname(tofile_testdir)

    if not os.path.lexists(tofile_os):
        protected = False # file doesn't exist

    # check if it's a text file; binary files are never config-protected
    if protected:
        protected = entropy.tools.istextfile(tofile)
        in_mask = protected

    if fromfile is not None:
        # lexists and not exists and islink == broken symlink
        if protected and os.path.lexists(fromfile_os) and (
                not os.path.exists(fromfile_os)) and (
                    os.path.islink(fromfile_os)):
            # broken symlink, don't protect
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['normal_loglevel_id'],
                "WARNING!!! Failed to handle file protection for: " \
                "%s, broken symlink in package" % (
                    tofile,
                )
            )
            msg = _("Cannot protect broken symlink")
            mytxt = "%s:" % (purple(msg), )
            self._entropy.output(mytxt, importance=1,
                                 level="warning",
                                 header=brown("   ## "))
            self._entropy.output(tofile, level="warning",
                                 header=brown("   ## "))
            protected = False

    if not protected:
        return in_mask, protected, tofile, do_continue

    ##                  ##
    # file is protected #
    ##__________________##

    # check if protection is disabled for this element
    if tofile in protectskip:
        self._entropy.logger.log(
            "[Package]",
            etpConst['logging']['normal_loglevel_id'],
            "Skipping config file installation/removal, " \
            "as stated in client.conf: %s" % (tofile,)
        )
        if not do_quiet:
            mytxt = "%s: %s" % (
                _("Skipping file installation/removal"),
                tofile,
            )
            self._entropy.output(mytxt, importance=1,
                                 level="warning",
                                 header=darkred("   ## "))
        do_continue = True
        return in_mask, protected, tofile, do_continue

    ##                      ##
    # file is protected (2) #
    ##______________________##

    # SPM allocates the "._cfg????_" target; prot_status == False means
    # an identical protected copy already sits on disk.
    prot_status = True
    if do_allocation_check:
        spm_class = self._entropy.Spm_class()
        tofile, prot_status = spm_class.allocate_protected_file(
            fromfile, tofile)

    if not prot_status:
        # a protected file with the same content
        # is already in place, so not going to protect
        # the same file twice
        protected = False
        return in_mask, protected, tofile, do_continue

    ##                      ##
    # file is protected (3) #
    ##______________________##

    # strip the 10-char "._cfg????_" prefix for user-facing messages
    oldtofile = tofile
    if oldtofile.find("._cfg") != -1:
        oldtofile = os.path.join(os.path.dirname(oldtofile),
                                 os.path.basename(oldtofile)[10:])

    if not do_quiet:
        self._entropy.logger.log(
            "[Package]",
            etpConst['logging']['normal_loglevel_id'],
            "Protecting config file: %s" % (oldtofile, ))
        mytxt = red("%s: %s") % (
            _("Protecting config file"),
            oldtofile,
        )
        self._entropy.output(mytxt, importance=1,
                             level="warning",
                             header=darkred("   ## "))

    return in_mask, protected, tofile, do_continue
import collections

from entropy.const import etpConst, const_convert_to_unicode, \
    const_convert_to_rawstring
from entropy.exceptions import EntropyException, DependenciesNotRemovable
from entropy.i18n import _
from entropy.output import teal, purple, darkgreen, brown, print_info, \
    red, print_warning

import entropy.dep
import entropy.tools

# Virtual package atoms providing kernel binaries (unicode, used for
# repository matching).
KERNEL_BINARY_VIRTUAL = const_convert_to_unicode("virtual/linux-binary")
KERNEL_BINARY_LTS_VIRTUAL = const_convert_to_unicode(
    "virtual/linux-binary-lts")
# Filesystem paths/keys are kept as raw strings (bytes on Python 3) so
# os.* calls operate in bytes mode.
KERNELS_DIR = const_convert_to_rawstring("/etc/kernels")
RELEASE_LEVEL = const_convert_to_rawstring("RELEASE_LEVEL")


def _remove_tag_from_slot(slot):
    """
    Strip the package tag portion from a "slot,tag" string, delegating to
    entropy.dep.remove_tag_from_slot() when the installed entropy version
    provides it.
    """
    if not hasattr(entropy.dep, "remove_tag_from_slot"):
        # backward compatibility: drop everything after the last comma
        # (reverse, split once, reverse back)
        return slot[::-1].split(",", 1)[-1][::-1]
    return entropy.dep.remove_tag_from_slot(slot)


def _setup_kernel_symlink(target_tag):
    """
    Point the running system's kernel selection at target_tag via
    `eselect kernel set`, if the eselect tool is installed.

    NOTE(review): `os` and `subprocess` are not imported in this visible
    import block — confirm they are imported elsewhere in the file.
    """
    eselect_exec = "/usr/bin/eselect"
    if os.path.lexists(eselect_exec):
        subprocess.call((eselect_exec, "kernel", "set", target_tag))
def _orphans(self, entropy_client, inst_repo):
    """
    Solo Query Orphans command.

    Walk the configured system directories, collect every file not masked
    by the system_dirs_mask settings, then subtract everything owned by
    installed packages (including reverse-symlink aliases and realpath
    variants). What remains is reported as orphaned and written to
    /tmp/entropy-orphans.txt.

    @param entropy_client: Entropy Client interface (output + Settings)
    @param inst_repo: installed packages repository
    @return: exit status (0)
    """
    quiet = self._nsargs.quiet
    settings = entropy_client.Settings()

    if not quiet:
        entropy_client.output(darkgreen(_("Orphans Search")),
                              header=darkred(" @@ "))

    reverse_symlink_map = settings["system_rev_symlinks"]
    system_dirs_mask = [x for x in settings["system_dirs_mask"]
                        if entropy.tools.is_valid_path(x)]
    # make sure we're all rawstring.
    system_dirs_mask = [const_convert_to_rawstring(x)
                        for x in system_dirs_mask]
    # Regex masks are compiled from the *unfiltered* mask list, so
    # entries that are not valid paths still apply as patterns.
    system_dirs_mask_regexp = []
    for mask in settings["system_dirs_mask"]:
        reg_mask = re.compile(mask)
        system_dirs_mask_regexp.append(reg_mask)

    file_data = set()
    dirs = settings["system_dirs"]
    count = 0

    for xdir in dirs:
        # make sure it's bytes (raw encoding
        # as per EntropyRepository.retrieveContent())
        xdir = const_convert_to_rawstring(
            xdir, from_enctype=etpConst["conf_raw_encoding"])
        try:
            wd = os.walk(xdir)
        except RuntimeError:
            # maximum recursion?
            continue
        for currentdir, subdirs, files in wd:
            found_files = set()
            for filename in files:
                filename = os.path.join(currentdir, filename)

                # Filter out *valid* symlinks only. FIX: the original
                # used os.path.lexists() here, which is always True for
                # an existing symlink (broken or not), so broken
                # symlinks were silently skipped instead of being
                # reported as the comment promised. os.path.exists()
                # follows the link, letting broken ones fall through.
                if os.path.islink(filename) and os.path.exists(filename):
                    continue

                do_cont = False
                for mask in system_dirs_mask:
                    if filename.startswith(mask):
                        do_cont = True
                        break
                if do_cont:
                    continue

                for mask in system_dirs_mask_regexp:
                    if mask.match(filename):
                        do_cont = True
                        break
                if do_cont:
                    continue

                count += 1
                filename_utf = const_convert_to_unicode(filename)
                # progress heartbeat every 500 files scanned
                if not quiet and (count % 500 == 0):
                    count = 0
                    if len(filename_utf) > 50:
                        fname = filename_utf[:40] + \
                            const_convert_to_unicode("...") + \
                            filename_utf[-10:]
                    else:
                        fname = filename_utf
                    entropy_client.output(
                        "%s: %s" % (blue(_("Analyzing")), fname),
                        header=darkred(" @@ "),
                        back=True
                    )
                found_files.add(filename_utf)

            if found_files:
                file_data |= found_files

    totalfiles = len(file_data)
    if not quiet:
        entropy_client.output(
            "%s: %s" % (
                blue(_("Analyzed directories")),
                " ".join(settings["system_dirs"])),
            header=darkred(" @@ ")
        )
        entropy_client.output(
            "%s: %s" % (
                blue(_("Masked directories")),
                " ".join(settings["system_dirs_mask"])),
            header=darkred(" @@ "),
        )
        entropy_client.output(
            "%s: %s" % (
                blue(_("Number of files collected on the filesystem")),
                bold(const_convert_to_unicode(totalfiles))),
            header=darkred(" @@ "),
        )
        entropy_client.output(
            "%s: %s" % (
                blue(_("Now searching among installed packages")),
                bold(const_convert_to_unicode(totalfiles))),
            header=darkred(" @@ "),
        )

    pkg_ids = inst_repo.listAllPackageIds()
    length = str(len(pkg_ids))
    count = 0

    def gen_cont(pkg_id):
        # Yield every path alias under which a package-owned file may
        # appear on the live filesystem: reverse-symlink aliases, the
        # realpath of the parent directory, and the recorded path itself.
        for path, ftype in inst_repo.retrieveContentIter(pkg_id):
            # reverse sym
            for sym_dir in reverse_symlink_map:
                if path.startswith(sym_dir):
                    for sym_child in reverse_symlink_map[sym_dir]:
                        yield sym_child + path[len(sym_dir):]
            # real path also
            dirname_real = os.path.realpath(os.path.dirname(path))
            yield os.path.join(dirname_real, os.path.basename(path))
            yield path

    for pkg_id in pkg_ids:
        if not quiet:
            count += 1
            atom = inst_repo.retrieveAtom(pkg_id)
            if atom is None:
                continue
            entropy_client.output(
                "%s: %s" % (blue(_("Checking")), bold(atom)),
                header=darkred(" @@ "),
                count=(count, length),
                back=True,
            )
        # remove from file_data
        file_data -= set(gen_cont(pkg_id))

    orphanedfiles = len(file_data)
    # NOTE(review): fixed, predictable path in /tmp is subject to symlink
    # attacks on multi-user systems; kept for backward compatibility.
    fname = "/tmp/entropy-orphans.txt"

    if not quiet:
        entropy_client.output(
            "%s: %s" % (
                blue(_("Number of total files")),
                bold(const_convert_to_unicode(totalfiles))),
            header=darkred(" @@ "),
        )
        entropy_client.output(
            "%s: %s" % (
                blue(_("Number of matching files")),
                bold(const_convert_to_unicode(
                    totalfiles - orphanedfiles))),
            header=darkred(" @@ "),
        )
        entropy_client.output(
            "%s: %s" % (
                blue(_("Number of orphaned files")),
                bold(const_convert_to_unicode(orphanedfiles))),
            header=darkred(" @@ "),
        )
        entropy_client.output(
            "%s: %s" % (
                blue(_("Writing file to disk")),
                bold(const_convert_to_unicode(fname))),
            header=darkred(" @@ "),
        )

    sizecount = 0
    file_data = list(file_data)
    file_data.sort(reverse=True)

    with open(fname, "wb") as f_out:
        for myfile in file_data:
            myfile = const_convert_to_rawstring(myfile)
            mysize = 0
            try:
                mysize += os.stat(myfile).st_size
            except OSError:
                # vanished or unreadable meanwhile; count as zero
                mysize = 0
            sizecount += mysize

            f_out.write(myfile + const_convert_to_rawstring("\n"))
            if quiet:
                entropy_client.output(myfile, level="generic")
        f_out.flush()

    humansize = entropy.tools.bytes_into_human(sizecount)
    if not quiet:
        entropy_client.output(
            "%s: %s" % (blue(_("Total space wasted")), bold(humansize)),
            header=darkred(" @@ "))
    else:
        entropy_client.output(humansize, level="generic")
    return 0
def _remove_content_from_system_loop(self, inst_repo, remove_atom,
                                     remove_content, remove_config,
                                     affected_directories,
                                     affected_infofiles,
                                     directories, directories_cache,
                                     preserved_mgr,
                                     not_removed_due_to_collisions,
                                     colliding_path_messages,
                                     automerge_metadata, col_protect,
                                     protect, mask, protectskip,
                                     sys_root):
    """
    Body of the _remove_content_from_system() method.

    Iterates the package content list and removes each file from the
    live filesystem, honoring: file collisions (col_protect), config
    protection (unless remove_config), automerge metadata (config files
    never modified by the user are removed), and preserved libraries.
    Directories are never removed here; they are collected into
    `directories` (and the other accumulator sets) for later processing
    by triggers. Mutates the accumulator collections passed in by the
    caller.
    """
    info_dirs = self._get_info_directories()

    # collect all the library paths to be preserved
    # in the final removal loop.
    preserved_lib_paths = set()
    if self.PRESERVED_LIBS_ENABLED:
        for _pkg_id, item, _ftype in remove_content:
            # determine without sys_root
            paths = self._handle_preserved_lib(
                item, remove_atom, preserved_mgr)
            if paths is not None:
                preserved_lib_paths.update(paths)

    for _pkg_id, item, _ftype in remove_content:

        if not item:
            continue # empty element??

        sys_root_item = sys_root + item
        sys_root_item_encoded = sys_root_item
        if not const_is_python3():
            # this is coming from the db, and it's pure utf-8
            sys_root_item_encoded = const_convert_to_rawstring(
                sys_root_item,
                from_enctype = etpConst['conf_raw_encoding'])

        # collision check: if another installed package still owns this
        # path, do not remove it; record the collision instead.
        if col_protect > 0:

            if inst_repo.isFileAvailable(item) \
                and os.path.isfile(sys_root_item_encoded):

                # in this way we filter out directories
                colliding_path_messages.add(sys_root_item)
                not_removed_due_to_collisions.add(item)
                continue

        protected = False
        in_mask = False
        if not remove_config:

            protected_item_test = sys_root_item
            (in_mask, protected, _x,
             do_continue) = self._handle_config_protect(
                 protect, mask, protectskip, None, protected_item_test,
                 do_allocation_check = False, do_quiet = True
             )

            if do_continue:
                protected = True

        # when files have not been modified by the user
        # and they are inside a config protect directory
        # we could even remove them directly
        if in_mask:

            oldprot_md5 = automerge_metadata.get(item)
            if oldprot_md5:

                try:
                    in_system_md5 = entropy.tools.md5sum(
                        protected_item_test)
                except (OSError, IOError) as err:
                    if err.errno != errno.ENOENT:
                        raise
                    # file gone: sentinel value that can never match
                    in_system_md5 = "?"

                if oldprot_md5 == in_system_md5:
                    # on-disk file matches the recorded checksum:
                    # untouched by the user, safe to remove
                    prot_msg = _("Removing config file, never modified")
                    mytxt = "%s: %s" % (
                        darkgreen(prot_msg),
                        blue(item),
                    )
                    self._entropy.output(
                        mytxt,
                        importance = 1,
                        level = "info",
                        header = red("   ## ")
                    )
                    protected = False
                    do_continue = False

        # Is file or directory a protected item?
        if protected:
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['verbose_loglevel_id'],
                "[remove] Protecting config file: %s" % (sys_root_item,)
            )
            mytxt = "[%s] %s: %s" % (
                red(_("remove")),
                brown(_("Protecting config file")),
                sys_root_item,
            )
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "warning",
                header = red("   ## ")
            )
            continue

        try:
            os.lstat(sys_root_item_encoded)
        except OSError as err:
            if err.errno in (errno.ENOENT, errno.ENOTDIR):
                continue # skip file, does not exist
            raise
        except UnicodeEncodeError:
            msg = _("This package contains a badly encoded file !!!")
            mytxt = brown(msg)
            self._entropy.output(
                red("QA: ")+mytxt,
                importance = 1,
                level = "warning",
                header = darkred("   ## ")
            )
            continue # file has a really bad encoding

        if os.path.isdir(sys_root_item_encoded) and \
            os.path.islink(sys_root_item_encoded):
            # S_ISDIR returns False for directory symlinks,
            # so using os.path.isdir
            # valid directory symlink
            if sys_root_item not in directories_cache:
                # collect for Trigger
                affected_directories.add(item)
                directories.add((sys_root_item, "link"))
                directories_cache.add(sys_root_item)
            continue

        if os.path.isdir(sys_root_item_encoded):
            # plain directory
            if sys_root_item not in directories_cache:
                # collect for Trigger
                affected_directories.add(item)
                directories.add((sys_root_item, "dir"))
                directories_cache.add(sys_root_item)
            continue

        # files, symlinks or not
        # just a file or symlink or broken
        # directory symlink (remove now)

        # skip file removal if item is a preserved library.
        if item in preserved_lib_paths:
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['normal_loglevel_id'],
                "[remove] skipping removal of: %s" % (sys_root_item,)
            )
            continue

        try:
            os.remove(sys_root_item_encoded)
        except OSError as err:
            # best-effort removal: log the failure and move on
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['normal_loglevel_id'],
                "[remove] Unable to remove %s, error: %s" % (
                    sys_root_item, err,)
            )
            continue

        # collect for Trigger
        dir_name = os.path.dirname(item)
        affected_directories.add(dir_name)

        # account for info files, if any
        if dir_name in info_dirs:
            for _ext in self._INFO_EXTS:
                if item.endswith(_ext):
                    affected_infofiles.add(item)
                    break

        # add its parent directory
        dirobj = const_convert_to_unicode(
            os.path.dirname(sys_root_item_encoded))
        if dirobj not in directories_cache:
            if os.path.isdir(dirobj) and os.path.islink(dirobj):
                directories.add((dirobj, "link"))
            elif os.path.isdir(dirobj):
                directories.add((dirobj, "dir"))

            directories_cache.add(dirobj)
def _generic_post_handler(self, function_name, params, file_params,
                          timeout):
    """
    Given a function name and the request data (dict format), do the
    actual HTTP request and return the response object to caller.
    WARNING: params and file_params dict keys must be ASCII string only.

    @param function_name: name of the function that called this method
    @type function_name: string
    @param params: POST parameters
    @type params: dict
    @param file_params: mapping composed by file names as key and tuple
        composed by (file_name, file object) as values
    @type file_params: dict
    @param timeout: socket timeout
    @type timeout: float
    @return: tuple composed by the server response string or None
        (in case of empty response) and the HTTPResponse object
        (useful for checking response status)
    @rtype: tuple
    """
    if timeout is None:
        timeout = self._default_timeout_secs
    multipart_boundary = "---entropy.services,boundary---"
    request_path = self._request_path.rstrip("/") + "/" + function_name
    const_debug_write(__name__,
        "WebService _generic_post_handler, calling: %s at %s -- %s,"
        " tx_callback: %s, timeout: %s" % (self._request_host,
            request_path, params, self._transfer_callback, timeout,))
    connection = None
    try:
        if self._request_protocol == "http":
            connection = httplib.HTTPConnection(self._request_host,
                timeout = timeout)
        elif self._request_protocol == "https":
            connection = httplib.HTTPSConnection(self._request_host,
                timeout = timeout)
        else:
            raise WebService.RequestError("invalid request protocol",
                method = function_name)
        headers = {
            "Accept": "text/plain",
            "User-Agent": self._generate_user_agent(function_name),
        }

        if file_params is None:
            file_params = {}
        # autodetect file parameters in params
        # (iterate over a copied key list: params is mutated inside)
        for k in list(params.keys()):
            if isinstance(params[k], (tuple, list)) \
                and (len(params[k]) == 2):
                f_name, f_obj = params[k]
                if isinstance(f_obj, file):
                    file_params[k] = params[k]
                    del params[k]
            elif const_isunicode(params[k]):
                # convert to raw string
                params[k] = const_convert_to_rawstring(params[k],
                    from_enctype = "utf-8")
            elif not const_isstring(params[k]):
                # invalid ?
                if params[k] is None:
                    # will be converted to ""
                    continue
                int_types = const_get_int()
                supported_types = (float, list, tuple) + int_types
                if not isinstance(params[k], supported_types):
                    raise WebService.UnsupportedParameters(
                        "%s is unsupported type %s" % (k,
                            type(params[k])))
                list_types = (list, tuple)
                if isinstance(params[k], list_types):
                    # not supporting nested lists
                    non_str = [x for x in params[k] if not \
                        const_isstring(x)]
                    if non_str:
                        raise WebService.UnsupportedParameters(
                            "%s is unsupported type %s" % (k,
                                type(params[k])))

        body = None
        if not file_params:
            headers["Content-Type"] = \
                "application/x-www-form-urlencoded"
            encoded_params = urllib_parse.urlencode(params)
            data_size = len(encoded_params)
            if self._transfer_callback is not None:
                self._transfer_callback(0, data_size, False)

            if data_size < 65536:
                # small payload: single request() call
                try:
                    connection.request("POST", request_path,
                        encoded_params, headers)
                except socket.error as err:
                    raise WebService.RequestError(err,
                        method = function_name)
            else:
                # big payload: send in chunks so the transfer callback
                # can report progress
                try:
                    connection.request("POST", request_path, None,
                        headers)
                except socket.error as err:
                    raise WebService.RequestError(err,
                        method = function_name)
                sio = StringIO(encoded_params)
                data_size = len(encoded_params)
                while True:
                    chunk = sio.read(65535)
                    if not chunk:
                        break
                    try:
                        connection.send(chunk)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                    if self._transfer_callback is not None:
                        self._transfer_callback(sio.tell(),
                            data_size, False)

            # for both ways, send a signal through the callback
            if self._transfer_callback is not None:
                self._transfer_callback(data_size, data_size, False)

        else:
            headers["Content-Type"] = \
                "multipart/form-data; boundary=" + multipart_boundary
            body_file, body_fpath = self._encode_multipart_form(params,
                file_params, multipart_boundary)
            try:
                data_size = body_file.tell()
                headers["Content-Length"] = str(data_size)
                body_file.seek(0)
                if self._transfer_callback is not None:
                    self._transfer_callback(0, data_size, False)

                try:
                    connection.request("POST", request_path, None,
                        headers)
                except socket.error as err:
                    raise WebService.RequestError(err,
                        method = function_name)

                while True:
                    chunk = body_file.read(65535)
                    if not chunk:
                        break
                    try:
                        connection.send(chunk)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                    if self._transfer_callback is not None:
                        self._transfer_callback(body_file.tell(),
                            data_size, False)

                if self._transfer_callback is not None:
                    self._transfer_callback(data_size, data_size, False)
            finally:
                # temporary multipart body file is always cleaned up
                body_file.close()
                os.remove(body_fpath)

        try:
            response = connection.getresponse()
        except socket.error as err:
            raise WebService.RequestError(err,
                method = function_name)
        const_debug_write(__name__, "WebService.%s(%s), "
            "response header: %s" % (
                function_name, params, response.getheaders(),))
        total_length = response.getheader("Content-Length", "-1")
        try:
            total_length = int(total_length)
        except ValueError:
            total_length = -1

        # FIX: accumulate response chunks in a list and join once at the
        # end. The original did `outcome += chunk` on a raw string in a
        # loop, which is quadratic in the worst case for large bodies.
        outcome_chunks = []
        current_len = 0
        if self._transfer_callback is not None:
            self._transfer_callback(current_len, total_length, True)

        while True:
            try:
                chunk = response.read(65536)
            except socket.error as err:
                raise WebService.RequestError(err,
                    method = function_name)
            if not chunk:
                break
            outcome_chunks.append(chunk)
            current_len += len(chunk)
            if self._transfer_callback is not None:
                self._transfer_callback(current_len, total_length, True)
        outcome = const_convert_to_rawstring("").join(outcome_chunks)

        if self._transfer_callback is not None:
            self._transfer_callback(total_length, total_length, True)

        if const_is_python3():
            outcome = const_convert_to_unicode(outcome)
        if not outcome:
            return None, response
        return outcome, response

    except httplib.HTTPException as err:
        raise WebService.RequestError(err,
            method = function_name)
    finally:
        if connection is not None:
            connection.close()
import collections

from entropy.const import etpConst, const_convert_to_unicode, \
    const_convert_to_rawstring
from entropy.exceptions import EntropyException, DependenciesNotRemovable
from entropy.i18n import _
from entropy.output import teal, purple, darkgreen, brown, print_info, \
    red, print_warning

import entropy.dep
import entropy.tools

# Kernel-binary virtual package atoms (unicode, matched in repositories).
KERNEL_BINARY_VIRTUAL = const_convert_to_unicode("virtual/linux-binary")
KERNEL_BINARY_LTS_VIRTUAL = const_convert_to_unicode("virtual/linux-binary-lts")
# Raw (bytes) filesystem path and key constants for os-level access.
KERNELS_DIR = const_convert_to_rawstring("/etc/kernels")
RELEASE_LEVEL = const_convert_to_rawstring("RELEASE_LEVEL")


def _remove_tag_from_slot(slot):
    """
    Drop the package tag from a "slot,tag" string.

    Prefers entropy.dep.remove_tag_from_slot() when available; otherwise
    falls back to removing everything after the last comma.
    """
    impl = getattr(entropy.dep, "remove_tag_from_slot", None)
    if impl is None:
        # backward compatibility: reverse, split off the first (i.e. last)
        # comma-separated field, reverse back
        reversed_slot = slot[::-1]
        return reversed_slot.split(",", 1)[-1][::-1]
    return impl(slot)


def _setup_kernel_symlink(target_tag):
    """
    Select the kernel identified by target_tag through `eselect kernel
    set`, when the eselect binary exists on this system.
    """
    eselect_exec = "/usr/bin/eselect"
    if not os.path.lexists(eselect_exec):
        return
    subprocess.call((eselect_exec, "kernel", "set", target_tag))
def _get_kernels(self, virtual):
    """
    Return a set of kernel available.

    Kernels are discovered two ways: (1) the runtime dependencies of the
    given virtual/ kernel package (matched across all repositories) and
    (2) files under /etc/kernels/availables listing additional or custom
    kernels. Every collected dependency string is then matched against
    all repositories.

    @param virtual: the kernel virtual package name for retrieve the
        list of kernel in additional of the new way that uses files
        under /etc/kernels/available/{sabayon,sabayon-lts}.
    @type virtual: string
    @return: set of (package_id, repository_id) matches
    """
    kernel_availables_dir = os.path.join(
        KERNELS_DIR, const_convert_to_rawstring("availables"))
    klist = []
    kernels = set()

    # We may have virtual/ kernels in multiple repos, make sure
    # to pick them all up.
    kernel_virtual_pkgs, _rc = self._entropy.atom_match(
        virtual, multi_match=True, multi_repo=True)

    # virtual/ kernels have a runtime dependency against a kernel
    # package provider. So, get the list of runtime deps from them.
    for pkg_id, repo_id in kernel_virtual_pkgs:
        repo = self._entropy.open_repository(repo_id)
        kernel_deps = repo.retrieveRuntimeDependencies(pkg_id)
        for dep in kernel_deps:
            # Here we have list of kernels separate by ;
            kernels.update(dep.split(';'))

    # Check if exists kernel availables directory for
    # introduce new kernels or custom kernels.
    if os.path.exists(kernel_availables_dir):
        for entry in os.listdir(kernel_availables_dir):
            entry_path = os.path.join(
                kernel_availables_dir,
                const_convert_to_rawstring(entry))
            # FIX: the original tested os.path.isdir(i) on the bare
            # listdir() entry, which resolves against the current
            # working directory instead of kernel_availables_dir; use
            # the joined path so subdirectories are actually skipped.
            if os.path.isdir(entry_path):
                continue
            # NOTE(review): comparing `virtual` against
            # KERNEL_LTS_AVAILABLES_SUFFIX (a suffix constant) looks
            # suspicious — confirm it should not be the LTS virtual
            # atom; behavior preserved as-is.
            if virtual == KERNEL_LTS_AVAILABLES_SUFFIX and \
                    not entry.endswith(KERNEL_LTS_AVAILABLES_SUFFIX):
                # We want only files *-lts
                continue
            kernel_list = self._parse_kernel_available_file(entry_path)
            if kernel_list:
                klist += kernel_list

        # FIX: the original guarded this update with the truthiness of
        # the *last* parsed kernel_list, dropping earlier results when
        # the final file happened to be empty; guard on the accumulated
        # list instead.
        if klist:
            kernels.update(klist)

    # Match the dependencies collected against all repositories,
    # or we won't be able to pick up binaries in all of them.
    kernel_packages = set()
    for kernel in kernels:
        kernel_pkgs, _rc = self._entropy.atom_match(
            kernel, multi_match=True, multi_repo=True)
        kernel_packages.update(kernel_pkgs)

    # There is no safety check for random packages to be pulled in.
    # The assumption is that virtual packages are listing just kernel
    # binaries in their dependencies.
    return kernel_packages