def usage():
    """Print the esync command-line help text and exit with status 0."""
    print("esync (%s) - Calls 'emerge sync' and 'eupdatedb' and shows updates"
        % version)
    print("")
    print(bold("Usage:"), "esync [", darkgreen("options"), "]")
    print(bold("Options:"))
    # (heading, description) pairs, printed with a blank line between entries.
    entries = [
        (darkgreen(" --help") + ", " + darkgreen("-h"),
            " Print this help message"),
        (darkgreen(" --webrsync") + ", " + darkgreen("-w"),
            " Use 'emerge-webrsync' instead of 'emerge --sync'"),
        (darkgreen(" --delta-webrsync") + ", " + darkgreen("-d"),
            " Use 'emerge-delta-webrsync' instead of 'emerge --sync'"),
        (darkgreen(" --metadata") + ", " + darkgreen("-m"),
            " Use 'emerge --metadata' instead of 'emerge --sync'"),
        (darkgreen(" --layman-sync") + ", " + darkgreen("-l"),
            " Use layman to sync any installed overlays, then sync the main tree"),
        (darkgreen(" --nocolor") + ", " + darkgreen("-n"),
            " Don't use ANSI codes for colored output"),
        (darkgreen(" --quiet") + ", " + darkgreen("-q"),
            " Less output (implies --nospinner)"),
        (darkgreen(" --verbose") + ", " + darkgreen("-v"),
            " Verbose output"),
        (darkgreen(" --nospinner") + ", " + darkgreen("-s"),
            " Don't display the remaining index count"),
    ]
    for idx, (heading, description) in enumerate(entries):
        if idx:
            print("")
        print(heading)
        print(description)
    sys.exit(0)
def rebuild(logger, assigned, settings):
    """Rebuild the assigned packages via a single `emerge --oneshot` call.

    @param logger: logging.Logger-style object used for output
    @param assigned: iterable of package atoms/cpvs to re-emerge
    @param settings: dict of run options; reads 'pass_through_options',
        'EXACT', 'PRETEND', 'VERBOSITY' and 'nocolor'
    @return: exit status of the emerge invocation, or 0 when there is
        nothing to emerge
    """
    args = settings['pass_through_options']
    if settings['EXACT']:
        emerge_command = '=' + ' ='.join(assigned)
    else:
        emerge_command = ' '.join(get_slotted_cps(assigned, logger))
    if settings['PRETEND']:
        args += ' --pretend'
    if settings['VERBOSITY'] >= 2:
        args += ' --verbose'
    elif settings['VERBOSITY'] < 1:
        args += ' --quiet'
    if settings['nocolor']:
        args += ' --color n'
    if not emerge_command:
        logger.warn(bold('\nThere is nothing to emerge. Exiting.'))
        return 0
    # Fix: removed a redundant no-op self-assignment
    # (`emerge_command = emerge_command`) that was here.
    logger.warn(yellow('\nemerge') + args
        + ' --oneshot --complete-graph=y '
        + bold(emerge_command))
    # NOTE(review): os.system with string concatenation is shell-injection
    # prone if atom names were ever untrusted; a subprocess.call(list) form
    # (as used by the newer rebuild() variant) would be safer.
    success = os.system(
        'emerge ' + args + ' --oneshot --complete-graph=y ' + emerge_command)
    return success
def _suggest(self):
    """Print hint lines for options that would widen the current check."""
    hints = []
    if self.suggest['ignore_masked']:
        hints.append(
            "Note: use --without-mask to check "
            "KEYWORDS on dependencies of masked packages")
    if self.suggest['include_dev']:
        hints.append(
            "Note: use --include-dev (-d) to check "
            "dependencies for 'dev' profiles")
    print()
    for hint in hints:
        print(bold(hint))
    print()
def chk_updated_cfg_files(eroot, config_protect):
    """Warn about pending CONFIG_PROTECT config-file updates under eroot.

    @param eroot: root path to scan for pending updates
    @param config_protect: list of protected paths (CONFIG_PROTECT entries)
    """
    results = list(
        portage.util.find_updated_config_files(eroot, config_protect))
    for path, candidates in results:
        writemsg_level(
            "\n %s " % (colorize("WARN", "* " + _("IMPORTANT:"))),
            level=logging.INFO, noiselevel=-1)
        if not candidates:
            # A single protected file needs updating.
            writemsg_level(
                _("config file '%s' needs updating.\n") % path,
                level=logging.INFO, noiselevel=-1)
        elif len(candidates) == 1:
            # Protected dir with one ._cfgXXXX_ candidate: report the
            # filename the candidate would replace.
            head, tail = os.path.split(candidates[0])
            fpath = os.path.join(head, tail[len("._cfg0000_"):])
            writemsg_level(
                _("config file '%s' needs updating.\n") % fpath,
                level=logging.INFO, noiselevel=-1)
        else:
            writemsg_level(
                _("%d config files in '%s' need updating.\n")
                % (len(candidates), path),
                level=logging.INFO, noiselevel=-1)
    if results:
        print(" " + yellow("*") + " See the "
            + colorize("INFORM", _("CONFIGURATION FILES"))
            + " and " + colorize("INFORM",
            _("CONFIGURATION FILES UPDATE TOOLS")))
        print(" " + yellow("*") + " sections of the " + bold("emerge") + " "
            + _("man page to learn how to update config files."))
def _check_updates(self):
    """Return a list of warning lines if a portage update is available.

    Also reports pending CONFIG_PROTECT updates as a side effect.
    """
    trees = self.emerge_config.target_config.trees
    best_visible = trees['porttree'].dbapi.xmatch(
        "bestmatch-visible", portage.const.PORTAGE_PACKAGE_ATOM)
    installed = portage.best(
        trees['vartree'].dbapi.match(portage.const.PORTAGE_PACKAGE_ATOM))
    chk_updated_cfg_files(
        self.emerge_config.target_config.root,
        portage.util.shlex_split(
            self.emerge_config.target_config.settings.get(
                "CONFIG_PROTECT", "")))
    msgs = []
    if best_visible != installed and "--quiet" not in self.emerge_config.opts:
        msgs.extend([
            '',
            warn(" * ") + bold("An update to portage is available.")
            + " It is _highly_ recommended",
            warn(" * ")
            + "that you update portage now, before any other packages are updated.",
            '',
            warn(" * ")
            + "To update portage, run 'emerge --oneshot portage' now.",
            '',
        ])
    return msgs
def get_best_match(cpv, cp, logger):
    """Tries to find another version of the pkg with the same slot
    as the deprecated installed version.  Failing that, attempts to get
    any version of the same app.

    @param cpv: string
    @param cp: string
    @rtype tuple: ([cpv,...], SLOT)
    """
    slot = portage.db[portage.root]["vartree"].dbapi.aux_get(
        cpv, ["SLOT"])[0]
    logger.warning('\t%s "%s" %s.' % (
        yellow('* Warning:'), cpv, bold('ebuild not found.')))
    logger.debug('\tget_best_match(); Looking for %s:%s' % (cp, slot))
    try:
        match = portdb.match('%s:%s' % (cp, slot))
    except portage.exception.InvalidAtom:
        match = None
    if not match:
        # Slot-qualified lookup failed; fall back to any version of cp.
        logger.warning('\t' + red('!!') + ' ' + yellow(
            'Could not find ebuild for %s:%s' % (cp, slot)))
        slot = ['']
        match = portdb.match(cp)
        if not match:
            logger.warning('\t' + red('!!') + ' '
                + yellow('Could not find ebuild for ' + cp))
    return match, slot
def shorthelp():
    """Print the condensed emerge usage summary."""
    t, g = turquoise, green
    print(bold("emerge:") + " the other white meat (command-line interface to the Portage system)")
    print(bold("Usage:"))
    targets = " | ".join(t(w) for w in ("ebuild", "tbz2", "file", "@set", "atom"))
    print(" " + t("emerge") + " [ " + g("options") + " ] [ " + g("action")
        + " ] [ " + targets + " ] [ ... ]")
    print(" " + t("emerge") + " [ " + g("options") + " ] [ " + g("action")
        + " ] < " + t("system") + " | " + t("world") + " >")
    print(" " + t("emerge") + " < " + t("--sync") + " | " + t("--metadata")
        + " | " + t("--info") + " >")
    print(" " + t("emerge") + " " + t("--resume") + " [ " + g("--pretend")
        + " | " + g("--ask") + " | " + g("--skipfirst") + " ]")
    print(" " + t("emerge") + " " + t("--help") + " [ " + g("--verbose") + " ] ")
    print(bold("Options:") + " " + g("-") + "["
        + g("abBcCdDefgGhjkKlnNoOpPqrsStuvV") + "]")
    print(" [ " + g("--color") + " < " + t("y") + " | " + t("n")
        + " > ] [ " + g("--columns") + " ]")
    print(" [ " + g("--complete-graph") + " ] [ " + g("--deep") + " ]")
    print(" [ " + g("--jobs") + " " + t("JOBS") + " ] [ " + g("--keep-going")
        + " ] [ " + g("--load-average") + " " + t("LOAD") + " ]")
    print(" [ " + g("--newuse") + " ] [ " + g("--noconfmem") + " ] [ "
        + g("--nospinner") + " ]")
    print(" [ " + g("--oneshot") + " ] [ " + g("--onlydeps") + " ]")
    print(" [ " + g("--reinstall ") + t("changed-use") + " ] [ "
        + g("--with-bdeps") + " < " + t("y") + " | " + t("n") + " > ]")
    print(bold("Actions:") + " [ " + g("--depclean") + " | " + g("--list-sets")
        + " | " + g("--search") + " | " + g("--sync") + " | "
        + g("--version") + " ]")
def chk_updated_cfg_files(eroot, config_protect):
    """Report config files under eroot that await a CONFIG_PROTECT merge.

    @param eroot: root path to scan
    @param config_protect: list of protected paths (CONFIG_PROTECT entries)
    """
    updated = list(
        portage.util.find_updated_config_files(eroot, config_protect))
    for entry in updated:
        writemsg_level(
            "\n %s " % (colorize("WARN", "* " + _("IMPORTANT:"))),
            level=logging.INFO, noiselevel=-1)
        if not entry[1]:
            # entry is a single protected file
            writemsg_level(
                _("config file '%s' needs updating.\n") % entry[0],
                level=logging.INFO, noiselevel=-1)
        else:
            # entry is a protected directory
            if len(entry[1]) == 1:
                head, tail = os.path.split(entry[1][0])
                fpath = os.path.join(head, tail[len("._cfg0000_"):])
                writemsg_level(
                    _("config file '%s' needs updating.\n") % fpath,
                    level=logging.INFO, noiselevel=-1)
            else:
                writemsg_level(
                    _("%d config files in '%s' need updating.\n")
                    % (len(entry[1]), entry[0]),
                    level=logging.INFO, noiselevel=-1)
    if updated:
        print(" " + yellow("*") + " See the "
            + colorize("INFORM", _("CONFIGURATION FILES"))
            + " and " + colorize("INFORM",
            _("CONFIGURATION FILES UPDATE TOOLS")))
        print(" " + yellow("*") + " sections of the " + bold("emerge") + " "
            + _("man page to learn how to update config files."))
def _set_no_columns(self, pkg, pkg_info):
    """prints pkg info without column indentation.

    @param pkg: _emerge.Package.Package instance
    @param pkg_info: dictionary
    @rtype the updated addl
    """
    pkg_str = pkg.cpv
    if self.conf.verbosity == 3:
        pkg_str = self._append_slot(pkg_str, pkg, pkg_info)
        pkg_str = self._append_repository(pkg_str, pkg, pkg_info)
    if not pkg_info.merge:
        addl = self.empty_space_in_brackets()
        myprint = "[%s%s] %s%s %s" % (
            self.pkgprint(pkg_info.operation.ljust(13), pkg_info),
            addl,
            self.indent,
            self.pkgprint(pkg_str, pkg_info),
            pkg_info.oldbest)
    else:
        # "L" marker flags packages carrying a local patch.
        local_flag = bold(blue("L")) if self._has_local_patch(pkg) else " "
        myprint = "[%s %s%s] %s%s %s" % (
            self.pkgprint(pkg.type_name, pkg_info),
            pkg_info.attr_display,
            local_flag,
            self.indent,
            self.pkgprint(pkg_str, pkg_info),
            pkg_info.oldbest)
    return myprint
def __init__(self, scanned_files, logger, searchlibs=None, searchbits=None,
        all_masks=None, masked_dirs=None):
    '''LibCheck init function.

    @param scanned_files: optional dictionary if the type created by
        scan_files().  Defaults to the class instance of scanned_files
    @param logger: python style Logging function to use for output.
    @param searchlibs: optional set() of libraries to search for.  If
        defined it toggles several settings to configure this class for
        a target search rather than a broken libs search.
    @param searchbits: optional iterable of bit-width strings
        ('32'/'64'); defaults to both when empty or None.
    @param all_masks: mask data forwarded to the search helpers.
    @param masked_dirs: directories excluded from the search.
    '''
    self.scanned_files = scanned_files
    self.logger = logger
    self.searchlibs = searchlibs
    # Fix: the previous `sorted(searchbits) or ['32', '64']` raised
    # TypeError when searchbits was None (the default), since sorted(None)
    # is invalid.  Only sort when a value was actually supplied.
    self.searchbits = sorted(searchbits) if searchbits else ['32', '64']
    self.all_masks = all_masks
    self.masked_dirs = masked_dirs
    self.logger.debug("\tLibCheck.__init__(), new searchlibs: %s"
        % (self.searchbits))
    if searchlibs:
        self.smsg = '\tLibCheck.search(), Checking for %s bit dependants'
        self.pmsg = yellow(" * ") + 'Files that depend on: %s (%s bits)'
        self.setlibs = self._setslibs
        self.check = self._checkforlib
    else:
        self.smsg = '\tLibCheck.search(), Checking for broken %s bit libs'
        self.pmsg = green(' * ') + bold('Broken files that require:') + ' %s (%s bits)'
        self.setlibs = self._setlibs
        self.check = self._checkbroken
    self.sfmsg = "\tLibCheck.search(); Total found: %(count)d libs, %(deps)d files in %(time)d milliseconds"
    self.alllibs = None
def search_ebuilds(path, portdir=True, searchdef="", repo_num="",
        config=None, data=None):
    """Collect the ebuilds found under *path* into the shared result dict.

    Appends display lines to data['output'] and ebuild paths to
    data['ebuilds']; records data['defebuild'] when *searchdef* matches.
    """
    pv = ""
    pkgs = []
    nr = len(data['ebuilds']) + 1
    if portdir:
        rep = darkgreen("Portage ")
    else:
        rep = red("Overlay " + str(repo_num) + " ")
    if isdir(path):
        for entry in listdir(path):
            if entry[-7:] == ".ebuild":
                pv = entry[:-7]
                pkgs.append(list(pkgsplit(pv)))
                pkgs[-1].append(path + entry)
                if searchdef != "" and pv == searchdef:
                    data['defebuild'] = (searchdef, pkgs[-1][3])
        if not portdir:
            config['found_in_overlay'] = True
    pkgs.sort(key=cmp_sort_key(mypkgcmp))
    for pkg in pkgs:
        # Suppress the "-r0" revision suffix in the listing.
        rev = "" if pkg[2] == "r0" else "-" + pkg[2]
        data['output'].append(" " + rep + " [" + bold(str(nr)) + "] "
            + pkg[0] + "-" + pkg[1] + rev + "\n")
        data['ebuilds'].append(pkg[len(pkg) - 1])
        nr += 1
def assign_packages(broken, logger, settings):
    '''Finds and returns packages that owns files placed in broken.
    Broken is list of files
    '''
    assigned = set()
    if not broken:
        return assigned
    # Build a reverse map: object path -> owning package, from the
    # CONTENTS of every installed package.
    owners = {}
    for pkg in set(get_installed_cpvs()):
        contents = Package(pkg).parsed_contents()
        for fname, meta in contents.items():
            if meta[0] == "obj":
                owners[fname] = str(pkg)
    for fname in broken:
        realname = os.path.realpath(fname)
        if realname in owners:
            pkgname = owners[realname]
        elif fname in owners:
            pkgname = owners[fname]
        else:
            pkgname = None
        if pkgname and pkgname not in assigned:
            assigned.add(pkgname)
        if not pkgname:
            pkgname = "(none)"
        logger.info('\t' + fname + ' -> ' + bold(pkgname))
    return assigned
def usage():
    """Print the esearch help text and exit with status 0."""
    print("esearch (%s) - Replacement for 'emerge search' with search-index"
        % version)
    print("")
    print(bold("Usage:"), "esearch [", darkgreen("options"), "] pattern")
    print(bold("Options:"))
    # (heading, extra-arg-or-None, description) per option.
    entries = [
        (darkgreen(" --help") + ", " + darkgreen("-h"), None,
            " Print help message"),
        (darkgreen(" --searchdesc") + ", " + darkgreen("-S"), None,
            " Search package descriptions as well"),
        (darkgreen(" --fullname") + ", " + darkgreen("-F"), None,
            " Search packages full name (includes category)"),
        (darkgreen(" --instonly") + ", " + darkgreen("-I"), None,
            " Find only packages which are installed"),
        (darkgreen(" --notinst") + ", " + darkgreen("-N"), None,
            " Find only packages which are not installed"),
        (darkgreen(" --exclude=") + "xpattern" + ", " + darkgreen("-x"),
            "xpattern",
            " Exclude packages matching xpattern from search result"),
        (darkgreen(" --compact") + ", " + darkgreen("-c"), None,
            " More compact output format"),
        (darkgreen(" --verbose") + ", " + darkgreen("-v"), None,
            " Give a lot of additional information (slow!)"),
        (darkgreen(" --ebuild") + ", " + darkgreen("-e"), None,
            " View ebuilds of found packages"),
        (darkgreen(" --own=") + "format" + ", " + darkgreen("-o"),
            "format",
            " Use your own output format, see manpage for details of format"),
        (darkgreen(" --directory=") + "dir" + ", " + darkgreen("-d"),
            "dir",
            " Use dir as directory to load esearch index from"),
        (darkgreen(" --nocolor") + ", " + darkgreen("-n"), None,
            " Don't use ANSI codes for colored output"),
    ]
    for idx, (heading, extra, description) in enumerate(entries):
        if idx:
            print("")
        if extra is None:
            print(heading)
        else:
            print(heading, extra)
        print(description)
    sys.exit(0)
def help():
    """Print the full emerge usage summary."""
    t, g = turquoise, green
    print(bold("emerge:") + " command-line interface to the Portage system")
    print(bold("Usage:"))
    targets = " | ".join(t(w) for w in ("ebuild", "tbz2", "file", "@set", "atom"))
    print(" " + t("emerge") + " [ " + g("options") + " ] [ " + g("action")
        + " ] [ " + targets + " ] [ ... ]")
    print(" " + t("emerge") + " [ " + g("options") + " ] [ " + g("action")
        + " ] < " + t("@system") + " | " + t("@world") + " >")
    print(" " + t("emerge") + " < " + t("--sync") + " | " + t("--metadata")
        + " | " + t("--info") + " >")
    print(" " + t("emerge") + " " + t("--resume") + " [ " + g("--pretend")
        + " | " + g("--ask") + " | " + g("--skipfirst") + " ]")
    print(" " + t("emerge") + " " + t("--help"))
    print(bold("Options:") + " " + g("-") + "["
        + g("abBcCdDefgGhjkKlnNoOpPqrsStuUvVw") + "]")
    print(" [ " + g("--color") + " < " + t("y") + " | " + t("n")
        + " > ] [ " + g("--columns") + " ]")
    print(" [ " + g("--complete-graph") + " ] [ " + g("--deep") + " ]")
    print(" [ " + g("--jobs") + " " + t("JOBS") + " ] [ " + g("--keep-going")
        + " ] [ " + g("--load-average") + " " + t("LOAD") + " ]")
    print(" [ " + g("--newrepo") + " ] [ " + g("--newuse") + " ] [ "
        + g("--noconfmem") + " ] [ " + g("--nospinner") + " ]")
    print(" [ " + g("--oneshot") + " ] [ " + g("--onlydeps") + " ] [ "
        + g("--quiet-build") + " [ " + t("y") + " | " + t("n") + " ] ]")
    print(" [ " + g("--reinstall ") + t("changed-use") + " ] [ "
        + g("--with-bdeps") + " < " + t("y") + " | " + t("n") + " > ]")
    print(bold("Actions:") + " [ " + g("--depclean") + " | " + g("--list-sets")
        + " | " + g("--search") + " | " + g("--sync") + " | "
        + g("--version") + " ]")
    print()
    print(" For more help consult the man page.")
def print_changelog(self):
    """Prints the changelog text to std_out
    """
    if not self.changelogs:
        return
    writemsg_stdout('\n', noiselevel=-1)
    for revision, text in self.changelogs:
        header = bold('*' + revision)
        writemsg_stdout(header + '\n' + text, noiselevel=-1)
def query(self, prompt, enter_invalid, responses=None, colours=None):
    """Display a prompt and a set of responses, then waits for user input
    and check it against the responses.  The first match is returned.

    An empty response will match the first value in the list of responses,
    unless enter_invalid is True.  The input buffer is *not* cleared prior
    to the prompt!

    prompt: The String to display as a prompt.
    responses: a List of Strings with the acceptable responses.
    colours: a List of Functions taking and returning a String, used to
    process the responses for display.  Typically these will be functions
    like red() but could be e.g. lambda x: "DisplayString".

    If responses is omitted, it defaults to ["Yes", "No"], [green, red].
    If only colours is omitted, it defaults to [bold, ...].

    Returns a member of the List responses.  (If called without optional
    arguments, it returns "Yes" or "No".)

    KeyboardInterrupt is converted to SystemExit to avoid tracebacks being
    printed."""
    if responses is None:
        responses = ["Yes", "No"]
        colours = [
            create_color_func("PROMPT_CHOICE_DEFAULT"),
            create_color_func("PROMPT_CHOICE_OTHER"),
        ]
    elif colours is None:
        colours = [bold]
    # Repeat the colour list so every response has a colour function.
    colours = (colours * len(responses))[:len(responses)]
    responses = [_unicode_decode(x) for x in responses]
    if "--alert" in self.myopts:
        prompt = "\a" + prompt
    print(bold(prompt), end=" ")
    choice_list = "/".join(
        colour(resp) for colour, resp in zip(colours, responses))
    try:
        while True:
            try:
                response = input("[%s] " % choice_list)
            except UnicodeDecodeError as e:
                response = _unicode_decode(e.object).rstrip("\n")
            if response or not enter_invalid:
                for key in responses:
                    # An empty response will match the
                    # first value in responses.
                    if response.upper() == key[:len(response)].upper():
                        return key
            print("Sorry, response '%s' not understood." % response, end=" ")
    except (EOFError, KeyboardInterrupt):
        print("Interrupted.")
        sys.exit(128 + signal.SIGINT)
def _check_updates(self):
    """Return warning lines when a newer portage is available.

    The suggestion is suppressed when the update would change the set of
    enabled PYTHON_TARGETS flags, since that complicates the upgrade.
    Also reports pending CONFIG_PROTECT updates as a side effect.
    """
    target_config = self.emerge_config.target_config
    mybestpv = target_config.trees['porttree'].dbapi.xmatch(
        "bestmatch-visible", portage.const.PORTAGE_PACKAGE_ATOM)
    mypvs = portage.best(
        target_config.trees['vartree'].dbapi.match(
            portage.const.PORTAGE_PACKAGE_ATOM))
    try:
        old_use = (target_config.trees["vartree"].dbapi
            .aux_get(mypvs, ["USE"])[0].split())
    except KeyError:
        old_use = ()
    chk_updated_cfg_files(
        target_config.root,
        portage.util.shlex_split(
            target_config.settings.get("CONFIG_PROTECT", "")),
    )
    msgs = []
    if (not (mybestpv and mypvs) or mybestpv == mypvs
            or "--quiet" in self.emerge_config.opts):
        return msgs
    # Suggest to update to the latest available version of portage.
    # Since changes to PYTHON_TARGETS cause complications, this message
    # is suppressed if the new version has different PYTHON_TARGETS
    # enabled than previous version.
    portdb = target_config.trees["porttree"].dbapi
    portdb.doebuild_settings.setcpv(mybestpv, mydb=portdb)
    usemask = portdb.doebuild_settings.usemask
    useforce = portdb.doebuild_settings.useforce
    new_use = (frozenset(portdb.doebuild_settings["PORTAGE_USE"].split())
        | useforce) - usemask
    new_python_targets = frozenset(
        flag for flag in new_use if flag.startswith("python_targets_"))
    old_python_targets = frozenset(
        flag for flag in old_use if flag.startswith("python_targets_"))
    if new_python_targets == old_python_targets:
        msgs.extend([
            '',
            warn(" * ") + bold("An update to portage is available.")
            + " It is _highly_ recommended",
            warn(" * ")
            + "that you update portage now, before any other packages are updated.",
            '',
            warn(" * ")
            + "To update portage, run 'emerge --oneshot sys-apps/portage' now.",
            '',
        ])
    return msgs
def print_changelog(self):
    """Prints the changelog text to std_out
    """
    if not self.changelogs:
        return
    writemsg_stdout('\n', noiselevel=-1)
    for revision, text in self.changelogs:
        entry = bold('*' + revision) + '\n' + text
        writemsg_stdout(entry, noiselevel=-1)
def usage():
    """Show the esearch option summary, then exit successfully."""
    print("esearch (%s) - Replacement for 'emerge search' with search-index"
        % version)
    print("")
    print(bold("Usage:"), "esearch [", darkgreen("options"), "] pattern")
    print(bold("Options:"))
    # Each entry: (coloured heading, optional extra arg, description).
    option_table = [
        (darkgreen(" --help") + ", " + darkgreen("-h"), None,
            " Print help message"),
        (darkgreen(" --searchdesc") + ", " + darkgreen("-S"), None,
            " Search package descriptions as well"),
        (darkgreen(" --fullname") + ", " + darkgreen("-F"), None,
            " Search packages full name (includes category)"),
        (darkgreen(" --instonly") + ", " + darkgreen("-I"), None,
            " Find only packages which are installed"),
        (darkgreen(" --notinst") + ", " + darkgreen("-N"), None,
            " Find only packages which are not installed"),
        (darkgreen(" --exclude=") + "xpattern" + ", " + darkgreen("-x"),
            "xpattern",
            " Exclude packages matching xpattern from search result"),
        (darkgreen(" --compact") + ", " + darkgreen("-c"), None,
            " More compact output format"),
        (darkgreen(" --verbose") + ", " + darkgreen("-v"), None,
            " Give a lot of additional information (slow!)"),
        (darkgreen(" --ebuild") + ", " + darkgreen("-e"), None,
            " View ebuilds of found packages"),
        (darkgreen(" --own=") + "format" + ", " + darkgreen("-o"),
            "format",
            " Use your own output format, see manpage for details of format"),
        (darkgreen(" --directory=") + "dir" + ", " + darkgreen("-d"),
            "dir",
            " Use dir as directory to load esearch index from"),
        (darkgreen(" --nocolor") + ", " + darkgreen("-n"), None,
            " Don't use ANSI codes for colored output"),
    ]
    for position, (heading, extra, description) in enumerate(option_table):
        if position:
            print("")
        if extra is None:
            print(heading)
        else:
            print(heading, extra)
        print(description)
    sys.exit(0)
def query(self, prompt, enter_invalid, responses=None, colours=None):
    """Display a prompt and a set of responses, then waits for user input
    and check it against the responses.  The first match is returned.

    An empty response will match the first value in the list of responses,
    unless enter_invalid is True.  The input buffer is *not* cleared prior
    to the prompt!

    prompt: The String to display as a prompt.
    responses: a List of Strings with the acceptable responses.
    colours: a List of Functions taking and returning a String, used to
    process the responses for display.  Typically these will be functions
    like red() but could be e.g. lambda x: "DisplayString".

    If responses is omitted, it defaults to ["Yes", "No"], [green, red].
    If only colours is omitted, it defaults to [bold, ...].

    Returns a member of the List responses.  (If called without optional
    arguments, it returns "Yes" or "No".)

    KeyboardInterrupt is converted to SystemExit to avoid tracebacks being
    printed."""
    if responses is None:
        responses = ["Yes", "No"]
        colours = [create_color_func("PROMPT_CHOICE_DEFAULT"),
            create_color_func("PROMPT_CHOICE_OTHER")]
    elif colours is None:
        colours = [bold]
    # Repeat colour functions so every response has one.
    colours = (colours * len(responses))[:len(responses)]
    responses = [_unicode_decode(x) for x in responses]
    if "--alert" in self.myopts:
        prompt = "\a" + prompt
    print(bold(prompt), end=" ")
    choice_list = "/".join(
        colour(resp) for colour, resp in zip(colours, responses))
    try:
        while True:
            if sys.hexversion >= 0x3000000:
                try:
                    response = input("[%s] " % choice_list)
                except UnicodeDecodeError as e:
                    response = _unicode_decode(e.object).rstrip("\n")
            else:
                # Python 2 path: raw_input returns bytes.
                response = raw_input("[" + choice_list + "] ")
                response = _unicode_decode(response)
            if response or not enter_invalid:
                for key in responses:
                    # An empty response will match the
                    # first value in responses.
                    if response.upper() == key[:len(response)].upper():
                        return key
            print("Sorry, response '%s' not understood." % response, end=" ")
    except (EOFError, KeyboardInterrupt):
        print("Interrupted.")
        sys.exit(128 + signal.SIGINT)
def usage():
    """Print the eupdatedb help text and exit with status 0."""
    print("eupdatedb (%s) - Update the search-index for esearch" % version)
    print("")
    print(bold("Usage:"), "eupdatedb [", darkgreen("options"), "]")
    print(bold("Options:"))
    # (heading, description) pairs, blank line between entries.
    entries = [
        (darkgreen(" --help") + ", " + darkgreen("-h"),
            " Print this help message"),
        (darkgreen(" --verbose") + ", " + darkgreen("-v"),
            " Verbose mode, show categories"),
        (darkgreen(" --quiet") + ", " + darkgreen("-q"),
            " Print only summary"),
        (darkgreen(" --directory=") + "dir, " + darkgreen("-d") + " dir",
            " Load esearch index from dir"),
        (darkgreen(" --nocolor") + ", " + darkgreen("-n"),
            " Don't use ANSI codes for colored output"),
    ]
    for idx, (heading, description) in enumerate(entries):
        if idx:
            print("")
        print(heading)
        print(description)
    sys.exit(0)
def extract_dependencies_from_la(la, libraries, to_check, logger):
    """Scan libtool .la files for dependency_libs entries that cannot be
    resolved against the known libraries.

    @param la: iterable of .la file paths to inspect
    @param libraries: iterable of known library paths
    @param to_check: optional iterable; when non-empty, only entries
        containing one of these substrings are reported
    @param logger: python logging style object for output
    @return: list of .la file paths with unresolved dependencies
    """
    broken = []
    libnames = []
    for lib in libraries:
        match = re.match(r".+\/(.+)\.(so|la|a)(\..+)?", lib)
        if match is not None:
            libname = match.group(1)
            if libname not in libnames:
                libnames.append(libname)
    for _file in la:
        if not os.path.exists(_file):
            continue
        # Fix: use a context manager so the file handle is closed promptly;
        # the original open(...).readlines() leaked the handle until GC.
        with open(
            _unicode_encode(_file, encoding=_encodings["fs"]),
            mode="r",
            encoding=_encodings["content"],
        ) as la_file:
            lines = la_file.readlines()
        for line in lines:
            line = line.strip()
            if line.startswith("dependency_libs="):
                match = re.match(r"dependency_libs='([^']+)'", line)
                if match is not None:
                    for el in match.group(1).split(" "):
                        el = el.strip()
                        if len(el) < 1 or el.startswith("-L") or el.startswith(
                                "-R"):
                            continue
                        if el.startswith("-l") and "lib" + el[2:] in libnames:
                            pass
                        elif el in la or el in libraries:
                            pass
                        else:
                            if to_check:
                                _break = False
                                for tc in to_check:
                                    if tc in el:
                                        _break = True
                                        break
                                if not _break:
                                    continue
                            logger.info("\t" + yellow(" * ") + _file
                                + " is broken (requires: " + bold(el) + ")")
                            broken.append(_file)
    return broken
def _non_commit(self, result):
    """Report QA scan results for a non-commit repoman run."""
    if result["full"]:
        print(bold('Note: type "repoman full" for a complete listing.'))
    warn_only = result["warn"] and not result["fail"]
    if warn_only:
        if self.options.quiet:
            print(bold("Non-Fatal QA errors found"))
        else:
            utilities.repoman_sez(
                "\"You're only giving me a partial QA payment?\n"
                " I'll take it this time, but I'm not happy.\""
            )
    elif not result["fail"]:
        if self.options.quiet:
            print("No QA issues found")
        else:
            utilities.repoman_sez(
                '"If everyone were like you, I\'d be out of business!"')
    elif result["fail"]:
        print(bad("Please fix these important QA issues first."))
        if not self.options.quiet:
            utilities.repoman_sez(
                '"Make your QA payment on time'
                " and you'll never see the likes of me.\"\n")
def assign_packages(broken, logger, settings):
    '''Finds and returns packages that owns files placed in broken.
    Broken is list of files
    '''
    assigned = set()
    for group in os.listdir(settings['PKG_DIR']):
        if group in IGNORED:
            continue
        elif os.path.isfile(settings['PKG_DIR'] + group):
            if not group.startswith('.keep_'):
                logger.warn(yellow(
                    " * Invalid category found in the installed pkg db: ")
                    + bold(settings['PKG_DIR'] + group))
            continue
        for pkg in os.listdir(settings['PKG_DIR'] + group):
            if '-MERGING-' in pkg:
                # Fix: include the category in the reported path; the
                # previous message printed PKG_DIR + pkg, which is not a
                # real path in the package database.
                logger.warn(yellow(
                    " * Invalid/incomplete package merge found in the "
                    "installed pkg db: ")
                    + bold(settings['PKG_DIR'] + group + '/' + pkg))
                continue
            _file = settings['PKG_DIR'] + group + '/' + pkg + '/CONTENTS'
            if os.path.exists(_file):
                try:
                    with open(_file, 'r') as cnt:
                        for line in cnt:
                            matches = re.match('^obj (/[^ ]+)', line)
                            if matches is not None:
                                match = matches.group(1)
                                if match in broken:
                                    found = group + '/' + pkg
                                    if found not in assigned:
                                        assigned.add(found)
                                    logger.info('\t' + match + ' -> '
                                        + bold(found))
                except Exception as ex:
                    logger.warn(red(' !! Failed to read ' + _file)
                        + " Original exception was:\n" + str(ex))
    return assigned
def extract_dependencies_from_la(la, libraries, to_check, logger):
    '''Collect .la files whose dependency_libs reference missing libs.

    @param la: iterable of .la file paths to inspect
    @param libraries: iterable of known library paths
    @param to_check: optional iterable; when non-empty, only entries
        containing one of these substrings are reported
    @param logger: python logging style object for output
    @return: list of broken .la file paths
    '''
    broken = []
    libnames = []
    for lib in libraries:
        # Fix: raw string; '\/' and '\.' in a plain literal are invalid
        # escape sequences (warnings on modern Python).
        match = re.match(r'.+\/(.+)\.(so|la|a)(\..+)?', lib)
        if match is not None:
            libname = match.group(1)
            if libname not in libnames:
                libnames.append(libname)
    for _file in la:
        if not os.path.exists(_file):
            continue
        # Fix: context manager; the original open(...).readlines()
        # leaked the file handle until GC.
        with open(_unicode_encode(_file, encoding=_encodings['fs']),
                mode='r', encoding=_encodings['content']) as la_file:
            lines = la_file.readlines()
        for line in lines:
            line = line.strip()
            if line.startswith('dependency_libs='):
                match = re.match(r"dependency_libs='([^']+)'", line)
                if match is not None:
                    for el in match.group(1).split(' '):
                        el = el.strip()
                        if (len(el) < 1 or el.startswith('-L')
                                or el.startswith('-R')):
                            continue
                        if el.startswith('-l') and 'lib' + el[2:] in libnames:
                            pass
                        elif el in la or el in libraries:
                            pass
                        else:
                            if to_check:
                                _break = False
                                for tc in to_check:
                                    if tc in el:
                                        _break = True
                                        break
                                if not _break:
                                    continue
                            logger.info('\t' + yellow(' * ') + _file
                                + ' is broken (requires: ' + bold(el) + ')')
                            broken.append(_file)
    return broken
def userquery(prompt, enter_invalid, responses=None, colours=None):
    """Displays a prompt and a set of responses, then waits for a response
    which is checked against the responses and the first to match is
    returned.

    An empty response will match the first value in responses, unless
    enter_invalid is True.  The input buffer is *not* cleared prior to
    the prompt!

    prompt: a String.
    responses: a List of Strings.
    colours: a List of Functions taking and returning a String, used to
    process the responses for display.  Typically these will be functions
    like red() but could be e.g. lambda x: "DisplayString".

    If responses is omitted, defaults to ["Yes", "No"], [green, red].
    If only colours is omitted, defaults to [bold, ...].

    Returns a member of the List responses.  (If called without optional
    arguments, returns "Yes" or "No".)

    KeyboardInterrupt is converted to SystemExit to avoid tracebacks being
    printed."""
    if responses is None:
        responses = ["Yes", "No"]
        colours = [
            create_color_func("PROMPT_CHOICE_DEFAULT"),
            create_color_func("PROMPT_CHOICE_OTHER")
        ]
    elif colours is None:
        colours = [bold]
    colours = (colours * len(responses))[:len(responses)]
    print(bold(prompt), end=' ')
    choice_list = "[" + "/".join(
        colour(resp) for colour, resp in zip(colours, responses)) + "] "
    try:
        while True:
            if sys.hexversion >= 0x3000000:
                response = input(choice_list)
            else:
                response = raw_input(choice_list)
            if response or not enter_invalid:
                for key in responses:
                    # An empty response will match the
                    # first value in responses.
                    if response.upper() == key[:len(response)].upper():
                        return key
            print("Sorry, response '%s' not understood." % response, end=' ')
    except (EOFError, KeyboardInterrupt):
        print("Interrupted.")
        sys.exit(1)
def userquery(prompt, responses=None, default_response_num=1):
    """
    Inspired by portage's _emerge.userquery.

    Gives the user a question ('prompt') and forces him to chose one of
    the responses ('responses', defaulting to 'Yes' and 'No').  Returns
    the (full) choice made.
    """
    default_colour_f = output.green   # colour for the default response
    normal_colour_f = output.red      # colour for every other response
    if responses is None:
        responses = ["Yes", "No"]
    coloured_responses = [
        default_colour_f(r) if i + 1 == default_response_num
        else normal_colour_f(r)
        for i, r in enumerate(responses)
    ]
    final_prompt = "%s [%s] " % (
        output.bold(prompt), "/".join(coloured_responses))
    input_function = input if sys.hexversion >= 0x3000000 else raw_input
    while True:
        # Directly using 'input_function(final_prompt)'
        # leads to problems on my machine.
        print(final_prompt, end='')
        response = input_function()
        if not response:
            # Return the default choice:
            return responses[default_response_num - 1]
        for r in responses:
            if response.lower() == r[:len(response)].lower():
                return r
        print("Sorry, response '%s' not understood." % response)
def _non_commit(self, result):
    """Report QA results for a non-commit repoman run; exits on failure."""
    if result['full']:
        print(bold("Note: type \"repoman full\" for a complete listing."))
    only_warnings = result['warn'] and not result['fail']
    if only_warnings:
        if self.options.quiet:
            print(bold("Non-Fatal QA errors found"))
        else:
            utilities.repoman_sez(
                "\"You're only giving me a partial QA payment?\n"
                " I'll take it this time, but I'm not happy.\""
            )
    elif not result['fail']:
        if self.options.quiet:
            print("No QA issues found")
        else:
            utilities.repoman_sez(
                "\"If everyone were like you, I'd be out of business!\"")
    elif result['fail']:
        print(bad("Please fix these important QA issues first."))
        if not self.options.quiet:
            utilities.repoman_sez(
                "\"Make your QA payment on time"
                " and you'll never see the likes of me.\"\n")
        sys.exit(1)
def usage():
    """Show the esync option summary, then exit successfully."""
    print("esync (%s) - Calls 'emerge sync' and 'eupdatedb' and shows updates"
        % version)
    print("")
    print(bold("Usage:"), "esync [", darkgreen("options"), "]")
    print(bold("Options:"))
    # Each entry is (coloured heading, description); entries are separated
    # by a blank line.
    option_table = [
        (darkgreen(" --help") + ", " + darkgreen("-h"),
            " Print this help message"),
        (darkgreen(" --webrsync") + ", " + darkgreen("-w"),
            " Use 'emerge-webrsync' instead of 'emerge --sync'"),
        (darkgreen(" --delta-webrsync") + ", " + darkgreen("-d"),
            " Use 'emerge-delta-webrsync' instead of 'emerge --sync'"),
        (darkgreen(" --metadata") + ", " + darkgreen("-m"),
            " Use 'emerge --metadata' instead of 'emerge --sync'"),
        (darkgreen(" --layman-sync") + ", " + darkgreen("-l"),
            " Use layman to sync any installed overlays, then sync the main tree"),
        (darkgreen(" --nocolor") + ", " + darkgreen("-n"),
            " Don't use ANSI codes for colored output"),
        (darkgreen(" --quiet") + ", " + darkgreen("-q"),
            " Less output (implies --nospinner)"),
        (darkgreen(" --verbose") + ", " + darkgreen("-v"),
            " Verbose output"),
        (darkgreen(" --nospinner") + ", " + darkgreen("-s"),
            " Don't display the remaining index count"),
    ]
    for position, (heading, description) in enumerate(option_table):
        if position:
            print("")
        print(heading)
        print(description)
    sys.exit(0)
def rebuild(logger, assigned, settings):
    """Re-emerge the assigned packages; return emerge's exit status (0 if
    there was nothing to do)."""
    opts = settings['pass_through_options']
    if settings['EXACT']:
        pkgs = filter_masked(assigned, logger)
        target = '=' + ' ='.join(pkgs)
    else:
        pkgs = get_slotted_cps(assigned, logger)
        target = ' '.join(pkgs)

    if settings['PRETEND']:
        opts += ' --pretend'
    verbosity = settings['VERBOSITY']
    if verbosity >= 2:
        opts += ' --verbose'
    elif verbosity < 1:
        opts += ' --quiet'
    if settings['nocolor']:
        opts += ' --color n'

    if not target:
        logger.warning(bold('\nThere is nothing to emerge. Exiting.'))
        return 0

    logger.warning(yellow('\nemerge') + opts
                   + ' --oneshot --complete-graph=y ' + bold(target))

    start = current_milli_time()
    cmdline = ('emerge ' + opts
               + ' --oneshot --complete-graph=y ' + target).split()
    status = subprocess.call(cmdline)
    elapsed = current_milli_time() - start
    logger.debug("\trebuild(); emerge call for %d ebuilds took: %s seconds"
                 % (len(pkgs), str(elapsed / 1000.0)))
    return status
def _non_commit(self, result):
    """Print QA scan feedback (non-commit mode) and exit with status 1."""
    if result['full']:
        print(bold("Note: type \"repoman full\" for a complete listing."))
    failed = result['fail']
    if result['warn'] and not failed:
        utilities.repoman_sez(
            "\"You're only giving me a partial QA payment?\n"
            " I'll take it this time, but I'm not happy.\"")
    elif not failed:
        utilities.repoman_sez(
            "\"If everyone were like you, I'd be out of business!\"")
    else:
        print(bad("Please fix these important QA issues first."))
        utilities.repoman_sez(
            "\"Make your QA payment on time"
            " and you'll never see the likes of me.\"\n")
    sys.exit(1)
def assign_packages(broken, logger, settings):
    '''
    Finds and returns packages that owns files placed in broken.
    Broken is list of files

    @param broken: list of broken file paths to attribute to packages
    @param logger: python logging-style object used for output
    @param settings: dict; only settings['PKG_DIR'] (VDB root) is read here
    @return: tuple (assigned_pkgs, orphaned) where assigned_pkgs is a set of
        "category/package" names owning at least one broken file and
        orphaned is the set of broken files matched by no package
    '''
    stime = current_milli_time()

    # Matcher holding every broken file, intersected against each
    # package's CONTENTS below.
    broken_matcher = _file_matcher()
    for filename in broken:
        broken_matcher.add(filename)

    assigned_pkgs = set()
    assigned_filenames = set()
    # Walk PKG_DIR as <category>/<package>/CONTENTS.
    for group in os.listdir(settings['PKG_DIR']):
        grppath = settings['PKG_DIR'] + group
        if not os.path.isdir(grppath):
            continue
        for pkg in os.listdir(grppath):
            pkgpath = settings['PKG_DIR'] + group + '/' + pkg
            if not os.path.isdir(pkgpath):
                continue
            f = pkgpath + '/CONTENTS'
            if os.path.exists(f):
                contents_matcher = _file_matcher()
                try:
                    with io.open(f, 'r', encoding='utf_8') as cnt:
                        # Only 'obj' (regular file) entries are relevant;
                        # the path is the first field after the keyword.
                        for line in cnt.readlines():
                            m = re.match('^obj (/[^ ]+)', line)
                            if m is not None:
                                contents_matcher.add(m.group(1))
                except Exception as e:
                    # Unreadable CONTENTS: warn and skip this package.
                    logger.warning(red(' !! Failed to read ' + f))
                    logger.warning(red(' !! Error was:' + str(e)))
                else:
                    # Only runs when the read succeeded (try/else).
                    for m in contents_matcher.intersection(broken_matcher):
                        found = group+'/'+pkg
                        assigned_pkgs.add(found)
                        assigned_filenames.add(m)
                        logger.info('\t' + green('* ') + m
                                    + ' -> ' + bold(found))

    # Anything not claimed by a package is orphaned.
    broken_filenames = set(broken)
    orphaned = broken_filenames.difference(assigned_filenames)
    ftime = current_milli_time()
    logger.debug("\tassign_packages(); assigned "
                 "%d packages, %d orphans in %d milliseconds"
                 % (len(assigned_pkgs), len(orphaned), ftime-stime))

    return (assigned_pkgs, orphaned)
def main_checks(found_libs, broken_list, dependencies, logger):
    '''Log every file that depends on a broken library.

    found_libs has to be the same as returned by prepare_checks,
    broken_list is the list of libraries found by scanelf and
    dependencies is the value returned by prepare_checks.
    Returns the flat list of dependent file paths, in logging order.
    '''
    paths = []
    for lib in broken_list:
        logger.info('Broken files that requires: ' + bold(found_libs[lib]))
        for dependent in dependencies[lib]:
            logger.info(yellow(' * ') + dependent)
            paths.append(dependent)
    return paths
def main_checks(found_libs, broken_list, dependencies, logger):
    """Report the files that depend on each broken library.

    found_libs must match the dict returned by prepare_checks;
    broken_list is the list of libraries found by scanelf;
    dependencies is the mapping returned by prepare_checks.
    Returns the list of dependent file paths in the order reported.
    """
    collected = []
    for item in broken_list:
        label = found_libs[item]
        logger.info("Broken files that requires: " + bold(label))
        for dep in dependencies[item]:
            logger.info(yellow(" * ") + dep)
            collected.append(dep)
    return collected
def extract_dependencies_from_la(la, libraries, to_check, logger):
    """Find .la files whose recorded dependency_libs refer to unknown libs.

    @param la: iterable of .la file paths to inspect
    @param libraries: iterable of known library paths
    @param to_check: optional iterable of substrings; when non-empty, only
        dependencies containing one of them are reported
    @param logger: python logging-style object used for output
    @return: list of .la file paths considered broken (one entry per
        broken dependency found, so a file may appear more than once)
    """
    broken = []

    # Collect the bare names (no path, no .so/.la/.a extension) of all
    # known libraries, for resolving "-lfoo" style entries.
    # NOTE: raw strings fixed -- the originals used '\/' and '\.' in
    # non-raw literals (invalid escape sequences on modern Python).
    libnames = []
    for lib in libraries:
        match = re.match(r'.+\/(.+)\.(so|la|a)(\..+)?', lib)
        if match is not None:
            libname = match.group(1)
            if libname not in libnames:
                libnames += [libname, ]

    for _file in la:
        if not os.path.exists(_file):
            continue

        # BUGFIX: the original opened each file without ever closing it,
        # leaking one file handle per .la processed.
        with open(_unicode_encode(_file, encoding=_encodings['fs']),
                  mode='r', encoding=_encodings['content']) as la_file:
            lines = la_file.readlines()

        for line in lines:
            line = line.strip()
            if not line.startswith('dependency_libs='):
                continue
            match = re.match(r"dependency_libs='([^']+)'", line)
            if match is None:
                continue
            for el in match.group(1).split(' '):
                el = el.strip()
                # Skip empty tokens and linker search-path flags.
                if (len(el) < 1 or el.startswith('-L')
                        or el.startswith('-R')):
                    continue

                if el.startswith('-l') and 'lib'+el[2:] in libnames:
                    # -lfoo resolves to a known libfoo -- fine.
                    pass
                elif el in la or el in libraries:
                    # Direct reference to a known .la/library -- fine.
                    pass
                else:
                    if to_check:
                        # Only report dependencies that match the
                        # requested filter substrings.
                        _break = False
                        for tc in to_check:
                            if tc in el:
                                _break = True
                                break
                        if not _break:
                            continue

                    logger.info('\t' + yellow(' * ') + _file
                                + ' is broken (requires: ' + bold(el)+')')
                    broken.append(_file)
    return broken
def chk_updated_cfg_files(self, eroot, config_protect):
    """Warn the user about CONFIG_PROTECTed config files needing updates.

    @param eroot: root path under which to look for updated config files
    @param config_protect: list of CONFIG_PROTECT entries to scan

    BUGFIX: removed a stray 'print("DEBUG: scanning /etc for config
    files....")' left over from debugging -- it was emitted on every call.
    """
    target_root = eroot
    result = list(portage.util.find_updated_config_files(target_root,
                                                         config_protect))

    for x in result:
        print("\n"+colorize("WARN", " * IMPORTANT:"), end=' ')
        if not x[1]:  # it's a protected file
            print("config file '%s' needs updating." % x[0])
        else:  # it's a protected dir
            print("%d config files in '%s' need updating." %
                  (len(x[1]), x[0]))

    if result:
        print(" "+yellow("*")+" See the "
              + colorize("INFORM", "CONFIGURATION FILES")
              + " section of the " + bold("emerge"))
        print(" "+yellow("*")
              + " man page to learn how to update config files.")
def userquery(prompt, enter_invalid, responses=None, colours=None):
    """Present *prompt* plus a list of possible responses and return the
    response the user picked.

    An empty answer selects the first response unless enter_invalid is
    true.  The input buffer is *not* cleared prior to the prompt.

    @param prompt: a String shown to the user
    @param enter_invalid: when true, an empty reply is not accepted
    @param responses: a List of Strings; defaults to ["Yes", "No"]
    @param colours: a List of Functions taking and returning a String,
        used to decorate the responses for display (e.g. red());
        recycled/truncated to match len(responses); defaults to [bold]
        when only responses is given
    @return: the matched member of responses
    @raises SystemExit: on EOF or KeyboardInterrupt (exit status 1),
        to avoid tracebacks being printed
    """
    if responses is None:
        responses = ["Yes", "No"]
        colours = [
            create_color_func("PROMPT_CHOICE_DEFAULT"),
            create_color_func("PROMPT_CHOICE_OTHER")
        ]
    elif colours is None:
        colours = [bold]
    # Recycle the colour list so every response gets a decorator.
    colours = (colours * len(responses))[:len(responses)]
    print(bold(prompt), end=' ')
    try:
        while True:
            decorated = [c(r) for c, r in zip(colours, responses)]
            choices = "[" + "/".join(decorated) + "] "
            # raw_input() only exists on Python 2.
            if sys.hexversion >= 0x3000000:
                response = input(choices)
            else:
                response = raw_input(choices)
            if response or not enter_invalid:
                for key in responses:
                    # An empty response will match the
                    # first value in responses.
                    if response.upper() == key[:len(response)].upper():
                        return key
            print("Sorry, response '%s' not understood." % response,
                  end=' ')
    except (EOFError, KeyboardInterrupt):
        print("Interrupted.")
        sys.exit(1)
def do_normal(pkg, verbose):
    """Build the normal-mode display lines for one package record.

    @param pkg: indexable package record (name, masked flag, versions,
        size, homepage, description, license at the indices used below)
    @param verbose: when true, also report unstable version and USE flags
    @return: list of formatted output strings
    """
    data = []
    installed = pkg[4] if pkg[4] else "[ Not Installed ]"
    masked = red(" [ Masked ]") if pkg[2] else ""

    data.append("%s %s%s\n %s %s\n %s %s" %
                (green("*"), bold(pkg[1]), masked,
                 darkgreen("Latest version available:"), pkg[3],
                 darkgreen("Latest version installed:"), installed))

    if verbose:
        mpv = best(portdb.xmatch("match-all", pkg[1]))
        iuse_split, final_use = get_flags(mpv, final_setting=True)
        flags = []
        for ebuild_iuse in iuse_split:
            use = ebuild_iuse.lstrip('+-')
            if use in final_use:
                flags.append(red("+" + use) + " ")
            else:
                flags.append(blue("-" + use) + " ")
        flags.sort()
        iuse = ' '.join(flags)
        if iuse == "":
            iuse = "-"
        data.append(" %s %s\n %s %s" %
                    (darkgreen("Unstable version:"), pkg_version(mpv),
                     darkgreen("Use Flags (stable):"), iuse))

    data.append(" %s %s\n %s %s\n %s %s\n %s %s\n" %
                (darkgreen("Size of downloaded files:"), pkg[5],
                 darkgreen("Homepage:"), pkg[6],
                 darkgreen("Description:"), pkg[7],
                 darkgreen("License:"), pkg[8]))
    return data
def do_normal(pkg, verbose):
    """Format one package for normal output; returns (lines, False)."""
    if not pkg[4]:
        installed = "[ Not Installed ]"
    else:
        installed = pkg[4]
    masked = red(" [ Masked ]") if pkg[2] else ""

    lines = ["%s %s%s\n %s %s\n %s %s" % (
        green("*"), bold(pkg[1]), masked,
        darkgreen("Latest version available:"), pkg[3],
        darkgreen("Latest version installed:"), installed)]

    if verbose:
        mpv = best(portdb.xmatch("match-all", pkg[1]))
        iuse_split, final_use = get_flags(mpv, final_setting=True)
        # Decorate each USE flag: enabled => red "+flag", else blue "-flag".
        use_list = sorted(
            (red("+" + use) + " ") if use in final_use
            else (blue("-" + use) + " ")
            for use in (flag.lstrip('+-') for flag in iuse_split))
        iuse = ' '.join(use_list)
        if iuse == "":
            iuse = "-"
        lines.append(" %s %s\n %s %s" % (
            darkgreen("Unstable version:"), pkg_version(mpv),
            darkgreen("Use Flags (stable):"), iuse))

    lines.append(" %s %s\n %s %s\n %s %s\n %s %s\n" % (
        darkgreen("Size of downloaded files:"), pkg[5],
        darkgreen("Homepage:"), pkg[6],
        darkgreen("Description:"), pkg[7],
        darkgreen("License:"), pkg[8]))
    return lines, False
def do_compact(pkg):
    """Return the single-line compact representation of a package record."""
    masked_flag = "M" if pkg[2] else " "
    if pkg[3] == pkg[4]:
        # Installed and up to date.
        colour, status = darkgreen, "I"
    elif not pkg[4]:
        # Not installed.
        colour, status = darkgreen, "N"
    else:
        # Installed, update available.
        colour, status = turquoise, "U"
    return " [%s%s] %s (%s): %s" % \
        (red(masked_flag), colour(status), bold(pkg[1]),
         colour(pkg[3]), pkg[7])
def _check_updates(self):
    """Check whether a newer portage is available; return warning lines
    (empty list when up to date or --quiet was given)."""
    target_config = self.emerge_config.target_config
    mybestpv = target_config.trees['porttree'].dbapi.xmatch(
        "bestmatch-visible", portage.const.PORTAGE_PACKAGE_ATOM)
    mypvs = portage.best(
        target_config.trees['vartree'].dbapi.match(
            portage.const.PORTAGE_PACKAGE_ATOM))

    chk_updated_cfg_files(
        target_config.root,
        portage.util.shlex_split(
            target_config.settings.get("CONFIG_PROTECT", "")))

    msgs = []
    if mybestpv != mypvs and "--quiet" not in self.emerge_config.opts:
        msgs.append('')
        msgs.append(warn(" * ")
                    + bold("An update to portage is available.")
                    + " It is _highly_ recommended")
        msgs.append(warn(" * ")
                    + "that you update portage now, before any other packages are updated.")
        msgs.append('')
        msgs.append(warn(" * ")
                    + "To update portage, run 'emerge --oneshot portage' now.")
        msgs.append('')
    return msgs
def repo(self, **kwargs):
    '''Sync the specified repo

    @param kwargs: optional "options" dict carrying "repo" (str or list)
        and "return-messages" (bool)
    @return: message list when return-messages is set and nothing matched,
        None when nothing matched, otherwise the result of self._sync()
    '''
    options = kwargs.get('options', None)
    if options:
        repos = options.get('repo', '')
        return_messages = options.get('return-messages', False)
    else:
        # BUGFIX: repos was previously left unbound on this path, raising
        # UnboundLocalError below whenever no options were passed.
        repos = ''
        return_messages = False
    if isinstance(repos, _basestring):
        repos = repos.split()
    available = self._get_repos(auto_sync_only=False)
    selected = self._match_repos(repos, available)
    if not selected:
        msgs = [red(" * ") + "Emaint sync, The specified repos were not found: %s"
            % (bold(", ".join(repos))) + "\n ...returning"
            ]
        if return_messages:
            return msgs
        return
    return self._sync(selected, return_messages, emaint_opts=options)
def process_results(self, found_libs, scanned_files=None):
    '''Processes the search results, logs the files found

    @param found_libs: dictionary of the type returned by search()
    @param scanned_files: optional dictionary of the type created by
        scan_files(); defaults to the class instance of scanned_files
    @return: list of filepaths from the search results
    '''
    stime = current_milli_time()
    if not scanned_files:
        scanned_files = self.scanned_files
    paths = []
    log = self.logger.info
    for bits, found in found_libs.items():
        for lib, files in found.items():
            log(self.pmsg % (bold(lib), bits))
            for filepath in sorted(files):
                log('\t' + yellow('* ') + filepath)
                paths.append(filepath)
    ftime = current_milli_time()
    self.logger.debug("\tLibCheck.process_results(); total filepaths found: "
                      "%d in %d milliseconds" % (len(paths), ftime - stime))
    return paths
def repo(self, **kwargs):
    '''Sync the specified repo

    @param kwargs: optional "options" dict carrying "repo" (str or list)
        and "return-messages" (bool)
    @return: message list when return-messages is set and nothing matched,
        None when nothing matched, otherwise the result of self._sync()
    '''
    options = kwargs.get('options', None)
    if options:
        repos = options.get('repo', '')
        return_messages = options.get('return-messages', False)
    else:
        # BUGFIX: without this default, repos was referenced before
        # assignment (UnboundLocalError) whenever options was missing.
        repos = ''
        return_messages = False
    if isinstance(repos, _basestring):
        repos = repos.split()
    available = self._get_repos(auto_sync_only=False)
    selected = self._match_repos(repos, available)
    if not selected:
        msgs = [
            red(" * ")
            + "Emaint sync, The specified repos were not found: %s"
            % (bold(", ".join(repos))) + "\n ...returning"
        ]
        if return_messages:
            return msgs
        return
    return self._sync(selected, return_messages, emaint_opts=options)
def __init__(
    self,
    scanned_files,
    logger,
    searchlibs=None,
    searchbits=None,
    all_masks=None,
    masked_dirs=None,
):
    """LibCheck init function.

    @param scanned_files: optional dictionary if the type created by
        scan_files(). Defaults to the class instance of scanned_files
    @param logger: python style Logging function to use for output.
    @param searchlibs: optional set() of libraries to search for. If
        defined it toggles several settings to configure this class for
        a target search rather than a broken libs search.
    @param searchbits: optional iterable of bit widths ("32"/"64") to
        search; defaults to both when omitted or empty.
    @param all_masks: optional mask data forwarded to the checks.
    @param masked_dirs: optional directories to exclude from checks.
    """
    self.scanned_files = scanned_files
    self.logger = logger
    self.searchlibs = searchlibs
    # BUGFIX: sorted(searchbits) raised TypeError when searchbits was
    # left at its None default; fall back to both widths instead.
    self.searchbits = sorted(searchbits) if searchbits else ["32", "64"]
    self.all_masks = all_masks
    self.masked_dirs = masked_dirs
    # BUGFIX: message previously said "searchlibs" while printing the
    # searchbits value.
    self.logger.debug("\tLibCheck.__init__(), new searchbits: %s"
                      % (self.searchbits))
    if searchlibs:
        # Target search: look for files depending on the given libs.
        self.smsg = "\tLibCheck.search(), Checking for %s bit dependants"
        self.pmsg = yellow(" * ") + "Files that depend on: %s (%s bits)"
        self.setlibs = self._setslibs
        self.check = self._checkforlib
    else:
        # Broken-libs search (the default mode).
        self.smsg = "\tLibCheck.search(), Checking for broken %s bit libs"
        self.pmsg = (green(" * ") + bold("Broken files that require:")
                     + " %s (%s bits)")
        self.setlibs = self._setlibs
        self.check = self._checkbroken
    self.sfmsg = ("\tLibCheck.search(); Total found: %(count)d libs, "
                  "%(deps)d files in %(time)d milliseconds")
    self.alllibs = None
def process_results(self, found_libs, scanned_files=None):
    '''Process and log the search results; return the found file paths.

    @param found_libs: dictionary of the type returned by search()
    @param scanned_files: optional dictionary of the type created by
        scan_files(); defaults to the class instance of scanned_files
    @return: list of filepaths from the search results
    '''
    start = current_milli_time()
    scanned_files = scanned_files or self.scanned_files
    collected = []
    for bits, per_lib in found_libs.items():
        for lib, files in per_lib.items():
            self.logger.info(self.pmsg % (bold(lib), bits))
            ordered = sorted(files)
            for fp in ordered:
                self.logger.info('\t' + yellow('* ') + fp)
            collected.extend(ordered)
    elapsed = current_milli_time() - start
    self.logger.debug(
        "\tLibCheck.process_results(); total filepaths found: "
        "%d in %d milliseconds" % (len(collected), elapsed))
    return collected
def searchEbuilds(path, portdir=True, searchdef="", repo_num="",
                  config=None, data=None):
    """Collect the ebuilds under *path* and append numbered entries to data.

    @param path: category/package directory to scan (with trailing slash)
    @param portdir: True when scanning the main tree, False for an overlay
    @param searchdef: exact "name-version" which, when found, becomes
        data['defebuild'] (the default ebuild offered to the user)
    @param repo_num: overlay number used in the output label
    @param config: shared config dict ('found_in_overlay' is set here)
    @param data: shared result dict ('ebuilds', 'output', 'defebuild')
    """
    from functools import cmp_to_key

    pv = ""
    pkgs = []
    nr = len(data['ebuilds']) + 1

    if portdir:
        rep = darkgreen("Portage ")
    else:
        rep = red("Overlay " + str(repo_num) + " ")

    if isdir(path):
        for entry in listdir(path):
            if entry[-7:] == ".ebuild":
                pv = entry[:-7]
                pkgs.append(list(pkgsplit(pv)))
                pkgs[-1].append(path + entry)
                if searchdef != "" and pv == searchdef:
                    data['defebuild'] = (searchdef, pkgs[-1][3])

        if not portdir:
            config['found_in_overlay'] = True

        # BUGFIX: list.sort() no longer accepts a comparison function as
        # a positional argument (Python 3); wrap mypkgcmp with cmp_to_key.
        pkgs.sort(key=cmp_to_key(mypkgcmp))
        for pkg in pkgs:
            rev = ""
            if pkg[2] != "r0":
                rev = "-" + pkg[2]
            data['output'].append(
                " " + rep + " [" + bold(str(nr)) + "] "
                + pkg[0] + "-" + pkg[1] + rev + "\n")
            data['ebuilds'].append(pkg[len(pkg) - 1])
            nr += 1
def update(self):
    '''Internal update function which performs the transfer.

    Syncs self.repo via rsync (or a direct local sync for file:// URIs),
    retrying across the resolved mirror addresses, and optionally
    verifies the result against a GLEP 74 MetaManifest using gemato.

    @return: tuple (exitcode, updatecache_flg)
    '''
    opts = self.options.get('emerge_config').opts
    self.usersync_uid = self.options.get('usersync_uid', None)
    enter_invalid = '--ask-enter-invalid' in opts
    quiet = '--quiet' in opts
    out = portage.output.EOutput(quiet=quiet)
    syncuri = self.repo.sync_uri
    # Refuse to rsync over a checkout unless explicitly overridden.
    if self.repo.module_specific_options.get(
            'sync-rsync-vcs-ignore', 'false').lower() == 'true':
        vcs_dirs = ()
    else:
        vcs_dirs = frozenset(VCS_DIRS)
        vcs_dirs = vcs_dirs.intersection(os.listdir(self.repo.location))

    for vcs_dir in vcs_dirs:
        writemsg_level(("!!! %s appears to be under revision " + \
            "control (contains %s).\n!!! Aborting rsync sync "
            "(override with \"sync-rsync-vcs-ignore = true\" in repos.conf).\n") % \
            (self.repo.location, vcs_dir), level=logging.ERROR, noiselevel=-1)
        return (1, False)
    self.timeout = 180

    rsync_opts = []
    if self.settings["PORTAGE_RSYNC_OPTS"] == "":
        rsync_opts = self._set_rsync_defaults()
    else:
        rsync_opts = self._validate_rsync_opts(rsync_opts, syncuri)
    self.rsync_opts = self._rsync_opts_extend(opts, rsync_opts)

    self.extra_rsync_opts = list()
    if self.repo.module_specific_options.get('sync-rsync-extra-opts'):
        self.extra_rsync_opts.extend(
            portage.util.shlex_split(
                self.repo.module_specific_options['sync-rsync-extra-opts'])
        )

    exitcode = 0
    verify_failure = False

    # Process GLEP74 verification options.
    # Default verification to 'no'; it's enabled for ::gentoo
    # via default repos.conf though.
    self.verify_metamanifest = (self.repo.module_specific_options.get(
        'sync-rsync-verify-metamanifest', 'no') in ('yes', 'true'))
    # Support overriding job count.
    self.verify_jobs = self.repo.module_specific_options.get(
        'sync-rsync-verify-jobs', None)
    if self.verify_jobs is not None:
        try:
            self.verify_jobs = int(self.verify_jobs)
            if self.verify_jobs < 0:
                raise ValueError(self.verify_jobs)
        except ValueError:
            writemsg_level(
                "!!! sync-rsync-verify-jobs not a positive integer: %s\n" % (self.verify_jobs, ),
                level=logging.WARNING, noiselevel=-1)
            self.verify_jobs = None
        else:
            if self.verify_jobs == 0:
                # Use the apparent number of processors if gemato
                # supports it.
                self.verify_jobs = None
    # Support overriding max age.
    self.max_age = self.repo.module_specific_options.get(
        'sync-rsync-verify-max-age', '')
    if self.max_age:
        try:
            self.max_age = int(self.max_age)
            if self.max_age < 0:
                raise ValueError(self.max_age)
        except ValueError:
            writemsg_level(
                "!!! sync-rsync-max-age must be a non-negative integer: %s\n" % (self.max_age, ),
                level=logging.WARNING, noiselevel=-1)
            self.max_age = 0
    else:
        self.max_age = 0

    openpgp_env = None
    if self.verify_metamanifest and gemato is not None:
        # Use isolated environment if key is specified,
        # system environment otherwise
        if self.repo.sync_openpgp_key_path is not None:
            openpgp_env = gemato.openpgp.OpenPGPEnvironment()
        else:
            openpgp_env = gemato.openpgp.OpenPGPSystemEnvironment()

    try:
        # Load and update the keyring early. If it fails, then verification
        # will not be performed and the user will have to fix it and try again,
        # so we may as well bail out before actual rsync happens.
        if openpgp_env is not None and self.repo.sync_openpgp_key_path is not None:
            try:
                out.einfo('Using keys from %s' % (self.repo.sync_openpgp_key_path, ))
                with io.open(self.repo.sync_openpgp_key_path, 'rb') as f:
                    openpgp_env.import_key(f)
                self._refresh_keys(openpgp_env)
            except (GematoException, asyncio.TimeoutError) as e:
                writemsg_level(
                    "!!! Manifest verification impossible due to keyring problem:\n%s\n" % (e, ),
                    level=logging.ERROR, noiselevel=-1)
                return (1, False)

        # Real local timestamp file.
        self.servertimestampfile = os.path.join(
            self.repo.location, "metadata", "timestamp.chk")

        content = portage.util.grabfile(self.servertimestampfile)
        timestamp = 0
        if content:
            try:
                timestamp = time.mktime(
                    time.strptime(content[0], TIMESTAMP_FORMAT))
            except (OverflowError, ValueError):
                pass
        del content

        try:
            self.rsync_initial_timeout = \
                int(self.settings.get("PORTAGE_RSYNC_INITIAL_TIMEOUT", "15"))
        except ValueError:
            self.rsync_initial_timeout = 15

        try:
            maxretries = int(self.settings["PORTAGE_RSYNC_RETRIES"])
        except SystemExit as e:
            raise  # Needed else can't exit
        except:
            maxretries = -1  # default number of retries

        # Local (file://) sync short-circuits all the network handling.
        if syncuri.startswith("file://"):
            self.proto = "file"
            dosyncuri = syncuri[7:]
            unchanged, is_synced, exitcode, updatecache_flg = self._do_rsync(
                dosyncuri, timestamp, opts)
            self._process_exitcode(exitcode, dosyncuri, out, 1)
            if exitcode == 0:
                if unchanged:
                    self.repo_storage.abort_update()
                else:
                    self.repo_storage.commit_update()
                    self.repo_storage.garbage_collection()
            return (exitcode, updatecache_flg)

        retries = 0
        try:
            self.proto, user_name, hostname, port = re.split(
                r"(rsync|ssh)://([^:/]+@)?(\[[:\da-fA-F]*\]|[^:/]*)(:[0-9]+)?",
                syncuri, maxsplit=4)[1:5]
        except ValueError:
            writemsg_level("!!! sync-uri is invalid: %s\n" % syncuri,
                noiselevel=-1, level=logging.ERROR)
            return (1, False)

        self.ssh_opts = self.settings.get("PORTAGE_SSH_OPTS")

        if port is None:
            port = ""
        if user_name is None:
            user_name = ""
        if re.match(r"^\[[:\da-fA-F]*\]$", hostname) is None:
            getaddrinfo_host = hostname
        else:
            # getaddrinfo needs the brackets stripped
            getaddrinfo_host = hostname[1:-1]
        updatecache_flg = False
        all_rsync_opts = set(self.rsync_opts)
        all_rsync_opts.update(self.extra_rsync_opts)

        # Honor explicit -4/-6 rsync options when resolving the host.
        family = socket.AF_UNSPEC
        if "-4" in all_rsync_opts or "--ipv4" in all_rsync_opts:
            family = socket.AF_INET
        elif socket.has_ipv6 and \
            ("-6" in all_rsync_opts or "--ipv6" in all_rsync_opts):
            family = socket.AF_INET6

        addrinfos = None
        uris = []

        try:
            addrinfos = getaddrinfo_validate(
                socket.getaddrinfo(getaddrinfo_host, None,
                    family, socket.SOCK_STREAM))
        except socket.error as e:
            writemsg_level("!!! getaddrinfo failed for '%s': %s\n"
                % (_unicode_decode(hostname), _unicode(e)),
                noiselevel=-1, level=logging.ERROR)

        if addrinfos:
            AF_INET = socket.AF_INET
            AF_INET6 = None
            if socket.has_ipv6:
                AF_INET6 = socket.AF_INET6

            ips_v4 = []
            ips_v6 = []

            for addrinfo in addrinfos:
                if addrinfo[0] == AF_INET:
                    ips_v4.append("%s" % addrinfo[4][0])
                elif AF_INET6 is not None and addrinfo[0] == AF_INET6:
                    # IPv6 addresses need to be enclosed in square brackets
                    ips_v6.append("[%s]" % addrinfo[4][0])

            random.shuffle(ips_v4)
            random.shuffle(ips_v6)

            # Give priority to the address family that
            # getaddrinfo() returned first.
            if AF_INET6 is not None and addrinfos and \
                addrinfos[0][0] == AF_INET6:
                ips = ips_v6 + ips_v4
            else:
                ips = ips_v4 + ips_v6

            # One candidate URI per resolved address.
            for ip in ips:
                uris.append(syncuri.replace(
                    "//" + user_name + hostname + port + "/",
                    "//" + user_name + ip + port + "/", 1))

        if not uris:
            # With some configurations we need to use the plain hostname
            # rather than try to resolve the ip addresses (bug #340817).
            uris.append(syncuri)

        # reverse, for use with pop()
        uris.reverse()
        uris_orig = uris[:]

        effective_maxretries = maxretries
        if effective_maxretries < 0:
            effective_maxretries = len(uris) - 1

        # Retry loop: try each candidate URI until one syncs or the
        # retry budget is exhausted.
        local_state_unchanged = True
        while (1):
            if uris:
                dosyncuri = uris.pop()
            elif maxretries < 0 or retries > maxretries:
                writemsg("!!! Exhausted addresses for %s\n"
                    % _unicode_decode(hostname), noiselevel=-1)
                return (1, False)
            else:
                uris.extend(uris_orig)
                dosyncuri = uris.pop()

            if (retries == 0):
                if "--ask" in opts:
                    uq = UserQuery(opts)
                    if uq.query("Do you want to sync your ebuild repository " + \
                        "with the mirror at\n" + blue(dosyncuri) + bold("?"),
                        enter_invalid) == "No":
                        print()
                        print("Quitting.")
                        print()
                        sys.exit(128 + signal.SIGINT)
                self.logger(self.xterm_titles,
                    ">>> Starting rsync with " + dosyncuri)
                if "--quiet" not in opts:
                    print(">>> Starting rsync with " + dosyncuri + "...")
            else:
                self.logger(self.xterm_titles,
                    ">>> Starting retry %d of %d with %s" % \
                    (retries, effective_maxretries, dosyncuri))
                writemsg_stdout(
                    "\n\n>>> Starting retry %d of %d with %s\n" % \
                    (retries, effective_maxretries, dosyncuri), noiselevel=-1)

            if dosyncuri.startswith('ssh://'):
                # rsync expects "host:/path" syntax for ssh transport.
                dosyncuri = dosyncuri[6:].replace('/', ':/', 1)

            unchanged, is_synced, exitcode, updatecache_flg = self._do_rsync(
                dosyncuri, timestamp, opts)
            if not unchanged:
                local_state_unchanged = False
            if is_synced:
                break

            retries = retries + 1

            if maxretries < 0 or retries <= maxretries:
                print(">>> Retrying...")
            else:
                # over retries
                # exit loop
                exitcode = EXCEEDED_MAX_RETRIES
                break
        self._process_exitcode(exitcode, dosyncuri, out, maxretries)

        if local_state_unchanged:
            # The quarantine download_dir is not intended to exist
            # in this case, so refer gemato to the normal repository
            # location.
            download_dir = self.repo.location
        else:
            download_dir = self.download_dir

        # if synced successfully, verify now
        if exitcode == 0 and self.verify_metamanifest:
            if gemato is None:
                writemsg_level(
                    "!!! Unable to verify: gemato-11.0+ is required\n",
                    level=logging.ERROR, noiselevel=-1)
                exitcode = 127
            else:
                try:
                    # we always verify the Manifest signature, in case
                    # we had to deal with key revocation case
                    m = gemato.recursiveloader.ManifestRecursiveLoader(
                        os.path.join(download_dir, 'Manifest'),
                        verify_openpgp=True,
                        openpgp_env=openpgp_env,
                        max_jobs=self.verify_jobs)
                    if not m.openpgp_signed:
                        raise RuntimeError(
                            'OpenPGP signature not found on Manifest')

                    ts = m.find_timestamp()
                    if ts is None:
                        raise RuntimeError(
                            'Timestamp not found in Manifest')
                    if (self.max_age != 0 and
                            (datetime.datetime.utcnow() - ts.ts).days
                            > self.max_age):
                        out.quiet = False
                        out.ewarn(
                            'Manifest is over %d days old, this is suspicious!'
                            % (self.max_age, ))
                        out.ewarn(
                            'You may want to try using another mirror and/or reporting this one:'
                        )
                        out.ewarn(' %s' % (dosyncuri, ))
                        out.ewarn('')
                        out.quiet = quiet

                    out.einfo('Manifest timestamp: %s UTC' % (ts.ts, ))
                    out.einfo('Valid OpenPGP signature found:')
                    out.einfo(
                        '- primary key: %s' %
                        (m.openpgp_signature.primary_key_fingerprint))
                    out.einfo('- subkey: %s' %
                        (m.openpgp_signature.fingerprint))
                    out.einfo('- timestamp: %s UTC' %
                        (m.openpgp_signature.timestamp))

                    # if nothing has changed, skip the actual Manifest
                    # verification
                    if not local_state_unchanged:
                        out.ebegin('Verifying %s' % (download_dir, ))
                        m.assert_directory_verifies()
                        out.eend(0)
                except GematoException as e:
                    writemsg_level(
                        "!!! Manifest verification failed:\n%s\n" % (e, ),
                        level=logging.ERROR, noiselevel=-1)
                    exitcode = 1
                    verify_failure = True

        if exitcode == 0 and not local_state_unchanged:
            self.repo_storage.commit_update()
            self.repo_storage.garbage_collection()

        return (exitcode, updatecache_flg)
    finally:
        # Don't delete the update if verification failed, in case
        # the cause needs to be investigated.
        if not verify_failure:
            self.repo_storage.abort_update()
        if openpgp_env is not None:
            openpgp_env.close()
def _calc_changelog(ebuildpath, current, next):
    """Return the ChangeLog lines covering versions between *current*
    (installed) and *next* (about to be emerged).

    @param ebuildpath: path to the ebuild whose ChangeLog(s) to read
    @param current: installed cpv; reduced to "version[-revision]" below
    @param next: target cpv; reduced the same way
    @return: list of output lines (each ending in "\\n"), or [] when no
        relevant entries are found or a ChangeLog can't be read
    """
    if ebuildpath == None or not os.path.exists(ebuildpath):
        return []
    # Strip category and any trivial -r0 revision from both versions.
    current = "-".join(catpkgsplit(current)[1:])
    if current.endswith("-r0"):
        current = current[:-3]
    next = "-".join(catpkgsplit(next)[1:])
    if next.endswith("-r0"):
        next = next[:-3]
    changelogdir = os.path.dirname(ebuildpath)
    changelogs = ["ChangeLog"]
    # ChangeLog-YYYY (see bug #389611)
    changelogs.extend(sorted((fn for fn in os.listdir(changelogdir)
        if fn.startswith("ChangeLog-")), reverse=True))

    # Accumulate (version, lines) divisions newer than *current*.
    divisions = []
    found_current = False
    for fn in changelogs:
        changelogpath = os.path.join(changelogdir, fn)
        try:
            with io.open(
                _unicode_encode(changelogpath,
                    encoding=_encodings["fs"], errors="strict"),
                mode="r", encoding=_encodings["repo.content"],
                errors="replace",
            ) as f:
                changelog = f.read()
        except EnvironmentError:
            return []
        for node in _find_changelog_tags(changelog):
            if node[0] == current:
                found_current = True
                break
            else:
                divisions.append(node)
        if found_current:
            break

    if not found_current:
        return []

    # print 'XX from',current,'to',next
    # for div,text in divisions: print 'XX',div

    # skip entries for all revisions above the one we are about to emerge
    later_rev_index = None
    for i, node in enumerate(divisions):
        if node[0] == next:
            if later_rev_index is not None:
                first_node = divisions[later_rev_index]
                # Discard the later revision and the first ChangeLog entry
                # that follows it. We want to display all the entries after
                # that first entry, as discussed in bug #373009.
                # NOTE(review): trimmed_lines is never used below --
                # appears to be leftover; confirm before removing.
                trimmed_lines = []
                iterator = iter(first_node[1])
                for l in iterator:
                    if not l:
                        # end of the first entry that's discarded
                        break
                first_node = (None, list(iterator))
                divisions = [first_node] + divisions[later_rev_index + 1 :]
            break
        if node[0] is not None:
            later_rev_index = i

    # Render the surviving divisions, separating revision headers and
    # entry bodies with blank lines.
    output = []
    prev_blank = False
    prev_rev = False
    for rev, lines in divisions:
        if rev is not None:
            if not (prev_blank or prev_rev):
                output.append("\n")
            output.append(bold("*" + rev) + "\n")
            prev_rev = True
            prev_blank = False
        if lines:
            prev_rev = False
            if not prev_blank:
                output.append("\n")
            for l in lines:
                output.append(l + "\n")
            output.append("\n")
            prev_blank = True
    return output
def output_results(config, regexlist, found):
    """Print the search results according to config['outputm'] and, in
    EBUILDS mode, interactively offer to open one ebuild in $EDITOR.

    @param config: esearch config dict (output mode, portdir, overlay, ...)
    @param regexlist: search patterns (NOTE(review): unused here -- confirm
        whether it is needed for the commented-out results header below)
    @param found: list of package records to display
    @return: False when the user interrupts input, True otherwise
    """
    data = {}
    data['ebuilds'] = []
    data['defebuild'] = (0, 0)
    data['output'] = []
    count = 0

    for pkg in found:
        if config['outputm'] in (NORMAL, VERBOSE):
            data['output'] += do_normal(pkg, config['outputm'] == VERBOSE)
        elif config['outputm'] in (COMPACT, EBUILDS):
            data['output'].append(do_compact(pkg))
        elif config['outputm'] == OWN:
            data['output'].append(do_own(pkg, config['outputf']))
        if config['outputm'] == EBUILDS:
            # Only the first result can become the default ebuild.
            if count == 0:
                searchdef = pkg[0] + "-" + pkg[3]
            else:
                searchdef = ""
            # NOTE(review): presumably defined elsewhere in this module;
            # a camelCase searchEbuilds variant also exists -- confirm.
            search_ebuilds("%s/%s/" % (config['portdir'], pkg[1]),
                True, searchdef, "", config, data)
            if config['overlay']:
                repo_num = 1
                for repo in config['overlay'].split():
                    search_ebuilds("%s/%s/" % (repo, pkg[1]),
                        False, searchdef, repo_num, config, data)
                    repo_num += 1
        count += 1

    data['count'] = len(found)
    data['output'] = '\n'.join(data['output'])
    if config['outputm'] in (NORMAL, VERBOSE):
        #print("[ Results for search key :", bold(pattern), "]")
        print("[ Applications found :", bold(str(count)), "]\n")
        try:
            print(data['output'], end=' ')
            print("")
        except IOError:
            # Broken pipe (e.g. piping into head) -- ignore.
            pass
    else:
        print(data['output'])

    if config['outputm'] == EBUILDS:
        if config['overlay'] and config['found_in_overlay']:
            # Legend mapping overlay numbers to their paths.
            repo_num = 1
            for repo in config['overlay'].split():
                print(red("Overlay " + str(repo_num) + " : " + repo))
                repo_num += 1

        if count != 0:
            if count > 1:
                data['defebuild'] = (0, 0)

            if len(data['ebuilds']) == 1:
                # Single hit: open it without prompting.
                nr = 1
            else:
                if data['defebuild'][0] != 0:
                    print(bold("\nShow Ebuild"), " ("
                        + darkgreen(data['defebuild'][0]) + "): ", end=' ')
                else:
                    print(bold("\nShow Ebuild: "), end=' ')
                try:
                    nr = sys.stdin.readline()
                except KeyboardInterrupt:
                    return False
            try:
                editor = getenv("EDITOR")
                if editor:
                    system(editor + " " + data['ebuilds'][int(nr) - 1])
                else:
                    print("")
                    error("Please set EDITOR", False,
                        stderr=config['stderr'])
            except IndexError:
                print("", file=config['stderr'])
                error("No such ebuild", False, stderr=config['stderr'])
            except ValueError:
                # Non-numeric reply: fall back to the default ebuild,
                # if one was established above.
                if data['defebuild'][0] != 0:
                    system(editor + " " + data['defebuild'][1])
                else:
                    print("", file=config['stderr'])
                    error("Please enter a valid number", False,
                        stderr=config['stderr'])
    return True
def main(settings=None, logger=None):
    """Main program operation method....

    @param settings: dict.  defaults to settings.DEFAULTS
    @param logger: python logging module defaults to init_logger(settings)
    @return boolean  success/failure
    """
    if settings is None:
        print("NO Input settings, using defaults...")
        settings = DEFAULTS.copy()
    if logger is None:
        logger = init_logger(settings)

    _libs_to_check = settings['library']

    # Suppress ANSI color codes when not attached to a tty or when the
    # user explicitly disabled color.
    if not settings['stdout'].isatty() or settings['nocolor']:
        nocolor()

    #TODO: Development warning
    logger.warn(blue(' * ') + yellow('This is a development version, '
        'so it may not work correctly'))
    logger.warn(blue(' * ') + yellow('The original revdep-rebuild script is '
        'installed as revdep-rebuild.sh'))

    # Non-root users cannot actually emerge anything; force pretend mode.
    if os.getuid() != 0 and not settings['PRETEND']:
        logger.warn(blue(' * ') +
            yellow('You are not root, adding --pretend to portage options'))
        settings['PRETEND'] = True

    if settings['library']:
        logger.warn(green(' * ') + "Looking for libraries: %s"
            % (bold(', '.join(settings['library']))))

    # Prefer the cached scan results when temp files are enabled and valid.
    use_cache = settings['USE_TMP_FILES'] and check_temp_files(
        settings['DEFAULT_TMP_DIR'], logger=logger)
    if use_cache:
        libraries, la_libraries, libraries_links, binaries = read_cache(
            settings['DEFAULT_TMP_DIR'])
        assigned = analyse(
            settings=settings,
            logger=logger,
            libraries=libraries,
            la_libraries=la_libraries,
            libraries_links=libraries_links,
            binaries=binaries,
            _libs_to_check=_libs_to_check)
    else:
        assigned = analyse(settings, logger, _libs_to_check=_libs_to_check)

    if not assigned:
        logger.warn('\n' + bold('Your system is consistent'))
        # return the correct exit code
        return 0

    # Drop any packages whose every candidate ebuild is masked, warning
    # about each one.
    has_masked = False
    unmasked = []
    for ebuild in assigned:
        if get_masking_status(ebuild):
            has_masked = True
            logger.warn('!!! ' + red('All ebuilds that could satisfy: ') +
                green(ebuild) + red(' have been masked'))
        else:
            unmasked.append(ebuild)
    assigned = unmasked

    if has_masked:
        logger.info(red(' * ') +
            'Unmask all ebuild(s) listed above and call revdep-rebuild '
            'again or manually emerge given packages.')

    success = rebuild(logger, assigned, settings)
    logger.debug("rebuild return code = %i" % success)
    return success
def _calc_changelog(ebuildpath,current,next): # pylint: disable=redefined-builtin if ebuildpath == None or not os.path.exists(ebuildpath): return [] current = '-'.join(catpkgsplit(current)[1:]) if current.endswith('-r0'): current = current[:-3] next = '-'.join(catpkgsplit(next)[1:]) if next.endswith('-r0'): next = next[:-3] changelogdir = os.path.dirname(ebuildpath) changelogs = ['ChangeLog'] # ChangeLog-YYYY (see bug #389611) changelogs.extend(sorted((fn for fn in os.listdir(changelogdir) if fn.startswith('ChangeLog-')), reverse=True)) divisions = [] found_current = False for fn in changelogs: changelogpath = os.path.join(changelogdir, fn) try: with io.open(_unicode_encode(changelogpath, encoding=_encodings['fs'], errors='strict'), mode='r', encoding=_encodings['repo.content'], errors='replace') as f: changelog = f.read() except EnvironmentError: return [] for node in _find_changelog_tags(changelog): if node[0] == current: found_current = True break else: divisions.append(node) if found_current: break if not found_current: return [] #print 'XX from',current,'to',next #for div,text in divisions: print 'XX',div # skip entries for all revisions above the one we are about to emerge later_rev_index = None for i, node in enumerate(divisions): if node[0] == next: if later_rev_index is not None: first_node = divisions[later_rev_index] # Discard the later revision and the first ChangeLog entry # that follows it. We want to display all the entries after # that first entry, as discussed in bug #373009. 
trimmed_lines = [] iterator = iter(first_node[1]) for l in iterator: if not l: # end of the first entry that's discarded break first_node = (None, list(iterator)) divisions = [first_node] + divisions[later_rev_index+1:] break if node[0] is not None: later_rev_index = i output = [] prev_blank = False prev_rev = False for rev, lines in divisions: if rev is not None: if not (prev_blank or prev_rev): output.append("\n") output.append(bold('*' + rev) + '\n') prev_rev = True prev_blank = False if lines: prev_rev = False if not prev_blank: output.append("\n") for l in lines: output.append(l + "\n") output.append("\n") prev_blank = True return output
def unmerge(root_config, myopts, unmerge_action,
        unmerge_files, ldpath_mtimes, autoclean=0,
        clean_world=1, clean_delay=1, ordered=0, raise_on_error=0,
        scheduler=None, writemsg_level=portage.util.writemsg_level):
    """Uninstall installed packages per `unmerge_action`.

    @param root_config: root configuration object providing settings, sets
        and the vartree for the target root
    @param myopts: emerge command-line options mapping ("--pretend",
        "--ask", "--quiet", ...)
    @param unmerge_action: one of "unmerge", "prune", "clean"
    @param unmerge_files: package arguments (atoms, cat/pkg names or vdb
        paths); empty means global prune/clean
    @param ldpath_mtimes: passed through to portage.unmerge
    @param autoclean: skip the CLEAN_DELAY countdown when true
    @param clean_world: remove unmerged packages from the "selected" set
        (disabled when --deselect=n)
    @param clean_delay: enable warning/clean countdowns
    @param ordered: preserve selection order (needed for --depclean/--prune)
    @param raise_on_error: raise UninstallFailure instead of sys.exit on a
        failed unmerge
    @param scheduler: passed through to portage.unmerge
    @param writemsg_level: output function (injectable for testing/quiet)
    @return: 0 when nothing was unmerged or the user aborted, 1 on success;
        may sys.exit() on unmerge failure or ambiguous package name
    """
    if clean_world:
        clean_world = myopts.get('--deselect') != 'n'
    quiet = "--quiet" in myopts
    enter_invalid = '--ask-enter-invalid' in myopts
    settings = root_config.settings
    sets = root_config.sets
    vartree = root_config.trees["vartree"]
    candidate_catpkgs = []
    global_unmerge = 0
    xterm_titles = "notitles" not in settings.features
    out = portage.output.EOutput()
    pkg_cache = {}
    db_keys = list(vartree.dbapi._aux_cache_keys)

    def _pkg(cpv):
        # Cached construction of installed-Package objects from the vdb.
        pkg = pkg_cache.get(cpv)
        if pkg is None:
            pkg = Package(built=True, cpv=cpv, installed=True,
                metadata=zip(db_keys, vartree.dbapi.aux_get(cpv, db_keys)),
                operation="uninstall", root_config=root_config,
                type_name="installed")
            pkg_cache[cpv] = pkg
        return pkg

    vdb_path = os.path.join(settings["ROOT"], portage.VDB_PATH)
    try:
        # At least the parent needs to exist for the lock file.
        portage.util.ensure_dirs(vdb_path)
    except portage.exception.PortageException:
        pass
    vdb_lock = None
    # Everything up to the selection summary runs under the vdb lock; the
    # finally-block below releases it.
    try:
        if os.access(vdb_path, os.W_OK):
            vdb_lock = portage.locks.lockdir(vdb_path)
        # Resolve the system set into concrete category/package keys,
        # expanding a virtual only when exactly one provider is installed.
        realsyslist = sets["system"].getAtoms()
        syslist = []
        for x in realsyslist:
            mycp = portage.dep_getkey(x)
            if mycp in settings.getvirtuals():
                providers = []
                for provider in settings.getvirtuals()[mycp]:
                    if vartree.dbapi.match(provider):
                        providers.append(provider)
                if len(providers) == 1:
                    syslist.extend(providers)
            else:
                syslist.append(mycp)

        mysettings = portage.config(clone=settings)

        if not unmerge_files:
            if unmerge_action == "unmerge":
                print()
                print(bold("emerge unmerge") + " can only be used with specific package names")
                print()
                return 0
            else:
                global_unmerge = 1

        localtree = vartree
        # process all arguments and add all
        # valid db entries to candidate_catpkgs
        if global_unmerge:
            if not unmerge_files:
                candidate_catpkgs.extend(vartree.dbapi.cp_all())
        else:
            #we've got command-line arguments
            if not unmerge_files:
                print("\nNo packages to unmerge have been provided.\n")
                return 0
            for x in unmerge_files:
                arg_parts = x.split('/')
                if x[0] not in [".","/"] and \
                    arg_parts[-1][-7:] != ".ebuild":
                    #possible cat/pkg or dep; treat as such
                    candidate_catpkgs.append(x)
                elif unmerge_action in ["prune","clean"]:
                    print("\n!!! Prune and clean do not accept individual" + \
                        " ebuilds as arguments;\n skipping.\n")
                    continue
                else:
                    # it appears that the user is specifying an installed
                    # ebuild and we're in "unmerge" mode, so it's ok.
                    if not os.path.exists(x):
                        print("\n!!! The path '"+x+"' doesn't exist.\n")
                        return 0

                    absx = os.path.abspath(x)
                    sp_absx = absx.split("/")
                    if sp_absx[-1][-7:] == ".ebuild":
                        del sp_absx[-1]
                        absx = "/".join(sp_absx)

                    sp_absx_len = len(sp_absx)

                    vdb_path = os.path.join(settings["ROOT"], portage.VDB_PATH)
                    vdb_len = len(vdb_path)

                    sp_vdb = vdb_path.split("/")
                    sp_vdb_len = len(sp_vdb)

                    if not os.path.exists(absx+"/CONTENTS"):
                        print("!!! Not a valid db dir: "+str(absx))
                        return 0

                    if sp_absx_len <= sp_vdb_len:
                        # The Path is shorter... so it can't be inside the vdb.
                        print(sp_absx)
                        print(absx)
                        print("\n!!!",x,"cannot be inside "+ \
                            vdb_path+"; aborting.\n")
                        return 0

                    for idx in range(0,sp_vdb_len):
                        if idx >= sp_absx_len or sp_vdb[idx] != sp_absx[idx]:
                            print(sp_absx)
                            print(absx)
                            print("\n!!!", x, "is not inside "+\
                                vdb_path+"; aborting.\n")
                            return 0

                    # Translate the vdb path into an exact =cat/pkg-ver atom.
                    print("="+"/".join(sp_absx[sp_vdb_len:]))
                    candidate_catpkgs.append(
                        "="+"/".join(sp_absx[sp_vdb_len:]))

        newline=""
        if (not "--quiet" in myopts):
            newline="\n"
        if settings["ROOT"] != "/":
            writemsg_level(darkgreen(newline+ \
                ">>> Using system located in ROOT tree %s\n" % \
                settings["ROOT"]))

        if (("--pretend" in myopts) or ("--ask" in myopts)) and \
            not ("--quiet" in myopts):
            writemsg_level(darkgreen(newline+\
                ">>> These are the packages that would be unmerged:\n"))

        # Preservation of order is required for --depclean and --prune so
        # that dependencies are respected. Use all_selected to eliminate
        # duplicate packages since the same package may be selected by
        # multiple atoms.
        pkgmap = []
        all_selected = set()
        for x in candidate_catpkgs:
            # cycle through all our candidate deps and determine
            # what will and will not get unmerged
            try:
                mymatch = vartree.dbapi.match(x)
            except portage.exception.AmbiguousPackageName as errpkgs:
                print("\n\n!!! The short ebuild name \"" + \
                    x + "\" is ambiguous. Please specify")
                print("!!! one of the following fully-qualified " + \
                    "ebuild names instead:\n")
                for i in errpkgs[0]:
                    print(" " + green(i))
                print()
                sys.exit(1)

            if not mymatch and x[0] not in "<>=~":
                mymatch = localtree.dep_match(x)
            if not mymatch:
                portage.writemsg("\n--- Couldn't find '%s' to %s.\n" % \
                    (x, unmerge_action), noiselevel=-1)
                continue

            # One protected/selected/omitted bucket per matching argument.
            pkgmap.append(
                {"protected": set(), "selected": set(), "omitted": set()})
            mykey = len(pkgmap) - 1
            if unmerge_action=="unmerge":
                for y in mymatch:
                    if y not in all_selected:
                        pkgmap[mykey]["selected"].add(y)
                        all_selected.add(y)
            elif unmerge_action == "prune":
                if len(mymatch) == 1:
                    continue
                # Keep the best version (per slot/counter) and select the rest.
                best_version = mymatch[0]
                best_slot = vartree.getslot(best_version)
                best_counter = vartree.dbapi.cpv_counter(best_version)
                for mypkg in mymatch[1:]:
                    myslot = vartree.getslot(mypkg)
                    mycounter = vartree.dbapi.cpv_counter(mypkg)
                    if (myslot == best_slot and mycounter > best_counter) or \
                        mypkg == portage.best([mypkg, best_version]):
                        if myslot == best_slot:
                            if mycounter < best_counter:
                                # On slot collision, keep the one with the
                                # highest counter since it is the most
                                # recently installed.
                                continue
                        best_version = mypkg
                        best_slot = myslot
                        best_counter = mycounter
                pkgmap[mykey]["protected"].add(best_version)
                pkgmap[mykey]["selected"].update(mypkg for mypkg in mymatch \
                    if mypkg != best_version and mypkg not in all_selected)
                all_selected.update(pkgmap[mykey]["selected"])
            else:
                # unmerge_action == "clean"
                slotmap={}
                for mypkg in mymatch:
                    if unmerge_action == "clean":
                        myslot = localtree.getslot(mypkg)
                    else:
                        # since we're pruning, we don't care about slots
                        # and put all the pkgs in together
                        myslot = 0
                    if myslot not in slotmap:
                        slotmap[myslot] = {}
                    slotmap[myslot][localtree.dbapi.cpv_counter(mypkg)] = mypkg

                for mypkg in vartree.dbapi.cp_list(
                    portage.cpv_getkey(mymatch[0])):
                    myslot = vartree.getslot(mypkg)
                    if myslot not in slotmap:
                        slotmap[myslot] = {}
                    slotmap[myslot][vartree.dbapi.cpv_counter(mypkg)] = mypkg

                for myslot in slotmap:
                    counterkeys = list(slotmap[myslot])
                    if not counterkeys:
                        continue
                    counterkeys.sort()
                    # Highest counter in each slot is the most recently
                    # merged package; protect it.
                    pkgmap[mykey]["protected"].add(
                        slotmap[myslot][counterkeys[-1]])
                    del counterkeys[-1]

                    for counter in counterkeys[:]:
                        mypkg = slotmap[myslot][counter]
                        if mypkg not in mymatch:
                            counterkeys.remove(counter)
                            pkgmap[mykey]["protected"].add(
                                slotmap[myslot][counter])

                    #be pretty and get them in order of merge:
                    for ckey in counterkeys:
                        mypkg = slotmap[myslot][ckey]
                        if mypkg not in all_selected:
                            pkgmap[mykey]["selected"].add(mypkg)
                            all_selected.add(mypkg)
                    # ok, now the last-merged package
                    # is protected, and the rest are selected
        numselected = len(all_selected)
        if global_unmerge and not numselected:
            portage.writemsg_stdout("\n>>> No outdated packages were found on your system.\n")
            return 0

        if not numselected:
            portage.writemsg_stdout(
                "\n>>> No packages selected for removal by " + \
                unmerge_action + "\n")
            return 0
    finally:
        if vdb_lock:
            vartree.dbapi.flush_cache()
            portage.locks.unlockdir(vdb_lock)

    from portage._sets.base import EditablePackageSet

    # generate a list of package sets that are directly or indirectly listed in "selected",
    # as there is no persistent list of "installed" sets
    installed_sets = ["selected"]
    stop = False
    pos = 0
    while not stop:
        stop = True
        pos = len(installed_sets)
        for s in installed_sets[pos - 1:]:
            if s not in sets:
                continue
            candidates = [x[len(SETPREFIX):] for x in sets[s].getNonAtoms() if x.startswith(SETPREFIX)]
            if candidates:
                stop = False
                installed_sets += candidates
    installed_sets = [x for x in installed_sets if x not in root_config.setconfig.active]
    del stop, pos

    # we don't want to unmerge packages that are still listed in user-editable package sets
    # listed in "world" as they would be remerged on the next update of "world" or the
    # relevant package sets.
    unknown_sets = set()
    for cp in range(len(pkgmap)):
        for cpv in pkgmap[cp]["selected"].copy():
            try:
                pkg = _pkg(cpv)
            except KeyError:
                # It could have been uninstalled
                # by a concurrent process.
                continue

            # Never let portage unmerge itself outside of depclean.
            if unmerge_action != "clean" and \
                root_config.root == "/" and \
                portage.match_from_list(
                portage.const.PORTAGE_PACKAGE_ATOM, [pkg]):
                msg = ("Not unmerging package %s since there is no valid " + \
                    "reason for portage to unmerge itself.") % (pkg.cpv,)
                for line in textwrap.wrap(msg, 75):
                    out.eerror(line)
                # adjust pkgmap so the display output is correct
                pkgmap[cp]["selected"].remove(cpv)
                all_selected.remove(cpv)
                pkgmap[cp]["protected"].add(cpv)
                continue

            parents = []
            for s in installed_sets:
                # skip sets that the user requested to unmerge, and skip world
                # user-selected set, since the package will be removed from
                # that set later on.
                if s in root_config.setconfig.active or s == "selected":
                    continue

                if s not in sets:
                    if s in unknown_sets:
                        continue
                    unknown_sets.add(s)
                    out = portage.output.EOutput()
                    out.eerror(("Unknown set '@%s' in %s%s") % \
                        (s, root_config.root, portage.const.WORLD_SETS_FILE))
                    continue

                # only check instances of EditablePackageSet as other classes are generally used for
                # special purposes and can be ignored here (and are usually generated dynamically, so the
                # user can't do much about them anyway)
                if isinstance(sets[s], EditablePackageSet):

                    # This is derived from a snippet of code in the
                    # depgraph._iter_atoms_for_pkg() method.
                    for atom in sets[s].iterAtomsForPackage(pkg):
                        inst_matches = vartree.dbapi.match(atom)
                        inst_matches.reverse() # descending order
                        higher_slot = None
                        for inst_cpv in inst_matches:
                            try:
                                inst_pkg = _pkg(inst_cpv)
                            except KeyError:
                                # It could have been uninstalled
                                # by a concurrent process.
                                continue

                            if inst_pkg.cp != atom.cp:
                                continue
                            if pkg >= inst_pkg:
                                # This is descending order, and we're not
                                # interested in any versions <= pkg given.
                                break
                            if pkg.slot_atom != inst_pkg.slot_atom:
                                higher_slot = inst_pkg
                                break
                        if higher_slot is None:
                            parents.append(s)
                            break
            if parents:
                print(colorize("WARN", "Package %s is going to be unmerged," % cpv))
                print(colorize("WARN", "but still listed in the following package sets:"))
                print(" %s\n" % ", ".join(parents))

    del installed_sets

    numselected = len(all_selected)
    if not numselected:
        writemsg_level(
            "\n>>> No packages selected for removal by " + \
            unmerge_action + "\n")
        return 0

    # Unmerge order only matters in some cases
    if not ordered:
        # Merge per-argument buckets into one bucket per cat/pkg, sorted.
        unordered = {}
        for d in pkgmap:
            selected = d["selected"]
            if not selected:
                continue
            cp = portage.cpv_getkey(next(iter(selected)))
            cp_dict = unordered.get(cp)
            if cp_dict is None:
                cp_dict = {}
                unordered[cp] = cp_dict
                for k in d:
                    cp_dict[k] = set()
            for k, v in d.items():
                cp_dict[k].update(v)
        pkgmap = [unordered[cp] for cp in sorted(unordered)]

    # Pretty-print the selection summary, one cat/pkg per bucket.
    for x in range(len(pkgmap)):
        selected = pkgmap[x]["selected"]
        if not selected:
            continue
        for mytype, mylist in pkgmap[x].items():
            if mytype == "selected":
                continue
            mylist.difference_update(all_selected)
        cp = portage.cpv_getkey(next(iter(selected)))
        for y in localtree.dep_match(cp):
            if y not in pkgmap[x]["omitted"] and \
                y not in pkgmap[x]["selected"] and \
                y not in pkgmap[x]["protected"] and \
                y not in all_selected:
                pkgmap[x]["omitted"].add(y)
        if global_unmerge and not pkgmap[x]["selected"]:
            #avoid cluttering the preview printout with stuff that isn't getting unmerged
            continue
        if not (pkgmap[x]["protected"] or pkgmap[x]["omitted"]) and cp in syslist:
            writemsg_level(colorize("BAD","\a\n\n!!! " + \
                "'%s' is part of your system profile.\n" % cp),
                level=logging.WARNING, noiselevel=-1)
            writemsg_level(colorize("WARN","\a!!! Unmerging it may " + \
                "be damaging to your system.\n\n"),
                level=logging.WARNING, noiselevel=-1)
            if clean_delay and "--pretend" not in myopts and "--ask" not in myopts:
                countdown(int(settings["EMERGE_WARNING_DELAY"]),
                    colorize("UNMERGE_WARN", "Press Ctrl-C to Stop"))
        if not quiet:
            writemsg_level("\n %s\n" % (bold(cp),), noiselevel=-1)
        else:
            writemsg_level(bold(cp) + ": ", noiselevel=-1)
        for mytype in ["selected","protected","omitted"]:
            if not quiet:
                writemsg_level((mytype + ": ").rjust(14), noiselevel=-1)
            if pkgmap[x][mytype]:
                sorted_pkgs = [portage.catpkgsplit(mypkg)[1:] for mypkg in pkgmap[x][mytype]]
                sorted_pkgs.sort(key=cmp_sort_key(portage.pkgcmp))
                for pn, ver, rev in sorted_pkgs:
                    if rev == "r0":
                        myversion = ver
                    else:
                        myversion = ver + "-" + rev
                    if mytype == "selected":
                        writemsg_level(
                            colorize("UNMERGE_WARN", myversion + " "),
                            noiselevel=-1)
                    else:
                        writemsg_level(
                            colorize("GOOD", myversion + " "),
                            noiselevel=-1)
            else:
                writemsg_level("none ", noiselevel=-1)
            if not quiet:
                writemsg_level("\n", noiselevel=-1)
        if quiet:
            writemsg_level("\n", noiselevel=-1)

    writemsg_level("\nAll selected packages: %s\n" %
        " ".join(all_selected), noiselevel=-1)

    writemsg_level("\n>>> " + colorize("UNMERGE_WARN", "'Selected'") + \
        " packages are slated for removal.\n")
    writemsg_level(">>> " + colorize("GOOD", "'Protected'") + \
        " and " + colorize("GOOD", "'omitted'") + \
        " packages will not be removed.\n\n")

    if "--pretend" in myopts:
        #we're done... return
        return 0
    if "--ask" in myopts:
        if userquery("Would you like to unmerge these packages?",
            enter_invalid) == "No":
            # enter pretend mode for correct formatting of results
            myopts["--pretend"] = True
            print()
            print("Quitting.")
            print()
            return 0
    #the real unmerging begins, after a short delay....
    if clean_delay and not autoclean:
        countdown(int(settings["CLEAN_DELAY"]), ">>> Unmerging")

    for x in range(len(pkgmap)):
        for y in pkgmap[x]["selected"]:
            writemsg_level(">>> Unmerging "+y+"...\n", noiselevel=-1)
            emergelog(xterm_titles, "=== Unmerging... ("+y+")")
            mysplit = y.split("/")
            #unmerge...
            retval = portage.unmerge(mysplit[0], mysplit[1], settings["ROOT"],
                mysettings, unmerge_action not in ["clean","prune"],
                vartree=vartree, ldpath_mtimes=ldpath_mtimes,
                scheduler=scheduler)

            if retval != os.EX_OK:
                emergelog(xterm_titles, " !!! unmerge FAILURE: "+y)
                if raise_on_error:
                    raise UninstallFailure(retval)
                sys.exit(retval)
            else:
                # Drop the unmerged package from the "selected" set if the
                # set implementation supports it.
                if clean_world and hasattr(sets["selected"], "cleanPackage")\
                    and hasattr(sets["selected"], "lock"):
                    sets["selected"].lock()
                    if hasattr(sets["selected"], "load"):
                        sets["selected"].load()
                    sets["selected"].cleanPackage(vartree.dbapi, y)
                    sets["selected"].unlock()
            emergelog(xterm_titles, " >>> unmerge success: "+y)

    if clean_world and hasattr(sets["selected"], "remove")\
        and hasattr(sets["selected"], "lock"):
        sets["selected"].lock()
        # load is called inside remove()
        for s in root_config.setconfig.active:
            sets["selected"].remove(SETPREFIX + s)
        sets["selected"].unlock()

    return 1
def _do_global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
    """Apply profiles/updates directives (package moves/slotmoves) from
    every configured repository to the vardb, bindb, world file and
    /etc/portage config files.

    @param trees: portage trees mapping; the running eroot's vartree,
        porttree and bintree are used
    @param prev_mtimes: mtime cache (mtimedb) consulted/updated so each
        updates file is only reprocessed when changed
    @param quiet: suppress progress characters and legend output
    @param if_mtime_changed: when False, process updates regardless of
        recorded mtimes
    @return: True if any updates were applied, else False
    """
    root = trees._running_eroot
    mysettings = trees[root]["vartree"].settings
    portdb = trees[root]["porttree"].dbapi
    vardb = trees[root]["vartree"].dbapi
    bindb = trees[root]["bintree"].dbapi

    world_file = os.path.join(mysettings['EROOT'], WORLD_FILE)
    world_list = grabfile(world_file)
    world_modified = False
    world_warnings = set()
    updpath_map = {}
    # Maps repo_name to list of updates. If a given repo has no updates
    # directory, it will be omitted. If a repo has an updates directory
    # but none need to be applied (according to timestamp logic), the
    # value in the dict will be an empty list.
    repo_map = {}
    timestamps = {}

    retupd = False
    update_notice_printed = False
    for repo_name in portdb.getRepositories():
        repo = portdb.getRepositoryPath(repo_name)
        updpath = os.path.join(repo, "profiles", "updates")
        if not os.path.isdir(updpath):
            continue

        # Repos may share an updates directory (e.g. via symlinks); reuse
        # the already-parsed list.
        if updpath in updpath_map:
            repo_map[repo_name] = updpath_map[updpath]
            continue

        try:
            if if_mtime_changed:
                update_data = grab_updates(updpath, prev_mtimes=prev_mtimes)
            else:
                update_data = grab_updates(updpath)
        except DirectoryNotFound:
            continue
        myupd = []
        updpath_map[updpath] = myupd
        repo_map[repo_name] = myupd
        if len(update_data) > 0:
            for mykey, mystat, mycontent in update_data:
                # Print the banner/legend once, before the first update.
                if not update_notice_printed:
                    update_notice_printed = True
                    writemsg_stdout("\n")
                    writemsg_stdout(colorize("GOOD",
                        _("Performing Global Updates\n")))
                    writemsg_stdout(_("(Could take a couple of minutes if you have a lot of binary packages.)\n"))
                    if not quiet:
                        writemsg_stdout(_(" %s='update pass' %s='binary update' "
                            "%s='/var/db update' %s='/var/db move'\n"
                            " %s='/var/db SLOT move' %s='binary move' "
                            "%s='binary SLOT move'\n %s='update /etc/portage/package.*'\n") % \
                            (bold("."), bold("*"), bold("#"), bold("@"),
                            bold("s"), bold("%"), bold("S"), bold("p")))
                valid_updates, errors = parse_updates(mycontent)
                myupd.extend(valid_updates)
                if not quiet:
                    writemsg_stdout(bold(mykey))
                    writemsg_stdout(len(valid_updates) * "." + "\n")
                if len(errors) == 0:
                    # Update our internal mtime since we
                    # processed all of our directives.
                    timestamps[mykey] = mystat[stat.ST_MTIME]
                else:
                    for msg in errors:
                        writemsg("%s\n" % msg, noiselevel=-1)
            if myupd:
                retupd = True

    if retupd:
        if os.access(bindb.bintree.pkgdir, os.W_OK):
            # Call binarytree.populate(), since we want to make sure it's
            # only populated with local packages here (getbinpkgs=0).
            bindb.bintree.populate()
        else:
            # Binary package dir not writable: skip binary updates below.
            bindb = None

    master_repo = portdb.repositories.mainRepo()
    if master_repo is not None:
        master_repo = master_repo.name
    if master_repo in repo_map:
        repo_map['DEFAULT'] = repo_map[master_repo]

    for repo_name, myupd in repo_map.items():
        if repo_name == 'DEFAULT':
            continue
        if not myupd:
            continue

        def repo_match(repository):
            # An installed pkg matches this repo's updates when it came
            # from this repo, or from an unmapped repo when this is the
            # master repo.
            return repository == repo_name or \
                (repo_name == master_repo and repository not in repo_map)

        def _world_repo_match(atoma, atomb):
            """
            Check whether to perform a world change from atoma to atomb.
            If best vardb match for atoma comes from the same repository
            as the update file, allow that. Additionally, if portdb still
            can find a match for old atom name, warn about that.
            """
            matches = vardb.match(atoma)
            if not matches:
                matches = vardb.match(atomb)
            if matches and \
                repo_match(vardb.aux_get(best(matches), ['repository'])[0]):
                if portdb.match(atoma):
                    world_warnings.add((atoma, atomb))
                return True
            else:
                return False

        # Rewrite world file entries affected by this repo's updates.
        for update_cmd in myupd:
            for pos, atom in enumerate(world_list):
                new_atom = update_dbentry(update_cmd, atom)
                if atom != new_atom:
                    if _world_repo_match(atom, new_atom):
                        world_list[pos] = new_atom
                        world_modified = True

        # Apply moves/slotmoves to the installed-package db and (when
        # available) the binary-package db; progress chars per legend.
        for update_cmd in myupd:
            if update_cmd[0] == "move":
                moves = vardb.move_ent(update_cmd, repo_match=repo_match)
                if moves:
                    writemsg_stdout(moves * "@")
                if bindb:
                    moves = bindb.move_ent(update_cmd, repo_match=repo_match)
                    if moves:
                        writemsg_stdout(moves * "%")
            elif update_cmd[0] == "slotmove":
                moves = vardb.move_slot_ent(update_cmd, repo_match=repo_match)
                if moves:
                    writemsg_stdout(moves * "s")
                if bindb:
                    moves = bindb.move_slot_ent(update_cmd, repo_match=repo_match)
                    if moves:
                        writemsg_stdout(moves * "S")

    if world_modified:
        world_list.sort()
        write_atomic(world_file,
            "".join("%s\n" % (x, ) for x in world_list))
        if world_warnings:
            # XXX: print warning that we've updated world entries
            # and the old name still matches something (from an overlay)?
            pass

    if retupd:

        def _config_repo_match(repo_name, atoma, atomb):
            """
            Check whether to perform a world change from atoma to atomb.
            If best vardb match for atoma comes from the same repository
            as the update file, allow that. Additionally, if portdb still
            can find a match for old atom name, warn about that.
            """
            matches = vardb.match(atoma)
            if not matches:
                matches = vardb.match(atomb)
                if not matches:
                    return False
            repository = vardb.aux_get(best(matches), ['repository'])[0]
            return repository == repo_name or \
                (repo_name == master_repo and repository not in repo_map)

        update_config_files(root,
            shlex_split(mysettings.get("CONFIG_PROTECT", "")),
            shlex_split(mysettings.get("CONFIG_PROTECT_MASK", "")),
            repo_map, match_callback=_config_repo_match,
            case_insensitive="case-insensitive-fs" in mysettings.features)

        # The above global updates proceed quickly, so they
        # are considered a single mtimedb transaction.
        if timestamps:
            # We do not update the mtime in the mtimedb
            # until after _all_ of the above updates have
            # been processed because the mtimedb will
            # automatically commit when killed by ctrl C.
            for mykey, mtime in timestamps.items():
                prev_mtimes[mykey] = mtime

        do_upgrade_packagesmessage = False
        # We gotta do the brute force updates for these now.
        if True:
            def onUpdate(_maxval, curval):
                if curval > 0:
                    writemsg_stdout("#")
            if quiet:
                onUpdate = None
            vardb.update_ents(repo_map, onUpdate=onUpdate)
            if bindb:
                def onUpdate(_maxval, curval):
                    if curval > 0:
                        writemsg_stdout("*")
                if quiet:
                    onUpdate = None
                bindb.update_ents(repo_map, onUpdate=onUpdate)
        else:
            do_upgrade_packagesmessage = 1

        # Update progress above is indicated by characters written to stdout so
        # we print a couple new lines here to separate the progress output from
        # what follows.
        writemsg_stdout("\n\n")

        if do_upgrade_packagesmessage and bindb and \
            bindb.cpv_all():
            writemsg_stdout(_(" ** Skipping packages. Run 'fixpackages' or set it in FEATURES to fix the tbz2's in the packages directory.\n"))
            writemsg_stdout(bold(_("Note: This can take a very long time.")))
            writemsg_stdout("\n")

    return retupd
def unmerge(root_config, myopts, unmerge_action, unmerge_files,
    ldpath_mtimes, autoclean=0, clean_world=1, clean_delay=1,
    ordered=0, raise_on_error=0, scheduler=None,
    writemsg_level=portage.util.writemsg_level):
    """Select, display, and unmerge installed packages.

    Resolves *unmerge_files* (atoms, cat/pkg names, or vdb/ebuild paths)
    against the installed-package database, partitions the matches into
    "selected" / "protected" / "omitted" groups according to
    *unmerge_action* ("unmerge", "prune", or "clean"), prints a preview,
    and — unless --pretend/--ask aborts — removes the selected packages
    via portage.unmerge().

    Parameters (as demonstrated by the code below):
        root_config    -- provides .settings, .sets, .trees["vartree"],
                          .root and .setconfig
        myopts         -- emerge option mapping ("--quiet", "--pretend",
                          "--ask", "--deselect", ...)
        unmerge_action -- one of "unmerge", "prune", "clean"
        unmerge_files  -- command-line package arguments; empty means a
                          global clean/prune over all installed packages
        ldpath_mtimes  -- passed through to portage.unmerge()
        autoclean      -- truthy suppresses the CLEAN_DELAY countdown
        clean_world    -- update the "selected" set after removal
                          (forced off when --deselect=n)
        clean_delay    -- enable warning/clean countdowns
        raise_on_error -- raise UninstallFailure instead of sys.exit()
                          on an unmerge failure
        scheduler      -- passed through to portage.unmerge()
        writemsg_level -- output function (injectable for testing)

    Returns 0 when nothing was (or would be) unmerged, 1 after a
    completed unmerge pass.  Exits the process via sys.exit() on an
    ambiguous short name or (unless raise_on_error) on unmerge failure.
    """
    if clean_world:
        # --deselect=n disables world-file cleaning even when requested.
        clean_world = myopts.get('--deselect') != 'n'
    quiet = "--quiet" in myopts
    enter_invalid = '--ask-enter-invalid' in myopts
    settings = root_config.settings
    sets = root_config.sets
    vartree = root_config.trees["vartree"]
    candidate_catpkgs = []
    global_unmerge = 0
    xterm_titles = "notitles" not in settings.features
    out = portage.output.EOutput()
    pkg_cache = {}
    db_keys = list(vartree.dbapi._aux_cache_keys)

    def _pkg(cpv):
        # Memoized construction of installed Package objects; raises
        # KeyError (from aux_get) if cpv vanished concurrently.
        pkg = pkg_cache.get(cpv)
        if pkg is None:
            pkg = Package(cpv=cpv, installed=True,
                metadata=zip(db_keys, vartree.dbapi.aux_get(cpv, db_keys)),
                root_config=root_config,
                type_name="installed")
            pkg_cache[cpv] = pkg
        return pkg

    vdb_path = os.path.join(settings["ROOT"], portage.VDB_PATH)
    try:
        # At least the parent needs to exist for the lock file.
        portage.util.ensure_dirs(vdb_path)
    except portage.exception.PortageException:
        pass
    vdb_lock = None
    try:
        if os.access(vdb_path, os.W_OK):
            vdb_lock = portage.locks.lockdir(vdb_path)
        realsyslist = sets["system"].getAtoms()
        syslist = []
        for x in realsyslist:
            mycp = portage.dep_getkey(x)
            if mycp in settings.getvirtuals():
                providers = []
                for provider in settings.getvirtuals()[mycp]:
                    if vartree.dbapi.match(provider):
                        providers.append(provider)
                # Only an unambiguous (single) provider is treated as a
                # system package; multiple providers are skipped.
                if len(providers) == 1:
                    syslist.extend(providers)
            else:
                syslist.append(mycp)

        mysettings = portage.config(clone=settings)

        if not unmerge_files:
            if unmerge_action == "unmerge":
                print()
                print(bold("emerge unmerge") +
                    " can only be used with specific package names")
                print()
                return 0
            else:
                # prune/clean with no arguments operates globally.
                global_unmerge = 1

        localtree = vartree
        # process all arguments and add all
        # valid db entries to candidate_catpkgs
        if global_unmerge:
            if not unmerge_files:
                candidate_catpkgs.extend(vartree.dbapi.cp_all())
        else:
            #we've got command-line arguments
            if not unmerge_files:
                print("\nNo packages to unmerge have been provided.\n")
                return 0
            for x in unmerge_files:
                arg_parts = x.split('/')
                if x[0] not in [".","/"] and \
                    arg_parts[-1][-7:] != ".ebuild":
                    #possible cat/pkg or dep; treat as such
                    candidate_catpkgs.append(x)
                elif unmerge_action in ["prune", "clean"]:
                    print("\n!!! Prune and clean do not accept individual" + \
                        " ebuilds as arguments;\n skipping.\n")
                    continue
                else:
                    # it appears that the user is specifying an installed
                    # ebuild and we're in "unmerge" mode, so it's ok.
                    if not os.path.exists(x):
                        print("\n!!! The path '" + x + "' doesn't exist.\n")
                        return 0

                    absx = os.path.abspath(x)
                    sp_absx = absx.split("/")
                    if sp_absx[-1][-7:] == ".ebuild":
                        del sp_absx[-1]
                        absx = "/".join(sp_absx)

                    sp_absx_len = len(sp_absx)

                    vdb_path = os.path.join(settings["ROOT"], portage.VDB_PATH)
                    # NOTE(review): vdb_len is computed but never used below.
                    vdb_len = len(vdb_path)

                    sp_vdb = vdb_path.split("/")
                    sp_vdb_len = len(sp_vdb)

                    if not os.path.exists(absx + "/CONTENTS"):
                        print("!!! Not a valid db dir: " + str(absx))
                        return 0

                    if sp_absx_len <= sp_vdb_len:
                        # The Path is shorter... so it can't be inside the vdb.
                        print(sp_absx)
                        print(absx)
                        print("\n!!!",x,"cannot be inside "+ \
                            vdb_path+"; aborting.\n")
                        return 0

                    # Verify the given path is component-wise inside the vdb.
                    for idx in range(0, sp_vdb_len):
                        if idx >= sp_absx_len or sp_vdb[idx] != sp_absx[idx]:
                            print(sp_absx)
                            print(absx)
                            print("\n!!!", x, "is not inside "+\
                                vdb_path+"; aborting.\n")
                            return 0

                    # The vdb-relative remainder is "category/pkg-version".
                    print("=" + "/".join(sp_absx[sp_vdb_len:]))
                    candidate_catpkgs.append(
                        "=" + "/".join(sp_absx[sp_vdb_len:]))

        newline = ""
        if (not "--quiet" in myopts):
            newline = "\n"
        if settings["ROOT"] != "/":
            writemsg_level(darkgreen(newline+ \
                ">>> Using system located in ROOT tree %s\n" % \
                settings["ROOT"]))

        if (("--pretend" in myopts) or ("--ask" in myopts)) and \
            not ("--quiet" in myopts):
            writemsg_level(darkgreen(newline+\
                ">>> These are the packages that would be unmerged:\n"))

        # Preservation of order is required for --depclean and --prune so
        # that dependencies are respected. Use all_selected to eliminate
        # duplicate packages since the same package may be selected by
        # multiple atoms.
        pkgmap = []
        all_selected = set()
        for x in candidate_catpkgs:
            # cycle through all our candidate deps and determine
            # what will and will not get unmerged
            try:
                mymatch = vartree.dbapi.match(x)
            except portage.exception.AmbiguousPackageName as errpkgs:
                print("\n\n!!! The short ebuild name \"" + \
                    x + "\" is ambiguous. Please specify")
                print("!!! one of the following fully-qualified " + \
                    "ebuild names instead:\n")
                for i in errpkgs[0]:
                    print(" " + green(i))
                print()
                sys.exit(1)

            if not mymatch and x[0] not in "<>=~":
                mymatch = localtree.dep_match(x)
            if not mymatch:
                portage.writemsg("\n--- Couldn't find '%s' to %s.\n" % \
                    (x, unmerge_action), noiselevel=-1)
                continue

            # One protected/selected/omitted triple per candidate atom.
            pkgmap.append({
                "protected": set(),
                "selected": set(),
                "omitted": set()
            })
            mykey = len(pkgmap) - 1
            if unmerge_action == "unmerge":
                for y in mymatch:
                    if y not in all_selected:
                        pkgmap[mykey]["selected"].add(y)
                        all_selected.add(y)
            elif unmerge_action == "prune":
                # Keep the single best version; select the rest.
                if len(mymatch) == 1:
                    continue
                best_version = mymatch[0]
                best_slot = vartree.getslot(best_version)
                best_counter = vartree.dbapi.cpv_counter(best_version)
                for mypkg in mymatch[1:]:
                    myslot = vartree.getslot(mypkg)
                    mycounter = vartree.dbapi.cpv_counter(mypkg)
                    if (myslot == best_slot and mycounter > best_counter) or \
                        mypkg == portage.best([mypkg, best_version]):
                        if myslot == best_slot:
                            if mycounter < best_counter:
                                # On slot collision, keep the one with the
                                # highest counter since it is the most
                                # recently installed.
                                continue
                        best_version = mypkg
                        best_slot = myslot
                        best_counter = mycounter
                pkgmap[mykey]["protected"].add(best_version)
                pkgmap[mykey]["selected"].update(mypkg for mypkg in mymatch \
                    if mypkg != best_version and mypkg not in all_selected)
                all_selected.update(pkgmap[mykey]["selected"])
            else:
                # unmerge_action == "clean"
                slotmap = {}
                for mypkg in mymatch:
                    if unmerge_action == "clean":
                        myslot = localtree.getslot(mypkg)
                    else:
                        # since we're pruning, we don't care about slots
                        # and put all the pkgs in together
                        myslot = 0
                    if myslot not in slotmap:
                        slotmap[myslot] = {}
                    slotmap[myslot][localtree.dbapi.cpv_counter(mypkg)] = mypkg

                # Include every installed version of this cp so that
                # per-slot winners can be protected.
                for mypkg in vartree.dbapi.cp_list(
                    portage.cpv_getkey(mymatch[0])):
                    myslot = vartree.getslot(mypkg)
                    if myslot not in slotmap:
                        slotmap[myslot] = {}
                    slotmap[myslot][vartree.dbapi.cpv_counter(mypkg)] = mypkg

                for myslot in slotmap:
                    counterkeys = list(slotmap[myslot])
                    if not counterkeys:
                        continue
                    counterkeys.sort()
                    # Highest counter in each slot is protected.
                    pkgmap[mykey]["protected"].add(
                        slotmap[myslot][counterkeys[-1]])
                    del counterkeys[-1]

                    # Versions not matched by the atom are protected too.
                    for counter in counterkeys[:]:
                        mypkg = slotmap[myslot][counter]
                        if mypkg not in mymatch:
                            counterkeys.remove(counter)
                            pkgmap[mykey]["protected"].add(
                                slotmap[myslot][counter])

                    #be pretty and get them in order of merge:
                    for ckey in counterkeys:
                        mypkg = slotmap[myslot][ckey]
                        if mypkg not in all_selected:
                            pkgmap[mykey]["selected"].add(mypkg)
                            all_selected.add(mypkg)
                    # ok, now the last-merged package
                    # is protected, and the rest are selected
        numselected = len(all_selected)
        if global_unmerge and not numselected:
            portage.writemsg_stdout(
                "\n>>> No outdated packages were found on your system.\n")
            return 0

        if not numselected:
            portage.writemsg_stdout(
                "\n>>> No packages selected for removal by " + \
                unmerge_action + "\n")
            return 0
    finally:
        # Release the vdb lock even on early return / sys.exit above.
        if vdb_lock:
            vartree.dbapi.flush_cache()
            portage.locks.unlockdir(vdb_lock)

    from portage.sets.base import EditablePackageSet

    # generate a list of package sets that are directly or indirectly
    # listed in "selected", as there is no persistent list of
    # "installed" sets
    installed_sets = ["selected"]
    stop = False
    pos = 0
    while not stop:
        stop = True
        pos = len(installed_sets)
        for s in installed_sets[pos - 1:]:
            if s not in sets:
                continue
            candidates = [
                x[len(SETPREFIX):] for x in sets[s].getNonAtoms()
                if x.startswith(SETPREFIX)
            ]
            if candidates:
                stop = False
                installed_sets += candidates
    installed_sets = [
        x for x in installed_sets if x not in root_config.setconfig.active
    ]
    del stop, pos

    # we don't want to unmerge packages that are still listed in
    # user-editable package sets listed in "world" as they would be
    # remerged on the next update of "world" or the relevant package sets.
    unknown_sets = set()
    for cp in range(len(pkgmap)):
        for cpv in pkgmap[cp]["selected"].copy():
            try:
                pkg = _pkg(cpv)
            except KeyError:
                # It could have been uninstalled
                # by a concurrent process.
                continue

            # Never let portage unmerge itself on the live root.
            if unmerge_action != "clean" and \
                root_config.root == "/" and \
                portage.match_from_list(
                portage.const.PORTAGE_PACKAGE_ATOM, [pkg]):
                msg = ("Not unmerging package %s since there is no valid " + \
                    "reason for portage to unmerge itself.") % (pkg.cpv,)
                for line in textwrap.wrap(msg, 75):
                    out.eerror(line)
                # adjust pkgmap so the display output is correct
                pkgmap[cp]["selected"].remove(cpv)
                all_selected.remove(cpv)
                pkgmap[cp]["protected"].add(cpv)
                continue

            parents = []
            for s in installed_sets:
                # skip sets that the user requested to unmerge, and skip world
                # user-selected set, since the package will be removed from
                # that set later on.
                if s in root_config.setconfig.active or s == "selected":
                    continue

                if s not in sets:
                    if s in unknown_sets:
                        continue
                    unknown_sets.add(s)
                    out = portage.output.EOutput()
                    out.eerror(("Unknown set '@%s' in %s%s") % \
                        (s, root_config.root,
                        portage.const.WORLD_SETS_FILE))
                    continue

                # only check instances of EditablePackageSet as other
                # classes are generally used for special purposes and can
                # be ignored here (and are usually generated dynamically,
                # so the user can't do much about them anyway)
                if isinstance(sets[s], EditablePackageSet):
                    # This is derived from a snippet of code in the
                    # depgraph._iter_atoms_for_pkg() method.
                    for atom in sets[s].iterAtomsForPackage(pkg):
                        inst_matches = vartree.dbapi.match(atom)
                        inst_matches.reverse()  # descending order
                        higher_slot = None
                        for inst_cpv in inst_matches:
                            try:
                                inst_pkg = _pkg(inst_cpv)
                            except KeyError:
                                # It could have been uninstalled
                                # by a concurrent process.
                                continue
                            if inst_pkg.cp != atom.cp:
                                continue
                            if pkg >= inst_pkg:
                                # This is descending order, and we're not
                                # interested in any versions <= pkg given.
                                break
                            if pkg.slot_atom != inst_pkg.slot_atom:
                                higher_slot = inst_pkg
                                break
                        if higher_slot is None:
                            # The set still wants this package (no higher
                            # slot satisfies the atom) -> keep it.
                            parents.append(s)
                            break
            if parents:
                print(colorize("WARN",
                    "Not unmerging package %s as it is" % cpv))
                print(colorize("WARN",
                    "still referenced by the following package sets:"))
                print(" %s\n" % ", ".join(parents))
                # adjust pkgmap so the display output is correct
                pkgmap[cp]["selected"].remove(cpv)
                all_selected.remove(cpv)
                pkgmap[cp]["protected"].add(cpv)

    del installed_sets

    numselected = len(all_selected)
    if not numselected:
        writemsg_level(
            "\n>>> No packages selected for removal by " + \
            unmerge_action + "\n")
        return 0

    # Unmerge order only matters in some cases
    if not ordered:
        # Merge the per-atom entries into one entry per cp, sorted by cp.
        unordered = {}
        for d in pkgmap:
            selected = d["selected"]
            if not selected:
                continue
            cp = portage.cpv_getkey(next(iter(selected)))
            cp_dict = unordered.get(cp)
            if cp_dict is None:
                cp_dict = {}
                unordered[cp] = cp_dict
                for k in d:
                    cp_dict[k] = set()
            for k, v in d.items():
                cp_dict[k].update(v)
        pkgmap = [unordered[cp] for cp in sorted(unordered)]

    # Preview / warning pass over the final selection.
    for x in range(len(pkgmap)):
        selected = pkgmap[x]["selected"]
        if not selected:
            continue
        for mytype, mylist in pkgmap[x].items():
            if mytype == "selected":
                continue
            mylist.difference_update(all_selected)
        cp = portage.cpv_getkey(next(iter(selected)))
        # Any other installed version of this cp not otherwise
        # classified is displayed as "omitted".
        for y in localtree.dep_match(cp):
            if y not in pkgmap[x]["omitted"] and \
                y not in pkgmap[x]["selected"] and \
                y not in pkgmap[x]["protected"] and \
                y not in all_selected:
                pkgmap[x]["omitted"].add(y)
        if global_unmerge and not pkgmap[x]["selected"]:
            #avoid cluttering the preview printout with stuff that
            #isn't getting unmerged
            continue
        if not (pkgmap[x]["protected"] or pkgmap[x]["omitted"]) and cp in syslist:
            # Removing the last copy of a system-profile package: warn
            # loudly (\a = terminal bell) and give a chance to abort.
            writemsg_level(colorize("BAD","\a\n\n!!! " + \
                "'%s' is part of your system profile.\n" % cp),
                level=logging.WARNING, noiselevel=-1)
            writemsg_level(colorize("WARN","\a!!! Unmerging it may " + \
                "be damaging to your system.\n\n"),
                level=logging.WARNING, noiselevel=-1)
            if clean_delay and "--pretend" not in myopts and \
                "--ask" not in myopts:
                countdown(int(settings["EMERGE_WARNING_DELAY"]),
                    colorize("UNMERGE_WARN", "Press Ctrl-C to Stop"))
        if not quiet:
            writemsg_level("\n %s\n" % (bold(cp), ), noiselevel=-1)
        else:
            writemsg_level(bold(cp) + ": ", noiselevel=-1)
        for mytype in ["selected", "protected", "omitted"]:
            if not quiet:
                writemsg_level((mytype + ": ").rjust(14), noiselevel=-1)
            if pkgmap[x][mytype]:
                sorted_pkgs = [
                    portage.catpkgsplit(mypkg)[1:]
                    for mypkg in pkgmap[x][mytype]
                ]
                sorted_pkgs.sort(key=cmp_sort_key(portage.pkgcmp))
                for pn, ver, rev in sorted_pkgs:
                    # Hide the implicit -r0 revision suffix.
                    if rev == "r0":
                        myversion = ver
                    else:
                        myversion = ver + "-" + rev
                    if mytype == "selected":
                        writemsg_level(
                            colorize("UNMERGE_WARN", myversion + " "),
                            noiselevel=-1)
                    else:
                        writemsg_level(
                            colorize("GOOD", myversion + " "),
                            noiselevel=-1)
            else:
                writemsg_level("none ", noiselevel=-1)
            if not quiet:
                writemsg_level("\n", noiselevel=-1)
        if quiet:
            writemsg_level("\n", noiselevel=-1)

    writemsg_level("\n>>> " + colorize("UNMERGE_WARN", "'Selected'") + \
        " packages are slated for removal.\n")
    writemsg_level(">>> " + colorize("GOOD", "'Protected'") + \
        " and " + colorize("GOOD", "'omitted'") + \
        " packages will not be removed.\n\n")

    if "--pretend" in myopts:
        #we're done... return
        return 0
    if "--ask" in myopts:
        if userquery("Would you like to unmerge these packages?",
            enter_invalid) == "No":
            # enter pretend mode for correct formatting of results
            myopts["--pretend"] = True
            print()
            print("Quitting.")
            print()
            return 0
    #the real unmerging begins, after a short delay....
    if clean_delay and not autoclean:
        countdown(int(settings["CLEAN_DELAY"]), ">>> Unmerging")

    for x in range(len(pkgmap)):
        for y in pkgmap[x]["selected"]:
            writemsg_level(">>> Unmerging " + y + "...\n", noiselevel=-1)
            emergelog(xterm_titles, "=== Unmerging... (" + y + ")")
            mysplit = y.split("/")
            #unmerge...
            retval = portage.unmerge(mysplit[0], mysplit[1],
                settings["ROOT"], mysettings,
                unmerge_action not in ["clean", "prune"],
                vartree=vartree, ldpath_mtimes=ldpath_mtimes,
                scheduler=scheduler)

            if retval != os.EX_OK:
                emergelog(xterm_titles, " !!! unmerge FAILURE: " + y)
                if raise_on_error:
                    raise UninstallFailure(retval)
                sys.exit(retval)
            else:
                # Remove the cpv from the user-selected ("world") set,
                # guarded by hasattr since set implementations vary.
                if clean_world and hasattr(sets["selected"], "cleanPackage")\
                    and hasattr(sets["selected"], "lock"):
                    sets["selected"].lock()
                    if hasattr(sets["selected"], "load"):
                        sets["selected"].load()
                    sets["selected"].cleanPackage(vartree.dbapi, y)
                    sets["selected"].unlock()
        emergelog(xterm_titles, " >>> unmerge success: " + y)

    # Also drop any explicitly-unmerged sets from the selected set.
    if clean_world and hasattr(sets["selected"], "remove")\
        and hasattr(sets["selected"], "lock"):
        sets["selected"].lock()
        # load is called inside remove()
        for s in root_config.setconfig.active:
            sets["selected"].remove(SETPREFIX + s)
        sets["selected"].unlock()

    return 1