def display_results(self, archive, directory, checksum):
    """Report the location of the generated archive (or build tree) and
    its checksum to the user.

    :param archive: path of the compressed archive, or a false value
    :param directory: path of the uncompressed build tree (--build runs)
    :param checksum: checksum string of the archive, or a false value
    :returns: False when there is nothing to report, else None
    """
    # Display results is called from the tail of SoSReport.final_work()
    #
    # Logging is already shutdown and all terminal output must use the
    # print() call.

    # make sure a report exists
    if not archive and not directory:
        return False

    self._print()
    if archive:
        self._print(_("Your sosreport has been generated and saved "
                      "in:\n %s") % archive, always=True)
    else:
        # FIX: interpolate outside _() so the untranslated format string
        # (not the already-substituted text) is used as the msgid
        self._print(_("sosreport build tree is located at : %s") % directory,
                    always=True)

    self._print()
    if checksum:
        self._print(_("The checksum is: ") + checksum)
        self._print()
    self._print(_("Please send this file to your support "
                  "representative."))
    self._print()
def final_work(self):
    """Package, compress and (optionally) upload the report, then hand
    ownership of the archive to --save-as user if requested.

    :returns: path of the final archive file
    """
    # package up the results for the support organization
    self.policy.package_results(self.archive.name())
    # logging must be shut down before compression so log files are complete
    self._finish_logging()
    final_filename = self.archive.compress(self.opts.compression_type)
    # automated submission will go here
    if not self.opts.upload:
        self.policy.display_results(final_filename)
    else:
        self.policy.upload_results(final_filename)
    self.tempfile_util.clean()

    # if the user wants the sosreport chown'ed to someone else
    if self.opts.save_as:
        try:
            # get uid, this may raise KeyError if it can't lookup
            save_uid = pwd.getpwnam(self.opts.save_as).pw_uid
            # change UID, leave GID as-is, may raise OSError
            os.chown(final_filename, save_uid, -1)
        except KeyError:
            # FIX: interpolate outside _() so translation lookup works
            self.soslog.error(_("unable to find user %s, saving as current user")
                              % (self.opts.save_as))
        except OSError:
            self.soslog.error(_("unable to save report as %s, saving as current user")
                              % (self.opts.save_as))
    return final_filename
def packageResults(self):
    """Create the compressed sosreport archive (xz when available,
    otherwise bzip2) from the collection destination tree.

    Python 2 code: uses the legacy octal literal 077 and print statement.
    """
    # append the ticket number to the report name, if one was given
    if len(self.ticketNumber):
        self.reportName = self.reportName + "." + self.ticketNumber
    else:
        # no-op kept from the original; reportName is unchanged
        self.reportName = self.reportName

    # archive relative to the parent of dstroot so the tarball contains
    # a single top-level directory; restore cwd/umask afterwards
    curwd = os.getcwd()
    os.chdir(os.path.dirname(self.cInfo['dstroot']))
    # restrict permissions on the archive being created
    oldmask = os.umask(077)

    print _("Creating compressed archive...")

    if os.path.isfile("/usr/bin/xz"):
        self.report_file_ext = "tar.xz"
        self.renameResults("sosreport-%s-%s.%s" % (self.reportName, time.strftime("%Y%m%d%H%M%S"), self.report_file_ext))
        # SECURITY NOTE(review): command is built by string interpolation and
        # run with shell=True; dstroot/report_file containing shell
        # metacharacters would be interpreted by the shell.
        cmd = "/bin/tar -c %s | /usr/bin/xz -1 > %s" % (os.path.basename(self.cInfo['dstroot']),self.report_file)
        p = Popen(cmd, shell=True, bufsize=-1)
        # wait for the pipeline; exit status is collected but not checked
        sts = os.waitpid(p.pid, 0)[1]
    else:
        # fall back to bzip2 when xz is not installed
        self.report_file_ext = "tar.bz2"
        self.renameResults("sosreport-%s-%s.%s" % (self.reportName, time.strftime("%Y%m%d%H%M%S"), self.report_file_ext))
        # SECURITY NOTE(review): same shell=True interpolation concern as above
        tarcmd = "/bin/tar -jcf %s %s" % (self.report_file, os.path.basename(self.cInfo['dstroot']))
        p = Popen(tarcmd, shell=True, stdout=PIPE, stderr=PIPE, bufsize=-1)
        output = p.communicate()[0]

    os.umask(oldmask)
    os.chdir(curwd)
    return
def pre_work(self):
    """Collect the reporter's name and ticket number, interactively
    unless --batch/--quiet, then sanitize both values.

    Python 2 code (raw_input).
    """
    # this method will be called before the gathering begins
    localname = self.get_local_name()

    if not self.commons['cmdlineopts'].batch and not self.commons['cmdlineopts'].quiet:
        try:
            self.report_name = raw_input(_("Please enter your first initial and last name [%s]: ") % localname)
            self.ticket_number = raw_input(_("Please enter the case number that you are generating this report for: "))
            self._print()
        except:
            # bare except: deliberately treats CTRL-C/EOF at the prompt
            # as "abort the run"
            self._print()
            sys.exit(0)

    # NOTE(review): in batch mode report_name/ticket_number are never set
    # here — presumably the class defines defaults; confirm, otherwise the
    # attribute accesses below would raise AttributeError.
    if len(self.report_name) == 0:
        self.report_name = localname

    # command-line values override anything entered interactively
    if self.commons['cmdlineopts'].customer_name:
        self.report_name = self.commons['cmdlineopts'].customer_name

    if self.commons['cmdlineopts'].ticket_number:
        self.ticket_number = self.commons['cmdlineopts'].ticket_number

    self.report_name = self.sanitize_report_name(self.report_name)
    if self.ticket_number:
        self.ticket_number = self.sanitize_ticket_number(self.ticket_number)

    # sanitizing may have stripped every character; fall back to a stub
    if (self.report_name == ""):
        self.report_name = "default"
    return
def encryptResults(self):
    """Encrypt the finished archive with gpg for the configured (or
    default Red Hat support) recipient; replaces self.report_file with
    the .gpg file on success.

    Python 2 code (print statement).
    """
    # make sure a report exists
    if not self.report_file:
        return False

    print _("Encrypting archive...")
    gpgname = self.report_file + ".gpg"

    # keyring/recipient come from the [general] config section when
    # present; bare excepts fall back to the shipped support defaults
    try:
        keyring = self.cInfo['config'].get("general", "gpg_keyring")
    except:
        keyring = "/usr/share/sos/rhsupport.pub"

    try:
        recipient = self.cInfo['config'].get("general", "gpg_recipient")
    except:
        recipient = "*****@*****.**"

    # SECURITY NOTE(review): the gpg command line is built by string
    # interpolation and run with shell=True; quoting protects only against
    # whitespace, not embedded quotes/metacharacters in the paths.
    p = Popen("""/usr/bin/gpg --trust-model always --batch --keyring "%s" --no-default-keyring --compress-level 0 --encrypt --recipient "%s" --output "%s" "%s" """ % (keyring, recipient, gpgname, self.report_file), shell=True, stdout=PIPE, stderr=PIPE, bufsize=-1)
    stdout, stderr = p.communicate()
    if p.returncode == 0:
        # encryption succeeded: drop the plaintext archive
        os.unlink(self.report_file)
        self.report_file = gpgname
    else:
        print _("There was a problem encrypting your report.")
        sys.exit(1)
def final_work(self):
    """Compress the collected data (unless --build) and display/upload
    the result; always closes logging and cleans temporary files.

    :returns: True on success, False when compression failed silently
    """
    # package up the results for the support organization
    if not self.opts.build:
        self.ui_log.info(_("Creating compressed archive..."))
        # compression could fail for a number of reasons
        try:
            final_filename = self.archive.finalize(self.opts.compression_type)
        except:
            # bare except kept: any failure is fatal-but-quiet unless --debug
            if self.opts.debug:
                raise
            else:
                return False
        # automated submission will go here
        if not self.opts.upload:
            self.policy.display_results(final_filename)
        else:
            self.policy.upload_results(final_filename)
    else:
        # FIX: interpolate outside _() so the format string itself is the
        # msgid looked up for translation
        self.ui_log.info(_("\n sosreport build tree is located at : %s\n")
                         % self.archive.get_archive_path())
    self._finish_logging()
    self.tempfile_util.clean()
    return True
def displayResults(self, final_filename=None):
    """Compute and store the md5 of the archive, then print its
    location and checksum for the user.

    :param final_filename: path of the finished archive, or None
    :returns: False when no archive exists, else None
    """
    self.report_file = final_filename

    # make sure a report exists
    if not self.report_file:
        return False

    # calculate md5
    # FIX: the archive is binary data — open it in "rb"; text mode would
    # corrupt the digest on newline-translating platforms and fails to
    # decode under Python 3. Context managers guarantee the handles close.
    with open(self.report_file, "rb") as fp:
        self.report_md5 = md5(fp.read()).hexdigest()

    # store md5 into file
    with open(self.report_file + ".md5", "w") as fp:
        fp.write(self.report_md5 + "\n")

    self._print()
    self._print(_("Your sosreport has been generated and saved in:\n %s") % self.report_file)
    self._print()
    if len(self.report_md5):
        self._print(_("The md5sum is: ") + self.report_md5)
    self._print()
    self._print(_("Please send this file to your support representative."))
    self._print()
def display_results(self, final_filename=None, build=False):
    """Write the archive checksum to a side file and print the archive
    (or build tree) location for the user.

    :param final_filename: path of the archive or build tree
    :param build: True when --build left an uncompressed tree
    :returns: False when there is nothing to report, else None
    """
    # make sure a report exists
    if not final_filename:
        return False

    self._print()

    if not build:
        # FIX: compute the checksum before opening the side file and use a
        # context manager — the original leaked the handle if
        # _create_checksum() raised
        checksum = self._create_checksum(final_filename)
        # store checksum into file (file is created even when checksum is
        # empty, matching the original behaviour)
        with open(final_filename + "." + get_hash_name(), "w") as fp:
            if checksum:
                fp.write(checksum + "\n")
        self._print(_("Your sosreport has been generated and saved in:\n %s") % final_filename)
    else:
        checksum = None
        # FIX: interpolate outside _() so translation lookup works
        self._print(_("sosreport build tree is located at : %s") % final_filename)

    self._print()
    if checksum:
        self._print(_("The checksum is: ") + checksum)
        self._print()
    self._print(_("Please send this file to your support representative."))
    self._print()
def preWork(self):
    """Collect and sanitize the reporter name and ticket number before
    gathering starts (interactive unless --batch/--silent).

    Python 2 code (raw_input).
    """
    # this method will be called before the gathering begins
    localname = self.getLocalName()

    if not self.commons['cmdlineopts'].batch and not self.commons['cmdlineopts'].silent:
        try:
            self.reportName = raw_input(_("Please enter your first initial and last name [%s]: ") % localname)
            # keep only alphanumerics and dots in the name
            self.reportName = re.sub(r"[^a-zA-Z.0-9]", "", self.reportName)

            self.ticketNumber = raw_input(_("Please enter the case number that you are generating this report for: "))
            # ticket numbers are digits only
            self.ticketNumber = re.sub(r"[^0-9]", "", self.ticketNumber)
            self._print()
        except:
            # bare except: CTRL-C/EOF at the prompt aborts the run
            self._print()
            sys.exit(0)

    if len(self.reportName) == 0:
        self.reportName = localname

    # command-line values override interactive input, sanitized the same way
    if self.commons['cmdlineopts'].customerName:
        self.reportName = self.commons['cmdlineopts'].customerName
        self.reportName = re.sub(r"[^a-zA-Z.0-9]", "", self.reportName)

    if self.commons['cmdlineopts'].ticketNumber:
        self.ticketNumber = self.commons['cmdlineopts'].ticketNumber
        self.ticketNumber = re.sub(r"[^0-9]", "", self.ticketNumber)
    return
def load_plugins(self):
    """Discover plugin modules, validate each against the active policy,
    and load or skip them (recording the skip reason)."""
    import sos.plugins
    helper = ImporterHelper(sos.plugins)
    plugins = helper.get_modules()
    self.plugin_names = deque()
    # validate and load plugins
    for plug in plugins:
        plugbase, ext = os.path.splitext(plug)
        try:
            plugin_classes = import_plugin(
                plugbase, tuple(self.policy.valid_subclasses))
            if not len(plugin_classes):
                # no valid plugin classes for this policy
                continue
            plugin_class = self.policy.match_plugin(plugin_classes)
            if not self.policy.validate_plugin(plugin_class):
                self.soslog.warning(
                    _("plugin %s does not validate, skipping") % plug)
                if self.opts.verbosity > 0:
                    self._skip(plugin_class, _("does not validate"))
                continue
            if plugin_class.requires_root and not self._is_root:
                # FIX: the two adjacent string literals were concatenated
                # without a separating space, logging "permissionsto execute"
                self.soslog.info(_("plugin %s requires root permissions "
                                   "to execute, skipping") % plug)
                self._skip(plugin_class, _("requires root"))
                continue
            # plug-in is valid, let's decide whether run it or not
            self.plugin_names.append(plugbase)
            if self._is_skipped(plugbase):
                self._skip(plugin_class, _("skipped"))
                continue
            if self._is_inactive(plugbase, plugin_class):
                self._skip(plugin_class, _("inactive"))
                continue
            if self._is_not_default(plugbase, plugin_class):
                self._skip(plugin_class, _("not default"))
                continue
            if self._is_not_specified(plugbase):
                self._skip(plugin_class, _("not specified"))
                continue
            self._load(plugin_class)
        except Exception as e:
            self.soslog.warning(_("plugin %s does not install, "
                                  "skipping: %s") % (plug, e))
            if self.raise_plugins:
                raise
def prework(self):
    """Run policy pre-work, validate the compression method, and set up
    the archive; exits on fatal filesystem or unexpected errors."""
    self.policy.pre_work()

    try:
        self.ui_log.info(_(" Setting up archive ..."))
        compression_methods = ('auto', 'zip', 'bzip2', 'gzip', 'xz')
        method = self.opts.compression_type
        if method not in compression_methods:
            compression_list = ', '.join(compression_methods)
            self.ui_log.error("")
            self.ui_log.error("Invalid compression specified: " + method)
            self.ui_log.error("Valid types are: " + compression_list)
            self.ui_log.error("")
            self._exit(1)
        self._set_archive()
        self._make_archive_paths()
        return
    except (OSError, IOError) as e:
        if e.errno in fatal_fs_errors:
            self.ui_log.error("")
            self.ui_log.error(" %s while setting up archive" % e.strerror)
            self.ui_log.error("")
        else:
            raise e
    except Exception as e:
        import traceback
        self.ui_log.error("")
        self.ui_log.error(" Unexpected exception setting up archive:")
        # FIX: print_exc() takes a traceback depth limit, not an exception
        # object; passing `e` raised/was ignored depending on the Python
        # version. The active exception is picked up implicitly.
        traceback.print_exc()
        self.ui_log.error(e)
    self._exit(1)
def final_work(self):
    """Close logging, finalize the archive (unless --build), then hand
    the result to the policy for display and clean temporary files.

    :returns: True on success, False when compression failed quietly
    """
    # log files must be closed and cleaned up before the archive is
    # finalized, or they would be truncated inside it
    self._finish_logging()

    if self.opts.build:
        # --build: the uncompressed tree itself is the deliverable
        final_filename = self.archive.get_archive_path()
    else:
        print(_("Creating compressed archive..."))
        # compression could fail for a number of reasons
        try:
            final_filename = self.archive.finalize(
                self.opts.compression_type)
        except (OSError, IOError) as err:
            if err.errno in fatal_fs_errors:
                self.ui_log.error("")
                self.ui_log.error(" %s while finalizing archive" % err.strerror)
                self.ui_log.error("")
                self._exit(1)
        except:
            if self.opts.debug:
                raise
            return False

    self.policy.display_results(final_filename, build=self.opts.build)
    self.tempfile_util.clean()
    return True
def collect(self):
    """Run every loaded plugin's collect() with a progress line; fatal
    filesystem errors abort, other plugin errors are logged."""
    self.ui_log.info(_(" Running plugins. Please wait ..."))
    self.ui_log.info("")
    # FIX: the original iterated zip(self.loaded_plugins) (single-argument
    # zip wraps each item in a 1-tuple) with a hand-rolled counter;
    # enumerate() expresses the same thing directly, and the loop-invariant
    # total is hoisted out of the loop.
    plugin_count = len(self.loaded_plugins)
    for plugruncount, (plugname, plug) in enumerate(self.loaded_plugins, 1):
        status_line = (" Running %d/%d: %s... " %
                       (plugruncount, plugin_count, plugname))
        if self.opts.verbosity == 0:
            # overwrite the previous status in place
            status_line = "\r%s" % status_line
        else:
            status_line = "%s\n" % status_line
        if not self.opts.quiet:
            sys.stdout.write(status_line)
            sys.stdout.flush()
        try:
            plug.collect()
        except KeyboardInterrupt:
            raise
        except (OSError, IOError) as e:
            if e.errno in fatal_fs_errors:
                self.ui_log.error("")
                self.ui_log.error(" %s while collecting plugin data" % e.strerror)
                self.ui_log.error("")
                self._exit(1)
        except:
            if self.raise_plugins:
                raise
            else:
                self._log_plugin_exception(plugname)
    self.ui_log.info("")
def main(args):
    """The main entry point: drive a full sosreport run from the given
    command-line arguments; returns None when the run exits early."""
    try:
        report = SoSReport(args)
        if report.opts.listPlugins:
            report.list_plugins()
        report.ensure_plugins()
        report.batch()
        if report.opts.diagnose:
            report.diagnose()
        report.prework()
        report.setup()

        report.ui_log.info(_(" Running plugins. Please wait ..."))
        report.ui_log.info("")
        report.copy_stuff()
        report.ui_log.info("")

        # the three report renderers only run when --report was given
        if report.opts.report:
            report.report()
            report.html_report()
            report.plain_report()

        report.postproc()
        report.version()
        return report.final_work()
    except SystemExit:
        # an explicit exit anywhere above ends the run without a result
        return None
def final_work(self):
    """Close logging, produce the final archive (or build-tree path) and
    route it to display or upload; cleans temporaries before returning.

    :returns: True on success, False when compression failed quietly
    """
    # logging has to be shut down before archive creation so that the
    # log files inside the archive are complete
    self._finish_logging()

    if self.opts.build:
        # --build: deliver the uncompressed tree path as-is
        final_filename = self.archive.get_archive_path()
    else:
        print (_("Creating compressed archive..."))
        # compression could fail for a number of reasons
        try:
            final_filename = self.archive.finalize(self.opts.compression_type)
        except:
            if self.opts.debug:
                raise
            return False

    # automated submission will go here
    if self.opts.upload:
        self.policy.upload_results(final_filename)
    else:
        self.policy.display_results(final_filename, build = self.opts.build)

    self.tempfile_util.clean()
    return True
def batch(self):
    """Show the policy banner; in interactive mode append a prompt and
    wait for ENTER (CTRL-C aborts).

    Python 2 code (raw_input).
    """
    if self.opts.batch:
        # non-interactive: just log the banner and continue
        self.ui_log.info(self.msg)
    else:
        self.msg += _("Press ENTER to continue, or CTRL-C to quit.\n")
        try:
            raw_input(self.msg)
        except:
            # bare except: CTRL-C/EOF at the prompt aborts the run
            self.ui_log.info("")
            self._exit()
def batch(self):
    """Show the policy banner; in interactive mode append a prompt and
    block until the user presses ENTER (CTRL-C/EOF aborts the run)."""
    if self.opts.batch:
        # non-interactive mode: log the banner and move on
        self.ui_log.info(self.policy.get_msg())
        return

    prompt = self.policy.get_msg()
    prompt += _("Press ENTER to continue, or CTRL-C to quit.\n")
    try:
        input(prompt)
    except:
        # bare except kept: CTRL-C or EOF at the prompt aborts the run
        self.ui_log.info("")
        self._exit()
def displayResults(self):
    """Compute the archive md5, fold its last four hex digits into the
    final archive name, write the .md5 side file and print the result.

    Python 2 code (print statement).
    """
    # make sure a report exists
    if not self.report_file:
        return False

    # calculate md5
    # NOTE(review): the archive is binary but is opened in text mode "r";
    # on newline-translating platforms this corrupts the digest — should
    # presumably be "rb". Confirm before changing.
    fp = open(self.report_file, "r")
    self.report_md5 = md5(fp.read()).hexdigest()
    fp.close()

    # embed the digest tail in the filename to make names unique
    self.renameResults("sosreport-%s-%s-%s.%s" % (self.reportName, time.strftime("%Y%m%d%H%M%S"), self.report_md5[-4:], self.report_file_ext))

    # store md5 into file
    fp = open(self.report_file + ".md5", "w")
    fp.write(self.report_md5 + "\n")
    fp.close()

    print
    print _("Your sosreport has been generated and saved in:\n %s") % self.report_file
    print
    if len(self.report_md5):
        print _("The md5sum is: ") + self.report_md5
    print
    print _("Please send this file to your support representative.")
    print
def load_plugins(self):
    """Discover plugin modules under sos/plugins, validate each class
    against the policy, and load or skip it.

    Python 2 code (`except Exception, e` syntax).
    """
    helper = ImporterHelper(package_path=os.path.join('sos', 'plugins'))
    plugins = helper.get_modules()
    self.plugin_names = deque()

    # validate and load plugins
    for plug in plugins:
        plugbase, ext = os.path.splitext(plug)
        try:
            plugin_classes = import_plugin(plugbase)

            for plugin_class in plugin_classes:
                if not self.policy.validatePlugin(plugin_class):
                    self.soslog.debug(_("plugin %s does not validate, skipping") % plug)
                    self._skip(plugin_class, "does not validate")
                    continue

                if plugin_class.requires_root and not self._is_root:
                    self.soslog.debug(_("plugin %s requires root permissions to execute, skipping") % plug)
                    self._skip(plugin_class, "requires root")
                    continue

                # plug-in is valid, let's decide whether run it or not
                self.plugin_names.append(plugbase)

                # NOTE(review): four distinct skip conditions are collapsed
                # into the single reason "inactive" here — later versions
                # record each reason separately; confirm this is intended.
                if any((self._is_skipped(plugbase),
                        self._is_inactive(plugbase, plugin_class),
                        self._is_not_default(plugbase, plugin_class),
                        self._is_not_specified(plugbase),
                        )):
                    self._skip(plugin_class, "inactive")
                    continue

                self._load(plugin_class)
        except Exception, e:
            self.soslog.warning(_("plugin %s does not install, skipping: %s") % (plug, e))
            if self.raise_plugins:
                raise
def pre_work(self):
    """Collect reporter name and case id (interactively unless
    --batch/--quiet), apply command-line overrides, and sanitize both."""
    # this method will be called before the gathering begins
    cmdline_opts = self.commons['cmdlineopts']
    customer_name = cmdline_opts.customer_name
    localname = customer_name if customer_name else self.get_local_name()
    caseid = cmdline_opts.case_id if cmdline_opts.case_id else ""

    if not cmdline_opts.batch and not \
            cmdline_opts.quiet:
        try:
            self.report_name = input(_("Please enter your first initial "
                                       "and last name [%s]: ") % localname)
            self.case_id = input(_("Please enter the case id "
                                   "that you are generating this "
                                   "report for [%s]: ") % caseid)
            self._print()
        except:
            # bare except kept: CTRL-C/EOF at either prompt falls back to
            # the locally-derived name instead of aborting
            self._print()
            self.report_name = localname

    # FIX: use truthiness instead of len(...) == 0 (empty-string check)
    if not self.report_name:
        self.report_name = localname

    # command-line values always win over interactive input
    if customer_name:
        self.report_name = customer_name

    if cmdline_opts.case_id:
        self.case_id = cmdline_opts.case_id

    self.report_name = self.sanitize_report_name(self.report_name)
    if self.case_id:
        self.case_id = self.sanitize_case_id(self.case_id)

    # sanitizing may strip every character; fall back to a stub name
    # (FIX: redundant parentheses around the comparison removed)
    if self.report_name == "":
        self.report_name = "default"
    return
def list_profiles(self):
    """Print every available profile together with the loaded plugins
    that belong to it, followed by a summary count."""
    if not self.profiles:
        self.soslog.fatal(_("no valid profiles found"))
        return

    self.ui_log.info(_("The following profiles are available:"))
    self.ui_log.info("")

    def _has_prof(c):
        # not every plugin class declares a profiles attribute
        return hasattr(c, "profiles")

    for profile in sorted(self.profiles):
        # collect the names of loaded plugins tagged with this profile
        members = [name for name, plugin in self.loaded_plugins
                   if _has_prof(plugin) and profile in plugin.profiles]
        for line in _format_list("%-15s " % profile, members, indent=True):
            self.ui_log.info(" %s" % line)

    self.ui_log.info("")
    self.ui_log.info(" %d profiles, %d plugins" % (len(self.profiles), len(self.loaded_plugins)))
    self.ui_log.info("")
def list_plugins(self):
    """Print enabled plugins, disabled plugins (with reason), available
    plugin options, and the profile summary."""
    if not self.loaded_plugins and not self.skipped_plugins:
        self.soslog.fatal(_("no valid plugins found"))
        return

    if self.loaded_plugins:
        self.ui_log.info(_("The following plugins are currently enabled:"))
        self.ui_log.info("")
        for (plugname, plug) in self.loaded_plugins:
            self.ui_log.info(" %-20s %s" % (plugname, plug.get_description()))
    else:
        self.ui_log.info(_("No plugin enabled."))
    self.ui_log.info("")

    if self.skipped_plugins:
        self.ui_log.info(_("The following plugins are currently "
                           "disabled:"))
        self.ui_log.info("")
        for (plugname, plugclass, reason) in self.skipped_plugins:
            self.ui_log.info(" %-20s %-14s %s" % (
                plugname, reason, plugclass.get_description()))
        self.ui_log.info("")

    if self.all_options:
        self.ui_log.info(_("The following plugin options are available:"))
        self.ui_log.info("")
        for (plug, plugname, optname, optparm) in self.all_options:
            # format option value based on its type (int or bool)
            # FIX: isinstance() instead of type() == bool; conditional
            # expression instead of comparing `is True`
            if isinstance(optparm["enabled"], bool):
                tmpopt = "on" if optparm["enabled"] else "off"
            else:
                tmpopt = optparm["enabled"]
            self.ui_log.info(" %-25s %-15s %s" % (
                plugname + "." + optname, tmpopt, optparm["desc"]))
    else:
        self.ui_log.info(_("No plugin options available."))
    self.ui_log.info("")

    # FIX: sorted() replaces the list-then-sort two-step
    profiles = sorted(self.profiles)
    lines = _format_list("Profiles: ", profiles, indent=True)
    for line in lines:
        self.ui_log.info(" %s" % line)
    self.ui_log.info("")
    self.ui_log.info(" %d profiles, %d plugins" % (len(self.profiles), len(self.loaded_plugins)))
    self.ui_log.info("")
def setup(self):
    """Attach the archive to each loaded plugin and run its setup();
    fatal filesystem errors abort, other plugin errors are logged."""
    self.ui_log.info(_(" Setting up plugins ..."))
    for plugname, plug in self.loaded_plugins:
        try:
            plug.archive = self.archive
            plug.setup()
        except KeyboardInterrupt:
            # user abort propagates untouched
            raise
        except (OSError, IOError) as err:
            if err.errno in fatal_fs_errors:
                self.ui_log.error("")
                self.ui_log.error(" %s while setting up plugins" % err.strerror)
                self.ui_log.error("")
                self._exit(1)
            # non-fatal filesystem errors fall through silently, matching
            # the original behaviour
        except:
            if not self.raise_plugins:
                self._log_plugin_exception(plugname)
            else:
                raise
def execute(self):
    """Drive a complete sosreport run: logging, plugin discovery and
    option handling, user interaction, collection, reporting and final
    packaging. Returns None when the run exits early via SystemExit.
    """
    try:
        # ordering below is load-bearing: logging first, then policy
        # commons, then plugin discovery/option wiring
        self._setup_logging()
        self.policy.setCommons(self.get_commons())
        self.print_header()
        self.load_plugins()
        self._set_tunables()
        self._check_for_unknown_plugins()
        self._set_plugin_options()

        if self.opts.listPlugins:
            # --list-plugins short-circuits the whole run
            self.list_plugins()
            return

        # exit early if no plugin at all could be enabled
        self.ensure_plugins()
        self.batch()

        if self.opts.diagnose:
            self.diagnose()

        self.prework()
        self.setup()

        self.ui_log.info(_(" Running plugins. Please wait ..."))
        self.ui_log.info("")
        self.copy_stuff()
        self.ui_log.info("")

        # the report renderers run only when --report was requested
        if self.opts.report:
            self.report()
            self.html_report()
            self.plain_report()

        self.postproc()
        self.version()

        return self.final_work()
    except SystemExit:
        # a deliberate exit anywhere above yields no result
        return None
def pre_work(self):
    # this method will be called before the gathering begins
    opts = self.commons['cmdlineopts']
    caseid = opts.case_id if opts.case_id else ""
    interactive = not (opts.batch or opts.quiet)

    if interactive:
        try:
            self.case_id = input(_("Please enter the case id "
                                   "that you are generating this "
                                   "report for [%s]: ") % caseid)
            self._print()
        except KeyboardInterrupt:
            # a clean newline before propagating the abort
            self._print()
            raise

    # the command-line value always overrides interactive input
    if opts.case_id:
        self.case_id = opts.case_id
    return
def collect(self):
    """Run every loaded plugin's collect() with an in-place progress
    line on stdout.

    Python 2 code (itertools.izip).
    """
    self.ui_log.info(_(" Running plugins. Please wait ..."))
    self.ui_log.info("")
    plugruncount = 0
    # NOTE(review): single-argument izip() wraps each (name, plugin) pair
    # in a 1-tuple, hence the i[0] unwrap below — plain iteration with
    # enumerate() would presumably be equivalent; confirm before changing.
    for i in izip(self.loaded_plugins):
        plugruncount += 1
        plugname, plug = i[0]
        if not self.opts.quiet:
            # \r rewrites the same terminal line for each plugin
            sys.stdout.write("\r Running %d/%d: %s... " % (plugruncount, len(self.loaded_plugins), plugname))
            sys.stdout.flush()
        try:
            plug.collect()
        except KeyboardInterrupt:
            raise
        except:
            # any other plugin failure is logged unless --debug style
            # re-raise was requested
            if self.raise_plugins:
                raise
            else:
                self._log_plugin_exception(plugname)
    self.ui_log.info("")
def diagnose(self):
    """Run every plugin's diagnose() pass, report any accumulated
    diagnostic messages, and ask for confirmation before continuing.

    Python 2 code (raw_input, print-era style).
    """
    tmpcount = 0
    # NOTE(review): iterates GlobalVars.loadedplugins here but
    # self.loaded_plugins below — confirm both refer to the same list.
    for plugname, plug in GlobalVars.loadedplugins:
        try:
            plug.diagnose()
        except:
            if self.raise_plugins:
                raise
            else:
                self._log_plugin_exception(plugname)
        tmpcount += len(plug.diagnose_msgs)

    if tmpcount > 0:
        self.ui_log.info(_("One or more plugins have detected a problem in your "
                           "configuration."))
        self.ui_log.info(_("Please review the following messages:"))
        self.ui_log.info("")

        # NOTE(review): rptdir and soslog are not defined in this method —
        # presumably module-level names; verify they exist at runtime.
        fp = open(os.path.join(rptdir, "diagnose.txt"), "w")
        for plugname, plug in self.loaded_plugins:
            for tmpcount2 in range(0, len(plug.diagnose_msgs)):
                if tmpcount2 == 0:
                    # print the plugin header once before its messages
                    soslog.warning("%s:" % plugname)
                soslog.warning(" * %s" % plug.diagnose_msgs[tmpcount2])
                fp.write("%s: %s\n" % (plugname, plug.diagnose_msgs[tmpcount2]))
        fp.close()
        self.ui_log.info("")

        if not self.opts.batch:
            try:
                # loop until the user answers y/Y (continue) or n/N (quit)
                while True:
                    yorno = raw_input(
                        _("Are you sure you would like to "
                          "continue (y/n) ? ")
                    )
                    if yorno == _("y") or yorno == _("Y"):
                        self.ui_log.info("")
                        break
                    elif yorno == _("n") or yorno == _("N"):
                        self._exit(0)
                    del yorno
            except KeyboardInterrupt:
                self.ui_log.info("")
                self._exit(0)
def setup(self):
    """Log the effective command line, then attach the archive to each
    loaded plugin and run its setup()."""
    # record exactly how sosreport was invoked before plugin setup starts
    self.soslog.info("[%s:%s] executing 'sosreport %s'"
                     % (__name__, "setup", " ".join(self._args)))
    self.ui_log.info(_(" Setting up plugins ..."))
    for plugname, plug in self.loaded_plugins:
        try:
            plug.archive = self.archive
            plug.setup()
        except KeyboardInterrupt:
            # user abort propagates untouched
            raise
        except (OSError, IOError) as err:
            if err.errno in fatal_fs_errors:
                self.ui_log.error("")
                self.ui_log.error(" %s while setting up plugins" % err.strerror)
                self.ui_log.error("")
                self._exit(1)
            # non-fatal filesystem errors fall through silently, matching
            # the original behaviour
        except:
            if not self.raise_plugins:
                self._log_plugin_exception(plugname)
            else:
                raise
def prework(self):
    """Run policy pre-work and set up the report archive; exits on
    fatal filesystem or unexpected errors."""
    self.policy.pre_work()

    try:
        self.ui_log.info(_(" Setting up archive ..."))
        self._set_archive()
        self._make_archive_paths()
        return
    except (OSError, IOError) as e:
        if e.errno in fatal_fs_errors:
            self.ui_log.error("")
            self.ui_log.error(" %s while setting up archive" % e.strerror)
            self.ui_log.error("")
        else:
            raise e
    except Exception as e:
        import traceback
        self.ui_log.error("")
        self.ui_log.error(" Unexpected exception setting up archive:")
        # FIX: print_exc() takes a traceback depth limit, not an exception
        # object; the active exception is picked up implicitly.
        traceback.print_exc()
        self.ui_log.error(e)
    self._exit(1)
def list_plugins(self):
    """Print enabled plugins, disabled plugins (with reason) and the
    available plugin options, then exit."""
    if not self.loaded_plugins and not self.skipped_plugins:
        self.soslog.fatal(_("no valid plugins found"))
        self._exit(1)

    if self.loaded_plugins:
        self.ui_log.info(_("The following plugins are currently enabled:"))
        self.ui_log.info("")
        for (plugname, plug) in self.loaded_plugins:
            self.ui_log.info(" %-15s %s" % (plugname, plug.get_description()))
    else:
        self.ui_log.info(_("No plugin enabled."))
    self.ui_log.info("")

    if self.skipped_plugins:
        self.ui_log.info(_("The following plugins are currently disabled:"))
        self.ui_log.info("")
        for (plugname, plugclass, reason) in self.skipped_plugins:
            self.ui_log.info(" %-15s %-14s %s" % (plugname, reason, plugclass.get_description()))
    self.ui_log.info("")

    if self.all_options:
        self.ui_log.info(_("The following plugin options are available:"))
        self.ui_log.info("")
        for (plug, plugname, optname, optparm) in self.all_options:
            # format and colorize option value based on its type (int or bool)
            # FIX: isinstance() instead of type() == bool; truthiness
            # instead of comparing == True
            if isinstance(optparm["enabled"], bool):
                tmpopt = "on" if optparm["enabled"] else "off"
            else:
                tmpopt = optparm["enabled"]
            self.ui_log.info(" %-25s %-15s %s" % (
                plugname + "." + optname, tmpopt, optparm["desc"]))
    else:
        self.ui_log.info(_("No plugin options available."))

    self.ui_log.info("")
    self._exit()
def ensure_plugins(self):
    """Abort the run with an error when no plugin could be loaded."""
    if self.loaded_plugins:
        return
    self.soslog.error(_("no valid plugins were enabled"))
    self._exit(1)
class RHELPolicy(RedHatPolicy):
    """Policy for Red Hat Enterprise Linux hosts: detection via
    /etc/os-release, major-version probing, and preset selection for
    Red Hat layered products."""
    distro = RHEL_RELEASE_STR
    vendor = "Red Hat"
    vendor_url = "https://access.redhat.com/support/"
    # banner shown to the user before collection begins
    msg = _("""\
This command will collect diagnostic and configuration \
information from this %(distro)s system and installed \
applications.

An archive containing the collected information will be \
generated in %(tmpdir)s and may be provided to a %(vendor)s \
support representative.
""" + disclaimer_text + "%(vendor_text)s\n")

    def __init__(self, sysroot=None):
        super(RHELPolicy, self).__init__(sysroot=sysroot)
        self.register_presets(rhel_presets)

    @classmethod
    def check(cls):
        """Test to see if the running host is a RHEL installation.

            Checks for the presence of the "Red Hat Enterprise Linux"
            release string at the beginning of the NAME field in the
            `/etc/os-release` file and returns ``True`` if it is found,
            and ``False`` otherwise.

            :returns: ``True`` if the host is running RHEL or ``False``
                      otherwise.
        """
        if not os.path.exists(OS_RELEASE):
            return False

        with open(OS_RELEASE, "r") as f:
            for line in f:
                if line.startswith("NAME"):
                    # FIX: split on the first '=' only, so values that
                    # themselves contain '=' do not raise ValueError;
                    # strip whitespace/newline before the quotes so the
                    # trailing quote is actually removed.
                    (name, value) = line.split("=", 1)
                    value = value.strip().strip("\"'")
                    if value.startswith(cls.distro):
                        return True
        return False

    def dist_version(self):
        """Return the RHEL major version as an int, or False when it
        cannot be determined from the redhat-release package."""
        try:
            rr = self.package_manager.all_pkgs_by_name_regex("redhat-release*")
            pkgname = self.pkgs[rr[0]]["version"]
            if pkgname[0] == "4":
                return 4
            # FIX: pkgname[0] is a single character and could never equal
            # "5Server"/"5Client", so RHEL 5 detection always failed;
            # both of those version strings start with "5".
            elif pkgname[0] == "5":
                return 5
            elif pkgname[0] == "6":
                return 6
            elif pkgname[0] == "7":
                return 7
            elif pkgname[0] == "8":
                return 8
        except Exception:
            pass
        return False

    def probe_preset(self):
        """Select a preset based on installed product packages; plain
        RHEL is the fallback."""
        # Package based checks
        if self.pkg_by_name("satellite-common") is not None:
            return self.find_preset(RH_SATELLITE)
        if self.pkg_by_name("rhosp-release") is not None:
            return self.find_preset(RHOSP)
        if self.pkg_by_name("cfme") is not None:
            return self.find_preset(RH_CFME)
        if self.pkg_by_name("ovirt-engine") is not None or \
                self.pkg_by_name("vdsm") is not None:
            return self.find_preset(RHV)

        # Vanilla RHEL is default
        return self.find_preset(RHEL)
class RHELPolicy(RedHatPolicy):
    """CentOS-branded variant of the Red Hat policy.

    NOTE(review): the class is still named RHELPolicy and registers
    rhel_presets while carrying CentOS branding — presumably adapted by
    copy; confirm the intended naming.
    """
    distro = "CentOS Linux"
    vendor = "CentOS"
    vendor_url = "https://wiki.centos.org/"
    # banner shown to the user before collection begins
    msg = _("""\
This command will collect diagnostic and configuration \
information from this %(distro)s system and installed \
applications.

An archive containing the collected information will be \
generated in %(tmpdir)s and may be provided to a %(vendor)s \
support representative.
""" + disclaimer_text + "%(vendor_text)s\n")

    def __init__(self, sysroot=None):
        super(RHELPolicy, self).__init__(sysroot=sysroot)
        self.register_presets(rhel_presets)

    @classmethod
    def check(cls):
        """Test to see if the running host is a RHEL installation.

            Checks for the presence of the "CentOS Linux" release string at
            the beginning of the NAME field in the `/etc/os-release` file
            and returns ``True`` if it is found, and ``False`` otherwise.

            :returns: ``True`` if the host is running RHEL or ``False``
                      otherwise.
        """
        # NOTE(review): despite the docstring, the code matches
        # RHEL_RELEASE_STR rather than the class's "CentOS Linux" distro
        # string — confirm which distribution should be detected here.
        if not os.path.exists(OS_RELEASE):
            return False

        with open(OS_RELEASE, "r") as f:
            for line in f:
                if line.startswith("NAME"):
                    # NOTE(review): split("=") raises ValueError if the
                    # value contains '='; split("=", 1) would be safer.
                    (name, value) = line.split("=")
                    value = value.strip("\"'")
                    if value.startswith(RHEL_RELEASE_STR):
                        return True
        return False

    def dist_version(self):
        # Derive the major version from the redhat-release package version.
        # NOTE(review): pkgname[0] is a single character, so the membership
        # test against ["5Server", "5Client"] can never be true — version 5
        # detection always falls through to the generic False return.
        try:
            rr = self.package_manager.all_pkgs_by_name_regex("redhat-release*")
            pkgname = self.pkgs[rr[0]]["version"]
            if pkgname[0] == "4":
                return 4
            elif pkgname[0] in ["5Server", "5Client"]:
                return 5
            elif pkgname[0] == "6":
                return 6
            elif pkgname[0] == "7":
                return 7
        except Exception:
            pass
        return False

    def rhn_username(self):
        # Best-effort lookup of the RHN account name registered for this
        # host; any failure yields an empty string.
        try:
            # cfg = config.initUp2dateConfig()
            rhn_username = rpclib.xmlrpclib.loads(
                up2dateAuth.getSystemId())[0][0]['username']
            return rhn_username.encode('utf-8', 'ignore')
        except Exception:
            # ignore any exception and return an empty username
            return ""

    def get_local_name(self):
        # prefer the RHN username, fall back to the hostname
        return self.rhn_username() or self.host_name()

    def probe_preset(self):
        # Package based checks
        if self.pkg_by_name("satellite-common") is not None:
            return self.find_preset(RH_SATELLITE)
        if self.pkg_by_name("rhosp-release") is not None:
            return self.find_preset(RHOSP)

        # Vanilla RHEL is default
        return self.find_preset(RHEL)
class Policy(object):
    """Policies represent distributions that sos supports, and define the way
    in which sos behaves on those distributions. A policy should define at
    minimum a way to identify the distribution, and a package manager to allow
    for package based plugin enablement.

    Policies also control preferred ContainerRuntime()'s, upload support to
    default locations for distribution vendors, disclaimer text, and default
    presets supported by that distribution or vendor's products.

    Every Policy will also need at least one "tagging class" for plugins.

    :param sysroot: Set the sysroot for the system, if not /
    :type sysroot: ``str`` or ``None``

    :param probe_runtime: Should the Policy try to load a ContainerRuntime
    :type probe_runtime: ``bool``

    :cvar distro: The name of the distribution the Policy represents
    :vartype distro: ``str``

    :cvar vendor: The name of the vendor producing the distribution
    :vartype vendor: ``str``

    :cvar vendor_urls: List of URLs for the vendor's website, or support portal
    :vartype vendor_urls: ``list`` of ``tuple`` formatted
        ``(description, url)``

    :cvar vendor_text: Additional text to add to the banner message
    :vartype vendor_text: ``str``

    :cvar name_pattern: The naming pattern to be used for naming archives
                        generated by sos. Values of `legacy`, and `friendly`
                        are preset patterns. May also be set to an explicit
                        custom pattern, see `get_archive_name()`
    :vartype name_pattern: ``str``
    """

    msg = _("""\
This command will collect system configuration and diagnostic information \
from this %(distro)s system.

For more information on %(vendor)s visit:

  %(vendor_urls)s

The generated archive may contain data considered sensitive and its content \
should be reviewed by the originating organization before being passed to \
any third party.

%(changes_text)s

%(vendor_text)s
""")

    distro = "Unknown"
    vendor = "Unknown"
    vendor_urls = [('Example URL', "http://www.example.com/")]
    vendor_text = ""
    PATH = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
    default_scl_prefix = ""
    name_pattern = 'legacy'
    presets = {"": PresetDefaults()}
    presets_path = PRESETS_PATH
    _in_container = False
    _host_sysroot = '/'

    def __init__(self, sysroot=None, probe_runtime=True):
        """Subclasses that choose to override this initializer should call
        super() to ensure that they get the required platform bits attached.
        super(SubClass, self).__init__(). Policies that require runtime
        tests to construct PATH must call self.set_exec_path() after
        modifying PATH in their own initializer."""
        self._parse_uname()
        self.case_id = None
        self.probe_runtime = probe_runtime
        self.package_manager = PackageManager()
        self.valid_subclasses = [IndependentPlugin]
        self.set_exec_path()
        self._host_sysroot = sysroot
        self.register_presets(GENERIC_PRESETS)

    def check(self, remote=''):
        """
        This function is responsible for determining if the underlying system
        is supported by this policy.

        If `remote` is provided, it should be the contents of os-release from
        a remote host, or a similar vendor-specific file that can be used in
        place of a locally available file.

        :returns: ``True`` if the Policy should be loaded, else ``False``
        :rtype: ``bool``
        """
        return False

    @property
    def forbidden_paths(self):
        """This property is used to determine the list of forbidden paths
        set by the policy. Note that this property will construct a
        *cumulative* list based on all subclasses of a given policy.

        :returns: All patterns of policy forbidden paths
        :rtype: ``list``
        """
        if not hasattr(self, '_forbidden_paths'):
            # Walk the MRO so paths set by every parent policy are included.
            self._forbidden_paths = []
            for cls in self.__class__.__mro__:
                if hasattr(cls, 'set_forbidden_paths'):
                    self._forbidden_paths.extend(cls.set_forbidden_paths())
        return list(set(self._forbidden_paths))

    @classmethod
    def set_forbidden_paths(cls):
        """Use this to *append* policy-specific forbidden paths that apply to
        all plugins. Setting this classmethod on an individual policy will
        *not* override subclass-specific paths
        """
        return ['*.pyc', '*.pyo', '*.swp']

    def in_container(self):
        """Are we running inside a container?

        :returns: ``True`` if in a container, else ``False``
        :rtype: ``bool``
        """
        return self._in_container

    def host_sysroot(self):
        """Get the host's default sysroot

        :returns: Host sysroot
        :rtype: ``str`` or ``None``
        """
        return self._host_sysroot

    def dist_version(self):
        """
        Return the OS version
        """
        pass

    def get_preferred_archive(self):
        """
        Return the class object of the prefered archive format for this
        platform
        """
        from sos.archive import TarFileArchive
        return TarFileArchive

    def get_archive_name(self):
        """
        This function should return the filename of the archive without the
        extension.

        This uses the policy's `name_pattern` attribute to determine the name.
        There are two pre-defined naming patterns - `legacy` and `friendly`
        that give names like the following:

        * legacy - `sosreport-tux.123456-20171224185433`
        * friendly - `sosreport-tux-mylabel-123456-2017-12-24-ezcfcop.tar.xz`

        A custom name_pattern can be used by a policy provided that it
        defines name_pattern using a format() style string substitution.

        Usable substitutions are:

        * name  - the short hostname of the system
        * label - the label given by --label
        * case  - the case id given by --case-id or --ticker-number
        * rand  - a random string of 7 alpha characters

        Note that if a datestamp is needed, the substring should be set
        in `name_pattern` in the format accepted by ``strftime()``.

        :returns: A name to be used for the archive, as expanded from
                  the Policy `name_pattern`
        :rtype: ``str``
        """
        name = self.get_local_name().split('.')[0]
        case = self.case_id
        label = self.commons['cmdlineopts'].label
        date = ''
        rand = ''.join(random.choice(string.ascii_lowercase) for x in range(7))

        if self.name_pattern == 'legacy':
            nstr = "sosreport-{name}{case}{date}"
            case = '.' + case if case else ''
            date = '-%Y%m%d%H%M%S'
        elif self.name_pattern == 'friendly':
            nstr = "sosreport-{name}{label}{case}{date}-{rand}"
            case = '-' + case if case else ''
            label = '-' + label if label else ''
            date = '-%Y-%m-%d'
        else:
            nstr = self.name_pattern

        nstr = nstr.format(
            name=name,
            label=label,
            case=case,
            date=date,
            rand=rand
        )
        # strftime expands any date placeholder left in the pattern
        return self.sanitize_filename(time.strftime(nstr))

    # for some specific binaries like "xz", we need to determine package
    # providing it; that is policy specific. By default return the binary
    # name itself until particular policy overwrites it
    def _get_pkg_name_for_binary(self, binary):
        return binary

    def get_cmd_for_compress_method(self, method, threads):
        """Determine the command to use for compressing the archive

        :param method: The compression method/binary to use
        :type method: ``str``

        :param threads: Number of threads compression should use
        :type threads: ``int``

        :returns: Full command to use to compress the archive
        :rtype: ``str``
        """
        cmd = method
        if cmd.startswith("xz"):
            # XZ set compression to -2 and use threads
            cmd = "%s -2 -T%d" % (cmd, threads)
        return cmd

    def get_tmp_dir(self, opt_tmp_dir):
        # honor --tmp-dir when given; otherwise use the system default
        if not opt_tmp_dir:
            return tempfile.gettempdir()
        return opt_tmp_dir

    def get_default_scl_prefix(self):
        return self.default_scl_prefix

    def match_plugin(self, plugin_classes):
        """Determine what subclass of a Plugin should be used based on the
        tagging classes assigned to the Plugin

        :param plugin_classes: The classes that the Plugin subclasses
        :type plugin_classes: ``list``

        :returns: The first subclass that matches one of the Policy's
                  `valid_subclasses`
        :rtype: A tagging class for Plugins
        """
        if len(plugin_classes) > 1:
            for p in plugin_classes:
                # Give preference to the first listed tagging class
                # so that e.g. UbuntuPlugin is chosen over DebianPlugin
                # on an Ubuntu installation.
                if issubclass(p, self.valid_subclasses[0]):
                    return p
        return plugin_classes[0]

    def validate_plugin(self, plugin_class, experimental=False):
        """
        Verifies that the plugin_class should execute under this policy

        :param plugin_class: The tagging class being checked
        :type plugin_class: A Plugin() tagging class

        :returns: ``True`` if the `plugin_class` is allowed by the policy
        :rtype: ``bool``
        """
        valid_subclasses = [IndependentPlugin] + self.valid_subclasses
        if experimental:
            valid_subclasses += [ExperimentalPlugin]
        return any(issubclass(plugin_class, class_)
                   for class_ in valid_subclasses)

    def pre_work(self):
        """
        This function is called prior to collection.
        """
        pass

    def post_work(self):
        """
        This function is called after the sosreport has been generated.
        """
        pass

    def pkg_by_name(self, pkg):
        """Wrapper to retrieve a package from the Policy's package manager

        :param pkg: The name of the package
        :type pkg: ``str``

        :returns: The first package that matches `pkg`
        :rtype: ``str``
        """
        return self.package_manager.pkg_by_name(pkg)

    def _parse_uname(self):
        # Cache the platform identity fields used throughout the policy.
        (system, node, release,
         version, machine, processor) = platform.uname()
        self.system = system
        self.hostname = node
        self.release = release
        self.smp = version.split()[1] == "SMP"
        self.machine = machine

    def set_commons(self, commons):
        """Set common host data for the Policy to reference
        """
        self.commons = commons

    def _set_PATH(self, path):
        os.environ['PATH'] = path

    def set_exec_path(self):
        self._set_PATH(self.PATH)

    def is_root(self):
        """This method should return true if the user calling the script is
        considered to be a superuser

        :returns: ``True`` if user is superuser, else ``False``
        :rtype: ``bool``
        """
        return (os.getuid() == 0)

    def get_preferred_hash_name(self):
        """Returns the string name of the hashlib-supported checksum
        algorithm to use"""
        # NOTE(review): md5 is used for integrity display only, not
        # security; distribution policies may override this.
        return "md5"

    def display_results(self, archive, directory, checksum, archivestat=None,
                        map_file=None):
        """Display final information about a generated archive

        :param archive: The name of the archive that was generated
        :type archive: ``str``

        :param directory: The build directory for sos if --build was used
        :type directory: ``str``

        :param checksum: The checksum of the archive
        :type checksum: ``str``

        :param archivestat: stat() information for the archive
        :type archivestat: `os.stat_result`

        :param map_file: If sos clean was invoked, the location of the
                         mapping file for this run
        :type map_file: ``str``
        """
        # Logging is already shutdown and all terminal output must use the
        # print() call.

        # make sure a report exists
        if not archive and not directory:
            return False

        self._print()
        if map_file:
            self._print(
                _("A mapping of obfuscated elements is available at"
                  "\n\t%s\n" % map_file))
        if archive:
            self._print(_("Your sosreport has been generated and saved "
                          "in:\n\t%s\n") % archive, always=True)
            self._print(
                _(" Size\t%s") % get_human_readable(archivestat.st_size))
            self._print(_(" Owner\t%s") %
                        getpwuid(archivestat.st_uid).pw_name)
        else:
            self._print(_("Your sosreport build tree has been generated "
                          "in:\n\t%s\n") % directory, always=True)
        if checksum:
            self._print(" " + self.get_preferred_hash_name() + "\t" + checksum)
            self._print()

        self._print(_("Please send this file to your support "
                      "representative."))
        self._print()

    def _print(self, msg=None, always=False):
        """A wrapper around print that only prints if we are not running in
        quiet mode"""
        if always or not self.commons['cmdlineopts'].quiet:
            if msg:
                print(msg)
            else:
                print()

    def get_msg(self):
        """This method is used to prepare the preamble text to display to
        the user in non-batch mode. If your policy sets self.distro that
        text will be substituted accordingly. You can also override this
        method to do something more complicated.

        :returns: Formatted banner message string
        :rtype: ``str``
        """
        if self.commons['cmdlineopts'].allow_system_changes:
            changes_text = "Changes CAN be made to system configuration."
        else:
            changes_text = "No changes will be made to system configuration."
        width = 72
        _msg = self.msg % {
            'distro': self.distro,
            'vendor': self.vendor,
            'vendor_urls': self._fmt_vendor_urls(),
            'vendor_text': self.vendor_text,
            'tmpdir': self.commons['tmpdir'],
            'changes_text': changes_text
        }
        _fmt = ""
        # wrap each paragraph to the banner width, preserving blank lines
        for line in _msg.splitlines():
            _fmt = _fmt + fill(line, width, replace_whitespace=False) + '\n'
        return _fmt

    def _fmt_vendor_urls(self):
        """Formats all items in the ``vendor_urls`` class attr into a usable
        string for the banner message.

        :returns: Formatted string of URLS
        :rtype: ``str``
        """
        # pad descriptions to the longest entry so the URLs line up
        width = max([len(v[0]) for v in self.vendor_urls])
        return "\n".join("\t{desc:<{width}} : {url}".format(
            desc=u[0], width=width, url=u[1]) for u in self.vendor_urls)

    def register_presets(self, presets, replace=False):
        """Add new presets to this policy object.

        Merges the presets dictionary ``presets`` into this ``Policy``
        object, or replaces the current presets if ``replace`` is
        ``True``.

        ``presets`` should be a dictionary mapping ``str`` preset names
        to ``<class PresetDefaults>`` objects specifying the command
        line defaults.

        :param presets: dictionary of presets to add or replace
        :param replace: replace presets rather than merge new presets.
        """
        if replace:
            self.presets = {}
        self.presets.update(presets)

    def find_preset(self, preset):
        """Find a preset profile matching the specified preset string.

        :param preset: a string containing a preset profile name.
        :returns: a matching PresetProfile.
        """
        # FIXME: allow fuzzy matching?
        for match in self.presets.keys():
            if match == preset:
                return self.presets[match]
        return None

    def probe_preset(self):
        """Return a ``PresetDefaults`` object matching the runing host.

        Stub method to be implemented by derived policy classes.

        :returns: a ``PresetDefaults`` object.
        """
        return self.presets[NO_PRESET]

    def load_presets(self, presets_path=None):
        """Load presets from disk.

        Read JSON formatted preset data from the specified path,
        or the default location at ``/var/lib/sos/presets``.

        :param presets_path: a directory containing JSON presets.
        """
        presets_path = presets_path or self.presets_path
        if not os.path.exists(presets_path):
            return
        for preset_path in os.listdir(presets_path):
            preset_path = os.path.join(presets_path, preset_path)
            with open(preset_path) as pf:
                try:
                    preset_data = json.load(pf)
                except ValueError:
                    # skip files that are not valid JSON
                    continue
            for preset in preset_data.keys():
                pd = PresetDefaults(preset, opts=SoSOptions())
                data = preset_data[preset]
                pd.desc = data[DESC] if DESC in data else ""
                pd.note = data[NOTE] if NOTE in data else ""
                if OPTS in data:
                    for arg in data[OPTS]:
                        setattr(pd.opts, arg, data[OPTS][arg])
                # presets read from disk are user-defined, not built-in
                pd.builtin = False
                self.presets[preset] = pd

    def add_preset(self, name=None, desc=None, note=None, opts=SoSOptions()):
        """Add a new on-disk preset and write it to the configured
        presets path.

        :param preset: the new PresetDefaults to add
        """
        presets_path = self.presets_path
        if not name:
            raise ValueError("Preset name cannot be empty")
        if name in self.presets.keys():
            raise ValueError("A preset with name '%s' already exists" % name)
        preset = PresetDefaults(name=name, desc=desc, note=note, opts=opts)
        preset.builtin = False
        self.presets[preset.name] = preset
        preset.write(presets_path)

    def del_preset(self, name=""):
        if not name or name not in self.presets.keys():
            raise ValueError("Unknown profile: '%s'" % name)
        preset = self.presets[name]
        if preset.builtin:
            raise ValueError("Cannot delete built-in preset '%s'" %
                             preset.name)
        preset.delete(self.presets_path)
        self.presets.pop(name)
ftp.connect(host, port) if username and passwd: ftp.login(username, passwd) else: ftp.login() ftp.cwd(path) ftp.set_pasv(True) ftp.storbinary('STOR %s' % upload_name, fp) ftp.quit() except Exception, e: self._print( _("There was a problem uploading your report to Red Hat support. " + str(e))) else: self._print( _("Your report was successfully uploaded to %s with name:" % (upload_url, ))) self._print(" " + upload_name) self._print() self._print( _("Please communicate this name to your support representative." )) self._print() fp.close() def _print(self, msg=None): """A wrapper around print that only prints if we are not running in quiet mode""" if not self.commons['cmdlineopts'].quiet: if msg: print msg
class Policy(object):
    # Legacy (pre vendor_urls) base policy; _print uses the six-style
    # print_ helper, so this variant supports Python 2 as well.

    msg = _("""\
This command will collect system configuration and diagnostic information \
from this %(distro)s system.

For more information on %(vendor)s visit:

  %(vendor_url)s

The generated archive may contain data considered sensitive and its content \
should be reviewed by the originating organization before being passed to \
any third party.

%(changes_text)s

%(vendor_text)s
""")

    distro = "Unknown"
    vendor = "Unknown"
    vendor_url = "http://www.example.com/"
    vendor_text = ""
    PATH = ""
    default_scl_prefix = ""
    name_pattern = 'legacy'
    presets = {"": PresetDefaults()}
    presets_path = PRESETS_PATH
    _in_container = False
    _host_sysroot = '/'

    def __init__(self, sysroot=None):
        """Subclasses that choose to override this initializer should call
        super() to ensure that they get the required platform bits attached.
        super(SubClass, self).__init__(). Policies that require runtime
        tests to construct PATH must call self.set_exec_path() after
        modifying PATH in their own initializer."""
        self._parse_uname()
        self.case_id = None
        self.package_manager = PackageManager()
        self._valid_subclasses = []
        self.set_exec_path()
        self._host_sysroot = sysroot
        self.register_presets(GENERIC_PRESETS)

    def get_valid_subclasses(self):
        # IndependentPlugin is always valid, regardless of distribution.
        return [IndependentPlugin] + self._valid_subclasses

    def set_valid_subclasses(self, subclasses):
        self._valid_subclasses = subclasses

    def del_valid_subclasses(self):
        del self._valid_subclasses

    valid_subclasses = property(get_valid_subclasses,
                                set_valid_subclasses,
                                del_valid_subclasses,
                                "list of subclasses that this policy can "
                                "process")

    def check(self):
        """
        This function is responsible for determining if the underlying system
        is supported by this policy.
        """
        return False

    def in_container(self):
        """
        Returns True if sos is running inside a container environment.
        """
        return self._in_container

    def host_sysroot(self):
        return self._host_sysroot

    def dist_version(self):
        """
        Return the OS version
        """
        pass

    def get_preferred_archive(self):
        """
        Return the class object of the prefered archive format for this
        platform
        """
        from sos.archive import TarFileArchive
        return TarFileArchive

    def get_archive_name(self):
        """
        This function should return the filename of the archive without the
        extension.

        This uses the policy's name_pattern attribute to determine the name.
        There are two pre-defined naming patterns - 'legacy' and 'friendly'
        that give names like the following:

        legacy - 'sosreport-tux.123456-20171224185433'
        friendly - 'sosreport-tux-mylabel-123456-2017-12-24-ezcfcop.tar.xz'

        A custom name_pattern can be used by a policy provided that it
        defines name_pattern using a format() style string substitution.

        Usable substitutions are:

        name - the short hostname of the system
        label - the label given by --label
        case - the case id given by --case-id or --ticker-number
        rand - a random string of 7 alpha characters

        Note that if a datestamp is needed, the substring should be set
        in the name_pattern in the format accepted by strftime().
        """
        name = self.get_local_name().split('.')[0]
        case = self.case_id
        label = self.commons['cmdlineopts'].label
        date = ''
        rand = ''.join(random.choice(string.ascii_lowercase) for x in range(7))

        if self.name_pattern == 'legacy':
            nstr = "sosreport-{name}{case}{date}"
            case = '.' + case if case else ''
            date = '-%Y%m%d%H%M%S'
        elif self.name_pattern == 'friendly':
            nstr = "sosreport-{name}{label}{case}{date}-{rand}"
            case = '-' + case if case else ''
            label = '-' + label if label else ''
            date = '-%Y-%m-%d'
        else:
            nstr = self.name_pattern

        nstr = nstr.format(
            name=name,
            label=label,
            case=case,
            date=date,
            rand=rand
        )
        # strftime expands any date placeholder left in the pattern
        return self.sanitize_filename(time.strftime(nstr))

    # for some specific binaries like "xz", we need to determine package
    # providing it; that is policy specific. By default return the binary
    # name itself until particular policy overwrites it
    def _get_pkg_name_for_binary(self, binary):
        return binary

    def get_cmd_for_compress_method(self, method, threads):
        cmd = method
        # use fast compression if using xz or bz2
        if cmd != "gzip":
            cmd = "%s -2" % cmd
        # determine number of threads to use for compressing - applicable
        # only for xz and of version 5.2 or higher
        if cmd.startswith("xz"):
            try:
                xz_package = self._get_pkg_name_for_binary(method)
                xz_version = self.package_manager\
                    .all_pkgs()[xz_package]["version"]
            except Exception as e:
                xz_version = [u'0']  # deal like xz version is really old
            # NOTE(review): assumes the package manager stores "version"
            # as a list of unicode components comparable to [u'5', u'2'];
            # a plain version string would compare differently - confirm.
            if xz_version >= [u'5', u'2']:
                cmd = "%s -T%d" % (cmd, threads)
        return cmd

    def get_tmp_dir(self, opt_tmp_dir):
        # honor --tmp-dir when given; otherwise use the system default
        if not opt_tmp_dir:
            return tempfile.gettempdir()
        return opt_tmp_dir

    def get_default_scl_prefix(self):
        return self.default_scl_prefix

    def match_plugin(self, plugin_classes):
        if len(plugin_classes) > 1:
            for p in plugin_classes:
                # Give preference to the first listed tagging class
                # so that e.g. UbuntuPlugin is chosen over DebianPlugin
                # on an Ubuntu installation.
                if issubclass(p, self.valid_subclasses[0]):
                    return p
        return plugin_classes[0]

    def validate_plugin(self, plugin_class, experimental=False):
        """
        Verifies that the plugin_class should execute under this policy
        """
        valid_subclasses = [IndependentPlugin] + self.valid_subclasses
        if experimental:
            valid_subclasses += [ExperimentalPlugin]
        return any(issubclass(plugin_class, class_)
                   for class_ in valid_subclasses)

    def pre_work(self):
        """
        This function is called prior to collection.
        """
        pass

    def post_work(self):
        """
        This function is called after the sosreport has been generated.
        """
        pass

    def pkg_by_name(self, pkg):
        return self.package_manager.pkg_by_name(pkg)

    def _parse_uname(self):
        # Cache the platform identity fields used throughout the policy.
        (system, node, release,
         version, machine, processor) = platform.uname()
        self.system = system
        self.hostname = node
        self.release = release
        self.smp = version.split()[1] == "SMP"
        self.machine = machine

    def set_commons(self, commons):
        self.commons = commons

    def _set_PATH(self, path):
        os.environ['PATH'] = path

    def set_exec_path(self):
        self._set_PATH(self.PATH)

    def is_root(self):
        """This method should return true if the user calling the script is
        considered to be a superuser"""
        return (os.getuid() == 0)

    def get_preferred_hash_name(self):
        """Returns the string name of the hashlib-supported checksum
        algorithm to use"""
        return "md5"

    def display_results(self, archive, directory, checksum, archivestat=None):
        # Display results is called from the tail of SoSReport.final_work()
        #
        # Logging is already shutdown and all terminal output must use the
        # print() call.

        # make sure a report exists
        if not archive and not directory:
            return False

        self._print()
        if archive:
            self._print(_("Your sosreport has been generated and saved "
                          "in:\n %s\n") % archive, always=True)
            self._print(
                _(" Size\t%s") % get_human_readable(archivestat.st_size))
            self._print(_(" Owner\t%s") %
                        getpwuid(archivestat.st_uid).pw_name)
        else:
            self._print(_("Your sosreport build tree has been generated "
                          "in:\n %s\n") % directory, always=True)
        if checksum:
            self._print(" " + self.get_preferred_hash_name() + "\t" + checksum)
            self._print()

        self._print(_("Please send this file to your support "
                      "representative."))
        self._print()

    def _print(self, msg=None, always=False):
        """A wrapper around print that only prints if we are not running in
        quiet mode"""
        if always or not self.commons['cmdlineopts'].quiet:
            if msg:
                print_(msg)
            else:
                print_()

    def get_msg(self):
        """This method is used to prepare the preamble text to display to
        the user in non-batch mode. If your policy sets self.distro that
        text will be substituted accordingly. You can also override this
        method to do something more complicated."""
        if self.commons['cmdlineopts'].allow_system_changes:
            changes_text = "Changes CAN be made to system configuration."
        else:
            changes_text = "No changes will be made to system configuration."
        width = 72
        _msg = self.msg % {'distro': self.distro, 'vendor': self.vendor,
                           'vendor_url': self.vendor_url,
                           'vendor_text': self.vendor_text,
                           'tmpdir': self.commons['tmpdir'],
                           'changes_text': changes_text}
        _fmt = ""
        # wrap each paragraph to the banner width, preserving blank lines
        for line in _msg.splitlines():
            _fmt = _fmt + fill(line, width, replace_whitespace=False) + '\n'
        return _fmt

    def register_presets(self, presets, replace=False):
        """Add new presets to this policy object.

        Merges the presets dictionary ``presets`` into this ``Policy``
        object, or replaces the current presets if ``replace`` is
        ``True``.

        ``presets`` should be a dictionary mapping ``str`` preset names
        to ``<class PresetDefaults>`` objects specifying the command
        line defaults.

        :param presets: dictionary of presets to add or replace
        :param replace: replace presets rather than merge new presets.
        """
        if replace:
            self.presets = {}
        self.presets.update(presets)

    def find_preset(self, preset):
        """Find a preset profile matching the specified preset string.

        :param preset: a string containing a preset profile name.
        :returns: a matching PresetProfile.
        """
        # FIXME: allow fuzzy matching?
        for match in self.presets.keys():
            if match == preset:
                return self.presets[match]
        return None

    def probe_preset(self):
        """Return a ``PresetDefaults`` object matching the runing host.

        Stub method to be implemented by derived policy classes.

        :returns: a ``PresetDefaults`` object.
        """
        return self.presets[NO_PRESET]

    def load_presets(self, presets_path=None):
        """Load presets from disk.

        Read JSON formatted preset data from the specified path,
        or the default location at ``/var/lib/sos/presets``.

        :param presets_path: a directory containing JSON presets.
        """
        presets_path = presets_path or self.presets_path
        if not os.path.exists(presets_path):
            return
        for preset_path in os.listdir(presets_path):
            preset_path = os.path.join(presets_path, preset_path)
            try:
                # NOTE(review): file handle is never closed explicitly;
                # newer revisions use a `with` block here.
                preset_data = json.load(open(preset_path))
            except ValueError:
                # skip files that are not valid JSON
                continue
            for preset in preset_data.keys():
                pd = PresetDefaults(preset, opts=SoSOptions())
                data = preset_data[preset]
                pd.desc = data[DESC] if DESC in data else ""
                pd.note = data[NOTE] if NOTE in data else ""
                if OPTS in data:
                    for arg in _arg_names:
                        if arg in data[OPTS]:
                            setattr(pd.opts, arg, data[OPTS][arg])
                # presets read from disk are user-defined, not built-in
                pd.builtin = False
                self.presets[preset] = pd

    def add_preset(self, name=None, desc=None, note=None, opts=SoSOptions()):
        """Add a new on-disk preset and write it to the configured
        presets path.

        :param preset: the new PresetDefaults to add
        """
        presets_path = self.presets_path
        if not name:
            raise ValueError("Preset name cannot be empty")
        if name in self.presets.keys():
            raise ValueError("A preset with name '%s' already exists" % name)
        preset = PresetDefaults(name=name, desc=desc, note=note, opts=opts)
        preset.builtin = False
        self.presets[preset.name] = preset
        preset.write(presets_path)

    def del_preset(self, name=""):
        if not name or name not in self.presets.keys():
            raise ValueError("Unknown profile: '%s'" % name)
        preset = self.presets[name]
        if preset.builtin:
            raise ValueError("Cannot delete built-in preset '%s'" %
                             preset.name)
        preset.delete(self.presets_path)
        self.presets.pop(name)
class RedHatCoreOSPolicy(RHELPolicy):
    """Policy for Red Hat CoreOS hosts.

    Collection on RHCOS runs sos inside a privileged support-tools
    container (podman) with the host filesystem mounted at /host.
    """
    distro = "Red Hat CoreOS"
    msg = _("""\
This command will collect diagnostic and configuration \
information from this %(distro)s system.

An archive containing the collected information will be \
generated in %(tmpdir)s and may be provided to a %(vendor)s \
support representative.
""" + disclaimer_text + "%(vendor_text)s\n")

    containerized = True
    container_runtime = 'podman'
    container_image = 'registry.redhat.io/rhel8/support-tools'
    # Paths reported by plugins have this prefix stripped, since the host
    # filesystem is mounted at /host inside the collection container.
    sos_path_strip = '/host'
    container_version_command = 'rpm -q sos'

    def __init__(self, sysroot=None, init=None, probe_runtime=True,
                 remote_exec=None):
        super(RedHatCoreOSPolicy, self).__init__(sysroot=sysroot, init=init,
                                                 probe_runtime=probe_runtime,
                                                 remote_exec=remote_exec)

    @classmethod
    def check(cls, remote=''):
        """Test if the (local or remote) host is running Red Hat CoreOS.

        :param remote: contents of a remote host's os-release file, when
                       checking a host other than the local one
        :returns: ``True`` if the host is RHCOS, else ``False``
        """
        if remote:
            return 'CoreOS' in remote

        coreos = False
        if ENV_HOST_SYSROOT not in os.environ:
            return coreos
        host_release = os.environ[ENV_HOST_SYSROOT] + cls._redhat_release
        try:
            # use a context manager so the release file is always closed
            # (the previous revision leaked the file handle)
            with open(host_release, 'r') as relfile:
                for line in relfile.read().splitlines():
                    coreos |= 'Red Hat Enterprise Linux CoreOS' in line
        except IOError:
            # unreadable/missing release file: not CoreOS
            pass
        return coreos

    def probe_preset(self):
        # As of the creation of this policy, RHCOS is only available for
        # RH OCP environments.
        return self.find_preset(RHOCP)

    def create_sos_container(self):
        """Return the full command used to start the privileged
        support-tools container that sos runs in on RHCOS.
        """
        _cmd = ("{runtime} run -di --name {name} --privileged --ipc=host"
                " --net=host --pid=host -e HOST=/host -e NAME={name} -e "
                "IMAGE={image} -v /run:/run -v /var/log:/var/log -v "
                "/etc/machine-id:/etc/machine-id -v "
                "/etc/localtime:/etc/localtime -v /:/host {image}")
        return _cmd.format(runtime=self.container_runtime,
                           name=self.sos_container_name,
                           image=self.container_image)

    def set_cleanup_cmd(self):
        # remove the container created by create_sos_container()
        return 'podman rm --force %s' % self.sos_container_name
class RedHatAtomicPolicy(RHELPolicy):
    """Policy for Red Hat Atomic Host systems.

    Collection runs sos inside a privileged support-tools container
    (docker) with the host filesystem mounted at /host.
    """
    distro = "Red Hat Atomic Host"
    msg = _("""\
This command will collect diagnostic and configuration \
information from this %(distro)s system.

An archive containing the collected information will be \
generated in %(tmpdir)s and may be provided to a %(vendor)s \
support representative.
""" + disclaimer_text + "%(vendor_text)s\n")

    # Fixed: was misspelled 'containerzed', which left the inherited
    # attribute unset and broke containerized collection detection.
    containerized = True
    container_runtime = 'docker'
    container_image = 'registry.access.redhat.com/rhel7/support-tools'
    # Paths reported by plugins have this prefix stripped, since the host
    # filesystem is mounted at /host inside the collection container.
    sos_path_strip = '/host'
    container_version_command = 'rpm -q sos'

    def __init__(self, sysroot=None, init=None, probe_runtime=True,
                 remote_exec=None):
        super(RedHatAtomicPolicy, self).__init__(sysroot=sysroot, init=init,
                                                 probe_runtime=probe_runtime,
                                                 remote_exec=remote_exec)
        self.register_presets(atomic_presets)

    @classmethod
    def check(cls, remote=''):
        """Test if the (local or remote) host is Red Hat Atomic Host.

        :param remote: contents of a remote host's os-release file, when
                       checking a host other than the local one
        :returns: ``True`` if the host is Atomic Host, else ``False``
        """
        if remote:
            return cls.distro in remote

        atomic = False
        if ENV_HOST_SYSROOT not in os.environ:
            return atomic
        host_release = os.environ[ENV_HOST_SYSROOT] + cls._redhat_release
        if not os.path.exists(host_release):
            return False
        try:
            # use a context manager so the release file is always closed
            # (the previous revision leaked the file handle)
            with open(host_release, "r") as relfile:
                for line in relfile.read().splitlines():
                    atomic |= ATOMIC_RELEASE_STR in line
        except IOError:
            # unreadable release file: not Atomic Host
            pass
        return atomic

    def probe_preset(self):
        if self.pkg_by_name('atomic-openshift'):
            return self.find_preset(RHOCP)
        return self.find_preset(ATOMIC)

    def create_sos_container(self):
        """Return the full command used to start the privileged
        support-tools container that sos runs in on Atomic Host.
        """
        _cmd = ("{runtime} run -di --name {name} --privileged --ipc=host"
                " --net=host --pid=host -e HOST=/host -e NAME={name} -e "
                "IMAGE={image} -v /run:/run -v /var/log:/var/log -v "
                "/etc/machine-id:/etc/machine-id -v "
                "/etc/localtime:/etc/localtime -v /:/host {image}")
        return _cmd.format(runtime=self.container_runtime,
                           name=self.sos_container_name,
                           image=self.container_image)

    def set_cleanup_cmd(self):
        # Remove the container created by create_sos_container(). The
        # previous revision hardcoded 'sos-collector-tmp', which fails to
        # clean up if the container name is ever configured differently.
        return 'docker rm --force %s' % self.sos_container_name
class RHELPolicy(RedHatPolicy):
    # RHEL policy with Red Hat Customer Portal / anonymous-dropbox upload
    # support.
    distro = RHEL_RELEASE_STR
    vendor = "Red Hat"
    vendor_url = "https://access.redhat.com/support/"
    msg = _("""\
This command will collect diagnostic and configuration \
information from this %(distro)s system and installed \
applications.

An archive containing the collected information will be \
generated in %(tmpdir)s and may be provided to a %(vendor)s \
support representative.
""" + disclaimer_text + "%(vendor_text)s\n")
    _upload_url = RH_FTP_HOST
    # NOTE(review): '******' looks like a redacted placeholder for the
    # anonymous dropbox user name - confirm against the original source.
    _upload_user = '******'
    _upload_directory = '/incoming'

    def __init__(self, sysroot=None, init=None, probe_runtime=True,
                 remote_exec=None):
        super(RHELPolicy, self).__init__(sysroot=sysroot, init=init,
                                         probe_runtime=probe_runtime,
                                         remote_exec=remote_exec)
        # Install the RHEL-family preset profiles (satellite, rhosp, ...).
        self.register_presets(rhel_presets)

    @classmethod
    def check(cls, remote=''):
        """Test to see if the running host is a RHEL installation.

        Checks for the presence of the "Red Hat Enterprise Linux"
        release string at the beginning of the NAME field in the
        `/etc/os-release` file and returns ``True`` if it is found,
        and ``False`` otherwise.

        :returns: ``True`` if the host is running RHEL or ``False``
                  otherwise.
        """
        if remote:
            return cls.distro in remote
        if not os.path.exists(OS_RELEASE):
            return False
        with open(OS_RELEASE, "r") as f:
            for line in f:
                if line.startswith("NAME"):
                    # NAME="Red Hat Enterprise Linux ..." -> strip quotes
                    (name, value) = line.split("=")
                    value = value.strip("\"'")
                    if value.startswith(cls.distro):
                        return True
        return False

    def prompt_for_upload_user(self):
        """Ask for a Customer Portal username, unless one was already
        provided or a non-default upload URL is in use.
        """
        if self.commons['cmdlineopts'].upload_user:
            return
        # Not using the default, so don't call this prompt for RHCP
        if self.commons['cmdlineopts'].upload_url:
            super(RHELPolicy, self).prompt_for_upload_user()
            return
        if self.case_id and not self.get_upload_user():
            self.upload_user = input(
                _("Enter your Red Hat Customer Portal username (empty to use "
                  "public dropbox): ")
            )

    def get_upload_url(self):
        """Return the upload destination: an explicit --upload-url, the
        Customer Portal case-attachment API when a case id and full
        credentials are available, or the anonymous FTP dropbox.
        """
        if self.commons['cmdlineopts'].upload_url:
            return self.commons['cmdlineopts'].upload_url
        if (not self.case_id or not self.get_upload_user() or
                not self.get_upload_password()):
            # Cannot use the RHCP. Use anonymous dropbox
            self.upload_user = self._upload_user
            self.upload_directory = self._upload_directory
            self.upload_password = None
            return RH_FTP_HOST
        else:
            rh_case_api = "/hydra/rest/cases/%s/attachments"
            return RH_API_HOST + rh_case_api % self.case_id

    def _get_upload_headers(self):
        # Extra HTTP headers required by the Customer Portal API only.
        if self.get_upload_url().startswith(RH_API_HOST):
            return {'isPrivate': 'false', 'cache-control': 'no-cache'}
        return {}

    def get_upload_url_string(self):
        """Return a human-readable name for the upload destination."""
        if self.get_upload_url().startswith(RH_API_HOST):
            return "Red Hat Customer Portal"
        return self.upload_url or RH_FTP_HOST

    def get_upload_user(self):
        # if this is anything other than dropbox, annonymous won't work
        if self.upload_url != RH_FTP_HOST:
            return os.getenv('SOSUPLOADUSER', None) or self.upload_user
        return self._upload_user

    def dist_version(self):
        """Return the major RHEL version as an ``int``, or ``False`` when
        it cannot be determined.
        """
        try:
            rr = self.package_manager.all_pkgs_by_name_regex("redhat-release*")
            # NOTE(review): reads self.pkgs, not self.package_manager -
            # presumably populated by the base policy; confirm it exists.
            pkgname = self.pkgs[rr[0]]["version"]
            if pkgname[0] == "4":
                return 4
            elif pkgname[0] in ["5Server", "5Client"]:
                return 5
            elif pkgname[0] == "6":
                return 6
            elif pkgname[0] == "7":
                return 7
            elif pkgname[0] == "8":
                return 8
        except Exception:
            # any lookup failure means the version is unknown
            pass
        return False

    def probe_preset(self):
        # Emergency or rescue mode?
        for target in ["rescue", "emergency"]:
            if self.init_system.is_running("%s.target" % target):
                return self.find_preset(CB)
        # Package based checks
        if self.pkg_by_name("satellite-common") is not None:
            return self.find_preset(RH_SATELLITE)
        if self.pkg_by_name("rhosp-release") is not None:
            return self.find_preset(RHOSP)
        if self.pkg_by_name("cfme") is not None:
            return self.find_preset(RH_CFME)
        if self.pkg_by_name("ovirt-engine") is not None or \
                self.pkg_by_name("vdsm") is not None:
            return self.find_preset(RHV)
        # Vanilla RHEL is default
        return self.find_preset(RHEL)
def packageResults(self, archive_filename): self._print(_("Creating compressed archive..."))
def print_header(self): self.ui_log.info("\n%s\n" % _("sosreport (version %s)" % (__version__, )))
class Policy(object):
    """Base class describing a supported platform.

    Concrete policies override the class attributes below and the check()
    classifier; the rest of sos interacts with the host through this
    interface (package queries, archive naming, result display).
    """

    # Preamble shown to the user before collection; substituted by get_msg()
    # with the per-distro attributes that follow.
    msg = _("""\
This command will collect system configuration and diagnostic information \
from this %(distro)s system. An archive containing the collected information \
will be generated in %(tmpdir)s.

For more information on %(vendor)s visit:

  %(vendor_url)s

The generated archive may contain data considered sensitive and its content \
should be reviewed by the originating organization before being passed to \
any third party.

No changes will be made to system configuration.
%(vendor_text)s
""")

    distro = "Unknown"
    vendor = "Unknown"
    vendor_url = "http://www.example.com/"
    vendor_text = ""
    PATH = ""

    def __init__(self):
        """Subclasses that choose to override this initializer should call
        super() to ensure that they get the required platform bits attached.
        super(SubClass, self).__init__(). Policies that require runtime
        tests to construct PATH must call self.set_exec_path() after
        modifying PATH in their own initializer."""
        self._parse_uname()
        self.report_name = self.hostname
        self.ticket_number = None
        self.package_manager = PackageManager()
        self._valid_subclasses = []
        self.set_exec_path()

    def get_valid_subclasses(self):
        # IndependentPlugin is always valid regardless of platform
        return [IndependentPlugin] + self._valid_subclasses

    def set_valid_subclasses(self, subclasses):
        self._valid_subclasses = subclasses

    def del_valid_subclasses(self):
        del self._valid_subclasses

    valid_subclasses = property(get_valid_subclasses,
                                set_valid_subclasses,
                                del_valid_subclasses,
                                "list of subclasses that this policy can "
                                "process")

    def check(self):
        """
        This function is responsible for determining if the underlying
        system is supported by this policy.
        """
        return False

    def get_preferred_archive(self):
        """
        Return the class object of the prefered archive format for this
        platform
        """
        from sos.archive import TarFileArchive
        return TarFileArchive

    def get_archive_name(self):
        """
        This function should return the filename of the archive without
        the extension.
        """
        # NOTE(review): this mutates report_name, so a second call with a
        # ticket number set would append the ticket again — presumably the
        # caller invokes it only once; confirm.
        if self.ticket_number:
            self.report_name += "." + self.ticket_number
        return "sosreport-%s-%s" % (self.report_name,
                                    time.strftime("%Y%m%d%H%M%S"))

    def get_tmp_dir(self, opt_tmp_dir):
        # fall back to the system default temporary directory
        if not opt_tmp_dir:
            return tempfile.gettempdir()
        return opt_tmp_dir

    def match_plugin(self, plugin_classes):
        # Pick the most specific tagging class for this platform.
        if len(plugin_classes) > 1:
            for p in plugin_classes:
                # Give preference to the first listed tagging class
                # so that e.g. UbuntuPlugin is chosen over DebianPlugin
                # on an Ubuntu installation.
                if issubclass(p, self.valid_subclasses[0]):
                    return p
        return plugin_classes[0]

    def validate_plugin(self, plugin_class):
        """
        Verifies that the plugin_class should execute under this policy
        """
        valid_subclasses = [IndependentPlugin] + self.valid_subclasses
        return any(issubclass(plugin_class, class_)
                   for class_ in valid_subclasses)

    def pre_work(self):
        """
        This function is called prior to collection.
        """
        pass

    def post_work(self):
        """
        This function is called after the sosreport has been generated.
        """
        pass

    def pkg_by_name(self, pkg):
        # delegate to the platform package manager
        return self.package_manager.pkg_by_name(pkg)

    def _parse_uname(self):
        # Cache the platform identity fields used throughout the policy.
        (system, node, release,
         version, machine, processor) = platform.uname()
        self.system = system
        self.hostname = node
        self.release = release
        # uname version string looks like "#1 SMP ..." on SMP kernels
        self.smp = version.split()[1] == "SMP"
        self.machine = machine

    def set_commons(self, commons):
        self.commons = commons

    def _set_PATH(self, path):
        environ['PATH'] = path

    def set_exec_path(self):
        self._set_PATH(self.PATH)

    def is_root(self):
        """This method should return true if the user calling the script
        is considered to be a superuser"""
        return (os.getuid() == 0)

    def _create_checksum(self, final_filename=None):
        # Return the hex digest of the archive contents, or False when no
        # filename was supplied.
        if not final_filename:
            return False

        archive_fp = open(final_filename, 'rb')
        digest = hashlib.new(get_hash_name())
        digest.update(archive_fp.read())
        archive_fp.close()
        return digest.hexdigest()

    def get_preferred_hash_algorithm(self):
        """Returns the string name of the hashlib-supported checksum
        algorithm to use"""
        return "md5"

    def display_results(self, final_filename=None, build=False):
        """Report the archive (or build tree) location and checksum.

        When `build` is False a checksum side-file is also written next to
        the archive.
        """
        # make sure a report exists
        if not final_filename:
            return False

        self._print()

        if not build:
            # store checksum into file
            fp = open(final_filename + "." + get_hash_name(), "w")
            checksum = self._create_checksum(final_filename)
            if checksum:
                fp.write(checksum + "\n")
            fp.close()

            self._print(_("Your sosreport has been generated and saved "
                          "in:\n  %s") % final_filename)
        else:
            checksum = None
            self._print(_("sosreport build tree is located at : %s" %
                          final_filename))

        self._print()
        if checksum:
            self._print(_("The checksum is: ") + checksum)
            self._print()
        self._print(_("Please send this file to your support "
                      "representative."))
        self._print()

    def _print(self, msg=None):
        """A wrapper around print that only prints if we are not running in
        quiet mode"""
        if not self.commons['cmdlineopts'].quiet:
            if msg:
                print_(msg)
            else:
                print_()

    def get_msg(self):
        """This method is used to prepare the preamble text to display to
        the user in non-batch mode. If your policy sets self.distro that
        text will be substituted accordingly. You can also override this
        method to do something more complicated."""
        width = 72
        _msg = self.msg % {'distro': self.distro, 'vendor': self.vendor,
                           'vendor_url': self.vendor_url,
                           'vendor_text': self.vendor_text,
                           'tmpdir': self.commons['tmpdir']}
        _fmt = ""
        for line in _msg.splitlines():
            _fmt = _fmt + fill(line, width, replace_whitespace=False) + '\n'
        return _fmt
def verify_plugins(self): if not self.loaded_plugins: self.soslog.error(_("no valid plugins were enabled")) return False return True
class Policy(object): msg = _("""This utility will collect some detailed information about the hardware and setup of your %(distro)s system. The information is collected and an archive is packaged under /tmp, which you can send to a support representative. %(distro)s will use this information for diagnostic purposes ONLY and it will be considered confidential information. This process may take a while to complete. No changes will be made to your system. """) distro = "" def __init__(self): """Subclasses that choose to override this initializer should call super() to ensure that they get the required platform bits attached. super(SubClass, self).__init__()""" self._parse_uname() self.reportName = self.hostname self.ticketNumber = None self.package_manager = PackageManager() def check(self): """ This function is responsible for determining if the underlying system is supported by this policy. """ return False def preferedArchive(self): """ Return the class object of the prefered archive format for this platform """ from sos.utilities import TarFileArchive return TarFileArchive def getArchiveName(self): """ This function should return the filename of the archive without the extension. """ if self.ticketNumber: self.reportName += "." + self.ticketNumber return "sosreport-%s-%s" % (self.reportName, time.strftime("%Y%m%d%H%M%S")) def validatePlugin(self, plugin_class): """ Verifies that the plugin_class should execute under this policy """ return issubclass(plugin_class, IndependentPlugin) def preWork(self): """ This function is called prior to collection. """ pass def packageResults(self, package_name): """ This function is called prior to packaging. """ pass def postWork(self): """ This function is called after the sosreport has been generated. 
""" pass def pkgByName(self, pkg): return None def _parse_uname(self): (system, node, release, version, machine, processor) = platform.uname() self.system = system self.hostname = node self.release = release self.smp = version.split()[1] == "SMP" self.machine = machine def setCommons(self, commons): self.commons = commons def is_root(self): """This method should return true if the user calling the script is considered to be a superuser""" return (os.getuid() == 0) def _create_checksum(self, final_filename=None): if not final_filename: return False archive_fp = open(final_filename, 'r') digest = hashlib.new(get_hash_name()) digest.update(archive_fp.read()) archive_fp.close() return digest.hexdigest() def getPreferredHashAlgorithm(self): """Returns the string name of the hashlib-supported checksum algorithm to use""" return "md5" def displayResults(self, final_filename=None): # make sure a report exists if not final_filename: return False # store checksum into file fp = open(final_filename + "." 
+ get_hash_name(), "w") checksum = self._create_checksum(final_filename) if checksum: fp.write(checksum + "\n") fp.close() self._print() self._print( _("Your sosreport has been generated and saved in:\n %s") % final_filename) self._print() if checksum: self._print(_("The checksum is: ") + checksum) self._print() self._print(_("Please send this file to your support representative.")) self._print() def uploadResults(self, final_filename): # make sure a report exists if not final_filename: return False self._print() # make sure it's readable try: fp = open(final_filename, "r") except: return False # read ftp URL from configuration if self.commons['cmdlineopts'].upload: upload_url = self.commons['cmdlineopts'].upload else: try: upload_url = self.commons['config'].get( "general", "ftp_upload_url") except: self._print(_("No URL defined in config file.")) return from urlparse import urlparse url = urlparse(upload_url) if url[0] != "ftp": self._print(_("Cannot upload to specified URL.")) return # extract username and password from URL, if present if url[1].find("@") > 0: username, host = url[1].split("@", 1) if username.find(":") > 0: username, passwd = username.split(":", 1) else: passwd = None else: username, passwd, host = None, None, url[1] # extract port, if present if host.find(":") > 0: host, port = host.split(":", 1) port = int(port) else: port = 21 path = url[2] try: from ftplib import FTP upload_name = os.path.basename(final_filename) ftp = FTP() ftp.connect(host, port) if username and passwd: ftp.login(username, passwd) else: ftp.login() ftp.cwd(path) ftp.set_pasv(True) ftp.storbinary('STOR %s' % upload_name, fp) ftp.quit() except Exception, e: self._print( _("There was a problem uploading your report to Red Hat support. " + str(e))) else:
class RHELPolicy(RedHatPolicy):
    """Policy for Red Hat Enterprise Linux hosts.

    Extends RedHatPolicy with RHEL presets and upload handling that can
    target either the Customer Portal case attachment API or the Red Hat
    secure FTP dropbox, including automatic anonymous-SFTP fallback.
    """
    distro = RHEL_RELEASE_STR
    vendor = "Red Hat"
    msg = _("""\
This command will collect diagnostic and configuration \
information from this %(distro)s system and installed \
applications.

An archive containing the collected information will be \
generated in %(tmpdir)s and may be provided to a %(vendor)s \
support representative.
""" + disclaimer_text + "%(vendor_text)s\n")
    _upload_url = RH_SFTP_HOST
    _upload_method = 'post'

    def __init__(self, sysroot=None, init=None, probe_runtime=True,
                 remote_exec=None):
        super(RHELPolicy, self).__init__(sysroot=sysroot, init=init,
                                         probe_runtime=probe_runtime,
                                         remote_exec=remote_exec)
        self.register_presets(RHEL_PRESETS)

    @classmethod
    def check(cls, remote=''):
        """Test to see if the running host is a RHEL installation.

        Checks for the presence of the "Red Hat Enterprise Linux"
        release string at the beginning of the NAME field in the
        `/etc/os-release` file and returns ``True`` if it is found,
        and ``False`` otherwise.

        :returns: ``True`` if the host is running RHEL or ``False``
                  otherwise.
        """
        if remote:
            return cls.distro in remote
        if not os.path.exists(OS_RELEASE):
            return False
        with open(OS_RELEASE, "r") as f:
            for line in f:
                if line.startswith("NAME"):
                    (name, value) = line.split("=")
                    # strip surrounding quotes from the os-release value
                    value = value.strip("\"'")
                    if value.startswith(cls.distro):
                        return True
        return False

    def prompt_for_upload_user(self):
        """Prompt for a Customer Portal username unless one was supplied."""
        if self.commons['cmdlineopts'].upload_user:
            return
        # Not using the default, so don't call this prompt for RHCP
        if self.commons['cmdlineopts'].upload_url:
            super(RHELPolicy, self).prompt_for_upload_user()
            return
        if self.case_id and not self.get_upload_user():
            self.upload_user = input(
                _("Enter your Red Hat Customer Portal username for uploading ["
                  "empty for anonymous SFTP]: "))

    def get_upload_url(self):
        """Return the upload destination: an explicit URL if set, the RH
        SFTP host for sftp uploads, or the case attachment API otherwise."""
        if self.upload_url:
            return self.upload_url
        elif self.commons['cmdlineopts'].upload_url:
            return self.commons['cmdlineopts'].upload_url
        elif self.commons['cmdlineopts'].upload_protocol == 'sftp':
            return RH_SFTP_HOST
        else:
            rh_case_api = "/support/v1/cases/%s/attachments"
            return RH_API_HOST + rh_case_api % self.case_id

    def _get_upload_headers(self):
        # headers required by the case attachment API; SFTP needs none
        if self.get_upload_url().startswith(RH_API_HOST):
            return {'isPrivate': 'false', 'cache-control': 'no-cache'}
        return {}

    def get_upload_url_string(self):
        """Human-readable name of the upload target for user messages."""
        if self.get_upload_url().startswith(RH_API_HOST):
            return "Red Hat Customer Portal"
        elif self.get_upload_url().startswith(RH_SFTP_HOST):
            return "Red Hat Secure FTP"
        return self.upload_url

    def _get_sftp_upload_name(self):
        """The RH SFTP server will only automatically connect file uploads
        to cases if the filename _starts_ with the case number
        """
        fname = self.upload_archive_name.split('/')[-1]
        if self.case_id:
            return "%s_%s" % (self.case_id, fname)
        return fname

    def upload_sftp(self):
        """Override the base upload_sftp to allow for setting an on-demand
        generated anonymous login for the RH SFTP server if a username and
        password are not given
        """
        if RH_SFTP_HOST.split('//')[1] not in self.get_upload_url():
            return super(RHELPolicy, self).upload_sftp()

        if not REQUESTS_LOADED:
            raise Exception("python3-requests is not installed and is required"
                            " for obtaining SFTP auth token.")
        _token = None
        _user = None
        url = RH_API_HOST + '/support/v2/sftp/token'
        # we have a username and password, but we need to reset the password
        # to be the token returned from the auth endpoint
        if self.get_upload_user() and self.get_upload_password():
            auth = self.get_upload_https_auth()
            ret = requests.post(url, auth=auth, timeout=10)
            if ret.status_code == 200:
                # credentials are valid
                _user = self.get_upload_user()
                _token = json.loads(ret.text)['token']
            else:
                print("Unable to retrieve Red Hat auth token using provided "
                      "credentials. Will try anonymous.")
        # we either do not have a username or password/token, or both
        if not _token:
            adata = {"isAnonymous": True}
            anon = requests.post(url, data=json.dumps(adata), timeout=10)
            if anon.status_code == 200:
                resp = json.loads(anon.text)
                _user = resp['username']
                _token = resp['token']
                # BUGFIX: trailing space added after '%s' — the adjacent
                # literals are concatenated, and the original printed
                # "User 'x'used for anonymous upload..."
                print(
                    "User '%s' "  # lgtm [py/clear-text-logging-sensitive-data]
                    "used for anonymous upload. Please inform your support "
                    "engineer so they may retrieve the data." % _user
                )
        if _user and _token:
            return super(RHELPolicy, self).upload_sftp(user=_user,
                                                       password=_token)
        raise Exception("Could not retrieve valid or anonymous credentials")

    def upload_archive(self, archive):
        """Override the base upload_archive to provide for automatic failover
        from RHCP failures to the public RH dropbox
        """
        try:
            if not self.get_upload_user() or not self.get_upload_password():
                self.upload_url = RH_SFTP_HOST
            uploaded = super(RHELPolicy, self).upload_archive(archive)
        except Exception:
            uploaded = False
            # only fall back to SFTP when the failed target was the RHCP API
            if not self.upload_url.startswith(RH_API_HOST):
                raise
            else:
                print("Upload to Red Hat Customer Portal failed. Trying %s"
                      % RH_SFTP_HOST)
                self.upload_url = RH_SFTP_HOST
                uploaded = super(RHELPolicy, self).upload_archive(archive)
        return uploaded

    def dist_version(self):
        """Return the major RHEL release as an int, or False if unknown."""
        try:
            rr = self.package_manager.all_pkgs_by_name_regex("redhat-release*")
            # NOTE(review): version data is read via self.pkgs — presumably a
            # package map exposed by the policy/package manager; confirm.
            pkgname = self.pkgs[rr[0]]["version"]
            if pkgname[0] == "4":
                return 4
            elif pkgname[0] in ["5Server", "5Client"]:
                return 5
            elif pkgname[0] == "6":
                return 6
            elif pkgname[0] == "7":
                return 7
            elif pkgname[0] == "8":
                return 8
        except Exception:
            # any lookup failure simply means we cannot tell
            pass
        return False

    def probe_preset(self):
        """Choose a collection preset based on what is installed/running."""
        # Emergency or rescue mode?
        for target in ["rescue", "emergency"]:
            if self.init_system.is_running("%s.target" % target):
                return self.find_preset(CB)
        # Package based checks
        if self.pkg_by_name("satellite-common") is not None:
            return self.find_preset(RH_SATELLITE)
        if self.pkg_by_name("rhosp-release") is not None:
            return self.find_preset(RHOSP)
        if self.pkg_by_name("cfme") is not None:
            return self.find_preset(RH_CFME)
        if self.pkg_by_name("ovirt-engine") is not None or \
                self.pkg_by_name("vdsm") is not None:
            return self.find_preset(RHV)
        # Vanilla RHEL is default
        return self.find_preset(RHEL)
class Policy(object): msg = _("""\ This command will collect system configuration and diagnostic information \ from this %(distro)s system. An archive containing the collected information \ will be generated in %(tmpdir)s. For more information on %(vendor)s visit: %(vendor_url)s The generated archive may contain data considered sensitive and its content \ should be reviewed by the originating organization before being passed to \ any third party. No changes will be made to system configuration. %(vendor_text)s """) distro = "Unknown" vendor = "Unknown" vendor_url = "http://www.example.com/" vendor_text = "" PATH = "" def __init__(self): """Subclasses that choose to override this initializer should call super() to ensure that they get the required platform bits attached. super(SubClass, self).__init__(). Policies that require runtime tests to construct PATH must call self.set_exec_path() after modifying PATH in their own initializer.""" self._parse_uname() self.report_name = self.hostname self.ticket_number = None self.package_manager = PackageManager() self._valid_subclasses = [] self.set_exec_path() def get_valid_subclasses(self): return [IndependentPlugin] + self._valid_subclasses def set_valid_subclasses(self, subclasses): self._valid_subclasses = subclasses def del_valid_subclasses(self): del self._valid_subclasses valid_subclasses = property( get_valid_subclasses, set_valid_subclasses, del_valid_subclasses, "list of subclasses that this policy can process") def check(self): """ This function is responsible for determining if the underlying system is supported by this policy. """ return False def preferred_archive_name(self): """ Return the class object of the prefered archive format for this platform """ from sos.archive import TarFileArchive return TarFileArchive def get_archive_name(self): """ This function should return the filename of the archive without the extension. """ if self.ticket_number: self.report_name += "." 
+ self.ticket_number return "sosreport-%s-%s" % (self.report_name, time.strftime("%Y%m%d%H%M%S")) def validatePlugin(self, plugin_class): """ Verifies that the plugin_class should execute under this policy """ valid_subclasses = [IndependentPlugin] + self.valid_subclasses return any( issubclass(plugin_class, class_) for class_ in valid_subclasses) def pre_work(self): """ This function is called prior to collection. """ pass def package_results(self, package_name): """ This function is called prior to packaging. """ pass def post_work(self): """ This function is called after the sosreport has been generated. """ pass def pkg_by_name(self, pkg): return self.package_manager.pkg_by_name(pkg) def _parse_uname(self): (system, node, release, version, machine, processor) = platform.uname() self.system = system self.hostname = node self.release = release self.smp = version.split()[1] == "SMP" self.machine = machine def set_commons(self, commons): self.commons = commons def _set_PATH(self, path): environ['PATH'] = path def set_exec_path(self): self._set_PATH(self.PATH) def is_root(self): """This method should return true if the user calling the script is considered to be a superuser""" return (os.getuid() == 0) def _create_checksum(self, final_filename=None): if not final_filename: return False archive_fp = open(final_filename, 'rb') digest = hashlib.new(get_hash_name()) digest.update(archive_fp.read()) archive_fp.close() return digest.hexdigest() def get_preferred_hash_algorithm(self): """Returns the string name of the hashlib-supported checksum algorithm to use""" return "md5" def display_results(self, final_filename=None): # make sure a report exists if not final_filename: return False # store checksum into file fp = open(final_filename + "." 
+ get_hash_name(), "w") checksum = self._create_checksum(final_filename) if checksum: fp.write(checksum + "\n") fp.close() self._print() self._print( _("Your sosreport has been generated and saved in:\n %s") % final_filename) self._print() if checksum: self._print(_("The checksum is: ") + checksum) self._print() self._print(_("Please send this file to your support representative.")) self._print() def upload_results(self, final_filename): # make sure a report exists if not final_filename: return False self._print() # make sure it's readable try: fp = open(final_filename, "r") except: return False # read ftp URL from configuration if self.commons['cmdlineopts'].upload: upload_url = self.commons['cmdlineopts'].upload else: try: upload_url = self.commons['config'].get( "general", "ftp_upload_url") except: self._print(_("No URL defined in config file.")) return from urlparse import urlparse url = urlparse(upload_url) if url[0] != "ftp": self._print(_("Cannot upload to specified URL.")) return # extract username and password from URL, if present if url[1].find("@") > 0: username, host = url[1].split("@", 1) if username.find(":") > 0: username, passwd = username.split(":", 1) else: passwd = None else: username, passwd, host = None, None, url[1] # extract port, if present if host.find(":") > 0: host, port = host.split(":", 1) port = int(port) else: port = 21 path = url[2] try: from ftplib import FTP upload_name = os.path.basename(final_filename) ftp = FTP() ftp.connect(host, port) if username and passwd: ftp.login(username, passwd) else: ftp.login() ftp.cwd(path) ftp.set_pasv(True) ftp.storbinary('STOR %s' % upload_name, fp) ftp.quit() except Exception, e: self._print( _("There was a problem uploading your report to Red Hat support. " + str(e))) else:
def upload_results(self, final_filename): # make sure a report exists if not final_filename: return False self._print() # make sure it's readable try: fp = open(final_filename, "r") except: return False # read ftp URL from configuration if self.commons['cmdlineopts'].upload: upload_url = self.commons['cmdlineopts'].upload else: try: upload_url = self.commons['config'].get( "general", "ftp_upload_url") except: self._print(_("No URL defined in config file.")) return from urlparse import urlparse url = urlparse(upload_url) if url[0] != "ftp": self._print(_("Cannot upload to specified URL.")) return # extract username and password from URL, if present if url[1].find("@") > 0: username, host = url[1].split("@", 1) if username.find(":") > 0: username, passwd = username.split(":", 1) else: passwd = None else: username, passwd, host = None, None, url[1] # extract port, if present if host.find(":") > 0: host, port = host.split(":", 1) port = int(port) else: port = 21 path = url[2] try: from ftplib import FTP upload_name = os.path.basename(final_filename) ftp = FTP() ftp.connect(host, port) if username and passwd: ftp.login(username, passwd) else: ftp.login() ftp.cwd(path) ftp.set_pasv(True) ftp.storbinary('STOR %s' % upload_name, fp) ftp.quit() except Exception, e: self._print( _("There was a problem uploading your report to Red Hat support. " + str(e)))
class Policy(object):
    """Base class describing a supported platform.

    Concrete policies override the class attributes below and check();
    the rest of sos talks to the host through this interface (package
    queries, archive naming, result display).
    """

    # Preamble shown to the user before collection; substituted by get_msg()
    # with the per-distro attributes that follow.
    msg = _("""\
This command will collect system configuration and diagnostic information \
from this %(distro)s system. An archive containing the collected information \
will be generated in %(tmpdir)s.

For more information on %(vendor)s visit:

  %(vendor_url)s

The generated archive may contain data considered sensitive and its content \
should be reviewed by the originating organization before being passed to \
any third party.

No changes will be made to system configuration.
%(vendor_text)s
""")

    distro = "Unknown"
    vendor = "Unknown"
    vendor_url = "http://www.example.com/"
    vendor_text = ""
    PATH = ""
    # prefix for Software Collections (SCL) environments, if any
    default_scl_prefix = ""
    # archive naming scheme; see get_archive_name() for the options
    name_pattern = 'legacy'
    _in_container = False
    _host_sysroot = '/'

    def __init__(self, sysroot=None):
        """Subclasses that choose to override this initializer should call
        super() to ensure that they get the required platform bits attached.
        super(SubClass, self).__init__(). Policies that require runtime
        tests to construct PATH must call self.set_exec_path() after
        modifying PATH in their own initializer."""
        self._parse_uname()
        self.case_id = None
        self.package_manager = PackageManager()
        self._valid_subclasses = []
        self.set_exec_path()
        self._host_sysroot = sysroot

    def get_valid_subclasses(self):
        # IndependentPlugin is always valid regardless of platform
        return [IndependentPlugin] + self._valid_subclasses

    def set_valid_subclasses(self, subclasses):
        self._valid_subclasses = subclasses

    def del_valid_subclasses(self):
        del self._valid_subclasses

    valid_subclasses = property(get_valid_subclasses,
                                set_valid_subclasses,
                                del_valid_subclasses,
                                "list of subclasses that this policy can "
                                "process")

    def check(self):
        """
        This function is responsible for determining if the underlying
        system is supported by this policy.
        """
        return False

    def in_container(self):
        """
        Returns True if sos is running inside a container environment.
        """
        return self._in_container

    def host_sysroot(self):
        return self._host_sysroot

    def dist_version(self):
        """
        Return the OS version
        """
        pass

    def get_preferred_archive(self):
        """
        Return the class object of the prefered archive format for this
        platform
        """
        from sos.archive import TarFileArchive
        return TarFileArchive

    def get_archive_name(self):
        """
        This function should return the filename of the archive without the
        extension.

        This uses the policy's name_pattern attribute to determine the name.
        There are two pre-defined naming patterns - 'legacy' and 'friendly'
        that give names like the following:

        legacy - 'sosreport-tux.123456-20171224185433'
        friendly - 'sosreport-tux-mylabel-123456-2017-12-24-ezcfcop.tar.xz'

        A custom name_pattern can be used by a policy provided that it
        defines name_pattern using a format() style string substitution.

        Usable substitutions are:

            name  - the short hostname of the system
            label - the label given by --label
            case  - the case id given by --case-id or --ticker-number
            rand  - a random string of 7 alpha characters

        Note that if a datestamp is needed, the substring should be set
        in the name_pattern in the format accepted by strftime().
        """
        name = self.get_local_name().split('.')[0]
        case = self.case_id
        label = self.commons['cmdlineopts'].label
        # NOTE(review): string.lowercase exists only on Python 2; on
        # Python 3 this would need string.ascii_lowercase.
        rand = ''.join(random.choice(string.lowercase) for x in range(7))

        if self.name_pattern == 'legacy':
            nstr = "sosreport-{name}{case}{date}"
            case = '.' + case if case else ''
            date = '-%Y%m%d%H%M%S'
        elif self.name_pattern == 'friendly':
            nstr = "sosreport-{name}{label}{case}{date}-{rand}"
            case = '-' + case if case else ''
            label = '-' + label if label else ''
            date = '-%Y-%m-%d'
        else:
            nstr = self.name_pattern

        nstr = nstr.format(
            name=name,
            label=label,
            case=case,
            date=date,
            rand=rand
        )
        # expand any strftime directives embedded in the pattern
        return time.strftime(nstr)

    def get_tmp_dir(self, opt_tmp_dir):
        # fall back to the system default temporary directory
        if not opt_tmp_dir:
            return tempfile.gettempdir()
        return opt_tmp_dir

    def get_default_scl_prefix(self):
        return self.default_scl_prefix

    def match_plugin(self, plugin_classes):
        # Pick the most specific tagging class for this platform.
        if len(plugin_classes) > 1:
            for p in plugin_classes:
                # Give preference to the first listed tagging class
                # so that e.g. UbuntuPlugin is chosen over DebianPlugin
                # on an Ubuntu installation.
                if issubclass(p, self.valid_subclasses[0]):
                    return p
        return plugin_classes[0]

    def validate_plugin(self, plugin_class, experimental=False):
        """
        Verifies that the plugin_class should execute under this policy
        """
        valid_subclasses = [IndependentPlugin] + self.valid_subclasses
        # experimental plugins are only valid when explicitly requested
        if experimental:
            valid_subclasses += [ExperimentalPlugin]
        return any(issubclass(plugin_class, class_)
                   for class_ in valid_subclasses)

    def pre_work(self):
        """
        This function is called prior to collection.
        """
        pass

    def post_work(self):
        """
        This function is called after the sosreport has been generated.
        """
        pass

    def pkg_by_name(self, pkg):
        # delegate to the platform package manager
        return self.package_manager.pkg_by_name(pkg)

    def _parse_uname(self):
        # Cache the platform identity fields used throughout the policy.
        (system, node, release,
         version, machine, processor) = platform.uname()
        self.system = system
        self.hostname = node
        self.release = release
        # uname version string looks like "#1 SMP ..." on SMP kernels
        self.smp = version.split()[1] == "SMP"
        self.machine = machine

    def set_commons(self, commons):
        self.commons = commons

    def _set_PATH(self, path):
        environ['PATH'] = path

    def set_exec_path(self):
        self._set_PATH(self.PATH)

    def is_root(self):
        """This method should return true if the user calling the script is
        considered to be a superuser"""
        return (os.getuid() == 0)

    def get_preferred_hash_name(self):
        """Returns the string name of the hashlib-supported checksum
        algorithm to use"""
        return "md5"

    def display_results(self, archive, directory, checksum):
        # Display results is called from the tail of SoSReport.final_work()
        #
        # Logging is already shutdown and all terminal output must use the
        # print() call.

        # make sure a report exists
        if not archive and not directory:
            return False

        self._print()

        if archive:
            self._print(_("Your sosreport has been generated and saved "
                          "in:\n  %s") % archive)
        else:
            self._print(_("sosreport build tree is located at : %s" %
                          directory))

        self._print()
        if checksum:
            self._print(_("The checksum is: ") + checksum)
            self._print()
        self._print(_("Please send this file to your support "
                      "representative."))
        self._print()

    def _print(self, msg=None):
        """A wrapper around print that only prints if we are not running in
        quiet mode"""
        if not self.commons['cmdlineopts'].quiet:
            if msg:
                print_(msg)
            else:
                print_()

    def get_msg(self):
        """This method is used to prepare the preamble text to display to
        the user in non-batch mode. If your policy sets self.distro that
        text will be substituted accordingly. You can also override this
        method to do something more complicated."""
        width = 72
        _msg = self.msg % {'distro': self.distro, 'vendor': self.vendor,
                           'vendor_url': self.vendor_url,
                           'vendor_text': self.vendor_text,
                           'tmpdir': self.commons['tmpdir']}
        _fmt = ""
        for line in _msg.splitlines():
            _fmt = _fmt + fill(line, width, replace_whitespace=False) + '\n'
        return _fmt
class RHELPolicy(RedHatPolicy): distro = "Red Hat Enterprise Linux" vendor = "Red Hat" vendor_url = "https://access.redhat.com/support/" msg = _("""\ This command will collect diagnostic and configuration \ information from this %(distro)s system and installed \ applications. An archive containing the collected information will be \ generated in %(tmpdir)s and may be provided to a %(vendor)s \ support representative. Any information provided to %(vendor)s will be treated in \ accordance with the published support policies at:\n %(vendor_url)s The generated archive may contain data considered sensitive \ and its content should be reviewed by the originating \ organization before being passed to any third party. No changes will be made to system configuration. %(vendor_text)s """) def __init__(self, sysroot=None): super(RHELPolicy, self).__init__(sysroot=sysroot) @classmethod def check(self): """This method checks to see if we are running on RHEL. It returns True or False.""" return (os.path.isfile(self._redhat_release) and not os.path.isfile('/etc/fedora-release')) def dist_version(self): try: pkg = self.pkg_by_name("redhat-release") or \ self.all_pkgs_by_name_regex("redhat-release-.*")[-1] pkgname = pkg["version"] if pkgname[0] == "4": return 4 elif pkgname[0] in ["5Server", "5Client"]: return 5 elif pkgname[0] == "6": return 6 elif pkgname[0] == "7": return 7 except: pass return False def rhn_username(self): try: # cfg = config.initUp2dateConfig() rhn_username = rpclib.xmlrpclib.loads( up2dateAuth.getSystemId())[0][0]['username'] return rhn_username.encode('utf-8', 'ignore') except: # ignore any exception and return an empty username return "" def get_local_name(self): return self.rhn_username() or self.host_name()
def sosreport(opts):
    """ This is the top-level function that gathers and processes all
    sosreport information """
    parse_options(opts)
    # check debug
    isDebug()

    # Read the optional configuration file; an explicit --config-file
    # wins over the default /etc/sos.conf, and a missing file is not
    # an error.
    config = ConfigParser.ConfigParser()
    if GlobalVars.__cmdLineOpts__.config_file:
        config_file = GlobalVars.__cmdLineOpts__.config_file
    else:
        config_file = '/etc/sos.conf'
    try:
        config.readfp(open(config_file))
    except IOError:
        pass

    GlobalVars.loadedplugins = deque()
    skippedplugins = deque()
    alloptions = deque()

    # perhaps we should automatically locate the policy module??
    GlobalVars.policy = sos.policyredhat.SosPolicy()

    # find the plugins path: scan sys.path for a site-packages dir
    # that contains sos/plugins
    paths = sys.path
    for path in paths:
        if path.strip()[-len("site-packages"):] == "site-packages" \
                and os.path.isdir(path + "/sos/plugins"):
            pluginpath = path + "/sos/plugins"

    # Set up common info and create destinations
    GlobalVars.dstroot = GlobalVars.policy.getDstroot(
        GlobalVars.__cmdLineOpts__.tmp_dir)
    if not GlobalVars.dstroot:
        print _("Could not create temporary directory.")
        doExit()

    cmddir = os.path.join(GlobalVars.dstroot, "sos_commands")
    logdir = os.path.join(GlobalVars.dstroot, "sos_logs")
    rptdir = os.path.join(GlobalVars.dstroot, "sos_reports")
    os.mkdir(cmddir, 0755)
    os.mkdir(logdir, 0755)
    os.mkdir(rptdir, 0755)

    # initialize logging
    soslog = logging.getLogger('sos')
    soslog.setLevel(logging.DEBUG)

    if GlobalVars.__cmdLineOpts__.profiler:
        proflog = logging.getLogger('sosprofile')
        proflog.setLevel(logging.DEBUG)
    else:
        proflog = None

    # limit verbosity to DEBUG
    if GlobalVars.__cmdLineOpts__.verbosity > 3:
        GlobalVars.__cmdLineOpts__.verbosity = 3

    # if stdin is not a tty, disable colors and don't ask questions
    if not sys.stdin.isatty():
        GlobalVars.__cmdLineOpts__.nocolors = True
        GlobalVars.__cmdLineOpts__.batch = True

    # log to a file
    flog = logging.FileHandler(logdir + "/sos.log")
    flog.setFormatter(
        logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
    if GlobalVars.__cmdLineOpts__.verbosity > 0:
        # standard log levels have a step of 10
        flog.setLevel(logging.INFO -
                      (GlobalVars.__cmdLineOpts__.verbosity * 10))
    else:
        flog.setLevel(logging.INFO)
    soslog.addHandler(flog)

    if GlobalVars.__cmdLineOpts__.profiler:
        # setup profile log
        plog = logging.FileHandler(logdir + "/sosprofile.log")
        plog.setFormatter(logging.Formatter('%(message)s'))
        plog.setLevel(logging.DEBUG)
        proflog.addHandler(plog)

    # define a Handler which writes INFO messages or higher to the sys.stderr
    console = logging.StreamHandler(sys.stderr)
    if GlobalVars.__cmdLineOpts__.verbosity > 0:
        # standard log levels have a step of 10
        console.setLevel(logging.WARNING -
                         (GlobalVars.__cmdLineOpts__.verbosity * 10))
    else:
        console.setLevel(logging.WARNING)
    console.setFormatter(logging.Formatter('%(message)s'))
    soslog.addHandler(console)

    xmlrep = XmlReport()

    # set up dict so everyone can share the following
    commons = {
        'dstroot': GlobalVars.dstroot,
        'cmddir': cmddir,
        'logdir': logdir,
        'rptdir': rptdir,
        'soslog': soslog,
        'proflog': proflog,
        'policy': GlobalVars.policy,
        'verbosity': GlobalVars.__cmdLineOpts__.verbosity,
        'xmlreport': xmlrep,
        'cmdlineopts': GlobalVars.__cmdLineOpts__,
        'config': config
    }

    # Make policy aware of the commons
    GlobalVars.policy.setCommons(commons)

    print
    print _("sosreport (version %s)" % (__version__, ))
    print

    # disable plugins that we read from conf files
    conf_disable_plugins_list = deque()
    conf_disable_plugins = None
    if config.has_option("plugins", "disable"):
        conf_disable_plugins = config.get("plugins", "disable").split(',')
        for item in conf_disable_plugins:
            conf_disable_plugins_list.append(item.strip())

    # generate list of available plugins
    plugins = os.listdir(pluginpath)
    plugins.sort()
    plugin_names = deque()

    # validate and load plugins
    for plug in plugins:
        plugbase = plug[:-3]
        if not plug[-3:] == '.py' or plugbase == "__init__":
            continue
        try:
            if GlobalVars.policy.validatePlugin(pluginpath + plug):
                pluginClass = importPlugin("sos.plugins."
                                           + plugbase, plugbase)
            else:
                soslog.warning(
                    _("plugin %s does not validate, skipping") % plug)
                # NOTE(review): pluginClass here is whatever was imported
                # on a previous iteration (unbound on the first one) —
                # confirm this is the intended behaviour.
                skippedplugins.append(
                    (plugbase, pluginClass(plugbase, commons)))
                continue
            # plug-in is valid, let's decide whether run it or not
            plugin_names.append(plugbase)
            if plugbase in GlobalVars.__cmdLineOpts__.noplugins or \
                    plugbase in conf_disable_plugins_list:
                # skipped
                skippedplugins.append(
                    (plugbase, pluginClass(plugbase, commons)))
                continue
            if not pluginClass(plugbase, commons).checkenabled() and \
                    not plugbase in GlobalVars.__cmdLineOpts__.enableplugins and \
                    not plugbase in GlobalVars.__cmdLineOpts__.onlyplugins:
                # inactive
                skippedplugins.append(
                    (plugbase, pluginClass(plugbase, commons)))
                continue
            if not pluginClass(plugbase, commons).defaultenabled() and \
                    not plugbase in GlobalVars.__cmdLineOpts__.enableplugins and \
                    not plugbase in GlobalVars.__cmdLineOpts__.onlyplugins:
                # not loaded by default
                skippedplugins.append(
                    (plugbase, pluginClass(plugbase, commons)))
                continue
            if GlobalVars.__cmdLineOpts__.onlyplugins and \
                    not plugbase in GlobalVars.__cmdLineOpts__.onlyplugins:
                # not specified
                skippedplugins.append(
                    (plugbase, pluginClass(plugbase, commons)))
                continue
            GlobalVars.loadedplugins.append(
                (plugbase, pluginClass(plugbase, commons)))
        except:
            soslog.warning(_("plugin %s does not install, skipping") % plug)
            if GlobalVars.__raisePlugins__:
                raise

    # First, gather and process options
    # using the options specified in the command line (if any)
    if GlobalVars.__cmdLineOpts__.usealloptions:
        # --alloptions: force every boolean plugin option on
        for plugname, plug in GlobalVars.loadedplugins:
            for name, parms in zip(plug.optNames, plug.optParms):
                if type(parms["enabled"]) == bool:
                    parms["enabled"] = True
        del name

    # read plugin tunables from configuration file
    if config.has_section("tunables"):
        if not GlobalVars.__cmdLineOpts__.plugopts:
            GlobalVars.__cmdLineOpts__.plugopts = deque()
        for opt, val in config.items("tunables"):
            if not opt.split('.')[0] in conf_disable_plugins_list:
                GlobalVars.__cmdLineOpts__.plugopts.append(opt + "="
                                                           + val)

    if GlobalVars.__cmdLineOpts__.plugopts:
        # Parse "plugin.option=value" strings into a per-plugin map;
        # a bare "plugin.option" is treated as enabling the option.
        opts = {}
        for opt in GlobalVars.__cmdLineOpts__.plugopts:
            # split up "general.syslogsize=5"
            try:
                opt, val = opt.split("=")
            except:
                val = True
            else:
                if val.lower() in ["off", "disable", "disabled", "false"]:
                    val = False
                else:
                    # try to convert string "val" to int()
                    try:
                        val = int(val)
                    except:
                        pass
            # split up "general.syslogsize"
            try:
                plug, opt = opt.split(".")
            except:
                plug = opt
                opt = True
            try:
                opts[plug]
            except KeyError:
                opts[plug] = deque()
            opts[plug].append((opt, val))
        # hand each parsed option to its plugin
        for plugname, plug in GlobalVars.loadedplugins:
            if plugname in opts:
                for opt, val in opts[plugname]:
                    if not plug.setOption(opt, val):
                        soslog.error('no such option "%s" for plugin ' \
                                     '(%s)' % (opt,plugname))
                        doExit(1)
                del opts[plugname]
        # whatever is left targets a plugin that is not loaded
        for plugname in opts.keys():
            soslog.error('unable to set option for disabled or non-existing ' \
                         'plugin (%s)' % (plugname))
        # Do not want to exit on invalid opts due to a misconfiguration in sos.conf
        # doExit(1)
        del opt, opts, val

    # error if the user references a plugin which does not exist
    unk_plugs = [plugname.split(".")[0] for plugname in \
                 GlobalVars.__cmdLineOpts__.onlyplugins \
                 if not plugname.split(".")[0] in plugin_names]
    unk_plugs += [plugname.split(".")[0] for plugname in \
                  GlobalVars.__cmdLineOpts__.noplugins \
                  if not plugname.split(".")[0] in plugin_names]
    unk_plugs += [plugname.split(".")[0] for plugname in \
                  GlobalVars.__cmdLineOpts__.enableplugins \
                  if not plugname.split(".")[0] in plugin_names]
    if len(unk_plugs):
        for plugname in unk_plugs:
            soslog.error('a non-existing plugin (%s) was specified in the ' \
                         'command line' % (plugname))
        doExit(1)
    del unk_plugs

    # collect every (plugin, option) pair for the --listplugins report
    for plugname, plug in GlobalVars.loadedplugins:
        names, parms = plug.getAllOptions()
        for optname, optparm in zip(names, parms):
            alloptions.append((plug, plugname, optname, optparm))

    # when --listplugins is specified we do a dry-run
    # which tells the user which plugins are going to be enabled
    # and with what options.
    if GlobalVars.__cmdLineOpts__.listPlugins:
        if not len(GlobalVars.loadedplugins) and not len(skippedplugins):
            soslog.error(_("no valid plugins found"))
            doExit(1)

        if len(GlobalVars.loadedplugins):
            print _("The following plugins are currently enabled:")
            print
            for (plugname, plug) in GlobalVars.loadedplugins:
                print " %-25s  %s" % (textcolor(plugname, "lblue"),
                                      plug.get_description())
        else:
            print _("No plugin enabled.")
        print

        if len(skippedplugins):
            print _("The following plugins are currently disabled:")
            print
            for (plugname, plugclass) in skippedplugins:
                print " %-25s  %s" % (textcolor(plugname, "cyan"),
                                      plugclass.get_description())
        print

        if len(alloptions):
            print _("The following plugin options are available:")
            print
            for (plug, plugname, optname, optparm) in alloptions:
                # format and colorize option value based on its type (int or bool)
                if type(optparm["enabled"]) == bool:
                    if optparm["enabled"] == True:
                        tmpopt = textcolor("on", "lred")
                    else:
                        tmpopt = textcolor("off", "red")
                elif type(optparm["enabled"]) == int:
                    if optparm["enabled"] > 0:
                        tmpopt = textcolor(optparm["enabled"], "lred")
                    else:
                        tmpopt = textcolor(optparm["enabled"], "red")
                else:
                    tmpopt = optparm["enabled"]

                print " %-21s %-5s %s" % (plugname + "." + optname,
                                          tmpopt, optparm["desc"])
            del tmpopt
        else:
            print _("No plugin options available.")

        print
        doExit()

    # to go anywhere further than listing the
    # plugins we will need root permissions.
    if os.getuid() != 0:
        print _('sosreport requires root permissions to run.')
        doExit(1)

    # we don't need to keep in memory plugins we are not going to use
    del skippedplugins

    if not len(GlobalVars.loadedplugins):
        soslog.error(_("no valid plugins were enabled"))
        doExit(1)

    msg = _("""This utility will collect some detailed information about the
hardware and setup of your %(distroa)s system.
The information is collected and an archive is packaged under
/tmp, which you can send to a support representative.
%(distrob)s will use this information for diagnostic purposes ONLY
and it will be considered confidential information.

This process may take a while to complete.
No changes will be made to your system.

""" % {'distroa': __distro__, 'distrob': __distro__})

    if GlobalVars.__cmdLineOpts__.batch:
        print msg
    else:
        msg += _("""Press ENTER to continue, or CTRL-C to quit.\n""")
        try:
            # in interactive mode wait for confirmation before collecting
            raw_input(msg)
        except:
            print
            doExit()
    del msg

    if GlobalVars.__cmdLineOpts__.diagnose:
        # Call the diagnose() method for each plugin
        tmpcount = 0
        for plugname, plug in GlobalVars.loadedplugins:
            try:
                plug.diagnose()
            except:
                if GlobalVars.__raisePlugins__:
                    raise
                else:
                    # best effort: record the plugin failure and continue
                    error_log = open(logdir + "/sosreport-plugin-errors.txt", "a")
                    etype, eval, etrace = sys.exc_info()
                    traceback.print_exception(etype, eval, etrace, limit=2,
                                              file=sys.stdout)
                    error_log.write(traceback.format_exc())
                    error_log.close()
            tmpcount += len(plug.diagnose_msgs)
        if tmpcount > 0:
            print _("One or more plugins have detected a problem in your " \
                    "configuration.")
            print _("Please review the following messages:")
            print

            fp = open(rptdir + "/diagnose.txt", "w")
            for plugname, plug in GlobalVars.loadedplugins:
                for tmpcount2 in range(0, len(plug.diagnose_msgs)):
                    if tmpcount2 == 0:
                        soslog.warning(textcolor("%s:" % plugname, "red"))
                    soslog.warning(" * %s" % plug.diagnose_msgs[tmpcount2])
                    fp.write("%s: %s\n" % (plugname,
                                           plug.diagnose_msgs[tmpcount2]))
            fp.close()

            print
            if not GlobalVars.__cmdLineOpts__.batch:
                # give the user a chance to abort after seeing the problems
                try:
                    while True:
                        yorno = raw_input(
                            _("Are you sure you would like to " \
                              "continue (y/n) ? "))
                        if yorno == _("y") or yorno == _("Y"):
                            print
                            break
                        elif yorno == _("n") or yorno == _("N"):
                            doExit(0)
                    del yorno
                except KeyboardInterrupt:
                    print
                    doExit(0)

    GlobalVars.policy.preWork()

    # Call the setup() method for each plugin
    for plugname, plug in GlobalVars.loadedplugins:
        try:
            plug.setup()
        except KeyboardInterrupt:
            raise
        except:
            if GlobalVars.__raisePlugins__:
                raise
            else:
                # best effort: record the plugin failure and continue
                error_log = open(logdir + "/sosreport-plugin-errors.txt", "a")
                etype, eval, etrace = sys.exc_info()
                traceback.print_exception(etype, eval, etrace, limit=2,
                                          file=sys.stdout)
                error_log.write(traceback.format_exc())
                error_log.close()

    print _(" Running plugins. Please wait ...")
    print
    plugruncount = 0
    # NOTE(review): izip() over a single iterable yields 1-tuples that are
    # immediately unpacked via i[0] — this looks equivalent to iterating
    # loadedplugins directly; confirm no other intent.
    for i in izip(GlobalVars.loadedplugins):
        plugruncount += 1
        # one-line progress indicator, overwritten in place via \r
        sys.stdout.write("\r Completed [%d/%d] ... " % (plugruncount,
                         len(GlobalVars.loadedplugins)))
        sys.stdout.flush()
        plugname, plug = i[0]
        try:
            plug.copyStuff()
        except KeyboardInterrupt:
            raise
        except:
            if GlobalVars.__raisePlugins__:
                raise
            else:
                # best effort: record the plugin failure and continue
                error_log = open(logdir + "/sosreport-plugin-errors.txt", "a")
                etype, eval, etrace = sys.exc_info()
                traceback.print_exception(etype, eval, etrace, limit=2,
                                          file=sys.stdout)
                error_log.write(traceback.format_exc())
                error_log.close()
    print

    if GlobalVars.__cmdLineOpts__.report:
        # record each copied file in the XML report (stat may fail for
        # files that vanished since collection; those are skipped)
        for plugname, plug in GlobalVars.loadedplugins:
            for oneFile in plug.copiedFiles:
                try:
                    xmlrep.add_file(oneFile["srcpath"],
                                    os.stat(oneFile["srcpath"]))
                except:
                    pass
        xmlrep.serialize_to_file(rptdir + "/sosreport.xml")

    if GlobalVars.__cmdLineOpts__.analyze:
        # Call the analyze method for each plugin
        for plugname, plug in GlobalVars.loadedplugins:
            try:
                plug.analyze()
            except:
                # catch exceptions in analyze() and keep working
                pass

    if GlobalVars.__cmdLineOpts__.report:
        # Generate the header for the html output file
        rfd = open(rptdir + "/" + "sosreport.html", "w")
        rfd.write("""
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
 "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
<head>
    <link rel="stylesheet" type="text/css" media="screen" href="donot.css" />
    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
    <title>Sos System Report</title>
</head>
<body>
""")

        # Make a pass to gather Alerts and a list of module names
        allAlerts = deque()
        plugNames = deque()
        for plugname, plug in GlobalVars.loadedplugins:
            for alert in plug.alerts:
                allAlerts.append('<a href="#%s">%s</a>: %s' % (plugname,
                                                               plugname,
                                                               alert))
            plugNames.append(plugname)

        # Create a table of links to the module info
        rfd.write("<hr/><h3>Loaded Plugins:</h3>")
        rfd.write("<table><tr>\n")
        rr = 0
        # lay the links out four per table row
        for i in range(len(plugNames)):
            rfd.write('<td><a href="#%s">%s</a></td>\n' % (plugNames[i],
                                                           plugNames[i]))
            rr = divmod(i, 4)[1]
            if (rr == 3):
                rfd.write('</tr>')
        if not (rr == 3):
            rfd.write('</tr>')
        rfd.write('</table>\n')

        rfd.write('<hr/><h3>Alerts:</h3>')
        rfd.write('<ul>')
        for alert in allAlerts:
            rfd.write('<li>%s</li>' % alert)
        rfd.write('</ul>')

        # Call the report method for each plugin
        for plugname, plug in GlobalVars.loadedplugins:
            try:
                html = plug.report()
            except:
                if GlobalVars.__raisePlugins__:
                    raise
            else:
                rfd.write(html)
        rfd.write("</body></html>")
        rfd.close()

    # Call the postproc method for each plugin
    for plugname, plug in GlobalVars.loadedplugins:
        try:
            plug.postproc()
        except:
            if GlobalVars.__raisePlugins__:
                raise

    if GlobalVars.__cmdLineOpts__.build:
        # --build: leave the tree on disk instead of packaging it
        print
        print _(" sosreport build tree is located at : %s" %
                (GlobalVars.dstroot, ))
        print
        return GlobalVars.dstroot

    # package up the results for the support organization
    GlobalVars.policy.packageResults()

    # delete gathered files
    GlobalVars.policy.cleanDstroot()

    # let's encrypt the tar-ball
    #if GlobalVars.__cmdLineOpts__.encrypt:
    #    policy.encryptResults()

    # automated submission will go here
    if not GlobalVars.__cmdLineOpts__.upload:
        GlobalVars.policy.displayResults()
    else:
        GlobalVars.policy.uploadResults()

    # Close all log files and perform any cleanup
    logging.shutdown()