def run(plugin):  # do not edit this line
    """
    Executes plugin

    Runs rhv-log-collector-analyzer-live (JSON mode) on the live system and
    adapts its exit status and output streams to risu conventions.

    :param plugin: plugin dictionary
    :return: returncode, out, err
    """
    rhvlc = risu.which("rhv-log-collector-analyzer-live")

    # rhv-log-collector-analyzer-live --json
    if not rhvlc:
        return (
            risu.RC_SKIPPED,
            "",
            _("rhv-log-collector-analyzer-live support not found"),
        )

    # This plugin only makes sense on a live system: skip in snapshot mode
    # (RISU_LIVE == 0) and in any other unexpected mode.
    if risu.RISU_LIVE != 1:
        return (
            risu.RC_SKIPPED,
            "",
            _("Plugin does not satisfy conditions for running"),
        )

    command = "%s --json" % rhvlc

    # Call exec to run the analyzer
    returncode, out, err = risu.execonshell(filename=command)

    # Do formatting of results and adjust return codes to risu standards
    if returncode == 2:
        returncode = risu.RC_FAILED
    elif returncode == 0:
        returncode = risu.RC_OKAY

    # Convert stdout to stderr for risu handling. A plain assignment cannot
    # raise, so the previous bare try/except around it was dead code and has
    # been removed.
    err = out
    out = ""

    return returncode, out, err
def run(plugin):  # do not edit this line
    """
    Executes plugin

    Compiles the Go source file referenced by the plugin with `go build` and
    runs the resulting binary, returning its outcome.

    :param plugin: plugin dictionary
    :return: returncode, out, err
    """
    gorun = risu.which("go")
    if not gorun:
        return risu.RC_SKIPPED, "", _("Golang support not found")

    filename = plugin["plugin"]

    mypath = os.getcwd()
    path = os.path.dirname(filename)
    # Renamed from 'file' to avoid shadowing the builtin
    basename = os.path.basename(filename)

    # Compiling: the binary path is the source path without its extension
    binary = os.path.splitext(filename)[0]

    os.chdir(path)
    try:
        # Remove any stale binary from a previous run; ignore if absent.
        # Catch OSError specifically instead of a bare except.
        try:
            os.remove(binary)
        except OSError:
            pass

        command = "%s build %s" % (gorun, basename)
        risu.execonshell(filename=command)
    finally:
        # Always go back to our folder, even if compilation raised, so we
        # don't leave the process in an unexpected working directory.
        os.chdir(mypath)

    # Running
    returncode, out, err = risu.execonshell(filename=binary)

    return returncode, out, err
def main():
    """
    Main code stub

    Drives a full magui run: parses arguments, configures locale and logging,
    loads extensions and magui plugins, optionally gathers data from remote
    hosts via Ansible, runs all plugins once over the whole sosreport set,
    then re-runs them per auto-detected group of sosreports.
    """
    options = parse_args()

    # Configure ENV language before anything else
    os.environ["LANG"] = "%s" % options.lang

    # Reinstall language in case it has changed
    trad = gettext.translation("risu", localedir, fallback=True, languages=[options.lang])

    try:
        _ = trad.ugettext  # Python 2 gettext API
    except AttributeError:
        _ = trad.gettext  # Python 3 gettext API

    # Configure logging
    logging.basicConfig(level=options.loglevel)

    if not options.quiet:
        show_logo()

    # Each argument in sosreport is a sosreport
    magplugs, magtriggers = risu.initPymodules(
        extensions=risu.getPymodules(options=options, folders=[PluginsFolder]))

    if options.list_plugins:
        # Listing mode: print plugin names (and descriptions) and exit early
        for plugin in magplugs:
            print("-", plugin.__name__.split(".")[-1])
            if options.description:
                desc = plugin.help()
                if desc:
                    print(risu.indent(text=desc, amount=4))
        return

    # Prefill enabled risu plugins from args
    if not risu.extensions:
        extensions = risu.initPymodules()[0]
    else:
        extensions = risu.extensions

    # Grab the data
    sosreports = options.sosreports

    # If we've provided a hosts file, use ansible to grab the data from them
    if options.hosts:
        ansible = risu.which("ansible-playbook")
        if not ansible:
            LOG.err(_("No ansible-playbook support found, skipping"))
        else:
            LOG.info("Grabbing data from remote hosts with Ansible")
            # Grab data from ansible hosts

            # Disable Ansible retry files creation:
            os.environ["ANSIBLE_RETRY_FILES_ENABLED"] = "0"

            if options.loglevel == "DEBUG":
                # Keep ansible remote files for debug
                os.environ["ANSIBLE_KEEP_REMOTE_FILES"] = "1"

            command = "%s -i %s %s" % (
                ansible,
                options.hosts,
                os.path.join(maguidir, "remote.yml"),
            )

            LOG.debug("Running: %s with 600 seconds timeout" % command)
            risu.execonshell(filename=command, timeout=600)

            # Now check the hosts we got logs from:
            hosts = risu.findplugins(
                folders=glob.glob("/tmp/risu/hostrun/*"),
                executables=False,
                fileextension=".json",
            )
            # Each host's result folder is treated like a local sosreport
            for host in hosts:
                sosreports.append(os.path.dirname(host["plugin"]))

    # Get all data from hosts for all plugins, etc
    if options.output:
        dooutput = options.output
    else:
        dooutput = False

    if len(sosreports) > int(options.max_hosts):
        print("Maximum number of sosreports provided, exiting")
        sys.exit(0)

    risuplugins = []
    # Prefill with all available plugins and the ones we want to filter for
    for extension in extensions:
        risuplugins.extend(extension.listplugins())

    global allplugins
    allplugins = risuplugins

    # By default, flatten plugin list for all extensions
    newplugins = []
    for each in risuplugins:
        newplugins.extend(each)

    risuplugins = newplugins

    def runmaguiandplugs(
        sosreports,
        risuplugins,
        filename=dooutput,
        extranames=None,
        serveruri=False,
        onlysave=False,
        result=None,
        anon=False,
        grouped={},
    ):
        """
        Runs magui and magui plugins

        :param grouped: Grouped results from sosreports to speedup processing (domagui)
        :param anon: anonymize results on execution
        :param serveruri: Server uri to POST the analysis
        :param sosreports: sosreports to process
        :param risuplugins: risuplugins to run
        :param filename: filename to save to
        :param extranames: additional filenames used
        :param onlysave: Bool: Defines if we just want to save results
        :param result: Results to write to disk
        :return: results of execution
        """
        # NOTE(review): 'grouped={}' is a shared mutable default; the
        # per-group calls below deliberately pass an explicit deepcopy to
        # avoid cross-call mutation — confirm no other caller relies on it.
        start_time = time.time()
        if not onlysave and not result:
            # Run with all plugins so that we get all data back
            grouped = domagui(sosreports=sosreports, risuplugins=risuplugins, grouped=grouped)

            # Run Magui plugins
            result = []
            for plugin in magplugs:
                plugstart_time = time.time()
                # Get output from plugin, filtered to its declared triggers
                data = filterresults(
                    data=grouped,
                    triggers=magtriggers[plugin.__name__.split(".")[-1]])
                returncode, out, err = plugin.run(data=data, quiet=options.quiet)
                updates = {"rc": returncode, "out": out, "err": err}

                # Derive category/subcategory from the plugin's path
                # relative to the magui plugins folder
                subcategory = os.path.split(plugin.__file__)[0].replace(
                    os.path.join(maguidir, "plugins", ""), "")
                if subcategory:
                    if len(os.path.normpath(subcategory).split(os.sep)) > 1:
                        category = os.path.normpath(subcategory).split(
                            os.sep)[0]
                    else:
                        category = subcategory
                        subcategory = ""
                else:
                    category = ""

                mydata = {
                    "plugin":
                        plugin.__name__.split(".")[-1],
                    "name":
                        "magui: %s" % os.path.basename(plugin.__name__.split(".")[-1]),
                    # Stable id derived from the plugin path below maguidir
                    "id":
                        hashlib.sha512(
                            plugin.__file__.replace(
                                maguidir, "").encode("UTF-8")).hexdigest(),
                    "description":
                        plugin.help(),
                    "long_name":
                        plugin.help(),
                    "result":
                        updates,
                    "time":
                        time.time() - plugstart_time,
                    "category":
                        category,
                    "subcategory":
                        subcategory,
                }
                result.append(mydata)

        if filename:
            branding = _(" ")
            risu.write_results(
                results=result,
                filename=filename,
                source="magui",
                path=sosreports,
                time=time.time() - start_time,
                branding=branding,
                web=True,
                extranames=extranames,
                serveruri=serveruri,
                anon=anon,
            )

        return result, grouped

    print(_("\nStarting check updates and comparison"))

    metadataplugins = []
    for plugin in risuplugins:
        if plugin["backend"] == "metadata":
            metadataplugins.append(plugin)

    # Prepare metadata execution to find groups
    results, grouped = runmaguiandplugs(
        sosreports=sosreports,
        risuplugins=metadataplugins,
        filename=options.output,
        serveruri=options.call_home,
    )

    # Now we've Magui saved for the whole execution provided in 'results' var
    # Start working on autogroups
    for result in results:
        if result["plugin"] == "metadata-outputs":
            autodata = result["result"]["err"]

    print(_("\nGenerating autogroups:\n"))
    groups = autogroups(autodata)

    processedgroups = {}

    # TODO(iranzo): Review this
    # This code was used to provide a field in json for risu.html to get
    # other groups in dropdown, but is not in use so commenting meanwhile

    filenames = []

    # loop over filenames first so that full results are saved and freed from memory
    for group in groups:
        basefilename = os.path.splitext(options.output)
        filename = basefilename[0] + "-" + group + basefilename[1]
        runautogroup = True
        # Skip groups whose sosreport membership matches an already
        # processed group
        for progroup in processedgroups:
            if sorted(set(groups[group])) == sorted(
                    set(processedgroups[progroup])):
                runautogroup = False
                runautofile = progroup

        if runautogroup:
            # Analysis will be generated
            filenames.append(filename)

    print("\nRunning full comparison:... %s" % options.output)

    # Run full (not only metadata plugins) so that we've the data stored and save filenames in magui.json
    results, grouped = runmaguiandplugs(
        sosreports=sosreports,
        risuplugins=risuplugins,
        extranames=filenames,
        filename=options.output,
        serveruri=options.call_home,
    )

    # Here 'grouped' obtained from above contains the full set of data

    # Results stored, removing variable to free up memory
    del results

    # reset list of processed groups
    # while len(data) != 0:
    #     print "loop: ", loop
    #     loop = loop +1
    #     target, data, todel = findtarget(data)
    processedgroups = {}

    basefilename = os.path.splitext(options.output)

    # Process one group per iteration until findtarget() has consumed them all
    while len(groups) != 0:
        target, newgroups, todel = findtarget(groups)
        if target and target != "":
            group = target
            filename = basefilename[0] + "-" + group + basefilename[1]
            print(_("\nRunning for group: %s" % filename))

            runautogroup = True
            for progroup in processedgroups:
                if groups[target] == processedgroups[progroup]:
                    runautogroup = False
                    runautofile = progroup

            if runautogroup:
                # Analysis was missing for this group, run it
                # pass grouped as 'dict' to avoid mutable
                newgrouped = copy.deepcopy(grouped)
                runmaguiandplugs(
                    sosreports=groups[target],
                    risuplugins=risuplugins,
                    filename=filename,
                    extranames=filenames,
                    anon=options.anon,
                    grouped=newgrouped,
                )
            else:
                # Copy file instead of run as it was already existing
                LOG.debug("Copying old file from %s to %s" % (runautofile, filename))
                shutil.copyfile(runautofile, filename)

            processedgroups[filename] = groups[target]

        if todel:
            # We can remove a sosreport from the dataset
            for plugin in grouped:
                if todel in grouped[plugin]["sosreport"]:
                    del grouped[plugin]["sosreport"][todel]

        del newgroups[target]

        # Put remaining groups to work
        groups = dict(newgroups)

    del groups
    del processedgroups

    print(_("\nFinished autogroup generation."))
def test_which(self):
    """which() should return an already-absolute executable path unchanged."""
    resolved = risu.which("/bin/sh")
    assert resolved == "/bin/sh"
def run(plugin):  # do not edit this line
    """
    Executes plugin

    Runs the plugin playbook through ansible-playbook against localhost and
    normalizes its exit status and output to risu conventions.

    :param plugin: plugin dictionary
    :return: returncode, out, err
    """
    ansible = risu.which("ansible-playbook")
    if not ansible:
        return risu.RC_SKIPPED, "", _("ansible-playbook support not found")

    if risu.RISU_LIVE == 0 and risu.regexpfile(filename=plugin["plugin"], regexp="RISU_ROOT"):
        # We're running in snapshoot and playbook has RISU_ROOT
        skipped = 0
    elif risu.RISU_LIVE == 1:
        if risu.regexpfile(filename=plugin["plugin"], regexp="RISU_HYBRID") or not risu.regexpfile(
                filename=plugin["plugin"], regexp="RISU_ROOT"):
            # We're running in Live mode and either plugin supports HYBRID or has no RISU_ROOT
            skipped = 0
        else:
            # We do not satisfy conditions, exit early
            skipped = 1
    else:
        # We do not satisfy conditions, exit early
        skipped = 1

    if skipped == 1:
        return (
            risu.RC_SKIPPED,
            "",
            _("Plugin does not satisfy conditions for running"),
        )

    command = "%s -i localhost, --connection=local %s" % (ansible, plugin["plugin"])

    # Disable Ansible retry files creation:
    os.environ["ANSIBLE_RETRY_FILES_ENABLED"] = "0"

    # Call exec to run playbook
    returncode, out, err = risu.execonshell(filename=command)

    # Do formatting of results to remove ansible-playbook -i localhost, and adjust return codes to risu standards
    if returncode == 2:
        returncode = risu.RC_FAILED
    elif returncode == 0:
        returncode = risu.RC_OKAY

    # Convert stdout to stderr for risu handling
    err = out
    out = ""

    # Rewrite error messages to not contain all playbook execution but just
    # the actual error. Only trim when BOTH markers are present: previously a
    # missing "PLAY RECAP" made find() return -1 so end became -11 and the
    # slice silently chopped the tail of the message instead.
    if "FAILED!" in err:
        start = err.find("FAILED!", 0) + 11  # skip past the "FAILED! => " marker
        recap = err.find("PLAY RECAP", 0)
        if recap != -1:
            end = recap - 10  # back up before the recap header's separator
            err = err[start:end]

    return returncode, out, err