def test_plugins_have_long_name(self):
    """Every core and ansible plugin must define a non-empty long_name."""
    global extensions
    extensions, exttriggers = citellus.initPymodules()

    core_folder = os.path.join(citellus.citellusdir, 'plugins', 'core')
    ansible_folder = os.path.join(citellus.citellusdir, 'plugins', 'ansible')

    # Gather every plugin to check: regular executable core plugins plus
    # the non-executable ansible playbooks (.yml).
    plugins = list(citellus.findplugins(folders=[core_folder]))
    plugins.extend(citellus.findplugins(executables=False,
                                        fileextension=".yml",
                                        extension='ansible',
                                        folders=[ansible_folder]))

    for plugin in plugins:
        if plugin['long_name'] == '':
            # Dump the offending plugin before failing so the broken one
            # is easy to identify in the test output.
            print(plugin)
        assert plugin['long_name'] != ''
def main():
    """
    Main code stub

    Parses command line options, configures locale and logging, collects
    sosreports (optionally fetching data from remote hosts via
    ansible-playbook), runs the magui plugins through the nested
    runmaguiandplugs() helper and finally writes per-group ("autogroup")
    result files.

    NOTE(review): this chunk contains two definitions of main(); this
    earlier one (md5 plugin ids, single-quoted strings) is shadowed at
    import time by the later definition -- confirm which is intended.
    """
    options = parse_args()

    # Configure ENV language before anything else
    os.environ['LANG'] = "%s" % options.lang

    # Reinstall language in case it has changed
    trad = gettext.translation('citellus', localedir, fallback=True,
                               languages=[options.lang])

    # Python 2 exposes ugettext; Python 3 only has gettext
    try:
        _ = trad.ugettext
    except AttributeError:
        _ = trad.gettext

    # Configure logging
    logging.basicConfig(level=options.loglevel)

    if not options.quiet:
        show_logo()

    # Each argument in sosreport is a sosreport
    magplugs, magtriggers = citellus.initPymodules(
        extensions=citellus.getPymodules(options=options,
                                         folders=[PluginsFolder]))

    if options.list_plugins:
        for plugin in magplugs:
            print("-", plugin.__name__.split(".")[-1])
            if options.description:
                desc = plugin.help()
                if desc:
                    print(citellus.indent(text=desc, amount=4))
        return

    # Prefill enabled citellus plugins from args
    if not citellus.extensions:
        extensions = citellus.initPymodules()[0]
    else:
        extensions = citellus.extensions

    # Grab the data
    sosreports = options.sosreports

    # If we've provided a hosts file, use ansible to grab the data from them
    if options.hosts:
        ansible = citellus.which("ansible-playbook")
        if not ansible:
            LOG.err(_("No ansible-playbook support found, skipping"))
        else:
            LOG.info("Grabbing data from remote hosts with Ansible")
            # Grab data from ansible hosts

            # Disable Ansible retry files creation:
            os.environ['ANSIBLE_RETRY_FILES_ENABLED'] = "0"

            if options.loglevel == 'DEBUG':
                # Keep ansible remote files for debug
                os.environ['ANSIBLE_KEEP_REMOTE_FILES'] = "1"
            command = "%s -i %s %s" % (ansible, options.hosts,
                                       os.path.join(maguidir, 'remote.yml'))
            LOG.debug("Running: %s with 600 seconds timeout" % command)
            citellus.execonshell(filename=command, timeout=600)

            # Now check the hosts we got logs from:
            # each per-host json dump becomes an additional "sosreport"
            hosts = citellus.findplugins(
                folders=glob.glob('/tmp/citellus/hostrun/*'),
                executables=False,
                fileextension='.json')
            for host in hosts:
                sosreports.append(os.path.dirname(host['plugin']))

    # Get all data from hosts for all plugins, etc
    if options.output:
        dooutput = options.output
    else:
        dooutput = False

    # NOTE(review): max_hosts is compared without int() here, unlike the
    # later main() definition -- confirm parse_args yields an int
    if len(sosreports) > options.max_hosts:
        print("Maximum number of sosreports provided, exiting")
        sys.exit(0)

    citellusplugins = []
    # Prefill with all available plugins and the ones we want to filter for
    for extension in extensions:
        citellusplugins.extend(extension.listplugins())

    global allplugins
    allplugins = citellusplugins

    # By default, flatten plugin list for all extensions
    newplugins = []
    for each in citellusplugins:
        newplugins.extend(each)
    citellusplugins = newplugins

    def runmaguiandplugs(sosreports, citellusplugins, filename=dooutput,
                         extranames=None, serveruri=False, onlysave=False,
                         result=None, anon=False):
        """
        Runs magui and magui plugins

        :param serveruri: server uri to POST results to (False disables)
        :param sosreports: sosreports to process
        :param citellusplugins: citellusplugins to run
        :param filename: filename to save to
        :param extranames: additional filenames used
        :param onlysave: Bool: Defines if we just want to save results
        :param result: Results to write to disk
        :param anon: anonymize results when saving
        :return: results of execution
        """
        start_time = time.time()
        # Skip the (expensive) execution when we only need to persist
        # previously-computed results
        if not onlysave and not result:
            # Run with all plugins so that we get all data back
            grouped = domagui(sosreports=sosreports,
                              citellusplugins=citellusplugins)

            # Run Magui plugins
            result = []
            for plugin in magplugs:
                plugstart_time = time.time()
                # Get output from plugin
                data = filterresults(
                    data=grouped,
                    triggers=magtriggers[plugin.__name__.split(".")[-1]])
                returncode, out, err = plugin.run(data=data,
                                                  quiet=options.quiet)
                updates = {'rc': returncode,
                           'out': out,
                           'err': err}

                # Derive category/subcategory from the plugin's path
                # relative to the magui 'plugins' folder
                subcategory = os.path.split(plugin.__file__)[0].replace(
                    os.path.join(maguidir, 'plugins', ''), '')
                if subcategory:
                    if len(os.path.normpath(subcategory).split(os.sep)) > 1:
                        category = os.path.normpath(subcategory).split(
                            os.sep)[0]
                    else:
                        category = subcategory
                        subcategory = ""
                else:
                    category = ""

                mydata = {
                    'plugin': plugin.__name__.split(".")[-1],
                    'name': "magui: %s" % os.path.basename(
                        plugin.__name__.split(".")[-1]),
                    # stable id: hash of plugin path relative to maguidir
                    'id': hashlib.md5(
                        plugin.__file__.replace(
                            maguidir, '').encode('UTF-8')).hexdigest(),
                    'description': plugin.help(),
                    'long_name': plugin.help(),
                    'result': updates,
                    'time': time.time() - plugstart_time,
                    'category': category,
                    'subcategory': subcategory}
                result.append(mydata)
        if filename:
            branding = _(" ")
            citellus.write_results(results=result, filename=filename,
                                   source='magui', path=sosreports,
                                   time=time.time() - start_time,
                                   branding=branding, web=True,
                                   extranames=extranames,
                                   serveruri=serveruri, anon=anon)
        return result

    print(_("\nStarting check updates and comparison"))
    results = runmaguiandplugs(sosreports=sosreports,
                               citellusplugins=citellusplugins,
                               filename=options.output,
                               serveruri=options.call_home)

    # Now we've Magui saved for the whole execution provided in 'results' var
    # Start working on autogroups
    for result in results:
        if result['plugin'] == 'metadata-outputs':
            autodata = result['result']['err']

    print(_("\nGenerating autogroups:\n"))
    groups = autogroups(autodata)

    processedgroups = {}
    filenames = []

    # loop over filenames first so that full results are saved and freed from memory
    for group in groups:
        basefilename = os.path.splitext(options.output)
        filename = basefilename[0] + "-" + group + basefilename[1]
        runautogroup = True
        # Skip groups whose member set matches an already-processed group
        for progroup in processedgroups:
            if groups[group] == processedgroups[progroup]:
                runautogroup = False
                runautofile = progroup
        if runautogroup:
            # Analisys will be generated
            filenames.append(filename)

    if len(filenames) > 0:
        # We've written additional files, so save again magui.json with additional references
        runmaguiandplugs(sosreports=sosreports,
                         citellusplugins=citellusplugins,
                         filename=options.output, extranames=filenames,
                         onlysave=True, result=results, anon=options.anon)

    # Results stored, removing variable
    del results
    print("\nFull results written to %s" % options.output)

    # reset list of processed groups
    processedgroups = {}

    for group in groups:
        basefilename = os.path.splitext(options.output)
        filename = basefilename[0] + "-" + group + basefilename[1]
        print(_("\nRunning for group: %s" % filename))

        runautogroup = True
        for progroup in processedgroups:
            if groups[group] == processedgroups[progroup]:
                runautogroup = False
                runautofile = progroup

        if runautogroup:
            # Analisys was missing for this group, run
            # NOTE(review): extranames is options.output (a string) here
            # while earlier calls pass a list -- confirm write_results
            # accepts both shapes
            runmaguiandplugs(sosreports=groups[group],
                             citellusplugins=citellusplugins,
                             filename=filename,
                             extranames=options.output,
                             anon=options.anon)
            filenames.append(filename)
        else:
            # Copy file instead of run as it was already existing
            LOG.debug("Copying old file from %s to %s" %
                      (runautofile, filename))
            shutil.copyfile(runautofile, filename)

        processedgroups[filename] = groups[group]

    del groups
    del processedgroups

    print(_("\nFinished autogroup generation."))
def domagui(sosreports, citellusplugins, options=False):
    """
    Do actual execution against sosreports

    Runs callcitellus for each sosreport, re-runs when stored results are
    inconsistent, regroups the per-sosreport results by plugin and passes
    the grouped matrix through the magui hook modules.

    :param sosreports: list of sosreport paths to process
    :param citellusplugins: citellus plugins to execute
    :param options: parsed CLI options, or False to use defaults
    :return: dict of result
    """
    # Check if we've been provided options
    if options:
        forcerun = options.run
        citinclude = options.include
        citexclude = options.exclude
        hosts = options.hosts
    else:
        forcerun = False
        citinclude = None
        citexclude = None
        hosts = False

    # Grab data from citellus for the sosreports provided
    result = {}
    for sosreport in sosreports:
        result[sosreport] = callcitellus(path=sosreport,
                                         plugins=citellusplugins,
                                         forcerun=forcerun,
                                         include=citinclude,
                                         exclude=citexclude)

    # Sanity check in case we do need to force run because of inconsistencies between saved data
    if not forcerun:
        # Prefill all plugins
        plugins = []
        for sosreport in sosreports:
            for plugin in result[sosreport]:
                plugins.append(plugin)
        plugins = sorted(set(plugins))

        rerun = False
        # Check all sosreports for data for all plugins
        for sosreport in sosreports:
            for plugin in plugins:
                # Skip composed plugins as they will cause rerun
                if '-' not in plugin:
                    # NOTE(review): bare except -- any missing 'result'
                    # key (or any other error) triggers a rerun
                    try:
                        result[sosreport][plugin]['result']
                    except:
                        rerun = True

            # If we were running against a folder with just json, cancel rerun as it will fail
            if rerun:
                # NOTE(review): bare except around os.access as well
                try:
                    access = os.access(os.path.join(sosreport,
                                                    'version.txt'), os.R_OK)
                except:
                    access = False
                if not access:
                    # We're running against a folder that misses version.txt, so probably just folder with json, skip rerun
                    rerun = False

            # Forcing rerun but not if we've specified ansible hosts
            if rerun and not hosts:
                # 'plugin' here is the last plugin seen in the loop above
                LOG.debug(
                    "Forcing rerun of citellus for %s because of missing %s"
                    % (sosreport, plugin))
                # Sosreport contains non uniform data, rerun
                result[sosreport] = callcitellus(path=sosreport,
                                                 plugins=citellusplugins,
                                                 forcerun=True)

    # Precreate multidimensional array
    grouped = {}
    for sosreport in sosreports:
        plugins = []
        for plugin in result[sosreport]:
            plugins.append(plugin)
            grouped[plugin] = {}
            grouped[plugin]['sosreport'] = {}

    # Fill the data
    for sosreport in sosreports:
        for plugin in result[sosreport]:
            grouped[plugin]['sosreport'][sosreport] = result[sosreport][
                plugin]['result']
            for element in result[sosreport][plugin]:
                # Some of the elements are not useful as they are sosreport specific, so we do skip them completely
                # In this approach we don't need to update this code each time the plugin exports new metadata
                if element not in ['time', 'result']:
                    grouped[plugin][element] = result[sosreport][plugin][
                        element]

    # Run the hook processing hooks on the results
    for maguihook in citellus.initPymodules(
            extensions=citellus.getPymodules(
                options=options, folders=[MaguiHooksFolder]))[0]:
        LOG.debug("Running hook: %s" % maguihook.__name__.split('.')[-1])
        newresults = maguihook.run(data=grouped)
        if newresults:
            # A hook may rewrite the whole grouped matrix
            grouped = dict(newresults)

    # We've now a matrix of grouped[plugin][sosreport] and then [text] [out] [err] [rc]
    return grouped
def main():
    """
    Main code stub

    Parses command line options, configures locale and logging, collects
    sosreports (optionally fetching data from remote hosts via
    ansible-playbook), runs the magui plugins (metadata plugins first, to
    discover autogroups) and writes per-group result files, pruning the
    grouped dataset via findtarget() as groups are processed.

    NOTE(review): this is the second main() definition in this chunk and
    shadows the earlier one (this version uses sha512 ids, int(max_hosts)
    and a grouped-results cache).
    """
    options = parse_args()

    # Configure ENV language before anything else
    os.environ["LANG"] = "%s" % options.lang

    # Reinstall language in case it has changed
    trad = gettext.translation("citellus", localedir, fallback=True,
                               languages=[options.lang])

    # Python 2 exposes ugettext; Python 3 only has gettext
    try:
        _ = trad.ugettext
    except AttributeError:
        _ = trad.gettext

    # Configure logging
    logging.basicConfig(level=options.loglevel)

    if not options.quiet:
        show_logo()

    # Each argument in sosreport is a sosreport
    magplugs, magtriggers = citellus.initPymodules(
        extensions=citellus.getPymodules(options=options,
                                         folders=[PluginsFolder]))

    if options.list_plugins:
        for plugin in magplugs:
            print("-", plugin.__name__.split(".")[-1])
            if options.description:
                desc = plugin.help()
                if desc:
                    print(citellus.indent(text=desc, amount=4))
        return

    # Prefill enabled citellus plugins from args
    if not citellus.extensions:
        extensions = citellus.initPymodules()[0]
    else:
        extensions = citellus.extensions

    # Grab the data
    sosreports = options.sosreports

    # If we've provided a hosts file, use ansible to grab the data from them
    if options.hosts:
        ansible = citellus.which("ansible-playbook")
        if not ansible:
            LOG.err(_("No ansible-playbook support found, skipping"))
        else:
            LOG.info("Grabbing data from remote hosts with Ansible")
            # Grab data from ansible hosts

            # Disable Ansible retry files creation:
            os.environ["ANSIBLE_RETRY_FILES_ENABLED"] = "0"

            if options.loglevel == "DEBUG":
                # Keep ansible remote files for debug
                os.environ["ANSIBLE_KEEP_REMOTE_FILES"] = "1"
            command = "%s -i %s %s" % (
                ansible,
                options.hosts,
                os.path.join(maguidir, "remote.yml"),
            )
            LOG.debug("Running: %s with 600 seconds timeout" % command)
            citellus.execonshell(filename=command, timeout=600)

            # Now check the hosts we got logs from:
            # each per-host json dump becomes an additional "sosreport"
            hosts = citellus.findplugins(
                folders=glob.glob("/tmp/citellus/hostrun/*"),
                executables=False,
                fileextension=".json",
            )
            for host in hosts:
                sosreports.append(os.path.dirname(host["plugin"]))

    # Get all data from hosts for all plugins, etc
    if options.output:
        dooutput = options.output
    else:
        dooutput = False

    if len(sosreports) > int(options.max_hosts):
        print("Maximum number of sosreports provided, exiting")
        sys.exit(0)

    citellusplugins = []
    # Prefill with all available plugins and the ones we want to filter for
    for extension in extensions:
        citellusplugins.extend(extension.listplugins())

    global allplugins
    allplugins = citellusplugins

    # By default, flatten plugin list for all extensions
    newplugins = []
    for each in citellusplugins:
        newplugins.extend(each)
    citellusplugins = newplugins

    # NOTE(review): grouped={} is a mutable default argument shared
    # across calls -- confirm callers always pass a fresh dict (the
    # autogroup loop below passes a deepcopy)
    def runmaguiandplugs(
        sosreports,
        citellusplugins,
        filename=dooutput,
        extranames=None,
        serveruri=False,
        onlysave=False,
        result=None,
        anon=False,
        grouped={},
    ):
        """
        Runs magui and magui plugins

        :param grouped: Grouped results from sosreports to speedup processing (domagui)
        :param anon: anonymize results on execution
        :param serveruri: Server uri to POST the analysis
        :param sosreports: sosreports to process
        :param citellusplugins: citellusplugins to run
        :param filename: filename to save to
        :param extranames: additional filenames used
        :param onlysave: Bool: Defines if we just want to save results
        :param result: Results to write to disk
        :return: results of execution
        """
        start_time = time.time()
        # Skip the (expensive) execution when we only need to persist
        # previously-computed results
        if not onlysave and not result:
            # Run with all plugins so that we get all data back
            # NOTE(review): the domagui() defined earlier in this file
            # takes no 'grouped' parameter -- confirm the shipped version
            # accepts it
            grouped = domagui(sosreports=sosreports,
                              citellusplugins=citellusplugins,
                              grouped=grouped)

            # Run Magui plugins
            result = []
            for plugin in magplugs:
                plugstart_time = time.time()
                # Get output from plugin
                data = filterresults(
                    data=grouped,
                    triggers=magtriggers[plugin.__name__.split(".")[-1]])
                returncode, out, err = plugin.run(data=data,
                                                  quiet=options.quiet)
                updates = {"rc": returncode, "out": out, "err": err}

                # Derive category/subcategory from the plugin's path
                # relative to the magui 'plugins' folder
                subcategory = os.path.split(plugin.__file__)[0].replace(
                    os.path.join(maguidir, "plugins", ""), "")
                if subcategory:
                    if len(os.path.normpath(subcategory).split(os.sep)) > 1:
                        category = os.path.normpath(subcategory).split(
                            os.sep)[0]
                    else:
                        category = subcategory
                        subcategory = ""
                else:
                    category = ""

                mydata = {
                    "plugin": plugin.__name__.split(".")[-1],
                    "name": "magui: %s" % os.path.basename(
                        plugin.__name__.split(".")[-1]),
                    # stable id: hash of plugin path relative to maguidir
                    "id": hashlib.sha512(
                        plugin.__file__.replace(
                            maguidir, "").encode("UTF-8")).hexdigest(),
                    "description": plugin.help(),
                    "long_name": plugin.help(),
                    "result": updates,
                    "time": time.time() - plugstart_time,
                    "category": category,
                    "subcategory": subcategory,
                }
                result.append(mydata)
        if filename:
            branding = _(" ")
            citellus.write_results(
                results=result,
                filename=filename,
                source="magui",
                path=sosreports,
                time=time.time() - start_time,
                branding=branding,
                web=True,
                extranames=extranames,
                serveruri=serveruri,
                anon=anon,
            )
        return result, grouped

    print(_("\nStarting check updates and comparison"))

    # Only metadata-backend plugins are needed to compute autogroups
    metadataplugins = []
    for plugin in citellusplugins:
        if plugin["backend"] == "metadata":
            metadataplugins.append(plugin)

    # Prepare metadata execution to find groups
    results, grouped = runmaguiandplugs(
        sosreports=sosreports,
        citellusplugins=metadataplugins,
        filename=options.output,
        serveruri=options.call_home,
    )

    # Now we've Magui saved for the whole execution provided in 'results' var
    # Start working on autogroups
    for result in results:
        if result["plugin"] == "metadata-outputs":
            autodata = result["result"]["err"]

    print(_("\nGenerating autogroups:\n"))
    groups = autogroups(autodata)

    processedgroups = {}

    # TODO(iranzo): Review this
    # This code was used to provide a field in json for citellus.html to get
    # other groups in dropdown, but is not in use so commenting meanwhile

    filenames = []
    # loop over filenames first so that full results are saved and freed from memory
    for group in groups:
        basefilename = os.path.splitext(options.output)
        filename = basefilename[0] + "-" + group + basefilename[1]
        runautogroup = True
        # Skip groups whose member set matches an already-processed group
        for progroup in processedgroups:
            if sorted(set(groups[group])) == sorted(
                    set(processedgroups[progroup])):
                runautogroup = False
                runautofile = progroup
        if runautogroup:
            # Analysis will be generated
            filenames.append(filename)

    print("\nRunning full comparison:... %s" % options.output)

    # Run full (not only metadata plugins) so that we've the data stored and save filenames in magui.json
    results, grouped = runmaguiandplugs(
        sosreports=sosreports,
        citellusplugins=citellusplugins,
        extranames=filenames,
        filename=options.output,
        serveruri=options.call_home,
    )

    # Here 'grouped' obtained from above contains the full set of data

    # Results stored, removing variable to free up memory
    del results

    # reset list of processed groups
    # while len(data) != 0:
    #     print "loop: ", loop
    #     loop = loop +1
    #     target, data, todel = findtarget(data)
    processedgroups = {}

    basefilename = os.path.splitext(options.output)

    # Process groups one at a time; findtarget() picks the next group and
    # may return a sosreport ('todel') to drop from the grouped dataset
    while len(groups) != 0:
        target, newgroups, todel = findtarget(groups)
        group = target
        filename = basefilename[0] + "-" + group + basefilename[1]
        print(_("\nRunning for group: %s" % filename))

        runautogroup = True
        for progroup in processedgroups:
            if groups[target] == processedgroups[progroup]:
                runautogroup = False
                runautofile = progroup

        if runautogroup:
            # Analysis was missing for this group, run it
            # pass grouped as 'dict' to avoid mutable
            newgrouped = copy.deepcopy(grouped)
            runmaguiandplugs(
                sosreports=groups[target],
                citellusplugins=citellusplugins,
                filename=filename,
                extranames=filenames,
                anon=options.anon,
                grouped=newgrouped,
            )
        else:
            # Copy file instead of run as it was already existing
            LOG.debug("Copying old file from %s to %s" %
                      (runautofile, filename))
            shutil.copyfile(runautofile, filename)

        processedgroups[filename] = groups[target]

        if todel:
            # We can remove a sosreport from the dataset
            for plugin in grouped:
                if todel in grouped[plugin]["sosreport"]:
                    del grouped[plugin]["sosreport"][todel]

        del newgroups[target]

        # Put remaining groups to work
        groups = dict(newgroups)

    del groups
    del processedgroups

    print(_("\nFinished autogroup generation."))