import os import shutil import subprocess import tempfile from unittest import TestCase import citellusclient.shell as citellus # To create your own test, update NAME with plugin name and copy this file to test_$NAME.py NAME = "test_cf_is_active" testplugins = os.path.join(citellus.citellusdir, "plugins", "test") plugins = os.path.join(citellus.citellusdir, "plugins", "core") folder = os.path.join(os.path.abspath(os.path.dirname(__file__)), "setup") uttest = citellus.findplugins(folders=[folder], include=[NAME])[0]["plugin"] us = os.path.basename(uttest) citplugs = citellus.findplugins(folders=[folder], include=[us]) # Setup commands and expected return codes rcs = { "pass": citellus.RC_OKAY, "fail": citellus.RC_FAILED, "skipped": citellus.RC_SKIPPED, "info": citellus.RC_INFO, } def runtest(testtype="False"): """
import os import shutil import subprocess import tempfile from unittest import TestCase import citellusclient.shell as citellus # To create your own test, update NAME with plugin name and copy this file to test_$NAME.py NAME = 'system_lib_overload' testplugins = os.path.join(citellus.citellusdir, 'plugins', 'test') plugins = os.path.join(citellus.citellusdir, 'plugins', 'core') folder = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'setup') uttest = citellus.findplugins(folders=[folder], include=[NAME])[0]['plugin'] citplugs = citellus.findplugins(folders=[plugins], include=[NAME]) # Setup commands and expected return codes rcs = { "pass": citellus.RC_OKAY, "fail": citellus.RC_FAILED, "skipped": citellus.RC_SKIPPED, "info": citellus.RC_INFO } def runtest(testtype='False'): """ Actually run the test for UT :param testtype: argument to pass to setup script
# GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import os import re import sys from unittest import TestCase sys.path.append(os.path.abspath(os.path.dirname(__file__) + "/" + "../")) import citellusclient.shell as citellus testplugins = os.path.join(citellus.citellusdir, "plugins", "test") pluginsdir = os.path.join(citellus.citellusdir, "plugins", "core") plugins = citellus.findplugins(folders=[pluginsdir]) class CitellusTest(TestCase): def test_ut_sourced_if_used(self): # Check list of plugins for regexp sourcing common functions and skip them nonsourcing = [] for plugin in plugins: if not citellus.regexpfile( filename=plugin["plugin"], regexp=".*common-functions" ): nonsourcing.append(plugin["plugin"]) commonfunctions = []
import os import re import shutil import subprocess import citellusclient.shell as citellus regexpyear = "[0-9][0-9][0-9][0-9]-" regexpemail = "\\<(.*@.*)\\>" # Find all plugins print("Finding all possible files to modify...") # plugins = citellus.findallplugins() plugins = citellus.findplugins( folders=[os.path.abspath(os.path.dirname(__file__))], executables=False, exclude=[".git", ".tox", ".pyc", ".history", "doc/templates"], include=[".yml", ".py", ".sh", ".txt"], ) os.environ["LANG"] = "en_US.UTF-8" # Iterate over found plugins for plugin in plugins: if not "citellus/plugins" in plugin["plugin"]: name = "" date = "" command = ( "cd $(dirname %s) && git blame -e %s | awk '{print $2\" \"$3\" \"$4}'|egrep -o '<.*>.*[0-9][0-9][0-9][0-9]-' | sed 's/ */ /g' | cut -d ' ' -f 1-2 | sort -u|grep -v not.committed.yet"
# along with this program. If not, see <http://www.gnu.org/licenses/>. import os import random import shutil import subprocess import tempfile from unittest import TestCase import citellusclient.shell as citellus import maguiclient.magui as magui testplugins = os.path.join(citellus.citellusdir, "plugins", "test") plugins = os.path.join(citellus.citellusdir, "plugins", "core") folder = os.path.join(os.path.abspath(os.path.dirname(__file__)), "setup") uttest = citellus.findplugins(folders=[folder]) citplugs = citellus.findplugins(folders=[plugins]) okay = random.randint(10, 29) failed = random.randint(30, 49) skipped = random.randint(50, 69) info = random.randint(70, 89) # Setup commands and expected return codes rcs = {"pass": okay, "fail": failed, "skipped": skipped, "info": info} class CitellusTest(TestCase): def test_all_plugins_snapshot(self): tmpdir = tempfile.mkdtemp(prefix="citellus-tmp")
def main():
    """
    Main entry point for magui.

    Parses options, optionally gathers sosreport data from remote hosts via
    Ansible, runs citellus metadata plugins to discover autogroups, then runs
    the full plugin set and writes one results file per autogroup.
    """
    options = parse_args()

    # Configure ENV language before anything else
    os.environ['LANG'] = "%s" % options.lang

    # Reinstall language in case it has changed
    trad = gettext.translation('citellus', localedir, fallback=True,
                               languages=[options.lang])
    try:
        _ = trad.ugettext
    except AttributeError:
        # Python 3 gettext has no ugettext
        _ = trad.gettext

    # Configure logging
    logging.basicConfig(level=options.loglevel)

    if not options.quiet:
        show_logo()

    # Each argument in sosreport is a sosreport
    magplugs, magtriggers = citellus.initPymodules(
        extensions=citellus.getPymodules(options=options,
                                         folders=[PluginsFolder]))

    if options.list_plugins:
        for plugin in magplugs:
            print("-", plugin.__name__.split(".")[-1])
            if options.description:
                desc = plugin.help()
                if desc:
                    print(citellus.indent(text=desc, amount=4))
        return

    # Prefill enabled citellus plugins from args
    if not citellus.extensions:
        extensions = citellus.initPymodules()[0]
    else:
        extensions = citellus.extensions

    # Grab the data
    sosreports = options.sosreports

    # If we've provided a hosts file, use ansible to grab the data from them
    if options.hosts:
        ansible = citellus.which("ansible-playbook")
        if not ansible:
            LOG.err(_("No ansible-playbook support found, skipping"))
        else:
            LOG.info("Grabbing data from remote hosts with Ansible")
            # Grab data from ansible hosts

            # Disable Ansible retry files creation:
            os.environ['ANSIBLE_RETRY_FILES_ENABLED'] = "0"

            if options.loglevel == 'DEBUG':
                # Keep ansible remote files for debug
                os.environ['ANSIBLE_KEEP_REMOTE_FILES'] = "1"
            command = "%s -i %s %s" % (ansible, options.hosts,
                                       os.path.join(maguidir, 'remote.yml'))
            LOG.debug("Running: %s with 600 seconds timeout" % command)
            citellus.execonshell(filename=command, timeout=600)

            # Now check the hosts we got logs from:
            hosts = citellus.findplugins(
                folders=glob.glob('/tmp/citellus/hostrun/*'),
                executables=False,
                fileextension='.json')
            for host in hosts:
                sosreports.append(os.path.dirname(host['plugin']))

    # Get all data from hosts for all plugins, etc
    if options.output:
        dooutput = options.output
    else:
        dooutput = False

    if len(sosreports) > options.max_hosts:
        print("Maximum number of sosreports provided, exiting")
        sys.exit(0)

    citellusplugins = []
    # Prefill with all available plugins and the ones we want to filter for
    for extension in extensions:
        citellusplugins.extend(extension.listplugins())

    global allplugins
    allplugins = citellusplugins

    # By default, flatten plugin list for all extensions
    newplugins = []
    for each in citellusplugins:
        newplugins.extend(each)
    citellusplugins = newplugins

    def runmaguiandplugs(sosreports, citellusplugins, filename=dooutput,
                         extranames=None, serveruri=False, onlysave=False,
                         result=None, anon=False, grouped=None):
        """
        Runs magui and magui plugins

        :param grouped: Grouped results from sosreports to speedup processing (domagui)
        :param anon: anonymize results on execution
        :param serveruri: Server uri to POST the analysis
        :param sosreports: sosreports to process
        :param citellusplugins: citellusplugins to run
        :param filename: filename to save to
        :param extranames: additional filenames used
        :param onlysave: Bool: Defines if we just want to save results
        :param result: Results to write to disk
        :return: results of execution plus the grouped data
        """
        # BUGFIX: the original signature used a mutable default (grouped={})
        # which is evaluated once and shared across calls, so state would
        # leak between invocations that omit 'grouped'. Use a None sentinel.
        if grouped is None:
            grouped = {}
        start_time = time.time()
        if not onlysave and not result:
            # Run with all plugins so that we get all data back
            grouped = domagui(sosreports=sosreports,
                              citellusplugins=citellusplugins,
                              grouped=grouped)

            # Run Magui plugins
            result = []
            for plugin in magplugs:
                plugstart_time = time.time()
                # Get output from plugin
                data = filterresults(
                    data=grouped,
                    triggers=magtriggers[plugin.__name__.split(".")[-1]])
                returncode, out, err = plugin.run(data=data,
                                                  quiet=options.quiet)
                updates = {'rc': returncode, 'out': out, 'err': err}
                # Derive category/subcategory from the plugin path below the
                # magui plugins folder
                subcategory = os.path.split(plugin.__file__)[0].replace(
                    os.path.join(maguidir, 'plugins', ''), '')
                if subcategory:
                    if len(os.path.normpath(subcategory).split(os.sep)) > 1:
                        category = os.path.normpath(subcategory).split(
                            os.sep)[0]
                    else:
                        category = subcategory
                        subcategory = ""
                else:
                    category = ""
                mydata = {
                    'plugin': plugin.__name__.split(".")[-1],
                    'name': "magui: %s" % os.path.basename(
                        plugin.__name__.split(".")[-1]),
                    'id': hashlib.sha512(
                        plugin.__file__.replace(
                            maguidir, '').encode('UTF-8')).hexdigest(),
                    'description': plugin.help(),
                    'long_name': plugin.help(),
                    'result': updates,
                    'time': time.time() - plugstart_time,
                    'category': category,
                    'subcategory': subcategory
                }
                result.append(mydata)
        if filename:
            branding = _(" ")
            citellus.write_results(results=result, filename=filename,
                                   source='magui', path=sosreports,
                                   time=time.time() - start_time,
                                   branding=branding, web=True,
                                   extranames=extranames,
                                   serveruri=serveruri, anon=anon)
        return result, grouped

    print(_("\nStarting check updates and comparison"))

    metadataplugins = []
    for plugin in citellusplugins:
        if plugin['backend'] == 'metadata':
            metadataplugins.append(plugin)

    # Prepare metadata execution to find groups
    results, grouped = runmaguiandplugs(sosreports=sosreports,
                                        citellusplugins=metadataplugins,
                                        filename=options.output,
                                        serveruri=options.call_home)

    # Now we've Magui saved for the whole execution provided in 'results' var
    # Start working on autogroups
    for result in results:
        if result['plugin'] == 'metadata-outputs':
            autodata = result['result']['err']

    print(_("\nGenerating autogroups:\n"))
    groups = autogroups(autodata)
    processedgroups = {}

    # TODO(iranzo): Review this
    # This code was used to provide a field in json for citellus.html to get
    # other groups in dropdown, but is not in use so commenting meanwhile

    filenames = []
    # loop over filenames first so that full results are saved and freed from
    # memory
    for group in groups:
        basefilename = os.path.splitext(options.output)
        filename = basefilename[0] + "-" + group + basefilename[1]
        runautogroup = True
        for progroup in processedgroups:
            if sorted(set(groups[group])) == sorted(
                    set(processedgroups[progroup])):
                runautogroup = False
                runautofile = progroup
        if runautogroup:
            # Analysis will be generated
            filenames.append(filename)

    print("\nRunning full comparison:... %s" % options.output)

    # Run full (not only metadata plugins) so that we've the data stored and
    # save filenames in magui.json
    results, grouped = runmaguiandplugs(sosreports=sosreports,
                                        citellusplugins=citellusplugins,
                                        extranames=filenames,
                                        filename=options.output,
                                        serveruri=options.call_home)

    # Here 'grouped' obtained from above contains the full set of data

    # Results stored, removing variable to free up memory
    del results

    # reset list of processed groups
    # while len(data) != 0:
    #     print "loop: ", loop
    #     loop = loop + 1
    #     target, data, todel = findtarget(data)
    processedgroups = {}
    basefilename = os.path.splitext(options.output)

    while len(groups) != 0:
        target, newgroups, todel = findtarget(groups)
        group = target
        filename = basefilename[0] + "-" + group + basefilename[1]
        print(_("\nRunning for group: %s" % filename))
        runautogroup = True
        for progroup in processedgroups:
            if groups[target] == processedgroups[progroup]:
                runautogroup = False
                runautofile = progroup
        if runautogroup:
            # Analysis was missing for this group, run it
            # pass grouped as 'dict' to avoid mutable
            newgrouped = copy.deepcopy(grouped)
            runmaguiandplugs(sosreports=groups[target],
                             citellusplugins=citellusplugins,
                             filename=filename, extranames=filenames,
                             anon=options.anon, grouped=newgrouped)
        else:
            # Copy file instead of run as it was already existing
            LOG.debug("Copying old file from %s to %s" % (runautofile,
                                                          filename))
            shutil.copyfile(runautofile, filename)
        processedgroups[filename] = groups[target]
        if todel:
            # We can remove a sosreport from the dataset
            for plugin in grouped:
                if todel in grouped[plugin]['sosreport']:
                    del grouped[plugin]['sosreport'][todel]
        del newgroups[target]
        # Put remaining groups to work
        groups = dict(newgroups)

    del groups
    del processedgroups
    print(_("\nFinished autogroup generation."))
import re import citellusclient.shell as citellus import shutil import sys import os.path import os regexpyear = '[0-9][0-9][0-9][0-9]-' regexpemail = '\\<(.*@.*)\\>' # Find all plugins print("Finding all possible files to modify...") #plugins = citellus.findallplugins() plugins = citellus.findplugins( folders=[os.path.abspath(os.path.dirname(__file__))], executables=False, exclude=['.git', '.tox', '.pyc', '.history', 'doc/templates'], include=['.yml', '.py', '.sh', '.txt']) os.environ['LANG'] = 'en_US.UTF-8' # Iterate over found plugins for plugin in plugins: if not 'citellus/plugins' in plugin['plugin']: name = '' date = '' command = "git blame -e %s | awk '{print $2\" \"$3\" \"$4}'|egrep -o '<.*>.*[0-9][0-9][0-9][0-9]-' | sed 's/ */ /g' | cut -d ' ' -f 1-2 | sort -u" % plugin[ 'plugin']
def test_findplugins_positive_filter_exclude(self):
    """Excluded name fragments must not appear in any returned plugin."""
    found = citellus.findplugins([testplugins],
                                 exclude=["exit_passed", "exit_skipped"])
    # NOTE(review): membership mirrors the original test; if findplugins
    # returns dicts this checks keys -- confirm intended semantics.
    assert all(
        "exit_passed" not in item and "exit_skipped" not in item
        for item in found
    )
def test_findplugins_ext(self):
    """Searching the core plugins folder for '.sh' files finds at least one."""
    core_folder = [os.path.join(citellus.citellusdir, "plugins", "core")]
    shell_plugins = list(
        citellus.findplugins(folders=core_folder, fileextension=".sh"))
    assert len(shell_plugins) != 0
def main():
    """
    Main code stub: parses options, optionally fetches data from remote
    hosts with Ansible, runs citellus plugins over the sosreports, executes
    magui plugins on the grouped results, optionally writes them to disk,
    and finally pretty-prints the result list.
    """
    start_time = time.time()
    options = parse_args()

    # Configure ENV language before anything else
    os.environ['LANG'] = "%s" % options.lang

    # Reinstall language in case it has changed
    trad = gettext.translation('citellus', localedir, fallback=True,
                               languages=[options.lang])
    try:
        _ = trad.ugettext
    except AttributeError:
        # Python 3 gettext exposes gettext instead of ugettext
        _ = trad.gettext

    # Configure logging
    logging.basicConfig(level=options.loglevel)

    if not options.quiet:
        show_logo()

    # Each argument in sosreport is a sosreport
    magplugs, magtriggers = initPlugins(options)

    if options.list_plugins:
        for plugin in magplugs:
            print("-", plugin.__name__.split(".")[-1])
            if options.description:
                desc = plugin.help()
                if desc:
                    print(citellus.indent(text=desc, amount=4))
        return

    # Prefill enabled citellus plugins from args
    if not citellus.extensions:
        extensions, exttriggers = citellus.initExtensions()
    else:
        extensions = citellus.extensions

    # Grab the data
    sosreports = options.sosreports

    if options.hosts:
        ansible = citellus.which("ansible-playbook")
        if not ansible:
            LOG.err(_("No ansible-playbook support found, skipping"))
        else:
            LOG.info("Grabbing data from remote hosts with Ansible")
            # Grab data from ansible hosts

            # Disable Ansible retry files creation:
            os.environ['ANSIBLE_RETRY_FILES_ENABLED'] = "0"

            if options.loglevel == 'DEBUG':
                # Keep ansible remote files for debug
                os.environ['ANSIBLE_KEEP_REMOTE_FILES'] = "1"
            command = "%s -i %s %s" % (ansible, options.hosts,
                                       os.path.join(maguidir, 'remote.yml'))
            LOG.debug("Running: %s " % command)
            citellus.execonshell(filename=command)

            # Now check the hosts we got logs from:
            hosts = citellus.findplugins(
                folders=glob.glob('/tmp/citellus/hostrun/*'),
                executables=False,
                fileextension='.json')
            for host in hosts:
                sosreports.append(os.path.dirname(host['plugin']))

    # Get all data from hosts for all plugins, etc
    if options.output:
        citellusplugins = []
        # Prefill with all available plugins and the ones we want to filter
        # for
        for extension in extensions:
            citellusplugins.extend(extension.listplugins())

        global allplugins
        allplugins = citellusplugins

        # By default, flatten plugin list for all extensions
        newplugins = []
        for each in citellusplugins:
            newplugins.extend(each)
        citellusplugins = newplugins

        # Run with all plugins so that we get all data back
        grouped = domagui(sosreports=sosreports,
                          citellusplugins=citellusplugins)

        # Run Magui plugins
        result = []
        for plugin in magplugs:
            start_time = time.time()
            # Get output from plugin
            data = filterresults(
                data=grouped,
                triggers=magtriggers[plugin.__name__.split(".")[-1]])
            returncode, out, err = plugin.run(data=data, quiet=options.quiet)
            updates = {'rc': returncode, 'out': out, 'err': err}
            # Derive category/subcategory from the plugin path below the
            # magui plugins folder
            subcategory = os.path.split(plugin.__file__)[0].replace(
                os.path.join(maguidir, 'plugins', ''), '')
            if subcategory:
                if len(os.path.normpath(subcategory).split(os.sep)) > 1:
                    category = os.path.normpath(subcategory).split(os.sep)[0]
                else:
                    category = subcategory
                    subcategory = ""
            else:
                category = ""
            mydata = {
                'plugin': plugin.__name__.split(".")[-1],
                'id': hashlib.md5(
                    plugin.__file__.replace(maguidir,
                                            '').encode('UTF-8')).hexdigest(),
                'description': plugin.help(),
                'result': updates,
                'time': time.time() - start_time,
                'category': category,
                'subcategory': subcategory
            }
            result.append(mydata)
        branding = _(" ")
        citellus.write_results(results=result, filename=options.output,
                               source='magui', path=sosreports,
                               time=time.time() - start_time,
                               branding=branding, web=True)

    # Here preprocess output to use filtering, etc
    # "result" does contain all data for both all citellus plugins and all
    # magui plugins, need to filter for output on CLI only
    # As we don't have a proper place to store output and we're running the
    # full set of tests only when output is going to be stored (and then, the
    # screen output is based on the already cached citellus results), it's
    # probably not worth at this point to change this
    citellusplugins = []
    # Prefill with all available plugins and the ones we want to filter for
    for extension in extensions:
        citellusplugins.extend(extension.listplugins(options))

    global allplugins
    allplugins = citellusplugins

    # By default, flatten plugin list for all extensions
    newplugins = []
    for each in citellusplugins:
        newplugins.extend(each)
    citellusplugins = newplugins

    # Run with all plugins so that we get all data back
    grouped = domagui(sosreports=sosreports, citellusplugins=citellusplugins,
                      options=options)

    # Run Magui plugins
    result = []
    for plugin in magplugs:
        start_time = time.time()
        # Get output from plugin
        data = filterresults(
            data=grouped,
            triggers=magtriggers[plugin.__name__.split(".")[-1]])
        returncode, out, err = plugin.run(data=data, quiet=options.quiet)
        updates = {'rc': returncode, 'out': out, 'err': err}
        adddata = True
        if options.quiet:
            # In quiet mode, drop OK/skipped results from the output
            if returncode in [citellus.RC_OKAY, citellus.RC_SKIPPED]:
                adddata = False
        if adddata:
            # If RC is to be stored, process further
            subcategory = os.path.split(plugin.__file__)[0].replace(
                os.path.join(maguidir, 'plugins', ''), '')
            if subcategory:
                if len(os.path.normpath(subcategory).split(os.sep)) > 1:
                    category = os.path.normpath(subcategory).split(os.sep)[0]
                else:
                    category = subcategory
                    subcategory = ""
            else:
                category = ""
            mydata = {
                'plugin': plugin.__name__.split(".")[-1],
                'id': hashlib.md5(
                    plugin.__file__.replace(maguidir,
                                            '').encode('UTF-8')).hexdigest(),
                'description': plugin.help(),
                'result': updates,
                'time': time.time() - start_time,
                'category': category,
                'subcategory': subcategory
            }
            result.append(mydata)
    pprint.pprint(result, width=1)
def test_findplugins_negative(self):
    """A non-existent folder must yield an empty plugin list."""
    result = citellus.findplugins("__does_not_exist__")
    assert result == []
def test_findplugins_positive(self):
    """The test-plugins folder must contain at least one plugin."""
    found = citellus.findplugins([testplugins])
    assert len(found) != 0
def test_findplugins_positive_filter_include(self):
    """Filtering by 'exit_passed' must return exactly one plugin."""
    matches = citellus.findplugins([testplugins], include=["exit_passed"])
    assert len(matches) == 1
def main():
    """
    Main code stub: parses options, optionally fetches data from remote
    hosts with Ansible, groups citellus results from the sosreports, runs
    magui plugins over them, optionally writes results to disk, and
    pretty-prints the result list.
    """
    start_time = time.time()
    options = parse_args()

    # Configure logging
    logging.basicConfig(level=options.loglevel)

    if not options.quiet:
        show_logo()

    # Each argument in sosreport is a sosreport
    magplugs, magtriggers = initPlugins(options)

    if options.list_plugins:
        for plugin in magplugs:
            print("-", plugin.__name__.split(".")[-1])
            if options.description:
                desc = plugin.help()
                if desc:
                    print(citellus.indent(text=desc, amount=4))
        return

    # Prefill enabled citellus plugins from args
    if not citellus.extensions:
        extensions, exttriggers = citellus.initExtensions()
    else:
        extensions = citellus.extensions

    citellusplugins = []
    for extension in extensions:
        citellusplugins.extend(extension.listplugins(options))

    global allplugins
    allplugins = citellusplugins

    # By default, flatten plugin list for all extensions
    newplugins = []
    for each in citellusplugins:
        newplugins.extend(each)
    citellusplugins = newplugins

    # Grab the data
    sosreports = options.sosreports

    if options.hosts:
        ansible = citellus.which("ansible-playbook")
        if not ansible:
            LOG.err("No ansible-playbook support found, skipping")
        else:
            LOG.info("Grabbing data from remote hosts with Ansible")
            # Grab data from ansible hosts

            # Disable Ansible retry files creation:
            os.environ['ANSIBLE_RETRY_FILES_ENABLED'] = "0"

            if options.loglevel == 'DEBUG':
                # Keep ansible remote files for debug
                os.environ['ANSIBLE_KEEP_REMOTE_FILES'] = "1"
            command = "%s -i %s %s" % (ansible, options.hosts,
                                       os.path.join(maguidir, 'remote.yml'))
            LOG.debug("Running: %s " % command)
            citellus.execonshell(filename=command)

            # Now check the hosts we got logs from:
            hosts = citellus.findplugins(
                folders=glob.glob('/tmp/citellus/hostrun/*'),
                executables=False,
                fileextension='.json')
            for host in hosts:
                sosreports.append(os.path.dirname(host['plugin']))

    grouped = domagui(sosreports=sosreports, citellusplugins=citellusplugins,
                      options=options)

    # Run Magui plugins
    result = []
    for plugin in magplugs:
        start_time = time.time()
        # Get output from plugin
        data = filterresults(
            data=grouped,
            triggers=magtriggers[plugin.__name__.split(".")[-1]])
        returncode, out, err = plugin.run(data=data, quiet=options.quiet)
        updates = {'rc': returncode, 'out': out, 'err': err}
        adddata = True
        if options.quiet:
            # In quiet mode, drop OK/skipped results from the output
            if returncode in [citellus.RC_OKAY, citellus.RC_SKIPPED]:
                adddata = False
        # Derive category/subcategory from the plugin path below the magui
        # plugins folder
        subcategory = os.path.split(plugin.__file__)[0].replace(
            os.path.join(maguidir, 'plugins', ''), '')
        if subcategory:
            if len(os.path.normpath(subcategory).split(os.sep)) > 1:
                category = os.path.normpath(subcategory).split(os.sep)[0]
            else:
                category = subcategory
                subcategory = ""
        else:
            category = ""
        if adddata:
            result.append({
                'plugin': plugin.__name__.split(".")[-1],
                'id': hashlib.md5(
                    plugin.__file__.replace(maguidir,
                                            '').encode('UTF-8')).hexdigest(),
                'description': plugin.help(),
                'result': updates,
                'time': time.time() - start_time,
                'category': category,
                'subcategory': subcategory
            })
    if options.output:
        citellus.write_results(results=result, filename=options.output,
                               source='magui', path=sosreports,
                               time=time.time() - start_time)
    pprint.pprint(result, width=1)
def run(data, quiet=False):  # do not edit this line
    """
    Executes plugin

    Builds one synthetic "profile" entry per profile definition found in
    pluginsdir, aggregating the return codes of the plugins that belong to
    each profile into a single overall status.

    :param quiet: be more silent on returned information
    :param data: data to process (dict of plugin results keyed by plugin id)
    :return: data dict extended with one entry per profile
    """
    # prefill plugins we had used:
    plugins = []
    for item in data:
        plugin = {"plugin": data[item]["plugin"], "id": data[item]["id"]}
        plugins.append(plugin)

    # Find available profile definitions
    profiles = citellus.findplugins(folders=[pluginsdir], executables=False,
                                    fileextension=".txt")

    for item in profiles:
        uid = citellus.getids(plugins=[item])[0]
        profile = item["plugin"]
        plugin = dict(item)

        # Precreate storage for this profile
        name = "Profiles: %s" % os.path.basename(
            os.path.splitext(profile.replace(pluginsdir, ""))[0])
        subcategory = ""
        category = name

        data[uid] = {
            "category": category,
            "hash": item["hash"],
            "plugin": item["plugin"],
            "name": name,
            "result": {
                "rc": 0,
                "err": "",
                "out": ""
            },
            "time": 0,
            "backend": "profile",
            "id": uid,
            "subcategory": subcategory,
        }

        # Extract header metadata ("# description:", etc.) from the profile
        # file; the slices strip the header prefix from the matched line.
        metadata = {
            "description":
                citellus.regexpfile(filename=plugin["plugin"],
                                    regexp=r"\A# description:")[14:].strip(),
            "long_name":
                citellus.regexpfile(filename=plugin["plugin"],
                                    regexp=r"\A# long_name:")[12:].strip(),
            "bugzilla":
                citellus.regexpfile(filename=plugin["plugin"],
                                    regexp=r"\A# bugzilla:")[11:].strip(),
            "priority": int(
                citellus.regexpfile(filename=plugin["plugin"],
                                    regexp=r"\A# priority:")[11:].strip()
                or 0),
        }
        data[uid].update(metadata)

        # start with OK status
        # Return codes are read from the environment set by the framework
        okay = int(os.environ["RC_OKAY"])
        failed = int(os.environ["RC_FAILED"])
        skipped = int(os.environ["RC_SKIPPED"])
        info = int(os.environ["RC_INFO"])

        # Start asembling data for the plugins relevant for profile
        data[uid]["result"]["err"] = ""
        ids = plugidsforprofile(profile=profile, plugins=plugins)
        new_results = []
        overallitems = []
        for id in ids:
            if id in data:
                if "sysinfo" in name and data[id]["result"]["rc"] == skipped:
                    # Do nothing as we don't want to show skipped in sysinfo
                    pass
                else:
                    new_results.append({
                        "plugin_id": id,
                        "plugin": data[id]["plugin"].replace(
                            os.path.join(citellus.citellusdir, "plugins"),
                            ""),
                        "err": data[id]["result"]["err"].strip(),
                        "rc": data[id]["result"]["rc"],
                    })
                    overallitems.append(data[id]["result"]["rc"])

        # Fold the individual return codes into one overall status
        if "sysinfo" in name:
            # sysinfo profiles report 'info' whenever anything matched
            if okay in overallitems or failed in overallitems or \
                    info in overallitems:
                overall = info
            else:
                # No plugins matched, so skip it
                overall = skipped
        else:
            # Severity order: failed > info > skipped > okay
            if failed in overallitems:
                overall = failed
            elif info in overallitems:
                overall = info
            elif skipped in overallitems:
                overall = skipped
            else:
                overall = okay

        data[uid]["result"]["err"] = json.dumps(new_results)
        data[uid]["components"] = ids
        data[uid]["result"]["rc"] = overall
    return data
def main():
    """
    Main code stub: parses options, optionally fetches data from remote
    hosts with Ansible, runs magui over all sosreports, then re-runs it per
    detected autogroup, writing one results file per group.
    """
    options = parse_args()

    # Configure ENV language before anything else
    os.environ['LANG'] = "%s" % options.lang

    # Reinstall language in case it has changed
    trad = gettext.translation('citellus', localedir, fallback=True,
                               languages=[options.lang])
    try:
        _ = trad.ugettext
    except AttributeError:
        # Python 3 gettext exposes gettext instead of ugettext
        _ = trad.gettext

    # Configure logging
    logging.basicConfig(level=options.loglevel)

    if not options.quiet:
        show_logo()

    # Each argument in sosreport is a sosreport
    magplugs, magtriggers = citellus.initPymodules(
        extensions=citellus.getPymodules(options=options,
                                         folders=[PluginsFolder]))

    if options.list_plugins:
        for plugin in magplugs:
            print("-", plugin.__name__.split(".")[-1])
            if options.description:
                desc = plugin.help()
                if desc:
                    print(citellus.indent(text=desc, amount=4))
        return

    # Prefill enabled citellus plugins from args
    if not citellus.extensions:
        extensions, exttriggers = citellus.initPymodules()
    else:
        extensions = citellus.extensions

    # Grab the data
    sosreports = options.sosreports

    # If we've provided a hosts file, use ansible to grab the data from them
    if options.hosts:
        ansible = citellus.which("ansible-playbook")
        if not ansible:
            LOG.err(_("No ansible-playbook support found, skipping"))
        else:
            LOG.info("Grabbing data from remote hosts with Ansible")
            # Grab data from ansible hosts

            # Disable Ansible retry files creation:
            os.environ['ANSIBLE_RETRY_FILES_ENABLED'] = "0"

            if options.loglevel == 'DEBUG':
                # Keep ansible remote files for debug
                os.environ['ANSIBLE_KEEP_REMOTE_FILES'] = "1"
            command = "%s -i %s %s" % (ansible, options.hosts,
                                       os.path.join(maguidir, 'remote.yml'))
            LOG.debug("Running: %s " % command)
            citellus.execonshell(filename=command)

            # Now check the hosts we got logs from:
            hosts = citellus.findplugins(
                folders=glob.glob('/tmp/citellus/hostrun/*'),
                executables=False,
                fileextension='.json')
            for host in hosts:
                sosreports.append(os.path.dirname(host['plugin']))

    # Get all data from hosts for all plugins, etc
    if options.output:
        dooutput = options.output
    else:
        dooutput = False

    if len(sosreports) > options.max_hosts:
        print("Maximum number of sosreports provided, exitting")
        sys.exit(0)

    citellusplugins = []
    # Prefill with all available plugins and the ones we want to filter for
    for extension in extensions:
        citellusplugins.extend(extension.listplugins())

    global allplugins
    allplugins = citellusplugins

    # By default, flatten plugin list for all extensions
    newplugins = []
    for each in citellusplugins:
        newplugins.extend(each)
    citellusplugins = newplugins

    def runmaguiandplugs(sosreports, citellusplugins, filename=dooutput,
                         extranames=None, serveruri=False, onlysave=False,
                         result=None):
        """
        Runs magui and magui plugins

        :param serveruri: Server uri to POST the analysis
        :param sosreports: sosreports to process
        :param citellusplugins: citellusplugins to run
        :param filename: filename to save to
        :param extranames: additional filenames used
        :param onlysave: Bool: Defines if we just want to save results
        :param result: Results to write to disk
        :return: results of execution
        """
        start_time = time.time()
        if not onlysave and not result:
            # Run with all plugins so that we get all data back
            grouped = domagui(sosreports=sosreports,
                              citellusplugins=citellusplugins)

            # Run Magui plugins
            result = []
            for plugin in magplugs:
                plugstart_time = time.time()
                # Get output from plugin
                data = filterresults(
                    data=grouped,
                    triggers=magtriggers[plugin.__name__.split(".")[-1]])
                returncode, out, err = plugin.run(data=data,
                                                  quiet=options.quiet)
                updates = {'rc': returncode, 'out': out, 'err': err}
                # Derive category/subcategory from the plugin path below the
                # magui plugins folder
                subcategory = os.path.split(plugin.__file__)[0].replace(
                    os.path.join(maguidir, 'plugins', ''), '')
                if subcategory:
                    if len(os.path.normpath(subcategory).split(os.sep)) > 1:
                        category = os.path.normpath(subcategory).split(
                            os.sep)[0]
                    else:
                        category = subcategory
                        subcategory = ""
                else:
                    category = ""
                mydata = {
                    'plugin': plugin.__name__.split(".")[-1],
                    'name': "magui: %s" % os.path.basename(
                        plugin.__name__.split(".")[-1]),
                    'id': hashlib.md5(
                        plugin.__file__.replace(
                            maguidir, '').encode('UTF-8')).hexdigest(),
                    'description': plugin.help(),
                    'long_name': plugin.help(),
                    'result': updates,
                    'time': time.time() - plugstart_time,
                    'category': category,
                    'subcategory': subcategory
                }
                result.append(mydata)
        if filename:
            branding = _(" ")
            citellus.write_results(results=result, filename=filename,
                                   source='magui', path=sosreports,
                                   time=time.time() - start_time,
                                   branding=branding, web=True,
                                   extranames=extranames,
                                   serveruri=serveruri)
        return result

    results = runmaguiandplugs(sosreports=sosreports,
                               citellusplugins=citellusplugins,
                               filename=options.output,
                               serveruri=options.call_home)

    # Now we've Magui saved for the whole execution provided in 'results' var
    # Start working on autogroups
    for result in results:
        if result['plugin'] == 'metadata-outputs':
            autodata = result['result']['err']

    print(_("Running magui for autogroups:\n"))
    groups = autogroups(autodata)
    processedgroups = {}
    filenames = []
    for group in groups:
        basefilename = os.path.splitext(options.output)
        filename = basefilename[0] + "-" + group + basefilename[1]
        print(filename)
        runautogroup = True
        # Skip groups whose membership matches an already-processed group
        for progroup in processedgroups:
            if groups[group] == processedgroups[progroup]:
                runautogroup = False
                runautofile = progroup
        if runautogroup:
            # Analysis was missing for this group, run
            runmaguiandplugs(sosreports=groups[group],
                             citellusplugins=citellusplugins,
                             filename=filename,
                             extranames=options.output)
            filenames.append(filename)
        else:
            # Copy file instead of run as it was already existing
            LOG.debug("Copying old file from %s to %s" % (runautofile,
                                                          filename))
            shutil.copyfile(runautofile, filename)
        processedgroups[filename] = groups[group]
    print(_("\nFinished autogroup generation."))

    if len(filenames) > 0:
        # We've written additional files, so save again magui.json with
        # additional references
        results = runmaguiandplugs(sosreports=sosreports,
                                   citellusplugins=citellusplugins,
                                   filename=options.output,
                                   extranames=filenames, onlysave=True,
                                   result=results)
    print("\nResults written to %s" % options.output)
def run(data, quiet=False):  # do not edit this line
    """
    Executes plugin

    Builds one synthetic "profile" entry per profile definition found in
    pluginsdir; the entry's overall rc is failed if any member plugin
    failed, okay otherwise.

    :param quiet: be more silent on returned information
    :param data: data to process (dict of plugin results keyed by plugin id)
    :return: data dict extended with one entry per profile
    """
    # prefill plugins we had used:
    plugins = []
    for item in data:
        plugin = {'plugin': data[item]['plugin'], 'id': data[item]['id']}
        plugins.append(plugin)

    # Find available profile definitions
    profiles = citellus.findplugins(folders=[pluginsdir], executables=False,
                                    fileextension='.txt')

    for item in profiles:
        uid = citellus.getids(plugins=[item])[0]
        profile = item['plugin']
        plugin = dict(item)

        # Precreate storage for this profile
        name = "Profiles: %s" % os.path.basename(
            os.path.splitext(profile.replace(pluginsdir, ''))[0])
        subcategory = ''
        category = name

        data[uid] = {
            "category": category,
            "hash": item['hash'],
            "plugin": item['plugin'],
            "name": name,
            "result": {
                "rc": 0,
                "err": "",
                "out": ""
            },
            "time": 0,
            "backend": "profile",
            "id": uid,
            "subcategory": subcategory
        }

        # Extract header metadata ("# description:", etc.) from the profile
        # file; the slices strip the header prefix from the matched line.
        # BUGFIX: regexps are now raw strings; '\A' in a plain string
        # literal is an invalid escape sequence (DeprecationWarning, and a
        # SyntaxWarning on modern Python).
        metadata = {
            'description':
                citellus.regexpfile(filename=plugin['plugin'],
                                    regexp=r'\A# description:')[14:].strip(),
            'long_name':
                citellus.regexpfile(filename=plugin['plugin'],
                                    regexp=r'\A# long_name:')[12:].strip(),
            'bugzilla':
                citellus.regexpfile(filename=plugin['plugin'],
                                    regexp=r'\A# bugzilla:')[11:].strip(),
            'priority': int(
                citellus.regexpfile(filename=plugin['plugin'],
                                    regexp=r'\A# priority:')[11:].strip()
                or 0)
        }
        data[uid].update(metadata)

        # start with OK status
        # Return codes are read from the environment set by the framework
        overall = int(os.environ['RC_OKAY'])
        failed = int(os.environ['RC_FAILED'])

        # Start assembling data for the plugins relevant for profile
        data[uid]['result']['err'] = ''
        ids = plugidsforprofile(profile=profile, plugins=plugins)
        new_results = []
        for id in ids:
            if id in data:
                new_results.append({
                    'plugin_id': id,
                    'plugin': data[id]['plugin'].replace(
                        os.path.join(citellus.citellusdir, 'plugins'), ''),
                    'err': data[id]['result']['err'].strip(),
                    'rc': data[id]['result']['rc']
                })
                if data[id]['result']['rc'] == failed:
                    # If test is failed, return global as failed
                    overall = failed

        data[uid]['result']['err'] = json.dumps(new_results)
        data[uid]['components'] = ids
        data[uid]['result']['rc'] = overall
    return data