def run(data, quiet=False):  # do not edit this line
    """
    Executes plugin

    Marks the chrony clock check as skipped when the ntpd clock check
    already passed (only one time-sync daemon needs to be OK).

    :param quiet: be more silent on returned information
    :param data: data to process
    :return: returncode, out, err
    """

    # Use calculate ID instead of getid because of execution loop
    sourceid = citellus.calcid(string='/plugins/core/system/clock-1-ntpd.sh')
    targetid = citellus.calcid(string='/plugins/core/system/clock-1-chrony.sh')

    skipped = int(os.environ['RC_SKIPPED'])
    okay = int(os.environ['RC_OKAY'])

    mangle = False

    # Grab source data
    if sourceid in data:
        if data[sourceid]['result']['rc'] == okay:
            mangle = True

    # BUGFIX: also require targetid to be present; without this guard a
    # KeyError is raised when the chrony plugin did not execute (the sibling
    # system-role/firewall hook already performs this check).
    if mangle and targetid in data:
        # We now fake result as SKIPPED and copy to datahook dict the new data
        data[targetid]['datahook'] = {}
        data[targetid]['datahook']['prior'] = dict(data[targetid]['result'])
        newresults = dict(data[targetid]['result'])
        newresults['rc'] = skipped
        newresults[
            'err'] = 'Marked as skipped by data hook %s' % os.path.basename(
                __file__).split(os.sep)[0]
        data[targetid]['result'] = newresults
        citellus.LOG.debug("Data mangled for plugin %s:" % data[targetid]['plugin'])

    return data
def run(data, quiet=False):  # do not edit this line
    """
    Executes plugin

    Skips the firewall check whenever the OpenStack system-role metadata
    plugin reported a known role (err != "unknown").

    :param quiet: be more silent on returned information
    :param data: data to process
    :return: returncode, out, err
    """

    # Use calculate ID instead of getid because of execution loop
    sourceid = citellus.calcid(string="/plugins/metadata/openstack/system-role.sh")
    targetid = citellus.calcid(string="/plugins/core/system/firewall.sh")

    skipped = int(os.environ["RC_SKIPPED"])

    # Decide whether the target result must be rewritten
    mangle = sourceid in data and data[sourceid]["result"]["err"] != "unknown"

    if mangle and targetid in data:
        # Preserve the original result under 'datahook' and fake it as SKIPPED
        prior = dict(data[targetid]["result"])
        data[targetid]["datahook"] = {"prior": prior}

        faked = dict(prior)
        faked["rc"] = skipped
        faked["err"] = (
            "Marked as skipped by data hook %s"
            % os.path.basename(__file__).split(os.sep)[0]
        )
        data[targetid]["result"] = faked
        citellus.LOG.debug("Data mangled for plugin %s:" % data[targetid]["plugin"])

    return data
def run(data, quiet=False):  # do not edit this line
    """
    Executes plugin

    Injects a fake failed result that points users at the Citellus kbase
    link when the branding string identifies a GSS build.

    :param quiet: be more silent on returned information
    :param data: data to process
    :return: returncode, out, err
    """

    # Use calculated ID instead of getid because of execution loop
    fakeid = citellus.calcid(string=__file__)

    # load here branding
    string = _(" ")

    # Only act on GSS-branded builds
    if 'GSS' not in string:
        return data

    # We now fake results to list kbase for linking
    fakedata = {
        "category": "support",
        "hash": "c6e2fd181c31e921b3a9b1c3677f143c",
        "description": "Reports kbase for Citellus information",
        "plugin": __file__,
        "name": "Citellus Kbase reporter",
        "priority": 1000,
        "long_name": "https://access.redhat.com/solutions/3405671",
        "bugzilla": "",
        "result": {
            "rc": citellus.RC_FAILED,
            "err": "Please do link provided kbase https://access.redhat.com/solutions/3405671 for metrics on usefulness of the tool",
            "out": "",
        },
        "time": 0,
        "backend": "core",
        "id": fakeid,
        "subcategory": "gss/cee",
    }
    data.update({fakeid: fakedata})
    return data
def listplugins(options=None):
    """
    List available plugins

    :param options: argparse options provided
    :return: plugin object generator
    """

    # IDIOM: replace bare try/except around options.prio with getattr;
    # priority filter defaults to 0 when absent
    prio = getattr(options, 'prio', 0) if options else 0

    plugins = citellus.findplugins(folders=[pluginsdir], executables=False,
                                   fileextension=".txt", extension=extension,
                                   prio=prio)

    # check for multiple files specified as per the 'path' by using "," as separator
    newplugins = []
    for plugin in plugins:
        if ',' not in plugin['path']:
            newplugins.append(plugin)
        else:
            # Path contains ',' so we fake extra plugins for each path
            for path in plugin['path'].split(","):
                # Clone plugin dictionary:
                newplugin = dict(plugin)
                newplugin['name'] = "Check %s" % path.replace('${CITELLUS_ROOT}', '')
                newplugin['path'] = path
                newplugin['description'] = "%s: %s" % (
                    plugin['description'],
                    path.replace('${CITELLUS_ROOT}', ''))
                newplugin['id'] = "%s-%s" % (plugin['id'],
                                             citellus.calcid(string=path))
                newplugins.append(newplugin)
    yield newplugins
def init():
    """
    Initializes module

    :return: List of triggers for Plugin
    """
    # Trigger on the faraday MTU check results
    plugpath = "/plugins/faraday/positive/network/mtus.sh"
    return [citellus.calcid(string=plugpath)]
def init():
    """
    Initializes module

    :return: List of triggers for Plugin
    """
    # Trigger on the faraday multipath LUN check results
    plugpath = '/plugins/faraday/positive/system/multipathluns.sh'
    return [citellus.calcid(string=plugpath)]
def listplugins(options=None):
    """
    List available plugins

    Expands node-problem-detector JSON rule files into one fake plugin
    per rule pattern.

    :param options: argparse options provided
    :return: plugin object generator
    """

    # IDIOM: replace bare try/except around options.prio with getattr;
    # priority filter defaults to 0 when absent
    prio = getattr(options, "prio", 0) if options else 0

    plugins = []

    if options and options.extraplugintree:
        folders = [pluginsdir, os.path.join(options.extraplugintree, extension)]
    else:
        folders = [pluginsdir]

    for plugin in citellus.findplugins(
        folders=folders,
        executables=False,
        fileextension=".json",
        extension=extension,
        prio=prio,
        options=options,
    ):
        filename = plugin["plugin"]
        # BUGFIX: use a context manager so the file descriptor is closed
        # (was json.load(open(...)), which leaked the handle)
        with open(filename, "r") as jsonfd:
            data = json.load(jsonfd)
        if "logPath" in data and "rules" in data:
            path = data["logPath"]
            for rule in data["rules"]:
                # Clone plugin dictionary:
                newplugin = dict(plugin)
                newplugin["name"] = "Check %s for %s" % (path, rule["pattern"])
                newplugin["category"] = "node-problem-detector"
                newplugin["path"] = "%s%s" % ("${CITELLUS_ROOT}", path)
                newplugin["description"] = "%s: %s" % (
                    plugin["description"],
                    path.replace("${CITELLUS_ROOT}", ""),
                )
                newplugin["id"] = "%s%s" % (
                    plugin["id"],
                    citellus.calcid(string=rule["pattern"]),
                )
                newplugin["pattern"] = rule["pattern"]
                newplugin["reason"] = rule["reason"]
                plugins.append(dict(newplugin))
    yield plugins
def listplugins(options=None):
    """
    List available plugins

    :param options: argparse options provided
    :return: plugin object generator
    """

    # IDIOM: replace bare try/except around options.prio with getattr;
    # priority filter defaults to 0 when absent
    prio = getattr(options, "prio", 0) if options else 0

    if options and options.extraplugintree:
        folders = [pluginsdir, os.path.join(options.extraplugintree, extension)]
    else:
        folders = [pluginsdir]

    plugins = citellus.findplugins(
        folders=folders,
        executables=False,
        fileextension=".txt",
        extension=extension,
        prio=prio,
        options=options,
    )

    # check for multiple files specified as per the 'path' by using "," as separator
    newplugins = []
    for plugin in plugins:
        if "," not in plugin["path"]:
            newplugins.append(plugin)
        else:
            # Path contains ',' so we fake extra plugins for each path
            for path in plugin["path"].split(","):
                # Clone plugin dictionary:
                newplugin = dict(plugin)
                newplugin["name"] = "Check %s" % path.replace(
                    "${CITELLUS_ROOT}", "")
                newplugin["path"] = path
                newplugin["description"] = "%s: %s" % (
                    plugin["description"],
                    path.replace("${CITELLUS_ROOT}", ""),
                )
                newplugin["id"] = "%s-%s" % (plugin["id"],
                                             citellus.calcid(string=path))
                newplugins.append(newplugin)
    yield newplugins
def listplugins(options=None):
    """
    List available plugins

    Expands node-problem-detector JSON rule files into one fake plugin
    per rule pattern.

    :param options: argparse options provided
    :return: plugin object generator
    """

    # IDIOM: replace bare try/except around options.prio with getattr;
    # priority filter defaults to 0 when absent
    prio = getattr(options, 'prio', 0) if options else 0

    plugins = []
    for plugin in citellus.findplugins(folders=[pluginsdir], executables=False,
                                       fileextension=".json",
                                       extension=extension, prio=prio):
        filename = plugin['plugin']
        # BUGFIX: use a context manager so the file descriptor is closed
        # (was json.load(open(...)), which leaked the handle)
        with open(filename, 'r') as jsonfd:
            data = json.load(jsonfd)
        if 'logPath' in data and 'rules' in data:
            path = data['logPath']
            for rule in data['rules']:
                # Clone plugin dictionary:
                newplugin = dict(plugin)
                newplugin['name'] = "Check %s for %s" % (path, rule['pattern'])
                newplugin['category'] = 'node-problem-detector'
                newplugin['path'] = '%s%s' % ("${CITELLUS_ROOT}", path)
                newplugin['description'] = "%s: %s" % (
                    plugin['description'],
                    path.replace('${CITELLUS_ROOT}', ''))
                newplugin['id'] = "%s%s" % (
                    plugin['id'], citellus.calcid(string=rule['pattern']))
                newplugin['pattern'] = rule['pattern']
                newplugin['reason'] = rule['reason']
                plugins.append(dict(newplugin))
    yield plugins
def run(data, quiet=False):  # do not edit this line
    """
    Executes plugin

    Splits the multipathluns faraday result (a ";"-separated list of
    "device:size" pairs) into one fake plugin entry per LUN so magui can
    compare them individually.

    :param quiet: be more silent on returned information
    :param data: data to process
    :return: returncode, out, err
    """

    # Act on all faraday-exec plugins
    idstodel = []
    datatoadd = []

    mpathids = citellus.calcid(
        string="/plugins/faraday/positive/system/multipathluns.sh"
    )

    # Loop over plugin id's in data
    for pluginid in data:
        if data[pluginid]["id"] == mpathids:
            # Make a copy of dict for working on it
            plugin = dict(data[pluginid])

            # Add plugin ID to be removed for resulting data so magui doesn't compare the whole set of nics at the same time
            idstodel.append(str(pluginid))

            err = str(plugin["result"]["err"])
            rc = int(plugin["result"]["rc"])
            plugpath = str(plugin["plugin"])
            # IDIOM: renamed from 'id' to avoid shadowing the builtin
            baseid = str(plugin["id"])
            ln = str(plugin["long_name"])
            desc = str(plugin["description"])

            # Iterate over NIC pairs
            if ";" in err:
                for pair in err.split(";"):
                    if pair != "":
                        # For each device:size pair, split on ":" for LUN/size and fake plugin entry
                        lun = pair.split(":")[0]
                        newid = "%s-%s" % (baseid, citellus.calcid(string=lun))
                        update = {
                            "id": newid,
                            "description": "%s: %s" % (desc, lun),
                            "long_name": "%s: %s" % (ln, lun),
                            "plugin": "%s-%s" % (plugpath, lun),
                            "name": "mpath: %s" % lun,
                        }
                        resultupdate = {"result": {"err": pair, "out": "", "rc": rc}}
                        update.update(resultupdate)
                        # Update plugin dictionary with forged values
                        plugin.update(dict(update))
                        plugin["result"]["err"] = str(pair)
                        # Append new modified plugin to dataset
                        datatoadd.append({newid: dict(plugin)})

    # Process id's to remove
    for delid in idstodel:
        del data[delid]

    # Process data to add
    for item in datatoadd:
        data.update(item)

    return data
def run(data, quiet=False):  # do not edit this line
    """
    Executes plugin

    Expands the rhv-log-collector-analyzer base result (a JSON document
    stored in 'err') into one fake plugin entry per analyzer item.

    :param quiet: be more silent on returned information
    :param data: data to process
    :return: returncode, out, err
    """

    # Act on all faraday-exec plugins
    idstodel = []
    datatoadd = []

    rhvlcid = citellus.calcid(
        string="/plugins/rhv-log-collector-analyzer/virtualization/base.txt")

    # Loop over plugin id's in data
    for pluginid in data:
        if (data[pluginid]["id"] == rhvlcid
                and data[pluginid]["result"]["rc"] == citellus.RC_OKAY):
            # Make a copy of dict for working on it
            try:
                plugin = json.loads(
                    data[pluginid]["result"]["err"]
                )["rhv-log-collector-analyzer"]
            except (ValueError, KeyError, TypeError):
                # BUGFIX: fall back to an empty list, not None — iterating
                # None below raised TypeError when the payload was missing
                # or malformed. Also narrows the previous bare except.
                plugin = []

            # Add plugin ID to be removed for resulting data
            idstodel.append(str(pluginid))

            # Iterate over plugindata items
            for item in plugin:
                # Item ID in log-collector is not unique
                newid = item["id"]
                if "WARNING" in item["type"]:
                    returncode = citellus.RC_FAILED
                else:
                    returncode = citellus.RC_OKAY

                # Write plugin entry for the individual result
                newitem = {
                    newid: {
                        "name": "rhv-log-collector-analyzer: %s" % item["name"],
                        "description": item["description"],
                        "long_name": item["name"],
                        "id": newid,
                        "category": "",
                        "priority": 400,
                        "bugzilla": item["bugzilla"],
                        "time": item["time"],
                        "subcategory": "",
                        "hash": item["hash"],
                        "result": {
                            "out": "",
                            "err": "%s" % item["result"],
                            "rc": returncode,
                        },
                        "plugin": item["path"],
                        "backend": "rhv-log-collector-analyzer",
                        "kb": item["kb"],
                    }
                }
                datatoadd.append(newitem)

    # Process id's to remove
    for delid in idstodel:
        del data[delid]

    # Process data to add
    for item in datatoadd:
        data.update(item)

    return data
def run(data, quiet=False):  # do not edit this line
    """
    Executes plugin

    Expands the rhv-log-collector-analyzer base result (a JSON document
    stored in 'err') into one fake plugin entry per analyzer item.

    :param quiet: be more silent on returned information
    :param data: data to process
    :return: returncode, out, err
    """

    # Act on all faraday-exec plugins
    idstodel = []
    datatoadd = []

    rhvlcid = citellus.calcid(
        string='/plugins/rhv-log-collector-analyzer/virtualization/base.txt')

    # Loop over plugin id's in data
    for pluginid in data:
        if data[pluginid]['id'] == rhvlcid and data[pluginid]['result'][
                'rc'] == citellus.RC_OKAY:
            # Make a copy of dict for working on it
            try:
                plugin = json.loads(
                    data[pluginid]['result']['err']
                )['rhv-log-collector-analyzer']
            except (ValueError, KeyError, TypeError):
                # BUGFIX: fall back to an empty list, not None — iterating
                # None below raised TypeError when the payload was missing
                # or malformed. Also narrows the previous bare except.
                plugin = []

            # Add plugin ID to be removed for resulting data
            idstodel.append(str(pluginid))

            # Iterate over plugindata items
            for item in plugin:
                # Item ID in log-collector is not unique
                newid = item['id']
                if 'WARNING' in item['type']:
                    returncode = citellus.RC_FAILED
                else:
                    returncode = citellus.RC_OKAY

                # Write plugin entry for the individual result
                newitem = {
                    newid: {
                        'name': 'rhv-log-collector-analyzer: %s' % item['name'],
                        'description': item['description'],
                        'long_name': item['name'],
                        'id': newid,
                        'category': '',
                        'priority': 400,
                        'bugzilla': item['bugzilla'],
                        'time': item['time'],
                        'subcategory': '',
                        'hash': item['hash'],
                        'result': {
                            'out': '',
                            'err': "%s" % item['result'],
                            'rc': returncode
                        },
                        'plugin': item['path'],
                        'backend': 'rhv-log-collector-analyzer',
                        'kb': item['kb']
                    }
                }
                datatoadd.append(newitem)

    # Process id's to remove
    for delid in idstodel:
        del data[delid]

    # Process data to add
    for item in datatoadd:
        data.update(item)

    return data
def run(data, quiet=False):  # do not edit this line
    """
    Executes plugin

    Splits every faraday plugin result (";"-separated "key:value" pairs in
    'err') into one fake plugin entry per pair so magui can compare values
    individually.

    :param quiet: be more silent on returned information
    :param data: data to process
    :return: returncode, out, err
    """

    # Act on all faraday-exec plugins
    idstodel = []
    datatoadd = []

    # Loop over plugin id's in data
    faradayids = citellus.getids(include=['faraday/positive',
                                          'faraday/negative'])
    for pluginid in data:
        if data[pluginid]['id'] in faradayids:
            # Make a copy of dict for working on it
            plugin = dict(data[pluginid])

            # Add plugin ID to be removed for resulting data so magui doesn't compare the whole set of nics at the same time
            idstodel.append(str(pluginid))

            err = str(plugin['result']['err'])
            rc = int(plugin['result']['rc'])
            plugpath = str(plugin['plugin'])
            # IDIOM: renamed from 'id' to avoid shadowing the builtin
            baseid = str(plugin['id'])
            ln = str(plugin['long_name'])
            desc = str(plugin['description'])
            name = str(plugin['name'])

            # Iterate over NIC pairs
            for pair in err.split(";"):
                if pair != '':
                    # For each value split and fake plugin entry
                    key = pair.split(":")[0]
                    newid = "%s-%s" % (baseid, citellus.calcid(string=key))
                    update = {'id': newid,
                              'description': '%s: %s' % (desc, key),
                              'long_name': '%s: %s' % (ln, key),
                              'plugin': '%s-%s' % (plugpath, key),
                              'name': 'Faraday: %s' % name}
                    resultupdate = {'result': {'err': pair, 'out': '', 'rc': rc}}
                    update.update(resultupdate)
                    # Update plugin dictionary with forged values
                    plugin.update(dict(update))
                    plugin['result']['err'] = str(pair)
                    # Append new modified plugin to dataset
                    datatoadd.append({newid: dict(plugin)})

    # Process id's to remove
    for delid in idstodel:
        del data[delid]

    # Process data to add
    for item in datatoadd:
        data.update(item)

    return data
def run(data, quiet=False, options=None):  # do not edit this line
    """
    Executes plugin

    Loads insights-*.json reports from CITELLUS_ROOT and fakes one plugin
    entry per reported rule (failed) and per skipped rule (skipped).

    :param quiet: be more silent on returned information
    :param data: data to process
    :return: returncode, out, err
    """

    skipped = int(os.environ["RC_SKIPPED"])
    failed = int(os.environ["RC_FAILED"])

    jsons = glob.glob(
        os.path.join(os.environ["CITELLUS_ROOT"], "insights-*.json"))

    mydata = []

    for insijson in jsons:
        filenamewithpath = insijson
        if (os.path.exists(filenamewithpath)
                and os.path.isfile(filenamewithpath)
                and os.access(filenamewithpath, os.R_OK)):
            with open(filenamewithpath) as json_file:
                # NOTE: lines are parsed independently; only the last
                # successfully parsed line is kept
                for line in json_file.readlines():
                    try:
                        mydata = json.loads(line)
                    except ValueError:
                        # BUGFIX: narrow the bare except to JSON decode errors
                        # and log the file path (the original formatted the
                        # file object)
                        citellus.LOG.debug(
                            "Error processing dataline in %s, skipping"
                            % filenamewithpath)

            if mydata and isinstance(mydata, dict):
                pass
            else:
                mydata = []
        else:
            mydata = []

        # Fill plugins with actual report received
        if "reports" in mydata:
            for plugin in mydata["reports"]:
                # Fake plugin entries to integrate into 'data' dictionary
                pluginid = citellus.calcid(plugin["component"])
                data[pluginid] = {}
                data[pluginid]["id"] = pluginid
                data[pluginid]["plugin"] = "%s.%s" % (insijson,
                                                      plugin["component"])
                if "links" in plugin and "kcs" in plugin["links"]:
                    kcs = plugin["links"]["kcs"]
                    if isinstance(kcs, str):
                        # BUGFIX: keep 'kb' a string — the original called
                        # .split() here, producing a list while the list
                        # branch and every other path store a string
                        data[pluginid]["kb"] = kcs
                    elif isinstance(kcs, list):
                        data[pluginid]["kb"] = " ".join(kcs)
                    else:
                        # Robustness: unexpected kcs type previously left
                        # 'kb' unset
                        data[pluginid]["kb"] = ""
                else:
                    data[pluginid]["kb"] = ""
                data[pluginid]["category"] = "insights"
                data[pluginid]["hash"] = pluginid
                data[pluginid]["backend"] = "insights-core-unifier-merge-loader"
                data[pluginid]["name"] = "%s-%s" % (insijson,
                                                    plugin["rule_id"])
                data[pluginid]["result"] = {}
                data[pluginid]["result"]["err"] = "%s" % plugin["details"]
                data[pluginid]["result"]["rc"] = failed
                data[pluginid]["result"]["out"] = ""
                data[pluginid]["priority"] = 666

                # Fill empty values for missing fields
                for key in [
                        "description",
                        "bugzilla",
                        "path",
                        "time",
                        "long_name",
                        "subcategory",
                ]:
                    data[pluginid]["%s" % key] = ""

        # Process plugins in skip to fake skipped entries
        if "skips" in mydata:
            for plugin in mydata["skips"]:
                pluginid = citellus.calcid(plugin["rule_fqdn"])
                data[pluginid] = {}
                data[pluginid]["id"] = pluginid
                data[pluginid]["plugin"] = "insights.%s" % plugin["rule_fqdn"]
                data[pluginid]["category"] = "insights"
                data[pluginid]["hash"] = pluginid
                data[pluginid]["backend"] = "insights-core-unifier-merge-loader"
                data[pluginid]["name"] = plugin["rule_fqdn"]
                data[pluginid]["result"] = {}
                data[pluginid]["result"]["err"] = "%s" % plugin["reason"]
                data[pluginid]["result"]["rc"] = skipped
                data[pluginid]["result"]["out"] = ""
                data[pluginid]["priority"] = 666

                # Fill empty values for missing fields
                for key in [
                        "description",
                        "bugzilla",
                        "path",
                        "time",
                        "long_name",
                        "subcategory",
                        "kb",
                ]:
                    data[pluginid]["%s" % key] = ""

    return data