def processpkt_netconfig(self, drone, unused_srcaddr, jsonobj):
    '''We want to trigger ARP discovery when we hear a 'netconfig' packet.

    Build up the parameters for the discovery action, then send it to
    drone.request_discovery(...).  To build up the parameters, you use
    ConfigFile.agent_params() which will pull values from the system
    configuration.
    '''
    unused_srcaddr = unused_srcaddr  # make pylint happy
    init_params = ConfigFile.agent_params(self.config, 'discovery',
                                          '#ARP', drone.designation)
    # (FIX: removed unused local 'netconfiginfo = pyConfigContext(jsonobj)')
    data = jsonobj['data']  # the data portion of the JSON message
    discovery_args = []
    for devname in data.keys():
        #print >> sys.stderr, "*** devname:", devname
        devinfo = data[devname]
        # Only ARP-discover interfaces that are up, have carrier, and
        # carry a real (non-zero, non-empty) MAC address.
        if (str(devinfo['operstate']) == 'up'
                and str(devinfo['carrier']) == 'True'
                and str(devinfo['address']) != '00-00-00-00-00-00'
                and str(devinfo['address']) != ''):
            params = pyConfigContext(init_params)
            params[CONFIGNAME_INSTANCE] = '_ARP_' + devname
            params[CONFIGNAME_DEVNAME] = devname
            #print >> sys.stderr, '#ARP parameters:', params
            discovery_args.append(params)
    if discovery_args:
        drone.request_discovery(discovery_args)
def processpkt_netconfig(self, drone, unused_srcaddr, jsonobj):
    '''We want to trigger ARP discovery when we hear a 'netconfig' packet.

    Build up the parameters for the discovery action, then send it to
    drone.request_discovery(...).  To build up the parameters, you use
    ConfigFile.agent_params() which will pull values from the system
    configuration.
    '''
    unused_srcaddr = unused_srcaddr  # make pylint happy
    init_params = ConfigFile.agent_params(self.config, 'discovery',
                                          '#ARP', drone.designation)
    # (FIX: removed unused local 'netconfiginfo = pyConfigContext(jsonobj)')
    data = jsonobj['data']  # the data portion of the JSON message
    discovery_args = []
    for devname in data.keys():
        #print >> sys.stderr, "*** devname:", devname
        devinfo = data[devname]
        # Only ARP-discover interfaces that are up, have carrier, and
        # carry a real (non-zero, non-empty) MAC address.
        if (str(devinfo['operstate']) == 'up'
                and str(devinfo['carrier']) == 'True'
                and str(devinfo['address']) != '00-00-00-00-00-00'
                and str(devinfo['address']) != ''):
            params = pyConfigContext(init_params)
            # FIX: sibling implementations name the instance '_ARP_<dev>';
            # this copy used '#ARP_<dev>' -- made consistent.
            params[CONFIGNAME_INSTANCE] = '_ARP_' + devname
            params[CONFIGNAME_DEVNAME] = devname
            #print >> sys.stderr, '#ARP parameters:', params
            discovery_args.append(params)
    if discovery_args:
        drone.request_discovery(discovery_args)
def testme():
    '''Smoke-test Transaction packet accumulation and commit.

    Builds a reliable-UDP I/O object signed with a pySignFrame, queues
    two SENDEXPECTHB packets on a Transaction, prints the transaction
    JSON, and commits the transaction to the I/O object.
    '''
    from AssimCtypes import CONFIGNAME_OUTSIG
    from AssimCclasses import pyReliableUDP, pyPacketDecoder, pySignFrame
    config = pyConfigContext(init={CONFIGNAME_OUTSIG: pySignFrame(1)})
    io = pyReliableUDP(config, pyPacketDecoder())
    trans = Transaction(encryption_required=False)
    destaddr = pyNetAddr('10.10.10.1:1984')
    addresses = (pyNetAddr('10.10.10.5:1984'), pyNetAddr('10.10.10.6:1984'))
    trans.add_packet(destaddr, FrameSetTypes.SENDEXPECTHB, addresses,
                     frametype=FrameTypes.IPPORT)
    trans.add_packet(
        pyNetAddr('10.10.10.1:1984'), FrameSetTypes.SENDEXPECTHB,
        (pyNetAddr('10.10.10.5:1984'), pyNetAddr('10.10.10.6:1984')),
        frametype=FrameTypes.IPPORT)
    # Python 2 print-to-stderr syntax (module predates Python 3 conversion)
    print >> sys.stderr, 'JSON: %s\n' % str(trans)
    print >> sys.stderr, 'JSON: %s\n' % str(pyConfigContext(str(trans)))
    trans.commit_trans(io)
def contexttests():
    '''GraphNodeExpression tests that need a context.

    Exercises FOREACH() and GraphNodeExpression.evaluate() against three
    contexts: ls-style file-attribute data, a multi-object "pie" context,
    and a nested single-object context.
    '''
    # JSON describing /var/log/audit/* ownership and permissions
    lsattrs='''{ "/var/log/audit/": {"owner": "root", "group": "root", "type": "d", "perms": {"owner":{"read":true, "write":true, "exec":true, "setid":false}, "group": {"read":true, "write":false, "exec":true, "setid":false}, "other": {"read":false, "write":false, "exec":false}, "sticky":false}, "octal": "0750"}, "/var/log/audit/audit.log": {"owner": "root", "group": "root", "type": "-", "perms": {"owner":{"read":true, "write":true, "exec":false, "setid":false}, "group": {"read":false, "write":false, "exec":false, "setid":false}, "other": {"read":false, "write":false, "exec":false}, "sticky":false}, "octal": "0600"}, "/var/log/audit/audit.log.1": {"owner": "root", "group": "root", "type": "-", "perms": {"owner":{"read":true, "write":false, "exec":false, "setid":false}, "group": {"read":false, "write":false, "exec":false, "setid":false}, "other": {"read":false, "write":false, "exec":false}, "sticky":false}, "octal": "0400"} }'''
    lscontext = ExpressionContext(pyConfigContext(lsattrs,))
    Pie_context = ExpressionContext((
        pyConfigContext({'a': {'b': 'c', 'pie': 3, 'pi': 3, 'const': 'constant'},
                         'f': {'g': 'h', 'pie': '3', 'pi': 3, 'const': 'constant'}}),
        pyConfigContext({'math': {'pi': 3.14159, 'pie': 3, 'const': 'constant'}}),
        pyConfigContext({'geography': {'Europe': 'big', 'const': 'constant'}}),
        ))
    complicated_context = ExpressionContext(pyConfigContext({'a': {'b': {'pie': 3}}}),)
    # The asserts below exercise True/False/None results: None appears when
    # some context object lacks the referenced attribute entirely.
    assert FOREACH(("EQ(False, $perms.group.write, $perms.other.write)",), lscontext) == True
    assert FOREACH(("EQ($pi, 3)",), Pie_context) == False
    assert FOREACH(("EQ($pie, 3)",), Pie_context) is None
    assert FOREACH(("$a", "EQ($pie, 3)"), complicated_context) == True
    assert FOREACH(("$a", "EQ($pie, 3.14159)"), complicated_context) == False
    assert FOREACH(("$a", "EQ($pi, 3.14159)"), complicated_context) == None
    assert FOREACH(("EQ($const, constant)",), Pie_context) == True
    assert GraphNodeExpression.evaluate('EQ($math.pie, 3)', Pie_context) == True
    assert FOREACH(("EQ($group, root)",), lscontext) == True
    assert FOREACH(("EQ($owner, root)",), lscontext) == True
    assert FOREACH(("AND(EQ($owner, root), EQ($group, root))",), lscontext) == True
    # Python 2 print-to-stderr syntax
    print >> sys.stderr, 'Context tests passed.'
def log_rule_results(self, results, drone, _srcaddr, discoveryobj, discovertype, rulesobj):
    '''Log the results of this set of rule evaluations.

    Compares the new results against the drone's previously-stored status
    for this discovery type, sends an event and emits one log line for
    each rule whose status changed, updates the scores, and finally saves
    the new results on the drone.
    '''
    status_name = Drone.bp_discoverytype_result_attrname(discovertype)
    if hasattr(drone, status_name):
        oldstats = pyConfigContext(getattr(drone, status_name))
    else:
        # First results for this discovery type on this drone
        oldstats = pyConfigContext({'pass': [], 'fail': [], 'ignore': [],
                                    'NA': [], 'score': 0.0})
    for stat in ('pass', 'fail', 'ignore', 'NA'):
        # Passes are logged at INFO; everything else is a warning
        logmethod = self.log.info if stat == 'pass' else self.log.warning
        for ruleid in results[stat]:
            # Find which bucket this rule was in last time (if any)
            oldstat = None
            for statold in ('pass', 'fail', 'ignore', 'NA'):
                if ruleid in oldstats[statold]:
                    oldstat = statold
                    break
            if oldstat == stat or stat == 'NA':
                # No change
                continue
            url = self.url(drone, ruleid, rulesobj[ruleid])
            BestPractices.send_rule_event(oldstat, stat, drone, ruleid, rulesobj, url)
            thisrule = rulesobj[ruleid]
            rulecategory = thisrule['category']
            logmethod('%s %sED %s rule %s: %s [%s]' % (drone, stat.upper(),
                      rulecategory, ruleid, url, thisrule['rule']))
    self.compute_score_updates(discoveryobj, drone, rulesobj, results, oldstats)
    setattr(drone, status_name, str(results))
def testme():
    '''Smoke-test Transaction: queue two SENDEXPECTHB packets, check the
    packet count after each add, commit, and verify the queue drains.'''
    from AssimCtypes import CONFIGNAME_OUTSIG
    from AssimCclasses import pyReliableUDP, pyPacketDecoder, pySignFrame
    config = pyConfigContext(init={CONFIGNAME_OUTSIG: pySignFrame(1)})
    io = pyReliableUDP(config, pyPacketDecoder())
    trans = Transaction(encryption_required=False)
    destaddr = pyNetAddr('10.10.10.1:1984')
    addresses = (pyNetAddr('10.10.10.5:1984'), pyNetAddr('10.10.10.6:1984'))
    trans.add_packet(destaddr, FrameSetTypes.SENDEXPECTHB,
                     addresses, frametype=FrameTypes.IPPORT)
    assert len(trans.tree['packets']) == 1
    trans.add_packet(pyNetAddr('10.10.10.1:1984'),
                     FrameSetTypes.SENDEXPECTHB,
                     (pyNetAddr('10.10.10.5:1984'),
                      pyNetAddr('10.10.10.6:1984')),
                     frametype=FrameTypes.IPPORT)
    assert len(trans.tree['packets']) == 2
    # Python 2 print-to-stderr syntax
    print >> sys.stderr, 'JSON: %s\n' % str(trans)
    print >> sys.stderr, 'JSON: %s\n' % str(pyConfigContext(str(trans)))
    trans.commit_trans(io)
    # Committing the transaction should empty the packet queue
    assert len(trans.tree['packets']) == 0
def contexttests():
    '''GraphNodeExpression tests that need a context.

    Exercises FOREACH() and GraphNodeExpression.evaluate() against three
    contexts: ls-style file-attribute data, a multi-object "pie" context,
    and a nested single-object context.
    '''
    # JSON describing /var/log/audit/* ownership and permissions
    lsattrs = '''{ "/var/log/audit/": {"owner": "root", "group": "root", "type": "d", "perms": {"owner":{"read":true, "write":true, "exec":true, "setid":false}, "group": {"read":true, "write":false, "exec":true, "setid":false}, "other": {"read":false, "write":false, "exec":false}, "sticky":false}, "octal": "0750"}, "/var/log/audit/audit.log": {"owner": "root", "group": "root", "type": "-", "perms": {"owner":{"read":true, "write":true, "exec":false, "setid":false}, "group": {"read":false, "write":false, "exec":false, "setid":false}, "other": {"read":false, "write":false, "exec":false}, "sticky":false}, "octal": "0600"}, "/var/log/audit/audit.log.1": {"owner": "root", "group": "root", "type": "-", "perms": {"owner":{"read":true, "write":false, "exec":false, "setid":false}, "group": {"read":false, "write":false, "exec":false, "setid":false}, "other": {"read":false, "write":false, "exec":false}, "sticky":false}, "octal": "0400"} }'''
    lscontext = ExpressionContext(pyConfigContext(lsattrs, ))
    Pie_context = ExpressionContext((
        pyConfigContext({
            'a': {'b': 'c', 'pie': 3, 'pi': 3, 'const': 'constant'},
            'f': {'g': 'h', 'pie': '3', 'pi': 3, 'const': 'constant'}
        }),
        pyConfigContext({'math': {'pi': 3.14159, 'pie': 3, 'const': 'constant'}}),
        pyConfigContext({'geography': {'Europe': 'big', 'const': 'constant'}}),
    ))
    complicated_context = ExpressionContext(
        pyConfigContext({'a': {'b': {'pie': 3}}}), )
    # The asserts below exercise True/False/None results: None appears when
    # some context object lacks the referenced attribute entirely.
    assert FOREACH(("EQ(False, $perms.group.write, $perms.other.write)", ), lscontext) == True
    assert FOREACH(("EQ($pi, 3)", ), Pie_context) == False
    assert FOREACH(("EQ($pie, 3)", ), Pie_context) is None
    assert FOREACH(("$a", "EQ($pie, 3)"), complicated_context) == True
    assert FOREACH(("$a", "EQ($pie, 3.14159)"), complicated_context) == False
    assert FOREACH(("$a", "EQ($pi, 3.14159)"), complicated_context) == None
    assert FOREACH(("EQ($const, constant)", ), Pie_context) == True
    assert GraphNodeExpression.evaluate('EQ($math.pie, 3)', Pie_context) == True
    assert FOREACH(("EQ($group, root)", ), lscontext) == True
    assert FOREACH(("EQ($owner, root)", ), lscontext) == True
    assert FOREACH(("AND(EQ($owner, root), EQ($group, root))", ), lscontext) == True
    # Python 2 print-to-stderr syntax
    print >> sys.stderr, 'Context tests passed.'
def processJSONevent(self, jsonstr):
    '''Process a single JSON event from our input stream.

    Builds a child environment from os.environ plus ASSIM_* variables
    derived from the event's 'extrainfo' and the scalar attributes of its
    associated object, then synchronously spawns every registered script
    with (eventtype, associated-object-class) as arguments.
    '''
    eventobj = pyConfigContext(jsonstr)
    aobj = eventobj['associatedobject']
    aobjclass = aobj['nodetype']
    eventtype = AssimEvent.eventtypenames[eventobj['eventtype']]
    env = {}
    # Initialize the child environment with our current environment
    for item in os.environ:
        env[item] = os.environ[item]
    # Add in things in 'extrainfo' (if any)
    if 'extrainfo' in eventobj and eventobj['extrainfo'] is not None:
        extrastuff = eventobj['extrainfo']
        for extra in extrastuff.keys():
            evextra = extrastuff[extra]
            env['ASSIM_%s' % extra] = str(evextra)
    # Add all the scalars in the associated object
    # (unicode/long are Python 2 builtins -- this module predates Python 3)
    for attr in aobj.keys():
        avalue = aobj[attr]
        if isinstance(avalue, (str, unicode, int, float, long, bool)):
            env['ASSIM_%s' % attr] = str(avalue)
    env['ASSIM_JSONobj'] = str(jsonstr)
    # It's an event we want our scripts to know about...
    # So, let them know!
    if DEBUG:
        print >> sys.stderr, 'TO RUN: %s' % (str(self.listscripts()))
    for script in self.listscripts():
        args = [script, eventtype, aobjclass]
        if DEBUG:
            print >> sys.stderr, 'STARTING SCRIPT: %s' % (str(args))
        # P_WAIT: run the scripts one at a time, waiting for each to finish
        os.spawnve(os.P_WAIT, script, args, env)
        if DEBUG:
            print >> sys.stderr, 'SCRIPT %s IS NOW DONE' % (str(args))
def compute_available_agents(context):
    '''Build (or fetch) a cache of all our available monitoring agents.

    The cache -- {class: {agentname: True}} -- is stored on the first node
    that carries '_init_monitoringagents' discovery data and returned.
    Returns {} when no node in the context carries such data.
    '''
    if not (hasattr(context, 'get') and hasattr(context, 'objects')):
        context = ExpressionContext(context)
    #CMAdb.log.debug('CREATING AGENT CACHE (%s)' % str(context))
    for node in context.objects:
        if hasattr(node, '_agentcache'):
            # Previously computed -- reuse the cached result
            return getattr(node, '_agentcache')
        if not hasattr(node, '__iter__') or '_init_monitoringagents' not in node:
            # This node carries no monitoring-agent discovery data
            continue
        agentdata = pyConfigContext(node['_init_monitoringagents'])['data']
        cache = {}
        for agentclass in agentdata.keys():
            bucket = cache.setdefault(agentclass, {})
            for agentname in agentdata[agentclass]:
                bucket[agentname] = True
        setattr(node, '_agentcache', cache)
        #CMAdb.log.debug('AGENT CACHE IS: %s' % str(cache))
        return cache
    #CMAdb.log.debug('RETURNING NO AGENT CACHE AT ALL!')
    return {}
def log_rule_results(self, results, drone, _srcaddr, discovertype, rulesobj):
    '''Log the results of this set of rule evaluations.

    Compares the new results with the drone's previously-stored status for
    this discovery type, emits one log line for each rule whose status
    changed, then stores the new results on the drone.
    '''
    status_name = 'BP_%s_rulestatus' % discovertype
    if hasattr(drone, status_name):
        oldstats = pyConfigContext(getattr(drone, status_name))
    else:
        # No previous results for this discovery type on this drone
        oldstats = {'pass': [], 'fail': [], 'ignore': [], 'NA': []}
    for stat in ('pass', 'fail', 'ignore'):
        # Passes are logged at INFO; everything else is a warning
        logmethod = self.log.info if stat == 'pass' else self.log.warning
        for ruleid in results[stat]:
            # Find which bucket this rule was in last time (if any)
            oldstat = None
            for statold in ('pass', 'fail', 'ignore', 'NA'):
                if ruleid in oldstats[statold]:
                    oldstat = statold
                    break
            if oldstat == stat or stat == 'NA':
                # No change
                continue
            thisrule = rulesobj[ruleid]
            rulecategory = thisrule['category']
            logmethod('%s %sED %s rule %s: %s [%s]' % (drone, stat.upper(),
                      rulecategory, ruleid,
                      self.url(drone, ruleid, rulesobj[ruleid]),
                      thisrule['rule']))
    # BUG FIX: store the results on this drone INSTANCE, not on the Drone
    # class -- the hasattr(drone, ...) above reads the per-drone attribute,
    # so writing to the class would leak one drone's status to all drones.
    setattr(drone, status_name, str(results))
def agent_params(config, agenttype, agentname, dronedesignation):
    '''We return the agent parameters for the given type, agent name and drone

    The most specific values take priority over the less specific values
    creating a 3-level value inheritance scheme.
    - Top level is for all agents.
    - Second level is for specific agents.
    - Third level is for specific agents on specific machines.
    We implement this.

    config:            configuration mapping to pull values from
    agenttype:         configuration section name (e.g. 'discovery')
    agentname:         name of the agent
    dronedesignation:  name of the drone (host) the agent runs on
    '''
    compoundname = '%s/%s' % (agentname, dronedesignation)
    subconfig = config[agenttype]
    result = pyConfigContext('{"type": "%s", "parameters":{}}' % agentname)
    if compoundname in subconfig:
        # NOTE(review): this replaces the whole result -- including the
        # 'type' and 'parameters' skeleton -- with the per-drone subtree;
        # a sibling version of this function assigns it to
        # result['parameters'] instead.  Confirm which is intended.
        result = subconfig[compoundname]
    if agentname in subconfig:
        for tag in subconfig[agentname]:
            if tag not in result:
                subval = subconfig[agentname][tag]
                if not hasattr(subval, 'keys'):  # copy scalar values only
                    result['parameters'][tag] = subconfig[agentname][tag]
    for tag in subconfig:
        if tag not in result:
            subval = subconfig[tag]
            if not hasattr(subval, 'keys'):  # copy scalar values only
                result['parameters'][tag] = subconfig[tag]
    return result
def processpkt_netconfig(self, drone, _unused_srcaddr, jsonobj):
    '''Kick off ARP discovery whenever we hear a 'netconfig' packet.

    The base parameters come from ConfigFile.agent_params() (which pulls
    values from the system configuration); one parameter set is queued per
    usable interface and the whole batch is handed to
    drone.request_discovery(...).
    '''
    init_params = ConfigFile.agent_params(self.config, 'discovery',
                                          '#ARP', drone.designation)
    netdata = jsonobj['data']   # the data portion of the JSON message
    requests = []
    for devname in netdata.keys():
        #print >> sys.stderr, "*** devname:", devname
        if not discovery_indicates_link_is_up(netdata[devname]):
            continue
        params = pyConfigContext(init_params)
        params[CONFIGNAME_INSTANCE] = '_ARP_' + devname
        params[CONFIGNAME_DEVNAME] = devname
        #print >> sys.stderr, '#ARP parameters:', params
        requests.append(params)
    if requests:
        drone.request_discovery(requests)
def dispatch(self, origaddr, frameset):
    '''Dispatch a JSDISCOVERY FrameSet.

    Walks the frames: a HOSTNAME frame supplies the system name for the
    JSDISCOVER frame that follows; each JSDISCOVER frame's JSON payload is
    logged against the matching drone (or its child system).
    '''
    fstype = frameset.get_framesettype()
    if CMAdb.debug:
        CMAdb.log.debug(
            "DispatchJSDISCOVERY: received [%s] FrameSet from [%s]"
            % (FrameSetTypes.get(fstype)[0], repr(origaddr)))
    sysname = None
    for frame in frameset.iter():
        frametype = frame.frametype()
        if frametype == FrameTypes.HOSTNAME:
            sysname = frame.getstr()
        if frametype == FrameTypes.JSDISCOVER:
            json = frame.getstr()
            jsonconfig = pyConfigContext(init=json)
            #print 'JSON received: ', json
            if sysname is None:
                # No HOSTNAME frame preceded us -- take it from the JSON
                sysname = jsonconfig.getstring('host')
            drone = self.droneinfo.find(sysname)
            #print >> sys.stderr, 'FOUND DRONE for %s IS: %s' % (sysname, drone)
            #print >> sys.stderr, 'LOGGING JSON FOR DRONE for %s IS: %s' % (drone, json)
            child = drone.find_child_system_from_json(jsonconfig)
            #if child is not drone:
            #    print >> sys.stderr, ('>>>>>>>>>>>>>>>>>>>LOGGED child system Discovery %s: %s'
            #                          % (str(child), json))
            child.logjson(origaddr, json)
            # Each JSDISCOVER frame consumes the pending hostname
            sysname = None
def dispatch(self, origaddr, frameset):
    '''Dispatch a JSDISCOVERY FrameSet: log each JSON discovery payload
    against the drone (or child system) it belongs to.

    A HOSTNAME frame names the system for the JSDISCOVER frame that
    follows it; each JSDISCOVER frame consumes the pending hostname.
    '''
    fstype = frameset.get_framesettype()
    if CMAdb.debug:
        CMAdb.log.debug("DispatchJSDISCOVERY: received [%s] FrameSet from [%s]"
                        % (FrameSetTypes.get(fstype)[0], repr(origaddr)))
    sysname = None
    for frame in frameset.iter():
        ftype = frame.frametype()
        if ftype == FrameTypes.HOSTNAME:
            sysname = frame.getstr()
        elif ftype == FrameTypes.JSDISCOVER:
            json = frame.getstr()
            jsonconfig = pyConfigContext(init=json)
            if sysname is None:
                # No HOSTNAME frame preceded us -- take it from the JSON
                sysname = jsonconfig.getstring('host')
            drone = self.droneinfo.find(sysname)
            child = drone.find_child_system_from_json(jsonconfig)
            child.logjson(origaddr, json)
            sysname = None
def compute_scores(drone, rulesobj, statuses):
    '''Compute the scores from this set of statuses -- organized by category.

    We return a 3-tuple:
      (total score,
       scores organized by category,
       the scoring detailed on a rule-by-rule basis).
    '''
    score_algorithm = BestPractices.determine_rule_score_algorithm(drone, rulesobj)
    scores = {}
    rulescores = {}
    totalscore=0
    # Accept either JSON text or an already-parsed statuses object
    if isinstance(statuses, (str, unicode)):
        statuses = pyConfigContext(statuses)
    for status in statuses:
        if status == 'score':
            # skip the 'score' entry -- it is not a status bucket of rule ids
            continue
        for ruleid in statuses[status]:
            rule = rulesobj[ruleid]
            rulecat = rule['category']
            rulescore = score_algorithm(drone, rule, status)
            if rulecat not in rulescores:
                rulescores[rulecat] = {}
            rulescores[rulecat][ruleid] = rulescore
            totalscore += rulescore
            if rulecat not in scores:
                scores[rulecat] = 0.0
            scores[rulecat] += rulescore
    return totalscore, scores, rulescores
def log_rule_results(self, results, drone, _srcaddr, discoveryobj, rulesobj):
    """Log the results of this set of rule evaluations.

    Compares the new results with the drone's previously-stored status for
    this discovery type, sends an event and emits one log line for each
    rule whose status changed, updates the scores, then stores the new
    results on the drone.
    """
    discovertype = discoveryobj["discovertype"]
    status_name = "BP_%s_rulestatus" % discovertype
    if hasattr(drone, status_name):
        oldstats = pyConfigContext(getattr(drone, status_name))
    else:
        # No previous results for this discovery type on this drone
        oldstats = {"pass": [], "fail": [], "ignore": [], "NA": [], "score": 0.0}
    for stat in ("pass", "fail", "ignore", "NA"):
        # Passes are logged at INFO; everything else is a warning
        logmethod = self.log.info if stat == "pass" else self.log.warning
        for ruleid in results[stat]:
            # Find which bucket this rule was in last time (if any)
            oldstat = None
            for statold in ("pass", "fail", "ignore", "NA"):
                if ruleid in oldstats[statold]:
                    oldstat = statold
                    break
            if oldstat == stat or stat == "NA":
                # No change
                continue
            BestPractices.send_rule_event(oldstat, stat, drone, ruleid, rulesobj)
            thisrule = rulesobj[ruleid]
            rulecategory = thisrule["category"]
            logmethod(
                "%s %sED %s rule %s: %s [%s]"
                % (
                    drone,
                    stat.upper(),
                    rulecategory,
                    ruleid,
                    self.url(drone, ruleid, rulesobj[ruleid]),
                    thisrule["rule"],
                )
            )
    self.compute_score_updates(discoveryobj, drone, rulesobj, results, oldstats)
    # BUG FIX: store the results on this drone INSTANCE, not on the Drone
    # class -- the hasattr(drone, ...) above reads the per-drone attribute,
    # so writing to the class would leak one drone's status to all drones.
    setattr(drone, status_name, str(results))
def _getitem(self, name):
    '''Return the given attribute or item from our object.

    Lookup order:
      1. self.kw overrides (value, or a callable invoked as fn(obj, name))
      2. obj.deepget(name) when available, otherwise obj[name]
      3. getattr(obj, name), descending dotted names via pyConfigContext
    Raises ValueError(name) when nothing matches.
    '''
    if name in self.kw:
        return self.kw[name](self.obj, name) if callable(self.kw[name]) \
            else self.kw[name]
    try:
        #print("Looking for %s in %s." % (name, type(self.obj)),
        #      file=sys.stderr)
        ret = self.obj.deepget(name) if hasattr(self.obj, 'deepget') \
            else self.obj[name]
        if ret is not None:
            return ret
    except (IndexError, KeyError, TypeError):
        # Fall through to attribute-style lookup below
        pass
    try:
        if not hasattr(self.obj, name):
            #print("Name %s not found in %s." % (name, type(self.obj)),
            #      file=sys.stderr)
            if name.find('.') > 0:
                # Dotted name: fetch the first component as an attribute,
                # then deepget the remainder through pyConfigContext
                prefix, suffix = name.split('.', 1)
                base = getattr(self.obj, prefix)
                subobj = pyConfigContext(base)
                return subobj.deepget(suffix)
        #print("Returning getattr( %s, %s." % (type(self.obj), name),
        #      file=sys.stderr)
        return getattr(self.obj, name)
    except AttributeError:
        pass
    raise ValueError(name)
def logjson(self, origaddr, jsontext):
    '''Process and save away JSON discovery data.

    Stores the JSON on this node as attribute JSON_<discovertype> when it
    is new or changed, then processes it.  Unchanged data is ignored --
    except unchanged tcpdiscovery data before monitors are activated,
    which is processed anyway so the monitors get started.
    '''
    assert CMAdb.store.has_node(self)
    jsonobj = pyConfigContext(jsontext)
    if not 'discovertype' in jsonobj or not 'data' in jsonobj:
        CMAdb.log.warning('Invalid JSON discovery packet: %s' % jsontext)
        return
    dtype = jsonobj['discovertype']
    jsonname = 'JSON_' + dtype
    if not hasattr(self, jsonname) or str(getattr(self, jsonname)) != jsontext:
        # New or changed discovery data -- save it on this node
        if CMAdb.debug:
            CMAdb.log.debug("Saved discovery type %s for endpoint %s."
                            % (dtype, self.designation))
        setattr(self, jsonname, jsontext)
    else:
        if not self.monitors_activated and dtype == 'tcpdiscovery':
            # This is because we need to start the monitors anyway...
            if CMAdb.debug:
                CMAdb.log.debug('Discovery type %s for endpoint %s is unchanged'
                                '. PROCESSING ANYWAY.' % (dtype, self.designation))
        else:
            if CMAdb.debug:
                CMAdb.log.debug('Discovery type %s for endpoint %s is unchanged. ignoring'
                                % (dtype, self.designation))
            return
    self._process_json(origaddr, jsonobj)
def processpkt(self, drone, unused_srcaddr, jsonobj):
    "Send commands to gather discovery data from /proc/sys"
    unused_srcaddr = unused_srcaddr  # make pylint happy
    data = jsonobj['data']  # The data portion of the JSON message
    osfield = 'operating-system'
    if osfield not in data:
        self.log.warning('OS name not found in %s' % str(data))
        return
    osname = data[osfield]
    # /proc/sys is Linux-specific -- skip everything else
    if 'Linux' not in osname and 'linux' not in osname:
        self.log.info('ProcSysDiscovery: OS name is not Linux: %s' % str(osname))
        return
    params = ConfigFile.agent_params(self.config, 'discovery', 'proc_sys',
                                     drone.designation)
    params['parameters'] = pyConfigContext({'ASSIM_discoverdir': '/proc/sys'})
    params[CONFIGNAME_TYPE] = 'proc_sys'
    params[CONFIGNAME_INSTANCE] = '_auto_proc_sys'
    if self.debug:
        self.log.debug('REQUESTING /proc/sys DISCOVERY')
    drone.request_discovery((params, ))
def _jsonstr_other(self, thing):
    '''Do our best to make JSON out of a "normal" python object -- the final "other" case.

    Serializes thing.__dict__ sorted by attribute name, skipping attributes
    with filtered prefixes and over-long JSON_ values, and expanding JSON_
    string values into objects when self.expandJSON is set.  The node's
    store id is included as "_id" when it has one.
    '''
    ret = '{'
    comma = ''
    # FIX: sorted() instead of keys()+.sort() -- dict.keys() is a view on
    # Python 3 (no .sort() method); sorted() behaves the same on Python 2.
    attrs = sorted(thing.__dict__.keys())
    if Store.has_node(thing) and Store.id(thing) is not None:
        ret += '"_id": %s' % str(Store.id(thing))
        comma = ','
    for attr in attrs:
        skip = False
        for prefix in self.filterprefixes:
            if attr.startswith(prefix):
                skip = True
                break  # FIX: was 'continue' -- no need to scan further prefixes
        if skip:
            continue
        value = getattr(thing, attr)
        if self.maxJSON > 0 and attr.startswith('JSON_') and len(value) > self.maxJSON:
            # Too big -- omit this attribute entirely
            continue
        if self.expandJSON and attr.startswith('JSON_') and value.startswith('{'):
            # Expand embedded JSON text into a real object
            js = pyConfigContext(value)
            if js is not None:
                value = js
        ret += '%s"%s":%s' % (comma, attr, self._jsonstr(value))
        comma = ','
    ret += '}'
    return ret
def agent_params(config, agenttype, agentname, dronedesignation):
    '''We return the agent parameters for the given type, agent name and drone

    The most specific values take priority over the less specific values
    creating a 3-level value inheritance scheme.
    - Top level is for all agents.
    - Second level is for specific agents.
    - Third level is for specific agents on specific machines.

    agenttype should be one of 'monitoring' or 'discovery'
    agentname for discovery: name of discovery agent
    agentname for monitoring: monitoring-class::provider:monitortype for OCF
                              monitoring-class::monitortype for non-OCF
    We implement this.
    '''
    compoundname = '%s/%s' % (agentname, dronedesignation)
    subconfig = config[agenttype]
    result = pyConfigContext('{"type": "%s", "parameters":{}}' % agentname)
    # Third (most specific) level: this agent on this specific machine
    if compoundname in subconfig:
        result['parameters'] = subconfig[compoundname]
    # Second level: this specific agent
    if 'agents' in subconfig and agentname in subconfig['agents']:
        agentlist = subconfig['agents']
        for tag in agentlist[agentname]:
            if tag not in result:
                # FIX: reuse the looked-up value (the old 'subval' local
                # was assigned but never used)
                result['parameters'][tag] = agentlist[agentname][tag]
    # Top level: values that apply to all agents (scalars only)
    for tag in subconfig:
        if tag not in result:
            subval = subconfig[tag]
            if not hasattr(subval, 'keys'):
                result['parameters'][tag] = subconfig[tag]
    return result
def _getitem(self, name):
    '''Return the given attribute or item from our object.

    Tries, in order: a self.kw override (plain value or callable invoked
    as fn(obj, name)); obj.deepget(name) / obj[name]; and finally
    getattr(obj, name), descending dotted names through pyConfigContext.
    Raises ValueError(name) when every strategy fails.
    '''
    if name in self.kw:
        return self.kw[name](self.obj, name) if callable(self.kw[name]) \
            else self.kw[name]
    try:
        #print("Looking for %s in %s." % (name, type(self.obj)),
        #      file=sys.stderr)
        ret = self.obj.deepget(name) if hasattr(self.obj, 'deepget') \
            else self.obj[name]
        if ret is not None:
            return ret
    except (IndexError, KeyError, TypeError):
        # Fall through to attribute-style lookup below
        pass
    try:
        if not hasattr(self.obj, name):
            #print("Name %s not found in %s." % (name, type(self.obj)),
            #      file=sys.stderr)
            if name.find('.') > 0:
                # Dotted name: fetch the first component as an attribute,
                # then deepget the remainder through pyConfigContext
                prefix, suffix = name.split('.', 1)
                base = getattr(self.obj,prefix)
                subobj = pyConfigContext(base)
                return subobj.deepget(suffix)
        #print("Returning getattr( %s, %s." % (type(self.obj), name),
        #      file=sys.stderr)
        return getattr(self.obj, name)
    except AttributeError:
        pass
    raise ValueError(name)
def _jsonstr_other(self, thing):
    '''Do our best to make JSON out of a "normal" python object -- the final "other" case.

    Serializes thing.__dict__ sorted by attribute name, skipping attributes
    with filtered prefixes and over-long JSON_ values, and expanding JSON_
    string values into objects when self.expandJSON is set.  The node's
    store id is included as "_id" when it has one.
    '''
    ret = '{'
    comma = ''
    # FIX: sorted() instead of keys()+.sort() -- dict.keys() is a view on
    # Python 3 (no .sort() method); sorted() behaves the same on Python 2.
    attrs = sorted(thing.__dict__.keys())
    if Store.has_node(thing) and Store.id(thing) is not None:
        ret += '"_id": %s' % str(Store.id(thing))
        comma = ','
    for attr in attrs:
        skip = False
        for prefix in self.filterprefixes:
            if attr.startswith(prefix):
                skip = True
                break  # FIX: was 'continue' -- no need to scan further prefixes
        if skip:
            continue
        value = getattr(thing, attr)
        if self.maxJSON > 0 and attr.startswith('JSON_') and len(value) > self.maxJSON:
            # Too big -- omit this attribute entirely
            continue
        if self.expandJSON and attr.startswith('JSON_') and value.startswith('{'):
            # Expand embedded JSON text into a real object
            js = pyConfigContext(value)
            if js is not None:
                value = js
        ret += '%s"%s":%s' % (comma, attr, self._jsonstr(value))
        comma = ','
    ret += '}'
    return ret
def __init__(self, filename=None, template=None, defaults=None):
    '''Initialize a ConfigFile from a file -- or from built-in defaults.

    filename: configuration file to load; when None, the defaults mapping
              itself becomes the configuration.
    template: falls back to ConfigFile.default_template when None.
    defaults: falls back to ConfigFile.default_defaults() when None.

    Registered callbacks are invoked once the configuration is loaded.
    '''
    self.template = ConfigFile.default_template if template is None else template
    self.defaults = ConfigFile.default_defaults() if defaults is None else defaults
    if filename is None:
        self.config = pyConfigContext(self.defaults)
    else:
        self.config = pyConfigContext(filename=filename)
    # Let everyone who registered know the configuration was (re)loaded
    for function, args in ConfigFile.callbacks:
        function(self, None, args)
def compute_available_agents(context):
    '''Build (or fetch) a cache of all our available monitoring agents.

    The cache -- {class: {agentname: True}} -- is stored on the first node
    that carries 'monitoringagents' discovery data and returned.  Returns
    {} when no node in the context carries such data.
    '''
    if not (hasattr(context, 'get') and hasattr(context, 'objects')):
        context = ExpressionContext(context)
    for node in context.objects:
        if hasattr(node, '_agentcache'):
            # Keep pylint from getting irritated...
            return getattr(node, '_agentcache')
        if not hasattr(node, '__iter__') or 'monitoringagents' not in node:
            # This node carries no monitoring-agent discovery data
            continue
        agentdata = pyConfigContext(node['monitoringagents'])['data']
        cache = {}
        for agentclass in agentdata.keys():
            bucket = cache.setdefault(agentclass, {})
            for agent in agentdata[agentclass]:
                bucket[agent] = True
        setattr(node, '_agentcache', cache)
        return cache
    return {}
def logjson(self, origaddr, jsontext):
    '''Process and save away JSON discovery data.

    The discovery 'instance' name keys the stored copy on this node.
    Unchanged data is ignored -- except unchanged tcpdiscovery data before
    monitors are activated, which is processed anyway so the monitors get
    started.
    '''
    assert CMAdb.store.has_node(self)
    jsonobj = pyConfigContext(jsontext)
    if 'instance' not in jsonobj or not 'data' in jsonobj:
        CMAdb.log.warning('Invalid JSON discovery packet: %s' % jsontext)
        return
    dtype = jsonobj['instance']
    if not self.json_eq(dtype, jsontext):
        CMAdb.log.debug("Saved discovery type %s [%s] for endpoint %s."
                        % (jsonobj['discovertype'], dtype, self.designation))
        self[dtype] = jsontext  # This is stored in separate nodes for performance
    else:
        if not self.monitors_activated and dtype == 'tcpdiscovery':
            # This is because we need to start the monitors anyway...
            if CMAdb.debug:
                CMAdb.log.debug('Discovery type %s for endpoint %s is unchanged'
                                '. PROCESSING ANYWAY.' % (dtype, self.designation))
        else:
            if CMAdb.debug:
                CMAdb.log.debug('Discovery type %s for endpoint %s is unchanged. ignoring'
                                % (dtype, self.designation))
            return
    self._process_json(origaddr, jsonobj)
def __iter__(self):
    '''Yield 'dot' strings for our nodes and relationships.

    Runs the (all)hostsubgraph query for our node/relationship types,
    then renders the single result row's 'nodes' and 'relationships'
    elements between the Digraph header and the closing brace.
    '''
    yield 'Digraph G {%s\n' % self.render_options()
    nodetypes = self.formatdict['nodes'].keys()
    reltypes = self.formatdict['relationships'].keys()
    if self.dronelist is None:
        params = {'nodetypes': nodetypes, 'reltypes': reltypes}
        queryname = 'allhostsubgraph'
    else:
        # Restrict the subgraph to the requested host(s)
        queryname = 'hostsubgraph'
        params = {'nodetypes': nodetypes, 'reltypes': reltypes,
                  'hostname': self.dronelist}
    print ('NODETYPES: %s ' % str(nodetypes), file=sys.stderr)
    print ('RELTYPES: %s ' % str(reltypes), file=sys.stderr)
    querymeta = assimcli.query.load_query_object(self.store, queryname)
    queryiter = querymeta.execute(self.executor_context, expandJSON=True,
                                  elemsonly=True, **params)
    # Subgraph queries produce a single row, with two elements:
    # nodes and relationships
    for jsonline in queryiter:
        queryobj = pyConfigContext(jsonline)
        break
    for line in self._outnodes(queryobj['nodes']):
        yield line.strip() + '\n'
    for line in self._outrels(queryobj['relationships']):
        yield line.strip() + '\n'
    yield '}\n'
def __iter__(self):
    '''Yield 'dot'-format strings describing our nodes and relationships.

    Runs the (all)hostsubgraph query for our node/relationship types and
    renders its single result row between the Digraph header and the
    closing brace.
    '''
    yield 'Digraph G {%s\n' % self.render_options()
    nodetypes = self.formatdict['nodes'].keys()
    reltypes = self.formatdict['relationships'].keys()
    params = {'nodetypes': nodetypes, 'reltypes': reltypes}
    if self.dronelist is None:
        queryname = 'allhostsubgraph'
    else:
        # Restrict the subgraph to the requested host(s)
        queryname = 'hostsubgraph'
        params['hostname'] = self.dronelist
    print('NODETYPES: %s ' % str(nodetypes), file=sys.stderr)
    print('RELTYPES: %s ' % str(reltypes), file=sys.stderr)
    querymeta = assimcli.query.load_query_object(self.store, queryname)
    queryiter = querymeta.execute(self.executor_context, expandJSON=True,
                                  elemsonly=True, **params)
    # Subgraph queries produce exactly one row with two elements
    # ('nodes' and 'relationships') -- grab only the first row.
    for jsonline in queryiter:
        queryobj = pyConfigContext(jsonline)
        break
    for text in self._outnodes(queryobj['nodes']):
        yield text.strip() + '\n'
    for text in self._outrels(queryobj['relationships']):
        yield text.strip() + '\n'
    yield '}\n'
def cmdline_exec(self, executor_context, language='en', fmtstring=None, **params):
    'Execute the command line version of the query for the specified language'
    if fmtstring is None:
        # Default format string comes from the query's own metadata
        fmtstring = self._JSON_metadata['cmdline'][language]
    checked_params = self.validate_parameters(params)
    results = self.execute(executor_context, expandJSON=True,
                           maxJSON=100, elemsonly=True, **checked_params)
    for json in results:
        yield ClientQuery._cmdline_substitute(fmtstring, pyConfigContext(json))
def __init__(self, filename=None, template=None, defaults=None):
    '''Init function for ConfigFile class -- give us a filename, or None
    to use the built-in defaults as the configuration.

    template: falls back to ConfigFile.default_template when None.
    defaults: falls back to ConfigFile.default_defaults() when None.
    '''
    if template is None:
        template = ConfigFile.default_template
    self.template = template
    if defaults is None:
        defaults = ConfigFile.default_defaults()
    self.defaults = defaults
    if filename is None:
        # ROBUSTNESS FIX: filename defaults to None, so fall back to our
        # defaults instead of passing filename=None straight through to
        # pyConfigContext (consistent with the sibling ConfigFile.__init__).
        self.config = pyConfigContext(self.defaults)
    else:
        self.config = pyConfigContext(filename=filename)
def cmdline_exec(self, executor_context, language='en', fmtstring=None, **params):
    'Execute the command line version of the query for the specified language'
    # Default format string comes from the query's own metadata
    fmt = self._JSON_metadata['cmdline'][language] if fmtstring is None else fmtstring
    checked = self.validate_parameters(params)
    for json in self.execute(executor_context, expandJSON=True,
                             maxJSON=5120, elemsonly=True, **checked):
        yield ClientQuery._cmdline_substitute(fmt, pyConfigContext(json))
def processpkt(self, drone, _unused_srcaddr, jsonobj):
    '''Request discovery of auditd (log) files and directories.
    They will be evaluated by some auditd best practice rules.'''
    data = jsonobj['data']  # The data portion of the JSON message
    params = pyConfigContext()
    params['parameters'] = pyConfigContext()
    params[CONFIGNAME_TYPE] = 'fileattrs'
    params[CONFIGNAME_INSTANCE] = 'auditd_fileattrs'
    # Watch the directory holding the configured audit log, falling back
    # to the conventional auditd log directory.
    filelist = (os.path.dirname(data['log_file']) + '/'
                if 'log_file' in data else '/var/log/audit/')
    params['parameters']['filelist'] = filelist
    params['parameters']['ASSIM_filelist'] = filelist
    #print >> sys.stderr, 'DISCOVERING %s' % str(params)
    # repeat, warn, and interval are automatically added
    drone.request_discovery((params,))
def __init__(self, json, jhash=None):
    '''Construct a metadata GraphNode from JSON text.

    json:  JSON text to parse; its canonical (re-serialized) string form
           is what gets stored in self.json.
    jhash: precomputed hash of the JSON text; computed via self.strhash()
           when None.
    '''
    GraphNode.__init__(self, domain='metadata')
    self._map = pyConfigContext(json)
    # Canonical string form of the parsed JSON
    self.json = str(self._map)
    # We use sha224 to keep the length under 60 characters (56 to be specific)
    # This is a performance consideration for the current (2.3) version of Neo4j
    if jhash is None:
        jhash = self.strhash(self.json)
    self.jhash = jhash
def processpkt(self, drone, _unused_srcaddr, jsonobj):
    """Request discovery of auditd (log) files and directories.

    They will be evaluated by some auditd best practice rules.
    """
    data = jsonobj["data"]  # The data portion of the JSON message
    # Discover the directory holding the configured log file, falling back
    # to the conventional auditd location when none is configured.
    if "log_file" in data:
        target = os.path.dirname(data["log_file"]) + "/"
    else:
        target = "/var/log/audit/"
    params = pyConfigContext()
    params[CONFIGNAME_TYPE] = "fileattrs"
    params[CONFIGNAME_INSTANCE] = "auditd_fileattrs"
    params["parameters"] = pyConfigContext()
    params["parameters"]["filelist"] = target
    params["parameters"]["ASSIM_filelist"] = target
    # repeat, warn, and interval are automatically added
    drone.request_discovery((params, ))
def processchecksumpkt(self, drone, unused_srcaddr, jsonobj):
    '''Process updated checksums.  Note that our drone-owned-JSON is already updated.

    Compares the incoming checksum data against the previous snapshot kept
    on the drone (attribute JSON_OLD_checksums), then replaces that snapshot
    with the new message for the next comparison.
    '''
    unused_srcaddr = unused_srcaddr # make pylint happy...
    data = jsonobj['data'] # The data portion of the JSON message
    print >> sys.stderr, 'PROCESSING CHECKSUM DATA'
    # Only compare if we have a previous snapshot to compare against
    if hasattr(drone, 'JSON_OLD_checksums'):
        print >> sys.stderr, 'COMPARING CHECKSUM DATA'
        olddata = pyConfigContext(drone.JSON_OLD_checksums)['data']
        self.compare_checksums(drone, olddata, data)
    print >> sys.stderr, 'UPDATING CHECKSUM DATA for %d files' % len(data)
    # Save the raw message so the next packet can be diffed against it
    drone.JSON_OLD_checksums = str(jsonobj)
def construct_mon_json(self, operation="monitor"):
    """Build the JSON request string describing this monitoring action.

    Parameters
    ----------
    operation : str
        Operation name to embed in the request (default "monitor").

    Returns
    ----------
    JSON string representing this particular monitor action.
    """
    # Optional environment (argument) map, emitted as a nested JSON object.
    # NOTE(review): the guard tests self.arglist but the loop iterates
    # self._arglist -- presumably one mirrors the other; confirm upstream.
    if self.arglist is None:
        arglist_str = ""
    else:
        arglist_str = ', "%s": {' % (REQENVIRONNAMEFIELD)
        comma = ""
        for arg in self._arglist:
            arglist_str += '%s"%s":"%s"' % (comma, str(arg), str(self._arglist[arg]))
            comma = ", "
        arglist_str += "}"
    # Optional resource-agent provider field
    if self.provider is None:
        provider_str = ""
    else:
        provider_str = ', "%s":"%s"' % (REQPROVIDERNAMEFIELD, self.provider)
    # Optional nagios search path: the Python repr() quoting is patched
    # into JSON double-quoting with string replacements.
    path_str = ""
    if hasattr(self, "nagiospath"):
        path_str = ', "%s": %s' % (REQNAGIOSPATH, getattr(self, "nagiospath"))
        path_str = path_str.replace("u'", '"')  # drop u'' unicode-repr prefixes
        path_str = path_str.replace("'", '"')
    # Optional argv list, converted from Python repr to JSON the same way
    argv_str = ""
    if self.argv is not None:
        argv_str = ', "%s": %s' % (REQARGVNAMEFIELD, getattr(self, "argv"))
        argv_str = argv_str.replace("u'", '"')
        argv_str = argv_str.replace("'", '"')
    # Assemble the full request; the optional fragments above each start
    # with ", " so they can be appended verbatim.
    json = '{"%s": %d, "%s":"%s", "%s":"%s", "%s":"%s", "%s":"%s", "%s":%d, "%s":%d%s%s%s%s}' % (
        REQIDENTIFIERNAMEFIELD,
        self.request_id,
        REQOPERATIONNAMEFIELD,
        operation,
        REQCLASSNAMEFIELD,
        self.monitorclass,
        CONFIGNAME_TYPE,
        self.monitortype,
        CONFIGNAME_INSTANCE,
        self.monitorname,
        CONFIGNAME_INTERVAL,
        self.interval,
        CONFIGNAME_TIMEOUT,
        self.timeout,
        provider_str,
        arglist_str,
        path_str,
        argv_str,
    )
    # Round-trip through pyConfigContext to normalize/validate the JSON
    return str(pyConfigContext(init=json))
def _processpkt_by_type(self, drone, srcaddr, evaltype, jsonobj):
    """Run one discovery object through every rule object registered
    for its discovery type, logging each evaluator's results."""
    for evaluator in BestPractices.eval_objects[evaltype]:
        ruleset = evaluator.fetch_rules(drone, srcaddr, evaltype)
        verdicts = pyConfigContext(
            evaluator.evaluate(drone, srcaddr, jsonobj, ruleset, evaltype))
        self.log_rule_results(verdicts, drone, srcaddr, jsonobj, ruleset)
def processchecksumpkt(self, drone, _unused_srcaddr, jsonobj):
    '''Process updated checksums. The value of drone['checksums'] (if any)
    is the _previous_ value of the checksums attribute.

    Compares the incoming checksum data against that previous value when
    one exists; the drone attribute itself is updated elsewhere.
    '''
    data = jsonobj['data'] # The data portion of the JSON message
    print >> sys.stderr, 'PROCESSING CHECKSUM DATA'
    # Only compare when a previous checksum snapshot is present
    if ('checksums' in drone):
        print >> sys.stderr, 'COMPARING CHECKSUM DATA'
        olddata = pyConfigContext(drone['checksums'])['data']
        self.compare_checksums(drone, olddata, data)
    print >> sys.stderr, 'UPDATING CHECKSUM DATA for %d files' % len(data)
def processchecksumpkt(self, drone, _unused_srcaddr, jsonobj):
    """Process updated checksums. The value of drone['checksums'] (if any)
    is the _previous_ value of the checksums attribute.

    Compares the incoming checksum data against that previous value when
    one exists; the drone attribute itself is updated elsewhere.
    """
    data = jsonobj["data"]  # The data portion of the JSON message
    print >>sys.stderr, "PROCESSING CHECKSUM DATA"
    # Only compare when a previous checksum snapshot is present
    if "checksums" in drone:
        print >>sys.stderr, "COMPARING CHECKSUM DATA"
        olddata = pyConfigContext(drone["checksums"])["data"]
        self.compare_checksums(drone, olddata, data)
    print >>sys.stderr, "UPDATING CHECKSUM DATA for %d files" % len(data)
def _processpkt_by_type(self, drone, srcaddr, evaltype, jsonobj):
    'process a discovery object against its set of rules'
    # Every registered rule object for this discovery type gets a look
    for rule_obj in BestPractices.eval_objects[evaltype]:
        rulesobj = rule_obj.fetch_rules(drone, srcaddr, evaltype)
        evaluation = rule_obj.evaluate(drone, srcaddr, jsonobj, rulesobj, evaltype)
        statuses = pyConfigContext(evaluation)
        self.log_rule_results(statuses, drone, srcaddr, jsonobj, evaltype, rulesobj)
def processpkt(self, drone, srcaddr, jsonobj):
    '''Inform interested rule objects about this change.

    Looks up the evaluator classes registered for this packet's
    discovery type, instantiates each one, evaluates its rules against
    the packet data, and logs the results.  Packets whose discovery
    type has no registered evaluators are ignored.
    '''
    # (removed the no-op 'self = self' statement -- self is genuinely
    # used below, so the pylint pacifier was dead code)
    discovertype = jsonobj['discovertype']
    if discovertype not in BestPractices.evaluators:
        return
    for rulecls in BestPractices.evaluators[discovertype]:
        ruleclsobj = rulecls(self.config, self.packetio, self.store,
                             self.log, self.debug)
        rulesobj = ruleclsobj.fetch_rules(drone, srcaddr, discovertype)
        statuses = pyConfigContext(ruleclsobj.evaluate(drone, srcaddr,
                                                       jsonobj['data'], rulesobj))
        self.log_rule_results(statuses, drone, srcaddr, discovertype, rulesobj)
def ConstructFromString(s, objclass="service"):
    """
    Construct a MonitoringRule from a string parameter.
    It will construct the appropriate subclass depending on its input
    string. Note that the input is JSON -- with whole-line comments.
    A whole line comment has to _begin_ with a #.

    Parameters
    ----------
    s : str
        JSON text describing the rule; must contain a "class" member.
    objclass : str
        Default nagios object class, used only when the JSON has no
        "objclass" member (default "service").

    Raises
    ----------
    ValueError on malformed JSON, unknown class, or missing/extra fields.
    """
    obj = pyConfigContext(s)
    if obj is None:
        raise ValueError("Invalid JSON: %s" % s)
    if "class" not in obj:
        raise ValueError("Must have class value")
    # Permitted fields for each resource class; True marks a mandatory field
    legit = {
        "ocf": {"class": True, "type": True, "classconfig": True, "provider": True},
        "lsb": {"class": True, "type": True, "classconfig": True},
        "nagios": {
            "class": True,
            "type": True,
            "classconfig": True,
            "prio": True,
            "initargs": True,
            "objclass": True,
        },
        "NEVERMON": {"class": True, "type": True, "classconfig": True},
    }
    rscclass = obj["class"]
    if rscclass not in legit:
        raise ValueError("Illegal class value: %s" % rscclass)
    allowed = legit[rscclass]  # renamed from ambiguous 'l' (PEP 8 E741)
    for key, mandatory in allowed.items():
        if mandatory and key not in obj:
            raise ValueError("%s object must have %s field" % (rscclass, key))
    for key in obj.keys():
        if key not in allowed:
            raise ValueError("%s object cannot have a %s field" % (rscclass, key))
    # BUGFIX: fall back to the caller-supplied objclass instead of
    # unconditionally resetting it to "service" -- the parameter was
    # previously dead whenever the JSON omitted "objclass".
    objclass = obj.get("objclass", objclass)
    if rscclass == "lsb":
        return LSBMonitoringRule(obj["type"], obj["classconfig"])
    if rscclass == "ocf":
        return OCFMonitoringRule(obj["provider"], obj["type"], obj["classconfig"])
    if rscclass == "nagios":
        return NagiosMonitoringRule(
            obj["type"], obj["prio"], obj["initargs"], obj["classconfig"], objclass=objclass
        )
    if rscclass == "NEVERMON":
        return NEVERMonitoringRule(obj["type"], obj["classconfig"])
    raise ValueError('Invalid resource class ("class" = "%s")' % rscclass)
def dispatch(self, origaddr, frameset):
    '''Log the resource-operation change carried in the first RSCJSONREPLY
    frame of this FrameSet; complain if no such frame is present.'''
    fstype = frameset.get_framesettype()
    if CMAdb.debug:
        CMAdb.log.debug("DispatchRSCOPREPLY: received [%s] FrameSet from [%s]"
                        % (FrameSetTypes.get(fstype)[0], str(origaddr)))
    for frame in frameset.iter():
        # Skip everything except the RSCJSONREPLY payload frame
        if frame.frametype() != FrameTypes.RSCJSONREPLY:
            continue
        MonitorAction.logchange(origaddr, pyConfigContext(frame.getstr()))
        return
    CMAdb.log.critical('RSCOPREPLY message from %s did not have a RSCJSONREPLY field'
                       % (str(origaddr)))
def __init__(self, json=None, encryption_required=False):
    '''Constructor for a combined database/network transaction.

    Parameters
    ----------
    json : str or None
        Optional serialized transaction to resume; must contain a
        'packets' member.  When None, an empty transaction is created.
    encryption_required : bool
        Whether outgoing packets must be encrypted.

    Raises
    ----------
    ValueError when the supplied JSON lacks a 'packets' member.
    '''
    if json is None:
        self.tree = {'packets': []}
    else:
        self.tree = pyConfigContext(init=str(json))
        if 'packets' not in self.tree:  # idiomatic 'not in' (was 'not x in')
            raise ValueError('Incoming JSON is malformed: >>%s<<' % json)
    self.namespace = {}
    self.created = []
    self.sequence = None
    # Commit timing statistics, accumulated as timedeltas
    self.stats = {'lastcommit': timedelta(0), 'totaltime': timedelta(0)}
    self.encryption_required = encryption_required
    self.post_transaction_packets = []