class DaemonProcess(object):
    """Actual daemon processing for GREASE Daemon

    Attributes:
        ioc (GreaseContainer): The Grease IOC
        current_real_second (int): Current second in time
        registered (bool): If the node is registered with MongoDB
        contextManager (dict): Running thread/command context for jobs & prototypes
        impTool (ImportTool): Instance of Import Tool
        conf (PrototypeConfig): Prototype Configuration Instance

    """

    ioc = None
    current_real_second = None
    registered = True
    # NOTE(review): class-level mutable attribute is shared across instances;
    # preserved from the original since only one daemon runs per process
    contextManager = {'jobs': {}, 'prototypes': {}}
    impTool = None

    def __init__(self, ioc):
        """Bootstrap the daemon

        Args:
            ioc (GreaseContainer): IoC container; anything else provided
                results in a fresh container being created

        """
        if isinstance(ioc, GreaseContainer):
            self.ioc = ioc
        else:
            self.ioc = GreaseContainer()
        self.current_real_second = datetime.utcnow().second
        if self.ioc.getConfig().NodeIdentity == "Unknown" and not self.register():
            self.registered = False
        self.impTool = ImportTool(self.ioc.getLogger())
        self.conf = PrototypeConfig(self.ioc)

    def server(self):
        """Server process for ensuring prototypes & jobs are running

        By Running this method this will clear the DB of any jobs a node may have

        Returns:
            bool: Server Success

        """
        # Ensure we aren't swamping the system
        cpu = cpu_percent(interval=.1)
        mem = virtual_memory().percent
        # hoisted: both comparisons use the same configured ceiling
        resource_max = int(self.ioc.getConfig().get('NodeInformation', 'ResourceMax'))
        if cpu >= resource_max or mem >= resource_max:
            self.ioc.getLogger().trace(
                "Thread Maximum Reached CPU: [{0}] Memory: [{1}]".format(cpu, mem),
                trace=True)
            return True
        if not self.registered:
            self.ioc.getLogger().trace("Server is not registered", trace=True)
            return False
        self.ioc.getLogger().trace("Server execution starting", trace=True)
        # establish job collection
        JobsCollection = self.ioc.getCollection("SourceData")
        self.ioc.getLogger().trace("Searching for Jobs", trace=True)
        jobs = JobsCollection.find({
            'grease_data.execution.server': ObjectId(self.ioc.getConfig().NodeIdentity),
            'grease_data.execution.commandSuccess': False,
            'grease_data.execution.executionSuccess': False,
            'grease_data.execution.failures': {'$lt': 6}
        })
        # Get Node Information
        Node = self.ioc.getCollection('JobServer').find_one(
            {'_id': ObjectId(self.ioc.getConfig().NodeIdentity)})
        if not Node:
            # If for some reason we couldn't find it
            self.ioc.getLogger().error("Failed To Load Node Information")
            return False
        # Get Prototypes
        prototypes = list(Node.get('prototypes'))
        # Del node instance
        del Node
        if prototypes:
            # We have prototypes to spin up
            for prototype in prototypes:
                self.ioc.getLogger().trace(
                    "Passing ProtoType [{0}] to Runner".format(prototype), trace=True)
                self._run_prototype(prototype)
        if jobs.count():
            self.ioc.getLogger().trace(
                "Total Jobs to Execute: [{0}]".format(jobs.count()))
            for job in jobs:
                self.ioc.getLogger().trace(
                    "Passing Job [{0}] to Runner".format(job.get("_id")), trace=True)
                self._run_job(job, JobsCollection)
        else:
            # Nothing to Run for Jobs
            self.ioc.getLogger().trace("No Jobs Scheduled to Server", trace=True)
        self.ioc.getLogger().trace("Server execution complete", trace=True)
        return True

    def _run_job(self, job, JobCollection):
        """Run a On-Demand Job

        Args:
            job (dict): Job Data to execute
            JobCollection (pymongo.collection.Collection): JobCollection to update for telemetry

        Returns:
            None: Void Method to kickoff execution

        """
        if not self.contextManager['jobs'].get(job.get('_id')):
            # New Job to run
            if isinstance(job.get('configuration'), bytes):
                conf = job.get('configuration').decode()
            else:
                conf = job.get('configuration')
            inst = self.impTool.load(self.conf.get_config(conf).get('job', ''))
            if inst and isinstance(inst, Command):
                inst.ioc.getLogger().foreground = self.ioc.getLogger().foreground
                thread = threading.Thread(
                    target=inst.safe_execute,
                    args=(job.get('grease_data', {}).get('detection', {}).get('detection', {}),),
                    name="GREASE DAEMON COMMAND EXECUTION [{0}]".format(job.get('_id')))
                thread.daemon = True
                thread.start()
                self.contextManager['jobs'][job.get("_id")] = {
                    'thread': thread,
                    'command': inst
                }
            else:
                # Invalid Job
                del inst
                self.ioc.getLogger().warning("Invalid Job", additional=job)
                # BUG FIX: the failure counter lives under
                # grease_data.execution; the old top-level 'failures' lookup
                # never existed, so the count always reset to 1
                JobCollection.update_one({'_id': ObjectId(job['_id'])}, {
                    '$set': {
                        'grease_data.execution.failures':
                            job.get('grease_data', {}).get(
                                'execution', {}).get('failures', 0) + 1
                    }
                })
            return
        else:
            # Job already executing
            # BUG FIX: Thread.isAlive() was removed in Python 3.9 -> is_alive()
            if self.contextManager['jobs'].get(job.get('_id')).get('thread').is_alive():
                # thread still executing
                return
            else:
                # Execution has ended
                self.ioc.getLogger().trace(
                    "Job [{0}] finished running".format(job.get('_id')), trace=True)
                finishedJob = self.contextManager['jobs'].get(
                    job.get('_id')).get('command')  # type: Command
                if finishedJob.getRetVal():
                    # job completed successfully
                    JobCollection.update_one(
                        {'_id': ObjectId(job.get('_id'))},
                        {
                            '$set': {
                                'grease_data.execution.commandSuccess':
                                    finishedJob.getRetVal(),
                                'grease_data.execution.executionSuccess':
                                    finishedJob.getExecVal(),
                                'grease_data.execution.completeTime':
                                    datetime.utcnow(),
                                'grease_data.execution.returnData':
                                    finishedJob.getData()
                            }
                        })
                else:
                    # Job Failure
                    self.ioc.getLogger().warning(
                        "Job Failed [{0}]".format(job.get('_id')),
                        additional=finishedJob.getData())
                    # TODO: Job Execution cooldown timing
                    JobCollection.update_one({'_id': ObjectId(job['_id'])}, {
                        '$set': {
                            'grease_data.execution.failures':
                                job.get('grease_data', {}).get(
                                    'execution', {}).get('failures', 0) + 1
                        }
                    })
                # close out job
                finishedJob.__del__()
                del finishedJob
                # remove from contextManager
                del self.contextManager['jobs'][job.get('_id')]
                return

    def _run_prototype(self, prototype):
        """Startup a ProtoType

        Args:
            prototype (str): ProtoType to start

        Returns:
            None: Void method to start prototype

        """
        if not self.contextManager['prototypes'].get(prototype):
            # ProtoType has not started
            inst = self.impTool.load(prototype)
            if not isinstance(inst, Command):
                # invalid ProtoType
                self.log_once_per_second(
                    "Invalid ProtoType [{0}]".format(prototype), level=ERROR)
                return
            inst.ioc.getLogger().foreground = self.ioc.getLogger().foreground
            # BUG FIX: `args=({})` is just an empty dict, which spreads to
            # ZERO positional arguments; a one-tuple `({},)` passes the empty
            # context dict the same way _run_job does
            thread = threading.Thread(
                target=inst.safe_execute,
                args=({},),
                name="GREASE DAEMON PROTOTYPE [{0}]".format(prototype))
            thread.daemon = True
            thread.start()
            self.contextManager['prototypes'][prototype] = thread
            return
        else:
            # ensure thread is alive
            if self.contextManager['prototypes'].get(prototype).is_alive():
                self.ioc.getLogger().trace(
                    "ProtoType [{0}] is alive".format(prototype))
                return
            else:
                # Thread died for some reason
                self.log_once_per_second(
                    "ProtoType [{0}] Stopped".format(prototype), level=INFO)
                inst = self.impTool.load(prototype)
                if not isinstance(inst, Command):
                    self.log_once_per_second(
                        "Invalid ProtoType [{0}]".format(prototype), level=ERROR)
                    return
                inst.ioc.getLogger().foreground = self.ioc.getLogger().foreground
                # BUG FIX: the restart path used the bare `execute` target with
                # no args; use safe_execute with an empty context for
                # consistency with the initial-start path above
                thread = threading.Thread(
                    target=inst.safe_execute,
                    args=({},),
                    name="GREASE DAEMON PROTOTYPE [{0}]".format(prototype))
                thread.daemon = True
                thread.start()
                self.contextManager['prototypes'][prototype] = thread
                return

    def drain_jobs(self, JobCollection):
        """Will drain jobs from the current context

        This method is used to prevent abnormal ending of executions

        Args:
            JobCollection (pymongo.collection.Collection): Job Collection Object

        Returns:
            bool: When job queue is emptied

        """
        Threads = True
        while Threads:
            if self.contextManager['jobs']:
                jobs = {}
                for key, val in self.contextManager['jobs'].items():
                    if val['thread'].is_alive():
                        # still running; carry it into the next pass
                        jobs[key] = val
                        continue
                    else:
                        # Execution has ended
                        self.ioc.getLogger().trace(
                            "Job [{0}] finished running".format(key), trace=True)
                        finishedJob = self.contextManager['jobs'].get(key).get(
                            'command')  # type: Command
                        if finishedJob.getRetVal():
                            # job completed successfully
                            JobCollection.update_one({'_id': ObjectId(key)}, {
                                '$set': {
                                    'grease_data.execution.commandSuccess':
                                        finishedJob.getRetVal(),
                                    'grease_data.execution.executionSuccess':
                                        finishedJob.getExecVal(),
                                    'grease_data.execution.completeTime':
                                        datetime.utcnow(),
                                    'grease_data.execution.returnData':
                                        finishedJob.getData()
                                }
                            })
                        else:
                            # Job Failure
                            self.ioc.getLogger().warning(
                                "Job Failed [{0}]".format(key),
                                additional=finishedJob.getData())
                            # BUG FIX: `val['command']` is a Command instance,
                            # not a dict -- `.get('failures', 0)` would raise
                            # AttributeError; use an atomic increment of the
                            # persisted counter instead
                            JobCollection.update_one({'_id': ObjectId(key)}, {
                                '$inc': {'grease_data.execution.failures': 1}
                            })
                        # close out job
                        finishedJob.__del__()
                        del finishedJob
                self.contextManager['jobs'] = jobs
            else:
                Threads = False
        return True

    def register(self):
        """Attempt to register with MongoDB

        Returns:
            bool: Registration Success

        """
        return self.ioc.ensureRegistration()

    def log_once_per_second(self, message, level=DEBUG, additional=None):
        """Log Message once per second

        Args:
            message (str): Message to log
            level (int): Log Level
            additional (object): Additional information that is able to be str'd

        Returns:
            None: Void Method to fire log message

        """
        if self._has_time_progressed():
            self.ioc.getLogger().TriageMessage(
                message=message, level=level, additional=additional)

    def _has_time_progressed(self):
        """Determines if the current second and the real second are not the same

        Returns:
            bool: if true then time has passed in a meaningful way

        """
        if self.current_real_second != datetime.utcnow().second:
            self.current_real_second = datetime.utcnow().second
            return True
        else:
            return False
class Scheduling(object):
    """Central scheduling class for GREASE

    This class routes data to nodes within GREASE

    Attributes:
        ioc (GreaseContainer): IoC access for DeDuplication

    """

    def __init__(self, ioc=None):
        """Create the scheduler and ensure this node is registered

        Args:
            ioc (GreaseContainer): IoC container; a fresh one is created
                when anything else is supplied

        """
        if isinstance(ioc, GreaseContainer):
            self.ioc = ioc
        else:
            self.ioc = GreaseContainer()
        self.ioc.ensureRegistration()

    def scheduleDetection(self, source, configName, data):
        """Schedule a Source Parse to detection

        This method will take a list of single dimension dictionaries and schedule them for detection

        Args:
            source (str): Name of the source
            configName (str): Configuration Data was sourced from
            data (list[dict]): Data to be scheduled for detection

        Returns:
            bool: Scheduling success

        """
        # BUG FIX: the original tested `len(data) is 0` (identity compare on
        # an int) and called len() before the isinstance guard, which raises
        # TypeError for non-sized input
        if not isinstance(data, list) or not data:
            self.ioc.getLogger().trace(
                "Data provided empty or is not type list type: [{0}] len: [{1}]".format(
                    str(type(data)),
                    len(data) if hasattr(data, '__len__') else 0
                ),
                trace=True
            )
            return False
        self.ioc.getLogger().trace(
            "Preparing to schedule [{0}] source objects".format(len(data)), trace=True)
        sourceCollect = self.ioc.getCollection('SourceData')
        jServerCollect = self.ioc.getCollection('JobServer')
        # begin scheduling loop of each block
        for elem in data:
            if not isinstance(elem, dict):
                self.ioc.getLogger().warning(
                    "Element from data not of type dict! Got [{0}] DROPPED".format(
                        str(type(elem))),
                    notify=False
                )
                continue
            server, jobCount = self.determineDetectionServer()
            if server:
                sourceCollect.insert_one({
                    'grease_data': {
                        'sourcing': {
                            'server': ObjectId(self.ioc.getConfig().NodeIdentity)
                        },
                        'detection': {
                            'server': ObjectId(server),
                            'start': None,
                            'end': None,
                            'detection': {}
                        },
                        'scheduling': {
                            'server': None,
                            'start': None,
                            'end': None
                        },
                        'execution': {
                            'server': None,
                            'assignmentTime': None,
                            'completeTime': None,
                            'returnData': {},
                            'executionSuccess': False,
                            'commandSuccess': False,
                            'failures': 0
                        }
                    },
                    'source': str(source),
                    'configuration': str(configName),
                    'data': elem,
                    'createTime': datetime.datetime.utcnow(),
                    'expiry': Deduplication.generate_max_expiry_time(1)
                })
                # track the detection node's load
                jServerCollect.update_one(
                    {'_id': ObjectId(server)},
                    {'$set': {'jobs': int(jobCount) + 1}}
                )
            else:
                self.ioc.getLogger().warning(
                    "Failed to find detection server for data object from source [{0}]; DROPPED".format(source),
                    notify=False
                )
                self.ioc.getLogger().warning(
                    "Detection scheduling failed. Could not find detection server",
                    notify=False
                )
                return False
        return True

    def scheduleScheduling(self, objectId):
        """Schedule a source for job scheduling

        This method schedules a source for job scheduling

        Args:
            objectId (str): MongoDB ObjectId to schedule

        Returns:
            bool: If scheduling was successful

        """
        server, jobCount = self.determineSchedulingServer()
        if not server:
            self.ioc.getLogger().error(
                "Failed to find scheduling server", notify=False)
            return False
        self.ioc.getCollection('SourceData').update_one(
            {'_id': ObjectId(objectId)},
            {
                '$set': {
                    'grease_data.scheduling.server': ObjectId(server),
                    'grease_data.scheduling.start': None,
                    'grease_data.scheduling.end': None
                }
            }
        )
        # BUG FIX: the job counter lives on the JobServer document; the
        # original incremented against SourceData, where no document with the
        # server's _id exists, so node load was never tracked
        self.ioc.getCollection('JobServer').update_one(
            {'_id': ObjectId(server)},
            {'$set': {'jobs': int(jobCount) + 1}}
        )
        return True

    def determineDetectionServer(self):
        """Determines detection server to use

        Finds the detection server available for a new detection job

        Returns:
            tuple: MongoDB Object ID of server & current job count

        """
        result = self.ioc.getCollection('JobServer').find({
            'active': True,
            'prototypes': 'detect'
        }).sort('jobs', pymongo.ASCENDING).limit(1)
        if result.count():
            return str(result[0]['_id']), int(result[0]['jobs'])
        else:
            return "", 0

    def determineSchedulingServer(self):
        """Determines scheduling server to use

        Finds the scheduling server available for a new scheduling job

        Returns:
            tuple: MongoDB Object ID of server & current job count

        """
        # BUG FIX: sort ASCENDING to pick the least-loaded node; DESCENDING
        # always routed work to the busiest server (compare
        # determineDetectionServer above)
        result = self.ioc.getCollection('JobServer').find({
            'active': True,
            'prototypes': 'schedule'
        }).sort('jobs', pymongo.ASCENDING).limit(1)
        if result.count():
            return str(result[0]['_id']), int(result[0]['jobs'])
        else:
            return "", 0

    def determineExecutionServer(self, role):
        """Determines execution server to use

        Finds the execution server available for a new execution job

        Args:
            role (str): Execution role the server must hold

        Returns:
            tuple: MongoDB Object ID of server & current job count; the id is
                an empty string when no server can be found

        """
        # BUG FIX: sort ASCENDING to pick the least-loaded node (see
        # determineSchedulingServer)
        result = self.ioc.getCollection('JobServer').find({
            'active': True,
            'roles': str(role)
        }).sort('jobs', pymongo.ASCENDING).limit(1)
        if result.count():
            return str(result[0]['_id']), int(result[0]['jobs'])
        else:
            return "", 0
def test_detectionScheduling(self):
    """End-to-end check that detection scheduling balances across two nodes."""
    ioc = GreaseContainer()
    ioc.ensureRegistration()
    sch = Scheduling(ioc)
    jServer = ioc.getCollection('JobServer')
    # register two identical detection nodes a second apart
    jID1 = jServer.insert_one({
        'jobs': 0,
        'os': platform.system().lower(),
        'roles': ["general"],
        'prototypes': ["detect"],
        'active': True,
        'activationTime': datetime.datetime.utcnow()
    }).inserted_id
    time.sleep(1)
    jID2 = jServer.insert_one({
        'jobs': 0,
        'os': platform.system().lower(),
        'roles': ["general"],
        'prototypes': ["detect"],
        'active': True,
        'activationTime': datetime.datetime.utcnow()
    }).inserted_id
    time.sleep(1)
    # six identical payload documents: keys test0..test10 -> var0..var10
    payload = [
        {'test{0}'.format(i): 'var{0}'.format(i) for i in range(11)}
        for _ in range(6)
    ]
    self.assertTrue(sch.scheduleDetection('test', 'test_conf', payload))
    time.sleep(1)
    # each node should have received exactly half the work
    for nodeId in (jID1, jID2):
        self.assertEqual(
            ioc.getCollection('SourceData').find({
                'grease_data.detection.server': ObjectId(nodeId)
            }).count(),
            3)
        self.assertEqual(
            ioc.getCollection('JobServer').find_one(
                {'_id': ObjectId(nodeId)})['jobs'],
            3)
    # clean up
    jServer.delete_one({'_id': ObjectId(jID1)})
    jServer.delete_one({'_id': ObjectId(jID2)})
    ioc.getCollection('SourceData').drop()
def test_registration(self):
    """Registration against MongoDB should always succeed for this node."""
    self.assertTrue(GreaseContainer().ensureRegistration())
class BridgeCommand(object):
    """Methods for Cluster Administration

    Attributes:
        imp (ImportTool): Import Tool Instance
        monitor (NodeMonitoring): Node Monitoring Model Instance

    """

    def __init__(self, ioc=None):
        """Create the bridge command

        Args:
            ioc (GreaseContainer): IoC container; a fresh one is created
                when anything else is supplied

        """
        if isinstance(ioc, GreaseContainer):
            self.ioc = ioc
        else:
            self.ioc = GreaseContainer()
        self.imp = ImportTool(self.ioc.getLogger())
        self.monitor = NodeMonitoring(self.ioc)

    def action_register(self):
        """Ensures Registration of server

        Returns:
            bool: Registration status

        """
        self.ioc.getLogger().debug("Registration Requested")
        if self.ioc.ensureRegistration():
            print("Registration Complete!")
            self.ioc.getLogger().info("Registration Completed Successfully")
            return True
        print("Registration Failed!")
        self.ioc.getLogger().info("Registration Failed")
        return False

    def action_info(self, node=None, jobs=None, prototypeJobs=None):
        """Gets Node Information

        Args:
            node (str): MongoDB Object ID to get information about
            jobs (bool): If true then will retrieve jobs executed by this node
            prototypeJobs (bool): If true then prototype jobs will be printed as well

        Note:
            provide a node argument via the CLI --node=4390qwr2fvdew458239
        Note:
            provide a jobs argument via teh CLI --jobs
        Note:
            provide a prototype jobs argument via teh CLI --pJobs

        Returns:
            bool: If Info was found

        """
        if not self.ioc.ensureRegistration():
            self.ioc.getLogger().error("Server not registered with MongoDB")
            print("Unregistered servers cannot talk to the cluster")
            return False
        valid, serverId = self.valid_server(node)
        if not valid:
            print("Invalid ObjectID")
            return False
        server = self.ioc.getCollection('JobServer').find_one(
            {'_id': ObjectId(str(serverId))})
        if server:
            server = dict(server)
            print("""
<<<<<<<<<<<<<< SERVER: {0} >>>>>>>>>>>>>>
Activation State: {1} Date: {2}
Jobs: {3}
Operating System: {4}
Prototypes: {5}
Execution Roles: {6}
            """.format(
                server.get('_id'),
                server.get('active'),
                server.get('activationTime'),
                server.get('jobs'),
                server.get('os'),
                server.get('prototypes'),
                server.get('roles')))
            if jobs and prototypeJobs:
                print("======================= SOURCING =======================")
                for job in self.ioc.getCollection('SourceData').find(
                        {'grease_data.sourcing.server': ObjectId(serverId)}):
                    # BUG FIX: the template was passed to print() as a second
                    # argument instead of being .format()'d, so `{0}` was
                    # never substituted
                    print("""
-------------------------------
Job: {0}
-------------------------------
                    """.format(job['_id']))
            if jobs and prototypeJobs:
                print("======================= DETECTION =======================")
                for job in self.ioc.getCollection('SourceData').find(
                        {'grease_data.detection.server': ObjectId(serverId)}):
                    print("""
-------------------------------
Job: {0}
Start Time: {1}
End Time: {2}
Context: {3}
-------------------------------
                    """.format(
                        job['_id'],
                        job['grease_data']['detection']['start'],
                        job['grease_data']['detection']['end'],
                        job['grease_data']['detection']['detection']))
            if jobs and prototypeJobs:
                print("======================= SCHEDULING =======================")
                for job in self.ioc.getCollection('SourceData').find(
                        {'grease_data.scheduling.server': ObjectId(serverId)}):
                    print("""
-------------------------------
Job: {0}
Start Time: {1}
End Time: {2}
-------------------------------
                    """.format(
                        job['_id'],
                        job['grease_data']['scheduling']['start'],
                        job['grease_data']['scheduling']['end']))
            if jobs:
                print("======================= EXECUTION =======================")
                for job in self.ioc.getCollection('SourceData').find(
                        {'grease_data.execution.server': ObjectId(serverId)}):
                    print("""
-------------------------------
Job: {0}
Assignment Time: {1}
Completed Time: {2}
Execution Success: {3}
Command Success: {4}
Failures: {5}
Return Data: {6}
-------------------------------
                    """.format(
                        job['_id'],
                        job['grease_data']['execution']['assignmentTime'],
                        job['grease_data']['execution']['completeTime'],
                        job['grease_data']['execution']['executionSuccess'],
                        job['grease_data']['execution']['commandSuccess'],
                        job['grease_data']['execution']['failures'],
                        job['grease_data']['execution']['returnData']))
            return True
        print("Unable to locate server")
        self.ioc.getLogger().error(
            "Unable to load [{0}] server for information".format(serverId))
        return False

    def action_assign(self, prototype=None, role=None, node=None):
        """Assign prototypes/roles to a node either local or remote

        Args:
            prototype (str): Prototype Job to assign
            role (str): Role to assign
            node (str): MongoDB ObjectId of node to assign to, if not provided
                will default to the local node

        Returns:
            bool: If successful true else false

        """
        assigned = False
        if prototype:
            job = self.imp.load(str(prototype))
            if not job or not isinstance(job, Command):
                print("Cannot find prototype [{0}] to assign check search path!".format(
                    prototype))
                self.ioc.getLogger().error(
                    "Cannot find prototype [{0}] to assign check search path!".format(
                        prototype))
                return False
            # Cleanup job
            job.__del__()
            del job
            valid, serverId = self.valid_server(node)
            if not valid:
                print("Invalid ObjectID")
                return False
            updated = self.ioc.getCollection('JobServer').update_one(
                {'_id': ObjectId(serverId)},
                {'$addToSet': {'prototypes': prototype}}).acknowledged
            if updated:
                print("Prototype Assigned")
                self.ioc.getLogger().info(
                    "Prototype [{0}] assigned to server [{1}]".format(
                        prototype, serverId))
                assigned = True
            else:
                print("Prototype Assignment Failed!")
                self.ioc.getLogger().info(
                    "Prototype [{0}] assignment failed to server [{1}]".format(
                        prototype, serverId))
                return False
        if role:
            valid, serverId = self.valid_server(node)
            if not valid:
                print("Invalid ObjectID")
                return False
            # BUG FIX: use $addToSet like the prototype path; $push allowed
            # duplicate role entries on repeated assignment
            updated = self.ioc.getCollection('JobServer').update_one(
                {'_id': ObjectId(serverId)},
                {'$addToSet': {'roles': role}}).acknowledged
            if updated:
                print("Role Assigned")
                # BUG FIX: log messages formatted the prototype variable where
                # the role was meant
                self.ioc.getLogger().info(
                    "Role [{0}] assigned to server [{1}]".format(role, serverId))
                assigned = True
            else:
                print("Role Assignment Failed!")
                self.ioc.getLogger().info(
                    "Role [{0}] assignment failed to server [{1}]".format(
                        role, serverId))
                return False
        if not assigned:
            print("Assignment failed, please check logs for details")
        return assigned

    def action_unassign(self, prototype=None, role=None, node=None):
        """Unassign prototypes to a node either local or remote

        Args:
            prototype (str): Prototype Job to unassign
            role (str): Role to unassign
            node (str): MongoDB ObjectId of node to unassign to, if not
                provided will default to the local node

        Returns:
            bool: If successful true else false

        """
        unassigned = False
        if prototype:
            job = self.imp.load(str(prototype))
            if not job or not isinstance(job, Command):
                print("Cannot find prototype [{0}] to unassign check search path!".format(
                    prototype))
                self.ioc.getLogger().error(
                    "Cannot find prototype [{0}] to unassign check search path!".format(
                        prototype))
                return False
            # Cleanup job
            job.__del__()
            del job
            valid, serverId = self.valid_server(node)
            if not valid:
                print("Invalid ObjectID")
                return False
            updated = self.ioc.getCollection('JobServer').update_one(
                {'_id': ObjectId(serverId)},
                {'$pull': {'prototypes': prototype}}).acknowledged
            if updated:
                print("Prototype Assignment Removed")
                self.ioc.getLogger().info(
                    "Prototype [{0}] unassigned from server [{1}]".format(
                        prototype, serverId))
                unassigned = True
            else:
                print("Prototype Unassignment Failed!")
                self.ioc.getLogger().info(
                    "Prototype [{0}] unassignment failed from server [{1}]".format(
                        prototype, serverId))
                return False
        if role:
            valid, serverId = self.valid_server(node)
            if not valid:
                print("Invalid ObjectID")
                return False
            updated = self.ioc.getCollection('JobServer').update_one(
                {'_id': ObjectId(serverId)},
                {'$pull': {'roles': role}}).acknowledged
            if updated:
                print("Role Removed")
                # BUG FIX: log messages formatted the prototype variable where
                # the role was meant
                self.ioc.getLogger().info(
                    "Role [{0}] removed to server [{1}]".format(role, serverId))
                unassigned = True
            else:
                print("Role Removal Failed!")
                self.ioc.getLogger().info(
                    "Role [{0}] removal failed to server [{1}]".format(
                        role, serverId))
                return False
        if not unassigned:
            print("Unassignment failed, please check logs for details")
        return unassigned

    def action_cull(self, node=None):
        """Culls a server from the active cluster

        Args:
            node (str): MongoDB ObjectId to cull; defaults to local node

        Returns:
            bool: If culling was successful

        """
        if not self.ioc.ensureRegistration():
            self.ioc.getLogger().error("Server not registered with MongoDB")
            print("Unregistered servers cannot talk to the cluster")
            return False
        valid, serverId = self.valid_server(node)
        if not valid:
            print("Invalid ObjectID")
            return False
        if not self.monitor.deactivateServer(serverId):
            self.ioc.getLogger().error(
                "Failed deactivating server [{0}]".format(serverId))
            print("Failed deactivating server [{0}]".format(serverId))
            return False
        self.ioc.getLogger().warning(
            "Server [{0}] preparing to reallocate detect jobs".format(serverId))
        if not self.monitor.rescheduleDetectJobs(serverId):
            self.ioc.getLogger().error(
                "Failed rescheduling detect jobs [{0}]".format(serverId))
            print("Failed rescheduling detect jobs [{0}]".format(serverId))
            return False
        self.ioc.getLogger().warning(
            "Server [{0}] preparing to reallocate schedule jobs".format(serverId))
        # BUG FIX: these two failure messages said "detect jobs" regardless of
        # which reschedule step actually failed
        if not self.monitor.rescheduleScheduleJobs(serverId):
            self.ioc.getLogger().error(
                "Failed rescheduling schedule jobs [{0}]".format(serverId))
            print("Failed rescheduling schedule jobs [{0}]".format(serverId))
            return False
        self.ioc.getLogger().warning(
            "Server [{0}] preparing to reallocate jobs".format(serverId))
        if not self.monitor.rescheduleJobs(serverId):
            self.ioc.getLogger().error(
                "Failed rescheduling jobs [{0}]".format(serverId))
            print("Failed rescheduling jobs [{0}]".format(serverId))
            return False
        print("Server Deactivated")
        return True

    def action_activate(self, node=None):
        """activates server in cluster

        Args:
            node (str): MongoDB ObjectId to activate; defaults to local node

        Returns:
            bool: If activation is successful

        """
        if not self.ioc.ensureRegistration():
            self.ioc.getLogger().error("Server not registered with MongoDB")
            print("Unregistered servers cannot talk to the cluster")
            return False
        valid, serverId = self.valid_server(node)
        if not valid:
            print("Invalid ObjectID")
            return False
        if self.ioc.getCollection('JobServer').update_one(
                {'_id': ObjectId(serverId)},
                {
                    '$set': {
                        'active': True,
                        'activationTime': datetime.datetime.utcnow()
                    }
                }).modified_count < 1:
            self.ioc.getLogger().warning(
                "Server [{0}] failed to be activated".format(serverId))
            return False
        self.ioc.getLogger().warning("Server [{0}] activated".format(serverId))
        return True

    def valid_server(self, node=None):
        """Validates node is in the MongoDB instance connected to

        Args:
            node (str): MongoDB Object ID to validate; defaults to local node

        Returns:
            tuple: first element is boolean if valid second is objectId as string

        """
        if node:
            try:
                server = self.ioc.getCollection('JobServer').find_one(
                    {'_id': ObjectId(str(node))})
            except InvalidId:
                self.ioc.getLogger().error(
                    "Invalid ObjectID passed to bridge info [{0}]".format(node))
                return False, ""
            if server:
                return True, dict(server).get('_id')
            self.ioc.getLogger().error(
                "Failed to find server [{0}] in the database".format(node))
            return False, ""
        return True, self.ioc.getConfig().NodeIdentity
def test_scan(self):
    """Exercise a full environment scan against two detection nodes."""
    # setup
    configList = [
        {
            "name": "test1",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "TestSource",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        }
    ]
    ioc = GreaseContainer()
    ioc.ensureRegistration()
    ioc.getConfig().set('trace', True, 'Logging')
    ioc.getConfig().set('verbose', True, 'Logging')
    # add the test search path to the on-disk config
    with open(ioc.getConfig().greaseConfigFile, 'r') as fil:
        data = json.loads(fil.read())
    data['Import']['searchPath'].append('tgt_grease.enterprise.Model.tests')
    with open(ioc.getConfig().greaseConfigFile, 'w') as fil:
        fil.write(json.dumps(data, sort_keys=True, indent=4))
    Configuration.ReloadConfig()
    jServer = ioc.getCollection('JobServer')

    def insertDetectNode():
        # both nodes share the same shape; only insertion time differs
        return jServer.insert_one({
            'jobs': 0,
            'os': platform.system().lower(),
            'roles': ["general"],
            'prototypes': ["detect"],
            'active': True,
            'activationTime': datetime.utcnow()
        }).inserted_id

    jID1 = insertDetectNode()
    time.sleep(1)
    jID2 = insertDetectNode()
    # Begin Test
    conf = PrototypeConfig(ioc)
    conf.load(reloadConf=True, ConfigurationList=configList)
    scanner = Scan(ioc)
    # Scan Environment
    self.assertTrue(scanner.Parse())
    # Begin ensuring environment is how we expect
    # we assert less or equal because sometimes uuid's are close :p
    for nodeId in (jID1, jID2):
        self.assertLessEqual(
            ioc.getCollection('SourceData').find({
                'detectionServer': ObjectId(nodeId)
            }).count(), 3)
    for nodeId in (jID1, jID2):
        self.assertLessEqual(
            ioc.getCollection('JobServer').find_one({
                '_id': ObjectId(nodeId)
            })['jobs'], 3)
    # clean up
    with open(ioc.getConfig().greaseConfigFile, 'r') as fil:
        data = json.loads(fil.read())
    # remove collection
    ioc.getCollection('TestProtoType').drop()
    # remove prototypes
    data['NodeInformation']['ProtoTypes'] = []
    # pop search path
    data['Import']['searchPath'].pop()
    # close out
    with open(ioc.getConfig().greaseConfigFile, 'w') as fil:
        fil.write(json.dumps(data, sort_keys=True, indent=4))
    jServer.delete_one({'_id': ObjectId(jID1)})
    jServer.delete_one({'_id': ObjectId(jID2)})
    ioc.getCollection('SourceData').drop()
    ioc.getCollection('Dedup_Sourcing').drop()
    ioc.getConfig().set('trace', False, 'Logging')
    ioc.getConfig().set('verbose', False, 'Logging')
    Configuration.ReloadConfig()
class Detect(object):
    """Detection class for GREASE detect

    This is the model to actually utilize the detectors to parse the sources from scan

    Attributes:
        ioc (GreaseContainer): IOC for scanning
        impTool (ImportTool): Import Utility Instance
        conf (PrototypeConfig): Prototype configuration tool
        scheduler (Scheduling): Prototype Scheduling Service Instance

    """

    def __init__(self, ioc=None):
        """Create the detection model and ensure node registration

        Args:
            ioc (GreaseContainer): IoC container; a fresh one is created
                when anything else is supplied

        """
        if ioc and isinstance(ioc, GreaseContainer):
            self.ioc = ioc
        else:
            self.ioc = GreaseContainer()
        self.impTool = ImportTool(self.ioc.getLogger())
        self.ioc.ensureRegistration()
        self.conf = PrototypeConfig(self.ioc)
        self.scheduler = Scheduling(self.ioc)

    def detectSource(self):
        """This will perform detection the oldest source from SourceData

        Returns:
            bool: If detection process was successful

        """
        sourceData = self.getScheduledSource()
        if sourceData:
            if isinstance(sourceData.get('configuration'), bytes):
                conf = sourceData.get('configuration').decode()
            else:
                conf = sourceData.get('configuration')
            configurationData = self.conf.get_config(conf)
            if configurationData:
                # mark detection as started before running the detectors
                self.ioc.getCollection('SourceData').update_one(
                    {'_id': ObjectId(sourceData.get('_id'))},
                    {
                        '$set': {
                            'grease_data.detection.start':
                                datetime.datetime.utcnow()
                        }
                    })
                result, resultData = self.detection(
                    sourceData.get('data'), configurationData)
                if result:
                    # Put constants in detection results
                    resultData['constants'] = self.conf.get_config(
                        configurationData.get('name')).get('constants', {})
                    # Update detection
                    self.ioc.getCollection('SourceData').update_one(
                        {'_id': ObjectId(sourceData.get('_id'))},
                        {
                            '$set': {
                                'grease_data.detection.end':
                                    datetime.datetime.utcnow(),
                                'grease_data.detection.detection': resultData
                            }
                        })
                    # attempt scheduling
                    return self.scheduler.scheduleScheduling(
                        sourceData.get('_id'))
                else:
                    self.ioc.getCollection('SourceData').update_one(
                        {'_id': ObjectId(sourceData.get('_id'))},
                        {
                            '$set': {
                                'grease_data.detection.end':
                                    datetime.datetime.utcnow(),
                                'grease_data.detection.detection': {}
                            }
                        })
                    self.ioc.getLogger().trace(
                        "Detection yielded no detection data", trace=True)
                    return True
            else:
                self.ioc.getLogger().error(
                    "Failed to load Prototype Config [{0}]".format(
                        sourceData.get('configuration')),
                    notify=False)
                return False
        else:
            self.ioc.getLogger().trace(
                "No sources awaiting detection currently", trace=True)
            return True

    def getScheduledSource(self):
        """Queries for oldest source that has been assigned for detection

        Returns:
            dict: source awaiting detection

        """
        return self.ioc.getCollection('SourceData').find_one(
            {
                'grease_data.detection.server':
                    ObjectId(self.ioc.getConfig().NodeIdentity),
                'grease_data.detection.start': None,
                'grease_data.detection.end': None,
            },
            # BUG FIX: documents store `createTime` at the top level (see
            # Scheduling.scheduleDetection) and "oldest" means ASCENDING;
            # the original sorted a nonexistent `grease_data.createTime`
            # field descending
            sort=[('createTime', pymongo.ASCENDING)])

    def detection(self, source, configuration):
        """Performs detection on a source with the provided configuration

        Args:
            source (dict): Key->Value pairs from sourcing to detect upon
            configuration (dict): Prototype configuration provided from sourcing

        Returns:
            tuple: Detection Results; first boolean for success, second dict of
                variables for context

        """
        # Ensure types
        final = {}
        finalBool = False
        if not isinstance(source, dict):
            self.ioc.getLogger().warning(
                "Detection got non-dict source data", notify=False)
            return False, final
        if not isinstance(configuration, dict):
            self.ioc.getLogger().warning(
                "Detection got non-dict configuration", notify=False)
            return False, final
        # Now loop through logical blocks
        for detector, logicBlock in configuration.get('logic', {}).items():
            if not isinstance(logicBlock, list):
                self.ioc.getLogger().warning(
                    "Logical Block was not list", trace=True, notify=False)
                # BUG FIX: the original warned but still handed the non-list
                # block to the detector; skip malformed blocks instead
                continue
            detect = self.impTool.load(detector)
            if isinstance(detect, Detector):
                result, resultData = detect.processObject(source, logicBlock)
                if not result:
                    self.ioc.getLogger().trace(
                        "Detection yielded false for [{0}]".format(detector),
                        trace=True)
                    finalBool = False
                    break
                else:
                    self.ioc.getLogger().trace(
                        "Detection yielded true for [{0}]".format(detector),
                        trace=True)
                    for key, val in resultData.items():
                        final[key] = val
                    finalBool = True
                    continue
            else:
                self.ioc.getLogger().warning(
                    "invalid detector [{0}]".format(detector), notify=False)
                finalBool = False
        return finalBool, final
class Scheduler(object):
    """Job Scheduler Model

    This model will attempt to schedule a job for execution

    Attributes:
        ioc (GreaseContainer): IOC for scanning
        impTool (ImportTool): Import Utility Instance
        conf (PrototypeConfig): Prototype configuration tool
        scheduler (Scheduling): Prototype Scheduling Service Instance

    """

    def __init__(self, ioc=None):
        # Fall back to a fresh container when a usable IOC is not supplied
        if ioc and isinstance(ioc, GreaseContainer):
            self.ioc = ioc
        else:
            self.ioc = GreaseContainer()
        self.impTool = ImportTool(self.ioc.getLogger())
        self.ioc.ensureRegistration()
        self.conf = PrototypeConfig(self.ioc)
        self.scheduler = Scheduling(self.ioc)

    def scheduleExecution(self):
        """Schedules the oldest successfully detected source to execution

        Returns:
            bool: True when scheduling succeeded or there was nothing to
                schedule; False when a detected source could not be scheduled

        """
        src = self.getDetectedSource()
        if not src:
            # Nothing assigned to this node; not an error
            self.ioc.getLogger().trace(
                "No sources detected for this node at this time", trace=True)
            return True
        self.ioc.getLogger().trace("Attempting schedule of source", trace=True)
        # Mark the start of the scheduling window before attempting
        self.ioc.getCollection('SourceData').update_one(
            {'_id': ObjectId(src.get('_id'))},
            {'$set': {
                'grease_data.scheduling.start': datetime.datetime.utcnow()
            }})
        if not self.schedule(src):
            self.ioc.getLogger().error(
                "Failed to schedule [{0}] for execution".format(src['_id']),
                trace=True,
                notify=False)
            # Clear the scheduling window so a later pass can retry
            self.ioc.getCollection('SourceData').update_one(
                {'_id': ObjectId(src.get('_id'))},
                {'$set': {
                    'grease_data.scheduling.start': None,
                    'grease_data.scheduling.end': None
                }})
            return False
        self.ioc.getLogger().trace(
            "Scheduling [{0}] was successful".format(src['_id']), trace=True)
        self.ioc.getCollection('SourceData').update_one(
            {'_id': ObjectId(src.get('_id'))},
            {'$set': {
                'grease_data.scheduling.end': datetime.datetime.utcnow()
            }})
        return True

    def getDetectedSource(self):
        """Gets the oldest successfully detected source

        Returns:
            dict: Object from MongoDB

        """
        query = {
            'grease_data.scheduling.server':
            ObjectId(self.ioc.getConfig().NodeIdentity),
            'grease_data.scheduling.start': None,
            'grease_data.scheduling.end': None
        }
        return self.ioc.getCollection('SourceData').find_one(
            query, sort=[('grease_data.createTime', pymongo.DESCENDING)])

    def schedule(self, source):
        """Schedules source for execution

        Args:
            source (dict): detected source document to assign to a node

        Returns:
            bool: If scheduling was successful or not

        """
        raw_conf = source['configuration']
        if isinstance(raw_conf, bytes):
            raw_conf = raw_conf.decode()
        cfg = self.conf.get_config(raw_conf)
        if not cfg:
            self.ioc.getLogger().error(
                "Failed to load configuration for source [{0}]".format(
                    source['_id']))
            return False
        env = cfg.get('exe_env', 'general')
        exec_server, job_count = self.scheduler.determineExecutionServer(env)
        if not exec_server:
            self.ioc.getLogger().error(
                "Failed to find an Execution Node for environment [{0}]".
                format(env))
            return False
        # Assign the source to the chosen execution node
        self.ioc.getCollection('SourceData').update_one(
            {'_id': ObjectId(source['_id'])},
            {'$set': {
                'grease_data.execution.server': ObjectId(exec_server),
                'grease_data.execution.assignmentTime':
                datetime.datetime.utcnow(),
            }})
        # Bump that node's running job tally
        self.ioc.getCollection('JobServer').update_one(
            {'_id': ObjectId(exec_server)},
            {'$set': {
                'jobs': job_count + 1
            }})
        return True
def test_real(self): ############################################# # SETUP UP TIME ############################################# ioc = GreaseContainer() pConf = PrototypeConfig(ioc) ioc.ensureRegistration() ioc.getCollection('JobServer').update_one( {'_id': ObjectId(ioc.getConfig().NodeIdentity)}, {'$set': { 'prototypes': ['scan', 'detect', 'schedule'] }}) ioc.getCollection('Configuration').insert_one({ 'active': True, 'type': 'prototype_config', "name": "full_stack_test", "job": "help", "exe_env": "general", "source": "url_source", "url": ['http://google.com'], "logic": { "Regex": [{ "field": "url", "pattern": ".*", 'variable': True, 'variable_name': 'url' }], 'Range': [{ 'field': 'status_code', 'min': 199, 'max': 201 }] }, 'constants': { 'test': 'ver' } }) pConf.load(reloadConf=True) ############################################# # EXECUTE SCANNING ############################################# Scanner = scan() Scanner.ioc.getLogger().getConfig().set('verbose', True, 'Logging') Scanner.ioc.getLogger().getConfig().set('trace', True, 'Logging') Scanner.ioc.getLogger().getConfig().set('config', 'full_stack_test', 'Sourcing') self.assertTrue(Scanner.execute({'loop': 1})) ############################################# # ASSERT SCANNING ############################################# self.assertTrue( ioc.getCollection('SourceData').find_one({ 'grease_data.sourcing.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.detection.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.detection.start': None, 'grease_data.detection.end': None })) ############################################# # EXECUTE DETECTION ############################################# Detect = detect() Detect.ioc.getLogger().getConfig().set('verbose', True, 'Logging') Detect.ioc.getLogger().getConfig().set('trace', True, 'Logging') Detect.ioc.getLogger().getConfig().set('config', 'full_stack_test', 'Sourcing') self.assertTrue(Detect.execute({'loop': 1})) 
############################################# # ASSERT DETECTION ############################################# self.assertTrue( ioc.getCollection('SourceData').find_one({ 'grease_data.sourcing.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.detection.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.scheduling.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.scheduling.start': None, 'grease_data.scheduling.end': None })) ############################################# # EXECUTE SCHEDULING ############################################# Scheduling = schedule() Scheduling.ioc.getLogger().getConfig().set('verbose', True, 'Logging') Scheduling.ioc.getLogger().getConfig().set('trace', True, 'Logging') Scheduling.ioc.getLogger().getConfig().set('config', 'full_stack_test', 'Sourcing') self.assertTrue(Scheduling.execute({'loop': 1})) ############################################# # ASSERT SCHEDULING ############################################# self.assertTrue( ioc.getCollection('SourceData').find_one({ 'grease_data.sourcing.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.detection.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.scheduling.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.execution.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.execution.start': None, 'grease_data.execution.end': None })) ############################################# # EXECUTE JOBS ############################################# ioc.getCollection('JobServer').update_one( {'_id': ObjectId(ioc.getConfig().NodeIdentity)}, {'$set': { 'prototypes': [] }}) Daemon = DaemonProcess(ioc) Daemon.ioc.getLogger().getConfig().set('verbose', True, 'Logging') Daemon.ioc.getLogger().getConfig().set('trace', True, 'Logging') Daemon.ioc.getLogger().getConfig().set('config', 'full_stack_test', 'Sourcing') self.assertTrue(Daemon.server()) self.assertTrue(Daemon.drain_jobs(ioc.getCollection('SourceData'))) 
############################################# # ASSERT JOB EXECUTION ############################################# # sleep a few for seconds to let help complete time.sleep(5) self.assertTrue( ioc.getCollection('SourceData').find_one({ 'grease_data.sourcing.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.detection.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.scheduling.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.execution.server': ObjectId(ioc.getConfig().NodeIdentity), 'grease_data.execution.commandSuccess': True, 'grease_data.execution.executionSuccess': True })) ############################################# # CLEAN UP TIME ############################################# ioc.getCollection('JobServer').update_one( {'_id': ObjectId(ioc.getConfig().NodeIdentity)}, {'$set': { 'prototypes': [] }}) ioc.getCollection('Configuration').drop() ioc.getCollection('SourceData').drop() ioc.getCollection('DeDup_Sourcing').drop() pConf.load(reloadConf=True)