def __init__(self, ioc=None):
    """Build the command, reusing an injected IOC container when one is supplied.

    Args:
        ioc (GreaseContainer): Optional pre-built container; a fresh one is
            created when the argument is absent or of the wrong type
    """
    self.ioc = ioc if isinstance(ioc, GreaseContainer) else GreaseContainer()
    self.imp = ImportTool(self.ioc.getLogger())
    self.monitor = NodeMonitoring(self.ioc)
def __init__(self, ioc=None):
    """Initialize scanning dependencies from the given or a new IOC container.

    Args:
        ioc (GreaseContainer): Optional pre-built container; a fresh one is
            created when the argument is absent or of the wrong type
    """
    # NOTE: `isinstance(None, GreaseContainer)` is already False, so the
    # original `ioc and isinstance(...)` truthiness guard was redundant;
    # this also matches the sibling constructors in this codebase
    if isinstance(ioc, GreaseContainer):
        self.ioc = ioc
    else:
        self.ioc = GreaseContainer()
    self.conf = PrototypeConfig(self.ioc)
    self.impTool = ImportTool(self.ioc.getLogger())
    self.dedup = Deduplication(self.ioc)
    self.scheduler = Scheduling(self.ioc)
def __init__(self, ioc):
    """Initialize the daemon process state.

    Args:
        ioc (GreaseContainer): Container to use; replaced with a fresh one
            when the argument is not a GreaseContainer
    """
    self.ioc = ioc if isinstance(ioc, GreaseContainer) else GreaseContainer()
    self.current_real_second = datetime.utcnow().second
    # A node with an unknown identity must register before it may serve
    if self.ioc.getConfig().NodeIdentity == "Unknown" and not self.register():
        self.registered = False
    self.impTool = ImportTool(self.ioc.getLogger())
    self.conf = PrototypeConfig(self.ioc)
def execute(self, context):
    """Print help text for every Command discoverable on the import search path.

    Args:
        context (dict): CLI context (unused by this command)

    Returns:
        bool: always True
    """
    print("")
    print("Welcome to GREASE Help")
    loader = ImportTool(self.ioc.getLogger())
    for search_path in self.ioc.getConfig().get('Import', 'searchPath'):
        module = importlib.import_module(search_path)
        for attribute_name in dir(module):
            candidate = loader.load(attribute_name)
            # Skip anything that did not resolve to a Command subclass instance
            if not (candidate and isinstance(candidate, Command)):
                continue
            print("<======================>")
            print("[{0}] Purpose: [{1}]".format(
                candidate.__class__.__name__, candidate.purpose))
            print("Author: {0}".format(candidate.__author__))
            print("Current Version: {0}".format(candidate.__version__))
            if candidate.os_needed:
                print('Needs OS: {0}'.format(candidate.os_needed))
            print(candidate.help)
            print("<======================>")
    return True
def test_init_exception(self, mock_getattr, mock_import, mock_dir_contains):
    """load() should return None when instantiating the resolved class raises."""
    log = Logging()
    importer = ImportTool(log)

    def raise_test_exception():
        raise Exception("Test Exception")

    # Mutable container: the closure below cannot rebind a plain int
    # without `nonlocal`, so the counter lives in a dict
    state = {'times_called': 0}

    def true_only_first_time(*args, **kwargs):
        state['times_called'] += 1
        return state['times_called'] == 1

    mock_dir_contains.side_effect = true_only_first_time
    mocked_requirement = MagicMock()
    mocked_requirement.side_effect = raise_test_exception
    mock_getattr.return_value = mocked_requirement
    self.assertEqual(importer.load("mock_class"), None)
    mock_getattr.assert_called_once()
    mocked_requirement.assert_called_once()
class DaemonProcess(object):
    """Actual daemon processing for GREASE Daemon

    Spins up prototype threads assigned to this node and executes on-demand
    jobs found in the SourceData collection, tracking both in contextManager.

    Attributes:
        ioc (GreaseContainer): The Grease IOC
        current_real_second (int): Current second in time
        registered (bool): If the node is registered with MongoDB
        contextManager (dict): Thread tracking for running 'jobs' and 'prototypes'
        impTool (ImportTool): Instance of Import Tool
        conf (PrototypeConfig): Prototype Configuration Instance

    """

    ioc = None
    current_real_second = None
    registered = True
    contextManager = {'jobs': {}, 'prototypes': {}}
    impTool = None

    def __init__(self, ioc):
        if isinstance(ioc, GreaseContainer):
            self.ioc = ioc
        else:
            self.ioc = GreaseContainer()
        self.current_real_second = datetime.utcnow().second
        # A node with an unknown identity must register before it may serve
        if self.ioc.getConfig().NodeIdentity == "Unknown" and not self.register():
            self.registered = False
        self.impTool = ImportTool(self.ioc.getLogger())
        self.conf = PrototypeConfig(self.ioc)

    def server(self):
        """Server process for ensuring prototypes & jobs are running

        By Running this method this will clear the DB of any jobs a node may have

        Returns:
            bool: Server Success

        """
        # Ensure we aren't swamping the system
        cpu = cpu_percent(interval=.1)
        mem = virtual_memory().percent
        if \
                cpu >= int(self.ioc.getConfig().get('NodeInformation', 'ResourceMax')) \
                or mem >= int(self.ioc.getConfig().get('NodeInformation', 'ResourceMax')):
            self.ioc.getLogger().trace(
                "Thread Maximum Reached CPU: [{0}] Memory: [{1}]".format(cpu, mem),
                trace=True
            )
            # remove variables
            del cpu
            del mem
            return True
        if not self.registered:
            self.ioc.getLogger().trace("Server is not registered", trace=True)
            return False
        self.ioc.getLogger().trace("Server execution starting", trace=True)
        # establish job collection
        JobsCollection = self.ioc.getCollection("SourceData")
        self.ioc.getLogger().trace("Searching for Jobs", trace=True)
        jobs = JobsCollection.find({
            'grease_data.execution.server': ObjectId(self.ioc.getConfig().NodeIdentity),
            'grease_data.execution.commandSuccess': False,
            'grease_data.execution.executionSuccess': False,
            'grease_data.execution.failures': {'$lt': 6}
        })
        # Get Node Information
        Node = self.ioc.getCollection('JobServer').find_one(
            {'_id': ObjectId(self.ioc.getConfig().NodeIdentity)})
        if not Node:
            # If for some reason we couldn't find it
            self.ioc.getLogger().error("Failed To Load Node Information")
            return False
        # Get Prototypes
        prototypes = list(Node.get('prototypes'))
        # Del node instance
        del Node
        if prototypes:
            # We have prototypes to spin up
            for prototype in prototypes:
                self.ioc.getLogger().trace(
                    "Passing ProtoType [{0}] to Runner".format(prototype), trace=True)
                self._run_prototype(prototype)
        # NOTE(review): Cursor.count() was removed in pymongo 4 — switch to
        # collection.count_documents(filter) if the driver is ever upgraded
        if jobs.count():
            self.ioc.getLogger().trace(
                "Total Jobs to Execute: [{0}]".format(jobs.count()))
            for job in jobs:
                self.ioc.getLogger().trace(
                    "Passing Job [{0}] to Runner".format(job.get("_id")), trace=True)
                self._run_job(job, JobsCollection)
        else:
            # Nothing to Run for Jobs
            self.ioc.getLogger().trace("No Jobs Scheduled to Server", trace=True)
        self.ioc.getLogger().trace("Server execution complete", trace=True)
        return True

    def _run_job(self, job, JobCollection):
        """Run a On-Demand Job

        Args:
            job (dict): Job Data to execute
            JobCollection (pymongo.collection.Collection): JobCollection to update for telemetry

        Returns:
            None: Void Method to kickoff execution

        """
        if not self.contextManager['jobs'].get(job.get('_id')):
            # New Job to run
            if isinstance(job.get('configuration'), bytes):
                conf = job.get('configuration').decode()
            else:
                conf = job.get('configuration')
            inst = self.impTool.load(self.conf.get_config(conf).get('job', ''))
            if inst and isinstance(inst, Command):
                inst.ioc.getLogger().foreground = self.ioc.getLogger().foreground
                thread = threading.Thread(
                    target=inst.safe_execute,
                    args=(job.get('grease_data', {}).get('detection', {}).get('detection', {}),),
                    name="GREASE DAEMON COMMAND EXECUTION [{0}]".format(job.get('_id'))
                )
                thread.daemon = True
                thread.start()
                self.contextManager['jobs'][job.get("_id")] = {
                    'thread': thread,
                    'command': inst
                }
            else:
                # Invalid Job
                del inst
                self.ioc.getLogger().warning("Invalid Job", additional=job)
                # BUG FIX: previously read the non-existent top-level
                # job['failures'] (always defaulting to 0, so the counter was
                # reset to 1); read the nested counter like the failure path below
                JobCollection.update_one(
                    {'_id': ObjectId(job['_id'])},
                    {
                        '$set': {
                            'grease_data.execution.failures':
                                job.get('grease_data', {}).get('execution', {}).get('failures', 0) + 1
                        }
                    }
                )
            return
        else:
            # Job already executing
            # NOTE: Thread.isAlive() was removed in Python 3.9; is_alive() is
            # the portable spelling (available since 2.6)
            if self.contextManager['jobs'].get(job.get('_id')).get('thread').is_alive():
                # thread still executing
                return
            else:
                # Execution has ended
                self.ioc.getLogger().trace(
                    "Job [{0}] finished running".format(job.get('_id')), trace=True)
                finishedJob = self.contextManager['jobs'].get(
                    job.get('_id')).get('command')  # type: Command
                if finishedJob.getRetVal():
                    # job completed successfully
                    JobCollection.update_one(
                        {'_id': ObjectId(job.get('_id'))},
                        {
                            '$set': {
                                'grease_data.execution.commandSuccess': finishedJob.getRetVal(),
                                'grease_data.execution.executionSuccess': finishedJob.getExecVal(),
                                'grease_data.execution.completeTime': datetime.utcnow(),
                                'grease_data.execution.returnData': finishedJob.getData()
                            }
                        }
                    )
                else:
                    # Job Failure
                    self.ioc.getLogger().warning(
                        "Job Failed [{0}]".format(job.get('_id')),
                        additional=finishedJob.getData()
                    )
                    # TODO: Job Execution cooldown timing
                    JobCollection.update_one(
                        {'_id': ObjectId(job['_id'])},
                        {
                            '$set': {
                                'grease_data.execution.failures':
                                    job.get('grease_data', {}).get(
                                        'execution', {}).get('failures', 0) + 1
                            }
                        }
                    )
                # close out job
                finishedJob.__del__()
                del finishedJob
                # remove from contextManager
                del self.contextManager['jobs'][job.get('_id')]
                return

    def _run_prototype(self, prototype):
        """Startup a ProtoType

        Args:
            prototype (str): ProtoType to start

        Returns:
            None: Void method to start prototype

        """
        if not self.contextManager['prototypes'].get(prototype):
            # ProtoType has not started
            inst = self.impTool.load(prototype)
            if not isinstance(inst, Command):
                # invalid ProtoType
                self.log_once_per_second(
                    "Invalid ProtoType [{0}]".format(prototype), level=ERROR)
                return
            inst.ioc.getLogger().foreground = self.ioc.getLogger().foreground
            # BUG FIX: args=({}) is NOT a one-tuple — the parentheses were
            # redundant, so the empty dict itself was iterated as the argument
            # list and safe_execute ran with no context argument at all
            thread = threading.Thread(
                target=inst.safe_execute,
                args=({},),
                name="GREASE DAEMON PROTOTYPE [{0}]".format(prototype)
            )
            thread.daemon = True
            thread.start()
            self.contextManager['prototypes'][prototype] = thread
            return
        else:
            # ensure thread is alive
            if self.contextManager['prototypes'].get(prototype).is_alive():
                self.ioc.getLogger().trace(
                    "ProtoType [{0}] is alive".format(prototype))
                return
            else:
                # Thread died for some reason
                self.log_once_per_second(
                    "ProtoType [{0}] Stopped".format(prototype), level=INFO)
                inst = self.impTool.load(prototype)
                if not isinstance(inst, Command):
                    self.log_once_per_second(
                        "Invalid ProtoType [{0}]".format(prototype), level=ERROR)
                    return
                inst.ioc.getLogger().foreground = self.ioc.getLogger().foreground
                # CONSISTENCY FIX: the restart path previously targeted bare
                # `execute` with no args (crashing the thread when execute
                # requires a context); use safe_execute like the first start
                thread = threading.Thread(
                    target=inst.safe_execute,
                    args=({},),
                    name="GREASE DAEMON PROTOTYPE [{0}]".format(prototype)
                )
                thread.daemon = True
                thread.start()
                self.contextManager['prototypes'][prototype] = thread
                return

    def drain_jobs(self, JobCollection):
        """Will drain jobs from the current context

        This method is used to prevent abnormal ending of executions

        Args:
            JobCollection (pymongo.collection.Collection): Job Collection Object

        Returns:
            bool: When job queue is emptied

        """
        Threads = True
        while Threads:
            if self.contextManager['jobs']:
                jobs = {}
                for key, val in self.contextManager['jobs'].items():
                    if val['thread'].is_alive():
                        jobs[key] = val
                        continue
                    else:
                        # Execution has ended
                        self.ioc.getLogger().trace(
                            "Job [{0}] finished running".format(key), trace=True)
                        finishedJob = self.contextManager['jobs'].get(key).get(
                            'command')  # type: Command
                        if finishedJob.getRetVal():
                            # job completed successfully
                            JobCollection.update_one(
                                {'_id': ObjectId(key)},
                                {
                                    '$set': {
                                        'grease_data.execution.commandSuccess': finishedJob.getRetVal(),
                                        'grease_data.execution.executionSuccess': finishedJob.getExecVal(),
                                        'grease_data.execution.completeTime': datetime.utcnow(),
                                        'grease_data.execution.returnData': finishedJob.getData()
                                    }
                                }
                            )
                        else:
                            # Job Failure
                            self.ioc.getLogger().warning(
                                "Job Failed [{0}]".format(key),
                                additional=finishedJob.getData()
                            )
                            # BUG FIX: val['command'] is a Command instance, not
                            # a dict — calling .get('failures', 0) on it raised
                            # AttributeError; use an atomic $inc instead
                            JobCollection.update_one(
                                {'_id': ObjectId(key)},
                                {'$inc': {'grease_data.execution.failures': 1}}
                            )
                        # close out job
                        finishedJob.__del__()
                        del finishedJob
                self.contextManager['jobs'] = jobs
            else:
                Threads = False
        return True

    def register(self):
        """Attempt to register with MongoDB

        Returns:
            bool: Registration Success

        """
        return self.ioc.ensureRegistration()

    def log_once_per_second(self, message, level=DEBUG, additional=None):
        """Log Message once per second

        Args:
            message (str): Message to log
            level (int): Log Level
            additional (object): Additional information that is able to be str'd

        Returns:
            None: Void Method to fire log message

        """
        if self._has_time_progressed():
            self.ioc.getLogger().TriageMessage(
                message=message, level=level, additional=additional)

    def _has_time_progressed(self):
        """Determines if the current second and the real second are not the same

        Returns:
            bool: if true then time has passed in a meaningful way

        """
        if self.current_real_second != datetime.utcnow().second:
            self.current_real_second = datetime.utcnow().second
            return True
        else:
            return False
class Scan(object):
    """Scanning class for GREASE Scanner

    This is the model to actually utilize the scanners to parse the configured environments

    Attributes:
        ioc (GreaseContainer): IOC for scanning
        conf (PrototypeConfig): Prototype configuration instance
        impTool (ImportTool): Import Utility Instance
        dedup (Deduplication): Deduplication instance to be used
        scheduler (Scheduling): Scheduling instance used to queue detection

    """

    def __init__(self, ioc=None):
        # Accept an injected container (e.g. for testing); otherwise build a fresh one
        if ioc and isinstance(ioc, GreaseContainer):
            self.ioc = ioc
        else:
            self.ioc = GreaseContainer()
        self.conf = PrototypeConfig(self.ioc)
        self.impTool = ImportTool(self.ioc.getLogger())
        self.dedup = Deduplication(self.ioc)
        self.scheduler = Scheduling(self.ioc)

    def Parse(self, source=None, config=None):
        """This will read all configurations and attempt to scan the environment

        This is the primary business logic for scanning in GREASE. This method will
        use configurations to parse the environment and attempt to schedule

        Note:
            If a Source is specified then *only* that source is parsed.
            If a configuration is set then *only* that configuration is parsed.
            If both are provided then the configuration will *only* be parsed
            if it is of the source provided

        Note:
            **If mocking is enabled**: Deduplication *will not occur*

        Args:
            source (str): If set will only parse for the source listed
            config (str): If set will only parse the specified config

        Returns:
            bool: True unless error

        """
        self.ioc.getLogger().trace("Starting Parse of Environment", trace=True)
        Configuration = self.generate_config_set(source=source, config=config)
        for conf in Configuration:
            # The random uuid4 fallback guarantees the load fails (and is
            # reported below) when a configuration has no 'source' key
            inst = self.impTool.load(conf.get('source', str(uuid4())))
            if not isinstance(inst, BaseSourceClass):
                self.ioc.getLogger().error(
                    "Invalid Source [{0}]".format(conf.get('source')), notify=False)
                del inst
                continue
            else:
                # If mock mode enabled
                if self.ioc.getConfig().get('Sourcing', 'mock'):
                    # mocked data bypasses deduplication entirely
                    data = inst.mock_data(conf)
                # else actually do sourcing
                else:
                    if inst.parse_source(conf):
                        # deduplicate data
                        data = self.dedup.Deduplicate(
                            data=inst.get_data(),
                            source=conf.get('source'),
                            threshold=inst.deduplication_strength,
                            expiry_hours=inst.deduplication_expiry,
                            expiry_max=inst.deduplication_expiry_max,
                            collection='Dedup_Sourcing',
                            field_set=inst.field_set
                        )
                    else:
                        self.ioc.getLogger().warning(
                            "Source [{0}] parsing failed".format(conf.get('source')),
                            notify=False)
                        data = []
                if len(data) > 0:
                    # hand surviving records to the scheduler for detection
                    if self.scheduler.scheduleDetection(
                            conf.get('source'), conf.get('name'), data):
                        self.ioc.getLogger().info(
                            "Data scheduled for detection from source [{0}]".format(
                                conf.get('source')),
                            trace=True)
                        del inst
                        continue
                    else:
                        self.ioc.getLogger().error(
                            "Scheduling failed for source document!", notify=False)
                        del inst
                        continue
                else:
                    self.ioc.getLogger().trace(
                        "Length of data was empty; was not scheduled", trace=True)
                    del inst
                    continue
        return True

    def generate_config_set(self, source=None, config=None):
        """Examines configuration and returns list of configs to parse

        Note:
            If a Source is specified then *only* that source is parsed.
            If a configuration is set then *only* that configuration is parsed.
            If both are provided then the configuration will *only* be parsed
            if it is of the source provided

        Args:
            source (str): If set will only parse for the source listed
            config (str): If set will only parse the specified config

        Returns:
            list[dict]: Returns Configurations to Parse for data

        """
        ConfigList = []
        if source and config:
            if self.conf.get_config(config).get('source') == source:
                ConfigList.append(self.conf.get_config(config))
                return ConfigList
            else:
                # mismatch: warn and fall through to return the empty list
                self.ioc.getLogger().warning(
                    "Configuration [{0}] Not Found With Correct Source [{1}]".format(
                        config, source),
                    trace=True,
                    notify=False)
        elif source and not config:
            if source in self.conf.get_sources():
                for configuration in self.conf.get_source(source):
                    ConfigList.append(configuration)
                return ConfigList
            else:
                self.ioc.getLogger().warning(
                    "Source not found in Configuration [{0}]".format(source),
                    trace=True,
                    notify=False)
        elif not source and config:
            if self.conf.get_config(config):
                ConfigList.append(self.conf.get_config(config))
                return ConfigList
            else:
                self.ioc.getLogger().warning(
                    "Config not found in Configuration [{0}]".format(config),
                    trace=True,
                    notify=False)
        else:
            # no filters: parse everything currently configured
            ConfigList = self.conf.getConfiguration().get('raw')
        return ConfigList
class BridgeCommand(object):
    """Methods for Cluster Administration

    Attributes:
        ioc (GreaseContainer): IOC access for cluster operations
        imp (ImportTool): Import Tool Instance
        monitor (NodeMonitoring): Node Monitoring Model Instance

    """

    def __init__(self, ioc=None):
        if isinstance(ioc, GreaseContainer):
            self.ioc = ioc
        else:
            self.ioc = GreaseContainer()
        self.imp = ImportTool(self.ioc.getLogger())
        self.monitor = NodeMonitoring(self.ioc)

    def action_register(self):
        """Ensures Registration of server

        Returns:
            bool: Registration status

        """
        self.ioc.getLogger().debug("Registration Requested")
        if self.ioc.ensureRegistration():
            print("Registration Complete!")
            self.ioc.getLogger().info("Registration Completed Successfully")
            return True
        print("Registration Failed!")
        self.ioc.getLogger().info("Registration Failed")
        return False

    def action_info(self, node=None, jobs=None, prototypeJobs=None):
        """Gets Node Information

        Args:
            node (str): MongoDB Object ID to get information about
            jobs (bool): If true then will retrieve jobs executed by this node
            prototypeJobs (bool): If true then prototype jobs will be printed as well

        Note:
            provide a node argument via the CLI --node=4390qwr2fvdew458239

        Note:
            provide a jobs argument via the CLI --jobs

        Note:
            provide a prototype jobs argument via the CLI --pJobs

        Returns:
            bool: If Info was found

        """
        if not self.ioc.ensureRegistration():
            self.ioc.getLogger().error("Server not registered with MongoDB")
            print("Unregistered servers cannot talk to the cluster")
            return False
        valid, serverId = self.valid_server(node)
        if not valid:
            print("Invalid ObjectID")
            return False
        server = self.ioc.getCollection('JobServer').find_one(
            {'_id': ObjectId(str(serverId))})
        if server:
            server = dict(server)
            print("""
<<<<<<<<<<<<<< SERVER: {0} >>>>>>>>>>>>>>
Activation State: {1} Date: {2}
Jobs: {3}
Operating System: {4}
Prototypes: {5}
Execution Roles: {6}
            """.format(
                server.get('_id'),
                server.get('active'),
                server.get('activationTime'),
                server.get('jobs'),
                server.get('os'),
                server.get('prototypes'),
                server.get('roles')
            ))
            if jobs and prototypeJobs:
                print("======================= SOURCING =======================")
                for job in self.ioc.getCollection('SourceData').find(
                        {'grease_data.sourcing.server': ObjectId(serverId)}):
                    # BUG FIX: the template was passed to print() as a second
                    # positional argument instead of being formatted into it,
                    # so a literal "{0}" and the raw id were printed
                    print("""
-------------------------------
Job: {0}
-------------------------------
""".format(job['_id']))
            if jobs and prototypeJobs:
                print("======================= DETECTION =======================")
                for job in self.ioc.getCollection('SourceData').find(
                        {'grease_data.detection.server': ObjectId(serverId)}):
                    print("""
-------------------------------
Job: {0}
Start Time: {1}
End Time: {2}
Context: {3}
-------------------------------
""".format(
                        job['_id'],
                        job['grease_data']['detection']['start'],
                        job['grease_data']['detection']['end'],
                        job['grease_data']['detection']['detection']
                    ))
            if jobs and prototypeJobs:
                print("======================= SCHEDULING =======================")
                for job in self.ioc.getCollection('SourceData').find(
                        {'grease_data.scheduling.server': ObjectId(serverId)}):
                    print("""
-------------------------------
Job: {0}
Start Time: {1}
End Time: {2}
-------------------------------
""".format(
                        job['_id'],
                        job['grease_data']['scheduling']['start'],
                        job['grease_data']['scheduling']['end']
                    ))
            if jobs:
                print("======================= EXECUTION =======================")
                for job in self.ioc.getCollection('SourceData').find(
                        {'grease_data.execution.server': ObjectId(serverId)}):
                    print("""
-------------------------------
Job: {0}
Assignment Time: {1}
Completed Time: {2}
Execution Success: {3}
Command Success: {4}
Failures: {5}
Return Data: {6}
-------------------------------
""".format(
                        job['_id'],
                        job['grease_data']['execution']['assignmentTime'],
                        job['grease_data']['execution']['completeTime'],
                        job['grease_data']['execution']['executionSuccess'],
                        job['grease_data']['execution']['commandSuccess'],
                        job['grease_data']['execution']['failures'],
                        job['grease_data']['execution']['returnData']
                    ))
            return True
        print("Unable to locate server")
        self.ioc.getLogger().error(
            "Unable to load [{0}] server for information".format(serverId))
        return False

    def action_assign(self, prototype=None, role=None, node=None):
        """Assign prototypes/roles to a node either local or remote

        Args:
            prototype (str): Prototype Job to assign
            role (str): Role to assign
            node (str): MongoDB ObjectId of node to assign to, if not provided will default to the local node

        Returns:
            bool: If successful true else false

        """
        assigned = False
        if prototype:
            # validate the prototype resolves to a real Command before assigning
            job = self.imp.load(str(prototype))
            if not job or not isinstance(job, Command):
                print("Cannot find prototype [{0}] to assign check search path!".format(
                    prototype))
                self.ioc.getLogger().error(
                    "Cannot find prototype [{0}] to assign check search path!".format(
                        prototype))
                return False
            # Cleanup job
            job.__del__()
            del job
            valid, serverId = self.valid_server(node)
            if not valid:
                print("Invalid ObjectID")
                return False
            updated = self.ioc.getCollection('JobServer').update_one(
                {'_id': ObjectId(serverId)},
                {'$addToSet': {'prototypes': prototype}}
            ).acknowledged
            if updated:
                print("Prototype Assigned")
                self.ioc.getLogger().info(
                    "Prototype [{0}] assigned to server [{1}]".format(
                        prototype, serverId))
                assigned = True
            else:
                print("Prototype Assignment Failed!")
                self.ioc.getLogger().info(
                    "Prototype [{0}] assignment failed to server [{1}]".format(
                        prototype, serverId))
                return False
        if role:
            valid, serverId = self.valid_server(node)
            if not valid:
                print("Invalid ObjectID")
                return False
            # CONSISTENCY FIX: use $addToSet like the prototype branch so
            # repeated assignment cannot duplicate a role ($push allowed dupes)
            updated = self.ioc.getCollection('JobServer').update_one(
                {'_id': ObjectId(serverId)},
                {'$addToSet': {'roles': role}}
            ).acknowledged
            if updated:
                print("Role Assigned")
                # BUG FIX: these log messages previously formatted `prototype`
                # instead of `role`
                self.ioc.getLogger().info(
                    "Role [{0}] assigned to server [{1}]".format(role, serverId))
                assigned = True
            else:
                print("Role Assignment Failed!")
                self.ioc.getLogger().info(
                    "Role [{0}] assignment failed to server [{1}]".format(
                        role, serverId))
                return False
        if not assigned:
            print("Assignment failed, please check logs for details")
        return assigned

    def action_unassign(self, prototype=None, role=None, node=None):
        """Unassign prototypes to a node either local or remote

        Args:
            prototype (str): Prototype Job to unassign
            role (str): Role to unassign
            node (str): MongoDB ObjectId of node to unassign to, if not provided will default to the local node

        Returns:
            bool: If successful true else false

        """
        unassigned = False
        if prototype:
            job = self.imp.load(str(prototype))
            if not job or not isinstance(job, Command):
                print("Cannot find prototype [{0}] to unassign check search path!".format(
                    prototype))
                self.ioc.getLogger().error(
                    "Cannot find prototype [{0}] to unassign check search path!".format(
                        prototype))
                return False
            # Cleanup job
            job.__del__()
            del job
            valid, serverId = self.valid_server(node)
            if not valid:
                print("Invalid ObjectID")
                return False
            updated = self.ioc.getCollection('JobServer').update_one(
                {'_id': ObjectId(serverId)},
                {'$pull': {'prototypes': prototype}}
            ).acknowledged
            if updated:
                print("Prototype Assignment Removed")
                self.ioc.getLogger().info(
                    "Prototype [{0}] unassigned from server [{1}]".format(
                        prototype, serverId))
                unassigned = True
            else:
                print("Prototype Unassignment Failed!")
                self.ioc.getLogger().info(
                    "Prototype [{0}] unassignment failed from server [{1}]".format(
                        prototype, serverId))
                return False
        if role:
            valid, serverId = self.valid_server(node)
            if not valid:
                print("Invalid ObjectID")
                return False
            updated = self.ioc.getCollection('JobServer').update_one(
                {'_id': ObjectId(serverId)},
                {'$pull': {'roles': role}}
            ).acknowledged
            if updated:
                print("Role Removed")
                # BUG FIX: these log messages previously formatted `prototype`
                # instead of `role`
                self.ioc.getLogger().info(
                    "Role [{0}] removed to server [{1}]".format(role, serverId))
                unassigned = True
            else:
                print("Role Removal Failed!")
                self.ioc.getLogger().info(
                    "Role [{0}] removal failed to server [{1}]".format(
                        role, serverId))
                return False
        if not unassigned:
            print("Unassignment failed, please check logs for details")
        return unassigned

    def action_cull(self, node=None):
        """Culls a server from the active cluster

        Args:
            node (str): MongoDB ObjectId to cull; defaults to local node

        Returns:
            bool: If the server was fully deactivated and its work reassigned

        """
        if not self.ioc.ensureRegistration():
            self.ioc.getLogger().error("Server not registered with MongoDB")
            print("Unregistered servers cannot talk to the cluster")
            return False
        valid, serverId = self.valid_server(node)
        if not valid:
            print("Invalid ObjectID")
            return False
        if not self.monitor.deactivateServer(serverId):
            self.ioc.getLogger().error(
                "Failed deactivating server [{0}]".format(serverId))
            print("Failed deactivating server [{0}]".format(serverId))
            return False
        self.ioc.getLogger().warning(
            "Server [{0}] preparing to reallocate detect jobs".format(serverId))
        if not self.monitor.rescheduleDetectJobs(serverId):
            self.ioc.getLogger().error(
                "Failed rescheduling detect jobs [{0}]".format(serverId))
            print("Failed rescheduling detect jobs [{0}]".format(serverId))
            return False
        self.ioc.getLogger().warning(
            "Server [{0}] preparing to reallocate schedule jobs".format(serverId))
        # BUG FIX: error messages below previously said "detect jobs"
        # (copy-paste) even though these steps reschedule schedule/general jobs
        if not self.monitor.rescheduleScheduleJobs(serverId):
            self.ioc.getLogger().error(
                "Failed rescheduling schedule jobs [{0}]".format(serverId))
            print("Failed rescheduling schedule jobs [{0}]".format(serverId))
            return False
        self.ioc.getLogger().warning(
            "Server [{0}] preparing to reallocate jobs".format(serverId))
        if not self.monitor.rescheduleJobs(serverId):
            self.ioc.getLogger().error(
                "Failed rescheduling jobs [{0}]".format(serverId))
            print("Failed rescheduling jobs [{0}]".format(serverId))
            return False
        print("Server Deactivated")
        return True

    def action_activate(self, node=None):
        """activates server in cluster

        Args:
            node (str): MongoDB ObjectId to activate; defaults to local node

        Returns:
            bool: If activation is successful

        """
        if not self.ioc.ensureRegistration():
            self.ioc.getLogger().error("Server not registered with MongoDB")
            print("Unregistered servers cannot talk to the cluster")
            return False
        valid, serverId = self.valid_server(node)
        if not valid:
            print("Invalid ObjectID")
            return False
        if self.ioc.getCollection('JobServer').update_one(
                {'_id': ObjectId(serverId)},
                {
                    '$set': {
                        'active': True,
                        'activationTime': datetime.datetime.utcnow()
                    }
                }).modified_count < 1:
            self.ioc.getLogger().warning(
                "Server [{0}] failed to be activated".format(serverId))
            return False
        self.ioc.getLogger().warning("Server [{0}] activated".format(serverId))
        return True

    def valid_server(self, node=None):
        """Validates node is in the MongoDB instance connected to

        Args:
            node (str): MongoDB Object ID to validate; defaults to local node

        Returns:
            tuple: first element is boolean if valid second is objectId as string

        """
        if node:
            try:
                server = self.ioc.getCollection('JobServer').find_one(
                    {'_id': ObjectId(str(node))})
            except InvalidId:
                self.ioc.getLogger().error(
                    "Invalid ObjectID passed to bridge info [{0}]".format(node))
                return False, ""
            if server:
                return True, dict(server).get('_id')
            self.ioc.getLogger().error(
                "Failed to find server [{0}] in the database".format(node))
            return False, ""
        # no node supplied: validate against the local node's identity
        return True, self.ioc.getConfig().NodeIdentity
class GreaseRouter(object):
    """Main GREASE CLI Router

    This class handles routing CLI requests as well as starting the Daemon on
    Windows/POSIX systems

    Attributes:
        _config (Configuration): Main Configuration Object
        _logger (Logging): Main Logging Instance
        _importTool (ImportTool): Importer Tool Instance
        _exit_message (str): Exit Message

    """

    _config = Configuration(os.environ.get('GREASE_CONF', None))
    _logger = Logging(_config)
    _importTool = ImportTool(_logger)
    _exit_message = None

    def __init__(self):
        self._logger.trace("Router Startup", trace=True)

    def StartGREASE(self):
        """EntryPoint for CLI scripts for GREASE

        Returns:
            None: Void Method for GREASE

        """
        status = self.run()
        self.exit(status, self._exit_message)

    def run(self):
        """Route commands through GREASE

        Returns:
            int: Exit Code

        """
        # ensure at least a sub-command has been provided
        if len(sys.argv) > 1:
            cmd, context = self.get_arguments()
            if cmd:
                # Parse long args to command context
                if cmd.execute(context):
                    cmd.__del__()
                    del cmd
                    return 0
                else:
                    return 3
            else:
                self._exit_message = "Command not found"
                return 2
        else:
            self._logger.error("Sub-command not provided")
            self._exit_message = "Sub-command not provided to GREASE CLI"
            return 1

    def exit(self, code, message=None):
        """Exit program with exit code

        Args:
            code (int): Exit Code
            message (str): Exit message if any

        Returns:
            None: Will exit program

        """
        if message:
            self._logger.info("Message: [{0}]".format(message))
            if code != 0:
                print("ERROR: {0}".format(message))
            else:
                print(message)
        self._logger.debug("GREASE exit code: [{0}]".format(code), verbose=True)
        sys.exit(code)

    def get_arguments(self):
        """Parse CLI long arguments into dictionaries

        This expects arguments separated by space `--opt val`, colon `--opt:val`,
        or equal `--opt=val` signs

        Returns:
            object, dict: key->value pairs of arguments

        """
        i = 1
        context = {}
        other = []
        cmd = None
        while i < len(sys.argv):
            arg = str(sys.argv[i])
            if arg.startswith("--"):
                # Found long opt
                if len(arg.split("=")) > 1:
                    # was equal separated
                    context[arg.split("=")[0].strip("--")] = arg.split("=")[1]
                elif len(arg.split(":")) > 1:
                    # was colon separated
                    context[arg.split(":")[0].strip("--")] = arg.split(":")[1]
                # BUG FIX: removed two dead branches here. The original first
                # checked `len(sys.argv) < i + 1`, which can never be true
                # inside this loop (the loop guarantees i < len(sys.argv)),
                # and a second `sys.argv[i + 1].startswith("--")` elif was
                # unreachable because the condition below already covers it.
                elif len(sys.argv) - 1 == i or sys.argv[i + 1].startswith("--"):
                    # we have a flag rather than an arg
                    context[arg.strip("--")] = True
                else:
                    # space separated
                    possible_imp = self._importTool.load(sys.argv[i + 1])
                    if not isinstance(possible_imp, Command):
                        context[arg.strip("--")] = sys.argv[i + 1]
                    else:
                        cmd = possible_imp
                    # consume the value token as well
                    i += 1
            else:
                # bare token: either the sub-command or a positional arg
                possible_imp = self._importTool.load(sys.argv[i])
                if isinstance(possible_imp, Command):
                    cmd = possible_imp
                else:
                    other.append(arg)
            i += 1
        context['grease_other_args'] = other
        return cmd, context
class Scan(object):
    """Scanning class for GREASE Scanner

    This is the model to actually utilize the scanners to parse the
    configured environments

    Attributes:
        ioc (GreaseContainer): IOC for scanning
        conf (PrototypeConfig): Prototype configuration instance
        impTool (ImportTool): Import Utility Instance
        dedup (Deduplication): Deduplication instance to be used
        scheduler (Scheduling): Central scheduling instance

    """

    def __init__(self, ioc=None):
        if ioc and isinstance(ioc, GreaseContainer):
            self.ioc = ioc
        else:
            self.ioc = GreaseContainer()
        self.conf = PrototypeConfig(self.ioc)
        self.impTool = ImportTool(self.ioc.getLogger())
        self.dedup = Deduplication(self.ioc)
        self.scheduler = Scheduling(self.ioc)

    def Parse(self, source=None, config=None):
        """This will read all configurations and attempt to scan the environment

        This is the primary business logic for scanning in GREASE. This method
        will use configurations to parse the environment and attempt to schedule

        Note:
            If a Source is specified then *only* that source is parsed.
            If a configuration is set then *only* that configuration is parsed.
            If both are provided then the configuration will *only* be parsed
            if it is of the source provided

        Note:
            **If mocking is enabled**: Deduplication *will not occur*

        Args:
            source (str): If set will only parse for the source listed
            config (str): If set will only parse the specified config

        Returns:
            bool: True unless error

        """
        self.ioc.getLogger().trace("Starting Parse of Environment", trace=True)
        Configuration = self.generate_config_set(source=source, config=config)
        ScanPool = []
        lenConfigs = len(Configuration)
        i = 0
        while i < lenConfigs:
            # ensure we don't swamp the system resources
            cpu = cpu_percent(interval=.1)
            mem = virtual_memory().percent
            resource_max = int(
                self.ioc.getConfig().get('NodeInformation', 'ResourceMax'))
            if cpu >= resource_max or mem >= resource_max:
                # cpu_percent's 0.1s sampling interval doubles as the back-off
                # sleep; `i` is deliberately not advanced so the same
                # configuration is retried once resources free up
                self.ioc.getLogger().trace(
                    "Scan sleeping; System resource maximum reached",
                    verbose=True)
                # remove variables
                del cpu
                del mem
                continue
            conf = Configuration[i]
            i += 1
            # ensure no kafka prototypes come into sourcing
            if conf.get('source') == 'kafka':
                continue
            # ensure there is an execution environment
            server, _ = self.scheduler.determineExecutionServer(
                conf.get('exe_env', 'general'))
            if not server:
                self.ioc.getLogger().warning(
                    'configuration skipped -- execution environment offline',
                    additional={
                        'execution_environment': conf.get('exe_env', 'general'),
                        'configuration': conf.get('name')
                    },
                    notify=True)
                continue
            inst = self.impTool.load(conf.get('source', str(uuid4())))
            if not isinstance(inst, BaseSourceClass):
                self.ioc.getLogger().error(
                    "Invalid Source [{0}]".format(conf.get('source')),
                    notify=False)
                del inst
                continue
            # run each source in its own daemon thread so one slow source
            # does not block the rest of the scan
            t = threading.Thread(
                target=self.ParseSource,
                args=(
                    self.ioc,
                    inst,
                    conf,
                    self.dedup,
                    self.scheduler,
                ),
                name="GREASE SOURCING THREAD [{0}]".format(conf.get('name')))
            t.daemon = True
            t.start()
            ScanPool.append(t)
        # wait for threads to finish out
        while len(ScanPool) > 0:
            self.ioc.getLogger().trace(
                "Total current scan threads [{0}]".format(len(ScanPool)),
                trace=True)
            # is_alive() replaces isAlive(), which was removed in Python 3.9
            ScanPool = [thread for thread in ScanPool if thread.is_alive()]
            self.ioc.getLogger().trace(
                "Total current scan threads [{0}]".format(len(ScanPool)),
                trace=True)
        self.ioc.getLogger().trace("Scanning Complete", trace=True)
        return True

    @staticmethod
    def ParseSource(ioc, source, configuration, deduplication, scheduler):
        """Parses an individual source and attempts to schedule it

        Args:
            ioc (GreaseContainer): IoC Instance
            source (BaseSourceClass): Source to parse
            configuration (dict): Prototype configuration to use
            deduplication (Deduplication): Dedup engine instance
            scheduler (Scheduling): Central Scheduling instance

        Returns:
            None: Meant to be run in a thread

        """
        try:
            # If mock mode enabled
            if ioc.getConfig().get('Sourcing', 'mock'):
                data = source.mock_data(configuration)
            # else actually do sourcing
            else:
                if source.parse_source(configuration):
                    # deduplicate data
                    data = deduplication.Deduplicate(
                        data=source.get_data(),
                        source=configuration.get('source'),
                        configuration=configuration.get('name', str(uuid4())),
                        threshold=source.deduplication_strength,
                        expiry_hours=source.deduplication_expiry,
                        expiry_max=source.deduplication_expiry_max,
                        collection='Dedup_Sourcing',
                        field_set=source.field_set)
                else:
                    ioc.getLogger().warning(
                        "Source [{0}] parsing failed".format(
                            configuration.get('source')),
                        notify=False)
                    data = []
            if len(data) > 0:
                if scheduler.scheduleDetection(configuration.get('source'),
                                               configuration.get('name'),
                                               data):
                    ioc.getLogger().info(
                        "Data scheduled for detection from source [{0}]".
                        format(configuration.get('source')),
                        trace=True)
                    del source
                else:
                    ioc.getLogger().error(
                        "Scheduling failed for source document!", notify=False)
                    del source
            else:
                ioc.getLogger().trace(
                    "Length of data was empty; was not scheduled", trace=True)
                del source
        except BaseException as e:
            # broad catch is deliberate: a thread must never die silently
            ioc.getLogger().error(
                "Failed parsing message got exception! "
                "Configuration [{0}] Got [{1}]".format(configuration, e))
            del source

    def generate_config_set(self, source=None, config=None):
        """Examines configuration and returns list of configs to parse

        Note:
            If a Source is specified then *only* that source is parsed.
            If a configuration is set then *only* that configuration is parsed.
            If both are provided then the configuration will *only* be parsed
            if it is of the source provided

        Args:
            source (str): If set will only parse for the source listed
            config (str): If set will only parse the specified config

        Returns:
            list[dict]: Returns Configurations to Parse for data

        """
        ConfigList = []
        if source and config:
            if self.conf.get_config(config).get('source') == source:
                ConfigList.append(self.conf.get_config(config))
                return ConfigList
            else:
                self.ioc.getLogger().warning(
                    "Configuration [{0}] Not Found With Correct Source [{1}]".
                    format(config, source),
                    trace=True,
                    notify=False)
        elif source and not config:
            if source in self.conf.get_sources():
                for configuration in self.conf.get_source(source):
                    ConfigList.append(configuration)
                return ConfigList
            else:
                self.ioc.getLogger().warning(
                    "Source not found in Configuration [{0}]".format(source),
                    trace=True,
                    notify=False)
        elif not source and config:
            if self.conf.get_config(config):
                ConfigList.append(self.conf.get_config(config))
                return ConfigList
            else:
                self.ioc.getLogger().warning(
                    "Config not found in Configuration [{0}]".format(config),
                    trace=True,
                    notify=False)
        else:
            ConfigList = self.conf.getConfiguration().get('raw')
        return ConfigList
def test_load(self): log = Logging() imp = ImportTool(log) Conf = imp.load("Configuration") self.assertTrue(isinstance(Conf, Configuration))
def test_failed_path(self): log = Logging() imp = ImportTool(log) obj = imp.load("defaultdict") self.assertFalse(obj)
class Detect(object):
    """Detection class for GREASE detect

    This is the model to actually utilize the detectors to parse the
    sources from scan

    Attributes:
        ioc (GreaseContainer): IOC for scanning
        impTool (ImportTool): Import Utility Instance
        conf (PrototypeConfig): Prototype configuration tool
        scheduler (Scheduling): Prototype Scheduling Service Instance

    """

    def __init__(self, ioc=None):
        if ioc and isinstance(ioc, GreaseContainer):
            self.ioc = ioc
        else:
            self.ioc = GreaseContainer()
        self.impTool = ImportTool(self.ioc.getLogger())
        self.ioc.ensureRegistration()
        self.conf = PrototypeConfig(self.ioc)
        self.scheduler = Scheduling(self.ioc)

    def detectSource(self):
        """This will perform detection on a scheduled source from SourceData

        Returns:
            bool: If detection process was successful

        """
        sourceData = self.getScheduledSource()
        if sourceData:
            # configuration name may arrive as bytes from the driver
            if isinstance(sourceData.get('configuration'), bytes):
                conf = sourceData.get('configuration').decode()
            else:
                conf = sourceData.get('configuration')
            configurationData = self.conf.get_config(conf)
            if configurationData:
                # mark detection as started before doing the work
                self.ioc.getCollection('SourceData').update_one(
                    {'_id': ObjectId(sourceData.get('_id'))}, {
                        '$set': {
                            'grease_data.detection.start':
                            datetime.datetime.utcnow()
                        }
                    })
                result, resultData = self.detection(sourceData.get('data'),
                                                    configurationData)
                if result:
                    # Put constants in detection results
                    resultData['constants'] = self.conf.get_config(
                        configurationData.get('name')).get('constants', {})
                    # Update detection
                    self.ioc.getCollection('SourceData').update_one(
                        {'_id': ObjectId(sourceData.get('_id'))}, {
                            '$set': {
                                'grease_data.detection.end':
                                datetime.datetime.utcnow(),
                                'grease_data.detection.detection': resultData
                            }
                        })
                    # attempt scheduling
                    return self.scheduler.scheduleScheduling(
                        sourceData.get('_id'))
                else:
                    # close out the record with an empty detection payload
                    self.ioc.getCollection('SourceData').update_one(
                        {'_id': ObjectId(sourceData.get('_id'))}, {
                            '$set': {
                                'grease_data.detection.end':
                                datetime.datetime.utcnow(),
                                'grease_data.detection.detection': {}
                            }
                        })
                    self.ioc.getLogger().trace(
                        "Detection yielded no detection data", trace=True)
                    return True
            else:
                self.ioc.getLogger().error(
                    "Failed to load Prototype Config [{0}]".format(
                        sourceData.get('configuration')),
                    notify=False)
                return False
        else:
            self.ioc.getLogger().trace(
                "No sources awaiting detection currently", trace=True)
            return True

    def getScheduledSource(self):
        """Queries for a source that has been assigned to this node for detection

        Note:
            NOTE(review): the sort is `pymongo.DESCENDING` on
            `grease_data.createTime`, which returns the *newest* unstarted
            source; the original docstring said "oldest". If FIFO processing
            is intended the sort should be ASCENDING -- confirm before
            changing query behavior.

        Returns:
            dict: source awaiting detection

        """
        return self.ioc.getCollection('SourceData').find_one(
            {
                'grease_data.detection.server':
                ObjectId(self.ioc.getConfig().NodeIdentity),
                'grease_data.detection.start': None,
                'grease_data.detection.end': None,
            },
            sort=[('grease_data.createTime', pymongo.DESCENDING)])

    def detection(self, source, configuration):
        """Performs detection on a source with the provided configuration

        All logical blocks must pass for detection to succeed; a malformed
        (non-list) logical block fails detection rather than being forwarded
        to the detector.

        Args:
            source (dict): Key->Value pairs from sourcing to detect upon
            configuration (dict): Prototype configuration provided from sourcing

        Returns:
            tuple: Detection Results; first boolean for success, second dict
                of variables for context

        """
        # Ensure types
        final = {}
        finalBool = False
        if not isinstance(source, dict):
            self.ioc.getLogger().warning("Detection got non-dict source data",
                                         notify=False)
            return finalBool, final
        if not isinstance(configuration, dict):
            self.ioc.getLogger().warning(
                "Detection got non-dict configuration", notify=False)
            return finalBool, final
        # Now loop through logical blocks
        for detector, logicBlock in configuration.get('logic', {}).items():
            if not isinstance(logicBlock, list):
                # a malformed block means the configuration cannot be trusted;
                # previously this only warned and still ran the detector on
                # the bad block -- now detection fails outright
                self.ioc.getLogger().warning("Logical Block was not list",
                                             trace=True,
                                             notify=False)
                finalBool = False
                break
            detect = self.impTool.load(detector)
            if isinstance(detect, Detector):
                result, resultData = detect.processObject(source, logicBlock)
                if not result:
                    self.ioc.getLogger().trace(
                        "Detection yielded false for [{0}]".format(detector),
                        trace=True)
                    finalBool = False
                    break
                else:
                    self.ioc.getLogger().trace(
                        "Detection yielded true for [{0}]".format(detector),
                        trace=True)
                    for key, val in resultData.items():
                        final[key] = val
                    finalBool = True
                    continue
            else:
                self.ioc.getLogger().warning(
                    "invalid detector [{0}]".format(detector), notify=False)
                finalBool = False
        return finalBool, final