def __init__(self, ioc):
    """Construct the daemon process.

    Args:
        ioc (GreaseContainer): IOC container to use; anything else
            results in a fresh container being created
    """
    if isinstance(ioc, GreaseContainer):
        self.ioc = ioc
    else:
        self.ioc = GreaseContainer()
    # second-of-the-minute marker used to rate-limit repeated log messages
    self.current_real_second = datetime.utcnow().second
    # attempt registration when this node is not yet known to MongoDB
    if self.ioc.getConfig().NodeIdentity == "Unknown" and not self.register():
        self.registered = False
    self.impTool = ImportTool(self.ioc.getLogger())
    self.conf = PrototypeConfig(self.ioc)
def test_empty_conf(self): ioc = GreaseContainer() # clean up for root, dirnames, filenames in os.walk(ioc.getConfig().get('Configuration', 'dir')): for filename in fnmatch.filter(filenames, '*.config.json'): self.assertIsNone(os.remove(os.path.join(root, filename))) conf = PrototypeConfig() conf.load(reloadConf=True) self.assertTrue(conf.getConfiguration()) self.assertListEqual(conf.getConfiguration().get('configuration').get('pkg'), []) self.assertListEqual(conf.getConfiguration().get('configuration').get('fs'), []) self.assertListEqual(conf.getConfiguration().get('configuration').get('mongo'), []) conf.load(reloadConf=True)
def test_get_sources_all_good(self):
    """get_sources() returns the distinct sources of all loaded configs."""
    conf = PrototypeConfig()
    conf.load(reloadConf=True)
    configList = [
        {
            "name": "test1",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "test2",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "stackOverflow",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "test3",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "Google",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ],
                "exists": [
                    {
                        "field": "var"
                    }
                ]
            }
        }
    ]
    conf.load(ConfigurationList=configList)
    # one entry per config, in load order
    self.assertEqual(['swapi', 'stackOverflow', 'Google'], conf.get_sources())
    # reset shared config state
    conf.load(reloadConf=True)
def test_validation_good(self):
    """A configuration carrying every required field passes validation."""
    conf = PrototypeConfig()
    candidate = {
        "name": "test5",
        "job": "fakeJob",
        "exe_env": "windows",
        "source": "swapi",
        "logic": {
            "regex": [
                {"field": "var", "pattern": "ver.*"}
            ]
        }
    }
    self.assertTrue(conf.validate_config(candidate))
def test_config_load_good_source_bad_name(self):
    """A valid source with an unknown config name yields an empty config set."""
    conf = PrototypeConfig()
    scan = Scan(conf.ioc)
    conf.load(reloadConf=True)
    configList = [
        {
            "name": "test1",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "test2",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "Google",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "test3",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "stackOverflow",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ],
                "exists": [
                    {
                        "field": "var"
                    }
                ]
            }
        }
    ]
    self.assertTrue(isinstance(conf.load(ConfigurationList=configList), dict))
    # 'test7' is not among the loaded names, so nothing should match
    configs = scan.generate_config_set(config='test7', source='swapi')
    self.assertTrue(isinstance(configs, list))
    self.assertTrue(len(configs) == 0)
    # reset shared config state
    conf.load(reloadConf=True)
def test_validation_list_good(self):
    """validate_config_list returns an all-valid list unchanged."""
    conf = PrototypeConfig()
    conf.load(reloadConf=True)
    configList = [
        {
            "name": "test1",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "test2",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "test3",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ],
                "exists": [
                    {
                        "field": "var"
                    }
                ]
            }
        }
    ]
    self.assertEqual(configList, conf.validate_config_list(configList))
    # reset shared config state
    conf.load(reloadConf=True)
def test_validation_bad(self):
    """Each required-field violation causes validate_config to reject."""
    conf = PrototypeConfig()
    # missing 'job'
    test = {
        "name": "test1",
        "exe_env": "windows",
        "source": "swapi",
        "logic": {
            "regex": [
                {
                    "field": "character",
                    "pattern": ".*skywalker.*"
                }
            ]
        }
    }
    self.assertFalse(conf.validate_config(test))
    # missing 'source' and 'logic'
    test1 = {
        "name": "test2",
        "job": "fakeJob",
        "exe_env": "windows",
    }
    self.assertFalse(conf.validate_config(test1))
    # 'logic' has the wrong type (list instead of dict)
    test2 = {
        "name": "test3",
        "job": "fakeJob",
        "exe_env": "windows",
        "source": "swapi",
        "logic": []
    }
    self.assertFalse(conf.validate_config(test2))
    # 'logic' is empty
    test3 = {
        "name": "test4",
        "job": "fakeJob",
        "exe_env": "windows",
        "source": "swapi",
        "logic": {
        }
    }
    self.assertFalse(conf.validate_config(test3))
def test_load_bad(self):
    """load() drops every invalid config and indexes only the valid one."""
    conf = PrototypeConfig()
    conf.load(reloadConf=True)
    # only the final entry (test5) is fully valid
    configList = [
        {
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {}
        },
        {
            "name": "test1",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "test2",
            "job": "fakeJob",
            "exe_env": "windows",
        },
        {
            "name": "test3",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": []
        },
        {
            "name": "test4",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
            }
        },
        {
            "name": "test5",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
                "regex": [
                    {
                        "field": "var",
                        "pattern": "ver.*"
                    }
                ]
            }
        }
    ]
    CompareList = [
        {
            "name": "test5",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
                "regex": [
                    {
                        "field": "var",
                        "pattern": "ver.*"
                    }
                ]
            }
        }
    ]
    # full structural comparison of the loaded configuration tree
    self.assertDictEqual(
        {
            'raw': CompareList,
            'configuration': {
                'ConfigurationList': CompareList
            },
            'source': {
                'swapi': CompareList
            },
            'sources': ['swapi'],
            'names': ['test5'],
            'name': {
                'test5': CompareList[0]
            }
        },
        conf.load(ConfigurationList=configList)
    )
    # reset shared config state
    conf.load(reloadConf=True)
def test_scheduleScheduling(self):
    """Detect a source document, then verify it is assigned to a schedule node."""
    d = Detect()
    p = PrototypeConfig(d.ioc)
    source = {
        'key': 'var',
        'ver': 'key',
        'greg': 'old',
        'created': '2017-11-24'
    }
    configuration = {
        'name': 'demoConfig',
        'job': 'otherThing',
        'exe_env': 'general',
        'source': 'Google',
        'logic': {
            'DateRange': [{
                'field': 'created',
                'format': '%Y-%m-%d',
                'min': '2017-11-23',
                'max': '2017-11-25'
            }],
            'Regex': [{
                'field': 'key',
                'pattern': '.*',
                'variable': True,
                'variable_name': 'field'
            }, {
                'field': 'ver',
                'pattern': '.*'
            }],
            'Exists': [{
                'field': 'greg',
                'variable': True,
                'variable_name': 'greg'
            }]
        }
    }
    p.load(True, [configuration])
    # seed a source document awaiting detection on this node
    sourceId = d.ioc.getCollection('SourceData').insert_one({
        'grease_data': {
            'sourcing': {
                'server': ObjectId(d.ioc.getConfig().NodeIdentity)
            },
            'detection': {
                'server': ObjectId(d.ioc.getConfig().NodeIdentity),
                'detectionStart': None,
                'detectionEnd': None,
                'detection': {}
            },
            'scheduling': {
                'schedulingServer': None,
                'schedulingStart': None,
                'schedulingEnd': None
            },
            'execution': {
                'server': None,
                'assignmentTime': None,
                'executionStart': None,
                'executionEnd': None,
                'context': {},
                'executionSuccess': False,
                'commandSuccess': False,
                'failures': 0,
                'retryTime': datetime.datetime.utcnow()
            }
        },
        'source': str('test').encode('utf-8'),
        'configuration': configuration.get('name'),
        'data': source,
        'createTime': datetime.datetime.utcnow(),
        'expiry': Deduplication.generate_max_expiry_time(1)
    }).inserted_id
    # register a dedicated schedule-prototype job server to receive the work
    scheduleServer = d.ioc.getCollection('JobServer').insert_one({
        'jobs': 0,
        'os': platform.system().lower(),
        'roles': ["general"],
        'prototypes': ["schedule"],
        'active': True,
        'activationTime': datetime.datetime.utcnow()
    }).inserted_id
    self.assertTrue(d.detectSource())
    self.assertFalse(d.getScheduledSource())
    # NOTE(review): the seeded document uses 'schedulingServer'/'schedulingStart'
    # keys while this query uses 'server'/'start'/'end' -- presumably the detect
    # step rewrites the scheduling sub-document; confirm against Detect.
    # NOTE(review): find_one returns None on a miss, making len() raise
    # TypeError instead of failing the assertion cleanly.
    self.assertTrue(
        len(
            d.ioc.getCollection('SourceData').find_one(
                {
                    'grease_data.scheduling.server': ObjectId(scheduleServer),
                    'grease_data.scheduling.start': None,
                    'grease_data.scheduling.end': None
                },
                sort=[('createTime', pymongo.DESCENDING)])))
    # clean up the seeded documents
    d.ioc.getCollection('JobServer').delete_one(
        {'_id': ObjectId(scheduleServer)})
    d.ioc.getCollection('SourceData').delete_one(
        {'_id': ObjectId(sourceId)})
def test_all_load_bad(self):
    """Mixed fs/pkg/mongo load keeps valid configs and drops the invalid one."""
    ioc = GreaseContainer()
    # clean up
    for root, dirnames, filenames in os.walk(ioc.getConfig().get('Configuration', 'dir')):
        for filename in fnmatch.filter(filenames, '*.config.json'):
            self.assertIsNone(os.remove(os.path.join(root, filename)))
    # clean up
    for root, dirnames, filenames in os.walk(pkg_resources.resource_filename('tgt_grease.enterprise.Model', 'config/')):
        for filename in fnmatch.filter(filenames, '*.config.json'):
            self.assertIsNone(os.remove(os.path.join(root, filename)))
    # badtest1 is invalid (missing 'job') and lands in the pkg dir
    configList = [
        {
            "name": "test1",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "badtest1",
            "exe_env": "windows",
            "source": "stackOverflow",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "test3",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "Google",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ],
                "exists": [
                    {
                        "field": "var"
                    }
                ]
            }
        }
    ]
    GoodConfigList = [
        {
            "name": "test1",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "test3",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "Google",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ],
                "exists": [
                    {
                        "field": "var"
                    }
                ]
            }
        }
    ]
    # distribute: index 0 -> fs dir, index 1 -> pkg dir, index 2 -> mongo
    i = 0
    length = len(configList) - 1
    while i <= length:
        if i == 0:
            with open(ioc.getConfig().get('Configuration', 'dir') + 'conf{0}.config.json'.format(i), 'w') as fil:
                fil.write(json.dumps(configList[i], indent=4))
        if i == 1:
            with open(pkg_resources.resource_filename('tgt_grease.enterprise.Model', 'config/') + 'conf{0}.config.json'.format(i), 'w') as fil:
                fil.write(json.dumps(configList[i], indent=4))
        if i == 2:
            ioc.getCollection('Configuration').insert_one(configList[i])
        i += 1
    ioc.getCollection('Configuration').update_many({}, {'$set': {'active': True, 'type':
        'prototype_config'}})
    # sleep because travis is slow
    time.sleep(1.5)
    conf = PrototypeConfig(ioc)
    conf.load(reloadConf=True)
    # pkg count is 0 because badtest1 was rejected during validation
    self.assertEqual(len(conf.getConfiguration().get('configuration').get('mongo')), 1)
    self.assertEqual(len(conf.getConfiguration().get('configuration').get('pkg')), 0)
    self.assertEqual(len(conf.getConfiguration().get('configuration').get('fs')), 1)
    self.assertEqual(len(conf.getConfiguration().get('raw')), len(GoodConfigList))
    self.assertEqual(len(conf.getConfiguration().get('source').get('swapi')), 1)
    self.assertEqual(len(conf.getConfiguration().get('source').get('Google')), 1)
    self.assertEqual(2, len(conf.get_names()))
    self.assertEqual(len(conf.get_source('Google')), 1)
    self.assertTrue(isinstance(conf.get_config('test1'), dict))
    self.assertTrue(conf.get_config('test1'))
    # clean up
    ioc.getCollection('Configuration').drop()
    for root, dirnames, filenames in os.walk(ioc.getConfig().get('Configuration', 'dir')):
        for filename in fnmatch.filter(filenames, '*.config.json'):
            self.assertIsNone(os.remove(os.path.join(root, filename)))
    # clean up
    for root, dirnames, filenames in os.walk(pkg_resources.resource_filename('tgt_grease.enterprise.Model', 'config/')):
        for filename in fnmatch.filter(filenames, '*.config.json'):
            self.assertIsNone(os.remove(os.path.join(root, filename)))
    # clear the config
    conf.load(reloadConf=True)
def test_scan(self):
    """Scan.Parse distributes detected items across the available detect nodes."""
    # setup
    configList = [
        {
            "name": "test1",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "TestSource",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        }
    ]
    ioc = GreaseContainer()
    ioc.ensureRegistration()
    ioc.getConfig().set('trace', True, 'Logging')
    ioc.getConfig().set('verbose', True, 'Logging')
    # temporarily add the test package to the import search path
    fil = open(ioc.getConfig().greaseConfigFile, 'r')
    data = json.loads(fil.read())
    fil.close()
    fil = open(ioc.getConfig().greaseConfigFile, 'w')
    data['Import']['searchPath'].append('tgt_grease.enterprise.Model.tests')
    fil.write(json.dumps(data, sort_keys=True, indent=4))
    fil.close()
    Configuration.ReloadConfig()
    jServer = ioc.getCollection('JobServer')
    # NOTE(review): this block calls datetime.utcnow() while sibling tests use
    # datetime.datetime.utcnow() -- verify the module-level import style.
    jID1 = jServer.insert_one({
        'jobs': 0,
        'os': platform.system().lower(),
        'roles': ["general"],
        'prototypes': ["detect"],
        'active': True,
        'activationTime': datetime.utcnow()
    }).inserted_id
    time.sleep(1)
    jID2 = jServer.insert_one({
        'jobs': 0,
        'os': platform.system().lower(),
        'roles': ["general"],
        'prototypes': ["detect"],
        'active': True,
        'activationTime': datetime.utcnow()
    }).inserted_id
    # Begin Test
    conf = PrototypeConfig(ioc)
    conf.load(reloadConf=True, ConfigurationList=configList)
    scanner = Scan(ioc)
    # Scan Environment
    self.assertTrue(scanner.Parse())
    # Begin ensuring environment is how we expect
    # we assert less or equal because sometimes uuid's are close :p
    self.assertLessEqual(ioc.getCollection('SourceData').find({
        'detectionServer': ObjectId(jID1)
    }).count(), 3)
    self.assertLessEqual(ioc.getCollection('SourceData').find({
        'detectionServer': ObjectId(jID2)
    }).count(), 3)
    self.assertLessEqual(ioc.getCollection('JobServer').find_one({
        '_id': ObjectId(jID1)
    })['jobs'], 3)
    self.assertLessEqual(ioc.getCollection('JobServer').find_one({
        '_id': ObjectId(jID2)
    })['jobs'], 3)
    # clean up
    fil = open(ioc.getConfig().greaseConfigFile, 'r')
    data = json.loads(fil.read())
    fil.close()
    # remove collection
    ioc.getCollection('TestProtoType').drop()
    # remove prototypes
    data['NodeInformation']['ProtoTypes'] = []
    # pop search path
    trash = data['Import']['searchPath'].pop()
    # close out
    fil = open(ioc.getConfig().greaseConfigFile, 'w')
    fil.write(json.dumps(data, sort_keys=True, indent=4))
    fil.close()
    jServer.delete_one({'_id': ObjectId(jID1)})
    jServer.delete_one({'_id': ObjectId(jID2)})
    ioc.getCollection('SourceData').drop()
    ioc.getCollection('Dedup_Sourcing').drop()
    ioc.getConfig().set('trace', False, 'Logging')
    ioc.getConfig().set('verbose', False, 'Logging')
    Configuration.ReloadConfig()
def test_scheduling_fail(self):
    """Scheduling fails when no job server matches the config's exe_env."""
    d = Detect()
    p = PrototypeConfig(d.ioc)
    s = Scheduler(d.ioc)
    source = {
        'key': 'var',
        'ver': 'key',
        'greg': 'old',
        'created': '2017-11-24'
    }
    # 'minix' exe_env has no matching execution server, forcing the failure
    configuration = {
        'name': 'demoConfig',
        'job': 'otherThing',
        'exe_env': 'minix',
        'source': 'Google',
        'logic': {
            'DateRange': [
                {
                    'field': 'created',
                    'format': '%Y-%m-%d',
                    'min': '2017-11-23',
                    'max': '2017-11-25'
                }
            ],
            'Regex': [
                {
                    'field': 'key',
                    'pattern': '.*',
                    'variable': True,
                    'variable_name': 'field'
                },
                {
                    'field': 'ver',
                    'pattern': '.*'
                }
            ],
            'Exists': [
                {
                    'field': 'greg',
                    'variable': True,
                    'variable_name': 'greg'
                }
            ]
        }
    }
    p.load(True, [configuration])
    # seed a source document awaiting detection on this node
    sourceId = d.ioc.getCollection('SourceData').insert_one({
        'grease_data': {
            'sourcing': {
                'server': ObjectId(d.ioc.getConfig().NodeIdentity)
            },
            'detection': {
                'server': ObjectId(d.ioc.getConfig().NodeIdentity),
                'detectionStart': None,
                'detectionEnd': None,
                'detection': {}
            },
            'scheduling': {
                'schedulingServer': None,
                'schedulingStart': None,
                'schedulingEnd': None
            },
            'execution': {
                'server': None,
                'assignmentTime': None,
                'executionStart': None,
                'executionEnd': None,
                'context': {},
                'executionSuccess': False,
                'commandSuccess': False,
                'failures': 0,
                'retryTime': datetime.datetime.utcnow()
            }
        },
        'source': str('test').encode('utf-8'),
        'configuration': configuration.get('name'),
        'data': source,
        'createTime': datetime.datetime.utcnow(),
        'expiry': Deduplication.generate_max_expiry_time(1)
    }).inserted_id
    # make this node handle scan/detect/schedule itself
    d.ioc.getCollection('JobServer').update_one(
        {'_id': ObjectId(d.ioc.getConfig().NodeIdentity)},
        {
            '$set': {
                'prototypes': ["scan", "detect", "schedule"]
            }
        }
    )
    self.assertTrue(d.detectSource())
    self.assertFalse(d.getScheduledSource())
    self.assertTrue(s.getDetectedSource())
    # no server satisfies 'minix', so execution scheduling must fail
    self.assertFalse(s.scheduleExecution())
    # restore the node's prototype list
    self.assertTrue(d.ioc.getCollection('JobServer').update_one(
        {'_id': ObjectId(d.ioc.getConfig().NodeIdentity)},
        {
            '$set': {
                'prototypes': []
            }
        }
    ))
    d.ioc.getCollection('SourceData').delete_one({'_id': ObjectId(sourceId)})
def test_rescheduleJobsFailed(self):
    """rescheduleJobs fails when no surviving server carries the needed role."""
    n = NodeMonitoring()
    p = PrototypeConfig(n.ioc)
    # server1 carries the jobs but has no roles/prototypes
    server1 = n.ioc.getCollection('JobServer').insert_one({
        'jobs': 9,
        'os': platform.system().lower(),
        'roles': [],
        'prototypes': [],
        'active': True,
        'activationTime': datetime.datetime.utcnow()
    }).inserted_id
    # server2 has roles/prototypes that do not match the config's exe_env
    server2 = n.ioc.getCollection('JobServer').insert_one({
        'jobs': 9,
        'os': platform.system().lower(),
        'roles': ['test1'],
        'prototypes': ['detect', 'schedule'],
        'active': True,
        'activationTime': datetime.datetime.utcnow()
    }).inserted_id
    config = n.ioc.getCollection('Configuration').insert_one({
        'active': True,
        'type': 'prototype_config',
        "name": "test",
        "job": "help",
        "exe_env": "test",
        "source": "test",
        "logic": {
            "Regex": [{
                "field": "url",
                "pattern": ".*",
                'variable': True,
                'variable_name': 'url'
            }],
            'Range': [{
                'field': 'status_code',
                'min': 199,
                'max': 201
            }]
        },
        'constants': {
            'test': 'ver'
        }
    }).inserted_id
    p.load(reloadConf=True)
    # a job fully assigned to server1 across every lifecycle stage
    source = n.ioc.getCollection('SourceData').insert_one({
        'grease_data': {
            'sourcing': {
                'server': server1
            },
            'detection': {
                'server': server1,
                'start': datetime.datetime.utcnow(),
                'end': datetime.datetime.utcnow(),
                'detection': {}
            },
            'scheduling': {
                'server': server1,
                'start': datetime.datetime.utcnow(),
                'end': datetime.datetime.utcnow(),
            },
            'execution': {
                'server': server1,
                'assignmentTime': datetime.datetime.utcnow(),
                'completeTime': None,
                'returnData': {},
                'executionSuccess': False,
                'commandSuccess': False,
                'failures': 0
            }
        },
        'source': 'test',
        'configuration': 'test',
        'createTime': datetime.datetime.utcnow(),
        'expiry': Deduplication.generate_max_expiry_time(1)
    }).inserted_id
    self.assertTrue(n.deactivateServer(str(server1)))
    # rescheduling must fail: server2 cannot serve exe_env 'test'
    self.assertFalse(n.rescheduleJobs(str(server1)))
    self.assertFalse(
        n.ioc.getCollection('SourceData').find({
            '_id': source,
            'grease_data.execution.server': server2
        }).count())
    # clean up
    self.assertEqual(
        n.ioc.getCollection('JobServer').delete_one({
            '_id': server1
        }).deleted_count, 1)
    self.assertEqual(
        n.ioc.getCollection('JobServer').delete_one({
            '_id': server2
        }).deleted_count, 1)
    n.ioc.getCollection('SourceData').delete_one({'_id': source})
    n.ioc.getCollection('Configuration').drop()
    n.ioc.getCollection('ServerHealth').drop()
    p.load(reloadConf=True)
def test_job_execution(self):
    """A scheduled job is picked up by the daemon and executed to success."""
    ioc = GreaseContainer()
    cmd = DaemonProcess(ioc)
    proto = PrototypeConfig(ioc)
    ioc.getCollection('Configuration').insert_one(
        {
            'active': True,
            'type': 'prototype_config',
            "name": "exe_test",
            "job": "help",
            "exe_env": "general",
            "source": "url_source",
            "logic": {
                "Regex": [
                    {
                        "field": "url",
                        "pattern": ".*",
                        'variable': True,
                        'variable_name': 'url'
                    }
                ],
                'Range': [
                    {
                        'field': 'status_code',
                        'min': 199,
                        'max': 201
                    }
                ]
            },
            'constants': {
                'test': 'ver'
            }
        }
    )
    proto.load(reloadConf=True)
    # seed a job assigned to this node, ready for execution
    jobid = ioc.getCollection('SourceData').insert_one({
        'grease_data': {
            'sourcing': {
                'server': ObjectId(ioc.getConfig().NodeIdentity)
            },
            'detection': {
                'server': ObjectId(ioc.getConfig().NodeIdentity),
                'start': datetime.datetime.utcnow(),
                'end': datetime.datetime.utcnow(),
                'detection': {}
            },
            'scheduling': {
                'server': ObjectId(ioc.getConfig().NodeIdentity),
                'start': datetime.datetime.utcnow(),
                'end': datetime.datetime.utcnow(),
            },
            'execution': {
                'server': ObjectId(ioc.getConfig().NodeIdentity),
                'assignmentTime': datetime.datetime.utcnow(),
                'completeTime': None,
                'returnData': {},
                'executionSuccess': False,
                'commandSuccess': False,
                'failures': 0
            }
        },
        'source': 'dev',
        'configuration': 'exe_test',
        'data': {},
        'createTime': datetime.datetime.utcnow(),
        'expiry': Deduplication.generate_max_expiry_time(1)
    }).inserted_id
    # Run for a bit
    self.assertTrue(cmd.server())
    self.assertTrue(cmd.drain_jobs(ioc.getCollection('SourceData')))
    result = ioc.getCollection('SourceData').find_one({'_id': ObjectId(jobid)})
    self.assertTrue(result)
    self.assertTrue(result.get('grease_data').get('execution').get('executionSuccess'))
    self.assertTrue(result.get('grease_data').get('execution').get('commandSuccess'))
    # clean up
    ioc.getCollection('SourceData').drop()
    ioc.getCollection('Configuration').drop()
def test_pkg_load_bad(self): ioc = GreaseContainer() # clean up for root, dirnames, filenames in os.walk(pkg_resources.resource_filename('tgt_grease.enterprise.Model', 'config/')): for filename in fnmatch.filter(filenames, '*.config.json'): self.assertIsNone(os.remove(os.path.join(root, filename))) configList = [ { "name": "test1", "job": "fakeJob", "exe_env": "windows", "source": "swapi", "logic": { "regex": [ { "field": "character", "pattern": ".*skywalker.*" } ] } }, { "name": "badtest1", "exe_env": "windows", "source": "stackOverflow", "logic": { "regex": [ { "field": "character", "pattern": ".*skywalker.*" } ] } }, { "name": "test3", "job": "fakeJob", "exe_env": "windows", "source": "Google", "logic": { "regex": [ { "field": "character", "pattern": ".*skywalker.*" } ], "exists": [ { "field": "var" } ] } } ] GoodConfigList = [ { "name": "test1", "job": "fakeJob", "exe_env": "windows", "source": "swapi", "logic": { "regex": [ { "field": "character", "pattern": ".*skywalker.*" } ] } }, { "name": "test3", "job": "fakeJob", "exe_env": "windows", "source": "Google", "logic": { "regex": [ { "field": "character", "pattern": ".*skywalker.*" } ], "exists": [ { "field": "var" } ] } } ] i = 0 for conf in configList: with open(pkg_resources.resource_filename('tgt_grease.enterprise.Model', 'config/') + 'conf{0}.config.json'.format(i), 'w') as fil: fil.write(json.dumps(conf, indent=4)) i += 1 conf = PrototypeConfig(ioc) conf.load(reloadConf=True) self.assertEqual(len(conf.getConfiguration().get('configuration').get('pkg')), len(GoodConfigList)) self.assertEqual(len(conf.getConfiguration().get('raw')), len(GoodConfigList)) self.assertEqual(len(conf.getConfiguration().get('source').get('swapi')), 1) self.assertEqual(len(conf.getConfiguration().get('source').get('Google')), 1) self.assertEqual(2, len(conf.get_sources())) self.assertEqual(2, len(conf.get_names())) self.assertEqual(len(conf.get_source('Google')), 1) self.assertTrue(isinstance(conf.get_config('test1'), dict)) 
self.assertTrue(conf.get_config('test1')) # clean up for root, dirnames, filenames in os.walk(pkg_resources.resource_filename('tgt_grease.enterprise.Model', 'config/')): for filename in fnmatch.filter(filenames, '*.config.json'): self.assertIsNone(os.remove(os.path.join(root, filename))) # clear the config conf.load(reloadConf=True)
def test_mongo_load_bad(self):
    """Mongo-backed load rejects the invalid config and indexes the rest."""
    ioc = GreaseContainer()
    # clean up
    for root, dirnames, filenames in os.walk(ioc.getConfig().get('Configuration', 'dir')):
        for filename in fnmatch.filter(filenames, '*.config.json'):
            self.assertIsNone(os.remove(os.path.join(root, filename)))
    # badtest1 is invalid: it is missing the required 'job' key
    configList = [
        {
            "name": "test1",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "badtest1",
            "exe_env": "windows",
            "source": "stackOverflow",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "test3",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "Google",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ],
                "exists": [
                    {
                        "field": "var"
                    }
                ]
            }
        }
    ]
    GoodConfigList = [
        {
            "name": "test1",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "swapi",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ]
            }
        },
        {
            "name": "test3",
            "job": "fakeJob",
            "exe_env": "windows",
            "source": "Google",
            "logic": {
                "regex": [
                    {
                        "field": "character",
                        "pattern": ".*skywalker.*"
                    }
                ],
                "exists": [
                    {
                        "field": "var"
                    }
                ]
            }
        }
    ]
    for conf in configList:
        ioc.getCollection('Configuration').insert_one(conf)
    ioc.getCollection('Configuration').update_many({}, {'$set': {'active': True, 'type': 'prototype_config'}})
    # sleep because travis is slow sometimes
    time.sleep(1.5)
    conf = PrototypeConfig(ioc)
    conf.load(reloadConf=True)
    self.assertEqual(len(conf.getConfiguration().get('configuration').get('mongo')), len(GoodConfigList))
    self.assertEqual(len(conf.getConfiguration().get('raw')), len(GoodConfigList))
    self.assertEqual(len(conf.getConfiguration().get('source').get('swapi')), 1)
    self.assertEqual(len(conf.getConfiguration().get('source').get('Google')), 1)
    self.assertEqual(2, len(conf.get_sources()))
    self.assertEqual(2, len(conf.get_names()))
    self.assertEqual(len(conf.get_source('Google')), 1)
    self.assertTrue(isinstance(conf.get_config('test1'),
                               dict))
    self.assertTrue(conf.get_config('test1'))
    # clean up
    ioc.getCollection('Configuration').drop()
    # clear the config
    conf.load(reloadConf=True)
def test_type(self):
    """PrototypeConfig instantiates as the expected type.

    The previous assertion checked ``isinstance(conf, object)``, which is
    vacuously true for every Python value and therefore could never fail;
    check the concrete class instead.
    """
    conf = PrototypeConfig()
    self.assertTrue(isinstance(conf, PrototypeConfig))
class DaemonProcess(object):
    """Actual daemon processing for GREASE Daemon

    Attributes:
        ioc (GreaseContainer): The Grease IOC
        current_real_second (int): Current second in time
        registered (bool): If the node is registered with MongoDB
        impTool (ImportTool): Instance of Import Tool
        conf (PrototypeConfig): Prototype Configuration Instance

    """

    ioc = None
    current_real_second = None
    registered = True
    # shared across instances: tracks running job threads and prototype threads
    contextManager = {'jobs': {}, 'prototypes': {}}
    impTool = None

    def __init__(self, ioc):
        """Construct the daemon process.

        Args:
            ioc (GreaseContainer): IOC container to use; anything else
                results in a fresh container being created
        """
        if isinstance(ioc, GreaseContainer):
            self.ioc = ioc
        else:
            self.ioc = GreaseContainer()
        self.current_real_second = datetime.utcnow().second
        if self.ioc.getConfig(
        ).NodeIdentity == "Unknown" and not self.register():
            self.registered = False
        self.impTool = ImportTool(self.ioc.getLogger())
        self.conf = PrototypeConfig(self.ioc)

    def server(self):
        """Server process for ensuring prototypes & jobs are running

        By Running this method this will clear the DB of any jobs a node may have

        Returns:
            bool: Server Success

        """
        # Ensure we aren't swamping the system
        cpu = cpu_percent(interval=.1)
        mem = virtual_memory().percent
        if \
                cpu >= int(self.ioc.getConfig().get('NodeInformation', 'ResourceMax')) \
                or mem >= int(self.ioc.getConfig().get('NodeInformation', 'ResourceMax')):
            self.ioc.getLogger().trace(
                "Thread Maximum Reached CPU: [{0}] Memory: [{1}]".format(
                    cpu, mem), trace=True)
            # remove variables
            del cpu
            del mem
            return True
        if not self.registered:
            self.ioc.getLogger().trace("Server is not registered", trace=True)
            return False
        self.ioc.getLogger().trace("Server execution starting", trace=True)
        # establish job collection
        JobsCollection = self.ioc.getCollection("SourceData")
        self.ioc.getLogger().trace("Searching for Jobs", trace=True)
        # only retry jobs below the failure cap assigned to this node
        jobs = JobsCollection.find({
            'grease_data.execution.server': ObjectId(self.ioc.getConfig().NodeIdentity),
            'grease_data.execution.commandSuccess': False,
            'grease_data.execution.executionSuccess': False,
            'grease_data.execution.failures': {'$lt': 6}
        })
        # Get Node Information
        Node = self.ioc.getCollection('JobServer').find_one(
            {'_id': ObjectId(self.ioc.getConfig().NodeIdentity)})
        if not Node:
            # If for some reason we couldn't find it
            self.ioc.getLogger().error("Failed To Load Node Information")
            return False
        # Get Prototypes
        prototypes = list(Node.get('prototypes'))
        # Del node instance
        del Node
        if prototypes:
            # We have prototypes to spin up
            for prototype in prototypes:
                self.ioc.getLogger().trace(
                    "Passing ProtoType [{0}] to Runner".format(prototype),
                    trace=True)
                self._run_prototype(prototype)
        if jobs.count():
            self.ioc.getLogger().trace("Total Jobs to Execute: [{0}]".format(
                jobs.count()))
            for job in jobs:
                self.ioc.getLogger().trace(
                    "Passing Job [{0}] to Runner".format(job.get("_id")),
                    trace=True)
                self._run_job(job, JobsCollection)
        else:
            # Nothing to Run for Jobs
            self.ioc.getLogger().trace("No Jobs Scheduled to Server",
                                       trace=True)
        self.ioc.getLogger().trace("Server execution complete", trace=True)
        return True

    def _run_job(self, job, JobCollection):
        """Run a On-Demand Job

        Args:
            job (dict): Job Data to execute
            JobCollection (pymongo.collection.Collection): JobCollection to update for telemetry

        Returns:
            None: Void Method to kickoff execution

        """
        if not self.contextManager['jobs'].get(job.get('_id')):
            # New Job to run
            if isinstance(job.get('configuration'), bytes):
                conf = job.get('configuration').decode()
            else:
                conf = job.get('configuration')
            inst = self.impTool.load(self.conf.get_config(conf).get('job', ''))
            if inst and isinstance(inst, Command):
                inst.ioc.getLogger().foreground = self.ioc.getLogger(
                ).foreground
                thread = threading.Thread(
                    target=inst.safe_execute,
                    args=(job.get('grease_data', {}).get('detection',
                                                         {}).get('detection',
                                                                 {}), ),
                    name="GREASE DAEMON COMMAND EXECUTION [{0}]".format(
                        job.get('_id')))
                thread.daemon = True
                thread.start()
                self.contextManager['jobs'][job.get("_id")] = {
                    'thread': thread,
                    'command': inst
                }
            else:
                # Invalid Job
                del inst
                self.ioc.getLogger().warning("Invalid Job", additional=job)
                # BUGFIX: read the failure counter from its nested location
                # (grease_data.execution.failures), matching the successful-load
                # failure path below; 'failures' never exists at the top level of
                # the job document, so invalid jobs previously always reset to 1
                # and were retried forever.
                JobCollection.update_one({'_id': ObjectId(job['_id'])}, {
                    '$set': {
                        'grease_data.execution.failures':
                        job.get('grease_data', {}).get(
                            'execution', {}).get('failures', 0) + 1
                    }
                })
            return
        else:
            # Job already executing
            if self.contextManager['jobs'].get(
                    job.get('_id')).get('thread').isAlive():
                # thread still executing
                return
            else:
                # Execution has ended
                self.ioc.getLogger().trace("Job [{0}] finished running".format(
                    job.get('_id')), trace=True)
                finishedJob = self.contextManager['jobs'].get(
                    job.get('_id')).get('command')  # type: Command
                if finishedJob.getRetVal():
                    # job completed successfully
                    JobCollection.update_one(
                        {'_id': ObjectId(job.get('_id'))}, {
                            '$set': {
                                'grease_data.execution.commandSuccess':
                                finishedJob.getRetVal(),
                                'grease_data.execution.executionSuccess':
                                finishedJob.getExecVal(),
                                'grease_data.execution.completeTime':
                                datetime.utcnow(),
                                'grease_data.execution.returnData':
                                finishedJob.getData()
                            }
                        })
                else:
                    # Job Failure
                    self.ioc.getLogger().warning(
                        "Job Failed [{0}]".format(job.get('_id')),
                        additional=finishedJob.getData())
                    # TODO: Job Execution cooldown timing
                    JobCollection.update_one({'_id': ObjectId(job['_id'])}, {
                        '$set': {
                            'grease_data.execution.failures':
                            job.get('grease_data', {}).get(
                                'execution', {}).get('failures', 0) + 1
                        }
                    })
                # close out job
                finishedJob.__del__()
                del finishedJob
                # remove from contextManager
                del self.contextManager['jobs'][job.get('_id')]
            return

    def _run_prototype(self, prototype):
        """Startup a ProtoType

        Args:
            prototype (str): ProtoType to start

        Returns:
            None: Void method to start prototype

        """
        if not self.contextManager['prototypes'].get(prototype):
            # ProtoType has not started
            inst = self.impTool.load(prototype)
            if not isinstance(inst, Command):
                # invalid ProtoType
                self.log_once_per_second(
                    "Invalid ProtoType [{0}]".format(prototype), level=ERROR)
                return
            inst.ioc.getLogger().foreground = self.ioc.getLogger().foreground
            # BUGFIX: args must be a tuple -- the original args=({}) is just a
            # parenthesized empty dict, which passes ZERO positional arguments
            # to the thread target instead of one empty context dict (compare
            # the tuple form used in _run_job).
            thread = threading.Thread(
                target=inst.safe_execute,
                args=({},),
                name="GREASE DAEMON PROTOTYPE [{0}]".format(prototype))
            thread.daemon = True
            thread.start()
            self.contextManager['prototypes'][prototype] = thread
            return
        else:
            # ensure thread is alive
            if self.contextManager['prototypes'].get(prototype).isAlive():
                self.ioc.getLogger().trace(
                    "ProtoType [{0}] is alive".format(prototype))
                return
            else:
                # Thread died for some reason
                self.log_once_per_second(
                    "ProtoType [{0}] Stopped".format(prototype), level=INFO)
                inst = self.impTool.load(prototype)
                if not isinstance(inst, Command):
                    self.log_once_per_second(
                        "Invalid ProtoType [{0}]".format(prototype),
                        level=ERROR)
                    return
                inst.ioc.getLogger().foreground = self.ioc.getLogger(
                ).foreground
                # NOTE(review): the restart path targets inst.execute (no args)
                # while the initial start uses inst.safe_execute with a context
                # dict -- confirm whether this asymmetry is intentional.
                thread = threading.Thread(
                    target=inst.execute,
                    name="GREASE DAEMON PROTOTYPE [{0}]".format(prototype))
                thread.daemon = True
                thread.start()
                self.contextManager['prototypes'][prototype] = thread
            return

    def drain_jobs(self, JobCollection):
        """Will drain jobs from the current context

        This method is used to prevent abnormal ending of executions

        Args:
            JobCollection (pymongo.collection.Collection): Job Collection Object

        Returns:
            bool: When job queue is emptied

        """
        Threads = True
        while Threads:
            if self.contextManager['jobs']:
                jobs = {}
                for key, val in self.contextManager['jobs'].items():
                    if val['thread'].isAlive():
                        jobs[key] = val
                        continue
                    else:
                        # Execution has ended
                        self.ioc.getLogger().trace(
                            "Job [{0}] finished running".format(key),
                            trace=True)
                        finishedJob = self.contextManager['jobs'].get(key).get(
                            'command')  # type: Command
                        if finishedJob.getRetVal():
                            # job completed successfully
                            JobCollection.update_one({'_id': ObjectId(key)}, {
                                '$set': {
                                    'grease_data.execution.commandSuccess':
                                    finishedJob.getRetVal(),
                                    'grease_data.execution.executionSuccess':
                                    finishedJob.getExecVal(),
                                    'grease_data.execution.completeTime':
                                    datetime.utcnow(),
                                    'grease_data.execution.returnData':
                                    finishedJob.getData()
                                }
                            })
                        else:
                            # Job Failure
                            self.ioc.getLogger().warning(
                                "Job Failed [{0}]".format(key),
                                additional=finishedJob.getData())
                            # NOTE(review): val['command'] is a Command instance,
                            # not a dict -- .get('failures', 0) likely relies on a
                            # Command.get helper; confirm it reads the job's
                            # failure counter and not something else.
                            JobCollection.update_one({'_id': ObjectId(key)}, {
                                '$set': {
                                    'grease_data.execution.failures':
                                    val['command'].get('failures', 0) + 1
                                }
                            })
                        # close out job
                        finishedJob.__del__()
                        del finishedJob
                self.contextManager['jobs'] = jobs
            else:
                Threads = False
        return True

    def register(self):
        """Attempt to register with MongoDB

        Returns:
            bool: Registration Success

        """
        return self.ioc.ensureRegistration()

    def log_once_per_second(self, message, level=DEBUG, additional=None):
        """Log Message once per second

        Args:
            message (str): Message to log
            level (int): Log Level
            additional (object): Additional information that is able to be str'd

        Returns:
            None: Void Method to fire log message

        """
        if self._has_time_progressed():
            self.ioc.getLogger().TriageMessage(message=message,
                                               level=level,
                                               additional=additional)

    def _has_time_progressed(self):
        """Determines if the current second and the real second are not the same

        Returns:
            bool: if true then time has passed in a meaningful way

        """
        if self.current_real_second != datetime.utcnow().second:
            self.current_real_second = datetime.utcnow().second
            return True
        else:
            return False
def test_real(self):
    """End-to-end pipeline test: scan -> detect -> schedule -> execute.

    Registers this node, inserts a ``full_stack_test`` prototype
    configuration, then runs each pipeline stage in order, asserting the
    expected ``grease_data`` state in the ``SourceData`` collection after
    every stage, and finally drops the collections it touched.

    NOTE(review): this drives live collections via ``GreaseContainer`` --
    presumably requires a reachable MongoDB instance; confirm before
    running in CI.
    """
    #############################################
    # SETUP UP TIME
    #############################################
    ioc = GreaseContainer()
    pConf = PrototypeConfig(ioc)
    ioc.ensureRegistration()
    # enable all three pipeline prototypes on this node for the run
    ioc.getCollection('JobServer').update_one(
        {'_id': ObjectId(ioc.getConfig().NodeIdentity)},
        {'$set': {
            'prototypes': ['scan', 'detect', 'schedule']
        }})
    ioc.getCollection('Configuration').insert_one({
        'active': True,
        'type': 'prototype_config',
        "name": "full_stack_test",
        "job": "help",
        "exe_env": "general",
        "source": "url_source",
        "url": ['http://google.com'],
        "logic": {
            "Regex": [{
                "field": "url",
                "pattern": ".*",
                'variable': True,
                'variable_name': 'url'
            }],
            # presumably admits only an HTTP 200 status -- verify whether the
            # Range detector treats min/max as exclusive bounds
            'Range': [{
                'field': 'status_code',
                'min': 199,
                'max': 201
            }]
        },
        'constants': {
            'test': 'ver'
        }
    })
    pConf.load(reloadConf=True)
    #############################################
    # EXECUTE SCANNING
    #############################################
    Scanner = scan()
    Scanner.ioc.getLogger().getConfig().set('verbose', True, 'Logging')
    Scanner.ioc.getLogger().getConfig().set('trace', True, 'Logging')
    Scanner.ioc.getLogger().getConfig().set('config', 'full_stack_test',
                                            'Sourcing')
    self.assertTrue(Scanner.execute({'loop': 1}))
    #############################################
    # ASSERT SCANNING
    #############################################
    # detection fields queried as None: detection has been assigned to this
    # server but (presumably) not yet started -- confirm schema convention
    self.assertTrue(
        ioc.getCollection('SourceData').find_one({
            'grease_data.sourcing.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.detection.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.detection.start': None,
            'grease_data.detection.end': None
        }))
    #############################################
    # EXECUTE DETECTION
    #############################################
    Detect = detect()
    Detect.ioc.getLogger().getConfig().set('verbose', True, 'Logging')
    Detect.ioc.getLogger().getConfig().set('trace', True, 'Logging')
    Detect.ioc.getLogger().getConfig().set('config', 'full_stack_test',
                                           'Sourcing')
    self.assertTrue(Detect.execute({'loop': 1}))
    #############################################
    # ASSERT DETECTION
    #############################################
    self.assertTrue(
        ioc.getCollection('SourceData').find_one({
            'grease_data.sourcing.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.detection.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.scheduling.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.scheduling.start': None,
            'grease_data.scheduling.end': None
        }))
    #############################################
    # EXECUTE SCHEDULING
    #############################################
    Scheduling = schedule()
    Scheduling.ioc.getLogger().getConfig().set('verbose', True, 'Logging')
    Scheduling.ioc.getLogger().getConfig().set('trace', True, 'Logging')
    Scheduling.ioc.getLogger().getConfig().set('config', 'full_stack_test',
                                               'Sourcing')
    self.assertTrue(Scheduling.execute({'loop': 1}))
    #############################################
    # ASSERT SCHEDULING
    #############################################
    self.assertTrue(
        ioc.getCollection('SourceData').find_one({
            'grease_data.sourcing.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.detection.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.scheduling.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.execution.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.execution.start': None,
            'grease_data.execution.end': None
        }))
    #############################################
    # EXECUTE JOBS
    #############################################
    # clear the prototype list before starting the daemon -- presumably so
    # server() only executes the scheduled job rather than re-running the
    # pipeline prototypes; confirm against DaemonProcess.server
    ioc.getCollection('JobServer').update_one(
        {'_id': ObjectId(ioc.getConfig().NodeIdentity)},
        {'$set': {
            'prototypes': []
        }})
    Daemon = DaemonProcess(ioc)
    Daemon.ioc.getLogger().getConfig().set('verbose', True, 'Logging')
    Daemon.ioc.getLogger().getConfig().set('trace', True, 'Logging')
    Daemon.ioc.getLogger().getConfig().set('config', 'full_stack_test',
                                           'Sourcing')
    self.assertTrue(Daemon.server())
    self.assertTrue(Daemon.drain_jobs(ioc.getCollection('SourceData')))
    #############################################
    # ASSERT JOB EXECUTION
    #############################################
    # sleep a few for seconds to let help complete
    time.sleep(5)
    self.assertTrue(
        ioc.getCollection('SourceData').find_one({
            'grease_data.sourcing.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.detection.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.scheduling.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.execution.server':
            ObjectId(ioc.getConfig().NodeIdentity),
            'grease_data.execution.commandSuccess': True,
            'grease_data.execution.executionSuccess': True
        }))
    #############################################
    # CLEAN UP TIME
    #############################################
    ioc.getCollection('JobServer').update_one(
        {'_id': ObjectId(ioc.getConfig().NodeIdentity)},
        {'$set': {
            'prototypes': []
        }})
    ioc.getCollection('Configuration').drop()
    ioc.getCollection('SourceData').drop()
    ioc.getCollection('DeDup_Sourcing').drop()
    pConf.load(reloadConf=True)