def alarm_check2(self):
    """Test IOC md5 hashes against public sources and alarm on any hit.

    Pulls all not-yet-alarmed IOC docs from the rtops-* index, extracts the
    md5 hash from each '[indicator] file:' message, queries VirusTotal,
    IBM X-Force and Hybrid Analysis for each unique hash, and builds a
    report of hashes any engine flagged as a new alarm. Docs that alarmed
    are tagged 'ALARMED_<fname>' so they are only alarmed once.

    Returns:
        dict with keys 'alarm' (bool), 'fname', 'name', 'description',
        'query' and 'results' (per-hash: reporting engines + file names).
    """
    q = "cslogtype:ioc AND NOT tags:ALARMED_*"
    report = {}
    report['alarm'] = False
    report['fname'] = "alarm_check2"
    report['name'] = "Test IOC's against public sources"
    report['description'] = "This check queries public sources given a list of md5 hashes. If a hash was seen we set an alarm\n"
    report['query'] = q

    # Fetch all matching IOC docs, capped at the ES max result window.
    # NOTE(review): the original ran this identical query twice ("manually
    # added IOC's" used the same q), duplicating every hit; fetch once.
    iocs = []
    i = countQuery(q, index="rtops-*")
    if i >= 10000:
        i = 10000
    r = getQuery(q, i, index="rtops-*")
    if not isinstance(r, list):
        r = []
    for l in r:
        # Message layout assumed: "[indicator] file: <hash> <size> bytes <path>"
        # (fields taken positionally from split()) -- TODO confirm format
        if l['_source']['csmessage'].startswith("[indicator] file:"):
            arr = l['_source']['csmessage'].split()
            l['_source']['ioc_bytesize'] = arr[3]
            l['_source']['ioc_hash'] = arr[2]
            l['_source']['ioc_path'] = arr[5]
            l['_source']['ioc_type'] = arr[1][:-1]
            iocs.append(l)

    # Group docs per md5 so each unique hash is tested only once.
    md5d = {}
    for ioc in iocs:
        md5d.setdefault(ioc['_source']['ioc_hash'], []).append(ioc)
    md5s = list(md5d)

    reportI = {}
    # ioc VirusTotal
    from iocsources import ioc_vt as vt
    t = vt.VT()
    t.test(md5s)
    reportI['VirusTotal'] = t.report
    # ioc IBM X-Force
    from iocsources import ioc_ibm as ibm
    i = ibm.IBM()
    i.test(md5s)
    reportI['IBM X-Force'] = i.report
    # ioc Hybrid Analysis
    from iocsources import ioc_hybridanalysis as ha
    h = ha.HA()
    h.test(md5s)
    reportI['Hybrid Analysis'] = h.report

    report['results'] = {}
    alarmedHashes = []
    for engine in reportI.keys():
        for hash in reportI[engine].keys():
            if isinstance(reportI[engine][hash], dict):
                if reportI[engine][hash]['result'] == "newAlarm":
                    alarmedHashes.append(hash)
                    reportI[engine][hash]['alarm'] = True
                    # FIX: the original only set a dead local 'alarm' flag;
                    # the returned report must carry the alarm state.
                    report['alarm'] = True
                    print("[A] alarm set in %s" % report['fname'])
                    # FIX: only initialise the per-hash entry once, so
                    # results from earlier engines are not wiped.
                    if hash not in report['results']:
                        report['results'][hash] = {}
                    if 'engine' not in report['results'][hash]:
                        report['results'][hash]['engine'] = []
                    report['results'][hash]['engine'].append(engine)
                    # Collect all file paths this hash was seen under.
                    # FIX: original read 'ioc_name', which is never set
                    # above (only ioc_path/ioc_hash/...) and would raise
                    # KeyError -- use ioc_path.
                    fnameList = []
                    for fileI in md5d[hash]:
                        fnameList.append(fileI['_source']['ioc_path'])
                    report['results'][hash]['fileNames'] = fnameList

    # Tag alarmed docs so we alarm only once per doc.
    alarmed_set = []
    for l in r:
        if l['_source']['csmessage'].startswith("[indicator] file:"):
            if l['_source']['ioc_hash'] in alarmedHashes:
                alarmed_set.append(l)
    setTags("ALARMED_%s" % report['fname'], alarmed_set)
    return (report)
def alarm_check(self):
    """Test IOC md5 hashes against public sources and report new alarms.

    Fetches not-yet-alarmed file IOC docs from rtops-*, skips hashes that
    were already alarmed previously (tagged 'alarm_filehash') or already
    checked within the last `interval` seconds, then queries VirusTotal,
    IBM X-Force and Hybrid Analysis for the remaining unique hashes.

    Returns:
        dict with 'mutations' (per-doc-id engine results for alarmed
        hashes) and 'hits' (the ES docs that should be alarmed).
    """
    q = 'c2.log.type:ioc AND NOT tags:alarm_filehash AND ioc.type:file'
    # Aggregation query: md5s checked within the last `interval` seconds
    # (interval_filter) and md5s that were already alarmed (alarmed_filter).
    alarmed_md5_q = {
        "aggs": {
            "interval_filter": {
                "filter": {
                    "range": {
                        "alarm.last_checked": {
                            "gte": "now-%ds" % interval,
                            "lt": "now"
                        }
                    }
                },
                "aggs": {
                    "md5_interval": {
                        "terms": {
                            "field": "file.hash.md5"
                        }
                    }
                }
            },
            "alarmed_filter": {
                "filter": {
                    "terms": {
                        "tags": ["alarm_filehash"]
                    }
                },
                "aggs": {
                    "md5_alarmed": {
                        "terms": {
                            "field": "file.hash.md5"
                        }
                    }
                }
            }
        }
    }

    self.logger.debug('Running query %s' % q)
    # First we get all IOC docs, capped at the ES max result window.
    i = countQuery(q, index='rtops-*')
    if i >= 10000:
        i = 10000
    iocs = getQuery(q, i, index='rtops-*')
    if not isinstance(iocs, list):
        iocs = []
    self.logger.debug('found ioc: %s' % iocs)

    # Then we get an aggregation of all md5s alarmed/checked recently.
    self.logger.debug('Running query %s' % alarmed_md5_q)
    omd5 = rawSearch(alarmed_md5_q, index='rtops-*')
    self.logger.debug(omd5['aggregations'])

    # md5 hashes checked within 'interval'
    already_checked = []
    for h in omd5['aggregations']['interval_filter']['md5_interval']['buckets']:
        already_checked.append(h['key'])
    # md5 hashes alarmed previously
    already_alarmed = []
    for h in omd5['aggregations']['alarmed_filter']['md5_alarmed']['buckets']:
        already_alarmed.append(h['key'])

    md5d = {}
    md5s = []
    md5ShouldCheck = {}
    # Group all hits per md5 hash value and decide per hash whether it
    # still needs to be sent to the providers.
    for ioc in iocs:
        h = getValue('_source.file.hash.md5', ioc)
        if h in md5d:
            md5d[h].append(ioc)
        else:
            md5d[h] = [ioc]
        should_check = True
        # Check if the IOC has already been alarmed
        if h in already_alarmed:
            should_check = False
            # Set the last checked date and tag the doc as alarmed
            addAlarmData(ioc, {}, info['submodule'], False)
            setTags(info['submodule'], [ioc])
        # Check if the IOC has already been checked within 'interval'
        if h in already_checked:
            should_check = False
        # A hash is only checked if no doc carrying it said otherwise.
        if h in md5ShouldCheck:
            md5ShouldCheck[h] = should_check & md5ShouldCheck[h]
        else:
            md5ShouldCheck[h] = should_check

    # Drop hashes we should not check (iterate a copy while deleting).
    for hash in dict.copy(md5d):
        if hash in md5ShouldCheck and not md5ShouldCheck[hash]:
            self.logger.debug(
                '[%s] md5 hash already checked within interval or already alarmed previously, skipping'
                % hash)
            del md5d[hash]

    # Create an array with all unique md5 hashes to send to the providers.
    for hash in md5d:
        md5s.append(hash)
    self.logger.debug('md5 hashes to check: %s' % md5s)

    reportI = {}
    # ioc VirusTotal
    self.logger.debug('Checking IOC against VirusTotal')
    t = vt.VT(alarms[info['submodule']]['vt_api_key'])
    t.test(md5s)
    reportI['VirusTotal'] = t.report
    self.logger.debug('Results from VirusTotal: %s' % t.report)
    # ioc IBM x-force
    self.logger.debug('Checking IOC against IBM X-Force')
    i = ibm.IBM(alarms[info['submodule']]['ibm_basic_auth'])
    i.test(md5s)
    reportI['IBM X-Force'] = i.report
    # ioc Hybrid Analysis
    self.logger.debug('Checking IOC against Hybrid Analysis')
    h = ha.HA(alarms[info['submodule']]['ha_api_key'])
    h.test(md5s)
    reportI['Hybrid Analysis'] = h.report

    # Will store mutations per hash: {hash: {engine: result_dict}}
    alarmedHashes = {}
    for engine in reportI.keys():
        for hash in reportI[engine].keys():
            if isinstance(reportI[engine][hash], dict):
                if reportI[engine][hash]['result'] == 'newAlarm':
                    if hash in alarmedHashes:
                        alarmedHashes[hash][engine] = reportI[engine][hash]
                    else:
                        alarmedHashes[hash] = {
                            engine: reportI[engine][hash]
                        }

    # Prepare the object to be returned
    report = {'mutations': {}, 'hits': []}
    for hash in md5d:
        for ioc in md5d[hash]:
            if hash in alarmedHashes.keys():
                # Hash was found by at least one engine: alarm this doc.
                report['mutations'][ioc['_id']] = alarmedHashes[hash]
                report['hits'].append(ioc)
            else:
                # Not found: just update the last_checked date.
                self.logger.debug(
                    'md5 hash not alarmed, updating last_checked date: [%s]'
                    % hash)
                addAlarmData(ioc, {}, info['submodule'], False)
    return (report)