def analyze(self, dict):  # noqa: A002 — parameter shadows builtin `dict`; name kept for interface compatibility
    """Process one Pony feed entry.

    Expects `dict` to carry `title` (the sample hash), `pubDate`
    (%d-%m-%Y) and `description` (free text containing the C2 URL).
    Creates a Hash observable for the sample, a Url observable for the
    C2, and links them.
    """
    observable_sample = dict["title"]

    context_sample = {}
    context_sample["description"] = "Pony sample"
    context_sample["date_added"] = datetime.strptime(dict["pubDate"], "%d-%m-%Y")
    context_sample["source"] = self.name

    # Robustness fix: the original called .group() on the match directly and
    # raised AttributeError when the description contained no URL.
    match_c2 = re.search(r"https?://[^ ]*", dict["description"].lower())
    if match_c2 is None:
        logging.error("No C2 URL found in description: %s", dict["description"])
        return
    observable_c2 = match_c2.group()

    context_c2 = {}
    context_c2["description"] = "Pony c2"
    context_c2["date_added"] = datetime.strptime(dict["pubDate"], "%d-%m-%Y")
    context_c2["source"] = self.name

    try:
        sample = Hash.get_or_create(value=observable_sample)
        sample.add_context(context_sample)
        sample.add_source("feed")
        sample.tag(["pony", "objectives"])
    except ObservableValidationError as e:
        logging.error(e)
        return

    try:
        c2 = Url.get_or_create(value=observable_c2)
        c2.add_context(context_c2)
        c2.add_source("feed")
        c2.tag(["c2", "pony"])
        sample.active_link_to(c2, "c2", self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
        return
def analyze(self, dict):  # noqa: A002 — parameter shadows builtin `dict`; name kept for interface compatibility
    """Process one Atmos feed entry.

    Expects `dict` to carry `title` (the sample hash), `pubDate`
    (%d-%m-%Y) and `description` (HTML containing an <a href> to the C2).
    Creates a Hash observable for the sample, a Url observable for the
    C2, and links them.
    """
    observable_sample = dict['title']

    context_sample = {}
    context_sample['description'] = "Atmos sample"
    context_sample['date_added'] = datetime.strptime(dict['pubDate'], "%d-%m-%Y")
    context_sample['source'] = self.name

    # Robustness fix: the original called .group("url") on the match directly
    # and raised AttributeError when the description contained no link.
    match_c2 = re.search(
        r"<a href[^>]+>(?P<url>[^<]+)", dict['description'].lower())
    if match_c2 is None:
        logging.error("No C2 link found in description: %s", dict['description'])
        return
    observable_c2 = match_c2.group("url")

    context_c2 = {}
    context_c2['description'] = "Atmos c2"
    context_c2['date_added'] = datetime.strptime(dict['pubDate'], "%d-%m-%Y")
    context_c2['source'] = self.name

    try:
        sample = Hash.get_or_create(value=observable_sample)
        sample.add_context(context_sample)
        sample.add_source("feed")
        sample.tag(['atmos', 'objectives'])
    except ObservableValidationError as e:
        logging.error(e)
        return

    try:
        c2 = Url.get_or_create(value=observable_c2)
        c2.add_context(context_c2)
        c2.add_source("feed")
        c2.tag(['c2', 'atmos'])
        sample.active_link_to(c2, 'c2', self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
        return
def each(f):
    """Extract hashes from a file's body and link each Hash to the File."""
    if not f.body:
        return
    f.hashes = []
    for hash_type, digest in HashFile.extract_hashes(f.body.contents):
        hash_value = digest.hexdigest()
        hash_object = Hash.get_or_create(value=hash_value)
        hash_object.add_source("analytics")
        hash_object.save()
        f.active_link_to(
            hash_object,
            "{} hash".format(hash_type.upper()),
            "HashFile",
            clean_old=False,
        )
        f.hashes.append({"hash": hash_type, "value": hash_value})
    f.save()
def each(f):
    """Hash a file's body (when it has one) and connect the hashes to it."""
    try:
        body_length = f.body.length
    except AttributeError:
        # File item has no content
        body_length = 0
    if body_length > 0:
        for digest in HashFile.extract_hashes(f):
            hash_obs = Hash.get_or_create(value=digest.hexdigest()).save()
            hash_obs.add_source("analytics")
            Link.connect(f, hash_obs)
def analyze(observable, results):
    """Query MalShare for `observable` and link reported sources and hashes."""
    links = set()
    json_result = MalshareAPI.fetch(
        observable, results.settings['malshare_api_key'])
    json_string = json.dumps(
        json_result, sort_keys=True, indent=4, separators=(',', ': '))
    results.update(raw=json_string)
    # NOTE(review): `result` is populated below but never attached to the
    # observable — presumably an observable.add_context(result) is missing
    # upstream; behavior preserved here.
    result = {'raw': json_string}

    if 'SOURCES' in json_result:
        for source in json_result['SOURCES']:
            url_value = source.strip()
            try:
                new_url = Url.get_or_create(value=url_value)
                links.update(
                    observable.active_link_to(new_url, 'c2', 'malshare_query'))
            except ObservableValidationError:
                logging.error(
                    "An error occurred when trying to add {} to the database".
                    format(url_value))
        result['nb C2'] = len(json_result['SOURCES'])

    try:
        # Link each reported hash variant back to the observable.
        for result_key, link_type in (('MD5', 'md5'), ('SHA1', 'sha1'),
                                      ('SHA256', 'sha256')):
            new_hash = Hash.get_or_create(value=json_result[result_key])
            links.update(
                new_hash.active_link_to(observable, link_type, 'malshare_query'))
    except ObservableValidationError:
        logging.error(
            "An error occurred when trying to add hashes {} to the database".
            format(json_string))
    return list(links)
def analyze(observable, results):
    """Enrich via PassiveTotal malware records, linking samples to the host."""
    links = set()
    params = {'query': observable.value}
    data = PassiveTotalApi.get('/enrichment/malware', results.settings, params)

    for record in data['results']:
        collection_date = datetime.strptime(
            record['collectionDate'], "%Y-%m-%d %H:%M:%S")
        sample = Hash.get_or_create(value=record['sample'])
        links.update(
            sample.link_to(
                observable, "Contacted Host", record['source'],
                collection_date))

    return list(links)
def analyze(self, line):
    """Parse one feed line (ip, domain, family, md5, link, date).

    Creates a Hash for the sample and a Url/Hostname/Ip for the C2, then
    links sample -> c2 when both were created.
    """
    if not line or line[0].startswith("#"):
        return

    try:
        ip, domain, family, md5, link, date = tuple(map(strip, line))
        context = {
            "first_seen": date,
            "family": family,
            "report": link,
            "source": self.name,
        }
        c2 = None
        sample = None

        try:
            sample = Hash.get_or_create(value=md5)
            sample.add_context(context)
            sample.tag(family.lower())
        except ObservableValidationError as e:
            logging.error("Invalid line: {}\nLine: {}".format(e, line))

        try:
            if domain:
                # A '/' marks a full URL; otherwise it is a bare hostname.
                if '/' in domain:
                    c2 = Url.get_or_create(value=domain)
                else:
                    c2 = Hostname.get_or_create(value=domain)
            elif ip:
                c2 = Ip.get_or_create(value=ip)
            else:
                return
            c2.add_context(context)
            c2.tag(['c2', family.lower()])
        except ObservableValidationError as e:
            logging.error("Invalid line: {}\nLine: {}".format(e, line))

        if c2 and sample:
            sample.active_link_to(c2, 'c2', self.name, clean_old=False)
    except ValueError:
        logging.error("Error unpacking line: {}".format(line))
def analyze(self, line):
    """Ingest one URLhaus record: URL, File object and md5/sha256 hashes.

    Tags everything with the file type and (when present) the signature,
    then links url -> file -> hashes.
    """
    md5_obs = False
    sha256_obs = False
    url_obs = False
    malware_file = False

    first_seen = line["firstseen"]
    url = line["url"]
    filetype = line["filetype"]
    md5_hash = line["md5"]
    sha256_hash = line["sha256"]
    signature = line["signature"]

    context = {"source": self.name, "first_seen": first_seen}
    # The feed serializes a missing signature as the literal string "None".
    has_signature = signature != "None"

    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            if has_signature:
                url_obs.tag(signature)
            url_obs.add_context(context)
            url_obs.add_source(self.name)
        except ObservableValidationError as e:
            logging.error(e)

    if sha256_hash:
        try:
            malware_file = File.get_or_create(
                value="FILE:{}".format(sha256_hash))
            malware_file.add_context(context)
            malware_file.tag(filetype)

            sha256_obs = Hash.get_or_create(value=sha256_hash)
            sha256_obs.tag(filetype)
            sha256_obs.add_context(context)
            if has_signature:
                sha256_obs.tag(signature)
        except ObservableValidationError as e:
            logging.error(e)

    if md5_hash:
        try:
            md5_obs = Hash.get_or_create(value=md5_hash)
            md5_obs.add_context(context)
            md5_obs.tag(filetype)
            if has_signature:
                md5_obs.tag(signature)
        except ObservableValidationError as e:
            logging.error(e)

    if malware_file:
        if has_signature:
            malware_file.tag(signature)
        if md5_obs:
            malware_file.active_link_to(md5_obs, "md5", self.name)
        if sha256_obs:
            malware_file.active_link_to(sha256_obs, "sha256", self.name)
        if url_obs:
            url_obs.active_link_to(malware_file, "drops", self.name)
def analyze(observable, results):
    """Query VirusTotal for `observable` and attach scan results as context.

    Handles Ip, Hostname and Hash observables; links discovered companies,
    URLs and sibling hashes back to the observable.
    """
    links = set()
    # NOTE(review): 'virutotal_api_key' is misspelled, but it is the actual
    # settings key used by this module — do not "fix" one side only.
    json_result = VirustotalApi.fetch(
        observable, results.settings['virutotal_api_key'])
    json_string = json.dumps(
        json_result, sort_keys=True, indent=4, separators=(',', ': '))
    results.update(raw=json_string)

    result = {'raw': json_string, 'source': 'virustotal_query'}

    # Observable unknown to VT: store an empty scan context and bail out.
    if json_result['response_code'] != 1:
        result['scan_date'] = None
        result['positives'] = 0
        result['total'] = 0
        result['permalink'] = None
        observable.add_context(result)
        return

    if isinstance(observable, Ip):
        # Parse results for ip
        owner = json_result.get('as_owner')
        if owner:
            result['owner'] = owner
            o_isp = Company.get_or_create(name=owner)
            links.update(
                observable.active_link_to(o_isp, 'hosting', 'virustotal_query'))

        detected_urls = json_result.get('detected_urls')
        if detected_urls:
            result['detected_urls'] = detected_urls
            for detected_url in detected_urls:
                o_url = Url.get_or_create(value=detected_url['url'])
                scan_date = parser.parse(detected_url.get("scan_date"))
                links.update(
                    observable.link_to(
                        o_url, 'url', 'virustotal_query', scan_date,
                        scan_date))

        if json_result.get('permalink'):
            result['permalink'] = json_result['permalink']

    elif isinstance(observable, Hostname):
        if json_result.get('permalink'):
            result['permalink'] = json_result['permalink']
        result['positives'] = json_result.get('positives', 0)
        if json_result.get('total'):
            result['total'] = json_result['total']

    elif isinstance(observable, Hash):
        result['positives'] = json_result.get('positives', 0)
        if 'permalink' in json_result:
            result['permalink'] = json_result['permalink']
        if 'total' in json_result:
            result['total'] = json_result['total']

        # Create the sibling hashes VT reports (skipping the one we queried)
        # and link them back to the observable.
        hashes = {
            'md5': json_result['md5'],
            'sha1': json_result['sha1'],
            'sha256': json_result['sha256'],
        }
        for hash_kind, hash_value in hashes.items():
            if hash_value == observable.value:
                continue
            new_hash = Hash.get_or_create(value=hash_value)
            new_hash.tag(observable.get_tags())
            links.update(
                new_hash.active_link_to(
                    observable, hash_kind, 'virustotal_query'))

    result['scan_date'] = json_result['scan_date']
    observable.add_context(result)
    return list(links)
def analyze(observable, results):
    """Enrich `observable` with ThreatCrowd data.

    Depending on the observable type, creates and links resolutions,
    emails, subdomains, hashes and C2 domains/IPs, and stores counters in
    the observable's context.
    """
    links = set()
    json_result = ThreatCrowdAPI.fetch(observable)
    json_string = json.dumps(
        json_result, sort_keys=True, indent=4, separators=(',', ': '))
    results.update(raw=json_string)
    result = {}

    if isinstance(observable, Hostname):
        if 'resolutions' in json_result:
            result['ip on this domains'] = 0
            for ip in json_result['resolutions']:
                if ip['ip_address'].strip() != observable.value:
                    # '0000-00-00' is ThreatCrowd's "never resolved" marker.
                    if ip['last_resolved'] != '0000-00-00':
                        last_resolved = datetime.datetime.strptime(
                            ip['last_resolved'], "%Y-%m-%d")
                        try:
                            new_ip = Ip.get_or_create(
                                value=ip['ip_address'].strip())
                            links.update(
                                new_ip.active_link_to(
                                    observable, 'IP', 'ThreatCrowd',
                                    last_resolved))
                            result['ip on this domains'] += 1
                        except ObservableValidationError:
                            logging.error(
                                "An error occurred when trying to add subdomain {} to the database"
                                .format(ip['ip_address']))

        if 'emails' in json_result:
            result['nb emails'] = 0
            for email in json_result['emails']:
                try:
                    new_email = Email.get_or_create(value=email)
                    links.update(
                        new_email.active_link_to(observable, 'Used by',
                                                 'ThreatCrowd'))
                    result['nb emails'] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add email {} to the database"
                        .format(email))

        if 'subdomains' in json_result:
            result['nb subdomains'] = 0
            for subdomain in json_result['subdomains']:
                try:
                    new_domain = Hostname.get_or_create(value=subdomain)
                    links.update(
                        observable.active_link_to(new_domain, 'subdomain',
                                                  'ThreatCrowd'))
                    result['nb subdomains'] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add subdomain {} to the database"
                        .format(subdomain))

    if isinstance(observable, Ip):
        if 'resolutions' in json_result:
            result['domains resolved'] = 0
            for domain in json_result['resolutions']:
                if domain['domain'].strip() != observable.value:
                    try:
                        last_resolved = datetime.datetime.strptime(
                            domain['last_resolved'], "%Y-%m-%d")
                        new_domain = Hostname.get_or_create(
                            value=domain['domain'].strip())
                        links.update(
                            new_domain.active_link_to(
                                observable, 'A Record', 'ThreatCrowd',
                                last_resolved))
                        result['domains resolved'] += 1
                    except ObservableValidationError:
                        logging.error(
                            "An error occurred when trying to add domain {} to the database"
                            .format(domain['domain']))

        if 'hashes' in json_result and len(json_result['hashes']) > 0:
            result['malwares'] = 0
            for h in json_result['hashes']:
                new_hash = Hash.get_or_create(value=h)
                links.update(
                    new_hash.active_link_to(observable, 'hash', 'ThreatCrowd'))
                result['malwares'] += 1

    if isinstance(observable, Email):
        # Bug fix: the original tested len(json_result) (always truthy when
        # the 'domains' key exists) instead of the 'domains' list itself.
        if 'domains' in json_result and len(json_result['domains']) > 0:
            result['domains recorded by email'] = 0
            for domain in json_result['domains']:
                new_domain = Hostname.get_or_create(value=domain)
                links.update(
                    new_domain.active_link_to(observable, 'recorded by',
                                              'ThreatCrowd'))
                result['domains recorded by email'] += 1

    if isinstance(observable, Hash):
        result['nb c2'] = 0
        # Bug fix: the link source below was misspelled 'ThreadCrowd' for the
        # md5/sha1/sha256 links, inconsistent with every other link here.
        if 'md5' in json_result:
            new_hash = Hash.get_or_create(value=json_result['md5'])
            links.update(
                new_hash.active_link_to(observable, 'md5', 'ThreatCrowd'))
        if 'sha1' in json_result:
            new_hash = Hash.get_or_create(value=json_result['sha1'])
            links.update(
                new_hash.active_link_to(observable, 'sha1', 'ThreatCrowd'))
        if 'sha256' in json_result:
            new_hash = Hash.get_or_create(value=json_result['sha256'])
            links.update(
                new_hash.active_link_to(observable, 'sha256', 'ThreatCrowd'))
        if 'domains' in json_result and len(json_result['domains']):
            for domain in json_result['domains']:
                new_domain = Hostname.get_or_create(value=domain)
                links.update(
                    observable.active_link_to(new_domain, 'c2', 'ThreatCrowd'))
                result['nb c2'] += 1
        if 'ips' in json_result and len(json_result['ips']):
            for ip in json_result['ips']:
                new_ip = Ip.get_or_create(value=ip.strip())
                links.update(
                    observable.active_link_to(new_ip, 'c2', 'ThreatCrowd'))
                result['nb c2'] += 1

    if 'permalink' in json_result:
        result['permalink'] = json_result['permalink']

    result['source'] = 'threatcrowd_query'
    result['raw'] = json_string
    observable.add_context(result)
    return list(links)
def analyze(self, item):
    """Process one Hybrid-Analysis report.

    Creates the File object and its md5/sha1/sha256 hashes, links
    contacted domains as C2s, and registers any extracted (dropped) files.
    """
    first_seen = item["analysis_start_time"]
    f_hyb = File.get_or_create(value="FILE:{}".format(item["sha256"]))
    sha256 = Hash.get_or_create(value=item["sha256"])
    f_hyb.active_link_to(sha256, "sha256", self.name)

    tags = []
    context = {"source": self.name, "date": first_seen}

    # Copy the optional report fields into the context.
    if "vxfamily" in item:
        context["vxfamily"] = item["vxfamily"]
    if "tags" in item:
        tags.extend(item["tags"])
    if "threatlevel_human" in item:
        context["threatlevel_human"] = item["threatlevel_human"]
    if "threatlevel" in item:
        context["threatlevel"] = item["threatlevel"]
    if "type" in item:
        context["type"] = item["type"]
    if "size" in item:
        context["size"] = item["size"]
    if "vt_detect" in item:
        context["virustotal_score"] = item["vt_detect"]
    if "et_alerts_total" in item:
        context["et_alerts_total"] = item["et_alerts_total"]
    if "process_list" in item:
        context["count_process_spawn"] = len(item["process_list"])
    context["url"] = "https://www.hybrid-analysis.com" + item["reporturl"]

    f_hyb.add_context(context)
    f_hyb.tag(tags)
    f_hyb.add_source("feed")
    sha256.add_context(context)

    # md5 first, then sha1 — same order as the original code.
    for hash_field in ("md5", "sha1"):
        hash_obs = Hash.get_or_create(value=item[hash_field])
        hash_obs.add_source("feed")
        hash_obs.add_context(context)
        f_hyb.active_link_to(hash_obs, hash_field, self.name)

    if "domains" in item:
        for domain in item["domains"]:
            try:
                new_host = Hostname.get_or_create(value=domain)
                f_hyb.active_link_to(new_host, "C2", self.name)
                logging.debug(domain)
                new_host.add_context({
                    "source": self.name,
                    "contacted_by": f_hyb
                })
                new_host.add_source("feed")
            except ObservableValidationError as e:
                logging.error(e)

    if "extracted_files" in item:
        for extracted_file in item["extracted_files"]:
            context_file_dropped = {"source": self.name}
            if "sha256" not in extracted_file:
                logging.error(extracted_file)
                continue
            new_file = File.get_or_create(
                value="FILE:{}".format(extracted_file["sha256"]))
            sha256_new_file = Hash.get_or_create(
                value=extracted_file["sha256"])
            sha256_new_file.add_source("feed")
            new_file.active_link_to(sha256_new_file, "sha256", self.name)

            context_file_dropped["virustotal_score"] = 0
            context_file_dropped["size"] = extracted_file["file_size"]
            if "av_matched" in extracted_file:
                context_file_dropped["virustotal_score"] = \
                    extracted_file["av_matched"]
            if "threatlevel_readable" in extracted_file:
                context_file_dropped["threatlevel"] = \
                    extracted_file["threatlevel_readable"]
            if "av_label" in extracted_file:
                context_file_dropped["av_label"] = extracted_file["av_label"]
            if "type_tags" in extracted_file:
                new_file.tag(extracted_file["type_tags"])

            new_file.add_context(context_file_dropped)
            sha256_new_file.add_context(context_file_dropped)
            new_file.add_source(self.name)
            f_hyb.active_link_to(new_file, "drop", self.name)
def analyze(self, item):
    """Parse one FeodoTracker RSS item: the C2 host plus payload hashes
    scraped from the linked detail page."""
    context = item
    date_string = re.search(
        r"\((?P<datetime>[\d\- :]+)\)", context['title']).group('datetime')
    try:
        context['date_added'] = datetime.strptime(
            date_string, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        pass

    g = re.match(
        r'^Host: (?P<host>.+), Version: (?P<version>\w)',
        context['description'])
    g = g.groupdict()
    context['version'] = g['version']
    context['description'] = FeodoTracker.descriptions[g['version']]
    context['subfamily'] = FeodoTracker.variants[g['version']]
    context['source'] = self.name
    del context['title']

    new = None
    variant_tag = FeodoTracker.variants[g['version']].lower()
    try:
        # Bug fix: the dots were unescaped, so '.' matched any character and
        # non-IP hosts could be misclassified as Ip observables.
        if re.search(r"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}",
                     g['host']):
            new = Ip.get_or_create(value=g['host'])
        else:
            new = Hostname.get_or_create(value=g['host'])
        new.add_context(context)
        new.add_source("feed")
        new.tag([variant_tag, 'malware', 'crimeware', 'banker', 'c2'])
    except ObservableValidationError as e:
        logging.error(e)

    try:
        url_fedeo = context['guid']
        # Renamed from `r`, which the original later shadowed with the row
        # loop variable below.
        response = requests.get(url_fedeo)
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            # Second table on the page holds the sample rows, 7 cells each.
            cells = soup.find_all('table')[1].find_all('td')
            results = [{
                'timestamp': cells[i].text,
                'md5_hash': cells[i + 1].text,
                'filesize': cells[i + 2].text,
                'VT': cells[i + 3].text,
                'Host': cells[i + 4].text,
                'Port': cells[i + 5].text,
                'SSL Certif or method': cells[i + 6].text
            } for i in range(0, len(cells), 7)]
            for row in results:
                new_hash = Hash.get_or_create(value=row['md5_hash'])
                new_hash.add_context(context)
                new_hash.add_source('feed')
                new_hash.tag([
                    variant_tag, 'malware', 'crimeware', 'banker', 'payload'
                ])
                new_hash.active_link_to(
                    new, 'c2', self.name, clean_old=False)
                host = Url.get_or_create(
                    value='https://%s:%s' % (g['host'], row['Port']))
                host.add_source('feed')
                host.add_context(context)
                host.tag(
                    [variant_tag, 'malware', 'crimeware', 'banker', 'c2'])
                new_hash.active_link_to(
                    host, 'c2', self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
def _get_threat_forensics_nodes_inner(
        self, evidence, general_context, tags):
    """Create observables for one piece of forensics `evidence`, tag them,
    and return the list of created nodes."""
    # create context from notes
    context = general_context.copy()
    context.update(self._make_context_from_notes([evidence]))

    # add evidence['type'] and unicify tags
    tags = [{
        'name': _
    } for _ in set([evidence['type']] + [d['name'] for d in tags])]
    # create Tags in DB
    for _ in tags:
        Tag.get_or_create(name=_['name'])

    threat_forensics = []

    # technical hack: default the optional 'what' fields to None so the
    # lookups below never KeyError.
    # Fix: the original listed 'rule' twice in this list.
    # NOTE(review): this mutates the caller's `evidence` dict in place.
    for optional in ['action', 'rule', 'path']:
        if optional not in evidence['what']:
            evidence['what'][optional] = None

    # add attributes for the known evidence type
    if evidence['type'] in ['file', 'dropper']:
        if 'path' in evidence['what']:
            threat_forensics.append(
                File.get_or_create(
                    value=evidence['what']['path'], context=[context]))
        if 'md5' in evidence['what']:
            threat_forensics.append(
                Hash.get_or_create(
                    value=evidence['what']['md5'], context=[context]))
        if 'sha256' in evidence['what']:
            threat_forensics.append(
                Hash.get_or_create(
                    value=evidence['what']['sha256'], context=[context]))
    elif evidence['type'] == 'cookie':
        pass
    elif evidence['type'] == 'dns':
        threat_forensics.append(
            Hostname.get_or_create(
                value=evidence['what']['host'], context=[context]))
    elif evidence['type'] == 'ids':
        threat_forensics.append(
            Text.get_or_create(
                value=evidence['what']['ids'], context=[context]))
    elif evidence['type'] == 'mutex':
        threat_forensics.append(
            Text.get_or_create(
                value=evidence['what']['name'], context=[context]))
    elif evidence['type'] == 'network':
        if 'ip' in evidence['what']:
            # FIXME port, type
            threat_forensics.append(
                Ip.get_or_create(
                    value=evidence['what']['ip'], context=[context]))
        elif 'domain' in evidence['what']:
            threat_forensics.append(
                Hostname.get_or_create(
                    value=evidence['what']['domain'], context=[context]))
    elif evidence['type'] == 'process':
        pass
    elif evidence['type'] == 'registry':
        # threat_forensics.append(evidence['what']['key'])
        # threat_forensics.append(evidence['what']['value'])
        pass
    elif evidence['type'] == 'url':
        # BUG yeti-#115 ObservableValidationError: Invalid URL: http://xxxxx-no-tld/
        threat_forensics.append(
            Url.get_or_create(
                value=evidence['what']['url'], context=[context]))
        # add note as tag because its a signature
        if 'note' in evidence:
            threat_forensics[-1].tag(
                evidence['note'].replace('.', '_').strip('_'))

    # tag all of that
    for o in threat_forensics:
        o.tag([t['name'] for t in tags])
    return threat_forensics
def _add_events_nodes(self, events, context, tags):
    """Create GUID/Ip/Email/Hash/File nodes for each Proofpoint event and
    link them; returns the list of event (GUID) nodes."""
    log.debug('_add_events_nodes on {nb} events'.format(nb=len(events)))
    attach_unsupported = {
        status: 0 for status in ['UNSUPPORTED_TYPE', 'TOO_SMALL', None]
    }
    event_nodes = []
    tag_names = [t['name'] for t in tags]

    for event in events:
        create_t = datetime.strptime(
            event['messageTime'], "%Y-%m-%dT%H:%M:%S.%fZ")
        # PPS unique value
        guid = Text.get_or_create(
            value='proofpoint://%s' % event['GUID'],
            created=create_t,
            context=[context])
        log.debug('Event {msg}'.format(msg=event['messageID']))
        message_contents = []

        src_ip = Ip.get_or_create(
            value=event['senderIP'], created=create_t, context=[context])
        src_ip.tag(['MTA'])
        guid.active_link_to([src_ip], "MTA src ip", self.name)
        # new event
        event_nodes.append(guid)

        # if self.config['import_email_metadata']:
        # email details
        # messageID
        message_id = Email.get_or_create(
            value=event['messageID'], created=create_t, context=[context])
        guid.active_link_to([message_id], "seen in", self.name)
        # sender
        sender_email = Email.get_or_create(
            value=event['sender'], created=create_t, context=[context])
        sender_email.tag(['sender'])
        guid.active_link_to([sender_email], "sender", self.name)
        if 'headerFrom' in event:
            # header From
            header_from = Email.get_or_create(
                value=event['headerFrom'], created=create_t,
                context=[context])
            header_from.tag(['sender'])
            guid.active_link_to([header_from], "headerFrom", self.name)

        # FIXME is that a duplicate of attachment-malware ?
        # attachment events
        for attach in event['messageParts']:
            if attach['sandboxStatus'] in ['THREAT']:
                md5 = Hash.get_or_create(
                    value=attach['md5'], created=create_t, context=[context])
                md5.tag(tag_names)
                fname = File.get_or_create(
                    value=attach['filename'], created=create_t,
                    context=[context])
                fname.tag(tag_names)
                # this should be a DUP from threat_nodes in analyse()
                sha_threat = Hash.get_or_create(
                    value=attach['sha256'], created=create_t,
                    context=[context])
                sha_threat.active_link_to([md5, fname], "relates", self.name)
                sha_threat.tag(tag_names)
                message_contents.append(sha_threat)
                # link the 3 together
            elif attach['sandboxStatus'] in ['UNSUPPORTED_TYPE', 'TOO_SMALL',
                                             None]:
                attach_unsupported[attach['sandboxStatus']] += 1
                log.debug(pprint.pformat(attach))
        # add context to the hashes
        guid.active_link_to(message_contents, "delivers", self.name)

    _stats = ', '.join(
        "%s: %d" % (k, v) for k, v in attach_unsupported.items())
    log.warning('Ignored unsupported attachments: %s', _stats)

    for o in event_nodes:
        o.tag(tag_names)
    return event_nodes
def _add_events_nodes(self, events, context, tags):
    """Build event (GUID) nodes from Proofpoint events, linking sender IPs,
    email addresses and THREAT attachments; returns the event nodes."""
    log.debug('_add_events_nodes on {nb} events'.format(nb=len(events)))
    unsupported_counts = dict.fromkeys(
        ['UNSUPPORTED_TYPE', 'TOO_SMALL', None], 0)
    event_nodes = []
    tag_names = [t['name'] for t in tags]

    for msg in events:
        create_t = datetime.strptime(msg['messageTime'],
                                     "%Y-%m-%dT%H:%M:%S.%fZ")
        # PPS unique value
        guid = Text.get_or_create(value='proofpoint://%s' % msg['GUID'],
                                  created=create_t,
                                  context=[context])
        log.debug('Event {msg}'.format(msg=msg['messageID']))
        message_contents = []

        src_ip = Ip.get_or_create(value=msg['senderIP'],
                                  created=create_t,
                                  context=[context])
        src_ip.tag(['MTA'])
        guid.active_link_to([src_ip], "MTA src ip", self.name)
        # new event
        event_nodes.append(guid)

        # if self.config['import_email_metadata']:
        # email details
        # messageID
        message_id = Email.get_or_create(value=msg['messageID'],
                                         created=create_t,
                                         context=[context])
        guid.active_link_to([message_id], "seen in", self.name)
        # sender
        from_email = Email.get_or_create(value=msg['sender'],
                                         created=create_t,
                                         context=[context])
        from_email.tag(['sender'])
        guid.active_link_to([from_email], "sender", self.name)
        if 'headerFrom' in msg:
            # header From
            header_from_email = Email.get_or_create(value=msg['headerFrom'],
                                                    created=create_t,
                                                    context=[context])
            header_from_email.tag(['sender'])
            guid.active_link_to([header_from_email], "headerFrom", self.name)

        # FIXME is that a duplicate of attachment-malware ?
        # attachment events
        for attach in msg['messageParts']:
            status = attach['sandboxStatus']
            if status in ['THREAT']:
                md5 = Hash.get_or_create(value=attach['md5'],
                                         created=create_t,
                                         context=[context])
                md5.tag(tag_names)
                fname = File.get_or_create(value=attach['filename'],
                                           created=create_t,
                                           context=[context])
                fname.tag(tag_names)
                # this should be a DUP from threat_nodes in analyse()
                sha_threat = Hash.get_or_create(value=attach['sha256'],
                                                created=create_t,
                                                context=[context])
                sha_threat.active_link_to([md5, fname], "relates", self.name)
                sha_threat.tag(tag_names)
                message_contents.append(sha_threat)
                # link the 3 together
            elif status in ['UNSUPPORTED_TYPE', 'TOO_SMALL', None]:
                unsupported_counts[status] += 1
                log.debug(pprint.pformat(attach))
        # add context to the hashes
        guid.active_link_to(message_contents, "delivers", self.name)

    _stats = ', '.join("%s: %d" % (k, v)
                       for k, v in unsupported_counts.items())
    log.warning('Ignored unsupported attachments: %s', _stats)

    for o in event_nodes:
        o.tag(tag_names)
    return event_nodes
def analyze(observable, results):
    """Query VirusTotal for `observable` and attach the results as context.

    Handles Ip, Hostname and Hash observables; links detected URLs, the
    hosting company and sibling hashes back to the observable.
    """
    links = set()
    # NOTE(review): 'virutotal_api_key' is misspelled but is the actual
    # settings key used by this module — do not "fix" one side only.
    json_result = VirustotalApi.fetch(
        observable, results.settings['virutotal_api_key'])
    json_string = json.dumps(
        json_result, sort_keys=True, indent=4, separators=(',', ': '))
    results.update(raw=json_string)
    result = {'raw': json_string}

    if isinstance(observable, Ip):
        # Parse results for ip
        if json_result.get('as_owner'):
            result['Owner'] = json_result['as_owner']
            o_isp = Company.get_or_create(name=json_result['as_owner'])
            links.update(
                observable.active_link_to(
                    o_isp, 'hosting', 'virustotal_query'))
        if json_result.get('detected_urls'):
            result['detected_urls'] = json_result['detected_urls']
            for detected_url in json_result['detected_urls']:
                o_url = Url.get_or_create(value=detected_url['url'])
                # Bug fix: the original linked o_url to itself
                # (o_url.active_link_to(o_url, ...)); link the observable
                # to the detected URL instead.
                links.update(
                    observable.active_link_to(
                        o_url, 'hostname', 'virustotal_query'))
    elif isinstance(observable, Hostname):
        if json_result.get('permalink'):
            result['permalink'] = json_result['permalink']
        result['positives'] = json_result.get('positives', 0)
        if json_result.get('total'):
            result['total'] = json_result['total']
    elif isinstance(observable, Hash):
        # Robustness fix: 'positives' may be absent from the response; the
        # original indexed it directly and could raise KeyError.
        result['positives'] = json_result.get('positives', 0)
        if 'permalink' in json_result:
            result['permalink'] = json_result['permalink']
        if 'total' in json_result:
            result['total'] = json_result['total']
        hashes = {
            'md5': json_result['md5'],
            'sha1': json_result['sha1'],
            'sha256': json_result['sha256']
        }
        create_hashes = [
            (k, v) for k, v in hashes.items() if v != observable.value
        ]
        for k, v in create_hashes:
            new_hash = Hash.get_or_create(value=v)
            new_hash.tag(observable.get_tags())
            links.update(
                new_hash.active_link_to(observable, k, 'virustotal_query'))

    result['source'] = 'virustotal_query'
    observable.add_context(result)
    return list(links)
def analyze(self, block):  # pylint: disable=arguments-differ
    """Process one feed block.

    `block` carries `first_seen`, a `hash` dict (md5/sha1/sha256), a
    `sample` dict, and a `server` dict with `AS`, `country`, `ip` and
    `url` keys. Creates the Url, its server Ip and the hashes, linking
    everything to the Url.
    """
    url = block["server"]["url"]
    # Ensure the URL has a scheme before validation.
    if "http" not in url:
        url = "http://" + url

    context = {
        "date_added": block["first_seen"],
        "as": block["server"]["AS"],
        "country": block["server"]["country"],
        "ip": block["server"]["ip"],
        "source": self.name,
        "md5": block["hash"]["md5"],
        "sha1": block["hash"]["sha1"],
        "sha256": block["hash"]["sha256"],
    }

    url_data = None
    try:
        url_data = Url.get_or_create(value=url)
        url_data.add_context(context)
        url_data.add_source(self.name)
    except ObservableValidationError as e:
        logging.error(e)

    if block.get("server", {}).get("ip", ""):
        try:
            ip_obs = Ip.get_or_create(value=block["server"]["ip"])
            ip_obs.add_context(context)
            ip_obs.add_source(self.name)
            if url_data:
                url_data.active_link_to(
                    ip_obs, "ip", self.name, clean_old=False)
        except ObservableValidationError as e:
            logging.error(e)

    if block.get("hash", []):
        # md5, sha1, sha256
        for hash_type in block["hash"]:
            try:
                hash_data = Hash.get_or_create(value=block["hash"][hash_type])
                hash_data.add_context(context)
                hash_data.add_source(self.name)
                if url_data:
                    url_data.active_link_to(
                        hash_data, "hash", self.name, clean_old=False)
            except ObservableValidationError as e:
                logging.error(e)
def analyze(self, block):  # pylint: disable=arguments-differ
    """Process one MalwareBazaar record.

    `block` is a flat dict with top-level keys such as `sha256_hash`,
    `sha1_hash`, `md5_hash`, `first_seen`, `file_name`, `file_type_mime`,
    `imphash`, `ssdeep` and `tags`. Creates the File object and the three
    hash observables, linking them to the file.
    """
    if not block.get('sha256_hash'):
        return

    context = {}
    context["date_added"] = block['first_seen']
    context["source"] = self.name
    context["filename"] = block["file_name"]
    context["md5"] = block["md5_hash"]
    context["sha1"] = block["sha1_hash"]
    context["sha256"] = block["sha256_hash"]
    # Bug fix: imphash/ssdeep are top-level keys of the record (per the
    # feed's schema) — there is no nested block["hash"] dict, so the
    # original block["hash"]["imphash"] lookups raised KeyError.
    context["imphash"] = block["imphash"]
    context["ssdeep"] = block["ssdeep"]

    malware_file = File.get_or_create(
        value='FILE:{}'.format(block["sha256_hash"]))
    malware_file.add_context(context)
    malware_file.tag(block["file_type_mime"])

    # md5, sha1, sha256
    for hash_type in ("md5_hash", "sha1_hash", "sha256_hash"):
        try:
            hash_data = Hash.get_or_create(value=block[hash_type])
            hash_data.add_context(context)
            hash_data.add_source(self.name)
            if block.get("tags"):
                hash_data.tag(block["tags"])
            malware_file.active_link_to(
                hash_data, hash_type.split("_")[0], self.name)
        except ObservableValidationError as e:
            logging.error(e)
def analyze(self, item):
    """Process one FeodoTracker RSS item.

    Parses the timestamp out of the item title and the host/variant out of
    the description, creates an Ip or Hostname C2 observable, then scrapes
    the item's permalink page for associated sample hashes and ports and
    links them to the C2.
    """
    # NOTE(review): the feed item dict is mutated in place and reused as the
    # observable context ('title' is deleted below).
    context = item
    # title embeds the timestamp in parentheses, e.g. "(2019-01-01 12:00:00)"
    date_string = re.search(
        r"\((?P<datetime>[\d\- :]+)\)", context['title']).group('datetime')
    try:
        context['date_added'] = datetime.strptime(
            date_string, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        # unparsable date: keep the item without a date_added
        pass
    # description format: "Host: <host>, Version: <single-letter variant>"
    g = re.match(
        r'^Host: (?P<host>.+), Version: (?P<version>\w)',
        context['description'])
    g = g.groupdict()
    context['version'] = g['version']
    context['description'] = FeodoTracker.descriptions[g['version']]
    context['subfamily'] = FeodoTracker.variants[g['version']]
    context['source'] = self.name
    del context['title']
    new = None
    variant_tag = FeodoTracker.variants[g['version']].lower()
    try:
        # NOTE(review): dots are unescaped, so this matches any separator —
        # hostnames with four digit-runs would also match; confirm intent.
        if re.search(r"[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}",
                     g['host']):
            new = Ip.get_or_create(value=g['host'])
        else:
            new = Hostname.get_or_create(value=g['host'])
        new.add_context(context)
        new.add_source("feed")
        new.tag([variant_tag, 'malware', 'crimeware', 'banker', 'c2'])
    except ObservableValidationError as e:
        logging.error(e)
    try:
        # the item's guid is a permalink to a detail page listing samples
        url_fedeo = context['guid']
        r = requests.get(url_fedeo)
        if r.status_code == 200:
            html_source = r.text
            soup = BeautifulSoup(html_source, 'html.parser')
            tab = soup.find('table', attrs='sortable')
            results = []
            if tab:
                all_tr = tab.find_all('tr')
                for tr in all_tr:
                    all_td = tr.find_all('td')
                    # rows with exactly 7 cells are sample rows
                    if all_td and len(all_td) == 7:
                        results.append({
                            'timestamp': all_td[0].text,
                            'md5_hash': all_td[1].text,
                            'filesize': all_td[2].text,
                            'VT': all_td[3].text,
                            'Host': all_td[4].text,
                            'Port': all_td[5].text,
                            'SSL Certif or method': all_td[6].text
                        })
            # NOTE(review): this rebinds `r` (previously the HTTP response);
            # harmless here since the response is no longer used.
            for r in results:
                new_hash = Hash.get_or_create(value=r['md5_hash'])
                new_hash.add_context(context)
                new_hash.add_source('feed')
                new_hash.tag([
                    variant_tag, 'malware', 'crimeware', 'banker', 'payload'
                ])
                new_hash.active_link_to(
                    new, 'c2', self.name, clean_old=False)
                # one Url observable per (host, port) pair from the table
                host = Url.get_or_create(
                    value='https://%s:%s' % (g['host'], r['Port']))
                host.add_source('feed')
                host.add_context(context)
                host.tag(
                    [variant_tag, 'malware', 'crimeware', 'banker', 'c2'])
                new_hash.active_link_to(
                    host, 'c2',
                    self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, line):
    """Process one feed row.

    ``line`` is a dict with keys ``firstseen``, ``url``, ``filetype``,
    ``md5``, ``sha256`` and ``signature``. Creates a Url observable, a File
    object and its md5/sha256 Hash observables, tags them with the file
    type and signature, and links everything to the File.
    """
    context = {'source': self.name, 'first_seen': line['firstseen']}
    signature = line['signature']
    filetype = line['filetype']

    # observables created below; falsy until successfully created
    url_obs = False
    sha256_obs = False
    md5_obs = False
    malware_file = False

    if line['url']:
        try:
            url_obs = Url.get_or_create(value=line['url'])
            # the feed uses the literal string 'None' for "no signature"
            if signature != 'None':
                url_obs.tag(signature)
            url_obs.add_context(context)
            url_obs.add_source(self.name)
        except ObservableValidationError as e:
            logging.error(e)

    if line['sha256']:
        try:
            malware_file = File.get_or_create(
                value='FILE:{}'.format(line['sha256']))
            malware_file.add_context(context)
            malware_file.tag(filetype)
            sha256_obs = Hash.get_or_create(value=line['sha256'])
            sha256_obs.tag(filetype)
            sha256_obs.add_context(context)
            if signature != 'None':
                sha256_obs.tag(signature)
        except ObservableValidationError as e:
            logging.error(e)

    if line['md5']:
        try:
            md5_obs = Hash.get_or_create(value=line['md5'])
            md5_obs.add_context(context)
            md5_obs.tag(filetype)
            if signature != 'None':
                md5_obs.tag(signature)
        except ObservableValidationError as e:
            logging.error(e)

    # link hashes and the dropping URL to the File object
    if malware_file:
        if signature != 'None':
            malware_file.tag(signature)
        if md5_obs:
            malware_file.active_link_to(md5_obs, 'md5', self.name)
        if sha256_obs:
            malware_file.active_link_to(sha256_obs, 'sha256', self.name)
        if url_obs:
            url_obs.active_link_to(malware_file, 'drops', self.name)
def _get_threat_forensics_nodes_inner(self, evidence, general_context, tags):
    """Turn one forensics *evidence* dict into a list of Yeti observables.

    The observable class is chosen from ``evidence['type']`` (file/dropper,
    dns, ids, mutex, network, url, ...); each node gets *general_context*
    merged with the context extracted from the evidence notes, and is
    tagged with *tags* plus the evidence type. Returns the list of created
    nodes (possibly empty for unhandled types).
    """
    # create context from notes
    context = general_context.copy()
    _ctx = self._make_context_from_notes([evidence])
    context.update(_ctx)
    # add evidence['type'] and unicify tags
    tags = [{
        'name': _
    } for _ in set([evidence['type']] + [d['name'] for d in tags])]
    # create Tags in DB
    for _ in tags:
        Tag.get_or_create(name=_['name'])
    #
    threat_forensics = []
    # technical hack: set optional comments values
    # BUG FIX: the list previously contained 'rule' twice; deduplicated.
    for optional in ['action', 'rule', 'path']:
        if optional not in evidence['what']:
            evidence['what'][optional] = None
    # add attributes for the known evidence type
    if evidence['type'] in ['file', 'dropper']:
        if 'path' in evidence['what']:
            threat_forensics.append(
                File.get_or_create(value=evidence['what']['path'],
                                   context=[context]))
        if 'md5' in evidence['what']:
            threat_forensics.append(
                Hash.get_or_create(value=evidence['what']['md5'],
                                   context=[context]))
        if 'sha256' in evidence['what']:
            threat_forensics.append(
                Hash.get_or_create(value=evidence['what']['sha256'],
                                   context=[context]))
    elif evidence['type'] == 'cookie':
        pass
    elif evidence['type'] == 'dns':
        threat_forensics.append(
            Hostname.get_or_create(value=evidence['what']['host'],
                                   context=[context]))
    elif evidence['type'] == 'ids':
        threat_forensics.append(
            Text.get_or_create(value=evidence['what']['ids'],
                               context=[context]))
    elif evidence['type'] == 'mutex':
        threat_forensics.append(
            Text.get_or_create(value=evidence['what']['name'],
                               context=[context]))
    elif evidence['type'] == 'network':
        if 'ip' in evidence['what']:
            # FIXME port, type
            threat_forensics.append(
                Ip.get_or_create(value=evidence['what']['ip'],
                                 context=[context]))
        elif 'domain' in evidence['what']:
            threat_forensics.append(
                Hostname.get_or_create(value=evidence['what']['domain'],
                                       context=[context]))
    elif evidence['type'] == 'process':
        pass
    elif evidence['type'] == 'registry':
        # threat_forensics.append(evidence['what']['key'])
        # threat_forensics.append(evidence['what']['value'])
        pass
    elif evidence['type'] == 'url':
        # BUG yeti-#115 ObservableValidationError: Invalid URL: http://xxxxx-no-tld/
        threat_forensics.append(
            Url.get_or_create(value=evidence['what']['url'],
                              context=[context]))
    # add note as tag because its a signature
    # BUG FIX: guard against an empty node list (e.g. a 'cookie' or
    # 'process' evidence carrying a note) — threat_forensics[-1] raised
    # IndexError in that case.
    if 'note' in evidence and threat_forensics:
        threat_forensics[-1].tag(evidence['note'].replace(
            '.', '_').strip('_'))
    # tag all of that
    for o in threat_forensics:
        o.tag([t['name'] for t in tags])
    return threat_forensics
def analyze(observable, results):
    """Enrich *observable* with ThreatCrowd data and return created links.

    Dispatches on the observable's type (Hostname, Ip, Email, Hash),
    creates the related observables reported by the API (resolutions,
    emails, subdomains, hashes, c2 domains/ips), links them to
    *observable*, and attaches a summary context. Returns the list of
    created links.
    """
    links = set()
    json_result = ThreatCrowdAPI.fetch(observable)
    json_string = json.dumps(
        json_result, sort_keys=True, indent=4, separators=(',', ': '))
    results.update(raw=json_string)
    result = {}
    if isinstance(observable, Hostname):
        if 'resolutions' in json_result:
            result['ip on this domains'] = 0
            for ip in json_result['resolutions']:
                if ip['ip_address'].strip() != observable.value:
                    # '0000-00-00' is the API's "never resolved" marker
                    if ip['last_resolved'] != '0000-00-00':
                        last_resolved = datetime.datetime.strptime(
                            ip['last_resolved'], "%Y-%m-%d")
                        try:
                            new_ip = Ip.get_or_create(
                                value=ip['ip_address'].strip())
                            links.update(
                                new_ip.active_link_to(
                                    observable, 'IP', 'ThreatCrowd',
                                    last_resolved))
                            result['ip on this domains'] += 1
                        except ObservableValidationError:
                            logging.error(
                                "An error occurred when trying to add subdomain {} to the database".
                                format(ip['ip_address']))
        if 'emails' in json_result:
            result['nb emails'] = 0
            for email in json_result['emails']:
                try:
                    new_email = Email.get_or_create(value=email)
                    links.update(
                        new_email.active_link_to(
                            observable, 'Used by', 'ThreatCrowd'))
                    result['nb emails'] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add email {} to the database".
                        format(email))
        if 'subdomains' in json_result:
            result['nb subdomains'] = 0
            for subdomain in json_result['subdomains']:
                try:
                    new_domain = Hostname.get_or_create(value=subdomain)
                    links.update(
                        observable.active_link_to(
                            new_domain, 'subdomain', 'ThreatCrowd'))
                    result['nb subdomains'] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add subdomain {} to the database".
                        format(subdomain))
    if isinstance(observable, Ip):
        if 'resolutions' in json_result:
            result['domains resolved'] = 0
            for domain in json_result['resolutions']:
                if domain['domain'].strip() != observable.value:
                    try:
                        last_resolved = datetime.datetime.strptime(
                            domain['last_resolved'], "%Y-%m-%d")
                        new_domain = Hostname.get_or_create(
                            value=domain['domain'].strip())
                        links.update(
                            new_domain.active_link_to(
                                observable, 'A Record', 'ThreatCrowd',
                                last_resolved))
                        result['domains resolved'] += 1
                    except ObservableValidationError:
                        logging.error(
                            "An error occurred when trying to add domain {} to the database".
                            format(domain['domain']))
        if 'hashes' in json_result and len(json_result['hashes']) > 0:
            result['malwares'] = 0
            for h in json_result['hashes']:
                new_hash = Hash.get_or_create(value=h)
                links.update(
                    new_hash.active_link_to(
                        observable, 'hash', 'ThreatCrowd'))
                result['malwares'] += 1
    if isinstance(observable, Email):
        # BUG FIX: the guard previously tested len(json_result) instead of
        # the 'domains' list itself.
        if 'domains' in json_result and len(json_result['domains']) > 0:
            result['domains recorded by email'] = 0
            for domain in json_result['domains']:
                new_domain = Hostname.get_or_create(value=domain)
                links.update(
                    new_domain.active_link_to(
                        observable, 'recorded by', 'ThreatCrowd'))
                result['domains recorded by email'] += 1
    if isinstance(observable, Hash):
        result['nb c2'] = 0
        # BUG FIX: source name was misspelled 'ThreadCrowd' on the three
        # hash links, inconsistent with every other link in this function.
        if 'md5' in json_result:
            new_hash = Hash.get_or_create(value=json_result['md5'])
            links.update(
                new_hash.active_link_to(observable, 'md5', 'ThreatCrowd'))
        if 'sha1' in json_result:
            new_hash = Hash.get_or_create(value=json_result['sha1'])
            links.update(
                new_hash.active_link_to(observable, 'sha1', 'ThreatCrowd'))
        if 'sha256' in json_result:
            new_hash = Hash.get_or_create(value=json_result['sha256'])
            links.update(
                new_hash.active_link_to(
                    observable, 'sha256', 'ThreatCrowd'))
        if 'domains' in json_result and len(json_result['domains']):
            for domain in json_result['domains']:
                new_domain = Hostname.get_or_create(value=domain)
                links.update(
                    observable.active_link_to(
                        new_domain, 'c2', 'ThreatCrowd'))
                result['nb c2'] += 1
        if 'ips' in json_result and len(json_result['ips']):
            for ip in json_result['ips']:
                new_ip = Ip.get_or_create(value=ip.strip())
                links.update(
                    observable.active_link_to(new_ip, 'c2', 'ThreatCrowd'))
                result['nb c2'] += 1
    if 'permalink' in json_result:
        result['permalink'] = json_result['permalink']
    result['source'] = 'threatcrowd_query'
    result['raw'] = json_string
    observable.add_context(result)
    return list(links)
def analyze(observable, results):
    """Enrich *observable* with ThreatCrowd data and return created links.

    Dispatches on the observable's type (Hostname, Ip, Email, Hash),
    creates the related observables reported by the API, links them to
    *observable*, and attaches a summary context. Returns the list of
    created links.
    """
    links = set()
    json_result = ThreatCrowdAPI.fetch(observable)
    json_string = json.dumps(
        json_result, sort_keys=True, indent=4, separators=(",", ": ")
    )
    results.update(raw=json_string)
    result = {}
    if isinstance(observable, Hostname):
        if "resolutions" in json_result:
            result["ip on this domains"] = 0
            for ip in json_result["resolutions"]:
                if ip["ip_address"].strip() != observable.value:
                    # '0000-00-00' is the API's "never resolved" marker
                    if ip["last_resolved"] != "0000-00-00":
                        last_resolved = datetime.datetime.strptime(
                            ip["last_resolved"], "%Y-%m-%d"
                        )
                        try:
                            new_ip = Ip.get_or_create(
                                value=ip["ip_address"].strip()
                            )
                            links.update(
                                new_ip.active_link_to(
                                    observable, "IP", "ThreatCrowd", last_resolved
                                )
                            )
                            result["ip on this domains"] += 1
                        except ObservableValidationError:
                            logging.error(
                                "An error occurred when trying to add subdomain {} to the database".format(
                                    ip["ip_address"]
                                )
                            )
        if "emails" in json_result:
            result["nb emails"] = 0
            for email in json_result["emails"]:
                try:
                    new_email = Email.get_or_create(value=email)
                    links.update(
                        new_email.active_link_to(
                            observable, "Used by", "ThreatCrowd"
                        )
                    )
                    result["nb emails"] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add email {} to the database".format(
                            email
                        )
                    )
        if "subdomains" in json_result:
            result["nb subdomains"] = 0
            for subdomain in json_result["subdomains"]:
                try:
                    new_domain = Hostname.get_or_create(value=subdomain)
                    links.update(
                        observable.active_link_to(
                            new_domain, "subdomain", "ThreatCrowd"
                        )
                    )
                    result["nb subdomains"] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add subdomain {} to the database".format(
                            subdomain
                        )
                    )
    if isinstance(observable, Ip):
        if "resolutions" in json_result:
            result["domains resolved"] = 0
            for domain in json_result["resolutions"]:
                if domain["domain"].strip() != observable.value:
                    try:
                        last_resolved = datetime.datetime.strptime(
                            domain["last_resolved"], "%Y-%m-%d"
                        )
                        new_domain = Hostname.get_or_create(
                            value=domain["domain"].strip()
                        )
                        links.update(
                            new_domain.active_link_to(
                                observable, "A Record", "ThreatCrowd", last_resolved
                            )
                        )
                        result["domains resolved"] += 1
                    except ObservableValidationError:
                        logging.error(
                            "An error occurred when trying to add domain {} to the database".format(
                                domain["domain"]
                            )
                        )
        if "hashes" in json_result and len(json_result["hashes"]) > 0:
            result["malwares"] = 0
            for h in json_result["hashes"]:
                new_hash = Hash.get_or_create(value=h)
                links.update(
                    new_hash.active_link_to(observable, "hash", "ThreatCrowd")
                )
                result["malwares"] += 1
    if isinstance(observable, Email):
        # BUG FIX: the guard previously tested len(json_result) instead of
        # the 'domains' list itself.
        if "domains" in json_result and len(json_result["domains"]) > 0:
            result["domains recorded by email"] = 0
            for domain in json_result["domains"]:
                new_domain = Hostname.get_or_create(value=domain)
                links.update(
                    new_domain.active_link_to(
                        observable, "recorded by", "ThreatCrowd"
                    )
                )
                result["domains recorded by email"] += 1
    if isinstance(observable, Hash):
        result["nb c2"] = 0
        # BUG FIX: source name was misspelled 'ThreadCrowd' on the three
        # hash links, inconsistent with every other link in this function.
        if "md5" in json_result:
            new_hash = Hash.get_or_create(value=json_result["md5"])
            links.update(new_hash.active_link_to(observable, "md5", "ThreatCrowd"))
        if "sha1" in json_result:
            new_hash = Hash.get_or_create(value=json_result["sha1"])
            links.update(new_hash.active_link_to(observable, "sha1", "ThreatCrowd"))
        if "sha256" in json_result:
            new_hash = Hash.get_or_create(value=json_result["sha256"])
            links.update(
                new_hash.active_link_to(observable, "sha256", "ThreatCrowd")
            )
        if "domains" in json_result and len(json_result["domains"]):
            for domain in json_result["domains"]:
                new_domain = Hostname.get_or_create(value=domain)
                links.update(
                    observable.active_link_to(new_domain, "c2", "ThreatCrowd")
                )
                result["nb c2"] += 1
        if "ips" in json_result and len(json_result["ips"]):
            for ip in json_result["ips"]:
                new_ip = Ip.get_or_create(value=ip.strip())
                links.update(observable.active_link_to(new_ip, "c2", "ThreatCrowd"))
                result["nb c2"] += 1
    if "permalink" in json_result:
        result["permalink"] = json_result["permalink"]
    result["source"] = "threatcrowd_query"
    result["raw"] = json_string
    observable.add_context(result)
    return list(links)
def analyze(self, item):
    """Ingest one Hybrid Analysis report.

    Creates a File object for the reported sample, attaches report
    metadata as context, tags it, links it to its md5/sha1 hashes, to the
    domains it contacted ('C2') and to the files it dropped ('drop').
    """
    report_file = File.get_or_create(value='FILE: {}'.format(item['sha256']))
    sha256 = Hash.get_or_create(value=item['sha256'])

    tags = []
    context = {'source': self.name}

    # "a.b.c" family names become a single space-separated tag
    if 'vxfamily' in item:
        tags.append(' '.join(item['vxfamily'].split('.')))
    if 'tags' in item:
        tags.extend(item['tags'])

    # copy optional report fields into the context when present
    for item_key, context_key in (
            ('threatlevel_human', 'threatlevel_human'),
            ('threatlevel', 'threatlevel'),
            ('type', 'type'),
            ('size', 'size'),
            ('vt_detect', 'virustotal_score'),
            ('et_alerts_total', 'et_alerts_total')):
        if item_key in item:
            context[context_key] = item[item_key]
    if 'process_list' in item:
        context['count_process_spawn'] = len(item['process_list'])
    context['url'] = 'https://www.hybrid-analysis.com' + item['reporturl']

    report_file.add_context(context)
    report_file.tag(tags)

    md5 = Hash.get_or_create(value=item['md5'])
    report_file.active_link_to(md5, 'md5', self.name)
    sha1 = Hash.get_or_create(value=item['sha1'])
    report_file.active_link_to(sha1, 'sha1', self.name)

    if 'domains' in item:
        for domain in item['domains']:
            try:
                new_host = Hostname.get_or_create(value=domain)
                report_file.active_link_to(new_host, 'C2', self.name)
                logging.debug(domain)
                new_host.add_context({
                    'source': self.name,
                    'contacted by': report_file
                })
            except ObservableValidationError as e:
                logging.error(e)

    if 'extracted_files' in item:
        for dropped in item['extracted_files']:
            dropped_context = {'source': self.name}
            # skip entries without a sha256 (cannot build a File value)
            if 'sha256' not in dropped:
                logging.error(dropped)
                continue
            dropped_file = File.get_or_create(
                value='FILE: {}'.format(dropped['sha256']))
            dropped_sha256 = Hash.get_or_create(value=dropped['sha256'])
            dropped_file.active_link_to(dropped_sha256, 'sha256', self.name)
            # score defaults to 0 when no AV engines matched
            dropped_context['virustotal_score'] = dropped.get('av_matched', 0)
            dropped_context['size'] = dropped['file_size']
            if 'threatlevel_readable' in dropped:
                dropped_context['threatlevel'] = dropped[
                    'threatlevel_readable']
            if 'av_label' in dropped:
                dropped_file.tag(dropped['av_label'])
            if 'type_tags' in dropped:
                dropped_file.tag(dropped['type_tags'])
            dropped_file.add_context(dropped_context)
            report_file.active_link_to(dropped_file, 'drop', self.name)
def analyze(self, item):
    """Process one URLhaus CSV row.

    ``item`` is the parsed row ``(first_seen, url, filetype, md5, sha256,
    signature)``. Comment lines and rows older than the configured
    ``limits.max_age`` are skipped; otherwise a Url observable, a File
    object and its md5/sha256 Hash observables are created, tagged and
    linked together.
    """
    # skip empty rows and '#' comment lines
    if not item or item[0].startswith("#"):
        return

    first_seen, url, filetype, md5, sha256, signature = item

    # drop entries older than the configured maximum age
    # NOTE(review): assumes first_seen parses to a naive timestamp;
    # comparing an aware datetime with datetime.now() would raise — confirm
    # the feed's timestamp format.
    item_date = dateutil.parser.parse(first_seen)
    max_age = yeti_config.get('limits', 'max_age')
    limit_date = datetime.now() - timedelta(days=max_age)
    if item_date < limit_date:
        return

    if not url:
        return

    # BUG FIX: the feed encodes "no signature" as the literal string 'None'
    # (as the original checks below showed); the previous `signature != None`
    # guard was always true and tagged every URL with a bogus "None" tag.
    # Computed once and reused for consistency across all observables.
    has_signature = bool(signature) and signature != 'None'

    try:
        url_obs = Url.get_or_create(value=url)
        if has_signature:
            # normalize the signature into a tag-safe string
            tag = signature\
                .replace(' ', '_')\
                .replace('/', '_')\
                .replace(':', '_')\
                .replace('.', '-')\
                .replace('!', '-')
            url_obs.tag(tag)

        context = {
            'first_seen': first_seen,
            'source': self.name
        }
        url_obs.add_context(context)
        url_obs.add_source('feed')

        context_malware = {
            'source': self.name
        }

        malware_file = File.get_or_create(
            value='FILE:{}'.format(sha256))
        malware_file.add_context(context_malware)

        # renamed from `sha256`/`md5` to avoid shadowing the raw csv values
        sha256_obs = Hash.get_or_create(value=sha256)
        sha256_obs.tag(filetype)
        sha256_obs.add_context(context_malware)
        if has_signature:
            sha256_obs.tag(signature)

        md5_obs = Hash.get_or_create(value=md5)
        md5_obs.add_context(context_malware)
        md5_obs.tag(filetype)
        if has_signature:
            md5_obs.tag(signature)

        malware_file.active_link_to(md5_obs, 'md5', self.name)
        malware_file.active_link_to(sha256_obs, 'sha256', self.name)

        if has_signature:
            malware_file.tag(signature)
        malware_file.tag(filetype)

        url_obs.active_link_to(malware_file, 'drops', self.name)
    except ObservableValidationError as e:
        logging.error(e)
def _make_threat_nodes(threat, context, tags):
    """Create observable nodes (Url and/or Hash) for one Proofpoint threat.

    Returns the list of created nodes tagged with *tags*, or None when the
    threat is inactive or its type/classification combination is not
    supported.
    """
    # extract Url and Hash info
    threats = dict()
    if threat["threatStatus"] != "active":
        # FIXME, clear out false positive ?
        log.warning(
            "threatStatus %s for threat %s",
            threat["threatStatus"],
            threat["threatID"],
        )
        log.debug(pprint.pformat(threat))
        return None
    log.debug("_make_threat_nodes for threat %s", threat["threatID"])
    # threattype, classification
    # url, phish: url leads to phishing page (threat is url)
    # url, malware: url leads to malware download (threat is url, threatid is maybe sha256)
    # attachment, malware: attachement is malware (threat is sha256)
    # spam, url
    if threat["threatType"] == "url":
        if threat["classification"] == "phish":
            pass  # just keep the url
        elif threat["classification"] == "malware":
            # get url and hash
            threats["attachment"] = threat
        elif threat["classification"] == "spam":
            log.info("URL threat - ignore classification %s", threat["classification"])
        else:
            log.error("Type: url, Unsupported classification %s", threat["classification"])
            log.debug(pprint.pformat(threat))
            return None
        # all supported url classifications also record the url itself
        threats["url"] = threat
    elif threat["threatType"] == "attachment":
        if threat["classification"] == "malware":
            threats["attachment"] = threat
        else:
            log.error(
                "Type: attachment, Unsupported classification %s",
                threat["classification"],
            )
            log.debug(pprint.pformat(threat))
            return None
    else:
        log.error(
            "Unsupported threatType %s classification %s",
            threat["threatType"],
            threat["classification"],
        )
        log.debug(pprint.pformat(threat))
        return None
    # FIXME check if they exist already.
    # if they do, do not parse the threat a second time ?
    threat_nodes = []
    if "url" in threats:
        # Proofpoint sometimes supplies a hostname marked as a Url;
        # this relies on Yeti to determine the type/class and act appropriately
        threat_nodes.append(
            Observable.guess_type(threats["url"]["threat"]).get_or_create(
                value=threats["url"]["threat"], context=[context]))
    if "attachment" in threats:
        # NOTE(review): threatID is assumed to be the attachment's sha256 —
        # see the comment block above; confirm against the API.
        threat_nodes.append(
            Hash.get_or_create(value=threats["attachment"]["threatID"],
                               context=[context]))
    for o in threat_nodes:
        o.tag([t["name"] for t in tags])
    return threat_nodes
def _get_threat_forensics_nodes_inner(self, evidence, general_context, tags):
    """Turn one forensics *evidence* dict into a list of Yeti observables.

    The observable class is chosen from ``evidence["type"]`` (file/dropper,
    dns, ids, mutex, network, url, ...); each node gets *general_context*
    merged with the context extracted from the evidence notes, and is
    tagged with *tags* plus the evidence type. Returns the list of created
    nodes (possibly empty for unhandled types).
    """
    # create context from notes
    context = general_context.copy()
    _ctx = self._make_context_from_notes([evidence])
    context.update(_ctx)
    # add evidence['type'] and unicify tags
    tags = [{
        "name": _
    } for _ in set([evidence["type"]] + [d["name"] for d in tags])]
    # create Tags in DB
    for _ in tags:
        Tag.get_or_create(name=_["name"])
    #
    threat_forensics = []
    # technical hack: set optional comments values
    # BUG FIX: the list previously contained 'rule' twice; deduplicated.
    for optional in ["action", "rule", "path"]:
        if optional not in evidence["what"]:
            evidence["what"][optional] = None
    # add attributes for the known evidence type
    if evidence["type"] in ["file", "dropper"]:
        if "path" in evidence["what"]:
            threat_forensics.append(
                File.get_or_create(value=evidence["what"]["path"],
                                   context=[context]))
        if "md5" in evidence["what"]:
            threat_forensics.append(
                Hash.get_or_create(value=evidence["what"]["md5"],
                                   context=[context]))
        if "sha256" in evidence["what"]:
            threat_forensics.append(
                Hash.get_or_create(value=evidence["what"]["sha256"],
                                   context=[context]))
    elif evidence["type"] == "cookie":
        pass
    elif evidence["type"] == "dns":
        threat_forensics.append(
            Hostname.get_or_create(value=evidence["what"]["host"],
                                   context=[context]))
    elif evidence["type"] == "ids":
        threat_forensics.append(
            Text.get_or_create(value=evidence["what"]["ids"],
                               context=[context]))
    elif evidence["type"] == "mutex":
        threat_forensics.append(
            Text.get_or_create(value=evidence["what"]["name"],
                               context=[context]))
    elif evidence["type"] == "network":
        if "ip" in evidence["what"]:
            # FIXME port, type
            threat_forensics.append(
                Ip.get_or_create(value=evidence["what"]["ip"],
                                 context=[context]))
        elif "domain" in evidence["what"]:
            threat_forensics.append(
                Hostname.get_or_create(value=evidence["what"]["domain"],
                                       context=[context]))
    elif evidence["type"] == "process":
        pass
    elif evidence["type"] == "registry":
        # threat_forensics.append(evidence['what']['key'])
        # threat_forensics.append(evidence['what']['value'])
        pass
    elif evidence["type"] == "url":
        # BUG yeti-#115 ObservableValidationError: Invalid URL: http://xxxxx-no-tld/
        threat_forensics.append(
            Url.get_or_create(value=evidence["what"]["url"],
                              context=[context]))
    # add note as tag because its a signature
    # BUG FIX: guard against an empty node list (e.g. a 'cookie' or
    # 'process' evidence carrying a note) — threat_forensics[-1] raised
    # IndexError in that case.
    if "note" in evidence and threat_forensics:
        threat_forensics[-1].tag(evidence["note"].replace(
            ".", "_").strip("_"))
    # tag all of that
    for o in threat_forensics:
        o.tag([t["name"] for t in tags])
    return threat_forensics
def _make_threat_nodes(threat, context, tags):
    """Create observable nodes (Url and/or Hash) for one Proofpoint threat.

    Returns the list of created nodes tagged with *tags*, or None when the
    threat is inactive or its type/classification combination is not
    supported.
    """
    # extract Url and Hash info
    threats = dict()
    if threat['threatStatus'] != 'active':
        # FIXME, clear out false positive ?
        log.warning(
            "threatStatus %s for threat %s", threat['threatStatus'],
            threat['threatID'])
        log.debug(pprint.pformat(threat))
        return None
    log.debug('_make_threat_nodes for threat %s', threat['threatID'])
    # threattype, classification
    # url, phish: url leads to phishing page (threat is url)
    # url, malware: url leads to malware download (threat is url, threatid is maybe sha256)
    # attachment, malware: attachement is malware (threat is sha256)
    # spam, url
    if threat['threatType'] == 'url':
        if threat['classification'] == 'phish':
            pass  # just keep the url
        elif threat['classification'] == 'malware':
            # get url and hash
            threats['attachment'] = threat
        elif threat['classification'] == 'spam':
            log.info(
                'URL threat - ignore classification %s',
                threat['classification'])
        else:
            log.error(
                'Type: url, Unsupported classification %s',
                threat['classification'])
            log.debug(pprint.pformat(threat))
            return None
        # all supported url classifications also record the url itself
        threats['url'] = threat
    elif threat['threatType'] == 'attachment':
        if threat['classification'] == 'malware':
            threats['attachment'] = threat
        else:
            log.error(
                'Type: attachment, Unsupported classification %s',
                threat['classification'])
            log.debug(pprint.pformat(threat))
            return None
    else:
        log.error(
            'Unsupported threatType %s classification %s',
            threat['threatType'], threat['classification'])
        log.debug(pprint.pformat(threat))
        return None
    # FIXME check if they exist already.
    # if they do, do not parse the threat a second time ?
    threat_nodes = []
    if 'url' in threats:
        # Proofpoint sometimes supplies a hostname marked as a Url;
        # this relies on Yeti to determine the type/class and act appropriately
        threat_nodes.append(
            Observable.guess_type(threats['url']['threat']).get_or_create(
                value=threats['url']['threat'], context=[context]))
    if 'attachment' in threats:
        # NOTE(review): threatID is assumed to be the attachment's sha256 —
        # see the comment block above; confirm against the API.
        threat_nodes.append(
            Hash.get_or_create(
                value=threats['attachment']['threatID'], context=[context]))
    for o in threat_nodes:
        o.tag([t['name'] for t in tags])
    return threat_nodes