def analyze(self, item):
    tags = []
    json_string = item.to_json()
    context = {"source": self.name}

    f_vt = File.get_or_create(value="FILE:{}".format(item["sha256"]))
    sha256 = Hash.get_or_create(value=item["sha256"])
    md5 = Hash.get_or_create(value=item["md5"])
    sha1 = Hash.get_or_create(value=item["sha1"])

    f_vt.active_link_to(md5, "md5", self.name)
    f_vt.active_link_to(sha1, "sha1", self.name)
    f_vt.active_link_to(sha256, "sha256", self.name)

    tags.append(item["ruleset_name"])
    tags.append(item["type"])

    context["raw"] = json_string
    context["size"] = item["size"]
    context["score vt"] = "%s/%s" % (item["positives"], item["total"])

    f_vt.tag(tags)
    f_vt.add_context(context)
def analyze(self, line):
    md5_obs = False
    sha256_obs = False
    url_obs = False
    malware_file = False

    context = {'source': self.name}
    first_seen, url, filetype, md5, sha256, signature = line

    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            if signature != 'None':
                url_obs.tag(signature)
            url_obs.add_context(context)
            url_obs.add_source(self.name)
        except ObservableValidationError as e:
            logging.error(e)

    if sha256:
        try:
            malware_file = File.get_or_create(
                value='FILE:{}'.format(sha256))
            malware_file.add_context(context)
            malware_file.tag(filetype)

            sha256_obs = Hash.get_or_create(value=sha256)
            sha256_obs.tag(filetype)
            sha256_obs.add_context(context)
            if signature != 'None':
                sha256_obs.tag(signature)
        except ObservableValidationError as e:
            logging.error(e)

    if md5:
        try:
            md5_obs = Hash.get_or_create(value=md5)
            md5_obs.add_context(context)
            md5_obs.tag(filetype)
            if signature != 'None':
                md5_obs.tag(signature)
        except ObservableValidationError as e:
            logging.error(e)

    if malware_file:
        if signature != 'None':
            malware_file.tag(signature)
        if md5_obs:
            malware_file.active_link_to(md5_obs, 'md5', self.name)
        if sha256_obs:
            malware_file.active_link_to(sha256_obs, 'sha256', self.name)
        if url_obs:
            url_obs.active_link_to(malware_file, 'drops', self.name)
def analyze(observable, results):
    links = set()

    if isinstance(observable, Hostname):
        params = {"q": observable.value, "rt": 4}
        json_result = ThreatMinerApi.fetch(observable, params, "domain.php")
        _results, result = aux_checker(json_result)

        for r in _results:
            hashes = {"sha256": r}

            for family, _hash in hashes.items():
                if _hash == observable.value:
                    continue
                try:
                    new_hash = Hash.get_or_create(value=_hash)
                    new_hash.tag(observable.get_tags())
                    links.update(new_hash.active_link_to(
                        observable, family, "threatminer_query"))
                except ObservableValidationError as e:
                    logging.error("Caught an exception: {}".format(e))

        observable.add_context(result)

    elif isinstance(observable, Ip):
        params = {"q": observable.value, "rt": 4}
        json_result = ThreatMinerApi.fetch(observable, params, "host.php")
        _results, result = aux_checker(json_result)

        for r in _results:
            hashes = {"sha256": r}

            for family, _hash in hashes.items():
                if _hash == observable.value:
                    continue
                try:
                    new_hash = Hash.get_or_create(value=_hash)
                    new_hash.tag(observable.get_tags())
                    links.update(new_hash.active_link_to(
                        observable, family, "threatminer_query"))
                except ObservableValidationError as e:
                    logging.error("Caught an exception: {}".format(e))

        observable.add_context(result)

    return list(links)
def analyze(self, block):
    if not block.get('sha256_hash'):
        return

    context = {}
    context['date_added'] = block['first_seen_utc']
    context['source'] = self.name
    context['filename'] = block['file_name']
    context['md5'] = block['md5_hash']
    context['sha1'] = block['sha1_hash']
    context['sha256'] = block['sha256_hash']
    context['imphash'] = block['imphash']
    context['ssdeep'] = block['ssdeep']

    malware_file = File.get_or_create(
        value='FILE:{}'.format(block['sha256_hash']))
    malware_file.add_context(context)
    malware_file.tag(block['mime_type'])

    # md5, sha1, sha256
    for hash_type in ('md5_hash', 'sha1_hash', 'sha256_hash'):
        try:
            hash_data = Hash.get_or_create(value=block[hash_type])
            hash_data.add_context(context)
            hash_data.add_source(self.name)
            if block.get('tags'):
                hash_data.tag(block['tags'])
            malware_file.active_link_to(hash_data,
                                        hash_type.split('_')[0],
                                        self.name)
        except ObservableValidationError as e:
            logging.error(e)
def analyze(self, item):
    tags = []
    context = {"source": self.name}

    # Parse values of interest
    subject = item["attributes"]["rule_name"]
    date = item["attributes"]["date"]
    tags2 = item["attributes"]["tags"]
    sha2 = re.search(regex, str(tags2)).group()
    date_string = datetime.utcfromtimestamp(date).strftime(
        "%d/%m/%Y %H:%M:%S")
    tags2.remove(sha2)

    # Update the Yeti DB
    f_vt3 = File.get_or_create(value="FILE:{}".format(sha2))
    sha256 = Hash.get_or_create(value=sha2)
    f_vt3.active_link_to(sha256, "sha256", self.name)

    tags.extend(tags2)
    tags.append(subject)

    context["date_added"] = date_string
    context["snippet"] = item["attributes"]["snippet"]
    # context["source_country"] = item["attributes"]["source_country"]
    context["raw"] = item

    f_vt3.tag(tags)
    f_vt3.add_context(context)
def _make_threat_nodes(threat, context, tags):
    # extract Url and Hash info
    threats = dict()

    if threat['threatStatus'] != 'active':
        # FIXME, clear out false positive ?
        log.warning("threatStatus %s for threat %s",
                    threat['threatStatus'], threat['threatID'])
        log.debug(pprint.pformat(threat))
        return None

    log.debug('_make_threat_nodes for threat %s', threat['threatID'])

    # threattype, classification
    # url, phish: url leads to phishing page (threat is url)
    # url, malware: url leads to malware download (threat is url, threatid is maybe sha256)
    # attachment, malware: attachment is malware (threat is sha256)
    # spam, url
    if threat['threatType'] == 'url':
        if threat['classification'] == 'phish':
            pass  # just keep the url
        elif threat['classification'] == 'malware':
            # get url and hash
            threats['attachment'] = threat
        elif threat['classification'] == 'spam':
            log.info('URL threat - ignore classification %s',
                     threat['classification'])
        else:
            log.error('Type: url, Unsupported classification %s',
                      threat['classification'])
            log.debug(pprint.pformat(threat))
            return None
        threats['url'] = threat
    elif threat['threatType'] == 'attachment':
        if threat['classification'] == 'malware':
            threats['attachment'] = threat
        else:
            log.error('Type: attachment, Unsupported classification %s',
                      threat['classification'])
            log.debug(pprint.pformat(threat))
            return None
    else:
        log.error('Unsupported threatType %s classification %s',
                  threat['threatType'], threat['classification'])
        log.debug(pprint.pformat(threat))
        return None

    # FIXME check if they exist already.
    # if they do, do not parse the threat a second time ?
    threat_nodes = []
    if 'url' in threats:
        # Proofpoint sometimes supplies a hostname marked as a Url.
        # This relies on Yeti to determine the type/class and act appropriately.
        threat_nodes.append(
            Observable.guess_type(threats['url']['threat']).get_or_create(
                value=threats['url']['threat'], context=[context]))
    if 'attachment' in threats:
        threat_nodes.append(
            Hash.get_or_create(value=threats['attachment']['threatID'],
                               context=[context]))

    for o in threat_nodes:
        o.tag([t['name'] for t in tags])

    return threat_nodes
def analyze(self, dict): observable_sample = dict["title"] context_sample = {} context_sample["description"] = "Pony sample" context_sample["date_added"] = datetime.strptime(dict["pubDate"], "%d-%m-%Y") context_sample["source"] = self.name link_c2 = re.search("https?://[^ ]*", dict["description"].lower()).group() observable_c2 = link_c2 context_c2 = {} context_c2["description"] = "Pony c2" context_c2["date_added"] = datetime.strptime(dict["pubDate"], "%d-%m-%Y") context_c2["source"] = self.name try: sample = Hash.get_or_create(value=observable_sample) sample.add_context(context_sample) sample.add_source("feed") sample_tags = ["pony", "objectives"] sample.tag(sample_tags) except ObservableValidationError as e: logging.error(e) return try: c2 = Url.get_or_create(value=observable_c2) c2.add_context(context_c2) c2.add_source("feed") c2_tags = ["c2", "pony"] c2.tag(c2_tags) sample.active_link_to(c2, "c2", self.name, clean_old=False) except ObservableValidationError as e: logging.error(e) return
def analyze(self, item):
    if not item:
        return

    item_date = parser.parse(item['pubDate'])
    max_age = yeti_config.get('limits', 'max_age')
    limit_date = pytz.UTC.localize(datetime.now()) - timedelta(days=max_age)

    if item_date < limit_date:
        return

    md5_search = re.search(r'md5:\t([a-fA-F\d]{32})<br />',
                           item['description'])
    if not bool(md5_search):
        return

    context = {}
    tags = ['malware']

    if item['category'] != '':
        context['threat'] = item['category']
        signature = item['category']\
            .replace(' ', '_')\
            .replace('/', '_')\
            .replace(':', '_')\
            .replace('.', '-')\
            .replace('!', '-')
        if signature == 'clean_site':
            return
        tags.append(signature)

    context['date_added'] = item_date
    context['source'] = self.name
    context['reference'] = item['link']

    try:
        sample = Hash.get_or_create(value=md5_search.group(1))
        sample.add_context(context)
        sample.add_source("feed")
        sample.tag(tags)
    except ObservableValidationError as e:
        logging.error(e)
        return
    except Exception as e:
        print(e)

    try:
        url = Url.get_or_create(value=item['title'])
        url.add_context(context)
        url.add_source("feed")
        url.tag(tags)
        sample.active_link_to(url, 'drops', self.name)
    except ObservableValidationError as e:
        logging.error(e)
        return
def analyze(self, line):
    first_seen = line['Listingdate']
    _sha1 = line['SHA1']
    reason = line['Listingreason']

    tags = []
    tag = reason.split(' ')
    if len(tag) >= 2:
        family = tag[0]
        tags.append(family.lower())
        _type = tag[-1]
        if TYPE_DICT.get(_type):
            tags += TYPE_DICT[_type]

    tags.append("ssl_fingerprint")

    context_hash = {'source': self.name, 'first_seen': first_seen}

    try:
        sha1 = Hash.get_or_create(value=_sha1)
        sha1.tag(tags)
        sha1.add_context(context_hash)
    except ObservableValidationError as e:
        logging.error("Invalid line: {}\nLine: {}".format(e, line))
def process_file(file_vt, attributes):
    context = {"source": "VirusTotal"}
    links = set()

    stat_files = attributes["last_analysis_stats"]
    for k, v in stat_files.items():
        context[k] = v

    context["magic"] = attributes["magic"]

    first_seen = attributes["first_submission_date"]
    context["first_seen"] = datetime.fromtimestamp(first_seen).isoformat()

    last_seen = attributes["last_analysis_date"]
    context["last_seen"] = datetime.fromtimestamp(last_seen).isoformat()
    context["names"] = " ".join(n for n in attributes["names"])

    tags = attributes["tags"]
    if attributes["last_analysis_results"]:
        context["raw"] = attributes["last_analysis_results"]
    if tags:
        file_vt.tag(tags)

    observables = [
        (h, Hash.get_or_create(value=attributes[h]))
        for h in ("sha256", "md5", "sha1")
        if file_vt.value != attributes[h]
    ]

    for h, obs in observables:
        obs.add_context(context)
        links.update(obs.active_link_to(file_vt, h, context["source"]))

    file_vt.add_context(context)
    return links
def analyze(self, block): if not block.get("sha256_hash"): return context = {} context["date_added"] = block["first_seen_utc"] context["source"] = self.name context["filename"] = block["file_name"] context["md5"] = block["md5_hash"] context["sha1"] = block["sha1_hash"] context["sha256"] = block["sha256_hash"] context["imphash"] = block["imphash"] context["ssdeep"] = block["ssdeep"] malware_file = File.get_or_create(value="FILE:{}".format(block["sha256_hash"])) malware_file.add_context(context) malware_file.tag(block["mime_type"]) # md5, sha1, sha256 for hash_type in ("md5_hash", "sha1_hash", "sha256_hash"): try: hash_data = Hash.get_or_create(value=block[hash_type]) hash_data.add_context(context) hash_data.add_source(self.name) if block.get("tags"): hash_data.tag(block["tags"]) malware_file.active_link_to( hash_data, hash_type.split("_")[0], self.name ) except ObservableValidationError as e: logging.error(e)
def analyze(self, line):
    if not line or line[0].startswith("#"):
        return

    tokens = line.split(',')
    if len(tokens) == 3:
        dateadded = tokens[0]
        md5 = tokens[1]
        variant = tokens[2]

        context = {
            "first_seen": dateadded,
            "subfamily": variant,
            "source": self.name
        }

        if md5:
            try:
                hash_obs = Hash.get_or_create(value=md5)
                hash_obs.tag([variant, 'malware', 'crimeware', 'banker'])
                hash_obs.add_context(context)
                hash_obs.add_source('feed')
            except ObservableValidationError as e:
                logging.error(e)
def analyze(self, item):
    observable_sample = item['title']
    context_sample = {}
    context_sample['description'] = "Atmos sample"
    context_sample['date_added'] = parser.parse(item['pubDate'])
    context_sample['source'] = self.name

    link_c2 = re.search("<a href[^>]+>(?P<url>[^<]+)",
                        item['description'].lower()).group("url")
    observable_c2 = link_c2
    context_c2 = {}
    context_c2['description'] = "Atmos c2"
    context_c2['date_added'] = parser.parse(item['pubDate'])
    context_c2['source'] = self.name

    try:
        sample = Hash.get_or_create(value=observable_sample)
        sample.add_context(context_sample)
        sample.add_source("feed")
        sample_tags = ['atmos', 'objectives']
        sample.tag(sample_tags)
    except ObservableValidationError as e:
        logging.error(e)
        return

    try:
        c2 = Url.get_or_create(value=observable_c2)
        c2.add_context(context_c2)
        c2.add_source("feed")
        c2_tags = ['c2', 'atmos']
        c2.tag(c2_tags)
        sample.active_link_to(c2, 'c2', self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
        return
def analyze(self, dict):
    observable_sample = dict['title']
    context_sample = {}
    context_sample['description'] = "Atmos sample"
    context_sample['date_added'] = datetime.strptime(dict['pubDate'],
                                                     "%d-%m-%Y")
    context_sample['source'] = self.name

    link_c2 = re.search("<a href[^>]+>(?P<url>[^<]+)",
                        dict['description'].lower()).group("url")
    observable_c2 = link_c2
    context_c2 = {}
    context_c2['description'] = "Atmos c2"
    context_c2['date_added'] = datetime.strptime(dict['pubDate'], "%d-%m-%Y")
    context_c2['source'] = self.name

    try:
        sample = Hash.get_or_create(value=observable_sample)
        sample.add_context(context_sample)
        sample.add_source("feed")
        sample_tags = ['atmos', 'objectives']
        sample.tag(sample_tags)
    except ObservableValidationError as e:
        logging.error(e)
        return

    try:
        c2 = Url.get_or_create(value=observable_c2)
        c2.add_context(context_c2)
        c2.add_source("feed")
        c2_tags = ['c2', 'atmos']
        c2.tag(c2_tags)
        sample.active_link_to(c2, 'c2', self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
        return
def _process_data(json_result, observable):
    links = set()

    for key in ("undetected_communicating_file", "detected_downloaded_file",
                "undetected_downloaded_file", "detected_communicating_file"):
        for file_hash in json_result.get(key, []):
            new_hash = Hash.get_or_create(value=file_hash)
            new_hash.tag(observable.get_tags())
            links.update(
                new_hash.active_link_to(observable, key, "malwares.com"))

    for host in json_result.get("hostname_history", {}).get("list", []):
        new_host = Hostname.get_or_create(value=host)
        new_host.tag(observable.get_tags())
        links.update(
            new_host.active_link_to(observable, "hostname", "malwares.com"))

    for key in ("detected_url", "undetected_url", "distribution_url"):
        for url in json_result.get(key, []):
            new_url = Url.get_or_create(value=url)
            new_url.tag(observable.get_tags())
            links.update(
                new_url.active_link_to(observable, key, "malwares.com"))

    observable.add_context(json_result)
    return list(links)
def analyze(observable, results):
    links = set()
    params = {"q": observable.value, "rt": 1}
    json_result = ThreatMinerApi.fetch(observable, params, "sample.php")

    try:
        _results, result = aux_checker(json_result)
    except GenericYetiError as e:
        logging.error(e.value)
        return links

    for r in _results:
        hashes = {
            "md5": r["md5"],
            "sha1": r["sha1"],
            "sha256": r["sha256"],
        }

        for family, _hash in hashes.items():
            if _hash == observable.value:
                continue
            try:
                new_hash = Hash.get_or_create(value=_hash)
                new_hash.tag(observable.get_tags())
                links.update(
                    new_hash.active_link_to(observable, family,
                                            "threatminer_query"))
            except ObservableValidationError as e:
                logging.error("Caught an exception: {}".format(e))

    observable.add_context(result)
    return list(links)
def analyze(self, item, pub_date):  # pylint: disable=arguments-differ
    observable_sample = item["title"]
    context_sample = {}
    context_sample["description"] = "Pony sample"
    context_sample["date_added"] = pub_date
    context_sample["source"] = self.name

    link_c2 = re.search("https?://[^ ]*",
                        item["description"].lower()).group()
    observable_c2 = link_c2
    context_c2 = {}
    context_c2["description"] = "Pony c2"
    context_c2["date_added"] = pub_date
    context_c2["source"] = self.name

    try:
        sample = Hash.get_or_create(value=observable_sample)
        sample.add_context(context_sample)
        sample.add_source(self.name)
        sample_tags = ["pony", "objectives"]
        sample.tag(sample_tags)
    except ObservableValidationError as e:
        logging.error(e)
        return

    try:
        c2 = Url.get_or_create(value=observable_c2)
        c2.add_context(context_c2)
        c2.add_source(self.name)
        c2_tags = ["c2", "pony"]
        c2.tag(c2_tags)
        sample.active_link_to(c2, "c2", self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
        return
def analyze(self, item):
    if not item or item[0].startswith("#"):
        return

    first_seen, url, filetype, md5, sha256, signature = item

    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            context = {'first_seen': first_seen, 'source': self.name}
            if signature != 'None':
                url_obs.tag(signature)
            url_obs.add_context(context)
            url_obs.add_source('feed')

            context_malware = {'source': self.name}
            malware_file = File.get_or_create(
                value='FILE:{}'.format(sha256))
            malware_file.add_context(context_malware)

            sha256 = Hash.get_or_create(value=sha256)
            sha256.tag(filetype)
            sha256.add_context(context_malware)
            if signature != 'None':
                sha256.tag(signature)

            md5 = Hash.get_or_create(value=md5)
            md5.add_context(context_malware)
            md5.tag(filetype)
            if signature != 'None':
                md5.tag(signature)

            malware_file.active_link_to(md5, 'md5', self.name)
            malware_file.active_link_to(sha256, 'sha256', self.name)

            if signature != 'None':
                malware_file.tag(signature)
            malware_file.tag(filetype)

            url_obs.active_link_to(malware_file, 'drops', self.name)
        except ObservableValidationError as e:
            logging.error(e)
def analyze(observable, results):
    links = set()
    context = {
        "source": "malwares.com",
    }
    params = {
        "api_key": results.settings["malwares_api_key"],
        "hostname": observable.value,
    }

    json_result = MalwaresApi.fetch(observable, params, "hostname/info")
    if json_result:
        context["raw"] = json_result

        for key in (
                "undetected_communicating_file",
                "detected_downloaded_file",
                "undetected_downloaded_file",
                "detected_communicating_file",
        ):
            item = json_result[key]
            for f in item["list"]:
                new_hash = Hash.get_or_create(value=f["sha256"])
                links.update(
                    new_hash.active_link_to(observable, key, "malwares.com"))
            context[key] = item["total"]

        for host in json_result.get("hostname_history", {}).get("list", []):
            new_host = Hostname.get_or_create(value=host)
            new_host.tag(observable.get_tags())
            links.update(
                new_host.active_link_to(observable, "hostname",
                                        "malwares.com"))

        for key in ("detected_url", "undetected_url"):
            item = json_result[key]
            for i in item["list"]:
                try:
                    new_url = Url.get_or_create(value=i["url"])
                    new_url.tag(observable.get_tags())
                    links.update(
                        new_url.active_link_to(observable, key,
                                               "malwares.com"))
                except ObservableValidationError:
                    logging.error("Url is not valid %s" % i["url"])
            context[key] = item["total"]

        ip_history = json_result["ip_history"]
        for i in ip_history["list"]:
            ip = Ip.get_or_create(value=i["ip"])
            links.update(
                ip.active_link_to(observable, "ip_story", "malwares.com"))
        context["ip_story"] = ip_history["total"]

    return links
def analyze(observable, results):
    links = set()
    json_result = MalshareAPI.fetch(observable,
                                    results.settings["malshare_api_key"])
    if json_result is None:
        return []

    json_string = json.dumps(json_result,
                             sort_keys=True,
                             indent=4,
                             separators=(",", ": "))
    results.update(raw=json_string)
    result = {"raw": json_string}

    if "SOURCES" in json_result:
        for source in json_result["SOURCES"]:
            new_url = None
            try:
                new_url = Url.get_or_create(value=source.strip())
                links.update(
                    observable.active_link_to(new_url, "c2",
                                              "malshare_query"))
            except ObservableValidationError:
                logging.error(
                    "An error occurred when trying to add {} to the database"
                    .format(source.strip()))
        result["nb C2"] = len(json_result["SOURCES"])

    try:
        new_hash = Hash.get_or_create(value=json_result["MD5"])
        links.update(
            new_hash.active_link_to(observable, "md5", "malshare_query"))
        new_hash = Hash.get_or_create(value=json_result["SHA1"])
        links.update(
            new_hash.active_link_to(observable, "sha1", "malshare_query"))
        new_hash = Hash.get_or_create(value=json_result["SHA256"])
        links.update(
            new_hash.active_link_to(observable, "sha256", "malshare_query"))
    except ObservableValidationError:
        logging.error(
            "An error occurred when trying to add hashes {} to the database"
            .format(json_string))

    return list(links)
def analyze(observable, results):
    links = set()
    context = {
        "source": "malwares.com",
    }
    params = {
        "api_key": results.settings["malwares_api_key"],
        "ip": observable.value,
    }

    json_result = MalwaresApi.fetch(observable, params, "ip/info")

    for key in (
            "undetected_communicating_file",
            "detected_downloaded_file",
            "undetected_downloaded_file",
            "detected_communicating_file",
    ):
        for item in json_result[key]["list"]:
            h = Hash.get_or_create(value=item["sha256"])
            links.update(
                h.active_link_to(observable, key, context["source"]))
            h.add_context({
                "source": context["source"],
                "first_seen": item["date"],
            })
        context[key] = json_result[key]["total"]

    for key in ("detected_url", "undetected_url"):
        for item in json_result[key]["list"]:
            url = Url.get_or_create(value=item["url"])
            links.update(
                url.active_link_to(observable, key, context["source"]))
            url.add_context({
                "source": context["source"],
                "first_seen": item["date"],
            })
        context[key] = json_result[key]["total"]

    hostname_history = json_result["hostname_history"]
    for item in hostname_history["list"]:
        try:
            hostname = Hostname.get_or_create(value=item["hostname"])
            links.update(
                hostname.active_link_to(observable, "hostname_history",
                                        context["source"]))
            hostname.add_context({
                "source": context["source"],
                "first_seen": item["date"],
            })
        except ObservableValidationError:
            logging.error("%s is not a valid hostname" % item["hostname"])
    context["hostname_history"] = hostname_history["total"]

    observable.add_context(context)
    return links
def each(f):
    try:
        l = f.body.length
    except AttributeError as e:
        # File item has no content
        l = 0

    if l > 0:
        for h in HashFile.extract_hashes(f):
            h = Hash.get_or_create(value=h.hexdigest()).save()
            h.add_source("analytics")
            Link.connect(f, h)
def each(f):
    if f.body:
        f.hashes = []
        for hash_type, h in HashFile.extract_hashes(f.body.contents):
            hash_object = Hash.get_or_create(value=h.hexdigest())
            hash_object.add_source("analytics")
            hash_object.save()
            f.active_link_to(hash_object,
                             "{} hash".format(hash_type.upper()),
                             "HashFile",
                             clean_old=False)
            f.hashes.append({"hash": hash_type, "value": h.hexdigest()})
        f.save()
def analyze(self, line):
    _id = line['id']
    _ = line['firstseen']
    url = line['url']
    _status = line['status']
    _hash = line['hash']
    country = line['country']
    asn = line['as']

    tags = ["collected_by_honeypot"]
    context = {"source": self.name, "country": country}

    url_obs = None
    if url:
        try:
            url_obs = Url.get_or_create(value=url.rstrip())
            url_obs.add_context(context)
            url_obs.tag(tags)
            url_obs.add_source(self.name)
        except ObservableValidationError as e:
            logging.error(e)

    if _hash and len(_hash) > 16:
        try:
            hash_obs = Hash.get_or_create(value=_hash)
            hash_obs.add_context(context)
            hash_obs.tag(tags)
            hash_obs.add_source(self.name)
            if url_obs:
                hash_obs.active_link_to(url_obs, "MD5", self.name,
                                        clean_old=False)
        except ObservableValidationError as e:
            logging.error(e)

    if asn:
        try:
            asn = asn.split(" ")[0].replace("AS", "")
            asn_obs = AutonomousSystem.get_or_create(value=asn)
            asn_obs.add_context(context)
            asn_obs.tag(tags)
            asn_obs.add_source(self.name)
            asn_obs.active_link_to(url_obs, "ASN", self.name,
                                   clean_old=False)
        except ObservableValidationError as e:
            logging.error(e)
def analyze(observable, results):
    params = {
        "api_key": results.settings["malwares_api_key"],
        "hash": observable.value,
    }
    json_result = MalwaresApi.fetch(observable, params, "file/mwsinfo")

    links = set()
    context = {
        "source": "malwares.com",
    }

    if json_result:
        context["raw"] = json_result
        observable.tag(json_result["taglist"])

        if observable.family != "md5":
            hash_md5 = Hash.get_or_create(value=json_result["md5"])
            links.update(
                hash_md5.active_link_to(observable, "md5", "malwares.com"))

        if observable.family != "sha1":
            hash_sha1 = Hash.get_or_create(value=json_result["sha1"])
            links.update(
                hash_sha1.active_link_to(observable, "sha1", "malwares.com"))

        if observable.family != "sha256":
            hash_sha256 = Hash.get_or_create(value=json_result["sha256"])
            links.update(
                hash_sha256.active_link_to(observable, "sha256",
                                           "malwares.com"))

        if "virustotal" in json_result:
            vt = json_result["virustotal"]
            context["vt"] = "%s/%s" % (vt["positives"], vt["total"])
            context["scan_date"] = vt["scan_date"]

        observable.add_context(context)

    return links
def analyze(observable, results):
    links = set()
    json_result = MalshareAPI.fetch(
        observable, results.settings['malshare_api_key'])
    json_string = json.dumps(
        json_result, sort_keys=True, indent=4, separators=(',', ': '))
    results.update(raw=json_string)
    result = {'raw': json_string}

    if 'SOURCES' in json_result:
        for source in json_result['SOURCES']:
            new_url = None
            try:
                new_url = Url.get_or_create(value=source.strip())
                links.update(
                    observable.active_link_to(
                        new_url, 'c2', 'malshare_query'))
            except ObservableValidationError:
                logging.error(
                    "An error occurred when trying to add {} to the database".
                    format(source.strip()))
        result['nb C2'] = len(json_result['SOURCES'])

    try:
        new_hash = Hash.get_or_create(value=json_result['MD5'])
        links.update(
            new_hash.active_link_to(observable, 'md5', 'malshare_query'))
        new_hash = Hash.get_or_create(value=json_result['SHA1'])
        links.update(
            new_hash.active_link_to(observable, 'sha1', 'malshare_query'))
        new_hash = Hash.get_or_create(value=json_result['SHA256'])
        links.update(
            new_hash.active_link_to(observable, 'sha256', 'malshare_query'))
    except ObservableValidationError:
        logging.error(
            "An error occurred when trying to add hashes {} to the database".
            format(json_string))

    return list(links)
def analyze(observable, result):
    links = set()
    endpoint = "/ip_addresses/%s/referrer_files" % observable.value

    api_key = result.settings["virutotal_api_key"]
    result = VirustotalApi.fetch(api_key, endpoint)

    for data in result["data"]:
        attributes = data["attributes"]
        file_vt = Hash.get_or_create(value=data["id"])
        links.update(
            file_vt.active_link_to(observable, "Referrer File",
                                   "Virustotal"))
        links.update(VirustotalApi.process_file(file_vt, attributes))

    return list(links)
def analyze(observable, results):
    links = set()
    json_result = VirustotalApi.fetch(observable,
                                      results.settings['virutotal_api_key'])
    json_string = json.dumps(json_result,
                             sort_keys=True,
                             indent=4,
                             separators=(',', ': '))
    results.update(raw=json_string)
    result = {'raw': json_string}

    if isinstance(observable, Ip):
        # Parse results for ip
        if json_result.get('as_owner'):
            result['Owner'] = json_result['as_owner']
            o_isp = Company.get_or_create(name=json_result['as_owner'])
            links.update(
                observable.active_link_to(o_isp, 'hosting',
                                          'virustotal_query'))

        if json_result.get('detected_urls'):
            result['detected_urls'] = json_result['detected_urls']
            for detected_url in json_result['detected_urls']:
                # link each detected URL back to the observable
                o_url = Url.get_or_create(value=detected_url['url'])
                links.update(
                    o_url.active_link_to(observable, 'hostname',
                                         'virustotal_query'))

    elif isinstance(observable, Hostname):
        if json_result.get('permalink'):
            result['permalink'] = json_result['permalink']
        result['positives'] = json_result.get('positives', 0)
        if json_result.get('total'):
            result['total'] = json_result['total']

    elif isinstance(observable, Hash):
        result['positives'] = json_result['positives']
        if 'permalink' in json_result:
            result['permalink'] = json_result['permalink']
        if 'total' in json_result:
            result['total'] = json_result['total']

        hashes = {
            'md5': json_result['md5'],
            'sha1': json_result['sha1'],
            'sha256': json_result['sha256'],
        }
        create_hashes = [
            (k, v) for k, v in hashes.items() if v != observable.value
        ]
        for k, v in create_hashes:
            new_hash = Hash.get_or_create(value=v)
            new_hash.tag(observable.get_tags())
            links.update(
                new_hash.active_link_to(observable, k, 'virustotal_query'))

    result['source'] = 'virustotal_query'
    observable.add_context(result)
    return list(links)
def analyze(self, line):
    ssdeep, imphash, sha256, sha1, md5 = line

    context = {}
    context['source'] = self.name

    file_obs = False
    try:
        file_obs = File.get_or_create(value='FILE:{}'.format(sha256))
        file_obs.add_context(context)
        file_obs.add_source(self.name)
    except ObservableValidationError as e:
        logging.error(e)

    try:
        md5_data = Hash.get_or_create(value=md5)
        md5_data.add_context(context)
        md5_data.add_source(self.name)
        file_obs.active_link_to(md5_data, 'md5', self.name)
    except ObservableValidationError as e:
        logging.error(e)

    try:
        sha1_data = Hash.get_or_create(value=sha1)
        sha1_data.add_context(context)
        sha1_data.add_source(self.name)
        file_obs.active_link_to(sha1_data, 'sha1', self.name)
    except ObservableValidationError as e:
        logging.error(e)

    try:
        sha256_data = Hash.get_or_create(value=sha256)
        sha256_data.add_context(context)
        sha256_data.add_source(self.name)
        file_obs.active_link_to(sha256_data, 'sha256', self.name)
    except ObservableValidationError as e:
        logging.error(e)
def analyze(observable, results):
    links = set()

    params = {'query': observable.value}
    data = PassiveTotalApi.get('/enrichment/malware', results.settings,
                               params)

    for record in data['results']:
        collection_date = datetime.strptime(record['collectionDate'],
                                            "%Y-%m-%d %H:%M:%S")
        malware = Hash.get_or_create(value=record['sample'])
        links.update(
            malware.link_to(observable, "Contacted Host", record['source'],
                            collection_date))

    return list(links)
def analyze(self, line):
    if not line or line[0].startswith("#"):
        return

    try:
        ip, domain, family, md5, link, date = tuple(map(strip, line))

        context = {
            "first_seen": date,
            "family": family,
            "report": link,
            "source": self.name
        }

        c2 = None
        sample = None

        try:
            sample = Hash.get_or_create(value=md5)
            sample.add_context(context)
            sample.tag(family.lower())
        except ObservableValidationError as e:
            logging.error("Invalid line: {}\nLine: {}".format(e, line))

        try:
            if domain:
                if '/' in domain:
                    c2 = Url.get_or_create(value=domain)
                else:
                    c2 = Hostname.get_or_create(value=domain)
            elif ip:
                c2 = Ip.get_or_create(value=ip)
            else:
                return
            c2.add_context(context)
            c2.tag(['c2', family.lower()])
        except ObservableValidationError as e:
            logging.error("Invalid line: {}\nLine: {}".format(e, line))

        if c2 and sample:
            sample.active_link_to(c2, 'c2', self.name, clean_old=False)
    except ValueError:
        logging.error("Error unpacking line: {}".format(line))
def _get_threat_forensics_nodes_inner(
        self, evidence, general_context, tags):
    # create context from notes
    context = general_context.copy()
    _ctx = self._make_context_from_notes([evidence])
    context.update(_ctx)
    # add evidence['type'] and deduplicate tags
    tags = [{'name': _}
            for _ in set([evidence['type']] + [d['name'] for d in tags])]
    # create Tags in DB
    for _ in tags:
        Tag.get_or_create(name=_['name'])
    #
    threat_forensics = []

    # technical hack: set optional 'what' values so lookups below do not fail
    for optional in ['action', 'rule', 'path']:
        if optional not in evidence['what']:
            evidence['what'][optional] = None

    # add attributes for the known evidence type
    if evidence['type'] in ['file', 'dropper']:
        if 'path' in evidence['what']:
            threat_forensics.append(
                File.get_or_create(
                    value=evidence['what']['path'], context=[context]))
        if 'md5' in evidence['what']:
            threat_forensics.append(
                Hash.get_or_create(
                    value=evidence['what']['md5'], context=[context]))
        if 'sha256' in evidence['what']:
            threat_forensics.append(
                Hash.get_or_create(
                    value=evidence['what']['sha256'], context=[context]))
    elif evidence['type'] == 'cookie':
        pass
    elif evidence['type'] == 'dns':
        threat_forensics.append(
            Hostname.get_or_create(
                value=evidence['what']['host'], context=[context]))
    elif evidence['type'] == 'ids':
        threat_forensics.append(
            Text.get_or_create(
                value=evidence['what']['ids'], context=[context]))
    elif evidence['type'] == 'mutex':
        threat_forensics.append(
            Text.get_or_create(
                value=evidence['what']['name'], context=[context]))
    elif evidence['type'] == 'network':
        if 'ip' in evidence['what']:
            # FIXME port, type
            threat_forensics.append(
                Ip.get_or_create(
                    value=evidence['what']['ip'], context=[context]))
        elif 'domain' in evidence['what']:
            threat_forensics.append(
                Hostname.get_or_create(
                    value=evidence['what']['domain'], context=[context]))
    elif evidence['type'] == 'process':
        pass
    elif evidence['type'] == 'registry':
        # threat_forensics.append(evidence['what']['key'])
        # threat_forensics.append(evidence['what']['value'])
        pass
    elif evidence['type'] == 'url':
        # BUG yeti-#115 ObservableValidationError: Invalid URL: http://xxxxx-no-tld/
        threat_forensics.append(
            Url.get_or_create(
                value=evidence['what']['url'], context=[context]))
        # add note as tag because it's a signature
        if 'note' in evidence:
            threat_forensics[-1].tag(
                evidence['note'].replace('.', '_').strip('_'))

    # tag all of that
    for o in threat_forensics:
        o.tag([t['name'] for t in tags])

    return threat_forensics
def _add_events_nodes(self, events, context, tags):
    log.debug('_add_events_nodes on {nb} events'.format(nb=len(events)))
    attach_unsupported = dict(
        [(_, 0) for _ in ['UNSUPPORTED_TYPE', 'TOO_SMALL', None]])
    event_nodes = list()

    for msg in events:
        create_t = datetime.strptime(msg['messageTime'],
                                     "%Y-%m-%dT%H:%M:%S.%fZ")
        # PPS unique value
        guid = Text.get_or_create(
            value='proofpoint://%s' % msg['GUID'],
            created=create_t,
            context=[context])
        log.debug('Event {msg}'.format(msg=msg['messageID']))
        message_contents = list()

        src_ip = Ip.get_or_create(
            value=msg['senderIP'], created=create_t, context=[context])
        src_ip.tag(['MTA'])
        guid.active_link_to([src_ip], "MTA src ip", self.name)
        # new event
        event_nodes.append(guid)

        # if self.config['import_email_metadata']:
        # email details
        # messageID
        message_id = Email.get_or_create(
            value=msg['messageID'], created=create_t, context=[context])
        guid.active_link_to([message_id], "seen in", self.name)
        # sender
        _s1 = Email.get_or_create(
            value=msg['sender'], created=create_t, context=[context])
        _s1.tag(['sender'])
        guid.active_link_to([_s1], "sender", self.name)
        if 'headerFrom' in msg:
            # header From
            _s2 = Email.get_or_create(
                value=msg['headerFrom'], created=create_t, context=[context])
            _s2.tag(['sender'])
            guid.active_link_to([_s2], "headerFrom", self.name)

        # FIXME is that a duplicate of attachment-malware ?
        # attachment events
        for attach in msg['messageParts']:
            if attach['sandboxStatus'] in ['THREAT']:
                md5 = Hash.get_or_create(
                    value=attach['md5'], created=create_t, context=[context])
                md5.tag([t['name'] for t in tags])
                fname = File.get_or_create(
                    value=attach['filename'],
                    created=create_t,
                    context=[context])
                fname.tag([t['name'] for t in tags])
                # this should be a DUP from threat_nodes in analyse()
                sha_threat = Hash.get_or_create(
                    value=attach['sha256'],
                    created=create_t,
                    context=[context])
                sha_threat.active_link_to([md5, fname], "relates", self.name)
                sha_threat.tag([t['name'] for t in tags])
                message_contents.append(sha_threat)
                # link the 3 together
            elif attach['sandboxStatus'] in ['UNSUPPORTED_TYPE', 'TOO_SMALL',
                                             None]:
                attach_unsupported[attach['sandboxStatus']] += 1
                log.debug(pprint.pformat(attach))

        # add context to the hashes
        guid.active_link_to(message_contents, "delivers", self.name)

    _stats = ', '.join(
        "%s: %d" % (k, v) for k, v in attach_unsupported.items())
    log.warning('Ignored unsupported attachments: %s', _stats)

    for o in event_nodes:
        o.tag([t['name'] for t in tags])

    return event_nodes
def analyze(self, item):
    context = item
    date_string = re.search(r"\((?P<datetime>[\d\- :]+)\)",
                            context['title']).group('datetime')
    try:
        context['date_added'] = datetime.strptime(date_string,
                                                  "%Y-%m-%d %H:%M:%S")
    except ValueError:
        pass

    g = re.match(r'^Host: (?P<host>.+), Version: (?P<version>\w)',
                 context['description'])
    g = g.groupdict()
    context['version'] = g['version']
    context['description'] = FeodoTracker.descriptions[g['version']]
    context['subfamily'] = FeodoTracker.variants[g['version']]
    context['source'] = self.name
    del context['title']

    new = None
    variant_tag = FeodoTracker.variants[g['version']].lower()

    try:
        if re.search(r"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}",
                     g['host']):
            new = Ip.get_or_create(value=g['host'])
        else:
            new = Hostname.get_or_create(value=g['host'])
        new.add_context(context)
        new.add_source("feed")
        new.tag([variant_tag, 'malware', 'crimeware', 'banker', 'c2'])
    except ObservableValidationError as e:
        logging.error(e)

    try:
        url_feodo = context['guid']
        r = requests.get(url_feodo)
        if r.status_code == 200:
            html_source = r.text
            soup = BeautifulSoup(html_source, 'html.parser')
            tab = soup.find('table', attrs='sortable')
            results = []
            if tab:
                all_tr = tab.find_all('tr')
                for tr in all_tr:
                    all_td = tr.find_all('td')
                    if all_td and len(all_td) == 7:
                        results.append({
                            'timestamp': all_td[0].text,
                            'md5_hash': all_td[1].text,
                            'filesize': all_td[2].text,
                            'VT': all_td[3].text,
                            'Host': all_td[4].text,
                            'Port': all_td[5].text,
                            'SSL Certif or method': all_td[6].text
                        })

            for r in results:
                new_hash = Hash.get_or_create(value=r['md5_hash'])
                new_hash.add_context(context)
                new_hash.add_source('feed')
                new_hash.tag([variant_tag, 'malware', 'crimeware', 'banker',
                              'payload'])
                new_hash.active_link_to(new, 'c2', self.name,
                                        clean_old=False)

                host = Url.get_or_create(
                    value='https://%s:%s' % (g['host'], r['Port']))
                host.add_source('feed')
                host.add_context(context)
                host.tag([variant_tag, 'malware', 'crimeware', 'banker',
                          'c2'])
                new_hash.active_link_to(host, 'c2', self.name,
                                        clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
def analyze(observable, results):
    links = set()
    json_result = ThreatCrowdAPI.fetch(observable)
    json_string = json.dumps(
        json_result, sort_keys=True, indent=4, separators=(',', ': '))
    results.update(raw=json_string)
    result = {}

    if isinstance(observable, Hostname):
        if 'resolutions' in json_result:
            result['ip on this domains'] = 0
            for ip in json_result['resolutions']:
                if ip['ip_address'].strip() != observable.value:
                    if ip['last_resolved'] != '0000-00-00':
                        last_resolved = datetime.datetime.strptime(
                            ip['last_resolved'], "%Y-%m-%d")
                        try:
                            new_ip = Ip.get_or_create(
                                value=ip['ip_address'].strip())
                            links.update(
                                new_ip.active_link_to(
                                    observable, 'IP', 'ThreatCrowd',
                                    last_resolved))
                            result['ip on this domains'] += 1
                        except ObservableValidationError:
                            logging.error(
                                "An error occurred when trying to add IP {} to the database".
                                format(ip['ip_address']))

        if 'emails' in json_result:
            result['nb emails'] = 0
            for email in json_result['emails']:
                try:
                    new_email = Email.get_or_create(value=email)
                    links.update(
                        new_email.active_link_to(observable, 'Used by',
                                                 'ThreatCrowd'))
                    result['nb emails'] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add email {} to the database".
                        format(email))

        if 'subdomains' in json_result:
            result['nb subdomains'] = 0
            for subdomain in json_result['subdomains']:
                try:
                    new_domain = Hostname.get_or_create(value=subdomain)
                    links.update(
                        observable.active_link_to(new_domain, 'subdomain',
                                                  'ThreatCrowd'))
                    result['nb subdomains'] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add subdomain {} to the database".
                        format(subdomain))

    if isinstance(observable, Ip):
        if 'resolutions' in json_result:
            result['domains resolved'] = 0
            for domain in json_result['resolutions']:
                if domain['domain'].strip() != observable.value:
                    try:
                        last_resolved = datetime.datetime.strptime(
                            domain['last_resolved'], "%Y-%m-%d")
                        new_domain = Hostname.get_or_create(
                            value=domain['domain'].strip())
                        links.update(
                            new_domain.active_link_to(
                                observable, 'A Record', 'ThreatCrowd',
                                last_resolved))
                        result['domains resolved'] += 1
                    except ObservableValidationError:
                        logging.error(
                            "An error occurred when trying to add domain {} to the database".
                            format(domain['domain']))

        if 'hashes' in json_result and len(json_result['hashes']) > 0:
            result['malwares'] = 0
            for h in json_result['hashes']:
                new_hash = Hash.get_or_create(value=h)
                links.update(
                    new_hash.active_link_to(observable, 'hash',
                                            'ThreatCrowd'))
                result['malwares'] += 1

    if isinstance(observable, Email):
        if 'domains' in json_result and len(json_result['domains']) > 0:
            result['domains recorded by email'] = 0
            for domain in json_result['domains']:
                new_domain = Hostname.get_or_create(value=domain)
                links.update(
                    new_domain.active_link_to(observable, 'recorded by',
                                              'ThreatCrowd'))
                result['domains recorded by email'] += 1

    if isinstance(observable, Hash):
        result['nb c2'] = 0
        if 'md5' in json_result:
            new_hash = Hash.get_or_create(value=json_result['md5'])
            links.update(
                new_hash.active_link_to(observable, 'md5', 'ThreatCrowd'))
        if 'sha1' in json_result:
            new_hash = Hash.get_or_create(value=json_result['sha1'])
            links.update(
                new_hash.active_link_to(observable, 'sha1', 'ThreatCrowd'))
        if 'sha256' in json_result:
            new_hash = Hash.get_or_create(value=json_result['sha256'])
            links.update(
                new_hash.active_link_to(observable, 'sha256', 'ThreatCrowd'))

        if 'domains' in json_result and len(json_result['domains']):
            for domain in json_result['domains']:
                new_domain = Hostname.get_or_create(value=domain)
                links.update(
                    observable.active_link_to(new_domain, 'c2',
                                              'ThreatCrowd'))
                result['nb c2'] += 1

        if 'ips' in json_result and len(json_result['ips']):
            for ip in json_result['ips']:
                new_ip = Ip.get_or_create(value=ip.strip())
                links.update(
                    observable.active_link_to(new_ip, 'c2', 'ThreatCrowd'))
                result['nb c2'] += 1

    if 'permalink' in json_result:
        result['permalink'] = json_result['permalink']

    result['source'] = 'threatcrowd_query'
    result['raw'] = json_string
    observable.add_context(result)
    return list(links)