def _process_data(json_result, observable):
    links = set()

    for key in (
            "undetected_communicating_file",
            "detected_downloaded_file",
            "undetected_downloaded_file",
            "detected_communicating_file",
    ):
        for file_hash in json_result.get(key, []):
            new_hash = Hash.get_or_create(value=file_hash)
            new_hash.tag(observable.get_tags())
            links.update(
                new_hash.active_link_to(observable, key, "malwares.com"))

    for host in json_result.get("hostname_history", {}).get("list", []):
        new_host = Hostname.get_or_create(value=host)
        new_host.tag(observable.get_tags())
        links.update(
            new_host.active_link_to(observable, "hostname", "malwares.com"))

    for key in ("detected_url", "undetected_url", "distribution_url"):
        for url in json_result.get(key, []):
            new_url = Url.get_or_create(value=url)
            new_url.tag(observable.get_tags())
            links.update(
                new_url.active_link_to(observable, key, "malwares.com"))

    observable.add_context(json_result)
    return list(links)
def analyze(self, line):
    if not line or line[0].startswith("#"):
        return

    first_seen, c2_ip, c2_port, family = tuple(line)

    tags = [family.lower(), "c2", "blocklist"]

    context = {"first_seen": first_seen, "source": self.name}

    try:
        new_url = Url.get_or_create(
            value="http://{}:{}/".format(c2_ip, c2_port))
        new_url.add_context(context)
        new_url.tag(tags)
    except ObservableValidationError as e:
        logging.error("Invalid line: {}\nLine: {}".format(e, line))
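# A minimal, self-contained sketch of the row format the unpack above
# expects and the URL it builds; the values are hypothetical, not real
# feed data.
line = ["2019-01-01", "198.51.100.7", "8080", "TrickBot"]
first_seen, c2_ip, c2_port, family = tuple(line)
print("http://{}:{}/".format(c2_ip, c2_port))  # http://198.51.100.7:8080/
print([family.lower(), "c2", "blocklist"])     # ['trickbot', 'c2', 'blocklist']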
def analyze(self, item):
    observable_sample = item['title']

    context_sample = {
        'description': "Atmos sample",
        'date_added': parser.parse(item['pubDate']),
        'source': self.name,
    }

    link_c2 = re.search(
        "<a href[^>]+>(?P<url>[^<]+)",
        item['description'].lower()).group("url")
    observable_c2 = link_c2

    context_c2 = {
        'description': "Atmos c2",
        'date_added': parser.parse(item['pubDate']),
        'source': self.name,
    }

    try:
        sample = Hash.get_or_create(value=observable_sample)
        sample.add_context(context_sample)
        sample.add_source("feed")
        sample.tag(['atmos', 'objectives'])
    except ObservableValidationError as e:
        logging.error(e)
        return

    try:
        c2 = Url.get_or_create(value=observable_c2)
        c2.add_context(context_c2)
        c2.add_source("feed")
        c2.tag(['c2', 'atmos'])
        sample.active_link_to(c2, 'c2', self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
        return
def analyze(self, line):
    url_obs = False
    ip_obs = False

    family = line['Family'].lower()
    url = line['URL']
    ip = line['IP']
    first_seen = line['FirstSeen']

    context = {'first_seen': first_seen, 'source': self.name}

    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            url_obs.add_context(context)
            url_obs.add_source(self.name)
            url_obs.tag(["c2", family])
        except ObservableValidationError as e:
            logging.error(e)

    if ip:
        try:
            ip_obs = Ip.get_or_create(value=ip)
            ip_obs.add_context(context)
            ip_obs.tag(family)
        except ObservableValidationError as e:
            logging.error(e)

    if url_obs and ip_obs:
        url_obs.active_link_to(ip_obs, 'ip', self.name)
def analyze(self, item):
    if not item:
        return

    item_date = parser.parse(item['pubDate'])
    max_age = yeti_config.get('limits', 'max_age')
    limit_date = pytz.UTC.localize(datetime.now()) - timedelta(days=max_age)
    if item_date < limit_date:
        return

    md5_search = re.search(r'md5:\t([a-fA-F\d]{32})<br />', item['description'])
    if not md5_search:
        return

    context = {}
    tags = ['malware']

    if item['category'] != '':
        context['threat'] = item['category']
        signature = (
            item['category']
            .replace(' ', '_')
            .replace('/', '_')
            .replace(':', '_')
            .replace('.', '-')
            .replace('!', '-'))
        if signature == 'clean_site':
            return
        tags.append(signature)

    context['date_added'] = item_date
    context['source'] = self.name
    context['reference'] = item['link']

    try:
        sample = Hash.get_or_create(value=md5_search.group(1))
        sample.add_context(context)
        sample.add_source("feed")
        sample.tag(tags)
    except ObservableValidationError as e:
        logging.error(e)
        return
    except Exception as e:
        logging.error(e)

    try:
        url = Url.get_or_create(value=item['title'])
        url.add_context(context)
        url.add_source("feed")
        url.tag(tags)
        sample.active_link_to(url, 'drops', self.name)
    except ObservableValidationError as e:
        logging.error(e)
        return
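# Self-contained sketch of the freshness cutoff used above, with a hardcoded
# max_age standing in for the yeti_config.get('limits', 'max_age') lookup
# (an assumption for the demo; the date is illustrative).
from datetime import datetime, timedelta

import pytz
from dateutil import parser

max_age = 30  # days
item_date = parser.parse("Tue, 01 Jan 2019 00:00:00 +0000")
limit_date = pytz.UTC.localize(datetime.now()) - timedelta(days=max_age)
print(item_date < limit_date)  # True -> the feed item is too old and is skipped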
def analyze(self, dict):
    url_string = re.search(
        r"URL: (?P<url>\S+),", dict['description']).group('url')
    # feed malformed, nasty hack
    url_string = url_string.replace('http://https://', 'https://')

    context = {}
    date_string = re.search(
        r"\((?P<date>[0-9\-]+)\)", dict['title']).group('date')
    context['date_added'] = datetime.strptime(date_string, "%Y-%m-%d")
    context['status'] = re.search(
        r"status: (?P<status>[^,]+)", dict['description']).group('status')
    context['guid'] = dict['guid']
    context['source'] = self.name

    try:
        context['md5'] = re.search(
            r"MD5 hash: (?P<md5>[a-f0-9]+)", dict['description']).group('md5')
    except AttributeError:
        pass

    try:
        n = Url.get_or_create(value=url_string)
        n.add_context(context)
        n.add_source("feed")
        n.tag(['zeus', 'delivery', 'banker', 'crimeware', 'malware'])
    except ObservableValidationError as e:
        logging.error(e)
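# The URL extraction and malformed-scheme fix above, demonstrated on a
# hypothetical RSS description string (illustrative only, not real feed data).
import re

description = ("URL: http://https://bad.example/gate.php, status: online, "
               "MD5 hash: 0123456789abcdef0123456789abcdef")
url_string = re.search(r"URL: (?P<url>\S+),", description).group('url')
url_string = url_string.replace('http://https://', 'https://')
print(url_string)  # https://bad.example/gate.php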
def analyze(self, dict):
    observable_sample = dict['title']

    context_sample = {
        'description': "Pony sample",
        'date_added': datetime.strptime(dict['pubDate'], "%d-%m-%Y"),
        'source': self.name,
    }

    link_c2 = re.search("https?://[^ ]*", dict['description'].lower()).group()
    observable_c2 = link_c2

    context_c2 = {
        'description': "Pony c2",
        'date_added': datetime.strptime(dict['pubDate'], "%d-%m-%Y"),
        'source': self.name,
    }

    try:
        sample = Hash.get_or_create(value=observable_sample)
        sample.add_context(context_sample)
        sample.add_source("feed")
        sample.tag(['pony', 'objectives'])
    except ObservableValidationError as e:
        logging.error(e)
        return

    try:
        c2 = Url.get_or_create(value=observable_c2)
        c2.add_context(context_c2)
        c2.add_source("feed")
        c2.tag(['c2', 'pony'])
        sample.active_link_to(c2, 'c2', self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
        return
def analyze(self, data):
    if not data or data[0].startswith('phish_id'):
        return

    _, url, phish_detail_url, submission_time, verified, verification_time, online, target = tuple(data)

    tags = ['phishing']

    context = {
        'source': self.name,
        'phish_detail_url': phish_detail_url,
        'submission_time': submission_time,
        'verified': verified,
        'verification_time': verification_time,
        'online': online,
        'target': target,
    }

    if url:
        try:
            url = Url.get_or_create(value=url)
            url.add_context(context)
            url.add_source('feed')
            url.tag(tags)
        except ObservableValidationError as e:
            logging.error(e)
def analyze(self, item):
    url_string = re.search(
        r"URL: (?P<url>\S+),", item['description']).group('url')

    context = {}
    date_string = re.search(
        r"\((?P<date>[0-9\-]+)\)", item['title']).group('date')
    context['date_added'] = datetime.strptime(date_string, "%Y-%m-%d")
    context['status'] = re.search(
        r"status: (?P<status>[^,]+)", item['description']).group('status')
    context['version'] = int(
        re.search(r"version: (?P<version>[^,]+)",
                  item['description']).group('version'))
    context['guid'] = item['guid']
    context['source'] = self.name

    try:
        context['md5'] = re.search(
            r"MD5 hash: (?P<md5>[a-f0-9]+)", item['description']).group('md5')
    except AttributeError:
        pass

    try:
        n = Url.get_or_create(value=url_string)
        n.add_context(context)
        n.add_source("feed")
        n.tag(['zeus', 'c2', 'banker', 'crimeware', 'malware'])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, item):
    if not item or item[0].startswith("#"):
        return

    family, threat, url, url_status, first_seen, first_active, last_active, last_update = item

    context = {
        "first_seen": first_seen,
        "first_active": first_active,
        "last_active": last_active,
        "last_update": last_update,
        "status": url_status,
        "source": self.name,
        "threat": threat,
    }

    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            url_obs.tag([family, 'malware'])
            url_obs.add_context(context)
            url_obs.add_source('feed')
        except ObservableValidationError as e:
            logging.error(e)
def _process_data(json_result, observable):
    links = set()

    for page in json_result:
        if not page.get("page"):
            continue

        # IP iocs have more data than the rest
        if not isinstance(observable, Ip) and page['page'].get('ip'):
            new_ip = Ip.get_or_create(value=page['page']['ip'])
            links.update(
                new_ip.active_link_to(observable, 'ip', 'UrlScanIo Query'))

        if not isinstance(observable, Hostname) and page['page'].get('domain'):
            new_host = Hostname.get_or_create(value=page['page']['domain'])
            links.update(
                new_host.active_link_to(
                    observable, 'hostname', 'UrlScanIo Query'))

        if not isinstance(observable, Url) and page['page'].get('url'):
            new_url = Url.get_or_create(value=page['page']['url'])
            links.update(
                new_url.active_link_to(observable, 'url', 'UrlScanIo Query'))

        links.update(UrlScanIoApi._process_asn_data(page, observable))

    return list(links)
def analyze(self, line):
    if line[0] == 'Number':
        return

    # split the entry into observables
    Number, Status, CC, Host, Port, Protocol, ASN, Last_Updated, First_Seen, Last_Seen, First_Active, Last_Active, SBL, Abuse_Contact, Details = line

    url = "{}://{}".format(Protocol, Host)

    context = {}
    context['status'] = Status
    context['port'] = Port
    context['cc'] = CC
    context['date_added'] = datetime.strptime(First_Seen, "%Y-%m-%d %H:%M:%S")
    context['last_seen'] = datetime.strptime(
        Last_Seen, "%Y-%m-%d %H:%M:%S") if Last_Seen else datetime.utcnow()
    context['sbl'] = SBL
    context['abuse_contact'] = Abuse_Contact
    context['description'] = Details if Details else "N/A"
    context['source'] = self.name

    try:
        url = Url.get_or_create(value=url)
        url.add_context(context)
        url.add_source("feed")
        url.tag(['asprox', 'c2', 'scanner'])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, line): id_feed = line["id"] first_seen = line["dateadded"] url = line["url"] url_status = line["url_status"] threat = line["threat"] tags = line["tags"] urlhaus_link = line["urlhaus_link"] source = line["reporter"] # pylint: disable=line-too-long context = { "id_urlhaus": id_feed, "status": url_status, "source": self.name, "report": urlhaus_link, "threat": threat, "reporter": source, "first_seen": first_seen, } if url: try: url_obs = Url.get_or_create(value=url) url_obs.tag(tags.split(",")) url_obs.add_context(context) url_obs.add_source(self.name) except ObservableValidationError as e: logging.error(e)
def analyze(self, line):
    url_obs = False
    ip_obs = False

    family = line["Family"].lower()
    url = line["URL"]
    ip = line["IP"]
    first_seen = line["FirstSeen"]

    context = {"first_seen": first_seen, "source": self.name}

    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            url_obs.add_context(context)
            url_obs.add_source(self.name)
            url_obs.tag(["c2", family])
        except ObservableValidationError as e:
            logging.error(e)

    if ip:
        try:
            ip_obs = Ip.get_or_create(value=ip)
            ip_obs.add_context(context)
            ip_obs.tag(family)
        except ObservableValidationError as e:
            logging.error(e)

    if url_obs and ip_obs:
        url_obs.active_link_to(ip_obs, "ip", self.name)
def analyze(self, line):
    url_obs = False

    url = line["url"]
    ip = line["ip"]
    family = line["type"]

    context = {}
    context["date_added"] = line["date"]
    context["source"] = self.name

    tags = [family.lower()]

    try:
        if url:
            url_obs = Url.get_or_create(value=url)
            url_obs.add_context(context)
            url_obs.add_source(self.name)
            url_obs.tag(tags)
    except ObservableValidationError as e:
        logging.error(e)

    try:
        if ip:
            ip_obs = Ip.get_or_create(value=ip)
            ip_obs.add_context(context)
            ip_obs.add_source(self.name)
            ip_obs.tag(tags)
            if url_obs:
                ip_obs.active_link_to(
                    url_obs, "url", self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, data):
    if not data or data[0].startswith('phish_id'):
        return

    _, url, phish_detail_url, submission_time, verified, verification_time, online, target = data

    item_date = dateutil.parser.parse(submission_time)
    max_age = yeti_config.get('limits', 'max_age')
    limit_date = pytz.UTC.localize(datetime.now()) - timedelta(days=max_age)
    if item_date < limit_date:
        return

    tags = ['phishing']

    context = {
        'source': self.name,
        'phish_detail_url': phish_detail_url,
        'submission_time': submission_time,
        'verified': verified,
        'verification_time': verification_time,
        'online': online,
        'target': target,
    }

    if url:
        try:
            url = Url.get_or_create(value=url)
            url.add_context(context)
            url.add_source('feed')
            url.tag(tags)
        except ObservableValidationError as e:
            logging.error(e)
def analyze(self, item, pub_date):  # pylint: disable=arguments-differ
    observable_sample = item["title"]

    context_sample = {
        "description": "Pony sample",
        "date_added": pub_date,
        "source": self.name,
    }

    link_c2 = re.search("https?://[^ ]*", item["description"].lower()).group()
    observable_c2 = link_c2

    context_c2 = {
        "description": "Pony c2",
        "date_added": pub_date,
        "source": self.name,
    }

    try:
        sample = Hash.get_or_create(value=observable_sample)
        sample.add_context(context_sample)
        sample.add_source(self.name)
        sample.tag(["pony", "objectives"])
    except ObservableValidationError as e:
        logging.error(e)
        return

    try:
        c2 = Url.get_or_create(value=observable_c2)
        c2.add_context(context_c2)
        c2.add_source(self.name)
        c2.tag(["c2", "pony"])
        sample.active_link_to(c2, "c2", self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
        return
def analyze(self, line, first_seen):
    context = {}
    context['date_added'] = first_seen
    context['source'] = self.name

    _, family, url, ip, first_seen, _ = line

    if not url.startswith(('http://', 'https://')):
        url = "http://" + url

    tags = [family.lower(), "rat"]

    try:
        if url:
            url = Url.get_or_create(value=url)
            url.add_context(context)
            url.add_source(self.name)
            url.tag(tags)
    except ObservableValidationError as e:
        logging.error(e)

    try:
        if ip:
            ip = Ip.get_or_create(value=ip)
            ip.add_context(context)
            ip.add_source(self.name)
            ip.tag(tags)
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, line, first_seen):
    url_obs = False

    _, family, url, ip, first_seen, _ = line

    context = {}
    context['date_added'] = first_seen
    context['source'] = self.name

    tags = [family.lower()]

    try:
        if url:
            url_obs = Url.get_or_create(value=url)
            url_obs.add_context(context)
            url_obs.add_source(self.name)
            url_obs.tag(tags)
    except ObservableValidationError as e:
        logging.error(e)

    try:
        if ip:
            ip_obs = Ip.get_or_create(value=ip)
            ip_obs.add_context(context)
            ip_obs.add_source(self.name)
            ip_obs.tag(tags)
            if url_obs:
                ip_obs.active_link_to(
                    url_obs, "url", self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, line):
    id_feed = line['id']
    first_seen = line['dateadded']
    url = line['url']
    url_status = line['url_status']
    threat = line['threat']
    tags = line['tags']
    urlhaus_link = line['urlhaus_link']
    source = line['reporter']

    context = {
        "id_urlhaus": id_feed,
        "status": url_status,
        "source": self.name,
        "report": urlhaus_link,
        "threat": threat,
        "reporter": source,
        "first_seen": first_seen,
    }

    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            url_obs.tag(tags.split(','))
            url_obs.add_context(context)
            url_obs.add_source(self.name)
        except ObservableValidationError as e:
            logging.error(e)
def analyze(self, dict):
    observable_sample = dict['title']

    context_sample = {
        'description': "Atmos sample",
        'date_added': datetime.strptime(dict['pubDate'], "%d-%m-%Y"),
        'source': self.name,
    }

    link_c2 = re.search(
        "<a href[^>]+>(?P<url>[^<]+)",
        dict['description'].lower()).group("url")
    observable_c2 = link_c2

    context_c2 = {
        'description': "Atmos c2",
        'date_added': datetime.strptime(dict['pubDate'], "%d-%m-%Y"),
        'source': self.name,
    }

    try:
        sample = Hash.get_or_create(value=observable_sample)
        sample.add_context(context_sample)
        sample.add_source("feed")
        sample.tag(['atmos', 'objectives'])
    except ObservableValidationError as e:
        logging.error(e)
        return

    try:
        c2 = Url.get_or_create(value=observable_c2)
        c2.add_context(context_c2)
        c2.add_source("feed")
        c2.tag(['c2', 'atmos'])
        sample.active_link_to(c2, 'c2', self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
        return
def analyze(self, line):
    first_seen = line['Firstseen']
    dst_ip = line['DstIP']
    port = line['DstPort']
    ip_obs = False

    tags = ["potentially_malicious_infrastructure", "c2"]

    context = dict(source=self.name)
    context["first_seen"] = first_seen

    try:
        ip_obs = Ip.get_or_create(value=dst_ip)
        ip_obs.add_source(self.name)
        ip_obs.tag(tags)
        ip_obs.add_context(context)
    except ObservableValidationError as e:
        logging.error(e)
        return False

    try:
        _url = "https://{dst_ip}:{port}/".format(dst_ip=dst_ip, port=port)
        url = Url.get_or_create(value=_url)
        url.add_source(self.name)
        url.tag(tags)
        url.add_context(context)
        if ip_obs:
            url.active_link_to(ip_obs, 'ip', self.name)
    except ObservableValidationError as e:
        logging.error(e)
        return False
def analyze(self, dict):
    observable_sample = dict["title"]

    context_sample = {
        "description": "Pony sample",
        "date_added": datetime.strptime(dict["pubDate"], "%d-%m-%Y"),
        "source": self.name,
    }

    link_c2 = re.search("https?://[^ ]*", dict["description"].lower()).group()
    observable_c2 = link_c2

    context_c2 = {
        "description": "Pony c2",
        "date_added": datetime.strptime(dict["pubDate"], "%d-%m-%Y"),
        "source": self.name,
    }

    try:
        sample = Hash.get_or_create(value=observable_sample)
        sample.add_context(context_sample)
        sample.add_source("feed")
        sample.tag(["pony", "objectives"])
    except ObservableValidationError as e:
        logging.error(e)
        return

    try:
        c2 = Url.get_or_create(value=observable_c2)
        c2.add_context(context_c2)
        c2.add_source("feed")
        c2.tag(["c2", "pony"])
        sample.active_link_to(c2, "c2", self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
        return
def analyze(self, item):
    url_string = re.search(
        r"URL: (?P<url>\S+),", item['description']).group('url')

    context = {}
    date_string = re.search(
        r"\((?P<date>[0-9\-]+)\)", item['title']).group('date')
    context['date_added'] = datetime.strptime(date_string, "%Y-%m-%d")
    context['status'] = re.search(
        r"status: (?P<status>[^,]+)", item['description']).group('status')
    context['version'] = int(
        re.search(r"version: (?P<version>[^,]+)",
                  item['description']).group('version'))
    context['guid'] = item['guid']
    context['source'] = self.name

    try:
        context['md5'] = re.search(
            r"MD5 hash: (?P<md5>[a-f0-9]+)", item['description']).group('md5')
    except AttributeError:
        pass

    try:
        n = Url.get_or_create(value=url_string)
        n.add_context(context)
        n.add_source("feed")
        n.tag(['zeus', 'c2', 'banker', 'crimeware', 'malware'])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, item):
    if not item or item[0].startswith("#"):
        return

    id_feed, dateadded, url, url_status, threat, tags, urlhaus_link, reporter = item

    context = {
        "id_urlhaus": id_feed,
        "first_seen": dateadded,
        "status": url_status,
        "source": self.name,
        "urlhaus_link": urlhaus_link,
        "reporter": reporter,
        "threat": threat,
    }

    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            url_obs.tag(tags.split(','))
            url_obs.add_context(context)
            url_obs.add_source('feed')
        except ObservableValidationError as e:
            logging.error(e)
def analyze(observable, results):
    links = set()

    # the Hostname and Ip paths only differ by the API endpoint queried
    if isinstance(observable, Hostname):
        endpoint = "domain.php"
    elif isinstance(observable, Ip):
        endpoint = "host.php"
    else:
        return list(links)

    params = {"q": observable.value, "rt": 3}
    json_result = ThreatMinerApi.fetch(observable, params, endpoint)
    _results, result = aux_checker(json_result)

    for r in _results:
        try:
            o_url = Url.get_or_create(value=r.get("uri"))
            o_url.tag(observable.get_tags())
            links.update(
                observable.link_to(
                    o_url,
                    description="related url",
                    source="ThreatMiner",
                    last_seen=r["last_seen"],
                ))
        except ObservableValidationError as e:
            logging.error("Caught an exception: {}".format(e))

    observable.add_context(result)
    return list(links)
def analyze(self, line):
    md5_obs = False
    sha256_obs = False
    url_obs = False
    malware_file = False

    context = {'source': self.name}

    first_seen, url, filetype, md5, sha256, signature = line

    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            if signature != 'None':
                url_obs.tag(signature)
            url_obs.add_context(context)
            url_obs.add_source(self.name)
        except ObservableValidationError as e:
            logging.error(e)

    if sha256:
        try:
            malware_file = File.get_or_create(value='FILE:{}'.format(sha256))
            malware_file.add_context(context)
            malware_file.tag(filetype)

            sha256_obs = Hash.get_or_create(value=sha256)
            sha256_obs.tag(filetype)
            sha256_obs.add_context(context)
            if signature != 'None':
                sha256_obs.tag(signature)
        except ObservableValidationError as e:
            logging.error(e)

    if md5:
        try:
            md5_obs = Hash.get_or_create(value=md5)
            md5_obs.add_context(context)
            md5_obs.tag(filetype)
            if signature != 'None':
                md5_obs.tag(signature)
        except ObservableValidationError as e:
            logging.error(e)

    if malware_file:
        if signature != 'None':
            malware_file.tag(signature)
        if md5_obs:
            malware_file.active_link_to(md5_obs, 'md5', self.name)
        if sha256_obs:
            malware_file.active_link_to(sha256_obs, 'sha256', self.name)
        if url_obs:
            url_obs.active_link_to(malware_file, 'drops', self.name)
def analyze(observable, results):
    links = set()
    context = {
        "source": "malwares.com",
    }
    params = {
        "api_key": results.settings["malwares_api_key"],
        "hostname": observable.value,
    }

    json_result = MalwaresApi.fetch(observable, params, "hostname/info")
    if json_result:
        context["raw"] = json_result

        for key in (
                "undetected_communicating_file",
                "detected_downloaded_file",
                "undetected_downloaded_file",
                "detected_communicating_file",
        ):
            item = json_result[key]
            for f in item["list"]:
                new_hash = Hash.get_or_create(value=f["sha256"])
                links.update(
                    new_hash.active_link_to(observable, key, "malwares.com"))
            context[key] = item["total"]

        for host in json_result.get("hostname_history", {}).get("list", []):
            new_host = Hostname.get_or_create(value=host)
            new_host.tag(observable.get_tags())
            links.update(
                new_host.active_link_to(observable, "hostname", "malwares.com"))

        for key in ("detected_url", "undetected_url"):
            item = json_result[key]
            for i in item["list"]:
                try:
                    new_url = Url.get_or_create(value=i["url"])
                    new_url.tag(observable.get_tags())
                    links.update(
                        new_url.active_link_to(observable, key, "malwares.com"))
                except ObservableValidationError:
                    logging.error("Url is not valid %s" % i["url"])
            context[key] = item["total"]

        ip_history = json_result["ip_history"]
        for i in ip_history["list"]:
            ip = Ip.get_or_create(value=i["ip"])
            links.update(
                ip.active_link_to(observable, "ip_story", "malwares.com"))
        context["ip_story"] = ip_history["total"]

        # attach the collected per-relation totals to the observable
        observable.add_context(context)

    return links
def analyze(self, url):
    try:
        url_data = Url.get_or_create(value=url)
        url_data.normalize()
        url_data.tag(["payload_delivery"])
        url_data.add_source(self.name)
    except ObservableValidationError as e:
        logging.error(e)
def analyze(observable, results):
    links = set()
    context = {
        "source": "malwares.com",
    }
    params = {
        "api_key": results.settings["malwares_api_key"],
        "ip": observable.value,
    }

    json_result = MalwaresApi.fetch(observable, params, "ip/info")

    for key in (
            "undetected_communicating_file",
            "detected_downloaded_file",
            "undetected_downloaded_file",
            "detected_communicating_file",
    ):
        for item in json_result[key]["list"]:
            h = Hash.get_or_create(value=item["sha256"])
            links.update(h.active_link_to(observable, key, context["source"]))
            h.add_context({
                "source": context["source"],
                "first_seen": item["date"],
            })
        context[key] = json_result[key]["total"]

    for key in ("detected_url", "undetected_url"):
        for item in json_result[key]["list"]:
            url = Url.get_or_create(value=item["url"])
            links.update(
                url.active_link_to(observable, key, context["source"]))
            url.add_context({
                "source": context["source"],
                "first_seen": item["date"],
            })
        context[key] = json_result[key]["total"]

    hostname_history = json_result["hostname_history"]
    for item in hostname_history["list"]:
        try:
            hostname = Hostname.get_or_create(value=item["hostname"])
            links.update(
                hostname.active_link_to(observable, "hostname_history",
                                        context["source"]))
            hostname.add_context({
                "source": context["source"],
                "first_seen": item["date"],
            })
        except ObservableValidationError:
            logging.error("%s is not a valid hostname" % item["hostname"])
    context["hostname_history"] = hostname_history["total"]

    observable.add_context(context)
    return links
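# Hedged sketch of the response shape the loops above assume, inferred from
# the field accesses (each relation key holds a "list" of entries plus a
# "total" count); the values are illustrative, not real API output.
json_result = {
    "detected_downloaded_file": {
        "total": 1,
        "list": [{
            "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
            "date": "2019-01-01",
        }],
    },
}
for entry in json_result["detected_downloaded_file"]["list"]:
    print(entry["sha256"], entry["date"])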
def _make_threat_nodes(threat, context, tags):
    # extract Url and Hash info
    threats = dict()

    if threat['threatStatus'] != 'active':
        # FIXME, clear out false positive ?
        log.warning("threatStatus %s for threat %s",
                    threat['threatStatus'], threat['threatID'])
        log.debug(pprint.pformat(threat))
        return None

    log.debug('_make_threat_nodes for threat %s', threat['threatID'])

    # threattype, classification
    # url, phish: url leads to phishing page (threat is url)
    # url, malware: url leads to malware download (threat is url, threatid is maybe sha256)
    # attachment, malware: attachment is malware (threat is sha256)
    # spam, url
    if threat['threatType'] == 'url':
        if threat['classification'] == 'phish':
            pass  # just keep the url
        elif threat['classification'] == 'malware':
            # get url and hash
            threats['attachment'] = threat
        elif threat['classification'] == 'spam':
            log.info('URL threat - ignore classification %s',
                     threat['classification'])
        else:
            log.error('Type: url, Unsupported classification %s',
                      threat['classification'])
            log.debug(pprint.pformat(threat))
            return None
        threats['url'] = threat
    elif threat['threatType'] == 'attachment':
        if threat['classification'] == 'malware':
            threats['attachment'] = threat
        else:
            log.error('Type: attachment, Unsupported classification %s',
                      threat['classification'])
            log.debug(pprint.pformat(threat))
            return None
    else:
        log.error('Unsupported threatType %s classification %s',
                  threat['threatType'], threat['classification'])
        log.debug(pprint.pformat(threat))
        return None

    # FIXME check if they exist already.
    # if they do, do not parse the threat a second time ?
    threat_nodes = []
    if 'url' in threats:
        threat_nodes.append(
            Url.get_or_create(value=threats['url']['threat'],
                              context=[context]))
    if 'attachment' in threats:
        threat_nodes.append(
            Hash.get_or_create(value=threats['attachment']['threatID'],
                               context=[context]))

    for o in threat_nodes:
        o.tag([t['name'] for t in tags])

    return threat_nodes
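# Condensed, hypothetical view of the (threatType, classification) routing
# above, as a plain helper (not part of the source) that returns which
# observable nodes would be created for each combination.
def _nodes_for(threat_type, classification):
    if threat_type == 'url':
        if classification == 'phish':
            return ['url']
        if classification == 'malware':
            return ['url', 'attachment']
        # spam keeps the url; anything else is unsupported
        return ['url'] if classification == 'spam' else []
    if threat_type == 'attachment' and classification == 'malware':
        return ['attachment']
    return []

print(_nodes_for('url', 'malware'))        # ['url', 'attachment']
print(_nodes_for('attachment', 'malware'))  # ['attachment']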
def analyze(self, url): context = {"source": self.name} try: url = Url.get_or_create(value=url) url.add_context(context) url.add_source(self.name) url.tag(["phishing"]) except ObservableValidationError as e: logging.error(e)
def analyze(self, url):
    context = {'source': self.name}

    try:
        url = Url.get_or_create(value=url)
        url.add_context(context)
        url.add_source('feed')
        url.tag(['phishing'])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, data):
    if data.startswith('http'):
        tags = ['malware']
        context = {'source': self.name}

        try:
            url = Url.get_or_create(value=data.rstrip())
            url.add_context(context)
            url.add_source('feed')
            url.tag(tags)
        except ObservableValidationError as e:
            logging.error(e)
def analyze(self, data):
    if data.startswith('http'):
        tags = ['malware']
        context = {'source': self.name}

        try:
            url = Url.get_or_create(value=data.rstrip())
            url.add_context(context)
            url.add_source(self.name)
            url.tag(tags)
        except ObservableValidationError as e:
            logging.error(e)
def analyze(self, url):
    context = {'source': self.name}

    try:
        if len(url) > 1023:
            logging.info('URL is too long for mongo db. url=%s' % str(url))
        else:
            url = Url.get_or_create(value=url)
            url.add_context(context)
            url.add_source('feed')
            url.tag(['phishing'])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, line):
    if not line or line[0].startswith("#"):
        return

    try:
        ip, domain, family, md5, link, date = tuple(map(str.strip, line))

        context = {
            "first_seen": date,
            "family": family,
            "report": link,
            "source": self.name,
        }

        c2 = None
        sample = None

        try:
            sample = Hash.get_or_create(value=md5)
            sample.add_context(context)
            sample.tag(family.lower())
        except ObservableValidationError as e:
            logging.error("Invalid line: {}\nLine: {}".format(e, line))

        try:
            if domain:
                if '/' in domain:
                    c2 = Url.get_or_create(value=domain)
                else:
                    c2 = Hostname.get_or_create(value=domain)
            elif ip:
                c2 = Ip.get_or_create(value=ip)
            else:
                return
            c2.add_context(context)
            c2.tag(['c2', family.lower()])
        except ObservableValidationError as e:
            logging.error("Invalid line: {}\nLine: {}".format(e, line))

        if c2 and sample:
            sample.active_link_to(c2, 'c2', self.name, clean_old=False)
    except ValueError:
        logging.error("Error unpacking line: {}".format(line))
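# The c2 dispatch rule above, isolated as a self-contained sketch
# (hypothetical helper and values): anything containing '/' is treated
# as a URL, any other domain as a hostname, and the IP column is only
# used when the domain column is empty.
def _pick_c2_kind(domain, ip):
    if domain:
        return "url" if "/" in domain else "hostname"
    return "ip" if ip else None

assert _pick_c2_kind("bad.example/panel.php", "") == "url"
assert _pick_c2_kind("bad.example", "198.51.100.7") == "hostname"
assert _pick_c2_kind("", "198.51.100.7") == "ip"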
def analyze(self, dict):
    # Create the new URL and store it in the DB
    url = re.search(r"Host: (?P<url>[^,]+),", dict['description']).group('url')

    context = {}
    context['source'] = self.name
    context['description'] = re.search(
        r"Description: (?P<description>.*)$",
        dict['description']).group('description')
    context['guid'] = dict['guid']

    date_string = re.search(r"\((?P<date>.*)\)", dict['title']).group('date')
    context['date_added'] = datetime.strptime(date_string, "%Y/%m/%d_%H:%M")

    try:
        url = Url.get_or_create(value=url)
        url.add_context(context)
        url.add_source("feed")
        url.tag(['malware', 'crimeware'])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, data):
    if data.startswith('http'):
        if len(data) > 1023:
            logging.info('URL is too long for mongo db. url=%s' % str(data))
        else:
            tags = ['malware']
            context = {'source': self.name}

            try:
                url = Url.get_or_create(value=data)
                url.add_context(context)
                url.add_source('feed')
                url.tag(tags)
            except ObservableValidationError as e:
                logging.error(e)
def analyze(self, line):
    if not line or line[0].startswith("#"):
        return

    date, _type, family, hostname, url, status, registrar, ips, asns, countries = tuple(line)

    tags = []
    tags += TYPE_DICT[_type]
    tags.append(family.lower())

    context = {
        "first_seen": date,
        "status": status,
        "registrar": registrar,
        "countries": countries.split("|"),
        "asns": asns.split("|"),
        "source": self.name,
    }

    try:
        url = Url.get_or_create(value=url.rstrip())
        url.add_context(context)
        url.tag(tags)

        hostname = Observable.add_text(hostname)
        hostname.tag(tags + ['blocklist'])

        for ip in ips.split("|"):
            if ip != hostname and ip is not None and ip != '':
                try:
                    i = Ip.get_or_create(value=ip)
                    i.active_link_to(
                        hostname, "First seen IP", self.name, clean_old=False)
                except ObservableValidationError as e:
                    logging.error("Invalid Observable: {}".format(e))
    except ObservableValidationError as e:
        logging.error("Invalid line: {}\nLine: {}".format(e, line))
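# Hypothetical row in the column order the unpack above expects (the
# values are illustrative, not real feed data).
line = ["2019-01-01", "malware_download", "Gozi", "bad.example",
        "http://bad.example/drop.exe", "online", "registrar-x",
        "198.51.100.7|203.0.113.9", "64496|64511", "US|DE"]
date, _type, family, hostname, url, status, registrar, ips, asns, countries = tuple(line)
print(ips.split("|"))        # ['198.51.100.7', '203.0.113.9']
print(countries.split("|"))  # ['US', 'DE']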
def analyze(self, dict):
    g = re.match(
        r'^URL: (?P<url>.+), IP Address: (?P<ip>[\d.]+), Country: (?P<country>[A-Z]{2}), ASN: (?P<asn>\d+), MD5: (?P<md5>[a-f0-9]+)$',
        dict['description'])

    if g:
        context = g.groupdict()
        context['link'] = dict['link']
        context['source'] = self.name

        try:
            url_string = context.pop('url')
            context['description'] = dict['description'].encode('UTF-8')
            url = Url.get_or_create(value=url_string)
            url.add_context(context)
            url.add_source("feed")
            url.tag(['malware', 'delivery'])
        except UnicodeError:
            sys.stderr.write('Unicode error: %s' % dict['description'])
        except ObservableValidationError as e:
            logging.error(e)
        except Exception as e:
            logging.error("UNKNOWN EXCEPTION: {}".format(e))
def analyze(self, dict):
    g = re.match(
        r"^URL: (?P<url>.+), IP Address: (?P<ip>[\d.]+), Country: (?P<country>[A-Z]{2}), ASN: (?P<asn>\d+), MD5: (?P<md5>[a-f0-9]+)$",
        dict["description"],
    )

    if g:
        context = g.groupdict()
        context["link"] = dict["link"]
        context["source"] = self.name

        try:
            url_string = context.pop("url")
            context["description"] = dict["description"].encode("UTF-8")
            url = Url.get_or_create(value=url_string)
            url.add_context(context)
            url.add_source("feed")
            url.tag(["malware", "delivery"])
        except UnicodeError:
            sys.stderr.write("Unicode error: %s" % dict["description"])
        except ObservableValidationError as e:
            logging.error(e)
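# The description regex above, demonstrated on a hypothetical string in
# the expected format (illustrative only, not real feed data).
import re

desc = ("URL: http://bad.example/dl.exe, IP Address: 198.51.100.23, "
        "Country: US, ASN: 64496, MD5: 0123456789abcdef0123456789abcdef")
pattern = (r"^URL: (?P<url>.+), IP Address: (?P<ip>[\d.]+), "
           r"Country: (?P<country>[A-Z]{2}), ASN: (?P<asn>\d+), "
           r"MD5: (?P<md5>[a-f0-9]+)$")
print(re.match(pattern, desc).groupdict())
# {'url': 'http://bad.example/dl.exe', 'ip': '198.51.100.23',
#  'country': 'US', 'asn': '64496', 'md5': '0123456789abcdef0123456789abcdef'}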
def analyze(self, dict):
    url_string = re.search(
        r"URL: (?P<url>\S+),", dict['description']).group('url')
    # feed malformed, nasty hack
    url_string = url_string.replace('http://https://', 'https://')

    context = {}
    date_string = re.search(
        r"\((?P<date>[0-9\-]+)\)", dict['title']).group('date')
    context['date_added'] = datetime.strptime(date_string, "%Y-%m-%d")
    context['status'] = re.search(
        r"status: (?P<status>[^,]+)", dict['description']).group('status')
    context['guid'] = dict['guid']
    context['source'] = self.name

    try:
        context['md5'] = re.search(
            r"MD5 hash: (?P<md5>[a-f0-9]+)", dict['description']).group('md5')
    except AttributeError:
        pass

    try:
        n = Url.get_or_create(value=url_string)
        n.add_context(context)
        n.add_source("feed")
        n.tag(['zeus', 'delivery', 'banker', 'crimeware', 'malware'])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, dict):
    url_string = re.search(
        r"URL: (?P<url>\S+),", dict["description"]).group("url")

    context = {}
    date_string = re.search(
        r"\((?P<date>[0-9\-]+)\)", dict["title"]).group("date")
    context["date_added"] = datetime.strptime(date_string, "%Y-%m-%d")
    context["status"] = re.search(
        r"status: (?P<status>[^,]+)", dict["description"]).group("status")
    context["guid"] = dict["guid"]
    context["source"] = self.name

    try:
        context["md5"] = re.search(
            r"MD5 hash: (?P<md5>[a-f0-9]+)", dict["description"]).group("md5")
    except AttributeError:
        pass

    try:
        n = Url.get_or_create(value=url_string)
        n.add_context(context)
        n.add_source("feed")
        n.tag(["zeus", "objective", "banker", "crimeware", "malware"])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(observable, results):
    links = set()

    json_result = MalshareAPI.fetch(
        observable, results.settings['malshare_api_key'])
    json_string = json.dumps(
        json_result, sort_keys=True, indent=4, separators=(',', ': '))
    results.update(raw=json_string)
    result = {'raw': json_string}

    if 'SOURCES' in json_result:
        for source in json_result['SOURCES']:
            new_url = None
            try:
                new_url = Url.get_or_create(value=source.strip())
                links.update(
                    observable.active_link_to(new_url, 'c2', 'malshare_query'))
            except ObservableValidationError:
                logging.error(
                    "An error occurred when trying to add {} to the database".format(
                        source.strip()))
        result['nb C2'] = len(json_result['SOURCES'])

    try:
        new_hash = Hash.get_or_create(value=json_result['MD5'])
        links.update(
            new_hash.active_link_to(observable, 'md5', 'malshare_query'))

        new_hash = Hash.get_or_create(value=json_result['SHA1'])
        links.update(
            new_hash.active_link_to(observable, 'sha1', 'malshare_query'))

        new_hash = Hash.get_or_create(value=json_result['SHA256'])
        links.update(
            new_hash.active_link_to(observable, 'sha256', 'malshare_query'))
    except ObservableValidationError:
        logging.error(
            "An error occurred when trying to add hashes {} to the database".format(
                json_string))

    return list(links)
def analyze(self, item):
    if not item or item[0].startswith("#"):
        return

    id_feed, dateadded, url, url_status, threat, tags, urlhaus_link = item

    context = {
        "id_urlhaus": id_feed,
        "first_seen": dateadded,
        "status": url_status,
        "source": self.name,
        "report": urlhaus_link,
        "threat": threat,
    }

    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            url_obs.tag(tags.split(','))
            url_obs.add_context(context)
            url_obs.add_source('feed')
        except ObservableValidationError as e:
            logging.error(e)
def _get_threat_forensics_nodes_inner(self, evidence, general_context, tags):
    # create context from notes
    context = general_context.copy()
    _ctx = self._make_context_from_notes([evidence])
    context.update(_ctx)

    # add evidence['type'] and de-duplicate tags
    tags = [{'name': _}
            for _ in set([evidence['type']] + [d['name'] for d in tags])]

    # create Tags in DB
    for _ in tags:
        Tag.get_or_create(name=_['name'])

    threat_forensics = []

    # technical hack: set optional comments values
    for optional in ['action', 'rule', 'path']:
        if optional not in evidence['what']:
            evidence['what'][optional] = None

    # add attributes for the known evidence type
    if evidence['type'] in ['file', 'dropper']:
        if 'path' in evidence['what']:
            threat_forensics.append(
                File.get_or_create(
                    value=evidence['what']['path'], context=[context]))
        if 'md5' in evidence['what']:
            threat_forensics.append(
                Hash.get_or_create(
                    value=evidence['what']['md5'], context=[context]))
        if 'sha256' in evidence['what']:
            threat_forensics.append(
                Hash.get_or_create(
                    value=evidence['what']['sha256'], context=[context]))
    elif evidence['type'] == 'cookie':
        pass
    elif evidence['type'] == 'dns':
        threat_forensics.append(
            Hostname.get_or_create(
                value=evidence['what']['host'], context=[context]))
    elif evidence['type'] == 'ids':
        threat_forensics.append(
            Text.get_or_create(
                value=evidence['what']['ids'], context=[context]))
    elif evidence['type'] == 'mutex':
        threat_forensics.append(
            Text.get_or_create(
                value=evidence['what']['name'], context=[context]))
    elif evidence['type'] == 'network':
        if 'ip' in evidence['what']:
            # FIXME port, type
            threat_forensics.append(
                Ip.get_or_create(
                    value=evidence['what']['ip'], context=[context]))
        elif 'domain' in evidence['what']:
            threat_forensics.append(
                Hostname.get_or_create(
                    value=evidence['what']['domain'], context=[context]))
    elif evidence['type'] == 'process':
        pass
    elif evidence['type'] == 'registry':
        # threat_forensics.append(evidence['what']['key'])
        # threat_forensics.append(evidence['what']['value'])
        pass
    elif evidence['type'] == 'url':
        # BUG yeti-#115 ObservableValidationError: Invalid URL: http://xxxxx-no-tld/
        threat_forensics.append(
            Url.get_or_create(
                value=evidence['what']['url'], context=[context]))
        # add note as tag because it's a signature
        if 'note' in evidence:
            threat_forensics[-1].tag(
                evidence['note'].replace('.', '_').strip('_'))

    # tag all of that
    for o in threat_forensics:
        o.tag([t['name'] for t in tags])

    return threat_forensics
def analyze(observable, results):
    links = set()

    json_result = VirustotalApi.fetch(
        observable, results.settings['virustotal_api_key'])
    json_string = json.dumps(
        json_result, sort_keys=True, indent=4, separators=(',', ': '))
    results.update(raw=json_string)
    result = {'raw': json_string}

    if isinstance(observable, Ip):
        # Parse results for ip
        if json_result.get('as_owner'):
            result['Owner'] = json_result['as_owner']
            o_isp = Company.get_or_create(name=json_result['as_owner'])
            links.update(
                observable.active_link_to(o_isp, 'hosting', 'virustotal_query'))

        if json_result.get('detected_urls'):
            result['detected_urls'] = json_result['detected_urls']
            for detected_url in json_result['detected_urls']:
                o_url = Url.get_or_create(value=detected_url['url'])
                # link each detected URL back to the observable
                links.update(
                    o_url.active_link_to(
                        observable, 'hostname', 'virustotal_query'))

    elif isinstance(observable, Hostname):
        if json_result.get('permalink'):
            result['permalink'] = json_result['permalink']

        result['positives'] = json_result.get('positives', 0)

        if json_result.get('total'):
            result['total'] = json_result['total']

    elif isinstance(observable, Hash):
        result['positives'] = json_result['positives']

        if 'permalink' in json_result:
            result['permalink'] = json_result['permalink']

        if 'total' in json_result:
            result['total'] = json_result['total']

        hashes = {
            'md5': json_result['md5'],
            'sha1': json_result['sha1'],
            'sha256': json_result['sha256'],
        }

        create_hashes = [
            (k, v) for k, v in hashes.items() if v != observable.value
        ]

        for k, v in create_hashes:
            new_hash = Hash.get_or_create(value=v)
            new_hash.tag(observable.get_tags())
            links.update(
                new_hash.active_link_to(observable, k, 'virustotal_query'))

    result['source'] = 'virustotal_query'
    observable.add_context(result)
    return list(links)
def analyze(self, item):
    context = item

    date_string = re.search(
        r"\((?P<datetime>[\d\- :]+)\)", context['title']).group('datetime')
    try:
        context['date_added'] = datetime.strptime(
            date_string, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        pass

    g = re.match(
        r'^Host: (?P<host>.+), Version: (?P<version>\w)',
        context['description'])
    g = g.groupdict()
    context['version'] = g['version']
    context['description'] = FeodoTracker.descriptions[g['version']]
    context['subfamily'] = FeodoTracker.variants[g['version']]
    context['source'] = self.name
    del context['title']

    new = None
    variant_tag = FeodoTracker.variants[g['version']].lower()

    try:
        if re.search(r"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}",
                     g['host']):
            new = Ip.get_or_create(value=g['host'])
        else:
            new = Hostname.get_or_create(value=g['host'])
        new.add_context(context)
        new.add_source("feed")
        new.tag([variant_tag, 'malware', 'crimeware', 'banker', 'c2'])
    except ObservableValidationError as e:
        logging.error(e)

    try:
        url_feodo = context['guid']
        r = requests.get(url_feodo)
        if r.status_code == 200:
            html_source = r.text
            soup = BeautifulSoup(html_source, 'html.parser')
            tab = soup.find('table', attrs='sortable')

            results = []
            if tab:
                all_tr = tab.find_all('tr')
                for tr in all_tr:
                    all_td = tr.find_all('td')
                    if all_td and len(all_td) == 7:
                        results.append({
                            'timestamp': all_td[0].text,
                            'md5_hash': all_td[1].text,
                            'filesize': all_td[2].text,
                            'VT': all_td[3].text,
                            'Host': all_td[4].text,
                            'Port': all_td[5].text,
                            'SSL Certif or method': all_td[6].text,
                        })

            for r in results:
                new_hash = Hash.get_or_create(value=r['md5_hash'])
                new_hash.add_context(context)
                new_hash.add_source('feed')
                new_hash.tag(
                    [variant_tag, 'malware', 'crimeware', 'banker', 'payload'])
                new_hash.active_link_to(new, 'c2', self.name, clean_old=False)

                host = Url.get_or_create(
                    value='https://%s:%s' % (g['host'], r['Port']))
                host.add_source('feed')
                host.add_context(context)
                host.tag([variant_tag, 'malware', 'crimeware', 'banker', 'c2'])
                new_hash.active_link_to(host, 'c2', self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
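# The IP-vs-hostname routing above, isolated on illustrative values
# (self-contained sketch, not part of the source).
import re

for host in ("198.51.100.7", "c2.bad.example"):
    if re.search(r"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}", host):
        print(host, "-> Ip")        # dotted-quad hosts become Ip observables
    else:
        print(host, "-> Hostname")  # everything else becomes a Hostname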