def analyze(self, line):
    """Parse one CSV line of the Bambenek OSINT IP master feed.

    Expected format (6 comma-separated tokens): C2 domain, C2 IPs
    ('|'-separated), name servers ('|'-separated), NS IPs
    ('|'-separated), description, link. Creates and links the
    corresponding Hostname/Ip observables.
    """
    # Skip empty lines and comments.
    if not line or line[0].startswith("#"):
        return
    tokens = line.split(',')
    if len(tokens) != 6:
        logging.error('Parsing error in line: %s' % line)
        return
    c2_domain = tokens[0]
    ips_c2 = tokens[1].split('|')
    names_servers = tokens[2].split('|')
    ip_names_servers = tokens[3].split('|')
    context_feed = tokens[4]
    m = BambenekOsintIpmaster.reg.match(context_feed)
    malware_family = m.group(1) if m else ''
    context = {
        "status": context_feed,
        "name servers": names_servers,
        "source": self.name
    }
    tags = [malware_family]
    c2 = None
    if c2_domain:
        c2 = Hostname.get_or_create(value=c2_domain)
        c2.add_context(context)
        c2.tag(tags)
        c2.add_source('feed')
    for ip in ips_c2:
        if ip:
            ip_obs = Ip.get_or_create(value=ip)
            ip_obs.tag(tags)
            ip_obs.add_source('feed')
            if c2:
                c2.active_link_to(ip_obs, "IP", self.source)
    for name_server in names_servers:
        if name_server:
            ns_obs = Hostname.get_or_create(value=name_server)
            # BUG FIX: the original called c2.active_link_to
            # unconditionally here, raising AttributeError whenever the
            # domain column was empty (c2 is None in that case).
            if c2:
                c2.active_link_to(ns_obs, 'NS', self.source)
            ns_obs.tag(tags)
            ns_obs.add_context(context)
            ns_obs.add_source('feed')
    for ip_ns in ip_names_servers:
        if ip_ns:
            ip_ns_obs = Ip.get_or_create(value=ip_ns)
            # Same guard as above: c2 may be None.
            if c2:
                c2.active_link_to(ip_ns_obs, 'IP NS', self.source)
            ip_ns_obs.tag(tags)
            ip_ns_obs.add_context(context)
            ip_ns_obs.add_source('feed')
def analyze(self, line):
    """Parse one CSV line of the Bambenek OSINT IP master feed.

    Expected format (6 comma-separated tokens): C2 domain, C2 IPs
    ('|'-separated), name servers ('|'-separated), NS IPs
    ('|'-separated), description, link. Creates and links the
    corresponding Hostname/Ip observables.
    """
    # FIX: skip empty/comment lines instead of logging them as parsing
    # errors (consistent with the sibling implementation of this feed).
    if not line or line[0].startswith("#"):
        return
    tokens = line.split(",")
    if len(tokens) != 6:
        logging.error("Parsing error in line: %s" % line)
        return
    c2_domain = tokens[0]
    ips_c2 = tokens[1].split("|")
    names_servers = tokens[2].split("|")
    ip_names_servers = tokens[3].split("|")
    context_feed = tokens[4]
    m = BambenekOsintIpmaster.reg.match(context_feed)
    malware_family = m.group(1) if m else ""
    context = {
        "status": context_feed,
        "name servers": names_servers,
        "source": self.name,
    }
    tags = [malware_family]
    c2 = None
    if c2_domain:
        c2 = Hostname.get_or_create(value=c2_domain)
        c2.add_context(context)
        c2.tag(tags)
        c2.add_source("feed")
    for ip in ips_c2:
        if ip:
            ip_obs = Ip.get_or_create(value=ip)
            ip_obs.tag(tags)
            ip_obs.add_source(self.name)
            if c2:
                c2.active_link_to(ip_obs, "IP", self.source)
    for name_server in names_servers:
        if name_server:
            ns_obs = Hostname.get_or_create(value=name_server)
            # BUG FIX: c2 may be None when the domain column is empty;
            # the original dereferenced it unconditionally here.
            if c2:
                c2.active_link_to(ns_obs, "NS", self.source)
            ns_obs.tag(tags)
            ns_obs.add_context(context)
            ns_obs.add_source(self.name)
    for ip_ns in ip_names_servers:
        if ip_ns:
            ip_ns_obs = Ip.get_or_create(value=ip_ns)
            if c2:
                c2.active_link_to(ip_ns_obs, "IP NS", self.source)
            ip_ns_obs.tag(tags)
            ip_ns_obs.add_context(context)
            ip_ns_obs.add_source(self.name)
def _process_data(json_result, observable):
    """Create Ip/Hostname/Url observables from urlscan.io result pages
    and link each back to *observable*.

    FIX: the original built the ``links`` set but fell off the end
    without returning it, discarding every link it created; it now
    returns the set (backward compatible — callers ignoring the return
    value are unaffected).
    """
    links = set()
    for page in json_result:
        if not page.get("page"):
            continue
        # IP iocs has more data than the rest
        if not isinstance(observable, Ip) and page['page'].get('ip'):
            new_ip = Ip.get_or_create(value=page['page']['ip'])
            links.update(
                new_ip.active_link_to(observable, 'ip', 'UrlScanIo Query'))
        if not isinstance(observable, Hostname) and page['page'].get('domain'):
            new_host = Hostname.get_or_create(value=page['page']['domain'])
            links.update(
                new_host.active_link_to(
                    observable, 'hostname', 'UrlScanIo Query'))
        if not isinstance(observable, Url) and page['page'].get('url'):
            new_url = Url.get_or_create(value=page['page']['url'])
            links.update(
                new_url.active_link_to(observable, 'url', 'UrlScanIo Query'))
        links.update(UrlScanIoApi._process_asn_data(page, observable))
    return links
def analyze(self, line):
    """Parse one '|'-separated Tor exit-node record and store it as an
    Ip observable tagged 'tor', with node metadata as context."""
    fields = line.split('|')
    # Short/malformed rows are silently ignored.
    if len(fields) < 8:
        return
    node_ip = fields[0]
    context = {}
    context['name'] = fields[1]
    context['router-port'] = fields[2]
    context['directory-port'] = fields[3]
    context['flags'] = fields[4]
    context['uptime'] = fields[5]
    context['version'] = fields[6]
    context['contactinfo'] = fields[7]
    context['description'] = "Tor exit node: %s (%s)" % (
        context['name'], node_ip)
    context['source'] = self.name
    try:
        observable = Ip.get_or_create(value=node_ip)
        observable.add_context(context)
        observable.add_source("feed")
        observable.tag(['tor'])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, dict):
    """Parse one Feodo Tracker RSS entry (a dict with 'title',
    'description', ...) into an Ip or Hostname observable tagged with
    the malware variant.
    """
    context = dict
    # The timestamp is embedded in parentheses inside the title.
    date_string = re.search(
        r"\((?P<datetime>[\d\- :]+)\)", dict['title']).group('datetime')
    try:
        context['date_added'] = datetime.strptime(
            date_string, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        pass
    g = re.match(
        r'^Host: (?P<host>.+), Version: (?P<version>\w)',
        dict['description'])
    g = g.groupdict()
    context['version'] = g['version']
    context['description'] = FeodoTracker.descriptions[g['version']]
    context['subfamily'] = FeodoTracker.variants[g['version']]
    context['source'] = self.name
    del context['title']
    variant_tag = FeodoTracker.variants[g['version']].lower()
    try:
        # BUG FIX: the dots were unescaped ('.' matches any character),
        # so digit-heavy hostnames could be misclassified as IPs.
        if re.search(r"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}",
                     g['host']):
            new = Ip.get_or_create(value=g['host'])
        else:
            new = Hostname.get_or_create(value=g['host'])
        new.add_context(context)
        new.add_source("feed")
        new.tag([variant_tag, 'malware', 'crimeware', 'banker', 'c2'])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, line):
    """Parse one '|'-separated Tor node record (uptime column unused)
    and store it as an Ip observable tagged 'tor'."""
    fields = line.split('|')
    # Ignore rows that do not carry all expected columns.
    if len(fields) < 8:
        return
    node_ip = fields[0]
    context = {}
    context['name'] = fields[1]
    context['router-port'] = fields[2]
    context['directory-port'] = fields[3]
    context['flags'] = fields[4]
    # fields[5] (uptime) is intentionally not recorded here.
    context['version'] = fields[6]
    context['contactinfo'] = fields[7]
    context['description'] = "Tor exit node: %s (%s)" % (
        context['name'], node_ip)
    context['source'] = self.name
    try:
        observable = Ip.get_or_create(value=node_ip)
        observable.add_context(context)
        observable.add_source("feed")
        observable.tag(['tor'])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(observable, results):
    """Query ThreatMiner passive DNS for *observable* (Ip or Hostname),
    link the resolved counterparts to it, and return the links."""
    links = set()
    query = {"q": observable.value, "rt": 2}
    if isinstance(observable, Ip):
        json_result = ThreatMinerApi.fetch(observable, query, "host.php")
        _results, result = aux_checker(json_result)
        for entry in _results:
            counterpart = Hostname.get_or_create(value=entry.get("domain"))
            links.update(
                observable.link_to(
                    counterpart,
                    description="a record",
                    source="ThreatMiner",
                    first_seen=entry["first_seen"],
                    last_seen=entry["last_seen"]))
        observable.add_context(result)
    elif isinstance(observable, Hostname):
        json_result = ThreatMinerApi.fetch(observable, query, "domain.php")
        _results, result = aux_checker(json_result)
        for entry in _results:
            counterpart = Ip.get_or_create(value=entry.get("ip"))
            links.update(
                observable.link_to(
                    counterpart,
                    description="a record",
                    source="ThreatMiner",
                    first_seen=entry["first_seen"],
                    last_seen=entry["last_seen"]))
        observable.add_context(result)
    return list(links)
def analyze(self, item):
    """Store a feed *item* (dict with keys IP, Tag, Country) as a
    tagged Ip observable. Returns False on failure, True otherwise."""
    try:
        address = item["IP"]
        threat = item["Tag"]
        country = item["Country"]
        try:
            ip = Ip.get_or_create(value=address)
        except ObservableValidationError as e:
            logging.error(e)
            return False
        ip.add_source(self.name)
        context = dict(source=self.name)
        context["country"] = country
        context["threat"] = threat
        ip.tag(threat)
        ip.add_context(context)
    except Exception as e:
        # Catch-all so one malformed item does not abort the feed run.
        logging.error("Error to process the item %s %s" % (item, e))
        return False
    return True
def analyze(self, line):
    """Parse a 4-token CSV line (date added, C2 IP, port, variant) and
    record the C2 IP as a tagged Ip observable."""
    # Skip blanks and comment lines.
    if not line or line[0].startswith("#"):
        return
    parts = line.split(',')
    if len(parts) != 4:
        return
    dateadded, c2_ip, c2_port, variant = parts
    if not c2_ip:
        return
    context = {
        "first_seen": dateadded,
        "port": c2_port,
        "subfamily": variant,
        "source": self.name,
    }
    try:
        obs = Ip.get_or_create(value=c2_ip)
        obs.tag([variant, 'malware', 'crimeware', 'banker', 'c2'])
        obs.add_context(context)
        obs.add_source('feed')
    except ObservableValidationError as e:
        logging.error(e)
def analyze(observable, result):
    """Fetch VirusTotal passive-DNS resolutions for *observable* and
    link each resolved Ip back to it. Returns the list of links."""
    links = set()
    context = {"source": "VirusTotal PDNS"}
    endpoint = "/domains/%s/resolutions" % observable.value
    api_key = result.settings["virutotal_api_key"]
    response = VirustotalApi.fetch(api_key, endpoint)
    if response:
        for entry in response["data"]:
            attrs = entry["attributes"]
            address = attrs["ip_address"]
            ip = Ip.get_or_create(value=address)
            links.update(
                ip.active_link_to(observable, "PDNS", context["source"]))
            # Record the last-resolution date both on the queried
            # observable's context and on the resolved IP's context.
            resolved_at = datetime.fromtimestamp(attrs["date"]).isoformat()
            context[address] = resolved_at
            ip.add_context({
                "source": context["source"],
                observable.value: resolved_at,
            })
    observable.add_context(context)
    return list(links)
def analyze(self, line):
    """Parse one feed record (dict with Family/URL/IP/FirstSeen) and
    store the URL and IP as linked, tagged observables."""
    url_obs = False
    ip_obs = False
    family = line['Family'].lower()
    url = line['URL']
    ip = line['IP']
    context = {'first_seen': line['FirstSeen'], 'source': self.name}
    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            url_obs.add_context(context)
            url_obs.add_source(self.name)
            url_obs.tag(["c2", family])
        except ObservableValidationError as e:
            logging.error(e)
    if ip:
        try:
            ip_obs = Ip.get_or_create(value=ip)
            ip_obs.add_context(context)
            # FIX: the IP observable never recorded its source feed,
            # unlike the URL observable above.
            ip_obs.add_source(self.name)
            ip_obs.tag(family)
        except ObservableValidationError as e:
            logging.error(e)
    if url_obs and ip_obs:
        url_obs.active_link_to(ip_obs, 'ip', self.name)
def analyze(self, line, first_seen):
    """Store one (…, family, url, ip, first_seen, …) row as linked Url
    and Ip observables tagged with the family name. Note: the
    *first_seen* parameter is shadowed by the row's own value."""
    url_obs = False
    _, family, url, ip, first_seen, _ = line
    context = {'date_added': first_seen, 'source': self.name}
    tags = [family.lower()]
    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            url_obs.add_context(context)
            url_obs.add_source(self.name)
            url_obs.tag(tags)
        except ObservableValidationError as e:
            logging.error(e)
    if ip:
        try:
            ip_obs = Ip.get_or_create(value=ip)
            ip_obs.add_context(context)
            ip_obs.add_source(self.name)
            ip_obs.tag(tags)
            if url_obs:
                ip_obs.active_link_to(
                    url_obs, "url", self.name, clean_old=False)
        except ObservableValidationError as e:
            logging.error(e)
def analyze(self, line, first_seen):
    """Store one RAT-tracker row as Url and Ip observables tagged with
    the family name plus 'rat'. The context's date comes from the
    *first_seen* parameter (set before the row is unpacked)."""
    # Context is built from the parameter BEFORE unpacking shadows it.
    context = {'date_added': first_seen, 'source': self.name}
    _, family, url, ip, first_seen, _ = line
    # Normalize scheme-less URLs.
    if not url.startswith(('http://', 'https://')):
        url = "http://" + url
    tags = [family.lower(), "rat"]
    if url:
        try:
            url_obs = Url.get_or_create(value=url)
            url_obs.add_context(context)
            url_obs.add_source(self.name)
            url_obs.tag(tags)
        except ObservableValidationError as e:
            logging.error(e)
    if ip:
        try:
            ip_obs = Ip.get_or_create(value=ip)
            ip_obs.add_context(context)
            ip_obs.add_source(self.name)
            ip_obs.tag(tags)
        except ObservableValidationError as e:
            logging.error(e)
def analyze(self, line):
    """Parse one feed record (dict with Family/URL/IP/FirstSeen) into
    linked Url and Ip observables tagged with the family."""
    context = {"first_seen": line["FirstSeen"], "source": self.name}
    family = line["Family"].lower()
    url_obs = False
    ip_obs = False
    if line["URL"]:
        try:
            url_obs = Url.get_or_create(value=line["URL"])
            url_obs.add_context(context)
            url_obs.add_source(self.name)
            url_obs.tag(["c2", family])
        except ObservableValidationError as e:
            logging.error(e)
    if line["IP"]:
        try:
            ip_obs = Ip.get_or_create(value=line["IP"])
            ip_obs.add_context(context)
            # NOTE(review): unlike the URL branch there is no
            # add_source here — preserved as in the original.
            ip_obs.tag(family)
        except ObservableValidationError as e:
            logging.error(e)
    if url_obs and ip_obs:
        url_obs.active_link_to(ip_obs, "ip", self.name)
def analyze(self, item):
    """Store a positional feed row (ip at [0], category at [3],
    country at [4]) as a tagged Ip observable.
    Returns False on failure, True otherwise."""
    if not item:
        return
    try:
        address, category, country = item[0], item[3], item[4]
        try:
            ip = Ip.get_or_create(value=address)
        except ObservableValidationError as e:
            logging.error(e)
            return False
        ip.add_source('feed')
        context = dict(source=self.name)
        context['country'] = country
        context['threat'] = category
        ip.tag(category)
        ip.add_context(context)
    except Exception as e:
        # Catch-all so a malformed row does not abort the feed run.
        logging.error('Error to process the item %s %s' % (item, e))
        return False
    return True
def analyze(self, line):
    """Register a pipe-delimited Tor exit-node row as an Ip observable
    tagged 'tor', with node metadata as context."""
    parts = line.split("|")
    # Rows with fewer than 8 columns are ignored.
    if len(parts) < 8:
        return
    node_ip = parts[0]
    node_name = parts[1]
    context = {}
    context["name"] = node_name
    context["router-port"] = parts[2]
    context["directory-port"] = parts[3]
    context["flags"] = parts[4]
    context["version"] = parts[6]
    context["contactinfo"] = parts[7]
    context["source"] = self.name
    context["description"] = "Tor exit node: %s (%s)" % (node_name, node_ip)
    try:
        ip = Ip.get_or_create(value=node_ip)
        ip.add_context(context)
        ip.add_source(self.name)
        ip.tag(["tor"])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, line):
    """Register a pipe-delimited Tor exit-node row as an Ip observable
    tagged 'tor', with node metadata as context."""
    columns = line.split('|')
    # Rows with fewer than 8 columns are ignored.
    if len(columns) < 8:
        return
    exit_ip = columns[0]
    exit_name = columns[1]
    context = {}
    context['name'] = exit_name
    context['router-port'] = columns[2]
    context['directory-port'] = columns[3]
    context['flags'] = columns[4]
    context['version'] = columns[6]
    context['contactinfo'] = columns[7]
    context['source'] = self.name
    context['description'] = "Tor exit node: %s (%s)" % (exit_name, exit_ip)
    try:
        ip = Ip.get_or_create(value=exit_ip)
        ip.add_context(context)
        ip.add_source(self.name)
        ip.tag(['tor'])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, line):
    """Record one row (Firstseen/DstIP/DstPort) as a tagged Ip plus a
    derived https URL observable, linked together.
    Returns False when an observable fails validation."""
    tags = ["potentially_malicious_infrastructure", "c2"]
    dst_ip = line['DstIP']
    port = line['DstPort']
    context = dict(source=self.name)
    context["first_seen"] = line['Firstseen']
    ip_obs = False
    try:
        ip_obs = Ip.get_or_create(value=dst_ip)
        ip_obs.add_source(self.name)
        ip_obs.tag(tags)
        ip_obs.add_context(context)
    except ObservableValidationError as e:
        logging.error(e)
        return False
    try:
        # The feed lists TLS endpoints, so reconstruct an https URL.
        url = Url.get_or_create(
            value="https://{dst_ip}:{port}/".format(dst_ip=dst_ip, port=port))
        url.add_source(self.name)
        url.tag(tags)
        url.add_context(context)
        if ip_obs:
            url.active_link_to(ip_obs, 'ip', self.name)
    except ObservableValidationError as e:
        logging.error(e)
        return False
def analyze(self, item):
    """Store a positional feed row (ip at [0], category at [3],
    country at [4]) as a tagged Ip observable.
    Returns False on failure, True otherwise."""
    if not item:
        return
    try:
        address, category, country = item[0], item[3], item[4]
        try:
            ip = Ip.get_or_create(value=address)
        except ObservableValidationError as e:
            logging.error(e)
            return False
        ip.add_source(self.name)
        context = dict(source=self.name)
        context['country'] = country
        context['threat'] = category
        ip.tag(category)
        ip.add_context(context)
    # TODO(doomedraven) check what we're catching here, I think we can
    # remove this exception
    except Exception as e:
        logging.error('Error to process the item %s %s' % (item, e))
        return False
    return True
def analyze(self, item):
    """Parse a (first_seen, last_seen, ip, category, attacks_count)
    row into a tagged Ip observable; the header row is skipped.
    Returns False on failure, True otherwise."""
    # Skip empty rows and the CSV header.
    if not item or item[0].startswith('first_seen'):
        return
    try:
        first_seen, last_seen, ip_address, category, attacks_count = item
        context = dict(source=self.name)
        try:
            ip = Ip.get_or_create(value=ip_address)
        except ObservableValidationError as e:
            logging.error('Error in IP format %s %s' % (ip_address, e))
            return False
        context['threat'] = category
        ip.tag(category)
        context['first_seen'] = first_seen
        context['last_seen'] = last_seen
        context['attack_count'] = attacks_count
        ip.add_source('feed')
        ip.add_context(context)
    except Exception as e:
        # Catch-all so a malformed row does not abort the feed run.
        logging.error('Error processing the line %s %s' % (item, e))
        return False
    return True
def analyze(self, line):
    """Store one record (dict with url/ip/type/date) as linked Url and
    Ip observables tagged with the malware family ('type')."""
    url_obs = False
    context = {"date_added": line["date"], "source": self.name}
    tags = [line["type"].lower()]
    if line["url"]:
        try:
            url_obs = Url.get_or_create(value=line["url"])
            url_obs.add_context(context)
            url_obs.add_source(self.name)
            url_obs.tag(tags)
        except ObservableValidationError as e:
            logging.error(e)
    if line["ip"]:
        try:
            ip_obs = Ip.get_or_create(value=line["ip"])
            ip_obs.add_context(context)
            ip_obs.add_source(self.name)
            ip_obs.tag(tags)
            if url_obs:
                ip_obs.active_link_to(
                    url_obs, "url", self.name, clean_old=False)
        except ObservableValidationError as e:
            logging.error(e)
def analyze(observable, results):
    """Enrich a hostname observable with malwares.com API data: related
    file hashes, historical hostnames, URLs and IP history.
    Returns the set of links created."""
    links = set()
    context = {
        "source": "malwares.com",
    }
    params = {
        "api_key": results.settings["malwares_api_key"],
        "hostname": observable.value,
    }
    data = MalwaresApi.fetch(observable, params, "hostname/info")
    if data:
        context["raw"] = data
        # File relationships, bucketed by detection/communication type.
        for key in (
                "undetected_communicating_file",
                "detected_downloaded_file",
                "undetected_downloaded_file",
                "detected_communicating_file",
        ):
            section = data[key]
            for entry in section["list"]:
                sha = Hash.get_or_create(value=entry["sha256"])
                links.update(
                    sha.active_link_to(observable, key, "malwares.com"))
            context[key] = section["total"]
        # Historical hostnames seen for this host.
        for name in data.get("hostname_history", {}).get("list", []):
            host = Hostname.get_or_create(value=name)
            host.tag(observable.get_tags())
            links.update(
                host.active_link_to(observable, "hostname", "malwares.com"))
        # Related URLs, detected and undetected.
        for key in ("detected_url", "undetected_url"):
            section = data[key]
            for entry in section["list"]:
                try:
                    url = Url.get_or_create(value=entry["url"])
                    url.tag(observable.get_tags())
                    links.update(
                        url.active_link_to(observable, key, "malwares.com"))
                except ObservableValidationError:
                    logging.error("Url is not valid %s" % entry["url"])
            context[key] = section["total"]
        # Historical IP resolutions.
        history = data["ip_history"]
        for entry in history["list"]:
            ip = Ip.get_or_create(value=entry["ip"])
            links.update(
                ip.active_link_to(observable, "ip_story", "malwares.com"))
        context["ip_story"] = history["total"]
    return links
def analyze(self, data):
    """Record a single IP (one per line; trailing whitespace stripped)
    as a 'malicious'-tagged Ip observable."""
    try:
        obs = Ip.get_or_create(value=data.rstrip())
        obs.add_context({'source': self.name})
        obs.add_source('feed')
        obs.tag(['malicious'])
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, context, details_dict):
    """Store a proxy node described by *details_dict* (keys: 'ip',
    'type') as an Ip observable tagged with its proxy type."""
    proxy_tags = [details_dict["type"], "proxy"]
    try:
        obs = Ip.get_or_create(value=details_dict["ip"])
        obs.add_context(context)
        obs.tag(proxy_tags)
        obs.add_source(self.name)
    except ObservableValidationError as e:
        logging.error(e)
def analyze(self, item):
    """Parse one RSS item describing a C2 server: extract the malware
    family and C2 IP from the description, record the title as an
    observable, and link it to the C2 IP."""
    # FIX: raw string, and escaped dots in the IP groups — the original
    # used unescaped '.' (matches any character) in a non-raw string.
    s_re = r'\[([^\]]*)] Type: (\w+) - IP: (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'
    r = re.compile(s_re)
    m = r.match(item['description'])
    malware_family = ''
    c2_IP = ''
    if m:
        malware_family = m.group(2)
        c2_IP = m.group(3)
    observable = item['title']
    description = item['description'].lower()
    context = {}
    context['description'] = "{} C2 server".format(c2_IP)
    context['date_added'] = parser.parse(item['pubDate'])
    context['source'] = self.name
    c2 = None
    # FIX: renamed from `e`, which was shadowed by the exception
    # variable in the handler below.
    obs = None
    try:
        obs = Observable.add_text(observable)
        if c2_IP:
            c2 = Ip.get_or_create(value=c2_IP)
            obs.active_link_to(c2, "IP", self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
        logging.error(description)
        return
    tags = ['malware', 'c2', malware_family.lower(), 'crimeware']
    if malware_family == 'pony':
        tags.extend(['stealer', 'dropper'])
    elif malware_family == 'athena':
        tags.extend(['stealer', 'ddos'])
    elif malware_family in ['zeus', 'citadel', 'lokibot']:
        tags.extend(['banker'])
    if obs:
        obs.add_context(context)
        obs.add_source("feed")
        obs.tag(tags)
    if c2:
        c2.add_context(context)
        c2.add_source("feed")
        c2.tag(tags)
def analyze(self, line):
    """Parse a 10-field feed row and record the URL, hostname, IPs and
    ASNs as linked, tagged observables."""
    (first_seen, _type, family, hostname, url, status, registrar, ips,
     asns, countries) = line
    tags = []
    tags += TYPE_DICT[_type]
    tags.append(family.lower())
    context = {
        "first_seen": parser.parse(first_seen),
        "status": status,
        "registrar": registrar,
        "countries": countries.split("|"),
        "asns": asns.split("|"),
        "source": self.name
    }
    url_obs = False
    # BUG FIX: ip_obs was undefined when the IPs column was empty,
    # raising NameError in the ASN loop below.
    ip_obs = False
    try:
        url_obs = Url.get_or_create(value=url.rstrip())
        url_obs.add_context(context)
        url_obs.tag(tags)
    except (ObservableValidationError, UnicodeEncodeError) as e:
        logging.error("Invalid line: {}\nLine: {}".format(e, line))
    try:
        hostname = Observable.add_text(hostname)
        hostname.tag(tags + ['blocklist'])
    except (ObservableValidationError, UnicodeEncodeError) as e:
        logging.error("Invalid line: {}\nLine: {}".format(e, line))
    for ip in ips.split("|"):
        if ip != hostname and ip is not None and ip != '':
            try:
                ip_obs = Ip.get_or_create(value=ip)
                ip_obs.active_link_to(
                    (url_obs, hostname), "ip", self.name, clean_old=False)
            except (ObservableValidationError, UnicodeEncodeError) as e:
                logging.error("Invalid Observable: {}".format(e))
    for asn in asns.split("|"):
        try:
            asn_obs = AutonomousSystem.get_or_create(value=asn)
            asn_obs.active_link_to(
                (hostname, ip_obs), "asn", self.name, clean_old=False)
        except (ObservableValidationError, UnicodeEncodeError) as e:
            logging.error("Invalid Observable: {}".format(e))
def analyze(self, ip, first_seen, raw):  # pylint: disable=arguments-differ
    """Record *ip* as an Ip observable with its first-seen date and the
    raw feed entry as context."""
    context = {
        "first_seen": first_seen,
        "source": self.name,
        "raw": raw,
    }
    try:
        obs = Ip.get_or_create(value=ip)
        obs.add_context(context)
        obs.add_source(self.name)
    except ObservableValidationError as e:
        logging.error(e)
def save_observables(self):
    """Link every scanned host to this report as an Ip observable and
    attach its per-host result names as numbered context entries."""
    for host in self.hosts:
        ip = Ip.get_or_create(value=host)
        ip.active_link_to(self, "Scan Report", "web interface")
        context = {'source': self.name}
        index = 1
        for res in self.results:
            if res.host == host:
                context['Result_{}'.format(index)] = res.name
                index += 1
        ip.add_context(context, replace_source=self.name)
def analyze(self, row):
    """Store one report row (keys: ip, last_report, count, id) as an
    Ip observable with reporting metadata as context."""
    context = {
        "first_seen": row["last_report"],
        "source": self.name,
        "count": row["count"],
        "id": row["id"],
    }
    try:
        obs = Ip.get_or_create(value=row["ip"])
        obs.add_context(context)
        obs.add_source(self.name)
    except ObservableValidationError as e:
        logging.error(e)
def analyze(observable, results):
    """Query ThreatMiner for hosts/domains related to a sample hash,
    link them to *observable*, and return the created links."""
    links = set()
    params = {"q": observable.value, "rt": 3}
    json_result = ThreatMinerApi.fetch(observable, params, "sample.php")
    _results, result = aux_checker(json_result)
    for r in _results:
        # FIX: default to an empty list — r.get("hosts") returns None
        # when the key is absent, which crashed the iteration.
        for ip in r.get("hosts", []):
            try:
                o_ip = Ip.get_or_create(value=ip)
                o_ip.tag(observable.get_tags())
                links.update(
                    o_ip.active_link_to(observable, "seen connecting to",
                                        "ThreatMiner"))
            except ObservableValidationError as e:
                logging.error("Caught an exception: {}".format(e))
        # Same fix for the domains list.
        for domain in r.get("domains", []):
            try:
                if domain.get("domain"):
                    o_host = Hostname.get_or_create(
                        value=domain.get("domain"))
                    o_host.tag(observable.get_tags())
                    links.update(
                        o_host.active_link_to(observable,
                                              "seen connecting to",
                                              "ThreatMiner"))
                if domain.get("ip"):
                    o_ip = Ip.get_or_create(value=domain.get("ip"))
                    o_ip.tag(o_host.get_tags())
                    links.update(
                        o_host.active_link_to(o_ip, "resolved to",
                                              "ThreatMiner"))
            except ObservableValidationError as e:
                logging.error("Caught an exception: {}".format(e))
    observable.add_context(result)
    return list(links)
def analyze(self, line):
    """Record one IP from the abuseIPDB blocklist as a tagged Ip
    observable."""
    value = line.strip()
    context = {"source": self.name}
    try:
        ip = Ip.get_or_create(value=value)
        ip.add_context(context)
        ip.add_source(self.name)
        ip.tag("abuseIPDB")
    except ObservableValidationError as e:
        # BUG FIX: the original `raise logging.error(e)` raised the
        # None returned by logging.error (a TypeError), aborting the
        # whole feed run on the first invalid IP. Just log it.
        logging.error(e)
def analyze(self, line):
    """Parse one (ip, domain, family, md5, link, date) row: create the
    sample hash observable and its C2 (Url/Hostname/Ip), then link the
    sample to the C2."""
    if not line or line[0].startswith("#"):
        return
    try:
        # FIX: use str.strip — the bare name `strip` is the Python 2
        # string.strip helper and is a NameError under Python 3.
        ip, domain, family, md5, link, date = tuple(map(str.strip, line))
        context = {
            "first_seen": date,
            "family": family,
            "report": link,
            "source": self.name
        }
        c2 = None
        sample = None
        try:
            sample = Hash.get_or_create(value=md5)
            sample.add_context(context)
            sample.tag(family.lower())
        except ObservableValidationError as e:
            logging.error("Invalid line: {}\nLine: {}".format(e, line))
        try:
            # A '/' in the domain column means it is actually a URL.
            if domain:
                if '/' in domain:
                    c2 = Url.get_or_create(value=domain)
                else:
                    c2 = Hostname.get_or_create(value=domain)
            elif ip:
                c2 = Ip.get_or_create(value=ip)
            else:
                return
            c2.add_context(context)
            c2.tag(['c2', family.lower()])
        except ObservableValidationError as e:
            logging.error("Invalid line: {}\nLine: {}".format(e, line))
        if c2 and sample:
            sample.active_link_to(c2, 'c2', self.name, clean_old=False)
    except ValueError:
        logging.error("Error unpacking line: {}".format(line))
def analyze(self, line):
    """Parse a 10-field feed row and record the URL, hostname and
    first-seen IPs, linking each IP to the hostname."""
    # Skip blanks and comment lines.
    if not line or line[0].startswith("#"):
        return
    (date, _type, family, hostname, url, status, registrar, ips, asns,
     countries) = tuple(line)
    tags = []
    tags += TYPE_DICT[_type]
    tags.append(family.lower())
    context = {
        "first_seen": date,
        "status": status,
        "registrar": registrar,
        "countries": countries.split("|"),
        "asns": asns.split("|"),
        "source": self.name
    }
    try:
        url = Url.get_or_create(value=url.rstrip())
        url.add_context(context)
        url.tag(tags)
        hostname = Observable.add_text(hostname)
        hostname.tag(tags + ['blocklist'])
        for ip in ips.split("|"):
            if ip != hostname and ip is not None and ip != '':
                try:
                    i = Ip.get_or_create(value=ip)
                    i.active_link_to(
                        hostname, "First seen IP", self.name,
                        clean_old=False)
                except ObservableValidationError as e:
                    logging.error("Invalid Observable: {}".format(e))
    except ObservableValidationError as e:
        logging.error("Invalid line: {}\nLine: {}".format(e, line))
def _get_threat_forensics_nodes_inner(
        self, evidence, general_context, tags):
    """Turn one Proofpoint forensics *evidence* entry into observables.

    Dispatches on evidence['type'] (file/dropper, dns, ids, mutex,
    network, url, ...) and returns the list of observables created,
    each carrying the merged context and all tags.
    """
    # create context from notes
    context = general_context.copy()
    _ctx = self._make_context_from_notes([evidence])
    context.update(_ctx)
    # add evidence['type'] and unicify tags
    tags = [{
        'name': _
    } for _ in set([evidence['type']] + [d['name'] for d in tags])]
    # create Tags in DB
    for _ in tags:
        Tag.get_or_create(name=_['name'])
    #
    threat_forensics = []
    # technical hack: set optional comments values
    for optional in ['action', 'rule', 'path', 'rule']:
        if optional not in evidence['what']:
            evidence['what'][optional] = None
    # add attributes for the known evidence type
    if evidence['type'] in ['file', 'dropper']:
        if 'path' in evidence['what']:
            threat_forensics.append(
                File.get_or_create(
                    value=evidence['what']['path'], context=[context]))
        if 'md5' in evidence['what']:
            threat_forensics.append(
                Hash.get_or_create(
                    value=evidence['what']['md5'], context=[context]))
        if 'sha256' in evidence['what']:
            threat_forensics.append(
                Hash.get_or_create(
                    value=evidence['what']['sha256'], context=[context]))
    elif evidence['type'] == 'cookie':
        # cookie evidence is intentionally ignored
        pass
    elif evidence['type'] == 'dns':
        threat_forensics.append(
            Hostname.get_or_create(
                value=evidence['what']['host'], context=[context]))
    elif evidence['type'] == 'ids':
        threat_forensics.append(
            Text.get_or_create(
                value=evidence['what']['ids'], context=[context]))
        pass
    elif evidence['type'] == 'mutex':
        threat_forensics.append(
            Text.get_or_create(
                value=evidence['what']['name'], context=[context]))
    elif evidence['type'] == 'network':
        if 'ip' in evidence['what']:
            # FIXME port, type
            threat_forensics.append(
                Ip.get_or_create(
                    value=evidence['what']['ip'], context=[context]))
        elif 'domain' in evidence['what']:
            threat_forensics.append(
                Hostname.get_or_create(
                    value=evidence['what']['domain'], context=[context]))
    elif evidence['type'] == 'process':
        # process evidence is intentionally ignored
        pass
    elif evidence['type'] == 'registry':
        # registry keys/values are not stored (kept for reference):
        # threat_forensics.append(evidence['what']['key'])
        # threat_forensics.append(evidence['what']['value'])
        pass
    elif evidence['type'] == 'url':
        # BUG yeti-#115 ObservableValidationError: Invalid URL: http://xxxxx-no-tld/
        threat_forensics.append(
            Url.get_or_create(
                value=evidence['what']['url'], context=[context]))
    # add note as tag because its a signature
    # NOTE(review): tags only the LAST observable created above, and
    # would raise IndexError if the list is empty — preserved as-is.
    if 'note' in evidence:
        threat_forensics[-1].tag(
            evidence['note'].replace('.', '_').strip('_'))
    # tag all of that
    for o in threat_forensics:
        o.tag([t['name'] for t in tags])
    return threat_forensics
def _add_events_nodes(self, events, context, tags):
    """Create one 'proofpoint://GUID' Text node per event message and
    link it to the sender MTA IP, message-ID, sender addresses and any
    THREAT-flagged attachment hashes. Returns the event nodes.
    """
    log.debug('_add_events_nodes on {nb} events'.format(nb=len(events)))
    # Counters for attachments the sandbox could not analyze.
    attach_unsupported = dict(
        [(_, 0) for _ in ['UNSUPPORTED_TYPE', 'TOO_SMALL', None]])
    event_nodes = list()
    for msg in events:
        create_t = datetime.strptime(
            msg['messageTime'], "%Y-%m-%dT%H:%M:%S.%fZ")
        # PPS unique value
        guid = Text.get_or_create(
            value='proofpoint://%s' % msg['GUID'],
            created=create_t,
            context=[context])
        log.debug('Event {msg}'.format(msg=msg['messageID']))
        message_contents = list()
        src_ip = Ip.get_or_create(
            value=msg['senderIP'], created=create_t, context=[context])
        src_ip.tag(['MTA'])
        guid.active_link_to([src_ip], "MTA src ip", self.name)
        # new event
        event_nodes.append(guid)
        # if self.config['import_email_metadata']:
        # email details
        # messageID
        message_id = Email.get_or_create(
            value=msg['messageID'], created=create_t, context=[context])
        guid.active_link_to([message_id], "seen in", self.name)
        # sender
        _s1 = Email.get_or_create(
            value=msg['sender'], created=create_t, context=[context])
        _s1.tag(['sender'])
        guid.active_link_to([_s1], "sender", self.name)
        if 'headerFrom' in msg:
            # header From
            _s2 = Email.get_or_create(
                value=msg['headerFrom'], created=create_t, context=[context])
            _s2.tag(['sender'])
            guid.active_link_to([_s2], "headerFrom", self.name)
        # FIXME is that a duplicate of attachment-malware ?
        # attachment events
        for attach in msg['messageParts']:
            if attach['sandboxStatus'] in ['THREAT']:
                md5 = Hash.get_or_create(
                    value=attach['md5'], created=create_t, context=[context])
                md5.tag([t['name'] for t in tags])
                fname = File.get_or_create(
                    value=attach['filename'],
                    created=create_t,
                    context=[context])
                fname.tag([t['name'] for t in tags])
                # this should be a DUP from threat_nodes in analyse()
                sha_threat = Hash.get_or_create(
                    value=attach['sha256'],
                    created=create_t,
                    context=[context])
                sha_threat.active_link_to(
                    [md5, fname], "relates", self.name)
                sha_threat.tag([t['name'] for t in tags])
                message_contents.append(sha_threat)
                # link the 3 together
            elif attach['sandboxStatus'] in ['UNSUPPORTED_TYPE',
                                             'TOO_SMALL', None]:
                attach_unsupported[attach['sandboxStatus']] += 1
                log.debug(pprint.pformat(attach))
        # add context to the hashes
        guid.active_link_to(message_contents, "delivers", self.name)
    _stats = ', '.join(
        "%s: %d" % (k, v) for k, v in attach_unsupported.items())
    log.warning('Ignored unsupported attachments: %s', _stats)
    for o in event_nodes:
        o.tag([t['name'] for t in tags])
    return event_nodes
def analyze(self, item):
    """Parse one Feodo Tracker RSS *item*: create the C2 host (Ip or
    Hostname) from the description, then scrape the item's detail page
    (context['guid']) for associated sample hashes and link them.
    """
    context = item
    # The timestamp is embedded in parentheses inside the title.
    date_string = re.search(
        r"\((?P<datetime>[\d\- :]+)\)", context['title']).group('datetime')
    try:
        context['date_added'] = datetime.strptime(
            date_string, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        pass
    g = re.match(
        r'^Host: (?P<host>.+), Version: (?P<version>\w)',
        context['description'])
    g = g.groupdict()
    context['version'] = g['version']
    context['description'] = FeodoTracker.descriptions[g['version']]
    context['subfamily'] = FeodoTracker.variants[g['version']]
    context['source'] = self.name
    del context['title']
    new = None
    variant_tag = FeodoTracker.variants[g['version']].lower()
    try:
        # NOTE(review): dots are unescaped here, so this matches more
        # than dotted quads — preserved as-is.
        if re.search(r"[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}",
                     g['host']):
            new = Ip.get_or_create(value=g['host'])
        else:
            new = Hostname.get_or_create(value=g['host'])
        new.add_context(context)
        new.add_source("feed")
        new.tag([variant_tag, 'malware', 'crimeware', 'banker', 'c2'])
    except ObservableValidationError as e:
        logging.error(e)
    try:
        # Scrape the detail page for the sample table (7 columns).
        url_fedeo = context['guid']
        r = requests.get(url_fedeo)
        if r.status_code == 200:
            html_source = r.text
            soup = BeautifulSoup(html_source, 'html.parser')
            tab = soup.find('table', attrs='sortable')
            results = []
            if tab:
                all_tr = tab.find_all('tr')
                for tr in all_tr:
                    all_td = tr.find_all('td')
                    if all_td and len(all_td) == 7:
                        results.append({
                            'timestamp': all_td[0].text,
                            'md5_hash': all_td[1].text,
                            'filesize': all_td[2].text,
                            'VT': all_td[3].text,
                            'Host': all_td[4].text,
                            'Port': all_td[5].text,
                            'SSL Certif or method': all_td[6].text
                        })
            # Create a Hash and an https URL observable per table row
            # and link both to the C2 host created above.
            for r in results:
                new_hash = Hash.get_or_create(value=r['md5_hash'])
                new_hash.add_context(context)
                new_hash.add_source('feed')
                new_hash.tag([
                    variant_tag, 'malware', 'crimeware', 'banker',
                    'payload'
                ])
                new_hash.active_link_to(
                    new, 'c2', self.name, clean_old=False)
                host = Url.get_or_create(
                    value='https://%s:%s' % (g['host'], r['Port']))
                host.add_source('feed')
                host.add_context(context)
                host.tag(
                    [variant_tag, 'malware', 'crimeware', 'banker', 'c2'])
                new_hash.active_link_to(
                    host, 'c2', self.name, clean_old=False)
    except ObservableValidationError as e:
        logging.error(e)
def analyze(observable, results):
    """Enrich *observable* with ThreatCrowd data.

    Fetches the ThreatCrowd record for the observable and, depending on
    its type (Hostname / Ip / Email / Hash), creates linked observables
    (resolutions, emails, subdomains, hashes, C2 domains/IPs).

    :param observable: the observable being enriched.
    :param results: AnalyticsResults-like object; updated with the raw
                    JSON payload.
    :return: list of links created.
    """
    links = set()
    json_result = ThreatCrowdAPI.fetch(observable)
    json_string = json.dumps(
        json_result, sort_keys=True, indent=4, separators=(',', ': '))
    results.update(raw=json_string)
    result = {}

    if isinstance(observable, Hostname):
        if 'resolutions' in json_result:
            result['ip on this domains'] = 0
            for ip in json_result['resolutions']:
                if ip['ip_address'].strip() != observable.value:
                    # '0000-00-00' is ThreatCrowd's "never resolved" marker.
                    if ip['last_resolved'] != '0000-00-00':
                        last_resolved = datetime.datetime.strptime(
                            ip['last_resolved'], "%Y-%m-%d")
                        try:
                            new_ip = Ip.get_or_create(
                                value=ip['ip_address'].strip())
                            links.update(
                                new_ip.active_link_to(
                                    observable, 'IP', 'ThreatCrowd',
                                    last_resolved))
                            result['ip on this domains'] += 1
                        except ObservableValidationError:
                            logging.error(
                                "An error occurred when trying to add subdomain {} to the database".
                                format(ip['ip_address']))

        if 'emails' in json_result:
            result['nb emails'] = 0
            for email in json_result['emails']:
                try:
                    new_email = Email.get_or_create(value=email)
                    links.update(
                        new_email.active_link_to(
                            observable, 'Used by', 'ThreatCrowd'))
                    result['nb emails'] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add email {} to the database".
                        format(email))

        if 'subdomains' in json_result:
            result['nb subdomains'] = 0
            for subdomain in json_result['subdomains']:
                try:
                    new_domain = Hostname.get_or_create(value=subdomain)
                    links.update(
                        observable.active_link_to(
                            new_domain, 'subdomain', 'ThreatCrowd'))
                    result['nb subdomains'] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add subdomain {} to the database".
                        format(subdomain))

    if isinstance(observable, Ip):
        if 'resolutions' in json_result:
            result['domains resolved'] = 0
            for domain in json_result['resolutions']:
                if domain['domain'].strip() != observable.value:
                    try:
                        last_resolved = datetime.datetime.strptime(
                            domain['last_resolved'], "%Y-%m-%d")
                        new_domain = Hostname.get_or_create(
                            value=domain['domain'].strip())
                        links.update(
                            new_domain.active_link_to(
                                observable, 'A Record', 'ThreatCrowd',
                                last_resolved))
                        result['domains resolved'] += 1
                    except ObservableValidationError:
                        logging.error(
                            "An error occurred when trying to add domain {} to the database".
                            format(domain['domain']))

        if 'hashes' in json_result and len(json_result['hashes']) > 0:
            result['malwares'] = 0
            for h in json_result['hashes']:
                # Wrapped in try/except for consistency with the other
                # branches — one bad hash no longer aborts the enrichment.
                try:
                    new_hash = Hash.get_or_create(value=h)
                    links.update(
                        new_hash.active_link_to(
                            observable, 'hash', 'ThreatCrowd'))
                    result['malwares'] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add hash {} to the database".
                        format(h))

    if isinstance(observable, Email):
        # BUG FIX: the condition previously tested len(json_result)
        # instead of len(json_result['domains']).
        if 'domains' in json_result and len(json_result['domains']) > 0:
            result['domains recorded by email'] = 0
            for domain in json_result['domains']:
                try:
                    new_domain = Hostname.get_or_create(value=domain)
                    links.update(
                        new_domain.active_link_to(
                            observable, 'recorded by', 'ThreatCrowd'))
                    result['domains recorded by email'] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add domain {} to the database".
                        format(domain))

    if isinstance(observable, Hash):
        result['nb c2'] = 0
        # Source name fixed: was misspelled 'ThreadCrowd' in this branch.
        if 'md5' in json_result:
            new_hash = Hash.get_or_create(value=json_result['md5'])
            links.update(
                new_hash.active_link_to(observable, 'md5', 'ThreatCrowd'))

        if 'sha1' in json_result:
            new_hash = Hash.get_or_create(value=json_result['sha1'])
            links.update(
                new_hash.active_link_to(observable, 'sha1', 'ThreatCrowd'))

        if 'sha256' in json_result:
            new_hash = Hash.get_or_create(value=json_result['sha256'])
            links.update(
                new_hash.active_link_to(
                    observable, 'sha256', 'ThreatCrowd'))

        if 'domains' in json_result and len(json_result['domains']):
            for domain in json_result['domains']:
                try:
                    new_domain = Hostname.get_or_create(value=domain)
                    links.update(
                        observable.active_link_to(
                            new_domain, 'c2', 'ThreatCrowd'))
                    result['nb c2'] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add domain {} to the database".
                        format(domain))

        if 'ips' in json_result and len(json_result['ips']):
            for ip in json_result['ips']:
                try:
                    new_ip = Ip.get_or_create(value=ip.strip())
                    links.update(
                        observable.active_link_to(new_ip, 'c2', 'ThreatCrowd'))
                    result['nb c2'] += 1
                except ObservableValidationError:
                    logging.error(
                        "An error occurred when trying to add ip {} to the database".
                        format(ip))

    if 'permalink' in json_result:
        result['permalink'] = json_result['permalink']

    result['source'] = 'threatcrowd_query'
    result['raw'] = json_string
    observable.add_context(result)
    return list(links)
def _query_and_filter_previous_new_threat_for_campaign(
        campaign_info, context):
    """Return the campaign threats that are net-new to the database.

    Iterates over ``campaign_info['campaignMembers']``, skips threats we
    already store, and creates an observable (Url/Hash/Hostname, with Ip
    then Text as fallbacks) for each new one.

    :param campaign_info: ProofPoint campaign API payload.
    :param context: context dict attached to every created observable.
    :return: list of newly created observables.
    """
    # TODO: alternative solution, query by type, get all campaign threat,
    # intersect sets. Only create Observables and link them when they do
    # not exist.
    cls_action = {
        'COMPLETE_URL': Url,
        'NORMALIZED_URL': Url,
        'ATTACHMENT': Hash,
        'DOMAIN': Hostname,
        # BUG #5: undocumentated value HOSTNAME, could be hostname or ip
        'HOSTNAME': Hostname
    }
    threats = []
    log.info(
        "There are {nb} threat associated to campaign".format(
            nb=len(campaign_info['campaignMembers'])))
    for threat in campaign_info['campaignMembers']:
        # subType is ATTACHMENT, COMPLETE_URL, NORMALIZED_URL, DOMAIN
        # or (undocumented) HOSTNAME.
        v = threat['threat']
        create_t = datetime.strptime(
            threat['threatTime'], "%Y-%m-%dT%H:%M:%S.%fZ")
        if threat['threatStatus'] != 'active':
            # e.g. threatStatus == 'falsePositive' — logged, not skipped.
            log.warning(
                'Campaign threat - threatStatus %s unsupported',
                threat['threatStatus'])
        if threat['subType'] not in cls_action:
            log.error(
                'Campaign threat - subtype %s unsupported',
                threat['subType'])
            continue
        cls = cls_action[threat['subType']]

        # If it already exists, don't do anything: tags and context are
        # the same. (EAFP: a plain existence query, then create below.)
        try:
            cls.objects.get(value=v)
            continue
        except DoesNotExist:
            pass

        # BUG FIX: 'o' was previously left unbound on some error paths
        # (outer DoesNotExist, or ObservableValidationError on a
        # non-HOSTNAME subtype), causing a NameError at o.tag()/append.
        o = None
        try:
            # tags named argument in constructor does not work the same
            # as .tag()
            o = cls.get_or_create(
                value=v, context=[context], created=create_t)
        except DoesNotExist:
            # wtf
            log.error(
                "{cls} {v} has a weird problem - FIXME".format(
                    cls=cls, v=v))
        except ObservableValidationError:
            try:
                if threat['subType'] == 'HOSTNAME':
                    # HOSTNAME entries sometimes carry a bare IP.
                    o = Ip.get_or_create(
                        value=v, context=[context], created=create_t)
            except ObservableValidationError as e:
                log.error(e)
                log.error(pprint.pformat(threat))
                log.error(
                    "Campaign {name}".format(
                        name=campaign_info['name']))
            if o is None:
                # Last resort: keep the raw value as free text.
                o = Text.get_or_create(
                    value=v, context=[context], created=create_t)

        if o is not None:
            o.tag([threat['type'], threat['subType']])
            threats.append(o)

    log.info("Found %d new threat on campaign, new to us", len(threats))
    log.debug(
        ", ".join(
            ["%s:%s" % (t.__class__.__name__, t.value) for t in threats]))
    return threats