def get_ransomware():
    """Fetch the abuse.ch ransomware domain blocklist as feed reports.

    Returns a one-element list holding a CbReport whose 'dns' IOCs are
    the blocklisted domains, or an empty list when nothing parsed.
    """
    blocklist_url = "https://ransomwaretracker.abuse.ch/downloads/RW_DOMBL.txt"
    response = requests.get(blocklist_url)

    # Keep only non-comment lines with at least three characters.
    domains = [raw.strip() for raw in response.text.split("\n")
               if len(raw) >= 3 and raw[0] != "#"]

    if not domains:
        return []

    report = CbReport(**{
        'iocs': {
            "dns": domains,
        },
        'timestamp': int(time.mktime(time.gmtime())),
        'link': "https://ransomwaretracker.abuse.ch/downloads/RW_DOMBL.txt",
        'id': 'abusech-ransomware',
        'title': 'abuse.ch Ransomware on domain blocklist',
        'score': 100,
    })
    return [report]
def get_zeus():
    """Fetch the abuse.ch Zeus standard domain blocklist as feed reports.

    Returns a one-element list holding a CbReport whose 'dns' IOCs are
    the blocklisted domains, or an empty list when nothing parsed.
    """
    reports = []
    r = requests.get(
        "https://zeustracker.abuse.ch/blocklist.php?download=domainblocklist")
    lines = r.text.split("\n")
    domains = []
    for line in lines:
        # Skip short lines and "#" comment lines.
        if len(line) < 3:
            continue
        if line[0] == "#":
            continue
        domains.append(line.strip())
    fields = {
        'iocs': {
            "dns": domains,
        },
        'timestamp': int(time.mktime(time.gmtime())),
        'link': "https://zeustracker.abuse.ch/blocklist.php?download=domainblocklist",
        'id': 'abusech-zeus',
        'title': 'abuse.ch Zeus hit on Standard domain blocklist',
        'score': 100,
    }
    # FIX: only emit a report when the blocklist actually yielded domains,
    # consistent with get_ransomware(); a report with an empty IOC list is
    # not useful to the feed consumer.
    if domains:
        reports.append(CbReport(**fields))
    return reports
def build_reports(nodes):
    # TODO - this is one "report" per TOR node IP. Not ideal.
    """Build one CbReport per unique TOR node IP in *nodes*.

    Duplicate IPs are reported only once (CBAPI-22).
    """
    seen_ips = set()
    reports = []
    for node in nodes:
        ip = node['ip']
        # avoid duplicated reports
        # CBAPI-22
        if ip in seen_ips:
            continue
        seen_ips.add(ip)

        title = ("%s has been a TOR %s node since %s and was last seen %s "
                 "on port %s. Contact: %s") % (ip, node['type'],
                                               node['firstseen'],
                                               node['lastseen'],
                                               node['port'], node['contact'])
        reports.append(CbReport(
            iocs={'ipv4': [ip]},
            score=0,
            timestamp=int(time.mktime(time.gmtime())),
            link='http://www.torproject.org',
            id="TOR-Node-%s" % ip,
            title=title,
        ))
    return reports
def reports_from_csv(lines): """ takes a file-like object that is full list of CSV data from from malwaredomainlist. creates a report per line """ reports = [] try: for line in unicode_csv_reader(lines): if len(line)== 0: continue try: rawdate, url, ip, reverse_lookup, desc, registrant, asn, _, _ = line #rawdate 2013/10/27_03:06 report_date = time.strptime(rawdate, "%Y/%m/%d_%H:%M") # skip any report older than DAYS_BACK report_datetime = datetime.fromtimestamp(time.mktime(report_date)) start = datetime.now() - timedelta(days=DAYS_BACK) if report_datetime < start: continue #url www.slivki.com.ua/as/Ponynl.exe url = urlparse.urlsplit("http://%s" % url) host = url.netloc if ":" in host: host = host.split(":", 1)[0] if len(host) <= 3: print "WARNING: no domain, skipping %s" % line continue fields = {'iocs': { "dns": [host], }, 'timestamp': int(time.mktime(report_date)), 'link': "http://www.malwaredomainlist.com/mdl.php", 'id': 'MDL-%s-%s' % (time.strftime("%Y%m%d-%H%M", report_date), url.netloc), 'title': '%s found on malware domain list: "%s"' % (url.netloc, desc) + ' IP (reverse lookup) at the time: %s (%s)' % (ip, reverse_lookup), 'score': 50, } reports.append(CbReport(**fields)) except Exception, err: print "WARNING: error parsing %s\n%s" % (line, err) continue except Exception, err: print err print line
def build_reports(nodes):
    # TODO - this is one "report" per TOR node IP. Not ideal.
    """Create one CbReport for every TOR exit-node entry in *nodes*."""
    reports = []
    for node in nodes:
        ip = node['ip']
        title = ("%s has been a TOR exit node since %s and was last seen %s "
                 "on port %s. Contact: %s") % (ip, node['firstseen'],
                                               node['lastseen'],
                                               node['port'], node['contact'])
        reports.append(CbReport(
            iocs={'ipv4': [ip]},
            score=0,
            timestamp=int(time.mktime(time.gmtime())),
            link='http://www.torproject.org',
            id="TOR-Node-%s" % ip,
            title=title,
        ))
    return reports
def build_report(fname):
    """
    parse the provided STIX package and create a CB Feed Report that
    includes all suitable observables as CB IOCs

    Returns a CbReport, or None when the package yields no observables.
    """
    # The python STIX libs are pedantic about document versions. See
    # https://github.com/STIXProject/python-stix/issues/124
    # parser = EntityParser()
    # pkg = parser.parse_xml(fname, check_version=False)
    pkg = STIXPackage.from_xml(fname)

    # Gather IOCs from the package-level observables plus each indicator.
    iocs = {}
    if pkg.observables:
        iocs = parse_observables(pkg.observables.observables)
    if pkg.indicators:
        for indicator in pkg.indicators:
            iocs = merge(iocs, parse_observables(indicator.observables))

    # Prefer the package's own timestamp; fall back to "now" (UTC).
    ts = int(time.mktime(pkg.timestamp.timetuple())) if pkg.timestamp else int(
        time.mktime(time.gmtime()))

    fields = {
        'iocs': iocs,
        'score': 100,  # does STIX have a severity field?
        'timestamp': ts,
        'link': 'http://stix.mitre.org',
        'id': pkg.id_,
        'title': pkg.stix_header.title,
    }

    total = sum(len(iocs[k]) for k in iocs)
    if total == 0:
        print(
            "-> No suitable observables found in {0}; skipping.".format(fname))
        return None

    # FIX: the original mixed a %-style "%s" placeholder with str.format,
    # so the literal "%s" was printed, the count landed where the filename
    # belonged, and fname was dropped entirely.
    print("-> Including {0} observables from {1}.".format(total, fname))
    return CbReport(**fields)
'iocs': {}, 'timestamp': int(time.mktime(time.gmtime())), 'link': options.url, 'title': options.report, 'id': gen_report_id(ips + domains + md5s), 'score': 100 } if len(ips) > 0: fields['iocs']['ipv4'] = ips if len(domains) > 0: fields['iocs']['dns'] = domains if len(md5s) > 0: fields['iocs']['md5'] = md5s reports.append(CbReport(**fields)) return reports def create_feed(options): # generate the required feed information fields # based on command-line arguments # feedinfo = { 'name': options.name, 'display_name': options.display_name, 'provider_url': options.url, 'summary': options.summary, 'tech_data': options.techdata
def build_reports(options):
    """Build a single CbReport from a flat file of IOCs, one per line.

    Each non-empty line of options.ioc_filename is classified as an ipv4
    address, an md5 hash, or (trivially) a dns name, and the results are
    collected into one report.

    Returns a one-element list of CbReport objects.
    """
    reports = []

    ips = []
    domains = []
    md5s = []

    # read all of the lines (of text) from the provided
    # input file (of IOCs)
    # FIX: use a context manager so the file handle is always closed
    # (the original leaked the handle from open().readlines()).
    with open(options.ioc_filename) as ioc_file:
        raw_iocs = ioc_file.readlines()

    # iterate over each of the lines
    # attempt to determine if each line is a suitable
    # ipv4 address, dns name, or md5
    #
    for raw_ioc in raw_iocs:
        # strip off any leading or trailing whitespace
        # skip any empty lines
        #
        raw_ioc = raw_ioc.strip()
        if len(raw_ioc) == 0:
            continue

        try:
            # attempt to parse the line as an ipv4 address
            # NOTE(review): inet_aton also accepts short forms like "1.2.3";
            # assumed acceptable for this feed -- confirm if strictness matters.
            socket.inet_aton(raw_ioc)

            # parsed as an ipv4 address!
            ips.append(raw_ioc)

        except Exception:
            # attempt to parse the line as a md5 (32 hex chars) and, if
            # that fails, as a domain. use trivial parsing
            #
            if 32 == len(raw_ioc) and \
                    re.findall(r"([a-fA-F\d]{32})", raw_ioc):
                md5s.append(raw_ioc)
            elif -1 != raw_ioc.find("."):
                domains.append(raw_ioc)

    fields = {
        'iocs': {},
        'timestamp': int(time.mktime(time.gmtime())),
        'link': options.url,
        'title': options.report,
        'id': gen_report_id(ips + domains + md5s),
        'score': 100
    }

    # optional report metadata supplied on the command line
    if options.tags is not None:
        fields['tags'] = options.tags.split(',')

    if options.description is not None:
        fields['description'] = options.description

    # only include IOC categories that are non-empty
    if len(ips) > 0:
        fields['iocs']['ipv4'] = ips
    if len(domains) > 0:
        fields['iocs']['dns'] = domains
    if len(md5s) > 0:
        fields['iocs']['md5'] = md5s

    reports.append(CbReport(**fields))
    return reports
def generate_reports(raw, api):
    """
    generate the reports data as a list of dictionaries. each list entry
    corresponds to a single report, which is a single report in the case
    of iSight.
    """
    reports = []
    for report_id in raw.keys():
        details = raw[report_id]

        # collect the IOCs for this report, creating each category list
        # on first use
        iocs = {}
        for md5 in details["md5"]:
            iocs.setdefault("md5", []).append(md5)

        # @todo uncomment this block to support ips
        #
        # for rawip in details["ipaddr"]:
        #     iocs.setdefault("ipv4", []).append(rawip)

        for domain in details["domain"]:
            iocs.setdefault("dns", []).append(domain)

        # if we ended up with no IOCs for this report, just skip it.
        #
        if not iocs:
            continue

        # the score or severity is not provided as part of the iSight
        # report enumeration (their "i_and_w" or "indications and warnings"
        # api. instead, we must retreive the report in XML format, parse the
        # report, and look for the criticality.
        #
        # Some iSIGHT reports have NO criticality rating.
        # For lack of clear obvious next steps, simply report the score as
        # 75 -- "medium high"
        #
        entry = {
            "id": report_id,
            "title": details["title"],
            "link": "https://mysight.isightpartners.com/report/full/%s" % (
                report_id),
            "timestamp": details["report_timestamp"],
            "iocs": iocs,
            "score": retrieve_report_score(report_id, api, 75),
        }
        reports.append(CbReport(**entry))
    return reports