def test_fields_minimum(self):
    """A report built from only the minimum required fields validates cleanly."""
    report = CbReport(**self.core())
    report.validate()
def test_fields_all(self):
    """A report carrying the optional fields as well still validates cleanly."""
    data = self.core()
    data.update(description="The Decription", tags=["md5"])
    report = CbReport(**data)
    report.validate()
def test_fields_with_ipv4(self):
    """A well-formed ipv4 IOC is accepted by validation."""
    data = self.core(iocs={'ipv4': ["12.34.56.78"]})
    data['description'] = "The Decription"
    data['tags'] = ["ipv4"]
    CbReport(**data).validate()
def test_fields_with_malformed_ipv4(self):
    """Ensure an invalid ipv4 IOC is rejected with a descriptive error."""
    iocs = {'ipv4': ["Bogus"]}
    data = self.core(iocs=iocs)
    data['description'] = "The Decription"
    with self.assertRaises(CbInvalidReport) as err:
        rpt = CbReport(**data)
        rpt.validate()
    # self.assertIn survives `python -O` (a bare `assert` is stripped) and
    # produces a clearer failure message on mismatch.
    self.assertIn("Malformed IPv4 addr", "{0}".format(err.exception.args[0]))
def test_fields_with_short_sha256(self):
    """Ensure a too-short sha256 IOC is rejected with a descriptive error."""
    iocs = {'sha256': ["11111111112222222222"]}
    data = self.core(iocs=iocs)
    data['description'] = "The Decription"
    with self.assertRaises(CbInvalidReport) as err:
        rpt = CbReport(**data)
        rpt.validate()
    # self.assertIn survives `python -O` (a bare `assert` is stripped) and
    # produces a clearer failure message on mismatch.
    self.assertIn("Invalid sha256 length", "{0}".format(err.exception.args[0]))
def test_fields_with_long_md5(self):
    """Ensure a too-long md5 IOC is rejected with a descriptive error."""
    iocs = {'md5': ["1111111111222222222233333333334444444444"]}
    data = self.core(iocs=iocs)
    data['description'] = "The Decription"
    with self.assertRaises(CbInvalidReport) as err:
        rpt = CbReport(**data)
        rpt.validate()
    # self.assertIn survives `python -O` (a bare `assert` is stripped) and
    # produces a clearer failure message on mismatch.
    self.assertIn("Invalid md5 length", "{0}".format(err.exception.args[0]))
def test_fields_with_query_bogus_query(self):
    """Ensure a query IOC whose search_query lacks `q=` is rejected."""
    iocs = {'query': [{'index_type': "events", 'search_query': "BOGUS"}]}
    data = self.core(iocs=iocs)
    data['description'] = "The Decription"
    data['tags'] = ["query"]
    with self.assertRaises(CbInvalidReport) as err:
        rpt = CbReport(**data)
        rpt.validate()
    # self.assertIn survives `python -O` (a bare `assert` is stripped) and
    # produces a clearer failure message on mismatch.
    self.assertIn("Query IOC for report RepId1 missing q= on query",
                  "{0}".format(err.exception.args[0]))
def test_fields_with_query_missing_index_type(self):
    """Ensure a query IOC missing its index_type is rejected."""
    iocs = {'query': [{'search_query': "cb.q.commandline=foo.txt"}]}
    data = self.core(iocs=iocs)
    data['description'] = "The Decription"
    data['tags'] = ["query"]
    with self.assertRaises(CbInvalidReport) as err:
        rpt = CbReport(**data)
        rpt.validate()
    # self.assertIn survives `python -O` (a bare `assert` is stripped) and
    # produces a clearer failure message on mismatch.
    self.assertIn("Query IOC section for report 'RepId1' missing index_type",
                  "{0}".format(err.exception.args[0]))
def test_fields_with_sha256(self):
    """A well-formed sha256 IOC is accepted by validation."""
    digest = "0000000000111111111122222222223333333333444444444455555555556666"
    data = self.core(iocs={'sha256': [digest]})
    data['description'] = "The Decription"
    data['tags'] = ["sha256"]
    report = CbReport(**data)
    report.validate()
def test_fields_with_query(self):
    """A well-formed query IOC is accepted by validation."""
    query_ioc = {
        'index_type': "events",
        'search_query': "cb.q.commandline=foo.txt",
    }
    data = self.core(iocs={'query': [query_ioc]})
    data['description'] = "The Decription"
    data['tags'] = ["query"]
    report = CbReport(**data)
    report.validate()
def generate_feed_from_db() -> None:
    """
    Build a Carbon Black feed from every positively-scored binary detonation
    result in the database and write it to the configured output file.
    """
    query = BinaryDetonationResult.select().where(BinaryDetonationResult.score > 0)

    # BUG FIX: the original used int(time.mktime(time.gmtime())). mktime()
    # expects a *local* time struct, so feeding it gmtime() skews the epoch
    # value by the local UTC offset. time.time() already is the UTC epoch.
    # Also hoisted out of the loop — one consistent timestamp per feed run.
    now = int(time.time())

    reports = [
        CbReport(
            iocs={"md5": [binary.md5]},
            score=binary.score,
            timestamp=now,
            link="",
            id="binary_{0}".format(binary.md5),
            title=binary.last_success_msg,
            description=binary.last_success_msg,
        )
        for binary in query
    ]

    feedinfo = CbFeedInfo(
        name="yara",
        display_name="Yara",
        provider_url="http://plusvic.github.io/yara/",
        summary="Scan binaries collected by Carbon Black with Yara.",
        tech_data="There are no requirements to share any data with Carbon Black to use this feed.",
        icon="./yara-logo.png",
        category="Connectors",
    )
    feed = CbFeed(feedinfo, reports)

    with open(globals.g_output_file, "w") as fp:
        fp.write(feed.dump())
def test_fields_with_query_invalid_index_type(self):
    """Ensure a query IOC with an unknown index_type is rejected."""
    iocs = {
        'query': [{
            'index_type': "BOGUS",
            'search_query': "cb.q.commandline=foo.txt"
        }]
    }
    data = self.core(iocs=iocs)
    data['description'] = "The Decription"
    data['tags'] = ["query"]
    with self.assertRaises(CbInvalidReport) as err:
        rpt = CbReport(**data)
        rpt.validate()
    # self.assertIn survives `python -O` (a bare `assert` is stripped) and
    # produces a clearer failure message on mismatch.
    self.assertIn("Report IOCs section for 'query' contains invalid index_type: BOGUS",
                  "{0}".format(err.exception.args[0]))
def test_fields_all_required_only(self):
    """Ensure pedantic validation rejects non-required keys (here 'description')."""
    data = self.core()
    data['description'] = "The Decription"
    data['tags'] = ["md5"]
    rpt = CbReport(**data)
    # Default validation accepts optional keys ...
    rpt.validate()
    # ... but pedantic validation must flag them.
    with self.assertRaises(CbInvalidReport) as err:
        rpt.validate(pedantic=True)
    # self.assertIn survives `python -O` (a bare `assert` is stripped) and
    # produces a clearer failure message on mismatch.
    self.assertIn("Report contains non-required key 'description'",
                  "{0}".format(err.exception.args[0]))
def generate_feed_from_threatconnect(self):
    """
    Pull indicators from the ThreatConnect API and incrementally build a
    Carbon Black feed, rewriting self.out_file as reports accumulate.

    This method is intentionally long-lived: with large orgs it can run for
    hours to days, writing an updated feed dump after every accepted report.
    """
    first = True  # NOTE(review): never read again — appears to be dead
    reports = []
    feedinfo = {
        'name': 'threatconnect',
        'display_name': "ThreatConnect",
        'provider_url': "http://www.threatconnect.com",
        'summary': "Sends threat intelligence from Threatconnect platform to Carbon Black Response",
        'tech_data': "There are no requirements to share any data with Carbon Black to use this feed.",
        'icon': 'threatconnect-logo.png',
        'category': "Connectors",
    }
    feedinfo = CbFeedInfo(**feedinfo)
    self.feed = CbFeed(feedinfo, reports)
    # Write an initial (empty-report) feed so the file exists immediately.
    created_feed = self.feed.dump(validate=False, indent=0)
    with open(self.out_file, 'w') as fp:
        fp.write(created_feed)
        fp.seek(0)
        offset = len(created_feed) - 1  # NOTE(review): never used afterwards
        # create an Indicators object per (source, ioc_type) pair
        for source in self.sources:
            for t in self.ioc_types:
                indicators = self.tcapi.indicators()
                filter1 = indicators.add_filter()
                # filter1.add_owner(source)
                filter1.add_pf_type(t, FilterOperator.EQ)
                if self.ioc_min is not None:
                    # Only keep indicators rated at or above the configured floor.
                    filter1.add_pf_rating(self.ioc_min, FilterOperator.GE)
                try:
                    # retrieve Indicators
                    indicators.retrieve()
                except RuntimeError as e:
                    # Best-effort: log the failure and fall through with
                    # whatever (possibly empty) indicator set we have.
                    print('Error: {0}'.format(e))
                logger.info("Number of indicators:{0}".format(
                    len(indicators)))
                for index, indicator in enumerate(indicators):
                    # NOTE(review): `>` allows max_iocs + 1 items through —
                    # confirm whether the cap is meant to be inclusive.
                    if index > self.max_iocs:
                        logger.info("Max number of IOCs reached")
                        break
                    # print (indicator.type)
                    # ThreatConnect ratings are 0-5; scale to Cb's 0-100 score.
                    score = indicator.rating * 20 if indicator.rating is not None else 0  # int(row.get('rating', 0)) * 20
                    # Many entries are missing a description so I placed this here to default them
                    # to the IOC value in the absence of a description.
                    title = indicator.description if indicator.description is not None else "{0}-{1}".format(
                        source, indicator.id)  # row.get('description', None)
                    # if not title:
                    #     title = row.get('summary')
                    fields = {
                        'iocs': {},
                        'id': str(indicator.id),
                        'link': indicator.weblink,
                        'title': title,
                        'score': int(score),
                        # date_added is an ISO-8601 UTC string, e.g.
                        # 2020-01-01T00:00:00Z; converted to epoch seconds.
                        'timestamp': int(
                            datetime.strptime(
                                indicator.date_added,
                                "%Y-%m-%dT%H:%M:%SZ").timestamp()),
                    }
                    # The next few lines map each ThreatConnect indicator type
                    # onto the Cb-supported IOC section of the report.
                    logger.debug("Indacator is {0}".format(indicator))
                    if indicator.type == "File":
                        # File indicators carry a dict of hashes; keep only
                        # the hash kinds that are actually present.
                        fields['iocs'] = {
                            k: [indicator.indicator[k]]
                            for k in indicator.indicator
                            if indicator.indicator[k] is not None
                        }
                    elif indicator.type == "Address":
                        fields['iocs']['ipv4'] = [indicator.indicator]
                    elif indicator.type == "Host":
                        fields['iocs']['dns'] = [indicator.indicator]
                    else:
                        # Anything else becomes a Cb process/module query IOC.
                        squery = urllib.parse.urlencode({
                            "cb.urlver": "1",
                            "q": indicator.indicator[self.custom_ioc_key]
                        })
                        fields['iocs']['query'] = [{
                            'index_type': 'modules',
                            'search_query': squery
                        }]
                    report = CbReport(**fields)
                    try:
                        # Validate by dumping; invalid reports are skipped.
                        report.dump(validate=True)
                    except:  # NOTE(review): bare except also swallows
                             # KeyboardInterrupt/SystemExit — consider
                             # narrowing to Exception.
                        logger.info("This query is not valid: {0}".format(
                            indicator.indicator[self.custom_ioc_key]))
                        continue
                    # APPEND EACH NEW REPORT ONTO THE LIST IN THE JSON FEED
                    # THIS METHOD IS VERY LONG LIVED
                    # THIS METHOD CALL WILL LAST FOR
                    # HOURS -> DAYS IN LARGE ORGS
                    reports.append(report)
                    self.feed = CbFeed(feedinfo, reports)
                    # NOTE(review): fp was seek(0)'d once before the loops but
                    # never re-seeked here, so successive dumps appear to be
                    # appended after the previous write position rather than
                    # overwriting the file — confirm intended file layout.
                    fp.write(self.feed.dump(validate=False, indent=0))