def prepare_subject_observable(id, subject):
    """Create a 'subject' observable on TheHive case *id* from an email subject.

    Exits the process (status 0) on any creation error other than 400
    (duplicate), matching the script's original control flow.
    """
    hive_address = ''.join(settings.stored_hive_address[0])
    hive_api = ''.join(settings.stored_api_key[0])
    # Define the connection to thehive installation (including the generated API key).
    api = TheHiveApi(hive_address, hive_api, None, {'http': '', 'https': ''})
    print(subject)
    print('Create subject observable')
    print('---------------------')
    domain = CaseObservable(dataType='subject',
                            data=[subject],
                            tlp=0,
                            ioc=False,
                            tags=['ExtractedSubject'],
                            message='Subject Extracted')
    response = api.create_case_observable(id, domain)
    if response.status_code == 201:
        # print(json.dumps(response.json(), indent=4, sort_keys=True))
        print(str(datetime.datetime.now()) + " Observable succesfully created.")
    # BUG FIX: was `response.status.code` (AttributeError) — the duplicate
    # branch could never execute and instead crashed the script.
    elif response.status_code == 400:
        print(str(datetime.datetime.now()) + " Subject Observable already exists")
    else:
        print(str(datetime.datetime.now()) + " Error creating Subject Observables.")
        print('ko: {}/{}'.format(response.status_code, response.text))
        sys.exit(0)
async def create_case_observable(self, case_id, url, api_key, data_type, data, description=None, tlp=0, is_ioc=False, is_sighted=False, tags=None):
    """Attach a single observable to TheHive case *case_id*.

    Returns the created observable as parsed JSON; raises IOError with the
    server's response text when the API does not answer 201.
    """
    self.logger.info(f'Creating observable for {case_id} in TheHive...')
    # Default the URL scheme to plain HTTP when none is supplied.
    endpoint = url if url.startswith("http") else f"http://{url}"
    client = TheHiveApi(endpoint, api_key)
    observable = CaseObservable(
        dataType=data_type,
        message=description,
        tlp=tlp,
        tags=[] if tags is None else tags,
        ioc=is_ioc,
        sighted=is_sighted,
        data=data,
    )
    response = client.create_case_observable(case_id, observable)
    if response.status_code != 201:
        raise IOError(response.text)
    return response.json()
class Cortex(object):
    """Polls Cortex job artifacts and copies them onto the owning TheHive case.

    BUG FIX: the original passed the raw tokens `<CORTEX KEY>` / `<HIVE KEY>`
    as Python expressions, which is a syntax error. The deployment-specific
    values are now string placeholders — replace them before use.
    """

    # Placeholders — substitute real deployment values.
    CORTEX_URL = 'http://<CORTEX URL>'
    CORTEX_KEY = '<CORTEX KEY>'
    HIVE_URL = '<HIVE URL>'
    HIVE_KEY = '<HIVE KEY>'

    def __init__(self):
        # Local Elasticsearch backing TheHive (used to resolve job -> case).
        self.es = Elasticsearch(['127.0.0.1'], port=9200)
        self.api = Api(self.CORTEX_URL, self.CORTEX_KEY)
        self.hive_api = TheHiveApi(self.HIVE_URL, self.HIVE_KEY, cert=False)

    def parseArtifacts(self, cortexJobId, jobId):
        """Copy every artifact of a finished Cortex job onto its TheHive case.

        Looks up the case via the job document in the `the_hive_13` index
        (the job's `rootId` is the case id).
        """
        artifacts = self.api.jobs.get_artifacts(cortexJobId)
        if artifacts:
            caseId = self.es.search(
                index='the_hive_13',
                body={'query': {'match': {'objectId': jobId, }}},
            )['hits']['hits'][0]['_source']['rootId']
            for a in artifacts:
                ob = CaseObservable(dataType=a.dataType, data=a.data)
                self.hive_api.create_case_observable(caseId, ob)
                # Throttle to avoid hammering TheHive with rapid-fire creates.
                time.sleep(1)
def prepare_url_observable(id, url_array):
    """Create one 'url' observable on TheHive case *id* per entry of *url_array*.

    Exits the process (status 0) on any creation error other than 400
    (observable already exists).
    """
    hive_address = ''.join(settings.stored_hive_address[0])
    hive_api = ''.join(settings.stored_api_key[0])
    # We will need to run through the arrays to extract the values.
    # Define the connection to thehive installation (including the generated API key).
    api = TheHiveApi(hive_address, hive_api, None, {'http': '', 'https': ''})
    for urladdress in url_array:
        print(str(datetime.datetime.now()) + " Creating url observable:" + urladdress)
        observable = CaseObservable(
            dataType='url',
            data=[urladdress],
            tlp=0,
            ioc=False,
            tags=['ExtractedUrls'],
            message='Urls Extracted',
        )
        response = api.create_case_observable(id, observable)
        status = response.status_code
        if status == 201:
            # print(json.dumps(response.json(), indent=4, sort_keys=True))
            print(str(datetime.datetime.now()) + " Observable succesfully created.")
        elif status == 400:
            print(str(datetime.datetime.now()) + " URL Observable already exists")
        else:
            print(str(datetime.datetime.now()) + " Error creating URL Observables.")
            print('ko: {}/{}'.format(response.status_code, response.text))
            sys.exit(0)
def hiveupdate(thehive_api_url, thehive_api_password, thehive_caseId, thehive_observableId, artifactList, sortedList, dt):
    """Create one observable per artifact and refresh the triggering observable.

    :param thehive_caseId: case that receives the new observables
    :param thehive_observableId: the observable the responder was run on
    :param artifactList: hostname/IP values to create as observables
    :param sortedList: DHCP event documents (ECS-shaped dicts)
    :param dt: observable dataType — 'hostname' or 'ip'
    """
    api = TheHiveApi(thehive_api_url, thehive_api_password, cert=DHCPConf.ssl_cert_path)
    curlMsgString = ''
    testString = ''
    # Build the message-String for the original observable (the one that the
    # responder was run on): one header per distinct host/IP, then one event
    # line per entry.
    for entry in sortedList:
        if dt == 'hostname':
            testString = str(entry['host']['name'] + ':')
        elif dt == 'ip':
            testString = str(entry['source']['ip'] + ':')
        if testString not in curlMsgString:
            curlMsgString += '{0} \n\n'.format(testString)
        curlMsgString += ' {0}'.format(DHCPConf.msgStrBuilder(
            str(entry['event']['action']), str(entry['source']['ip']),
            str(entry['host']['hostname']), str(entry['@timestamp'])))
    # Build message-String, and create a new observable per entry in artifactList
    for artifact in artifactList:
        msgString = ''
        for entry in sortedList:
            if dt == 'hostname':
                testString = str(entry['host']['name'])
            elif dt == 'ip':
                # BUG FIX: previously this branch also read entry['host']['name'],
                # so IP artifacts never matched any event; mirror the first loop
                # and compare against the source IP.
                testString = str(entry['source']['ip'])
            if testString == artifact:
                msgString += DHCPConf.msgStrBuilder(
                    str(entry['event']['action']), str(entry['source']['ip']),
                    str(entry['host']['hostname']), str(entry['@timestamp']))
        domain = CaseObservable(dataType=dt, data=str(artifact), tlp=DHCPConf.defaultTlp,
                                ioc=DHCPConf.defaultIoc, tags=DHCPConf.defaultTags,
                                message=msgString)
        response = api.create_case_observable(thehive_caseId, domain)
    # Because it is not possible to edit existing observables through TheHive4py,
    # We edit the original observable (the one that started the DHCP_Responder),
    # through a HTTP.Patch call:
    headers = {'Authorization': 'Bearer {0}'.format(thehive_api_password)}
    data = {'message': curlMsgString}
    urlString = '{0}/api/case/artifact/{1}'.format(thehive_api_url, thehive_observableId)
    response = requests.patch(urlString, headers=headers, data=data, verify=DHCPConf.ssl_cert_path)
class THApi:
    """Extract IOCs from a file with `iocp`, open a TheHive case, and attach
    each IOC as a case observable."""

    def __init__(self, filepath):
        self.api = TheHiveApi('http://THE_HIVE_IP:PORT', 'API_KEY')
        self.case_id = None
        self.osservable_data = []
        self.filepath = filepath
        # File extension drives iocp's `-i` input-type flag.
        self.filetype = filepath.split(".")[-1]
        self.filename = os.path.basename(filepath)

    def run_iocp(self):
        """Run `iocp` on the file and collect its CSV output rows."""
        proc = subprocess.Popen(['iocp', '-i', self.filetype, self.filepath],
                                stdout=subprocess.PIPE)
        for raw in proc.stdout.readlines():
            # NOTE(review): the guard tests the whole stripped line, not each
            # field — preserved from the original behavior.
            fields = [field for field in raw.strip().split(',') if raw.strip() != '']
            self.osservable_data.append(fields)

    def create_osservables(self):
        """Create one case observable per extracted IOC row."""
        for row in self.osservable_data:
            # Row layout: [.., .., ioc-kind, value]; `exchange` (defined
            # elsewhere in the module) maps iocp kinds to TheHive dataTypes.
            obs = CaseObservable(dataType=exchange[row[2]], tlp=1, ioc=True,
                                 tags=['thehive4py'], data=row[3])
            result = self.api.create_case_observable(self.case_id, obs)
            if result.status_code == 201:
                print(json.dumps(result.json(), indent=4, sort_keys=True))
                print('')
            else:
                print('ko: {}/{}'.format(result.status_code, result.text))
            print("adding OSSERVABLE", row[2], "-", row[3], "to", self.case_id)

    def create_case(self):
        """Open a new case named after the file; remember its id (or None)."""
        new_case = Case(title='From TheHive4Py', tlp=3, flag=True,
                        tags=['TheHive4Py', 'sample'],
                        description=self.filename)
        result = self.api.create_case(new_case)
        self.case_id = result.json()['id'] if result.status_code == 201 else None
class TheHive(AppBase):
    """
    Shuffle/Walkoff app wrapping TheHive REST API via thehive4py.

    Every action receives the TheHive `url` and `apikey` explicitly and builds
    a fresh `TheHiveApi` client with TLS verification disabled (cert=False).
    """

    __version__ = "1.0.0"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """
        super().__init__(redis, logger, console_logger)

    @staticmethod
    def _parse_tags(tags):
        """Split a comma-separated tag string into a list ([] for falsy input).

        Extracted from the identical inline logic previously duplicated in
        add_observable, create_case and create_alert.
        """
        if not tags:
            return []
        if ", " in tags:
            return tags.split(", ")
        if "," in tags:
            return tags.split(",")
        return [tags]

    async def search_cases(self, apikey, url, title_query):
        """Find cases whose title contains *title_query*; returns raw JSON text."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        response = self.thehive.find_cases(
            query=ContainsString("title", title_query), range="all", sort=[])
        return response.text

    async def search_query(self, apikey, url, search_for, custom_query):
        """Run a caller-supplied raw JSON query against alerts or cases.

        :raises IOError: on invalid JSON input or a non-200 API answer.
        """
        self.thehive = TheHiveApi(url, apikey, cert=False)
        try:
            query = json.loads(custom_query)
        except Exception:
            # Any parse failure means bad user input; surface a clear error.
            raise IOError("Invalid JSON payload received.")
        if search_for == "alert":
            response = self.thehive.find_alerts(query=query, range="all", sort=[])
        else:
            response = self.thehive.find_cases(query=query, range="all", sort=[])
        if response.status_code == 200:
            return response.text
        raise IOError(response.text)

    async def add_observable(self, apikey, url, case_id, data, datatype, tags):
        """Attach a single observable to a case; returns the API response text.

        NOTE(review): the parsed *tags* are currently discarded and the
        hard-coded ["Shuffle"] tag is sent instead — preserved as-is; confirm
        intent before passing the user's tags through.
        """
        self.thehive = TheHiveApi(url, apikey, cert=False)
        tags = self._parse_tags(tags)
        item = thehive4py.models.CaseObservable(
            dataType=datatype,
            data=data,
            tlp=1,
            ioc=False,
            sighted=False,
            tags=["Shuffle"],
            message="Created by shuffle",
        )
        return self.thehive.create_case_observable(case_id, item).text

    async def search_alerts(self, apikey, url, title_query, search_range="0-25"):
        """Find alerts whose title contains *title_query*."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        # Could be "all" too
        if search_range == "":
            search_range = "0-25"
        response = self.thehive.find_alerts(
            query=ContainsString("title", title_query), range=search_range, sort=[])
        return response.text

    async def create_case(self, apikey, url, title, description="", tlp=1,
                          severity=1, tags=""):
        """Create a case. Validates tlp in 0-3 and severity in 0-2; returns the
        API response text, or a human-readable validation error string."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        tags = self._parse_tags(tags)
        # Wutface fix
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1
        if isinstance(tlp, str):
            if not tlp.isdigit():
                # Message aligned with the 0-3 range actually enforced below.
                return "TLP needs to be a number from 0-3, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                # BUG FIX: previously interpolated the TLP value here.
                return "Severity needs to be a number from 0-2, not %s" % severity
            severity = int(severity)
        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 2 or severity < 0:
            # BUG FIX: previously interpolated the TLP value here.
            return "Severity needs to be a number from 0-2, not %d" % severity
        case = thehive4py.models.Case(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
        )
        try:
            ret = self.thehive.create_case(case)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert(self, apikey, url, type, source, sourceref, title,
                           description="", tlp=1, severity=1, tags=""):
        """Create an alert. Validates tlp in 0-3 and severity in 1-3; returns
        the API response text, or a human-readable validation error string."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        tags = self._parse_tags(tags)
        # Wutface fix
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1
        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-3, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 1-3, not %s" % severity
            severity = int(severity)
        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 3 or severity < 1:
            return "Severity needs to be a number from 1-3, not %d" % severity
        alert = thehive4py.models.Alert(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            type=type,
            source=source,
            sourceRef=sourceref,
        )
        try:
            ret = self.thehive.create_alert(alert)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert_artifact(self, apikey, url, alert_id, dataType, data,
                                    message=None, tlp="2", ioc="False",
                                    sighted="False", ignoreSimilarity="False",
                                    tags=None):
        """Add an artifact to an alert (TheHive 4 API).

        Boolean-ish parameters arrive as strings from the workflow engine and
        are coerced here; tags are comma-split with whitespace stripped.
        :raises ConnectionError: when the API answers with a >299 status.
        """
        self.thehive = TheHiveApi(url, apikey, cert=False, version=4)
        tlp = int(tlp) if tlp else 2
        ioc = ioc.lower().strip() == "true"
        sighted = sighted.lower().strip() == "true"
        ignoreSimilarity = ignoreSimilarity.lower().strip() == "true"
        tags = [x.strip() for x in tags.split(",")] if tags else []
        alert_artifact = thehive4py.models.AlertArtifact(
            dataType=dataType,
            data=data,
            message=message,
            tlp=tlp,
            ioc=ioc,
            sighted=sighted,
            ignoreSimilarity=ignoreSimilarity,
            tags=tags,
        )
        try:
            ret = self.thehive.create_alert_artifact(alert_id, alert_artifact)
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e
        if ret.status_code > 299:
            raise ConnectionError(ret.text)
        return ret.text

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, apikey, url, field_type, cur_id):
        """Fetch a single TheHive entity selected by *field_type*."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        ret = ""
        if field_type.lower() == "alert":
            ret = self.thehive.get_alert(cur_id + "?similarity=1")
        elif field_type.lower() == "case":
            ret = self.thehive.get_case(cur_id)
        elif field_type.lower() == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif field_type.lower() == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif field_type.lower() == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type.lower() == "case_template":
            # NOTE(review): calls get_case_tasks — looks like a copy-paste of
            # the branch above; preserved as-is, confirm against thehive4py.
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type.lower() == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif field_type.lower() == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif field_type.lower() == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return (
                "%s is not implemented. See https://github.com/frikky/shuffle-apps for more info."
                % field_type)
        return ret.text

    async def close_alert(self, apikey, url, alert_id):
        """Mark an alert as read (TheHive's 'close' for alerts)."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        return self.thehive.mark_alert_as_read(alert_id).text

    async def reopen_alert(self, apikey, url, alert_id):
        """Mark an alert as unread again."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        return self.thehive.mark_alert_as_unread(alert_id).text

    async def create_case_from_alert(self, apikey, url, alert_id,
                                     case_template=None):
        """Promote an alert into a case, optionally from a case template."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        response = self.thehive.promote_alert_to_case(
            alert_id=alert_id, case_template=case_template)
        return response.text

    async def merge_alert_into_case(self, apikey, url, alert_id, case_id):
        """Merge an alert into an existing case via the raw REST endpoint."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        req = url + f"/api/alert/{alert_id}/merge/{case_id}"
        ret = requests.post(req, auth=self.thehive.auth)
        return ret.text

    # Not sure what the data should be
    async def update_field(self, apikey, url, field_type, cur_id, field, data):
        """Update one field of an alert via raw REST calls.

        NOTE(review): the get_alert lookup uses ``self.thehive`` set by a
        previous action — this fails if no prior action created the client.
        """
        # This is kinda silly but..
        if field_type.lower() == "alert":
            newdata = {}
            # A literal "%s" prefix means "append to the field's current value".
            if data.startswith("%s"):
                ticket = self.thehive.get_alert(cur_id)
                if ticket.status_code != 200:
                    pass  # NOTE(review): lookup failure is silently ignored
                newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
            else:
                newdata[field] = data
            # Bleh
            url = "%s/api/alert/%s" % (url, cur_id)
            if field == "status":
                if data == "New" or data == "Updated":
                    url = "%s/markAsUnread" % url
                elif data == "Ignored":
                    url = "%s/markAsRead" % url
                ret = requests.post(
                    url,
                    headers={
                        "Content-Type": "application/json",
                        "Authorization": "Bearer %s" % apikey,
                    },
                )
            else:
                ret = requests.patch(
                    url,
                    headers={
                        "Content-Type": "application/json",
                        "Authorization": "Bearer %s" % apikey,
                    },
                    json=newdata,
                )
            return str(ret.status_code)
        else:
            return (
                "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info."
                % field_type)

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def run_analyzer(self, apikey, url, cortex_id, analyzer_id,
                           artifact_id):
        """Launch a Cortex analyzer on an existing artifact."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        return self.thehive.run_analyzer(cortex_id, artifact_id,
                                         analyzer_id).text

    # Creates a task log in TheHive with file
    async def create_task_log(self, apikey, url, task_id, message,
                              filedata=None):
        """Append a log entry (optionally with a file attachment) to a task.

        BUG FIX: the default was a mutable ``{}``, which both leaked state and
        raised KeyError when the parameter was omitted; ``None`` plus ``.get``
        now skips cleanly instead.
        """
        if not filedata or filedata.get("success", False) == False:
            return "No file to upload. Skipping message."
        headers = {
            "Authorization": "Bearer %s" % apikey,
        }
        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }
        data = {"_json": """{"message": "%s"}""" % message}
        response = requests.post(
            "%s/api/case/task/%s/log" % (url, task_id),
            headers=headers,
            files=files,
            data=data,
        )
        return response.text

    # Creates an observable as a file in a case
    async def create_case_file_observable(self, apikey, url, case_id, tags,
                                          filedata):
        """Attach a file from workflow *filedata* to a case as a 'file' observable."""
        if not filedata or filedata.get("success", False) == False:
            return "No file to upload. Skipping message."
        headers = {
            "Authorization": "Bearer %s" % apikey,
        }
        tags = self._parse_tags(tags)
        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }
        outerarray = {"dataType": "file", "tags": tags}
        data = {"_json": """%s""" % json.dumps(outerarray)}
        response = requests.post(
            "%s/api/case/%s/artifact" % (url, case_id),
            headers=headers,
            files=files,
            data=data,
        )
        return response.text
def submitTheHive(message):
    '''
    Create a new case in TheHive based on the email
    Return 'TRUE' is successfully processed otherwise 'FALSE'
    '''
    # Decode email
    msg = email.message_from_bytes(message)  # gets full content of the email
    decode = email.header.decode_header(msg['From'])[0]
    fromField = str(decode[0])
    decode = email.header.decode_header(msg['Subject'])[0]
    subjectField = str(decode[0])
    if args.verbose:
        print("[INFO] From: %s Subject: %s" % (fromField, subjectField))
    attachments = []   # temp-file paths of extracted attachments
    observables = []   # observables found in any text part
    body = ''
    bodyMessage = ''   # concatenation of all text/plain parts
    # Walk every MIME part: collect observables from text parts, dump any
    # whitelisted attachment to a temp file.
    for part in msg.walk():
        if part.get_content_type() == "text/plain":
            body = part.get_payload(decode=True).decode()
            bodyMessage += body
            observables = searchObservables(
                body, observables
            )  # searches the body of the email for supplied observables
        elif part.get_content_type(
        ) == "text/html":  # if email is html based will search throuh html source code
            if args.verbose:
                print("[INFO] Searching for observable in HTML code")
            html = part.get_payload(decode=True).decode()
            observables = searchObservables(html, observables)
        elif part.get_content_type(
        ) == "application/vnd.ms-excel":  #ONLY WORKS FOR .CSV
            body = part.get_payload(decode=True).decode('UTF-8')
            observables = searchObservables(body, observables)
        else:
            # Extract MIME parts
            filename = part.get_filename()
            mimetype = part.get_content_type()
            if filename and mimetype:
                # Empty caseFiles config means "accept every mimetype".
                if mimetype in config['caseFiles'] or not config['caseFiles']:
                    print("[INFO] Found attachment: %s (%s)" %
                          (filename, mimetype))
                    # Decode the attachment and save it in a temporary file
                    # NOTE(review): charset is computed but never used below.
                    charset = part.get_content_charset()
                    if charset is None:
                        charset = chardet.detect(bytes(part))['encoding']
                    fd, path = tempfile.mkstemp(prefix=slugify(filename) + "_")
                    try:
                        with os.fdopen(fd, 'w+b') as tmp:
                            tmp.write(part.get_payload(decode=1))
                        attachments.append(path)
                    except OSError as e:
                        print("[ERROR] Cannot dump attachment to %s: %s" %
                              (path, e.errno))
                        return False
    api = TheHiveApi(config['thehiveURL'], config['thehiveUser'],
                     config['thehivePassword'], {
                         'http': '',
                         'https': ''
                     })
    # if '[ALERT]' in subjectField:
    # Alert path: the subject matches one of the configured alert keywords.
    if re.match(config['alertKeywords'], subjectField, flags=0):
        #
        # Add observables found in the mail body
        #
        artifacts = []
        if config['thehiveObservables'] and len(observables) > 0:
            print("t1")
            for o in observables:
                print("t2")
                artifacts.append(
                    AlertArtifact(dataType=o['type'], data=o['value']))
        #
        # Prepare tags - add alert keywords found to the list of tags
        #
        tags = config['alertTags']
        match = re.findall(config['alertKeywords'], subjectField)
        for m in match:
            tags.append(m)
        #
        # Prepare the alert
        #
        sourceRef = str(uuid.uuid4())[0:6]
        alert = Alert(
            title=subjectField.replace('[ALERT]', ''),
            tlp=int(
                config['alertTLP']
            ),  #setting it blank since custom template allows default color, set it back to tlp = int for conf value
            tags=tags,
            description=body,
            type='external',
            source=fromField,
            sourceRef=sourceRef,
            artifacts=artifacts)
        # Create the Alert
        id = None
        response = api.create_alert(alert)
        if response.status_code == 201:
            if args.verbose:
                print('[INFO] Created alert %s' %
                      response.json()['sourceRef'])
        else:
            print('[ERROR] Cannot create alert: %s (%s)' %
                  (response.status_code, response.text))
            return False
    else:
        # Case/alert-with-custom-fields path.
        # Prepare the sample case
        tasks = []
        for task in config['caseTasks']:
            tasks.append(CaseTask(title=task))
        # Prepare the custom fields
        customFields = CustomFieldHelper() \
            .add_string('from', fromField) \
            .add_string('attachment', str(attachments)) \
            .build()
        # If a case template is specified, use it instead of the tasks
        # NOTE(review): m is set to 1 right before the check, so this guard
        # is always true as written.
        m = 1
        if m == 1:
            templates = []
            for task in config['caseTemplates']:
                templates.append(task)
            temptouse = config['caseTemplate']
            # Template entries look like 'NAME-"DESCRIPTION"'; descrip pulls
            # the quoted part, name pulls the part before the dash-quote.
            descrip = re.compile('-"(.+)"')
            name = re.compile('(.+)-"')
            for x in templates:
                z = descrip.search(x)
                tempVar = name.search(x)
                searchVar = z.group(1)
                tempVar = tempVar.group(1)
                if searchVar in subjectField:
                    print(
                        x
                    )  #if 2 template names in subject, take the latest defined
                    temptouse = tempVar
            print("TEMPLATE", temptouse)
        # NOTE(review): testerVar is only assigned inside these branches; if
        # body is falsy and the subject lacks "Update", the comparison below
        # raises NameError — confirm whether that path can occur.
        if body:
            testerVar = False
            print("body")
            try:
                # Pull an optional Albert incident id out of the body and add
                # it as an extra custom field.
                albert = re.compile('Albert Incident #: (\d+)')
                m = albert.search(body)
                albertId = m.group(1)
                print(albertId)
                customFields = CustomFieldHelper() \
                    .add_string('from', fromField) \
                    .add_string('attachment', str(attachments)) \
                    .add_string('albertId', albertId) \
                    .build()
                print(customFields)
            except:
                print("albert id doesnt exist")
        if "Update" in subjectField:
            #update code
            testerVar = True
            print("UPDATE")
            #INTIAL
            try:
                # Body payload is delimited by two '---' markers.
                findBodyInfo = re.compile('---((?:.+[\r\n]+)+)---')
            except:
                print(
                    "Unable to update, unable to find two '---'s, exiting.."
                )
                sys.exit(0)
            m = findBodyInfo.search(body)
            bigGroup = m.group(1)
            caseId = parseBody("Case Id", bigGroup)
            print("caseid", caseId)
            try:
                caseId = int(caseId)
            except:
                print("invalid case id")
            updateACase(caseId, bigGroup, fromField, attachments)
        id = None
        # An update email stops here — no new case/alert is created.
        if testerVar == True:
            print("g")
            sys.exit(0)
        #end update code
        # Build case tags: 'always' tags plus tags whose description appears
        # in the accumulated plain-text body.
        caseTags = []
        for tag in config['caseTags']:
            descripFound = descrip.search(tag)
            nameFound = name.search(tag)
            descripFound = descripFound.group(1)
            nameFound = nameFound.group(1)
            if descripFound == 'always':
                caseTags.append(nameFound)
            elif descripFound in bodyMessage:
                caseTags.append(nameFound)
        try:
            #
            # Add observables found in the mail body
            #
            artifacts = []
            if config['thehiveObservables'] and len(observables) > 0:
                for o in observables:
                    artifacts.append(
                        AlertArtifact(dataType=o['type'], data=o['value']))
            #
            # Prepare tags - add alert keywords found to the list of tags
            #
            tags = config['alertTags']
            match = re.findall(config['alertKeywords'], subjectField)
            for m in match:
                tags.append(m)
            #
            # Prepare the alert
            #
            sourceRef = str(uuid.uuid4())[0:6]
            alert = Alert(
                title=subjectField,
                tlp=int(
                    config['alertTLP']
                ),  #setting it blank since custom template allows default color, set it back to tlp = int for conf value
                tags=caseTags,
                description=body,
                type='external',
                source=fromField,
                sourceRef=sourceRef,
                customFields=customFields,
                severity=None,
                artifacts=artifacts)
        except FileExistsError:
            print("Error with creating alert, wrong template name or tags?")
        else:
            print("")
        # Create the alert
        response = api.create_alert(alert)
        print("Alert being created..")
        if response.status_code == 201:
            newID = response.json()['id']
            if args.verbose:
                print('[INFO] Created alert %s' % response.json()['caseId'])
            # Attach each dumped attachment as a 'file' observable, deleting
            # the temp file on success.
            if len(attachments) > 0:
                for path in attachments:
                    observable = CaseObservable(
                        dataType='file',
                        data=[path],
                        tlp=int(config['caseTLP']),
                        ioc=False,
                        tags=config['caseTags'],
                        message='Found as email attachment')
                    response = api.create_case_observable(newID, observable)
                    if response.status_code == 201:
                        if args.verbose:
                            print('[INFO] Added observable %s to case ID %s'
                                  % (path, newID))
                        os.unlink(path)
                    else:
                        print('[WARNING] Cannot add observable: %s - %s (%s)'
                              % (path, response.status_code, response.text))
            #
            # Add observables found in the mail body
            #
            if config['thehiveObservables'] and len(observables) > 0:
                for o in observables:
                    observable = CaseObservable(
                        dataType=o['type'],
                        data=o['value'],
                        tlp=int(config['caseTLP']),
                        ioc=False,
                        tags=caseTags,  #switched to custom tags
                        message='Found in the email body')
                    response = api.create_case_observable(newID, observable)
                    if response.status_code == 201:
                        if args.verbose:
                            print(
                                '[INFO] Added observable %s: %s to case ID %s'
                                % (o['type'], o['value'], newID))
                    else:
                        print(
                            '[WARNING] Cannot add observable %s: %s - %s (%s)'
                            % (o['type'], o['value'], response.status_code,
                               response.text))
        else:
            print('[ERROR] Cannot create case: %s (%s)' %
                  (response.status_code, response.text))
            return False
    return True
class TheHive(AppBase):
    """
    Shuffle/Walkoff app wrapping TheHive REST API via thehive4py
    (older variant using String full-text queries).
    """

    __version__ = "1.0.0"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """
        super().__init__(redis, logger, console_logger)

    #async def run_analyzer(self, apikey, url, title_query):
    #    self.thehive = TheHiveApi(url, apikey)
    #    response = self.thehive.find_cases(query=String("title:'%s'" % title_query), range='all', sort=[])
    #    return response.text

    @staticmethod
    def _parse_tags(tags):
        """Split a comma-separated tag string into a list ([] for falsy input).

        BUG FIX: the original inline logic returned [] for a single tag with
        no comma, silently dropping it; a lone tag now becomes [tags].
        """
        if not tags:
            return []
        if ", " in tags:
            return tags.split(", ")
        if "," in tags:
            return tags.split(",")
        return [tags]

    async def search_cases(self, apikey, url, title_query):
        """Full-text search for cases matching *title_query* in the title."""
        self.thehive = TheHiveApi(url, apikey)
        response = self.thehive.find_cases(query=String("title:'%s'" %
                                                        title_query),
                                           range='all',
                                           sort=[])
        return response.text

    async def add_observable(self, apikey, url, case_id, data, datatype, tags):
        """Attach a single observable to a case; returns the API response text.

        NOTE(review): the parsed *tags* are currently discarded and the
        hard-coded ["Shuffle"] tag is sent instead — preserved as-is.
        """
        self.thehive = TheHiveApi(url, apikey)
        tags = self._parse_tags(tags)
        item = thehive4py.models.CaseObservable(
            dataType=datatype,
            data=data,
            tlp=1,
            ioc=False,
            sighted=False,
            tags=["Shuffle"],
            message="Created by shuffle",
        )
        return self.thehive.create_case_observable(case_id, item).text

    async def search_alerts(self, apikey, url, title_query,
                            search_range="0-25"):
        """Full-text search for alerts matching *title_query* in the title."""
        self.thehive = TheHiveApi(url, apikey)
        # Could be "all" too
        if search_range == "":
            search_range = "0-25"
        response = self.thehive.find_alerts(query=String("title:'%s'" %
                                                         title_query),
                                            range=search_range,
                                            sort=[])
        return response.text

    async def create_case(self, apikey, url, title, description="", tlp=1,
                          severity=1, tags=""):
        """Create a case. Validates tlp in 0-3 and severity in 0-2; returns the
        API response text, or a human-readable validation error string."""
        self.thehive = TheHiveApi(url, apikey)
        tags = self._parse_tags(tags)
        # Wutface fix
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1
        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-2, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                # BUG FIX: previously interpolated the TLP value here.
                return "Severity needs to be a number from 0-2, not %s" % severity
            severity = int(severity)
        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 2 or severity < 0:
            # BUG FIX: previously interpolated the TLP value here.
            return "Severity needs to be a number from 0-2, not %d" % severity
        case = thehive4py.models.Case(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
        )
        try:
            ret = self.thehive.create_case(case)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert(self, apikey, url, type, source, sourceref, title,
                           description="", tlp=1, severity=1, tags=""):
        """Create an alert. Validates tlp and severity in 0-2; returns the API
        response text, or a human-readable validation error string."""
        self.thehive = TheHiveApi(url, apikey)
        tags = self._parse_tags(tags)
        # Wutface fix
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1
        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-2, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                # BUG FIX: previously interpolated the TLP value here.
                return "Severity needs to be a number from 0-2, not %s" % severity
            severity = int(severity)
        if tlp > 2 or tlp < 0:
            return "TLP needs to be a number from 0-2, not %d" % tlp
        if severity > 2 or severity < 0:
            # BUG FIX: previously interpolated the TLP value here.
            return "Severity needs to be a number from 0-2, not %d" % severity
        alert = thehive4py.models.Alert(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            type=type,
            source=source,
            sourceRef=sourceref,
        )
        try:
            ret = self.thehive.create_alert(alert)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, apikey, url, field_type, cur_id):
        """Fetch a single TheHive entity selected by *field_type*."""
        self.thehive = TheHiveApi(url, apikey)
        ret = ""
        if field_type.lower() == "alert":
            ret = self.thehive.get_alert(cur_id + "?similarity=1")
        elif field_type.lower() == "case":
            ret = self.thehive.get_case(cur_id)
        elif field_type.lower() == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif field_type.lower() == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif field_type.lower() == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type.lower() == "case_template":
            # NOTE(review): calls get_case_tasks — looks like a copy-paste of
            # the branch above; preserved as-is, confirm against thehive4py.
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type.lower() == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif field_type.lower() == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif field_type.lower() == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return "%s is not implemented. See https://github.com/frikky/shuffle-apps for more info." % field_type
        return ret.text

    async def close_alert(self, apikey, url, alert_id):
        """Mark an alert as read (TheHive's 'close' for alerts)."""
        self.thehive = TheHiveApi(url, apikey)
        return self.thehive.mark_alert_as_read(alert_id).text

    async def reopen_alert(self, apikey, url, alert_id):
        """Mark an alert as unread again."""
        self.thehive = TheHiveApi(url, apikey)
        return self.thehive.mark_alert_as_unread(alert_id).text

    async def create_case_from_alert(self, apikey, url, alert_id,
                                     case_template=None):
        """Promote an alert into a case, optionally from a case template."""
        self.thehive = TheHiveApi(url, apikey)
        response = self.thehive.promote_alert_to_case(
            alert_id=alert_id, case_template=case_template)
        return response.text

    async def merge_alert_into_case(self, apikey, url, alert_id, case_id):
        """Merge an alert into an existing case via the raw REST endpoint."""
        self.thehive = TheHiveApi(url, apikey)
        req = url + f"/api/alert/{alert_id}/merge/{case_id}"
        ret = requests.post(req, auth=self.thehive.auth)
        return ret.text

    # Not sure what the data should be
    async def update_field(self, apikey, url, field_type, cur_id, field, data):
        """Update one field of an alert via raw REST calls.

        NOTE(review): the get_alert lookup uses ``self.thehive`` set by a
        previous action — this fails if no prior action created the client.
        """
        # This is kinda silly but..
        if field_type.lower() == "alert":
            newdata = {}
            # A literal "%s" prefix means "append to the field's current value".
            if data.startswith("%s"):
                ticket = self.thehive.get_alert(cur_id)
                if ticket.status_code != 200:
                    pass  # NOTE(review): lookup failure is silently ignored
                newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
            else:
                newdata[field] = data
            # Bleh
            url = "%s/api/alert/%s" % (url, cur_id)
            if field == "status":
                if data == "New" or data == "Updated":
                    url = "%s/markAsUnread" % url
                elif data == "Ignored":
                    url = "%s/markAsRead" % url
                ret = requests.post(
                    url,
                    headers={
                        'Content-Type': 'application/json',
                        'Authorization': 'Bearer %s' % apikey
                    }
                )
            else:
                ret = requests.patch(
                    url,
                    headers={
                        'Content-Type': 'application/json',
                        'Authorization': 'Bearer %s' % apikey
                    },
                    json=newdata,
                )
            return str(ret.status_code)
        else:
            return "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info." % field_type

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def run_analyzer(self, apikey, url, cortex_id, analyzer_id,
                           artifact_id):
        """Launch a Cortex analyzer on an existing artifact."""
        self.thehive = TheHiveApi(url, apikey)
        return self.thehive.run_analyzer(cortex_id, artifact_id,
                                         analyzer_id).text
def submitTheHive(message):
    '''
    Create a new case in TheHive based on the email
    Return 'TRUE' is successfully processed otherwise 'FALSE'
    '''
    global log

    # Decode the From/Subject headers (they may be MIME-encoded words).
    msg = email.message_from_bytes(message)
    decode = email.header.decode_header(msg['From'])[0]
    if decode[1] is not None:
        fromField = decode[0].decode(decode[1])
    else:
        fromField = str(decode[0])
    decode = email.header.decode_header(msg['Subject'])[0]
    if decode[1] is not None:
        subjectField = decode[0].decode(decode[1])
    else:
        subjectField = str(decode[0])
    log.info("From: %s Subject: %s" % (fromField, subjectField))

    attachments = []
    observables = []

    # Extract SMTP headers and search for observables
    parser = HeaderParser()
    headers = parser.parsestr(msg.as_string())
    # IDIOM: replaced the index-based while loop over headers.keys()/values()
    # with one join over items() — same order, same output string.
    headers_string = ''.join('%s: %s\n' % (k, v) for k, v in headers.items())
    # Temporary disabled
    # observables = searchObservables(headers_string, observables)

    body = ''
    for part in msg.walk():
        if part.get_content_type() == "text/plain":
            try:
                body = part.get_payload(decode=True).decode()
            except UnicodeDecodeError:
                body = part.get_payload(decode=True).decode('ISO-8859-1')
            observables.extend(searchObservables(body, observables))
        elif part.get_content_type() == "text/html":
            try:
                html = part.get_payload(decode=True).decode()
            except UnicodeDecodeError:
                html = part.get_payload(decode=True).decode('ISO-8859-1')
            observables.extend(searchObservables(html, observables))
        else:
            # Extract MIME parts
            filename = part.get_filename()
            mimetype = part.get_content_type()
            if filename and mimetype:
                if mimetype in config['caseFiles'] or not config['caseFiles']:
                    log.info("Found attachment: %s (%s)" % (filename, mimetype))
                    # Decode the attachment and save it in a temporary file
                    charset = part.get_content_charset()
                    if charset is None:
                        charset = chardet.detect(bytes(part))['encoding']
                    # Get filename extension to not break TheHive analysers (see Github #11)
                    fname, fextension = os.path.splitext(filename)
                    fd, path = tempfile.mkstemp(prefix=slugify(fname) + "_", suffix=fextension)
                    try:
                        with os.fdopen(fd, 'w+b') as tmp:
                            tmp.write(part.get_payload(decode=1))
                        attachments.append(path)
                    # BUGFIX: was `except OSerror` which is a NameError at
                    # runtime — the built-in exception is OSError.
                    except OSError as e:
                        log.error("Cannot dump attachment to %s: %s" % (path, e.errno))
                        return False

    # Cleanup observables (remove duplicates and whitelisted values)
    new_observables = []
    for o in observables:
        if not {'type': o['type'], 'value': o['value']} in new_observables:
            # Is the observable whitelisted?
            if isWhitelisted(o['value']):
                log.debug('Skipping whitelisted observable: %s' % o['value'])
            else:
                new_observables.append({'type': o['type'], 'value': o['value']})
                log.debug('Found observable %s: %s' % (o['type'], o['value']))
        else:
            log.info('Ignoring duplicate observable: %s' % o['value'])
    log.info("Removed duplicate observables: %d -> %d" % (len(observables), len(new_observables)))
    observables = new_observables

    api = TheHiveApi(config['thehiveURL'], config['thehiveUser'], config['thehivePassword'], {'http': '', 'https': ''})

    # Search for interesting keywords in subjectField:
    log.debug("Searching for %s in '%s'" % (config['alertKeywords'], subjectField))
    if re.match(config['alertKeywords'], subjectField, flags=0):
        #
        # Add observables found in the mail body
        #
        artifacts = []
        if config['thehiveObservables'] and len(observables) > 0:
            for o in observables:
                artifacts.append(AlertArtifact(dataType=o['type'], data=o['value']))
        #
        # Prepare tags - add alert keywords found to the list of tags
        #
        tags = list(config['alertTags'])
        match = re.findall(config['alertKeywords'], subjectField)
        for m in match:
            tags.append(m)
        #
        # Prepare the alert
        #
        sourceRef = str(uuid.uuid4())[0:6]
        alert = Alert(title=subjectField.replace('[ALERT]', ''),
                      tlp=int(config['alertTLP']),
                      tags=tags,
                      description=body,
                      type='external',
                      source=fromField,
                      sourceRef=sourceRef,
                      artifacts=artifacts)
        # Create the Alert
        id = None
        response = api.create_alert(alert)
        if response.status_code == 201:
            log.info('Created alert %s' % response.json()['sourceRef'])
        else:
            log.error('Cannot create alert: %s (%s)' % (response.status_code, response.text))
            return False
    else:
        # Prepare the sample case
        tasks = []
        for task in config['caseTasks']:
            tasks.append(CaseTask(title=task))

        # Prepare the custom fields
        customFields = CustomFieldHelper()\
            .add_string('from', fromField)\
            .add_string('attachment', str(attachments))\
            .build()

        # If a case template is specified, use it instead of the tasks
        if len(config['caseTemplate']) > 0:
            case = Case(title=subjectField,
                        tlp=int(config['caseTLP']),
                        flag=False,
                        tags=config['caseTags'],
                        description=body,
                        template=config['caseTemplate'],
                        customFields=customFields)
        else:
            case = Case(title=subjectField,
                        tlp=int(config['caseTLP']),
                        flag=False,
                        tags=config['caseTags'],
                        description=body,
                        tasks=tasks,
                        customFields=customFields)

        # Create the case
        id = None
        response = api.create_case(case)
        if response.status_code == 201:
            newID = response.json()['id']
            log.info('Created case %s' % response.json()['caseId'])

            # Attach every saved MIME part as a file observable, then delete
            # the temporary file on success.
            if len(attachments) > 0:
                for path in attachments:
                    observable = CaseObservable(dataType='file',
                                                data=[path],
                                                tlp=int(config['caseTLP']),
                                                ioc=False,
                                                tags=config['caseTags'],
                                                message='Found as email attachment')
                    response = api.create_case_observable(newID, observable)
                    if response.status_code == 201:
                        log.info('Added observable %s to case ID %s' % (path, newID))
                        os.unlink(path)
                    else:
                        log.warning('Cannot add observable: %s - %s (%s)' % (path, response.status_code, response.text))
            #
            # Add observables found in the mail body
            #
            if config['thehiveObservables'] and len(observables) > 0:
                for o in observables:
                    observable = CaseObservable(dataType=o['type'],
                                                data=o['value'],
                                                tlp=int(config['caseTLP']),
                                                ioc=False,
                                                tags=config['caseTags'],
                                                message='Found in the email body')
                    response = api.create_case_observable(newID, observable)
                    if response.status_code == 201:
                        log.info('Added observable %s: %s to case ID %s' % (o['type'], o['value'], newID))
                    else:
                        log.warning('Cannot add observable %s: %s - %s (%s)' % (o['type'], o['value'], response.status_code, response.text))
        else:
            log.error('Cannot create case: %s (%s)' % (response.status_code, response.text))
            return False
    return True
def run(self):
    """Download a sample from VirusTotal by hash and attach it to the parent
    case as a file observable.

    Expects a 'hash' observable as input; reports an error for any other
    dataType. Requires a *private* VirusTotal API key (file download).
    """
    Responder.run(self)
    data_type = self.get_param('data.dataType')
    case_id = self.get_param('data._parent')
    ioc_types = ["hash"]
    if data_type in ioc_types:
        url = 'https://www.virustotal.com/vtapi/v2/file/download'
        params = {
            'apikey': self.virustotal_apikey,
            'hash': self.get_param('data.data')
        }
        response = requests.get(url, params=params)
        if response.status_code == 200:
            downloaded_file = response.content
            tempdir = tempfile.gettempdir()
            # Save the sample under its hash in the temp dir.
            with open(tempdir + "/" + self.get_param('data.data'), 'wb') as f:
                f.write(downloaded_file)
            filename = f.name
            kind = filetype.guess(f.name)
            api = TheHiveApi(self.thehive_url, self.thehive_apikey)
            # BUGFIX: filetype.guess() returns None for unknown file types;
            # the old code dereferenced kind.extension/kind.mime
            # unconditionally and crashed with AttributeError.
            if kind and kind.extension != None:
                os.rename(f.name, f.name + "." + kind.extension)
                filename = f.name + "." + kind.extension
                file_observable = CaseObservable(
                    dataType='file',
                    data=[filename],
                    tlp=self.get_param('data.tlp'),
                    ioc=True,
                    tags=[
                        'src:VirusTotal',
                        str(kind.mime),
                        str(kind.extension),
                        'parent:' + self.get_param('data.data')
                    ],
                    message='')
            else:
                # Unknown type: attach without mime/extension tags.
                file_observable = CaseObservable(
                    dataType='file',
                    data=[filename],
                    tlp=self.get_param('data.tlp'),
                    ioc=True,
                    tags=[
                        'src:VirusTotal',
                        'parent:' + self.get_param('data.data')
                    ],
                    message='')
            response = api.create_case_observable(case_id, file_observable)
            self.report({
                'message': str(response.status_code) + " " + response.text
            })
        else:
            self.report({
                'message': 'Virustotal returned the following error code: ' +
                str(response.status_code) +
                ". If you receive 403 this means that you are using a public API key but this responder needs a private Virustotal API key!"
            })
    else:
        self.error('Incorrect dataType. "Hash" expected.')
def run(self):
    """Fetch a sample from VirusTotal by its hash and attach it to the case
    as a file observable, tagging it with the detected mime/extension when
    the file type can be guessed.
    """
    Responder.run(self)
    data_type = self.get_param("data.dataType")
    case_id = self.get_param("data.case._id")
    ioc_types = ["hash"]
    if data_type in ioc_types:
        sample_hash = self.get_param("data.data")
        vt_response = requests.get(
            "https://www.virustotal.com/vtapi/v2/file/download",
            params={"apikey": self.virustotal_apikey, "hash": sample_hash},
        )
        if vt_response.status_code == 200:
            # Persist the downloaded bytes under the hash in the temp dir.
            with open(tempfile.gettempdir() + "/" + sample_hash, "wb") as handle:
                handle.write(vt_response.content)
            sample_path = handle.name

            kind = filetype.guess(sample_path)
            api = TheHiveApi(self.thehive_url, self.thehive_apikey)

            if kind and kind.extension != None:
                # Known type: rename with the guessed extension and tag
                # the observable with mime + extension.
                renamed = sample_path + "." + kind.extension
                os.rename(sample_path, renamed)
                file_observable = CaseObservable(
                    dataType="file",
                    data=[renamed],
                    tlp=self.get_param("data.tlp"),
                    ioc=True,
                    tags=[
                        "src:VirusTotal",
                        str(kind.mime),
                        str(kind.extension),
                        "parent:" + sample_hash,
                    ],
                    message="",
                )
            else:
                # Unknown type: attach as-is without mime/extension tags.
                file_observable = CaseObservable(
                    dataType="file",
                    data=[sample_path],
                    tlp=self.get_param("data.tlp"),
                    ioc=True,
                    tags=[
                        "src:VirusTotal",
                        "parent:" + sample_hash,
                    ],
                    message="",
                )

            hive_response = api.create_case_observable(case_id, file_observable)
            self.report(
                {"message": str(hive_response.status_code) + " " + hive_response.text}
            )
        else:
            self.report(
                {
                    "message": "Virustotal returned the following error code: "
                    + str(vt_response.status_code)
                    + ". If you receive 403 this means that you are using a public API key but this responder needs a private Virustotal API key!"
                }
            )
    else:
        self.error('Incorrect dataType. "Hash" expected.')
# NOTE(review): this chunk is a fragment of a thehive4py sample script — it
# begins inside the success branch of an earlier `if response.status_code ...`
# whose header is not visible, and it is cut off mid-call at the end.
# Code is reproduced unchanged; only comments were added.
    print('')
    id = response.json()['id']
else:
    # Creation failed: dump status/body and abort the script.
    print('ko: {}/{}'.format(response.status_code, response.text))
    sys.exit(0)

print('Create domain observable')
print('-----------------------------')
# NOTE(review): despite the variable name, this is a 'filename' observable.
domain = CaseObservable(dataType='filename',
                        data=['pic.png'],
                        tlp=1,
                        ioc=True,
                        sighted=True,
                        tags=['thehive4py'],
                        message='test')
response = api.create_case_observable(id, domain)
if response.status_code == 201:
    print(json.dumps(response.json(), indent=4, sort_keys=True))
    print('')
else:
    print('ko: {}/{}'.format(response.status_code, response.text))
    sys.exit(0)

print('Create file observable')
print('-----------------------------')
# NOTE(review): the chunk ends mid-argument-list here; the remaining keyword
# arguments of this CaseObservable call are outside this view.
file_observable = CaseObservable(dataType='file',
                                 data=['pic.png'],
                                 tlp=1,
                                 ioc=True,
                                 sighted=True,
                                 tags=['thehive4py'],
class Offense(object):
    """
    Class used for handling offenses and customers.
    Uses customer.py to handle each and every customer in the configuration file.
    """
    # NOTE(review): this class mixes Python-2-only constructs (dict.iteritems,
    # urllib.quote) with Python-3 style code, and compares ints with `is`
    # (works only by CPython small-int caching). Flagged inline below;
    # code itself is unchanged.

    def __init__(self):
        # List of Customer objects, rebuilt by add_customers() on every reload.
        self.customers = []
        self.db_status = False
        # Optional TheHive connection, gated by the config flag.
        if cfg.TheHive:
            self.hive = TheHiveApi("http://%s" % cfg.hiveip, cfg.hiveusername, cfg.hivepassword, {"http": "", "https": ""})
        self.cortex_log_path = "log/cortex_analysis.log"
        self.cortex_listener = cortex_listen(self.cortex_log_path)

    # Function only in use when either customer_values.db does not exists or is empty
    def db_setup(self):
        """
        Creates db for a customer if it doesn't exist.
        """
        database = "%s/database/customer_values.db" % dir_path
        if not os.path.isfile(database):
            open(database, 'w+').close()
        try:
            self.db = pickledb.load(database, False)
        except pickledb.simplejson.scanner.JSONDecodeError:
            # Remove file, and recreate
            os.remove(database)
            logging.info("Creating database")
            self.db = pickledb.load(database, False)

    # Creates folders for customers.
    def create_customer_folder(self, customer_name):
        """
        Creates a directory for a customer to save offenses. Used for backlogging.
        """
        customer_dir = "%s/database/customers/%s" % (dir_path, customer_name )
        if not os.path.exists(customer_dir):
            os.makedirs(customer_dir)

    # Creates database for customer if it doesnt exist and SEC token exists
    def create_db(self, name):
        """
        Uses pickledb to keep track of latest offenses.
        Returns False when the customer entry was just initialized,
        True when it already existed.
        """
        self.db_setup()
        self.create_customer_folder(name)
        if not name in self.db.getall():
            self.db.lcreate(name)
            self.db.ladd(name, 0)
            self.db.set(name+"_counter", 0)
            self.db.set(name+"_status_code", 200)
            self.db.set(name+"_code_status", 0)
            self.db.dump()
            # NOTE(review): self.get_time is missing the call parentheses here,
            # so the bound method repr is logged instead of a timestamp.
            logging.info("%s Initialized database for %s" % (self.get_time, name))
            return False
        return True

    # Gets current time for print format.
    def get_time(self):
        """Return a 'H:MM:SS' timestamp string for log/print formatting."""
        # Workaround for wrong time
        # NOTE(review): hard-coded +2 hour offset (presumably a timezone
        # workaround) — breaks whenever DST/host timezone changes.
        hourstr = time.strftime("%H")
        hourint = int(hourstr)+2
        return "%d:%s" % (hourint, time.strftime("%M:%S"))

    # Reloading the complete customers object for every iteration
    def add_customers(self, customer_json):
        """
        Creates customer object => Loops through each and every one
        and verifies if they exist or not in the customer list. (self.customers)
        """
        self.customers = []
        # Catches exception related to unbound variables
        try:
            for item in customer_json:
                try:
                    # Verifies Json data
                    # NOTE(review): `is 36` is an identity check on an int —
                    # should be `== 36` (works only via CPython int caching).
                    if item['SEC'] and len(item['SEC']) is 36:
                        a = Customer(item['name'], item['SEC'],
                            item['target'], item['version'],
                            item['rules'], item['subnet'],
                            item['cert'], item['domain'])
                        logging.info("%s: Customer %s added/reloaded to customer" % (self.get_time(), item['name']))
                        self.create_db(item['name'])
                        self.customers.append(a)
                    else:
                        logging.info("%s: No SEC token found for %s" % (self.get_time(), item['name']))
                except KeyError as e:
                    logging.warning("%s: Bad key: %s" % (self.get_time(), e))
                    continue
        except UnboundLocalError:
            return

    # Checks if the json is valid with expected inputs
    def load_objects(self, customers = []):
        """
        Verifies if the JSON interpreted contains errors and if it should be refreshed or not.
        THis function exists to make real-time addition of new customers possible.
        """
        # NOTE(review): mutable default argument `customers=[]` (unused in the
        # body, but still a Python anti-pattern).
        global json_time
        file = "%s/database/customer.json" % dir_path
        # Verifies if file has been edited.
        if os.path.getmtime(file) > json_time:
            json_time = os.path.getmtime(file)
            msg = "%s: Reloading %s because of timedifference" % (self.get_time(), file)
            if len(sys.argv) > 1:
                if sys.argv[1] == "--verbose" or sys.argv[1] == "-v":
                    print(msg)
                    self.write_offense_log(msg)
            logging.info("%s: Reloading %s because of timedifference" % (self.get_time(), file))
        else:
            logging.info("%s: No changes made to %s" % (self.get_time(), file))
            return
        try:
            with open(file, 'r') as tmp:
                #self.verify_json(open(file, 'r'))
                customer_json = json.loads(tmp.read())
        except IOError as e:
            logging.info("%s: %s" % (self.get_time(), e))
            return
        except ValueError as e:
            logging.info("%s: %s" % (self.get_time(), e))
            return
        # Create customer info
        customer_value = self.add_customers(customer_json)
        return customer_value

    # Uses Sveve for SMS sending
    def send_sms(self, message):
        """
        Originally made to send an SMS with the message variable to a specific number.
        Currently disabled: `passwd` is empty, so the function always aborts
        before making an API call.
        """
        logging.info("%s: %s" % (self.get_time(), "Attempting to send sms"))
        if isinstance(message, dict):
            message = "\n".join(message['categories'])
        passwd=""
        # Measure to not make api calls for SMS service.
        if not passwd:
            logging.info("%s: %s" % (self.get_time(), "Aborting sms sending"))
            return
        username = "******"
        url = "https://sveve.no/SMS/SendMessage?"
        target = ""
        sender = "IT ME"
        # NOTE(review): if `message` was a dict it was already replaced by a
        # string above, so message['id'] here would raise TypeError.
        tot_url = "%suser=%s&passwd=%s&to=%s&from=%s&msg=%s - %s" % (url, username, passwd, target, sender, message['id'], message)
        tot_url += "%20SMS"
        logging.info("%s: should send alarm for ****\n%s" % (self.get_time(), tot_url))
        try:
            request = requests.get(tot_url, timeout=5, verify=False)
        except (requests.exceptions.ConnectionError,
                requests.exceptions.ReadTimeout,
                AttributeError) as e:
            logging.warning("%s: %s" % (self.get_time(), e))
            return

    # Runs the alarm
    def run_alarm(self, item, customer):
        """
        Originally used to control on-screen offenses, but later found to be annoying.
        """
        logging.info("%s: New highest offense - %s - customer %s, %s" %
            (self.get_time(), item['id'], customer.name, item['categories']))
        # NOTE(review): `is 0` — identity check on an int, should be `== 0`.
        if self.db.get(customer.name+"_counter") is 0:
            self.db.set(customer.name+"_counter",
                int(self.db.get(customer.name+"_counter"))+1)
            return
        logging.warning("%s: Sending alarm to %s" % (self.get_time(), customer.name))
        # NOTE(review): urllib.quote is Python 2 only (py3: urllib.parse.quote);
        # new_data is also never used afterwards.
        new_data = urllib.quote("Offense #%s: %s" %
            (item['id'], "\n".join(item['categories'])))
        # Return to only get one alarm at a time per customer.
        return False

    def reverse_list(self, customer, request):
        """
        Reverses a list. QRadar API > 7.0 wasn't stable.
        """
        tmp_arr = []
        if not customer.new_version:
            # Old API: newest offense is last, so walk the JSON backwards.
            for i in range(len(request.json())-1, -1, -1):
                tmp_arr.append(request.json()[i])
            return tmp_arr
        else:
            return request.json()

    # Removes the "Range" header for some specific API calls.
    def remove_range_header(self, customer):
        """
        Removes a specific header. Depends on which API call is used.
        Returns a copy of the customer headers without "Range".
        """
        headers = dict.copy(customer.header)
        try:
            del headers["Range"]
        except KeyError as e:
            logging.warning("%s: Bad key: %s" % (self.get_time(), e))
        return headers

    # If it doesn't exist already
    def find_ip(self, customer, ID, headers, src_dst="src"):
        """
        Finds an IP based on ID. Almost same as above, but not in bulk.
        Returns the IP string, or False when the lookup fails.
        """
        search_field = ""
        # NOTE(review): local `find_ip` shadows the method name; it is
        # initialized to "" so a failed request falls through to
        # "".json() below, raising an uncaught AttributeError rather than
        # the handled KeyError/UnboundLocalError.
        find_ip = ""
        if src_dst == "dst":
            src_dst = "local_destination_addresses"
            search_field = "local_destination_ip"
        else:
            src_dst = "source_address_ids"
            search_field = "source_ip"
        target_path = "https://%s/api/siem/%s" % (customer.target, src_dst)
        header = self.remove_range_header(customer)
        try:
            # "%2C" is an URL-encoded comma in the fields parameter.
            find_ip = requests.get(target_path+"/%s?fields=id%s%s" %
                (str(ID), "%2C", search_field), headers=header, timeout=5, verify=False)
        except (requests.exceptions.ConnectionError,
                requests.exceptions.ReadTimeout,
                AttributeError) as e:
            logging.warning("%s: %s" % (self.get_time(), e))
        try:
            ret_val = find_ip.json()[search_field]
        except (KeyError, UnboundLocalError) as e:
            ret_val = False
        return ret_val

    # Gets the a list of IDs related to IPs
    def get_reflist(self, customer, ref_name):
        """
        Gets the actual data used to correlate with customer.json rules.
        """
        fields = ""
        headers = self.remove_range_header(customer)
        ref_list = "https://%s/api/reference_data/sets/%s" % (customer.target, ref_name)
        try:
            ref_set = requests.get("%s" % ref_list, headers=headers, timeout=5, verify=False)
        except (requests.exceptions.ConnectionError,
                requests.exceptions.ReadTimeout,
                AttributeError) as e:
            logging.warning("%s: %s" % (self.get_time(), e))
        # NOTE(review): if the request above raised, `ref_set` is unbound here
        # and this return raises UnboundLocalError.
        return ref_set

    def get_network_list(self, network_list):
        """
        Finds the list of networks that are more valuable (e.g.
        server network)
        """
        arr = []
        for subnet in network_list:
            arr.append(subnet["value"])
        return arr

    # Returns
    def get_affected_subnet(self, req, customer, network_list, id_list_name, src_dst):
        """
        Checks if the network found in an offense is part of the actual subnet.
        Returns the first matching IP, or False when none match.
        """
        affected_subnet = []
        headers = self.remove_range_header(customer)
        # NOTE(review): if src_dst is neither "dst" nor "src", the variables
        # below stay unbound and the loop raises NameError.
        if src_dst == "dst":
            ip_variable = "local_destination_ip"
            base_url = "https://%s/api/siem/local_destination_addresses/" % customer.target
            fields = "?fields=local_destination_ip"
        elif src_dst == "src":
            ip_variable = "source_ip"
            base_url = "https://%s/api/siem/source_addresses/" % customer.target
            fields = "?fields=source_ip"
        for ID in req.json()[id_list_name]:
            url = base_url+str(ID)+fields
            cnt = 0
            try:
                ip = requests.get(url, headers=headers, verify=False, timeout=5)
            except requests.exceptions.ConnectionError:
                continue
            try:
                ip = ip.json()[ip_variable]
            except KeyError as e:
                logging.warning("%s: %s" % (self.get_time(), e))
                continue
            for network in network_list:
                try:
                    if ip in netaddr.IPNetwork(network):
                        return ip
                except netaddr.core.AddrFormatError as e:
                    logging.warning("%s: %s" % (self.get_time(), e))
                cnt += 1
        return False

    # Verifies alarms related to reference lists
    def verify_reflist(self, customer, req):
        """
        Verifies multiple reference set alarms.
        Returns True when any offense address falls inside a configured
        reference-set network, otherwise False.
        """
        id_list = ["source_address_ids", "local_destination_address_ids"]
        affected_subnet = []
        # List of subnets to check
        for ref_set_list in customer.ref_list:
            ref_set = self.get_reflist(customer, ref_set_list)
            # Works because < 255
            # NOTE(review): `is 200` / `is 0` rely on CPython int caching —
            # should be `== 200` / `== 0`.
            if not ref_set.status_code is 200:
                logging.warning("Cannot access reflist.")
                continue
            try:
                network_list = self.get_network_list(ref_set.json()["data"])
            except KeyError as e:
                logging.warning("%s: %s" % (self.get_time(), e))
            if ref_set.json()["number_of_elements"] is 0:
                msg = "%s might be empty for %s, no action taken." \
                    % (ref_set_list, customer.name)
                if len(sys.argv) > 1:
                    if sys.argv[1] == "--verbose" or sys.argv[1] == "-v":
                        print(msg)
                        self.write_offense_log(msg)
                continue
            src_affected_subnet = self.get_affected_subnet(req, customer,
                network_list, "source_address_ids", "src")
            if src_affected_subnet:
                #sys.stdout.write("SUBNET %s. " % src_affected_subnet)
                return True
            dst_affected_subnet = self.get_affected_subnet(req, customer,
                network_list, "local_destination_address_ids", "dst")
            if dst_affected_subnet:
                return True
        return False

    def check_alarm(self, ID, customer):
        """
        Verifies an ID, if it's new etc.
        Bulk loads and checks if the lowest number is greater than the oldest saved one.
        The horrible forloop verifies if rules are matched based on rules in customer.json
        """
        fields = ""
        valid = True
        headers = self.remove_range_header(customer)
        try:
            req = requests.get("https://%s/api/siem/offenses/%s%s" % (customer.target, str(ID), fields),
                timeout=5, headers=headers, verify=False)
        except (requests.exceptions.ConnectionError,
                requests.exceptions.ReadTimeout,
                AttributeError) as e:
            logging.warning("%s: %s" % (self.get_time(), e))
            return False
        if req.status_code != 200:
            logging.warning("%s Unable to retrieve %s" % (self.get_time(), customer.target))
            return False
        # Checks reference lists from database/customer.json
        if customer.ref_list[0]:
            valid = self.verify_reflist(customer, req)
        else:
            return False
        # Skips if reference list match
        # Can add alarm sending in this one
        if not valid:
            return False
        logging.info("%s: %s" % (self.get_time(),
            "In subnet range. Verifying rules for %s" % customer.name))
        # Checks rules only if offense contains IP in specified IP range
        rule_counter = 0
        for rules in customer.rules:
            # Iter keys inside rule
            # NOTE(review): dict.iteritems() is Python 2 only — py3 is items().
            for keys, values in rules.iteritems():
                # Do stuff if not integer values
                if not isinstance(values, int):
                    if values == ".*":
                        rule_counter += 1
                        continue
                    # Checks multiple arguments in same rule split on "|".
                    for split_item in values.split("|"):
                        for categories in req.json()[keys]:
                            # A "!" prefix means a negated match: abort
                            # the whole check when it matches.
                            if split_item.lower().startswith("!") \
                                and split_item.lower()[1:] in categories.lower():
                                return False
                                #rule_counter -= 1
                            if split_item.lower() in categories.lower():
                                rule_counter += 1
                # INT CHECK
                else:
                    if req.json()[keys] > values:
                        rule_counter += 1
                    else:
                        break
            # Runs alarm if counter is high enough.
            # NOTE(review): `is len(rules)` — int identity again, use `==`.
            if rule_counter is len(rules):
                msg = "RULES MATCHED. SHOULD SEND ALARM \o/"
                if len(sys.argv) > 1:
                    if sys.argv[1] == "--verbose" or sys.argv[1] == "-v":
                        print(msg)
                        self.write_offense_log(msg)
                logging.info("%s: Rule triggered - sending alarm" % self.get_time())
                self.run_alarm(req.json(), customer)
                break
            rule_counter = 0
        return True

    # Verify ID here
    def add_new_ID(self, customer, request):
        """Dump a new offense to database/customers/<name>/<id> for backlogging."""
        path = "database/customers/%s/%s" % (customer.name, str(request.json()["id"]))
        if not os.path.exists(path):
            with open(path, "w+") as tmp:
                json.dump(request.json(), tmp)
            logging.info("%s: Added new offense to %s" % (self.get_time(), path))

    # DISCORD SETUP
    def discord_setup(self, ID, msg):
        """Send an offense notification to Discord via an external helper script."""
        alarm_msg = "%s - %s" % (ID, msg)
        # NOTE(review): shell=True with a joined string — msg content ends up
        # in a shell command line; only safe while input is trusted.
        call = ["python3.6", "%s/dependencies/chat.py" % dir_path, "\"%s\"" % alarm_msg]
        subprocess.call(" ".join(call), shell=True)
        logging.info("%s: Message sent to discord server." % self.get_time())

    # BEST LOGGER AYY \o/ LMAO
    def write_offense_log(self, data):
        """Append a line to log/offense.log, tolerating encoding failures."""
        with open("log/offense.log", "a") as tmp:
            try:
                tmp.write("\n%s" % str(data))
            except UnicodeEncodeError as e:
                tmp.write("\nError in parsing data.\n%s" % e)

    # Returns tasklist based on casetitle
    def get_hive_task_data(self, data):
        """Return the configured task list whose description matches the
        offense description, or None when nothing matches."""
        # Reload every time so it's editable while running.
        with open(cfg.incident_task, "r") as tmp:
            cur_data = json.load(tmp)
        # Is cur_data["description"] in data["description"]:
        # NOTE(review): the file is parsed twice — `cur_data` above is unused.
        for item in json.load(open(cfg.incident_task, "r"))["ruleslist"]:
            if item["description"].lower() in data["description"].lower():
                return item["result"]

    # Checks the normal local subnet ranges. Theres like 7 missing.
    def check_local_subnet(self, ip_address):
        # Returns false if ip not a local address
        # NOTE(review): the comment contradicts the code — this returns False
        # when the IP IS inside an RFC1918 range and True otherwise. Callers
        # below use the True result to keep an address, so confirm intent.
        # Yes I know there are more..
        local_ranges = [
            "192.168.0.0/16",
            "172.16.0.0/12",
            "10.0.0.0/8"
        ]
        for item in local_ranges:
            if netaddr.IPAddress(ip_address) in netaddr.IPNetwork(item):
                return False
        return True

    # IP verification lmao
    def verify_offense_source(self, input):
        """Return True when `input` parses as an IP and passes the subnet check."""
        try:
            netaddr.IPAddress(str(input))
            if not self.check_local_subnet(input):
                return False
            return True
        except netaddr.core.AddrFormatError:
            return False

    # Returns all IPs in an offense by ID
    def get_ip_data(self, customer, data):
        """Resolve source/destination address ids of an offense to unique IP strings."""
        verify_local_ip = []
        # Should prolly cache this data.
        # Finds IPs based on and ID - destination
        if data["local_destination_count"] > 0:
            for item in data["local_destination_address_ids"]:
                ip_output = self.find_ip(customer, item, customer.header, "dst")
                if ip_output:
                    if ip_output not in verify_local_ip and self.check_local_subnet(ip_output):
                        verify_local_ip.append(str(ip_output))
        # Finds IPs based on and ID - source
        if data["source_count"] > 0:
            for item in data["source_address_ids"]:
                ip_output = self.find_ip(customer, item, customer.header)
                if ip_output:
                    if ip_output not in verify_local_ip and self.check_local_subnet(ip_output):
                        verify_local_ip.append(str(ip_output))
        return verify_local_ip

    # Only created for IP currently.
    # Hardcoded for QRadar
    def get_hive_cases(self, customer, data):
        """Decide whether the offense needs a new TheHive case.

        Returns a list of observable IPs when a new case should be created,
        or False when the observables were merged into an existing case
        (or there is nothing to do).
        """
        # Offense doesn't return all the IP-addresses.
        verify_local_ip = self.get_ip_data(customer, data)
        find_source = self.verify_offense_source(data["offense_source"])
        # Adds offense source if IP observed
        if find_source:
            verify_local_ip.append(str(data["offense_source"]))
        # Returns if no observables found
        # Also means a case will not be created.
        if not verify_local_ip:
            return False
        # Check basic case details first. Customername > Name of offense > category
        # Might be able to search title field for customer name as well. Tags can also be used.
        allcases = self.hive.find_cases(query={"_field": "status", "_value": "Open"})
        customer_caselist = []
        # Finds all the specified customers cases
        for item in allcases.json():
            if customer.name.lower() in item["title"].lower():
                customer_caselist.append(item)
        # Creates a case if no cases are found. Returns list of observed IoCs for case creation
        if not customer_caselist:
            return verify_local_ip
        use_case = ""
        casename = ""
        # Looks for exact casename match
        for case in customer_caselist:
            # NOTE(review): assumes the title format "<name>: <id> - <desc>";
            # a title without " - " raises IndexError here.
            casetitle = case["title"].split(" - ")[1]
            if casetitle == data["description"]:
                use_case = case
                break
        if use_case:
            not_matching = []
            matching_categories = data["categories"]
        # Try to match two categories if exact name match isn't found
        if not use_case:
            # Least amount of categories needed to match
            category_match_number = 2
            category_counter = 0
            for case in customer_caselist:
                matching_categories = []
                not_matching = []
                for category in data["categories"]:
                    if category in case["tags"]:
                        matching_categories.append(category)
                    else:
                        not_matching.append(category)
                if len(matching_categories) > (category_match_number-1):
                    use_case = case
                    break
        # Will create a new case if observable found and no similar case.
        if not use_case:
            return verify_local_ip
        # FIX - Hardcoded datatype
        datatype = "ip"
        actual_data = []
        # Finds actual observables for the specified case
        observables = [x["data"] for x in self.hive.get_case_observables(
            use_case["id"]).json() if x["dataType"] == datatype]
        # Finds if observable exists in previous list
        actual_data = [x for x in verify_local_ip if not x in observables]
        # FIX - check logic here. Might need to add tags etc (offenseID) etc.
        # Only appends data if new observables are detected
        if not actual_data:
            return False
        # Defines what categories to append
        # NOTE(review): category_breaker is computed but add_observable_data is
        # called with not_matching instead — likely a leftover.
        category_breaker = ""
        if not_matching:
            category_breaker = not_matching
        else:
            category_breaker = matching_categories
        self.add_observable_data(use_case["id"], actual_data, datatype, data, not_matching)
        # False to not create another case
        return False

    # Add by caseid and list of specified datatype and a QRadar offense
    def add_observable_data(self, case_id, observables, datatype, data, category):
        """Create one TheHive observable per entry and kick off Cortex analysis."""
        observable_items = []
        data_items = []
        # Tag observables with the offense id plus its categories.
        tags = [str(data["id"])]
        tags.extend(category)
        for item in observables:
            observable = CaseObservable(
                dataType=datatype,
                data=item,
                tlp=0,
                ioc=True,
                tags=tags,
                message="Possible IoC"
            )
            # Creates the observable
            ret = self.hive.create_case_observable(case_id, observable)
            if ret.ok:
                observable_items.append(ret.json())
                data_items.append(item)
            else:
                continue
        if data_items:
            self.cortex_listener.run_cortex_analyzer(datatype, data_items, observable_items)

    # TheHive case creation
    def create_hive_case(self, customer, data):
        """Create a TheHive case for the offense unless it was merged into an
        existing one; attach the observed IPs on success."""
        create_hive_bool = self.get_hive_cases(customer, data)
        # Returns if case already merged.
        if not create_hive_bool:
            return False
        # Baseline for creating a case
        title = ("%s: %s - %s" % (customer.name, str(data["id"]), data["description"]))
        # NOTE(review): trailing comma makes static_task a 1-tuple, so the
        # CaseTask title below is a tuple rather than a string.
        static_task = "Why did it happen? Check rule.",
        task_data = self.get_hive_task_data(data)
        tasks = [
            CaseTask(title=static_task)
        ]
        if task_data:
            for item in task_data:
                tasks.append(CaseTask(title=item))
        # Creates a case object
        case = Case(title=title, tlp=0, flag=False, tags=data["categories"],
            description=data["description"], tasks=tasks)
        # Creates the actual case based on prior info
        ret = self.hive.create_case(case)
        if ret.ok:
            # FIX, datatype is static
            self.add_observable_data(ret.json()["id"], create_hive_bool,
                "ip", data, data["categories"])
            return True
        return False

    # Verifies the ID, and returns if it's not a new incident.
    def verify_ID(self, request, customer):
        """Walk offenses newer than the last persisted id, backlog them, create
        cases/notifications, and record the ids in pickledb."""
        # In case there are no offenses related to customer. Basically domain management.
        # Attempts to reanalyze in case of failed analysis jobs
        #self.cortex_listener.find_failed_cortex_jobs()
        try:
            if float(customer.version) < 7.0:
                try:
                    json_id = request.json()[len(request.json())-1]['id']
                except (ValueError, IndexError) as e:
                    logging.warning("%s: Customer %s: %s" % (self.get_time(), customer.name, e))
                    return False
                customer.new_version = False
            else:
                json_id = request.json()[0]['id']
        except IndexError:
            logging.info("No offenses for customer.")
            return
        # Use difference between last seen offense and newest.
        last_db = self.db.lget(customer.name, self.db.llen(customer.name)-1)
        cur_array = []
        if json_id > last_db:
            difference = 1
            # Not even a point /o\
            # NOTE(review): `is difference` — int identity check again.
            if not json_id-last_db is difference:
                difference = json_id-last_db
            # Looping through incase of earlier crash / multiple offenses in one minute
            for i in range(json_id, last_db, -1):
                cur_var = False
                if i in self.db.get(customer.name):
                    continue
                # Verifies if the id actually exists
                for item in request.json():
                    if i == item['id']:
                        cur_var = True
                        break
                if not cur_var:
                    continue
                logging.info("%s: %s: New highest offense found: %d" % (self.get_time(), customer.name, i))
                target = "https://%s/api/siem/offenses/%s" % (customer.target, str(i))
                new_header = self.remove_range_header(customer)
                try:
                    new_req = requests.get(target, headers=new_header, timeout=5, verify=False)
                except requests.exceptions.ConnectionError as e:
                    logging.warning("Internal alarmserver might be down: %s" % e)
                    continue
                except requests.exceptions.ReadTimeout as e:
                    logging.warning("Timeout %s" % e)
                    continue
                # Appends current offense to database/customers/customer/ID in json format.
                # This is to backtrack
                ID_ret = self.add_new_ID(customer, new_req)
                new_req = new_req.json()
                try:
                    # Compatibility issue if missing prerequisites.
                    # NOTE(review): urllib.quote is Python 2 only.
                    new_data = urllib.quote("Offense #%s: %s" % (str(i),
                        "\n".join(new_req['categories'])))
                except TypeError as e:
                    logging.warning("%s: TypeError: %s" % (self.get_time(), e))
                    new_data = urllib.quote("Offense #%s: %s" % (str(i), "Arbitrary categories"))
                except KeyError as e:
                    logging.warning("%s: KeyError: %s" % (self.get_time(), e))
                    new_data = urllib.quote("Offense #%s: %s" % (str(i), "Arbitrary categories"))
                # Sends a local alarm if an alarmserver is running on the current system.
                # Prints to screen. Try/catch only in case of errors.
                try:
                    msg = "%s: %s - %s - %s" % (self.get_time(),
                        str(i).ljust(5), customer.name.ljust(10), ", ".join(new_req['categories']))
                    if len(sys.argv) > 1:
                        if sys.argv[1] == "--verbose" or sys.argv[1] == "-v":
                            print(msg)
                            self.write_offense_log(msg)
                except TypeError as e:
                    logging.warning("%s: TypeError: %s" % (self.get_time(), e))
                except KeyError as e:
                    logging.warning("%s: KeyError: %s" % (self.get_time(), e))
                if cfg.TheHive:
                    self.create_hive_case(customer, new_req)
                if cfg.discordname and cfg.discordpw:
                    self.discord_setup(str(i), ", ".join(new_req['categories']))
                # verifying if an alarm should be triggered.
                difference = json_id-self.db.llen(customer.name)-1
                # Adds data to the DB
                cur_array.append(i)
                alarm_check = self.check_alarm(i, customer)
                if not alarm_check:
                    continue
            # Adds all the data to the database
            if cur_array:
                cur_array = sorted(cur_array)
                for items in cur_array:
                    self.db.ladd(customer.name, items)
        else:
            return False

    # Reload json every time, and check it to prevent failures.
    # NOTE(review): this statement executes at class-definition time and
    # references undefined names (verify_json, self, x) — it raises
    # NameError as soon as the module is imported. Almost certainly a
    # leftover that should be removed or turned into a method.
    verify_json(self, x)

    def check_connection(self):
        """Poll every customer's QRadar API, persist status codes, and hand
        successful responses to verify_ID()."""
        global resetcounter
        for customer in self.customers:
            self.db.dump()
            domain_field = ""
            self.db.set(customer.name+"_counter", int(self.db.get(customer.name+"_counter"))+1)
            # Verifies status codes
            # NOTE(review): `is 200` — int identity check, should be `== 200`.
            if not self.db.get(customer.name+"_status_code") is 200 \
                and customer.fail_counter % 10 > 0:
                continue
            # Domain management because of some bullshit.
            if customer.domain > 0:
                # %3D is an URL-encoded "=" for the filter expression.
                domain_field = "?filter=domain_id%s%d" % (r'%3D', customer.domain)
            # Makes original request per customer
            try:
                request = requests.get('%s%s' % (customer.target_path, domain_field),
                    headers=customer.header, timeout=5, verify=False)
            except (requests.exceptions.ConnectionError,
                    requests.exceptions.ReadTimeout,
                    AttributeError) as e:
                try:
                    logging.info("%s: Connection failure for %s" %
                        (self.get_time(), customer.name))
                    continue
                except TypeError as e:
                    logging.warning("%s" % e)
                # NOTE(review): reached only when the logging call above
                # raised TypeError — the `continue` skips it otherwise.
                self.db.set(customer.name+"_status_code", 401)
                continue
            # Set previous status code?
            # Legacy, but doesn't hurt nothing \o/
            if request.status_code != 200:
                logging.info("%s: Not 200 for %s - %s" % (self.get_time(), customer.name,
                    self.db.get(customer.name+"_status_code")))
                self.db.set(customer.name+"_status_code", request.status_code)
                continue
            # Sets previous status code in case of shutdown
            self.db.set(customer.name+"_status_code", request.status_code)
            verify_request = self.verify_ID(request, customer)
            if not verify_request:
                continue
class HiveManagement:
    """Thin wrapper around TheHive's REST API, configured from a YAML file.

    The YAML config supplies the server URL, the API key and the default
    tag lists applied to alerts, cases and observables.
    """

    def __init__(
            self,
            config_file='C:\\automation-hunting\\the-hive\\conf\\thehive-provider.yaml'
    ):
        self.hive_url = None
        self.api_key = None
        self.alert_tags = None
        self.source = None
        self.alert_type = None
        self.case_tags = None
        self.ioc_tags = None
        if not self.get_config_data(config_file):
            raise Exception('Invalid Configuration File')
        self.api = TheHiveApi(self.hive_url, self.api_key)

    def get_config_data(self, yaml_file):
        """Load *yaml_file* and populate the instance; return True when valid."""
        with open(yaml_file, 'r') as ymlfile:
            cfg = yaml.load(ymlfile, Loader=yaml.FullLoader)
        valid = False
        if self.validate_cfg_yml(cfg):
            hive = cfg['hive']
            self.hive_url = hive['hive_url']
            self.api_key = hive['api_key']
            self.alert_tags = hive['alert_tags']
            self.source = hive['source']
            self.alert_type = hive['alert_type']
            self.case_tags = hive['case_tags']
            self.ioc_tags = hive['ioc_tags']
            valid = True
        return valid

    @staticmethod
    def validate_cfg_yml(cfg):
        """Return True when the config has every key get_config_data reads.

        The original only checked 'hive_url'/'api_key', so a config missing
        any tag list passed validation and later crashed with KeyError.
        """
        if 'hive' not in cfg:
            print('Not main')
            return False
        required = ('hive_url', 'api_key', 'alert_tags', 'source',
                    'alert_type', 'case_tags', 'ioc_tags')
        return all(key in cfg['hive'] for key in required)

    def create_alarm(self,
                     title,
                     source_ref=None,
                     description='N/A',
                     alert_type='external',
                     source='LogRhythm',
                     iocs=None,
                     additional_fields=None,
                     additional_tags=None,
                     tlp=TLP.AMBER,
                     pap=PAP.AMBER,
                     severity=HiveSeverity.MEDIUM):
        """Create an alert in TheHive and return the decoded JSON response.

        iocs: optional list of {'type': HiveDataType, 'value': str} dicts.
        additional_fields: optional list of {'name': ..., 'value': ...} dicts.
        """
        if source_ref is None:
            # Short random reference when the caller does not supply one.
            source_ref = str(uuid.uuid4())[0:6]
        alert_tags = self.alert_tags.copy()
        if additional_tags is not None:
            for additional_tag in additional_tags:
                alert_tags.append(additional_tag)
        custom_fields_helper = CustomFieldHelper()
        if additional_fields is not None:
            for field in additional_fields:
                custom_fields_helper.add_string(field['name'], field['value'])
        custom_fields = custom_fields_helper.build()
        artifacts = list()
        if iocs is not None:
            for ioc in iocs:
                artifacts.append(
                    AlertArtifact(dataType=ioc['type'].value,
                                  data=ioc['value']))
        hive_alert = Alert(title=title,
                           tlp=tlp.value,
                           tags=alert_tags,
                           description=description,
                           type=alert_type,
                           source=source,
                           sourceRef=source_ref,
                           pap=pap.value,
                           artifacts=artifacts,
                           customFields=custom_fields,
                           severity=severity.value)
        response = self.api.create_alert(hive_alert)
        if response.status_code == 201:
            print('Alerta Creada Exitosamente')
            print(json.dumps(response.json(), indent=4, sort_keys=True))
        else:
            print('Error')
            print(response.text)
        return response.json()

    def create_case(self,
                    title,
                    tasks=None,
                    tlp=TLP.AMBER,
                    pap=PAP.AMBER,
                    severity=HiveSeverity.MEDIUM,
                    additional_fields=None,
                    additional_tags=None,
                    flag=False,
                    description='N/A'):
        """Create a case in TheHive and return the decoded JSON response.

        tasks: optional list of task title strings.
        """
        case_tags = self.case_tags.copy()
        if additional_tags is not None:
            for additional_tag in additional_tags:
                case_tags.append(additional_tag)
        custom_fields_helper = CustomFieldHelper()
        if additional_fields is not None:
            for field in additional_fields:
                custom_fields_helper.add_string(field['name'], field['value'])
        custom_fields = custom_fields_helper.build()
        new_tasks = list()
        if tasks is not None:
            for task in tasks:
                new_tasks.append(CaseTask(title=task))
        hive_case = Case(title=title,
                         tlp=tlp.value,
                         pap=pap.value,
                         description=description,
                         tags=case_tags,
                         severity=severity.value,
                         flag=flag,
                         customFields=custom_fields,
                         tasks=new_tasks)
        response = self.api.create_case(hive_case)
        if response.status_code == 201:
            print('Caso Creada Exitosamente')
            print(json.dumps(response.json(), indent=4, sort_keys=True))
        else:
            print('Error')
            print(response.text)
        return response.json()

    def create_case_observable(self,
                               data_type: HiveDataType,
                               value: list,
                               tlp=TLP.AMBER,
                               ioc=True,
                               additional_tags=None,
                               description='LogRhythm IoC'):
        """Build (but do not submit) a CaseObservable from IoC data."""
        ioc_tags = self.ioc_tags.copy()
        if additional_tags is not None:
            for additional_tag in additional_tags:
                ioc_tags.append(additional_tag)
        # BUG FIX: thehive4py expects the 'dataType' keyword. The original
        # passed 'data_type', which CaseObservable silently ignores,
        # producing an observable with no data type.
        hive_observable = CaseObservable(dataType=data_type.value,
                                         data=value,
                                         tlp=tlp.value,
                                         ioc=ioc,
                                         tags=ioc_tags,
                                         message=description)
        return hive_observable

    def add_observable_to_case(self, case_id, observable: CaseObservable):
        """Submit a previously built observable to the given case."""
        response = self.api.create_case_observable(case_id, observable)
        if response.status_code == 201:
            print('Observable successfully added to the case')
            print(json.dumps(response.json(), indent=4, sort_keys=True))
        else:
            print('Error')
            print(response.text)

    def search_case(self,
                    title=None,
                    tlp: TLP = None,
                    pap: PAP = None,
                    severity: HiveSeverity = None,
                    or_operator=False):
        """Search cases by any combination of title/tlp/pap/severity.

        tlp/pap/severity are matched as >= (Gte); set or_operator=True to
        OR the criteria instead of ANDing them. Returns decoded JSON, or
        None when no filter was given.
        """
        if title is None and tlp is None and pap is None and severity is None:
            print('Can\'t search without a filter')
            return None
        operators = list()
        if title is not None:
            operators.append(String('title: ' + urllib.parse.quote(title)))
        if tlp is not None:
            operators.append(Gte('tlp', tlp.value))
        if pap is not None:
            operators.append(Gte('pap', pap.value))
        if severity is not None:
            operators.append(Gte('severity', severity.value))
        if len(operators) == 1:
            query = operators[0]
        else:
            query = Or(operators) if or_operator else And(operators)
        response = self.api.find_cases(query=query, range='all', sort=[])
        if response.status_code == 200:
            print('Busqueda correcta')
            print(json.dumps(response.json(), indent=4, sort_keys=True))
        else:
            print('Error')
            print(response.text)
        return response.json()

    def promote_alert(self, alert_id):
        """Promote an alert to a case; return the decoded JSON response."""
        response = self.api.promote_alert_to_case(alert_id)
        if response.status_code == 201:
            print('Correct Promotion')
            print(json.dumps(response.json(), indent=4, sort_keys=True))
        else:
            print('Error')
            print(response.text)
        return response.json()
def submitTheHive(message):
    '''Create a new case in TheHive based on the email.

    message: raw RFC822 message bytes.
    Relies on module globals `config` (imap2thehive settings) and `args`
    (CLI flags) defined elsewhere in the file.
    Subjects containing "[ALERT]" become TheHive alerts; everything else
    becomes a case with the configured tasks and any attachments added
    as file observables. Exits the process on API errors.
    '''
    # Decode email headers (From / Subject may be MIME-encoded).
    msg = email.message_from_bytes(message)
    decode = email.header.decode_header(msg['From'])[0]
    fromField = str(decode[0])
    decode = email.header.decode_header(msg['Subject'])[0]
    subjectField = str(decode[0])
    if args.verbose:
        print("[INFO] From: %s Subject: %s" % (fromField, subjectField))
    attachments = []
    body = ''
    for part in msg.walk():
        if part.get_content_type() == "text/plain":
            # Plain-text part becomes the case/alert description.
            body = part.get_payload(decode=True).decode()
        else:
            # Extract MIME parts: keep attachments whose MIME type is
            # whitelisted in config['caseFiles'] (empty list = keep all).
            filename = part.get_filename()
            mimetype = part.get_content_type()
            if filename and mimetype:
                if mimetype in config['caseFiles'] or not config['caseFiles']:
                    print("[INFO] Found attachment: %s (%s)" % (filename, mimetype))
                    # Decode the attachment and save it in a temporary file.
                    # NOTE(review): `charset` is detected but never used below
                    # (the payload is written as raw bytes) — confirm intent.
                    charset = part.get_content_charset()
                    if charset is None:
                        charset = chardet.detect(bytes(part))['encoding']
                    fd, path = tempfile.mkstemp(prefix=slugify(filename) + "_")
                    try:
                        with os.fdopen(fd, 'w+b') as tmp:
                            tmp.write(part.get_payload(decode=1))
                        attachments.append(path)
                    except OSError as e:
                        print("[ERROR] Cannot dump attachment to %s: %s" % (path, e.errno))
    # Fourth positional argument is the proxies dict (empty = no proxy).
    api = TheHiveApi(config['thehiveURL'], config['thehiveUser'],
                     config['thehivePassword'], {
                         'http': '',
                         'https': ''
                     })
    if '[ALERT]' in subjectField:
        # Prepare the alert; sourceRef is a short random unique reference.
        sourceRef = str(uuid.uuid4())[0:6]
        alert = Alert(title=subjectField.replace('[ALERT]', ''),
                      tlp=int(config['alertTLP']),
                      tags=config['alertTags'],
                      description=body,
                      type='external',
                      source=fromField,
                      sourceRef=sourceRef)
        # Create the Alert
        id = None
        response = api.create_alert(alert)
        if response.status_code == 201:
            if args.verbose:
                print('[INFO] Created alert %s' % response.json()['sourceRef'])
        else:
            print('[ERROR] Cannot create alert: %s (%s)' %
                  (response.status_code, response.text))
            # NOTE(review): exits 0 even on failure — presumably so the mail
            # pipeline is not disrupted; confirm against the caller.
            sys.exit(0)
    else:
        # Prepare the sample case with the configured task list.
        tasks = []
        for task in config['caseTasks']:
            tasks.append(CaseTask(title=task))
        # Prepare the custom fields (sender + attachment list).
        customFields = CustomFieldHelper()\
            .add_string('from', fromField)\
            .add_string('attachment', str(attachments))\
            .build()
        case = Case(title=subjectField,
                    tlp=int(config['caseTLP']),
                    flag=False,
                    tags=config['caseTags'],
                    description=body,
                    tasks=tasks,
                    customFields=customFields)
        # Create the case
        id = None
        response = api.create_case(case)
        if response.status_code == 201:
            newID = response.json()['id']
            if args.verbose:
                print('[INFO] Created case %s' % response.json()['caseId'])
            # Attach every saved attachment as a file observable, deleting
            # the temp file once it is uploaded.
            if len(attachments) > 0:
                for path in attachments:
                    observable = CaseObservable(
                        dataType='file',
                        data=[path],
                        tlp=int(config['caseTLP']),
                        ioc=False,
                        tags=config['caseTags'],
                        message='Created by imap2thehive.py')
                    response = api.create_case_observable(newID, observable)
                    if response.status_code == 201:
                        if args.verbose:
                            print('[INFO] Added observable %s to case ID %s' % (path, newID))
                        os.unlink(path)
                    else:
                        print('[ERROR] Cannot add observable: %s - %s (%s)' %
                              (path, response.status_code, response.text))
                        sys.exit(0)
        else:
            print('[ERROR] Cannot create case: %s (%s)' %
                  (response.status_code, response.text))
            sys.exit(0)
    return
def main(): parser = argparse.ArgumentParser() parser.add_argument("-u", "--url", required=True, help="Suspect URL") args = parser.parse_args() surl = args.url user = input("Username: "******"threatType").lower() except KeyError: safebrowse = "nullSafeBrowseTag" urls = set() for i in results['data']['requests']: for k, v in i.iteritems(): if k == "request": urls.add(v.get("documentURL")) # Locate template case = Case(title='Email Campaign', description='N/A', tlp=2, template='Email - Suspect Phishing', tags=['email']) # Create the case response = thehive.create_case(case) id = response.json()['id'] # Add captured values as observables '\n'.join(urls) for i in urls: urlv = CaseObservable(dataType='url', data=i, tlp=1, ioc=False, tags=['thehive4py', 'url', 'phishing'], message='from urlscan.io') urlv.tags.append(safebrowse) response = thehive.create_case_observable(id, urlv) if response.status_code == 201: print '[*] Added URL observable for ' + i for i in domains: domainv = CaseObservable(dataType='domain', data=i, tlp=1, ioc=False, tags=['thehive4py', 'domain', 'phishing'], message='from urlscan.io') domainv.tags.append(safebrowse) response = thehive.create_case_observable(id, domainv) if response.status_code == 201: print '[*] Added domain observable for ' + i case.description = '[Scan Summary](https://urlscan.io/results/{0}/#summary)\n\n'.format( uuid) case.description += screenshot + "\n\n" if certificates: for k, v in certificates[0].iteritems(): if k == "subjectName": case.description += "```\nSubject Name: " + v + "\n" if k == "validFrom": case.description += "Valid from: " + time.strftime( '%Y-%m-%d %H:%M:%S', time.localtime(v)) + "\n" if k == "validTo": case.description += "Valid to: " + time.strftime( '%Y-%m-%d %H:%M:%S', time.localtime(v)) + "\n"
'ioc': True, 'sighted': True, 'message': "Test2", } r_thehive2 = requests.post( url="http://10.6.5.157:9000/api/case/:84803832/artifact", headers={'Authorization': 'Bearer eEBuNfc36ccy2Nk4OOdv+xOGIydptbmV'}, data=xxx2, verify=False) data3 = r_thehive2.json() print(data3) print("data3 :", data3) print(thehive_caseid) # Init the CaseObservable object ip_observable = CaseObservable( dataType='ip', data=offense_source, tlp=1, ioc=True, sighted=True, ) # Call the API response = api.create_case_observable(thehive_caseid, ip_observable) print(data2) print(type(data2)) n = n + 1
def connectEws():
    """Poll an Exchange (EWS) folder and mirror unread mail into TheHive.

    For each unread message: find (by conversation id) or create a case,
    ensure it has a 'Communication' task, log the email body to that task,
    upload non-inline attachments as file observables, and add any
    observables found in the body. Returns {'success': bool}.
    Relies on project helpers (getConf, EwsConnector, TheHiveConnector,
    TempAttachment, getEmailBody, searchObservables, isWhitelisted) and a
    module-level API_KEY defined elsewhere.
    """
    logger = logging.getLogger(__name__)
    logger.info('%s.connectEws starts', __name__)
    report = dict()
    report['success'] = bool()
    try:
        cfg = getConf()
        ewsConnector = EwsConnector(cfg)
        folder_name = cfg.get('EWS', 'folder_name')
        unread = ewsConnector.scan(folder_name)
        theHiveConnector = TheHiveConnector(cfg)
        # Direct API handle used only for body observables below.
        api = TheHiveApi('http://127.0.0.1:9000', API_KEY)
        for msg in unread:
            #type(msg) #<class 'exchangelib.folders.Message'>
            conversationId = msg.conversation_id.id
            #searching if case has already been created from the email
            #conversation
            esCaseId = theHiveConnector.searchCaseByDescription(conversationId)
            if esCaseId is None:
                #no case previously created from the conversation
                caseTitle = str(msg.subject)
                # The conversation id is embedded in the description so the
                # lookup above can find this case for follow-up emails.
                caseDescription = ('```\n' +
                                   'Case created by Synapse\n' +
                                   'conversation_id: "' +
                                   str(msg.conversation_id.id) +
                                   '"\n' +
                                   '```')
                if msg.categories:
                    # First Outlook category doubles as the case assignee.
                    assignee = msg.categories[0]
                else:
                    assignee = 'synapse'
                case = theHiveConnector.craftCase(caseTitle, caseDescription)
                createdCase = theHiveConnector.createCase(case)
                caseUpdated = theHiveConnector.assignCase(
                    createdCase, assignee)
                commTask = theHiveConnector.craftCommTask()
                esCaseId = caseUpdated.id
                commTaskId = theHiveConnector.createTask(esCaseId, commTask)
            else:
                #case previously created from the conversation
                commTaskId = theHiveConnector.getTaskIdByTitle(
                    esCaseId, 'Communication')
                if commTaskId != None:
                    pass
                else:
                    #case already exists but no Communication task found
                    #creating comm task
                    commTask = theHiveConnector.craftCommTask()
                    commTaskId = theHiveConnector.createTask(
                        esCaseId, commTask)
            fullBody = getEmailBody(msg)
            #Scan body message for observables, returns list of observables
            observables = searchObservables(fullBody)
            # Email body is logged to the Communication task.
            taskLog = theHiveConnector.craftTaskLog(fullBody)
            createdTaskLogId = theHiveConnector.addTaskLog(commTaskId, taskLog)
            readMsg = ewsConnector.markAsRead(msg)
            for attachmentLvl1 in msg.attachments:
                #uploading the attachment as file observable
                #is the attachment is a .msg, the eml version
                #of the file is uploaded
                tempAttachment = TempAttachment(attachmentLvl1)
                if not tempAttachment.isInline:
                    #adding the attachment only if it is not inline
                    #inline attachments are pictures in the email body
                    tmpFilepath = tempAttachment.writeFile()
                    to = str()
                    for recipient in msg.to_recipients:
                        to = to + recipient.email_address + ' '
                    comment = 'Attachment from email sent by '
                    comment += str(msg.author.email_address).lower()
                    comment += ' and received by '
                    comment += str(to).lower()
                    comment += ' with subject: <'
                    comment += msg.subject
                    comment += '>'
                    theHiveConnector.addFileObservable(esCaseId,
                                                       tmpFilepath,
                                                       comment)
                    if tempAttachment.isEmailAttachment:
                        #if the attachment is an email
                        #attachments of this email are also
                        #uploaded to TheHive
                        for attachmentLvl2 in tempAttachment.attachments:
                            tempAttachmentLvl2 = TempAttachment(attachmentLvl2)
                            tmpFilepath = tempAttachmentLvl2.writeFile()
                            comment = 'Attachment from the email attached'
                            theHiveConnector.addFileObservable(
                                esCaseId, tmpFilepath, comment)
            #Parse obserables
            for o in observables:
                if isWhitelisted(o['value']):
                    print("skipping %s" % o['value'])
                else:
                    observable = CaseObservable(
                        dataType=o['type'],
                        data=o['value'],
                        tlp=2,
                        ioc=False,
                        tags=['Synapse'],
                        message='Found in the email body')
                    #send observables to case
                    response = api.create_case_observable(esCaseId, observable)
                    # Crude rate limiting against TheHive.
                    time.sleep(1)
        report['success'] = True
        return report
    except Exception as e:
        logger.error('Failed to create case from email', exc_info=True)
        report['success'] = False
        return report
class TheHive(AppBase):
    """
    Shuffle/Walkoff app wrapping TheHive REST API via thehive4py.

    Inherit from the AppBase class to have Redis, logging, and console
    logging set up behind the scenes. Every action receives the apikey,
    url and organisation, and (re)connects before calling the API.
    """

    __version__ = "1.1.0"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """
        super().__init__(redis, logger, console_logger)

    def __connect_thehive(self, url, apikey, organisation, version=None):
        """Build the TheHiveApi client on self.thehive.

        'version' is accepted because some actions pass version=4 — the
        original signature lacked it, so those actions always raised
        TypeError. thehive4py's constructor here does not take it, so it
        is intentionally unused.
        """
        if organisation:
            self.thehive = TheHiveApi(url,
                                      apikey,
                                      cert=False,
                                      organisation=organisation)
        else:
            self.thehive = TheHiveApi(url, apikey, cert=False)

    @staticmethod
    def _split_tags(tags):
        """Normalise a comma-separated tag string into a list.

        Unifies the three hand-rolled variants that existed before; a
        single tag without commas now becomes [tag] everywhere (the
        original add_case_artifact silently dropped it).
        """
        if not tags:
            return []
        if ", " in tags:
            return tags.split(", ")
        if "," in tags:
            return tags.split(",")
        return [tags]

    async def search_case_title(self, apikey, url, organisation, title_query):
        """Find cases whose title contains title_query."""
        self.__connect_thehive(url, apikey, organisation)
        response = self.thehive.find_cases(query=ContainsString(
            "title", title_query),
                                           range="all",
                                           sort=[])
        return response.text

    async def custom_search(self,
                            apikey,
                            url,
                            organisation,
                            search_for,
                            custom_query,
                            range="all"):
        """Run a raw query against alerts or cases ('search_for')."""
        self.__connect_thehive(url, apikey, organisation)
        try:
            custom_query = json.loads(custom_query)
        except:
            # Not JSON — pass the query through unchanged.
            pass
        if search_for == "alert":
            response = self.thehive.find_alerts(query=custom_query,
                                                range="all",
                                                sort=[])
        else:
            response = self.thehive.find_cases(query=custom_query,
                                               range="all",
                                               sort=[])
        if response.status_code in (200, 201, 202):
            return response.text
        raise IOError(response.text)

    async def add_case_artifact(
        self,
        apikey,
        url,
        organisation,
        case_id,
        data,
        datatype,
        tags=None,
        tlp=None,
        ioc=None,
        sighted=None,
        description="",
    ):
        """Add a single observable (artifact) to an existing case.

        The original crashed on its own defaults: ioc/sighted were
        None.lower()'d and tags tested with 'in' against None.
        """
        self.__connect_thehive(url, apikey, organisation)
        tlp = int(tlp) if tlp else 2
        ioc = isinstance(ioc, str) and ioc.lower() == "true"
        sighted = isinstance(sighted, str) and sighted.lower() == "true"
        if not description:
            description = "Created by shuffle"
        tags = self._split_tags(tags)
        item = thehive4py.models.CaseObservable(
            dataType=datatype,
            data=data,
            tlp=tlp,
            ioc=ioc,
            sighted=sighted,
            tags=tags,
            message=description,
        )
        return self.thehive.create_case_observable(case_id, item).text

    async def search_alert_title(self,
                                 apikey,
                                 url,
                                 organisation,
                                 title_query,
                                 search_range="0-25"):
        """Find alerts whose title contains title_query."""
        self.__connect_thehive(url, apikey, organisation)
        # Could be "all" too
        if search_range == "":
            search_range = "0-25"
        response = self.thehive.find_alerts(query=ContainsString(
            "title", title_query),
                                            range=search_range,
                                            sort=[])
        return response.text

    async def create_case(
        self,
        apikey,
        url,
        organisation,
        template,
        title,
        description="",
        tlp=1,
        severity=1,
        tags="",
    ):
        """Create a case, optionally from a case template."""
        self.__connect_thehive(url, apikey, organisation)
        tags = self._split_tags(tags)
        # Wutface fix
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1
        if isinstance(tlp, str):
            if not tlp.isdigit():
                # Message fixed: range is 0-3 (matches the check below).
                return "TLP needs to be a number from 0-3, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                # Fixed: the original interpolated 'tlp' here.
                return "Severity needs to be a number from 0-2, not %s" % severity
            severity = int(severity)
        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 2 or severity < 0:
            # Fixed: the original interpolated 'tlp' here.
            return "Severity needs to be a number from 0-2, not %d" % severity

        casetemplate = template if template else None
        case = thehive4py.models.Case(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            template=casetemplate,
        )
        try:
            ret = self.thehive.create_case(case)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert(
        self,
        apikey,
        url,
        organisation,
        type,
        source,
        sourceref,
        title,
        description="",
        tlp=1,
        severity=1,
        tags="",
        artifacts="",
    ):
        """Create an alert; artifacts may be a JSON string or a list of
        {'data_type', 'data', optional 'message', 'is_private_ip'} dicts."""
        self.__connect_thehive(url, apikey, organisation)
        tags = self._split_tags(tags)
        # Wutface fix
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1
        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-3, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 1-3, not %s" % severity
            severity = int(severity)
        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 3 or severity < 1:
            return "Severity needs to be a number from 1-3, not %d" % severity

        all_artifacts = []
        if artifacts != "":
            if isinstance(artifacts, str):
                try:
                    artifacts = json.loads(artifacts)
                except:
                    print("[ERROR] Error in parsing artifacts!")
            if isinstance(artifacts, list):
                for item in artifacts:
                    try:
                        kwargs = {
                            "dataType": item["data_type"],
                            "data": item["data"],
                        }
                        message = item.get("message")
                        # Fixed: the original appended to an undefined local
                        # ('message') and raised NameError for private IPs.
                        if item["data_type"] == "ip" and item.get(
                                "is_private_ip"):
                            message = (message or "") + " IP is private."
                        if message is not None:
                            kwargs["message"] = message
                        all_artifacts.append(
                            thehive4py.models.AlertArtifact(**kwargs))
                    except KeyError as e:
                        print("Error in artifacts: %s" % e)

        alert = thehive4py.models.Alert(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            type=type,
            source=source,
            sourceRef=sourceref,
            artifacts=all_artifacts,
        )
        try:
            ret = self.thehive.create_alert(alert)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert_artifact(
        self,
        apikey,
        url,
        organisation,
        alert_id,
        dataType,
        data,
        message=None,
        tlp="2",
        ioc="False",
        sighted="False",
        ignoreSimilarity="False",
        tags=None,
    ):
        """Attach an artifact to an existing alert.

        Previously raised TypeError because __connect_thehive did not
        accept the version keyword.
        """
        self.__connect_thehive(url, apikey, organisation, version=4)
        tlp = int(tlp) if tlp else 2
        ioc = ioc.lower().strip() == "true"
        sighted = sighted.lower().strip() == "true"
        ignoreSimilarity = ignoreSimilarity.lower().strip() == "true"
        tags = [x.strip() for x in tags.split(",")] if tags else []
        alert_artifact = thehive4py.models.AlertArtifact(
            dataType=dataType,
            data=data,
            message=message,
            tlp=tlp,
            ioc=ioc,
            sighted=sighted,
            ignoreSimilarity=ignoreSimilarity,
            tags=tags,
        )
        try:
            ret = self.thehive.create_alert_artifact(alert_id, alert_artifact)
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e
        if ret.status_code > 299:
            raise ConnectionError(ret.text)
        return ret.text

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, apikey, url, organisation, field_type, cur_id):
        """Fetch one TheHive object by type name and id."""
        self.__connect_thehive(url, apikey, organisation)
        dispatch = {
            # '?similarity=1' rides along on the id to request similarity data.
            "alert": lambda: self.thehive.get_alert(cur_id + "?similarity=1"),
            "case": lambda: self.thehive.get_case(cur_id),
            "case_observables":
            lambda: self.thehive.get_case_observables(cur_id),
            "case_task": lambda: self.thehive.get_case_task(cur_id),
            "case_tasks": lambda: self.thehive.get_case_tasks(cur_id),
            # NOTE(review): 'case_template' reuses get_case_tasks in the
            # original — looks wrong, but preserved; confirm intended API.
            "case_template": lambda: self.thehive.get_case_tasks(cur_id),
            "linked_cases": lambda: self.thehive.get_linked_cases(cur_id),
            "task_log": lambda: self.thehive.get_task_log(cur_id),
            "task_logs": lambda: self.thehive.get_task_logs(cur_id),
        }
        handler = dispatch.get(field_type.lower())
        if handler is None:
            return (
                "%s is not implemented. See https://github.com/frikky/shuffle-apps for more info."
                % field_type)
        return handler().text

    async def close_alert(self, apikey, url, organisation, alert_id):
        """Mark an alert as read."""
        self.__connect_thehive(url, apikey, organisation)
        return self.thehive.mark_alert_as_read(alert_id).text

    async def reopen_alert(self, apikey, url, organisation, alert_id):
        """Mark an alert as unread."""
        self.__connect_thehive(url, apikey, organisation)
        return self.thehive.mark_alert_as_unread(alert_id).text

    async def create_case_from_alert(self,
                                     apikey,
                                     url,
                                     organisation,
                                     alert_id,
                                     case_template=None):
        """Promote an alert into a case, optionally using a template."""
        self.__connect_thehive(url, apikey, organisation)
        response = self.thehive.promote_alert_to_case(
            alert_id=alert_id, case_template=case_template)
        return response.text

    async def merge_alert_into_case(self, apikey, url, organisation, alert_id,
                                    case_id):
        """Merge an alert into an existing case via the raw endpoint."""
        self.__connect_thehive(url, apikey, organisation)
        req = url + f"/api/alert/{alert_id}/merge/{case_id}"
        ret = requests.post(req, auth=self.thehive.auth)
        return ret.text

    # Not sure what the data should be
    async def update_field(self, apikey, url, organisation, field_type, cur_id,
                           field, data):
        """Update one field of an alert; '%s' prefix appends to the value."""
        # This is kinda silly but..
        if field_type.lower() == "alert":
            # Fixed: self.thehive was used below without ever connecting.
            self.__connect_thehive(url, apikey, organisation)
            newdata = {}
            if data.startswith("%s"):
                # '%sfoo' means: append 'foo' to the field's current value.
                ticket = self.thehive.get_alert(cur_id)
                if ticket.status_code != 200:
                    pass
                newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
            else:
                newdata[field] = data
            # Bleh
            url = "%s/api/alert/%s" % (url, cur_id)
            if field == "status":
                # Status changes go through the dedicated read/unread routes.
                if data == "New" or data == "Updated":
                    url = "%s/markAsUnread" % url
                elif data == "Ignored":
                    url = "%s/markAsRead" % url
                ret = requests.post(
                    url,
                    headers={
                        "Content-Type": "application/json",
                        "Authorization": "Bearer %s" % apikey,
                    },
                )
            else:
                ret = requests.patch(
                    url,
                    headers={
                        "Content-Type": "application/json",
                        "Authorization": "Bearer %s" % apikey,
                    },
                    json=newdata,
                )
            return str(ret.status_code)
        else:
            return (
                "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info."
                % field_type)

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def delete_alert_artifact(self, apikey, url, organisation,
                                    artifact_id):
        """Delete an alert artifact.

        Previously raised TypeError (version keyword, see __connect_thehive).
        """
        self.__connect_thehive(url, apikey, organisation, version=4)
        return self.thehive.delete_alert_artifact(artifact_id).text

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def run_analyzer(self, apikey, url, organisation, cortex_id,
                           analyzer_id, artifact_id):
        """Run a Cortex analyzer against an artifact."""
        self.__connect_thehive(url, apikey, organisation)
        return self.thehive.run_analyzer(cortex_id, artifact_id,
                                         analyzer_id).text

    # Creates a task log in TheHive with file
    async def create_task_log(self,
                              apikey,
                              url,
                              organisation,
                              task_id,
                              message,
                              filedata=None):
        """Add a log entry to a case task, optionally attaching a file.

        filedata previously defaulted to a shared mutable {} and raised
        KeyError on the 'success' lookup; None is the safe default.
        """
        if not filedata or filedata.get("success") == False:
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }
        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }
        data = {"_json": """{"message": "%s"}""" % message}
        response = requests.post(
            "%s/api/case/task/%s/log" % (url, task_id),
            headers=headers,
            files=files,
            data=data,
        )
        return response.text

    # Creates an observable as a file in a case
    async def create_case_file_observable(self, apikey, url, organisation,
                                          case_id, tags, filedata):
        """Upload a file as a case observable via the raw REST endpoint."""
        if filedata["success"] == False:
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }
        tags = self._split_tags(tags)
        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }
        outerarray = {"dataType": "file", "tags": tags}
        data = {"_json": """%s""" % json.dumps(outerarray)}
        response = requests.post(
            "%s/api/case/%s/artifact" % (url, case_id),
            headers=headers,
            files=files,
            data=data,
            verify=False,
        )
        return response.text

    # Get all artifacts of a given case
    async def get_case_artifacts(
        self,
        apikey,
        url,
        organisation,
        case_id,
        dataType,
    ):
        """Return all observables of a case, optionally filtered by dataType."""
        self.__connect_thehive(url, apikey, organisation)
        query = And(Eq("dataType", dataType)) if dataType else {}
        response = self.thehive.get_case_observables(
            case_id, query=query, sort=["-startDate", "+ioc"], range="all")
        if response.status_code == 200:
            found = response.json()
            if found:
                return json.dumps(found, indent=4, sort_keys=True)
            return json.dumps(
                {
                    "status": 200,
                    "message": "No observable results"
                },
                indent=4,
                sort_keys=True,
            )
        return f"Failure: {response.status_code}/{response.text}"

    async def close_case(
        self,
        apikey,
        url,
        organisation,
        id,
        resolution_status="",
        impact_status="",
        summary="",
    ):
        """Mark a case Resolved with the given resolution/impact/summary."""
        self.__connect_thehive(url, apikey, organisation)
        case = self.thehive.case(id)
        case.status = "Resolved"
        case.summary = summary
        case.resolutionStatus = resolution_status
        case.impactStatus = impact_status
        result = self.thehive.update_case(
            case,
            fields=[
                "status",
                "summary",
                "resolutionStatus",
                "impactStatus",
            ],
        )
        return json.dumps(result.json(), indent=4, sort_keys=True)

    # Update TheHive Case
    async def update_case(
        self,
        apikey,
        url,
        organisation,
        id,
        title="",
        description="",
        severity=None,
        owner="",
        flag=None,
        tlp=None,
        pap=None,
        tags="",
        status="",
        custom_fields=None,
        custom_json=None,
    ):
        """Update a case, keeping every field the caller leaves empty."""
        self.__connect_thehive(url, apikey, organisation)
        # Get current case data and update fields if new data exists
        case = self.thehive.get_case(id).json()
        print(case)
        case_title = title if title else case["title"]
        case_description = description if description else case["description"]
        case_severity = int(severity) if severity else case["severity"]
        case_owner = owner if owner else case["owner"]
        case_flag = ((False if flag.lower() == "false" else True)
                     if flag else case["flag"])
        case_tlp = int(tlp) if tlp else case["tlp"]
        case_pap = int(pap) if pap else case["pap"]
        case_tags = tags.split(",") if tags else case["tags"]
        case_status = status if status else case["status"]
        case_customFields = case["customFields"]

        # Re-encode the case's existing custom fields so they survive the
        # update. Each value is a {type_name: value, ...} mapping.
        customfields = CustomFieldHelper()
        if case_customFields:
            for key, value in case_customFields.items():
                if list(value)[0] == "integer":
                    customfields.add_integer(key, list(value.items())[0][1])
                elif list(value)[0] == "string":
                    customfields.add_string(key, list(value.items())[0][1])
                elif list(value)[0] == "boolean":
                    customfields.add_boolean(key, list(value.items())[0][1])
                elif list(value)[0] == "float":
                    customfields.add_float(key, list(value.items())[0][1])
                else:
                    print(
                        f'The value type "{value}" of the field {key} is not suported by the function.'
                    )

        # Merge in caller-supplied custom fields (JSON object string).
        custom_fields = json.loads(custom_fields) if custom_fields else {}
        for key, value in custom_fields.items():
            if type(value) == int:
                customfields.add_integer(key, value)
            elif type(value) == str:
                customfields.add_string(key, value)
            elif type(value) == bool:
                customfields.add_boolean(key, value)
            elif type(value) == float:
                customfields.add_float(key, value)
            else:
                print(
                    f'The value type "{value}" of the field {key} is not suported by the function.'
                )
        customfields = customfields.build()

        custom_json = json.loads(custom_json) if custom_json else {}

        # Prepare the fields to be updated
        case = Case(
            id=id,
            title=case_title,
            description=case_description,
            severity=case_severity,
            owner=case_owner,
            flag=case_flag,
            tlp=case_tlp,
            pap=case_pap,
            tags=case_tags,
            status=case_status,
            customFields=customfields,
            json=custom_json,
        )

        result = self.thehive.update_case(
            case,
            fields=[
                "title",
                "description",
                "severity",
                "owner",
                "flag",
                "tlp",
                "pap",
                "tags",
                "customFields",
                "status",
            ],
        )
        return json.dumps(result.json(), indent=4, sort_keys=True)

    # Get TheHive Organisations
    async def get_organisations(
        self,
        apikey,
        url,
        organisation,
    ):
        """List all organisations (raw REST endpoint)."""
        headers = {
            "Authorization": f"Bearer {apikey}",
            "Content-Type": "application/json",
        }
        response = requests.get(
            f"{url}/api/organisation",
            headers=headers,
            verify=False,
        )
        return response.text

    # Create TheHive Organisation
    async def create_organisation(
        self,
        apikey,
        url,
        organisation,
        name,
        description,
    ):
        """Create an organisation (raw REST endpoint)."""
        headers = {
            "Authorization": f"Bearer {apikey}",
            "Content-Type": "application/json",
        }
        data = {"name": f"{name}", "description": f"{description}"}
        response = requests.post(
            f"{url}/api/organisation",
            headers=headers,
            json=data,
            verify=False,
        )
        return response.text

    # Create User in TheHive
    async def create_user(
        self,
        apikey,
        url,
        organisation,
        login,
        name,
        profile,
    ):
        """Create a user in the given organisation (raw v1 REST endpoint)."""
        headers = {
            "Authorization": f"Bearer {apikey}",
            "Content-Type": "application/json",
        }
        data = {
            "login": f"{login}",
            "name": f"{name}",
            "profile": f"{profile}",
            "organisation": f"{organisation}",
        }
        response = requests.post(
            f"{url}/api/v1/user",
            headers=headers,
            json=data,
            verify=False,
        )
        return response.text
import sys
import json
from thehive4py.api import TheHiveApi
from thehive4py.models import CaseObservable

# Connect to the local TheHive instance.
api = TheHiveApi('http://10.6.5.157:9000', 'eEBuNfc36ccy2Nk4OOdv+xOGIydptbmV')

# Init the CaseObservable object
ip_observable = CaseObservable(
    dataType='ip',
    data='8.8.8.9',
    tlp=1,
    ioc=True,
    sighted=True,
)

# Call the API
response = api.create_case_observable("84803832", ip_observable)

# Display the result
if response.status_code == 201:
    # Get response data
    observableJson = response.json()
    # Display response data
    print(json.dumps(observableJson, indent=4, sort_keys=True))
else:
    # Fixed: the failure path printed only "failed" (the useful diagnostic
    # was commented out) and exited with status 0, which signals success
    # to any caller. Report the real error and exit non-zero.
    print('Failure: {}/{}'.format(response.status_code, response.text))
    sys.exit(1)