class TheHive(AppBase):
    """Shuffle/Walkoff app wrapping TheHive (thehive4py plus raw HTTP calls).

    Inherit from the AppBase class to have Redis, logging, and console
    logging set up behind the scenes.
    """

    __version__ = "1.1.0"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """Set up Redis and logging through AppBase.

        :param redis: Redis connection handle
        :param logger: main logger
        :param console_logger: optional console logger
        """
        super().__init__(redis, logger, console_logger)

    def __connect_thehive(self, url, apikey, organisation, version=None):
        """Instantiate a TheHiveApi client on ``self.thehive``.

        FIX: now accepts an optional ``version`` keyword. Some callers
        (create_alert_artifact, delete_alert_artifact) passed ``version=4``,
        which previously raised TypeError because this helper did not take
        that argument.
        """
        kwargs = {"cert": False}
        if organisation:
            kwargs["organisation"] = organisation
        if version is not None:
            kwargs["version"] = version
        self.thehive = TheHiveApi(url, apikey, **kwargs)

    def _split_tags(self, tags):
        """Normalize a comma-separated tag string into a list.

        Returns [] for a falsy input; a single comma-less tag becomes a
        one-element list instead of being dropped.
        """
        if not tags:
            return []
        if ", " in tags:
            return tags.split(", ")
        if "," in tags:
            return tags.split(",")
        return [tags]

    async def search_case_title(self, apikey, url, organisation, title_query):
        """Find cases whose title contains ``title_query``; returns raw JSON text."""
        self.__connect_thehive(url, apikey, organisation)
        response = self.thehive.find_cases(
            query=ContainsString("title", title_query), range="all", sort=[])
        return response.text

    async def custom_search(self, apikey, url, organisation, search_for,
                            custom_query, range="all"):
        """Run a user-supplied query against alerts or cases.

        ``custom_query`` may be a JSON string or an already-parsed object.
        Raises IOError on a non-2xx response.
        FIX: the ``range`` parameter was previously accepted but ignored
        (requests always used "all"); it is now forwarded.
        """
        self.__connect_thehive(url, apikey, organisation)
        try:
            custom_query = json.loads(custom_query)
        except (ValueError, TypeError):
            # Best effort: keep the query as-is when it is not a JSON string.
            pass

        if search_for == "alert":
            response = self.thehive.find_alerts(query=custom_query,
                                                range=range, sort=[])
        else:
            response = self.thehive.find_cases(query=custom_query,
                                               range=range, sort=[])

        if response.status_code in (200, 201, 202):
            return response.text
        raise IOError(response.text)

    async def add_case_artifact(
        self,
        apikey,
        url,
        organisation,
        case_id,
        data,
        datatype,
        tags=None,
        tlp=None,
        ioc=None,
        sighted=None,
        description="",
    ):
        """Add an observable (artifact) to a case.

        FIX: ``tags``/``ioc``/``sighted`` default to None, but the old code
        called ``.lower()`` / ``in`` on them unconditionally and crashed.
        They are now handled safely, and a single comma-less tag is kept
        instead of being silently dropped.
        """
        self.__connect_thehive(url, apikey, organisation)
        tlp = int(tlp) if tlp else 2
        # str() guards the None defaults; only the literal "true" enables.
        ioc = str(ioc).lower() == "true"
        sighted = str(sighted).lower() == "true"
        if not description:
            description = "Created by shuffle"
        tags = self._split_tags(tags)

        item = thehive4py.models.CaseObservable(
            dataType=datatype,
            data=data,
            tlp=tlp,
            ioc=ioc,
            sighted=sighted,
            tags=tags,
            message=description,
        )
        return self.thehive.create_case_observable(case_id, item).text

    async def search_alert_title(self, apikey, url, organisation, title_query,
                                 search_range="0-25"):
        """Find alerts whose title contains ``title_query``.

        ``search_range`` may be e.g. "0-25" or "all"; empty falls back to "0-25".
        """
        self.__connect_thehive(url, apikey, organisation)
        if search_range == "":
            search_range = "0-25"
        response = self.thehive.find_alerts(
            query=ContainsString("title", title_query),
            range=search_range,
            sort=[])
        return response.text

    async def create_case(
        self,
        apikey,
        url,
        organisation,
        template,
        title,
        description="",
        tlp=1,
        severity=1,
        tags="",
    ):
        """Create a case, optionally from a case template.

        Returns the API response text, or an error string for invalid input
        or a connection failure.
        FIX: the severity error messages previously interpolated ``tlp``.
        """
        self.__connect_thehive(url, apikey, organisation)
        tags = self._split_tags(tags)

        # Wutface fix: tolerate empty/None numeric inputs.
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-2, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 0-2, not %s" % severity
            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 2 or severity < 0:
            return "Severity needs to be a number from 0-2, not %d" % severity

        casetemplate = template if template else None
        case = thehive4py.models.Case(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            template=casetemplate,
        )
        try:
            ret = self.thehive.create_case(case)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert(
        self,
        apikey,
        url,
        organisation,
        type,
        source,
        sourceref,
        title,
        description="",
        tlp=1,
        severity=1,
        tags="",
        artifacts="",
    ):
        """Create an alert, optionally with artifacts.

        ``artifacts`` may be a JSON string or a list of dicts with keys
        ``data_type``, ``data`` and optionally ``message`` / ``is_private_ip``.
        FIX: the " IP is private." note was appended to an undefined local
        (NameError swallowed by a bare except, note silently lost); it is
        now appended to the artifact message as intended.
        """
        self.__connect_thehive(url, apikey, organisation)
        tags = self._split_tags(tags)

        # Wutface fix: tolerate empty/None numeric inputs.
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-3, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 1-3, not %s" % severity
            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 3 or severity < 1:
            return "Severity needs to be a number from 1-3, not %d" % severity

        all_artifacts = []
        if artifacts != "":
            if isinstance(artifacts, str):
                try:
                    artifacts = json.loads(artifacts)
                except (ValueError, TypeError):
                    print("[ERROR] Error in parsing artifacts!")

            if isinstance(artifacts, list):
                for item in artifacts:
                    try:
                        artifact = thehive4py.models.AlertArtifact(
                            dataType=item["data_type"],
                            data=item["data"],
                        )
                        message = item.get("message", "")
                        if item["data_type"] == "ip" and item.get("is_private_ip"):
                            message += " IP is private."
                        if message:
                            artifact["message"] = message
                        all_artifacts.append(artifact)
                    except KeyError as e:
                        print("Error in artifacts: %s" % e)

        alert = thehive4py.models.Alert(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            type=type,
            source=source,
            sourceRef=sourceref,
            artifacts=all_artifacts,
        )
        try:
            ret = self.thehive.create_alert(alert)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert_artifact(
        self,
        apikey,
        url,
        organisation,
        alert_id,
        dataType,
        data,
        message=None,
        tlp="2",
        ioc="False",
        sighted="False",
        ignoreSimilarity="False",
        tags=None,
    ):
        """Add an artifact to an existing alert (needs the v4 API).

        Raises ConnectionError on a non-2xx response.
        """
        self.__connect_thehive(url, apikey, organisation, version=4)

        tlp = int(tlp) if tlp else 2
        ioc = ioc.lower().strip() == "true"
        sighted = sighted.lower().strip() == "true"
        ignoreSimilarity = ignoreSimilarity.lower().strip() == "true"
        tags = [x.strip() for x in tags.split(",")] if tags else []

        alert_artifact = thehive4py.models.AlertArtifact(
            dataType=dataType,
            data=data,
            message=message,
            tlp=tlp,
            ioc=ioc,
            sighted=sighted,
            ignoreSimilarity=ignoreSimilarity,
            tags=tags,
        )
        try:
            ret = self.thehive.create_alert_artifact(alert_id, alert_artifact)
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

        if ret.status_code > 299:
            raise ConnectionError(ret.text)
        return ret.text

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, apikey, url, organisation, field_type, cur_id):
        """Fetch a single item of the given type by id; returns raw JSON text."""
        self.__connect_thehive(url, apikey, organisation)

        ft = field_type.lower()
        if ft == "alert":
            ret = self.thehive.get_alert(cur_id + "?similarity=1")
        elif ft == "case":
            ret = self.thehive.get_case(cur_id)
        elif ft == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif ft == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif ft == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif ft == "case_template":
            # NOTE(review): this calls get_case_tasks — looks like a
            # copy-paste of the branch above; confirm the intended
            # thehive4py call for templates.
            ret = self.thehive.get_case_tasks(cur_id)
        elif ft == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif ft == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif ft == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return (
                "%s is not implemented. See https://github.com/frikky/shuffle-apps for more info."
                % field_type)
        return ret.text

    async def close_alert(self, apikey, url, organisation, alert_id):
        """Mark an alert as read."""
        self.__connect_thehive(url, apikey, organisation)
        return self.thehive.mark_alert_as_read(alert_id).text

    async def reopen_alert(self, apikey, url, organisation, alert_id):
        """Mark an alert as unread."""
        self.__connect_thehive(url, apikey, organisation)
        return self.thehive.mark_alert_as_unread(alert_id).text

    async def create_case_from_alert(self, apikey, url, organisation, alert_id,
                                     case_template=None):
        """Promote an alert to a case, optionally using a case template."""
        self.__connect_thehive(url, apikey, organisation)
        response = self.thehive.promote_alert_to_case(
            alert_id=alert_id, case_template=case_template)
        return response.text

    async def merge_alert_into_case(self, apikey, url, organisation, alert_id,
                                    case_id):
        """Merge an alert into an existing case via the raw HTTP endpoint."""
        self.__connect_thehive(url, apikey, organisation)
        req = url + f"/api/alert/{alert_id}/merge/{case_id}"
        ret = requests.post(req, auth=self.thehive.auth)
        return ret.text

    # Not sure what the data should be
    async def update_field(self, apikey, url, organisation, field_type, cur_id,
                           field, data):
        """Update a single field on an item; only alerts are implemented.

        A ``data`` value starting with "%s" appends the remainder to the
        field's current value. Returns the HTTP status code as a string.
        FIX: ``self.thehive`` was used without ever being connected, so a
        fresh worker crashed with AttributeError on the "%s" path.
        """
        # This is kinda silly but..
        if field_type.lower() != "alert":
            return (
                "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info."
                % field_type)

        self.__connect_thehive(url, apikey, organisation)

        newdata = {}
        if data.startswith("%s"):
            ticket = self.thehive.get_alert(cur_id)
            if ticket.status_code != 200:
                pass
            newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
        else:
            newdata[field] = data

        # Bleh
        url = "%s/api/alert/%s" % (url, cur_id)
        if field == "status":
            if data == "New" or data == "Updated":
                url = "%s/markAsUnread" % url
            elif data == "Ignored":
                url = "%s/markAsRead" % url
            ret = requests.post(
                url,
                headers={
                    "Content-Type": "application/json",
                    "Authorization": "Bearer %s" % apikey,
                },
            )
        else:
            ret = requests.patch(
                url,
                headers={
                    "Content-Type": "application/json",
                    "Authorization": "Bearer %s" % apikey,
                },
                json=newdata,
            )
        return str(ret.status_code)

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def delete_alert_artifact(self, apikey, url, organisation,
                                    artifact_id):
        """Delete an alert artifact (needs the v4 API)."""
        self.__connect_thehive(url, apikey, organisation, version=4)
        return self.thehive.delete_alert_artifact(artifact_id).text

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def run_analyzer(self, apikey, url, organisation, cortex_id,
                           analyzer_id, artifact_id):
        """Run a Cortex analyzer on an artifact."""
        self.__connect_thehive(url, apikey, organisation)
        return self.thehive.run_analyzer(cortex_id, artifact_id,
                                         analyzer_id).text

    # Creates a task log in TheHive with file
    async def create_task_log(self, apikey, url, organisation, task_id,
                              message, filedata=None):
        """Create a task log, attaching file data when provided.

        FIX: the old mutable default (``filedata={}``) raised KeyError on
        ``filedata["success"]`` when no file was supplied.
        """
        filedata = filedata or {}
        if not filedata.get("success"):
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }
        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }
        data = {"_json": """{"message": "%s"}""" % message}
        response = requests.post(
            "%s/api/case/task/%s/log" % (url, task_id),
            headers=headers,
            files=files,
            data=data,
        )
        return response.text

    # Creates an observable as a file in a case
    async def create_case_file_observable(self, apikey, url, organisation,
                                          case_id, tags, filedata):
        """Create a file observable on a case from uploaded file data."""
        if not filedata.get("success"):
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }
        tags = self._split_tags(tags)
        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }
        outerarray = {"dataType": "file", "tags": tags}
        data = {"_json": """%s""" % json.dumps(outerarray)}
        response = requests.post(
            "%s/api/case/%s/artifact" % (url, case_id),
            headers=headers,
            files=files,
            data=data,
            verify=False,
        )
        return response.text

    # Get all artifacts of a given case
    async def get_case_artifacts(
        self,
        apikey,
        url,
        organisation,
        case_id,
        dataType,
    ):
        """Get all observables of a case, optionally filtered by dataType.

        Returns pretty-printed JSON, or a failure string on error.
        """
        self.__connect_thehive(url, apikey, organisation)
        query = And(Eq("dataType", dataType)) if dataType else {}

        # Call the API
        response = self.thehive.get_case_observables(
            case_id, query=query, sort=["-startDate", "+ioc"], range="all")

        if response.status_code == 200:
            # Renamed from `list` to avoid shadowing the builtin.
            observables = response.json()
            if observables:
                return json.dumps(observables, indent=4, sort_keys=True)
            return json.dumps(
                {
                    "status": 200,
                    "message": "No observable results"
                },
                indent=4,
                sort_keys=True,
            )
        return f"Failure: {response.status_code}/{response.text}"

    async def close_case(
        self,
        apikey,
        url,
        organisation,
        id,
        resolution_status="",
        impact_status="",
        summary="",
    ):
        """Mark a case as Resolved with the given summary and statuses."""
        self.__connect_thehive(url, apikey, organisation)
        case = self.thehive.case(id)
        case.status = "Resolved"
        case.summary = summary
        case.resolutionStatus = resolution_status
        case.impactStatus = impact_status
        result = self.thehive.update_case(
            case,
            fields=[
                "status",
                "summary",
                "resolutionStatus",
                "impactStatus",
            ],
        )
        return json.dumps(result.json(), indent=4, sort_keys=True)

    # Update TheHive Case
    async def update_case(
        self,
        apikey,
        url,
        organisation,
        id,
        title="",
        description="",
        severity=None,
        owner="",
        flag=None,
        tlp=None,
        pap=None,
        tags="",
        status="",
        custom_fields=None,
        custom_json=None,
    ):
        """Update a case, keeping the current value for any omitted field.

        ``custom_fields``/``custom_json`` are optional JSON strings.
        FIX: removed a duplicated ``case_tags`` assignment.
        """
        self.__connect_thehive(url, apikey, organisation)

        # Get current case data and update fields if new data exists.
        case = self.thehive.get_case(id).json()
        print(case)
        case_title = title if title else case["title"]
        case_description = description if description else case["description"]
        case_severity = int(severity) if severity else case["severity"]
        case_owner = owner if owner else case["owner"]
        case_flag = ((False if flag.lower() == "false" else True)
                     if flag else case["flag"])
        case_tlp = int(tlp) if tlp else case["tlp"]
        case_pap = int(pap) if pap else case["pap"]
        case_tags = tags.split(",") if tags else case["tags"]
        case_status = status if status else case["status"]
        case_customFields = case["customFields"]

        # Rebuild the custom fields, starting from the ones on the case.
        customfields = CustomFieldHelper()
        if case_customFields:
            for key, value in case_customFields.items():
                # NOTE(review): assumes the first key of each field dict is
                # its type name ("integer"/"string"/...) — confirm against
                # the TheHive customFields schema.
                field_kind = list(value)[0]
                field_value = list(value.items())[0][1]
                if field_kind == "integer":
                    customfields.add_integer(key, field_value)
                elif field_kind == "string":
                    customfields.add_string(key, field_value)
                elif field_kind == "boolean":
                    customfields.add_boolean(key, field_value)
                elif field_kind == "float":
                    customfields.add_float(key, field_value)
                else:
                    print(
                        f'The value type "{value}" of the field {key} is not suported by the function.'
                    )

        # Overlay/append user-supplied custom fields (bool before int:
        # bool is a subclass of int).
        custom_fields = json.loads(custom_fields) if custom_fields else {}
        for key, value in custom_fields.items():
            if isinstance(value, bool):
                customfields.add_boolean(key, value)
            elif isinstance(value, int):
                customfields.add_integer(key, value)
            elif isinstance(value, str):
                customfields.add_string(key, value)
            elif isinstance(value, float):
                customfields.add_float(key, value)
            else:
                print(
                    f'The value type "{value}" of the field {key} is not suported by the function.'
                )
        customfields = customfields.build()

        custom_json = json.loads(custom_json) if custom_json else {}

        # Prepare the fields to be updated
        case = Case(
            id=id,
            title=case_title,
            description=case_description,
            severity=case_severity,
            owner=case_owner,
            flag=case_flag,
            tlp=case_tlp,
            pap=case_pap,
            tags=case_tags,
            status=case_status,
            customFields=customfields,
            json=custom_json,
        )

        result = self.thehive.update_case(
            case,
            fields=[
                "title",
                "description",
                "severity",
                "owner",
                "flag",
                "tlp",
                "pap",
                "tags",
                "customFields",
                "status",
            ],
        )
        return json.dumps(result.json(), indent=4, sort_keys=True)

    # Get TheHive Organisations
    async def get_organisations(
        self,
        apikey,
        url,
        organisation,
    ):
        """List organisations (raw HTTP; TLS verification disabled)."""
        headers = {
            "Authorization": f"Bearer {apikey}",
            "Content-Type": "application/json",
        }
        response = requests.get(
            f"{url}/api/organisation",
            headers=headers,
            verify=False,
        )
        return response.text

    # Create TheHive Organisation
    async def create_organisation(
        self,
        apikey,
        url,
        organisation,
        name,
        description,
    ):
        """Create an organisation with the given name and description."""
        headers = {
            "Authorization": f"Bearer {apikey}",
            "Content-Type": "application/json",
        }
        data = {"name": f"{name}", "description": f"{description}"}
        response = requests.post(
            f"{url}/api/organisation",
            headers=headers,
            json=data,
            verify=False,
        )
        return response.text

    # Create User in TheHive
    async def create_user(
        self,
        apikey,
        url,
        organisation,
        login,
        name,
        profile,
    ):
        """Create a user in the given organisation (v1 API)."""
        headers = {
            "Authorization": f"Bearer {apikey}",
            "Content-Type": "application/json",
        }
        data = {
            "login": f"{login}",
            "name": f"{name}",
            "profile": f"{profile}",
            "organisation": f"{organisation}",
        }
        response = requests.post(
            f"{url}/api/v1/user",
            headers=headers,
            json=data,
            verify=False,
        )
        return response.text
class TheHive(AppBase):
    """Shuffle/Walkoff app wrapping TheHive (thehive4py plus raw HTTP), v1.0.0.

    Inherit from the AppBase class to have Redis, logging, and console
    logging set up behind the scenes.
    """

    __version__ = "1.0.0"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """Set up Redis and logging through AppBase.

        :param redis: Redis connection handle
        :param logger: main logger
        :param console_logger: optional console logger
        """
        super().__init__(redis, logger, console_logger)

    async def search_cases(self, apikey, url, title_query):
        """Find cases whose title contains ``title_query``; returns raw JSON text."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        response = self.thehive.find_cases(
            query=ContainsString("title", title_query), range="all", sort=[])
        return response.text

    async def search_query(self, apikey, url, search_for, custom_query):
        """Run a JSON query against alerts or cases.

        Raises IOError for an unparsable payload or a non-200 response.
        """
        self.thehive = TheHiveApi(url, apikey, cert=False)
        try:
            query = json.loads(custom_query)
        except (ValueError, TypeError):
            raise IOError("Invalid JSON payload received.")

        if search_for == "alert":
            response = self.thehive.find_alerts(query=query, range="all",
                                                sort=[])
        else:
            response = self.thehive.find_cases(query=query, range="all",
                                               sort=[])

        if response.status_code == 200:
            return response.text
        raise IOError(response.text)

    async def add_observable(self, apikey, url, case_id, data, datatype, tags):
        """Add an observable to a case.

        FIX: the parsed ``tags`` list was computed but then discarded in
        favour of a hard-coded ["Shuffle"]; the user-supplied tags are now
        actually sent.
        """
        self.thehive = TheHiveApi(url, apikey, cert=False)
        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]
        else:
            tags = []

        item = thehive4py.models.CaseObservable(
            dataType=datatype,
            data=data,
            tlp=1,
            ioc=False,
            sighted=False,
            tags=tags,
            message="Created by shuffle",
        )
        return self.thehive.create_case_observable(case_id, item).text

    async def search_alerts(self, apikey, url, title_query,
                            search_range="0-25"):
        """Find alerts whose title contains ``title_query``.

        ``search_range`` may be e.g. "0-25" or "all"; empty falls back to "0-25".
        """
        self.thehive = TheHiveApi(url, apikey, cert=False)
        if search_range == "":
            search_range = "0-25"
        response = self.thehive.find_alerts(
            query=ContainsString("title", title_query),
            range=search_range,
            sort=[])
        return response.text

    async def create_case(self, apikey, url, title, description="", tlp=1,
                          severity=1, tags=""):
        """Create a case. Returns the response text or an error string.

        FIX: the severity error messages previously interpolated ``tlp``.
        """
        self.thehive = TheHiveApi(url, apikey, cert=False)
        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]
        else:
            tags = []

        # Wutface fix: tolerate empty/None numeric inputs.
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-2, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 0-2, not %s" % severity
            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 2 or severity < 0:
            return "Severity needs to be a number from 0-2, not %d" % severity

        case = thehive4py.models.Case(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
        )
        try:
            ret = self.thehive.create_case(case)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert(
        self,
        apikey,
        url,
        type,
        source,
        sourceref,
        title,
        description="",
        tlp=1,
        severity=1,
        tags="",
    ):
        """Create an alert. Returns the response text or an error string."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]
        else:
            tags = []

        # Wutface fix: tolerate empty/None numeric inputs.
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-3, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 1-3, not %s" % severity
            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 3 or severity < 1:
            return "Severity needs to be a number from 1-3, not %d" % severity

        alert = thehive4py.models.Alert(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            type=type,
            source=source,
            sourceRef=sourceref,
        )
        try:
            ret = self.thehive.create_alert(alert)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert_artifact(self, apikey, url, alert_id, dataType,
                                    data, message=None, tlp="2", ioc="False",
                                    sighted="False", ignoreSimilarity="False",
                                    tags=None):
        """Add an artifact to an existing alert (needs the v4 API).

        Raises ConnectionError on a non-2xx response.
        """
        self.thehive = TheHiveApi(url, apikey, cert=False, version=4)

        tlp = int(tlp) if tlp else 2
        ioc = ioc.lower().strip() == "true"
        sighted = sighted.lower().strip() == "true"
        ignoreSimilarity = ignoreSimilarity.lower().strip() == "true"
        tags = [x.strip() for x in tags.split(",")] if tags else []

        alert_artifact = thehive4py.models.AlertArtifact(
            dataType=dataType,
            data=data,
            message=message,
            tlp=tlp,
            ioc=ioc,
            sighted=sighted,
            ignoreSimilarity=ignoreSimilarity,
            tags=tags)
        try:
            ret = self.thehive.create_alert_artifact(alert_id, alert_artifact)
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

        if ret.status_code > 299:
            raise ConnectionError(ret.text)
        return ret.text

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, apikey, url, field_type, cur_id):
        """Fetch a single item of the given type by id; returns raw JSON text."""
        self.thehive = TheHiveApi(url, apikey, cert=False)

        ft = field_type.lower()
        if ft == "alert":
            ret = self.thehive.get_alert(cur_id + "?similarity=1")
        elif ft == "case":
            ret = self.thehive.get_case(cur_id)
        elif ft == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif ft == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif ft == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif ft == "case_template":
            # NOTE(review): this calls get_case_tasks — looks like a
            # copy-paste of the branch above; confirm the intended
            # thehive4py call for templates.
            ret = self.thehive.get_case_tasks(cur_id)
        elif ft == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif ft == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif ft == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return (
                "%s is not implemented. See https://github.com/frikky/shuffle-apps for more info."
                % field_type)
        return ret.text

    async def close_alert(self, apikey, url, alert_id):
        """Mark an alert as read."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        return self.thehive.mark_alert_as_read(alert_id).text

    async def reopen_alert(self, apikey, url, alert_id):
        """Mark an alert as unread."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        return self.thehive.mark_alert_as_unread(alert_id).text

    async def create_case_from_alert(self, apikey, url, alert_id,
                                     case_template=None):
        """Promote an alert to a case, optionally using a case template."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        response = self.thehive.promote_alert_to_case(
            alert_id=alert_id, case_template=case_template)
        return response.text

    async def merge_alert_into_case(self, apikey, url, alert_id, case_id):
        """Merge an alert into an existing case via the raw HTTP endpoint."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        req = url + f"/api/alert/{alert_id}/merge/{case_id}"
        ret = requests.post(req, auth=self.thehive.auth)
        return ret.text

    # Not sure what the data should be
    async def update_field(self, apikey, url, field_type, cur_id, field, data):
        """Update a single field on an item; only alerts are implemented.

        A ``data`` value starting with "%s" appends the remainder to the
        field's current value. Returns the HTTP status code as a string.
        FIX: ``self.thehive`` was used without ever being connected, so a
        fresh worker crashed with AttributeError on the "%s" path.
        """
        # This is kinda silly but..
        if field_type.lower() != "alert":
            return (
                "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info."
                % field_type)

        self.thehive = TheHiveApi(url, apikey, cert=False)

        newdata = {}
        if data.startswith("%s"):
            ticket = self.thehive.get_alert(cur_id)
            if ticket.status_code != 200:
                pass
            newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
        else:
            newdata[field] = data

        # Bleh
        url = "%s/api/alert/%s" % (url, cur_id)
        if field == "status":
            if data == "New" or data == "Updated":
                url = "%s/markAsUnread" % url
            elif data == "Ignored":
                url = "%s/markAsRead" % url
            ret = requests.post(
                url,
                headers={
                    "Content-Type": "application/json",
                    "Authorization": "Bearer %s" % apikey,
                },
            )
        else:
            ret = requests.patch(
                url,
                headers={
                    "Content-Type": "application/json",
                    "Authorization": "Bearer %s" % apikey,
                },
                json=newdata,
            )
        return str(ret.status_code)

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def run_analyzer(self, apikey, url, cortex_id, analyzer_id,
                           artifact_id):
        """Run a Cortex analyzer on an artifact."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        return self.thehive.run_analyzer(cortex_id, artifact_id,
                                         analyzer_id).text

    # Creates a task log in TheHive with file
    async def create_task_log(self, apikey, url, task_id, message,
                              filedata=None):
        """Create a task log, attaching file data when provided.

        FIX: the old mutable default (``filedata={}``) raised KeyError on
        ``filedata["success"]`` when no file was supplied.
        """
        filedata = filedata or {}
        if not filedata.get("success"):
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }
        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }
        data = {"_json": """{"message": "%s"}""" % message}
        response = requests.post(
            "%s/api/case/task/%s/log" % (url, task_id),
            headers=headers,
            files=files,
            data=data,
        )
        return response.text

    # Creates an observable as a file in a case
    async def create_case_file_observable(self, apikey, url, case_id, tags,
                                          filedata):
        """Create a file observable on a case from uploaded file data."""
        if not filedata.get("success"):
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }
        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]
        else:
            tags = []

        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }
        outerarray = {"dataType": "file", "tags": tags}
        data = {"_json": """%s""" % json.dumps(outerarray)}
        response = requests.post(
            "%s/api/case/%s/artifact" % (url, case_id),
            headers=headers,
            files=files,
            data=data,
        )
        return response.text
class CortexRunner:
    """Cortex jobs automation for TheHive: run analyzers on one observable."""

    # String: observable id (32-character TheHive artifact id).
    observable_id = None
    # Dict: observable information fetched from TheHive.
    observable = None
    # Dict: rules, keyed by dataType, each a list of {"analyzer": ...} dicts.
    rules = None
    # Dict: configuration (must contain thehive.url and thehive.key).
    config = None
    # Object: TheHiveApi client instance.
    thehive4py = None
    # List of dicts: jobs created by launch().
    jobs = None

    def __init__(self, observable_id, rules, config):
        """Cortex jobs automation for TheHive."""
        # Register the configuration (also sets proxies/auth/cert).
        self.set_config(config)
        # Check the observable id and retrieve its information.
        self.set_observable_id(observable_id)
        # Register the rules.
        self.set_rules(rules)
        # Instantiate thehive4py.
        self.thehive4py = TheHiveApi(self.config['thehive']['url'],
                                     self.config['thehive']['key'],
                                     proxies=self.proxies,
                                     cert=self.cert)

    def set_observable_id(self, observable_id):
        """Store the observable id (must be 32 characters) and fetch its data."""
        if len(observable_id) != 32:
            raise CortexRunnerException(
                'Observable Id seems to be invalid. Not a 32 character Id.')
        self.observable_id = observable_id
        # Retrieve information about the observable.
        self.get_observable_data()

    def set_rules(self, rules):
        """Validate and store the rules.

        FIX: the key type check used the Python 2-only name ``basestring``,
        which raises NameError on Python 3 (this file uses Python-3-only
        syntax such as f-strings elsewhere); it now checks against ``str``.
        """
        self.rules = rules
        # Check structure: a dict of str -> list of rule dicts.
        if not isinstance(rules, dict):
            raise CortexRunnerException(
                '"rules" parameter is not a dictionnary.')
        for key in rules:
            if not isinstance(key, str):
                raise CortexRunnerException(
                    'dataType in rules must be a string. Invalid value = {}'.
                    format(key))
            for rule in rules[key]:
                if not rule.get('analyzer'):
                    raise CortexRunnerException(
                        'rule for {} must contain an "analyzer" key-value.'.
                        format(key))

    def set_config(self, config):
        """Validate and store the configuration (needs thehive.url / thehive.key)."""
        if config.get('thehive') and config['thehive'].get(
                'url') and config['thehive'].get('key'):
            # Store configuration items used by the HTTP calls.
            self.config = config
            self.proxies = config['thehive'].get('proxies')
            self.auth = BearerAuth(self.config['thehive'].get('key'))
            self.cert = config['thehive'].get('cert')
        else:
            raise CortexRunnerException(
                'Configuration not valid. The configuration dictionnary must have at least the keys : "thehive.url" and "thehive.key"'
            )

    def launch(self):
        """Create jobs based on dataType rules for the selected observable."""
        # Reset the created job list.
        self.jobs = []
        # Get the rules for the dataType of the current observable.
        matching_rules = self.rules.get(self.observable.get('dataType'))
        if matching_rules:
            for rule in matching_rules:
                # Create a job for each analyzer and record it with its rule.
                job = self.launch_job(rule.get('analyzer'))
                self.jobs.append({"rule": rule, "job": job})
        return self.jobs

    def launch_job(self, analyzer_id):
        """Create a job for the observable id and the given analyzer id.

        Returns the job JSON on success, an empty dict otherwise.
        """
        response = self.thehive4py.run_analyzer(None, self.observable_id,
                                                analyzer_id)
        if response.status_code == 200:
            return response.json()
        return {}

    def get_observable_data(self):
        """Fetch the observable information from TheHive into ``self.observable``."""
        req = "{}/api/case/artifact/{}".format(self.config['thehive']['url'],
                                               self.observable_id)
        try:
            response = requests.get(req,
                                    proxies=self.proxies,
                                    auth=self.auth,
                                    verify=self.cert)
            if response.status_code == 200:
                self.observable = response.json()
            else:
                raise CortexRunnerException(
                    'Unable to get the observable : {}'.format(response.text))
        except requests.exceptions.RequestException as e:
            raise CortexRunnerException(
                'Unable to get the observable : {}'.format(e))
class TheHive(AppBase):
    """
    An example of a Walkoff App.
    Inherit from the AppBase class to have Redis, logging, and console logging set up behind the scenes.
    """

    __version__ = "1.0.0"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """
        super().__init__(redis, logger, console_logger)

    @staticmethod
    def _parse_tags(tags):
        """Split a comma-separated tag string into a list.

        Accepts ", " and "," as separators. BUG FIX: a single tag without
        a comma used to be silently dropped (-> []); it now becomes a
        one-element list. Empty input still yields [].
        """
        if not tags:
            return []
        if ", " in tags:
            return tags.split(", ")
        if "," in tags:
            return tags.split(",")
        return [tags]

    @staticmethod
    def _normalize_level(value, name, default, maximum):
        """Coerce a TLP/severity value to an int in [0, maximum].

        :return: (int_value, None) on success or (None, error_message).
            BUG FIX: the severity error messages used to interpolate the
            TLP value; messages now report the offending value itself.
        """
        # Wutface fix: falsy values fall back to the default
        if not value:
            value = default
        if isinstance(value, str):
            if not value.isdigit():
                return None, "%s needs to be a number from 0-%d, not %s" % (
                    name, maximum, value)
            value = int(value)
        if value > maximum or value < 0:
            return None, "%s needs to be a number from 0-%d, not %d" % (
                name, maximum, value)
        return value, None

    async def search_cases(self, apikey, url, title_query):
        """Find cases whose title matches title_query; returns raw response text."""
        self.thehive = TheHiveApi(url, apikey)
        response = self.thehive.find_cases(query=String("title:'%s'" %
                                                        title_query),
                                           range='all',
                                           sort=[])
        return response.text

    async def add_observable(self, apikey, url, case_id, data, datatype, tags):
        """Add an observable (artifact) to a case.

        :param tags: comma-separated tag string; "Shuffle" is always appended
        :return: raw response text of the create call
        """
        self.thehive = TheHiveApi(url, apikey)
        # BUG FIX: the parsed tags were previously discarded and replaced
        # with a hard-coded ["Shuffle"]; keep "Shuffle" but include them.
        tags = self._parse_tags(tags)
        item = thehive4py.models.CaseObservable(
            dataType=datatype,
            data=data,
            tlp=1,
            ioc=False,
            sighted=False,
            tags=tags + ["Shuffle"],
            message="Created by shuffle",
        )
        return self.thehive.create_case_observable(case_id, item).text

    async def search_alerts(self, apikey, url, title_query, search_range="0-25"):
        """Find alerts by title; search_range can be e.g. "0-25" or "all"."""
        self.thehive = TheHiveApi(url, apikey)
        # Could be "all" too
        if search_range == "":
            search_range = "0-25"
        response = self.thehive.find_alerts(query=String("title:'%s'" %
                                                         title_query),
                                            range=search_range,
                                            sort=[])
        return response.text

    async def create_case(self, apikey, url, title, description="", tlp=1,
                          severity=1, tags=""):
        """Create a new case.

        :param tlp: 0-3, :param severity: 0-2 (ints or digit strings)
        :param tags: comma-separated tag string
        :return: response text, or an error string on invalid tlp/severity
        """
        self.thehive = TheHiveApi(url, apikey)
        tags = self._parse_tags(tags)

        tlp, err = self._normalize_level(tlp, "TLP", 1, 3)
        if err:
            return err
        severity, err = self._normalize_level(severity, "Severity", 1, 2)
        if err:
            return err

        case = thehive4py.models.Case(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
        )
        try:
            ret = self.thehive.create_case(case)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert(self, apikey, url, type, source, sourceref, title,
                           description="", tlp=1, severity=1, tags=""):
        """Create a new alert.

        :param tlp: 0-2, :param severity: 0-2 (ints or digit strings)
        :return: response text, or an error string on invalid tlp/severity
        """
        self.thehive = TheHiveApi(url, apikey)
        tags = self._parse_tags(tags)

        tlp, err = self._normalize_level(tlp, "TLP", 1, 2)
        if err:
            return err
        severity, err = self._normalize_level(severity, "Severity", 1, 2)
        if err:
            return err

        alert = thehive4py.models.Alert(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            type=type,
            source=source,
            sourceRef=sourceref,
        )
        try:
            ret = self.thehive.create_alert(alert)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, apikey, url, field_type, cur_id):
        """Fetch one item of the given type by id; returns raw response text."""
        self.thehive = TheHiveApi(url, apikey)
        ftype = field_type.lower()
        if ftype == "alert":
            # similarity=1 also returns cases similar to the alert
            ret = self.thehive.get_alert(cur_id + "?similarity=1")
        elif ftype == "case":
            ret = self.thehive.get_case(cur_id)
        elif ftype == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif ftype == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif ftype == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif ftype == "case_template":
            # BUG FIX: this branch previously called get_case_tasks
            # (copy-paste error from the branch above).
            ret = self.thehive.get_case_template(cur_id)
        elif ftype == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif ftype == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif ftype == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return "%s is not implemented. See https://github.com/frikky/shuffle-apps for more info." % field_type
        return ret.text

    async def close_alert(self, apikey, url, alert_id):
        """Mark an alert as read (closed)."""
        self.thehive = TheHiveApi(url, apikey)
        return self.thehive.mark_alert_as_read(alert_id).text

    async def reopen_alert(self, apikey, url, alert_id):
        """Mark an alert as unread (reopened)."""
        self.thehive = TheHiveApi(url, apikey)
        return self.thehive.mark_alert_as_unread(alert_id).text

    async def create_case_from_alert(self, apikey, url, alert_id,
                                     case_template=None):
        """Promote an alert to a case, optionally using a case template."""
        self.thehive = TheHiveApi(url, apikey)
        response = self.thehive.promote_alert_to_case(
            alert_id=alert_id, case_template=case_template)
        return response.text

    async def merge_alert_into_case(self, apikey, url, alert_id, case_id):
        """Merge an alert into an existing case via the raw REST endpoint."""
        self.thehive = TheHiveApi(url, apikey)
        req = url + f"/api/alert/{alert_id}/merge/{case_id}"
        ret = requests.post(req, auth=self.thehive.auth)
        return ret.text

    # Not sure what the data should be
    async def update_field(self, apikey, url, field_type, cur_id, field, data):
        """Update a single field on an alert.

        If data starts with "%s", the current field value is fetched and the
        remainder of data is appended to it. field == "status" is mapped to
        the markAsRead/markAsUnread endpoints.
        :return: the HTTP status code as a string, or an error message
        """
        # This is kinda silly but..
        if field_type.lower() != "alert":
            return "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info." % field_type

        # BUG FIX: self.thehive was used below without being initialized,
        # so this method crashed when called before any other action.
        self.thehive = TheHiveApi(url, apikey)

        newdata = {}
        if data.startswith("%s"):
            ticket = self.thehive.get_alert(cur_id)
            # NOTE(review): a non-200 response is deliberately ignored here
            # (best-effort, as before); the json lookup below may then fail.
            if ticket.status_code != 200:
                pass
            newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
        else:
            newdata[field] = data

        # Bleh
        url = "%s/api/alert/%s" % (url, cur_id)
        if field == "status":
            if data == "New" or data == "Updated":
                url = "%s/markAsUnread" % url
            elif data == "Ignored":
                url = "%s/markAsRead" % url
            ret = requests.post(
                url,
                headers={
                    'Content-Type': 'application/json',
                    'Authorization': 'Bearer %s' % apikey
                }
            )
        else:
            ret = requests.patch(
                url,
                headers={
                    'Content-Type': 'application/json',
                    'Authorization': 'Bearer %s' % apikey
                },
                json=newdata,
            )
        return str(ret.status_code)

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def run_analyzer(self, apikey, url, cortex_id, analyzer_id,
                           artifact_id):
        """Run a Cortex analyzer on an artifact via the given Cortex instance."""
        self.thehive = TheHiveApi(url, apikey)
        return self.thehive.run_analyzer(cortex_id, artifact_id,
                                         analyzer_id).text
class TheHiveScheduledSearches:
    """Scheduled re-search of TheHive case observables in QRadar.

    Finds cases carrying a customFields.firstSearched date, queues each
    case on the module-level queue ``q``, and has a small pool of daemon
    worker threads (bounded by the module-level ``concurrent``) run the
    QRadar Cortex analyzer on every supported, recent observable, then
    stamp firstSearched/lastSearched back onto the case.
    """

    def __init__(self, TheHive, QRadar):
        """Set up the TheHive API client and the search configuration.

        :param TheHive: dict with url/key and optionally password,
            proxies, verify settings for TheHiveApi
        :param QRadar: dict with 'enabled_datatypes' — the observable
            dataTypes that may be searched in QRadar
        """
        #Retrieve enabled datatypes from config
        self.qr_enabled_datatypes = QRadar['enabled_datatypes']
        #Add epoch in milliseconds of current time to a variable
        self.current_time = int(round(time.time() * 1000))
        #Assign The Hive API class
        self.thapi = TheHiveApi(TheHive.get('url', None), TheHive.get('key'),
                                TheHive.get('password', None),
                                TheHive.get('proxies'), TheHive.get('verify'))

    #Generic function to check the response from the hive
    def check_response(self, response):
        """Log the response status; dump raw error output for codes > 299."""
        logger.debug('API TheHive - status code: {}'.format(
            response.status_code))
        if response.status_code > 299:
            logger.error('API TheHive - raw error output: {}'.format(
                response.raw.read()))
        logger.debug('Response: %s' % response.text)

    def observable_search(self):
        """Queue every case with a firstSearched custom field, start the
        worker pool, then block (polling each minute) until the queue drains.
        """
        #Search for cases with first_searched
        logger.info('Searching for matching cases')
        self.query = Contains('customFields.firstSearched.date')
        self.response = self.thapi.find_cases(query=self.query)
        logger.debug('Response: %s' % self.response.text)
        #Compare first_searched and last_searched. If longer than 60 days. Do not search.
        for case_data in self.response.json():
            queue_item = {}
            #Create observable queue item
            queue_item['action'] = "search_observables"
            queue_item['data'] = case_data
            #Add case to the queue
            self.thapi_queue(queue_item)
        self.process_queue()
        # Wait for the worker threads to empty the shared queue
        while q.qsize() > 0:
            logger.info('Current queue size(%i)' % q.qsize())
            time.sleep(60)

    #Define the logic that makes it possible to perform asynchronous requests to The Hive in order to speed up the integration
    def thapi_queue(self, queued_request):
        """Put one request dict ({'action': ..., 'data': case}) on queue q."""
        #Build up the queue
        logger.info(
            'Adding action: %s to queue for: %s (Current queue length: %i)' %
            (queued_request['action'], queued_request['data']['id'],
             q.qsize()))
        q.put(queued_request)

    def process_queue(self):
        """Start the first daemon worker thread if none is running yet."""
        #Create the first thread
        thread_count = threading.active_count()
        if thread_count <= 1:
            logger.info('Creating thread')
            t = Thread(target=self.doWork)
            t.daemon = True
            t.start()
            logger.debug('Created thread')

    #Define the functionality each workers gets
    def doWork(self):
        """Worker loop: spawn extra threads while the queue justifies it,
        then process queued 'search_observables' items until q is empty.
        """
        #Build a loop that keeps the thread alive until queue is empty
        while not q.empty():
            #Build up the threads
            thread_count = threading.active_count()
            #Make sure that the thread count is lower than configured limit and is lower than the queue size
            if thread_count < concurrent and thread_count < q.qsize():
                new_thread_count = thread_count + 1
                logger.info(
                    'Current queue size(%i) allows more threads. Creating additional thread: %i'
                    % (q.qsize(), new_thread_count))
                t = Thread(target=self.doWork)
                t.daemon = True
                t.start()
                logger.debug('Created thread: %i' % new_thread_count)
            #Retrieve a queued item
            queued_item = q.get()
            #Handle a queued item based on its provided action
            if queued_item['action'] == "search_observables":
                logger.info('Working on %s from queue, caseid: %s' %
                            (queued_item['action'], queued_item['data']['id']))
                case_data = queued_item['data']
                logger.debug("event: %s" % case_data)
                #Store CaseID
                caseid = case_data['id']
                #If the case is within scope of the search range. Perform the search
                #if (case_data['customFields']['lastSearched']['date'] - case_data['customFields']['firstSearched']['date']) < 5184000000:
                #logger.info('Observables in case {} have not yet been searched for longer than two months. Starting analyzers'.format(case_data['id']))
                self.response = self.thapi.get_case_observables(caseid)
                #Perform a search for ioc's per case in the RS search results (Trigger Cortex Analyzer)
                for observable in self.response.json():
                    searched_for = False
                    logger.debug("observable: %s" % observable)
                    logger.debug("current_time %s, observable_time %s" %
                                 (self.current_time, observable['startDate']))
                    #Check if observables are not older than 2 months or 6 months for TLP:RED
                    # NOTE(review): 5184000000 ms == 60 days, but 15552000
                    # looks like seconds (180 days) while startDate appears
                    # to be in milliseconds — the TLP:RED window may be off
                    # by a factor of 1000; confirm intended behavior.
                    if (((self.current_time - observable['startDate']) <
                         5184000000) or
                            (observable['tlp'] == 3 and
                             ((self.current_time - observable['startDate']) <
                              15552000))):
                        self.searchtype = observable['dataType']
                        if self.searchtype in self.qr_enabled_datatypes:
                            self.supported_observable = observable['_id']
                            #Trigger a search for the supported ioc
                            logger.info(
                                'Launching analyzers for observable: {}'.
                                format(self.supported_observable))
                            self.response = self.thapi.run_analyzer(
                                "Cortex-intern", self.supported_observable,
                                "IBMQRadar_Search_Automated_0_1")
                            self.check_response(self.response)
                            searched_for = True
                    if searched_for:
                        #Add customFields firstSearched and lastSearched
                        #Create a Case object? Or whatever it is
                        self.case = Case()
                        #Add the case id to the object
                        self.case.id = caseid
                        #Debug output
                        logger.info('Updating case %s' % self.case.id)
                        #Define which fields need to get updated
                        fields = ['customFields']
                        #Retrieve all required attributes from the alert and add them as custom fields to the case
                        self.customFields = CustomFieldHelper()\
                            .add_date('firstSearched', case_data['customFields']['firstSearched']['date'])\
                            .add_date('lastSearched', self.current_time)\
                            .build()
                        #Add custom fields to the case object
                        self.case.customFields = self.customFields
                        #Update the case
                        self.response = self.thapi.update_case(self.case, fields)
                        self.check_response(self.response)
        logger.info("Queue is empty, nothing left to do")