# Assumed module-level context (as in the original Synapse module): logger,
# TheHiveConnector, QRadarConnector, and a report_action default.
class Automation():
    def __init__(self, webhook, cfg):
        logger.info('Initiating QRadarAutomation')
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.QRadarConnector = QRadarConnector(cfg)
        self.webhook = webhook
        self.cfg = cfg
        # report_action is assumed to be a module-level default (e.g. 'None')
        self.report_action = report_action

    def checkIfInClosedCaseOrAlertMarkedAsRead(self, sourceref):
        query = dict()
        query['sourceRef'] = str(sourceref)
        logger.debug('Checking if third party ticket({}) is linked to a closed case'.format(sourceref))
        alert_results = self.TheHiveConnector.findAlert(query)
        if len(alert_results) > 0:
            alert_found = alert_results[0]
            if alert_found['status'] == 'Ignored':
                logger.info(f"{sourceref} is found in alert {alert_found['id']} that has been marked as read")
                return {
                    "resolutionStatus": "Indeterminate",
                    "summary": "Closed by Synapse with summary: Marked as Read within The Hive"
                }
            elif 'case' in alert_found:
                # Check if the alert is present in a closed case
                case_found = self.TheHiveConnector.getCase(alert_found['case'])
                if case_found['status'] == "Resolved":
                    logger.info(f"{sourceref} was found in a closed case {case_found['id']}")
                    resolution_status = "N/A"
                    resolution_summary = "N/A"
                    # Return the information required to sync with the third party
                    if 'resolutionStatus' in case_found:
                        resolution_status = case_found['resolutionStatus']
                    if 'summary' in case_found:
                        resolution_summary = case_found['summary']
                    return {
                        "resolutionStatus": resolution_status,
                        "summary": resolution_summary
                    }
        return False

    def parse_hooks(self):
        # Update incident status to active when imported as Alert
        if self.webhook.isQRadarAlertImported():
            self.offense_id = self.webhook.data['object']['sourceRef']
            # Check if the alert is imported in a closed case
            closure_info = self.checkIfInClosedCaseOrAlertMarkedAsRead(self.offense_id)
            if closure_info:
                logger.info('QRadar offense({}) is linked to a closed case'.format(self.offense_id))
                # Close the offense and continue with the next incident
                self.QRadarConnector.closeOffense(self.offense_id)

        # Close offenses in QRadar
        if self.webhook.isClosedQRadarCase() or self.webhook.isDeletedQRadarCase() or self.webhook.isQRadarAlertMarkedAsRead():
            if self.webhook.data['operation'] == 'Delete':
                self.case_id = self.webhook.data['objectId']
                logger.info('Case {} has been deleted'.format(self.case_id))
            elif self.webhook.data['objectType'] == 'alert':
                self.alert_id = self.webhook.data['objectId']
                logger.info('Alert {} has been marked as read'.format(self.alert_id))
                self.QRadarConnector.closeOffense(self.webhook.data['object']['sourceRef'])
            else:
                self.case_id = self.webhook.data['object']['id']
                logger.info('Case {} has been marked as resolved'.format(self.case_id))

            if hasattr(self, 'case_id'):
                if hasattr(self.webhook, 'ext_alert_id'):
                    logger.info("Closing offense {} for case {}".format(self.webhook.ext_alert_id, self.case_id))
                    self.QRadarConnector.closeOffense(self.webhook.ext_alert_id)
                elif len(self.webhook.ext_alert_ids) > 0:
                    # Close every linked offense
                    logger.info("Found multiple offenses {} for case {}".format(self.webhook.ext_alert_ids, self.case_id))
                    for offense_id in self.webhook.ext_alert_ids:
                        logger.info("Closing offense {} for case {}".format(offense_id, self.case_id))
                        self.QRadarConnector.closeOffense(offense_id)
            self.report_action = 'closeOffense'

        return self.report_action
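
# --- Illustrative sketch (not part of the original module) ---
# checkIfInClosedCaseOrAlertMarkedAsRead() returns either False or a dict
# carrying the resolution to sync back to the third party. A minimal,
# self-contained consumer of that contract, with hypothetical values:
def _demo_closure_contract():
    closure_info = {
        "resolutionStatus": "TruePositive",
        "summary": "Contained and remediated",
    }
    # Callers branch on truthiness only, then read the two keys:
    if closure_info:
        return "Closed by Synapse with summary: {}".format(closure_info["summary"])

assert _demo_closure_contract() == "Closed by Synapse with summary: Contained and remediated"
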
# Assumed module-level imports (as in the original module): json, time,
# the connectors, and In from thehive4py.query.
class Integration(Main):
    def __init__(self):
        super().__init__()
        self.lexsi = LexsiConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):
        if request.is_json:
            content = request.get_json()
            if 'type' in content and content['type'] == "Active":
                workflowReport = self.allIncidents2Alert(content['type'])
                if workflowReport['success']:
                    return json.dumps(workflowReport), 200
                else:
                    return json.dumps(workflowReport), 500
            else:
                self.logger.error('Missing type or type is not supported')
                return json.dumps({
                    'success': False,
                    'message': "Missing type or type is not supported"
                }), 500
        else:
            self.logger.error('Request did not contain valid JSON')
            return json.dumps({
                'success': False,
                'message': "Request didn't contain valid JSON"
            }), 400

    def allIncidents2Alert(self, status):
        """
        Get all open incidents created within Lexsi
        and create alerts for them in TheHive
        """
        self.logger.info('%s.allIncidents2Alert starts', __name__)

        incidentsList = self.lexsi.getOpenItems()['result']

        report = dict()
        report['success'] = True
        report['incidents'] = list()

        try:
            # Each incident in the list is represented as a dict;
            # we enrich this dict with additional details
            for incident in incidentsList:
                # Prepare a new alert
                incident_report = dict()
                self.logger.debug("incident: %s" % incident)
                theHiveAlert = self.IncidentToHiveAlert(incident)

                # Search whether the incident has already been converted to an alert
                query = dict()
                query['sourceRef'] = str(incident['incident'])
                self.logger.info('Looking for incident %s in TheHive alerts', str(incident['incident']))
                results = self.TheHiveConnector.findAlert(query)

                if len(results) == 0:
                    self.logger.info('incident %s not found in TheHive alerts, creating it', str(incident['incident']))
                    try:
                        theHiveEsAlertId = self.TheHiveConnector.createAlert(theHiveAlert)['id']
                        self.TheHiveConnector.promoteAlertToCase(theHiveEsAlertId)

                        incident_report['raised_alert_id'] = theHiveEsAlertId
                        incident_report['lexsi_incident_id'] = incident['incident']
                        incident_report['success'] = True
                    except Exception as e:
                        self.logger.error(incident_report)
                        self.logger.error('%s.allIncidents2Alert failed', __name__, exc_info=True)
                        incident_report['success'] = False
                        if isinstance(e, ValueError):
                            errorMessage = json.loads(str(e))['message']
                            incident_report['message'] = errorMessage
                        else:
                            incident_report['message'] = str(e) + ": Couldn't raise alert in TheHive"
                        incident_report['incident_id'] = incident['incident']
                        # Set overall success to failed if any incident fails
                        report['success'] = False
                else:
                    self.logger.info('incident %s already imported as alert, checking for updates', str(incident['incident']))
                    alert_found = results[0]

                    if self.TheHiveConnector.checkForUpdates(theHiveAlert, alert_found, str(incident['incident'])):
                        # Mark the alert as read
                        self.TheHiveConnector.markAlertAsRead(alert_found['id'])
                        incident_report['updated_alert_id'] = alert_found['id']
                        incident_report['lexsi_incident_id'] = str(incident['incident'])
                        incident_report['success'] = True
                    else:
                        incident_report['lexsi_incident_id'] = str(incident['incident'])
                        incident_report['success'] = True

                report['incidents'].append(incident_report)

            thehiveAlerts, open_lexsi_cases = self.lexsi_opened_alerts_thehive()
            self.set_alert_status_ignored(incidentsList, thehiveAlerts, open_lexsi_cases)
        except Exception as e:
            self.logger.error('Failed to create alert from Lexsi incident (retrieving incidents failed)', exc_info=True)
            report['success'] = False
            report['message'] = "%s: Failed to create alert from incident" % str(e)

        return report

    def IncidentToHiveAlert(self, incident):
        #
        # Creating the alert
        #

        # Setup Tags
        tags = ['Lexsi', 'incident', 'Synapse']

        # Skip artifacts for now
        artifacts = []

        # Retrieve the configured case_template
        CaseTemplate = self.cfg.get('Lexsi', 'case_template')

        # Build TheHive alert
        alert = self.TheHiveConnector.craftAlert(
            "{}: {}".format(incident['incident'], incident['title']),
            self.craftAlertDescription(incident),
            self.getHiveSeverity(incident),
            self.timestamp_to_epoch(incident['detected'], "%Y-%m-%d %H:%M:%S"),
            tags,
            2,
            'New',
            'internal',
            'Lexsi',
            str(incident['incident']),
            artifacts,
            CaseTemplate)

        return alert

    def craftAlertDescription(self, incident):
        """
        From the incident metadata, craft a nice description in markdown
        for TheHive
        """
        self.logger.debug('craftAlertDescription starts')

        # Build the incident details table
        description = (
            '#### Summary\n\n' +
            '| | |\n' +
            '| ----------------------- | ------------- |\n' +
            '| **URL** | ' + "{}{}{}".format("```", str(incident['url']), "```") + ' |\n' +
            '| **Type** | ' + str(incident['type']) + ' |\n' +
            '| **Severity** | ' + str(incident['severity']) + ' |\n' +
            '| **Category** | ' + str(incident['category']) + ' |\n' +
            '| **Updated** | ' + str(incident['updated']) + ' |\n' +
            '| **Detected** | ' + str(incident['detected']) + ' |\n' +
            '| **Source** | ' + str(incident['source']) + ' |\n' +
            '| **Analyst Name (Lexsi)** | ' + str(incident['analystName']) + ' |\n' +
            '| **Link to Orange Portal** | ' +
            str("https://portal.cert.orangecyberdefense.com/cybercrime/{}".format(incident['id'])) + ' |\n' +
            '\n\n\n\n')

        return description

    def timestamp_to_epoch(self, date_time, pattern):
        return int(time.mktime(time.strptime(date_time, pattern))) * 1000

    def getHiveSeverity(self, incident):
        # Severity in TheHive is either low (1), medium (2), high (3) or critical (4),
        # while severity in Lexsi ranges from 0 to 5
        if int(incident['severity']) in {0, 5}:
            return 1
        # elif int(incident['severity']) in {2, 3}:
        #     return 2
        # elif int(incident['severity']) in {4, 5}:
        #     return 3
        else:
            return 2

    def lexsi_opened_alerts_thehive(self):
        thehiveAlerts = []
        open_lexsi_cases = {}
        # In() comes from thehive4py.query
        query = In('tags', ['Lexsi'])

        self.logger.info('Looking for incidents in TheHive alerts with tag Lexsi')
        results = self.TheHiveConnector.findAlert(query)
        for alert_found in results:
            # Check if a case is linked; only append it to the list
            # when that case is still open
            if 'case' in alert_found:
                try:
                    case_found = self.TheHiveConnector.getCase(alert_found['case'])
                    if case_found['status'] == "Open":
                        open_lexsi_cases[alert_found['sourceRef']] = case_found
                        thehiveAlerts.append(alert_found['sourceRef'])
                except Exception as e:
                    self.logger.error("Could not find case: {}".format(e), exc_info=True)
                    continue
        self.logger.debug("Lexsi alerts opened in TheHive: {}".format(thehiveAlerts))
        return thehiveAlerts, open_lexsi_cases

    def compare_lists(self, list1, list2):
        return list(set(list1) - set(list2))

    def set_alert_status_ignored(self, incidentsList, thehiveAlerts, open_lexsi_cases):
        lexsi_reporting = []
        for incident in incidentsList:
            lexsi_reporting.append(incident['incident'])

        self.logger.debug("The list of open Lexsi incidents: {}".format(lexsi_reporting))
        uncommon_elements = self.compare_lists(thehiveAlerts, lexsi_reporting)
        self.logger.debug(
            "Open cases present in TheHive but not in the list of open Lexsi incidents: {}".format(uncommon_elements))

        for element in uncommon_elements:
            self.logger.info("Preparing to close the case for {}".format(element))
            query = dict()
            query['sourceRef'] = str(element)
            self.logger.debug('Looking for incident %s in TheHive alerts', str(element))
            try:
                if element in open_lexsi_cases:
                    # Resolve the case
                    case_id = open_lexsi_cases[element]['id']
                    self.logger.debug("Case id for element {}: {}".format(element, case_id))
                    self.logger.debug("Preparing to resolve the case")
                    self.TheHiveConnector.closeCase(case_id)
                    self.logger.debug("Closed case with id {} for {}".format(case_id, element))
            except Exception as e:
                self.logger.error("Could not close case: {}".format(e), exc_info=True)
                continue
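
# --- Illustrative sketch (not part of the original module) ---
# The closure sweep in set_alert_status_ignored() boils down to a set
# difference: anything still open in TheHive but no longer open in Lexsi gets
# closed. A runnable example with hypothetical ids:
thehive_open = ["476121", "476200"]
lexsi_open = ["476200"]
to_close = list(set(thehive_open) - set(lexsi_open))
assert to_close == ["476121"]
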
# Assumed module-level context (as in the original Synapse module): logger,
# TheHiveConnector, AzureSentinelConnector, and a report_action default.
class Automation():
    def __init__(self, webhook, cfg):
        logger.info('Initiating AzureSentinel Automation')
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.AzureSentinelConnector = AzureSentinelConnector(cfg)
        self.webhook = webhook
        self.cfg = cfg
        # report_action is assumed to be a module-level default (e.g. 'None')
        self.report_action = report_action
        # Translation table between TheHive resolution statuses and
        # Azure Sentinel classifications
        self.closure_status = {
            "Indeterminate": "Undetermined",
            "FalsePositive": "FalsePositive",
            "TruePositive": "TruePositive",
            "Other": "BenignPositive"
        }

    def checkIfInClosedCaseOrAlertMarkedAsRead(self, sourceref):
        query = dict()
        query['sourceRef'] = str(sourceref)
        logger.debug('Checking if third party ticket({}) is linked to a closed case'.format(sourceref))
        alert_results = self.TheHiveConnector.findAlert(query)
        if len(alert_results) > 0:
            alert_found = alert_results[0]
            if alert_found['status'] == 'Ignored':
                logger.info(f"{sourceref} is found in alert {alert_found['id']} that has been marked as read")
                return {
                    "resolutionStatus": "Indeterminate",
                    "summary": "Closed by Synapse with summary: Marked as Read within The Hive"
                }
            elif 'case' in alert_found:
                # Check if the alert is present in a closed case
                case_found = self.TheHiveConnector.getCase(alert_found['case'])
                if case_found['status'] == "Resolved":
                    # For duplicates, follow the merge chain to the final case
                    if 'resolutionStatus' in case_found and case_found['resolutionStatus'] == "Duplicated":
                        merged_case_found = self.getFinalMergedCase(case_found)
                        logger.debug(f"found merged cases {merged_case_found}")
                        if merged_case_found:
                            if merged_case_found['status'] != "Resolved":
                                return False
                            else:
                                case_found = merged_case_found
                    logger.info(f"{sourceref} was found in a closed case {case_found['id']}")
                    resolution_status = "N/A"
                    resolution_summary = "N/A"
                    # Return the information required to sync with the third party
                    if 'resolutionStatus' in case_found:
                        resolution_status = case_found['resolutionStatus']
                    if 'summary' in case_found:
                        resolution_summary = case_found['summary']
                    return {
                        "resolutionStatus": resolution_status,
                        "summary": resolution_summary
                    }
        return False

    def parse_hooks(self):
        # Update incident status to active when imported as Alert
        if self.webhook.isAzureSentinelAlertImported():
            self.incidentId = self.webhook.data['object']['sourceRef']
            # Check if the alert is imported in a closed case
            closure_info = self.checkIfInClosedCaseOrAlertMarkedAsRead(self.incidentId)
            if closure_info:
                logger.info('Sentinel incident({}) is linked to a closed case'.format(self.incidentId))
                # Translate the case status
                classification = self.closure_status[closure_info['resolutionStatus']]
                classification_comment = "Closed by Synapse with summary: {}".format(closure_info['summary'])
                # Close the incident and continue with the next incident
                self.AzureSentinelConnector.closeIncident(self.incidentId, classification, classification_comment)
            else:
                logger.info('Incident {} needs to be updated to status Active'.format(self.incidentId))
                self.AzureSentinelConnector.updateIncidentStatusToActive(self.incidentId)
                self.report_action = 'updateIncident'

        # Close incidents in Azure Sentinel
        if self.webhook.isClosedAzureSentinelCase() or self.webhook.isDeletedAzureSentinelCase() or self.webhook.isAzureSentinelAlertMarkedAsRead():
            if self.webhook.data['operation'] == 'Delete':
                self.case_id = self.webhook.data['objectId']
                self.classification = "Undetermined"
                self.classification_comment = "Closed by Synapse with summary: Deleted within The Hive"
                logger.info('Case {} has been deleted'.format(self.case_id))
            elif self.webhook.data['objectType'] == 'alert':
                self.alert_id = self.webhook.data['objectId']
                self.incidentId = self.webhook.data['object']['sourceRef']
                self.classification = "Undetermined"
                self.classification_comment = "Closed by Synapse with summary: Marked as Read within The Hive"
                logger.info('Alert {} has been marked as read'.format(self.webhook.data['object']['sourceRef']))
                self.AzureSentinelConnector.closeIncident(self.incidentId, self.classification, self.classification_comment)
            # Ensure duplicated incidents don't get closed when merged,
            # but only when the merged case is closed
            elif 'resolutionStatus' in self.webhook.data['details'] and self.webhook.data['details']['resolutionStatus'] != "Duplicated":
                self.case_id = self.webhook.data['object']['id']
                self.classification = self.closure_status[self.webhook.data['details']['resolutionStatus']]
                self.classification_comment = "Closed by Synapse with summary: {}".format(self.webhook.data['details']['summary'])
                logger.info('Case {} has been marked as resolved'.format(self.case_id))
                if 'mergeFrom' in self.webhook.data['object']:
                    logger.info(f'Case {self.case_id} is a merged case. Finding original cases')
                    original_cases = []
                    for merged_case in self.webhook.data['object']['mergeFrom']:
                        original_cases.extend(self.getOriginalCases(merged_case))
                    # Find alerts for each original case
                    for original_case in original_cases:
                        query = {'case': original_case['id']}
                        found_alerts = self.TheHiveConnector.findAlert(query)
                        # Close the alerts that have been found
                        for found_alert in found_alerts:
                            logger.info("Closing incident {} for case {}".format(found_alert['sourceRef'], self.case_id))
                            self.AzureSentinelConnector.closeIncident(found_alert['sourceRef'], self.classification, self.classification_comment)

            if hasattr(self, 'case_id'):
                if hasattr(self.webhook, 'ext_alert_id'):
                    logger.info("Closing incident {} for case {}".format(self.webhook.ext_alert_id, self.case_id))
                    self.AzureSentinelConnector.closeIncident(self.webhook.ext_alert_id, self.classification, self.classification_comment)
                elif len(self.webhook.ext_alert_ids) > 0:
                    # Close every linked incident
                    logger.info("Found multiple incidents {} for case {}".format(self.webhook.ext_alert_ids, self.case_id))
                    for incident_id in self.webhook.ext_alert_ids:
                        logger.info("Closing incident {} for case {}".format(incident_id, self.case_id))
                        self.AzureSentinelConnector.closeIncident(incident_id, self.classification, self.classification_comment)
            self.report_action = 'closeIncident'

        return self.report_action

    def getOriginalCases(self, merged_from_case_id, handled_cases=None):
        # Avoid a mutable default argument; handled_cases tracks visited cases
        if handled_cases is None:
            handled_cases = []
        cases_found = []
        case_found = self.TheHiveConnector.getCase(merged_from_case_id)
        if 'mergeFrom' in case_found:
            if merged_from_case_id not in handled_cases:
                handled_cases.append(merged_from_case_id)
                # Recurse into the cases this case was merged from
                for merged_case in case_found['mergeFrom']:
                    cases_found.extend(self.getOriginalCases(merged_case, handled_cases))
        else:
            cases_found.append(case_found)
        return cases_found

    def getFinalMergedCase(self, duplicated_case, handled_cases=None):
        # Avoid a mutable default argument; handled_cases guards against loops
        if handled_cases is None:
            handled_cases = []
        if 'mergeInto' in duplicated_case:
            merged_into = duplicated_case['mergeInto']
            case_found = self.TheHiveConnector.getCase(merged_into)
            if 'resolutionStatus' in case_found:
                if case_found['resolutionStatus'] == "Duplicated" and merged_into not in handled_cases:
                    handled_cases.append(merged_into)
                    case_found = self.getFinalMergedCase(case_found, handled_cases)
        else:
            case_found = duplicated_case
        return case_found
# Assumed module-level imports (as in the original module): logging, datetime,
# the Jinja2 Template/Environment/meta API, the connectors, and the
# GetOutOfLoop helper exception used to break out of nested try blocks.
class Automators(Main):
    def __init__(self, cfg, use_case_config):
        self.logger = logging.getLogger(__name__)
        self.logger.info('Initiating QRadar Automators')
        self.cfg = cfg
        self.use_case_config = use_case_config
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.TheHiveAutomators = TheHiveAutomators(cfg, use_case_config)
        self.QRadarConnector = QRadarConnector(cfg)

    def search(self, action_config, webhook):
        # Only continue if the right webhook is triggered
        self.logger.debug("action_config:{}".format(action_config))
        if not webhook.isImportedAlert():
            return False

        # Define variables and actions based on the webhook type
        self.case_id = webhook.data['object']['case']
        self.logger.debug(self.case_id)
        self.enriched = False

        for query_name, query_config in action_config.items():
            try:
                self.logger.debug('Found the following query: {}'.format(query_config['query']))
                self.query_variables = {}
                self.query_variables['input'] = {}

                # Render the query
                try:
                    # Prepare the template
                    self.template = Template(query_config['query'])

                    # Find the variables in the template
                    self.template_env = Environment()
                    self.template_parsed = self.template_env.parse(query_config['query'])
                    # Grab all the variables from the template and try to find them in the description
                    self.template_vars = meta.find_undeclared_variables(self.template_parsed)
                    self.logger.debug("Found the following variables in query: {}".format(self.template_vars))

                    for template_var in self.template_vars:
                        # Skip the dynamically generated Stop_Time variable
                        if template_var == "Stop_Time":
                            continue

                        self.logger.debug("Looking up variable required for template: {}".format(template_var))
                        # Replace the underscore in the variable name with a white space,
                        # as that is how it appears in the description table
                        self.template_var_with_ws = template_var.replace("_", " ")

                        self.case_data = self.TheHiveConnector.getCase(self.case_id)
                        self.logger.debug('output for get_case: {}'.format(self.case_data))
                        self.query_variables['input'][template_var] = self.TheHiveAutomators.fetchValueFromMDTable(
                            self.case_data['description'], self.template_var_with_ws)

                        if 'Start_Time' not in self.query_variables['input']:
                            self.logger.warning("Could not find Start Time value required to build the search")

                        # Parse the times required for the query (with or without offset)
                        if template_var == "Start_Time":
                            self.logger.debug("Found Start Time: %s" % self.query_variables['input']['Start_Time'])
                            if 'start_time_offset' in query_config:
                                self.query_variables['input']['Start_Time'] = self.parseTimeOffset(
                                    self.query_variables['input']['Start_Time'],
                                    self.cfg.get('Automation', 'event_start_time_format'),
                                    query_config['start_time_offset'],
                                    self.cfg.get('QRadar', 'time_format'))

                            if 'stop_time_offset' in query_config:
                                self.query_variables['input']['Stop_Time'] = self.parseTimeOffset(
                                    self.query_variables['input']['Start_Time'],
                                    self.cfg.get('Automation', 'event_start_time_format'),
                                    query_config['stop_time_offset'],
                                    self.cfg.get('QRadar', 'time_format'))
                            else:
                                self.query_variables['input']['Stop_Time'] = datetime.now().strftime(
                                    self.cfg.get('Automation', 'event_start_time_format'))

                    self.rendered_query = self.template.render(self.query_variables['input'])
                    self.logger.debug("Rendered the following query: %s" % self.rendered_query)
                except Exception as e:
                    self.logger.warning("Could not render query due to missing variables", exc_info=True)
                    continue

                # Perform the search query
                try:
                    self.rendered_query_result = self.QRadarConnector.aqlSearch(self.rendered_query)
                    # Check the results
                    self.logger.debug('The search result returned the following information: \n %s' % self.rendered_query_result)
                except Exception as e:
                    self.logger.warning("Could not perform query", exc_info=True)
                    continue

                # Create a task with the results, if configured
                try:
                    if query_config['create_thehive_task']:
                        self.logger.debug("create task is enabled")
                        # Task name
                        self.uc_task_title = query_config['thehive_task_title']
                        self.uc_task_description = "The following information is found. Investigate the results and act accordingly:\n\n\n\n"

                        # Create a table header
                        self.table_header = "|"
                        self.rows = "|"
                        if len(self.rendered_query_result['events']) != 0:
                            for key in self.rendered_query_result['events'][0].keys():
                                self.table_header = self.table_header + " %s |" % key
                                self.rows = self.rows + "---|"
                            self.table_header = self.table_header + "\n" + self.rows + "\n"
                            self.uc_task_description = self.uc_task_description + self.table_header

                            # Create the data table for the results
                            for event in self.rendered_query_result['events']:
                                self.table_data_row = "|"
                                for field_key, field_value in event.items():
                                    # Escape pipe signs so they do not break the markdown table
                                    # (the HTML entity is assumed; it appears to have been lost in extraction)
                                    if field_value:
                                        field_value = field_value.replace("|", "&#124;")
                                    self.table_data_row = self.table_data_row + " %s |" % field_value
                                self.table_data_row = self.table_data_row + "\n"
                                self.uc_task_description = self.uc_task_description + self.table_data_row
                        else:
                            self.uc_task_description = self.uc_task_description + "No results \n"

                        # Add the case task
                        self.uc_task = self.TheHiveAutomators.craftUcTask(self.uc_task_title, self.uc_task_description)
                        self.TheHiveConnector.createTask(self.case_id, self.uc_task)
                except Exception as e:
                    self.logger.debug(e)

                # Create observables from the results, if configured
                try:
                    if query_config['create_ioc']:
                        self.logger.debug("create IOC is enabled")
                        self.comment = "offense enrichment"
                        # Static tags list
                        self.tags = ['synapse']
                        # Also add the SECID of the rule as a tag
                        rule_secid = [x for x in webhook.data['object']['tags'] if x.startswith('SEC')]
                        self.tags.extend(rule_secid)
                        self.uc_ioc_type = query_config['ioc_type']
                        if len(self.rendered_query_result['events']) != 0:
                            for event in self.rendered_query_result['events']:
                                for field_key, field_value in event.items():
                                    self.TheHiveConnector.addObservable(
                                        self.case_id,
                                        self.uc_ioc_type,
                                        list(field_value.split(",")),
                                        self.tags,
                                        self.comment)
                except Exception as e:
                    self.logger.debug(e)
            except Exception as e:
                self.logger.debug('Could not process the following query: {}\n{}'.format(query_config, e))
                continue

        # Return True when successful
        return True

    def enrichAlert(self, action_config, webhook):
        # Only continue if the right webhook is triggered
        if not webhook.isNewAlert():
            return False

        # Define variables and actions based on the webhook type
        # Alerts
        self.alert_id = webhook.data['object']['id']
        self.alert_description = webhook.data['object']['description']

        self.query_variables = {}
        self.query_variables['input'] = {}
        self.enriched = False

        # Prepare the search queries
        for query_name, query_config in action_config.items():
            try:
                self.logger.info('Found the following query: %s' % (query_name))
                self.query_variables[query_name] = {}

                # Render the query
                try:
                    # Prepare the template
                    self.template = Template(query_config['query'])

                    # Find the variables in the template
                    self.template_env = Environment()
                    self.template_parsed = self.template_env.parse(query_config['query'])
                    # Grab all the variables from the template and try to find them in the description
                    self.template_vars = meta.find_undeclared_variables(self.template_parsed)
                    self.logger.debug("Found the following variables in query: {}".format(self.template_vars))

                    for template_var in self.template_vars:
                        # Skip the dynamically generated Stop_Time variable
                        if template_var == "Stop_Time":
                            continue

                        self.logger.debug("Looking up variable required for template: {}".format(template_var))
                        # Replace the underscore in the variable name with a white space,
                        # as that is how it appears in the description table
                        self.template_var_with_ws = template_var.replace("_", " ")

                        self.alert_data = self.TheHiveConnector.getAlert(self.alert_id)
                        self.logger.debug('output for get_alert: {}'.format(self.alert_data))
                        self.query_variables['input'][template_var] = self.TheHiveAutomators.fetchValueFromMDTable(
                            self.alert_data['description'], self.template_var_with_ws)

                        # Parse the times required for the query (with or without offset)
                        if template_var == "Start_Time":
                            self.logger.debug("Found Start Time: %s" % self.query_variables['input']['Start_Time'])
                            if 'start_time_offset' in query_config:
                                self.query_variables['input']['Start_Time'] = self.parseTimeOffset(
                                    self.query_variables['input']['Start_Time'],
                                    self.cfg.get('Automation', 'event_start_time_format'),
                                    query_config['start_time_offset'],
                                    self.cfg.get('QRadar', 'time_format'))

                            if 'stop_time_offset' in query_config:
                                self.query_variables['input']['Stop_Time'] = self.parseTimeOffset(
                                    self.query_variables['input']['Start_Time'],
                                    self.cfg.get('Automation', 'event_start_time_format'),
                                    query_config['stop_time_offset'],
                                    self.cfg.get('QRadar', 'time_format'))
                            else:
                                self.query_variables['input']['Stop_Time'] = datetime.now().strftime(
                                    self.cfg.get('Automation', 'event_start_time_format'))

                    if not self.query_variables['input']['Start_Time']:
                        self.logger.warning("Could not find Start Time value")
                        raise GetOutOfLoop

                    self.query_variables[query_name]['query'] = self.template.render(self.query_variables['input'])
                    self.logger.debug("Rendered the following query: %s" % self.query_variables[query_name]['query'])
                except Exception as e:
                    self.logger.warning("Could not render query due to missing variables", exc_info=True)
                    raise GetOutOfLoop

                # Perform the search query
                try:
                    self.query_variables[query_name]['result'] = self.QRadarConnector.aqlSearch(
                        self.query_variables[query_name]['query'])
                except Exception as e:
                    self.logger.warning("Could not perform query", exc_info=True)
                    raise GetOutOfLoop

                # Check the results
                self.logger.debug('The search result returned the following information: \n %s'
                                  % self.query_variables[query_name]['result'])

                # Make the enrichment results presentable
                clean_enrichment_results = self.TheHiveAutomators.make_it_presentable(
                    self.query_variables[query_name]['result']['events'][0]['enrichment_result'])

                # Add the results to the description
                success = self.enrichAlertDescription(
                    self.alert_data['description'], query_name, clean_enrichment_results)
                if not success:
                    self.logger.warning("Could not add results from the query to the description")
                    raise GetOutOfLoop
            except GetOutOfLoop:
                pass

        return True
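
# --- Illustrative sketch (not part of the original module) ---
# Both automator methods above rely on Jinja2's meta API to discover which
# variables a configured AQL query expects. A standalone, runnable example:
from jinja2 import Environment, Template, meta

_query = ("SELECT * FROM events WHERE sourceip = '{{ Source_IP }}' "
          "START '{{ Start_Time }}' STOP '{{ Stop_Time }}'")
_env = Environment()
_vars = meta.find_undeclared_variables(_env.parse(_query))
assert _vars == {"Source_IP", "Start_Time", "Stop_Time"}

# Once every variable is resolved (from the case/alert description table),
# the query renders to a plain string:
_rendered = Template(_query).render(
    Source_IP="10.0.0.1",
    Start_Time="2023-01-01 00:00",
    Stop_Time="2023-01-01 01:00")
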
# Assumed module-level context (as in the original module): logger, time,
# the connectors, Case/CaseTask/CustomFieldHelper from thehive4py.models,
# and TheHive as a config accessor.
class Automation():
    def __init__(self, webhook, cfg):
        logger.info('Initiating MISPautomation')
        self.cfg = cfg
        self.TheHiveConnector = TheHiveConnector(cfg)
        if self.cfg.getboolean('Cortex', 'enabled'):
            self.CortexConnector = CortexConnector(cfg)
        self.webhook = webhook
        # report_action is assumed to be a module-level default (e.g. 'None')
        self.report_action = report_action
        self.qr_config = {}
        for key, value in cfg.items('QRadar'):
            self.qr_config[key] = value

    def parse_hooks(self):
        """
        Check for a new MISP alert containing supported IOCs to search automatically
        """
        if self.webhook.isNewMispAlert():
            logger.info('Alert {} has been tagged as MISP and was just created'.format(self.webhook.data['rootId']))

            # Check the alert for supported IOC types
            supported_iocs = False
            for artifact in self.webhook.data['object']['artifacts']:
                if artifact['dataType'] in self.qr_config['supported_datatypes']:
                    supported_iocs = True

            # Promote the alert to a case if there are supported IOC types
            if supported_iocs:
                alert_id = self.webhook.data['rootId']
                casetemplate = "MISP Event"
                logger.info('Alert {} contains IOCs that are supported'.format(alert_id))
                response = self.TheHiveConnector.createCaseFromAlert(alert_id, casetemplate)
                self.report_action = 'createCase'

        """
        Add timestamps to keep track of the search activity per case
        (we do not want to keep searching forever)
        """
        # Perform automated analyzer runs for supported observables in a case
        # that has been created from a MISP alert
        if self.webhook.isNewMispCase():
            logger.info('Case {} has been tagged as MISP and was just created'.format(self.webhook.data['rootId']))

            # Retrieve the case id
            caseid = self.webhook.data['object']['id']

            # Add the customFields firstSearched and lastSearched
            # Create a Case object to update
            case = Case()
            # Add the case id to the object
            case.id = caseid
            logger.info('Updating case %s' % case.id)

            # Define which fields need to get updated
            fields = ['customFields']

            # Add the search timestamps as custom fields to the case
            current_time = int(round(time.time() * 1000))
            customFields = CustomFieldHelper()\
                .add_date('firstSearched', current_time)\
                .add_date('lastSearched', current_time)\
                .build()

            # Add the custom fields to the case object and update the case
            case.customFields = customFields
            self.TheHiveConnector.updateCase(case, fields)
            self.report_action = 'updateCase'

        """
        Start the analyzers automatically for MISP observables that are supported
        and update the case with a new timestamp
        """
        # Automatically run analyzers for newly created MISP cases where supported IOCs are present
        if self.webhook.isNewMispArtifact():
            logger.info('Case artifact is tagged with "MISP-extern". Checking if observable is of a supported type')

            # Retrieve the case id and case data
            caseid = self.webhook.data['rootId']
            case_data = self.TheHiveConnector.getCase(caseid)

            # The observable for this webhook
            observable = self.webhook.data['object']

            # When supported, start a Cortex analyzer for it
            if observable['dataType'] in self.qr_config['supported_datatypes']:
                supported_observable = observable['_id']

                # Trigger a search for the supported IOC
                logger.info('Launching analyzers for observable: {}'.format(observable['_id']))
                response = self.CortexConnector.runAnalyzer(
                    "Cortex-intern", supported_observable, "IBMQRadar_Search_Manual_0_1")

                # Update the customFields firstSearched and lastSearched
                case = Case()
                case.id = caseid
                logger.info('Updating case %s' % case.id)

                # Define which fields need to get updated
                fields = ['customFields']

                # Keep the original firstSearched value, bump lastSearched
                current_time = int(round(time.time() * 1000))
                customFields = CustomFieldHelper()\
                    .add_date('firstSearched', case_data['customFields']['firstSearched']['date'])\
                    .add_date('lastSearched', current_time)\
                    .build()

                case.customFields = customFields
                self.TheHiveConnector.updateCase(case, fields)
                self.report_action = 'updateCase'

        """
        Automatically create a task for a found IOC
        """
        # If the job result contains a successful search with a minimum of 1 hit,
        # create a task to investigate the results
        if self.webhook.isCaseArtifactJob() and self.webhook.isSuccess() and self.webhook.isMisp():
            # Case ID and case information
            caseid = self.webhook.data['rootId']
            case_data = self.TheHiveConnector.getCase(caseid)
            logger.info('Job {} is part of a case that has been tagged as MISP case and has just finished'.format(
                self.webhook.data['object']['cortexJobId']))

            # Check if the result count is higher than 0
            if int(float(self.webhook.data['object']['report']['summary']['taxonomies'][0]['value'])) > 0:
                logger.info('Job {} contains hits, checking if a task is already present for this observable'.format(
                    self.webhook.data['object']['cortexJobId']))

                # Retrieve the case task information
                response = self.TheHiveConnector.getCaseTasks(caseid)
                case_tasks = response.json()

                # Load the CaseTask template
                casetask = CaseTask()

                # Observable + link (TheHive is assumed to be a module-level config section)
                observable = self.webhook.data['object']['artifactId']
                observable_link = TheHive.get('url') + "/index.html#!/case/" + caseid \
                    + "/observables/" + self.webhook.data['object']['artifactId']

                # Task name
                casetask.title = "Investigate found IOC with id: {}".format(observable)

                # Date
                date_found = time.strftime("%d-%m-%Y %H:%M")

                case_task_found = False
                for case_task in case_tasks:
                    # Check if a task is already present for investigating the new results
                    if casetask.title == case_task['title']:
                        case_task_found = True

                if not case_task_found:
                    logger.info('No task found, creating task for observable found in job {}'.format(
                        self.webhook.data['object']['cortexJobId']))

                    # Add the description
                    casetask.description = "The following ioc is hit in the environment. Investigate the results and act accordingly:\n\n"
                    casetask.description = casetask.description + "{} is seen on {}\n".format(observable_link, date_found)

                    # If the case is closed, reopen it first
                    if case_data['status'] == "Resolved":
                        case = Case()
                        case.id = caseid
                        logger.info('Updating case %s' % case.id)

                        # Define which fields need to get updated
                        fields = ['status']

                        # Reopen the case
                        case.status = "Open"
                        self.TheHiveConnector.updateCase(case, fields)

                    # Add the case task
                    self.TheHiveConnector.createTask(caseid, casetask)
                    self.report_action = 'createTask'

        return self.report_action
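
# --- Illustrative sketch (not part of the original module) ---
# The firstSearched/lastSearched bookkeeping above uses thehive4py's
# CustomFieldHelper. A standalone example of the structure it builds:
import time
from thehive4py.models import CustomFieldHelper

_now_ms = int(round(time.time() * 1000))
_custom_fields = CustomFieldHelper()\
    .add_date('firstSearched', _now_ms)\
    .add_date('lastSearched', _now_ms)\
    .build()
# _custom_fields is a plain dict, e.g. {'firstSearched': {'order': 0, 'date': ...}, ...},
# which is why the code above can read case_data['customFields']['firstSearched']['date']
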
# Assumed module-level imports (as in the original module): logging, re,
# datetime, the Jinja2 Template/Environment/meta API, Alert from
# thehive4py.models, the connectors, and the GetOutOfLoop helper exception.
class Automators(Main):
    def __init__(self, cfg, use_case_config):
        self.logger = logging.getLogger(__name__)
        self.logger.info('Initiating Splunk Automators')
        self.cfg = cfg
        self.use_case_config = use_case_config
        self.TheHiveConnector = TheHiveConnector(cfg)
        self.TheHiveAutomators = TheHiveAutomators(cfg, use_case_config)
        self.SplunkConnector = SplunkConnector(cfg)

    def checkSiem(self, action_config, webhook):
        # Only continue if the right webhook is triggered
        if not (webhook.isImportedAlert() or webhook.isNewAlert()):
            return False

        # Define variables and actions based on the webhook type
        # Alerts
        if webhook.isNewAlert():
            self.alert_id = webhook.data['object']['id']
            self.alert_description = webhook.data['object']['description']
            self.supported_query_type = 'enrichment_queries'
            if self.supported_query_type in action_config:
                self.query_config = action_config[self.supported_query_type]
        # Cases
        elif webhook.isImportedAlert():
            self.case_id = webhook.data['object']['case']
            self.supported_query_type = 'search_queries'
            if self.supported_query_type in action_config:
                self.query_config = action_config[self.supported_query_type]
            else:
                self.logger.debug("Search query config not found for the imported alert")
                return False

        self.query_variables = {}
        self.query_variables['input'] = {}
        self.enriched = False

        # Prepare the search queries
        for query_name, query_config in self.query_config.items():
            try:
                self.logger.info('Found the following query: %s' % (query_name))
                self.query_variables[query_name] = {}

                # Render the query
                try:
                    # Prepare the template
                    self.template = Template(query_config['query'])

                    # Find the variables in the template
                    self.template_env = Environment()
                    self.template_parsed = self.template_env.parse(query_config['query'])
                    # Grab all the variables from the template and try to find them in the description
                    self.template_vars = meta.find_undeclared_variables(self.template_parsed)
                    self.logger.debug("Found the following variables in query: {}".format(self.template_vars))

                    for template_var in self.template_vars:
                        # Skip the dynamically generated Stop_Time variable
                        if template_var == "Stop_Time":
                            continue

                        self.logger.debug("Looking up variable required for template: {}".format(template_var))
                        # Replace the underscore in the variable name with a white space,
                        # as that is how it appears in the description table
                        self.template_var_with_ws = template_var.replace("_", " ")
                        # Fall back to the case when this webhook has no alert id
                        try:
                            self.alert_data = self.TheHiveConnector.getAlert(self.alert_id)
                            self.logger.debug('output for get_alert: {}'.format(self.alert_data))
                            self.query_variables['input'][template_var] = self.TheHiveAutomators.fetchValueFromMDTable(
                                self.alert_data['description'], self.template_var_with_ws)
                        except Exception:
                            self.case_data = self.TheHiveConnector.getCase(self.case_id)
                            self.logger.debug('output for get_case: {}'.format(self.case_data))
                            self.query_variables['input'][template_var] = self.TheHiveAutomators.fetchValueFromMDTable(
                                self.case_data['description'], self.template_var_with_ws)

                        # Parse the times required for the query (with or without offset)
                        if template_var == "Start_Time":
                            self.logger.debug("Found Start Time: %s" % self.query_variables['input']['Start_Time'])
                            self.start_time = self.query_variables['input']['Start_Time']
                            if 'start_time_offset' in query_config:
                                self.query_variables['input']['Start_Time'] = self.parseTimeOffset(
                                    self.start_time,
                                    self.cfg.get('Automation', 'event_start_time_format'),
                                    query_config['start_time_offset'],
                                    self.cfg.get('Splunk', 'time_format'))

                            if 'stop_time_offset' in query_config:
                                self.query_variables['input']['Stop_Time'] = self.parseTimeOffset(
                                    self.start_time,
                                    self.cfg.get('Automation', 'event_start_time_format'),
                                    query_config['stop_time_offset'],
                                    self.cfg.get('Splunk', 'time_format'))
                            else:
                                self.query_variables['input']['Stop_Time'] = datetime.now().strftime(
                                    self.cfg.get('Automation', 'event_start_time_format'))

                    self.query_variables[query_name]['query'] = self.template.render(self.query_variables['input'])
                    self.logger.debug("Rendered the following query: %s" % self.query_variables[query_name]['query'])
                except Exception as e:
                    self.logger.warning("Could not render query due to missing variables", exc_info=True)
                    raise GetOutOfLoop

                # Perform the query
                try:
                    self.query_variables[query_name]['result'] = self.SplunkConnector.query(
                        self.query_variables[query_name]['query'])
                    # Check if there are any results
                    self.results = True
                    if len(self.query_variables[query_name]['result']) == 0:
                        self.logger.info("No results found for query")
                        self.results = False
                except Exception as e:
                    self.logger.warning("Could not perform query", exc_info=True)
                    raise GetOutOfLoop

                # Check the results
                self.logger.debug('The search result returned the following information: \n %s'
                                  % self.query_variables[query_name]['result'])

                if self.supported_query_type == "search_queries":
                    # Task name
                    self.uc_task_title = query_config['task_title']
                    self.uc_task_description = "The following information is found. Investigate the results and act accordingly:\n\n\n\n"

                    # Create a table header
                    self.table_header = "|"
                    self.rows = "|"
                    if self.results:
                        for key in self.query_variables[query_name]['result'][0].keys():
                            self.table_header = self.table_header + " %s |" % key
                            self.rows = self.rows + "---|"
                        self.table_header = self.table_header + "\n" + self.rows + "\n"
                        self.uc_task_description = self.uc_task_description + self.table_header

                        # Create the data table for the results
                        for event in self.query_variables[query_name]['result']:
                            self.table_data_row = "|"
                            for field_key, field_value in event.items():
                                # Escape pipe signs so they do not break the markdown table
                                # (the HTML entity is assumed; it appears to have been lost in extraction)
                                if field_value:
                                    field_value = field_value.replace("|", "&#124;")
                                self.table_data_row = self.table_data_row + " %s |" % field_value
                            self.table_data_row = self.table_data_row + "\n"
                            self.uc_task_description = self.uc_task_description + self.table_data_row
                    else:
                        self.uc_task_description = self.uc_task_description + "No results \n"

                    # Add the case task
                    self.uc_task = self.TheHiveAutomators.craftUcTask(self.uc_task_title, self.uc_task_description)
                    self.TheHiveConnector.createTask(self.case_id, self.uc_task)

                if self.supported_query_type == "enrichment_queries":
                    if self.results:
                        # Make the enrichment results presentable
                        self.enrichment_result = self.TheHiveAutomators.make_it_presentable(
                            self.query_variables[query_name]['result'][0]['enrichment_result'])
                    else:
                        self.enrichment_result = "No Results found in Splunk"

                    # Add the results to the description
                    try:
                        if self.TheHiveAutomators.fetchValueFromMDTable(self.alert_data['description'], query_name) != self.enrichment_result:
                            self.regex_end_of_table = r' \|\n\n\n'
                            self.end_of_table = ' |\n\n\n'
                            self.replacement_description = '|\n | **%s** | %s %s' % (query_name, self.enrichment_result, self.end_of_table)
                            self.alert_description = self.TheHiveConnector.getAlert(self.alert_id)['description']
                            self.alert_description = re.sub(self.regex_end_of_table, self.replacement_description, self.alert_description)
                            self.enriched = True
                    except Exception as e:
                        self.logger.warning("Could not add results from the query to the description. Error: {}".format(e))
                        raise GetOutOfLoop
            except GetOutOfLoop:
                pass

        # Only enrichment queries need to update the alert outside of the loop;
        # the search queries create a task within the loop
        if self.enriched:
            # Update the alert with the new description field
            self.updated_alert = Alert()
            self.updated_alert.description = self.alert_description
            self.TheHiveConnector.updateAlert(self.alert_id, self.updated_alert, ["description"])
        return True
# Assumed module-level imports (as in the original module): json, os, re,
# hashlib, datetime, the connectors, and AlertArtifact/Alert/Case from
# thehive4py.models.
class Integration(Main):
    def __init__(self):
        super().__init__()
        self.RDConnector = RDConnector(self.cfg)
        self.TheHiveConnector = TheHiveConnector(self.cfg)

    def validateRequest(self, request):
        workflowReport = self.connectRD()
        if workflowReport['success']:
            return json.dumps(workflowReport), 200
        else:
            return json.dumps(workflowReport), 500

    def connectRD(self):
        self.logger.info('%s.connectResponsibleDisclosure starts', __name__)
        report = dict()
        report['success'] = bool()

        # Setup Tags
        self.tags = ['Responsible disclosure', 'Synapse']

        # Load the delta link from the tracker file, falling back to the configured endpoint
        tracker_file = "./modules/ResponsibleDisclosure/email_tracker"
        link_to_load = ""
        if os.path.exists(tracker_file):
            self.logger.debug("Reading from the tracker file...")
            with open(tracker_file, "r") as tracker:
                link_to_load = tracker.read()
        if not link_to_load:
            link_to_load = self.cfg.get('ResponsibleDisclosure', 'list_endpoint')

        emails, new_link = self.RDConnector.scan(link_to_load)

        try:
            for email in emails:
                try:
                    # Skip deleted items and excluded senders
                    if ('@removed' in email) or email["from"]["emailAddress"]["address"] in self.cfg.get('ResponsibleDisclosure', 'excluded_senders'):
                        continue
                    self.logger.debug("Found unread e-mail with id: {}".format(email['id']))

                    # Get the conversation id from the email; its hash is used
                    # as a unique identifier for the alert
                    CID = email["conversationId"]
                    CIDHash = hashlib.md5(CID.encode()).hexdigest()

                    email_date = datetime.strptime(email["receivedDateTime"], "%Y-%m-%dT%H:%M:%SZ")
                    epoch_email_date = email_date.timestamp() * 1000

                    alertTitle = "Responsible Disclosure - {}".format(email["subject"])
                    alertDescription = self.createDescription(email)

                    # Moving the email from Inbox to the folder defined by the
                    # to_move_folder variable in synapse.conf
                    # Disabled temporarily
                    # self.RDConnector.moveToFolder(self.cfg.get('ResponsibleDisclosure', 'email_address'), email['id'], self.cfg.get('ResponsibleDisclosure', 'to_move_folder'))

                    # Get all the attachments and upload them to TheHive as observables
                    attachment_data = self.RDConnector.listAttachment(
                        self.cfg.get('ResponsibleDisclosure', 'email_address'), email['id'])
                    all_artifacts = []
                    all_attachments = []
                    if attachment_data:
                        for att in attachment_data:
                            file_name = self.RDConnector.downloadAttachments(
                                att['name'], att['attachment_id'], att['isInline'], att['contentType'])
                            all_attachments.append(file_name)
                            self.af = AlertArtifact(
                                dataType='file',
                                data=file_name,
                                tlp=2,
                                tags=['Responsible disclosure', 'Synapse'],
                                ioc=True)
                            all_artifacts.append(self.af)

                    # Create the alert for TheHive
                    alert = self.TheHiveConnector.craftAlert(
                        alertTitle,
                        alertDescription,
                        1,
                        epoch_email_date,
                        self.tags,
                        2,
                        "New",
                        "internal",
                        "ResponsibleDisclosure",
                        CIDHash,
                        all_artifacts,
                        self.cfg.get('ResponsibleDisclosure', 'case_template'))

                    # Look up any existing alert in TheHive with this CIDHash
                    query = dict()
                    query['sourceRef'] = str(CIDHash)
                    alert_results = self.TheHiveConnector.findAlert(query)

                    # If no alert is found for the corresponding CIDHash, create a new one
                    if len(alert_results) == 0:
                        createdAlert = self.TheHiveConnector.createAlert(alert)

                        # Send an automatic reply to the original sender from the
                        # responsible disclosure email address
                        autoreply_subject_name = "RE: {}".format(email["subject"])
                        self.RDConnector.sendAutoReply(
                            "*****@*****.**",
                            email["from"]["emailAddress"]["address"],
                            self.cfg.get('ResponsibleDisclosure', 'email_body_filepath'),
                            autoreply_subject_name)

                    # If an alert is found, update it; it may also have been
                    # promoted to a case, in which case update the case instead
                    if len(alert_results) > 0:
                        alert_found = alert_results[0]

                        # Check if the alert is promoted to a case
                        if 'case' in alert_found:
                            case_found = self.TheHiveConnector.getCase(alert_found['case'])
                            # Create a case model and update the case with the new description;
                            # if the email body is empty for the new email, the old description is kept
                            self.updated_case = Case()
                            self.updated_case.description = case_found['description'] + "\n\n" + alertDescription
                            self.updated_case.id = alert_found['case']
                            self.TheHiveConnector.updateCase(self.updated_case, ["description"])
                            self.logger.info("Updated the description of the case with id: {}".format(alert_found['case']))

                            # Check if there are new observables available
                            if all_attachments:
                                for att in all_attachments:
                                    try:
                                        self.TheHiveConnector.addFileObservable(alert_found['case'], att, "email attachment")
                                    except Exception as e:
                                        self.logger.error(f"Encountered an error while creating a new file based observable: {e}", exc_info=True)
                                        continue
                        # Else there is no corresponding case, so update the alert
                        else:
                            # Create an alert model and update the alert with the new description;
                            # if the email body is empty for the new email, the old description is kept
                            self.updated_alert = Alert()
                            self.updated_alert.description = alert_found['description'] + "\n\n" + alertDescription
                            self.TheHiveConnector.updateAlert(alert_found['id'], self.updated_alert, ["description"])
                            self.logger.info("Updated the description of the alert with id: {}".format(alert_found['id']))
                except Exception as e:
                    self.logger.error(e, exc_info=True)
                    continue

                # Clean up the downloaded attachments
                if all_attachments:
                    for att in all_attachments:
                        os.remove(att)

            # Write the delta link to the tracker
            with open(tracker_file, "w+") as tracker:
                tracker.write(new_link)
            report['success'] = True
            return report
        except Exception as e:
            self.logger.error(e)
            self.logger.error('Connection failure', exc_info=True)
            report['success'] = False
            return report

    def createDescription(self, email):
        email_body = email['body']['content']
        subject = email["subject"]

        # Get the conversation id from the email; its hash is used
        # as a unique identifier for the alert
        CID = email["conversationId"]
        CIDHash = hashlib.md5(CID.encode()).hexdigest()

        # Parse all the URLs and add them to a field in the description table;
        # "&#10;" is the HTML entity for a line break, so the URLs render on
        # separate lines inside the markdown table cell
        # (the entity is assumed here; it appears to have been lost in extraction)
        urls_list = re.findall(r'\<(https?://[\S]+?)\>', email_body)
        urls_str = '&#10;'.join(str(x) for x in urls_list)

        from_e = email["from"]["emailAddress"]["address"]
        to_e = "N/A"
        if email["toRecipients"]:
            to_e = email["toRecipients"][0]["emailAddress"]["address"]

        OriginatingIP = "N/A"
        for header in email['internetMessageHeaders']:
            if header['name'] == 'X-Originating-IP':
                # Strip the brackets; by default the value comes in as [x.x.x.x]
                OriginatingIP = (header['value'][1:-1])

        # Put together the markdown table
        temp_fullbody = []
        temp_fullbody.append("| | |")
        temp_fullbody.append("|:-----|:-----|")
        temp_fullbody.append("| " + "**" + "Subject" + "**" + " | " + subject + " |")
        temp_fullbody.append("| " + "**" + "Sender" + "**" + " | " + from_e + " |")
        temp_fullbody.append("| " + "**" + "Recipient" + "**" + " | " + to_e + " |")
        temp_fullbody.append("| " + "**" + "Originating IP" + "**" + " | " + OriginatingIP + " |")
        temp_fullbody.append("| " + "**" + "Received at" + "**" + " | " + email["receivedDateTime"] + " |")
        temp_fullbody.append("| " + "**" + "URL(s) in email" + "**" + " | " + urls_str + " |")
        temp_fullbody.append("| " + "**" + "Msg ID" + "**" + " | " + email['id'] + " |")
        temp_fullbody.append("**" + "Email body" + "**")
        temp_fullbody.append("```")
        temp_fullbody.append(email_body)
        temp_fullbody.append("```")
        alertDescription = '\r\n'.join(str(x) for x in temp_fullbody)
        return alertDescription
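
# --- Illustrative sketch (not part of the original module) ---
# Worked example of the URL extraction above: Outlook-style bodies wrap links
# in angle brackets, which the pattern strips while capturing the URL itself.
import re

_body = "See <https://example.com/report> and <http://test.invalid/x> for details."
_urls = re.findall(r'\<(https?://[\S]+?)\>', _body)
assert _urls == ["https://example.com/report", "http://test.invalid/x"]
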
# Assumed module-level imports (as in the original module): logging, time,
# ipaddress, the connectors, and Case/CaseTask from thehive4py.models.
class Automators(Main):
    def __init__(self, cfg, use_case_config):
        self.logger = logging.getLogger(__name__)
        self.logger.info('Initiating The Hive Automator')
        self.cfg = cfg
        self.TheHiveConnector = TheHiveConnector(cfg)
        if self.cfg.getboolean('Cortex', 'enabled'):
            self.CortexConnector = CortexConnector(cfg)

        # Read the mail config
        self.mailsettings = self.cfg.get('TheHive', 'mail')
        # Default action reported back by the automators
        self.report_action = 'None'

    def MatchValueAgainstTags(self, tags, value_list):
        '''
        Can be used to check if there is a match between tags and the provided
        list. Useful for checking if a customer tag is present (given a list of
        customers) where only one can match.
        '''
        for tag in tags:
            if tag in value_list:
                return tag

    def craftUcTask(self, title, description):
        self.logger.debug('%s.craftUcTask starts', __name__)
        self.uc_task = CaseTask(title=title, description=description)
        return self.uc_task

    def createBasicTask(self, action_config, webhook):
        # Only continue if the right webhook is triggered
        if not webhook.isImportedAlert():
            return False

        # Perform the actions for the CreateBasicTask action
        self.case_id = webhook.data['object']['case']
        self.title = action_config['title']
        self.description = action_config['description']

        self.logger.info('Found basic task to create: %s' % self.title)

        # Create the task
        self.uc_task = self.craftUcTask(self.title, self.description)
        self.uc_task_id = self.TheHiveConnector.createTask(self.case_id, self.uc_task)

        return True

    def createMailTask(self, action_config, webhook):
        # Only continue if the right webhook is triggered
        if not webhook.isImportedAlert():
            return False

        self.tags = webhook.data['object']['tags']
        self.case_id = webhook.data['object']['case']
        if self.cfg.getboolean('Automation', 'enable_customer_list', fallback=False):
            # self.customers is assumed to be provided by Main
            self.customer_id = self.MatchValueAgainstTags(self.tags, self.customers)
            self.logger.info('Found customer %s, retrieving recipient' % self.customer_id)
        else:
            self.customer_id = None
        self.notification_type = "email"
        self.title = action_config['title']
        self.description = self.renderTemplate(
            action_config['long_template'],
            self.tags,
            webhook,
            self.notification_type,
            customer_id=self.customer_id,
            mail_settings=self.mailsettings)

        self.logger.info('Found mail task to create: %s' % self.title)

        # Create the task
        self.ucTask = self.craftUcTask(self.title, self.description)
        self.ucTaskId = self.TheHiveConnector.createTask(self.case_id, self.ucTask)
        if 'auto_send_mail' in action_config and action_config['auto_send_mail'] and not self.stopsend:
            self.logger.info('Sending mail for task with id: %s' % self.ucTaskId)
            self.TheHiveConnector.runResponder(
                'case_task', self.ucTaskId,
                self.use_case_config['configuration']['mail']['responder_id'])

    def runAnalyzer(self, action_config, webhook):
        # Automatically run analyzers for newly created cases where supported IOCs are present
        if webhook.isNewArtifact():
            self.logger.debug('Case artifact found. Checking if observable is of a supported type to automatically fire the analyzer')

            # Retrieve the case id
            self.caseid = webhook.data['rootId']

            # The observable for this webhook
            self.observable = webhook.data['object']

            # When supported, start a Cortex analyzer for it
            if self.observable['dataType'] in action_config['datatypes']:
                self.supported_observable = self.observable['_id']

                # Blacklisted IP addresses; make sure the blacklist is present
                if self.observable['dataType'] == "ip" and 'blacklist' in action_config and 'ip' in action_config['blacklist']:
                    for entry in action_config['blacklist']['ip']:
                        # Initial values
                        match = False
                        observable_ip = ipaddress.ip_address(self.observable['data'])

                        # Match an ip with /32 CIDR syntax
                        if entry[-3:] == "/32":
                            bl_entry = ipaddress.ip_address(entry[:-3])
                            match = observable_ip == bl_entry
                        # Match an ip without CIDR syntax
                        elif "/" not in entry:
                            bl_entry = ipaddress.ip_address(entry)
                            match = observable_ip == bl_entry
                        # Capture actual network entries
                        else:
                            bl_entry = ipaddress.ip_network(entry, strict=False)
                            match = observable_ip in bl_entry

                        # If matched, skip the observable
                        if match:
                            self.logger.debug("Observable {} has matched {} of blacklist. Ignoring...".format(
                                self.observable['data'], entry))
                            return

                # Trigger a search for the supported IOC
                self.logger.debug('Launching analyzers for observable: {}'.format(self.observable['_id']))
                self.TheHiveConnector.runAnalyzer(
                    action_config['cortex_instance'],
                    self.supported_observable,
                    action_config['analyzer'])

    def closeCaseForTaxonomyInAnalyzerResults(self, action_config, webhook):
        # If the job result matches the configured taxonomy level, close the case
        if webhook.isCaseArtifactJob() and webhook.isSuccess():
            # Case ID and case information
            self.caseid = webhook.data['rootId']
            self.case_data = self.TheHiveConnector.getCase(self.caseid)
            self.logger.debug('Job {} has just finished'.format(webhook.data['object']['cortexJobId']))

            # Check if the taxonomy level matches the configured level(s)
            if webhook.data['object']['report']['summary']['taxonomies'][0]['level'] in action_config["taxonomy_level"]:
                self.logger.info('Job {} has configured taxonomy level, checking if the case is still open'.format(
                    webhook.data['object']['cortexJobId']))

                # Only close the case when it is not already resolved
                if self.case_data['status'] != "Resolved":
                    self.logger.info('Case is not yet closed, closing case for {} now...'.format(
                        webhook.data['object']['cortexJobId']))
                    # Close the case
                    self.TheHiveConnector.closeCase(self.caseid)
                    self.report_action = 'closeCase'

        return self.report_action

    def createTaskForTaxonomyinAnalyzerResults(self, action_config, webhook):
        # If the job result matches the configured taxonomy level,
        # create a task to investigate the results
        if webhook.isCaseArtifactJob() and webhook.isSuccess():
            # Case ID and case information
            self.caseid = webhook.data['rootId']
            self.case_data = self.TheHiveConnector.getCase(self.caseid)
            self.logger.debug('Job {} has just finished'.format(webhook.data['object']['cortexJobId']))

            # Check if the taxonomy level matches the configured level(s)
            if webhook.data['object']['report']['summary']['taxonomies'][0]['level'] in action_config["taxonomy_level"]:
                self.logger.info('Job {} has configured taxonomy level, checking if a task is already present for this observable'.format(
                    webhook.data['object']['cortexJobId']))

                # Retrieve the case task information
                self.response = self.TheHiveConnector.getCaseTasks(self.caseid)
                self.case_tasks = self.response.json()

                # Load the CaseTask template
                self.casetask = CaseTask()

                # Observable + link
                self.observable = webhook.data['object']['artifactId']
                self.observable_link = self.cfg.get('Automation', 'hive_url', fallback="https://localhost") \
                    + "/index.html#!/case/" + self.caseid + "/observables/" + webhook.data['object']['artifactId']

                # Task name
                self.casetask.title = "{} {}".format(action_config['title'], self.observable)

                # Date
                self.date_found = time.strftime("%d-%m-%Y %H:%M")

                self.case_task_found = False
                for case_task in self.case_tasks:
                    # Check if a task is already present for investigating the new results
                    if self.casetask.title == case_task['title']:
                        self.case_task_found = True

                if not self.case_task_found:
                    self.logger.info('No task found, creating task for observable found in job {}'.format(
                        webhook.data['object']['cortexJobId']))

                    # Add the description
                    self.casetask.description = action_config['description']
                    self.casetask.description = self.casetask.description + "\n\n {} is seen on {}\n".format(
                        self.observable_link, self.date_found)

                    # If the case is closed, reopen it first
                    if self.case_data['status'] == "Resolved":
                        case = Case()
                        case.id = self.caseid
                        self.logger.info('Updating case %s' % case.id)

                        # Define which fields need to get updated
                        fields = ['status']

                        # Reopen the case
                        case.status = "Open"
                        self.TheHiveConnector.updateCase(case, fields)

                    # Add the case task
                    self.TheHiveConnector.createTask(self.caseid, self.casetask)
                    self.report_action = 'createTask'

        return self.report_action
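
# --- Illustrative sketch (not part of the original module) ---
# The blacklist matching in runAnalyzer() handles three entry styles: a /32
# CIDR, a bare address, and a real network range. The same logic, runnable in
# isolation:
import ipaddress

def _ip_matches_blacklist(ip, blacklist):
    addr = ipaddress.ip_address(ip)
    for entry in blacklist:
        if entry.endswith("/32"):
            if addr == ipaddress.ip_address(entry[:-3]):
                return True
        elif "/" not in entry:
            if addr == ipaddress.ip_address(entry):
                return True
        elif addr in ipaddress.ip_network(entry, strict=False):
            return True
    return False

assert _ip_matches_blacklist("10.0.0.5", ["10.0.0.0/24"])
assert _ip_matches_blacklist("192.168.1.1", ["192.168.1.1"])
assert not _ip_matches_blacklist("172.16.0.1", ["10.0.0.0/8"])
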