def CalcPreCompromiseTime(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Shift each filtered artifact startTime back 5 days via community/datetime_modify."""
    phantom.debug('CalcPreCompromiseTime() called')

    start_times = phantom.collect2(
        container=container,
        datapath=['filtered-data:MultiArtifact_StartTime:condition_1:artifact:*.cef.startTime'])

    # fixed offset applied to every start time: subtract 5 days
    offsets = [
        [
            -5,
            "days",
        ],
    ]

    parameters = [
        {
            'input_datetime': time_row[0],
            'amount_to_modify': offset_row[0],
            'modification_unit': offset_row[1],
            'input_format_string': None,
            'output_format_string': None,
        }
        for time_row in start_times
        for offset_row in offsets
    ]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # Write your custom code here...

    ################################################################################
    ## Custom Code End
    ################################################################################

    # call custom function "community/datetime_modify", returns the custom_function_run_id
    phantom.custom_function(custom_function='community/datetime_modify', parameters=parameters,
                            name='CalcPreCompromiseTime', callback=ConvertCompromiseTimeFormat)

    return
def enumerate_files_to_delete(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Read the log4j_hosts_and_files custom list and build one artifact json per row.

    Each row is expected to contain: ip_or_hostname | os family (unix/windows) | full file path.
    The file path column is optional, but the list must be all-with-files or all-without-files.
    The resulting jsons are passed to create_file_artifacts via community/passthrough.
    """
    phantom.debug("enumerate_files_to_delete() called")

    parameters = []

    parameters.append({
        "input_1": "log4j_hosts_and_files",
        "input_2": None,
        "input_3": None,
        "input_4": None,
        "input_5": None,
        "input_6": None,
        "input_7": None,
        "input_8": None,
        "input_9": None,
        "input_10": None,
    })

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # use custom code to read a custom list of potential log4j files to delete
    # and make a json to create an artifact for each one.
    # the expected format of the custom list is:
    #     hostname1 | unix | /full/path/to/delete/on/hostname_1
    #     1.1.1.1 | windows | C:\\Full\Path\To\Delete\On\1_1_1_1
    #
    # the list can either have all rows with files or no rows with files. some rows with files and some without will not work

    custom_list_name = parameters[0]['input_1']
    success, message, rows = phantom.get_list(list_name=custom_list_name)

    # return early if the list is not found
    if not success:
        phantom.debug(
            "Failed to find the custom list, so only existing artifacts will be used"
        )
        phantom.custom_function(custom_function="community/passthrough", parameters=[], name="enumerate_files_to_delete", callback=create_file_artifacts)
        return

    # loop through the rows and create a list of artifact jsons to add
    # the three columns are expected to be the ip_or_hostname, the operating system family, and the full path to the file to delete
    parameters = []

    # the list is either all-files or no-files, so probe the first row once
    has_files = bool(rows[0][2] and ('/' in rows[0][2] or '\\' in rows[0][2]))

    for row in rows:
        # hostname and operating system are required, but file path is optional.
        # files will not be deleted if file path is missing
        if row[0] and row[1]:
            # only windows and unix are supported, and operating system family is required
            if row[1] == 'unix' or row[1] == 'windows':
                # BUGFIX: filePath was previously written into the artifact
                # unconditionally at construction time, so the "skipping" branch
                # below still shipped the file path it claimed to skip. The path
                # is now added only when the row has a valid path AND the list as
                # a whole carries files.
                artifact_dict = {
                    'cef_data': {
                        'deviceHostname': row[0],
                        'operatingSystemFamily': row[1]
                    },
                    'field_mapping': {
                        'deviceHostname': ['host name', 'ip']
                    }
                }
                # full paths should have at least one slash somewhere in them
                if row[2] and ('/' in row[2] or '\\' in row[2]):
                    if has_files:
                        artifact_dict['cef_data']['filePath'] = row[2]
                        artifact_dict['field_mapping']['filePath'] = ['file path']
                    else:
                        phantom.debug(
                            "skipping host {} with file {} because other rows did not have files"
                            .format(row[0], row[2]))
                else:
                    if has_files:
                        # mixed list detected: a row lacks a file while others have one
                        phantom.error(
                            "host {} is missing a file; playbook will be discontinued"
                            .format(row[0]))
                        phantom.discontinue()
                parameters.append({'input_1': artifact_dict})

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/passthrough", parameters=parameters, name="enumerate_files_to_delete", callback=create_file_artifacts)

    return
def cf_rba_master_add_artifact_with_tags_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Add a low-severity risk_rule artifact built from the format_4 block's output."""
    phantom.debug('cf_rba_master_add_artifact_with_tags_1() called')

    container_ids = [
        [
            container.get("id"),
        ],
    ]
    formatted_cefs = [
        [
            phantom.get_format_data(name="format_4"),
        ],
    ]
    # fixed artifact attributes: name, label, severity, field mapping, run_automation
    fixed_values = [
        [
            "Extracted IPv4 address",
            "risk_rule",
            "low",
            "{\"threat_object\": [\"ip\"]}",
            "True",
        ],
    ]

    parameters = [
        {
            'cef': cef_row[0],
            'name': fixed_row[0],
            'tags': None,
            'label': fixed_row[1],
            'severity': fixed_row[2],
            'container_id': id_row[0],
            'field_mapping': fixed_row[3],
            'run_automation': fixed_row[4],
        }
        for cef_row in formatted_cefs
        for fixed_row in fixed_values
        for id_row in container_ids
    ]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # Write your custom code here...

    ################################################################################
    ## Custom Code End
    ################################################################################

    # call custom function "rba-master/add_artifact_with_tags", returns the custom_function_run_id
    phantom.custom_function(
        custom_function='rba-master/add_artifact_with_tags',
        parameters=parameters,
        name='cf_rba_master_add_artifact_with_tags_1')

    return
def parse_risk_results_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Translate Splunk risk-rule search results into artifact jsons for community/passthrough.

    CIM field names are remapped to CEF equivalents, list values are flattened and joined,
    ATT&CK tags are extracted from rule_attack_tactic_technique, and run_automation is set
    to True only on the final artifact so downstream automation fires exactly once.
    """
    phantom.debug("parse_risk_results_1() called")

    run_risk_rule_query_result_data = phantom.collect2(
        container=container,
        datapath=[
            "run_risk_rule_query:action_result.data",
            "run_risk_rule_query:action_result.parameter.context.artifact_id"
        ],
        action_results=results)

    run_risk_rule_query_result_item_0 = [item[0] for item in run_risk_rule_query_result_data]

    parameters = []

    parameters.append({
        "input_1": run_risk_rule_query_result_item_0,
        "input_2": None,
        "input_3": None,
        "input_4": None,
        "input_5": None,
        "input_6": None,
        "input_7": None,
        "input_8": None,
        "input_9": None,
        "input_10": None,
    })

    ################################################################################
    ## Custom Code Start
    ################################################################################

    import json
    import re

    from dateutil.parser import parse
    # NOTE(review): removed the unused `django.utils.dateparse.parse_datetime` import

    search_json = run_risk_rule_query_result_item_0[0]

    # overwrite parameters
    parameters = []

    # Helper recursive function to flatten nested lists
    def flatten(input_list):
        if not input_list:
            return input_list
        if isinstance(input_list[0], list):
            return flatten(input_list[0]) + flatten(input_list[1:])
        return input_list[:1] + flatten(input_list[1:])

    # IPv4 matcher, compiled once; decides between address and host-name CEF fields
    ipv4_regex = re.compile(
        r'(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}'
        r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)')

    # Declare dictionary for cim to cef translation
    # adjust as needed
    # NOTE(review): the "******" values below look like redacted placeholders
    # (likely sourceUserName / destinationUserName) -- confirm before relying on them
    cim_cef = {
        "action": "act",
        "action_name": "act",
        "app": "app",
        "bytes_in": "bytesIn",
        "bytes_out": "bytesOut",
        "category": "cat",
        "dest": "destinationAddress",
        "dest_ip": "destinationAddress",
        "dest_mac": "destinationMacAddress",
        "dest_nt_domain": "destinationNtDomain",
        "dest_port": "destinationPort",
        "dest_translated_ip": "destinationTranslatedAddress",
        "dest_translated_port": "destinationTranslatedPort",
        "direction": "deviceDirection",
        "dns": "destinationDnsDomain",
        "dvc": "dvc",
        "dvc_ip": "deviceAddress",
        "dvc_mac": "deviceMacAddress",
        "file_create_time": "fileCreateTime",
        "file_hash": "fileHash",
        "file_modify_time": "fileModificationTime",
        "file_name": "fileName",
        "file_path": "filePath",
        "file_size": "fileSize",
        "message": "message",
        "protocol": "transportProtocol",
        "request_payload": "request",
        "request_payload_type": "requestMethod",
        "src": "sourceAddress",
        "src_dns": "sourceDnsDomain",
        "src_ip": "sourceAddress",
        "src_mac": "sourceMacAddress",
        "src_nt_domain": "sourceNtDomain",
        "src_port": "sourcePort",
        "src_translated_ip": "sourceTranslatedAddress",
        "src_translated_port": "sourceTranslatedPort",
        "src_user": "******",
        "transport": "transportProtocol",
        "url": "requestURL",
        "user": "******",
        "user_id": "destinationUserId",
    }

    # Iterate through Splunk search results
    for index, artifact_json in enumerate(search_json):
        field_mapping = {}
        # BUGFIX: tags is now reset once per artifact; it used to be re-initialized
        # on every key of the inner loop
        tags = []

        # Swap CIM for CEF values.
        # BUGFIX: iterate over a snapshot because keys are popped/added during the
        # loop (mutating a dict while iterating raises RuntimeError on Python 3),
        # test the IP regex against the VALUE (the old code tested the key string
        # itself, so the IP branch could never match), and look up cim_cef with the
        # lowercased key in the dest/src branches as the other branch already did.
        for k, v in list(artifact_json.items()):
            if k.lower() in cim_cef:
                if k.lower() == 'dest':
                    # if 'dest' matches an IP, map to the address field, otherwise use 'destinationHostName'
                    if ipv4_regex.match(str(v)):
                        artifact_json[cim_cef[k.lower()]] = artifact_json.pop(k)
                    else:
                        artifact_json['destinationHostName'] = artifact_json.pop(k)
                elif k.lower() == 'src':
                    # if 'src' matches an IP, map to the address field, otherwise use 'sourceHostName'
                    if ipv4_regex.match(str(v)):
                        artifact_json[cim_cef[k.lower()]] = artifact_json.pop(k)
                    else:
                        artifact_json['sourceHostName'] = artifact_json.pop(k)
                else:
                    artifact_json[cim_cef[k.lower()]] = artifact_json.pop(k)

        # Collapse nested list values into comma-separated strings
        # (str() guards against non-string items in the flattened list)
        for k, v in artifact_json.items():
            if isinstance(v, list):
                artifact_json[k] = ", ".join(str(item) for item in flatten(v))

        # Swap risk_message for description
        if 'risk_message' in artifact_json:
            artifact_json['description'] = artifact_json.pop('risk_message')

        # Make _time easier to read
        if '_time' in artifact_json:
            timestring = parse(artifact_json['_time'])
            artifact_json['_time'] = "{} {}".format(timestring.date(), timestring.time())

        # Add threat_object_type to threat_object field_mapping
        if 'threat_object' in artifact_json and 'threat_object_type' in artifact_json:
            field_mapping['threat_object'] = [artifact_json['threat_object_type']]

        # Set the underlying data type in field mapping based on the risk_object_type
        if 'risk_object' in artifact_json and 'risk_object_type' in artifact_json:
            if 'user' in artifact_json['risk_object_type']:
                field_mapping['risk_object'] = ["user name"]
            elif artifact_json['risk_object_type'] == 'system':
                field_mapping['risk_object'] = ["host name", "hostname"]
            else:
                # BUGFIX: wrap in a list for consistency with the other field_mapping values
                field_mapping['risk_object'] = [artifact_json['risk_object_type']]

        # Extract tags
        if 'rule_attack_tactic_technique' in artifact_json:
            for match in re.findall(r'(^|\|)(\w+)\s+', artifact_json['rule_attack_tactic_technique']):
                tags.append(match[1])
            tags = list(set(tags))

        # Final step is to build the output. This relies on the source field, which
        # should be present in all Splunk search results.
        if 'source' in artifact_json:
            name = artifact_json.pop('source')
            # BUGFIX: the old code compared against len(search_json[0]) - 1; use
            # len(search_json) - 1 so only the LAST artifact triggers automation
            run_automation = index == len(search_json) - 1
            parameters.append({'input_1': json.dumps({
                'cef_data': artifact_json,
                'tags': tags,
                'name': name,
                'field_mapping': field_mapping,
                'run_automation': run_automation,
            })})

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/passthrough", parameters=parameters, name="parse_risk_results_1", callback=create_risk_artifacts)

    return
def cf_rba_master_add_artifact_with_tags_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Add an informational risk_rule artifact for each parsed risk result."""
    phantom.debug('cf_rba_master_add_artifact_with_tags_1() called')

    custom_function_result_0 = phantom.collect2(
        container=container,
        datapath=[
            'cf_rba_master_parse_risk_results_1:custom_function_result.data.*.artifact.cef',
            'cf_rba_master_parse_risk_results_1:custom_function_result.data.*.artifact.name',
            'cf_rba_master_parse_risk_results_1:custom_function_result.data.*.artifact.tags'
        ],
        action_results=results)

    container_ids = [
        [
            container.get("id"),
        ],
    ]
    # fixed artifact attributes: label, severity, run_automation
    fixed_values = [
        [
            "risk_rule",
            "informational",
            "True",
        ],
    ]

    parameters = [
        {
            'cef': artifact_row[0],
            'name': artifact_row[1],
            'tags': artifact_row[2],
            'label': fixed_row[0],
            'severity': fixed_row[1],
            'container_id': id_row[0],
            'run_automation': fixed_row[2],
        }
        for artifact_row in custom_function_result_0
        for fixed_row in fixed_values
        for id_row in container_ids
    ]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # Write your custom code here...

    ################################################################################
    ## Custom Code End
    ################################################################################

    # call custom function "rba-master/add_artifact_with_tags", returns the custom_function_run_id
    phantom.custom_function(
        custom_function='rba-master/add_artifact_with_tags',
        parameters=parameters,
        name='cf_rba_master_add_artifact_with_tags_1')

    return
def write_embedded_bash_script_to_vault(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    # Write an embedded bash investigation script to the SOAR vault staging area,
    # register it with phantom.vault_add, and pass the resulting vault_id to the
    # next playbook block (upload_bash_script) via community/passthrough.
    phantom.debug("write_embedded_bash_script_to_vault() called")

    parameters = []

    # placeholder passthrough inputs; overwritten with the vault_id in the custom code below
    parameters.append({
        "input_1": None,
        "input_2": None,
        "input_3": None,
        "input_4": None,
        "input_5": None,
        "input_6": None,
        "input_7": None,
        "input_8": None,
        "input_9": None,
        "input_10": None,
    })

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # The script collects general endpoint state (system config, processes, services,
    # logins, sockets, cron jobs) into /tmp/*.csv files and zips them for retrieval.
    # NOTE(review): the exact line layout of this heredoc was reconstructed from a
    # collapsed source -- the shell commands themselves are unchanged.
    bash_script = r"""
#!/bin/bash
# This script is part of the Splunk SOAR playbook called internal_host_ssh_log4j_investigate. It gathers
# system information as part of a unix endpoint investigation. The output is a human-readable log and a
# set of .csv files to be copied back to SOAR
echo "##############################################################"
echo "splunk_soar_internal_host_ssh_investigate.sh"
echo "##############################################################"
echo ""
echo "[+] Basic system configuration:"
echo "key,value" > /tmp/basic_system_configuration.csv
echo "hostname: $(uname -n | tr -d "\n")"
echo "hostname,$(uname -n | tr -d "\n")" >> /tmp/basic_system_configuration.csv
echo "current time: $(date +%F_%T)"
echo "current time,$(date +%F_%T)" >> /tmp/basic_system_configuration.csv
echo "IP address: $(ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1' | tr '\n' ' ')"
echo "IP address,$(ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1' | tr '\n' ' ')" >> /tmp/basic_system_configuration.csv
echo "OS release: $(cat /etc/*release | sort -u | tr "\n" ";")"
echo "OS release,$(cat /etc/*release | sort -u | tr "\n" ";")" >> /tmp/basic_system_configuration.csv
echo "OS issue: $(cat /etc/issue)"
echo "OS issue,$(cat /etc/issue)" >> /tmp/basic_system_configuration.csv
echo "OS kernel: $(uname -a)"
echo "OS kernel,$(uname -a)" >> /tmp/basic_system_configuration.csv
echo ""
echo "USER,PID,%CPU,%MEM,VSZ,RSS,TTY,STAT,START,TIME,COMMAND" > /tmp/process_list.csv
echo "$(ps aux)" >> /tmp/process_list.csv
echo "[+] Process list:"
echo "$(ps aux)"
echo ""
echo "UNIT,LOAD,ACTIVE,SUB,DESCRIPTION" > /tmp/service_list.csv
echo "$(systemctl)" >> /tmp/service_list.csv
echo "[+] Service list:"
echo "$(systemctl)"
echo ""
echo "$(last -a)" > /tmp/login_history.csv
echo "[+] login history:"
echo "$(last -a)"
echo ""
echo "$(ss -tunapl)" > /tmp/open_sockets.csv
echo "[+] Open sockets:"
echo "$(ss -tunapl)"
echo ""
echo "cron_job" > /tmp/cron_jobs.csv
echo "$(for user in $(cut -f1 -d: /etc/passwd); do crontab -u $user -l 2>/dev/null | grep -v '^#'; done)" >> /tmp/cron_jobs.csv
echo "[+] Cron jobs:"
echo "$(for user in $(cut -f1 -d: /etc/passwd); do crontab -u $user -l 2>/dev/null | grep -v '^#'; done)"
echo ""
echo "[+] Zip up the outputs ..."
zip -j /tmp/$1_ssh_output.zip /tmp/basic_system_configuration.csv /tmp/process_list.csv /tmp/service_list.csv /tmp/login_history.csv /tmp/open_sockets.csv /tmp/cron_jobs.csv
echo "wrote zip file to /tmp/$1_ssh_output.zip; next we will copy it back to SOAR"
"""

    # write the script into the vault staging directory, then register it in the vault
    file_name = 'splunk_soar_internal_host_ssh_investigate.sh'
    file_path = '/opt/phantom/vault/tmp/{}'.format(file_name)

    with open(file_path, 'w') as bash_script_file:
        bash_script_file.write(bash_script)

    success, message, vault_id = phantom.vault_add(file_location=file_path, file_name=file_name)
    # hand only the vault_id to the downstream upload step
    parameters = [{'input_1': vault_id}]

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/passthrough", parameters=parameters, name="write_embedded_bash_script_to_vault", callback=upload_bash_script)

    return
def save_to_artifacts(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Record each unused AWS account user name as an 'Unused AWS Account' artifact."""
    phantom.debug("save_to_artifacts() called")

    id_value = container.get("id", None)

    get_unused_account_information_result_data = phantom.collect2(
        container=container,
        datapath=[
            "get_unused_account_information:action_result.parameter.username",
            "get_unused_account_information:action_result.parameter.context.artifact_id"
        ],
        action_results=results)

    # build parameters list for 'save_to_artifacts' call
    parameters = [
        {
            "container": id_value,
            "name": "Unused AWS Account",
            "label": "user",
            "severity": None,
            "cef_field": "awsUserName",
            "cef_value": result_row[0],
            "cef_data_type": "aws iam user name",
            "tags": None,
            "run_automation": "false",
            "input_json": None,
        }
        for result_row in get_unused_account_information_result_data
    ]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # Write your custom code here...

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/artifact_create", parameters=parameters, name="save_to_artifacts", callback=playbook_aws_disable_user_accounts_1)

    return
def container_update_info(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Update the container name, severity, and description from filtered artifact fields."""
    phantom.debug("container_update_info() called")

    id_value = container.get("id", None)

    filtered_artifact_0_data_event_id_filter = phantom.collect2(
        container=container,
        datapath=[
            "filtered-data:event_id_filter:condition_1:artifact:*.cef.urgency",
            "filtered-data:event_id_filter:condition_1:artifact:*.cef.source"
        ],
        scope="all")

    format_event_name = phantom.get_format_data(name="format_event_name")

    # build parameters list for 'container_update_info' call:
    # severity comes from cef.urgency, description from cef.source
    parameters = [
        {
            "name": format_event_name,
            "tags": None,
            "label": None,
            "owner": None,
            "status": None,
            "severity": artifact_row[0],
            "input_json": None,
            "description": artifact_row[1],
            "sensitivity": None,
            "container_input": id_value,
        }
        for artifact_row in filtered_artifact_0_data_event_id_filter
    ]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # Write your custom code here...

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/container_update", parameters=parameters, name="container_update_info", callback=artifact_update_severity)

    return
def update_investigate_task(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Add each (note_title, note_content) pair from risk_notable_enrich to the Investigate task.

    Titles and contents are paired positionally; a mismatch in list lengths no longer
    raises IndexError (zip stops at the shorter list).
    """
    phantom.debug("update_investigate_task() called")

    id_value = container.get("id", None)

    risk_notable_enrich_output_note_title = phantom.collect2(
        container=container,
        datapath=["risk_notable_enrich:playbook_output:note_title"])
    risk_notable_enrich_output_note_content = phantom.collect2(
        container=container,
        datapath=["risk_notable_enrich:playbook_output:note_content"])

    parameters = []

    # build parameters list for 'update_investigate_task' call
    # (this generated cross-product is overwritten by the custom code below)
    for risk_notable_enrich_output_note_title_item in risk_notable_enrich_output_note_title:
        for risk_notable_enrich_output_note_content_item in risk_notable_enrich_output_note_content:
            parameters.append({
                "owner": None,
                "status": None,
                "container": id_value,
                "task_name": "Investigate",
                "note_title": risk_notable_enrich_output_note_title_item[0],
                "note_content": risk_notable_enrich_output_note_content_item[0],
            })

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # pair each note title with its note content positionally instead of taking the
    # cross product built above. BUGFIX: the previous custom code indexed the content
    # list with the title list's index, which raised IndexError when the lists had
    # different lengths; zip pairs safely and stops at the shorter list.
    parameters = []
    for title_item, content_item in zip(risk_notable_enrich_output_note_title,
                                        risk_notable_enrich_output_note_content):
        parameters.append({
            "owner": None,
            "status": None,
            "container": id_value,
            "task_name": "Investigate",
            "note_title": title_item[0],
            "note_content": content_item[0],
        })

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/workbook_task_update", parameters=parameters, name="update_investigate_task")

    return
def process_responses(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Merge prompt responses with suspect indicator ids and pass the pairs downstream."""
    phantom.debug("process_responses() called")

    select_indicators_result_data = phantom.collect2(
        container=container,
        datapath=[
            "select_indicators:action_result.summary.responses",
            "select_indicators:action_result.parameter.context.artifact_id"
        ],
        action_results=results)
    get_suspect_indicators_data = phantom.collect2(
        container=container,
        datapath=[
            "get_suspect_indicators:custom_function_result.data.*.indicator_id"
        ])

    select_indicators_summary_responses = [row[0] for row in select_indicators_result_data]
    get_suspect_indicators_data___indicator_id = [row[0] for row in get_suspect_indicators_data]

    parameters = [{
        "input_1": select_indicators_summary_responses,
        "input_2": get_suspect_indicators_data___indicator_id,
        "input_3": None,
        "input_4": None,
        "input_5": None,
        "input_6": None,
        "input_7": None,
        "input_8": None,
        "input_9": None,
        "input_10": None,
    }]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    responses = select_indicators_summary_responses[0]

    # overwrite parameters: merge each response with its indicator id, positionally
    parameters = [
        {'input_1': {
            'indicator_id': indicator_id,
            'response': response
        }}
        for response, indicator_id in zip(responses, get_suspect_indicators_data___indicator_id)
    ]

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/passthrough", parameters=parameters, name="process_responses", callback=response_filter)

    return
def process_responses(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Turn indicator-review prompt responses into (indicator, tag) pairs for tagging.

    Each indicator produced two prompt answers: a preconfigured choice and a freeform
    comma-separated tag list. Both are converted into passthrough parameters.
    """
    phantom.debug("process_responses() called")

    threat_intel_indicator_review_result_data = phantom.collect2(
        container=container,
        datapath=[
            "threat_intel_indicator_review:action_result.summary.responses",
            "threat_intel_indicator_review:action_result.parameter.context.artifact_id"
        ],
        action_results=results)
    collect_all_indicators_data_all_indicators = phantom.collect2(
        container=container,
        datapath=["collect_all_indicators:custom_function_result.data.all_indicators.*.cef_value"])

    threat_intel_indicator_review_summary_responses = [row[0] for row in threat_intel_indicator_review_result_data]
    collect_all_indicators_data_all_indicators___cef_value = [row[0] for row in collect_all_indicators_data_all_indicators]

    parameters = [{
        "input_1": threat_intel_indicator_review_summary_responses,
        "input_2": collect_all_indicators_data_all_indicators___cef_value,
        "input_3": None,
        "input_4": None,
        "input_5": None,
        "input_6": None,
        "input_7": None,
        "input_8": None,
        "input_9": None,
        "input_10": None,
    }]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    responses = threat_intel_indicator_review_summary_responses[0]
    indicator_values = collect_all_indicators_data_all_indicators___cef_value

    # lookup table to turn prompt responses into tags to add.
    # "Do nothing" is not included, so no tags will be added
    response_to_tag_map = {
        "Tag to block": "marked_for_block",
        "Tag as safe": "safe"
    }

    # overwrite the parameters list with a list of one indicator and one tag per parameter dictionary
    parameters = []
    for position, indicator in enumerate(indicator_values):
        # each indicator has two answers: the preconfigured choice, then the freeform text
        chosen = responses[position * 2]
        freeform = responses[position * 2 + 1]

        # handle the preconfigured responses
        if chosen in response_to_tag_map:
            phantom.comment(comment="Tagging the indicator {} with the preconfigured tag {}".format(indicator, response_to_tag_map[chosen]))
            parameters.append({"input_1": [indicator, response_to_tag_map[chosen]]})
        elif chosen != 'Do nothing':
            phantom.error('The response {} was chosen for the indicator {}, but that response is not in the set of allowed responses.'.format(chosen, indicator))

        # handle the freeform responses
        if freeform.lower() not in ['n', 'none', 'na', 'n/a']:
            for tag in freeform.replace(' ', '').split(','):
                phantom.comment(comment="Tagging the indicator {} with the freeform tag {}".format(indicator, tag))
                parameters.append({"input_1": [indicator, tag]})

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/passthrough", parameters=parameters, name="process_responses", callback=tag_indicators)

    return
def enumerate_hosts(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Read the log4j_hosts custom list and build one artifact json per host row.

    Each row is expected to contain: ip_or_hostname | operating system family.
    Any OS family other than 'unix' or 'windows' is recorded as 'unknown'.
    The resulting jsons are passed to create_host_artifacts via community/passthrough.
    """
    phantom.debug("enumerate_hosts() called")

    parameters = []

    parameters.append({
        "input_1": "log4j_hosts",
        "input_2": None,
        "input_3": None,
        "input_4": None,
        "input_5": None,
        "input_6": None,
        "input_7": None,
        "input_8": None,
        "input_9": None,
        "input_10": None,
    })

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # use custom code to read a custom list of potential log4j hosts and/or ip addresses
    # and make a json to create an artifact for each one.
    # the expected format of the custom list is:
    #     hostname1 | unix
    #     1.1.1.1 | windows

    custom_list_name = parameters[0]['input_1']
    success, message, rows = phantom.get_list(list_name=custom_list_name)

    # return early if the list is not found
    if not success:
        phantom.debug(
            "Failed to find the custom list, so only existing artifacts will be used"
        )
        phantom.custom_function(custom_function="community/passthrough", parameters=[], name="enumerate_hosts", callback=create_host_artifacts)
        return

    # loop through the rows and create a list of artifact jsons to add
    # the two columns are expected to be the ip_or_hostname and the operating system family
    # (the unused unix/windows/unknown host-bucket lists were removed)
    parameters = []
    for row in rows:
        if row[0]:
            # only unix and windows are recognized; anything else is tracked as 'unknown'
            os_family = row[1] if row[1] in ('unix', 'windows') else 'unknown'
            artifact_dict = {
                'cef_data': {
                    'deviceHostname': row[0],
                    'operatingSystemFamily': os_family
                },
                'field_mapping': {
                    'deviceHostname': ['host name', 'ip']
                }
            }
            parameters.append({'input_1': artifact_dict})

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/passthrough", parameters=parameters, name="enumerate_hosts", callback=create_host_artifacts)

    return
def create_host_artifacts(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Create one 'potential log4j host' artifact per enumerated host json.

    Discontinues the playbook when there are neither new host jsons nor existing
    artifacts carrying a deviceHostname.
    """
    phantom.debug("create_host_artifacts() called")

    id_value = container.get("id", None)

    enumerate_hosts_data = phantom.collect2(
        container=container,
        datapath=["enumerate_hosts:custom_function_result.data.*.item"],
        scope="all")

    # build parameters list for 'create_host_artifacts' call
    parameters = [
        {
            "name": "potential log4j host",
            "tags": None,
            "label": None,
            "severity": "high",
            "cef_field": None,
            "cef_value": None,
            "container": id_value,
            "input_json": host_row[0],
            "cef_data_type": None,
            "run_automation": None,
        }
        for host_row in enumerate_hosts_data
    ]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # only operate on parameters with input_json, because thats where the data should be
    parameters = [parameter for parameter in parameters if parameter['input_json']]

    if parameters == []:
        # if there are no new artifacts and no existing artifacts with deviceHostname, then stop the playbook
        relevant_artifacts = phantom.collect2(
            container=container,
            datapath=["artifact:*.cef.deviceHostname"])
        if relevant_artifacts == []:
            phantom.comment(comment="No ip_or_hostname values to operate on; playbook discontinued")
            phantom.error("No ip_or_hostname values to operate on; playbook discontinued")
            phantom.discontinue()

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/artifact_create", parameters=parameters, name="create_host_artifacts", callback=if_hosts_exist)

    return
def add_embedded_bash_script_to_vault(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    # Write an embedded log4j-hunting bash script to the SOAR vault staging area,
    # register it with phantom.vault_add, and pass the resulting vault_id to the
    # next playbook block (upload_bash_script) via community/passthrough.
    phantom.debug("add_embedded_bash_script_to_vault() called")

    parameters = []

    # placeholder passthrough inputs; overwritten with the vault_id in the custom code below
    parameters.append({
        "input_1": None,
        "input_2": None,
        "input_3": None,
        "input_4": None,
        "input_5": None,
        "input_6": None,
        "input_7": None,
        "input_8": None,
        "input_9": None,
        "input_10": None,
    })

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # The script reports the installed java version and running java processes, then
    # scans .jar files for JndiLookup.class and .war files for embedded log4j jars,
    # zipping the resulting CSVs for retrieval.
    # NOTE(review): the exact line layout of this heredoc was reconstructed from a
    # collapsed source -- the shell commands themselves are unchanged.
    bash_script = r"""
#!/bin/bash
# This script is part of the Splunk SOAR playbook called internal_host_ssh_log4j_investigate. It shows
# the installed java version, lists any running java processes, performs a search for the JndiLookup.class
# file in any .jar files found on disk, and searches any .war files for a log4j jar. The output is a human-readable
# log and a set of .csv files to be copied back to SOAR
echo "##############################################################"
echo "splunk_soar_internal_host_ssh_log4j_investigate.sh"
echo "##############################################################"
echo ""
echo "java environment configuration" > /tmp/java_environment.csv
echo "[+] Checking Java version:"
echo "$(java -version)"
echo "java version:" >> /tmp/java_environment.csv
java -version 2>> /tmp/java_environment.csv
echo ""
echo "[+] Checking running Java processes with ps:"
echo "$(ps aux | grep java)"
echo "ps java processes:" >> /tmp/java_environment.csv
echo "$(ps aux | grep java)" >> /tmp/java_environment.csv
echo ""
echo "[+] Checking running Java processes with jps:"
echo "$(jps -v)"
echo "jps java processes:" >> /tmp/java_environment.csv
echo "$(jps -v)" >> /tmp/java_environment.csv
echo "[+] Search .jar files for JndiLookup.class files ..."
echo "jar_files" > /tmp/jars_with_jndi.csv
find / 2>/dev/null -name '*.jar' -type f -print0 | xargs -0 grep JndiLookup.class | awk '{print $3}' | while read -r file
do
    if [ -f "$file" ]; then
        echo "JndiLookup.class found in .jar file: $file"
        echo "$file" >> /tmp/jars_with_jndi.csv
    fi
done
echo ""
echo "[+] Search .war files for log4j .jar files ..."
echo "war_file,jar_size,jar_time_modified,jar_file" > /tmp/wars_with_jars.csv
find / 2>/dev/null -name '*.war' -type f -print0 | xargs -0 grep log4j | awk '{print $3}' | while read -r war_file
do
    if [ -f "$war_file" ]; then
        unzip -l "$war_file" | grep log4j | awk '{print $1"," $2" "$3","$4}' | while read -r jar_file
        do
            echo ".war file $war_file was found containing the file $jar_file"
            echo "$war_file,$jar_file" >> /tmp/wars_with_jars.csv
        done
    fi
done
echo "[+] Zip up the outputs ..."
zip -j /tmp/$1_ssh_log4j_output.zip /tmp/java_environment.csv /tmp/jars_with_jndi.csv /tmp/wars_with_jars.csv
echo "wrote zip file to /tmp/$1_ssh_log4j_output.zip; next we will copy it back to SOAR"
"""

    # write the script into the vault staging directory, then register it in the vault
    file_name = 'splunk_soar_internal_host_ssh_log4j_investigate.sh'
    file_path = '/opt/phantom/vault/tmp/{}'.format(file_name)

    with open(file_path, 'w') as bash_script_file:
        bash_script_file.write(bash_script)

    success, message, vault_id = phantom.vault_add(file_location=file_path, file_name=file_name)
    # hand only the vault_id to the downstream upload step
    parameters = [{'input_1': vault_id}]

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/passthrough", parameters=parameters, name="add_embedded_bash_script_to_vault", callback=upload_bash_script)

    return