def on_start(container):
    """Playbook entry point: parse the container's raw email and feed each
    JSON payload through the module-level parse_json() helper, then attach
    the accumulated results_dict to the container as an artifact.

    NOTE(review): relies on module-level json import, parse_json() and
    results_dict defined elsewhere in this file -- confirm they exist.
    """
    import email
    # collect every artifact bodyText (logged via error() for visibility only)
    email_body = phantom.collect2(container=container, datapath=['artifact:*.cef.bodyText'])
    phantom.error(email_body)
    phantom.debug('on_start() called')
    # the container's raw data is JSON with the original message under 'raw_email'
    raw_email = json.loads(phantom.get_raw_data(container)).get('raw_email')
    b = email.message_from_string(raw_email)
    # parse the email to get the body of the email
    if b.is_multipart():
        # first MIME part is walked for JSON payloads; all parts are logged
        email_message = b.get_payload()[0]
        for payload in b.get_payload():
            phantom.debug(payload.get_payload())
        for part in email_message.walk():
            payload = part.get_payload()  #returns a bytes object
            # payload is expected to be a JSON document; strict=False allows
            # control characters inside strings
            payload = json.loads(payload, strict=False)
            phantom.error(payload)
            phantom.error("=== email payload ===")
            phantom.debug(payload)
            parse_json(payload)
    else:
        # single-part message: the whole payload is the JSON document
        phantom.debug("=== not multipart ===")
        phantom.error(b.get_payload())
        payload = b.get_payload()
        parse_json(payload)
    # results_dict is populated as a side effect of parse_json()
    phantom.error("=== results dict ===")
    phantom.debug(results_dict)
    add_results_to_container(container)
    return
def escalate_close_notify(container):
    """Send a fixed escalation/closing notification email via the smtp asset."""
    #phantom.debug('Action: {0} {1}'.format(action['name'], ('SUCCEEDED' if success else 'FAILED')))

    # single hard-coded parameter set for the 'send email' action
    email_params = {
        'body': "asdf",
        'to': "root@localhost",
        'from': "",
        'attachments': "",
        'subject': "Escalating and closing",
    }
    parameters = [email_params]

    if not parameters:
        phantom.error("'escalate_close_notify' will not be executed due to lack of parameters")
        return
    phantom.act("send email", parameters=parameters, assets=['smtp'], name="escalate_close_notify")
    return
def domain_reputation_2(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Run VirusTotal 'domain reputation' on every domain name returned by
    the reverse_ip_2 action; chains into domain_reputation_2_callback."""
    #phantom.debug('Action: {0} {1}'.format(action['name'], ('SUCCEEDED' if success else 'FAILED')))

    # collect data for 'domain_reputation_2' call
    reverse_ip_results = phantom.collect2(
        container=container,
        datapath=['reverse_ip_2:action_result.data.*.ip_addresses.domain_names',
                  'reverse_ip_2:action_result.parameter.context.artifact_id'],
        action_results=results)

    parameters = []

    # one parameter set per non-empty domain in each result row
    for domain_list, artifact_id in reverse_ip_results:
        if not domain_list:
            continue
        for domain in domain_list:
            if not domain:
                continue
            parameters.append({
                'domain': domain,
                # context (artifact id) is added to associate results with the artifact
                'context': {'artifact_id': artifact_id},
            })

    if not parameters:
        phantom.error("'domain_reputation_2' will not be executed due to lack of parameters")
        return
    phantom.act("domain reputation", parameters=parameters, assets=['virustotal'],
                callback=domain_reputation_2_callback, name="domain_reputation_2",
                parent_action=action)
    return
def create_ticket_3(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Create a ServiceNow ticket summarizing hash detections, impacted
    systems/users, and any hashes submitted for blocking."""
    #phantom.debug('Action: {0} {1}'.format(action['name'], ('SUCCEEDED' if success else 'FAILED')))

    # collect data for 'create_ticket_2' call
    # NOTE(review): wrapping collect2() rows in set() assumes the rows are
    # hashable scalars -- confirm the single-datapath return shape
    file_reputation = phantom.collect2(datapath=['file_reputation_1:filtered-action_result.parameter.hash', 'file_reputation_1:filtered-action_result.summary.positives'])
    blocked_hashes = set(phantom.collect2(datapath='block_hash_2:action_result.parameter.hash'))
    detected_users = set(phantom.collect2(datapath='hunt_file_1:action_result.data.*.process.results.*.username'))
    detected_systems = set(phantom.collect2(datapath='hunt_file_1:action_result.data.*.process.results.*.hostname'))

    title = "Virus Detected on {0} devices".format(len(detected_systems))
    # BUG FIX: user-facing typo "sumbitted" -> "submitted" in ticket text
    description = "Hashes submitted with detections:\n{0}\n\n".format(", ".join(["{0} ({1})".format(*fr) for fr in file_reputation]))
    description += "File was found on {0} devices:\n{1}\n\n".format(len(detected_systems), ', '.join(detected_systems))
    description += "This impacts at least {0} users:\n{1}\n\n".format(len(detected_users), ', '.join(detected_users))
    if len(blocked_hashes):
        description += "{0} hashes were submitted for blocking:\n{1}\n\n".format(len(blocked_hashes), ", ".join(blocked_hashes))

    parameters = []

    # build parameters list for 'create_ticket_2' call
    parameters.append({
        'short_description': title,
        'description': description,
        'fields': "",
    })

    if parameters:
        phantom.act("create ticket", parameters=parameters, assets=['servicenow'], name="create_ticket_3", parent_action=action)
    else:
        phantom.error("'create_ticket_3' will not be executed due to lack of parameters")
    return
def block_ip_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Block every artifact sourceAddress via the A10 Lightning controller."""
    # collect data for 'block_ip_1' call
    container_data = phantom.collect2(container=container, datapath=['artifact:*.cef.sourceAddress', 'artifact:*.id'])

    # one parameter set per artifact that actually carries a source address
    parameters = [
        {
            'smartflow': "default-smartflow",
            'service': "default-service",
            'application': "WWT-API",
            'source': source_address,
            'host': "default-host",
            'action': "deny",
            # context (artifact id) is added to associate results with the artifact
            'context': {'artifact_id': artifact_id},
        }
        for source_address, artifact_id in container_data
        if source_address
    ]

    if not parameters:
        phantom.error("'block_ip_1' will not be executed due to lack of parameters")
        return
    phantom.act("block ip", parameters=parameters, assets=['a10 lightning controller'], name="block_ip_1")
    return
def playbook_local_soc_fork_customer_request_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Parse a CSV file from the vault into 'customer_request' artifacts,
    then launch the local/soc_fork_customer_request playbook."""
    phantom.debug('playbook_local_soc_fork_customer_request_1() called')

    # ----- start of added code -----
    import csv

    # get container id
    container_id = container.get('id', None)

    # use the container id to get information about any files in the vault
    vault_info = phantom.vault_info(container_id=container_id)

    # filter info returned to find the path where the file is stored in the vault
    # NOTE(review): assumes vault_info is a (success, message, info) tuple and
    # [2][0] is the first file's record -- confirm against the SOAR API version
    file_path = vault_info[2][0]["path"]
    phantom.debug('vault file path: {}'.format(file_path))

    # read the .csv file and add artifacts with the label "customer_request" to container
    raw_data = {}
    reader = None
    try:
        with open(file_path, 'r') as f:
            reader = csv.DictReader(f)
            for cef_data in reader:
                # only rows with an 'action' column and at least one address
                # column become artifacts
                cef_data_keys = cef_data.keys()
                if 'action' in cef_data_keys and ('sourceAddress' in cef_data_keys or 'destinationAddress' in cef_data_keys):
                    phantom.debug('adding artifact: {}'.format(cef_data))
                    success, message, artifact_id = phantom.add_artifact(
                        container=container,
                        raw_data=raw_data,
                        cef_data=cef_data,
                        label='customer_request',
                        name='Parsed CSV Artifact',
                        severity='high',
                        identifier=None,
                        artifact_type='network')
                    if not success:
                        phantom.error("Adding Artifact failed: {}".format(message))
    except Exception as e:
        # BUG FIX: e.args[1] raises IndexError for exceptions with fewer than
        # two args, masking the real error; format the exception itself.
        phantom.error("Exception Occurred: {}".format(e))
        return
    # ----- end of added code -----

    # call playbook "local/soc_fork_customer_request", returns the playbook_run_id
    playbook_run_id = phantom.playbook("local/soc_fork_customer_request", container)
    return
def file_reputation_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Run 'file reputation' on filtered fileHash artifacts using the
    TitaniumCloud asset; chains into filter_2 on completion."""
    assets = get_specific_assets("file reputation", ["TitaniumCloud"])
    if (not assets):
        phantom.debug(
            "ReversingLabs/TitaniumCloud::file reputation not found returning."
        )
        # BUG FIX: the log message says "returning" but the function fell
        # through and called phantom.act() with no assets; actually return.
        return

    # collect data for 'file_reputation_1' call
    container_data = phantom.collect2(container=container, datapath=[
        'filtered-artifact:*.cef.fileHash', 'filtered-artifact:*.id'
    ], filter_artifacts=filtered_artifacts)
    phantom.debug(container_data)

    parameters = []

    # build parameters list for 'file_reputation_1' call
    for container_item in container_data:
        if container_item[0]:
            parameters.append({
                'hash': container_item[0],
                # context (artifact id) is added to associate results with the artifact
                'context': {'artifact_id': container_item[1]},
            })

    if parameters:
        phantom.act("file reputation", parameters=parameters, assets=assets,
                    name="file_reputation_1", callback=filter_2)
    else:
        phantom.error(
            "'file_reputation_1' will not be executed due to lack of parameters"
        )
    return
def ip_reputation_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Run VirusTotal 'ip reputation' (delayed 60 minutes) on every IP that
    geolocate_ip_1 operated on."""
    phantom.debug('ip_reputation_1() called')
    #phantom.debug('Action: {0} {1}'.format(action['name'], ('SUCCEEDED' if success else 'FAILED')))

    # collect data for 'ip_reputation_1' call
    results_data_1 = phantom.collect2(
        container=container,
        datapath=[
            'geolocate_ip_1:action_result.parameter.ip',
            'geolocate_ip_1:action_result.parameter.context.artifact_id'
        ],
        action_results=results)

    parameters = []

    phantom.error("=== HANDLE DATA ===")
    phantom.debug(handle)
    phantom.error("=== RESULTS DATA ===")
    phantom.debug(results)

    # build parameters list for 'ip_reputation_1' call
    for results_item_1 in results_data_1:
        if results_item_1[0]:
            parameters.append({
                'ip': results_item_1[0],
                # context (artifact id) is added to associate results with the artifact
                'context': {'artifact_id': results_item_1[1]},
            })

    # calculate start time using delay of 60 minutes
    start_time = datetime.now() + timedelta(minutes=60)

    # BUG FIX: guard against an empty parameters list like every sibling
    # action block, instead of dispatching an act() with nothing to do
    if parameters:
        phantom.act("ip reputation", parameters=parameters, assets=['virustotal'],
                    start_time=start_time, name="ip_reputation_1", parent_action=action)
    else:
        phantom.error("'ip_reputation_1' will not be executed due to lack of parameters")
    return
def detonate_file_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Detonate vault files produced by get_file_2 using the Threat Grid asset."""
    assets = get_specific_assets("detonate file", ["Threat Grid"])
    if (not assets):
        phantom.debug("Threat Grid::detonate file not found returning.")
        # BUG FIX: message says "returning" but execution fell through and
        # phantom.act() would run with assets=[]; actually return.
        return

    # collect data for 'detonate_file_1' call
    filtered_results_data_1 = phantom.collect2(
        container=container,
        datapath=[
            "get_file_2:filtered-action_result.data.*.vault_id",
            "get_file_2:filtered-action_result.parameter.context.artifact_id"
        ],
        action_results=filtered_results)

    parameters = []

    # build parameters list for 'detonate_file_1' call
    for filtered_results_item_1 in filtered_results_data_1:
        if filtered_results_item_1[0]:
            parameters.append({
                'vault_id': filtered_results_item_1[0],
                'file_name': "",
                'vm': "",
                'force_analysis': "",
                'private': "",
                # context (artifact id) is added to associate results with the artifact
                'context': {'artifact_id': filtered_results_item_1[1]},
            })

    if parameters:
        phantom.act("detonate file", parameters=parameters, assets=assets, name="detonate_file_1")
    else:
        phantom.error(
            "'detonate_file_1' will not be executed due to lack of parameters")
    return
def add_results_to_container(container):
    """Attach the module-level results_dict to the container as a single
    'AWS SNS Message' artifact and return the add_artifact success flag."""
    success, message, artifact_id = phantom.add_artifact(
        container=container,
        raw_data={},
        cef_data=results_dict,
        label='aws',
        name='AWS SNS Message',
        severity='medium',
        identifier=None,
        artifact_type='aws')
    phantom.error("=== success/error status message ===")
    phantom.debug(success)
    phantom.error(message)
    return success
def deduplicate_inputs(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Pick the first destination IP and domain from the container's artifacts,
    persist them in run data, and kick off the intelligence branches.

    Stops the playbook (exit) when either value is missing.
    """
    phantom.debug('deduplicate_inputs() called')

    container_data = phantom.collect2(container=container, datapath=[
        'artifact:*.cef.destinationAddress', 'artifact:*.cef.dest_domain',
        'artifact:*.id'
    ])
    container_item_0 = [item[0] for item in container_data]
    container_item_1 = [item[1] for item in container_data]

    deduplicate_inputs__ip = None
    deduplicate_inputs__domain = None

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # BUG FIX: indexing [0] on an empty list raised IndexError before the
    # friendly missing-data message below could run; guard the lookups.
    deduplicate_inputs__ip = container_item_0[0] if container_item_0 else None
    deduplicate_inputs__domain = container_item_1[0] if container_item_1 else None

    if not deduplicate_inputs__ip or not deduplicate_inputs__domain:
        failure_message = "stopping the playbook because either the IP address or domain name was missing from the event"
        phantom.comment(container=container, comment=failure_message)
        phantom.error(failure_message)
        exit(1)

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.save_run_data(key='deduplicate_inputs:ip', value=json.dumps(deduplicate_inputs__ip))
    phantom.save_run_data(key='deduplicate_inputs:domain', value=json.dumps(deduplicate_inputs__domain))

    ip_intelligence_1(container=container)
    domain_intelligence_1(container=container)

    return
def hunt_file_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Run Carbon Black 'hunt file' (binary search) on every artifact fileHash;
    chains into filter_4."""
    assets = get_specific_assets("hunt file", ["Carbon Black"])
    if (not assets):
        phantom.debug("Carbon Black::hunt file not found returning.")
        # BUG FIX: message says "returning" but the function fell through and
        # called phantom.act() with no assets; actually return.
        return

    # collect data for 'hunt_file_1' call
    container_data = phantom.collect2(
        container=container,
        datapath=['artifact:*.cef.fileHash', 'artifact:*.id'])

    parameters = []

    # build parameters list for 'hunt_file_1' call
    for container_item in container_data:
        if container_item[0]:
            parameters.append({
                'hash': container_item[0],
                'range': "",
                'type': "binary",
                # context (artifact id) is added to associate results with the artifact
                'context': {'artifact_id': container_item[1]},
            })

    if parameters:
        phantom.act("hunt file", parameters=parameters, assets=assets,
                    name="hunt_file_1", callback=filter_4)
    else:
        phantom.error(
            "'hunt_file_1' will not be executed due to lack of parameters")
    return
def hunt_domain_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Run Falcon Host 'hunt domain' (count only) on every artifact
    destinationDnsDomain; chains into filter_1."""
    assets = get_specific_assets("hunt domain", ["Falcon Host API"])
    if (not assets):
        phantom.debug("hunt domain/Falcon Host API not found returning.")
        # BUG FIX: message says "returning" but the function fell through and
        # called phantom.act() with no assets; actually return.
        return

    # collect data for 'hunt_domain_1' call
    container_data = phantom.collect2(
        container=container,
        datapath=['artifact:*.cef.destinationDnsDomain', 'artifact:*.id'])

    parameters = []

    # build parameters list for 'hunt_domain_1' call
    for container_item in container_data:
        if container_item[0]:
            parameters.append({
                'domain': container_item[0],
                'count_only': True,
                # context (artifact id) is added to associate results with the artifact
                'context': {'artifact_id': container_item[1]},
            })

    if parameters:
        phantom.act("hunt domain", parameters=parameters, assets=assets,
                    callback=filter_1, name="hunt_domain_1")
    else:
        phantom.error(
            "'hunt_domain_1' will not be executed due to lack of parameters")
    return
def decode_base64(input_string=None, artifact_id=None, **kwargs):
    """
    Decodes provided base64 string

    Args:
        input_string (CEF type: *): Base64 encoded text
        artifact_id (CEF type: phantom artifact id): Phantom Artifact ID

    Returns a JSON-serializable object that implements the configured data paths:
        decoded_string (CEF type: *): Base64 decoded string
        artifact_id (CEF type: phantom artifact id): Phantom Artifact ID
    """
    ############################ Custom Code Goes Below This Line #################################
    import json
    import phantom.rules as phantom
    import re
    from base64 import b64decode

    outputs = {}

    if input_string:
        # BUG FIX: the old suffix-based padding added '=' even to strings that
        # were already a multiple of 4 characters (e.g. "YWJj" -> "YWJj=="),
        # which makes b64decode fail. Pad only up to the next multiple of 4.
        padding_needed = -len(input_string) % 4
        if padding_needed:
            phantom.debug('padding string with {} "=" characters'.format(padding_needed))
            input_string += '=' * padding_needed
        else:
            phantom.debug('padding exists')
        try:
            # BUG FIX: b64decode() returns bytes; calling .replace('\x00', '')
            # with str arguments on bytes raises TypeError. Decode to str
            # first, then strip NUL characters.
            decoded_string = b64decode(input_string).decode('utf-8', errors='replace').replace('\x00', '')
            outputs['decoded_string'] = decoded_string
            outputs['artifact_id'] = artifact_id
        except Exception as e:
            # BUG FIX: typo "Uable" -> "Unable"
            phantom.error('Unable to decode b64 string - {}'.format(e))

    # Return a JSON-serializable object
    assert json.dumps(
        outputs
    )  # Will raise an exception if the :outputs: object is not JSON-serializable
    return outputs
def get_file_2(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Pull each filtered fileHash from Carbon Black into the vault; chains
    into filter_3."""
    assets = get_specific_assets("get file", ["Carbon Black"])
    if (not assets):
        phantom.debug("Carbon Black::get file not found returning.")
        # BUG FIX: message says "returning" but the function fell through and
        # called phantom.act() with no assets; actually return.
        return

    # collect data for 'get_file_2' call
    filtered_container_data = phantom.collect2(
        container=container,
        datapath=[
            'filtered-artifact:*.cef.fileHash', 'filtered-artifact:*.id'
        ],
        filter_artifacts=filtered_artifacts)

    parameters = []

    # build parameters list for 'get_file_2' call
    for filtered_container_item in filtered_container_data:
        if filtered_container_item[0]:
            parameters.append({
                'hash': filtered_container_item[0],
            })

    if parameters:
        phantom.act("get file", parameters=parameters, assets=assets,
                    callback=filter_3, name="get_file_2")
    else:
        phantom.error(
            "'get_file_2' will not be executed due to lack of parameters")
    return
def block_ip_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Deny every artifact sourceAddress through the A10 Lightning controller."""
    # collect data for 'block_ip_1' call
    container_data = phantom.collect2(
        container=container,
        datapath=['artifact:*.cef.sourceAddress', 'artifact:*.id'])

    parameters = []

    # build parameters list for 'block_ip_1' call
    for row in container_data:
        source_address = row[0]
        if not source_address:
            continue
        entry = {
            'smartflow': "default-smartflow",
            'service': "default-service",
            'application': "WWT-API",
            'source': source_address,
            'host': "default-host",
            'action': "deny",
        }
        # context (artifact id) is added to associate results with the artifact
        entry['context'] = {'artifact_id': row[1]}
        parameters.append(entry)

    if parameters:
        phantom.act("block ip", parameters=parameters,
                    assets=['a10 lightning controller'], name="block_ip_1")
    else:
        phantom.error(
            "'block_ip_1' will not be executed due to lack of parameters")
    return
def format_2(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Extract the failing service name from a Nagios 'PROBLEM Service Alert'
    subject, validate it against a whitelist, and format the restart command."""
    phantom.debug('format_2() called')

    container_data = phantom.collect2(
        container=container,
        datapath=[
            'filtered-data:filter_1:condition_1:artifact:*.cef.emailHeaders.Subject'
        ])

    service = ""
    for result in container_data:
        if result[0] and "Service Alert" in result[0]:
            # BUG FIX: unguarded parts[1] / split('/')[1] raised IndexError
            # when the subject did not exactly match the expected Nagios
            # format; skip unparseable subjects instead of crashing.
            parts = result[0].split("** PROBLEM Service Alert: ")
            try:
                service = parts[1].split('/')[1].split(" process is CRITICAL")[0]
            except IndexError:
                phantom.error("failed to parse service name from alert subject")
                continue

    ssh_command = "service {} restart".format(service)

    # only allow restarts of whitelisted service names
    success, message, whitelist = phantom.get_list(
        "nagios_service_monitoring_service_name_whitelist")
    if [service] in whitelist:
        phantom.debug("service name whitelist check passed")
    else:
        phantom.error("service name whitelist check failed")
        phantom.comment(container=container, comment="service name whitelist check failed")
        # pass the failure text through as the "command" so downstream sees it
        ssh_command = "service name whitelist check failed"

    phantom.format(container=container, template=ssh_command, parameters=[""], name="format_2")
    join_execute_program_1(container=container)
    return
def custom_format(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Build a markdown note (device -> Path/LineNumber table) from the JSON
    emitted by run_traversal_script, then chain into add_note_2."""
    phantom.debug("custom_format() called")

    ################################################################################
    # Parse out the JSON returned by the traversal script
    ################################################################################

    run_traversal_script_result_data = phantom.collect2(container=container, datapath=["run_traversal_script:action_result.data.*.std_out"], action_results=results)
    playbook_input_ip_or_hostname = phantom.collect2(container=container, datapath=["playbook_input:ip_or_hostname"])

    run_traversal_script_result_item_0 = [item[0] for item in run_traversal_script_result_data]
    playbook_input_ip_or_hostname_values = [item[0] for item in playbook_input_ip_or_hostname]

    custom_format__note_content = None

    ################################################################################
    ## Custom Code Start
    ################################################################################

    custom_format__note_content = ""

    for script_result_item, ip_hostname in zip(run_traversal_script_result_item_0, playbook_input_ip_or_hostname_values):
        try:
            custom_format__note_content += f"### Device - {ip_hostname}\n"
            custom_format__note_content += " | Path | LineNumber |\n"
            custom_format__note_content += "| --- | --- |\n"
            result_to_json = json.loads(script_result_item)
            for json_result in result_to_json:
                custom_format__note_content += f"| {json_result['Path']} | {json_result['LineNumber']} |\n"
            custom_format__note_content += "\n "
        # BUG FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
        # narrow it to Exception (JSON/KeyError failures are what we expect)
        except Exception:
            phantom.error("Unable to parse JSON")

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.save_run_data(key="custom_format:note_content", value=json.dumps(custom_format__note_content))

    add_note_2(container=container)

    return
def deescalate_close_notify(container):
    """De-escalate the container and email a closing notification with the
    container's id, label, and severity."""
    #phantom.debug('Action: {0} {1}'.format(action['name'], ('SUCCEEDED' if success else 'FAILED')))

    # collect data for 'deescalate_close_notify' call
    container_id = container['id']
    phantom.deescalate(container)

    parameters = []

    # build parameters list for 'deescalate_close_notify' call
    # BUG FIX: container['id'] may be an int (it is passed as an id elsewhere
    # in this file); "str" + int raises TypeError, so build the strings with
    # format() which handles either type.
    parameters.append({
        'from': "",
        'to': "root@localhost",
        'subject': "Descalating and Closing Container ID: {0}".format(container['id']),
        'body': ("The c2 investigate and contain playbook on the Phantom platform has completed and will be closing the container."
                 "Information about the container is as follows: \n Container ID: {0}"
                 "\nContainer Label: {1}"
                 "\nContainer Severity: {2}").format(container['id'], container['label'], container['severity']),
        'attachments': "",
    })

    if parameters:
        phantom.act("send email", parameters=parameters, assets=['smtp'], name="deescalate_close_notify")
    else:
        phantom.error(
            "'deescalate_close_notify' will not be executed due to lack of parameters"
        )
    return
def get_system_info_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Run Carbon Black 'get system info' on each filtered source address;
    chains into Send_Email_malicious."""
    #phantom.debug('Action: {0} {1}'.format(action['name'], ('SUCCEEDED' if success else 'FAILED')))

    # collect data for 'get_system_info_1' call
    filtered_container_data = phantom.collect2(
        container=container,
        datapath=[
            'filtered-artifact:*.cef.sourceAddress', 'filtered-artifact:*.id'
        ],
        filter_artifacts=filtered_artifacts)

    # stash the filtered rows for a later step to pick up under "rkitdata"
    phantom.save_data(filtered_container_data, key="rkitdata")

    # build parameters list for 'get_system_info_1' call
    parameters = [
        {'ip_hostname': row[0]}
        for row in filtered_container_data
        if row[0]
    ]

    if not parameters:
        phantom.error(
            "'get_system_info_1' will not be executed due to lack of parameters"
        )
        return
    phantom.act("get system info", parameters=parameters, assets=['carbonblack'],
                callback=Send_Email_malicious, name="get_system_info_1")
    return
def detonate_file_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Detonate every file currently in the container's vault with the
    WildFire asset; chains into filter_3."""
    #phantom.debug('Action: {0} {1}'.format(action['name'], ('SUCCEEDED' if success else 'FAILED')))

    # CLEANUP: removed a dead collect2() over deviceCustomString6 whose result
    # was never used; the parameters below come from the vault only.

    vault_items = phantom.Vault.get_file_info(container_id=container['id'])

    parameters = []

    # build parameters list for 'detonate_file_1' call
    for vault_item in vault_items:
        if vault_item['vault_id']:
            parameters.append({
                'file_name': "",
                'vault_id': vault_item['vault_id'],
            })

    if parameters:
        phantom.act("detonate file", parameters=parameters, assets=['wildfire'],
                    callback=filter_3, name="detonate_file_1")
    else:
        phantom.error(
            "'detonate_file_1' will not be executed due to lack of parameters")
    return
def quarantine_device_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Quarantine each device saved earlier under the "rkitdata" key; chains
    into get_system_info_1."""
    # collect data for 'quarantine_device_1' call (consumes and clears the
    # rows that get_system_info_1 stashed with phantom.save_data)
    container_data = phantom.get_data("rkitdata", clear_data=True)

    # one parameter set per row that carries an ip/hostname
    parameters = [
        {
            'ip_hostname': ip_hostname,
            # context (artifact id) is added to associate results with the artifact
            'context': {'artifact_id': artifact_id},
        }
        for ip_hostname, artifact_id in container_data
        if ip_hostname
    ]

    if not parameters:
        phantom.error(
            "'quarantine_device_1' will not be executed due to lack of parameters"
        )
        return
    phantom.act("quarantine device", parameters=parameters, assets=['carbonblack'],
                callback=get_system_info_1, name="quarantine_device_1")
    return
def on_finish(container, summary):
    """Playbook finish hook: log geolocation and reputation results and dump
    every action run's full results."""
    phantom.debug('on_finish() called')

    # This function is called after all actions are completed.
    # summary of all the action and/or all detals of actions
    # can be collected here.

    # latitude/longitude pairs from geolocate_ip_1
    lat_long = phantom.collect2(container=container, datapath=['geolocate_ip_1:action_result.data.*.latitude', 'geolocate_ip_1:action_result.data.*.longitude'])
    # URLs flagged by ip_reputation_1
    malicious_urls = phantom.collect2(container=container, datapath=['ip_reputation_1:action_result.data.*.detected_urls.*.url'])

    phantom.error("=== LAT/LONG ===")
    phantom.debug(lat_long)
    phantom.error("=== MALICIOUS URLS ===")
    for url_row in malicious_urls:
        phantom.debug(url_row)

    # dump the detailed results of every action run in this playbook
    summary_json = phantom.get_summary()
    if 'result' in summary_json:
        for action_result in summary_json['result']:
            if 'action_run_id' not in action_result:
                continue
            action_results = phantom.get_action_results(
                action_run_id=action_result['action_run_id'],
                result_data=False,
                flatten=False)
            phantom.debug(action_results)

    return
def parse_and_save_iwlist(iwlist):
    """Parse iwlist wireless-scan output into a list of access-point dicts
    (mac_address, ESSID, radio_frequency, signal_strength, security_protocol)
    and save it to run data under 'parsed_access_points'."""
    phantom.debug('parsing the iwlist data returned from the wireless scan')

    iwlist = iwlist[0][0]
    if not iwlist:
        phantom.error("no results were received from the wifi sensor")
        return
    if 'Scan completed' not in iwlist.split('\n')[0]:
        phantom.error("failed to parse the results of iwlist")
        return

    # disregard the first line
    iwlist = iwlist.split('\n', 1)[1]

    # split on the string that starts the results for each access point (iwlist calls them "Cells")
    access_points = re.split(" Cell ", iwlist)
    access_points = access_points[1:]

    # parse out the mac address, ESSID, radio frequency, signal strength, and security protocol
    # IDIOM FIX: regex patterns are raw strings so '\d' is not an invalid
    # escape sequence (DeprecationWarning, later SyntaxWarning, in CPython)
    for i, access_point in enumerate(access_points):
        lines = access_point.split('\n')
        access_points[i] = {}
        if re.match(r'\d+ - Address: ', lines[0]):
            access_points[i]['mac_address'] = lines[0].split(' - Address: ')[-1]
        else:
            phantom.error('failed to parse one of the access_points returned from iwlist')
            return
        for line in lines[1:]:
            if re.match(r' ESSID:"', line):
                access_points[i]['ESSID'] = line.split('ESSID:')[-1].strip('"')
            if re.match(r' Frequency:\d', line):
                access_points[i]['radio_frequency'] = line.split('Frequency:')[-1].strip()
            if re.match(r' Quality=\d', line):
                access_points[i]['signal_strength'] = line.strip()
            if re.match(r' IE: IEEE 802.11i', line):
                access_points[i]['security_protocol'] = line.split('IE:')[-1].strip()
        # no string matching "IEEE 802.11i" means plaintext
        if not access_points[i].get('security_protocol'):
            access_points[i]['security_protocol'] = 'plaintext'

    phantom.debug('parsed out the following wifi access points:')
    phantom.debug(access_points)
    phantom.save_run_data(value=json.dumps(access_points), key='parsed_access_points')
    return
def create_host_artifacts(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Create one 'potential log4j host' artifact per host produced by the
    enumerate_hosts custom function; discontinues the playbook when there is
    nothing to operate on. Chains into if_hosts_exist."""
    phantom.debug("create_host_artifacts() called")

    id_value = container.get("id", None)

    enumerate_hosts_data = phantom.collect2(
        container=container,
        datapath=["enumerate_hosts:custom_function_result.data.*.item"],
        scope="all")

    # one artifact_create parameter set per enumerated host row
    parameters = [
        {
            "name": "potential log4j host",
            "tags": None,
            "label": None,
            "severity": "high",
            "cef_field": None,
            "cef_value": None,
            "container": id_value,
            "input_json": host_row[0],
            "cef_data_type": None,
            "run_automation": None,
        }
        for host_row in enumerate_hosts_data
    ]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # only operate on parameters with input_json, because thats where the data should be
    parameters = [parameter for parameter in parameters if parameter['input_json']]

    if parameters == []:
        # if there are no new artifacts and no existing artifacts with deviceHostname, then stop the playbook
        relevant_artifacts = phantom.collect2(
            container=container,
            datapath=["artifact:*.cef.deviceHostname"])
        if relevant_artifacts == []:
            phantom.comment(
                comment=
                "No ip_or_hostname values to operate on; playbook discontinued"
            )
            phantom.error(
                "No ip_or_hostname values to operate on; playbook discontinued"
            )
            phantom.discontinue()

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/artifact_create",
                            parameters=parameters,
                            name="create_host_artifacts",
                            callback=if_hosts_exist)

    return
def run_query_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Run a process query for every source and destination address artifact
    against the configured Splunk / Carbon Black assets."""
    assets = get_specific_assets("run query", ["Splunk Enterprise", "Carbon Black"])
    if (not assets):
        phantom.debug(
            "Did not find any asset configured, supporting run query")
        return

    container_data_src = phantom.collect2(
        container=container,
        datapath=['artifact:*.cef.sourceAddress', 'artifact:*.id'])
    container_data_dst = phantom.collect2(
        container=container,
        datapath=['artifact:*.cef.destinationAddress', 'artifact:*.id'])

    phantom.debug("Got the following assets:")
    phantom.debug(','.join(assets))

    # build parameters list for 'run_query_1' call: source rows first, then
    # destination rows (same order as two separate loops would produce)
    parameters = []
    for query_value, artifact_id in container_data_src + container_data_dst:
        if not query_value:
            continue
        parameters.append({
            'query': query_value,
            'display': "",
            'type': "process",
            # context (artifact id) is added to associate results with the artifact
            'context': {'artifact_id': artifact_id},
        })

    if not parameters:
        phantom.error(
            "'run_query_1' will not be executed due to lack of parameters")
        return
    phantom.act("run query", parameters=parameters, assets=assets, name="run_query_1")
    return
def base64_decode(input_string=None, split_input=None, delimiter=None, **kwargs):
    """
    Decode one or more strings encoded with base64. The input can be a single chunk of base64 or a list of strings separated by a delimiter.
    
    Args:
        input_string (CEF type: *): Y2FsYy5leGU=
        split_input: Defaults to False. If True, use the delimiter to split the input string and decode each of the components separately if it is base64.
        delimiter: The character to use as a delimiter if split_input is True. Defaults to a comma. The special option "space" can be used to split on a single space character (" ").
    
    Returns a JSON-serializable object that implements the configured data paths:
        *.input_string (CEF type: *): Base64 string before being decoded
        *.output_string (CEF type: *): Resulting string after decoding from base64
    """
    ############################ Custom Code Goes Below This Line #################################
    import json
    import phantom.rules as phantom
    import base64

    if not input_string or not isinstance(input_string, str):
        raise ValueError('input_string must be a string')

    def _is_valid_base64(candidate):
        # Round-trip check: decode then re-encode and compare. Non-ascii
        # characters or invalid base64 raise, and the candidate is rejected.
        try:
            if isinstance(candidate, str):
                candidate_bytes = bytes(candidate, 'ascii')
            elif isinstance(candidate, bytes):
                candidate_bytes = candidate
            else:
                raise ValueError("Argument must be string or bytes")
            return base64.b64encode(base64.b64decode(candidate_bytes)) == candidate_bytes
        except Exception:
            return False

    # normalize split_input to a real boolean; it defaults to False and also
    # accepts the string "true" (any case)
    split_input = split_input == True or (isinstance(split_input, str) and split_input.lower() == 'true')

    # build the list of candidate strings: either the whole input, or the
    # delimiter-separated components
    if split_input:
        if not isinstance(delimiter, str):
            delimiter = ','
        if delimiter == 'space':
            delimiter = ' '
        candidates = input_string.split(delimiter)
    else:
        candidates = [input_string]

    # decode each candidate that is valid base64; others are silently skipped
    outputs = []
    for candidate in candidates:
        if not _is_valid_base64(candidate):
            continue
        try:
            decoded = base64.b64decode(candidate.encode('ascii'), validate=True)
            if decoded:
                outputs.append({
                    'input_string': candidate,
                    # NUL bytes are stripped from the decoded text
                    'output_string': decoded.decode('ascii').replace('\x00', '')
                })
        except Exception as e:
            phantom.error(f'Unable to decode string: {e}')

    assert json.dumps(outputs)  # Will raise an exception if the :outputs: object is not JSON-serializable
    return outputs
def process_responses(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Turn prompt responses from threat_intel_indicator_review into (indicator, tag) pairs and pass them to tag_indicators."""
    phantom.debug("process_responses() called")

    review_rows = phantom.collect2(
        container=container,
        datapath=["threat_intel_indicator_review:action_result.summary.responses", "threat_intel_indicator_review:action_result.parameter.context.artifact_id"],
        action_results=results)
    indicator_rows = phantom.collect2(
        container=container,
        datapath=["collect_all_indicators:custom_function_result.data.all_indicators.*.cef_value"])

    threat_intel_indicator_review_summary_responses = [item[0] for item in review_rows]
    collect_all_indicators_data_all_indicators___cef_value = [item[0] for item in indicator_rows]

    parameters = [{
        "input_1": threat_intel_indicator_review_summary_responses,
        "input_2": collect_all_indicators_data_all_indicators___cef_value,
        "input_3": None,
        "input_4": None,
        "input_5": None,
        "input_6": None,
        "input_7": None,
        "input_8": None,
        "input_9": None,
        "input_10": None,
    }]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    responses = threat_intel_indicator_review_summary_responses[0]
    indicator_values = collect_all_indicators_data_all_indicators___cef_value

    # lookup table to turn prompt responses into tags to add.
    # "Do nothing" is not included, so no tags will be added
    response_to_tag_map = {
        "Tag to block": "marked_for_block",
        "Tag as safe": "safe"
    }

    # overwrite the parameters list with a list of one indicator and one tag per parameter dictionary
    parameters = []
    for indicator_index, indicator_value in enumerate(indicator_values):
        # each indicator produced two prompt answers: a preconfigured choice
        # followed by a freeform answer
        preconfigured_response = responses[indicator_index * 2]
        freeform_response = responses[indicator_index * 2 + 1]

        # handle the preconfigured responses
        if preconfigured_response in response_to_tag_map:
            phantom.comment(comment="Tagging the indicator {} with the preconfigured tag {}".format(indicator_value, response_to_tag_map[preconfigured_response]))
            parameters.append({"input_1": [indicator_value, response_to_tag_map[preconfigured_response]]})
        elif preconfigured_response != 'Do nothing':
            phantom.error('The response {} was chosen for the indicator {}, but that response is not in the set of allowed responses.'.format(preconfigured_response, indicator_value))

        # handle the freeform responses
        if freeform_response.lower() not in ['n', 'none', 'na', 'n/a']:
            for tag in freeform_response.replace(' ', '').split(','):
                phantom.comment(comment="Tagging the indicator {} with the freeform tag {}".format(indicator_value, tag))
                parameters.append({"input_1": [indicator_value, tag]})

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/passthrough", parameters=parameters, name="process_responses", callback=tag_indicators)

    return
def enumerate_files_to_delete(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Read the 'log4j_hosts_and_files' custom list and build one artifact json per host row, then pass them to create_file_artifacts."""
    phantom.debug("enumerate_files_to_delete() called")

    parameters = []

    # input_1 carries the custom list name; inputs 2-10 are unused
    parameters.append({
        "input_1": "log4j_hosts_and_files",
        "input_2": None,
        "input_3": None,
        "input_4": None,
        "input_5": None,
        "input_6": None,
        "input_7": None,
        "input_8": None,
        "input_9": None,
        "input_10": None,
    })

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # use custom code to read a custom list of potential log4j files to delete
    # and make a json to create an artifact for each one.
    # the expected format of the custom list is:
    # hostname1 | unix | /full/path/to/delete/on/hostname_1
    # 1.1.1.1 | windows | C:\\Full\Path\To\Delete\On\1_1_1_1
    #
    # the list can either have all rows with files or no rows with files. some rows with files and some without will not work

    custom_list_name = parameters[0]['input_1']
    success, message, rows = phantom.get_list(list_name=custom_list_name)

    # return early if the list is not found
    if not success:
        phantom.debug(
            "Failed to find the custom list, so only existing artifacts will be used"
        )
        phantom.custom_function(custom_function="community/passthrough", parameters=[], name="enumerate_files_to_delete", callback=create_file_artifacts)
        return

    # loop through the rows and create a list of artifact jsons to add
    # the three columns are expected to be the ip_or_hostname, the operating system family, and the full path to the file to delete
    parameters = []
    unix_hosts = []
    windows_hosts = []
    unknown_hosts = []

    # has_files is decided by the FIRST row only: if it has a path with a
    # slash, every other row is expected to have one too
    has_files = False
    if rows[0][2] and ('/' in rows[0][2] or '\\' in rows[0][2]):
        has_files = True

    for row in rows:
        # hostname and operating system are required, but file path is optional.
        # files will not be deleted if file path is missing
        if row[0] and row[1]:
            # only windows and unix are supported, and operating system family is required
            if row[1] == 'unix' or row[1] == 'windows':
                artifact_dict = {
                    'cef_data': {
                        'deviceHostname': row[0],
                        'operatingSystemFamily': row[1],
                        'filePath': row[2]
                    },
                    'field_mapping': {
                        'deviceHostname': ['host name', 'ip'],
                        'filePath': ['file path']
                    }
                }
                # full paths should have at least one slash somewhere in them
                if row[2] and ('/' in row[2] or '\\' in row[2]):
                    if has_files:
                        artifact_dict['cef_data']['filePath'] = row[2]
                        artifact_dict['field_mapping']['filePath'] = [
                            'file path'
                        ]
                    else:
                        # NOTE(review): this logs "skipping" but the artifact is
                        # still appended below with filePath set — confirm
                        # whether this row should actually be excluded
                        phantom.debug(
                            "skipping host {} with file {} because other rows did not have files"
                            .format(row[0], row[2]))
                else:
                    if has_files:
                        # mixed rows (some with files, some without) abort the playbook
                        phantom.error(
                            "host {} is missing a file; playbook will be discontinued"
                            .format(row[0]))
                        phantom.discontinue()
                parameters.append({'input_1': artifact_dict})

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/passthrough", parameters=parameters, name="enumerate_files_to_delete", callback=create_file_artifacts)

    return
def update_artifact(artifact_id=None, data=None, overwrite=None, **kwargs):
    """
    Update artifact with a valid json dictionary. See Phantom Artifact REST API for valid dictionary.
    
    Args:
        artifact_id (CEF type: *): ID of artifact to update
        data (CEF type: *): JSON formatted data. See artifact REST api
        overwrite (CEF type: *): Optional: Leave blank for False
    
    Returns a JSON-serializable object that implements the configured data paths:
    """
    ############################ Custom Code Goes Below This Line #################################
    import json
    import phantom.rules as phantom

    def field_updater(data, update_data, overwrite):
        # lists: union with the existing values unless overwriting
        if type(update_data) == list:
            if overwrite:
                return update_data
            return list(set((data or []) + update_data))
        # dicts: recurse per key so keys not mentioned in the update survive
        if type(update_data) == dict:
            for key in update_data.keys():
                data[key] = field_updater(data.get(key, {}), update_data[key], overwrite)
            return data
        # scalars: replace when overwriting, or when nothing is set yet
        if (overwrite and data) or not data:
            return update_data
        return data

    outputs = {}

    try:
        data = json.loads(data)
    except Exception as err:
        return phantom.error('Unable to parse "data" field: {}'.format(err))

    # blank/None overwrite means False
    overwrite = overwrite or False

    # fetch the current artifact so the update can be merged into it
    artifact_url = phantom.build_phantom_rest_url('artifact/{}'.format(artifact_id))
    response = phantom.requests.get(artifact_url, verify=False)
    if response.status_code != 200:
        return phantom.error('Unable to find artifact id: {}. Response: {}'.format(artifact_id, response.text))

    artifact_data = response.json()
    update_data = {
        key: field_updater(artifact_data.get(key, {}), data[key], overwrite)
        for key in data.keys()
    }

    post_response = phantom.requests.post(artifact_url, data=json.dumps(update_data), verify=False)
    if post_response.status_code != 200:
        return phantom.error('Unable to save artifact data: {}'.format(post_response.text))

    phantom.debug('Successfully updated artifact ID: {}'.format(artifact_id))

    # Return a JSON-serializable object
    assert json.dumps(outputs)  # Will raise an exception if the :outputs: object is not JSON-serializable
    return outputs
def container_update(container_input=None, name=None, description=None, label=None, owner=None, sensitivity=None, severity=None, status=None, tags=None, input_json=None, **kwargs):
    """
    Allows updating various attributes of a container in a single custom function. Any attributes of a container not listed can be updated via the input_json parameter.
    
    Args:
        container_input (CEF type: phantom container id): Supports a container id or container dictionary
        name: Optional parameter to change container name
        description: Optional parameter to change the container description
        label (CEF type: phantom container label): Optional parameter to change the container label
        owner: Optional parameter to change the container owner. Accepts a username or role name or keyword "current" to set the currently running playbook user as the owner.
        sensitivity: Optional parameter to change the container sensitivity.
        severity: Optional parameter to change the container severity.
        status: Optional parameter to change the container status.
        tags: Optional parameter to change the container tags. Must be in the format of a comma separated list.
        input_json: Optional parameter to modify any extra attributes of a container. Input_json will be merged with other inputs. In the event of a conflict, input_json will take precedence.
    
    Returns a JSON-serializable object that implements the configured data paths:
    """
    ############################ Custom Code Goes Below This Line #################################
    import json
    import phantom.rules as phantom

    outputs = {}
    update_dict = {}

    # accept either a container id or an already-fetched container dict
    if isinstance(container_input, int):
        container = phantom.get_container(container_input)
    elif isinstance(container_input, dict):
        container = container_input
    else:
        raise TypeError("container_input is neither a int or a dictionary")

    if name:
        update_dict['name'] = name
    if description:
        update_dict['description'] = description
    if label:
        update_dict['label'] = label
    if owner:
        # If keyword 'current' entered then translate effective_user id to a username
        if owner.lower() == 'current':
            update_dict['owner_id'] = phantom.get_effective_user()
        else:
            # Attempt to translate name to owner_id
            # BUGFIX: the username filter previously contained a masked
            # placeholder ('"******"') instead of the owner value; interpolate
            # the owner the same way the role lookup below does.
            url = phantom.build_phantom_rest_url('ph_user') + f'?_filter_username="{owner}"'
            data = phantom.requests.get(url, verify=False).json().get('data')
            if data and len(data) == 1:
                update_dict['owner_id'] = data[0]['id']
            elif data and len(data) > 1:
                phantom.error(f'Multiple matches for owner "{owner}"')
            else:
                # Attempt to translate name to role_id
                url = phantom.build_phantom_rest_url('role') + f'?_filter_name="{owner}"'
                data = phantom.requests.get(url, verify=False).json().get('data')
                if data and len(data) == 1:
                    update_dict['role_id'] = data[0]['id']
                elif data and len(data) > 1:
                    phantom.error(f'Multiple matches for role "{owner}"')
                else:
                    phantom.error(f'"{owner}" is not a valid username or role')
    if sensitivity:
        update_dict['sensitivity'] = sensitivity
    if severity:
        update_dict['severity'] = severity
    if status:
        update_dict['status'] = status
    if tags:
        # comma separated string -> list, with spaces stripped
        tags = tags.replace(" ", "").split(",")
        update_dict['tags'] = tags
    if input_json:
        json_dict = json.loads(input_json)
        # Merge dictionaries together. The second argument, "**json_dict"
        # will take precedence and overwrite any duplicate parameters.
        update_dict = {**update_dict, **json_dict}

    if update_dict:
        phantom.debug(
            'Updating container {0} with the following information: "{1}"'.
            format(container['id'], update_dict))
        phantom.update(container, update_dict)
    else:
        phantom.debug(
            "Valid container entered but no valid container changes provided.")

    # Return a JSON-serializable object
    assert json.dumps(outputs)  # Will raise an exception if the :outputs: object is not JSON-serializable
    return outputs