def update_case(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Refresh the live case: clear stale pins, then re-pin and add an artifact for each escalated AP."""
    phantom.debug('update_case() called')

    pins_response = phantom.collect2(
        container=container,
        datapath=['get_pins:action_result.data.*.response_body'],
        action_results=results)

    # pull back the data this run stashed earlier
    matched_aps = json.loads(phantom.get_run_data(key='fuzzy_matched_access_points'))
    case_id = int(phantom.get_run_data(key='live_case_id'))

    # wipe the existing pins so re-runs do not stack duplicates
    for existing_pin in pins_response[0][0]['data']:
        phantom.delete_pin(int(existing_pin['id']))

    # one pin plus one artifact per access point that fuzzy-matched the greylist
    for access_point in matched_aps:
        if not access_point['is_escalated']:
            continue
        phantom.pin(container=case_id,
                    message=access_point['signal_strength'],
                    data=access_point['mac_address'],
                    pin_type="card_medium",
                    pin_style="red")
        phantom.add_artifact(container=case_id,
                             raw_data={},
                             cef_data=access_point,
                             label='wireless',
                             name='suspicious access point detection',
                             severity='high',
                             identifier=None,
                             artifact_type='network')

    decision_1(container=container)
    return
def create_containers(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """For every 'critical' peer in the saved list, open a new events container with a host artifact."""
    phantom.debug('create_containers() called')

    read_list__peer_list = json.loads(phantom.get_run_data(key='read_list:peer_list'))

    ################################################################################
    ## Custom Code Start
    ################################################################################

    for entry in read_list__peer_list:
        # entry layout: (host, ?, priority)
        if entry[2] not in ["critical"]:
            continue
        phantom.debug("%s is priority %s" % (entry[0], entry[2]))
        status, message, cid = phantom.create_container(
            name="Possible server malware", label="events")
        #phantom.set_severity(cid, "high")
        phantom.add_artifact(container=cid,
                             raw_data={},
                             cef_data={"sourceAddress": entry[0]},
                             label="infection",
                             name="Possibly infected host",
                             severity="high",
                             artifact_type="host")

    ################################################################################
    ## Custom Code End
    ################################################################################
    return
def L5_CF_Create_Containers_From_List_py3(to_be_containerized=None, container_label=None, **kwargs):
    """
    From a list of lists, create a set of Phantom containers and add in the appropriate artifacts

    Args:
        to_be_containerized: List of Lists for things to be containerized.
        container_label: This will be the label applied to the container

    Returns a JSON-serializable object that implements the configured data paths:
        new_container_ids: List of container id's that have been created by this Custom Function.
    """
    ############################ Custom Code Goes Below This Line #################################
    import json
    import phantom.rules as phantom
    import re

    outputs = {}

    # BUGFIX: use a raw string — "\d" in a non-raw literal is an invalid escape
    # sequence (SyntaxWarning on modern Python 3). Also compile once, outside the loop.
    ipv4_re = re.compile(r"^\d+\.\d+\.\d+\.\d+$")

    new_container_ids = []
    temp_peer_track = []

    # NOTE(review): only the first sub-list is processed — confirm the caller always
    # wraps the rows in an outer single-element list.
    for item in to_be_containerized[0]:
        phantom.debug(item)
        (peer, priority, count) = item
        if peer not in temp_peer_track:
            # Phantom artifact severities have no "critical"; map it down to "high".
            if 'critical' in priority:
                priority = "high"
            temp_peer_track.append(peer)
            sta, msg, cid = phantom.create_container(
                name="Malware Peer found: %s" % peer, label=container_label)
            # dotted-quad peers become IP artifacts, anything else a hostname artifact
            if ipv4_re.match(peer) is not None:
                phantom.add_artifact(container=cid,
                                     raw_data={},
                                     cef_data={"destinationAddress": peer},
                                     label=container_label,
                                     name="Malware IP Peer: %s" % peer,
                                     severity=priority,
                                     artifact_type="host")
            else:
                phantom.add_artifact(container=cid,
                                     raw_data={},
                                     cef_data={"destinationHostName": peer},
                                     label=container_label,
                                     name="Malware Hostname Peer: %s" % peer,
                                     severity=priority,
                                     artifact_type="host")
            new_container_ids.append(cid)

    outputs = {"new_container_ids": new_container_ids}

    # Return a JSON-serializable object
    assert json.dumps(outputs)  # Will raise an exception if the :outputs: object is not JSON-serializable
    return outputs
def on_start(container):
    """Parse the Splunk _raw CEF field into two artifacts and rename the container."""
    phantom.debug('on_start() called')

    # grab the raw Splunk event text from the first artifact
    raw_field = phantom.collect2(container=container, datapath=['artifact:*.cef._raw'])[0][0]
    phantom.debug(raw_field)

    pieces = raw_field.split(',')

    # the first comma-separated token names the container; the rest are data items
    container_name = pieces.pop(0)

    parsed_fields = {}
    extra_fields = {}
    custom_index = 1
    # items shaped "key:value" become named CEF fields; anything with zero or
    # multiple ':' is stored as a numbered custom-string (csN) field instead
    for piece in pieces:
        parts = piece.split(":")
        if len(parts) == 2:
            parsed_fields[parts[0].replace(" ", "")] = parts[1]
        else:
            extra_fields["cs" + str(custom_index)] = piece
            custom_index = custom_index + 1

    phantom.debug(container_name)
    phantom.debug(parsed_fields)
    phantom.debug(extra_fields)

    raw = {}
    # artifact for the cleanly parsed key/value pairs
    phantom.add_artifact(container=container, raw_data=raw, cef_data=parsed_fields,
                         label='event', name='splunk data', severity='low',
                         identifier=None, artifact_type='network')
    # artifact for everything that did not split into exactly key:value
    phantom.add_artifact(container=container, raw_data=raw, cef_data=extra_fields,
                         label='event', name='splunk data', severity='low',
                         identifier=None, artifact_type='network')

    # rename the container after the leading token
    success, message = phantom.update(container, {"name": container_name})
    return
def parse_list(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Read the named custom list and open an events container for each critical/high peer."""
    phantom.debug("parse_list() called")

    playbook_input_list_name = phantom.collect2(
        container=container, datapath=["playbook_input:list_name"])
    playbook_input_list_name_values = [item[0] for item in playbook_input_list_name]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    phantom.debug("parse_list input = {}".format(playbook_input_list_name))

    # rows look like (host, ?, priority)
    sta, msg, read_list__peer_list = phantom.get_list(
        list_name=playbook_input_list_name_values[0])

    for row in read_list__peer_list:
        if row[2] not in ["critical", "high"]:
            continue
        phantom.debug("%s is priority %s" % (row[0], row[2]))
        status, message, cid = phantom.create_container(
            name="Possible server malware", label="events")
        #phantom.set_severity(cid, "high")
        phantom.add_artifact(container=cid,
                             raw_data={},
                             cef_data={"sourceAddress": row[0]},
                             label="infection",
                             name="Possibly infected host",
                             severity="high",
                             artifact_type="host")

    ################################################################################
    ## Custom Code End
    ################################################################################
    return
def dstPrivateArtifacts(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Record the private destination IPs and their reverse-DNS names in a comment artifact."""
    phantom.debug('dstPrivateArtifacts() called')

    results_data_1 = phantom.collect2(
        container=container,
        datapath=[
            'DST_Private_Reverse_DNS:action_result.parameter.ip',
            'DST_Private_Reverse_DNS:action_result.summary.hostname'
        ],
        action_results=results)

    results_item_1_0 = [item[0] for item in results_data_1]
    results_item_1_1 = [item[1] for item in results_data_1]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    addresses = results_item_1_0
    hostnames = results_item_1_1
    phantom.debug(addresses)
    phantom.debug(hostnames)

    # note: the CEF values are the full lists from the action results, not single entries
    cef = {
        'destinationAddress': addresses,
        'destinationDnsDomain': hostnames,
    }

    success, message, artifact_id = phantom.add_artifact(
        container=container, raw_data=None, cef_data=cef, label="comment",
        name="Destination Private IP", severity="medium", identifier=None,
        artifact_type=None, field_mapping=None, trace=False,
        run_automation=False)
    phantom.debug('artifact added as id: ' + str(artifact_id))

    ################################################################################
    ## Custom Code End
    ################################################################################

    join_playbook_local_local_Message_Print_1(container=container)
    return
def launch_reset_playbook(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Add a compromised-account artifact per affected AD user, then launch the reset playbook."""
    phantom.debug('launch_reset_playbook() called')

    filtered_results_data_1 = phantom.collect2(
        container=container,
        datapath=[
            "filtered-data:get_active_ad_users:condition_1:get_affected_ad_users:action_result.data.*.samaccountname"
        ])
    filtered_results_item_1_0 = [item[0] for item in filtered_results_data_1]

    ################################################################################
    ## Custom Code Start
    ################################################################################

    for account in filtered_results_item_1_0:
        phantom.debug("[DEBUG]: account = {}".format(account))
        phantom.add_artifact(container=container,
                             raw_data={'compromisedUserName': account},
                             cef_data={'compromisedUserName': account},
                             label='compromised_account',
                             name='compromised account ' + account,
                             identifier=None,
                             artifact_type='user name',
                             severity='high',
                             run_automation=True)

    # calling the playbook here is necessary because artifacts are not evaluated while
    # this code block runs. Consequently, all artifacts are fired as a list instead of
    # individually without this next call to playbook().
    phantom.playbook(playbook='local/activedirectory_reset_password',
                     container=container,
                     show_debug=True)

    ################################################################################
    ## Custom Code End
    ################################################################################
    return
def playbook_local_soc_fork_customer_request_1(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Parse the container's vault CSV into customer_request artifacts, then fork the SOC playbook."""
    phantom.debug('playbook_local_soc_fork_customer_request_1() called')

    # ----- start of added code -----
    import csv

    # get container id
    container_id = container.get('id', None)

    # use the container id to get information about any files in the vault
    vault_info = phantom.vault_info(container_id=container_id)

    # NOTE(review): vault_info[2][0] assumes at least one vault file and a fixed
    # result-tuple shape — confirm against phantom.vault_info()'s return format.
    file_path = vault_info[2][0]["path"]
    phantom.debug('vault file path: {}'.format(file_path))

    # read the .csv file and add artifacts with the label "customer_request" to the container
    raw_data = {}
    reader = None
    try:
        with open(file_path, 'r') as f:
            reader = csv.DictReader(f)
            for cef_data in reader:
                cef_data_keys = cef_data.keys()
                # only rows with an action plus a source or destination address become artifacts
                if 'action' in cef_data_keys and (
                        'sourceAddress' in cef_data_keys
                        or 'destinationAddress' in cef_data_keys):
                    phantom.debug('adding artifact: {}'.format(cef_data))
                    success, message, artifact_id = phantom.add_artifact(
                        container=container,
                        raw_data=raw_data,
                        cef_data=cef_data,
                        label='customer_request',
                        name='Parsed CSV Artifact',
                        severity='high',
                        identifier=None,
                        artifact_type='network')
                    if not success:
                        phantom.error("Adding Artifact failed: {}".format(message))
    except Exception as e:
        # BUGFIX: e.args[1] raised IndexError for single-argument exceptions
        # (e.g. FileNotFoundError from open()), hiding the real error; format
        # the exception itself instead.
        phantom.error("Exception Occurred: {}".format(e))
        return
    # ----- end of added code -----

    # call playbook "local/soc_fork_customer_request", returns the playbook_run_id
    playbook_run_id = phantom.playbook("local/soc_fork_customer_request", container)
    return
def add_artifact_with_tags(cef=None, tags=None, severity=None, container_id=None, label=None, name=None, run_automation=None, field_mapping=None, **kwargs):
    """
    Adds an artifact and updates that artifact with provided tags

    Args:
        cef (CEF type: *)
        tags (CEF type: *)
        severity (CEF type: *)
        container_id (CEF type: phantom container id)
        label (CEF type: *)
        name (CEF type: *)
        run_automation (CEF type: *): Defaults to False
        field_mapping (CEF type: *): valid field_mapping json

    Returns a JSON-serializable object that implements the configured data paths:
        id
    """
    ############################ Custom Code Goes Below This Line #################################
    import json
    import phantom.rules as phantom

    outputs = {}

    # Normalize run_automation to a bool. BUGFIX: guard the .lower() calls with an
    # isinstance check so a non-string truthy value (e.g. bool True) no longer
    # raises AttributeError.
    if not run_automation or (isinstance(run_automation, str) and run_automation.lower() == 'false'):
        run_automation = False
    elif isinstance(run_automation, str) and run_automation.lower() == 'true':
        run_automation = True

    success, message, artifact_id = phantom.add_artifact(
        container=container_id,
        raw_data={},
        cef_data=cef,
        label=label,
        field_mapping=field_mapping,
        name=name,
        severity=severity,
        run_automation=run_automation)

    # tags cannot be set through add_artifact(), so patch them on via REST
    artifact_url = phantom.build_phantom_rest_url('artifact', artifact_id)
    data = {'tags': tags}
    phantom.requests.post(artifact_url, data=json.dumps(data), verify=False)

    # BUGFIX: the documented "id" data path was never populated — the function
    # always returned an empty dict.
    outputs = {'id': artifact_id}

    # Return a JSON-serializable object
    assert json.dumps(outputs)  # Will raise an exception if the :outputs: object is not JSON-serializable
    return outputs
def add_results_to_container(container):
    """Wrap the module-level results_dict in an 'aws' artifact on the given container.

    NOTE(review): relies on a module-level `results_dict` being defined before this
    runs — confirm it exists in this file's scope.
    """
    status, status_message, new_artifact_id = phantom.add_artifact(
        container=container,
        raw_data={},
        cef_data=results_dict,
        label='aws',
        name='AWS SNS Message',
        severity='medium',
        identifier=None,
        artifact_type='aws')
    # original logs the banner and message at error level — preserved as-is
    phantom.error("=== success/error status message ===")
    phantom.debug(status)
    phantom.error(status_message)
    return status
def get_case_note_count(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Poll the container's workbook phases; once all task notes are filled in,
    copy each note into a 'note' artifact and prompt, otherwise loop via no_op_2."""
    # BUGFIX: debug message previously said 'no_op_1() called', naming the wrong block
    phantom.debug('get_case_note_count() called')

    # get the container id and phantom url to format for the request
    container_id = container.get('id', None)
    phantom_url = phantom.get_base_url()
    request_url = "{}/rest/container/{}/phases".format(str(phantom_url), str(container_id))

    # SECURITY FIX: use the platform-authenticated phantom.requests session instead
    # of the hard-coded admin/password credentials the original embedded in source.
    r = phantom.requests.get(request_url, verify=False).json()

    # count how many tasks in the first phase have at least one note
    notes_counter = 0
    for task in r["data"][0]["tasks"]:
        phantom.debug(task)
        if task["notes"]:
            notes_counter = notes_counter + 1

    # NOTE(review): "3" assumes the phase has exactly three tasks — confirm against
    # the workbook this playbook is paired with.
    if notes_counter != 3:
        # not all notes present yet: hand off to the sleep/retry block
        no_op_2(container=container, handle=notes_counter)
    else:
        # all notes present: surface each note's first content entry as an artifact
        for task in r["data"][0]["tasks"]:
            raw = {}
            cef = {}
            cef['container_note'] = task["notes"][0]["content"]
            success, message, artifact_id = phantom.add_artifact(
                container=container, raw_data=raw, cef_data=cef, label='note',
                name='container note', severity='low', identifier=None,
                artifact_type='note')
        prompt_1(container=container)
    return
def artifact_create(container=None, name=None, label=None, severity=None, cef_field=None, cef_value=None, cef_data_type=None, tags=None, run_automation=None, input_json=None, **kwargs):
    """
    Create a new artifact with the specified attributes.

    Args:
        container (CEF type: phantom container id): Container which the artifact will be added to.
        name: The name of the new artifact, which is optional and defaults to "artifact".
        label: The label of the new artifact, which is optional and defaults to "events"
        severity: The severity of the new artifact, which is optional and defaults to "Medium". Typically this is either "High", "Medium", or "Low".
        cef_field: The name of the CEF field to populate in the artifact, such as "destinationAddress" or "sourceDnsDomain". Required only if cef_value is provided.
        cef_value (CEF type: *): The value of the CEF field to populate in the artifact, such as the IP address, domain name, or file hash. Required only if cef_field is provided.
        cef_data_type: The CEF data type of the data in cef_value. For example, this could be "ip", "hash", or "domain". Optional.
        tags: A comma-separated list of tags to apply to the created artifact, which is optional.
        run_automation: Either "true" or "false", depending on whether or not the new artifact should trigger the execution of any playbooks that are set to active on the label of the container the artifact will be added to. Optional and defaults to "false".
        input_json: Optional parameter to modify any extra attributes of the artifact. Input_json will be merged with other inputs. In the event of a conflict, input_json will take precedence.

    Returns a JSON-serializable object that implements the configured data paths:
        artifact_id (CEF type: phantom artifact id): The ID of the created artifact.
    """
    ############################ Custom Code Goes Below This Line #################################
    import json
    import phantom.rules as phantom

    new_artifact = {}
    json_dict = None

    # accept either a bare container id or a whole container dict
    if isinstance(container, int):
        container_id = container
    elif isinstance(container, dict):
        container_id = container['id']
    else:
        raise TypeError("container is neither an int nor a dictionary")

    new_artifact['name'] = name if name else 'artifact'
    new_artifact['label'] = label if label else 'events'
    new_artifact['severity'] = severity if severity else 'Medium'

    # validate that if cef_field or cef_value is provided, the other is also provided
    if (cef_field and not cef_value) or (cef_value and not cef_field):
        raise ValueError("only one of cef_field and cef_value was provided")

    # cef_data should be formatted {cef_field: cef_value}
    if cef_field:
        new_artifact['cef_data'] = {cef_field: cef_value}
        if cef_data_type and isinstance(cef_data_type, str):
            new_artifact['field_mapping'] = {cef_field: [cef_data_type]}

    # run_automation must be "true" or "false" and defaults to "false"
    if run_automation:
        if not isinstance(run_automation, str):
            raise TypeError("run automation must be a string")
        if run_automation.lower() == 'true':
            new_artifact['run_automation'] = True
        elif run_automation.lower() == 'false':
            new_artifact['run_automation'] = False
        else:
            raise ValueError("run_automation must be either 'true' or 'false'")
    else:
        new_artifact['run_automation'] = False

    # BUGFIX: container_id was computed but never added to new_artifact, so
    # add_artifact(**new_artifact) had no destination container. Set it before the
    # input_json merge below so input_json can still take precedence.
    new_artifact['container'] = container_id

    if input_json:
        # ensure valid input_json
        if isinstance(input_json, dict):
            json_dict = input_json
        elif isinstance(input_json, str):
            json_dict = json.loads(input_json)
        else:
            raise ValueError("input_json must be either 'dict' or valid json 'string'")

    if json_dict:
        # Merge dictionaries, using the value from json_dict if there are any conflicting keys
        for json_key in json_dict:
            # extract tags from json_dict since it is not a valid parameter for phantom.add_artifact()
            if json_key == 'tags':
                tags = json_dict[json_key]
            else:
                new_artifact[json_key] = json_dict[json_key]

    # now actually create the artifact
    phantom.debug(
        'creating a new artifact with the following attributes:\n{}'.format(new_artifact))
    success, message, artifact_id = phantom.add_artifact(**new_artifact)
    phantom.debug(
        'add_artifact() returned the following:\nsuccess: {}\nmessage: {}\nartifact_id: {}'
        .format(success, message, artifact_id))
    if not success:
        raise RuntimeError("add_artifact() failed")

    # add the tags in a separate REST call because there is no tags parameter in add_artifact()
    if tags:
        # ROBUSTNESS: tags may arrive as the documented comma-separated string or,
        # via input_json, as a ready-made list — handle both instead of crashing
        # on list.replace().
        if isinstance(tags, str):
            tags = tags.replace(" ", "").split(",")
        url = phantom.build_phantom_rest_url('artifact', artifact_id)
        response = phantom.requests.post(uri=url, json={'tags': tags}, verify=False).json()
        phantom.debug('response from POST request to add tags:\n{}'.format(response))

    # Return the id of the created artifact
    return {'artifact_id': artifact_id}
def parse_email_to_artifact(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Parse a Zscaler notification email (base64-encoded or plain text) into CEF
    fields, then create a new 'events' container holding the resulting artifact."""
    phantom.debug('parse_email_to_artifact() called')

    input_parameter_0 = ""

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # mapping from Zscaler email field labels to CEF field names
    FIELD_TRANSLATOR = {
        'Cloud and Organization': 'environment',
        'File MD5 hash': 'fileHashMd5',
        'Threat Category': 'threatCategory',
        'Threat Name': 'threatName',
        'Transactions involving this content': 'eventLink'
    }

    # the full content of the email is outside any artifact in the container['data'] field
    raw_data = phantom.get_raw_data(container)

    # build regular expressions to parse the Zscaler-specific email format
    allowed_re = re.compile(r"(?<=allowed )[0-9]+")
    quarantined_re = re.compile(r"(?<=quarantined )[0-9]+")
    blocked_re = re.compile(r"(?<=blocked )[0-9]+")

    cef = {}
    raw = {}

    # the content may or may not be base64 encoded so handle both
    # (raw_data contains literal backslash-escaped "\r\n" sequences as text)
    base64index = raw_data.find('base64\\r\\n\\r\\n')
    phantom.debug(base64index)
    if base64index != -1:
        raw_data = raw_data[base64index + 14:]
        raw_data = raw_data[:raw_data.find('\\r\\n\\r\\n')].replace('\\r\\n', '\r\n')
        # BUGFIX: b64decode() returns bytes on Python 3; decode to str so the
        # .split('\n') below works instead of raising TypeError.
        raw_data = base64.b64decode(raw_data).decode('utf-8', errors='replace')
        for line in raw_data.split('\n'):
            kv_pair = line.split(":")
            phantom.debug(str(kv_pair))
            if len(kv_pair) > 1:
                # values such as URLs contain ':' themselves, so re-join the tail
                kv_pair[1] = ':'.join(kv_pair[1:])
                key = kv_pair[0].strip()
                if key == 'First downloaded':
                    # "First downloaded" line carries the timestamp plus allow/
                    # quarantine/block counters
                    kv_pair[1] = kv_pair[1].strip()
                    cef['startTime'] = kv_pair[1][:kv_pair[1].find('.')]
                    cef['timesAllowed'] = allowed_re.findall(kv_pair[1])[0]
                    cef['timesQuarantined'] = quarantined_re.findall(kv_pair[1])[0]
                    cef['timesBlocked'] = blocked_re.findall(kv_pair[1])[0]
                elif key in FIELD_TRANSLATOR:
                    cef[FIELD_TRANSLATOR[key]] = kv_pair[1].strip()
                # ROBUSTNESS: unrecognized "key: value" lines are now skipped
                # instead of raising KeyError on FIELD_TRANSLATOR.
    # if there is no base64, try to parse as plain text using regexes
    else:
        for field_label in FIELD_TRANSLATOR.keys():
            field_re = re.compile(r"{}:[ \\r\\n]+(.*?)\\r\\n".format(field_label))
            re_result = field_re.findall(raw_data)
            if re_result:
                field_value = re_result[0]
                cef[FIELD_TRANSLATOR[field_label]] = field_value
        re_result = re.findall(r"First downloaded: (.*?)\.", raw_data)
        if re_result and len(re_result[0]) < 200:
            cef['startTime'] = re_result[0]
        re_result = allowed_re.findall(raw_data)
        if re_result:
            cef['timesAllowed'] = re_result[0]
        re_result = quarantined_re.findall(raw_data)
        if re_result:
            cef['timesQuarantined'] = re_result[0]
        re_result = blocked_re.findall(raw_data)
        if re_result:
            cef['timesBlocked'] = re_result[0]

    success, message, container_id = phantom.create_container(
        name=container['name'], label='events')
    phantom.add_artifact(container=container_id, raw_data=raw, cef_data=cef,
                         label='event', name='zScaler Alert Artifact',
                         severity='medium', identifier=str(uuid.uuid4()),
                         artifact_type='network')

    ################################################################################
    ## Custom Code End
    ################################################################################
    return
def add_artifact(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Build a synthetic phishing-email artifact (random sender/recipient plus canned
    Mailgun-style headers and a PayPal-themed body) and attach it to the container.

    NOTE(review): relies on module-level `aeTo` / `aeFrom` address lists and a
    module-level `random` import — confirm both exist elsewhere in this file.
    """
    phantom.debug('add_artifact() called')
    #efrom = generate_random_emails(20, 7)
    #eTo = generate_random_emails(20, 7)
    url = ['www.badlink.com' ]  # NOTE(review): unused local — apparently left over from an earlier revision
    raw = {}
    cef = {}
    # pick a random recipient/sender pair from the module-level address pools
    eTo = random.choice(aeTo)
    efrom = random.choice(aeFrom)
    # canned header set mimicking a Mailgun-delivered phishing message
    emailHeader = {'Delivered-To': '*****@*****.**',
                   'From': efrom,
                   'ARC-Seal': '''i=1; a=rsa-sha256; t=1529885666; cv=none; d=google.com; s=arc-20160816; b=CQpzLQywzGbo1pRGG98Ja4x2FTL8ubfppNQg5BFWZLjYkqJOcvJMITXbBt0Jafvb9O i5LC/9Yf5jstXwDKHJUYKkuK40+NiAPFmag/G3+CF01Z7EDHMrnsPQ/6EI29jEES6nxg aWGZhbSiISz6OMVpRfUFztR0SAciSaOWCEiD8lQNhCiq6zWHIEJknYhTegsu/Ud8wTI+ QW91ixIjMJYpOz+SQhMR0QoWn3n7anF1Ny8HEGOKdxirCKE33otb8BGnJif1eH+tzVVQ Lbv4bfyXgJB7QBJgsDCF7YVWicENGuI04KJkPumDZfAFQ7hcBPcV293Bs/ptSI6NyONu +izQ==''',
                   'Detected URL': 'https://paypal.account.myorder-manage.com/signin/',
                   'To': eTo,
                   'X-Mailgun-Sending-Ip': '209.61.151.222',
                   'X-Mailgun-Tag': 'summary',
                   'X-Received': 'by 2002:aca:a94c:: with SMTP id s73-v6mr5539575oie.178.1529885666213; Sun, 24 Jun 2018 17:14:26 -0700 (PDT)',
                   'ARC-Authentication-Results': 'i=1; mx.google.com; dkim=pass [email protected] header.s=mailo header.b=LY45x805; dkim=pass [email protected] header.s=mg header.b=CorTHVEY; spf=pass (google.com: domain of [email protected] designates 209.61.151.222 as permitted sender) smtp.mailfrom="[email protected]"; dmarc=pass (p=NONE sp=NONE dis=NONE) header.from=mxtoolbox.com',
                   'ARC-Message-Signature': 'i=1; a=rsa-sha256; c=relaxed/relaxed; d=google.com; s=arc-20160816; h=mime-version:subject:from:to:message-id:sender:date:dkim-signature :dkim-signature:arc-authentication-results; bh=To1beBo/++WeZttsCE1s+J+qd8OV0VKh759cYATpGwo=; b=ad0TyCFyaVF2pIjMSD8yO6FCH5ZAT+Cxy8NVYshD0wwUCwaXwt7wIjE1IbzhA18Fz1 sgs8fKHQUMyXOmI6CNShzFyhFwzvk/bsetZTtoHxF0W9P72gu5ufSDRmiCovvrGA181N 0csKaQRemuCml+fxIVjtxui/eG0YKycCAr/J937yLZMNuvXyEJqUbhzo1E2jLMEdAIiN jyx3UYjdaO4hRgAn0IUDUMUhNdhVA8MQwi9uTBSHX63Q7m5ke5fPkfakd1sigpI0s63a 3hk1wjVRv84fn9te/Wf7EaFQVrgJ7T7mn4vPGdujLU9iduzklYlzXlPNw3WBSlZGVKJ2 AazA==',
                   'Date': '',
                   'Message-Id': '<20180625001424.1.3D7FC1E792BF8A72>',
                   'Mime-Version': '1.0',
                   'X-Mailgun-Sid': 'WyI3MTUwMyIsICJpY2VidXJnbm90aWZ5QGdtYWlsLmNvbSIsICI5YzQwMyJd',
                   'Received': 'by 2002:ac0:8806:0:0:0:0:0 with SMTP id g6-v6csp3401947img; Sun, 24 Feb 2019, from rs222.mailgun.us (rs222.mailgun.us. [209.61.151.222]) with UTF8SMTPS id d39-v6si4713992otj.152.2018.06.24.17.14.25 (version=TLS1_2 cipher=ECDHE-RSA-AES128-GCM-SHA256 bits=128/128); Sun, 24 Jun 2018 17:14:26 -0700 (PDT), by luna.mailgun.net with HTTP; Mon, 25 Jun 2018 00:14:24 +0000',
                   'Received-SPF': 'pass client-ip=209.61.151.222;',
                   'Sender': efrom,
                   # NOTE(review): this triple-quoted value does not close until after
                   # the text that appears intended as a separate 'DKIM-Signature'
                   # entry — that key is swallowed into this string and never exists
                   # as its own header. Confirm whether that is intentional.
                   'Authentication-Results': '''header.s=mailo header.b=LY45x805; dkim=pass [email protected] header.s=mg header.b=CorTHVEY; spf=pass smtp.mailfrom="[email protected]"; dmarc=pass (p=NONE sp=NONE dis=NONE)', 'DKIM-Signature':'a=rsa-sha256; v=1; c=relaxed/relaxed; d=mailgun.org; q=dns/txt; s=mg; t=1529885665; h=Content-Type: Mime-Version: Subject: From: To: Message-Id: Sender: Date: X-Feedback-Id; bh=To1beBo/++WeZttsCE1s+J+qd8OV0VKh759cYATpGwo=; b=CorTHVEYKdaXjjIua05kaOQ+n90uGMEy+rhlg/5L2W9SmusSRFtLlm4rNdXCub6PQD9PjQhp T3/4NanN9ftDgRlOd1U3l33gN2rJf4x92Ytz/vjLKJOg73JbUJYyRxT9pKP6GBk3XK+MkxPD ZI03CSmfX2Dz3pbNibRcUWvhFH8=''',
                   'X-Feedback-Id': '5266b068fea3983e6007cc1a:mailgun',
                   'Content-Type': 'multipart/alternative; boundary="917d6539ef9143b1b76d94c66a93a3bf"',
                   'X-Google-Smtp-Source': 'ADUXVKJS4kPPUt/4ky1gQgzF2sD9anlyTDaGRxkf6N9q6KOI3wFA2tzA4GwefhlVxyjO3V/+dPte',
                   'Subject': 'Phishing'
                   }
    # assemble the CEF payload for the artifact
    cef['emailHeader'] = emailHeader
    cef['fromEmail'] = efrom
    cef['toEmail'] = eTo
    cef['bodyText'] = '''Response required. Dear User, We emailed you a little while ago to ask for your help in resolving an issue with your PayPal business account. Your account is still temporarily limited because we haven’t heard from you. We noticed some unusual activity with your account. Please check that no one has logged in to your account without your permission. To help us with this and to see what you can and can’t do with your account until the issue is restored, please click here to log in to your account and go to Resolution Center. Attached here also is your account’s activity log for reference. As always, if you need help or have any questions, feel free to contact us. We’re always here to help. Sincerely, PayPal'''
    # attach the synthetic phishing report to the current container
    success, message, artifact_id = phantom.add_artifact(
        container=container, raw_data=raw, cef_data=cef, label='artifact',
        name='Reported by: ' + efrom, severity='high', identifier=None,
        artifact_type='network')
    return
def decision_2(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None):
    """Rate-limit ticket creation per source address.

    For each filtered artifact's cef.src, a persistent object (phantom.save_object)
    tracks count / first-seen time / ticket / ignore state. When an address exceeds
    the threshold inside the time window, a 'create' or 'update' labelled artifact
    is added, and the matching downstream ticket block is invoked.

    NOTE(review): REPEAT, WINDOW and LIMIT are assumed to be module-level constants
    (seconds, seconds, count respectively) — confirm they are defined in this file.
    """
    phantom.debug('decision_2() called')
    action = ''
    pb_info = phantom.get_playbook_info()
    name_value = container.get('name', None)
    # NOTE(review): pb_info[0] is indexed BEFORE the `if not pb_info` guard below,
    # so an empty result would raise IndexError before the guard fires.
    playbook_name = pb_info[0].get('name', None)
    container_id = container['id']
    if not pb_info:
        return
    filtered_artifacts_data_1 = phantom.collect2(
        container=container,
        datapath=['filtered-data:filter_2:condition_1:artifact:*.cef'])
    phantom.debug('TOTAL number of cef.src artifacts is count: {}'.format(
        len(filtered_artifacts_data_1)))
    # local_tz = timezone('America/New_York')
    # trim trailing characters from the container timestamp before parsing
    start = (container['start_time'])[:-3]
    # start = (container['start_time']).strip('+00')
    start_time = datetime.strptime(
        start, '%Y-%m-%d %H:%M:%S.%f')  # format 2017-10-17 11:32:00.839350
    # start_time = local_tz.localize(start_time)
    for filtered_artifacts_item_1 in filtered_artifacts_data_1:
        item_1 = filtered_artifacts_item_1[0]['src']
        phantom.debug('ITEM to be processed: {}'.format(item_1))
        if item_1:
            # look up the persisted state for this source address
            addr = phantom.get_object(key=str(item_1), playbook_name=playbook_name)
            if not addr:
                # first sighting: persist a fresh record with count 1
                phantom.debug('SAVE NEW count: {} {} {} '.format(
                    1, start_time.strftime("%c"), start_time.strftime("%c")))
                phantom.save_object(key=str(item_1),
                                    value={
                                        'count': 1,
                                        'start': start_time.strftime("%c"),
                                        'end': start_time.strftime("%c"),
                                        'description': name_value,
                                        'ticket': '',
                                        'ignore': False
                                    },
                                    auto_delete=False,
                                    playbook_name=playbook_name)
            else:
                # known address: bump the count and re-evaluate the thresholds
                count = addr[0]['value']['count'] + 1
                ignore = addr[0]['value']['ignore']
                ticket = addr[0]['value']['ticket']
                saved_start = addr[0]['value']['start']
                saved_start_time = datetime.strptime(
                    saved_start, '%a %b %d %H:%M:%S %Y'
                )  # format Mon Oct 16 11:46:30 2017 or '%Y-%m-%d %H:%M:%S.%f'
                # saved_start_time = local_tz.localize(start_time)
                delta = abs((start_time - saved_start_time)).total_seconds()  # .seconds
                phantom.debug(
                    'DECISION start_time {} - saved_start_time {} = {}s '.format(
                        start_time, saved_start_time, delta))
                # suppression expires once REPEAT seconds have elapsed
                if ignore and (delta > REPEAT):
                    phantom.debug(
                        'IGNORE {} start_time {} - saved_start_time {} = {}s '.format(
                            ignore, start_time, saved_start_time, delta))
                    ignore = False
                    saved_start = start_time.strftime("%c")
                if not ignore:
                    if (ticket == '') and (delta > WINDOW):
                        # window elapsed without a ticket: restart the counting window
                        saved_start = start_time.strftime("%c")
                        count = 0
                        phantom.debug('RESET time/co ticket {} delta {}s {} <- {}'.format(
                            ticket, delta, saved_start, start_time.strftime("%c")))
                    elif (count > LIMIT) and (delta < WINDOW):
                        # threshold breached inside the window: emit a ticket artifact
                        count = 0
                        saved_start = start_time.strftime("%c")
                        raw = {}
                        cef = {}
                        cef['cs3'] = filtered_artifacts_item_1[0]['cs3']
                        if (ticket == ''):
                            # no ticket yet -> 'create' artifact routes to create_ticket_1
                            phantom.debug('OPENED {} opened {} {}s ago '.format(
                                item_1, saved_start_time, delta))
                            cef['cn1'] = item_1
                            success, message, artifact_id = phantom.add_artifact(
                                container=container, raw_data=raw, cef_data=cef,
                                label='create', name='ticket', severity='high',
                                identifier=None, artifact_type='host')
                        else:
                            # existing ticket -> 'update' artifact routes to update_ticket_1
                            phantom.debug('REOPEN {} reopen {} {}s ago '.format(
                                ticket, saved_start_time, delta))
                            cef['cn2'] = item_1
                            success, message, artifact_id = phantom.add_artifact(
                                container=container, raw_data=raw, cef_data=cef,
                                label='update', name='ticket', severity='high',
                                identifier=None, artifact_type='host')
                        # suppress further artifacts for this address until REPEAT expires
                        # NOTE(review): original (flattened) indentation is ambiguous —
                        # this is placed after the create/update branch; confirm it was
                        # not intended for the 'update' branch only.
                        ignore = True
                # persist the updated state for this address
                phantom.debug('SAVE OLD count: {0} ticket: {1} {2} {3} {4}s'.format(
                    count, ticket, saved_start, start_time.strftime("%c"), delta))
                phantom.save_object(key=str(item_1),
                                    value={
                                        'count': count,
                                        'start': saved_start,
                                        'end': start_time.strftime("%c"),
                                        'description': name_value,
                                        'ticket': ticket,
                                        'ignore': ignore
                                    },
                                    auto_delete=False,
                                    playbook_name=playbook_name)
    # check for 'if' condition 1
    matched_artifacts_1, matched_results_1 = phantom.condition(
        container=container,
        scope='all',
        conditions=[
            ["artifact:*.label", "==", "create"],
        ])
    # call connected blocks if condition 1 matched
    if matched_artifacts_1 or matched_results_1:
        create_ticket_1(action=action, success=success, container=container,
                        results=results, handle=handle)
        return
    # check for 'elif' condition 2
    matched_artifacts_2, matched_results_2 = phantom.condition(
        container=container,
        scope='all',
        conditions=[
            ["artifact:*.label", "==", "update"],
        ])
    # call connected blocks if condition 2 matched
    if matched_artifacts_2 or matched_results_2:
        update_ticket_1(action=action, success=success, container=container,
                        results=results, handle=handle)
        return
    return
def adding_comment_artifacts(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs):
    """Record public source IPs, reverse-DNS names, and geo country codes in a
    comment artifact, then save run data and continue to the join block."""
    phantom.debug('adding_comment_artifacts() called')

    results_data_1 = phantom.collect2(
        container=container,
        datapath=[
            'SRC_Public_Reverse_DNS:action_result.parameter.ip',
            'SRC_Public_Reverse_DNS:action_result.summary.hostname'
        ],
        action_results=results)
    results_data_2 = phantom.collect2(
        container=container,
        datapath=['Source_geolocate:action_result.data.*.country_iso_code'],
        action_results=results)

    results_item_1_0 = [item[0] for item in results_data_1]
    results_item_1_1 = [item[1] for item in results_data_1]
    results_item_2_0 = [item[0] for item in results_data_2]

    adding_comment_artifacts__comments = None

    ################################################################################
    ## Custom Code Start
    ################################################################################

    ip_address = results_item_1_0
    dns_hostname = results_item_1_1
    country_code = results_item_2_0
    phantom.debug(ip_address)
    phantom.debug(dns_hostname)
    phantom.debug(country_code)

    # note: the CEF values are the full lists from the action results
    cef = {}
    cef['sourceAddress'] = ip_address
    cef['sourceDnsDomain'] = dns_hostname
    cef['countryCode'] = country_code

    success, message, artifact_id = phantom.add_artifact(
        container=container, raw_data=None, cef_data=cef, label="comment",
        name="Source Public IP", severity="medium", identifier=None,
        artifact_type=None, field_mapping=None, trace=False,
        run_automation=False)
    phantom.debug('artifact added as id: ' + str(artifact_id))

    # BUGFIX: removed an early `return` here that made the generated footer below
    # unreachable — the run data was never saved and the downstream join block
    # (reached by the sibling dstPrivateArtifacts path) was skipped on this path.

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.save_run_data(key='adding_comment_artifacts:comments',
                          value=json.dumps(adding_comment_artifacts__comments))

    join_playbook_local_local_Message_Print_1(container=container)
    return