def _funct_zia_add_url_category_function(self, event, *args, **kwargs):
    """Function: Add a custom URL category via the Zscaler Internet Access (ZIA)
    API, then run an activation of the pending configuration change.

    Required inputs: zia_configured_name, zia_super_category, zia_urls,
    zia_custom_category, zia_activate. Optional: zia_keywords.
    Yields StatusMessages for progress and a FunctionResult with the
    ResultPayload (API response plus activation result).
    """
    try:
        rp = ResultPayload(PACKAGE_NAME, **kwargs)

        # Get the wf_instance_id of the workflow this Function was called in
        wf_instance_id = event.message["workflow_instance"][
            "workflow_instance_id"]
        yield StatusMessage(
            "Starting '{0}' running in workflow '{1}'".format(
                FN_NAME, wf_instance_id))

        # Get and validate required function inputs:
        fn_inputs = validate_fields([
            "zia_configured_name", "zia_super_category", "zia_urls",
            "zia_custom_category", "zia_activate"
        ], kwargs)
        # Fix: the original mixed a str.format placeholder ('{0}') with
        # %-style lazy logging args, so the literal "'{0}'" was logged and
        # FN_NAME was never substituted.
        LOG.info("'%s' inputs: %s", FN_NAME, fn_inputs)

        yield StatusMessage(
            "Validations complete. Starting business logic")

        params = {
            "configured_name": fn_inputs.get("zia_configured_name"),
            "super_category": fn_inputs.get("zia_super_category"),
            "urls": fn_inputs.get("zia_urls"),
            "custom_category": fn_inputs.get("zia_custom_category"),
            # zia_keywords is optional, hence absent from the validated list.
            "keywords": fn_inputs.get("zia_keywords"),
        }
        activate = fn_inputs.get("zia_activate")

        ziacli = ZiaClient(self.opts, self.fn_options)
        result = {"response": ziacli.add_url_category(**params)}
        # Activation is reported alongside the add-category response.
        result["activation"] = ziacli.activate(activate)

        yield StatusMessage(
            "Finished '{0}' that was running in workflow '{1}'".format(
                FN_NAME, wf_instance_id))

        results = rp.done(True, result)
        LOG.info("'%s' complete", FN_NAME)
        yield StatusMessage(
            "Returning results for function '{}' with parameters '{}'.".
            format(
                FN_NAME, ", ".join("{!s}={!r}".format(k, v)
                                   for (k, v) in fn_inputs.items())))

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as e:
        yield FunctionError(e)
def _exchange_online_query_emails_function(self, event, *args, **kwargs):
    """Function: This function will query Exchange Online to find emails matching the specified input parameters."""
    try:
        # Result payload keyed to this package's config section.
        rp = ResultPayload(CONFIG_DATA_SECTION, **kwargs)

        # Only the mailbox address is mandatory; all filters are optional.
        validate_fields(['exo_email_address'], kwargs)

        # Pull the function parameters out of kwargs.
        email_address = kwargs.get('exo_email_address')       # text
        mail_folders = kwargs.get('exo_mail_folders')         # text
        sender = kwargs.get('exo_email_address_sender')       # text
        start_date = kwargs.get('exo_start_date')             # datetime
        end_date = kwargs.get('exo_end_date')                 # datetime
        has_attachments = kwargs.get('exo_has_attachments')   # bool
        message_subject = kwargs.get('exo_message_subject')   # text
        message_body = kwargs.get('exo_message_body')         # text

        LOG.info(u"exo_email_address: %s", email_address)
        LOG.info(u"exo_mailfolders: %s", mail_folders)
        LOG.info(u"exo_email_address_sender: %s", sender)
        LOG.info(u"exo_start_date: %s", start_date)
        LOG.info(u"exo_end_date: %s", end_date)
        LOG.info(u"exo_email_has_attachments: %s", has_attachments)
        LOG.info(u"exo_message_subject: %s", message_subject)
        LOG.info(u"exo_message_body: %s", message_body)

        yield StatusMessage(u"Starting message query.")

        # Build the MS Graph helper from app.config settings.
        graph_helper = MSGraphHelper(
            self.options.get("microsoft_graph_token_url"),
            self.options.get("microsoft_graph_url"),
            self.options.get("tenant_id"),
            self.options.get("client_id"),
            self.options.get("client_secret"),
            self.options.get("max_messages"),
            self.options.get("max_users"),
            RequestsCommon(self.opts, self.options).get_proxies())

        # Run the query against MS Graph.
        email_results = graph_helper.query_messages(
            email_address, mail_folders, sender, start_date, end_date,
            has_attachments, message_subject, message_body)

        # Wrap query results in the standard results payload.
        results = rp.done(True, email_results)

        yield StatusMessage(u"Returning results from query.")
        LOG.debug(json.dumps(results['content']))

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as err:
        LOG.error(err)
        yield FunctionError(err)
def _app_function(self, fn_inputs):
    """
    Function: Get the content of functions, scripts, tasks and sub-workflows used within a workflow
    Inputs:
        -   fn_inputs.pb_id
    """
    yield self.status_message(
        "Starting App Function: '{0}'".format(FN_NAME))

    # Look up the playbook definition by its id.
    playbook_xml = get_playbook(self.rest_client(), fn_inputs.pb_id)

    if playbook_xml:
        # Extract the elements used by the playbook and return them.
        yield FunctionResult(get_process_elements(playbook_xml))
    else:
        msg = "playbook_id not found: {}".format(fn_inputs.pb_id)
        yield self.status_message(msg)
        yield FunctionResult({}, success=False, reason=msg)
def _exchange_online_delete_email_function(self, event, *args, **kwargs):
    """Function: Delete a message in the specified user's mailbox."""
    try:
        # Standard results payload for this package.
        rp = ResultPayload(CONFIG_DATA_SECTION, **kwargs)

        # Mailbox and message id are both required to delete.
        validate_fields(['exo_email_address', 'exo_messages_id'], kwargs)

        email_address = kwargs.get('exo_email_address')    # text
        mailfolders_id = kwargs.get('exo_mailfolders_id')  # text
        messages_id = kwargs.get('exo_messages_id')        # text

        LOG.info(u"exo_email_address: %s", email_address)
        LOG.info(u"exo_mailfolders_id: %s", mailfolders_id)
        LOG.info(u"exo_messages_id: %s", messages_id)

        yield StatusMessage(
            u"Starting delete message for email address: {}".format(
                email_address))

        # Build the MS Graph helper from app.config settings.
        graph_helper = MSGraphHelper(
            self.options.get("microsoft_graph_token_url"),
            self.options.get("microsoft_graph_url"),
            self.options.get("tenant_id"),
            self.options.get("client_id"),
            self.options.get("client_secret"),
            self.options.get("max_messages"),
            self.options.get("max_users"),
            RequestsCommon(self.opts, self.options).get_proxies())

        # Ask MS Graph to delete the message.
        response = graph_helper.delete_message(email_address,
                                               mailfolders_id, messages_id)

        # A successful delete returns HTTP 204 (No Content); otherwise the
        # error body from Graph is passed through as the result content.
        success = response.status_code == 204
        response_json = {'value': True} if success else response.json()

        results = rp.done(success, response_json)

        yield StatusMessage(
            u"Returning delete results for email address: {}".format(
                email_address))

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as err:
        LOG.error(err)
        yield FunctionError(err)
def _call_rest_api_function(self, event, *args, **kwargs):
    """Function: Call a REST web service. The function parameters determine
    the type of call (GET, POST, etc), the URL, and optionally the headers
    and body.

    Returns a FunctionResult summarizing the HTTP response (status, headers,
    cookies, body text, parsed JSON when available, timing).
    """
    try:
        # Get the function parameters:
        rest_method = self.get_select_param(kwargs.get(
            "rest_method"))  # select, values: "GET", "HEAD", "POST", "PUT", "DELETE", "OPTIONS"
        rest_url = kwargs.get("rest_url")  # text
        rest_headers = self.get_textarea_param(
            kwargs.get("rest_headers"))  # textarea
        rest_body = self.get_textarea_param(
            kwargs.get("rest_body"))  # textarea
        rest_verify = kwargs.get("rest_verify")  # boolean

        log = logging.getLogger(__name__)
        log.info("rest_method: %s", rest_method)
        log.info("rest_url: %s", rest_url)
        log.info("rest_headers: %s", rest_headers)
        log.info("rest_body: %s", rest_body)
        log.info("rest_verify: %s", rest_verify)

        # Read newline-separated "Header: value" lines into a dictionary;
        # lines without a colon are silently skipped.
        headers_dict = {}
        if rest_headers is not None:
            for line in rest_headers.split("\n"):
                keyval = line.strip().split(":", 1)
                if len(keyval) == 2:
                    headers_dict[keyval[0].strip()] = keyval[1].strip()

        resp = requests.request(rest_method,
                                rest_url,
                                headers=headers_dict,
                                data=rest_body,
                                verify=rest_verify)

        # Fix: narrowed the original bare `except:` — only a JSON decode
        # failure means "the response has no JSON body"; anything else
        # should propagate to the outer handler.
        try:
            response_json = resp.json()
        except ValueError:
            response_json = None

        results = {
            "ok": resp.ok,
            "url": resp.url,
            "status_code": resp.status_code,
            "reason": resp.reason,
            "cookies": dict(resp.cookies),
            "headers": dict(resp.headers),
            # elapsed is reported in whole milliseconds.
            "elapsed": int(resp.elapsed.total_seconds() * 1000.0),
            "apparent_encoding": resp.apparent_encoding,
            "text": resp.text,
            "json": response_json,
            "links": resp.links,
        }

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
def _ansible_tower_list_job_templates_function(self, event, *args, **kwargs):
    """Function: Run an ansible module outside of the job template.

    Submits an ad-hoc command to the Ansible Tower API and returns the
    Tower response wrapped in a ResultPayload.
    """
    try:
        # Fix: ("url") is just the string "url" — a one-element tuple
        # needs a trailing comma.
        validate_fields(("url",),
                        self.options)  # validate key app.config settings

        # Get the function parameters:
        tower_hosts = kwargs.get("tower_hosts")  # text
        tower_module = self.get_select_param(
            kwargs.get("tower_module"))  # text
        tower_arguments = kwargs.get("tower_arguments")  # text
        tower_inventory = kwargs.get("tower_inventory")  # number
        tower_credential = kwargs.get("tower_credential")  # number

        log = logging.getLogger(__name__)
        log.info("tower_hosts: %s", tower_hosts)
        log.info("tower_module: %s", tower_module)
        log.info("tower_arguments: %s", tower_arguments)
        log.info("tower_inventory: %s", tower_inventory)
        log.info("tower_credential: %s", tower_credential)

        result = ResultPayload(SECTION_HDR, **kwargs)

        yield StatusMessage("starting...")

        # Build the ad-hoc command endpoint URL.
        url = "/".join(
            (clean_url(self.options['url']), TOWER_API_BASE, AD_HOC_URL))
        # common auth settings and CA bundle from app.config
        basic_auth, cafile = get_common_request_items(self.options)

        arguments = {
            "module_name": tower_module,
            "limit": tower_hosts,
            "module_args": tower_arguments,
            "inventory": tower_inventory,
            "credential": tower_credential
        }

        # Fix: RequestsCommon was constructed twice; once is enough.
        rc = RequestsCommon(self.opts, self.options)
        results = rc.execute_call_v2("post",
                                     url,
                                     auth=basic_auth,
                                     json=arguments,
                                     headers=JSON_HEADERS,
                                     verify=cafile)

        result_payload = result.done(True, results.json())
        yield StatusMessage("done...")

        # Produce a FunctionResult with the results
        yield FunctionResult(result_payload)
    except Exception:
        yield FunctionError()
def _funct_zia_remove_from_blocklist_function(self, event, *args, **kwargs):
    """Function: Remove URLs or IP addresses from the blocklist.
    See link for URL guidelines:
    https://help.zscaler.com/zia/url-format-guidelines

    Required inputs: zia_blocklisturls, zia_activate. Yields a
    FunctionResult with the blocklist-action response and activation result.
    """
    try:
        rp = ResultPayload(PACKAGE_NAME, **kwargs)

        # Get the wf_instance_id of the workflow this Function was called in
        wf_instance_id = event.message["workflow_instance"][
            "workflow_instance_id"]
        yield StatusMessage(
            "Starting '{0}' running in workflow '{1}'".format(
                FN_NAME, wf_instance_id))

        # Get and validate required function inputs:
        fn_inputs = validate_fields(["zia_blocklisturls", "zia_activate"],
                                    kwargs)
        # Fix: the original mixed a str.format placeholder ('{0}') with
        # %-style lazy logging args, so the literal "'{0}'" was logged and
        # FN_NAME was never substituted.
        LOG.info("'%s' inputs: %s", FN_NAME, fn_inputs)

        yield StatusMessage(
            "Validations complete. Starting business logic")

        blocklisturls = fn_inputs.get("zia_blocklisturls")
        activate = fn_inputs.get("zia_activate")

        ziacli = ZiaClient(self.opts, self.fn_options)
        result = {
            "response":
            ziacli.blocklist_action(blocklisturls, "REMOVE_FROM_LIST")
        }
        # Activation is reported alongside the blocklist response.
        result["activation"] = ziacli.activate(activate)

        yield StatusMessage(
            "Finished '{0}' that was running in workflow '{1}'".format(
                FN_NAME, wf_instance_id))

        results = rp.done(True, result)
        LOG.info("'%s' complete", FN_NAME)
        yield StatusMessage(
            "Returning results for function '{}' with parameters '{}'.".
            format(
                FN_NAME, ", ".join("{!s}={!r}".format(k, v)
                                   for (k, v) in fn_inputs.items())))

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as e:
        yield FunctionError(e)
def _microsoft_security_graph_update_alert_function(
        self, event, *args, **kwargs):
    """Function: Update an alert in the Microsoft Security Graph."""
    options = self.options
    ms_graph_helper = self.Microsoft_security_graph_helper
    try:
        start_time = time.time()
        yield StatusMessage(
            "Starting Microsoft security graph update alert function...")

        # Get the function parameters:
        alert_id = kwargs.get(
            "microsoft_security_graph_alert_id")  # text
        alert_data = self.get_textarea_param(
            kwargs.get("microsoft_security_graph_alert_data"))  # textarea

        # Both inputs are mandatory; fail fast if either is missing.
        if alert_id is None:
            raise ValueError(
                "microsoft_security_graph_alert_id is required to run this function."
            )
        log.info("microsoft_security_graph_alert_id: %s", alert_id)

        if alert_data is None:
            raise ValueError(
                "microsoft_security_graph_alert_data is required to run this function"
            )
        log.info("microsoft_security_graph_alert_data: %s", alert_data)

        # Submit the update to the Security Graph.
        r = update_alert(options.get("microsoft_graph_url"),
                         ms_graph_helper, alert_id, alert_data)
        if not r:
            raise FunctionError("Request failed, please check the log.")

        yield StatusMessage(
            "Microsoft security graph update alert function complete...")
        end_time = time.time()

        # Echo the inputs, timing and response content back to the caller.
        results = {
            "inputs": {
                "microsoft_security_graph_alert_id": alert_id,
                "microsoft_security_graph_alert_data": alert_data
            },
            "run_time": end_time - start_time,
            "content": r.json()
        }

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as e:
        yield FunctionError(e)
def _task_utils_create_function(self, event, *args, **kwargs):
    """Function: A function which can be used to create a custom task using
    the REST API.

    Parses task_utils_payload as JSON, fills in a task name if one is not
    already present, POSTs the task to the incident, and returns the API
    response in a ResultPayload.
    """
    try:
        payload = ResultPayload("task_utils_create", **kwargs)

        # Get the function parameters:
        incident_id = get_function_input(kwargs, "incident_id")  # number
        task_name = get_function_input(kwargs, "task_name",
                                       optional=True)  # text
        task_utils_payload = self.get_textarea_param(
            kwargs.get("task_utils_payload"))  # textarea

        log = logging.getLogger(__name__)
        log.info("incident_id: %s", incident_id)
        log.info("task_name: %s", task_name)
        log.info("task_utils_payload: %s", task_utils_payload)

        try:
            task_utils_payload = json.loads(task_utils_payload)
        except Exception as json_exception:
            # Fix: the original assigned a tuple ("msg {}", exc) instead of
            # formatting the message.
            err_msg = "Could not load task_utils_payload as JSON. Error: {}".format(
                json_exception)
            log.error(err_msg)
            raise TypeError(err_msg)
        else:
            log.debug(
                "Successfully parsed task_utils_payload as valid JSON")

        resilient_client = self.rest_client()

        # Keep task_utils_payload["name"] if it is already set; otherwise
        # use task_name when provided, falling back to a default name.
        # Fix: the original ternary overwrote an already-set name with
        # "Default Task Name", contradicting its own comment.
        if not task_utils_payload.get("name"):
            task_utils_payload["name"] = task_name if task_name else "Default Task Name"
        log.debug("New task will be saved with name %s",
                  task_utils_payload["name"])

        yield StatusMessage("Posting to API")
        try:
            task_response = resilient_client.post(
                '/incidents/{}/tasks'.format(incident_id),
                task_utils_payload)
        except Exception as add_note_exception:
            # Fix: same tuple-instead-of-format defect as above.
            err_msg = "Encountered exception while trying to create task. Error: {}".format(
                add_note_exception)
            raise ValueError(err_msg)

        log.info("Response from Resilient %s", task_response)
        yield StatusMessage("Created task with ID: {}".format(
            task_response.get('id', 'No task ID found')))

        results = payload.done(success=True,
                               content={"task": task_response})

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
def _utilities_xml_transformation_function(self, event, *args, **kwargs):
    """Function: Perform a transformation of an xml document based on a
    given stylesheet.

    The stylesheet name is resolved against the configured XML directory;
    the transformed document is returned as UTF-8 text in results["content"].
    """
    try:
        # Get the function parameters:
        xml_source = kwargs.get("xml_source")  # text
        xml_stylesheet = kwargs.get("xml_stylesheet")  # text

        validate_fields(("xml_source", "xml_stylesheet"), kwargs)

        # confirm that our required parameter exists and is a directory
        if not (self.options.get(FunctionComponent.XML_DIR)
                and os.path.isdir(
                    self.options.get(FunctionComponent.XML_DIR))):
            raise ValueError(
                "missing or incorrectly specified configuration property: {}"
                .format(FunctionComponent.XML_DIR))

        log = logging.getLogger(__name__)
        log.info("xml_source: %s", xml_source)
        log.info("xml_stylesheet: %s", xml_stylesheet)

        # get the stylesheet
        stylesheet = os.path.join(
            self.options.get(FunctionComponent.XML_DIR), xml_stylesheet)
        if not (os.path.exists(stylesheet)
                and os.path.isfile(stylesheet)):
            raise ValueError(
                "stylesheet file not found: {}".format(stylesheet))

        yield StatusMessage("starting...")
        parser = etree.XMLParser(ns_clean=True,
                                 recover=True,
                                 encoding="utf-8")

        # Fix: read the xsl file via a context manager so the file handle
        # is closed (the original leaked it).
        with open(stylesheet, mode="rb") as xsl_file:
            xsl = xsl_file.read()
        xsl_root = defused_etree.fromstring(xsl, parser=parser)
        transform = etree.XSLT(xsl_root)

        # read xml
        xml_root = defused_etree.fromstring(xml_source.encode("utf-8"),
                                            parser=parser)

        # transform xml with xslt
        transformation_doc = transform(xml_root)
        # return transformation result
        result = etree.tostring(transformation_doc)

        results = {"content": result.decode("utf-8")}

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
def _function_cve_browse_function(self, event, *args, **kwargs):
    """Function: A Function to Browse Common Vulnerability Exposures Vendors
    and Product & Database information from https://cve.circl.lu Data Base.

    cve_browse_criteria selects the API call: values containing 'browse'
    list vendors/products (optionally scoped to cve_vendor); values
    containing 'db' return database information.
    """
    try:
        # Get the function parameters:
        cve_browse_criteria = kwargs.get("cve_browse_criteria")  # text
        if cve_browse_criteria:
            cve_browse_criteria = cve_browse_criteria.strip()
        cve_vendor = kwargs.get("cve_vendor")  # text
        if cve_vendor:
            cve_vendor = cve_vendor.strip()

        # Variables to Store Parsed CVE API Data
        result_data_dict = dict()

        log = logging.getLogger(__name__)
        log.info("cve_browse_criteria: %s", cve_browse_criteria)
        log.info("cve_vendor: %s", cve_vendor)

        yield StatusMessage(
            "Getting list of Vendors and Products from the CVE Database")

        # Fix: guard against a missing criteria — the original called
        # .lower() on None and raised AttributeError instead of the
        # intended ValueError.
        criteria = cve_browse_criteria.lower() if cve_browse_criteria else ""
        if 'browse' in criteria:
            browse_data = self._browse_cve_api(vendor_name=cve_vendor)
        elif 'db' in criteria:
            browse_data = self._cve_db_information()
        else:
            raise ValueError("CVE Browse Criteria is not recognized..!")

        # Defining a key in result dictionary to store parsed data
        result_data_dict['content'] = []
        # Type Of the rest api call like browse,search,specific cve,last, db info
        api_call_type = browse_data['api_call']
        # Rest Api Response Data
        browse_data_content = browse_data['content']

        if api_call_type == 'browse':
            result_data_dict['api_call'] = 'browse'
            _browse_dict_list = self._parse_browse_results(
                api_data=browse_data_content)
            result_data_dict['content'].extend(_browse_dict_list)
        elif api_call_type == 'db':
            result_data_dict['api_call'] = 'db'
            result_data_dict['content'].append(browse_data_content)

        log.debug(
            "Vendor and Products Received from CVE Database: {}".format(
                result_data_dict))
        yield StatusMessage("done...")

        # Produce a FunctionResult with the results
        yield FunctionResult(result_data_dict)
    except Exception as er:
        yield FunctionError(er)
def _fn_amp_get_file_list_files_function(self, event, *args, **kwargs):
    """Function: Returns a list of items for a particular file_list. You need to provide file_list_guid to retrieve these items."""
    try:
        # Get the function parameters:
        amp_file_list_guid = kwargs.get("amp_file_list_guid")  # text
        amp_file_sha256 = kwargs.get("amp_file_sha256")        # text
        amp_limit = kwargs.get("amp_limit")                    # number
        amp_offset = kwargs.get("amp_offset")                  # number

        log = logging.getLogger(__name__)
        log.info("amp_file_list_guid: %s", amp_file_list_guid)
        log.info("amp_file_sha256: %s", amp_file_sha256)
        log.info("amp_limit: %s", amp_limit)
        log.info("amp_offset: %s", amp_offset)

        # The file list guid is the only mandatory input.
        if is_none(amp_file_list_guid):
            raise ValueError(
                "Required parameter 'amp_file_list_guid' not set.")

        yield StatusMessage(
            "Running Cisco AMP for endpoints get file lists files by guid..."
        )

        params = {
            "file_list_guid": amp_file_list_guid,
            "file_sha256": amp_file_sha256,
            "limit": amp_limit,
            "offset": amp_offset
        }
        validate_params(params)

        # Query AMP, following pagination to collect the full result set.
        amp = Ampclient(self.options, RATE_LIMITER)
        rtn = amp.get_paginated_total(amp.get_file_list_files, **params)

        query_execution_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        # Add in "query_execution_time" and "ip_address" to result to facilitate post-processing.
        results = {
            "response": rtn,
            "query_execution_time": query_execution_time,
            "input_params": params
        }

        yield StatusMessage(
            "Returning 'file list files' results for file list guid '{}'.".
            format(params["file_list_guid"]))
        log.debug(json.dumps(results))

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        log.exception(
            "Exception in Resilient Function for Cisco AMP for endpoints.")
        yield FunctionError()
def _cisco_asa_remove_network_object_from_network_object_group_function(
        self, event, *args, **kwargs):
    """Function: Remove a network object from a Cisco ASA network object
    group.

    The firewall is selected by name from app.config; the object to remove
    is identified by its kind, value and id within the given group.
    """
    try:
        LOG = logging.getLogger(__name__)
        rc = RequestsCommon(self.opts, self.fn_options)
        rp = ResultPayload(PACKAGE_NAME, **kwargs)

        yield StatusMessage("Starting '{0}'".format(FN_NAME))

        # Get the function parameters
        firewall_name = kwargs.get("cisco_asa_firewall")  # text
        network_object_group = kwargs.get(
            "cisco_asa_network_object_group")  # text
        network_object_value = kwargs.get(
            "cisco_asa_network_object_value")  # text
        network_object_kind = kwargs.get(
            "cisco_asa_network_object_kind")  # text
        network_object_id = kwargs.get(
            "cisco_asa_network_object_id")  # text

        LOG.info(u"cisco_asa_firewall: %s", firewall_name)
        LOG.info(u"cisco_asa_network_object_group: %s",
                 network_object_group)
        LOG.info(u"cisco_asa_network_object_value: %s",
                 network_object_value)
        # Fix: the two log labels below misspelled "network" as "newtork".
        LOG.info(u"cisco_asa_network_object_kind: %s", network_object_kind)
        LOG.info(u"cisco_asa_network_object_id: %s", network_object_id)

        # Get the the options for this firewall.
        firewall_options = self.firewalls.get_firewall(firewall_name)

        # Initialize the Cisco ASA object.
        asa = CiscoASAClient(firewall_name, self.fn_options,
                             firewall_options, rc)

        yield StatusMessage(
            "Validations complete. Remove the network object.")

        # Call the ASA API to remove the network object from this group.
        success, reason = asa.remove_from_network_object_group(
            network_object_group, network_object_kind,
            network_object_value, network_object_id)

        results = rp.done(success, reason)
        yield StatusMessage(
            "Artifact removed network object group: {0}".format(success))
        LOG.info("'%s' complete: success = %s", FN_NAME, success)

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as e:
        yield FunctionError(e)
def _exchange_online_send_message_function(self, event, *args, **kwargs):
    """Function: This function will create a message and send to the specified recipients."""
    try:
        rp = ResultPayload(CONFIG_DATA_SECTION, **kwargs)

        # Sender mailbox and recipient list are required.
        validate_fields(['exo_email_address', 'exo_recipients'], kwargs)

        # Get the function parameters:
        email_address = kwargs.get("exo_email_address")      # text
        recipients = kwargs.get("exo_recipients")            # text
        message_subject = kwargs.get("exo_message_subject")  # text
        message_body = kwargs.get("exo_message_body")        # text

        log = logging.getLogger(__name__)
        log.info(u"exo_email_address: %s", email_address)
        log.info(u"exo_recipients: %s", recipients)
        log.info(u"exo_message_subject: %s", message_subject)
        log.info(u"exo_message_body: %s", message_body)

        yield StatusMessage(u"Starting send message from email address: {}".format(email_address))

        # Build the MS Graph helper from app.config settings, with
        # retry/batching limits falling back to module defaults.
        graph_helper = MSGraphHelper(
            self.options.get("microsoft_graph_token_url"),
            self.options.get("microsoft_graph_url"),
            self.options.get("tenant_id"),
            self.options.get("client_id"),
            self.options.get("client_secret"),
            self.options.get("max_messages"),
            self.options.get("max_users"),
            self.options.get("max_retries_total", MAX_RETRIES_TOTAL),
            self.options.get("max_retries_backoff_factor", MAX_RETRIES_BACKOFF_FACTOR),
            self.options.get("max_batched_requests", MAX_BATCHED_REQUESTS),
            RequestsCommon(self.opts, self.options).get_proxies())

        # Call MS Graph API to send the message
        response = graph_helper.send_message(email_address, recipients,
                                             message_subject, message_body)

        # A successful send returns HTTP 202 (Accepted) with an empty body;
        # otherwise the Graph error body is passed through.
        success = response.status_code == 202
        response_json = {'value': True} if success else response.json()

        results = rp.done(success, response_json)

        yield StatusMessage(u"Returning send mail results by email address: {}".format(email_address))

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as err:
        LOG.error(err)
        yield FunctionError(err)
def _jira_open_issue_function(self, event, *args, **kwargs):
    """Function: Create a jira issue."""
    try:
        log = logging.getLogger(__name__)
        rc = RequestsCommon(self.opts, self.options)
        rp = ResultPayload(PACKAGE_NAME, **kwargs)

        # Get + validate the app.config parameters:
        log.info("Validating app configs")
        app_configs = validate_app_configs(self.options)

        # Get + validate the function parameters:
        log.info("Validating function inputs")
        fn_inputs = validate_fields(["incident_id", "jira_fields"], kwargs)
        log.info("Validated function inputs: %s", fn_inputs)

        # The JIRA fields arrive as a JSON string.
        jira_fields = json.loads(fn_inputs.get("jira_fields"))

        yield StatusMessage("Connecting to JIRA")
        jira_client = get_jira_client(app_configs, rc)

        # Build the URL back to the Resilient incident/task and prepend it
        # to the (markdown-converted) issue description.
        resilient_url = build_url_to_resilient(
            self.res_params.get("host"), self.res_params.get("port"),
            fn_inputs.get("incident_id"), fn_inputs.get("task_id"))
        jira_fields["description"] = prepend_text(
            "IBM Resilient Link: {0}".format(resilient_url),
            to_markdown(jira_fields.get("description", "")))

        yield StatusMessage("Creating JIRA issue")
        jira_issue = jira_client.create_issue(fields=jira_fields)

        # Collect the links and raw issue for the post-process script.
        results_contents = {
            "issue_url": jira_issue.permalink(),
            "issue_url_internal": jira_issue.self,
            "issue_key": jira_issue.key,
            "issue": jira_issue.raw
        }

        yield StatusMessage(u"JIRA issue {0} created".format(
            jira_issue.key))
        results = rp.done(success=True, content=results_contents)
        log.info("Complete")

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as err:
        yield FunctionError(err)
def _pt_integration_a_run_function(self, event, *args, **kwargs):
    """Function: Function that:
    - Sleeps for delay
    - Generates list of num_artifacts
    - Returns list of Artifacts to add, remaining number of runs and sample data"""
    try:
        log = logging.getLogger(__name__)

        # Instansiate ResultPayload
        rp = ResultPayload(PACKAGE_NAME, **kwargs)

        # Get the function inputs:
        fn_inputs = validate_fields(
            ["pt_int_num_artifacts", "pt_int_num_runs", "pt_int_delay"],
            kwargs)

        num_artifacts = fn_inputs.get("pt_int_num_artifacts")
        num_runs = fn_inputs.get("pt_int_num_runs")
        delay = fn_inputs.get("pt_int_delay")
        sample_data = fn_inputs.get("pt_int_sample_data")

        log.info("Got fn_inputs: %s", fn_inputs)

        # delay is expressed in milliseconds.
        if delay:
            log.info("Delay set. Sleeping for %s ms", delay)
            time.sleep(delay / 1000)

        log.info("Generating list of %s Artifacts", num_artifacts)
        artifacts_to_create = [{
            "value": u"PT Artifact {0}".format(num),
            "description": u"PT Artifact Description"
        } for num in range(num_artifacts)]

        # One run consumed by this invocation.
        remaining_runs = num_runs - 1
        log.info("Remaining runs changed to: %s", remaining_runs)

        results_content = {
            "remaining_runs": remaining_runs,
            "artifacts_to_create": artifacts_to_create,
            "sample_data": sample_data
        }
        results = rp.done(True, results_content)

        log.info("Returning results to post-process script")

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
def _utility_email_file_parser_function(self, event, *args, **kwargs):
    """Function: Parses .eml files for email forensics. Useful for reported phishes."""
    try:
        # Get the function parameters:
        incident_id = kwargs.get("incident_id")      # number
        attachment_id = kwargs.get("attachment_id")  # number
        eml_filename = kwargs.get("attachment_name") # text

        # Get the eml file attachment by its incident and attachment IDs
        eml_file = get_file_attachment(self.rest_client(),
                                       incident_id,
                                       artifact_id=None,
                                       task_id=None,
                                       attachment_id=attachment_id)

        yield StatusMessage('Reading and decoding email message (' +
                            eml_filename + ')...')

        # Build the email object from the raw bytes.
        mail = email.message_from_string(eml_file.decode("utf-8"))

        # UTF-8 body, attachments and URLs extracted from the raw message.
        email_body, attachments, urls = get_decoded_email_body(
            self, incident_id, eml_filename, mail)
        # Decoded header fields as a list of 2-tuples.
        email_header = get_decoded_email_header(mail.items())

        results = {
            # The full email, HTML formatted
            'body': str(email_body),
            # List of 2-tuples containing all the message's field headers and values
            'header': email_header,
            # List of attachment names from EML file
            'attachments': attachments,
            # URLs from body; going through a set removes duplicates.
            'urls': list(set(urls)),
        }

        # Derive the domain portion of each body URL.
        url_domains = []
        for url in results["urls"]:
            try:
                parsed_url = urlparse(url)
                if parsed_url.netloc:
                    parsed_url = parsed_url.netloc
                elif parsed_url.path:
                    parsed_url = str(parsed_url.path).split('/')[0]
                # Strip a single leading "www." prefix.
                if parsed_url.startswith('www.'):
                    parsed_url = parsed_url.replace('www.', '', 1)
                url_domains.append(parsed_url)
            except Exception:
                # Best-effort: skip URLs that cannot be parsed.
                continue
        results["url_domains"] = url_domains

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
def _ansible_tower_launch_job_template_function(self, event, *args, **kwargs):
    """Function: Launch an Ansible Tower job template.

    The template may be identified by id or by name (the name is resolved
    to an id via the Tower API). Returns the Tower job result wrapped in a
    ResultPayload.
    """
    try:
        # Fix: ("url") is just the string "url" — a one-element tuple
        # needs a trailing comma.
        validate_fields(("url",),
                        self.options)  # validate key app.config settings

        # Get the function parameters:
        tower_template_id = kwargs.get('tower_template_id')  # number
        tower_template_name = kwargs.get('tower_template_name')  # text
        tower_template_hosts = kwargs.get('tower_hosts')  # text
        tower_template_run_tags = kwargs.get('tower_run_tags')  # text
        tower_template_skip_tags = kwargs.get('tower_skip_tags')  # text
        tower_template_arguments = kwargs.get('tower_arguments')  # text

        log = logging.getLogger(__name__)
        log.info("tower_template_id: %s", tower_template_id)
        log.info("tower_template_name: %s", tower_template_name)
        log.info("tower_hosts: %s", tower_template_hosts)
        log.info("tower_arguments: %s", tower_template_arguments)
        log.info("tower_run_tags: %s", tower_template_run_tags)
        log.info("tower_skip_tags: %s", tower_template_skip_tags)

        result = ResultPayload(SECTION_HDR, **kwargs)

        yield StatusMessage("starting...")

        # Resolve the template name to an id when no id was given.
        if not tower_template_id:
            if not tower_template_name:
                raise ValueError(
                    "Specify either tower_template_id or tower_template_name"
                )

            json_template = get_job_template_by_name(
                self.opts, self.options, tower_template_name)
            if not json_template:
                # Fix: the original passed a (format-string, arg) tuple to
                # KeyError instead of a formatted message.
                raise KeyError(u"Unable to find job template: {0}".format(
                    tower_template_name))

            tower_template_id = json_template['id']

        tower_result = run_job_template(self.opts, self.options,
                                        tower_template_id,
                                        tower_template_hosts,
                                        tower_template_arguments,
                                        tower_template_run_tags,
                                        tower_template_skip_tags)

        result_payload = result.done(True, tower_result)
        yield StatusMessage("done...")

        # Produce a FunctionResult with the results
        yield FunctionResult(result_payload)
    except Exception:
        yield FunctionError()
def _tanium_sweep_for_hash_function(self, event, *args, **kwargs):
    """Function: Checks if a given MD5 hash is among running processes in our environment [Windows OS]. Returns a list of hostnames of False"""
    now = datetime.datetime.now()
    try:
        # Get the function parameters:
        incident_id = kwargs.get("incident_id")      # number
        file_hash_md5 = kwargs.get("file_hash_md5")  # text

        # Get Tanium config values
        tanium_user = self.options.get("tanium_user")
        tanium_password = self.options.get("tanium_password")
        tanium_server = self.options.get("tanium_server")
        tanium_port = self.options.get("tanium_port")
        tanium_pytan_loc = self.options.get("tanium_pytan_loc")

        log = logging.getLogger(__name__)
        log.debug("incident_id: %s", incident_id)
        log.debug("file_hash_md5: %s", file_hash_md5)
        log.debug("tanium_user: %s", tanium_user)
        log.debug("tanium_password: %s", tanium_password)
        log.debug("tanium_server: %s", tanium_server)
        log.debug("tanium_port: %s", tanium_port)
        log.debug("tanium_pytan_loc: %s", tanium_pytan_loc)

        yield StatusMessage("starting...")

        # Connect to Tanium via the pytan worker.
        tanium_object = tanium.TaniumWorker(tanium_user, tanium_password,
                                            tanium_server, tanium_port,
                                            tanium_pytan_loc)

        machines_where_hash_was_found = []
        yield StatusMessage("querying Tanium for data...")
        machines_where_hash_was_found = tanium_object.sweep_for_hash(
            file_hash_md5)

        if machines_where_hash_was_found:
            yield StatusMessage("Tanium returned data...")
            # Attach the hits to the incident as a timestamped CSV:
            # header, data, file_name, incident_id
            convert.convert_to_csv_and_attach_to_incident(
                ['Computer Name', 'MD5 Hash', 'Path'],
                machines_where_hash_was_found,
                now.strftime("%Y-%m-%d_%H:%M") + '-sweep_for_hash-' +
                file_hash_md5 + '.csv',
                incident_id, self)
        else:
            yield StatusMessage("hash not found")

        results = {
            "machines": machines_where_hash_was_found,
            "hash": file_hash_md5
        }

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
def _exchange_online_get_message_function(self, event, *args, **kwargs):
    """Function: This function returns the contents of an Exchange Online message."""
    try:
        # Initialize the results payload
        rp = ResultPayload(CONFIG_DATA_SECTION, **kwargs)

        # Validate fields
        validate_fields(['exo_email_address', 'exo_messages_id'], kwargs)

        # Get the function parameters:
        email_address = kwargs.get("exo_email_address")  # text
        message_id = kwargs.get("exo_messages_id")       # text

        LOG.info(u"exo_email_address: %s", email_address)
        LOG.info(u"exo_messages_id: %s", message_id)

        yield StatusMessage(
            u"Starting get message for email address: {}".format(
                email_address))

        # Get the MS Graph helper class
        MS_graph_helper = MSGraphHelper(
            self.options.get("microsoft_graph_token_url"),
            self.options.get("microsoft_graph_url"),
            self.options.get("tenant_id"),
            self.options.get("client_id"),
            self.options.get("client_secret"),
            self.options.get("max_messages"),
            self.options.get("max_users"),
            RequestsCommon(self.opts, self.options).get_proxies())

        # Call MS Graph API to get the message content
        response = MS_graph_helper.get_message(email_address, message_id)
        response_json = response.json()
        results = rp.done(True, response_json)

        # Add pretty printed string for easier to read output text in note.
        pretty_string = json.dumps(response_json,
                                   ensure_ascii=False,
                                   sort_keys=True,
                                   indent=4,
                                   separators=(',', ': '))
        results['pretty_string'] = pretty_string

        yield StatusMessage(
            u"Returning results for get message for email address: {}".
            format(email_address))

        # BUGFIX: pretty_string is already serialized JSON; wrapping it in
        # json.dumps again double-encoded it (quoted and escaped the blob).
        LOG.debug(pretty_string)

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as err:
        LOG.error(err)
        yield FunctionError(err)
def _fn_google_maps_directions_function(self, event, *args, **kwargs):
    """Function: A Function that takes an Origin and a Destination and returns a Google Maps Link with Directions"""
    log = logging.getLogger(__name__)

    # Base URL to Google Maps
    GOOGLE_MAPS_URL = "https://www.google.com/maps/dir/?api=1"

    def get_function_input(inputs, input_name, optional=False):
        """Given input_name, checks if it defined. Raises ValueError if a mandatory input is None"""
        value = inputs.get(input_name)
        if value is None and not optional:
            raise ValueError(
                "'{0}' is a mandatory function input".format(input_name))
        return value

    try:
        # Collect and validate the two required inputs
        inputs = {
            "google_maps_origin":
                get_function_input(kwargs, "google_maps_origin"),  # text (required)
            "google_maps_destination":
                get_function_input(kwargs, "google_maps_destination"),  # text (required)
        }

        # Wrap the inputs in the standard payload object
        payload = FunctionPayload(inputs)

        yield StatusMessage("Function Inputs OK")

        # URL-encode both endpoints before embedding them in the link
        encoded = {
            name: url_encode(payload.inputs[name].encode('utf8'))
            for name in ("google_maps_origin", "google_maps_destination")
        }

        yield StatusMessage("Generating Link")

        # Assemble the final directions link
        payload.directions_link = "{0}&origin={1}&destination={2}".format(
            GOOGLE_MAPS_URL,
            encoded["google_maps_origin"],
            encoded["google_maps_destination"])

        results = payload.as_dict()
        log.info("Complete")

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
def _mock_function_one_function(self, event, *args, **kwargs):
    """Function: A mock description of mock_function_one with unicode: ล ฦ ว ศ ษ ส ห ฬ อ"""
    try:
        log = logging.getLogger(__name__)

        # Identify the workflow instance this Function was called in
        wf_instance_id = event.message["workflow_instance"][
            "workflow_instance_id"]
        yield StatusMessage(
            "Starting 'mock_function_one' running in workflow '{0}'".
            format(wf_instance_id))

        # Read every mock input, resolving select/textarea fields through
        # the component helpers
        mock_input_number = kwargs.get("mock_input_number")    # number
        mock_input_boolean = kwargs.get("mock_input_boolean")  # boolean
        mock_input_select = self.get_select_param(
            kwargs.get("mock_input_select")
        )  # select, values: "select one", "select two", "select ล ฦ ว ศ ษ ส ห ฬ อ"
        mock_input_date_time_picker = kwargs.get(
            "mock_input_date_time_picker")  # datetimepicker
        mock_input_date_picker = kwargs.get(
            "mock_input_date_picker")  # datepicker
        mock_input_text_with_value_string = self.get_textarea_param(
            kwargs.get("mock_input_text_with_value_string"))  # textarea
        mock_input_multiselect = self.get_select_param(
            kwargs.get("mock_input_multiselect")
        )  # multiselect, values: "value one", "value two", "value ล ฦ ว ศ ษ ส ห ฬ อ"
        mock_input_text = kwargs.get("mock_input_text")  # text

        # Log each input (name: value), one line per field
        for field_name, field_value in (
                ("mock_input_number", mock_input_number),
                ("mock_input_boolean", mock_input_boolean),
                ("mock_input_select", mock_input_select),
                ("mock_input_date_time_picker", mock_input_date_time_picker),
                ("mock_input_date_picker", mock_input_date_picker),
                ("mock_input_text_with_value_string",
                 mock_input_text_with_value_string),
                ("mock_input_multiselect", mock_input_multiselect),
                ("mock_input_text", mock_input_text)):
            log.info("%s: %s", field_name, field_value)

        ##############################################
        # PUT YOUR FUNCTION IMPLEMENTATION CODE HERE #
        ##############################################

        yield StatusMessage(
            "Finished 'mock_function_one' that was running in workflow '{0}'"
            .format(wf_instance_id))

        results = {"content": "xyz"}

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
def _fn_sep_scan_endpoints_function(self, event, *args, **kwargs):
    """Function: Run a Evidence of Compromise (EOC) scan on Symantec Endpoint Protection endpoints."""
    try:
        # BUGFIX: validate the required inputs FIRST; previously validation
        # only ran after the inputs had already been read and logged.
        validate_fields(
            ["sep_scan_type", "sep_description", "sep_scan_action"],
            kwargs)

        params = transform_kwargs(kwargs) if kwargs else {}

        # Instantiate result payload object.
        rp = ResultPayload(CONFIG_DATA_SECTION, **kwargs)

        # Get the function parameters:
        sep_group_ids = kwargs.get("sep_group_ids")        # text
        sep_computer_ids = kwargs.get("sep_computer_ids")  # text
        sep_scan_type = self.get_select_param(kwargs.get(
            "sep_scan_type"))  # select, values: "QUICK_SCAN", "FULL_SCAN"
        sep_file_name = kwargs.get("sep_file_path")        # text
        sep_sha256 = kwargs.get("sep_sha256")              # text
        sep_sha1 = kwargs.get("sep_sha1")                  # text
        sep_md5 = kwargs.get("sep_md5")                    # text
        sep_description = kwargs.get("sep_description")    # text
        sep_scan_action = self.get_select_param(
            kwargs.get(
                "sep_scan_action"))  # select, values: "scan", "remediate"

        LOG.info("sep_group_ids: %s", sep_group_ids)
        LOG.info("sep_computer_ids: %s", sep_computer_ids)
        LOG.info("sep_scan_type: %s", sep_scan_type)
        LOG.info("sep_file_path: %s", sep_file_name)
        LOG.info("sep_sha256: %s", sep_sha256)
        LOG.info("sep_sha1: %s", sep_sha1)
        LOG.info("sep_md5: %s", sep_md5)
        LOG.info("sep_description: %s", sep_description)
        LOG.info("sep_scan_action: %s", sep_scan_action)

        yield StatusMessage(
            "Running Symantec SEP Scan Endpoints command...")

        # Run the scan via the SEP client and wrap the raw response
        sep = Sepclient(self.options, params)
        rtn = sep.scan_endpoints(**params)
        results = rp.done(True, rtn)

        yield StatusMessage(
            "Returning 'Symantec SEP Scan Endpoints' results")
        LOG.debug(json.dumps(results["content"]))

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        LOG.exception("Exception in Resilient Function for Symantec SEP.")
        yield FunctionError()
def _tanium_get_ip_conns_function(self, event, *args, **kwargs):
    """Function: Returns the protocol, local address / port, process name, application name, remote port, and connection state for all active IP connections on an endpoint. Example: tcp|192.168.95.186:51866|explorer.exe| Windows Explorer|165.254.58.66:80|established"""
    now = datetime.datetime.now()
    try:
        # Get the function parameters:
        tanium_endpoint = kwargs.get("tanium_endpoint")  # text
        incident_id = kwargs.get("incident_id")          # number

        # Get Tanium config values
        tanium_user = self.options.get("tanium_user")
        tanium_password = self.options.get("tanium_password")
        tanium_server = self.options.get("tanium_server")
        tanium_port = self.options.get("tanium_port")
        tanium_pytan_loc = self.options.get("tanium_pytan_loc")

        log = logging.getLogger(__name__)
        log.debug("tanium_endpoint: %s", tanium_endpoint)
        log.debug("incident_id: %s", incident_id)
        log.debug("tanium_user: %s", tanium_user)
        # SECURITY FIX: never write credentials to the log; previously the
        # plaintext tanium_password was emitted at debug level.
        log.debug("tanium_server: %s", tanium_server)
        log.debug("tanium_port: %s", tanium_port)
        log.debug("tanium_pytan_loc: %s", tanium_pytan_loc)

        yield StatusMessage("starting...")
        tanium_object = tanium.TaniumWorker(tanium_user, tanium_password,
                                            tanium_server, tanium_port,
                                            tanium_pytan_loc)

        ip_conns = []
        yield StatusMessage("querying Tanium for data...")
        ip_conns = tanium_object.get_ip_connections(tanium_endpoint)
        if ip_conns:
            yield StatusMessage("Tanium returned data...")
            # header, data, file_name, incident_id
            convert.convert_to_csv_and_attach_to_incident(
                ["Application", "Connection State",
                 "Local IP and Port", "Process", "Protocol",
                 "Remote IP and Port"], ip_conns,
                now.strftime("%Y-%m-%d_%H:%M") + '-IP_Conns-' +
                tanium_endpoint + '.csv', incident_id, self)
        else:
            yield StatusMessage("No data returned")

        results = {"ip_conns": ip_conns}

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as err:
        # BUGFIX: forward the exception instead of an empty FunctionError.
        yield FunctionError(err)
def _splunk_update_notable_function(self, event, *args, **kwargs):
    """Function: Update notable events according to the status of the corresponding incident. Inputs: event_id: the notable event id in the splunk_notable_event_id field comment: add a note to the notable event status: Notable event status. Integer: 2=active, 5= closed """
    # ROBUSTNESS: bind the logger before the try block so the except
    # handler can never hit an unbound 'log'.
    log = logging.getLogger(__name__)
    try:
        # Get the function parameters:
        event_id = kwargs.get("event_id")  # text
        comment = kwargs.get("comment")    # text
        notable_event_status = kwargs.get("notable_event_status")  # number

        # Certificate verification defaults on; only an explicit
        # verify_cert = "false" in app.config disables it.
        splunk_verify_cert = True
        if "verify_cert" in self.options and self.options[
                "verify_cert"] == "false":
            splunk_verify_cert = False

        log.info("event_id: %s", event_id)
        log.info("comment: %s", comment)
        log.info("notable_event_status: %s", notable_event_status)
        log.info("splunk_verify_cert: %s", splunk_verify_cert)
        log.info("Splunk host: %s, port: %s, username: %s",
                 self.options["host"], self.options["port"],
                 self.options["username"])

        yield StatusMessage("starting...")

        result_payload = ResultPayload(SECTION_HDR, **kwargs)

        splnk_utils = splunk_utils.SplunkUtils(
            host=self.options["host"],
            port=self.options["port"],
            username=self.options["username"],
            password=self.options["splunkpassword"],
            verify=splunk_verify_cert)

        splunk_result = splnk_utils.update_notable(
            event_id=event_id,
            comment=comment,
            status=notable_event_status,
            cafile=splunk_verify_cert)

        yield StatusMessage("done...")

        # Produce a FunctionResult with the return value
        yield FunctionResult(
            result_payload.done(True, splunk_result.get('content', {})))
    except Exception as e:
        # Lazy %-style args avoid formatting work unless the record is
        # actually emitted; forward the exception for a meaningful error.
        log.error("Function execution throws exception: %s", str(e))
        yield FunctionError(e)
def _dt_utils_delete_row_function(self, event, *args, **kwargs):
    """Function: Function that deletes a row from a Data Table given the row's ID"""
    log = logging.getLogger(__name__)

    try:
        # New Resilient API client for this invocation
        res_client = self.rest_client()

        # Gather and validate the function inputs
        inputs = {
            "incident_id":
                get_function_input(kwargs, "incident_id"),  # number (required)
            "dt_utils_datatable_api_name":
                get_function_input(
                    kwargs, "dt_utils_datatable_api_name"),  # text (required)
            "dt_utils_row_id":
                get_function_input(kwargs, "dt_utils_row_id",
                                   optional=True)  # number (optional)
        }

        payload = FunctionPayload(inputs)

        yield StatusMessage("Function Inputs OK")

        # Bind a datatable handle for the target incident/table
        datatable = RESDatatable(res_client,
                                 payload.inputs["incident_id"],
                                 payload.inputs["dt_utils_datatable_api_name"])

        row_id = payload.inputs["dt_utils_row_id"]
        deleted_row = datatable.delete_row(row_id)

        if "error" in deleted_row:
            yield StatusMessage("Row {0} in {1} NOT deleted.".format(
                row_id, datatable.api_name))
            payload.success = False
            raise ValueError(deleted_row["error"])

        yield StatusMessage("Row {0} in {1} deleted.".format(
            row_id, datatable.api_name))
        payload.row = deleted_row
        payload.success = True

        results = payload.as_dict()
        log.info("Complete")

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
def _mitre_groups_using_technique_function(self, event, *args, **kwargs):
    """Function: Get a list of groups that are using the given technique(s)."""
    log = logging.getLogger(__name__)
    try:
        technique_name = kwargs.get("mitre_technique_name")  # text
        technique_id = kwargs.get("mitre_technique_id")      # text

        log.info("mitre_technique_name: %s", technique_name)
        log.info("mitre_technique_id: %s", technique_id)

        result_payload = ResultPayload(
            "fn_mitre_integration",
            mitre_technique_name=technique_name,
            mitre_technique_id=technique_id)

        # At least one way of identifying the technique is required
        if not (technique_id or technique_name):
            raise ValueError(
                "At least one of the inputs(mitre_technique_name or mitre_technique_id) "
                "should be provided.")

        yield StatusMessage("Getting technique information...")
        mitre_conn = mitre_attack.MitreAttackConnection(
            self.opts, self.options)
        techniques = mitre_attack_utils.get_multiple_techniques(
            mitre_conn,
            mitre_technique_ids=technique_id,
            mitre_technique_names=technique_name)

        yield StatusMessage("Getting group information...")
        # Collect the groups associated with every matched technique
        groups = []
        for tech in techniques:
            groups.extend(
                mitre_attack.MitreAttackGroup.get_by_technique(
                    mitre_conn, tech))

        if not groups:
            yield StatusMessage(
                "No groups were found using any of the given techniques. Done."
            )
        else:
            yield StatusMessage("Done. Returning results.")

        # Serialize each group object for viewing
        results = {"mitre_groups": [grp.dict_form() for grp in groups]}

        # Produce a FunctionResult with the results
        yield FunctionResult(result_payload.done(True, results))
    except Exception as e:
        log.exception(str(e))
        yield FunctionError()
def _fn_cloud_foundry_create_app_function(self, event, *args, **kwargs):
    """Function: Creates and deploys a cloud foundry applications from the specified parameters/docker files."""
    try:
        log = logging.getLogger(__name__)

        # Gather the function inputs
        app_name = kwargs.get("fn_cloud_foundry_applications", None)  # text
        space_guid = kwargs.get("fn_cloud_foundry_space_guid", None)  # text
        extra_params_json = kwargs.get(
            "fn_cloud_foundry_additional_parameters_json", None)  # text

        # Application name and space GUID are both mandatory
        if space_guid is None or app_name is None:
            raise ValueError(
                "Both fn_cloud_foundry_applications and fn_cloud_foundry_space_guid "
                "have to be defined.")

        # Additional parameters arrive as a JSON string (or not at all)
        extra_params = ({} if extra_params_json is None
                        else json.loads(extra_params_json))

        log.info("fn_cloud_foundry_applications: %s", app_name)
        log.info("fn_cloud_foundry_space_guid: %s", space_guid)
        log.info("fn_cloud_foundry_additional_parameters_json: %s",
                 extra_params)
        log.info("Params: %s", extra_params)

        authenticator = IBMCloudFoundryAuthenticator(
            self.opts, self.options, self.base_url)
        yield StatusMessage("Authenticated into Cloud Foundry")

        cf_service = IBMCloudFoundryAPI(self.opts, self.options,
                                        self.base_url, authenticator)

        # Explicit values take precedence over anything in extra_params
        values = dict(extra_params)
        values.update({
            "space_guid": space_guid,
            "name": app_name,
            "username": self.cf_api_username,
            "password": self.cf_api_password
        })

        results = cf_service.create_app(values)
        log.info("Result: %s", results)
        yield StatusMessage("Done.")

        self._add_keys(results)

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as e:
        yield FunctionError(str(e))
def _tanium_get_open_ports_function(self, event, *args, **kwargs):
    """Function: Identifies the listening TCP ports, including the process listening to the port, the MD5 hash of the process, the display name of the process (if available), and the listening IP Address and port. The Sensor definition can be modified to exclude process and IP range."""
    now = datetime.datetime.now()
    try:
        # Get the function parameters:
        tanium_endpoint = kwargs.get("tanium_endpoint")  # text
        incident_id = kwargs.get("incident_id")          # number

        # Get Tanium config values
        tanium_user = self.options.get("tanium_user")
        tanium_password = self.options.get("tanium_password")
        tanium_server = self.options.get("tanium_server")
        tanium_port = self.options.get("tanium_port")
        tanium_pytan_loc = self.options.get("tanium_pytan_loc")

        log = logging.getLogger(__name__)
        log.debug("tanium_endpoint: %s", tanium_endpoint)
        log.debug("incident_id: %s", incident_id)
        log.debug("tanium_user: %s", tanium_user)
        # SECURITY FIX: never write credentials to the log; previously the
        # plaintext tanium_password was emitted at debug level.
        log.debug("tanium_server: %s", tanium_server)
        log.debug("tanium_port: %s", tanium_port)
        log.debug("tanium_pytan_loc: %s", tanium_pytan_loc)

        yield StatusMessage("starting...")
        tanium_object = tanium.TaniumWorker(tanium_user, tanium_password,
                                            tanium_server, tanium_port,
                                            tanium_pytan_loc)

        open_ports = []
        yield StatusMessage("querying Tanium for data...")
        open_ports = tanium_object.get_open_ports(tanium_endpoint)
        if open_ports:
            yield StatusMessage("Tanium returned data...")
            # header, data, file_name, incident_id
            convert.convert_to_csv_and_attach_to_incident(
                ['Open Port'], open_ports,
                now.strftime("%Y-%m-%d_%H:%M") + '-Open_Ports-' +
                tanium_endpoint + '.csv', incident_id, self)
        else:
            yield StatusMessage("No data returned")

        results = {"open_ports": open_ports}

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as err:
        # BUGFIX: forward the exception instead of an empty FunctionError.
        yield FunctionError(err)
def _exchange_move_folder_contents_and_delete_folder_function(self, event, *args, **kwargs):
    """Function: Move every item out of an Exchange folder into a destination
    folder, then delete the (now empty) source folder.

    When ``exchange_delete_if_no_subfolders`` is set, the operation is
    refused if the source folder still has subfolders.
    """
    try:
        # Get the function parameters:
        exchange_email = kwargs.get("exchange_email")  # text
        exchange_delete_if_no_subfolders = kwargs.get("exchange_delete_if_no_subfolders")  # boolean
        exchange_folder_path = kwargs.get("exchange_folder_path")  # text
        exchange_destination_folder_path = kwargs.get("exchange_destination_folder_path")  # text

        log = logging.getLogger(__name__)

        # Use default connection email if one was not specified
        if exchange_email is None:
            exchange_email = self.options.get('email')
            log.info('No connection email was specified, using value from config file')

        log.info("exchange_delete_emails_if_no_subfolders: %s" % exchange_delete_if_no_subfolders)
        log.info("exchange_email: %s" % exchange_email)
        log.info("exchange_folder_path: %s" % exchange_folder_path)
        log.info("exchange_destination_folder_path: %s" % exchange_destination_folder_path)

        # Initialize utils
        utils = exchange_utils(self.options)

        # Get folders
        from_folder = utils.go_to_folder(exchange_email, exchange_folder_path)
        to_folder = utils.go_to_folder(exchange_email, exchange_destination_folder_path)

        if exchange_delete_if_no_subfolders:
            if from_folder.child_folder_count != 0:
                # BUGFIX: this was "raise FunctionError(...)", which the
                # generic except below caught and replaced with an EMPTY
                # FunctionError, silently discarding the message.
                raise ValueError('%s has subfolders' % exchange_folder_path)
            queryset = from_folder.all()
        else:
            queryset = utils.get_emails(exchange_email, folder_path=exchange_folder_path,
                                        search_subfolders=True)

        # Get items before moving
        yield StatusMessage("Getting items")
        results = utils.create_email_function_results(queryset)

        # Move items
        yield StatusMessage("Moving items")
        item_count = queryset.count()
        for item in queryset:
            item.move(to_folder)

        # Delete folder
        yield StatusMessage("Deleting folder %s" % exchange_folder_path)
        from_folder.delete()
        yield StatusMessage("%s deleted, %d items moved" % (exchange_folder_path, item_count))

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as e:
        # BUGFIX: forward the exception so callers see the root cause
        # instead of an empty FunctionError.
        yield FunctionError(e)