def _base64_to_attachment_function(self, event, *args, **kwargs):
    """Function: decode base64 content and store it as an incident/task attachment."""
    try:
        logger = logging.getLogger(__name__)

        # Collect the workflow inputs.
        # artifact_file_type examples:
        # "Email Attachment", "Malware Sample", "Log File", "X509 Certificate File", "Other File", etc.
        b64_payload = kwargs.get("base64content")    # text
        incident_id = kwargs.get("incident_id")      # number
        task_id = kwargs.get("task_id")              # number
        file_name = kwargs.get("file_name")          # text
        content_type = kwargs.get("content_type")    # text

        # Fall back to a guessed MIME type, then to a generic binary type.
        if not content_type:
            guessed = mimetypes.guess_type(file_name or "")[0]
            content_type = guessed or "application/octet-stream"

        for label, value in (("incident_id", incident_id),
                             ("task_id", task_id),
                             ("file_name", file_name),
                             ("content_type", content_type)):
            logger.info("%s: %s", label, value)

        yield StatusMessage("Writing attachment...")

        # Decode the payload into an in-memory stream and POST it to Resilient.
        stream = BytesIO(base64.b64decode(b64_payload))
        new_attachment = write_file_attachment(self.rest_client(), file_name,
                                               stream, incident_id, task_id,
                                               content_type)
        logger.info(json.dumps(new_attachment))

        yield FunctionResult(new_attachment)
    except Exception:
        yield FunctionError()
def save_as_attachment(res_client, incident_id, results):
    """
    Save the job results as an incident attachment.

    :param res_client: Resilient REST client used to POST the attachment
    :param incident_id: ID of the incident to attach to
    :param results: payload to parse; expects 'summary' (id/status/name/finished)
        and 'events' -> 'results' (list of dicts with optional 'stdout')
    :return: tuple of (file_name, attachment JSON returned by the server)
    """
    summary = results['summary']
    finished = summary['finished'].replace('T', ' ') if summary['finished'] else None

    note = u"Job Id: {}\nStatus: {}\nTemplate Name: {}\nFinished: {}".format(
        summary['id'], summary['status'], summary['name'], finished)

    # Append each event's stdout; guard against events with a missing/None
    # stdout so join() does not raise a TypeError.
    note = u"{0}{1}".format(
        note,
        u"\n".join(event.get("stdout") or u""
                   for event in results['events']['results']))

    # Strip ANSI color-escape sequences (ESC '[' <codes> 'm'). The previous
    # pattern, r'[\x00-\x7f]\[[0-9;]*m', matched *any* ASCII character before
    # '[', which could delete legitimate text such as "x[0m"; anchor on the
    # ESC (0x1b) character instead.
    note = re.sub(r'\x1b\[[0-9;]*m', r'', note)

    # Python 2 expects a text stream; Python 3 a byte stream.
    if sys.version_info.major < 3:
        file_handle = io.StringIO(note)
    else:
        file_handle = io.BytesIO(note.encode('utf-8'))

    # file names are best without embedded spaces
    file_name = u"{}_{}.txt".format(
        summary['name'].replace(" ", "_"), summary['id'])

    attachment_json = write_file_attachment(res_client, file_name,
                                            file_handle, incident_id)

    return file_name, attachment_json
def _add_ticket_attachments(self, incident, ticket):
    """ Add the list of Secureworks ticket attachments to the Resilient incident.
    Only add attachments that are not already in Resilient. To make sure this is the case,
    add the Secureworks attachmentInfo id to the attachment name in Resilient and then
    check whether this attachment name is already in Resilient.
    :param incident: Resilient incident
    :param ticket: Secureworks ticket
    :return: None
    :raises IntegrationError: wraps any failure while reading or writing attachments
    """
    try:
        attachment_info_list = ticket.get('attachmentInfo')
        incident_id = incident.get('id')
        ticket_id = ticket.get('ticketId')

        # Get the list of attachments in this incident.
        uri = u'/incidents/{0}/attachments'.format(incident_id)
        res_attachments = self.rest_client().get(uri)

        for attachment in attachment_info_list:
            attachment_id = attachment.get('id')

            # Get ticket attachment and name
            response = self.scwx_client.get_tickets_attachment(
                ticket_id, attachment_id)
            attachment_name = attachment.get('name')

            # Use the Secureworks attachmentInfo id in the name to uniquely identify it.
            if not attachment_name:
                res_attachment_name = u"attachmentInfo-id-{0}".format(
                    attachment_id)
            else:
                res_attachment_name = u"{0}-attachmentInfo-id-{1}".format(
                    attachment_name, attachment_id)

            # Linear scan of the incident's existing attachments for the
            # generated (unique) name; a match means we already synced it.
            attachment_in_incident = False
            for r_attachment in res_attachments:
                if r_attachment.get("name") == res_attachment_name:
                    attachment_in_incident = True

            if attachment_in_incident:
                # Don't create attachment as it is already in Resilient
                continue

            # NOTE(review): assumes the Secureworks client returns a dict with
            # the raw attachment bytes under 'content' — confirm in scwx_client.
            content = response.get('content')
            datastream = BytesIO(content)

            # Write the file as attachement: failures will raise an exception
            message = u"Writing {0} for Secureworks CTP ticket {1} to Resilient incident {2}".format(
                attachment_name, ticket_id, incident_id)
            LOG.info(message)
            new_attachment = write_file_attachment(self.rest_client(),
                                                   res_attachment_name,
                                                   datastream, incident_id,
                                                   None)
            LOG.debug(new_attachment)
    except Exception as err:
        raise IntegrationError(err)
def create_incident_attachment(rest_client, incident_id, note, name_prefix):
    """
    Add an attachment to the specified Resilient Incident by ID
    :param rest_client: Resilient REST client used to POST the attachment
    :param incident_id: Resilient Incident ID
    :param note: Content to be added as attachment
    :param name_prefix: name prefix of the attachment name
    :return: Resilient attachment
    """
    try:
        # Name the attachment with the prefix plus a timestamp.
        timestamp = datetime.now().strftime(TIME_FORMAT)
        attachment_name = "{0}-{1}.txt".format(name_prefix, timestamp)

        # Python 2 expects a text stream; Python 3 a byte stream.
        if sys.version_info.major < 3:
            datastream = StringIO(note)
        else:
            datastream = BytesIO(note.encode("utf-8"))

        return write_file_attachment(rest_client, attachment_name,
                                     datastream, incident_id, None)
    except Exception as err:
        raise IntegrationError(err)
def _fn_log_capture_function(self, event, *args, **kwargs):
    """Function: Get the resilient-circuits log, optionally specifying the last n lines.

    Inputs (kwargs): log_capture_maxlen (number), log_capture_date (epoch),
    log_capture_date_option (select), log_min_level (select),
    incident_id (number, required), task_id (number),
    log_attachment_name (text, optional).
    Yields StatusMessages and a FunctionResult with the attachment name and
    the number of captured lines.
    """
    try:
        # Get the function parameters:
        log_capture_maxlen = kwargs.get("log_capture_maxlen", 0)  # number
        log_capture_date = kwargs.get("log_capture_date")  # datetimepicker (epoch)
        log_capture_date_option = self.get_select_param(kwargs.get("log_capture_date_option"))  # select
        log_min_level = self.get_select_param(kwargs.get("log_min_level"))  # select
        incident_id = kwargs.get("incident_id")  # number
        task_id = kwargs.get("task_id")  # number
        log_attachment_name = kwargs.get("log_attachment_name")

        validate_fields(['incident_id'], kwargs)

        # Build a default attachment name from the host's short name and a timestamp.
        if not log_attachment_name:
            dt = datetime.now()
            fqdn = platform.node().split('.')
            log_attachment_name = DEFAULT_ATTACHMENT_NAME.format(fqdn[0], dt.strftime("%Y%m%d_%H%M%S"))

        log = logging.getLogger(__name__)
        log.info("log_capture_maxlen: %s", log_capture_maxlen)
        log.info("log_capture_date: %s", log_capture_date)
        log.info("log_capture_date_option: %s", log_capture_date_option)
        log.info("incident_id: %s", incident_id)
        log.info("task_id: %s", task_id)
        log.info(u"log_attachment_name: %s", log_attachment_name)
        log.info("log_min_level: %s", log_min_level)

        # Normalize None/0 to 0 (meaning "no line limit" downstream — see
        # get_log_by_filter / get_log_by_date).
        log_capture_maxlen = log_capture_maxlen if log_capture_maxlen else 0

        result_payload = ResultPayload(PACKAGE_NAME, **kwargs)
        yield StatusMessage("starting...")

        # Date and date-option must be supplied together (or neither).
        if not log_capture_date and not log_capture_date_option:
            num_of_lines, captured_lines = get_log_by_filter(self.log_file, log_min_level, log_capture_maxlen)
        elif log_capture_date and log_capture_date_option:
            num_of_lines, captured_lines = get_log_by_date(self.log_file, log_capture_date,
                                                           log_capture_date_option, log_capture_maxlen,
                                                           log_min_level)
        else:
            raise ValueError("Specify date with date option")

        # add as an attachment
        rest_client = self.rest_client()
        # Python 2 str is already bytes; Python 3 needs an explicit encode.
        if sys.version_info.major < 3:
            datastream = BytesIO(captured_lines)
        else:
            datastream = BytesIO(captured_lines.encode("utf-8"))
        # failures will raise an exception
        write_file_attachment(rest_client, log_attachment_name, datastream, incident_id, task_id)

        # Produce a FunctionResult with the results
        yield StatusMessage(u"attachment created '{}' with {} lines".format(log_attachment_name, num_of_lines))
        yield StatusMessage("done...")

        result_data = {"attachment_name": log_attachment_name, "num_of_lines": num_of_lines}
        results = result_payload.done(True, result_data)
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
def _netwitness_retrieve_log_data(self, event, *args, **kwargs):
    """Function: Returns back either a log file from Netwitness.

    Inputs (kwargs): nw_data_format (select: logs_text/logs_csv/logs_xml/logs_json),
    nw_start_time (int, required), nw_end_time (int, required),
    incident_id (number).
    When log content is returned it is also written to the incident as an
    attachment with a matching file extension.
    """
    try:
        # Get the function parameters:
        nw_data_format = self.get_select_param(
            kwargs.get("nw_data_format"))  # select

        yield StatusMessage("Retrieving {} logs...".format(nw_data_format))

        nw_start_time = kwargs.get("nw_start_time")  # int
        if nw_start_time is None:
            raise FunctionError(
                "nw_start_time must be set in order to run this function.")

        nw_end_time = kwargs.get("nw_end_time")  # int
        if nw_end_time is None:
            raise FunctionError(
                "nw_end_time must be set in order to run this function.")

        incident_id = kwargs.get("incident_id")  # number

        # Initialize resilient_lib objects (handles the select input)
        results_payload = ResultPayload("fn_rsa_netwitness", **kwargs)
        req_common = RequestsCommon(self.opts)

        log.info("nw_data_format: %s", nw_data_format)
        log.info("nw_start_time: %s", nw_start_time)
        log.info("nw_end_time: %s", nw_end_time)

        data_file = {}
        # Convert epoch inputs into NetWitness's expected time format.
        start_time = convert_to_nw_time(nw_start_time)
        end_time = convert_to_nw_time(nw_end_time)

        # Get all common variables from app.config
        url = self.options.get("nw_log_server_url")
        username = self.options.get("nw_log_server_user")
        password = self.options.get("nw_log_server_password")
        nw_verify = self.options.get("nw_log_server_verify")

        # Dict lookup for render format
        # NOTE(review): "logs_text" deliberately maps to the literal "logs"
        # render format rather than a MIME type — confirm against the
        # NetWitness REST API.
        render_format_dict = {
            "logs_text": "logs",
            "logs_csv": "text/csv",
            "logs_xml": "text/xml",
            "logs_json": "application/json"
        }

        # Make sure format is a supported case
        if nw_data_format not in render_format_dict:
            raise FunctionError("{} is not a supported format to retrieve logs"\
                .format(nw_data_format))

        # Return log data in json format
        if nw_data_format == "logs_json":
            data_file = get_nw_session_logs_file(url, username, password, nw_verify, \
                start_time, end_time, req_common,
                render_format=render_format_dict[nw_data_format],
                resp_type="json")
        # Return log data in text format
        else:
            data_file = get_nw_session_logs_file(url, username, password, nw_verify, \
                start_time, end_time, req_common,
                render_format=render_format_dict[nw_data_format])

        log.debug("data_file: %s", data_file)
        results = results_payload.done(True, data_file)
        log.debug("RESULTS: %s", results)

        # Check for empty log files
        # (if empty, no log file will be attached and a note will
        # be added in the workflow post-process)
        if results["content"]:
            yield StatusMessage("Logs found, creating attachment...")

            # Get client, attachment name, and content of log files from netwitness
            rest_client = self.rest_client()

            # Determine the proper extension for the attachment name
            if nw_data_format == "logs_text":
                ext = "txt"
            else:
                ext = nw_data_format[5:]  # for csv, xml, json

            attachment_name = u"Log file for {} - {}.{}".format(
                nw_start_time, nw_end_time, ext)

            # JSON content is re-serialized pretty-printed; text content is
            # encoded per the running Python major version.
            if nw_data_format == "logs_json":
                datastream = BytesIO(
                    json.dumps(results['content'], indent=4).encode('utf-8'))
            elif sys.version_info.major < 3:
                datastream = StringIO(results["content"])
            else:
                datastream = BytesIO(results["content"].encode("utf-8"))

            write_file_attachment(rest_client, attachment_name,\
                datastream, incident_id, None)

        yield StatusMessage("Done...")

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as error:
        yield FunctionError(error)
def _netwitness_retrieve_pcap_data(self, event, *args, **kwargs):
    """Function: Returns back either a pcap file from Netwitness, and attaches it to an incident.

    Inputs (kwargs): nw_event_session_ids (text) OR both nw_start_time and
    nw_end_time (text), plus incident_id (number).
    Yields StatusMessages and a FunctionResult; the PCAP is written to the
    incident as an attachment.
    """
    try:
        yield StatusMessage("Starting...")

        # Get the function parameters:
        nw_event_session_ids = kwargs.get("nw_event_session_ids")  # text
        nw_start_time = kwargs.get("nw_start_time")  # text
        nw_end_time = kwargs.get("nw_end_time")  # text
        incident_id = str(kwargs.get("incident_id"))  # number

        # Initialize resilient_lib objects (handles the select input)
        results_payload = ResultPayload("fn_rsa_netwitness",
                                        **{"nw_event_session_ids": nw_event_session_ids,
                                           "incident_id": incident_id})
        req_common = RequestsCommon(self.opts)

        # Verify inputs are set correctly: either session IDs, or a full time range.
        if nw_event_session_ids is None:
            if nw_start_time is None or nw_end_time is None:
                raise FunctionError("Either nw_event_session_ids or nw_start_time and "\
                                    "nw_end_time must be set for this function to run correctly.")

        log.info("nw_event_session_ids: %s", nw_event_session_ids)
        log.info("nw_start_time: %s", nw_start_time)
        log.info("nw_end_time: %s", nw_end_time)
        log.info("incident_id: %s", incident_id)

        # Get all common variables from app.config
        url = self.options.get("nw_packet_server_url")
        username = self.options.get("nw_packet_server_user")
        password = self.options.get("nw_packet_server_password")
        nw_verify = self.options.get("nw_packet_server_verify")

        # User session id if avaiable
        if nw_event_session_ids:
            pcap_file = get_nw_session_pcap_file(url, username, password, nw_verify,\
                                                 nw_event_session_ids, req_common)
            file_name = "PCAP file for session IDs: {}.pcap".format(
                nw_event_session_ids)
        else:
            nw_start = convert_to_nw_time(nw_start_time)
            nw_end = convert_to_nw_time(nw_end_time)
            pcap_file = get_nw_session_pcap_file_time(url, username, password, nw_verify,\
                                                      nw_start, nw_end, req_common)
            file_name = "PCAP file between {} and {}.pcap".format(
                nw_start, nw_end)

        rest_client = self.rest_client()
        results = results_payload.done(True, {})

        # NOTE(review): on Python 2 StringIO is used for what is binary pcap
        # data — confirm the py2 path is still exercised.
        if sys.version_info.major < 3:
            datastream = StringIO(pcap_file)
        else:
            datastream = BytesIO(pcap_file)

        # FIX: log only the FIRST 1000 bytes. The original sliced
        # pcap_file[1000:], which dumped everything *after* byte 1000 —
        # i.e. nearly the entire capture — into the debug log.
        log.debug("pcap_file: %s", pcap_file[:1000])

        write_file_attachment(rest_client, file_name, datastream, incident_id, None)

        yield StatusMessage("PCAP file added as attachment to Incident {}"\
            .format(str(incident_id)))
        yield StatusMessage("Done...")
        log.debug("RESULTS: %s", results)

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as error:
        yield FunctionError(error)
def _utilities_string_to_attachment_function(self, event, *args, **kwargs):
    """Function: Create a new attachment from an inputted string"""
    try:
        log = logging.getLogger(__name__)

        # --- Required inputs -------------------------------------------------
        source_string = kwargs.get(
            'string_to_convert_to_attachment')  # text (required)
        if not source_string:
            raise ValueError('string_to_convert_to_attachment is required')

        attachment_name = kwargs.get('attachment_name')  # text (required)
        if not attachment_name:
            raise ValueError('attachment_name is required')

        # Guarantee the attachment name carries a file extension (.txt default).
        ext = os.path.splitext(attachment_name)[1]
        if not ext or ext == '.':
            # Name either has no extension or ends with a bare '.'.
            suffix = "txt" if attachment_name.endswith('.') else ".txt"
            attachment_name = '{0}{1}'.format(attachment_name, suffix)

        incident_id = kwargs.get('incident_id')  # number (required)
        if not incident_id:
            raise ValueError('incident_id is required')

        # --- Optional inputs -------------------------------------------------
        task_id = kwargs.get('task_id')  # number (optional)
        content_type = 'text/plain'

        log.info('string_to_convert_to_attachment: %s', source_string)
        log.info('attachment_name: %s', attachment_name)
        log.info('incident_id: %s', incident_id)
        log.info('task_id: %s', task_id)

        yield StatusMessage('Writing attachment...')

        # Python 2 str is already bytes; Python 3 needs an explicit encode.
        if sys.version_info.major < 3:
            datastream = BytesIO(source_string)
        else:
            datastream = BytesIO(source_string.encode("utf-8"))

        # POST the new attachment via the Resilient API.
        new_attachment = write_file_attachment(self.rest_client(),
                                               attachment_name,
                                               datastream,
                                               incident_id,
                                               task_id=task_id,
                                               content_type=content_type)

        if new_attachment is not None:
            yield StatusMessage('Attachment {0} was created'.format(
                new_attachment['id']))
            yield FunctionResult({'attachment_id': new_attachment['id']})
        else:
            yield StatusMessage('Failed creating attachment')
            raise FunctionError(u'Failed creating attachment')
    except Exception:
        yield FunctionError()
def _urlscanio_function(self, event, *args, **kwargs):
    """Function: urlscanio

    Submit a URL to urlscan.io, poll until the scan report is ready, then
    download the JSON report and the PNG screenshot. The screenshot is
    attached to the incident and also returned base64-encoded.

    Inputs (kwargs): urlscanio_url (text, required), urlscanio_public (bool),
    urlscanio_useragent (text), urlscanio_referer (text), incident_id (number).
    :return: FunctionResult with png_base64content, png_url, report_url, report
    """
    try:
        log = logging.getLogger(__name__)

        # Get the function parameters:
        urlscanio_url = kwargs.get("urlscanio_url")  # text
        urlscanio_public = kwargs.get("urlscanio_public")  # boolean, optional
        urlscanio_useragent = kwargs.get("urlscanio_useragent")  # text, optional
        urlscanio_referer = kwargs.get("urlscanio_referer")  # text, optional
        incident_id = kwargs.get("incident_id")  # number, optional

        log.info("urlscanio_url: %s", urlscanio_url)

        # Construct the parameters to send to urlscan.io
        urlscanio_headers = {'Content-Type': 'application/json', 'API-Key': self.apikey}
        urlscanio_data = {"url": urlscanio_url}
        if urlscanio_public:
            urlscanio_data["public"] = "on"
        if urlscanio_useragent:
            urlscanio_data["customagent"] = urlscanio_useragent
        if urlscanio_referer:
            urlscanio_data["referer"] = urlscanio_referer

        req_common = RequestsCommon(self.options, self.opts)

        urlscanio_scan_url = u"{}/scan/".format(self.urlscanio_report_url)
        urlscanio_post = req_common.execute_call_v2("POST", urlscanio_scan_url, self.timeout,
                                                    headers=urlscanio_headers,
                                                    data=json.dumps(urlscanio_data))
        urlscanio_post.raise_for_status()

        # The post response contains a UUID that we use to check for the report
        urlscanio_post_json = urlscanio_post.json()
        log.debug(urlscanio_post_json)

        # UUID tells me my report ID so I can go grab it after
        uuid = urlscanio_post_json['uuid']
        yield StatusMessage("Submitted URL successfully as %s" % uuid)

        # Loop until the report is ready
        start_time = time.time()  # epoch seconds
        while True:
            time.sleep(10)
            if time.time() > start_time + self.timeout:
                # FIX: the original `yield`ed the RuntimeError, which neither
                # raised nor broke the loop, so the timeout never stopped the
                # polling. Raise instead; the outer handler converts it to a
                # FunctionError.
                raise RuntimeError("Timeout: report was not ready after {} seconds".format(self.timeout))
            urlscanio_result_url = u"{}/result/{}".format(self.urlscanio_report_url, uuid)
            try:
                urlscanio_get = req_common.execute_call_v2("GET", urlscanio_result_url, self.timeout)
                if urlscanio_get.status_code == 200:
                    # Report is done
                    break
                else:
                    # Some other error condition
                    urlscanio_get.raise_for_status()
            # requests-common will throw an IntegrationError if 404 is received
            except IntegrationError as ie:
                # 404 means the report is not yet complete.
                # NOTE(review): assumes IntegrationError.value is a string that
                # begins with the HTTP status code — confirm in resilient-lib.
                if ie.value[:3] == '404':
                    yield StatusMessage("Waiting for report...")
                else:
                    # FIX: re-raise the caught exception. The original raised
                    # the bare IntegrationError class, discarding the message.
                    raise

        yield StatusMessage("Report is ready")

        # get the full report json - usually a big blob
        urlscanio_report_url = u"{}/result/{}/".format(self.urlscanio_report_url, uuid)
        urlscanio_report_get = req_common.execute_call_v2("GET", urlscanio_report_url, self.timeout)
        urlscanio_report_json = urlscanio_report_get.json()
        yield StatusMessage("Downloaded report from {}".format(urlscanio_report_url))

        # Grab the PNG screenshot. Return as a base64 string so it can be passed to another function as needed
        urlscanio_png_url = u"{}/{}.png".format(self.urlscanio_screenshot_url, uuid)
        urlscanio_png_get = req_common.execute_call_v2("GET", urlscanio_png_url, self.timeout)
        urlscanio_png_b64 = base64.b64encode(urlscanio_png_get.content)
        yield StatusMessage("Downloaded PNG screenshot from {}".format(urlscanio_png_url))

        # returns the png file base64 and also the report url
        results = {
            "png_base64content": str(urlscanio_png_b64),
            "png_url": urlscanio_png_url,
            "report_url": urlscanio_report_url,
            "report": urlscanio_report_json
        }

        # Get rest client, attachment name, and png content so we can write as an attachment
        rest_client = self.rest_client()
        attachment_name = u"urlscanio-screenshot-{}.png".format(urlscanio_url)
        datastream = BytesIO(urlscanio_png_get.content)

        # Write the file as an attachment
        write_file_attachment(rest_client, attachment_name, datastream, incident_id, None)

        yield FunctionResult(results)
    except Exception as err:
        yield FunctionError(err)
def _pulsedive_search_function(self, event, **kwargs):
    """ Function: Search Pulsedive for Indicators, Threats, or Feeds, using
    type-related filters. This function gets input values from the action rule
    activity fields and sends them to Pulsedive's Search endpoint.
    Results: a summary will be written to an incident note and full details
    will be written to an incident attachment. An option to export Indicator
    (only) search to CSV is provided in a link in Note.

    :return: FunctionResult with resp_json (ResultPayload) and att_name
    """
    try:
        log = logging.getLogger(__name__)
        log.info("config params: %s", self.options)
        # Get the function parameters:
        log.info("function params: %s", kwargs)
        incident_id = kwargs.get("incident_id")  # number

        # what type of search is requested
        pulsedive_search_type = self.get_select_param(
            kwargs.get("pulsedive_search_type")
        )  # select: "Indicator", "Threat", "Feed"

        # map action input filters to Pulsedive query filters
        if pulsedive_search_type == "Indicator":
            mapping = self._get_indicator_mapping(**kwargs)
        elif pulsedive_search_type == "Threat":
            mapping = self._get_threat_mapping(**kwargs)
        else:
            mapping = self._get_feed_mapping(**kwargs)

        # eliminate empty/null vars
        pulsedive_data = {}
        for k, v in mapping.items():
            if v is not None and v != "":
                pulsedive_data[k] = v
        pulsedive_data["key"] = self.options["pulsedive_api_key"]
        log.info("%s parameters: %s", pulsedive_search_type, pulsedive_data)

        # set attachment name if user doesn't specify one
        if kwargs.get("attachment_name") is None:
            attachment_name = u"pulsedive_search_{}.txt".format(
                pulsedive_search_type)
        else:
            attachment_name = kwargs.get("attachment_name").replace(
                u" ", u"_")
        # FIX: the original call had two %s placeholders but only one
        # argument, which made the logging module report a formatting error;
        # supply the search type as the first argument.
        log.info("%s attachment name: %s", pulsedive_search_type, attachment_name)

        yield StatusMessage("starting...")

        # form the url request
        api_url = "{}/search.php?".format(
            self.options["pulsedive_api_url"])

        # make the api call
        rp = ResultPayload(CONFIG_SECTION, **kwargs)
        rc = RequestsCommon(self.opts, self.options)  # initialize
        resp = rc.execute_call_v2("get", url=api_url, params=pulsedive_data)

        # Get the rest client so we can add the attachment to the incident
        client = self.rest_client()

        # prepare datastream to output to attachment
        # FIX: use .get() — the "pretty" key is dropped above when its value
        # is empty/None, so direct indexing could raise a KeyError.
        if pulsedive_data.get("pretty") == "Yes":
            # Pulsedive returns pp format if requested. Convert to bytestream for file handling.
            datastream = BytesIO(resp.content)
        else:
            # Convert dict to string first, then convert to bytestream for file handling.
            ds = resp.text
            datastream = BytesIO(ds.encode("utf-8"))

        # Write the file as attachment: failures will raise an exception
        write_file_attachment(client, attachment_name,
                              datastream=datastream,
                              incident_id=incident_id, task_id=None)

        # === prepare the results
        resp_json = rp.done(True, resp.json())
        results = {"resp_json": resp_json, "att_name": attachment_name}

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
        yield StatusMessage("done...")
    except Exception as err:
        yield FunctionError(err)
def upload_attachment(self, src_rest_client, orig_org_id, orig_inc_id, type_name, payload, orig_type_id):
    """
    attachments may be incident level, associated with tasks, or part of an artifact
    see write_artifact_attachment for artifact file uploads
    :param src_rest_client: REST client for the *source* Resilient org
    :param orig_org_id: source org id
    :param orig_inc_id: source incident id
    :param type_name: object type being synced (e.g. "attachment")
    :param payload: attachment metadata from the source org
    :param orig_type_id: source attachment/artifact id
    :return: None
    """
    src_artifact_id = src_attachment_id = src_task_id = dst_task_id = None
    # A content_type under payload['attachment'] marks this as an artifact
    # file rather than a plain attachment.
    if payload.get('attachment', {}).get('content_type', None):
        src_artifact_id = orig_type_id
    else:
        src_attachment_id = orig_type_id

    # find the incident for this attachment
    sync_inc_id, sync_state = self.dbsync.find_incident(
        orig_org_id, orig_inc_id)

    # do nothing if incident already deleted, bypassed or filtered
    if sync_state in ['deleted', 'bypassed', 'filtered']:
        LOG.debug("No action on %s: %s:%s->%s", sync_state,
                  orig_inc_id, type_name, orig_type_id)
        return

    # is this a task based attachment?
    if payload.get("task_id", None):
        src_task_id = payload.get("task_id")
        _, dst_task_id, sync_state = self.dbsync.find_sync_row(
            orig_org_id, orig_inc_id, "task", src_task_id)
        # if the task doesn't exist, the attachment will be requeued
        if not dst_task_id:
            LOG.warning(
                "task:%s->%s for attachment id '%s' does not exist, queuing",
                orig_inc_id, src_task_id, orig_type_id)
            self.dbsync.create_retry_row(orig_org_id, orig_inc_id,
                                         "task", src_task_id, type_name,
                                         orig_type_id, sync_inc_id,
                                         payload, 1)
            return
    else:
        # get the target attachment, if it exists
        _, sync_type_id, _ = self.dbsync.find_sync_row(
            orig_org_id, orig_inc_id, type_name, orig_type_id)
        # attachments cannot be updated
        if sync_type_id:
            return

    # incident missing? queue a retry keyed on the task (if task-based) or
    # the incident itself.
    if not sync_inc_id:
        self.dbsync.create_retry_row(
            orig_org_id, orig_inc_id,
            "task" if src_task_id else "incident",
            src_task_id if src_task_id else orig_inc_id,
            type_name, orig_type_id, None, payload, 1)
        return

    # read the attachment from the source Resilient
    attachment_contents = get_file_attachment(
        src_rest_client, orig_inc_id,
        attachment_id=src_attachment_id,
        task_id=src_task_id,
        artifact_id=src_artifact_id)
    file_handle = io.BytesIO(attachment_contents)

    # NOTE(review): self.rest_client here is used as an attribute (not
    # called), unlike other functions in this file — presumably the target
    # org's client object; confirm.
    LOG.debug('adding %s:%s->%s to %s:%s',
              type_name, orig_inc_id, orig_type_id,
              self.rest_client.org_id, sync_inc_id)

    # artifact as file attachment?
    if src_artifact_id:
        response = self.write_artifact_file(payload['attachment']['name'],
                                            file_handle, sync_inc_id, payload)
    else:
        try:
            response = write_file_attachment(
                self.rest_client, payload['name'], file_handle,
                sync_inc_id, task_id=dst_task_id,
                content_type=payload['content_type'])
        except Exception as err:
            LOG.error("Unable to create attachment for file: %s",
                      payload['name'])
            LOG.error(payload)
            LOG.exception(err)
            response = None

    # create sync row
    if response:
        new_type_id = response.get('id', None)
        self.dbsync.create_sync_row(orig_org_id, orig_inc_id, type_name,
                                    orig_type_id, sync_inc_id, new_type_id,
                                    'active')
        LOG.info('added %s:%s->%s to %s:%s->%s',
                 type_name, orig_inc_id, orig_type_id,
                 self.rest_client.org_id, sync_inc_id, new_type_id)
    else:
        LOG.error('error adding %s:%s->%s to %s:%s',
                  type_name, orig_inc_id, orig_type_id,
                  self.rest_client.org_id, sync_inc_id)
def _pulsedive_query_id_function(self, event, *args, **kwargs):
    """Function: Query Pulsedive for information on an indicator ID, threat
    ID, or feed ID. This function gets input values from the action rule
    activity fields and sends them to Pulsedive's Query endpoint.
    Results: a summary will be written to an incident note and full details
    will be written to an incident attachment.

    :return: FunctionResult with resp_json (ResultPayload) and att_name
    """
    try:
        log = logging.getLogger(__name__)
        log.info("config params: %s", self.options)

        # get query type: Indicator, Threat, or Feed
        pulsedive_id = kwargs.get("pulsedive_id")
        pulsedive_query_type = self.get_select_param(kwargs.get("pulsedive_query_type"))
        pulsedive_id_report_type = self.get_select_param(kwargs.get(
            "pulsedive_id_report_type"))

        # === get url parameters based on report type
        mapping = self._get_mapping(pulsedive_query_type, **kwargs)

        # eliminate empty/null vars
        pulsedive_data = {}
        for k, v in mapping.items():
            if v is not None and v != "":
                pulsedive_data[k] = v

        # add key
        pulsedive_data["key"] = self.options["pulsedive_api_key"]
        log.info("%s parameters: %s", pulsedive_query_type, pulsedive_data)

        # === set incident parameters
        incident_id = kwargs.get("incident_id")  # integer
        if kwargs.get("attachment_name") is None:
            attachment_name = u"pulsedive_{}_id{}_{}.txt".format(
                pulsedive_query_type, pulsedive_id, pulsedive_id_report_type)
        else:
            attachment_name = kwargs.get("attachment_name").replace(u" ", u"_")

        log.info("function params: pulsedive id = %s, type = %s, report = %s,"
                 " incident='%s', attachment='%s'",
                 pulsedive_id, pulsedive_query_type, pulsedive_id_report_type,
                 incident_id, attachment_name)

        yield StatusMessage("starting...")

        # form the url request
        api_url = "{}/info.php?".format(self.options["pulsedive_api_url"])

        # === make the api call
        rp = ResultPayload(CONFIG_SECTION, **kwargs)
        rc = RequestsCommon(self.opts, self.options)  # initialize
        resp = rc.execute_call_v2("get", url=api_url, params=pulsedive_data)

        # === Get the rest client so we can add the attachment to the incident
        client = self.rest_client()

        # prepare datastream to output to attachment
        # FIX: use .get() — the "pretty" key is dropped above when its value
        # is empty/None, so direct indexing could raise a KeyError.
        if pulsedive_data.get("pretty") == "Yes":
            # Pulsedive returns pp format if requested. Convert to bytestream for file handling.
            datastream = BytesIO(resp.content)
        else:
            # Convert dict to string first, then convert to bytestream for file handling.
            ds = resp.text
            datastream = BytesIO(ds.encode("utf-8"))

        # Write the file as attachment: failures will raise an exception
        write_file_attachment(client, attachment_name,
                              datastream=datastream,
                              incident_id=incident_id, task_id=None)

        # === prepare results
        resp_json = rp.done(True, resp.json())
        results = {
            "resp_json": resp_json,
            "att_name": attachment_name
        }

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
        yield StatusMessage("done...")
    except Exception as err:
        yield FunctionError(err)
def _exchange_online_write_message_as_attachment_function(
        self, event, *args, **kwargs):
    """Function: This function will get the mime content of an Exchange Online message and write it as an attachment.

    Inputs (kwargs): incident_id (number, required), task_id (number),
    exo_email_address (text, required), exo_messages_id (text, required),
    exo_attachment_name (text, optional — defaults to
    "message-<address>-<id>.eml").
    Yields StatusMessages and a FunctionResult containing the attachment name.
    """
    try:
        # Initialize the results payload
        rp = ResultPayload(CONFIG_DATA_SECTION, **kwargs)

        # Validate fields
        validate_fields(
            ['incident_id', 'exo_email_address', 'exo_messages_id'],
            kwargs)

        # Get the function parameters:
        incident_id = kwargs.get("incident_id")  # number
        task_id = kwargs.get("task_id")  # number
        email_address = kwargs.get("exo_email_address")  # text
        message_id = kwargs.get("exo_messages_id")  # text
        attachment_name = kwargs.get("exo_attachment_name")  # text

        LOG.info(u"incident_id: %s", incident_id)
        LOG.info(u"task_id: %s", task_id)
        LOG.info(u"exo_email_address: %s", email_address)
        LOG.info(u"exo_messages_id: %s", message_id)
        LOG.info(u"exo_attachment_name: %s", attachment_name)

        yield StatusMessage(
            u"Starting to get message mime for email address: {}".format(
                email_address))

        # Get the MS Graph helper class
        MS_graph_helper = MSGraphHelper(
            self.options.get("microsoft_graph_token_url"),
            self.options.get("microsoft_graph_url"),
            self.options.get("tenant_id"),
            self.options.get("client_id"),
            self.options.get("client_secret"),
            self.options.get("max_messages"),
            self.options.get("max_users"),
            RequestsCommon(self.opts, self.options).get_proxies())

        # Call MS Graph API to get the message MIME content.
        response = MS_graph_helper.get_message_mime(
            email_address, message_id)

        # Stream the MIME bytes into memory for the attachment upload.
        datastream = BytesIO(response.content)

        # Default attachment name identifies the mailbox and message.
        if attachment_name is None:
            attachment_name = u"message-{}-{}.eml".format(
                email_address, message_id)
        LOG.info(u"attachment_name: %s", attachment_name)

        # Get the rest client so we can add the attachment to the incident.
        rest_client = self.rest_client()

        # Write the file as attachement: failures will raise an exception
        write_file_attachment(rest_client, attachment_name, datastream,
                              incident_id, task_id)

        results_data = {"attachment_name": attachment_name}
        results = rp.done(True, results_data)

        yield StatusMessage(
            u"Returning results for get message mime for email address: {0}\n attachment name: {1}"
            .format(email_address, attachment_name))

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as err:
        LOG.error(err)
        yield FunctionError(err)
def _stix_create_bundle_function(self, event, *args, **kwargs):
    """Function: Build a STIX bundle of indicators from the incident's artifacts.

    For each artifact whose type maps (via self.mapping) to a STIX object and
    property, a STIX observation-expression pattern is built, written back to
    the artifact description, and collected into a bundle that is uploaded as
    an incident attachment ("bundle_<incident_id>.json").
    :return: FunctionResult with the list of generated patterns
    """
    try:
        # Get the wf_instance_id of the workflow this Function was called in
        wf_instance_id = event.message["workflow_instance"][
            "workflow_instance_id"]
        yield StatusMessage(
            "Starting 'stix_create_bundle' running in workflow '{0}'".
            format(wf_instance_id))

        # Get the function parameters:
        incident_id = kwargs.get("incident_id")  # number

        log = logging.getLogger(__name__)
        log.info("incident_id: %s", incident_id)

        artifacts = self.rest_client().get(
            "/incidents/{}/artifacts".format(incident_id))

        patterns = []
        indicators = []
        for a in artifacts:
            log.debug("artifact: %s", a)
            # First mapping row whose 'id' matches the artifact type; raises
            # IndexError (caught below) when there is no match.
            index = list(np.where(self.mapping['id'] == a['type']))[0][0]
            # FIX: use a %s placeholder — the original passed `index` as an
            # argument with no placeholder, triggering a logging error.
            log.debug("artifact mapping index: %s", index)
            row = self.mapping.iloc[index]
            # FIX: dropped the original `if index and ...` truthiness test,
            # which wrongly skipped mapping row 0 (index == 0 is falsy).
            if row['object'] is not np.nan and row['property'] is not np.nan:
                try:
                    obj = ObjectPath(row["object"], [str(row["property"])])
                    pattern = ObservationExpression(
                        EqualityComparisonExpression(obj, str(a['value'])))
                    log.debug("append pattern %s", pattern)
                    # Record the pattern on the artifact itself.
                    a["description"] = str(pattern)
                    self.rest_client().put(
                        "/incidents/{}/artifacts/{}".format(
                            incident_id, a["id"]), a)
                    patterns.append(str(pattern))
                except UnicodeEncodeError:
                    continue

        for p in patterns:
            indicators.append(Indicator(pattern_type='stix', pattern=p))
        # FIX: placeholder added here as well (same logging misuse).
        log.debug('indicators: %s', indicators)

        # Create new bundle
        bundle = Bundle(*indicators)

        # Upload attachment
        # FIX: BytesIO requires bytes on Python 3; encode the serialized
        # bundle instead of passing a str.
        datastream = BytesIO(str(bundle).encode('utf-8'))
        client = self.rest_client()
        write_file_attachment(
            client, 'bundle_' + str(incident_id) + '.json',
            datastream, incident_id)

        yield StatusMessage(
            "Finished 'stix_create_bundle' that was running in workflow '{0}'"
            .format(wf_instance_id))

        # Produce a FunctionResult with the results
        yield FunctionResult({"patterns": patterns})
    except Exception:
        yield FunctionError()
def _urlscanio_function(self, event, *args, **kwargs):
    """Function: urlscanio

    Submit a URL to urlscan.io, poll until the scan report is ready, then
    return the report JSON plus the screenshot as a base64 string. When a
    screenshot exists and an incident_id was supplied, the PNG is also
    written to the incident as a file attachment.

    kwargs:
        urlscanio_url (text): URL to scan (required).
        urlscanio_public (boolean, optional): make the scan public.
        urlscanio_useragent (text, optional): custom user agent.
        urlscanio_referer (text, optional): custom referer header.
        incident_id (number, optional): incident to attach the screenshot to.

    Yields StatusMessage progress updates and a FunctionResult with keys
    png_base64content, png_url, report_url, report; FunctionError on failure.
    """
    try:
        # Validate required app.config settings and function inputs.
        validate_fields([{'name':'urlscanio_api_key', 'placeholder':'xxx'},
                         'urlscanio_report_url', 'urlscanio_screenshot_url'], self.options)
        validate_fields(['urlscanio_url'], kwargs)

        # Get the function parameters:
        urlscanio_url = kwargs.get("urlscanio_url")          # text
        urlscanio_public = kwargs.get("urlscanio_public")    # boolean, optional
        urlscanio_useragent = kwargs.get("urlscanio_useragent")  # text, optional
        urlscanio_referer = kwargs.get("urlscanio_referer")  # text, optional
        incident_id = kwargs.get("incident_id")              # number, optional

        LOG.info("urlscanio_url: %s", urlscanio_url)

        # Construct the parameters to send to urlscan.io
        urlscanio_headers = {'Content-Type': 'application/json', 'API-Key': self.apikey}
        urlscanio_data = {"url": urlscanio_url}
        if urlscanio_public:
            urlscanio_data["public"] = "on"
        if urlscanio_useragent:
            urlscanio_data["customagent"] = urlscanio_useragent
        if urlscanio_referer:
            urlscanio_data["referer"] = urlscanio_referer

        req_common = RequestsCommon(self.opts, self.options)
        urlscanio_scan_url = u"{}/scan/".format(self.urlscanio_report_url)
        urlscanio_post = req_common.execute_call_v2(
            "POST", urlscanio_scan_url, self.timeout,
            headers=urlscanio_headers, data=json.dumps(urlscanio_data),
            callback=report_callback)

        # The post response contains a UUID that we use to check for the report
        urlscanio_post_json = urlscanio_post.json()
        LOG.debug(urlscanio_post_json)

        if urlscanio_post.status_code == 400:
            # Submission rejected; return the error payload as the "report".
            yield StatusMessage(urlscanio_post_json['message'])
            results = {
                "png_base64content": None,
                "png_url": None,
                "report_url": urlscanio_scan_url,
                "report": urlscanio_post_json
            }
        else:
            # UUID tells me my report ID so I can go grab it after
            uuid = urlscanio_post_json['uuid']
            yield StatusMessage("Submitted URL successfully as %s" % uuid)

            # Loop until the report is ready
            start_time = time.time()  # epoch seconds
            while True:
                time.sleep(10)
                if time.time() > start_time + self.timeout:
                    # FIX: original `yield`ed the RuntimeError object, which did
                    # not stop the loop — it polled forever. Raise instead so the
                    # except handler below reports the failure as a FunctionError.
                    raise RuntimeError(
                        "Timeout: report was not ready after {} seconds".format(self.timeout))
                urlscanio_result_url = u"{}/result/{}".format(self.urlscanio_report_url, uuid)
                urlscanio_get = req_common.execute_call_v2(
                    "GET", urlscanio_result_url, self.timeout, callback=report_callback)
                if urlscanio_get.status_code == 200:
                    # Report is done
                    break

            # get the full report json - usually a big blob
            urlscanio_report_url = u"{}/result/{}/".format(self.urlscanio_report_url, uuid)
            urlscanio_report_get = req_common.execute_call_v2(
                "GET", urlscanio_report_url, self.timeout)
            urlscanio_report_json = urlscanio_report_get.json()
            yield StatusMessage("Downloaded report from {}".format(urlscanio_report_url))

            # Grab the PNG screenshot. Return as a base64 string so it can be
            # passed to another function as needed.
            urlscanio_png_url = u"{}/{}.png".format(self.urlscanio_screenshot_url, uuid)
            urlscanio_png_get = req_common.execute_call_v2(
                "GET", urlscanio_png_url, self.timeout, callback=report_callback)
            if urlscanio_png_get.status_code == 404:
                urlscanio_png_b64 = None
                yield StatusMessage("No Screenshot Available")
            else:
                # FIX: decode to text — on Python 3, str(b64encode(...)) produced
                # the literal "b'...'" wrapper around the base64 content.
                urlscanio_png_b64 = base64.b64encode(urlscanio_png_get.content).decode("ascii")
                yield StatusMessage("Downloaded PNG screenshot from {}".format(urlscanio_png_url))

            # returns the png file base64 and also the report url
            results = {
                "png_base64content": urlscanio_png_b64,
                "png_url": urlscanio_png_url,
                "report_url": urlscanio_report_url,
                "report": urlscanio_report_json
            }

            # FIX: only attach when a screenshot was actually downloaded — the
            # original wrote the 404 error body to the incident as a .png.
            # Also skip when no incident_id was supplied (it is optional).
            if urlscanio_png_b64 is not None and incident_id:
                rest_client = self.rest_client()
                attachment_name = u"urlscanio-screenshot-{}.png".format(urlscanio_url)
                datastream = BytesIO(urlscanio_png_get.content)
                # Write the file as an attachment
                write_file_attachment(rest_client, attachment_name, datastream, incident_id, None)

        yield FunctionResult(results)
    except Exception as err:
        yield FunctionError(err)
def _pulsedive_query_value_function(self, event, *args, **kwargs):
    """Function: Query Pulsedive for information on an indicator/threat/feed value.

    The raw API response is written to the incident as a text attachment and
    also returned in the function results.

    inputs:
        incident_id: incident ID for attachment
        pulsedive_value: artifact type dns, ipadrs, or string
        pulsedive_query_type: specify "Indicator", "Threat", or "Feed"
        pulsedive_feed_org: feed organization (for feed value only)
        attachment_name (optional): attachment file name; spaces become
            underscores. Defaults to pulsedive_<type>_<value>.txt.
        pulsedive_pretty (optional): select field; when absent defaults to "Yes"
            so Pulsedive pretty-prints the JSON.
    return: json data (ResultPayload) plus the attachment name
    """
    try:
        log = logging.getLogger(__name__)
        log.info("config params: %s", self.options)

        # Get the function parameters:
        pulsedive_value = kwargs.get("pulsedive_value")  # string
        pulsedive_query_type = self.get_select_param(
            kwargs.get("pulsedive_query_type")
        )  # select fld: Indicator, Threat, or Feed
        pulsedive_feed_org = kwargs.get("pulsedive_feed_org")  # string
        incident_id = kwargs.get("incident_id")  # integer

        # Build the attachment file name; replace spaces since file names
        # are best without embedded spaces.
        if kwargs.get("attachment_name") is None:
            attachment_name = u"pulsedive_{}_{}.txt".format(
                pulsedive_query_type, pulsedive_value)
        else:
            attachment_name = kwargs.get("attachment_name").replace(
                u" ", u"_")

        log.info(
            "function params: pulsedive value = '%s', type = %s, feed org = '%s', \
            incident = '%s', attachment = '%s'", pulsedive_value,
            pulsedive_query_type, pulsedive_feed_org, incident_id,
            attachment_name)

        yield StatusMessage("starting...")

        # form the url request
        api_url = "{}/info.php?".format(self.options["pulsedive_api_url"])
        pulsedive_data = {
            "key": self.options["pulsedive_api_key"],
            # Default to pretty output unless the caller supplied a choice.
            "pretty": "Yes" if "pulsedive_pretty" not in kwargs
            else self.get_select_param(kwargs.get("pulsedive_pretty"))
        }

        # add type to url request: Pulsedive selects the lookup kind by
        # which query parameter is present.
        if pulsedive_query_type == "Feed":
            pulsedive_data["feed"] = pulsedive_value
            pulsedive_data["organization"] = pulsedive_feed_org
        elif pulsedive_query_type == "Threat":
            pulsedive_data["threat"] = pulsedive_value
        else:
            # Indicator ID (default)
            pulsedive_data["indicator"] = pulsedive_value

        # make the api call
        rp = ResultPayload(CONFIG_SECTION, **kwargs)
        rc = RequestsCommon(self.opts, self.options)  # initialize
        resp = rc.execute_call_v2("get", url=api_url,
                                  params=pulsedive_data)

        # === Get the rest client so we can add the attachment to the incident
        client = self.rest_client()

        # prepare datastream to output to attachment
        if pulsedive_data["pretty"] == "Yes":
            # Pulsedive returns pp format if requested. Convert to bytestream for file handling.
            datastream = BytesIO(resp.content)
        else:
            # Convert dict to string first, then convert to bytestream for file handling.
            ds = resp.text
            datastream = BytesIO(ds.encode("utf-8"))

        # Write the file as attachment: failures will raise an exception
        write_file_attachment(client, attachment_name,
                              datastream=datastream,
                              incident_id=incident_id, task_id=None)

        # === prepare results
        resp_json = rp.done(True, resp.json())
        results = {"resp_json": resp_json, "att_name": attachment_name}

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
        # NOTE(review): this StatusMessage is yielded after the FunctionResult;
        # presumably resilient-circuits still delivers it — confirm intended order.
        yield StatusMessage("done...")
    except Exception as err:
        yield FunctionError(err)