def download_attachment_if_available(cls, artifact_id, attachment_id, gcp_artifact_input, incident_id, task_id):
    """
    Attempts to download an attachment if an artifact input was not specified.

    :param artifact_id: ID of the artifact to download, if any
    :param attachment_id: ID of the attachment to download, if any
    :param gcp_artifact_input: text supplied directly to the function; when set, no download is attempted
    :param incident_id: ID of the incident the attachment or artifact belongs to
    :param task_id: ID of the task the attachment belongs to, if any
    :return: tuple of (attachment text parsed as UTF-8, attachment name)
    """
    attachment_input = None
    attachment_name = None

    # Check whether we are dealing with an attachment or artifact
    if (artifact_id or attachment_id or task_id) and gcp_artifact_input is None:
        LOG.info("Input appears to be an attachment, downloading from REST API")

        # Get the file's data
        attachment_input = resilient_lib.get_file_attachment(
            incident_id=incident_id, artifact_id=artifact_id,
            attachment_id=attachment_id, task_id=task_id,
            res_client=cls.res_client)

        # Get the file's name
        attachment_name = resilient_lib.get_file_attachment_name(
            incident_id=incident_id, artifact_id=artifact_id,
            attachment_id=attachment_id, task_id=task_id,
            res_client=cls.res_client)

        # Perform some special handling to get the text out of a PDF
        if '.pdf' in attachment_name:
            LOG.debug("Dealing with a PDF")
            attachment_input = cls.extract_text_from_pdf(attachment_input)
        elif '.odt' in attachment_name:
            LOG.debug("Dealing with an ODT")
            attachment_input = cls.extract_text_from_odt(attachment_input)
        elif '.docx' in attachment_name:
            LOG.debug("Dealing with a DOCX")
            attachment_input = cls.extract_text_from_docx(attachment_input)
    else:
        # We are not dealing with an attachment
        LOG.debug("Working with an artifact")

    return cls.attempt_to_parse_as_utf8(attachment_input), attachment_name
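
# A minimal usage sketch (not from the source): a handler that prefers the
# direct text input and falls back to the downloaded attachment text. The
# handler name is hypothetical; the kwargs keys mirror the parameters above.
def _example_gcp_handler(cls, **kwargs):
    parsed_text, attachment_name = cls.download_attachment_if_available(
        artifact_id=kwargs.get("artifact_id"),
        attachment_id=kwargs.get("attachment_id"),
        gcp_artifact_input=kwargs.get("gcp_artifact_input"),
        incident_id=kwargs.get("incident_id"),
        task_id=kwargs.get("task_id"))
    # Prefer the direct artifact input; otherwise use the downloaded text
    content = kwargs.get("gcp_artifact_input") or parsed_text
    return content, attachment_name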
def _isitphishing_html_document_function(self, event, *args, **kwargs):
    """Function: isitphishing_html_document
    This function takes an incident id as a required parameter, and task_id,
    attachment_id, and artifact_id as optional inputs that specify an HTML
    document to be base64-encoded and sent to the Vade Secure API endpoint
    https://ws.isitphishing.org/api/v2/document for analysis, to determine
    whether the document contains phishing.
    The "results" dictionary contains the result of the API query in
    "content" and the function's parameters in "inputs".
    """
    try:
        rp = ResultPayload(CONFIG_DATA_SECTION, **kwargs)

        # Get the function parameters:
        incident_id = kwargs.get("incident_id")  # number
        task_id = kwargs.get("task_id")  # number
        attachment_id = kwargs.get("attachment_id")  # number
        artifact_id = kwargs.get("artifact_id")  # number

        log = logging.getLogger(__name__)
        log.info("incident_id: %s", incident_id)
        log.info("task_id: %s", task_id)
        log.info("attachment_id: %s", attachment_id)
        log.info("artifact_id: %s", artifact_id)

        # Form the URL for the API request.
        API_URL = u"{0}/document".format(self.options["isitphishing_api_url"])

        # Get the license key to access the API endpoint.
        auth_token = get_license_key(self.options["isitphishing_name"],
                                     self.options["isitphishing_license"])

        # Build the headers.
        headers = {
            "Authorization": u"Bearer {}".format(auth_token),
            "Content-type": "application/json",
            "Accept": "application/json"
        }

        # Build the document payload, which is a base64-encoded string.
        client = self.rest_client()

        # Get the attachment data and name
        data = get_file_attachment(client, incident_id, artifact_id,
                                   task_id, attachment_id)
        filename = get_file_attachment_name(client, incident_id, artifact_id,
                                            task_id, attachment_id)

        # Base64-encode the document and build the payload.
        base64encoded_doc = base64.b64encode(data).decode("ascii")
        payload = {"document": base64encoded_doc}

        yield StatusMessage("Query isitPhishing endpoint for status of document.")

        # Make the API request
        rc = RequestsCommon(self.opts, self.options)
        results_analysis = rc.execute_call("post", API_URL, payload,
                                           log=log, headers=headers)

        results = rp.done(True, results_analysis)
        # Add the filename back into the inputs
        results["inputs"]["filename"] = filename

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as err:
        yield FunctionError(err)
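
# A standalone sketch of the request body this function builds: per the code
# above, the /document endpoint takes a JSON object whose "document" field is
# the base64-encoded file. Only the standard library is assumed, and the
# helper name is illustrative.
import base64

def build_isitphishing_document_payload(document_bytes):
    """Return the JSON-serializable payload dict for the /document endpoint."""
    return {"document": base64.b64encode(document_bytes).decode("ascii")}

# e.g. build_isitphishing_document_payload(b"<html>...</html>")
# -> {"document": "PGh0bWw+Li4uPC9odG1sPg=="}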
def _docker_run_docker_container_function(self, event, *args, **kwargs):
    """Function: Creates a Docker container from an image, feeds an input
    to the container, and returns the results."""
    try:
        # Get the function parameters:
        artifact_id = kwargs.get("artifact_id")  # number
        attachment_id = kwargs.get("attachment_id")  # number
        incident_id = kwargs.get("incident_id")  # number
        task_id = kwargs.get("task_id")  # number
        docker_image = self.get_select_param(
            kwargs.get("docker_image"))  # select, values: "volatility", "nsrl", "plaso", "bloodhound"
        docker_input = kwargs.get("docker_input")  # text
        docker_operation = kwargs.get("docker_operation")  # text
        docker_artifact_type = kwargs.get("docker_artifact_type")

        attachment_name = None  # Initialise attachment name as None
        payload = ResultPayload("fn_docker", **kwargs)

        log = logging.getLogger(__name__)
        log.info("artifact_id: %s", artifact_id)
        log.info("attachment_id: %s", attachment_id)
        log.info("incident_id: %s", incident_id)
        log.info("task_id: %s", task_id)
        log.info("docker_image: %s", docker_image)
        log.info("docker_input: %s", docker_input)
        log.info("docker_artifact_type: %s", docker_artifact_type)
        log.info("docker_operation: %s", docker_operation)

        helper = ResDockerHelper(self.options)
        image_to_use = helper.get_config_option(
            self.options, "docker_image", True) or docker_image

        # Prepare the args which will be rendered into the app.config cmd
        escaped_args = {
            "docker_input": render(u"{{docker_input|%s}}" % "sh", kwargs),
        }
        attachment_file_name = None  # Initialise filename as None to avoid reference errors

        # Check whether we are dealing with an attachment or artifact
        if (artifact_id or attachment_id or task_id) and docker_input is None:
            log.debug("Input appears to be an attachment, attempting to parse.")
            yield StatusMessage("Downloading Attachment from REST API")

            # Get the file's data
            attachment_input = resilient_lib.get_file_attachment(
                incident_id=incident_id, artifact_id=artifact_id,
                attachment_id=attachment_id, task_id=task_id,
                res_client=self.rest_client())

            # Get the file's name
            attachment_name = resilient_lib.get_file_attachment_name(
                incident_id=incident_id, artifact_id=artifact_id,
                attachment_id=attachment_id, task_id=task_id,
                res_client=self.rest_client())

            # Get the external directory in which to save the file
            output_vol = helper.get_config_option(
                options=self.all_options.get('{}{}'.format(
                    CONFIGSECTIONPREFIX, image_to_use)),
                option_name="primary_source_dir",
                optional=True)

            log.debug("Writing attachment to bind folder")

            if os.path.isdir(output_vol):
                # Write the attachment to a named temp file in the bind folder
                with tempfile.NamedTemporaryFile(
                        delete=False,
                        prefix=DOCKERATTACHMENTPREFIX,
                        dir=output_vol) as temp_file:
                    try:
                        temp_file.write(attachment_input)
                        os.chmod(temp_file.name, 0o666)
                        temp_file.close()
                    finally:
                        attachment_file_name = os.path.split(temp_file.name)[1]
                        log.debug("Saving file to %s", temp_file.name)

                # Add an attachment_input arg to be rendered into the cmd command
                escaped_args.update({
                    "attachment_input": render(
                        "{{attachment_input|%s}}" % "sh",
                        {u"attachment_input": attachment_file_name}),
                })
                yield StatusMessage(
                    u"Added this as an Attachment Input: {}".format(attachment_name))
            else:
                err_msg = (u"Could not write file to directory. Does the directory "
                           u"{0} exist? If not, create it with: mkdir {0}".format(output_vol))
                raise FunctionError(err_msg)
        else:
            # We are not dealing with an attachment
            log.debug("Working with an artifact")

        docker_interface = DockerUtils()

        # Decide whether to use a local or remote Docker connection
        docker_interface.setup_docker_connection(options=self.options)

        # Ensure the specified image is an approved one
        if image_to_use not in helper.get_config_option(
                self.options, "docker_approved_images").split(","):
            raise ValueError(
                "Image is not in list of approved images. Review your app.config")

        # Gather the command to send to the image and format docker_extra_kwargs
        # for any image-specific volumes
        command, docker_extra_kwargs, image_fullname, docker_operation = \
            docker_interface.gather_image_args_and_volumes(
                helper, image_to_use, self.all_options, escaped_args,
                docker_operation)

        log.info("Command: %s \n Volume Bind: %s",
                 command, docker_extra_kwargs.get('volumes', "No Volumes"))

        # Now get the image
        docker_interface.get_image(image_fullname)
        # Get the client
        docker_client = docker_interface.get_client()

        yield StatusMessage("Now starting container with input")

        try:
            # Run container using client
            container = docker_client.containers.run(
                image=image_fullname,
                command=render(command, escaped_args),
                detach=True,  # Detach from container
                remove=False,  # Removed manually after gathering info
                **docker_extra_kwargs)

            container_stats = docker_interface.gather_container_stats(
                container_id=container.id)
            container_id = container.id

            # Gather the logs as they happen, until the container finishes
            container_logs = container.logs(follow=True)
            yield StatusMessage("Container has finished and logs gathered")

            # Attempt to remove the container now that we have finished;
            # raises an exception if the container has already been removed
            container_status = container.wait()
            container.remove()
        except requests.exceptions.HTTPError as request_exception:
            yield StatusMessage(
                u"Encountered issue when trying to remove container: {} \n {}".format(
                    request_exception,
                    u"If you supplied an extra app.config value to remove the container, this is expected."))

        timestamp_epoch = int(time.time() * 1000)

        # Set up a tempfile to write back the attachment
        with tempfile.NamedTemporaryFile(mode="w+t", delete=False) as temp_upload_file:
            try:
                new_attachment_name = helper.format_result_attachment_name(
                    image_to_use, container_id)

                # Write and close the tempfile
                temp_upload_file.write(
                    helper.format_output_attachment_body(
                        container_id, docker_operation, attachment_file_name,
                        docker_artifact_type, docker_input,
                        container_logs.decode('utf-8'), timestamp_epoch))
                temp_upload_file.close()

                # Access the Resilient API
                client = self.rest_client()

                # Create the POST URI: for a task if task_id is defined,
                # otherwise for the incident
                if task_id:
                    attachment_uri = '/tasks/{}/attachments'.format(task_id)
                else:
                    attachment_uri = '/incidents/{}/attachments'.format(incident_id)

                # POST the new attachment
                client.post_attachment(
                    attachment_uri,
                    temp_upload_file.name,
                    filename=new_attachment_name,
                    mimetype='text/plain')
            except Exception as upload_exception:
                err_msg = (u"Encountered issue when saving results to a file and "
                           u"uploading via REST API. Exception: {0}".format(upload_exception))
                raise FunctionError(err_msg)
            finally:
                os.unlink(temp_upload_file.name)

        results = payload.done(
            # If the container had no errors, StatusCode 0 is returned;
            # use a falsy check so 0 maps to success=True and anything else to False
            success=not container_status.get("StatusCode", 1),
            content={
                "logs": container_logs.decode('utf-8'),
                "container_exit_status": container_status,
                "container_stats": container_stats,
                "container_id": container_id,
                "res_links": {
                    "res_object": helper.prepare_res_link(
                        host=self.host_config[0],
                        incident_id=incident_id,
                        task_id=task_id)
                },
                "attachment_name": attachment_name or None,
            })
        results["metrics"]["timestamp_epoch"] = timestamp_epoch
        # Update the docker_operation input to ensure we capture the operation performed, if any
        results["inputs"]["docker_operation"] = docker_operation

        # Produce a FunctionResult with the results using the FunctionPayload
        yield FunctionResult(results)
        log.debug("RESULTS: %s", results)
        log.info("Complete")
    except Exception:
        yield FunctionError()
    finally:
        try:
            os.unlink(temp_file.name)
        except NameError:
            log.debug("No attachment temp file to clean up.")
        else:
            log.debug("Successfully cleaned up file")
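
# A minimal standalone sketch of the attachment hand-off pattern used above:
# write the bytes into the bind-mounted directory with delete=False, loosen
# permissions so the container user can read the file, and return only the
# bare file name for rendering into the container command. Names here are
# illustrative; only the standard library is assumed.
import os
import tempfile

def write_attachment_to_bind_dir(data, bind_dir, prefix="attachment_"):
    """Write attachment bytes into bind_dir; the caller unlinks when done."""
    if not os.path.isdir(bind_dir):
        raise ValueError(
            "Bind directory {0} does not exist; create it with: mkdir {0}".format(bind_dir))
    with tempfile.NamedTemporaryFile(delete=False, prefix=prefix, dir=bind_dir) as temp_file:
        temp_file.write(data)
    os.chmod(temp_file.name, 0o666)  # container user may differ from the host user
    return os.path.split(temp_file.name)[1]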
def _fn_vmray_sandbox_analyzer_function(self, event, *args, **kwargs):
    """Function: VMRay Cloud Analyzer integration"""

    def write_temp_file(data, name=None):
        if name:
            path = os.path.join(tempfile.gettempdir(), name)
        else:
            tf = tempfile.mkstemp()
            path = tf[1]
        with open(path, 'wb') as fo:
            fo.write(data)
        return path

    try:
        # Get VMRay Sandbox options from the app.config file
        VMRAY_API_KEY = self.options.get("vmray_api_key")
        VMRAY_ANALYZER_URL = self.options.get("vmray_analyzer_url")
        VMRAY_ANALYSIS_REPORT_REQUEST_TIMEOUT = float(
            self.options.get("vmray_analyzer_report_request_timeout"))

        # Get the function parameters:
        incident_id = kwargs.get("incident_id")  # number
        artifact_id = kwargs.get("artifact_id")  # number
        attachment_id = kwargs.get("attachment_id")  # number
        analysis_report_status = kwargs.get("analysis_report_status")  # boolean
        sample_ids = kwargs.get("sample_ids") or []  # list

        if not incident_id:
            raise ValueError("incident_id is required")
        if (not attachment_id) and (not artifact_id):
            raise ValueError("attachment_id or artifact_id is required")

        log = logging.getLogger(__name__)
        log.info("incident_id: %s", incident_id)
        log.info("artifact_id: %s", artifact_id)
        log.info("attachment_id: %s", attachment_id)
        log.info("analysis_report_status: %s", analysis_report_status)
        log.info("sample_ids: %s", sample_ids)

        sample_final_result = []
        if not analysis_report_status:
            # VMRay client and Resilient client
            vmray = VMRayAPI(VMRAY_API_KEY,
                             url=VMRAY_ANALYZER_URL,
                             proxies=RequestsCommon(self.opts, self.options).get_proxies())
            resilient = self.rest_client()

            # Get the entity we are dealing with (either an attachment or an
            # artifact), then submit it to the VMRay Analyzer
            sample_file = get_file_attachment(res_client=resilient,
                                              incident_id=incident_id,
                                              artifact_id=artifact_id,
                                              attachment_id=attachment_id)
            sample_name = get_file_attachment_name(res_client=resilient,
                                                   incident_id=incident_id,
                                                   artifact_id=artifact_id,
                                                   attachment_id=attachment_id)

            with open(write_temp_file(sample_file, sample_name), "rb") as handle:
                sample_ids = [
                    sample["sample_id"]
                    for sample in vmray.submit_samples(handle, sample_name)
                ]
            log.info("sample_ids: %s", sample_ids)

            # New sample submissions can take hours to finish, so poll until
            # the analysis is done or the configured timeout elapses.
            time_of_begin_check_report = time.time()
            is_samples_analysis_finished = all(
                vmray.check(sample_id) for sample_id in sample_ids)
            while not is_samples_analysis_finished:
                if time.time() - time_of_begin_check_report > VMRAY_ANALYSIS_REPORT_REQUEST_TIMEOUT:
                    yield StatusMessage(
                        "Analysis still running at VMRay Cloud Analyzer; please check back later.")
                    break
                yield StatusMessage(
                    "Analysis Report not done yet, retrying every {} seconds".format(
                        CHECK_REPORTS_SLEEP_TIME))
                time.sleep(CHECK_REPORTS_SLEEP_TIME)
                is_samples_analysis_finished = all(
                    vmray.check(sample_id) for sample_id in sample_ids)

            if is_samples_analysis_finished:
                for sample_id in sample_ids:
                    sample_final_result.append({
                        "sample_id": sample_id,
                        "sample_report": vmray.get_sample_report(sample_id)["data"],
                        "sample_reputation_report": vmray.get_sample_reputation_report(sample_id)["data"],
                        "sample_analysis_report": vmray.get_sample_anlysis_report(sample_id)["data"]
                    })
                analysis_report_status = True

        results = {
            "analysis_report_status": analysis_report_status,
            "incident_id": incident_id,
            "artifact_id": artifact_id,
            "attachment_id": attachment_id,
            "sample_final_result": sample_final_result
        }
        log.info("results: %s", results)

        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as err:
        yield FunctionError(err)
def _dt_utils_create_csv_table_function(self, event, *args, **kwargs):
    """Function: A utility function to take CSV data and add the rows to a
    named datatable."""
    try:
        # Instantiate a new Resilient API object
        res_client = self.rest_client()

        inputs = {
            "incident_id": get_function_input(
                kwargs, "incident_id", optional=False),  # number (required)
            "attachment_id": get_function_input(
                kwargs, "attachment_id", optional=True),  # number (optional)
            "has_headers": get_function_input(
                kwargs, "dt_has_headers", optional=False),  # boolean (required)
            "csv_data": get_function_input(
                kwargs, "dt_csv_data", optional=True),  # text (optional)
            "datable_name": get_function_input(
                kwargs, "dt_datable_name", optional=False),  # text (required)
            "mapping_table": get_function_input(
                kwargs, "dt_mapping_table", optional=False),  # text (required)
            "date_time_format": get_function_input(
                kwargs, "dt_date_time_format", optional=True),  # text (optional)
            "start_row": get_function_input(
                kwargs, "dt_start_row", optional=True),  # number (optional)
            "max_rows": get_function_input(
                kwargs, "dt_max_rows", optional=True),  # number (optional)
        }
        LOG.info(inputs)

        yield StatusMessage("Starting ...")

        mapping_table = convert_json(inputs['mapping_table'])
        if not mapping_table:
            raise ValueError(
                u"Unable to convert mapping_table to json: %s" % inputs['mapping_table'])

        # Create payload dict with inputs
        rp = ResultPayload(PACKAGE_NAME, **kwargs)

        # Either an attachment ID or CSV data is needed to be able to add rows
        if (inputs["attachment_id"] and inputs["csv_data"]) or \
                not (inputs["attachment_id"] or inputs["csv_data"]):
            raise ValueError("Specify either attachment_id or csv_data")

        if inputs["attachment_id"]:
            attachment_name = get_file_attachment_name(
                res_client, inputs['incident_id'],
                attachment_id=inputs["attachment_id"])
            b_csv_data = get_file_attachment(
                res_client, inputs['incident_id'],
                attachment_id=inputs["attachment_id"])
            csv_data = b_csv_data.decode("utf-8")
            if sys.version_info.major < 3:
                inline_data = BytesIO(b_csv_data)
            else:
                inline_data = StringIO(csv_data)
        else:
            attachment_name = None
            csv_data = inputs["csv_data"]
            if sys.version_info.major < 3:
                inline_data = StringIO(csv_data.encode("utf-8"))
            else:
                inline_data = StringIO(csv_data)

        datatable = RESDatatable(res_client, inputs["incident_id"],
                                 inputs["datable_name"])

        # Retrieve the column names for the datatable and their data types,
        # to compare against what the user provides and attempt data
        # conversion, if necessary
        fields = datatable.get_dt_headers()
        dt_ordered_columns = {
            fields[field]['order']: (fields[field]['name'],
                                     fields[field]['input_type'])
            for field in fields
        }
        # Ordered column names, in case we need to assign the headers to the
        # columns in column order
        dt_column_names = OrderedDict([
            dt_ordered_columns[field]
            for field in sorted(dt_ordered_columns.keys())
        ])

        # Different readers depending on whether we have headers or not
        dialect = csv.Sniffer().sniff(
            csv_data[0:csv_data.find('\n')])  # limit analysis to first row

        # py2 needs changes to the dialect to avoid unicode attributes
        if sys.version_info.major < 3:
            for attr in dir(dialect):
                a = getattr(dialect, attr)
                if type(a) == unicode:
                    setattr(dialect, attr, bytes(a))
        LOG.debug(dialect.__dict__)

        if inputs["has_headers"]:
            # Each row is a dictionary keyed by the column name
            reader = csv.DictReader(inline_data, dialect=dialect)
            csv_headers = reader.fieldnames  # just the headers
        else:
            # Each row is a list of values
            reader = csv.reader(inline_data, dialect=dialect)
            csv_headers = []

        mapping_table = build_mapping_table(mapping_table, csv_headers,
                                            dt_column_names)
        LOG.debug("csv headers to datatable columns: %s", mapping_table)

        # Perform the API calls to the datatable
        number_of_added_rows, number_of_rows_with_errors = self.add_to_datatable(
            reader, datatable, mapping_table, dt_column_names,
            inputs['date_time_format'], inputs['start_row'],
            inputs['max_rows'])
        LOG.info("Number of rows added: %s", number_of_added_rows)
        LOG.info("Number of rows that could not be added: %s",
                 number_of_rows_with_errors)

        row_data = {
            "data_source": attachment_name if attachment_name else "CSV data",
            "rows_added": number_of_added_rows,
            "rows_with_errors": number_of_rows_with_errors
        }
        results = rp.done(True, row_data)

        yield StatusMessage("Ending ...")
        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception as err:
        yield FunctionError(err)
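
# A standalone sketch of the dialect handling above: sniff the delimiter from
# the first line only, then build a DictReader or a plain reader depending on
# whether the data carries a header row. Python 3 only; the function name is
# illustrative.
import csv
from io import StringIO

def make_csv_reader(csv_data, has_headers):
    """Return (reader, headers) for a CSV string."""
    dialect = csv.Sniffer().sniff(csv_data[0:csv_data.find('\n')])
    inline_data = StringIO(csv_data)
    if has_headers:
        reader = csv.DictReader(inline_data, dialect=dialect)
        return reader, reader.fieldnames
    return csv.reader(inline_data, dialect=dialect), []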