Example #1
def send_data(type_info, inc_id, rest_client_helper, payload,
              feed_outputs, is_deleted, incl_attachment_data):
    """
    perform the sync to the different datastores
    :param type_info:
    :param inc_id:
    :param rest_client:
    :param payload:
    :param feed_outputs:
    :param is_deleted: true/false
    :param incl_attachment_data: true/false
    :return: None
    """
    context = FeedContext(type_info, inc_id,
                          rest_client_helper.inst_rest_client, is_deleted)

    type_name = type_info.get_pretty_type_name()
    # make sure the incident has an org_name
    if type_name == 'incident':
        payload['org_name'] = type_info.get_org_name(payload['org_id'])

    # collect attachment data to pass on
    elif not is_deleted and incl_attachment_data \
            and type_name == 'attachment':
        # this will return a byte string
        payload['content'] = get_file_attachment(
            rest_client_helper.inst_rest_client,
            inc_id,
            task_id=payload.get('task_id'),
            attachment_id=payload['id'])
    elif not is_deleted and incl_attachment_data \
            and type_name == 'artifact' \
            and payload.get('attachment'):
        # this will return a byte string
        payload['content'] = get_file_attachment(
            rest_client_helper.inst_rest_client,
            inc_id,
            artifact_id=payload['id'])

    for feed_output in feed_outputs:
        # don't let a failure in one feed break all the rest
        try:
            LOG.debug("Calling feed %s", feed_output.__class__.__name__)
            feed_output.send_data(context, payload)
        except Exception as err:
            LOG.error("Failure in update to %s %s",
                      feed_output.__class__.__name__, err)
            error_trace = traceback.format_exc()
            LOG.error("Traceback %s", error_trace)
Example #2
    def _utility_email_file_parser_function(self, event, *args, **kwargs):
        """Function: Parses .eml files for email forensics. Useful for reported phishes."""
        try:
            # Get the function parameters:
            incident_id = kwargs.get("incident_id")  # number
            attachment_id = kwargs.get("attachment_id")  # number
            eml_filename = kwargs.get("attachment_name")  # text

            # Get the eml file attachment by its incident and attachment IDs
            eml_file = get_file_attachment(self.rest_client(), incident_id, artifact_id=None, task_id=None, attachment_id=attachment_id)

            yield StatusMessage('Reading and decoding email message (' + eml_filename + ')...')

            # Parse the email content
            mail = email.message_from_string(eml_file.decode("utf-8"))  # Get the email object from the raw contents
            email_body, attachments, urls = get_decoded_email_body(self, incident_id, eml_filename, mail)  # Get the UTF-8 encoded body from the raw email string
            email_header = get_decoded_email_header(mail.items())

            results = {}
            results['body'] = str(email_body)  # The full email, HTML formatted
            results['header'] = email_header  # List of 2-tuples containing all the message’s field headers and values
            # results['mail_items'] = mail.items()  # List of 2-tuples containing all the decoded message’s field headers and values (3/12/2019: deprecated, use 'header')
            results['attachments'] = attachments  # List of attachment names from EML file
            results['urls'] = list(set(urls))  # URLs from the body; the set removes duplicates

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
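
get_decoded_email_body and get_decoded_email_header are project helpers not shown in this excerpt. For orientation, a self-contained sketch of the equivalent stdlib decoding step, assuming a local .eml file in place of a Resilient attachment:

import email

with open("sample.eml", "rb") as fh:  # hypothetical local file
    mail = email.message_from_string(fh.read().decode("utf-8"))

# Walk the MIME parts and pull out the first plain-text body
for part in mail.walk():
    if part.get_content_type() == "text/plain":
        charset = part.get_content_charset() or "utf-8"
        body = part.get_payload(decode=True).decode(charset, "replace")
        break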
    def _attachment_to_base64_function(self, event, *args, **kwargs):
        """Function: Produce base64 content of a file attachment."""
        try:
            log = logging.getLogger(__name__)

            # Get the function parameters:
            incident_id = kwargs.get("incident_id")  # number
            task_id = kwargs.get("task_id")  # number
            attachment_id = kwargs.get("attachment_id")  # number
            artifact_id = kwargs.get("artifact_id")  # number

            log.info("incident_id: %s", incident_id)
            log.info("task_id: %s", task_id)
            log.info("attachment_id: %s", attachment_id)
            log.info("artifact_id: %s", artifact_id)

            if incident_id is None:
                raise FunctionError("Error: incident_id must be specified.")
            elif attachment_id is None and artifact_id is None:
                raise FunctionError(
                    "Error: attachment_id or artifact_id must be specified.")
            else:
                yield StatusMessage("> Function inputs OK")

            yield StatusMessage("> Reading attachment...")

            client = self.rest_client()
            data = get_file_attachment(client,
                                       incident_id,
                                       artifact_id=artifact_id,
                                       task_id=task_id,
                                       attachment_id=attachment_id)
            metadata = get_file_attachment_metadata(
                client,
                incident_id,
                artifact_id=artifact_id,
                task_id=task_id,
                attachment_id=attachment_id)

            results = {
                "filename": metadata["name"],
                "content_type": metadata["content_type"],
                "size": metadata["size"],
                "created": metadata["created"],
                "content": b_to_s(base64.b64encode(data)),
            }
            yield StatusMessage("> Complete...")
            # Produce a FunctionResult with the return value
            log.debug(json.dumps(results, indent=2))
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
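
The function above returns the attachment as a base64 string under results["content"]. A downstream consumer (a post-process script, say) can recover the original bytes; this sketch assumes a results dict shaped like the one yielded above:

import base64

raw = base64.b64decode(results["content"])   # undo the b64encode above
with open(results["filename"], "wb") as fh:  # hypothetical local copy
    fh.write(raw)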
    def download_attachment_if_available(cls, artifact_id, attachment_id, gcp_artifact_input, incident_id,
                                         task_id):
        """
        Attempts to download an attachment if an artifact input was not specified.
        :param artifact_id:
        :param attachment_id:
        :param gcp_artifact_input:
        :param incident_id:
        :param task_id:
        :return:
        """
        attachment_input = None
        attachment_name = None
        # Check whether we are dealing with an attachment or artifact
        if (artifact_id or attachment_id or task_id) and gcp_artifact_input is None:
            LOG.info("Input appears to be an attachment, downloading from REST API")

            # Get the files data
            attachment_input = resilient_lib.get_file_attachment(
                incident_id=incident_id, artifact_id=artifact_id,
                attachment_id=attachment_id, task_id=task_id, res_client=cls.res_client)

            # Get the files name
            attachment_name = resilient_lib.get_file_attachment_name(
                incident_id=incident_id, artifact_id=artifact_id,
                attachment_id=attachment_id, task_id=task_id, res_client=cls.res_client)

            # Perform some special handling to get the text out of a PDF
            if '.pdf' in attachment_name:
                LOG.debug("Dealing with a PDF")
                attachment_input = cls.extract_text_from_pdf(attachment_input)
            elif '.odt' in attachment_name:
                LOG.debug("Dealing with a ODT")
                attachment_input = cls.extract_text_from_odt(attachment_input)
            elif '.docx' in attachment_name:
                LOG.debug("Dealing with a docx")
                attachment_input = cls.extract_text_from_docx(attachment_input)

        else:
            # We are not dealing with an attachment
            LOG.debug("Working with an artifact")

        return cls.attempt_to_parse_as_utf8(attachment_input), attachment_name
Example #5
    def _utility_txt_to_json_structure_function(self, event, *args, **kwargs):

        results = {}
        results["was_successful"] = False

        try:
            # Get the function parameters:
            incident_id = kwargs.get("incident_id")  # number
            attachment_id = kwargs.get("attachment_id")  # number
            attachment_name = kwargs.get("attachment_name")  # text (not required at this time)
            split_rows_on_new_lines = kwargs.get("split_rows_on_new_lines")  # bool
            row_limit = kwargs.get("row_limit")  # number (optional)

            log = logging.getLogger(__name__)  # Establish logging

            yield StatusMessage('Converting {} data to JSON...'.format(attachment_name))

            # Get the TXT file attachment by its incident and attachment IDs
            txt_file_data = get_file_attachment(self.rest_client(), incident_id, artifact_id=None, task_id=None, attachment_id=attachment_id)
            txt_file = StringIO(unicodedata.normalize("NFKD", txt_file_data.decode('utf-8', 'ignore')))

            txt_data = []

            if split_rows_on_new_lines is True:  # Each line will be split and added to the txt_data list
                lines = txt_file.readlines()
                row_index = 0
                for line in lines:
                    row_index += 1
                    txt_data.append({'content': line})
                    if row_limit and row_index >= int(row_limit): break

            else:  # All content will be in a single item in the txt_data list
                txt_data = {'content': txt_file.read()}

            results["json_data"] = [txt_data]

            results["was_successful"] = True

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
def get_incident_file_attachment(rest_client, incident_id, attachment_name):
    """
    Get incident file attachment data from Resilient.
    Gracefully fail by returning null if the attachment is not found.
    :param rest_client: Resilient REST API client
    :param incident_id: Resilient incident ID
    :param attachment_name: name of the attachment to get
    :return: base64 encoded attachment data, attachment ID
    """
    # Get the attachment ID
    attachment_id = get_attachment_id(rest_client, incident_id,
                                      attachment_name)
    if not attachment_id:
        return None, None

    # Get the attachment data content, base64 encode it, and decode bytes to string
    content = get_file_attachment(rest_client,
                                  incident_id,
                                  attachment_id=attachment_id)
    encoded_content = base64.b64encode(content).decode("utf-8")
    return encoded_content, attachment_id
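
A hedged usage sketch of the helper above; rest_client, the incident ID and the attachment name are placeholders:

import base64

content_b64, attachment_id = get_incident_file_attachment(
    rest_client, 2095, "report.pdf")
if content_b64 is None:
    print("attachment not found")         # the graceful-failure path
else:
    data = base64.b64decode(content_b64)  # back to raw bytes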
Example #7
    def _email_parse_function(self, event, *args, **kwargs):
        """Function: Extract message headers and body parts from an email message (.eml or .msg).
        Any attachments found are added to the Incident as Artifacts if 'utilities_parse_email_attachments' is set to True"""

        try:
            log = logging.getLogger(__name__)

            # Set variables
            parsed_email = path_tmp_file = path_tmp_dir = reason = results = None

            # Get the function inputs:
            fn_inputs = validate_fields(["incident_id"], kwargs)

            # Instantiate ResultPayload
            rp = ResultPayload(CONFIG_DATA_SECTION, **kwargs)

            # If it's just base64content as input, use parse_from_string
            if fn_inputs.get("base64content"):
                yield StatusMessage("Processing provided base64content")
                parsed_email = mailparser.parse_from_string(
                    b_to_s(base64.b64decode(fn_inputs.get("base64content"))))
                yield StatusMessage("Provided base64content processed")

            else:

                # Validate that either: (incident_id AND attachment_id OR artifact_id) OR (task_id AND attachment_id) is defined
                if not (fn_inputs.get("incident_id") and (fn_inputs.get("attachment_id") or fn_inputs.get("artifact_id"))) and \
                   not (fn_inputs.get("task_id") and fn_inputs.get("attachment_id")):
                    raise FunctionError(
                        "You must define either: (incident_id AND attachment_id OR artifact_id) OR (task_id AND attachment_id)"
                    )

                # Instantiate a new Resilient API object
                res_client = self.rest_client()

                # Get attachment metadata
                attachment_metadata = get_file_attachment_metadata(
                    res_client=res_client,
                    incident_id=fn_inputs.get("incident_id"),
                    artifact_id=fn_inputs.get("artifact_id"),
                    task_id=fn_inputs.get("task_id"),
                    attachment_id=fn_inputs.get("attachment_id"))

                # Get attachment content
                attachment_contents = get_file_attachment(
                    res_client=res_client,
                    incident_id=fn_inputs.get("incident_id"),
                    artifact_id=fn_inputs.get("artifact_id"),
                    task_id=fn_inputs.get("task_id"),
                    attachment_id=fn_inputs.get("attachment_id"))

                # Write the attachment_contents to a temp file
                path_tmp_file, path_tmp_dir = write_to_tmp_file(
                    attachment_contents,
                    tmp_file_name=attachment_metadata.get("name"))

                # Get the file_extension
                file_extension = os.path.splitext(path_tmp_file)[1]

                if file_extension == ".msg":
                    yield StatusMessage("Processing MSG File")
                    try:
                        parsed_email = mailparser.parse_from_file_msg(
                            path_tmp_file)
                        yield StatusMessage("MSG File processed")
                    except Exception as err:
                        reason = u"Could not parse {0} MSG File".format(
                            attachment_metadata.get("name"))
                        yield StatusMessage(reason)
                        results = rp.done(success=False,
                                          content=None,
                                          reason=reason)
                        log.error(err)

                else:
                    yield StatusMessage("Processing Raw Email File")
                    try:
                        parsed_email = mailparser.parse_from_file(
                            path_tmp_file)
                        yield StatusMessage("Raw Email File processed")
                    except Exception as err:
                        reason = u"Could not parse {0} Email File".format(
                            attachment_metadata.get("name"))
                        yield StatusMessage(reason)
                        results = rp.done(success=False,
                                          content=None,
                                          reason=reason)
                        log.error(err)

            if parsed_email is not None:
                if not parsed_email.mail:
                    reason = u"Raw email in unsupported format. Failed to parse {0}".format(
                        u"provided base64content" if fn_inputs.
                        get("base64content"
                            ) else attachment_metadata.get("name"))
                    yield StatusMessage(reason)
                    results = rp.done(success=False,
                                      content=None,
                                      reason=reason)

                else:
                    # Load all parsed email attributes into a Python Dict
                    # json.loads' "encoding" argument was removed in Python 3.9; mail_json is already text
                    parsed_email_dict = json.loads(parsed_email.mail_json)
                    parsed_email_dict["plain_body"] = parsed_email.text_plain_json
                    parsed_email_dict["html_body"] = parsed_email.text_html_json
                    yield StatusMessage("Email parsed")

                    # If the input 'utilities_parse_email_attachments' is true and some attachments were found
                    if fn_inputs.get("utilities_parse_email_attachments"
                                     ) and parsed_email_dict.get(
                                         "attachments"):

                        yield StatusMessage(
                            "Attachments found in email message")
                        attachments_found = parsed_email_dict.get(
                            "attachments")

                        # Loop attachments found
                        for attachment in attachments_found:

                            yield StatusMessage(
                                u"Attempting to add {0} to Incident: {1}".format(
                                    attachment.get("filename"),
                                    fn_inputs.get("incident_id")))

                            # Write the attachment.payload to a temp file
                            path_tmp_file, path_tmp_dir = write_to_tmp_file(
                                data=s_to_b(attachment.get("payload")),
                                tmp_file_name=attachment.get("filename"),
                                path_tmp_dir=path_tmp_dir)

                            artifact_description = u"This email attachment was found in the parsed email message from: '{0}'".format(
                                u"provided base64content" if fn_inputs.
                                get("base64content"
                                    ) else attachment_metadata.get("name"))

                            # POST the artifact to Resilient as an 'Email Attachment' Artifact
                            res_client.post_artifact_file(
                                uri=ARTIFACT_URI.format(
                                    fn_inputs.get("incident_id")),
                                artifact_type=EMAIL_ATTACHMENT_ARTIFACT_ID,
                                artifact_filepath=path_tmp_file,
                                description=artifact_description,
                                value=attachment.get("filename"),
                                mimetype=attachment.get("mail_content_type"))

                    results = rp.done(True, parsed_email_dict)

            else:
                reason = u"Raw email in unsupported format. Failed to parse {0}".format(
                    u"provided base64content" if fn_inputs.
                    get("base64content") else attachment_metadata.get("name"))
                yield StatusMessage(reason)
                results = rp.done(success=False, content=None, reason=reason)

            log.info("Done")

            yield FunctionResult(results)
        except Exception:
            yield FunctionError()

        finally:
            # Remove the tmp directory
            if path_tmp_dir and os.path.isdir(path_tmp_dir):
                shutil.rmtree(path_tmp_dir)
Example #8
    def _attachment_zip_list_function(self, event, *args, **kwargs):
        """Function: For a zipfile attachment, return a list of its contents."""
        try:
            log = logging.getLogger(__name__)

            # Get the function parameters:
            incident_id = kwargs.get("incident_id")  # number
            task_id = kwargs.get("task_id")  # number
            attachment_id = kwargs.get("attachment_id")  # number

            log.info("incident_id: %s", incident_id)
            log.info("task_id: %s", task_id)
            log.info("attachment_id: %s", attachment_id)
            if incident_id is None and task_id is None:
                raise FunctionError(
                    "Error: incident_id or task_id must be specified.")
            if attachment_id is None:
                raise FunctionError("Error: attachment_id must be specified.")

            yield StatusMessage("Reading attachment...")

            client = self.rest_client()
            data = get_file_attachment(client,
                                       incident_id,
                                       task_id=task_id,
                                       attachment_id=attachment_id)

            results = {}
            with tempfile.NamedTemporaryFile(delete=False) as temp_file:
                try:
                    temp_file.write(data)
                    temp_file.close()
                    # Examine with zip
                    zfile = zipfile.ZipFile(temp_file.name, "r")
                    results["namelist"] = zfile.namelist()

                    # Don't include zinfo.extra since it's not a string
                    results["infolist"] = [{
                        "filename":
                        zinfo.filename,
                        "date_time":
                        epoch_millis(zinfo.date_time),
                        "compress_type":
                        zinfo.compress_type,
                        "comment":
                        b_to_s(zinfo.comment),
                        "create_system":
                        zinfo.create_system,
                        "create_version":
                        zinfo.create_version,
                        "extract_version":
                        zinfo.extract_version,
                        "flag_bits":
                        zinfo.flag_bits,
                        "volume":
                        zinfo.volume,
                        "internal_attr":
                        zinfo.internal_attr,
                        "external_attr":
                        zinfo.external_attr,
                        "header_offset":
                        zinfo.header_offset,
                        "CRC":
                        zinfo.CRC,
                        "compress_size":
                        zinfo.compress_size,
                        "file_size":
                        zinfo.file_size
                    } for zinfo in zfile.infolist()]
                except (zipfile.LargeZipFile, zipfile.BadZipfile) as exc:
                    # results["error"] = str(exc)
                    raise
                finally:
                    os.unlink(temp_file.name)
            # Produce a FunctionResult with the return value
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
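
epoch_millis is not defined in this excerpt. Since zipfile's ZipInfo.date_time is a (year, month, day, hour, minute, second) tuple, a plausible implementation (UTC assumed) looks like:

import calendar
from datetime import datetime

def epoch_millis(date_time_tuple):
    # Convert a ZipInfo date_time 6-tuple to epoch milliseconds
    dt = datetime(*date_time_tuple)
    return calendar.timegm(dt.timetuple()) * 1000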
Example #9
    def _attachment_zip_extract_function(self, event, *args, **kwargs):
        """Function: Extract a file from a zipfile attachment, producing a base64 string."""
        try:
            log = logging.getLogger(__name__)

            # Get the function parameters:
            incident_id = kwargs.get("incident_id")  # number
            task_id = kwargs.get("task_id")  # number
            attachment_id = kwargs.get("attachment_id")  # number
            file_path = kwargs.get("file_path")  # text
            zipfile_password = kwargs.get("zipfile_password")  # text

            if incident_id is None and task_id is None:
                raise FunctionError(
                    "Error: incident_id or task_id must be specified.")
            if attachment_id is None:
                raise FunctionError("Error: attachment_id must be specified.")
            if file_path is None:
                raise FunctionError("Error: file_path must be specified.")

            log.info("incident_id: %s", incident_id)
            log.info("task_id: %s", task_id)
            log.info("attachment_id: %s", attachment_id)
            log.info("file_path: %s", file_path)

            yield StatusMessage("Reading attachment...")

            client = self.rest_client()
            data = get_file_attachment(client,
                                       incident_id,
                                       task_id=task_id,
                                       attachment_id=attachment_id)

            results = {}
            with tempfile.NamedTemporaryFile(delete=False) as temp_file:
                try:
                    temp_file.write(data)
                    temp_file.close()
                    # Examine with zip
                    zfile = zipfile.ZipFile(temp_file.name, "r")
                    # Read the metadata, since it may be useful
                    zinfo = zfile.getinfo(file_path)
                    # Don't include zinfo.extra since it's not a string
                    results["info"] = {
                        "filename": zinfo.filename,
                        "date_time": epoch_millis(zinfo.date_time),
                        "compress_type": zinfo.compress_type,
                        "comment": b_to_s(zinfo.comment),
                        "create_system": zinfo.create_system,
                        "create_version": zinfo.create_version,
                        "extract_version": zinfo.extract_version,
                        "flag_bits": zinfo.flag_bits,
                        "volume": zinfo.volume,
                        "internal_attr": zinfo.internal_attr,
                        "external_attr": zinfo.external_attr,
                        "header_offset": zinfo.header_offset,
                        "CRC": zinfo.CRC,
                        "compress_size": zinfo.compress_size,
                        "file_size": zinfo.file_size
                    }
                    # Extract the file we want
                    b64data = base64.b64encode(
                        zfile.read(file_path, s_to_b(zipfile_password)))
                    results["content"] = b_to_s(b64data)
                except (KeyError, zipfile.LargeZipFile,
                        zipfile.BadZipfile) as exc:
                    # results["error"] = str(exc)
                    # To help debug, list the contents
                    log.info(zfile.namelist())
                    raise
                finally:
                    os.unlink(temp_file.name)
            # Produce a FunctionResult with the return value
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
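
b_to_s and s_to_b are small byte/string helpers from resilient-lib used throughout these examples; plausible sketches of what they do (UTF-8 assumed, Python 3 names):

def b_to_s(value):
    # bytes -> native str; pass anything else (e.g. None) through
    return value.decode("utf-8") if isinstance(value, bytes) else value

def s_to_b(value):
    # str -> bytes; zipfile.read() expects the password as bytes
    return value.encode("utf-8") if isinstance(value, str) else value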
    def upload_attachment(self, src_rest_client, orig_org_id, orig_inc_id,
                          type_name, payload, orig_type_id):
        """
        attachments may be incident level, associated with tasks, or part of an artifact
        see write_artifact_attachment for artifact file uploads
        :param src_rest_client:
        :param orig_org_id:
        :param orig_inc_id:
        :param type_name:
        :param payload:
        :param orig_type_id:
        :return: None
        """
        src_artifact_id = src_attachment_id = src_task_id = dst_task_id = None
        if payload.get('attachment', {}).get('content_type', None):
            src_artifact_id = orig_type_id
        else:
            src_attachment_id = orig_type_id

        # find the incident for this attachment
        sync_inc_id, sync_state = self.dbsync.find_incident(
            orig_org_id, orig_inc_id)

        # do nothing if incident already deleted, bypassed or filtered
        if sync_state in ['deleted', 'bypassed', 'filtered']:
            LOG.debug("No action on %s: %s:%s->%s", sync_state, orig_inc_id,
                      type_name, orig_type_id)
            return

        # is this a task based attachment?
        if payload.get("task_id", None):
            src_task_id = payload.get("task_id")
            _, dst_task_id, sync_state = self.dbsync.find_sync_row(
                orig_org_id, orig_inc_id, "task", src_task_id)
            # if the task doesn't exist, the attachment will be requeued
            if not dst_task_id:
                LOG.warning(
                    "task:%s->%s for attachment id '%s' does not exist, queuing",
                    orig_inc_id, src_task_id, orig_type_id)
                self.dbsync.create_retry_row(orig_org_id, orig_inc_id, "task",
                                             src_task_id, type_name,
                                             orig_type_id, sync_inc_id,
                                             payload, 1)
                return
        else:
            # get the target attachment, if it exists
            _, sync_type_id, _ = self.dbsync.find_sync_row(
                orig_org_id, orig_inc_id, type_name, orig_type_id)
            # attachments cannot be updated
            if sync_type_id:
                return

        # incident missing?
        if not sync_inc_id:
            self.dbsync.create_retry_row(
                orig_org_id, orig_inc_id,
                "task" if src_task_id else "incident",
                src_task_id if src_task_id else orig_inc_id, type_name,
                orig_type_id, None, payload, 1)
            return

        # read the attachment from the source Resilient
        attachment_contents = get_file_attachment(
            src_rest_client,
            orig_inc_id,
            attachment_id=src_attachment_id,
            task_id=src_task_id,
            artifact_id=src_artifact_id)

        file_handle = io.BytesIO(attachment_contents)

        LOG.debug('adding %s:%s->%s to %s:%s', type_name, orig_inc_id,
                  orig_type_id, self.rest_client.org_id, sync_inc_id)

        # artifact as file attachment?
        if src_artifact_id:
            response = self.write_artifact_file(payload['attachment']['name'],
                                                file_handle, sync_inc_id,
                                                payload)
        else:
            try:
                response = write_file_attachment(
                    self.rest_client,
                    payload['name'],
                    file_handle,
                    sync_inc_id,
                    task_id=dst_task_id,
                    content_type=payload['content_type'])
            except Exception as err:
                LOG.error("Unable to create attachment for file: %s",
                          payload['name'])
                LOG.error(payload)
                LOG.exception(err)
                response = None

        # create sync row
        if response:
            new_type_id = response.get('id', None)
            self.dbsync.create_sync_row(orig_org_id, orig_inc_id, type_name,
                                        orig_type_id, sync_inc_id, new_type_id,
                                        'active')

            LOG.info('added %s:%s->%s to %s:%s->%s', type_name, orig_inc_id,
                     orig_type_id, self.rest_client.org_id, sync_inc_id,
                     new_type_id)
        else:
            LOG.error('error adding %s:%s->%s to %s:%s', type_name,
                      orig_inc_id, orig_type_id, self.rest_client.org_id,
                      sync_inc_id)
    def _isitphishing_html_document_function(self, event, *args, **kwargs):
        """Function: isitphishing_html_document
        This function takes an incident id as a required parameter and
        task_id, attachment_id, and artifact_id as optional input which
        specifies an HTML document to be base64 encoded and sent to the
        Vade Secure API endpoint:
        https://ws.isitphishing.org/api/v2/document
        for analysis to determine if the document contains phishing.
        The "results" dictionary contains the result of the API query in
        "contents" and the "inputs" parameters to the function.
        """
        try:
            rp = ResultPayload(CONFIG_DATA_SECTION, **kwargs)

            # Get the function parameters:
            incident_id = kwargs.get("incident_id")  # number
            task_id = kwargs.get("task_id")  # number
            attachment_id = kwargs.get("attachment_id")  # number
            artifact_id = kwargs.get("artifact_id")  # number

            log = logging.getLogger(__name__)
            log.info("incident_id: %s", incident_id)
            log.info("task_id: %s", task_id)
            log.info("attachment_id: %s", attachment_id)
            log.info("artifact_id: %s", artifact_id)

            # Form the URL for API request.
            API_URL = u"{0}/document".format(self.options["isitphishing_api_url"])

            # Get the license key to access the API endpoint.
            auth_token = get_license_key(self.options["isitphishing_name"], self.options["isitphishing_license"])

            # Build the header and the data payload.
            headers = {
                "Authorization": u'Bearer {}'.format(auth_token),
                "Content-type": "application/json",
                "Accept": "application/json"
            }

            # Build the document payload which is a base64-encoded string.
            client = self.rest_client()

            # Get the attachment data
            data = get_file_attachment(client, incident_id, artifact_id, task_id, attachment_id)
            filename = get_file_attachment_name(client, incident_id, artifact_id, task_id, attachment_id)

            # Base64 encode the document string and build payload.
            base64encoded_doc = base64.b64encode(data).decode("ascii")
            payload = {"document": base64encoded_doc}

            yield StatusMessage("Query isitPhishing endpoint for status of document.")

            # Make API URL request
            rc = RequestsCommon(self.opts, self.options)
            results_analysis = rc.execute_call("post", API_URL, payload, log=log, headers=headers)

            results = rp.done(True, results_analysis)
            # add back in the filename
            results["inputs"]["filename"] = filename

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
Example #12
    def _fn_vmray_sandbox_analyzer_function(self, event, *args, **kwargs):
        """Function: for VMRay Cloud Analyzer integration"""
        def write_temp_file(data, name=None):
            if name:
                path = os.path.join(tempfile.gettempdir(), name)
            else:
                tf = tempfile.mkstemp()
                path = tf[1]

            with open(path, 'wb') as fo:
                fo.write(data)
            return path

        try:
            # Get VMRay Sandbox options from app.config file
            VMRAY_API_KEY = self.options.get("vmray_api_key")
            VMRAY_ANALYZER_URL = self.options.get("vmray_analyzer_url")
            VMRAY_ANALYSIS_REPORT_REQUEST_TIMEOUT = float(
                self.options.get("vmray_analyzer_report_request_timeout"))

            # Get the function parameters:
            incident_id = kwargs.get("incident_id")  # number
            artifact_id = kwargs.get("artifact_id")  # number
            attachment_id = kwargs.get("attachment_id")  # number
            analysis_report_status = kwargs.get(
                "analysis_report_status")  # Boolean
            sample_ids = kwargs.get("sample_ids") or []  # List

            if not incident_id:
                raise ValueError("incident_id is required")
            if (not attachment_id) and (not artifact_id):
                raise ValueError("attachment_id or artifact_id is required")

            log = logging.getLogger(__name__)
            log.info("incident_id: %s", incident_id)
            log.info("artifact_id: %s", artifact_id)
            log.info("attachment_id: %s", attachment_id)
            log.info("analysis_report_status: %s", analysis_report_status)
            log.info("sample_ids: %s", sample_ids)

            sample_final_result = []

            if not analysis_report_status:

                # VMRay client and Resilient client
                vmray = VMRayAPI(VMRAY_API_KEY,
                                 url=VMRAY_ANALYZER_URL,
                                 proxies=RequestsCommon(
                                     self.opts, self.options).get_proxies())
                resilient = self.rest_client()

                # Get attachment entity we are dealing with (either attachment or artifact)
                # then submit it to VMRay Analyzer

                sample_file = get_file_attachment(res_client=resilient,
                                                  incident_id=incident_id,
                                                  artifact_id=artifact_id,
                                                  attachment_id=attachment_id)
                sample_name = get_file_attachment_name(
                    res_client=resilient,
                    incident_id=incident_id,
                    artifact_id=artifact_id,
                    attachment_id=attachment_id)

                # with tempfile.NamedTemporaryFile('w+b', bufsize=0, delete=True) as temp_file_binary:
                #     temp_file_binary.write(sample_file)
                #     sample_ids = [sample["sample_id"] for sample in vmray.submit_samples(temp_file_binary.name, sample_name)]

                with open(write_temp_file(sample_file, sample_name),
                          "rb") as handle:
                    sample_ids = [
                        sample["sample_id"] for sample in vmray.submit_samples(
                            handle, sample_name)
                    ]

                log.info("sample_ids: " + str(sample_ids))

                # A new sample submission can take hours to finish, so poll
                # until the analysis is done or the timeout is reached.
                time_of_begin_check_report = time.time()
                is_samples_analysis_finished = all(
                    vmray.check(sample_id) for sample_id in sample_ids)

                while not is_samples_analysis_finished:
                    if time.time() - time_of_begin_check_report > VMRAY_ANALYSIS_REPORT_REQUEST_TIMEOUT:
                        yield StatusMessage(
                            "Analysis still running at VMRay Cloud Analyzer, please check back later.")
                        break
                    yield StatusMessage(
                        "Analysis report not ready yet, checking again in {} seconds".format(
                            CHECK_REPORTS_SLEEP_TIME))
                    time.sleep(CHECK_REPORTS_SLEEP_TIME)
                    is_samples_analysis_finished = all(
                        vmray.check(sample_id) for sample_id in sample_ids)

                if is_samples_analysis_finished:
                    for sample_id in sample_ids:
                        sample_final_result.append({
                            "sample_id": sample_id,
                            "sample_report": vmray.get_sample_report(sample_id)["data"],
                            "sample_reputation_report": vmray.get_sample_reputation_report(sample_id)["data"],
                            "sample_analysis_report": vmray.get_sample_anlysis_report(sample_id)["data"]
                        })

                    analysis_report_status = True

            results = {
                "analysis_report_status": analysis_report_status,
                "incident_id": incident_id,
                "artifact_id": artifact_id,
                "attachment_id": attachment_id,
                "sample_final_result": sample_final_result
            }

            log.info("results: " + str(results))
            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception as err:
            yield FunctionError(err)
Example #13
    def _attachment_hash_function(self, event, *args, **kwargs):
        """Function: Calculate hashes for a file attachment."""
        try:
            log = logging.getLogger(__name__)

            # Get the function parameters:
            incident_id = kwargs.get("incident_id")  # number
            task_id = kwargs.get("task_id")  # number
            attachment_id = kwargs.get("attachment_id")  # number

            log.info("incident_id: %s", incident_id)
            log.info("task_id: %s", task_id)
            log.info("attachment_id: %s", attachment_id)
            if incident_id is None and task_id is None:
                raise FunctionError(
                    "Error: incident_id or task_id must be specified.")
            if attachment_id is None:
                raise FunctionError("Error: attachment_id must be specified.")

            yield StatusMessage("Reading attachment...")

            client = self.rest_client()
            data = get_file_attachment(client,
                                       incident_id,
                                       task_id=task_id,
                                       attachment_id=attachment_id)
            metadata = get_file_attachment_metadata(
                client,
                incident_id,
                task_id=task_id,
                attachment_id=attachment_id)

            results = {
                "filename": metadata["name"],
                "content_type": metadata["content_type"],
                "size": metadata["size"],
                "created": metadata["created"]
            }

            # Hashlib provides a list of all "algorithms_available", but there's duplication, so
            # use the standard list: ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
            # Hashlib in 3.2 and above does not supports hashlib.algorithms_guaranteed this contains a longer
            # list but will be used instead of hashlib.algorithms
            if sys.version_info.major >= 3:
                algorithms = hashlib.algorithms_guaranteed
            else:
                algorithms = hashlib.algorithms

            for algo in algorithms:
                impl = hashlib.new(algo)
                impl.update(data)
                # shake algorithms require a 'length' parameter
                if algo.startswith("shake_"):
                    length_list = algo.split('_')
                    results[algo] = impl.hexdigest(int(length_list[-1]))
                else:
                    results[algo] = impl.hexdigest()

            log.info(u"{} sha1={}".format(metadata["name"], results["sha1"]))

            # Produce a FunctionResult with the return value
            log.debug(json.dumps(results))
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
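
A quick sanity check of the output: hash the same bytes locally and compare them with the function results. This sketch reuses the data and results variables from the example above; md5 and sha256 appear in both guaranteed algorithm lists:

import hashlib

assert hashlib.sha256(data).hexdigest() == results["sha256"]
assert hashlib.md5(data).hexdigest() == results["md5"]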
    def _docker_run_docker_container_function(self, event, *args, **kwargs):
        """Function: A function intended to be used to create a Docker Container from an image, feed an input to the container and then return the results."""
        try:

            # Get the function parameters:
            artifact_id = kwargs.get("artifact_id")  # number
            attachment_id = kwargs.get("attachment_id")  # number
            incident_id = kwargs.get("incident_id")  # number
            task_id = kwargs.get("task_id")  # number
            docker_image = self.get_select_param(
                kwargs.get("docker_image")
            )  # select, values: "volatility", "nsrl", "plaso", "bloodhound"
            docker_input = kwargs.get("docker_input")  # text
            docker_operation = kwargs.get("docker_operation")  # text
            docker_artifact_type = kwargs.get("docker_artifact_type")
            attachment_name = None  # Initialise attachment name as None

            payload = ResultPayload("fn_docker", **kwargs)

            log = logging.getLogger(__name__)
            log.info("artifact_id: %s", artifact_id)
            log.info("attachment_id: %s", attachment_id)
            log.info("incident_id: %s", incident_id)
            log.info("task_id: %s", task_id)
            log.info("docker_image: %s", docker_image)
            log.info("docker_input: %s", docker_input)
            log.info("docker_artifact_type: %s", docker_artifact_type)
            log.info("docker_operation: %s", docker_operation)

            helper = ResDockerHelper(self.options)
            image_to_use = helper.get_config_option(
                self.options, "docker_image", True) or docker_image

            # Prepare the args which will be rendered into the app.config cmd
            escaped_args = {
                "docker_input": render(u"{{docker_input|%s}}" % "sh", kwargs),
            }
            attachment_file_name = None  # Initialise filename as None to avoid reference errors
            # Check whether we are dealing with an attachment or artifact
            if (artifact_id or attachment_id
                    or task_id) and docker_input is None:
                log.debug(
                    "Input appears to be an attachment, attempting to parse.")

                yield StatusMessage("Downloading Attachment from REST API")
                # Get the files data
                attachment_input = resilient_lib.get_file_attachment(
                    incident_id=incident_id,
                    artifact_id=artifact_id,
                    attachment_id=attachment_id,
                    task_id=task_id,
                    res_client=self.rest_client())

                # Get the files name
                attachment_name = resilient_lib.get_file_attachment_name(
                    incident_id=incident_id,
                    artifact_id=artifact_id,
                    attachment_id=attachment_id,
                    task_id=task_id,
                    res_client=self.rest_client())
                # Get the external directory in which to save the file
                output_vol = helper.get_config_option(
                    options=self.all_options.get('{}{}'.format(
                        CONFIGSECTIONPREFIX, image_to_use)),
                    option_name="primary_source_dir",
                    optional=True)

                log.debug("Writing attachment to bind folder")

                if os.path.isdir(output_vol):
                    # Convert to named temp file
                    with tempfile.NamedTemporaryFile(
                            delete=False,
                            prefix=DOCKERATTACHMENTPREFIX,
                            dir=output_vol) as temp_file:
                        try:
                            temp_file.write(attachment_input)
                            os.chmod(temp_file.name, 0o666)
                            temp_file.close()

                        finally:
                            attachment_file_name = os.path.split(
                                temp_file.name)[1]
                            log.debug("Saving file to %s", temp_file.name)

                            # Add an attachment_input arg to be rendered into the cmd command
                            escaped_args.update({
                                "attachment_input": render(
                                    "{{attachment_input|%s}}" % "sh",
                                    {u"attachment_input": attachment_file_name}),
                            })
                            yield StatusMessage(
                                u"Added this as an Attachment Input: {}".format(
                                    attachment_name))
                else:
                    errMsg = u"""Could not write file to directory, does the directory {0} exist? If not create it with mkdir {0}""".format(
                        output_vol)
                    raise FunctionError(errMsg)
            else:
                # We are not dealing with an attachment
                log.debug("Working with an artifact")

            docker_interface = DockerUtils()

            # Decide whether to use local connection or remote
            docker_interface.setup_docker_connection(options=self.options)

            # Ensure the specified image is an approved one
            if image_to_use not in helper.get_config_option(
                    self.options, "docker_approved_images").split(","):
                raise ValueError(
                    "Image is not in list of approved images. Review your app.config"
                )

            # Gather the command to send to the image and format docker_extra_kwargs for any image specific volumes
            command, docker_extra_kwargs, image_fullname, docker_operation = docker_interface.gather_image_args_and_volumes(
                helper, image_to_use, self.all_options, escaped_args,
                docker_operation)

            # Pass the values as separate lazy-formatting args; the original
            # passed a single tuple for two %s placeholders, which fails at log time
            log.info(
                "Command: %s \n Volume Bind: %s",
                command, docker_extra_kwargs.get('volumes', "No Volumes"))
            # Now Get the Image
            docker_interface.get_image(image_fullname)
            # Get the Client
            docker_client = docker_interface.get_client()

            yield StatusMessage("Now starting container with input")
            try:
                # Run container using client
                container = docker_client.containers.run(
                    image=image_fullname,
                    command=render(command, escaped_args),
                    detach=True,   # Detach from container
                    remove=False,  # Removed manually after gathering info
                    **docker_extra_kwargs)

                container_stats = docker_interface.gather_container_stats(
                    container_id=container.id)
                container_id = container.id
                # Gather the logs as they happen, until the container finishes.
                container_logs = container.logs(follow=True)

                yield StatusMessage("Container has finished and logs gathered")
                """
                Attempt to remove the container now we have finished.
                Will throw an exception if the container has already been removed"""

                container_status = container.wait()
                container.remove()

            except requests.exceptions.HTTPError as request_exception:
                yield StatusMessage(
                    u"""Encountered issue when trying to remove container: {} \n {}"""
                    .format(
                        request_exception,
                        u"""If you supplied an extra app.config value to remove the container this is expected."""
                    ))

            timestamp_epoch = int(time.time() * 1000)
            # Setup tempfile to write back the attachment
            with tempfile.NamedTemporaryFile(mode="w+t",
                                             delete=False) as temp_upload_file:
                try:
                    new_attachment_name = helper.format_result_attachment_name(
                        image_to_use, container_id)

                    # Write and close tempfile
                    temp_upload_file.write(
                        helper.format_output_attachment_body(
                            container_id, docker_operation,
                            attachment_file_name,
                            docker_artifact_type, docker_input,
                            container_logs.decode('utf-8'), timestamp_epoch))
                    temp_upload_file.close()
                    #  Access Resilient API
                    client = self.rest_client()

                    # Create POST uri
                    # ..for a task, if task_id is defined
                    if task_id:
                        attachment_uri = '/tasks/{}/attachments'.format(
                            task_id)
                    # ...else for an attachment
                    else:
                        attachment_uri = '/incidents/{}/attachments'.format(
                            incident_id)

                    # POST the new attachment
                    new_attachment = client.post_attachment(
                        attachment_uri,
                        temp_upload_file.name,
                        filename=new_attachment_name,
                        mimetype='text/plain')

                except Exception as upload_exception:
                    err_msg = u"""Encountered issue when saving results to a file and uploading via REST API. Exception:  {0}""".format(
                        upload_exception)
                    raise FunctionError(err_msg)
                finally:
                    os.unlink(temp_upload_file.name)

            results = payload.done(
                # The container exits with status code 0 on success
                success=(container_status.get("StatusCode", 1) == 0),
                content={
                    "logs": container_logs.decode('utf-8'),
                    "container_exit_status": container_status,
                    "container_stats": container_stats,
                    "container_id": container_id,
                    "res_links": {
                        "res_object":
                        helper.prepare_res_link(host=self.host_config[0],
                                                incident_id=incident_id,
                                                task_id=task_id)
                    },
                    "attachment_name": attachment_name or None,
                })
            results["metrics"]["timestamp_epoch"] = timestamp_epoch

            # Update the docker_operation input to ensure we have captured the operation done, if any
            results["inputs"]["docker_operation"] = docker_operation
            # Produce a FunctionResult with the results using the FunctionPayload
            yield FunctionResult(results)
            log.debug("RESULTS: %s", results)
            log.info("Complete")
        except Exception:
            yield FunctionError()
        finally:
            try:
                os.unlink(temp_file.name)
            except NameError:
                log.debug(
                    "Error when trying to unlink file, appears file does not exist."
                )
            else:
                log.debug("Successfully cleaned up file")
    def _utility_csv_to_json_structure_function(self, event, *args, **kwargs):

        results = {}
        results["was_successful"] = False

        try:
            # Get the function parameters:
            incident_id = kwargs.get("incident_id")  # number
            attachment_id = kwargs.get("attachment_id")  # number
            csv_filename = kwargs.get(
                "attachment_name")  # text (not required at this time)
            csv_fields = kwargs.get(
                "csv_fields"
            )  # text list (optional) (ie: inputs.csv_fields = "Name", "LastAccessTime", "Field To Skip" "CreationTime")
            table_fields = kwargs.get(
                "table_fields"
            )  # text list (optional) (ie: inputs.table_fields = "Name", "LastAccessTime", "CreationTime")
            row_limit = kwargs.get("row_limit")  # number (optional)
            column_limit = kwargs.get("column_limit")  # number (optional)

            log = logging.getLogger(__name__)  # Establish logging

            # Get the CSV file attachment by its incident and attachment IDs
            csv_file_data = get_file_attachment(self.rest_client(),
                                                incident_id,
                                                artifact_id=None,
                                                task_id=None,
                                                attachment_id=attachment_id)
            csv_file = StringIO(
                unicodedata.normalize("NFKD",
                                      csv_file_data.decode('utf-8', 'ignore')))
            try:
                csv_dialect = csv.Sniffer().sniff(csv_file.readline())
                csv_delimiter = csv_dialect.delimiter
            except csv.Error:  # Sniffer could not determine the dialect
                csv_dialect = None
                csv_delimiter = ','
            csv_file.seek(0)

            # Clean the pre-processor provided csv_fields to ensure a clean tuple
            if csv_fields:
                if ',' in csv_fields and '[' not in csv_fields:
                    if '(' not in csv_fields:
                        csv_fields = '(' + csv_fields + ')'
                    # Strip parentheses and quotes, then split on commas
                    csv_fields = tuple(
                        item.strip()
                        for item in csv_fields.replace('(', '').replace(
                            ')', '').replace("'", '').split(','))
                elif '[' in csv_fields:
                    csv_fields = tuple(
                        row.strip(' ')
                        for row in csv_fields.strip('][').split(','))
                else:
                    # A single field name: wrap it in a tuple rather than
                    # iterating over its characters
                    csv_fields = (csv_fields.strip(),)

            # Clean the pre-processor provided table_fields to ensure a clean tuple
            if table_fields:
                if ',' in table_fields and '[' not in table_fields:
                    if '(' not in table_fields:
                        table_fields = '(' + table_fields + ')'
                    # Strip parentheses and quotes, then split on commas
                    table_fields = tuple(
                        item.strip()
                        for item in table_fields.replace('(', '').replace(
                            ')', '').replace("'", '').split(','))
                elif '[' in table_fields:
                    table_fields = tuple(
                        row.strip(' ')
                        for row in table_fields.strip('][').split(','))
                else:
                    # A single field name: wrap it in a tuple rather than
                    # iterating over its characters
                    table_fields = (table_fields.strip(),)

            # If csv_fields is not provided
            if not csv_fields:
                csv_fields = None  # Use first row as keys

            # If csv_fields matches the first row of the data, the header row
            # is duplicated and should be skipped (best-effort comparison)
            first_row = csv_file_data.decode('utf-8', 'ignore').partition('\n')[0]
            if csv_fields and csv_fields == tuple(
                    field.strip() for field in first_row.split(csv_delimiter)):
                next(csv_file)  # Pass over the duplicated header row

            yield StatusMessage(
                'Converting {} data to JSON...'.format(csv_filename))

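            # Strip NUL bytes as lines are read below: the csv module raises
            # "line contains NULL byte" if any slip through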
            csv_data = csv.DictReader(
                (line.replace('\0', '') for line in csv_file),
                fieldnames=csv_fields,
                dialect='excel',
                delimiter=csv_delimiter)

            if not table_fields:
                table_fields = csv_data.fieldnames

            # csv.DictReader() returns unordered dictionaries on Python 2, so we rebuild each row as an OrderedDict keyed to the fieldname order
            order_maintained_rows = []
            row_index = 0
            for row in csv_data:
                if not any([str(value).strip() for value in row.values()]):
                    continue  # Skip empty rows
                row_index += 1
                row_dict = dict(
                    filter(lambda i: i[0] in table_fields, row.items())
                )  # Remove row items not included in table_fields
                fieldnames_tuple = tuple(
                    filter(lambda i: i in row_dict.keys(), table_fields)
                )  # Remove table_fields not found in row items
                # Truncate the row when it has more columns than column_limit allows
                if column_limit and int(column_limit) < len(row_dict):
                    order_maintained_rows.append(
                        OrderedDict(
                            sorted(row_dict.items(),
                                   key=lambda item: fieldnames_tuple.index(
                                       item[0]))[:int(column_limit)]))
                else:
                    order_maintained_rows.append(
                        OrderedDict(
                            sorted(row_dict.items(),
                                   key=lambda item: fieldnames_tuple.index(
                                       item[0]))))
                if row_limit and row_index >= int(row_limit): break

            results["json_data"] = order_maintained_rows
            results["fieldnames"] = table_fields
            results["was_successful"] = True

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
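
For reference, here is a minimal standalone sketch of the same normalize-then-convert idea. The sample data and the clean_fields helper are illustrative stand-ins, not part of the function above:

import csv
from collections import OrderedDict
from io import StringIO

def clean_fields(raw):
    """Normalize a pre-processor string like "('a', 'b')" or "[a, b]" into a tuple of names."""
    if not raw:
        return None
    raw = raw.strip('()[]')
    return tuple(item.strip().strip("'\"") for item in raw.split(','))

sample_csv = "name,size\nreport.pdf,2048\nnotes.txt,512\n"
fields = clean_fields("('name', 'size')")

reader = csv.DictReader(StringIO(sample_csv), fieldnames=fields)
next(reader)  # the supplied fieldnames duplicate the header row, so skip it

rows = [OrderedDict((key, row[key]) for key in fields) for row in reader]
print(rows)
# [OrderedDict([('name', 'report.pdf'), ('size', '2048')]),
#  OrderedDict([('name', 'notes.txt'), ('size', '512')])]
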
Example #16
    def _dt_utils_create_csv_table_function(self, event, *args, **kwargs):
        """Function: Create a utility function to take csv data and add the results to a named datatable."""

        try:
            # Instantiate new Resilient API object
            res_client = self.rest_client()

            inputs = {
                "incident_id":
                get_function_input(kwargs, "incident_id",
                                   optional=False),  # number (required)
                "attachment_id":
                get_function_input(kwargs, "attachment_id",
                                   optional=True),  # number (optional)
                "has_headers":
                get_function_input(kwargs, "dt_has_headers",
                                   optional=False),  # boolean (required)
                "csv_data":
                get_function_input(kwargs, "dt_csv_data",
                                   optional=True),  # text (optional)
                "datable_name":
                get_function_input(kwargs, "dt_datable_name",
                                   optional=False),  # text (required)
                "mapping_table":
                get_function_input(kwargs, "dt_mapping_table",
                                   optional=False),  # text (required)
                "date_time_format":
                get_function_input(kwargs,
                                   "dt_date_time_format",
                                   optional=True),  # text (optional)
                "start_row":
                get_function_input(kwargs, "dt_start_row",
                                   optional=True),  # number (optional)
                "max_rows":
                get_function_input(kwargs, "dt_max_rows",
                                   optional=True),  # number (optional)
            }

            LOG.info(inputs)

            yield StatusMessage("Starting ...")
            mapping_table = convert_json(inputs['mapping_table'])
            if not mapping_table:
                raise ValueError(
                    u"Unable to convert mapping_table to json: %s" %
                    inputs['mapping_table'])

            # Create the result payload helper, seeded with the function inputs
            rp = ResultPayload(PACKAGE_NAME, **kwargs)

            if (inputs["attachment_id"] and inputs["csv_data"]) or \
               not (inputs["attachment_id"] or inputs["csv_data"]):
                raise ValueError("Specify either attachment_id or csv_data")

            # Either an attachment ID or CSV Data is needed to be able to add rows
            if inputs["attachment_id"]:
                attachment_name = get_file_attachment_name(
                    res_client,
                    inputs['incident_id'],
                    attachment_id=inputs["attachment_id"])
                b_csv_data = get_file_attachment(
                    res_client,
                    inputs['incident_id'],
                    attachment_id=inputs["attachment_id"])
                csv_data = b_csv_data.decode("utf-8")
                if sys.version_info.major < 3:
                    inline_data = BytesIO(b_csv_data)
                else:
                    inline_data = StringIO(csv_data)
            else:
                attachment_name = None
                csv_data = inputs["csv_data"]
                if sys.version_info.major < 3:
                    inline_data = StringIO(csv_data.encode("utf-8"))
                else:
                    inline_data = StringIO(csv_data)

            datatable = RESDatatable(res_client, inputs["incident_id"],
                                     inputs["datable_name"])

            # Retrieve the column names for the datatable, and their data_types,
            # to compare against what the user provides, and attempt data conversion, if necessary
            fields = datatable.get_dt_headers()
            dt_ordered_columns = {
                fields[field]['order']:
                (fields[field]['name'], fields[field]['input_type'])
                for field in fields
            }
            # ordered column names if we need to assign the headers to the columns in column order
            dt_column_names = OrderedDict([
                dt_ordered_columns[field]
                for field in sorted(dt_ordered_columns.keys())
            ])

            # sniff the CSV dialect, limiting analysis to the first row
            dialect = csv.Sniffer().sniff(csv_data[0:csv_data.find('\n')])
            # py2 needs changes to dialect to avoid unicode attributes
            if sys.version_info.major < 3:
                for attr in dir(dialect):
                    a = getattr(dialect, attr)
                    if type(a) == unicode:
                        setattr(dialect, attr, bytes(a))
            LOG.debug(dialect.__dict__)

            if inputs["has_headers"]:
                reader = csv.DictReader(
                    inline_data, dialect=dialect
                )  # each row is a dictionary keyed by the column name
                csv_headers = reader.fieldnames  # just the headers
            else:
                reader = csv.reader(
                    inline_data,
                    dialect=dialect)  # each row is a list of values
                csv_headers = []

            mapping_table = build_mapping_table(mapping_table, csv_headers,
                                                dt_column_names)
            LOG.debug("csv headers to datatable columns: %s", mapping_table)

            # perform the api calls to the datatable
            number_of_added_rows, number_of_rows_with_errors = self.add_to_datatable(
                reader, datatable, mapping_table, dt_column_names,
                inputs['date_time_format'], inputs['start_row'],
                inputs['max_rows'])
            LOG.info("Number of rows added: %s ", number_of_added_rows)
            LOG.info("Number of rows that could not be added: %s",
                     number_of_rows_with_errors)

            row_data = {
                "data_source":
                attachment_name if attachment_name else "CSV data",
                "rows_added": number_of_added_rows,
                "rows_with_errors": number_of_rows_with_errors
            }
            results = rp.done(True, row_data)

            yield StatusMessage("Ending ...")

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception as err:
            yield FunctionError(err)
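
As a rough, self-contained illustration of the column ordering and dialect sniffing used above. The fields metadata below is a hypothetical stand-in for what get_dt_headers() returns; only the reordering and sniffing technique carries over:

import csv
from collections import OrderedDict
from io import StringIO

# Hypothetical column metadata shaped like the fields dict used above:
# each entry carries a display order, an API name, and an input type
fields = {
    "col_a": {"order": 1, "name": "host", "input_type": "text"},
    "col_b": {"order": 0, "name": "timestamp", "input_type": "datetimepicker"},
}

# Rebuild the ordered name -> type mapping the same way as above
ordered_columns = {meta["order"]: (meta["name"], meta["input_type"])
                   for meta in fields.values()}
dt_column_names = OrderedDict(ordered_columns[i] for i in sorted(ordered_columns))
print(dt_column_names)
# OrderedDict([('timestamp', 'datetimepicker'), ('host', 'text')])

# Sniff the dialect from the first row only, then read with headers
csv_data = "timestamp;host\n2019-03-12;web01\n"
dialect = csv.Sniffer().sniff(csv_data[:csv_data.find('\n')])
reader = csv.DictReader(StringIO(csv_data), dialect=dialect)
for row in reader:
    print(row["timestamp"], row["host"])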