def _dt_utils_delete_row_function(self, event, *args, **kwargs):
        """Function: Function that deletes a row from a Data Table given the row's ID"""

        log = logging.getLogger(__name__)

        try:
            # Instansiate new Resilient API object
            res_client = self.rest_client()

            inputs = {
                "incident_id":
                get_function_input(kwargs, "incident_id"),  # number (required)
                "dt_utils_datatable_api_name":
                get_function_input(
                    kwargs, "dt_utils_datatable_api_name"),  # text (required)
                "dt_utils_row_id":
                get_function_input(kwargs, "dt_utils_row_id",
                                   optional=True)  # number (optional)
            }

            # Create payload dict with inputs
            payload = FunctionPayload(inputs)

            yield StatusMessage("Function Inputs OK")

            # Instantiate a new RESDatatable
            datatable = RESDatatable(
                res_client, payload.inputs["incident_id"],
                payload.inputs["dt_utils_datatable_api_name"])

            deleted_row = datatable.delete_row(
                payload.inputs["dt_utils_row_id"])

            if "error" in deleted_row:
                yield StatusMessage("Row {0} in {1} NOT deleted.".format(
                    payload.inputs["dt_utils_row_id"], datatable.api_name))
                payload.success = False
                raise ValueError(deleted_row["error"])

            else:
                yield StatusMessage("Row {0} in {1} deleted.".format(
                    payload.inputs["dt_utils_row_id"], datatable.api_name))
                payload.row = deleted_row
                payload.success = True

            results = payload.as_dict()

            log.info("Complete")

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
    def _dt_utils_update_row_function(self, event, *args, **kwargs):
        """Function: Function that takes a JSON String of 'column name/cell value' pairs to update a Data Table row"""

        log = logging.getLogger(__name__)

        try:
            # REST client for the Resilient platform
            res_client = self.rest_client()
            # Workflow instance id is used to resolve "the current row" when
            # the workflow was launched from a datatable row.
            workflow_instance_id = event.message.get('workflow_instance', {}).get('workflow_instance_id')

            inputs = {
                "incident_id": get_function_input(kwargs, "incident_id"),  # number (required)
                "dt_utils_datatable_api_name": get_function_input(kwargs, "dt_utils_datatable_api_name"),  # text (required)
                "dt_utils_row_id": get_function_input(kwargs, "dt_utils_row_id"),  # number (required)
                "dt_utils_cells_to_update": get_function_input(kwargs, "dt_utils_cells_to_update")  # text (required)
            }
            log.info(inputs)

            # The cells arrive as a JSON string; fail early with a clear
            # message if it cannot be parsed.
            try:
                inputs["dt_utils_cells_to_update"] = json.loads(inputs["dt_utils_cells_to_update"])
            except Exception:
                raise ValueError("Failed to parse JSON string: {0}".format(inputs["dt_utils_cells_to_update"]))

            # Create payload dict with inputs
            payload = FunctionPayload(inputs)

            yield StatusMessage("Function Inputs OK")

            # Instantiate a new RESDatatable
            datatable = RESDatatable(res_client, payload.inputs["incident_id"], payload.inputs["dt_utils_datatable_api_name"])

            # Get the data table data
            datatable.get_data()

            # Use the current row_id if dt_utils_row_id is 0/None.
            # NOTE: read and write payload.inputs consistently — the original
            # mutated the local `inputs` dict but then read payload.inputs,
            # which only works when FunctionPayload keeps the same dict object.
            if not payload.inputs['dt_utils_row_id'] or not int(payload.inputs['dt_utils_row_id']):
                row_id = datatable.get_row_id_from_workflow(workflow_instance_id)
                if not row_id:
                    raise ValueError("Run the workflow from a datatable to get the current row_id.")

                log.info("Using current row_id: %s", row_id)
                payload.inputs['dt_utils_row_id'] = row_id

            # Update the row
            updated_row = datatable.update_row(payload.inputs["dt_utils_row_id"], payload.inputs["dt_utils_cells_to_update"])

            if "error" in updated_row:
                # BUGFIX: original used "Row in {1} ...".format(api_name) —
                # index {1} with a single argument raises IndexError and hid
                # the real failure. Supply both row id and table name.
                yield StatusMessage(u"Row {0} in {1} NOT updated.".format(
                    payload.inputs["dt_utils_row_id"], datatable.api_name))
                payload.success = False
                raise ValueError(updated_row["error"])

            else:
                yield StatusMessage("Row {0} in {1} updated.".format(updated_row["id"], datatable.api_name))
                payload.row = updated_row
                payload.success = True

            results = payload.as_dict()

            log.info("Complete")

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
    def _dt_utils_delete_rows_function(self, event, *args, **kwargs):
        """Function: Function that deletes rows from a Data Table"""

        log = logging.getLogger(__name__)

        try:
            # REST client for the Resilient platform
            res_client = self.rest_client()
            wf_instance_id = event.message.get('workflow_instance', {}).get('workflow_instance_id')

            # Rows may be selected either by an explicit id list or by a
            # search column/value pair
            inputs = {
                "incident_id": get_function_input(kwargs, "incident_id"),  # number (required)
                "dt_utils_datatable_api_name": get_function_input(kwargs, "dt_utils_datatable_api_name"),  # text (required)
                "dt_utils_rows_ids": get_function_input(kwargs, "dt_utils_rows_ids", optional=True),  # text (optional)
                "dt_utils_search_column": get_function_input(kwargs, "dt_utils_search_column", optional=True),  # text (optional)
                "dt_utils_search_value": get_function_input(kwargs, "dt_utils_search_value", optional=True),  # text (optional)
            }

            log.info("incident_id: {0}".format(inputs["incident_id"]))
            log.info("dt_utils_datatable_api_name: {0}".format(inputs["dt_utils_datatable_api_name"]))
            log.info("dt_utils_rows_ids: {0}".format(inputs["dt_utils_rows_ids"]))
            log.info("dt_utils_search_column: {0}".format(inputs["dt_utils_search_column"]))
            log.info(u"dt_utils_search_value: {0}".format(inputs["dt_utils_search_value"]))

            # Reject contradictory or incomplete selection criteria up front
            valid_search_inputs = validate_search_inputs(
                rows_ids=inputs["dt_utils_rows_ids"],
                search_column=inputs["dt_utils_search_column"],
                search_value=inputs["dt_utils_search_value"])

            if not valid_search_inputs["valid"]:
                raise ValueError(valid_search_inputs["msg"])

            # Payload carries the inputs plus the outcome back to the workflow
            payload = FunctionPayload(inputs)

            # Handle to the target datatable
            datatable = RESDatatable(
                res_client,
                payload.inputs["incident_id"],
                payload.inputs["dt_utils_datatable_api_name"])

            # Row id of the row this workflow was launched from, if any
            current_row_id = datatable.get_row_id_from_workflow(wf_instance_id)
            if current_row_id:
                log.debug("Current row_id: %s", current_row_id)

            # Get the data table data
            datatable.get_data()

            deleted_rows = datatable.delete_rows(
                payload.inputs["dt_utils_rows_ids"],
                payload.inputs["dt_utils_search_column"],
                payload.inputs["dt_utils_search_value"],
                current_row_id,
                wf_instance_id)

            if not deleted_rows:
                yield StatusMessage("No row(s) found.")
                payload.success = False

            elif "error" in deleted_rows:
                yield StatusMessage(u"Row(s) not deleted. Error: {0}".format(deleted_rows["error"]))
                payload.success = False
                raise FunctionError("Failed to delete a row.")

            else:
                yield StatusMessage("Row(s) {0} in {1} deleted.".format(deleted_rows, datatable.api_name))
                payload.rows_ids = deleted_rows
                payload.success = True

            results = payload.as_dict()

            log.info("Complete")

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
    def _dt_utils_delete_row_function(self, event, *args, **kwargs):
        """Function: Function that deletes a row from a Data Table given the row's ID"""

        log = logging.getLogger(__name__)

        try:
            # REST client for the Resilient platform
            res_client = self.rest_client()
            workflow_instance_id = event.message.get(
                'workflow_instance', {}).get('workflow_instance_id')

            # Function inputs
            dt_utils_row_id = get_function_input(
                kwargs, "dt_utils_row_id", optional=True)  # number (optional)
            dt_utils_datatable_api_name = get_function_input(
                kwargs, "dt_utils_datatable_api_name")  # text (required)

            inputs = {
                "incident_id": get_function_input(kwargs, "incident_id"),  # number (required)
                "dt_utils_datatable_api_name": dt_utils_datatable_api_name,
                "dt_utils_row_id": dt_utils_row_id,
            }
            log.debug(inputs)

            # Payload carries the inputs plus the outcome back to the workflow
            payload = FunctionPayload(inputs)

            yield StatusMessage("Function Inputs OK")

            # Handle to the target datatable
            datatable = RESDatatable(
                res_client, payload.inputs["incident_id"],
                dt_utils_datatable_api_name)

            # Row id of the row this workflow was launched from, if any
            current_row_id = datatable.get_row_id_from_workflow(workflow_instance_id)
            if current_row_id:
                log.debug("Current row_id: %s", current_row_id)

            # A missing or zero row id means "delete the row I was run from"
            if not dt_utils_row_id or not int(dt_utils_row_id):
                if not current_row_id:
                    raise ValueError(
                        "Run the workflow from a datatable to get the current row_id."
                    )

                log.info("Using current row_id: %s", current_row_id)
                dt_utils_row_id = current_row_id

            if current_row_id == int(dt_utils_row_id):
                # Deleting the row the workflow is currently running on is
                # deferred (queued) rather than performed immediately
                yield StatusMessage(
                    "Queuing row {0} for delete".format(dt_utils_row_id))
                deleted_row = datatable.queue_delete(workflow_instance_id,
                                                     dt_utils_row_id)
            else:
                deleted_row = datatable.delete_row(dt_utils_row_id)

            if "error" in deleted_row:
                yield StatusMessage(u"Row {0} in {1} not deleted.".format(
                    dt_utils_row_id, dt_utils_datatable_api_name))
                payload.success = False
                raise ValueError(deleted_row["error"])

            yield StatusMessage("Row {0} in {1} deleted.".format(
                dt_utils_row_id, dt_utils_datatable_api_name))
            payload.row = deleted_row
            payload.success = True

            results = payload.as_dict()

            log.info("Complete")

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
    # NOTE(review): scrape artifact removed — the original file contained the
    # stray unindented lines 'Example #5' / '0' here, which break the module's
    # indentation; preserved as this comment to keep the file parseable.
    def _dt_utils_create_csv_table_function(self, event, *args, **kwargs):
        """Function: Create a utility function to take csv data and add the results to a named datatable.

        The CSV content comes either from an incident attachment
        (attachment_id) or inline text (csv_data) — exactly one must be
        supplied. Rows are mapped onto datatable columns via mapping_table
        (a JSON string) and added through the REST API.
        """

        try:
            # Instantiate new Resilient API object
            res_client = self.rest_client()

            inputs = {
                "incident_id":
                get_function_input(kwargs, "incident_id",
                                   optional=False),  # number (required)
                "attachment_id":
                get_function_input(kwargs, "attachment_id",
                                   optional=True),  # number (optional)
                "has_headers":
                get_function_input(kwargs, "dt_has_headers",
                                   optional=False),  # boolean (required)
                "csv_data":
                get_function_input(kwargs, "dt_csv_data",
                                   optional=True),  # text (optional)
                "datable_name":
                get_function_input(kwargs, "dt_datable_name",
                                   optional=False),  # text (required)
                "mapping_table":
                get_function_input(kwargs, "dt_mapping_table",
                                   optional=False),  # text (required)
                "date_time_format":
                get_function_input(kwargs,
                                   "dt_date_time_format",
                                   optional=True),  # text (optional)
                "start_row":
                get_function_input(kwargs, "dt_start_row",
                                   optional=True),  # number (optional)
                "max_rows":
                get_function_input(kwargs, "dt_max_rows",
                                   optional=True),  # number (optional)
            }

            LOG.info(inputs)

            yield StatusMessage("Starting ...")
            # mapping_table arrives as a JSON string describing how csv
            # columns map onto datatable columns
            mapping_table = convert_json(inputs['mapping_table'])
            if not mapping_table:
                # BUGFIX: original passed two arguments to ValueError with an
                # unformatted "%s" placeholder, so the offending data never
                # appeared in the error message
                raise ValueError(
                    u"Unable to convert mapping_table to json: {0}".format(
                        inputs['mapping_table']))

            # Create payload dict with inputs
            rp = ResultPayload(PACKAGE_NAME, **kwargs)

            # Exactly one CSV source must be provided
            if (inputs["attachment_id"] and inputs["csv_data"]) or \
               not (inputs["attachment_id"] or inputs["csv_data"]):
                raise ValueError("Specify either attachment_id or csv_data")

            # Either an attachment ID or CSV Data is needed to be able to add rows
            if inputs["attachment_id"]:
                attachment_name = get_file_attachment_name(
                    res_client,
                    inputs['incident_id'],
                    attachment_id=inputs["attachment_id"])
                b_csv_data = get_file_attachment(
                    res_client,
                    inputs['incident_id'],
                    attachment_id=inputs["attachment_id"])
                csv_data = b_csv_data.decode("utf-8")
                # py2 csv works on bytes, py3 csv works on text
                if sys.version_info.major < 3:
                    inline_data = BytesIO(b_csv_data)
                else:
                    inline_data = StringIO(csv_data)
            else:
                attachment_name = None
                csv_data = inputs["csv_data"]
                if sys.version_info.major < 3:
                    inline_data = StringIO(csv_data.encode("utf-8"))
                else:
                    inline_data = StringIO(csv_data)

            datatable = RESDatatable(res_client, inputs["incident_id"],
                                     inputs["datable_name"])

            # Retrieve the column names for the datatable, and their data_types,
            # to compare against what the user provides, and attempt data conversion, if necessary
            fields = datatable.get_dt_headers()
            dt_ordered_columns = {
                fields[field]['order']:
                (fields[field]['name'], fields[field]['input_type'])
                for field in fields
            }
            # ordered column names if we need to assign the headers to the columns in column order
            dt_column_names = OrderedDict([
                dt_ordered_columns[field]
                for field in sorted(dt_ordered_columns.keys())
            ])

            # Infer delimiter/quoting from the first row only
            dialect = csv.Sniffer().sniff(
                csv_data[0:csv_data.find('\n')])  # limit analysis to first row
            # py2 needs changes to dialect to avoid unicode attributes
            if sys.version_info.major < 3:
                for attr in dir(dialect):
                    a = getattr(dialect, attr)
                    if type(a) == unicode:
                        setattr(dialect, attr, bytes(a))
            LOG.debug(dialect.__dict__)

            if inputs["has_headers"]:
                reader = csv.DictReader(
                    inline_data, dialect=dialect
                )  # each row is a dictionary keyed by the column name
                csv_headers = reader.fieldnames  # just the headers
            else:
                reader = csv.reader(
                    inline_data,
                    dialect=dialect)  # each row is a list of values
                csv_headers = []

            mapping_table = build_mapping_table(mapping_table, csv_headers,
                                                dt_column_names)
            LOG.debug("csv headers to datatable columns: %s", mapping_table)

            # perform the api calls to the datatable
            number_of_added_rows, number_of_rows_with_errors = self.add_to_datatable(
                reader, datatable, mapping_table, dt_column_names,
                inputs['date_time_format'], inputs['start_row'],
                inputs['max_rows'])
            LOG.info("Number of rows added: %s ", number_of_added_rows)
            LOG.info("Number of rows that could not be added: %s",
                     number_of_rows_with_errors)

            row_data = {
                "data_source":
                attachment_name if attachment_name else "CSV data",
                "rows_added": number_of_added_rows,
                "rows_with_errors": number_of_rows_with_errors
            }
            results = rp.done(True, row_data)

            yield StatusMessage("Ending ...")

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception as err:
            yield FunctionError(err)