Example #1
def _do_artifact_mapping(query_definition,
                         event_message,
                         metadata,
                         response,
                         res_client,
                         context_token,
                         additional_map_data=None):
    """ Map query results to new artifact and add to Resilient """
    incident = event_message.get("incident", {})
    incident_id = incident.get("id")

    if not response:
        return
    existing_artifacts = [
        _artifact_key(artifact)
        for artifact in _get_artifacts(incident_id, res_client)
    ]
    LOG.debug(u"Existing Artifacts:\n%s", u"\n".join(existing_artifacts))

    if query_definition.result_container:
        # Create an artifact for each query result row
        for row in response:
            for artifact_template in query_definition.artifact_mapping:
                mapdata = {"result": row}
                # Add in any query result metadata
                mapdata.update(metadata)
                if additional_map_data:
                    mapdata.update(additional_map_data)
                artifact = template_functions.render_json(
                    artifact_template, mapdata)
                if artifact.get("value") and _unique_artifact(
                        artifact, existing_artifacts):
                    _add_artifact(res_client, incident_id, artifact,
                                  context_token)
                    existing_artifacts.append(_artifact_key(artifact))
    else:
        # Create a single artifact from the query result
        for artifact_template in query_definition.artifact_mapping:
            artifact = template_functions.render_json(artifact_template,
                                                      response)
            if artifact.get("value") and _unique_artifact(
                    artifact, existing_artifacts):
                _add_artifact(res_client, incident_id, artifact, context_token)
                existing_artifacts.append(_artifact_key(artifact))
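For context, template_functions.render_json presumably renders a Jinja2 template and parses the output as JSON. A minimal sketch of that behavior, using plain jinja2 and a hypothetical artifact template (the field names are illustrative, not from the source):

import json
from jinja2 import Template

def render_json(template_str, mapdata):
    # Render the Jinja2 template against mapdata, then parse the result as JSON.
    return json.loads(Template(template_str).render(mapdata))

artifact_template = u'{"type": "IP Address", "value": "{{ result.src_ip }}"}'
artifact = render_json(artifact_template, {"result": {"src_ip": "198.51.100.7"}})
print(artifact["value"])  # 198.51.100.7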
Example #2
def _do_task_mapping(query_definition,
                     event_message,
                     metadata,
                     response,
                     res_client,
                     context_token,
                     additional_map_data=None):
    """ Map query results to new task and add to Resilient """
    incident = event_message.get("incident", {})
    incident_id = incident.get("id")

    if not response:
        return

    def _add_task(client, incident_id, task):
        """ Create resilient task """
        try:
            LOG.info(u"Adding task to incident %s: %s", incident_id, task)
            client.post("/incidents/%s/tasks" % incident_id,
                        task,
                        co3_context_token=context_token)
        except SimpleHTTPException as error:
            LOG.error("Failed to post new task. Status %s",
                      str(error.response.status_code))
            LOG.exception("Failed to post task")

    if query_definition.result_container:
        # Create a task for each query result row
        for row in response:
            for task_template in query_definition.task_mapping:
                mapdata = {"result": row}
                # Add in any query result metadata
                mapdata.update(metadata)
                if additional_map_data:
                    mapdata.update(additional_map_data)

                task = template_functions.render_json(task_template, mapdata)
                _add_task(res_client, incident_id, task)

    else:
        # Create a single task from the query result
        for task_template in query_definition.task_mapping:
            task = template_functions.render_json(task_template, response)
            _add_task(res_client, incident_id, task)
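The mapdata layering is the same in all of these mappers: later update() calls win on key collisions, so additional_map_data overrides metadata, which overrides the {"result": row} base. A small illustration with hypothetical values:

row = {"hostname": "web-01"}
metadata = {"search_id": "42", "source": "query"}
additional_map_data = {"source": "override"}

mapdata = {"result": row}
mapdata.update(metadata)             # adds search_id and source
mapdata.update(additional_map_data)  # later update wins: source is overridden
assert mapdata["source"] == "override"
assert mapdata["result"]["hostname"] == "web-01"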
Example #3
def _do_note_mapping(query_definition,
                     event_message,
                     metadata,
                     response,
                     res_client,
                     context_token,
                     additional_map_data=None):
    """ Map query results to new note and add to Resilient """
    incident = event_message.get("incident", {})
    incident_id = incident.get("id")

    if not response:
        return

    def _add_note(client, incident_id, note):
        """ Create resilient note """
        try:
            LOG.info(u"Adding note to incident %s: %s", incident_id, note)
            client.post("/incidents/%s/comments" % incident_id,
                        note,
                        co3_context_token=context_token)
        except SimpleHTTPException as error:
            LOG.error("Failed to post new note. Status %s",
                      str(error.response.status_code))
            LOG.exception("Failed to post note")

    if query_definition.result_container:
        # Create a note for each query result row
        for row in response:
            mapdata = {"result": row}
            # Add in any query result metadata
            mapdata.update(metadata)
            if additional_map_data:
                mapdata.update(additional_map_data)

            for note_template in query_definition.note_mapping:
                note = template_functions.render_json(note_template, mapdata)
                _add_note(res_client, incident_id, note)
    else:
        # Create a single note from the query result
        for note_template in query_definition.note_mapping:
            note = template_functions.render_json(note_template, response)
            _add_note(res_client, incident_id, note)
Example #4
    def _close_incident(self, incident, ticket):
        """
        Close a Resilient incident by rendering a jinja2 template
        :param ticket: Secureworks CTP ticket (json object)
        :return: Resilient incident
        """

        try:
            # Close a Resilient incident from this ticket
            # using a JSON (JINJA2) template file
            template_file_path = self.options.get('template_file_close')
            if template_file_path and not os.path.exists(template_file_path):
                LOG.warning(u"Template file '%s' not found.",
                            template_file_path)
                template_file_path = None
            if not template_file_path:
                # Use the template file installed by this package
                template_file_path = resource_filename(
                    Requirement("fn-secureworks-ctp"),
                    "fn_secureworks_ctp/data/scwx_ctp_template_close.jinja")
                if not os.path.exists(template_file_path):
                    raise Exception(
                        u"Template file for close'{0}' not found".format(
                            template_file_path))

            LOG.info(
                u"Secureworks CTP jinja template file for closing incident: %s",
                template_file_path)
            with open(template_file_path, "r") as definition:
                close_template = definition.read()

            incident_payload = render_json(close_template, ticket)
            # Set the scwx_ctp_status incident field to the ticket status (Closed or Resolved) so that the
            # automatic rule to close the Secureworks ticket is not triggered, as the ticket is already closed in SCWX.
            incident_payload['properties']['scwx_ctp_status'] = ticket.get(
                "status")

            # Update the incident with the rendered payload.
            incident_id = incident.get('id')
            result = self._update_incident(incident_id, incident_payload)
            LOG.debug(incident_payload)

            ticket_id = ticket.get('ticketId')
            if result and result.get('success'):
                message = u"Closed incident {0} for Secureworks CTP ticket {1}".format(
                    incident_id, ticket_id)
                LOG.info(message)
            else:
                message = u"Unable to update incident {0} for closing. Secureworks CTP ticket {1}".format(
                    incident_id, ticket_id)
                LOG.error(message)
            return result

        except Exception as err:
            raise IntegrationError(err)
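The template-resolution logic above (use the configured file if it exists, otherwise fall back to the template shipped inside the package) recurs in several of these examples. A generic sketch of that pattern; resolve_template is a hypothetical helper, and it assumes pkg_resources-style packaging as in the source:

import os
from pkg_resources import resource_filename, Requirement

def resolve_template(configured_path, package_name, default_resource):
    """Return configured_path if it exists, else the template installed with the package."""
    if configured_path and os.path.exists(configured_path):
        return configured_path
    path = resource_filename(Requirement(package_name), default_resource)
    if not os.path.exists(path):
        raise Exception(u"Template file '{0}' not found".format(path))
    return path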
Example #5
    def _process_approval_request(self, event):
        # Process one approval request
        log = self.log
        request = event.request
        request_id = request["id"]

        # special "test the process by escalating a single request" mode
        test_single_request = self.options.get("test_single_request")
        if test_single_request:
            if str(request_id) not in str(test_single_request).split(","):
                log.info(u"Skipping request %s, test", request_id)
                return

        # Find the Resilient incident corresponding to this CbProtect approval request (if available)
        resilient_incident = self._find_resilient_incident_for_req(request_id)
        if resilient_incident:
            log.info(u"Skipping request %s, already escalated", request_id)
            return

        log.info(u"Processing request %s", request_id)
        try:
            # Create a new Resilient incident from this approval request
            # using a JSON (JINJA2) template file
            template_file_path = self.options.get("template_file")
            if template_file_path and not os.path.exists(template_file_path):
                log.warn(u"Template file '%s' not found.", template_file_path)
                template_file_path = None
            if not template_file_path:
                # Use the template file installed by this package
                template_file_path = resource_filename(
                    Requirement("fn-cb-protection"),
                    "fn_cb_protection/data/template.jinja")
                if not os.path.exists(template_file_path):
                    raise Exception(u"Template file '{}' not found".format(
                        template_file_path))

            log.info(u"Template file: %s", template_file_path)
            with open(template_file_path, "r") as definition:
                escalate_template = definition.read()

            # Render the template.  Be sure to set the CbProtect ID in the result!
            new_resilient_inc = render_json(escalate_template, request)
            new_resilient_inc["properties"][REQUEST_ID_FIELDNAME] = request_id

            log.debug(new_resilient_inc)
            inc = self.rest_client().post("/incidents", new_resilient_inc)
            rs_inc_id = inc["id"]
            message = u"Created incident {} for CbProtect {}".format(
                rs_inc_id, request_id)
            log.info(message)

        except Exception as exc:
            log.exception(exc)
            raise
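Note how the test_single_request option is matched: the configured value is split on commas with no whitespace stripping, so a space after a comma would prevent a match. For example:

test_single_request = "1001,1002"
request_id = 1001
skip = str(request_id) not in str(test_single_request).split(",")
assert skip is False  # request 1001 is escalated

# With a space in the option value, " 1002" != "1002", so request 1002 would be skipped.
assert "1002" not in "1001, 1002".split(",")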
Example #6
    def _update_custom_fields(self, incident, ticket):
        """
        Update a Resilient incident by rendering a jinja2 template
        :param ticket: Secureworks CTP ticket (json object)
        :return: Resilient incident
        """

        try:
            # Update Resilient custom incident fields from this ticket
            # using a JSON (JINJA2) template file
            template_file_path = self.options.get('template_file_update')
            if template_file_path and not os.path.exists(template_file_path):
                LOG.warning(u"Template file '%s' not found.",
                            template_file_path)
                template_file_path = None
            if not template_file_path:
                # Use the template file installed by this package
                template_file_path = resource_filename(
                    Requirement("fn-secureworks-ctp"),
                    "fn_secureworks_ctp/data/scwx_ctp_template_update.jinja")
                if not os.path.exists(template_file_path):
                    raise Exception(
                        u"Template file for updating incident'{0}' not found".
                        format(template_file_path))

            LOG.info(
                u"Secureworks CTP jinja template file for updating incident: %s",
                template_file_path)
            with open(template_file_path, "r") as definition:
                update_template = definition.read()

            incident_payload = render_json(update_template, ticket)

            # Update the incident with the rendered payload.
            incident_id = incident.get('id')
            result = self._update_incident(incident_id, incident_payload)
            LOG.debug(incident_payload)

            ticket_id = ticket.get('ticketId')
            if result and result.get('success'):
                message = u"Updated incident {0} for Secureworks CTP ticket {1}".format(
                    incident_id, ticket_id)
                LOG.info(message)
            else:
                message = u"Unable to update incident {0} for Secureworks CTP ticket {1}".format(
                    incident_id, ticket_id)
                LOG.error(message)
            return result

        except Exception as err:
            raise IntegrationError(err)
Example #7
    def make_payload_from_template(self, template_override, default_template,
                                   payload):
        """convert a payload into a newformat based on a specified template

        Args:
            template_override ([str]): [/path/to/template.jinja]
            default_template ([str]): [/path/to/template.jinja]
            payload ([dict]): [data to convert]

        Returns:
            [dict]: [converted payload]
        """
        template_data = self.get_template(template_override, default_template)

        # Render the template.
        rendered_payload = render_json(template_data, payload)
        LOG.debug(rendered_payload)

        return rendered_payload
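A self-contained sketch of how this method might be exercised; get_template and the template contents are stubbed here (the real get_template reads the override file if it exists, otherwise the packaged default), and render_json is approximated with plain jinja2:

import json
from jinja2 import Template

class PayloadDemo(object):
    def get_template(self, override, default):
        # Stub: the real method returns the contents of one of the two template files.
        return u'{"summary": "{{ status }} ticket {{ ticketId }}"}'

    def make_payload_from_template(self, template_override, default_template, payload):
        template_data = self.get_template(template_override, default_template)
        return json.loads(Template(template_data).render(payload))

demo = PayloadDemo()
print(demo.make_payload_from_template(None, "data/update.jinja", {"ticketId": "T-1", "status": "Open"}))
# {'summary': 'Open ticket T-1'}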
Example #8
    def _create_incident(self, ticket):
        """
        Create a new Resilient incident by rendering a jinja2 template
        :param ticket: Secureworks CTP ticket (json object)
        :return: Resilient incident
        """
        ticket_id = ticket.get('ticketId')
        try:
            # Create a new Resilient incident from this ticket
            # using a JSON (JINJA2) template file
            template_file_path = self.options.get('template_file_escalate')
            if template_file_path and not os.path.exists(template_file_path):
                LOG.warning(u"Template file '%s' not found.",
                            template_file_path)
                template_file_path = None
            if not template_file_path:
                # Use the template file installed by this package
                template_file_path = resource_filename(
                    Requirement("fn-secureworks-ctp"),
                    "fn_secureworks_ctp/data/scwx_ctp_template_escalate.jinja")
                if not os.path.exists(template_file_path):
                    raise Exception(u"Template file '{0}' not found".format(
                        template_file_path))

            LOG.info(u"Secureworks CTP Template file: %s", template_file_path)
            with open(template_file_path, "r") as definition:
                escalate_template = definition.read()

            # Render the template.
            new_incident_payload = render_json(escalate_template, ticket)
            LOG.debug(new_incident_payload)

            # Post incident to Resilient
            incident = self.rest_client().post("/incidents",
                                               new_incident_payload)
            incident_id = incident.get('id')
            message = u"Created incident {0} for Secureworks CTP ticket {1}".format(
                incident_id, ticket_id)
            LOG.info(message)
            return incident

        except Exception as err:
            raise IntegrationError(err)
Example #9
def codegen_from_template(client, export_file, template_file_path, package,
                          message_destination_names, function_names,
                          workflow_names, action_names, field_names,
                          datatable_names, task_names, script_names,
                          output_dir, output_file):
    """Based on a template-file, produce the generated file or package.

       To codegen a single file, the template will be a JSON dict with just one entry,
       such as {"file_to_generate.py": "path/to/template.jinja2"}
       To codegen a whole directory, the template dict can have multiple values,
       including nested subdirectories.

       Each source ("path/to/template.jinja2") will be rendered using jinja2,
       then written to the target ("file_to_generate.py").

       :param client: the REST client
       :param export_file: file containing customization exports (default is to use the server's latest)
       :param template_file_path: location of templates
       :param package: name of the package to be generated
       :param message_destination_names: list of message destinations; generate all the functions that use them
       :param function_names: list of named functions to be generated
       :param workflow_names: list of workflows whose customization def should be included in the package
       :param action_names: list of actions (rules) whose customization def should be included in the package
       :param field_names: list of incident fields whose customization def should be included in the package
       :param datatable_names: list of data tables whose customization def should be included in the package
       :param task_names: list of automatic tasks whose customization def should be included in the package
       :param script_names: list of scripts whose customization def should be included in the package
       :param output_dir: output location
       :param output_file: output file name
    """
    functions = {}
    function_params = {}
    message_destinations = {}
    incident_fields = {}
    action_fields = {}
    datatables = {}
    datatable_fields = {}
    phases = {}
    automatic_tasks = {}
    scripts = {}
    workflows = {}
    actions = {}

    if export_file:
        with io.open(export_file, 'r', encoding="utf-8") as export:
            export_data = json.loads(export.read())
        LOG.info(
            u"Codegen is based on the organization export from '{}'.".format(
                export_file))
    else:
        # Get the most recent org export that includes actions and tasks
        export_uri = "/configurations/exports/history"
        export_list = client.get(export_uri)["histories"]
        last_date = 0
        last_id = 0
        for export in export_list:
            if export["options"]["actions"] and export["options"][
                    "phases_and_tasks"]:
                if export["date"] > last_date:
                    last_date = export["date"]
                    last_id = export["id"]
        if last_date == 0:
            LOG.error(
                u"ERROR: No suitable export is available.  "
                u"Create an export for code generation. (Administrator Settings -> Organization -> Export)."
            )
            return
        dt = datetime.datetime.utcfromtimestamp(last_date / 1000.0)
        LOG.info(
            u"Codegen is based on the organization export from {}.".format(dt))
        export_uri = "/configurations/exports/{}".format(last_id)
        export_data = client.get(export_uri)

    all_destinations = dict(
        (dest["programmatic_name"], dest)
        for dest in export_data.get("message_destinations", []))
    all_destinations_2 = dict(
        (dest["name"], dest)
        for dest in export_data.get("message_destinations", []))

    if function_names or message_destination_names:
        # Check that 'functions' are available (v30 onward)
        function_defs = export_data.get("functions")
        if not function_defs:
            LOG.error(u"ERROR: Functions are not available in this export.")
            return
        function_names = function_names or []
        available_names = [
            function_def["name"] for function_def in function_defs
        ]
        if message_destination_names:
            # Build a list of all the functions that use the specified message destination(s)
            for function_def in function_defs:
                if function_def[
                        "destination_handle"] in message_destination_names:
                    function_names.append(function_def["name"])

        # Check that each named function is available
        for function_name in function_names or []:
            if function_name not in available_names:
                LOG.error(u"ERROR: Function '%s' not found in this export.",
                          function_name)
                list_functions(function_defs)
                return

        # Check that the named message destination is available
        for message_destination_name in message_destination_names or []:
            if message_destination_name not in all_destinations:
                LOG.error(
                    u"ERROR: Message destination '%s' not found in this export.",
                    message_destination_name)
                list_message_destinations(
                    export_data.get("message_destinations"))
                return

    if workflow_names:
        # Check that 'workflows' are available (v28 onward)
        workflow_defs = export_data.get("workflows")
        if not workflow_defs:
            LOG.error(u"ERROR: Workflows are not available in this export.")
            return
    else:
        workflow_names = []

    if action_names:
        # Check that 'actions' are available
        action_defs = export_data.get("actions")
        if not action_defs:
            LOG.error(u"ERROR: Rules are not available in this export.")
            return

        # Check that each named action is available
        actions = {
            action_def["name"]: clean(copy.deepcopy(action_def),
                                      ACTION_ATTRIBUTES)
            for action_def in action_defs if action_def["name"] in action_names
        }
        all_action_fields = dict((field["uuid"], field)
                                 for field in export_data.get("fields")
                                 if field["type_id"] == ACTION_TYPE_ID)

        for action_name in action_names:
            if action_name not in actions:
                LOG.error(u"ERROR: Rule '%s' not found in this export.",
                          action_name)
                list_actions(action_defs)
                return
            action_def = actions[action_name]

            # Get the activity-fields for this action (if any)
            action_field_uuids = [
                item.get("content") for item in action_def["view_items"]
                if "content" in item
            ]
            fields = []
            for field_uuid in action_field_uuids:
                field = copy.deepcopy(all_action_fields.get(field_uuid))
                clean(field, ACTION_FIELD_ATTRIBUTES)
                for template in field.get("templates", []):
                    clean(template, TEMPLATE_ATTRIBUTES)
                for value in field.get("values", []):
                    clean(value, VALUE_ATTRIBUTES)
                fields.append(field)
                action_fields[field["name"]] = field

            # Get the workflow(s) for this rule (if any)
            wf_names = action_def["workflows"]
            for wf_name in wf_names:
                if wf_name not in workflow_names:
                    workflow_names.append(wf_name)

            # Get the message destination(s) for this rule (if any)
            dest_names = action_def["message_destinations"]
            for dest_name in dest_names:
                if dest_name not in message_destinations:
                    dest = copy.deepcopy(all_destinations_2[dest_name])
                    clean(dest, MESSAGE_DESTINATION_ATTRIBUTES)
                    message_destinations[dest_name] = dest

    all_functions = dict((function["name"], function)
                         for function in export_data.get("functions"))
    all_function_fields = dict((field["uuid"], field)
                               for field in export_data.get("fields")
                               if field["type_id"] == FUNCTION_TYPE_ID)

    for function_name in (function_names or []):
        # Get the function definition
        function_def = copy.deepcopy(all_functions.get(function_name))
        # Remove the attributes we don't want to serialize
        clean(function_def, FUNCTION_ATTRIBUTES)
        for view_item in function_def.get("view_items", []):
            clean(view_item, VIEW_ITEM_ATTRIBUTES)
        functions[function_name] = function_def

        # Get the parameters (input fields) for this function
        param_names = [
            item.get("content") for item in function_def["view_items"]
            if "content" in item
        ]
        params = []
        for param_name in param_names:
            param = copy.deepcopy(all_function_fields[param_name])
            clean(param, FUNCTION_FIELD_ATTRIBUTES)
            for template in param.get("templates", []):
                clean(template, TEMPLATE_ATTRIBUTES)
            for value in param.get("values", []):
                clean(value, VALUE_ATTRIBUTES)
            params.append(param)
            function_params[param["uuid"]] = param

        # Get the message destination for this function
        dest_name = function_def["destination_handle"]
        if dest_name not in message_destinations:
            dest = copy.deepcopy(all_destinations[dest_name])
            clean(dest, MESSAGE_DESTINATION_ATTRIBUTES)
            message_destinations[dest_name] = dest

    if workflow_names:
        all_workflows = dict((workflow["programmatic_name"], workflow)
                             for workflow in export_data.get("workflows"))
        for workflow_name in workflow_names:
            # Get the workflow definition
            workflow_def = all_workflows.get(workflow_name)
            if workflow_def:
                # Remove the attributes we don't want to serialize
                workflow = clean(copy.deepcopy(workflow_def),
                                 WORKFLOW_ATTRIBUTES)
                clean(workflow["content"], WORKFLOW_CONTENT_ATTRIBUTES)
                workflows[workflow_name] = workflow
            else:
                LOG.error(u"ERROR: Workflow '%s' not found in this export.",
                          workflow_name)
                list_workflows(export_data.get("workflows"))
                return

    if field_names:
        # Get definitions for custom incident fields
        all_fields = dict((field["name"], field)
                          for field in export_data.get("fields")
                          if field["type_id"] == INCIDENT_TYPE_ID
                          and field.get("prefix") == "properties")
        for field_name in field_names:
            fielddef = all_fields.get(field_name)
            if fielddef:
                field = clean(copy.deepcopy(fielddef),
                              INCIDENT_FIELD_ATTRIBUTES)
                for template in field.get("templates", []):
                    clean(template, TEMPLATE_ATTRIBUTES)
                for value in field.get("values", []):
                    clean(value, VALUE_ATTRIBUTES)
                incident_fields[field["uuid"]] = field
            else:
                LOG.error(
                    u"ERROR: Custom incident field '%s' not found in this export.",
                    field_name)
                list_incident_fields(export_data.get("fields"))
                return

    if datatable_names:
        # Get datatable definitions
        all_datatables = dict((table["type_name"], table)
                              for table in export_data.get("types")
                              if table["type_id"] == DATATABLE_TYPE_ID)
        for datatable_name in datatable_names:
            datatable = all_datatables.get(datatable_name)
            if datatable:
                for (fieldname, fielddef) in datatable["fields"].items():
                    field = clean(copy.deepcopy(fielddef),
                                  DATATABLE_FIELD_ATTRIBUTES)
                    for template in field.get("templates", []):
                        clean(template, TEMPLATE_ATTRIBUTES)
                    for value in field.get("values", []):
                        clean(value, VALUE_ATTRIBUTES)
                    datatable_fields[field["uuid"]] = field
                datatables[datatable_name] = datatable
            else:
                LOG.error(u"ERROR: Datatable '%s' not found in this export.",
                          datatable_name)
                list_datatables(export_data.get("types", []))
                return

    # Automatic tasks determine the list of phases
    phase_names = set()
    if task_names:
        # Get task definitions
        all_tasks = dict((task["programmatic_name"], task)
                         for task in export_data.get("automatic_tasks"))
        for task_name in task_names:
            task = all_tasks.get(task_name)
            if task:
                automatic_tasks[task_name] = clean(copy.deepcopy(task),
                                                   AUTOMATIC_TASK_ATTRIBUTES)
                phase_names.add(task["phase_id"])
            else:
                LOG.error(u"ERROR: Task '%s' not found in this export.",
                          task_name)
                list_automatic_tasks(export_data.get("automatic_tasks", []))
                return

    if phase_names:
        # Get phase definitions
        all_phases = dict(
            (phase["name"], phase) for phase in export_data.get("phases"))
        for phase_name in phase_names:
            # Assume phase-name is found.  It was derived from the automatic task.
            phase = all_phases[phase_name]
            phases[phase_name] = clean(copy.deepcopy(phase), PHASE_ATTRIBUTES)

    if script_names:
        # Get script definitions
        all_scripts = dict(
            (script["name"], script) for script in export_data.get("scripts"))
        for script_name in script_names:
            script = all_scripts.get(script_name)
            if script:
                scripts[script_name] = clean(copy.deepcopy(script),
                                             SCRIPT_ATTRIBUTES)
            else:
                LOG.error(u"ERROR: Script '%s' not found in this export.",
                          script_name)
                list_scripts(export_data.get("scripts", []))
                return

    # Minify the export_data
    fields_list = []
    if len(incident_fields) == 0:
        # import requires at least one, use placeholder
        fields_list.extend(["incident/inc_training"])
    else:
        fields_list.extend([
            "incident/{}".format(fld["name"])
            for fld in incident_fields.values()
        ])
    fields_list.extend([
        "actioninvocation/{}".format(fld["name"])
        for fld in action_fields.values()
    ])
    fields_list.extend([
        "__function/{}".format(fld["name"])
        for fld in function_params.values()
    ])
    keep_keys = [
        "export_date", "export_format_version", "id", "server_version"
    ]
    minify_keys = {
        "actions": {
            "name": actions.keys()
        },
        "automatic_tasks": {
            "programmatic_name": automatic_tasks.keys()
        },
        "fields": {
            "export_key": fields_list
        },
        "functions": {
            "name": functions.keys()
        },
        "message_destinations": {
            "programmatic_name": message_destinations.keys()
        },
        "phases": {
            "name": phases.keys()
        },
        "scripts": {
            "name": scripts.keys()
        },
        "types": {
            "type_name": datatables.keys()
        },
        "workflows": {
            "programmatic_name": workflows.keys()
        },
    }
    for key in export_data.keys():
        if key in keep_keys:
            pass
        elif key in minify_keys.keys():
            name = list(minify_keys[key].keys())[0]  # The property we match on
            values = minify_keys[key][
                name]  # These are the names of the things to keep
            for data in list(export_data[key]):
                if not data.get(name):
                    LOG.warning("No %s in %s", name, key)
                if data.get(name) not in values:
                    export_data[key].remove(data)
        elif isinstance(export_data[key], list):
            export_data[key] = []
        elif isinstance(export_data[key], dict):
            export_data[key] = {}
        else:
            export_data[key] = None
    # Incident types are special, add one for this specific package
    # (because not enabled, this doesn't actually get loaded into the destination)
    t0 = int(time.time() * 1000)
    export_data["incident_types"] = [{
        "update_date": t0,
        "create_date": t0,
        "uuid": str(UUID_CODEGEN),
        "description": "Customization Packages (internal)",
        "export_key": "Customization Packages (internal)",
        "name": "Customization Packages (internal)",
        "enabled": False,
        "system": False,
        "parent_id": None,
        "hidden": False,
        "id": 0
    }]

    # Prepare the dictionary of substitution values for jinja2
    # (includes all the configuration elements related to the functions)
    data = {
        "package": package,
        "function_names": function_names,
        "output_dir": output_dir,
        "output_file": output_file,
        "functions": functions,
        "function_params": function_params,
        "message_destinations": message_destinations,
        "incident_fields": incident_fields,
        "action_fields": action_fields,
        "datatables": datatables,
        "datatable_fields": datatable_fields,
        "phases": phases,
        "automatic_tasks": automatic_tasks,
        "scripts": scripts,
        "workflows": workflows,
        "actions": actions,
        "export_data": export_data
    }
    LOG.debug(u"Configuration data:\n%s", json.dumps(data, indent=2))

    # Read the files/package template and render it
    # to produce the file-mapping dictionary from template-files to generated-files
    with io.open(template_file_path, 'r', encoding="utf-8") as template_file:
        file_mapping_template = template_file.read()
        file_mapping = template_functions.render_json(file_mapping_template,
                                                      data)

    LOG.debug(u"Codegen template:\n%s", json.dumps(file_mapping, indent=2))

    # Write all the files defined in the mapping definition
    src_dir = os.path.dirname(template_file_path)
    render_file_mapping(file_mapping, data, src_dir, output_dir)
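The docstring's file-mapping template is easiest to see with an example. A hypothetical mapping for a small package: each key names a file (or, for nested dicts, a directory) to generate, and each value is the Jinja2 source template that render_file_mapping renders into it:

file_mapping_template = u"""
{
  "setup.py": "setup.py.jinja2",
  "{{package}}": {
    "__init__.py": "package/__init__.py.jinja2",
    "components": {
      "funct_example.py": "package/components/function.py.jinja2"
    }
  }
}
"""
# After render_json substitutes e.g. package="fn_my_package", render_file_mapping
# walks the resulting dict and writes each rendered template to its target path.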
Example #10
def rest_call(options, query_definition, event_message):
    """ Make a REST call and return result """

    # options
    timeout = int(options.get("query_timeout", 60))

    verify = options.get("verify", "")
    if verify[:1].lower() in ("0", "f", "n"):
        verify = False
    else:
        verify = True

    # The REST URL to call is the rendered query expression
    rest_url = query_definition.query

    # The REST method can be set in 'vars'
    http_method = query_definition.vars.get("http-method", "GET")
    LOG.debug("HTTP method: %s", http_method)

    # HTTP headers can be set in 'vars'
    http_headers = query_definition.vars.get("http-headers", {})
    LOG.debug("HTTP headers: %s", http_headers)

    # HTTP post body can be set in 'vars'
    http_body = query_definition.vars.get("http-body")
    if isinstance(http_body, string_types):
        http_body = json.loads(http_body)
    LOG.debug("HTTP body: %s", http_body)

    session = requests.Session()
    error = None
    response = None
    try:
        response = session.request(http_method, rest_url,
                                   headers=http_headers,
                                   json=http_body,
                                   verify=verify,
                                   timeout=timeout)
        if response.status_code not in [200, 201]:
            raise SimpleHTTPException(response)
        response = response.json()
    except Exception as exc:
        if not query_definition.onerror:
            raise
        LOG.error(exc)
        error = u"{}".format(exc)

    if error:
        mapdata = copy.deepcopy(event_message)
        mapdata.update(query_definition.vars)
        mapdata.update({"query": query_definition.query})
        mapdata.update({"error": error})
        error_template = json.dumps({"events": [query_definition.onerror]}, indent=2)
        error_rendered = template_functions.render_json(error_template, mapdata)
        response = error_rendered

    if not response:
        LOG.warn("No data returned from query")
        if query_definition.default:
            mapdata = copy.deepcopy(event_message)
            mapdata.update(query_definition.vars)
            mapdata.update({"query": query_definition.query})
            default_template = json.dumps({"events": [query_definition.default]}, indent=2)
            default_rendered = template_functions.render_json(default_template, mapdata)
            response = default_rendered

    LOG.debug("Response: %s", json.dumps(response))
    return {"result": response}
Example #11
def _do_datatable_mapping(query_definition,
                          dtinfo,
                          event_message,
                          metadata,
                          response,
                          datatable_locks,
                          res_client,
                          context_token,
                          additional_map_data=None):
    """ Map query results to Resilient data table rows """
    incident = event_message.get("incident", {})
    incident_id = incident.get("id")
    if query_definition.result_container:
        rows = response
    else:
        rows = [
            response,
        ]

    # Contains: "name", plus optional "keys", "row_id", "cells" and "limit"
    dtname = dtinfo.get("name")
    dtkey = dtinfo.get("keys", [])
    dtrow_id = dtinfo.get("row_id", None)
    dtcells = dtinfo.get("cells", None)
    limit = dtinfo.get("limit", 0)

    # Get access to the data table
    if not datatable_locks[dtname].acquire(timeout=600):
        LOG.error("Couldn't acquire lock on table %s. No update done.", dtname)
        return
    try:
        datatable = DataTable(res_client, table_name=dtname)
        dtrows = []
        row_to_update = None

        if dtrow_id:
            # We are updating a single existing row
            row_to_update = datatable.find_row(incident['id'], dtrow_id)
            if not row_to_update:
                LOG.error("Row [%s] not found. No update done.", dtrow_id)
                return
        elif dtkey:
            # Read all the rows
            dtrows = datatable.rows(incident_id)

        # Map for rendering starts with the event (incident, etc)
        mapdata = copy.deepcopy(event_message)
        if additional_map_data:
            mapdata.update(additional_map_data)
        # Add in any rendered vars
        mapdata.update(query_definition.vars)
        # Add in any query result metadata
        mapdata.update(metadata)

        LOG.debug("Key columns: %s", dtkey)

        cells_template = json.dumps({"cells": dtcells}, indent=2)
        LOG.debug("Cells template: %s", cells_template)

        num_created = 0
        for result_row in rows:
            # If a key is specified, it's for upsert:
            # - Each row in the result should correspond to one row in the data table
            # - Render key with **the event_message and the result row**
            #   (because the key could be e.g. artifact.value, or task.id, or row.somevalue)
            # - It looks like {"cell":"value"} when rendered
            # - We expect a single row matching the key, or none
            #   (If multiple rows match the key, just pick the randomly-first one and carry on)
            # - Update it based on the response row, or insert
            mapdata["result"] = result_row

            # Render the result row to cells using the template provided in the query definition.
            cells_rendered = template_functions.render_json(
                cells_template, mapdata)
            datatable.update_cell_value_types(cells_rendered)

            dtrow = None
            if dtkey:
                LOG.debug("Find matching row to update!")
                key_dict = {
                    key: cells_rendered["cells"].get(key,
                                                     {}).get("value", None)
                    for key in dtkey
                }
                matching_rows = datatable.match(dtrows, key_dict, limit=1)
                if matching_rows:
                    dtrow = matching_rows[0]
            elif row_to_update:
                dtrow = row_to_update
            if dtrow is None:
                # Insert a new row in the data table
                LOG.debug("Adding Row: %s", json.dumps(cells_rendered,
                                                       indent=2))
                new_row = datatable.add_row(incident_id, cells_rendered)
                if new_row:
                    dtrows.append(new_row)
            else:
                # Update the row in the data table
                LOG.debug("Updating Row: %s",
                          json.dumps(cells_rendered, indent=2))
                datatable.update(incident_id,
                                 dtrow,
                                 cells_rendered,
                                 co3_context_token=context_token)
            num_created = num_created + 1
            if num_created == limit:
                LOG.info("Limiting Datatable row creation to first %d results",
                         limit)
                break
    finally:
        datatable_locks[dtname].release()
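The upsert described in the comment block above works by projecting the rendered cells onto the key columns. A small illustration with hypothetical cell data:

dtkey = ["host"]
cells_rendered = {"cells": {"host": {"value": "web-01"}, "hits": {"value": 7}}}

key_dict = {
    key: cells_rendered["cells"].get(key, {}).get("value", None)
    for key in dtkey
}
assert key_dict == {"host": "web-01"}
# datatable.match(dtrows, key_dict, limit=1) returns the first existing row whose
# "host" cell equals "web-01"; if none matches, a new row is inserted instead.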
Example #12
def run_search(options, query_definition, event_message):
    """ Run Ariel search and return result """

    # Read the options and construct a QRadar client
    qradar_url = options.get("qradar_url", "")
    qradar_token = options.get("qradar_service_token", "")
    timeout = int(options.get("query_timeout", 600))
    polling_interval = int(options.get("polling_interval", 5))
    if not all((qradar_url, qradar_token, timeout, polling_interval)):
        LOG.error("Configuration file missing required values!")
        raise Exception("Missing Configuration Values")

    verify = options.get("qradar_verify", "")
    if verify[:1].lower() in ("0", "f", "n"):
        verify = False
    else:
        verify = True

    qradar_client = QRadarClient(qradar_url, qradar_token, verify=verify)

    error = None
    response = None
    try:
        params = {'query_expression': query_definition.query}
        url = "ariel/searches"
        response = qradar_client.post(url, params=params)
        LOG.debug(response)
        search_id = response.get('search_id', '')
        if not search_id:
            error = "Query Failed: " + response.get("message",
                                                    "No Error Message Found")
        else:
            LOG.info("Queued Search %s", search_id)
            _wait_for_query_to_complete(search_id, qradar_client, timeout,
                                        polling_interval)
            # Query Execution Finished, Get Results
            response = _get_query_results(search_id, qradar_client,
                                          query_definition.range)
    except Exception as exc:
        if not query_definition.onerror:
            raise
        LOG.error(exc)
        error = u"{}".format(exc)

    if error:
        mapdata = copy.deepcopy(event_message)
        mapdata.update(query_definition.vars)
        mapdata.update({"query": query_definition.query})
        mapdata.update({"error": error})
        error_template = json.dumps({"events": [query_definition.onerror]},
                                    indent=2)
        error_rendered = template_functions.render_json(
            error_template, mapdata)
        response = error_rendered

    if not response or len(response["events"]) == 0:
        LOG.warn("No data returned from query")
        if query_definition.default:
            mapdata = copy.deepcopy(event_message)
            mapdata.update(query_definition.vars)
            mapdata.update({"query": query_definition.query})
            default_template = json.dumps(
                {"events": [query_definition.default]}, indent=2)
            default_rendered = template_functions.render_json(
                default_template, mapdata)
            response = default_rendered

    return response
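_wait_for_query_to_complete is not shown in this excerpt. A minimal polling sketch of what it presumably does, assuming the QRadar client exposes a get method and the Ariel API's COMPLETED status (both are assumptions, not confirmed by the source):

import time

def wait_for_query_to_complete(search_id, qradar_client, timeout, polling_interval):
    # Poll the Ariel search until it completes or the timeout elapses.
    deadline = time.time() + timeout
    while time.time() < deadline:
        status = qradar_client.get("ariel/searches/{0}".format(search_id)).get("status")
        if status == "COMPLETED":
            return
        time.sleep(polling_interval)
    raise Exception("Ariel search {0} did not complete within {1}s".format(search_id, timeout))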
Example #13
def run_search(options, query_definition, event_message):
    """ Run LDAP search and return result """
    # Read the LDAP configuration options
    ldap_server = options["server"]
    ldap_port = int(options["port"] or LDAP_DEFAULT_PORT)
    ldap_user = options["user"]
    ldap_password = options["password"]
    ldap_ssl = options["ssl"] == "True"  # anything else is false
    ldap_auth = LDAP_AUTH_TYPES[options["auth"] or "ANONYMOUS"]
    results = None
    # CLIENT Active Directory
    client_ad_server = Server(ldap_server,
                              ldap_port,
                              get_info=ldap3.ALL,
                              use_ssl=ldap_ssl,
                              connect_timeout=3)

    client_ad_creds = (ldap_user, ldap_password, ldap_auth)

    if query_definition.params is None:
        raise Exception("LDAP query requires 'search_base' parameter")
    search_base = query_definition.params.get("search_base")
    if search_base is None:
        raise Exception("LDAP query requires 'search_base' parameter")

    # Connect to the LDAP server
    LOG.debug("LDAP connect")
    with Connection(client_ad_server,
                    user=client_ad_creds[0],
                    password=client_ad_creds[1],
                    authentication=client_ad_creds[2],
                    auto_bind=True) as conn:

        LOG.debug("LDAP search {0} / {1}".format(search_base,
                                                 query_definition.query))
        conn.search(search_base,
                    query_definition.query,
                    attributes=ldap3.ALL_ATTRIBUTES)

        entries = conn.entries
        if entries is None:
            LOG.info("LDAP query returned None")
            results = {"entries": None}
            if query_definition.default:
                mapdata = copy.deepcopy(event_message)
                mapdata.update(query_definition.vars)
                mapdata.update({"query": query_definition.query})
                default_template = json.dumps(
                    {"entries": [query_definition.default]}, indent=2)
                default_rendered = template_functions.render_json(
                    default_template, mapdata)
                results = default_rendered
        else:
            # List of entries.
            entries = json.loads(conn.response_to_json())["entries"]
            LOG.info("Result contains %s entries", len(entries))
            # Each entry has 'dn' and dict of 'attributes'.  Move attributes to the top level for easier processing.
            for entry in entries:
                entry.update(entry.pop("attributes", {}))
            results = {"entries": entries}

    LOG.debug(json.dumps(results, indent=2))
    return results
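The attribute-flattening step merges each entry's attributes dict into the entry itself. With a hypothetical entry:

entry = {
    "dn": "cn=jdoe,dc=example,dc=com",
    "attributes": {"cn": "jdoe", "mail": "jdoe@example.com"},
}
entry.update(entry.pop("attributes", {}))
assert entry == {"dn": "cn=jdoe,dc=example,dc=com",
                 "cn": "jdoe", "mail": "jdoe@example.com"}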
Example #14
    def render_json(self, template, mapdata):
        # Render a JINJA template, using our filters etc.
        return template_functions.render_json(template, mapdata)