Example #1
def selftest_function(opts):
    """
    Placeholder for selftest function. An example use would be to test package api connectivity.
    Suggested return values are unimplemented, success, or failure.
    """
    options = opts.get("fn_rsa_netwitness", {})
    nw_packet_server_url = options.get("nw_packet_server_url")
    nw_packet_server_user = options.get("nw_packet_server_user")
    nw_packet_server_password = options.get("nw_packet_server_password")
    nw_packet_server_verify = str_to_bool(options.get("nw_packet_server_verify"))

    nw_log_server_url = options.get("nw_log_server_url")
    nw_log_server_user = options.get("nw_log_server_user")
    nw_log_server_password = options.get("nw_log_server_password")
    nw_log_server_verify = str_to_bool(options.get("nw_log_server_verify"))

    try:
        request_common = RequestsCommon(options, opts)

        # Test PCAP server connection
        headers = get_headers(nw_packet_server_user, nw_packet_server_password)
        request_url = "{}/sdk/packets?sessions={}&render=pcap".format(nw_packet_server_url, "100")
        request_common.execute_call_v2("GET", request_url, verify=nw_packet_server_verify,\
             headers=headers).content

        # Test Logger server connection
        time1 = int(time.time()) * 1000
        time2 = int(time.time()) * 1000
        headers = get_headers(nw_log_server_user, nw_log_server_password)
        request_url = "{}/sdk/packets?time1={}&time2={}&render={}"\
            .format(nw_log_server_url, time1, time2, "logs")
        request_common.execute_call_v2("GET", request_url,\
            verify=nw_log_server_verify, headers=headers).text

        return {"state": "success"}
    except Exception as err:
        err_reason_msg = """Could not connect to NetWitness.
                    error: {0}
                    ---------
                    Current Configs in app.config file:
                    ---------
                    nw_packet_server_url: {1}
                    nw_packet_server_user: {2}
                    nw_packet_server_verify: {3}
                    nw_log_server_url: {4}
                    nw_log_server_user: {5}
                    nw_log_server_verify: {6}\n""".format(
            err,
            nw_packet_server_url,
            nw_packet_server_user,
            nw_packet_server_verify,
            nw_log_server_url,
            nw_log_server_user,
            nw_log_server_verify)

        log.error(err_reason_msg)

        return {"state": "failed"}
Example #2
    def _build_transitionIssue_appDict(self, kwargs):
        '''
        build the dictionary used for the transition api request
        :param kwargs:
        :return: dictionary of values to use
        '''

        # test for required fields
        validate_fields(['jira_url', 'jira_transition_id'], kwargs)

        appDict = {
            'user': self.options['user'],
            'password': self.options['password'],
            'url': kwargs['jira_url'],
            'verifyFlag': str_to_bool(self.options.get('verify_cert', 'True')),
            'transitionId': kwargs['jira_transition_id'],
        }

        if kwargs.get('jira_resolution'):
            appDict['resolution'] = kwargs['jira_resolution']

        # optional
        if kwargs.get('jira_comment', None):
            html2markdwn = MarkdownParser(strikeout=constants.STRIKEOUT_CHAR,
                                          bold=constants.BOLD_CHAR,
                                          underline=constants.UNDERLINE_CHAR,
                                          italic=constants.ITALIC_CHAR)
            appDict['comment'] = html2markdwn.convert(kwargs['jira_comment'])

        return appDict
    def _run_netdevice(self, netdevice_ids, netdevice_cmd, netdevice_config,
                       use_textfsm):
        """
        Perform the netdevice execution
        :param netdevice_ids:
        :param netdevice_cmd:
        :param netdevice_config:
        :param use_textfsm:
        :return: json of status object
        """

        log = logging.getLogger(__name__)

        result = {}
        rc = True
        for device_id in netdevice_ids.split(','):

            # find the access information
            device_info = self.opts.get(device_id.strip(), None)
            if not device_info:
                msg = u"Unable to find section for '{}'".format(device_id)
                result[device_id] = {"status": 'failure', "reason": msg}
                rc = False

                log.warning(msg)
                continue

            device_commit = str_to_bool(device_info.pop('use_commit',
                                                        'False'))  # pop
            result[device_id] = execute(device_info, netdevice_cmd,
                                        netdevice_config, device_commit,
                                        use_textfsm)

        return rc, result
    def __init__(self, opts):
        super(FunctionComponent, self).__init__(opts)
        self.options = opts.get("fn_symantec_dlp", {})
        self.dlp_listener = DLPListener(opts)

        if "pytest" in sys.modules:
            # Reaching here indicates that the component is invoked from within a testing session.
            # In this case, don't start the Poller

            LOG.info("Running within a test environment. Not starting listener")
        else:
            # The dlp_listener_toggle will determine whether the Poller will run
            if str_to_bool(self.options.get("sdlp_should_poller_run", None)):
                if self.dlp_listener.soap_client.is_connected:
                    self.setup_listener()

                    # Use a circuits timer to fire off an event every N seconds.
                    #     When the event is fired, a function with the decorator @handler(name_of_event)
                    #     will be used to handle the event and perform some task
                    polling_interval = int(self.options.get("sdlp_listener_timer", DEFAULT_POLLING_INTERVAL))
                    LOG.debug(u"DLP Polling interval will be %s seconds", polling_interval)
                    
                    Timer(interval=polling_interval,
                          event=Event.create("DLPListenerPollingEvent"),
                          persist=True).register(self)
Example #5
def validate_app_configs(app_configs):
    """
    Validates the app configs for fn_jira. Raises an error
    if the required configs are not set

    :param app_configs: The app_configs for fn_jira
    :return: All the app configs
    :rtype: dict
    """
    valid_app_configs = validate_fields([{
        "name": "url",
        "placeholder": "https://<jira url>"
    }, {
        "name": "user",
        "placeholder": "<jira user>"
    }, {
        "name": "password",
        "placeholder": "<jira user password>"
    }, {
        "name": "verify_cert"
    }], app_configs)

    valid_app_configs["verify_cert"] = str_to_bool(
        valid_app_configs.get("verify_cert"))

    return valid_app_configs
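
A hedged usage sketch of validate_app_configs above; the keys mirror the code, the values are invented:

# Hypothetical call; per the docstring, missing or placeholder values would raise an error.
app_configs = validate_app_configs({
    "url": "https://jira.example.com",
    "user": "jira-bot",
    "password": "secret",
    "verify_cert": "false",
})
# app_configs["verify_cert"] is now the boolean False rather than the string "false".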
    def __init__(self, opts):
        super(FeedComponent, self).__init__(opts)

        try:
            self.options = opts.get("feeds", {})
            LOG.debug(self.options)

            self.channel = "actions." + self.options.get("queue", "feed_data")

            if self.options.get("feed_names") is None:
                LOG.error("No feed_names are specified")
            else:
                rest_client_helper = RestClientHelper(self.rest_client)

                self.feed_outputs = build_feed_outputs(
                    rest_client_helper, opts,
                    self.options.get("feed_names", None))

                # determine the reload options to follow
                if self.options.get('reload', 'false').lower() == 'true':
                    query_api_method = str_to_bool(
                        self.options.get("reload_query_api_method", 'false'))

                    reload = Reload(rest_client_helper,
                                    self.feed_outputs,
                                    query_api_method=query_api_method)
                    reload.reload_all()

        except Exception as err:
            LOG.error("exception: %s", err)
            error_trace = traceback.format_exc()
            LOG.error("Traceback %s", error_trace)
    def _build_comment_appDict(self, kwargs):
        """
        build the dictionary used to create a comment
        :param kwargs:
        :return: dictionary of values to use
        """

        # test for required fields
        validate_fields(['jira_url', 'jira_comment'], kwargs)

        html2markdwn = MarkdownParser(strikeout=constants.STRIKEOUT_CHAR,
                                      bold=constants.BOLD_CHAR,
                                      underline=constants.UNDERLINE_CHAR,
                                      italic=constants.ITALIC_CHAR)
        jira_comment = html2markdwn.convert(
            self.get_textarea_param(kwargs['jira_comment']))
        if jira_comment is None or not jira_comment.strip():
            raise FunctionError("comment is empty after rich text is removed")

        appDict = {
            'user': self.options['user'],
            'password': self.options['password'],
            'url': kwargs['jira_url'],
            'verifyFlag': str_to_bool(self.options.get('verify_cert', 'True')),
            'comment': jira_comment
        }

        return appDict
Example #8
def selftest_function(opts):
    """
    Placeholder for selftest function. An example use would be to test package api connectivity.
    Suggested return values are unimplemented, success, or failure.
    """
    app_configs = opts.get("fn_joe_sandbox_analysis", {})
    API_KEY = get_config_option("jsb_api_key", app_configs)
    ACCEPT_TAC = str_to_bool(get_config_option("jsb_accept_tac", app_configs))
    HTTP_PROXY = get_config_option("jsb_http_proxy", app_configs, True)
    HTTPS_PROXY = get_config_option("jsb_https_proxy", app_configs, True)
    log.info(API_KEY)
    proxies = {}
    test = False
    try:
        proxies = get_proxies(opts, app_configs)
        # fall back to the package's own proxy settings only if none were found
        if not proxies:
            if HTTP_PROXY:
                proxies["http"] = HTTP_PROXY
            if HTTPS_PROXY:
                proxies["https"] = HTTPS_PROXY

        if not proxies:
            proxies = None
    except Exception as proxy_error:
        proxies = None
    joesandbox = jbxapi.JoeSandbox(apikey=API_KEY,
                                   accept_tac=ACCEPT_TAC,
                                   proxies=proxies)
    test = joesandbox.server_online()
    if test:
        return {"state": "success", "reason": "Server Online"}
    else:
        return {"state": "failure", "reason": "Server Offline"}
    def setup_docker_connection(self, options):
        """
        A function used to decide whether to connect to docker using a local or remote connection

        Depends on 2 app.config values:
        A boolean determining whether to use a remote connection.

        A string containing the remote server URL to connect to.

        :return: void
        """

        if resilient_lib.str_to_bool(
                options.get("docker_use_remote_conn", "False")):
            LOG.debug("Use Remote Connection config set to true.")
            LOG.debug("Docker Remote URL provided %s",
                      options.get("docker_remote_url", None))
            if 'ssh://' not in options.get("docker_remote_url", "No URL Provided") \
                    and 'tcp://' not in options.get("docker_remote_url", "No URL Provided"):
                raise ValueError(
                    "docker_remote_url does not appear to be configured correctly. Current value {}"
                    .format(options.get("docker_remote_url", None)))
            self.initiate_remote_docker_connection(
                options.get("docker_remote_url"))
        else:
            self.initiate_local_docker_connection()
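
For illustration, the two app.config values the docstring above refers to might arrive as an options dict like this (option names taken from the code; values invented):

# Hypothetical options dict built from the integration's app.config section.
options = {
    "docker_use_remote_conn": "True",
    # Must start with ssh:// or tcp://, otherwise setup_docker_connection raises ValueError.
    "docker_remote_url": "tcp://docker-host.example.com:2376",
}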
Example #10
def selftest_function(opts):
    """
    A self test function which attempts to set up a connection with your ElasticSearch instance and then ping it.
    The self test attempts to use as much code from the function as possible to mimic the functionality.

    The success state is achieved when both a connection is made and a subsequent ping is successful.
    """
    helper = ElasticSearchHelper(opts.get("fn_elasticsearch", {}))
    # Get Elasticsearch params
    ELASTICSEARCH_BOOL_HTTP_AUTH = str_to_bool(
        value=helper.get_config_option("es_use_http", True))
    ELASTICSEARCH_URL = helper.get_config_option("es_datastore_url")
    ELASTICSEARCH_CERT = helper.get_config_option("es_cafile", True)
    ELASTICSEARCH_SCHEME = helper.get_config_option(
        "es_datastore_scheme", True)
    ELASTICSEARCH_USERNAME = helper.get_config_option("es_auth_username", True)
    ELASTICSEARCH_PASSWORD = helper.get_config_option("es_auth_password", True)

    log = logging.getLogger(__name__)

    log.info("Connecting to ElasticSearch...")

    if not ELASTICSEARCH_CERT:
        log.info("No Cafile found in app.config. Attempting connection without")
    try:
        if ELASTICSEARCH_SCHEME.lower() == 'https':
            # Attempt to create an SSL context, should work fine if no CERT is provided
            if ELASTICSEARCH_CERT is None:
                context = create_default_context()
            else:
                context = create_default_context(cafile=ELASTICSEARCH_CERT)
            # Connect to the ElasticSearch instance
            es = Elasticsearch(ELASTICSEARCH_SCHEME.lower() + "://" + ELASTICSEARCH_URL, ssl_context=context,
                               http_auth=(ELASTICSEARCH_USERNAME, ELASTICSEARCH_PASSWORD))
        else:
            # Connect to Elastic without HTTPS
            if ELASTICSEARCH_BOOL_HTTP_AUTH:
                es = Elasticsearch([ELASTICSEARCH_URL], verify_certs=False, cafile=ELASTICSEARCH_CERT, http_auth=(
                    ELASTICSEARCH_USERNAME, ELASTICSEARCH_PASSWORD))
            else:
                es = Elasticsearch(
                    [ELASTICSEARCH_URL], verify_certs=False, cafile=ELASTICSEARCH_CERT)
        try:
            # If we cant ping the ES instance we can't query it
            if not es.ping():
                return {"state": "failed",
                        "reason": "Could not Ping the ElasticSearch instance. Your host may be down or your config is needs changing"}
            else:
                # We can ping the ES instance so selftest passes
                return {"state": "success"}

        # Catch exceptions for when the es service is down, sometimes an exception is returned rather than False
        except Exception as e:
            return {"state": "failed",
                    "reason": "Could not Ping the ElasticSearch instance, there may be an issue with your config. Reason : {0}".format(e)}

    except Exception as e:
        return {"state": "failed",
                "reason": "Encountered error while connecting to ElasticSearch. Please check your config. Reason {0}".format(e)}
    def _data_feeder_sync_incidents_function(self, event, *args, **kwargs):
        """Function: Synchronize Incident(s) and their associated tasks, notes, attachments, artifacts, milestones and associated datatables"""
        try:
            # Get the wf_instance_id of the workflow this Function was called in
            wf_instance_id = event.message["workflow_instance"][
                "workflow_instance_id"]

            result = ResultPayload("data_feeder", **kwargs)

            # Get the function parameters:
            df_min_incident_id = kwargs.get("df_min_incident_id")  # number
            df_max_incident_id = kwargs.get("df_max_incident_id",
                                            df_min_incident_id)  # number
            df_query_api_method = kwargs.get("df_query_api_method",
                                             False)  # boolean

            log = logging.getLogger(__name__)
            log.info("df_min_incident_id: %s", df_min_incident_id)
            log.info("df_max_incident_id: %s", df_max_incident_id)

            if not df_max_incident_id:
                df_max_incident_id = df_min_incident_id

            if df_min_incident_id > df_max_incident_id:
                raise ValueError(
                    "Min value {} greater than max value {}".format(
                        df_min_incident_id, df_max_incident_id))

            # select all incidents as max
            if df_max_incident_id == 0:
                df_max_incident_id = sys.maxsize

            yield StatusMessage("starting...")
            rest_client_helper = RestClientHelper(self.rest_client)
            feed_outputs = build_feed_outputs(
                rest_client_helper, self.opts,
                self.options.get("feed_names", None))

            # expose attachment content setting
            self.incl_attachment_data = str_to_bool(
                self.options.get("include_attachment_data", 'false'))

            df = Reload(rest_client_helper,
                        feed_outputs,
                        query_api_method=df_query_api_method,
                        incl_attachment_data=self.incl_attachment_data)
            reloaded_incidents = df.reload_all(min_inc_id=df_min_incident_id,
                                               max_inc_id=df_max_incident_id)

            result_payload = result.done(
                True, {"num_of_sync_incidents": reloaded_incidents})

            yield StatusMessage("done...")

            # Produce a FunctionResult with the results
            yield FunctionResult(result_payload)
        except Exception:
            yield FunctionError()
Example #12
    def __init__(self, opts):
        """constructor provides access to the configuration options"""
        super(FunctionComponent, self).__init__(opts)
        self.options = opts.get("fn_rsa_netwitness", {})

        # Validate app.config fields
        validate_fields(["nw_log_server_url", "nw_log_server_user", "nw_log_server_password"], self.options)

        self.options["nw_log_server_verify"] = str_to_bool(self.options.get("nw_log_server_verify"))
    def __init__(self, rest_client_helper, options):  # pylint: disable=unused-argument
        super(ResilientFeedDestination, self).__init__()
        self.options = options

        self.resilient_source = Resilient(options, rest_client_helper)
        self.resilient_target = Resilient(options, None)

        # incident fields to exclude
        self.exclude_fields = options.get("exclude_incident_fields",
                                          "").replace(" ", "").split(";")
        self.sync_references = str_to_bool(
            options.get("sync_reference_fields", "false"))
        self.delete_incidents = str_to_bool(
            options.get("delete_incidents", "false"))

        self.match_list, self.match_operator_and = parse_matching_criteria(
            options.get("matching_incident_fields", None),
            options.get("matching_operator", None))
def get_common_request_items(options):
    """
    return basic auth and cafile information
    :return: basic_auth and cafile
    """
    validate_fields(("username", "password"), options)
    basic_auth = (options['username'], options['password'])
    cafile = str_to_bool(options.get("cafile")) if options.get("cafile", "False").lower() in ("true", "false") else True

    return basic_auth, cafile
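
A hedged illustration of how the cafile handling above resolves, assuming str_to_bool maps the strings "true"/"false" (and None) to booleans:

# Hypothetical inputs and the (basic_auth, cafile) results they would produce:
#   {"username": "u", "password": "p"}                             -> (("u", "p"), False)   # cafile missing, defaults to "False"
#   {"username": "u", "password": "p", "cafile": "true"}           -> (("u", "p"), True)
#   {"username": "u", "password": "p", "cafile": "/certs/ca.pem"}  -> (("u", "p"), True)    # any other string falls through to True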
Example #15
    def __init__(self, opts):
        super(DLPListener, self).__init__(opts)
        # A SOAP Client to interface with DLP Incident and Reporting API
        self.dlp_opts = opts.get("fn_symantec_dlp", {})
        self.soap_client = DLPSoapClient(app_configs=self.dlp_opts)
        self.should_search_res = str_to_bool(
            self.dlp_opts.get("sdlp_should_search_res"))
        # A REST Client to interface with Resilient
        self.res_rest_client = ResilientComponent.rest_client(self)
        self.default_artifact_type_id = 16  # When uploading DLP Binaries as attachments, they will be uploaded as 'Other File'
        self.add_filters_to_jinja()
Example #16
def parse_matching_criteria(filters, filter_operator):
    """
    build the filter criteria, if present
    :param filters: field opr value[;]...
    :param filter_operator: any|all
    :return dictionary of parsed filter settings, True/False for "all"/"any" setting
    """
    LOG.debug("%s %s", filters, filter_operator)

    if filter_operator and filter_operator.strip().lower() not in ('all', 'any'):
        raise ValueError("operator must be 'all' or 'any': {}".format(filter_operator))

    match_operator_and = (filter_operator.strip().lower() == 'all') if filter_operator else True

    # parse the filters and produce a tuple of (field, operator, value)
    match_list = {}
    if filters:
        for filter_str in filters.split(';'):
            m = REGEX_OPERATORS.match(filter_str.strip())
            if not m or len(m.groups()) != 3:
                raise ValueError("Unable to parse filter '{}'".format(filter_str))

            match_field = m.group(1)
            match_opr = m.group(2)
            # correct mistyped comparison
            if match_opr.strip() == '=':
                match_opr = '=='

            match_value = m.group(3)

            # restore lists to actual lists
            if match_value.startswith("["):
                try:
                    match_value = json.loads(match_value.replace("'", '"'))  # make sure correct json format
                except Exception as err:
                    LOG.error(str(err))
                    pass
            # determine if working with a string, boolean, or int
            elif match_value in ["true", "True", "false", "False"]:
                match_value = str_to_bool(match_value)
            elif match_value == 'None':
                match_value = None
            else:
                try:
                    match_value = int(match_value)  # this will fail for non-numeric values, which is trapped
                except:
                    pass

            compare_tuple = (match_field, match_opr, match_value)
            LOG.debug(compare_tuple)
            match_list[match_field] = compare_tuple

    return match_list, match_operator_and
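
To make the filter grammar concrete, a hedged example of how a matching_incident_fields string might parse; the exact operators accepted depend on REGEX_OPERATORS, which is not shown here:

# Hypothetical call, assuming REGEX_OPERATORS recognizes comparisons such as '>' and '=='.
match_list, match_operator_and = parse_matching_criteria(
    "severity_code > 3; confirmed == true", "all")
# Expected shape of the result:
#   match_list         -> {"severity_code": ("severity_code", ">", 3),
#                          "confirmed": ("confirmed", "==", True)}
#   match_operator_and -> True   # "all" means every filter must match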
    def _fn_get_wiki_contents_function(self, event, *args, **kwargs):
        """Function: None"""
        try:
            validate_fields(["wiki_path"], kwargs)
            # Get the wf_instance_id of the workflow this Function was called in
            #wf_instance_id = event.message["workflow_instance"]["workflow_instance_id"]
            #yield StatusMessage("Starting 'fn_get_wiki_contents' running in workflow '{0}'".format(wf_instance_id))

            # Get the function parameters:
            wiki_contents_as_json = str_to_bool(
                kwargs.get("wiki_contents_as_json", "False"))  # boolean
            wiki_path = kwargs.get("wiki_path")  # text

            log = logging.getLogger(__name__)
            log.info("wiki_contents_as_json: %s", wiki_contents_as_json)
            log.info(u"wiki_path: %s", wiki_path)

            ##############################################
            # PUT YOUR FUNCTION IMPLEMENTATION CODE HERE #
            ##############################################
            rp = ResultPayload(PACKAGE_NAME, **kwargs)
            helper = WikiHelper(self.rest_client())

            # separate the target wiki from its parent path
            wiki_list = wiki_path.strip().split("/")
            wiki_title = wiki_list.pop()

            # find the wiki page
            content = helper.get_wiki_contents(wiki_title, wiki_list)
            log.debug(content)

            content_text = reason = None
            if content:
                content_text = content['text']
                if wiki_contents_as_json:
                    content['json'] = json.loads(content_text.replace(
                        '\n', ''))
            else:
                reason = u"Unable to find wiki by path: {}".format(wiki_path)
                yield StatusMessage(reason)

            results = rp.done(not bool(reason), content, reason=reason)
            # add the title of the wiki page
            results['title'] = content.get('title') if content else None

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
Example #18
    def _build_createIssue_appDict(self, kwargs):
        '''
        build the dictionary used for the create api request
        :param kwargs:
        :return: dictionary of values to use
        '''

        validate_fields(
            ['incident_id', 'jira_project', 'jira_issuetype', 'jira_summary'],
            kwargs)

        # build the URL back to Resilient
        url = build_incident_url(
            build_resilient_url(self.res_params.get('host'),
                                self.res_params.get('port')),
            kwargs['incident_id'])
        if kwargs.get("task_id"):
            url = "{}?task_id={}".format(url, kwargs.get("task_id"))

        html2markdwn = MarkdownParser(strikeout=constants.STRIKEOUT_CHAR,
                                      bold=constants.BOLD_CHAR,
                                      underline=constants.UNDERLINE_CHAR,
                                      italic=constants.ITALIC_CHAR)

        data = {
            'url': self.options['url'],
            'user': self.options['user'],
            'password': self.options['password'],
            'verifyFlag': str_to_bool(self.options.get('verify_cert', 'True')),
            'project': self.get_textarea_param(kwargs['jira_project']),
            'issuetype': self.get_textarea_param(kwargs['jira_issuetype']),
            'fields': {
                'summary':
                self.get_textarea_param(kwargs['jira_summary']),
                'description':
                u"{}\n{}".format(
                    url,
                    html2markdwn.convert(kwargs.get('jira_description', '')))
            }
        }

        if kwargs.get('jira_priority'):
            data['fields']['priority'] = {"id": kwargs['jira_priority']}

        if kwargs.get('jira_assignee'):
            data['fields']['assignee'] = {"name": kwargs['jira_assignee']}

        return data
Example #19
def validate(options):
    """
    validate API_KEY, URL, and VERIFY_CERT
    :param options: function options from app.config
    :return
    """
    validate_fields([{
        "name": "misp_key",
        "placeholder": "http://localhost"
    }, {
        "name": "misp_url",
        "placeholder": "<your key>"
    }, {
        "name": "verify_cert"
    }], options)
    key = options.get('misp_key')
    url = options.get('misp_url')
    verify = str_to_bool(options.get("verify_cert"))
    return key, url, verify
    def _network_device_query_function(self, event, *args, **kwargs):
        """Function: function to connect with firewalls via ssh to retrieve stats
         This integration uses the netMiko library to access the hosts.
        """
        try:
            # Get the function parameters:
            netdevice_ids = kwargs.get("netdevice_ids")  # text
            netdevice_cmd = kwargs.get("netdevice_send_cmd")  # text
            use_textfsm = str_to_bool(
                kwargs.get("netdevice_use_textfsm", 'False'))  # bool

            log = logging.getLogger(__name__)
            log.info("netdevice_ids: %s", netdevice_ids)
            log.info("netdevice_cmd: %s", netdevice_cmd)
            log.info("netdevice_use_textfsm: %s", use_textfsm)

            if not netdevice_cmd:
                raise ValueError("Specify netdevice_send_cmd")

            if use_textfsm and not self.template_dir:
                raise ValueError(
                    "'netdevice_use_textfsm' set but no template directory is specified in app.config"
                )

            yield StatusMessage("starting...")

            result_payload = ResultPayload(FunctionComponent.SECTION_HDR,
                                           **kwargs)

            rc, result = self._run_netdevice(netdevice_ids, netdevice_cmd,
                                             None, use_textfsm)
            status = result_payload.done(rc, result)

            yield StatusMessage("done")

            # Produce a FunctionResult with the results
            yield FunctionResult(status)
        except Exception:
            yield FunctionError()
Example #21
    def _fn_elasticsearch_query_function(self, event, *args, **kwargs):
        """Function: Allows a user to query a specified ElasticSearch datastore for data."""
        try:

            yield StatusMessage("Starting")
            helper = ElasticSearchHelper(self.options)

            ELASTICSEARCH_BOOL_HTTP_AUTH = str_to_bool(
                value=helper.get_config_option("es_use_http", True))
            ELASTICSEARCH_URL = helper.get_config_option("es_datastore_url")
            ELASTICSEARCH_CERT = helper.get_config_option("es_cafile", True)
            ELASTICSEARCH_SCHEME = helper.get_config_option(
                "es_datastore_scheme", True)
            ELASTICSEARCH_USERNAME = helper.get_config_option(
                "es_auth_username", True)
            ELASTICSEARCH_PASSWORD = helper.get_config_option(
                "es_auth_password", True)
            # Get the function parameters:
            es_index = kwargs.get("es_index")  # text
            es_doc_type = kwargs.get("es_doc_type")  # text
            es_query = self.get_textarea_param(
                kwargs.get("es_query"))  # textarea

            log = logging.getLogger(__name__)
            log.info("index: %s", es_index)
            log.info("doc_type: %s", es_doc_type)
            log.info("es_query: %s", es_query)

            if not es_query:
                raise ValueError("An elasticsearch query is required")

            yield StatusMessage("Connecting to ElasticSearch...")
            '''

            verify_certs is set to false so self-signed certs won't throw an error
            cafile is the path to the cert; optional
            '''

            if not ELASTICSEARCH_CERT:
                yield StatusMessage(
                    "No Cafile found in app.config. Attempting connection without"
                )
            try:
                if ELASTICSEARCH_SCHEME.lower() == 'https':
                    # Attempt to create an SSL context, should work fine if no CERT is provided
                    if ELASTICSEARCH_CERT is None:
                        context = create_default_context()
                    else:
                        context = create_default_context(
                            cafile=ELASTICSEARCH_CERT)
                    # Connect to the ElasticSearch instance
                    es = Elasticsearch(ELASTICSEARCH_SCHEME.lower() + "://" +
                                       ELASTICSEARCH_URL,
                                       ssl_context=context,
                                       http_auth=(ELASTICSEARCH_USERNAME,
                                                  ELASTICSEARCH_PASSWORD))
                else:
                    # This handles plain HTTP connections
                    if ELASTICSEARCH_BOOL_HTTP_AUTH:
                        es = Elasticsearch([ELASTICSEARCH_URL],
                                           verify_certs=False,
                                           cafile=ELASTICSEARCH_CERT,
                                           http_auth=(ELASTICSEARCH_USERNAME,
                                                      ELASTICSEARCH_PASSWORD))
                    else:
                        es = Elasticsearch([ELASTICSEARCH_URL],
                                           verify_certs=False,
                                           cafile=ELASTICSEARCH_CERT)
            except Exception as e:
                raise FunctionError(
                    "Encountered error while connecting to ElasticSearch {0}".
                    format(e))
            # Start query results as None
            query_results = None
            matched_records = 0
            es_instance_info = es.info()

            es_results = self.perform_search(es_instance_info, es, es_query,
                                             es_index, es_doc_type)

            # If our results has a 'hits' attribute; inform the user
            if 'hits' in es_results:
                yield StatusMessage(
                    "Call to elasticsearch was successful. Returning results")
                # Prepare the results object
                query_results = es_results["hits"]["hits"]
                matched_records = es_results["hits"]["total"]

            # Check if we have a status attribute indicating an error we could raise
            elif 'status' in es_results:
                # If we encounter either a 404 (Not found) or 400 error return the reason

                if es_results['status'] in ERROR_TUPLE:
                    # Can raise the root_cause of the failure
                    log.error(es_results["error"]["root_cause"])
                    log.error(es_results)
                    log.error(es_results['status'])
                    if es_results['status'] == BADLY_FORMED_QUERY:
                        # es_results["error"]["root_cause"][1]["reason"] is only available on exceptions of type 400
                        yield StatusMessage(
                            "Exception with code 400 encountered. Error: " +
                            str(es_results["error"]["root_cause"]))

                    elif es_results['status'] == NOT_FOUND:
                        # Give reason that 404 happened; index not found?
                        yield StatusMessage(
                            "Exception encounted during query : " +
                            str(es_results["error"]["reason"]))
                    elif es_results['status'] == ES_ERROR:
                        yield StatusMessage(
                            "Unexpected 500 error encountered. Error: " +
                            str(es_results["error"]["reason"]))

            # Prepare the results object
            results = {
                "inputs": {
                    "es_query": es_query,
                    "es_doc_type": es_doc_type,
                    "es_index": es_index
                },
                "query_results": query_results,
                "success": (True if query_results is not None else False),
                "matched_records": matched_records,
                "returned_records": len(query_results)
            }
            yield StatusMessage("Successful: " + str(results["success"]))
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
def triggered_job(incident_id, object_id, row_id, scheduler_label, rule_name,
                  rule_id, rule_object_type_id, rule_params, opts, options,
                  **kwargs):
    """
    This function is called when a scheduled rule is triggered. It runs asynchronously from the create_scheduled_rule process.
    Its role is to build the API callback to Resilient to run the rule.
    In addition to invoking a rule, a note is added to the incident to indicate that a scheduled rule was run.
    :param incident_id:
    :param object_id: task_id, note_id, artifact_id, etc.
    :param row_id: used when object_id is a datatable_id
    :param scheduler_label:
    :param rule_name:
    :param rule_id:
    :param rule_object_type_id: internal id referring to incident, task, artifact, etc.
    :param rule_params:
    :param opts: **DEPRECATED** contains [resilient] parameters needed to connect back to Resilient for API calls
    :param options: contains [fn_scheduler] parameters
    :param kwargs: catch all for additional arguments as necessary
    :return: None
    """
    log.debug(incident_id)
    log.debug(rule_id)
    log.debug(rule_object_type_id)
    log.debug(rule_params)
    log.debug(kwargs)

    disable_notes = str_to_bool(options.get("disable_notes", False))
    log.debug(disable_notes)

    # get the rest client
    rest_client = get_resilient_client(RESILIENT_CONNECTION)
    scheduler = ResilientScheduler.get_scheduler()

    # make sure the incident is still open and not deleted
    try:
        resp = get_incident(rest_client, incident_id)
    except SimpleHTTPException:
        resp = None

    if not resp or resp['end_date'] is not None:
        log.warning(
            u"Incident %s is not found or closed. Removing scheduled rule: %s",
            incident_id, rule_name)
        scheduler.remove_job(scheduler_label)
        return

    # make sure the rule is still enabled
    try:
        get_rule_by_id(rest_client, rule_id)
    except KeyError as err:
        # remove rules which no longer exist
        log.error(u"Rule '%s' not found and schedule will be removed.",
                  rule_name)
        (not disable_notes) and add_comment(
            rest_client, incident_id, u"Error running rule '{}': {}".format(
                scheduler_label, str(err)))
        scheduler.remove_job(scheduler_label)
        return

    # build url for invoking a rule
    rule_type = lookup_object_type(rest_client, rule_object_type_id)
    if rule_type == "tasks":
        url = "/{}/{}".format(rule_type, object_id)
    else:
        url = "/incidents/{}".format(incident_id)

        if rule_type != '':
            url = url + "/{}/{}".format(rule_type, object_id)

    if row_id:
        url = url + "/row_data/{}".format(row_id)

    url = url + "/action_invocations"

    # build the JSON for rule
    payload = {"action_id": rule_id, "properties": rule_params}

    log.info("Executing Rule '{}:{}' for Incident {}".format(
        scheduler_label, rule_name, incident_id))

    # run the rule
    try:
        resp = rest_client.post(url, payload)
        log.debug(resp)
    except SimpleHTTPException as err:
        # is the object removed?
        if "Not Found" in str(err):
            log.error(
                "Object not found and schedule will be removed for rule '%s'",
                rule_id)
            scheduler.remove_job(scheduler_label)
        else:
            log.error("An error occurred for rule '%s'", rule_id)
        (not disable_notes) and add_comment(
            rest_client, incident_id, u"Error running rule '{}': {}".format(
                scheduler_label, str(err)))
        return

    if rule_type:
        (not disable_notes) and add_comment(
            rest_client,
            incident_id, u"Scheduled job '{}' run on {}: {}".format(
                rule_name, rule_type, object_id))
    else:
        (not disable_notes) and add_comment(
            rest_client, incident_id,
            u"Scheduled job '{}' run on incident".format(rule_name))
Example #23
    def _fn_wiki_create_update_function(self, event, *args, **kwargs):
        """Function: Create or Update a wiki page in Resilient"""
        try:
            validate_fields(["wiki_path"], kwargs)
            # Get the wf_instance_id of the workflow this Function was called in
            #wf_instance_id = event.message["workflow_instance"]["workflow_instance_id"]
            #yield StatusMessage("Starting 'fn_wiki_create_update' running in workflow '{0}'".format(wf_instance_id))

            # Get the function parameters:
            wiki_create_if_missing = str_to_bool(
                kwargs.get("wiki_create_if_missing", "False"))  # boolean
            wiki_path = kwargs.get("wiki_path")  # text
            wiki_body = kwargs.get("wiki_body")  # text

            log = logging.getLogger(__name__)
            log.info("wiki_create_if_missing: %s", wiki_create_if_missing)
            log.info(u"wiki_path: %s", wiki_path)
            log.info("wiki_body: %s", wiki_body)

            ##############################################
            # PUT YOUR FUNCTION IMPLEMENTATION CODE HERE #
            ##############################################
            helper = WikiHelper(self.rest_client())
            rp = ResultPayload(PACKAGE_NAME, **kwargs)

            # separate the target wiki from its parent path
            wiki_list = wiki_path.strip().split("/")
            wiki_title = wiki_list.pop()

            content = helper.get_wiki_contents(wiki_title, wiki_list)
            reason = None
            result_content = None

            # update if content found
            if content:
                result_content = helper.update_wiki(content['id'],
                                                    content['title'],
                                                    content['parent'],
                                                    wiki_body)
            elif wiki_create_if_missing:
                parent_title = wiki_list.pop() if wiki_list else None
                # determine if the parent exists
                parent_id = None
                if parent_title:
                    parent_content = helper.get_wiki_contents(
                        parent_title, wiki_list)
                    if not parent_content:
                        reason = u"Unable to find parent page: '{}'".format(
                            parent_title)
                        yield StatusMessage(reason)
                    else:
                        parent_id = parent_content['id']

                if not reason:
                    result_content = helper.create_wiki(
                        wiki_title, parent_id, wiki_body)
            else:
                reason = u"Unable to find page with title: {}".format(
                    wiki_title)
                result_content = None

            results = rp.done(not bool(reason), result_content, reason=reason)

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()