Example #1
    def do_run(self, inputs):
        """
        The entry point for the modular input.

        :param inputs: The command line arguments used when running this modular input. See the parent method definition
                       for more details.
        """
        # noinspection PyBroadException
        try:
            if not modular_input_should_run(self.session_key, LOGGER):
                LOGGER.debug('The AR modular input will not run on this host.')
                return

            uri = rest.makeSplunkdUri()
            _wait_for_kvstore_to_start(uri=uri,
                                       session_key=self.session_key,
                                       timeout_seconds=30)

            task.react(self._run_initialization, [AsyncClientFactory(uri)])
        except SystemExit as e:
            if e.code != 0:
                LOGGER.exception(
                    'Exited AR modular input with non-zero exit_code=%d message=%s',
                    e.code, e)
            else:
                LOGGER.debug(
                    'Successfully ran the AR initialization modular input.')
        except Exception:
            LOGGER.exception(
                'Unhandled exception while running AR modular input.')
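For context, task.react is Twisted's single-shot entry point: it runs the reactor, waits on the Deferred returned by the supplied callable, and then exits the process via SystemExit (code 0 on success, 1 on failure), which is why the snippet traps SystemExit and checks e.code. A minimal standalone sketch of that pattern (the callable and its argument are illustrative, not from the snippet):

from twisted.internet import defer, task

def _main(reactor, message):
    # react() calls this with the reactor plus the extra-argument list and
    # expects a Deferred back; once it fires, react() stops the reactor and
    # raises SystemExit(0), or SystemExit(1) if the Deferred failed.
    print(message)
    return defer.succeed(None)

if __name__ == '__main__':
    task.react(_main, ['hello from task.react'])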
Example #2
async def do_trigger(alert_payload):
    auth_header = SplunkAuthHeader(alert_payload[SESSION_KEY])

    # Use default URI for alerts
    uri = None
    try:
        uri = rest.makeSplunkdUri()
    except Exception:
        LOGGER.exception("Failed to generate default URI")

    if not uri:
        return

    mtls_spacebridge_client = None
    mtls_enabled = config.get_mtls_enabled()
    if mtls_enabled:
        mtls_spacebridge_client = build_mtls_spacebridge_client(alert_payload[SESSION_KEY])

    async_client_factory = AsyncClientFactory(uri, spacebridge_client=mtls_spacebridge_client)
    async_kvstore_client = async_client_factory.kvstore_client()
    async_splunk_client = async_client_factory.splunk_client()
    async_spacebridge_client = async_client_factory.spacebridge_client()

    alert_sid = alert_payload[SEARCH_ID]
    preprocess_payload(alert_payload)

    # Default to empty string so urllib.quote doesn't fail if user doesn't exist
    user = alert_payload[RESULT].get(USER, '')
    request_context = RequestContext(auth_header=auth_header, is_alert=True, current_user=user)

    LOGGER.info("get_registered_devices alert_sid=%s", alert_sid)
    registered_devices = await get_registered_devices(request_context, async_kvstore_client, alert_payload)
    LOGGER.info("get_registered_devices ok alert_sid=%s", alert_sid)

    alert = await build_alert(request_context, alert_payload, async_splunk_client, async_kvstore_client)
    LOGGER.info("persist_alert alert_id=%s", alert_sid)
    response = persist_alert(alert, auth_header.session_token)
    LOGGER.info("persist_alert ok succeeded alert_id=%s", alert_sid)

    # If we get a proper response from KV Store, then we get the key of the stored alert
    # and create a (device_id, alert_id, timestamp) triplet for each device that should
    # receive the alert

    if response is not None and "_key" in response.keys():
        alert_id = response["_key"]
        alert.notification.alert_id = alert_id

        # Persisting (recipient device, alert id) pairs and sending push notifications happens simultaneously via async
        LOGGER.info("persist_recipient_devices alert_id=%s", alert_id)
        await persist_recipient_devices(request_context, alert_id, registered_devices, alert_payload,
                                        async_kvstore_client)
        LOGGER.info("persist_recipient_devices ok alert_id=%s", alert_id)
        LOGGER.info("send_push_notifications starting registered_devices=%s", len(registered_devices))
        await send_push_notifications(
            request_context, alert.notification, registered_devices, async_kvstore_client, async_spacebridge_client,
            async_splunk_client)
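The coroutine above still needs an event loop to drive it; in a custom alert action it would typically be kicked off with asyncio.run on the payload Splunk hands the script. A hedged sketch of such a harness (reading JSON from stdin assumes the alert action is configured with payload_format = json; this is not the app's actual entry point):

import asyncio
import json
import sys

def main():
    # Splunk delivers the alert payload as JSON on stdin for custom alert
    # actions configured with payload_format = json (an assumption here).
    alert_payload = json.loads(sys.stdin.read())
    asyncio.run(do_trigger(alert_payload))

if __name__ == '__main__':
    main()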
Example #3
def do_trigger(reactor, alert_payload):
    auth_header = SplunkAuthHeader(alert_payload[SESSION_KEY])

    # Use default URI for alerts
    uri = None
    try:
        uri = rest.makeSplunkdUri()
    except Exception:
        LOGGER.exception("Failed to generate default URI")

    if not uri:
        return

    async_client_factory = AsyncClientFactory(uri)
    async_kvstore_client = async_client_factory.kvstore_client()
    async_splunk_client = async_client_factory.splunk_client()
    async_spacebridge_client = async_client_factory.spacebridge_client()

    alert_sid = alert_payload[SEARCH_ID]
    preprocess_payload(alert_payload)

    request_context = RequestContext(auth_header=auth_header, is_alert=True)

    LOGGER.info("get_registered_devices alert_sid=%s" % alert_sid)
    registered_devices = yield get_registered_devices(request_context,
                                                      async_kvstore_client,
                                                      alert_payload)
    LOGGER.info("get_registered_devices ok alert_sid=%s" % alert_sid)

    alert = yield build_alert(request_context, alert_payload,
                              async_splunk_client, async_kvstore_client)
    LOGGER.info("persist_alert alert_id=%s" % alert_sid)
    response = persist_alert(alert, auth_header.session_token)
    LOGGER.info("persist_alert ok succeeded alert_id=%s" % alert_sid)

    # If we get a proper response from KV Store, then we get the key of the stored alert
    # and create a (device_id, alert_id, timestamp) triplet for each device that should
    # receive the alert

    if response is not None and "_key" in response.keys():
        alert_id = response["_key"]
        alert.notification.alert_id = alert_id

        # Persisting (recipient device, alert id) pairs and sending push notifications happens simultaneously via async
        LOGGER.info("persist_recipient_devices alert_id=%s" % alert_id)
        persist_recipient_devices(request_context, alert_id,
                                  registered_devices, alert_payload,
                                  async_kvstore_client)
        LOGGER.info("persist_recipient_devices ok alert_id=%s" % alert_id)
        LOGGER.info("send_push_notifications starting registered_devices=%s" %
                    len(registered_devices))
        yield send_push_notifications(request_context, alert.notification,
                                      registered_devices, async_kvstore_client,
                                      async_spacebridge_client,
                                      async_splunk_client)
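This is the Twisted predecessor of Example #2: the yields resolve Deferreds, so the function is presumably decorated with @defer.inlineCallbacks (not shown in the snippet) and driven by the reactor, which also explains the reactor parameter. A tiny standalone sketch of that generator style:

from twisted.internet import defer, task

@defer.inlineCallbacks
def _example(reactor):
    # Each yield suspends until its Deferred fires, mirroring the awaits in
    # Example #2; inlineCallbacks wraps the generator in a Deferred that
    # task.react can wait on.
    value = yield defer.succeed(42)
    defer.returnValue(value)

if __name__ == '__main__':
    task.react(_example)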
Example #4
    def executor(job_contexts):
        deferreds = []
        for job in job_contexts:
            LOGGER.debug("Processing search job. search_key=%s", job.search_context.search.key())
            async_client_factory = AsyncClientFactory(job.splunk_uri)
            d = process_pubsub_subscription(job.auth_header, encryption_context,
                                            async_client_factory.spacebridge_client(),
                                            async_client_factory.kvstore_client(),
                                            async_client_factory.splunk_client(), job.search_context)
            deferreds.append(d)

        return FakeProcess(defer.DeferredList(deferreds, consumeErrors=True))
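defer.DeferredList(deferreds, consumeErrors=True) aggregates the per-job Deferreds into one that fires with a list of (success, result) pairs, and it swallows individual failures so they are not reported as unhandled. A small standalone sketch of consuming such a list (the callback is illustrative):

from twisted.internet import defer

def _report(results):
    # Each entry is (True, result) on success or (False, Failure) when
    # consumeErrors=True captured that job's error.
    for ok, value in results:
        print('ok' if ok else 'failed', value)
    return results

d = defer.DeferredList([defer.succeed(1), defer.fail(RuntimeError('boom'))],
                       consumeErrors=True)
d.addCallback(_report)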
Example #5
    def do_run(self, input_config):
        """ Spins up a websocket connection Spacebridge and begins
        the reactor loops
        """
        shard_id = default_shard_id()

        self.logger.info("Starting libsodium child process")
        sodium_logger = self.logger.getChild('sodium_client')
        sodium_logger.setLevel(logging.WARN)

        sodium_client = SodiumClient(sodium_logger)
        encryption_context = SplunkEncryptionContext(
            self.session_key, constants.SPACEBRIDGE_APP_NAME, sodium_client)

        self.logger.info(
            "Running Splunk Cloud Gateway modular input on search head, shard_id=%s",
            shard_id)

        # Fetch load balancer address if configured, otherwise use default URI
        uri = None
        try:
            uri = get_uri(self.session_key)
            self.logger.debug(
                "Successfully verified load_balancer_address={}".format(uri))
        except Exception as e:
            self.logger.exception(
                "Failed to verify load_balancer_address. {}".format(e))

        if not uri:
            return

        try:
            ensure_deployment_friendly_name(self.session_key)
            async_client_factory = AsyncClientFactory(uri)
            cloudgateway_message_handler = CloudgatewayMessageHandler(
                SplunkAuthHeader(self.session_key),
                logger=self.logger,
                encryption_context=encryption_context,
                async_client_factory=async_client_factory,
                shard_id=shard_id)

            client = CloudGatewayWsClient(
                encryption_context,
                message_handler=cloudgateway_message_handler,
                mode=WebsocketMode.ASYNC,
                logger=self.logger,
                config=config,
                shard_id=shard_id)

            client.connect()
        except Exception as e:
            self.logger.exception(
                "Exception connecting to cloud gateway={0}".format(e))
Example #6
async def subprocess_subscription(job_context, mtls_enabled):
    mtls_spacebridge_client = None
    if mtls_enabled:
        mtls_spacebridge_client = build_mtls_spacebridge_client(
            job_context.auth_header.session_token)

    encryption_context = job_context.encryption_context
    async_client_factory = AsyncClientFactory(
        job_context.splunk_uri, spacebridge_client=mtls_spacebridge_client)
    result = JobResult(False)
    try:
        result = await process_pubsub_subscription(
            job_context.auth_header, encryption_context,
            async_client_factory.spacebridge_client(),
            async_client_factory.kvstore_client(),
            async_client_factory.splunk_client(), job_context.search_context,
            job_context.subscription_update_ids)
    except Exception:
        LOGGER.exception("Failed to process subscription")

    return result
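Because each call returns a JobResult and traps its own exceptions, several subscription jobs can be fanned out concurrently. A hedged sketch of such a driver (the job list and the mtls flag are assumptions, not part of the snippet):

import asyncio

async def run_all(job_contexts, mtls_enabled=False):
    # Fan the per-job coroutines out and wait for all of them; each one
    # already catches its own exceptions and returns a JobResult.
    tasks = [subprocess_subscription(job, mtls_enabled) for job in job_contexts]
    return await asyncio.gather(*tasks)

# results = asyncio.run(run_all(job_contexts))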
Example #7
    def do_run(self, input_config):
        """
        This will update the Device Role Mapping table in KV Store with the new mapping of a device to role
        :param input_config:
        :return:
        """
        if not modular_input_should_run(self.session_key, logger=self.logger):
            self.logger.debug("Modular input will not run on this node.")
            return

        # Use default URI for Device Role Mapping
        uri = None
        try:
            uri = rest.makeSplunkdUri()
        except Exception as e:
            self.logger.exception(
                "Failed to generate default URI. {}".format(e))

        if not uri:
            return

        try:
            async_client_factory = AsyncClientFactory(uri)
            kvstore_client = async_client_factory.kvstore_client()
            splunk_client = async_client_factory.splunk_client()
            asyncio.run(update(self.session_key, kvstore_client,
                               splunk_client))
        except SystemExit as e:
            if e.code == 0:
                self.logger.debug(
                    "device to role mapping updated successfully with code={}".
                    format(str(e.code)))
            else:
                self.logger.error(
                    "device to role mapping update failed with error={}".
                    format(str(e)))
        except Exception:
            self.logger.exception(
                "Unexpected exception in device to role mapping")
Example #8
    def __init__(self,
                 input_config,
                 encryption_context,
                 session_key,
                 async_splunk_client,
                 parent_process_monitor=None,
                 cluster_monitor=None,
                 async_client_factory=None,
                 async_kvstore_client=None,
                 async_spacebridge_client=AsyncSpacebridgeClient()):
        """
        Subscription Manager constructor
        :param input_config:
        :param encryption_context:
        :param session_key:
        :param async_kvstore_client:
        :param async_splunk_client:
        :param async_spacebridge_client:
        """
        self.input_config = input_config
        self.encryption_context = encryption_context
        self.session_key = session_key
        self.parent_process_monitor = parent_process_monitor
        self.cluster_monitor = cluster_monitor
        self.async_splunk_client = async_splunk_client
        self.async_spacebridge_client = async_spacebridge_client
        self.system_auth_header = SplunkAuthHeader(self.session_key)
        if not async_client_factory:
            uri = get_uri(self.session_key)
            async_client_factory = AsyncClientFactory(uri)
        self.async_client_factory = async_client_factory
        if not async_kvstore_client:
            async_kvstore_client = self.async_client_factory.kvstore_client()
        self.async_kvstore_client = async_kvstore_client
        self.request_context = RequestContext(
            auth_header=self.system_auth_header,
            current_user=constants.ADMIN,
            system_auth_header=self.system_auth_header)
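One wrinkle worth noting: the async_spacebridge_client=AsyncSpacebridgeClient() default is evaluated once, when the method is defined, so every instance constructed without that argument shares the same client object. A tiny standalone illustration of that Python behavior:

class _Client:
    pass

class Manager:
    def __init__(self, client=_Client()):  # default built once, at definition time
        self.client = client

# Both instances receive the very same default object.
assert Manager().client is Manager().client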
Example #9
def _run(job_contexts, sodium_client):
    errors = []

    LOGGER.debug("Running search process, searches=%s", len(job_contexts))

    for job in job_contexts:
        LOGGER.debug("Processing search job.  search_key=%s",
                     job.search_context.search.key())
        encryption_keys = EncryptionKeys.from_json(job.encryption_keys)
        encryption_context = EncryptionContext(encryption_keys, sodium_client)
        async_client_factory = AsyncClientFactory(job.splunk_uri)
        try:
            yield process_pubsub_subscription(
                job.auth_header, encryption_context,
                async_client_factory.spacebridge_client(),
                async_client_factory.kvstore_client(),
                async_client_factory.splunk_client(), job.search_context)
        except Exception as e:
            LOGGER.exception("Failed to process search, search_key=%s",
                             job.search_context.search.key())
            errors.append(e)

    if len(errors) > 0:
        raise errors[0]
Example #10
    def __init__(self, command_line, command_arg):
        BaseRestHandler.__init__(self)
        Thread(target=reactor.run, args=(False,)).start()
        self.uri = rest.makeSplunkdUri()
        self.async_client_factory = AsyncClientFactory(self.uri)
        self.async_kvstore_client = self.async_client_factory.kvstore_client()

    def __init__(self, command_line, command_arg, async_client_factory=None):
        # command_line and command_arg are passed in (but for some reason unused??) by the Splunk REST framework.
        # Accepting them at this level saves us from making all subclasses accept them.
        super(AsyncBaseRestHandler, self).__init__()
        self.async_client_factory = async_client_factory or AsyncClientFactory(
            rest.makeSplunkdUri())

    def __init__(self, command_line, command_arg):
        BaseRestHandler.__init__(self)
        self.base_uri = rest.makeSplunkdUri()
        self.async_client_factory = AsyncClientFactory(self.base_uri)
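Taken together, the examples share one shape: build a single AsyncClientFactory from the splunkd (or load-balancer) URI, then pull the per-service async clients from it. A condensed sketch of that pattern, assuming the same surrounding imports the snippets rely on (rest and AsyncClientFactory); build_clients itself is illustrative:

def build_clients(async_client_factory=None):
    # Reuse an injected factory when one is provided (as Example #10 does),
    # otherwise build one against the local splunkd management URI.
    factory = async_client_factory or AsyncClientFactory(rest.makeSplunkdUri())
    return (factory.kvstore_client(),
            factory.splunk_client(),
            factory.spacebridge_client())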