async def do_trigger(alert_payload):
    auth_header = SplunkAuthHeader(alert_payload[SESSION_KEY])

    # Use default URI for alerts
    uri = None
    try:
        uri = rest.makeSplunkdUri()
    except Exception:
        LOGGER.exception("Failed to generate default URI")

    if not uri:
        return

    # Build an mTLS-aware Spacebridge client only when mTLS is enabled in config
    mtls_spacebridge_client = None
    mtls_enabled = config.get_mtls_enabled()
    if mtls_enabled:
        mtls_spacebridge_client = build_mtls_spacebridge_client(alert_payload[SESSION_KEY])

    async_client_factory = AsyncClientFactory(uri, spacebridge_client=mtls_spacebridge_client)
    async_kvstore_client = async_client_factory.kvstore_client()
    async_splunk_client = async_client_factory.splunk_client()
    async_spacebridge_client = async_client_factory.spacebridge_client()

    alert_sid = alert_payload[SEARCH_ID]
    preprocess_payload(alert_payload)

    # Default to empty string so urllib.quote doesn't fail if user doesn't exist
    user = alert_payload[RESULT].get(USER, '')
    request_context = RequestContext(auth_header=auth_header, is_alert=True, current_user=user)

    LOGGER.info("get_registered_devices alert_sid=%s", alert_sid)
    registered_devices = await get_registered_devices(request_context, async_kvstore_client, alert_payload)
    LOGGER.info("get_registered_devices ok alert_sid=%s", alert_sid)

    alert = await build_alert(request_context, alert_payload, async_splunk_client, async_kvstore_client)

    LOGGER.info("persist_alert alert_id=%s", alert_sid)
    response = persist_alert(alert, auth_header.session_token)
    LOGGER.info("persist_alert ok alert_id=%s", alert_sid)

    # If we get a proper response from KV Store, then we get the key of the stored alert
    # and create a (device_id, alert_id, timestamp) triplet for each device that should
    # receive the alert
    if response is not None and "_key" in response:
        alert_id = response["_key"]
        alert.notification.alert_id = alert_id

        # Persist the (recipient device, alert id) pairs, then send the push notifications
        LOGGER.info("persist_recipient_devices alert_id=%s", alert_id)
        await persist_recipient_devices(request_context, alert_id, registered_devices, alert_payload,
                                        async_kvstore_client)
        LOGGER.info("persist_recipient_devices ok alert_id=%s", alert_id)

        LOGGER.info("send_push_notifications starting registered_devices=%s", len(registered_devices))
        await send_push_notifications(request_context, alert.notification, registered_devices,
                                      async_kvstore_client, async_spacebridge_client, async_splunk_client)
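# A minimal sketch (not part of the original source) of how the async do_trigger
# coroutine above might be driven from a synchronous alert-action entry point.
# trigger_from_cli is a hypothetical name; the --execute check and stdin/JSON
# handling are assumptions that mirror how Splunk custom alert actions receive
# their payload, and asyncio.run matches the pattern used by the modular input
# further below.
def trigger_from_cli():
    import asyncio
    import json
    import sys

    if len(sys.argv) > 1 and sys.argv[1] == "--execute":
        # Splunk passes the alert payload as a single JSON document on stdin
        payload = json.loads(sys.stdin.read())
        asyncio.run(do_trigger(payload))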
# Legacy Twisted implementation of do_trigger; the yields require the
# inlineCallbacks decorator so they resolve as Deferreds.
@defer.inlineCallbacks
def do_trigger(reactor, alert_payload):
    auth_header = SplunkAuthHeader(alert_payload[SESSION_KEY])

    # Use default URI for alerts
    uri = None
    try:
        uri = rest.makeSplunkdUri()
    except Exception:
        LOGGER.exception("Failed to generate default URI")

    if not uri:
        return

    async_client_factory = AsyncClientFactory(uri)
    async_kvstore_client = async_client_factory.kvstore_client()
    async_splunk_client = async_client_factory.splunk_client()
    async_spacebridge_client = async_client_factory.spacebridge_client()

    alert_sid = alert_payload[SEARCH_ID]
    preprocess_payload(alert_payload)

    request_context = RequestContext(auth_header=auth_header, is_alert=True)

    LOGGER.info("get_registered_devices alert_sid=%s", alert_sid)
    registered_devices = yield get_registered_devices(request_context, async_kvstore_client, alert_payload)
    LOGGER.info("get_registered_devices ok alert_sid=%s", alert_sid)

    alert = yield build_alert(request_context, alert_payload, async_splunk_client, async_kvstore_client)

    LOGGER.info("persist_alert alert_id=%s", alert_sid)
    response = persist_alert(alert, auth_header.session_token)
    LOGGER.info("persist_alert ok alert_id=%s", alert_sid)

    # If we get a proper response from KV Store, then we get the key of the stored alert
    # and create a (device_id, alert_id, timestamp) triplet for each device that should
    # receive the alert
    if response is not None and "_key" in response:
        alert_id = response["_key"]
        alert.notification.alert_id = alert_id

        # persist_recipient_devices is intentionally not yielded, so persisting the
        # (recipient device, alert id) pairs runs concurrently with sending push notifications
        LOGGER.info("persist_recipient_devices alert_id=%s", alert_id)
        persist_recipient_devices(request_context, alert_id, registered_devices, alert_payload,
                                  async_kvstore_client)
        LOGGER.info("persist_recipient_devices ok alert_id=%s", alert_id)

        LOGGER.info("send_push_notifications starting registered_devices=%s", len(registered_devices))
        yield send_push_notifications(request_context, alert.notification, registered_devices,
                                      async_kvstore_client, async_spacebridge_client, async_splunk_client)
def executor(job_contexts, encryption_context):
    # NOTE: encryption_context was referenced but never defined in the original
    # snippet; it is taken as a parameter here so the function is self-contained.
    deferreds = []
    for job in job_contexts:
        LOGGER.debug("Processing search job. search_key=%s", job.search_context.search.key())
        async_client_factory = AsyncClientFactory(job.splunk_uri)
        d = process_pubsub_subscription(job.auth_header,
                                        encryption_context,
                                        async_client_factory.spacebridge_client(),
                                        async_client_factory.kvstore_client(),
                                        async_client_factory.splunk_client(),
                                        job.search_context)
        deferreds.append(d)

    return FakeProcess(defer.DeferredList(deferreds, consumeErrors=True))
def __init__(self, system_auth_header, async_client_factory: AsyncClientFactory):
    """
    :param system_auth_header: (AuthHeader) auth header used for system-level requests
    :param async_client_factory: (AsyncClientFactory) factory used to build the async clients below
    """
    self.async_splunk_client = async_client_factory.splunk_client()
    self.async_kvstore_client = async_client_factory.kvstore_client()
    self.async_spacebridge_client = async_client_factory.spacebridge_client()
    self.async_telemetry_client = async_client_factory.telemetry_client()
    self.system_auth_header = system_auth_header
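# A minimal usage sketch (not part of the original source) for the constructor
# above. SubscriptionManager is a hypothetical name for the enclosing class and
# session_key is a placeholder; the factory/auth-header wiring mirrors the other
# call sites in this listing.
def build_subscription_manager(session_key):
    uri = rest.makeSplunkdUri()
    factory = AsyncClientFactory(uri)
    return SubscriptionManager(system_auth_header=SplunkAuthHeader(session_key),
                               async_client_factory=factory)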
async def subprocess_subscription(job_context, mtls_enabled):
    # Build an mTLS-aware Spacebridge client only when mTLS is enabled
    mtls_spacebridge_client = None
    if mtls_enabled:
        mtls_spacebridge_client = build_mtls_spacebridge_client(job_context.auth_header.session_token)

    encryption_context = job_context.encryption_context
    async_client_factory = AsyncClientFactory(job_context.splunk_uri,
                                              spacebridge_client=mtls_spacebridge_client)

    # Default to a failed result; it is only replaced if processing succeeds
    result = JobResult(False)
    try:
        result = await process_pubsub_subscription(job_context.auth_header,
                                                   encryption_context,
                                                   async_client_factory.spacebridge_client(),
                                                   async_client_factory.kvstore_client(),
                                                   async_client_factory.splunk_client(),
                                                   job_context.search_context,
                                                   job_context.subscription_update_ids)
    except Exception:
        LOGGER.exception("Failed to process subscription")

    return result
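# A minimal sketch (not part of the original source) of how a parent process
# might fan out subprocess_subscription over a batch of job contexts with
# asyncio.gather. run_subscription_jobs is a hypothetical helper; the
# mtls_enabled flag is assumed to come from the same config module used in
# do_trigger above.
import asyncio

async def run_subscription_jobs(job_contexts):
    mtls_enabled = config.get_mtls_enabled()
    # JobResult(False) entries in the returned list mark subscriptions that failed
    return await asyncio.gather(
        *(subprocess_subscription(job, mtls_enabled) for job in job_contexts)
    )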
def do_run(self, input_config):
    """
    This will update the Device Role Mapping table in KV Store with the new mapping of a device to role
    :param input_config:
    :return:
    """
    if not modular_input_should_run(self.session_key, logger=self.logger):
        self.logger.debug("Modular input will not run on this node.")
        return

    # Use default URI for Device Role Mapping
    uri = None
    try:
        uri = rest.makeSplunkdUri()
    except Exception:
        self.logger.exception("Failed to generate default URI")

    if not uri:
        return

    try:
        async_client_factory = AsyncClientFactory(uri)
        kvstore_client = async_client_factory.kvstore_client()
        splunk_client = async_client_factory.splunk_client()
        asyncio.run(update(self.session_key, kvstore_client, splunk_client))
    except SystemExit as e:
        if e.code == 0:
            self.logger.debug("device to role mapping updated successfully with code=%s", e.code)
        else:
            self.logger.error("device to role mapping update failed with error=%s", e)
    except Exception:
        self.logger.exception("Unexpected exception in device to role mapping")
# Legacy Twisted helper; the yield requires the inlineCallbacks decorator so
# each subscription is processed before the loop moves to the next job.
@defer.inlineCallbacks
def _run(job_contexts, sodium_client):
    errors = []
    LOGGER.debug("Running search process, searches=%s", len(job_contexts))
    for job in job_contexts:
        LOGGER.debug("Processing search job. search_key=%s", job.search_context.search.key())
        encryption_keys = EncryptionKeys.from_json(job.encryption_keys)
        encryption_context = EncryptionContext(encryption_keys, sodium_client)
        async_client_factory = AsyncClientFactory(job.splunk_uri)
        try:
            yield process_pubsub_subscription(job.auth_header,
                                              encryption_context,
                                              async_client_factory.spacebridge_client(),
                                              async_client_factory.kvstore_client(),
                                              async_client_factory.splunk_client(),
                                              job.search_context)
        except Exception as e:
            LOGGER.exception("Failed to process search, search_key=%s", job.search_context.search.key())
            errors.append(e)

    # Re-raise the first failure only after all jobs have been attempted
    if errors:
        raise errors[0]