Example #1
0
async def get_sid_from_query(auth_header,
                             search=None,
                             input_tokens=None,
                             owner=None,
                             app_name=None,
                             async_splunk_client=None,
                             sid=None):
    """
    Create a search job from a regular search query and return its sid.

    :param auth_header: auth header forwarded to the Splunk API call
    :param search: search object; its query/earliest_time/latest_time are
        updated in place with injected token values
    :param input_tokens: token values injected into the query and time range
    :param owner: owner namespace for the search job
    :param app_name: app namespace for the search job
    :param async_splunk_client: client used to issue the async request
    :param sid: Optional override for a search job id
    :return: the sid of the created search job
    :raises SpacebridgeApiRequestError: if the request params are empty or
        the job-creation request does not return 200/201
    """

    # inject methods can handle the empty input_tokens
    search.query = inject_tokens_into_string(input_tokens, search.query)
    search.earliest_time, search.latest_time = inject_time_tokens(
        input_tokens, search.earliest_time, search.latest_time)

    params = get_search_job_request_params(
        query=search.query,
        earliest_time=search.earliest_time,
        latest_time=search.latest_time,
        sample_ratio=search.sample_ratio,
        exec_mode=EXEC_MODE_NORMAL,
        max_time='0',  # max_time=0 will allow search to run until complete
        sid=sid)

    if not params:
        LOGGER.error("Failed to get search job params %s", search)
        raise SpacebridgeApiRequestError("Missing search query.",
                                         status_code=HTTPStatus.NOT_FOUND)

    # urllib.urlencode is the Python 2 spelling and does not exist on
    # Python 3 (which this async def requires); use urllib.parse instead.
    from urllib.parse import urlencode
    response = await async_splunk_client.async_get_search_data_request(
        auth_header=auth_header,
        owner=owner,
        app_name=app_name,
        data=urlencode(params))
    if response.code not in {HTTPStatus.OK, HTTPStatus.CREATED}:
        error = await response.text()
        LOGGER.error(
            "Failed to create search job status_code=%s, error=%s, %s",
            response.code, error, search)
        raise SpacebridgeApiRequestError(format_splunk_error(
            response.code, error),
                                         status_code=response.code)

    response_json = await response.json()
    sid = response_json.get("sid")
    # Log the sid itself, not the full response payload the message promises
    LOGGER.info("Created search job with sid=%s", sid)
    return sid
Example #2
0
async def fetch_visualization_data(auth_header,
                                   owner,
                                   app_name,
                                   subscription_search,
                                   input_tokens,
                                   async_splunk_client,
                                   map_post_search=None):
    """
    Retrieve visualization data for a search job once it has completed.

    :param auth_header:
    :param owner:
    :param app_name:
    :param subscription_search:
    :param input_tokens:
    :param async_splunk_client:
    :param map_post_search:
    :return: the visualization data for the search job
    :raises SpacebridgeApiRequestError: if no visualization data came back
    """
    sid = subscription_search.sid

    # A dependent search (one that defines a base) contributes a post search
    # built from its own query with tokens injected.
    base_post_search = None
    if subscription_search.base:
        base_post_search = inject_tokens_into_string(
            input_tokens, subscription_search.query)

    # A map post search, when present, is appended after the base-derived one.
    if map_post_search and base_post_search:
        post_search = base_post_search + " " + map_post_search
    elif map_post_search:
        post_search = map_post_search
    else:
        post_search = base_post_search

    # when done get data results from search
    visualization_data = await fetch_search_job_results_visualization_data(
        auth_header=auth_header,
        owner=owner,
        app_name=app_name,
        search_id=sid,
        post_search=post_search,
        async_splunk_client=async_splunk_client)

    if not visualization_data:
        raise SpacebridgeApiRequestError(
            "Unable to get visualization data for search. {}".format(
                subscription_search),
            status_code=HTTPStatus.NOT_FOUND)

    return visualization_data
def fetch_dashboard_data(request_context,
                         dashboard_visualization_id=None,
                         visualization=None,
                         input_tokens=None,
                         async_splunk_client=None):
    """
    Fetch DashboardData given visualization_id.

    NOTE(review): this is a Twisted inlineCallbacks-style generator (yields
    Deferreds and finishes with defer.returnValue); the decorator is not
    visible in this chunk — confirm it is applied at the definition site.

    :param request_context:
    :param dashboard_visualization_id: composite id carrying dashboard_id and
        visualization_id
    :param visualization: visualization whose search fields are updated in
        place with injected token values
    :param input_tokens: token values injected into the query and time range
    :param async_splunk_client:
    :return: (via defer.returnValue) a DashboardData built from the fetched
        visualization data
    """
    dashboard_id = dashboard_visualization_id.dashboard_id
    visualization_id = dashboard_visualization_id.visualization_id
    # dashboard_id encodes owner and app; parse them out for the API call
    owner, app_name, _ = dashboard_helpers.parse_dashboard_id(dashboard_id)

    LOGGER.info(
        "Start fetch_dashboard_data dashboard_id={}, visualization_id={}".
        format(dashboard_id, visualization_id))

    # inject methods can handle the empty input_tokens
    visualization.search.query = inject_tokens_into_string(
        input_tokens, visualization.search.query)
    visualization.search.earliest, visualization.search.latest = inject_time_tokens(
        input_tokens, visualization.search.earliest,
        visualization.search.latest)

    # perform export call to retrieve visualization data without search_id
    visualization_data = yield fetch_visualization_data(
        request_context=request_context,
        owner=owner,
        app_name=app_name,
        visualization=visualization,
        async_splunk_client=async_splunk_client)

    LOGGER.info(
        "Finished fetch_dashboard_data dashboard_id={}, visualization_id={}, visualization.search.query={}"
        .format(dashboard_id, visualization_id, visualization.search.query))

    defer.returnValue(
        DashboardData(dashboard_visualization_id=dashboard_visualization_id,
                      visualization_data=visualization_data))
async def process_single_subscription_update(request_context,
                                             search,
                                             visualization_id,
                                             server_subscription_update,
                                             async_splunk_client,
                                             map_post_search=None):
    """
    Create a subscription data event for a single search, populating the
    provided server_subscription_update when data is available.

    :param request_context:
    :param search:
    :param visualization_id:
    :param server_subscription_update:
    :param async_splunk_client:
    :param map_post_search:
    :return:
    """
    user, app_name, dashboard_name = parse_dashboard_id(search.dashboard_id)

    sid = search.sid

    # A dependent search (one defining a base) runs against the parent's sid
    # with its own token-injected query supplied as a post search.
    post_search = None
    if search.base:
        tokens = load_input_tokens(search.input_tokens)
        post_search = inject_tokens_into_string(tokens, search.query)
        LOGGER.debug("Search has base, using parent sid, search_key=%s, sid=%s, post_search=%s",
                     search.key(), sid, post_search)

    # Append (or substitute) the map post search
    if post_search:
        if map_post_search:
            post_search = post_search + " " + map_post_search
    else:
        post_search = map_post_search

    # Query the job status; without it we don't try to send this update
    job_status = await get_search_job_content(auth_header=request_context.system_auth_header,
                                              owner=user,
                                              app_name=app_name,
                                              search_id=sid,
                                              async_splunk_client=async_splunk_client)
    if job_status is None:
        LOGGER.debug("No search job status found, sid=%s, visualization_id=%s", sid, visualization_id)
        return

    # call api with sid
    visualization_data = await fetch_search_job_results_visualization_data(
        owner=user,
        app_name=app_name,
        search_id=sid,
        post_search=post_search,
        auth_header=request_context.system_auth_header,
        async_splunk_client=async_splunk_client)

    # No data means the job is still processing or an error occurred; it's
    # okay to miss this update — it will be picked up by the looping update.
    if not visualization_data:
        LOGGER.debug("No visualization data found, sid=%s, visualization_id=%s", sid, visualization_id)
        return

    subscription_update = build_subscription_update(search=search,
                                                    visualization_data=visualization_data,
                                                    job_status=job_status)
    build_server_subscription_update(subscription_update, server_subscription_update)
Example #5
0
def process_single_subscription_update(request_context,
                                       search,
                                       visualization_id,
                                       server_subscription_update,
                                       async_splunk_client,
                                       guid_generator,
                                       map_post_search=None):
    """
    An async processor which will create a subscription data event

    NOTE(review): generator-style (yields Deferreds); presumably wrapped by
    an inlineCallbacks-style decorator at the definition site — confirm.

    :param request_context:
    :param search:
    :param visualization_id:
    :param server_subscription_update:
    :param async_splunk_client:
    :param guid_generator: callable producing a new update id
    :param map_post_search:
    :return:
    """
    user, app_name, dashboard_name = parse_dashboard_id(search.dashboard_id)

    # Add post_search if search is dependent (i.e. defines a base)

    post_search = None
    sid = search.sid
    if search.base:
        input_tokens = load_input_tokens(search.input_tokens)
        post_search = inject_tokens_into_string(input_tokens, search.query)
        LOGGER.debug(
            "Search has base, using parent sid, search_key=%s, sid=%s, post_search=%s",
            search.key(), sid, post_search)

    if not post_search:
        post_search = map_post_search
    elif map_post_search:
        post_search += " " + map_post_search

    # Query the job status
    job_status = yield get_search_job_content(
        auth_header=request_context.system_auth_header,
        owner=user,
        app_name=app_name,
        search_id=sid,
        async_splunk_client=async_splunk_client)

    # If no job_status we don't try to send this update
    if job_status:
        # call api with sid
        visualization_data = yield fetch_search_job_results_visualization_data(
            owner=user,
            app_name=app_name,
            search_id=sid,
            post_search=post_search,
            auth_header=request_context.system_auth_header,
            async_splunk_client=async_splunk_client)

        # populate update if data available, if no data is available it means job is still processing or error occurred
        # its okay if we miss this update as it should get processing in looping update
        if visualization_data:
            # Populate the server_subscription_update
            LOGGER.debug("Post processing sid=%s, visualization_id=%s", sid,
                         visualization_id)
            dashboard_visualization_id = DashboardVisualizationId(
                dashboard_id=search.dashboard_id,
                visualization_id=visualization_id)

            # Set params
            server_subscription_update.updateId = guid_generator()
            visualization_data.set_protobuf(
                server_subscription_update.dashboardVisualizationEvent.
                visualizationData)

            # job_status is known truthy here (guarded above), so attach the
            # search job state unconditionally — the inner re-check was dead.
            server_subscription_update.dashboardVisualizationEvent.dispatchState = int(
                job_status.dispatch_state)
            server_subscription_update.dashboardVisualizationEvent.doneProgress = float(
                job_status.done_progress)

            # Update with dashboard_visualization_id
            dashboard_visualization_id.set_protobuf(
                server_subscription_update.dashboardVisualizationEvent.
                dashboardVisualizationId)
        else:
            LOGGER.debug(
                "No visualization data found, sid=%s, visualization_id=%s",
                sid, visualization_id)
    else:
        LOGGER.debug("No search job status found, sid=%s, visualization_id=%s",
                     sid, visualization_id)