Example No. 1
async def start_job_and_update_search(auth_header, subscription_search,
                                      input_tokens, async_splunk_client,
                                      async_kvstore_client):
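    """
    Start or reuse a search job for the given subscription_search, then update
    its sid, dispatch state, progress and next update time.
    """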
    if subscription_search.parent_search_key:
        parent_search = await fetch_search(
            auth_header, subscription_search.parent_search_key,
            async_kvstore_client)
        sid = parent_search.sid
        LOGGER.debug("Updating sid from parent search_key=%s, sid=%s",
                     subscription_search.key(), sid)
    else:
        owner, app_name, dashboard_name = parse_dashboard_id(
            subscription_search.dashboard_id)
        sid = await spawn_search_job(auth_header, app_name,
                                     subscription_search, input_tokens,
                                     async_splunk_client)
        LOGGER.debug("Updating sid from job search_key=%s, sid=%s",
                     subscription_search.key(), sid)

    subscription_search.sid = sid
    subscription_search.dispatch_state = DispatchState.NONE.value
    subscription_search.done_progress = 0

    if subscription_search.refresh_interval_seconds:
        subscription_search.next_update_time = \
            get_expiration_timestamp_str(ttl_seconds=subscription_search.refresh_interval_seconds)

    LOGGER.debug("Updated subscription_search search_key=%s, sid=%s, data=%s",
                 subscription_search.key(), sid, subscription_search)
    return True
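get_expiration_timestamp_str is used above but not shown; a minimal sketch of such a TTL helper, assuming an epoch-seconds string is an acceptable format (the real helper may use a different representation):

from datetime import datetime, timedelta, timezone


def get_expiration_timestamp_str(ttl_seconds=0):
    # Hypothetical stand-in for the helper used above: returns the expiration
    # time ttl_seconds from now as an epoch-seconds string.
    expiration = datetime.now(timezone.utc) + timedelta(seconds=ttl_seconds)
    return str(int(expiration.timestamp()))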
Example No. 2
def parse_dashboard_link(url=None):
    """
    Helper to parse dashboard_id and input_map from drilldown link
    :param url:
    :return:
    """
    dashboard_id = ''
    input_map = {}

    if url and url.startswith(SPLUNK_URL_PREFIX):
        parsed_url = urlparse(url)
        if parsed_url.path:
            # We can just use the url path here because fetch_dashboard_list_json ignores the user
            owner, app_name, dashboard_name = parse_dashboard_id(
                parsed_url.path[1:])  # removes the '/' from /app url
            try:
                dashboard_id = generate_dashboard_id("-", app_name,
                                                     dashboard_name)
            except SpacebridgeApiRequestError as e:
                LOGGER.error(
                    "Failed to parse dashboard link url={}, {}".format(
                        url, e.message))
        if parsed_url.query:
            input_map = query_params_to_input_map(parsed_url.query)

        if not dashboard_id or not input_map:
            LOGGER.error("Failed to get dashboard_id=%s or input_map=%s",
                         dashboard_id, input_map)

    LOGGER.debug(
        "Parse dashboard_link url={}, dashboard_id={}, input_map={}".format(
            url, dashboard_id, input_map))
    return dashboard_id, input_map
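query_params_to_input_map is referenced above but not shown; a minimal sketch using urllib.parse, assuming single-valued query parameters map directly to input tokens:

from urllib.parse import parse_qsl


def query_params_to_input_map(query_string):
    # Hypothetical sketch: 'form.region=us-east&form.host=web01' becomes
    # {'form.region': 'us-east', 'form.host': 'web01'}; the real helper may
    # normalize key prefixes or handle repeated keys differently.
    return {key: value for key, value in parse_qsl(query_string)}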
async def _spawn_search_job(request_context, async_splunk_client,
                            subscription_search, input_tokens, sid):
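    """
    Create a search job for the given subscription_search and return its sid.
    """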
    owner, app_name, dashboard_name = parse_dashboard_id(
        subscription_search.dashboard_id)
    sid = await create_job_from_search(request_context.auth_header,
                                       subscription_search, app_name,
                                       request_context.current_user,
                                       input_tokens, async_splunk_client, sid)

    LOGGER.info("Created search job sid={}".format(sid))
    return sid
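The request_context passed around in these examples is assumed to expose at least an auth_header, a system_auth_header and the current_user; a minimal illustrative shape (hypothetical, the real object carries additional fields):

from dataclasses import dataclass


@dataclass
class RequestContext:
    # Hypothetical sketch of the fields these helpers rely on.
    auth_header: object
    system_auth_header: object
    current_user: str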
@defer.inlineCallbacks
def fetch_dashboard_data(request_context,
                         dashboard_visualization_id=None,
                         visualization=None,
                         input_tokens=None,
                         async_splunk_client=None):
    """
    Fetch DashboardData given visualization_id
    :param request_context:
    :param dashboard_visualization_id:
    :param visualization:
    :param input_tokens:
    :param async_splunk_client:
    :return: DashboardData
    """
    dashboard_id = dashboard_visualization_id.dashboard_id
    visualization_id = dashboard_visualization_id.visualization_id
    owner, app_name, _ = dashboard_helpers.parse_dashboard_id(dashboard_id)

    LOGGER.info(
        "Start fetch_dashboard_data dashboard_id={}, visualization_id={}".
        format(dashboard_id, visualization_id))

    # inject methods can handle the empty input_tokens
    visualization.search.query = inject_tokens_into_string(
        input_tokens, visualization.search.query)
    visualization.search.earliest, visualization.search.latest = inject_time_tokens(
        input_tokens, visualization.search.earliest,
        visualization.search.latest)

    # perform export call to retrieve visualization data without search_id
    visualization_data = yield fetch_visualization_data(
        request_context=request_context,
        owner=owner,
        app_name=app_name,
        visualization=visualization,
        async_splunk_client=async_splunk_client)

    LOGGER.info(
        "Finished fetch_dashboard_data dashboard_id={}, visualization_id={}, visualization.search.query={}"
        .format(dashboard_id, visualization_id, visualization.search.query))

    defer.returnValue(
        DashboardData(dashboard_visualization_id=dashboard_visualization_id,
                      visualization_data=visualization_data))
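inject_tokens_into_string (and the related inject_time_tokens) are used above but not shown; a minimal sketch of $token$ substitution, assuming dashboard tokens are delimited by dollar signs:

import re


def inject_tokens_into_string(input_tokens, template):
    # Hypothetical sketch: replaces $token$ placeholders with values from
    # input_tokens, leaving unknown tokens untouched. The real helper may
    # also handle default values and nested tokens.
    input_tokens = input_tokens or {}

    def _replace(match):
        return str(input_tokens.get(match.group(1), match.group(0)))

    return re.sub(r"\$([\w.]+)\$", _replace, template or "")


# e.g. inject_tokens_into_string({'host': 'web01'}, 'index=main host=$host$')
# returns 'index=main host=web01'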
async def process_single_subscription_update(request_context,
                                             search,
                                             visualization_id,
                                             server_subscription_update,
                                             async_splunk_client,
                                             map_post_search=None):
    """
    An async processor which will create a subscription data event
    :param request_context:
    :param search:
    :param visualization_id:
    :param server_subscription_update:
    :param async_splunk_client:
    :param map_post_search:
    :return:
    """
    user, app_name, dashboard_name = parse_dashboard_id(search.dashboard_id)

    # Add post_search if search is dependent (i.e. defines a base)

    post_search = None
    sid = search.sid
    if search.base:
        input_tokens = load_input_tokens(search.input_tokens)
        post_search = inject_tokens_into_string(input_tokens, search.query)
        LOGGER.debug("Search has base, using parent sid, search_key=%s, sid=%s, post_search=%s",
                     search.key(), sid, post_search)

    if not post_search:
        post_search = map_post_search
    elif map_post_search:
        post_search += " " + map_post_search

    # Query the job status
    job_status = await get_search_job_content(auth_header=request_context.system_auth_header,
                                              owner=user,
                                              app_name=app_name,
                                              search_id=sid,
                                              async_splunk_client=async_splunk_client)

    # If there's no job status, don't try to send this update
    if job_status is not None:
        # call api with sid
        visualization_data = await fetch_search_job_results_visualization_data(
            owner=user,
            app_name=app_name,
            search_id=sid,
            post_search=post_search,
            auth_header=request_context.system_auth_header,
            async_splunk_client=async_splunk_client)

        # Populate the update if data is available; if there's no data, the job is still processing or an error occurred
        # It's okay to miss this update since it should get processed in the looping update
        if visualization_data:

            subscription_update = build_subscription_update(search=search,
                                                            visualization_data=visualization_data,
                                                            job_status=job_status)

            build_server_subscription_update(subscription_update, server_subscription_update)
        else:
            LOGGER.debug("No visualization data found, sid=%s, visualization_id=%s", sid, visualization_id)
    else:
        LOGGER.debug("No search job status found, sid=%s, visualization_id=%s", sid, visualization_id)
Example No. 6
async def fetch_dashboard_description(request_context,
                                      dashboard_id=None,
                                      show_refresh=True,
                                      async_splunk_client=None,
                                      async_kvstore_client=None):
    """
    Make an async HTTP call to get a single dashboard and return a DashboardDescription object

    :param request_context:
    :param dashboard_id:
    :param show_refresh: show refresh params, default True
    :param async_splunk_client:
    :param async_kvstore_client:
    :return:
    """
    params = {'output_mode': 'json'}
    owner, app_name, dashboard_name = dashboard_helpers.parse_dashboard_id(
        dashboard_id)
    if not owner or owner == '-':
        owner = request_context.current_user
        LOGGER.info("No owner given in dashboard_id=%s taking owner=%s",
                    dashboard_id, owner)
    response = await async_splunk_client.async_get_dashboard_request(
        owner=owner,
        app_name=app_name,
        auth_header=request_context.auth_header,
        params=params,
        dashboard_name=dashboard_name)

    LOGGER.info('fetch_dashboard_description response={}'.format(
        response.code))

    if response.code != HTTPStatus.OK:
        response_text = await response.text()
        raise SpacebridgeApiRequestError(
            "Failed fetch_dashboard_description request dashboard_id={}, response.code={}, response.text={}"
            .format(dashboard_id, response.code, response_text),
            status_code=response.code)

    response_json = await response.json()
    entry_json_list = response_json.get('entry')

    if not entry_json_list:
        # no entries most likely means the dashboard_id is not valid
        raise SpacebridgeApiRequestError(
            "No Entries found for dashboard_id={}".format(dashboard_id),
            status_code=HTTPStatus.NOT_FOUND)

    dashboard = await parse.to_dashboard_description(
        entry_json_list[0],
        request_context=request_context,
        async_splunk_client=async_splunk_client,
        show_refresh=show_refresh)

    if async_kvstore_client is not None:
        dashboard_meta = await fetch_dashboard_meta(
            request_context=request_context,
            dashboard_id=dashboard_id,
            async_kvstore_client=async_kvstore_client)
        if dashboard_meta:
            dashboard.is_favorite = dashboard_meta.is_favorite

    return dashboard
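parse_dashboard_id and generate_dashboard_id appear throughout these examples but are not shown; a minimal sketch assuming the id is simply the three segments joined by '/' (the real helpers may validate input and raise SpacebridgeApiRequestError on malformed ids):

def generate_dashboard_id(owner, app_name, dashboard_name):
    # Hypothetical sketch of an '<owner>/<app_name>/<dashboard_name>' format.
    return "/".join([owner, app_name, dashboard_name])


def parse_dashboard_id(dashboard_id):
    # Inverse of the above: split the id back into its three parts.
    owner, app_name, dashboard_name = dashboard_id.split("/")
    return owner, app_name, dashboard_name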
Example No. 7
@defer.inlineCallbacks
def process_single_subscription_update(request_context,
                                       search,
                                       visualization_id,
                                       server_subscription_update,
                                       async_splunk_client,
                                       guid_generator,
                                       map_post_search=None):
    """
    An async processor which will create a subscription data event
    :param request_context:
    :param search:
    :param visualization_id:
    :param server_subscription_update:
    :param async_splunk_client:
    :param guid_generator:
    :param map_post_search:
    :return:
    """
    user, app_name, dashboard_name = parse_dashboard_id(search.dashboard_id)

    # Add post_search if search is dependent (i.e. defines a base)

    post_search = None
    sid = search.sid
    if search.base:
        input_tokens = load_input_tokens(search.input_tokens)
        post_search = inject_tokens_into_string(input_tokens, search.query)
        LOGGER.debug(
            "Search has base, using parent sid, search_key=%s, sid=%s, post_search=%s",
            search.key(), sid, post_search)

    if not post_search:
        post_search = map_post_search
    elif map_post_search:
        post_search += " " + map_post_search

    # Query the job status
    job_status = yield get_search_job_content(
        auth_header=request_context.system_auth_header,
        owner=user,
        app_name=app_name,
        search_id=sid,
        async_splunk_client=async_splunk_client)

    # If there's no job status, don't try to send this update
    if job_status:
        # call api with sid
        visualization_data = yield fetch_search_job_results_visualization_data(
            owner=user,
            app_name=app_name,
            search_id=sid,
            post_search=post_search,
            auth_header=request_context.system_auth_header,
            async_splunk_client=async_splunk_client)

        # Populate the update if data is available; if there's no data, the job is still processing or an error occurred
        # It's okay to miss this update since it should get processed in the looping update
        if visualization_data:
            # Populate the server_subscription_update
            LOGGER.debug("Post processing sid=%s, visualization_id=%s", sid,
                         visualization_id)
            dashboard_visualization_id = DashboardVisualizationId(
                dashboard_id=search.dashboard_id,
                visualization_id=visualization_id)

            # Set params
            server_subscription_update.updateId = guid_generator()
            visualization_data.set_protobuf(
                server_subscription_update.dashboardVisualizationEvent.
                visualizationData)

            # Update with the search job state
            if job_status:
                server_subscription_update.dashboardVisualizationEvent.dispatchState = int(
                    job_status.dispatch_state)
                server_subscription_update.dashboardVisualizationEvent.doneProgress = float(
                    job_status.done_progress)

            # Update with dashboard_visualization_id
            dashboard_visualization_id.set_protobuf(
                server_subscription_update.dashboardVisualizationEvent.
                dashboardVisualizationId)
        else:
            LOGGER.debug(
                "No visualization data found, sid=%s, visualization_id=%s",
                sid, visualization_id)
    else:
        LOGGER.debug("No search job status found, sid=%s, visualization_id=%s",
                     sid, visualization_id)
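Example No. 7 takes a guid_generator callable instead of generating update ids inline; a minimal sketch of such a factory, assuming a UUID4 string is acceptable:

import uuid


def guid_generator():
    # Hypothetical sketch: any injectable factory returning unique strings
    # would work for server_subscription_update.updateId.
    return str(uuid.uuid4())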