# Example 1 (scraped snippet marker, commented out so the file parses)
def format_response(request):
    """
        Populates data for response to metrics requests.

        Parameters
        ~~~~~~~~~~

            request : RequestMeta
                RequestMeta object that stores request data.

        Returns
        ~~~~~~~

            3-tuple ``(response, metric_class, metric_obj)`` where
            ``response`` is an OrderedDict ready for JSON serialization,
            ``metric_class`` the metric type resolved from the request,
            and ``metric_obj`` an instance of that type.
    """

    args = ParameterMapping.map(request)

    metric_class = get_metric_type(request.metric)
    metric_obj = metric_class(**args)

    # Prepare metrics output for json response
    response = OrderedDict()

    response['type'] = get_request_type(request)
    response['header'] = metric_obj.header()

    # Expose the metric object's public attributes without clobbering
    # keys already set above.  str.startswith('_') replaces the old
    # regex search(r'^_.*', ...) -- identical test, no regex machinery.
    for key, value in vars(metric_obj).items():
        if not key.startswith('_') and str(key) not in response:
            response[str(key)] = value

    response['cohort'] = str(request.cohort_expr)
    response['cohort_last_generated'] = str(request.cohort_gen_timestamp)
    response['time_of_response'] = datetime.now().strftime(DATETIME_STR_FORMAT)
    response['aggregator'] = str(request.aggregator)
    response['metric'] = str(request.metric)
    response['interval_hours'] = request.interval

    if request.group:
        response['group'] = REVERSE_GROUP_MAP[int(request.group)]
    else:
        # @TODO get access to the metric default for this attribute
        response['group'] = 'default'

    response['datetime_start'] = date_parse(metric_obj.datetime_start).\
        strftime(DATETIME_STR_FORMAT)
    response['datetime_end'] = date_parse(metric_obj.datetime_end).\
        strftime(DATETIME_STR_FORMAT)

    response['data'] = OrderedDict()

    return response, metric_class, metric_obj
def format_response(request):
    """
        Populates data for response to metrics requests.

        Parameters
        ~~~~~~~~~~

            request : RequestMeta
                RequestMeta object that stores request data.

        Returns
        ~~~~~~~

            3-tuple ``(response, metric_class, metric_obj)`` where
            ``response`` is an OrderedDict ready for JSON serialization,
            ``metric_class`` the metric type resolved from the request,
            and ``metric_obj`` an instance of that type.
    """

    args = ParameterMapping.map(request)

    metric_class = get_metric_type(request.metric)
    metric_obj = metric_class(**args)

    # Prepare metrics output for json response
    response = OrderedDict()

    response["type"] = get_request_type(request)
    response["header"] = metric_obj.header()

    # Expose the metric object's public attributes without clobbering
    # keys already set above.  str.startswith("_") replaces the old
    # regex search(r"^_.*", ...) -- identical test, no regex machinery.
    for key, value in vars(metric_obj).items():
        if not key.startswith("_") and str(key) not in response:
            response[str(key)] = value

    response["cohort"] = str(request.cohort_expr)
    response["cohort_last_generated"] = str(request.cohort_gen_timestamp)
    response["time_of_response"] = datetime.now().strftime(DATETIME_STR_FORMAT)
    response["aggregator"] = str(request.aggregator)
    response["metric"] = str(request.metric)
    response["slice_size"] = request.slice
    response["group"] = request.group
    response["datetime_start"] = date_parse(metric_obj.datetime_start).strftime(DATETIME_STR_FORMAT)
    response["datetime_end"] = date_parse(metric_obj.datetime_end).strftime(DATETIME_STR_FORMAT)

    response["data"] = OrderedDict()

    return response, metric_class, metric_obj
# Example 3 (scraped snippet marker, commented out so the file parses)
def process_data_request(request_meta, users):
    """
        Main entry point of the module, prepares results for a given request.

        Parameters
        ~~~~~~~~~~

            request_meta : RequestMeta
                RequestMeta object storing the request data: the metric
                handle, optional "aggregator", datetime bounds, and the
                slice (interval) length in hours.  Mutated in place:
                ``slice`` is defaulted/coerced to float below.

            users : list
                List of user IDs the metric is computed over.

        Returns the ``results`` OrderedDict built by ``format_response``
        with its 'data' (and, where relevant, 'header') entries filled
        in according to the request type (time series / aggregator /
        raw).  On failure 'data' holds an error string instead.
    """

    # Set interval length in hours if not present.
    # NOTE(review): "INERVAL" looks like a typo for "INTERVAL", but the
    # constant is defined outside this block -- rename it at its
    # definition site before touching this reference.
    if not request_meta.slice:
        request_meta.slice = DEFAULT_INERVAL_LENGTH
    else:
        request_meta.slice = float(request_meta.slice)

    # Get the aggregator key
    agg_key = get_agg_key(request_meta.aggregator, request_meta.metric) if \
        request_meta.aggregator else None

    args = ParameterMapping.map(request_meta)

    # Initialize the results
    results, metric_class, metric_obj = format_response(request_meta)

    start = metric_obj.datetime_start
    end = metric_obj.datetime_end

    if results['type'] == request_types.time_series:

        # Get aggregator
        try:
            aggregator_func = get_aggregator_type(agg_key)
        except MetricsAPIError as e:
            # str(e) rather than e.message: BaseException.message was
            # removed in Python 3, and this matches the UserMetricError
            # handlers below.
            results['data'] = 'Request failed. ' + str(e)
            return results

        # Determine intervals and thread allocation
        total_intervals = (date_parse(end) - date_parse(start)).\
                          total_seconds() / (3600 * request_meta.slice)
        time_threads = max(1, int(total_intervals / INTERVALS_PER_THREAD))
        time_threads = min(MAX_THREADS, time_threads)

        logging.info(__name__ + ' :: Initiating time series for %(metric)s\n'
                                '\tAGGREGATOR = %(agg)s\n'
                                '\tFROM: %(start)s,\tTO: %(end)s.' %
                                {
                                    'metric': metric_class.__name__,
                                    'agg': request_meta.aggregator,
                                    'start': str(start),
                                    'end': str(end),
                                    })
        metric_threads = '"k_" : {0}, "kr_" : {1}'.format(USER_THREADS,
            REVISION_THREADS)
        metric_threads = '{' + metric_threads + '}'

        # These keys are supplied explicitly to build_time_series below,
        # so strip them from the forwarded kwargs.
        new_kwargs = deepcopy(args)

        del new_kwargs['slice']
        del new_kwargs['aggregator']
        del new_kwargs['datetime_start']
        del new_kwargs['datetime_end']

        out = tspm.build_time_series(start,
            end,
            request_meta.slice,
            metric_class,
            aggregator_func,
            users,
            kt_=time_threads,
            metric_threads=metric_threads,
            log=True,
            **new_kwargs)

        results['header'] = ['timestamp'] + \
                            getattr(aggregator_func,
                                    um.METRIC_AGG_METHOD_HEAD)
        for row in out:
            # row[0] carries the interval timestamp; [:19] trims any
            # sub-second / timezone suffix before parsing.
            timestamp = date_parse(row[0][:19]).strftime(
                DATETIME_STR_FORMAT)
            results['data'][timestamp] = row[3:]

    elif results['type'] == request_types.aggregator:

        # Get aggregator
        try:
            aggregator_func = get_aggregator_type(agg_key)
        except MetricsAPIError as e:
            results['data'] = 'Request failed. ' + str(e)
            return results

        # BUG FIX: '\AGGREGATOR' -> '\tAGGREGATOR' ('\A' is not a valid
        # escape sequence; the log line showed a literal backslash-A).
        logging.info(__name__ + ' :: Initiating aggregator for %(metric)s\n'
                                '\tAGGREGATOR = %(agg)s\n'
                                '\tFROM: %(start)s,\tTO: %(end)s.' %
                                {
                                    'metric': metric_class.__name__,
                                    'agg': request_meta.aggregator,
                                    'start': str(start),
                                    'end': str(end),
                                    })

        try:
            metric_obj.process(users,
                               k_=USER_THREADS,
                               kr_=REVISION_THREADS,
                               log_=True,
                               **args)
        except UserMetricError as e:
            logging.error(__name__ + ' :: Metrics call failed: ' + str(e))
            results['data'] = str(e)
            return results

        r = um.aggregator(aggregator_func, metric_obj, metric_obj.header())
        results['header'] = to_string(r.header)
        # r.data[0] duplicates the aggregate label already in the header;
        # presumably -- TODO confirm against um.aggregator's return shape.
        results['data'] = r.data[1:]

    elif results['type'] == request_types.raw:

        # Consistency fix: ' :: ' log separator to match every other
        # log line in this function (was ':: ' with no leading space).
        logging.info(__name__ + ' :: Initiating raw request for %(metric)s\n'
                                '\tFROM: %(start)s,\tTO: %(end)s.' %
                                {
                                    'metric': metric_class.__name__,
                                    'start': str(start),
                                    'end': str(end),
                                    })
        try:
            metric_obj.process(users,
                               k_=USER_THREADS,
                               kr_=REVISION_THREADS,
                               log_=True,
                               **args)
        except UserMetricError as e:
            logging.error(__name__ + ' :: Metrics call failed: ' + str(e))
            results['data'] = str(e)
            return results

        # Idiom: iterate the metric object directly rather than calling
        # __iter__() by hand; each row is keyed by its first element.
        for m in metric_obj:
            results['data'][m[0]] = m[1:]

    return results