# Example 1
def generator(params, timezone_offset):
    """Report generator listing all "QUERY TIMED OUT" events in a timeframe.

    Parameters:

        params           A dict with a 'timeframe' entry understood by
                         reportHelpers.calc_timeframe().
        timezone_offset  Offset passed to datetime_to_seconds() so the
                         returned timestamps are localized for the caller.

    Returns a (success, results) tuple, where 'results' is a list of
    (timestamp, sql) tuples, one per timed-out query.
    """
    startTime,endTime = reportHelpers.calc_timeframe(params['timeframe'])

    # Get the "QUERY TIMED OUT" event type.  We'll need this for our various
    # database queries.

    try:
        query_timed_out_event = EventType.objects.get(type="QUERY_TIMED_OUT")
    except EventType.DoesNotExist:
        query_timed_out_event = None

    # Now scan through the desired time period, grabbing all the
    # "QUERY TIMED OUT" events.

    results = [] # List of (timestamp, sql) tuples.

    if query_timed_out_event is not None:
        for event in Event.objects.filter(timestamp__gte=startTime,
                                          timestamp__lte=endTime,
                                          type=query_timed_out_event):
            # Bug fix: the timezone_offset argument had been commented out,
            # so timestamps were not localized -- unlike every other report
            # generator, which passes timezone_offset here.
            timestamp = reportHelpers.datetime_to_seconds(event.timestamp,
                                                          timezone_offset)
            results.append((timestamp, event.text))

    # Finally, return the list of timed-out queries back to the caller.

    return (True, results)
# Example 2
def generator(params, timezone_offset):
    """Report generator counting postings per time period.

    Parameters:

        params           A dict holding 'timeframe', 'chunk_size' ("m", "h"
                         or "d") and 'date_choice' ("source" or "3taps").
        timezone_offset  Offset passed to datetime_to_seconds() so the
                         returned timestamps are localized for the caller.

    Returns a (success, value) tuple.  On success, 'value' is a list of
    (period_start, num_postings) tuples; on failure it is an error message.
    """
    startTime,endTime = reportHelpers.calc_timeframe(params['timeframe'])

    # Map the 'chunk_size' parameter onto the size of each time period.

    chunk_sizes = {"m" : datetime.timedelta(minutes=1),
                   "h" : datetime.timedelta(hours=1),
                   "d" : datetime.timedelta(days=1)}
    try:
        chunk_size = chunk_sizes[params['chunk_size']]
    except KeyError:
        return (False,
                "Unknown chunk_size value: " + repr(params['chunk_size']))

    # Map the 'date_choice' parameter onto the Posting field to filter on.

    date_fields = {"source" : "timestamp",
                   "3taps"  : "inserted"}
    try:
        date_field = date_fields[params['date_choice']]
    except KeyError:
        return (False,
                "Unknown date_choice value: " + repr(params['date_choice']))

    # Build the list of (start, end) time periods covering the timeframe.
    # Each period ends one microsecond before the next one starts.

    one_tick = datetime.timedelta(microseconds=1)
    periods  = []
    cursor   = startTime
    while cursor <= endTime:
        periods.append((cursor, cursor + chunk_size - one_tick))
        cursor = cursor + chunk_size

    # Count the postings within each period and assemble the results.

    results = [] # List of (period_start, num_postings) tuples.

    for period_start,period_end in periods:
        filters = {date_field + "__gte" : period_start,
                   date_field + "__lte" : period_end}
        num_postings = Posting.objects.filter(**filters).count()
        timestamp = reportHelpers.datetime_to_seconds(period_start,
                                                      timezone_offset)
        results.append((timestamp, num_postings))

    # Finally, return the results back to the caller.

    return (True, results)
def generator(params, timezone_offset):
    """Report generator charting time-per-request for "SUMMARY_REQUESTS" events.

    Parameters:

        params           A dict with a 'timeframe' entry understood by
                         reportHelpers.calc_timeframe().
        timezone_offset  Offset passed to datetime_to_seconds() so the
                         returned timestamps are localized for the caller.

    Returns (True, results) where 'results' is a dict with 'startTime',
    'endTime' and 'periods' entries; 'periods' is a list of
    (timestamp, time_per_request) tuples, reduced to at most 1000 points.
    """
    startTime,endTime = reportHelpers.calc_timeframe(params['timeframe'])

    # Get the "SUMMARY_REQUESTS" event type.  We'll need this for our various
    # database queries.

    try:
        summary_requests_event = EventType.objects.get(type="SUMMARY_REQUESTS")
    except EventType.DoesNotExist:
        summary_requests_event = None

    # Now scan through the desired time period, grabbing all the
    # "SUMMARY_REQUESTS" events and passing them through a data reducer to keep
    # the number of data points reasonable.

    reducer = reportHelpers.DataReducer()
    reducer.set_max_num_data_points(1000)
    reducer.set_period(startTime, endTime)
    reducer.set_value_combiner(max)

    if summary_requests_event is not None:
        for event in Event.objects.filter(timestamp__gte=startTime,
                                          timestamp__lte=endTime,
                                          type=summary_requests_event):
            num_requests     = event.primary_value
            tot_time         = event.secondary_value # milliseconds.

            # Robustness fix: an event recording zero requests would
            # previously crash with ZeroDivisionError; skip it instead,
            # since it contributes no meaningful time-per-request value.
            if not num_requests:
                continue

            time_per_request = tot_time / num_requests

            reducer.add(event.timestamp, time_per_request)

    reduced_data = reducer.get_reduced_data()

    # Finally, collate the results and return them back to the caller.

    results = {'startTime' : reportHelpers.datetime_to_seconds(startTime,
                                                               timezone_offset),
               'endTime'   : reportHelpers.datetime_to_seconds(endTime,
                                                               timezone_offset),
               'periods'   : [] # List of (timestamp, time_per_request) values.
              }

    if summary_requests_event is not None:
        for period_start,period_end,max_time_per_request in reduced_data:
            timestamp = reportHelpers.datetime_to_seconds(period_start,
                                                          timezone_offset)
            results['periods'].append((timestamp, max_time_per_request))

    return (True, results)
# Example 4
def generator(params, timezone_offset):
    """Report generator charting the posting-queue size over time.

    The queue size at any instant equals the sum of all POSTINGS_QUEUED
    event values minus the sum of all POSTINGS_DEQUEUED event values up to
    that instant.  We compute the size at the start of the timeframe, then
    apply the (reduced) deltas within the timeframe as a running total.

    Parameters:

        params           A dict with a 'timeframe' entry understood by
                         reportHelpers.calc_timeframe().
        timezone_offset  Offset passed to datetime_to_seconds() so the
                         returned timestamps are localized for the caller.

    Returns (True, results) where 'results' is a dict with 'startTime',
    'endTime' and 'periods' entries; 'periods' is a list of
    (timestamp, queue_size) tuples, reduced to at most 1000 points.
    """
    startTime,endTime = reportHelpers.calc_timeframe(params['timeframe'])

    # Get the "POSTINGS_QUEUED" and "POSTINGS_DEQUEUED" event types.  We'll
    # need these for our various database queries.

    try:
        postings_queued_event = EventType.objects.get(type="POSTINGS_QUEUED")
    except EventType.DoesNotExist:
        postings_queued_event = None

    try:
        postings_dequeued_event = EventType.objects.get(
                                                    type="POSTINGS_DEQUEUED")
    except EventType.DoesNotExist:
        postings_dequeued_event = None

    def _total_before_start(event_type):
        # Sum of 'primary_value' over all events of the given type that
        # occurred before the start of the timeframe.  Returns 0 when the
        # event type doesn't exist or no matching events were found
        # (Sum() aggregates to None on an empty queryset).
        if event_type is None:
            return 0
        query = Event.objects.filter(timestamp__lt=startTime,
                                     type=event_type)
        total = query.aggregate(Sum("primary_value"))['primary_value__sum']
        return 0 if total is None else total

    # Now calculate the queue size at the start of the time period:
    # everything ever queued minus everything ever dequeued, prior to the
    # starting time period.

    num_postings_added   = _total_before_start(postings_queued_event)
    num_postings_removed = _total_before_start(postings_dequeued_event)

    starting_queue_size = num_postings_added - num_postings_removed

    # Calculate the data to return to the caller.  Note that we use a data
    # reducer to simplify the data as necessary.

    reducer = reportHelpers.DataReducer()
    reducer.set_max_num_data_points(1000)
    reducer.set_period(startTime, endTime)
    reducer.set_value_combiner(sum)

    if postings_queued_event is not None:
        for event in Event.objects.filter(timestamp__gte=startTime,
                                          timestamp__lte=endTime,
                                          type=postings_queued_event):
            reducer.add(event.timestamp, event.primary_value)

    if postings_dequeued_event is not None:
        for event in Event.objects.filter(timestamp__gte=startTime,
                                          timestamp__lte=endTime,
                                          type=postings_dequeued_event):
            # Dequeues shrink the queue, so they enter as negative deltas.
            reducer.add(event.timestamp, -event.primary_value)

    reduced_data = reducer.get_reduced_data()

    # We now have a (possibly reduced) list of the changes to the queue size
    # for the desired time period.  Use these calculated values to build a
    # running total of the queue size over the time period.

    results = {'startTime' : reportHelpers.datetime_to_seconds(startTime,
                                                               timezone_offset),
               'endTime'   : reportHelpers.datetime_to_seconds(endTime,
                                                               timezone_offset),
               'periods'   : []}

    running_total = starting_queue_size

    for period_start,period_end,period_total in reduced_data:
        running_total = running_total + period_total
        timestamp = reportHelpers.datetime_to_seconds(period_start,
                                                      timezone_offset)
        results['periods'].append((timestamp, running_total))

    # Finally, return the calculated data back to the caller.

    return (True, results)