def test_hash_request(self):
    # Requests with the same parameters should hash to the same values,
    # regardless of HTTP method.
    target_qd = QueryDict('&target=randomWalk(%27random%20walk%27)'
                          '&target=randomWalk(%27random%20walk2%27)'
                          '&target=randomWalk(%27random%20walk3%27)')
    empty_qd = QueryDict('')

    post_request = HttpRequest()
    post_request.POST = target_qd.copy()
    post_request.GET = empty_qd.copy()

    get_request = HttpRequest()
    get_request.GET = target_qd.copy()
    get_request.POST = empty_qd.copy()

    self.assertEqual(hashRequest(get_request), hashRequest(post_request))

    # Check that POST parameters are included in cache key calculations.
    post_request_with_params = HttpRequest()
    post_request_with_params.GET = empty_qd.copy()
    post_request_with_params.POST = target_qd.copy()

    empty_post_request = HttpRequest()
    empty_post_request.GET = empty_qd.copy()
    empty_post_request.POST = empty_qd.copy()

    self.assertNotEqual(hashRequest(post_request_with_params),
                        hashRequest(empty_post_request))

    # Check that changing the order of the parameters has no impact on the
    # cache key.
    request_params = HttpRequest()
    request_qd = QueryDict('&foo=1&bar=2')
    request_params.GET = request_qd.copy()
    request_params.POST = empty_qd.copy()

    reverse_request_params = HttpRequest()
    reverse_request_qd = QueryDict('&bar=2&foo=1')
    reverse_request_params.GET = reverse_request_qd.copy()
    reverse_request_params.POST = empty_qd.copy()

    self.assertEqual(hashRequest(request_params),
                     hashRequest(reverse_request_params))
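# The test above pins down three properties of hashRequest: GET and POST are
# interchangeable, POST parameters affect the key, and parameter order does
# not. A minimal sketch satisfying those properties is shown below — an
# illustration only, not graphite-web's actual implementation; the use of md5
# as the digest is an assumption.
from hashlib import md5
from itertools import chain

def hashRequest_sketch(request):
    # Merge GET and POST multi-value params so the HTTP method is irrelevant.
    params = chain(request.GET.lists(), request.POST.lists())
    normalized = ','.join(sorted('%s=%s' % (key, '&'.join(values))
                                 for key, values in params))
    # Sorting above makes ?foo=1&bar=2 and ?bar=2&foo=1 hash identically.
    return md5(normalized.encode('utf-8')).hexdigest()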
def renderView(request):
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = "noCache" not in requestOptions
    cacheTimeout = requestOptions["cacheTimeout"]
    requestContext = {
        "startTime": requestOptions["startTime"],
        "endTime": requestOptions["endTime"],
        "localOnly": requestOptions["localOnly"],
        "template": requestOptions["template"],
        "data": [],
    }
    data = requestContext["data"]

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache("Request-Cache hit [%s]" % requestKey)
            log.rendering("Returned cached response in %.6f" % (time() - start))
            return cachedResponse
        else:
            log.cache("Request-Cache miss [%s]" % requestKey)

    # Now we prepare the requested data
    if requestOptions["graphType"] == "pie":
        for target in requestOptions["targets"]:
            if target.find(":") >= 0:
                try:
                    name, value = target.split(":", 1)
                    value = float(value)
                except ValueError:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                seriesList = evaluateTarget(requestContext, target)
                for series in seriesList:
                    func = PieFunctions[requestOptions["pieMode"]]
                    data.append((series.name, func(requestContext, series) or 0))

    elif requestOptions["graphType"] == "line":
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions["targets"]
            startTime = requestOptions["startTime"]
            endTime = requestOptions["endTime"]
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            requestContext["data"] = data = cachedData
        else:  # Have to actually retrieve the data now
            for target in requestOptions["targets"]:
                if not target.strip():
                    continue
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
                data.extend(seriesList)
            if useCache:
                cache.add(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    format = requestOptions.get("format")
    if format == "csv":
        response = HttpResponse(content_type="text/csv")
        writer = csv.writer(response, dialect="excel")
        for series in data:
            for i, value in enumerate(series):
                timestamp = datetime.fromtimestamp(series.start + (i * series.step),
                                                   requestOptions["tzinfo"])
                writer.writerow((series.name,
                                 timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))
        return response

    if format == "json":
        series_data = []
        if "maxDataPoints" in requestOptions and any(data):
            startTime = min([series.start for series in data])
            endTime = max([series.end for series in data])
            timeRange = endTime - startTime
            maxDataPoints = requestOptions["maxDataPoints"]
            for series in data:
                numberOfDataPoints = timeRange / series.step
                if maxDataPoints < numberOfDataPoints:
                    valuesPerPoint = math.ceil(float(numberOfDataPoints) / float(maxDataPoints))
                    secondsPerPoint = int(valuesPerPoint * series.step)
                    # Nudge start over a little bit so that the consolidation
                    # bands align with each call, removing 'jitter' seen when
                    # refreshing.
                    nudge = secondsPerPoint + (series.start % series.step) - (series.start % secondsPerPoint)
                    series.start = series.start + nudge
                    valuesToLose = int(nudge / series.step)
                    for r in range(1, valuesToLose):
                        del series[0]
                    series.consolidate(valuesPerPoint)
                    timestamps = range(int(series.start), int(series.end) + 1, int(secondsPerPoint))
                else:
                    timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
                datapoints = zip(series, timestamps)
                series_data.append(dict(target=series.name, datapoints=datapoints))
        else:
            for series in data:
                timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
                datapoints = zip(series, timestamps)
                series_data.append(dict(target=series.name, datapoints=datapoints))

        if "jsonp" in requestOptions:
            response = HttpResponse(
                content="%s(%s)" % (requestOptions["jsonp"], json.dumps(series_data)),
                content_type="text/javascript",
            )
        else:
            response = HttpResponse(content=json.dumps(series_data),
                                    content_type="application/json")

        if useCache:
            cache.add(requestKey, response, cacheTimeout)
            patch_response_headers(response, cache_timeout=cacheTimeout)
        else:
            add_never_cache_headers(response)
        return response

    if format == "raw":
        response = HttpResponse(content_type="text/plain")
        for series in data:
            response.write("%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step))
            response.write(",".join(map(str, series)))
            response.write("\n")
        log.rendering("Total rawData rendering time %.6f" % (time() - start))
        return response

    if format == "svg":
        graphOptions["outputFormat"] = "svg"
    elif format == "pdf":
        graphOptions["outputFormat"] = "pdf"

    if format == "pickle":
        response = HttpResponse(content_type="application/pickle")
        seriesInfo = [series.getInfo() for series in data]
        pickle.dump(seriesInfo, response, protocol=-1)
        log.rendering("Total pickle rendering time %.6f" % (time() - start))
        return response

    # We've got the data, now to render it
    graphOptions["data"] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions["graphType"], graphOptions)
    else:
        image = doImageRender(requestOptions["graphClass"], graphOptions)

    useSVG = graphOptions.get("outputFormat") == "svg"
    if useSVG and "jsonp" in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions["jsonp"], json.dumps(image)),
            content_type="text/javascript")
    elif graphOptions.get("outputFormat") == "pdf":
        response = buildResponse(image, "application/x-pdf")
    else:
        response = buildResponse(image, "image/svg+xml" if useSVG else "image/png")

    if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
    else:
        add_never_cache_headers(response)

    log.rendering("Total rendering time %.6f seconds" % (time() - start))
    return response
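# A concrete walk-through of the consolidation "nudge" above, with assumed
# numbers (illustrative arithmetic only, not code from the view): with
# start = 1003, step = 60 and secondsPerPoint = 300,
#   nudge = 300 + (1003 % 60) - (1003 % 300) = 300 + 43 - 103 = 240
# so the series starts 240s later and drops 240 / 60 = 4 raw values. After
# nudging, start % secondsPerPoint equals the original start % step, so two
# successive requests over a sliding window consolidate on the same bucket
# boundaries — which is exactly what removes the refresh jitter.
step, secondsPerPoint, start = 60, 300, 1003
nudge = secondsPerPoint + (start % step) - (start % secondsPerPoint)
assert nudge == 240
assert (start + nudge) % secondsPerPoint == start % step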
def renderView(request):
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    # TODO: Make that a namedtuple or a class.
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'now': requestOptions['now'],
        'localOnly': requestOptions['localOnly'],
        'template': requestOptions['template'],
        'tzinfo': requestOptions['tzinfo'],
        'forwardHeaders': requestOptions['forwardHeaders'],
        'data': [],
        'prefetched': {},
        'xFilesFactor': requestOptions['xFilesFactor'],
    }
    data = requestContext['data']

    response = None

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        response = cache.get(requestKey)
        if response:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return response

        log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except ValueError:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                seriesList = evaluateTarget(requestContext, target)
                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name, func(requestContext, series) or 0))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        cachedData = None
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime,
                               requestOptions['xFilesFactor'])
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)

        if cachedData is not None:
            requestContext['data'] = data = cachedData
        else:  # Have to actually retrieve the data now
            targets = requestOptions['targets']
            data.extend(evaluateTarget(requestContext, targets))
            if useCache:
                cache.add(dataKey, data, cacheTimeout)

    renderStart = time()

    format = requestOptions.get('format')
    if format == 'csv':
        response = renderViewCsv(requestOptions, data)
    elif format == 'json':
        response = renderViewJson(requestOptions, data)
    elif format == 'dygraph':
        response = renderViewDygraph(requestOptions, data)
    elif format == 'rickshaw':
        response = renderViewRickshaw(requestOptions, data)
    elif format == 'raw':
        response = renderViewRaw(requestOptions, data)
    elif format == 'pickle':
        response = renderViewPickle(requestOptions, data)

    # if response wasn't generated above, render a graph image
    if not response:
        format = 'image'
        renderStart = time()
        response = renderViewGraph(graphOptions, requestOptions, data)

    if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
    else:
        add_never_cache_headers(response)

    log.rendering('%s rendering time %6f' % (format, time() - renderStart))
    log.rendering('Total request processing time %6f' % (time() - start))

    return response
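# This refactor delegates each output format to a renderView* helper that is
# not shown in this excerpt. As a hedged sketch, renderViewCsv presumably
# lifts the inline CSV branch of the older versions into its own function,
# along these lines (names follow the call site above; the body is an
# assumption based on the inline CSV code elsewhere in this file):
def renderViewCsv_sketch(requestOptions, data):
    response = HttpResponse(content_type='text/csv')
    writer = csv.writer(response, dialect='excel')
    for series in data:
        for i, value in enumerate(series):
            # One row per datapoint, timestamped in the request's timezone.
            timestamp = datetime.fromtimestamp(series.start + (i * series.step),
                                               requestOptions['tzinfo'])
            writer.writerow((series.name,
                             timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))
    return response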
def renderView(request):
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'localOnly': requestOptions['localOnly'],
        'data': []
    }
    data = requestContext['data']

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except ValueError:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                seriesList = evaluateTarget(requestContext, target)
                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name, func(requestContext, series) or 0))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            requestContext['data'] = data = cachedData
        else:  # Have to actually retrieve the data now
            for target in requestOptions['targets']:
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
                data.extend(seriesList)
            if useCache:
                cache.set(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    if 'pickle' in requestOptions:
        response = HttpResponse(mimetype='application/pickle')
        seriesInfo = [series.getInfo() for series in data]
        pickle.dump(seriesInfo, response, protocol=-1)
        log.rendering('Total pickle rendering time %.6f' % (time() - start))
        return response

    format = requestOptions.get('format')
    if format == 'csv':
        response = HttpResponse(mimetype='text/csv')
        writer = csv.writer(response, dialect='excel')
        for series in data:
            for i, value in enumerate(series):
                timestamp = localtime(series.start + (i * series.step))
                writer.writerow((series.name,
                                 strftime("%Y-%m-%d %H:%M:%S", timestamp), value))
        return response

    if format == 'json':
        series_data = []
        for series in data:
            timestamps = range(series.start, series.end, series.step)
            datapoints = zip(series, timestamps)
            series_data.append(dict(target=series.name, datapoints=datapoints))

        if 'jsonp' in requestOptions:
            response = HttpResponse(
                content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
                mimetype='text/javascript')
        else:
            response = HttpResponse(content=json.dumps(series_data),
                                    mimetype='application/json')
        response['Pragma'] = 'no-cache'
        response['Cache-Control'] = 'no-cache'
        return response

    if format == 'raw':
        response = HttpResponse(mimetype='text/plain')
        for series in data:
            response.write("%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step))
            response.write(','.join(map(str, series)))
            response.write('\n')
        log.rendering('Total rawData rendering time %.6f' % (time() - start))
        return response

    if format == 'svg':
        graphOptions['outputFormat'] = 'svg'

    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions)
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)

    useSVG = graphOptions.get('outputFormat') == 'svg'
    if useSVG and 'jsonp' in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
            mimetype='text/javascript')
    else:
        response = buildResponse(image, useSVG and 'image/svg+xml' or 'image/png')

    if useCache:
        cache.set(requestKey, response, cacheTimeout)

    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
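# hashData keys the data cache on the targets and the requested time range
# in every version above. A minimal sketch of a compatible implementation —
# an assumption, not necessarily graphite-web's exact helper; md5 stands in
# for whatever digest the real code uses, and startTime/endTime are assumed
# to be datetime-like:
from hashlib import md5

def hashData_sketch(targets, startTime, endTime):
    # Sort targets so equivalent queries written in a different order share
    # a cache entry; bucket the time range to the minute.
    key = ','.join(sorted(targets))
    key += '@%s:%s' % (startTime.strftime("%Y%m%d_%H%M"),
                       endTime.strftime("%Y%m%d_%H%M"))
    return md5(key.encode('utf-8')).hexdigest()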
def renderView(request):
    start = time()

    try:
        global_timeout_duration = getattr(settings, 'RENDER_DURATION_TIMEOUT')
    except AttributeError:
        global_timeout_duration = 60

    if 'json_request' in request.REQUEST:
        (graphOptions, requestOptions) = parseDataOptions(request.REQUEST['json_request'])
    elif request.is_ajax() and request.method == 'POST':
        (graphOptions, requestOptions) = parseDataOptions(request.raw_post_data)
    else:
        (graphOptions, requestOptions) = parseOptions(request)

    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'localOnly': requestOptions['localOnly'],
        'data': []
    }
    data = requestContext['data']

    # add template to graphOptions
    try:
        user_profile = getProfile(request, allowDefault=False)
        graphOptions['defaultTemplate'] = user_profile.defaultTemplate
    except:
        graphOptions['defaultTemplate'] = "default"

    if request.method == 'GET':
        cache_request_obj = request.GET.copy()
    else:
        cache_request_obj = request.POST.copy()

    # hack request object to add defaultTemplate param
    cache_request_obj.appendlist("template", graphOptions['defaultTemplate'])

    # First we check the request cache
    requestKey = hashRequest(cache_request_obj)
    requestHash = hashRequestWTime(cache_request_obj)
    requestContext['request_key'] = requestHash

    request_data = ""
    if request.method == "POST":
        for k, v in request.POST.items():
            request_data += "%s=%s&" % (k.replace("\t", ""), v.replace("\t", ""))
    else:
        request_data = request.META['QUERY_STRING']
    log.info("DEBUG:Request_meta:[%s]\t%s\t%s\t%s\t\"%s\"" %
             (requestHash,
              request.META['REMOTE_ADDR'],
              request.META['REQUEST_METHOD'],
              request_data,
              request.META['HTTP_USER_AGENT']))

    if useCache:
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestHash)
            log.rendering('[%s] Returned cached response in %.6f' % (requestHash, (time() - start)))
            log.info("RENDER:[%s]:Timings:Cached %.5f" % (requestHash, time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestHash)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except ValueError:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                # Evaluate the target in a subprocess so a runaway query can
                # be terminated once the render timeout expires.
                q = Queue(maxsize=1)
                p = Process(target=evaluateWithQueue, args=(q, requestContext, target))
                p.start()
                seriesList = None
                try:
                    seriesList = q.get(True, global_timeout_duration)
                    p.join()
                except Exception as e:
                    log.info("DEBUG:[%s] got an exception on trying to get seriesList from queue, error: %s" % (requestHash, e))
                    p.terminate()
                    return errorPage("Failed to fetch data")

                if seriesList is None:
                    log.info("DEBUG:[%s] request timed out" % requestHash)
                    p.terminate()
                    return errorPage("Request timed out")

                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name, func(requestContext, series) or 0))
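# evaluateWithQueue is referenced above but not shown in this excerpt. Given
# the call site — Process(target=evaluateWithQueue, args=(q, requestContext,
# target)) followed by q.get(True, timeout) — it is presumably a thin wrapper
# that runs evaluateTarget in the child process and ships the result back on
# the queue. A hedged sketch; the body is an assumption, not the original:
def evaluateWithQueue_sketch(queue, requestContext, target):
    # Runs inside the child process; the parent blocks on queue.get() with a
    # timeout and terminates this process if no result arrives in time.
    result = evaluateTarget(requestContext, target)
    queue.put(result)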
def renderView(request):
    start = time()

    if 'json_request' in request.REQUEST:
        (graphOptions, requestOptions) = parseDataOptions(request.REQUEST['json_request'])
    elif request.is_ajax() and request.method == 'POST':
        (graphOptions, requestOptions) = parseDataOptions(request.raw_post_data)
    else:
        (graphOptions, requestOptions) = parseOptions(request)

    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'localOnly': requestOptions['localOnly'],
        'data': []
    }
    data = requestContext['data']

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except ValueError:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                seriesList = evaluateTarget(requestContext, target)
                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name, func(requestContext, series) or 0))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            requestContext['data'] = data = cachedData
        else:  # Have to actually retrieve the data now
            for target in requestOptions['targets']:
                if not target.strip():
                    continue
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
                data.extend(seriesList)
            if useCache:
                cache.add(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    format = requestOptions.get('format')
    if format == 'csv':
        response = HttpResponse(content_type='text/csv')
        writer = csv.writer(response, dialect='excel')
        for series in data:
            for i, value in enumerate(series):
                timestamp = datetime.fromtimestamp(series.start + (i * series.step),
                                                   requestOptions['tzinfo'])
                writer.writerow((series.name,
                                 timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))
        return response

    if format == 'json':
        series_data = []
        if 'maxDataPoints' in requestOptions and any(data):
            startTime = min([series.start for series in data])
            endTime = max([series.end for series in data])
            timeRange = endTime - startTime
            maxDataPoints = requestOptions['maxDataPoints']
            for series in data:
                numberOfDataPoints = timeRange / series.step
                if maxDataPoints < numberOfDataPoints:
                    valuesPerPoint = math.ceil(float(numberOfDataPoints) / float(maxDataPoints))
                    secondsPerPoint = int(valuesPerPoint * series.step)
                    # Nudge start over a little bit so that the consolidation
                    # bands align with each call, removing 'jitter' seen when
                    # refreshing.
                    nudge = secondsPerPoint + (series.start % series.step) - (series.start % secondsPerPoint)
                    series.start = series.start + nudge
                    valuesToLose = int(nudge / series.step)
                    for r in range(1, valuesToLose):
                        del series[0]
                    series.consolidate(valuesPerPoint)
                    timestamps = range(series.start, series.end, secondsPerPoint)
                else:
                    timestamps = range(series.start, series.end, series.step)
                datapoints = zip(series, timestamps)
                series_data.append(dict(target=series.name, datapoints=datapoints))
        else:
            for series in data:
                timestamps = range(series.start, series.end, series.step)
                datapoints = zip(series, timestamps)
                series_data.append(dict(target=series.name, datapoints=datapoints))

        if 'jsonp' in requestOptions:
            response = HttpResponse(
                content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
                content_type='text/javascript')
        else:
            response = HttpResponse(content=json.dumps(series_data),
                                    content_type='application/json')
        response['Pragma'] = 'no-cache'
        response['Cache-Control'] = 'no-cache'
        return response

    if format == 'raw':
        response = HttpResponse(content_type='text/plain')
        for series in data:
            response.write("%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step))
            response.write(','.join(map(str, series)))
            response.write('\n')
        log.rendering('Total rawData rendering time %.6f' % (time() - start))
        return response

    if format == 'svg':
        graphOptions['outputFormat'] = 'svg'

    if format == 'pickle':
        response = HttpResponse(content_type='application/pickle')
        seriesInfo = [series.getInfo() for series in data]
        pickle.dump(seriesInfo, response, protocol=-1)
        log.rendering('Total pickle rendering time %.6f' % (time() - start))
        return response

    # add template to graphOptions
    try:
        user_profile = getProfile(request, allowDefault=False)
        graphOptions['defaultTemplate'] = user_profile.defaultTemplate
    except:
        graphOptions['defaultTemplate'] = "default"

    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions)
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)

    useSVG = graphOptions.get('outputFormat') == 'svg'
    if useSVG and 'jsonp' in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
            content_type='text/javascript')
    else:
        response = buildResponse(image, 'image/svg+xml' if useSVG else 'image/png')

    if useCache:
        cache.set(requestKey, response, cacheTimeout)

    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
def renderView(request):
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'localOnly': requestOptions['localOnly'],
        'data': []
    }
    data = requestContext['data']

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except ValueError:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                seriesList = evaluateTarget(requestContext, target)
                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name, func(requestContext, series) or 0))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            requestContext['data'] = data = cachedData
        else:  # Have to actually retrieve the data now
            for target in requestOptions['targets']:
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
                data.extend(seriesList)
            if useCache:
                cache.set(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    if 'pickle' in requestOptions:
        response = HttpResponse(mimetype='application/pickle')
        seriesInfo = [series.getInfo() for series in data]
        pickle.dump(seriesInfo, response, protocol=-1)
        log.rendering('Total pickle rendering time %.6f' % (time() - start))
        return response

    format = requestOptions.get('format')
    if format == 'csv':
        response = HttpResponse(mimetype='text/csv')
        writer = csv.writer(response, dialect='excel')
        for series in data:
            for i, value in enumerate(series):
                timestamp = localtime(series.start + (i * series.step))
                writer.writerow((series.name,
                                 strftime("%Y-%m-%d %H:%M:%S", timestamp), value))
        return response

    if format == 'json':
        # Render graph to obtain yStep, yUnitSystem info
        graphOptions['data'] = data
        graph = requestOptions['graphClass'](**graphOptions)

        # Prepare series data for JSON
        series_data = []
        for series in data:
            timestamps = range(series.start, series.end, series.step)
            datapoints = zip(series, timestamps)
            cleaned = filter(lambda x: x is not None, series)
            info = {
                'target': series.name,
                'datapoints': datapoints,
            }
            raw_stats = {
                'min': min(cleaned) if cleaned else None,
                'max': max(cleaned) if cleaned else None,
                'mean': (sum(cleaned) / len(cleaned)) if cleaned else None
            }
            formatted_stats = {}
            for key, value in raw_stats.items():
                label = graph.makeLabel(value, ignore_step=True) if value else None
                formatted_stats[key] = label
            info['stats'] = {'raw': raw_stats, 'formatted': formatted_stats}
            series_data.append(info)

        if 'jsonp' in requestOptions:
            response = HttpResponse(
                content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
                mimetype='text/javascript')
        else:
            response = HttpResponse(content=json.dumps(series_data),
                                    mimetype='application/json')
        response['Pragma'] = 'no-cache'
        response['Cache-Control'] = 'no-cache'
        return response

    if format == 'raw':
        response = HttpResponse(mimetype='text/plain')
        for series in data:
            response.write("%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step))
            response.write(','.join(map(str, series)))
            response.write('\n')
        log.rendering('Total rawData rendering time %.6f' % (time() - start))
        return response

    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions)
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)

    response = buildResponse(image)

    if useCache:
        cache.set(requestKey, response, cacheTimeout)

    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
def renderView(request):
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = 'noCache' not in requestOptions

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        data = []
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except ValueError:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                timeInterval = (requestOptions['startTime'], requestOptions['endTime'])
                seriesList = evaluateTarget(target, timeInterval)
                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name, func(series) or 0))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            data = cachedData
        else:  # Have to actually retrieve the data now
            data = []
            timeInterval = (requestOptions['startTime'], requestOptions['endTime'])

            for timer in requestOptions['timers']:
                colors = ['#FFDDDD', '#FF9999', '#99FF99', '#DDFFDD']
                if timer.startswith('^'):
                    timer = timer[1:]
                    colors.reverse()

                # Draw stacked percentile bands: 90th over 50th over 10th,
                # bounded below by the series minimum.
                series_90 = evaluateTarget("averageSeries(" + timer + ".percent_90)", timeInterval)[0]
                series_50 = evaluateTarget("averageSeries(" + timer + ".percent_50)", timeInterval)[0]
                series_10 = evaluateTarget("averageSeries(" + timer + ".percent_10)", timeInterval)[0]
                series_lo = evaluateTarget("averageSeries(" + timer + ".min)", timeInterval)[0]

                series_90.options['lowBound'] = series_50
                series_90.options['areaFill'] = True
                series_90.options['fixedColor'] = colors[1]
                series_90.options['noLegend'] = True
                data.append(series_90)

                series_50.options['lowBound'] = series_10
                series_50.options['areaFill'] = True
                series_50.options['fixedColor'] = colors[2]
                series_50.options['noLegend'] = True
                data.append(series_50)

                series_10.options['lowBound'] = series_lo
                series_10.options['areaFill'] = True
                series_10.options['fixedColor'] = colors[3]
                series_10.options['noLegend'] = True
                data.append(series_10)

                series_lo.options['noDraw'] = True
                series_lo.options['noLegend'] = True
                data.append(series_lo)

                # Draw the median on top as a visible, labeled line.
                series_mid = evaluateTarget("averageSeries(" + timer + ".percent_50)", timeInterval)[0]
                series_mid.options['fixedColor'] = '#474747'
                series_mid.name = 'timer: ' + timer
                data.append(series_mid)

            for target in requestOptions['targets']:
                t = time()
                seriesList = evaluateTarget(target, timeInterval)
                log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
                data.extend(seriesList)

            if useCache:
                cache.set(dataKey, data)

    # If data is all we needed, we're done
    if 'pickle' in requestOptions:
        response = HttpResponse(mimetype='application/pickle')
        seriesInfo = [series.getInfo() for series in data]
        pickle.dump(seriesInfo, response, protocol=-1)
        log.rendering('Total pickle rendering time %.6f' % (time() - start))
        return response

    if 'rawData' in requestOptions:
        response = HttpResponse(mimetype='text/plain')
        for series in data:
            response.write("%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step))
            response.write(','.join(map(str, series)))
            response.write('\n')
        log.rendering('Total rawData rendering time %.6f' % (time() - start))
        return response

    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions)
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)

    response = buildResponse(image)

    if useCache:
        cache.set(requestKey, response)

    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
def renderView(request):
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'localOnly': requestOptions['localOnly'],
        'data': []
    }
    data = requestContext['data']

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except ValueError:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                seriesList = evaluateTarget(requestContext, target)
                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name, func(requestContext, series) or 0))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            requestContext['data'] = data = cachedData
        else:  # Have to actually retrieve the data now
            for target in requestOptions['targets']:
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
                data.extend(seriesList)
            if useCache:
                cache.set(dataKey, data, cacheTimeout)

    format = requestOptions.get('format')
    if format == 'csv':
        response = HttpResponse(mimetype='text/csv')
        writer = csv.writer(response, dialect='excel')
        for series in data:
            for i, value in enumerate(series):
                timestamp = localtime(series.start + (i * series.step))
                writer.writerow((series.name,
                                 strftime("%Y-%m-%d %H:%M:%S", timestamp), value))
        return response

    if format == 'json':
        series_data = []
        for series in data:
            timestamps = range(series.start, series.end, series.step)
            datapoints = zip(series, timestamps)
            series_data.append(dict(target=series.name, datapoints=datapoints))

        if 'jsonp' in requestOptions:
            response = HttpResponse(
                content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
                mimetype='text/javascript')
        else:
            response = HttpResponse(content=json.dumps(series_data),
                                    mimetype='application/json')
        response['Pragma'] = 'no-cache'
        response['Cache-Control'] = 'no-cache'
        return response

    if format == 'raw':
        response = HttpResponse(mimetype='text/plain')
        for series in data:
            response.write("%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step))
            response.write(','.join(map(str, series)))
            response.write('\n')
        log.rendering('Total rawData rendering time %.6f' % (time() - start))
        return response

    if format == 'svg':
        graphOptions['outputFormat'] = 'svg'

    if format == 'pickle':
        response = HttpResponse(mimetype='application/pickle')
        seriesInfo = [series.getInfo() for series in data]
        pickle.dump(seriesInfo, response, protocol=-1)
        log.rendering('Total pickle rendering time %.6f' % (time() - start))
        return response

    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions)
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)

    useSVG = graphOptions.get('outputFormat') == 'svg'
    if useSVG and 'jsonp' in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
            mimetype='text/javascript')
    else:
        response = buildResponse(image, useSVG and 'image/svg+xml' or 'image/png')

    if useCache:
        cache.set(requestKey, response, cacheTimeout)

    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
def renderView(request):
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = 'noCache' not in requestOptions

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        data = []
        for target in requestOptions['targets']:
            try:
                name, value = target.split(':', 1)
                value = float(value)
            except ValueError:
                raise ValueError("Invalid target '%s'" % target)
            data.append((name, value))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            data = cachedData
        else:  # Have to actually retrieve the data now
            data = []
            timeInterval = (requestOptions['startTime'], requestOptions['endTime'])
            for target in requestOptions['targets']:
                t = time()
                seriesList = evaluateTarget(target, timeInterval)
                data.extend(seriesList)
                log.rendering('Retrieval of %s took %.6f' % (target, time() - t))
            if useCache:
                cache.set(dataKey, data)

    # If data is all we needed, we're done
    if 'pickle' in requestOptions:
        response = HttpResponse(mimetype='application/pickle')
        seriesInfo = [series.getInfo() for series in data]
        pickle.dump(seriesInfo, response, protocol=-1)
        log.rendering('Total pickle rendering time %.6f' % (time() - start))
        return response

    if 'rawData' in requestOptions:
        response = HttpResponse(mimetype='text/plain')
        for series in data:
            response.write("%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step))
            response.write(','.join(map(str, series)))
            response.write('\n')
        log.rendering('Total rawData rendering time %.6f' % (time() - start))
        return response

    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions)
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)

    response = buildResponse(image)

    if useCache:
        cache.set(requestKey, response)

    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
def renderView(request):
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    # TODO: Make that a namedtuple or a class.
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'now': requestOptions['now'],
        'localOnly': requestOptions['localOnly'],
        'template': requestOptions['template'],
        'tzinfo': requestOptions['tzinfo'],
        'forwardHeaders': extractForwardHeaders(request),
        'data': [],
        'prefetched': {},
    }
    data = requestContext['data']

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except ValueError:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                seriesList = evaluateTarget(requestContext, target)
                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name, func(requestContext, series) or 0))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            requestContext['data'] = data = cachedData
        else:  # Have to actually retrieve the data now
            targets = requestOptions['targets']
            if settings.REMOTE_PREFETCH_DATA and not requestOptions.get('localOnly'):
                prefetchRemoteData(requestContext, targets)

            for target in targets:
                if not target.strip():
                    continue
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
                data.extend(seriesList)

            if useCache:
                cache.add(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    format = requestOptions.get('format')
    if format == 'csv':
        response = HttpResponse(content_type='text/csv')
        writer = csv.writer(response, dialect='excel')
        for series in data:
            for i, value in enumerate(series):
                timestamp = datetime.fromtimestamp(series.start + (i * series.step),
                                                   requestOptions['tzinfo'])
                writer.writerow((series.name,
                                 timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))
        return response

    if format == 'json':
        jsonStart = time()
        series_data = []
        if 'maxDataPoints' in requestOptions and any(data):
            startTime = min([series.start for series in data])
            endTime = max([series.end for series in data])
            timeRange = endTime - startTime
            maxDataPoints = requestOptions['maxDataPoints']
            for series in data:
                numberOfDataPoints = timeRange / series.step
                if maxDataPoints < numberOfDataPoints:
                    valuesPerPoint = math.ceil(float(numberOfDataPoints) / float(maxDataPoints))
                    secondsPerPoint = int(valuesPerPoint * series.step)
                    # Nudge start over a little bit so that the consolidation
                    # bands align with each call, removing 'jitter' seen when
                    # refreshing.
                    nudge = secondsPerPoint + (series.start % series.step) - (series.start % secondsPerPoint)
                    series.start = series.start + nudge
                    valuesToLose = int(nudge / series.step)
                    for r in range(1, valuesToLose):
                        del series[0]
                    series.consolidate(valuesPerPoint)
                    timestamps = range(int(series.start), int(series.end) + 1, int(secondsPerPoint))
                else:
                    timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
                datapoints = zip(series, timestamps)
                series_data.append(dict(target=series.name, tags=series.tags, datapoints=datapoints))
        elif 'noNullPoints' in requestOptions and any(data):
            for series in data:
                values = []
                for (index, v) in enumerate(series):
                    if v is not None:
                        timestamp = series.start + (index * series.step)
                        values.append((v, timestamp))
                if len(values) > 0:
                    series_data.append(dict(target=series.name, tags=series.tags, datapoints=values))
        else:
            for series in data:
                timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
                datapoints = zip(series, timestamps)
                series_data.append(dict(target=series.name, tags=series.tags, datapoints=datapoints))

        output = json.dumps(
            series_data,
            indent=(2 if requestOptions['pretty'] else None)).replace(
                'None,', 'null,').replace('NaN,', 'null,').replace('Infinity,', '1e9999,')

        if 'jsonp' in requestOptions:
            response = HttpResponse(
                content="%s(%s)" % (requestOptions['jsonp'], output),
                content_type='text/javascript')
        else:
            response = HttpResponse(content=output, content_type='application/json')

        if useCache:
            cache.add(requestKey, response, cacheTimeout)
            patch_response_headers(response, cache_timeout=cacheTimeout)
        else:
            add_never_cache_headers(response)
        log.rendering('JSON rendering time %6f' % (time() - jsonStart))
        log.rendering('Total request processing time %6f' % (time() - start))
        return response

    if format == 'dygraph':
        labels = ['Time']
        result = '{}'
        if data:
            datapoints = [[ts] for ts in range(data[0].start, data[0].end, data[0].step)]
            for series in data:
                labels.append(series.name)
                for i, point in enumerate(series):
                    if point is None:
                        point = 'null'
                    elif point == float('inf'):
                        point = 'Infinity'
                    elif point == float('-inf'):
                        point = '-Infinity'
                    elif math.isnan(point):
                        point = 'null'
                    datapoints[i].append(point)
            line_template = '[%%s000%s]' % ''.join([', %s'] * len(data))
            lines = [line_template % tuple(points) for points in datapoints]
            result = '{"labels" : %s, "data" : [%s]}' % (json.dumps(labels), ', '.join(lines))
        response = HttpResponse(content=result, content_type='application/json')

        if useCache:
            cache.add(requestKey, response, cacheTimeout)
            patch_response_headers(response, cache_timeout=cacheTimeout)
        else:
            add_never_cache_headers(response)
        log.rendering('Total dygraph rendering time %.6f' % (time() - start))
        return response

    if format == 'rickshaw':
        series_data = []
        for series in data:
            timestamps = range(series.start, series.end, series.step)
            datapoints = [{'x': x, 'y': y} for x, y in zip(timestamps, series)]
            series_data.append(dict(target=series.name, datapoints=datapoints))

        if 'jsonp' in requestOptions:
            response = HttpResponse(
                content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
                content_type='text/javascript')
        else:
            response = HttpResponse(content=json.dumps(series_data),
                                    content_type='application/json')

        if useCache:
            cache.add(requestKey, response, cacheTimeout)
            patch_response_headers(response, cache_timeout=cacheTimeout)
        else:
            add_never_cache_headers(response)
        log.rendering('Total rickshaw rendering time %.6f' % (time() - start))
        return response

    if format == 'raw':
        response = HttpResponse(content_type='text/plain')
        for series in data:
            response.write("%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step))
            response.write(','.join(map(repr, series)))
            response.write('\n')
        log.rendering('Total rawData rendering time %.6f' % (time() - start))
        return response

    if format == 'svg':
        graphOptions['outputFormat'] = 'svg'
    elif format == 'pdf':
        graphOptions['outputFormat'] = 'pdf'

    if format == 'pickle':
        response = HttpResponse(content_type='application/pickle')
        seriesInfo = [series.getInfo() for series in data]
        pickle.dump(seriesInfo, response, protocol=-1)
        log.rendering('Total pickle rendering time %.6f' % (time() - start))
        return response

    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions,
                                  requestContext['forwardHeaders'])
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)

    useSVG = graphOptions.get('outputFormat') == 'svg'
    if useSVG and 'jsonp' in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
            content_type='text/javascript')
    elif graphOptions.get('outputFormat') == 'pdf':
        response = buildResponse(image, 'application/x-pdf')
    else:
        response = buildResponse(image, 'image/svg+xml' if useSVG else 'image/png')

    if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
    else:
        add_never_cache_headers(response)

    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
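# buildResponse appears throughout these versions with an (imageData,
# content_type) call shape. A minimal sketch consistent with that usage —
# an assumption, not necessarily graphite-web's exact helper:
def buildResponse_sketch(imageData, content_type='image/png'):
    # Wrap the rendered bytes in an HttpResponse with the right MIME type;
    # callers layer their own cache headers on top.
    return HttpResponse(imageData, content_type=content_type)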
def renderView(request):
  start = time()
  (graphOptions, requestOptions) = parseOptions(request)
  useCache = 'noCache' not in requestOptions
  cacheTimeout = requestOptions['cacheTimeout']
  # TODO: Make that a namedtuple or a class.
  requestContext = {
    'startTime': requestOptions['startTime'],
    'endTime': requestOptions['endTime'],
    'now': requestOptions['now'],
    'localOnly': requestOptions['localOnly'],
    'template': requestOptions['template'],
    'tzinfo': requestOptions['tzinfo'],
    'forwardHeaders': extractForwardHeaders(request),
    'data': [],
    'prefetched': {},
  }
  data = requestContext['data']

  # First we check the request cache
  if useCache:
    requestKey = hashRequest(request)
    cachedResponse = cache.get(requestKey)
    if cachedResponse:
      log.cache('Request-Cache hit [%s]' % requestKey)
      log.rendering('Returned cached response in %.6f' % (time() - start))
      return cachedResponse
    else:
      log.cache('Request-Cache miss [%s]' % requestKey)

  # Now we prepare the requested data
  if requestOptions['graphType'] == 'pie':
    for target in requestOptions['targets']:
      if target.find(':') >= 0:
        try:
          name, value = target.split(':', 1)
          value = float(value)
        except ValueError:
          raise ValueError("Invalid target '%s'" % target)
        data.append((name, value))
      else:
        seriesList = evaluateTarget(requestContext, target)
        for series in seriesList:
          func = PieFunctions[requestOptions['pieMode']]
          data.append((series.name, func(requestContext, series) or 0))

  elif requestOptions['graphType'] == 'line':
    # Let's see if at least our data is cached
    if useCache:
      targets = requestOptions['targets']
      startTime = requestOptions['startTime']
      endTime = requestOptions['endTime']
      dataKey = hashData(targets, startTime, endTime)
      cachedData = cache.get(dataKey)
      if cachedData:
        log.cache("Data-Cache hit [%s]" % dataKey)
      else:
        log.cache("Data-Cache miss [%s]" % dataKey)
    else:
      cachedData = None

    if cachedData is not None:
      requestContext['data'] = data = cachedData
    else:  # Have to actually retrieve the data now
      targets = requestOptions['targets']
      if settings.REMOTE_PREFETCH_DATA and not requestOptions.get('localOnly'):
        prefetchRemoteData(requestContext, targets)

      for target in targets:
        if not target.strip():
          continue
        t = time()
        seriesList = evaluateTarget(requestContext, target)
        log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
        data.extend(seriesList)

      if useCache:
        cache.add(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    format = requestOptions.get('format')
    if format == 'csv':
      response = HttpResponse(content_type='text/csv')
      writer = csv.writer(response, dialect='excel')
      for series in data:
        for i, value in enumerate(series):
          timestamp = datetime.fromtimestamp(series.start + (i * series.step), requestOptions['tzinfo'])
          writer.writerow((series.name, timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))
      return response

    if format == 'json':
      jsonStart = time()
      series_data = []
      if 'maxDataPoints' in requestOptions and any(data):
        startTime = min([series.start for series in data])
        endTime = max([series.end for series in data])
        timeRange = endTime - startTime
        maxDataPoints = requestOptions['maxDataPoints']
        for series in data:
          numberOfDataPoints = timeRange / series.step
          if maxDataPoints < numberOfDataPoints:
            valuesPerPoint = math.ceil(float(numberOfDataPoints) / float(maxDataPoints))
            secondsPerPoint = int(valuesPerPoint * series.step)
            # Nudge start over a little bit so that the consolidation bands
            # align with each call, removing 'jitter' seen when refreshing.
            nudge = secondsPerPoint + (series.start % series.step) - (series.start % secondsPerPoint)
            series.start = series.start + nudge
            valuesToLose = int(nudge / series.step)
            for r in range(1, valuesToLose):
              del series[0]
            series.consolidate(valuesPerPoint)
            timestamps = range(int(series.start), int(series.end) + 1, int(secondsPerPoint))
          else:
            timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
          datapoints = zip(series, timestamps)
          series_data.append(dict(target=series.name, tags=series.tags, datapoints=datapoints))
      elif 'noNullPoints' in requestOptions and any(data):
        for series in data:
          values = []
          for (index, v) in enumerate(series):
            if v is not None:
              timestamp = series.start + (index * series.step)
              values.append((v, timestamp))
          if len(values) > 0:
            series_data.append(dict(target=series.name, tags=series.tags, datapoints=values))
      else:
        for series in data:
          timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
          datapoints = zip(series, timestamps)
          series_data.append(dict(target=series.name, tags=series.tags, datapoints=datapoints))

      output = json.dumps(
        series_data,
        indent=(2 if requestOptions['pretty'] else None),
      ).replace('None,', 'null,').replace('NaN,', 'null,').replace('Infinity,', '1e9999,')

      if 'jsonp' in requestOptions:
        response = HttpResponse(
          content="%s(%s)" % (requestOptions['jsonp'], output),
          content_type='text/javascript')
      else:
        response = HttpResponse(
          content=output,
          content_type='application/json')

      if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
      else:
        add_never_cache_headers(response)
      log.rendering('JSON rendering time %.6f' % (time() - jsonStart))
      log.rendering('Total request processing time %.6f' % (time() - start))
      return response

    if format == 'dygraph':
      labels = ['Time']
      result = '{}'
      if data:
        datapoints = [[ts] for ts in range(data[0].start, data[0].end, data[0].step)]
        for series in data:
          labels.append(series.name)
          for i, point in enumerate(series):
            if point is None:
              point = 'null'
            elif point == float('inf'):
              point = 'Infinity'
            elif point == float('-inf'):
              point = '-Infinity'
            elif math.isnan(point):
              point = 'null'
            datapoints[i].append(point)
        line_template = '[%%s000%s]' % ''.join([', %s'] * len(data))
        lines = [line_template % tuple(points) for points in datapoints]
        result = '{"labels" : %s, "data" : [%s]}' % (json.dumps(labels), ', '.join(lines))
      response = HttpResponse(content=result, content_type='application/json')

      if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
      else:
        add_never_cache_headers(response)
      log.rendering('Total dygraph rendering time %.6f' % (time() - start))
      return response

    if format == 'rickshaw':
      series_data = []
      for series in data:
        timestamps = range(series.start, series.end, series.step)
        datapoints = [{'x': x, 'y': y} for x, y in zip(timestamps, series)]
        series_data.append(dict(target=series.name, datapoints=datapoints))
      if 'jsonp' in requestOptions:
        response = HttpResponse(
          content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
          content_type='text/javascript')
      else:
        response = HttpResponse(content=json.dumps(series_data),
                                content_type='application/json')

      if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
      else:
        add_never_cache_headers(response)
      log.rendering('Total rickshaw rendering time %.6f' % (time() - start))
      return response

    if format == 'raw':
      response = HttpResponse(content_type='text/plain')
      for series in data:
        response.write("%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step))
        response.write(','.join(map(repr, series)))
        response.write('\n')
      log.rendering('Total rawData rendering time %.6f' % (time() - start))
      return response

    if format == 'svg':
      graphOptions['outputFormat'] = 'svg'
    elif format == 'pdf':
      graphOptions['outputFormat'] = 'pdf'

    if format == 'pickle':
      response = HttpResponse(content_type='application/pickle')
      seriesInfo = [series.getInfo() for series in data]
      pickle.dump(seriesInfo, response, protocol=-1)
      log.rendering('Total pickle rendering time %.6f' % (time() - start))
      return response

  # We've got the data, now to render it
  graphOptions['data'] = data
  if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
    image = delegateRendering(requestOptions['graphType'], graphOptions,
                              requestContext['forwardHeaders'])
  else:
    image = doImageRender(requestOptions['graphClass'], graphOptions)

  useSVG = graphOptions.get('outputFormat') == 'svg'
  if useSVG and 'jsonp' in requestOptions:
    response = HttpResponse(
      content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
      content_type='text/javascript')
  elif graphOptions.get('outputFormat') == 'pdf':
    response = buildResponse(image, 'application/x-pdf')
  else:
    response = buildResponse(image, 'image/svg+xml' if useSVG else 'image/png')

  if useCache:
    cache.add(requestKey, response, cacheTimeout)
    patch_response_headers(response, cache_timeout=cacheTimeout)
  else:
    add_never_cache_headers(response)

  log.rendering('Total rendering time %.6f seconds' % (time() - start))
  return response
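# A standalone sketch (assumed values, not part of the original module) of the
# maxDataPoints consolidation arithmetic used in the json branch above. For a
# day of 1-minute points capped at 300 output points, each output point
# consolidates ceil(1440 / 300) = 5 values, i.e. 300 seconds per point.
import math

def consolidation_params(timeRange, step, maxDataPoints, start):
  numberOfDataPoints = timeRange / step
  valuesPerPoint = math.ceil(float(numberOfDataPoints) / float(maxDataPoints))
  secondsPerPoint = int(valuesPerPoint * step)
  # Same nudge as above: shift the series start onto a secondsPerPoint
  # boundary so repeated calls with a sliding window bucket identically.
  nudge = secondsPerPoint + (start % step) - (start % secondsPerPoint)
  return valuesPerPoint, secondsPerPoint, nudge

# On Python 3: consolidation_params(86400, 60, 300, start=1468000000) == (5, 300, 240)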
def renderView(request):
  start = time()
  (graphOptions, requestOptions) = parseOptions(request)
  useCache = 'noCache' not in requestOptions
  cacheTimeout = requestOptions['cacheTimeout']
  requestContext = {
    'startTime': requestOptions['startTime'],
    'endTime': requestOptions['endTime'],
    'localOnly': requestOptions['localOnly'],
    'template': requestOptions['template'],
    'targets': [],
    'data': []
  }
  data = requestContext['data']

  # First we check the request cache
  if useCache:
    requestKey = hashRequest(request)
    cachedResponse = cache.get(requestKey)
    if cachedResponse:
      log.cache('Request-Cache hit [%s]' % requestKey)
      log.rendering('Returned cached response in %.6f' % (time() - start))
      requestContext['cachedResponse'] = True
      requestContext['targets'].append((requestOptions['targets'], time() - start))
      log_query(request, requestOptions, requestContext, time() - start)
      return cachedResponse
    else:
      log.cache('Request-Cache miss [%s]' % requestKey)

  # Now we prepare the requested data
  if requestOptions['graphType'] == 'pie':
    for target in requestOptions['targets']:
      if target.find(':') >= 0:
        try:
          name, value = target.split(':', 1)
          value = float(value)
        except ValueError:
          raise ValueError("Invalid target '%s'" % target)
        data.append((name, value))
      else:
        t = time()
        seriesList = evaluateTarget(requestContext, target)
        for series in seriesList:
          func = PieFunctions[requestOptions['pieMode']]
          data.append((series.name, func(requestContext, series) or 0))
        requestContext['targets'].append((target, time() - t))

  elif requestOptions['graphType'] == 'line':
    # Let's see if at least our data is cached
    if useCache:
      t = time()
      targets = requestOptions['targets']
      startTime = requestOptions['startTime']
      endTime = requestOptions['endTime']
      dataKey = hashData(targets, startTime, endTime)
      cachedData = cache.get(dataKey)
      if cachedData:
        log.cache("Data-Cache hit [%s]" % dataKey)
        requestContext['cachedData'] = True
        requestContext['targets'].append((targets, time() - t))
      else:
        log.cache("Data-Cache miss [%s]" % dataKey)
    else:
      cachedData = None

    if cachedData is not None:
      requestContext['data'] = data = cachedData
    else:  # Have to actually retrieve the data now
      for target in requestOptions['targets']:
        if not target.strip():
          continue
        t = time()
        seriesList = evaluateTarget(requestContext, target)
        log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
        requestContext['targets'].append((target, time() - t))
        data.extend(seriesList)

      if useCache:
        cache.add(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    format = requestOptions.get('format')
    if format == 'csv':
      response = HttpResponse(content_type='text/csv')
      writer = csv.writer(response, dialect='excel')
      for series in data:
        for i, value in enumerate(series):
          timestamp = datetime.fromtimestamp(series.start + (i * series.step), requestOptions['tzinfo'])
          writer.writerow((series.name, timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))
      log_query(request, requestOptions, requestContext, time() - start)
      return response

    if format == 'json':
      series_data = []
      if 'maxDataPoints' in requestOptions and any(data):
        startTime = min([series.start for series in data])
        endTime = max([series.end for series in data])
        timeRange = endTime - startTime
        maxDataPoints = requestOptions['maxDataPoints']
        for series in data:
          numberOfDataPoints = timeRange / series.step
          if maxDataPoints < numberOfDataPoints:
            valuesPerPoint = math.ceil(float(numberOfDataPoints) / float(maxDataPoints))
            secondsPerPoint = int(valuesPerPoint * series.step)
            # Nudge start over a little bit so that the consolidation bands
            # align with each call, removing 'jitter' seen when refreshing.
            nudge = secondsPerPoint + (series.start % series.step) - (series.start % secondsPerPoint)
            series.start = series.start + nudge
            valuesToLose = int(nudge / series.step)
            for r in range(1, valuesToLose):
              del series[0]
            series.consolidate(valuesPerPoint)
            timestamps = range(int(series.start), int(series.end) + 1, int(secondsPerPoint))
          else:
            timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
          datapoints = zip(series, timestamps)
          series_data.append(dict(target=series.name, datapoints=datapoints))
      else:
        for series in data:
          timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
          datapoints = zip(series, timestamps)
          series_data.append(dict(target=series.name, datapoints=datapoints))

      if 'jsonp' in requestOptions:
        response = HttpResponse(
          content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
          content_type='text/javascript')
      else:
        response = HttpResponse(content=json.dumps(series_data),
                                content_type='application/json')

      if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
      else:
        add_never_cache_headers(response)
      log_query(request, requestOptions, requestContext, time() - start)
      return response

    if format == 'raw':
      response = HttpResponse(content_type='text/plain')
      for series in data:
        response.write("%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step))
        response.write(','.join(map(str, series)))
        response.write('\n')
      log.rendering('Total rawData rendering time %.6f' % (time() - start))
      log_query(request, requestOptions, requestContext, time() - start)
      return response

    if format == 'svg':
      graphOptions['outputFormat'] = 'svg'

    if format == 'pickle':
      response = HttpResponse(content_type='application/pickle')
      seriesInfo = [series.getInfo() for series in data]
      pickle.dump(seriesInfo, response, protocol=-1)
      log.rendering('Total pickle rendering time %.6f' % (time() - start))
      log_query(request, requestOptions, requestContext, time() - start)
      return response

  # We've got the data, now to render it
  graphOptions['data'] = data
  if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
    image = delegateRendering(requestOptions['graphType'], graphOptions)
  else:
    image = doImageRender(requestOptions['graphClass'], graphOptions)

  useSVG = graphOptions.get('outputFormat') == 'svg'
  if useSVG and 'jsonp' in requestOptions:
    response = HttpResponse(
      content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
      content_type='text/javascript')
  else:
    response = buildResponse(image, 'image/svg+xml' if useSVG else 'image/png')

  if useCache:
    cache.add(requestKey, response, cacheTimeout)
    patch_response_headers(response, cache_timeout=cacheTimeout)
  else:
    add_never_cache_headers(response)

  log.rendering('Total rendering time %.6f seconds' % (time() - start))
  log_query(request, requestOptions, requestContext, time() - start)
  return response
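# A minimal illustration, not part of the original module, of the JSONP
# wrapping used by several branches above: the JSON payload is embedded in a
# call to the caller-supplied callback and served as JavaScript rather than
# as application/json.
import json

def wrap_jsonp(callback, payload):
  return "%s(%s)" % (callback, json.dumps(payload))

# wrap_jsonp('cb', [{'target': 'foo', 'datapoints': [[1, 1468000000]]}])
#   -> 'cb([{"target": "foo", "datapoints": [[1, 1468000000]]}])'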
def renderView(request):
  start = time()
  (graphOptions, requestOptions) = parseOptions(request)
  useCache = 'noCache' not in requestOptions
  cacheTimeout = requestOptions['cacheTimeout']
  # TODO: Make that a namedtuple or a class.
  requestContext = {
    'startTime': requestOptions['startTime'],
    'endTime': requestOptions['endTime'],
    'now': requestOptions['now'],
    'localOnly': requestOptions['localOnly'],
    'template': requestOptions['template'],
    'tzinfo': requestOptions['tzinfo'],
    'forwardHeaders': requestOptions['forwardHeaders'],
    'data': [],
    'prefetched': {},
    'xFilesFactor': requestOptions['xFilesFactor'],
  }
  data = requestContext['data']

  response = None

  # First we check the request cache
  if useCache:
    requestKey = hashRequest(request)
    response = cache.get(requestKey)
    if response:
      log.cache('Request-Cache hit [%s]' % requestKey)
      log.rendering('Returned cached response in %.6f' % (time() - start))
      return response
    log.cache('Request-Cache miss [%s]' % requestKey)

  # Now we prepare the requested data
  if requestOptions['graphType'] == 'pie':
    for target in requestOptions['targets']:
      if target.find(':') >= 0:
        try:
          name, value = target.split(':', 1)
          value = float(value)
        except ValueError:
          raise ValueError("Invalid target '%s'" % target)
        data.append((name, value))
      else:
        seriesList = evaluateTarget(requestContext, target)
        for series in seriesList:
          func = PieFunction(requestOptions['pieMode'])
          data.append((series.name, func(requestContext, series) or 0))

  elif requestOptions['graphType'] == 'line':
    # Let's see if at least our data is cached
    cachedData = None
    if useCache:
      targets = requestOptions['targets']
      startTime = requestOptions['startTime']
      endTime = requestOptions['endTime']
      dataKey = hashData(targets, startTime, endTime, requestOptions['xFilesFactor'])
      cachedData = cache.get(dataKey)
      if cachedData:
        log.cache("Data-Cache hit [%s]" % dataKey)
      else:
        log.cache("Data-Cache miss [%s]" % dataKey)

    if cachedData is not None:
      requestContext['data'] = data = cachedData
    else:  # Have to actually retrieve the data now
      targets = requestOptions['targets']
      data.extend(evaluateTarget(requestContext, targets))

      if useCache:
        cache.add(dataKey, data, cacheTimeout)

  renderStart = time()

  format = requestOptions.get('format')
  if format == 'csv':
    response = renderViewCsv(requestOptions, data)
  elif format == 'json':
    response = renderViewJson(requestOptions, data)
  elif format == 'dygraph':
    response = renderViewDygraph(requestOptions, data)
  elif format == 'rickshaw':
    response = renderViewRickshaw(requestOptions, data)
  elif format == 'raw':
    response = renderViewRaw(requestOptions, data)
  elif format == 'pickle':
    response = renderViewPickle(requestOptions, data)
  elif format == 'msgpack':
    response = renderViewMsgPack(requestOptions, data)

  # if response wasn't generated above, render a graph image
  if not response:
    format = 'image'
    renderStart = time()
    response = renderViewGraph(graphOptions, requestOptions, data)

  if useCache:
    cache.add(requestKey, response, cacheTimeout)
    patch_response_headers(response, cache_timeout=cacheTimeout)
  else:
    add_never_cache_headers(response)

  log.rendering('%s rendering time %.6f' % (format, time() - renderStart))
  log.rendering('Total request processing time %.6f' % (time() - start))

  return response
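# Design note, not part of the original module: the final revision above
# replaces the long if/elif ladder with per-format helpers. The same dispatch
# could be table-driven; a hypothetical sketch using the helper names
# introduced above:
RENDERERS = {
  'csv': renderViewCsv,
  'json': renderViewJson,
  'dygraph': renderViewDygraph,
  'rickshaw': renderViewRickshaw,
  'raw': renderViewRaw,
  'pickle': renderViewPickle,
  'msgpack': renderViewMsgPack,
}

# renderer = RENDERERS.get(requestOptions.get('format'))
# response = renderer(requestOptions, data) if renderer else None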