def expand_view(request):
    "View for expanding a pattern into matching metric paths"
    queryParams = request.GET.copy()
    queryParams.update(request.POST)

    local_only = int(queryParams.get('local', 0))
    group_by_expr = int(queryParams.get('groupByExpr', 0))
    leaves_only = int(queryParams.get('leavesOnly', 0))
    jsonp = queryParams.get('jsonp', False)
    forward_headers = extractForwardHeaders(request)

    results = {}
    for query in queryParams.getlist('query'):
        results[query] = set()
        for node in STORE.find(query, local=local_only,
                               headers=forward_headers):
            if node.is_leaf or not leaves_only:
                results[query].add(node.path)

    # Convert our results to sorted lists because sets aren't json-friendly
    if group_by_expr:
        for query, matches in results.items():
            results[query] = sorted(matches)
    else:
        results = sorted(reduce(set.union, results.values(), set()))

    result = {'results': results}

    response = json_response_for(request, result, jsonp=jsonp)
    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    return response
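
# ---------------------------------------------------------------------------
# Sketch (not part of the original module) of the merge step in expand_view
# above: with groupByExpr=1 each pattern keeps its own sorted list, otherwise
# the per-pattern sets are reduced into one sorted union, since sets are not
# JSON-serializable. The metric paths below are hypothetical.
# ---------------------------------------------------------------------------
def _expand_merge_example():
    from functools import reduce  # 'reduce' is a builtin only on Python 2
    results = {
        'carbon.agents.*.cpuUsage': {'carbon.agents.a.cpuUsage',
                                     'carbon.agents.b.cpuUsage'},
        'carbon.agents.*.memUsage': {'carbon.agents.a.memUsage'},
    }
    # groupByExpr=1: one sorted list per query expression
    grouped = dict((query, sorted(matches))
                   for query, matches in results.items())
    # groupByExpr=0: a single sorted union of every match
    merged = sorted(reduce(set.union, results.values(), set()))
    assert merged == ['carbon.agents.a.cpuUsage',
                      'carbon.agents.a.memUsage',
                      'carbon.agents.b.cpuUsage']
    return grouped, merged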
def find_view(request):
    "View for finding metrics matching a given pattern"
    queryParams = request.GET.copy()
    queryParams.update(request.POST)

    format = queryParams.get('format', 'treejson')
    local_only = int(queryParams.get('local', 0))
    wildcards = int(queryParams.get('wildcards', 0))
    fromTime = int(queryParams.get('from', -1))
    untilTime = int(queryParams.get('until', -1))
    nodePosition = int(queryParams.get('position', -1))
    jsonp = queryParams.get('jsonp', False)
    forward_headers = extractForwardHeaders(request)

    if fromTime == -1:
        fromTime = None
    if untilTime == -1:
        untilTime = None

    automatic_variants = int(queryParams.get('automatic_variants', 0))

    try:
        query = str(queryParams['query'])
    except KeyError:
        return HttpResponseBadRequest(
            content="Missing required parameter 'query'",
            content_type='text/plain')

    if '.' in query:
        base_path = query.rsplit('.', 1)[0] + '.'
    else:
        base_path = ''

    if format == 'completer':
        query = query.replace('..', '*.')
        if not query.endswith('*'):
            query += '*'

        if automatic_variants:
            query_parts = query.split('.')
            for i, part in enumerate(query_parts):
                if ',' in part and '{' not in part:
                    query_parts[i] = '{%s}' % part
            query = '.'.join(query_parts)

    try:
        matches = list(STORE.find(query, fromTime, untilTime,
                                  local=local_only,
                                  headers=forward_headers))
    except Exception:
        log.exception()
        raise

    log.info('find_view query=%s local_only=%s matches=%d' %
             (query, local_only, len(matches)))
    matches.sort(key=lambda node: node.name)
    log.info("received remote find request: pattern=%s from=%s until=%s "
             "local_only=%s format=%s matches=%d" %
             (query, fromTime, untilTime, local_only, format, len(matches)))

    if format == 'treejson':
        profile = getProfile(request)
        content = tree_json(matches, base_path,
                            wildcards=profile.advancedUI or wildcards)
        response = json_response_for(request, content, jsonp=jsonp)

    elif format == 'nodelist':
        content = nodes_by_position(matches, nodePosition)
        response = json_response_for(request, content, jsonp=jsonp)

    elif format == 'pickle':
        content = pickle_nodes(matches)
        response = HttpResponse(content, content_type='application/pickle')

    elif format == 'json':
        content = json_nodes(matches)
        response = json_response_for(request, content, jsonp=jsonp)

    elif format == 'completer':
        results = []
        for node in matches:
            node_info = dict(path=node.path, name=node.name,
                             is_leaf=str(int(node.is_leaf)))
            if not node.is_leaf:
                node_info['path'] += '.'
            results.append(node_info)

        if len(results) > 1 and wildcards:
            wildcardNode = {'name': '*'}
            results.append(wildcardNode)

        response = json_response_for(request, {'metrics': results},
                                     jsonp=jsonp)

    else:
        return HttpResponseBadRequest(
            content="Invalid value for 'format' parameter",
            content_type='text/plain')

    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    return response
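
# ---------------------------------------------------------------------------
# Sketch (not part of the original module) of the 'completer' query rewrite
# in find_view above: '..' collapses to '*.', a trailing '*' is appended, and
# with automatic_variants=1 comma-separated path components are wrapped in
# braces. The input pattern is hypothetical.
# ---------------------------------------------------------------------------
def _completer_rewrite_example():
    query = 'carbon..agents,cache.cpu'
    query = query.replace('..', '*.')
    if not query.endswith('*'):
        query += '*'
    # automatic_variants: turn an 'a,b' component into the '{a,b}' variant form
    query_parts = query.split('.')
    for i, part in enumerate(query_parts):
        if ',' in part and '{' not in part:
            query_parts[i] = '{%s}' % part
    query = '.'.join(query_parts)
    assert query == 'carbon*.{agents,cache}.cpu*'
    return query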
def renderView(request):
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'now': requestOptions['now'],
        'localOnly': requestOptions['localOnly'],
        'template': requestOptions['template'],
        'tzinfo': requestOptions['tzinfo'],
        'forwardHeaders': extractForwardHeaders(request),
        'data': []
    }
    data = requestContext['data']

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        if settings.REMOTE_PREFETCH_DATA and not requestOptions.get('localOnly'):
            # Literal 'name:value' targets don't need fetching
            targets = [target for target in requestOptions['targets']
                       if target.find(':') < 0]
            log.rendering("Prefetching remote data")
            pathExpressions = extractPathExpressions(targets)
            prefetchRemoteData(STORE.remote_stores, requestContext,
                               pathExpressions)

        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except ValueError:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                seriesList = evaluateTarget(requestContext, target)
                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name,
                                 func(requestContext, series) or 0))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            requestContext['data'] = data = cachedData
        else:  # Have to actually retrieve the data now
            targets = requestOptions['targets']
            if settings.REMOTE_PREFETCH_DATA and not requestOptions.get('localOnly'):
                log.rendering("Prefetching remote data")
                pathExpressions = extractPathExpressions(targets)
                prefetchRemoteData(STORE.remote_stores, requestContext,
                                   pathExpressions)

            for target in targets:
                if not target.strip():
                    continue
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" %
                              (target, time() - t))
                data.extend(seriesList)

            if useCache:
                cache.add(dataKey, data, cacheTimeout)

        # If data is all we needed, we're done
        format = requestOptions.get('format')
        if format == 'csv':
            response = HttpResponse(content_type='text/csv')
            writer = csv.writer(response, dialect='excel')
            for series in data:
                for i, value in enumerate(series):
                    timestamp = datetime.fromtimestamp(
                        series.start + (i * series.step),
                        requestOptions['tzinfo'])
                    writer.writerow((series.name,
                                     timestamp.strftime("%Y-%m-%d %H:%M:%S"),
                                     value))
            return response

        if format == 'json':
            jsonStart = time()
            series_data = []
            if 'maxDataPoints' in requestOptions and any(data):
                startTime = min([series.start for series in data])
                endTime = max([series.end for series in data])
                timeRange = endTime - startTime
                maxDataPoints = requestOptions['maxDataPoints']
                for series in data:
                    numberOfDataPoints = timeRange / series.step
                    if maxDataPoints < numberOfDataPoints:
                        valuesPerPoint = math.ceil(
                            float(numberOfDataPoints) / float(maxDataPoints))
                        secondsPerPoint = int(valuesPerPoint * series.step)
                        # Nudge start over a little bit so that the
                        # consolidation bands align with each call, removing
                        # 'jitter' seen when refreshing.
                        nudge = (secondsPerPoint
                                 + (series.start % series.step)
                                 - (series.start % secondsPerPoint))
                        series.start = series.start + nudge
                        valuesToLose = int(nudge / series.step)
                        for r in range(1, valuesToLose):
                            del series[0]
                        series.consolidate(valuesPerPoint)
                        timestamps = range(int(series.start),
                                           int(series.end) + 1,
                                           int(secondsPerPoint))
                    else:
                        timestamps = range(int(series.start),
                                           int(series.end) + 1,
                                           int(series.step))
                    datapoints = zip(series, timestamps)
                    series_data.append(dict(target=series.name,
                                            datapoints=datapoints))
            elif 'noNullPoints' in requestOptions and any(data):
                for series in data:
                    values = []
                    for (index, v) in enumerate(series):
                        if v is not None:
                            timestamp = series.start + (index * series.step)
                            values.append((v, timestamp))
                    if len(values) > 0:
                        series_data.append(dict(target=series.name,
                                                datapoints=values))
            else:
                for series in data:
                    timestamps = range(int(series.start),
                                       int(series.end) + 1,
                                       int(series.step))
                    datapoints = zip(series, timestamps)
                    series_data.append(dict(target=series.name,
                                            datapoints=datapoints))

            # Scrub Python-only literals that are not valid JSON
            output = json.dumps(series_data) \
                .replace('None,', 'null,') \
                .replace('NaN,', 'null,') \
                .replace('Infinity,', '1e9999,')

            if 'jsonp' in requestOptions:
                response = HttpResponse(
                    content="%s(%s)" % (requestOptions['jsonp'], output),
                    content_type='text/javascript')
            else:
                response = HttpResponse(content=output,
                                        content_type='application/json')

            if useCache:
                cache.add(requestKey, response, cacheTimeout)
                patch_response_headers(response, cache_timeout=cacheTimeout)
            else:
                add_never_cache_headers(response)
            log.rendering('JSON rendering time %6f' % (time() - jsonStart))
            log.rendering('Total request processing time %6f' %
                          (time() - start))
            return response

        if format == 'dygraph':
            labels = ['Time']
            result = '{}'
            if data:
                datapoints = [[ts] for ts in
                              range(data[0].start, data[0].end, data[0].step)]
                for series in data:
                    labels.append(series.name)
                    for i, point in enumerate(series):
                        if point is None:
                            point = 'null'
                        elif point == float('inf'):
                            point = 'Infinity'
                        elif point == float('-inf'):
                            point = '-Infinity'
                        elif math.isnan(point):
                            point = 'null'
                        datapoints[i].append(point)
                line_template = '[%%s000%s]' % ''.join([', %s'] * len(data))
                lines = [line_template % tuple(points)
                         for points in datapoints]
                result = '{"labels" : %s, "data" : [%s]}' % (
                    json.dumps(labels), ', '.join(lines))
            response = HttpResponse(content=result,
                                    content_type='application/json')

            if useCache:
                cache.add(requestKey, response, cacheTimeout)
                patch_response_headers(response, cache_timeout=cacheTimeout)
            else:
                add_never_cache_headers(response)
            log.rendering('Total dygraph rendering time %.6f' %
                          (time() - start))
            return response

        if format == 'rickshaw':
            series_data = []
            for series in data:
                timestamps = range(series.start, series.end, series.step)
                datapoints = [{'x': x, 'y': y}
                              for x, y in zip(timestamps, series)]
                series_data.append(dict(target=series.name,
                                        datapoints=datapoints))
            if 'jsonp' in requestOptions:
                response = HttpResponse(
                    content="%s(%s)" % (requestOptions['jsonp'],
                                        json.dumps(series_data)),
                    content_type='text/javascript')
            else:
                response = HttpResponse(content=json.dumps(series_data),
                                        content_type='application/json')

            if useCache:
                cache.add(requestKey, response, cacheTimeout)
                patch_response_headers(response, cache_timeout=cacheTimeout)
            else:
                add_never_cache_headers(response)
            log.rendering('Total rickshaw rendering time %.6f' %
                          (time() - start))
            return response

        if format == 'raw':
            response = HttpResponse(content_type='text/plain')
            for series in data:
                response.write("%s,%d,%d,%d|" % (series.name, series.start,
                                                 series.end, series.step))
                response.write(','.join(map(repr, series)))
                response.write('\n')
            log.rendering('Total rawData rendering time %.6f' %
                          (time() - start))
            return response

        if format == 'svg':
            graphOptions['outputFormat'] = 'svg'
        elif format == 'pdf':
            graphOptions['outputFormat'] = 'pdf'

        if format == 'pickle':
            response = HttpResponse(content_type='application/pickle')
            seriesInfo = [series.getInfo() for series in data]
            pickle.dump(seriesInfo, response, protocol=-1)
            log.rendering('Total pickle rendering time %.6f' %
                          (time() - start))
            return response

    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING:
        # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions,
                                  requestContext['forwardHeaders'])
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)

    useSVG = graphOptions.get('outputFormat') == 'svg'
    if useSVG and 'jsonp' in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
            content_type='text/javascript')
    elif graphOptions.get('outputFormat') == 'pdf':
        response = buildResponse(image, 'application/x-pdf')
    else:
        response = buildResponse(image,
                                 'image/svg+xml' if useSVG else 'image/png')

    if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
    else:
        add_never_cache_headers(response)

    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
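
# ---------------------------------------------------------------------------
# Worked sketch (not part of the original module) of the maxDataPoints
# consolidation in renderView above, using hypothetical numbers: 1h of 10s
# data (360 points) squeezed into at most 100 points means ceil(360/100) = 4
# values per consolidated point, i.e. one point per 40s. The start is nudged
# so the 40s consolidation bands land on the same boundaries on every
# refresh, which is what removes the 'jitter' mentioned in the comment.
# ---------------------------------------------------------------------------
def _consolidation_nudge_example():
    import math
    start, end, step = 1234567890, 1234571490, 10   # 3600s of 10s data
    maxDataPoints = 100
    numberOfDataPoints = (end - start) / step       # 360
    valuesPerPoint = math.ceil(float(numberOfDataPoints) / maxDataPoints)  # 4
    secondsPerPoint = int(valuesPerPoint * step)    # 40
    nudge = secondsPerPoint + (start % step) - (start % secondsPerPoint)
    nudged_start = start + nudge
    # The nudged start sits on a secondsPerPoint boundary (modulo the
    # original step offset), so the bands are stable across refreshes.
    assert nudged_start % secondsPerPoint == start % step
    return nudged_start, valuesPerPoint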