def index_json(request):
    """Return every metric known to this node as a JSON (or JSONP) list.

    Scans the whisper tree for ``*.wsp`` files and the ceres tree for
    directories containing a ``.ceres-node`` marker, then converts the
    relative filesystem paths into dotted metric names.
    """
    jsonp = request.REQUEST.get('jsonp', False)

    found = []
    for dirpath, dirnames, filenames in os.walk(settings.WHISPER_DIR):
        relative = dirpath.replace(settings.WHISPER_DIR, '')
        found.extend(
            os.path.join(relative, filename)
            for filename in filenames
            if fnmatch.fnmatch(filename, '*.wsp')
        )

    for dirpath, dirnames, filenames in os.walk(settings.CERES_DIR):
        relative = dirpath.replace(settings.CERES_DIR, '')
        # A directory is a ceres metric node iff it holds a .ceres-node marker.
        if '.ceres-node' in filenames:
            found.append(relative)

    metrics = [
        path.replace('.wsp', '').replace('.rrd', '').replace('/', '.').lstrip('.')
        for path in sorted(found)
    ]

    if jsonp:
        return HttpResponse("%s(%s)" % (jsonp, json.dumps(metrics)),
                            mimetype='text/javascript')
    return HttpResponse(json.dumps(metrics), mimetype='application/json')
def findSeries(request):
    """Find series matching a set of tag expressions.

    Accepts GET or POST. Expressions may be supplied either as repeated
    ``expr`` parameters or in the Rails/PHP/jQuery ``expr[]`` spelling.
    Returns a JSON list; ``pretty`` (truthy) indents and sorts keys.
    """
    if request.method not in ['GET', 'POST']:
        return HttpResponse(status=405)

    queryParams = request.GET.copy()
    queryParams.update(request.POST)

    # Normal format: ?expr=tag1=value1&expr=tag2=value2
    exprs = queryParams.getlist('expr')
    if not exprs:
        # Rails/PHP/jQuery common practice format: ?expr[]=tag1=value1&expr[]=tag2=value2
        exprs = queryParams.getlist('expr[]')

    if not exprs:
        return HttpResponse(
            json.dumps({'error': 'no tag expressions specified'}),
            content_type='application/json',
            status=400
        )

    pretty = queryParams.get('pretty')
    return HttpResponse(
        # Consistency fix: sort_keys previously consulted request.GET only,
        # so a POSTed pretty=1 indented the output without sorting keys.
        json.dumps(STORE.tagdb.find_series(exprs) if STORE.tagdb else [],
                   indent=(2 if pretty else None),
                   sort_keys=bool(pretty)),
        content_type='application/json'
    )
def test_tag_views(self):
    # End-to-end exercise of the tag HTTP API: tagging a series, listing
    # tags, fetching tag details, finding series by expression, and
    # deleting a series. Each response is checked for content type and
    # (where deterministic) the exact JSON body.
    url = reverse('tagList')

    # Tagging a path returns its canonical form: tags sorted by name.
    expected = 'test.a;blah=blah;hello=tiger'

    response = self.client.post(url + '/tagSeries', {'path': 'test.a;hello=tiger;blah=blah'})
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertEqual(response.content, json.dumps(expected, indent=2, sort_keys=True))

    # Listing tags with a regex filter should surface the 'hello' tag.
    expected = [{"tag": "hello"}]

    response = self.client.get(url, {'filter': 'hello$'})
    self.assertEqual(response['Content-Type'], 'application/json')
    result = json.loads(response.content)
    self.assertEqual(len(result), len(expected))
    self.assertEqual(result[0]['tag'], expected[0]['tag'])

    # Same query with pretty=1 must not change the decoded payload.
    response = self.client.get(url, {'filter': 'hello$', 'pretty': 1})
    self.assertEqual(response['Content-Type'], 'application/json')
    result = json.loads(response.content)
    self.assertEqual(len(result), len(expected))
    self.assertEqual(result[0]['tag'], expected[0]['tag'])

    # Tag details include the values recorded for that tag with counts.
    expected = {"tag": "hello", "values": [{"count": 1, "value": "tiger"}]}

    response = self.client.get(url + '/hello', {'filter': 'tiger$'})
    self.assertEqual(response['Content-Type'], 'application/json')
    result = json.loads(response.content)
    self.assertEqual(result['tag'], expected['tag'])
    self.assertEqual(len(result['values']), len(expected['values']))
    self.assertEqual(result['values'][0]['count'], expected['values'][0]['count'])
    self.assertEqual(result['values'][0]['value'], expected['values'][0]['value'])

    response = self.client.get(url + '/hello', {'filter': 'tiger$', 'pretty': 1})
    self.assertEqual(response['Content-Type'], 'application/json')
    result = json.loads(response.content)
    self.assertEqual(result['tag'], expected['tag'])
    self.assertEqual(len(result['values']), len(expected['values']))
    self.assertEqual(result['values'][0]['count'], expected['values'][0]['count'])
    self.assertEqual(result['values'][0]['value'], expected['values'][0]['value'])

    # findSeries with expr[] spelling should locate the tagged series.
    expected = ['test.a;blah=blah;hello=tiger']

    response = self.client.get(url + '/findSeries?expr[]=name=test.a&expr[]=hello=tiger&expr[]=blah=blah&pretty=1')
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertEqual(response.content, json.dumps(expected, indent=2, sort_keys=True))

    # Deleting the series reports success as a bare JSON true.
    expected = True

    response = self.client.post(url + '/delSeries', {'path': 'test.a;blah=blah;hello=tiger'})
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertEqual(response.content, json.dumps(expected))

    # After deletion the same findSeries query must come back empty.
    expected = []

    response = self.client.get(url + '/findSeries?expr=name=test.a&expr=hello=tiger&expr=blah=blah')
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertEqual(response.content, json.dumps(expected, indent=2, sort_keys=True))
def get_data(request):
    """Return fetched events as JSON, honouring an optional JSONP callback."""
    payload = json.dumps(fetch(request), cls=EventEncoder)
    if 'jsonp' in request.REQUEST:
        callback = request.REQUEST.get('jsonp')
        return HttpResponse("%s(%s)" % (callback, payload),
                            content_type='text/javascript')
    return HttpResponse(payload, content_type="application/json")
def get_data(request):
    """Return fetched events as JSON, honouring an optional JSONP callback
    supplied via either GET or POST parameters."""
    query_params = request.GET.copy()
    query_params.update(request.POST)

    payload = json.dumps(fetch(request), cls=EventEncoder)
    if 'jsonp' in query_params:
        return HttpResponse("%s(%s)" % (query_params.get('jsonp'), payload),
                            content_type='text/javascript')
    return HttpResponse(payload, content_type='application/json')
def index_json(request):
    """List all whisper metrics on this node as JSON (or JSONP)."""
    jsonp = request.REQUEST.get("jsonp", False)

    found = []
    for dirpath, dirnames, filenames in os.walk(settings.WHISPER_DIR):
        relative = dirpath.replace(settings.WHISPER_DIR, "")
        found.extend(
            os.path.join(relative, filename)
            for filename in filenames
            if fnmatch.fnmatch(filename, "*.wsp")
        )

    metrics = [p.replace(".wsp", "").replace("/", ".") for p in sorted(found)]

    if jsonp:
        return HttpResponse("%s(%s)" % (jsonp, json.dumps(metrics)),
                            mimetype="text/javascript")
    return HttpResponse(json.dumps(metrics), mimetype="application/json")
def index_json(request):
    """List metrics recorded in the kenshin index files as JSON/JSONP."""
    jsonp = request.REQUEST.get('jsonp', False)

    matches = []
    for filepath in settings.KENSHIN_INDEX_FILES:
        with open(filepath) as f:
            for line in f:
                # Only fully written lines (newline-terminated) are trusted;
                # a partial trailing line is skipped. The metric name is the
                # first space-delimited token.
                if line.endswith('\n'):
                    matches.append(line.split(' ', 1)[0])
    matches.sort()

    if jsonp:
        return HttpResponse("%s(%s)" % (jsonp, json.dumps(matches)),
                            mimetype='text/javascript')
    return HttpResponse(json.dumps(matches), mimetype='application/json')
def json_response(nodes):
    """Serialize *nodes* to JSON and return an uncacheable HttpResponse."""
    response = HttpResponse(json.dumps(nodes), mimetype="application/json")
    # Metric data is volatile; forbid intermediary caching.
    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    return response
def tagSeries(request):
    """Tag a series path in the tag database (POST only)."""
    if request.method != 'POST':
        return HttpResponse(status=405)

    path = request.POST.get('path')
    if not path:
        return HttpResponse(
            json.dumps({'error': 'no path specified'}),
            content_type='application/json',
            status=400
        )

    # Without a tag database the endpoint degrades to a JSON null.
    if STORE.tagdb:
        body = json.dumps(STORE.tagdb.tag_series(path))
    else:
        body = 'null'
    return HttpResponse(body, content_type='application/json')
def post_event(request):
    """Create an Event from a JSON POST body; non-POST gets 405.

    ``tags`` may be a list (joined with spaces) or an already
    space-separated string; anything else is a 400 error.
    """
    if request.method != 'POST':
        return HttpResponse(status=405)

    event = json.loads(request.body)
    assert isinstance(event, dict)

    tags = event.get('tags')
    if tags is not None:
        if isinstance(tags, list):
            tags = ' '.join(tags)
        elif not isinstance(tags, six.string_types):
            return HttpResponse(
                json.dumps({'error': '"tags" must be an array or space-separated string'}),
                status=400)

    # 'when' is an epoch timestamp; default to the current time.
    when = epoch_to_dt(event['when']) if 'when' in event else now()

    Event.objects.create(
        what=event.get('what'),
        tags=tags,
        when=when,
        data=event.get('data', ''),
    )
    return HttpResponse(status=200)
def index_all_json(request):
    """Merge /metrics/index.json from every cluster server, deduplicated."""
    # Single-node (or empty) clusters can answer locally.
    if len(settings.CLUSTER_SERVERS) <= 1:
        return index_json(request)

    merged = set()
    for cluster_server in settings.CLUSTER_SERVERS:
        index_url = "http://" + cluster_server + "/metrics/index.json"
        merged.update(json.loads(urlopen(index_url).read()))

    return HttpResponse(json.dumps(list(merged)), mimetype='application/json')
def context_view(request):
    """GET: return the stored context for each requested metric.
    POST: update the context of each requested metric with the remaining
    form fields.
    """
    if request.method == 'GET':
        contexts = []

        # Fix: the original test was `if not 'metric' not in request.GET`,
        # a double negation that returned the error precisely when the
        # parameter WAS supplied.
        if 'metric' not in request.GET:
            return HttpResponse('{ "error" : "missing required parameter \"metric\"" }', mimetype='text/json')

        for metric in request.GET.getlist('metric'):
            try:
                context = STORE.get(metric).context
            except:
                # Best-effort: report per-metric failures instead of aborting.
                contexts.append({
                    'metric': metric,
                    'error': 'failed to retrieve context',
                    'traceback': traceback.format_exc()
                })
            else:
                contexts.append({'metric': metric, 'context': context})

        content = json.dumps({'contexts': contexts})
        return HttpResponse(content, mimetype='text/json')

    elif request.method == 'POST':
        if 'metric' not in request.POST:
            return HttpResponse('{ "error" : "missing required parameter \"metric\"" }', mimetype='text/json')

        # Every form field except 'metric' becomes part of the new context.
        newContext = dict(item for item in request.POST.items() if item[0] != 'metric')
        for metric in request.POST.getlist('metric'):
            STORE.get(metric).updateContext(newContext)

        return HttpResponse('{ "success" : true }', mimetype='text/json')

    else:
        return HttpResponseBadRequest("invalid method, must be GET or POST")
def list_view(request):
    """Return metric paths five levels deep whose path contains every
    dot-separated component of the ``query`` parameter (substring match).

    The response is JSON ``{"metrics": [{"path": ...}, ...]}`` sorted by
    path and marked uncacheable.
    """
    store = STORE
    try:
        query = str(request.REQUEST['query'])
    except KeyError:  # narrowed from bare except; only a missing key is expected
        return HttpResponseBadRequest(content="Missing required parameter 'query'", mimetype="text/plain")

    query_items = query.split('.')

    hosts = []
    # Walk exactly five levels of the hierarchy (presumably
    # type.env.app.dc.host -- confirm against the metric naming scheme).
    for t in store.find('*'):
        for env in store.find('%s.*' % t.path):
            for app in store.find('%s.*' % env.path):
                for dc in store.find('%s.*' % app.path):
                    for host_node in store.find('%s.*' % dc.path):
                        # Every query component must appear somewhere in the path.
                        if all(q in host_node.path for q in query_items):
                            hosts.append({'path': host_node.path})

    hosts.sort(key=lambda node: node['path'])

    content = json.dumps({'metrics': hosts})
    response = HttpResponse(content, mimetype='application/json')
    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    return response
def post_event(request):
    """Create an Event from a JSON POST body; non-POST gets 405.

    In this variant ``tags`` must be a list (or absent); any other type
    is rejected with a 400.
    """
    if request.method != 'POST':
        return HttpResponse(status=405)

    event = json.loads(request.body)
    assert isinstance(event, dict)

    tags = event.get('tags')
    if tags:
        if not isinstance(tags, list):
            return HttpResponse(
                json.dumps({'error': '"tags" must be an array'}),
                status=400)
        tags = ' '.join(tags)

    if 'when' in event:
        # Epoch seconds -> timezone-aware UTC datetime.
        when = make_aware(
            datetime.datetime.utcfromtimestamp(event.get('when')),
            pytz.utc)
    else:
        when = now()

    Event.objects.create(
        what=event.get('what'),
        tags=tags,
        when=when,
        data=event.get('data', ''),
    )
    return HttpResponse(status=200)
def renderViewDygraph(requestOptions, data):
    # Serialize rendered series into the JSON layout the Dygraph charting
    # library expects: {"labels": [...], "data": [[ts_ms, v1, v2, ...], ...]}
    labels = ['Time']
    output = '{}'  # empty-object fallback when there is no data
    if data:
        # One row per timestamp, seeded with the epoch second.
        # NOTE(review): rows are sized from data[0]'s start/end/step — this
        # assumes every series shares that range and resolution; confirm.
        datapoints = [[ts] for ts in range(data[0].start, data[0].end, data[0].step)]
        for series in data:
            labels.append(series.name)
            for i, point in enumerate(series):
                # Map non-JSON-representable values to Dygraph-friendly
                # tokens. None is checked first, so math.isnan only ever
                # sees numbers.
                if point is None:
                    point = 'null'
                elif point == float('inf'):
                    point = 'Infinity'
                elif point == float('-inf'):
                    point = '-Infinity'
                elif math.isnan(point):
                    point = 'null'
                datapoints[i].append(point)
        # '%%s000' emits the timestamp followed by a literal "000" —
        # a textual seconds->milliseconds conversion for JavaScript.
        # One ', %s' slot is appended per series.
        line_template = '[%%s000%s]' % ''.join([', %s'] * len(data))
        lines = [line_template % tuple(points) for points in datapoints]
        output = '{"labels" : %s, "data" : [%s]}' % (json.dumps(labels), ', '.join(lines))
    if 'jsonp' in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions['jsonp'], output),
            content_type='text/javascript')
    else:
        response = HttpResponse(
            content=output,
            content_type='application/json')
    return response
def expand_view(request):
    "View for expanding a pattern into matching metric paths"
    local_only = int(request.REQUEST.get('local', 0))
    group_by_expr = int(request.REQUEST.get('groupByExpr', 0))
    leaves_only = int(request.REQUEST.get('leavesOnly', 0))

    results = {}
    for query in request.REQUEST.getlist('query'):
        matched = set()
        for node in STORE.find(query, local=local_only):
            if node.is_leaf or not leaves_only:
                matched.add(node.metric_path)
        results[query] = matched

    # Sets aren't JSON-friendly; convert to sorted lists, either per
    # expression or as one flat union.
    if group_by_expr:
        for query, matched in results.items():
            results[query] = sorted(matched)
    else:
        results = sorted(reduce(set.union, results.values(), set()))

    response = HttpResponse(json.dumps({'results': results}),
                            mimetype='application/json')
    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    return response
def get_data(request):
    """Get the data for one series.

    Evaluates the first requested target and serializes it as JSON.
    When an ``xrange`` (pixel width) parameter is given, each series is
    consolidated so it carries roughly one point per pixel.
    """
    (graphOptions, requestOptions) = parseOptions(request)
    xrange = request.GET.get('xrange', None)
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'localOnly': False,
        'data': []
    }
    target = requestOptions['targets'][0]
    seriesList = evaluateTarget(requestContext, target)

    result = []
    for timeseries in seriesList:
        # Default to 1 so `step` below is always defined; previously
        # pointsPerPixel was unbound (NameError) when xrange was absent.
        pointsPerPixel = 1
        if xrange is not None:
            numberOfDataPoints = len(timeseries)
            pointsPerPixel = math.ceil(float(numberOfDataPoints) / float(xrange))
            if pointsPerPixel:
                timeseries.consolidate(pointsPerPixel)
        result.append(dict(
            name=timeseries.name,
            data=[x for x in timeseries],
            start=timeseries.start,
            end=timeseries.end,
            step=(timeseries.step * pointsPerPixel),
            stack=getattr(timeseries, 'stacked', None),
        ))

    if not result:
        # Parenthesized raise: the old `raise Exception, "..."` form is
        # Python-2-only syntax.
        raise Exception("No data for %s" % target)
    return HttpResponse(json.dumps(result), mimetype="application/json")
def get_data(request):
    """Get the data for one series."""
    (graphOptions, requestOptions) = parseOptions(request)
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'localOnly': False,
        'data': []
    }

    # Key the request cache on the full parameter set, wherever it came from.
    cache_request_obj = request.GET.copy() if request.method == 'GET' else request.POST.copy()
    requestContext['request_key'] = hashRequestWTime(cache_request_obj)

    target = requestOptions['targets'][0]
    result = [
        dict(
            name=ts.name,
            data=[x for x in ts],
            start=ts.start,
            end=ts.end,
            step=ts.step,
        )
        for ts in evaluateTarget(requestContext, target)
    ]

    if not result:
        raise Http404
    return HttpResponse(json.dumps(result), content_type="application/json")
def set_metadata_view(request):
    # Set metadata key/value pairs on metrics via CarbonLink.
    #
    # GET:  sets a single metric/key/value triple from query parameters.
    # POST: accepts a JSON list of {metric, key, value} operations, either
    #       as the raw request body (Content-Type: application/json) or in
    #       an 'operations' form field.
    #
    # Returns a JSON map of metric -> CarbonLink result (or an error dict).
    results = {}
    if request.method == 'GET':
        metric = request.GET['metric']
        key = request.GET['key']
        value = request.GET['value']
        try:
            results[metric] = CarbonLink.set_metadata(metric, key, value)
        except:
            # Deliberately broad: any backend failure is reported per-metric
            # instead of failing the whole request.
            log.exception()
            results[metric] = dict(error="Unexpected error occurred in CarbonLink.set_metadata(%s, %s)" % (metric, key))
    elif request.method == 'POST':
        if request.META.get('CONTENT_TYPE') == 'application/json':
            operations = json.loads( request.raw_post_data )
        else:
            operations = json.loads( request.POST['operations'] )
        for op in operations:
            # metric is pre-set to None so the except clause can tell
            # whether the failure happened before or after key extraction.
            metric = None
            try:
                metric, key, value = op['metric'], op['key'], op['value']
                results[metric] = CarbonLink.set_metadata(metric, key, value)
            except:
                log.exception()
                if metric:
                    results[metric] = dict(error="Unexpected error occurred in bulk CarbonLink.set_metadata(%s)" % metric)
    else:
        results = dict(error="Invalid request method")
    return HttpResponse(json.dumps(results), mimetype='application/json')
def set_metadata_view(request):
    # Set metadata key/value pairs on metrics via CarbonLink.
    # (Double-quoted variant of the same view; behavior is identical.)
    #
    # GET:  sets a single metric/key/value triple from query parameters.
    # POST: accepts a JSON list of {metric, key, value} operations, either
    #       as the raw request body (Content-Type: application/json) or in
    #       an "operations" form field.
    results = {}
    if request.method == "GET":
        metric = request.GET["metric"]
        key = request.GET["key"]
        value = request.GET["value"]
        try:
            results[metric] = CarbonLink.set_metadata(metric, key, value)
        except:
            # Broad on purpose: report per-metric failures, don't abort.
            log.exception()
            results[metric] = dict(error="Unexpected error occurred in CarbonLink.set_metadata(%s, %s)" % (metric, key))
    elif request.method == "POST":
        if request.META.get("CONTENT_TYPE") == "application/json":
            operations = json.loads(request.raw_post_data)
        else:
            operations = json.loads(request.POST["operations"])
        for op in operations:
            # metric = None lets the handler distinguish failures that
            # happen before the op's keys are even extracted.
            metric = None
            try:
                metric, key, value = op["metric"], op["key"], op["value"]
                results[metric] = CarbonLink.set_metadata(metric, key, value)
            except:
                log.exception()
                if metric:
                    results[metric] = dict(
                        error="Unexpected error occurred in bulk CarbonLink.set_metadata(%s)" % metric
                    )
    else:
        results = dict(error="Invalid request method")
    return HttpResponse(json.dumps(results), mimetype="application/json")
def dashboard(request, name=None):
    # Render the dashboard UI, optionally preloading the saved dashboard
    # called *name*. A missing dashboard.conf is tolerated (the UI is told
    # via dashboard_conf_missing); other OSErrors propagate.
    dashboard_conf_missing = False
    try:
        config.check()
    except OSError:
        # sys.exc_info() spelling works on both Python 2 and 3 (avoids the
        # `except OSError, e` / `except OSError as e` syntax split).
        e = sys.exc_info()[1]
        if e.errno == errno.ENOENT:
            dashboard_conf_missing = True
        else:
            raise

    initialError = None
    debug = request.GET.get('debug', False)

    # Fall back to the configured theme when the requested one has no CSS.
    theme = request.GET.get('theme', config.ui_config['theme'])
    css_file = join(settings.CSS_DIR, 'dashboard-%s.css' % theme)
    if not exists(css_file):
        initialError = "Invalid theme '%s'" % theme
        theme = config.ui_config['theme']

    context = {
        'schemes_json' : json.dumps(config.schemes),
        'ui_config_json' : json.dumps(config.ui_config),
        'jsdebug' : debug or settings.JAVASCRIPT_DEBUG,
        'debug' : debug,
        'theme' : theme,
        'initialError' : initialError,
        'querystring' : json.dumps( dict( request.GET.items() ) ),
        'dashboard_conf_missing' : dashboard_conf_missing,
        'userName': '',
        'permissions': json.dumps(getPermissions(request.user)),
        'permissionsUnauthenticated': json.dumps(getPermissions(None)),
        'slash' : get_script_prefix(),
    }

    user = request.user
    if user:
        context['userName'] = user.username

    if name is not None:
        try:
            dashboard = Dashboard.objects.get(name=name)
        except Dashboard.DoesNotExist:
            context['initialError'] = "Dashboard '%s' does not exist." % name
        else:
            context['initialState'] = dashboard.state

    return render_to_response("dashboard.html", context)
def dashboard(request, name=None):
    """Render the dashboard page, optionally preloading a saved dashboard."""
    config.check()

    context = {
        'schemes_json': json.dumps(config.schemes),
        'ui_config_json': json.dumps(config.ui_config),
        'jsdebug': settings.JAVASCRIPT_DEBUG,
    }

    if name is None:
        return render_to_response("dashboard.html", context)

    try:
        dashboard = Dashboard.objects.get(name=name)
    except Dashboard.DoesNotExist:
        context['initialError'] = "Dashboard '%s' does not exist." % name
    else:
        context['initialState'] = dashboard.state
    return render_to_response("dashboard.html", context)
def dashboard(request, name=None):
    # Render the dashboard UI (double-quoted variant, no 'slash' entry),
    # optionally preloading the saved dashboard called *name*. A missing
    # dashboard.conf is tolerated; other OSErrors propagate.
    dashboard_conf_missing = False
    try:
        config.check()
    except OSError:
        # sys.exc_info() spelling is compatible with Python 2 and 3.
        e = sys.exc_info()[1]
        if e.errno == errno.ENOENT:
            dashboard_conf_missing = True
        else:
            raise

    initialError = None
    debug = request.GET.get("debug", False)

    # Fall back to the configured theme when the requested one has no CSS.
    theme = request.GET.get("theme", config.ui_config["theme"])
    css_file = join(settings.CSS_DIR, "dashboard-%s.css" % theme)
    if not exists(css_file):
        initialError = "Invalid theme '%s'" % theme
        theme = config.ui_config["theme"]

    context = {
        "schemes_json": json.dumps(config.schemes),
        "ui_config_json": json.dumps(config.ui_config),
        "jsdebug": debug or settings.JAVASCRIPT_DEBUG,
        "debug": debug,
        "theme": theme,
        "initialError": initialError,
        "querystring": json.dumps(dict(request.GET.items())),
        "dashboard_conf_missing": dashboard_conf_missing,
        "userName": "",
        "permissions": json.dumps(getPermissions(request.user)),
        "permissionsUnauthenticated": json.dumps(getPermissions(None)),
    }

    user = request.user
    if user:
        context["userName"] = user.username

    if name is not None:
        try:
            dashboard = Dashboard.objects.get(name=name)
        except Dashboard.DoesNotExist:
            context["initialError"] = "Dashboard '%s' does not exist." % name
        else:
            context["initialState"] = dashboard.state

    return render_to_response("dashboard.html", context)
def renderViewJson(requestOptions, data):
    # Serialize rendered series into graphite's JSON format:
    # [{"target": name, "tags": {...}, "datapoints": [[value, ts], ...]}, ...]
    series_data = []
    if 'maxDataPoints' in requestOptions and any(data):
        maxDataPoints = requestOptions['maxDataPoints']
        if maxDataPoints == 1:
            # Collapse each series to one consolidated point at its start.
            for series in data:
                series.consolidate(len(series))
                datapoints = list(zip(series, [int(series.start)]))
                series_data.append(dict(target=series.name, tags=series.tags, datapoints=datapoints))
        else:
            startTime = min([series.start for series in data])
            endTime = max([series.end for series in data])
            timeRange = endTime - startTime
            for series in data:
                numberOfDataPoints = timeRange/series.step
                if maxDataPoints < numberOfDataPoints:
                    valuesPerPoint = math.ceil(float(numberOfDataPoints) / float(maxDataPoints))
                    secondsPerPoint = int(valuesPerPoint * series.step)
                    # Nudge start over a little bit so that the consolidation bands align with each call
                    # removing 'jitter' seen when refreshing.
                    nudge = secondsPerPoint + (series.start % series.step) - (series.start % secondsPerPoint)
                    series.start = series.start + nudge
                    # Drop the values that fell before the nudged start so
                    # datapoints still line up with the new timestamps.
                    valuesToLose = int(nudge/series.step)
                    for r in range(1, valuesToLose):
                        del series[0]
                    series.consolidate(valuesPerPoint)
                    timestamps = range(int(series.start), int(series.end) + 1, int(secondsPerPoint))
                else:
                    # Already at or below the requested density; keep native step.
                    timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
                datapoints = list(zip(series, timestamps))
                series_data.append(dict(target=series.name, tags=series.tags, datapoints=datapoints))
    elif 'noNullPoints' in requestOptions and any(data):
        # Emit only concrete (non-None, non-NaN) values with their timestamps;
        # series with no concrete values are omitted entirely.
        for series in data:
            values = []
            for (index,v) in enumerate(series):
                if v is not None and not math.isnan(v):
                    timestamp = series.start + (index * series.step)
                    values.append((v,timestamp))
            if len(values) > 0:
                series_data.append(dict(target=series.name, tags=series.tags, datapoints=values))
    else:
        for series in data:
            timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
            datapoints = list(zip(series, timestamps))
            series_data.append(dict(target=series.name, tags=series.tags, datapoints=datapoints))

    # Textual cleanup of tokens json.dumps emits for non-finite floats
    # (NaN/Infinity are not valid JSON). NOTE(review): the 'None,'
    # replacement looks legacy — json.dumps writes None as null — confirm.
    output = json.dumps(series_data, indent=(2 if requestOptions.get('pretty') else None)).replace('None,', 'null,').replace('NaN,', 'null,').replace('Infinity,', '1e9999,')

    if 'jsonp' in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions['jsonp'], output),
            content_type='text/javascript')
    else:
        response = HttpResponse(
            content=output,
            content_type='application/json')
    return response
def tagList(request):
    """List tags known to the tag database (GET only)."""
    if request.method != 'GET':
        return HttpResponse(status=405)

    tags = STORE.tagdb.list_tags(tagFilter=request.GET.get('filter')) if STORE.tagdb else []
    pretty = request.GET.get('pretty')
    body = json.dumps(tags,
                      indent=(2 if pretty else None),
                      sort_keys=bool(pretty))
    return HttpResponse(body, content_type='application/json')
def tagDetails(request, tag):
    """Return details for one tag, optionally filtering its values (GET only)."""
    if request.method != 'GET':
        return HttpResponse(status=405)

    details = STORE.tagdb.get_tag(tag, valueFilter=request.GET.get('filter')) if STORE.tagdb else None
    pretty = request.GET.get('pretty')
    body = json.dumps(details,
                      indent=(2 if pretty else None),
                      sort_keys=bool(pretty))
    return HttpResponse(body, content_type='application/json')
def tree_json(nodes, base_path, wildcards=False, contexts=False):
    """Serialize metric-tree nodes to the JSON the tree widget expects.

    Branches are listed before leaves, duplicate node names are collapsed,
    and an optional '*' wildcard entry is prepended when more than one
    node matched.
    """
    branch_attrs = {
        'allowChildren': 1,
        'expandable': 1,
        'leaf': 0,
    }
    leaf_attrs = {
        'allowChildren': 0,
        'expandable': 0,
        'leaf': 1,
    }

    results = []

    # Add a wildcard node if appropriate.
    if wildcards and len(nodes) > 1:
        wildcard = {'text': '*', 'id': base_path + '*'}
        has_branch = any(not n.isLeaf() for n in nodes)
        wildcard.update(branch_attrs if has_branch else leaf_attrs)
        results.append(wildcard)

    seen = set()
    branches = []
    leaves = []
    for node in nodes:
        # Collapse duplicate names: first occurrence wins.
        if node.name in seen:
            continue
        seen.add(node.name)

        entry = {
            'text': str(node.name),
            'id': base_path + str(node.name),
        }
        entry['context'] = node.context if contexts else {}

        if node.isLeaf():
            entry.update(leaf_attrs)
            leaves.append(entry)
        else:
            entry.update(branch_attrs)
            branches.append(entry)

    results.extend(branches)
    results.extend(leaves)
    return json.dumps(results)
def find_view(request):
    "View for finding metrics matching a given pattern"
    profile = getProfile(request)
    format = request.REQUEST.get('format', 'treejson')
    local_only = int( request.REQUEST.get('local', 0) )
    contexts = int( request.REQUEST.get('contexts', 0) )
    wildcards = int( request.REQUEST.get('wildcards', 0) )

    try:
        query = str( request.REQUEST['query'] )
    except:
        return HttpResponseBadRequest(content="Missing required parameter 'query'", mimetype="text/plain")

    # base_path is everything up to and including the last dot; the tree
    # widget uses it to build absolute ids for the child nodes it renders.
    if '.' in query:
        base_path = query.rsplit('.', 1)[0] + '.'
    else:
        base_path = ''

    if local_only:
        store = LOCAL_STORE
    else:
        store = STORE

    matches = list( store.find(query) )

    log.info('find_view query=%s local_only=%s matches=%d' % (query, local_only, len(matches)))
    matches.sort(key=lambda node: node.name)

    # Three output formats: Ext tree JSON, pickled nodes for cluster
    # peers, and a flat 'completer' list for the auto-complete box.
    if format == 'treejson':
        content = tree_json(matches, base_path, wildcards=profile.advancedUI or wildcards, contexts=contexts)
        response = HttpResponse(content, mimetype='text/json')
    elif format == 'pickle':
        content = pickle_nodes(matches, contexts=contexts)
        response = HttpResponse(content, mimetype='application/pickle')
    elif format == 'completer':
        #if len(matches) == 1 and (not matches[0].isLeaf()) and query == matches[0].metric_path + '*': # auto-complete children
        #  matches = list( store.find(query + '.*') )
        results = [ dict(path=node.metric_path, name=node.name) for node in matches ]
        if len(results) > 1 and wildcards:
            wildcardNode = {'name' : '*'}
            results.append(wildcardNode)
        content = json.dumps({ 'metrics' : results })
        response = HttpResponse(content, mimetype='text/json')
    else:
        return HttpResponseBadRequest(content="Invalid value for 'format' parameter", mimetype="text/plain")

    # Metric trees change constantly; never let intermediaries cache this.
    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    return response
def get_metadata_view(request):
    """Fetch one metadata key for each requested metric via CarbonLink."""
    key = request.REQUEST['key']
    results = {}
    for metric in request.REQUEST.getlist('metric'):
        try:
            results[metric] = CarbonLink.get_metadata(metric, key)
        except:
            # Report per-metric failures instead of aborting the request.
            log.exception()
            results[metric] = dict(error="Unexpected error occurred in CarbonLink.get_metadata(%s, %s)" % (metric, key))
    return HttpResponse(json.dumps(results), mimetype='application/json')
def json_response_for(request, data, mimetype='application/json', jsonp=False, **kwargs):
    """Build an HttpResponse containing *data* encoded as JSON.

    If the client's Accept header is exactly 'application/json', the
    payload is ASCII-escaped; otherwise it is emitted as UTF-8 and the
    mimetype gains an explicit charset. A truthy *jsonp* wraps the payload
    in that callback and switches the mimetype to text/javascript.
    Extra keyword arguments are forwarded to HttpResponse.
    """
    accept = request.META.get('HTTP_ACCEPT', 'application/json')
    ensure_ascii = accept == 'application/json'

    content = json.dumps(data, ensure_ascii=ensure_ascii)
    if jsonp:
        # Fix: was "%s(%)" — '%)' is an invalid conversion specifier, so
        # every JSONP request raised ValueError instead of being wrapped.
        content = "%s(%s)" % (jsonp, content)
        mimetype = 'text/javascript'
    if not ensure_ascii:
        mimetype += ';charset=utf-8'

    return HttpResponse(content, mimetype=mimetype, **kwargs)
def template(request, name, val):
    # Render the dashboard UI preloaded with the state of the saved
    # Template *name*, with *val* substituted in via loadState(). A missing
    # template conf is tolerated; other OSErrors propagate.
    template_conf_missing = False
    try:
        config.check()
    except OSError as e:
        if e.errno == errno.ENOENT:
            template_conf_missing = True
        else:
            raise

    initialError = None
    debug = request.GET.get('debug', False)

    # Fall back to the configured theme when the requested one has no CSS.
    theme = request.GET.get('theme', config.ui_config['theme'])
    css_file = finders.find('css/dashboard-%s.css' % theme)
    if css_file is None:
        initialError = "Invalid theme '%s'" % theme
        theme = config.ui_config['theme']

    context = {
        'schemes_json': json.dumps(config.schemes),
        'ui_config_json': json.dumps(config.ui_config),
        'jsdebug': debug or settings.JAVASCRIPT_DEBUG,
        'debug': debug,
        'theme': theme,
        'initialError': initialError,
        'querystring': json.dumps(dict(request.GET.items())),
        'template_conf_missing': template_conf_missing,
        'userName': '',
        'permissions': json.dumps(getPermissions(request.user)),
        'permissionsUnauthenticated': json.dumps(getPermissions(None))
    }

    user = request.user
    if user:
        context['userName'] = user.username

    try:
        template = Template.objects.get(name=name)
    except Template.DoesNotExist:
        context['initialError'] = "Template '%s' does not exist." % name
    else:
        # loadState(val) returns the stored state with val substituted in;
        # the displayed name becomes "template/value".
        state = json.loads(template.loadState(val))
        state['name'] = '%s/%s' % (name, val)
        context['initialState'] = json.dumps(state)

    return render_to_response("dashboard.html", context)
def test_get_index(self, http_request):
    # RemoteFinder.get_index should POST to /metrics/index.json with
    # local=1 and decode the JSON body into a list of metric names;
    # a non-JSON body must raise a descriptive exception.
    # (http_request is the mocked HTTP transport injected by the patch
    # decorator on this method.)
    finder = RemoteFinder('127.0.0.1')

    data = [
        'a.b.c',
        'a.b.c.d',
    ]
    responseObject = HTTPResponse(body=BytesIO(json.dumps(data).encode('utf-8')), status=200, preload_content=False)
    http_request.return_value = responseObject

    result = finder.get_index({})

    self.assertIsInstance(result, list)
    # Positional args: HTTP method and the index URL.
    self.assertEqual(http_request.call_args[0], (
        'POST',
        'http://127.0.0.1/metrics/index.json',
    ))
    # Keyword args: form fields, headers, streaming and timeout settings.
    self.assertEqual(http_request.call_args[1], {
        'fields': [
            ('local', '1'),
        ],
        'headers': None,
        'preload_content': False,
        'timeout': 10,
    })
    self.assertEqual(len(result), 2)
    self.assertEqual(result[0], 'a.b.c')
    self.assertEqual(result[1], 'a.b.c.d')

    # non-json response
    responseObject = HTTPResponse(body=BytesIO(b'error'), status=200, preload_content=False)
    http_request.return_value = responseObject
    with self.assertRaisesRegexp(Exception, 'Error decoding index response from http://[^ ]+: .+'):
        result = finder.get_index({})
def get_data(request):
    """Get the data for one series."""
    (graphOptions, requestOptions) = parseOptions(request)
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'localOnly': False,
        'data': []
    }

    # Only the first target is evaluated by this endpoint.
    seriesList = evaluateTarget(requestContext, requestOptions['targets'][0])

    result = []
    for timeseries in seriesList:
        result.append(dict(
            name=timeseries.name,
            data=list(timeseries),
            start=timeseries.start,
            end=timeseries.end,
            step=timeseries.step,
        ))

    if not result:
        raise Http404
    return HttpResponse(json.dumps(result), mimetype="application/json")
except OSError, e: if e.errno == errno.ENOENT: template_conf_missing = True else: raise initialError = None debug = request.GET.get('debug', False) theme = request.GET.get('theme', config.ui_config['theme']) css_file = join(settings.CSS_DIR, 'dashboard-%s.css' % theme) if not exists(css_file): initialError = "Invalid theme '%s'" % theme theme = config.ui_config['theme'] context = { 'schemes_json': json.dumps(config.schemes), 'ui_config_json': json.dumps(config.ui_config), 'jsdebug': debug or settings.JAVASCRIPT_DEBUG, 'debug': debug, 'theme': theme, 'initialError': initialError, 'querystring': json.dumps(dict(request.GET.items())), 'template_conf_missing': template_conf_missing, 'userName': '', 'permissions': json.dumps(getPermissions(request.user)), 'permissionsUnauthenticated': json.dumps(getPermissions(None)) } user = request.user if user: context['userName'] = user.username
def json_bytes(obj, *args, **kwargs):
    """json.dumps wrapper: returns UTF-8 bytes on Python 3, str on Python 2.

    All positional and keyword arguments are forwarded to json.dumps.
    """
    encoded = json.dumps(obj, *args, **kwargs)
    if sys.version_info[0] < 3:
        return encoded
    return encoded.encode('utf-8')
def renderView(request):
    """Main graphite render endpoint.

    Parses options, consults the request/data caches, evaluates targets,
    then dispatches on the requested ``format`` (csv/json/dygraph/
    rickshaw/raw/svg/pdf/pickle) or falls through to image rendering.
    """
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    # TODO: Make that a namedtuple or a class.
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'now': requestOptions['now'],
        'localOnly': requestOptions['localOnly'],
        'template': requestOptions['template'],
        'tzinfo': requestOptions['tzinfo'],
        'forwardHeaders': extractForwardHeaders(request),
        'data': [],
        'prefetched': {},
    }
    data = requestContext['data']

    # First we check the request cache (whole-response cache keyed on the
    # full request).
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                # Literal "name:value" slices.
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                # Evaluate the target and reduce each series to one value
                # via the configured pie-mode function.
                seriesList = evaluateTarget(requestContext, target)
                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name, func(requestContext, series) or 0))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            requestContext['data'] = data = cachedData
        else:  # Have to actually retrieve the data now
            targets = requestOptions['targets']
            # Optionally warm remote data in one batch before evaluating.
            if settings.REMOTE_PREFETCH_DATA and not requestOptions.get('localOnly'):
                prefetchRemoteData(requestContext, targets)
            for target in targets:
                if not target.strip():
                    continue
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
                data.extend(seriesList)
            if useCache:
                cache.add(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    format = requestOptions.get('format')
    if format == 'csv':
        response = HttpResponse(content_type='text/csv')
        writer = csv.writer(response, dialect='excel')
        for series in data:
            for i, value in enumerate(series):
                timestamp = datetime.fromtimestamp(
                    series.start + (i * series.step), requestOptions['tzinfo'])
                writer.writerow(
                    (series.name, timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))
        return response

    if format == 'json':
        jsonStart = time()
        series_data = []
        if 'maxDataPoints' in requestOptions and any(data):
            # Consolidate each series down to at most maxDataPoints points.
            startTime = min([series.start for series in data])
            endTime = max([series.end for series in data])
            timeRange = endTime - startTime
            maxDataPoints = requestOptions['maxDataPoints']
            for series in data:
                numberOfDataPoints = timeRange / series.step
                if maxDataPoints < numberOfDataPoints:
                    valuesPerPoint = math.ceil(
                        float(numberOfDataPoints) / float(maxDataPoints))
                    secondsPerPoint = int(valuesPerPoint * series.step)
                    # Nudge start over a little bit so that the consolidation bands align with each call
                    # removing 'jitter' seen when refreshing.
                    nudge = secondsPerPoint + (series.start % series.step) - (series.start % secondsPerPoint)
                    series.start = series.start + nudge
                    valuesToLose = int(nudge / series.step)
                    # NOTE(review): range(1, n) drops n-1 points, not n —
                    # looks intentional/inherited upstream; confirm before changing.
                    for r in range(1, valuesToLose):
                        del series[0]
                    series.consolidate(valuesPerPoint)
                    timestamps = range(int(series.start), int(series.end) + 1,
                                       int(secondsPerPoint))
                else:
                    timestamps = range(int(series.start), int(series.end) + 1,
                                       int(series.step))
                datapoints = zip(series, timestamps)
                series_data.append(
                    dict(target=series.name, tags=series.tags, datapoints=datapoints))
        elif 'noNullPoints' in requestOptions and any(data):
            # Emit only non-null points, each paired with its timestamp.
            for series in data:
                values = []
                for (index, v) in enumerate(series):
                    if v is not None:
                        timestamp = series.start + (index * series.step)
                        values.append((v, timestamp))
                if len(values) > 0:
                    series_data.append(
                        dict(target=series.name, tags=series.tags, datapoints=values))
        else:
            for series in data:
                timestamps = range(int(series.start), int(series.end) + 1,
                                   int(series.step))
                datapoints = zip(series, timestamps)
                series_data.append(
                    dict(target=series.name, tags=series.tags, datapoints=datapoints))

        # Post-process the serialized text so None/NaN/Infinity become
        # valid JSON tokens.
        output = json.dumps(
            series_data,
            indent=(2 if requestOptions['pretty'] else None)).replace(
                'None,', 'null,').replace('NaN,', 'null,').replace('Infinity,', '1e9999,')

        if 'jsonp' in requestOptions:
            response = HttpResponse(
                content="%s(%s)" % (requestOptions['jsonp'], output),
                content_type='text/javascript')
        else:
            response = HttpResponse(content=output, content_type='application/json')

        if useCache:
            cache.add(requestKey, response, cacheTimeout)
            patch_response_headers(response, cache_timeout=cacheTimeout)
        else:
            add_never_cache_headers(response)
        log.rendering('JSON rendering time %6f' % (time() - jsonStart))
        log.rendering('Total request processing time %6f' % (time() - start))
        return response

    if format == 'dygraph':
        labels = ['Time']
        result = '{}'
        if data:
            # One row per timestamp of the first series; columns are series.
            datapoints = [[ts] for ts in
                          range(data[0].start, data[0].end, data[0].step)]
            for series in data:
                labels.append(series.name)
                for i, point in enumerate(series):
                    if point is None:
                        point = 'null'
                    elif point == float('inf'):
                        point = 'Infinity'
                    elif point == float('-inf'):
                        point = '-Infinity'
                    elif math.isnan(point):
                        point = 'null'
                    datapoints[i].append(point)
            # %%s000 appends milliseconds to the epoch timestamp.
            line_template = '[%%s000%s]' % ''.join([', %s'] * len(data))
            lines = [line_template % tuple(points) for points in datapoints]
            result = '{"labels" : %s, "data" : [%s]}' % (
                json.dumps(labels), ', '.join(lines))
        response = HttpResponse(content=result, content_type='application/json')

        if useCache:
            cache.add(requestKey, response, cacheTimeout)
            patch_response_headers(response, cache_timeout=cacheTimeout)
        else:
            add_never_cache_headers(response)
        log.rendering('Total dygraph rendering time %.6f' % (time() - start))
        return response

    if format == 'rickshaw':
        series_data = []
        for series in data:
            timestamps = range(series.start, series.end, series.step)
            datapoints = [{'x': x, 'y': y} for x, y in zip(timestamps, series)]
            series_data.append(dict(target=series.name, datapoints=datapoints))
        if 'jsonp' in requestOptions:
            # NOTE(review): `mimetype` is the pre-Django-1.7 kwarg; the other
            # branches here use `content_type` — confirm target Django version.
            response = HttpResponse(
                content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
                mimetype='text/javascript')
        else:
            response = HttpResponse(content=json.dumps(series_data),
                                    content_type='application/json')

        if useCache:
            cache.add(requestKey, response, cacheTimeout)
            patch_response_headers(response, cache_timeout=cacheTimeout)
        else:
            add_never_cache_headers(response)
        log.rendering('Total rickshaw rendering time %.6f' % (time() - start))
        return response

    if format == 'raw':
        response = HttpResponse(content_type='text/plain')
        for series in data:
            response.write("%s,%d,%d,%d|" % (series.name, series.start,
                                             series.end, series.step))
            response.write(','.join(map(repr, series)))
            response.write('\n')
        log.rendering('Total rawData rendering time %.6f' % (time() - start))
        return response

    if format == 'svg':
        graphOptions['outputFormat'] = 'svg'
    elif format == 'pdf':
        graphOptions['outputFormat'] = 'pdf'

    if format == 'pickle':
        response = HttpResponse(content_type='application/pickle')
        seriesInfo = [series.getInfo() for series in data]
        pickle.dump(seriesInfo, response, protocol=-1)
        log.rendering('Total pickle rendering time %.6f' % (time() - start))
        return response

    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions,
                                  requestContext['forwardHeaders'])
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)

    useSVG = graphOptions.get('outputFormat') == 'svg'
    if useSVG and 'jsonp' in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
            content_type='text/javascript')
    elif graphOptions.get('outputFormat') == 'pdf':
        response = buildResponse(image, 'application/x-pdf')
    else:
        response = buildResponse(image, 'image/svg+xml' if useSVG else 'image/png')

    if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
    else:
        add_never_cache_headers(response)
    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
def test_auto_complete_values(self, http_request):
    """RemoteFinder.auto_complete_values POSTs to /tags/autoComplete/values.

    Checks the default limit (100), an explicit limit with forwarded
    headers, and the error raised on a non-JSON body. `http_request`
    is a mock injected by the test decorator.
    """
    finder = RemoteFinder('127.0.0.1')

    data = [
        'value1',
        'value2',
    ]
    responseObject = HTTPResponse(
        body=BytesIO(json.dumps(data).encode('utf-8')),
        status=200,
        preload_content=False)
    http_request.return_value = responseObject

    result = finder.auto_complete_values(['name=test'], 'tag1', 'value')

    self.assertIsInstance(result, list)
    self.assertEqual(http_request.call_args[0], (
        'POST',
        'http://127.0.0.1/tags/autoComplete/values',
    ))
    # Default limit is serialized as the string '100'.
    self.assertEqual(
        http_request.call_args[1], {
            'fields': [
                ('tag', 'tag1'),
                ('valuePrefix', 'value'),
                ('limit', '100'),
                ('expr', 'name=test'),
            ],
            'headers': None,
            'preload_content': False,
            'timeout': 10,
        })
    self.assertEqual(len(result), 2)
    self.assertEqual(result[0], 'value1')
    self.assertEqual(result[1], 'value2')

    # explicit limit & forward headers
    responseObject = HTTPResponse(
        body=BytesIO(json.dumps(data).encode('utf-8')),
        status=200,
        preload_content=False)
    http_request.return_value = responseObject

    result = finder.auto_complete_values(
        ['name=test', 'tag3=value3'], 'tag1', 'value',
        limit=5,
        requestContext={'forwardHeaders': {'X-Test': 'test'}})

    self.assertIsInstance(result, list)
    self.assertEqual(http_request.call_args[0], (
        'POST',
        'http://127.0.0.1/tags/autoComplete/values',
    ))
    # Each expr gets its own field; headers come from the requestContext.
    self.assertEqual(
        http_request.call_args[1], {
            'fields': [
                ('tag', 'tag1'),
                ('valuePrefix', 'value'),
                ('limit', '5'),
                ('expr', 'name=test'),
                ('expr', 'tag3=value3'),
            ],
            'headers': {'X-Test': 'test'},
            'preload_content': False,
            'timeout': 10,
        })
    self.assertEqual(len(result), 2)
    self.assertEqual(result[0], 'value1')
    self.assertEqual(result[1], 'value2')

    # non-json response
    responseObject = HTTPResponse(
        body=BytesIO(b'error'), status=200, preload_content=False)
    http_request.return_value = responseObject
    with self.assertRaisesRegexp(
            Exception,
            'Error decoding autocomplete values response from http://[^ ]+: .+'):
        result = finder.auto_complete_values(['name=test'], 'tag1', 'value')
def json_response(obj):
    """Serialize *obj* to JSON and wrap it in an HTTP response.

    Fixed: use the standard 'application/json' media type (was the
    nonstandard 'text/json') and the `content_type` kwarg — `mimetype`
    was removed in Django 1.7, and `content_type` is accepted by all
    Django versions. Matches the other json_response in this file.
    """
    return HttpResponse(content_type='application/json', content=json.dumps(obj))
def renderViewJson(requestOptions, data):
    """Serialize a list of timeseries to the graphite JSON response format.

    Honors maxDataPoints (consolidation), noNullPoints (sparse output),
    pretty (indent), and jsonp (callback wrapper) request options.
    """
    series_data = []

    if 'maxDataPoints' in requestOptions and any(data):
        maxDataPoints = requestOptions['maxDataPoints']
        if maxDataPoints == 1:
            # Special case: collapse every series to a single point at
            # the series start.
            for series in data:
                series.consolidate(len(series))
                datapoints = zip(series, [int(series.start)])
                series_data.append(
                    dict(target=series.name, tags=series.tags, datapoints=datapoints))
        else:
            startTime = min([series.start for series in data])
            endTime = max([series.end for series in data])
            timeRange = endTime - startTime
            for series in data:
                numberOfDataPoints = timeRange / series.step
                if maxDataPoints < numberOfDataPoints:
                    valuesPerPoint = math.ceil(
                        float(numberOfDataPoints) / float(maxDataPoints))
                    secondsPerPoint = int(valuesPerPoint * series.step)
                    # Nudge start over a little bit so that the consolidation bands align with each call
                    # removing 'jitter' seen when refreshing.
                    nudge = secondsPerPoint + (series.start % series.step) - (
                        series.start % secondsPerPoint)
                    series.start = series.start + nudge
                    valuesToLose = int(nudge / series.step)
                    # NOTE(review): range(1, n) drops n-1 points, not n —
                    # inherited behavior; confirm before changing.
                    for r in range(1, valuesToLose):
                        del series[0]
                    series.consolidate(valuesPerPoint)
                    timestamps = range(int(series.start), int(series.end) + 1,
                                       int(secondsPerPoint))
                else:
                    timestamps = range(int(series.start), int(series.end) + 1,
                                       int(series.step))
                datapoints = zip(series, timestamps)
                series_data.append(
                    dict(target=series.name, tags=series.tags, datapoints=datapoints))
    elif 'noNullPoints' in requestOptions and any(data):
        # Emit only real values (non-None and non-NaN) with timestamps.
        for series in data:
            values = []
            for (index, v) in enumerate(series):
                if v is not None and not math.isnan(v):
                    timestamp = series.start + (index * series.step)
                    values.append((v, timestamp))
            if len(values) > 0:
                series_data.append(
                    dict(target=series.name, tags=series.tags, datapoints=values))
    else:
        for series in data:
            timestamps = range(int(series.start), int(series.end) + 1,
                               int(series.step))
            datapoints = zip(series, timestamps)
            series_data.append(
                dict(target=series.name, tags=series.tags, datapoints=datapoints))

    # Textual post-processing turns Python's None/NaN/Infinity into valid
    # JSON tokens in the serialized output.
    output = json.dumps(
        series_data,
        indent=(2 if requestOptions.get('pretty') else None)).replace(
            'None,', 'null,').replace('NaN,', 'null,').replace('Infinity,', '1e9999,')

    if 'jsonp' in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions['jsonp'], output),
            content_type='text/javascript')
    else:
        response = HttpResponse(content=output, content_type='application/json')

    return response
def get_data(request):
    """Return the events matched by *request*, serialized with EventEncoder.

    Fixed: `content_type` instead of the `mimetype` kwarg, which was
    removed in Django 1.7 (`content_type` works in all versions).
    """
    return HttpResponse(json.dumps(fetch(request), cls=EventEncoder),
                        content_type="application/json")
except OSError, e: if e.errno == errno.ENOENT: dashboard_conf_missing = True else: raise initialError = None debug = request.GET.get('debug', False) theme = request.GET.get('theme', config.ui_config['theme']) css_file = join(settings.CSS_DIR, 'dashboard-%s.css' % theme) if not exists(css_file): initialError = "Invalid theme '%s'" % theme theme = config.ui_config['theme'] context = { 'schemes_json' : json.dumps(config.schemes), 'ui_config_json' : json.dumps(config.ui_config), 'jsdebug' : debug or settings.JAVASCRIPT_DEBUG, 'debug' : debug, 'theme' : theme, 'initialError' : initialError, 'querystring' : json.dumps( dict( request.GET.items() ) ), 'dashboard_conf_missing' : dashboard_conf_missing, } if name is not None: try: dashboard = Dashboard.objects.get(name=name) except Dashboard.DoesNotExist: context['initialError'] = "Dashboard '%s' does not exist." % name else:
def find_view(request):
    "View for finding metrics matching a given pattern"
    profile = getProfile(request)
    format = request.REQUEST.get('format', 'treejson')
    local_only = int(request.REQUEST.get('local', 0))
    wildcards = int(request.REQUEST.get('wildcards', 0))
    # -1 is the sentinel for "no time bound".
    fromTime = int(request.REQUEST.get('from', -1))
    untilTime = int(request.REQUEST.get('until', -1))
    jsonp = request.REQUEST.get('jsonp')

    if fromTime == -1:
        fromTime = None
    if untilTime == -1:
        untilTime = None

    automatic_variants = int(request.REQUEST.get('automatic_variants', 0))

    try:
        query = str(request.REQUEST['query'])
    except:
        return HttpResponseBadRequest(
            content="Missing required parameter 'query'", mimetype="text/plain")

    # base_path is everything up to the last dot, used by tree_json to
    # build node paths relative to the query.
    if '.' in query:
        base_path = query.rsplit('.', 1)[0] + '.'
    else:
        base_path = ''

    if format == 'completer':
        # Turn partial input into a wildcard query.
        query = query.replace('..', '*.')
        if not query.endswith('*'):
            query += '*'
        if automatic_variants:
            # Brace-expand comma-separated parts, e.g. a,b -> {a,b}.
            query_parts = query.split('.')
            for i, part in enumerate(query_parts):
                if ',' in part and '{' not in part:
                    query_parts[i] = '{%s}' % part
            query = '.'.join(query_parts)

    try:
        matches = list(STORE.find(query, fromTime, untilTime, local=local_only))
    except:
        log.exception()
        raise

    log.info('find_view query=%s local_only=%s matches=%d' %
             (query, local_only, len(matches)))
    matches.sort(key=lambda node: node.name)
    log.info(
        "received remote find request: pattern=%s from=%s until=%s local_only=%s format=%s matches=%d"
        % (query, fromTime, untilTime, local_only, format, len(matches)))

    if format == 'treejson':
        content = tree_json(matches, base_path,
                            wildcards=profile.advancedUI or wildcards)
        response = HttpResponse(content, mimetype='application/json')
    elif format == 'pickle':
        content = pickle_nodes(matches)
        response = HttpResponse(content, mimetype='application/pickle')
    elif format == 'completer':
        results = []
        for node in matches:
            node_info = dict(path=node.path, name=node.name,
                             is_leaf=str(int(node.is_leaf)))
            # Branch nodes get a trailing dot so the completer keeps drilling.
            if not node.is_leaf:
                node_info['path'] += '.'
            results.append(node_info)

        if len(results) > 1 and wildcards:
            wildcardNode = {'name': '*'}
            results.append(wildcardNode)

        content = json.dumps({'metrics': results})
        response = HttpResponse(content, mimetype='application/json')
    else:
        return HttpResponseBadRequest(
            content="Invalid value for 'format' parameter", mimetype="text/plain")

    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    # Wrap the body in the JSONP callback when requested.
    if jsonp:
        response.content = "%s(%s)" % (jsonp, response.content)
    return response
datapoints = zip(series, timestamps) series_data.append( dict(target=series.name, datapoints=datapoints)) else: for series in data: if len(set(series)) == 1 and series[0] is None: continue timestamps = range(int(series.start), int(series.end) + 1, int(series.step)) datapoints = zip(series, timestamps) series_data.append( dict(target=series.name, datapoints=datapoints)) if 'jsonp' in requestOptions: response = HttpResponse( content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)), content_type='text/javascript') else: response = HttpResponse(content=json.dumps(series_data), content_type='application/json') response['Pragma'] = 'no-cache' response['Cache-Control'] = 'no-cache' return response if format == 'raw': response = HttpResponse(content_type='text/plain') for series in data: response.write( "%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step))
def test_tag_views(self):
    """End-to-end exercise of the tag HTTP API: tag, list, detail, find, delete."""
    url = reverse('tagList')

    # Tag a series; response echoes the normalized (sorted-tags) path.
    expected = 'test.a;blah=blah;hello=tiger'
    response = self.client.post(url + '/tagSeries',
                                {'path': 'test.a;hello=tiger;blah=blah'})
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertEqual(response.content,
                     json.dumps(expected, indent=2, sort_keys=True))

    # List tags filtered by regex.
    expected = [{"tag": "hello"}]
    response = self.client.get(url, {'filter': 'hello$'})
    self.assertEqual(response['Content-Type'], 'application/json')
    result = json.loads(response.content)
    self.assertEqual(len(result), len(expected))
    self.assertEqual(result[0]['tag'], expected[0]['tag'])

    # Same list with pretty output.
    response = self.client.get(url, {'filter': 'hello$', 'pretty': 1})
    self.assertEqual(response['Content-Type'], 'application/json')
    result = json.loads(response.content)
    self.assertEqual(len(result), len(expected))
    self.assertEqual(result[0]['tag'], expected[0]['tag'])

    # Tag detail with a value filter.
    expected = {"tag": "hello", "values": [{"count": 1, "value": "tiger"}]}
    response = self.client.get(url + '/hello', {'filter': 'tiger$'})
    self.assertEqual(response['Content-Type'], 'application/json')
    result = json.loads(response.content)
    self.assertEqual(result['tag'], expected['tag'])
    self.assertEqual(len(result['values']), len(expected['values']))
    self.assertEqual(result['values'][0]['count'], expected['values'][0]['count'])
    self.assertEqual(result['values'][0]['value'], expected['values'][0]['value'])

    # Tag detail, pretty output.
    response = self.client.get(url + '/hello', {
        'filter': 'tiger$',
        'pretty': 1
    })
    self.assertEqual(response['Content-Type'], 'application/json')
    result = json.loads(response.content)
    self.assertEqual(result['tag'], expected['tag'])
    self.assertEqual(len(result['values']), len(expected['values']))
    self.assertEqual(result['values'][0]['count'], expected['values'][0]['count'])
    self.assertEqual(result['values'][0]['value'], expected['values'][0]['value'])

    # findSeries with the expr[] (Rails/jQuery) parameter style.
    expected = ['test.a;blah=blah;hello=tiger']
    response = self.client.get(
        url +
        '/findSeries?expr[]=name=test.a&expr[]=hello=tiger&expr[]=blah=blah&pretty=1'
    )
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertEqual(response.content,
                     json.dumps(expected, indent=2, sort_keys=True))

    # Delete the series; endpoint returns a bare JSON boolean.
    expected = True
    response = self.client.post(url + '/delSeries',
                                {'path': 'test.a;blah=blah;hello=tiger'})
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertEqual(response.content, json.dumps(expected))

    # After deletion, the same find (plain expr params) matches nothing.
    expected = []
    response = self.client.get(
        url + '/findSeries?expr=name=test.a&expr=hello=tiger&expr=blah=blah')
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertEqual(response.content,
                     json.dumps(expected, indent=2, sort_keys=True))
def find_view(request):
    "View for finding metrics matching a given pattern"
    profile = getProfile(request)
    format = request.REQUEST.get('format', 'treejson')
    local_only = int(request.REQUEST.get('local', 0))
    contexts = int(request.REQUEST.get('contexts', 0))
    wildcards = int(request.REQUEST.get('wildcards', 0))
    automatic_variants = int(request.REQUEST.get('automatic_variants', 0))

    try:
        query = str(request.REQUEST['query'])
    except:
        return HttpResponseBadRequest(
            content="Missing required parameter 'query'", mimetype="text/plain")

    # base_path is everything up to the last dot, used by tree_json to
    # build node paths relative to the query.
    if '.' in query:
        base_path = query.rsplit('.', 1)[0] + '.'
    else:
        base_path = ''

    # local=1 restricts the search to this host's store.
    if local_only:
        store = LOCAL_STORE
    else:
        store = STORE

    if format == 'completer':
        # Turn partial input into a wildcard query.
        query = query.replace('..', '*.')
        if not query.endswith('*'):
            query += '*'
        if automatic_variants:
            # Brace-expand comma-separated parts, e.g. a,b -> {a,b}.
            query_parts = query.split('.')
            for i, part in enumerate(query_parts):
                if ',' in part and '{' not in part:
                    query_parts[i] = '{%s}' % part
            query = '.'.join(query_parts)

    try:
        matches = list(store.find(query))
    except:
        log.exception()
        raise

    log.info('find_view query=%s local_only=%s matches=%d' %
             (query, local_only, len(matches)))
    matches.sort(key=lambda node: node.name)

    if format == 'treejson':
        content = tree_json(matches, base_path,
                            wildcards=profile.advancedUI or wildcards,
                            contexts=contexts)
        response = HttpResponse(content, mimetype='application/json')
    elif format == 'pickle':
        content = pickle_nodes(matches, contexts=contexts)
        response = HttpResponse(content, mimetype='application/pickle')
    elif format == 'completer':
        #if len(matches) == 1 and (not matches[0].isLeaf()) and query == matches[0].metric_path + '*': # auto-complete children
        #  matches = list( store.find(query + '.*') )
        results = []
        for node in matches:
            node_info = dict(path=node.metric_path, name=node.name,
                             is_leaf=str(int(node.isLeaf())))
            # Branch nodes get a trailing dot so the completer keeps drilling.
            if not node.isLeaf():
                node_info['path'] += '.'
            results.append(node_info)

        if len(results) > 1 and wildcards:
            wildcardNode = {'name': '*'}
            results.append(wildcardNode)

        content = json.dumps({'metrics': results})
        response = HttpResponse(content, mimetype='application/json')
    else:
        return HttpResponseBadRequest(
            content="Invalid value for 'format' parameter", mimetype="text/plain")

    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    return response
def json_response(obj):
    """Wrap *obj*, serialized to JSON, in an application/json response."""
    payload = json.dumps(obj)
    return HttpResponse(content=payload, content_type='application/json')
def renderView(request):
    """Graphite render endpoint (legacy, Python-2-only syntax).

    Checks the request cache, evaluates targets, then dispatches on the
    requested output (pickle/csv/json/raw/svg) or renders an image.
    """
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'localOnly': requestOptions['localOnly'],
        'data': []
    }
    data = requestContext['data']

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                # Literal "name:value" slices.
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except:
                    # Python 2 raise syntax — this module predates py3.
                    raise ValueError, "Invalid target '%s'" % target
                data.append((name, value))
            else:
                seriesList = evaluateTarget(requestContext, target)
                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name, func(requestContext, series) or 0))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            requestContext['data'] = data = cachedData
        else:  # Have to actually retrieve the data now
            for target in requestOptions['targets']:
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
                data.extend(seriesList)
            if useCache:
                cache.set(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    if 'pickle' in requestOptions:
        response = HttpResponse(mimetype='application/pickle')
        seriesInfo = [series.getInfo() for series in data]
        pickle.dump(seriesInfo, response, protocol=-1)
        log.rendering('Total pickle rendering time %.6f' % (time() - start))
        return response

    format = requestOptions.get('format')
    if format == 'csv':
        response = HttpResponse(mimetype='text/csv')
        writer = csv.writer(response, dialect='excel')
        for series in data:
            for i, value in enumerate(series):
                # Timestamps rendered in server-local time here (no tzinfo
                # option in this version).
                timestamp = localtime(series.start + (i * series.step))
                writer.writerow(
                    (series.name, strftime("%Y-%m-%d %H:%M:%S", timestamp), value))
        return response

    if format == 'json':
        series_data = []
        for series in data:
            timestamps = range(series.start, series.end, series.step)
            datapoints = zip(series, timestamps)
            series_data.append(dict(target=series.name, datapoints=datapoints))

        if 'jsonp' in requestOptions:
            response = HttpResponse(
                content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
                mimetype='text/javascript')
        else:
            response = HttpResponse(content=json.dumps(series_data),
                                    mimetype='application/json')

        response['Pragma'] = 'no-cache'
        response['Cache-Control'] = 'no-cache'
        return response

    if format == 'raw':
        response = HttpResponse(mimetype='text/plain')
        for series in data:
            response.write("%s,%d,%d,%d|" % (series.name, series.start,
                                             series.end, series.step))
            response.write(','.join(map(str, series)))
            response.write('\n')
        log.rendering('Total rawData rendering time %.6f' % (time() - start))
        return response

    if format == 'svg':
        graphOptions['outputFormat'] = 'svg'

    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions)
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)

    useSVG = graphOptions.get('outputFormat') == 'svg'
    if useSVG and 'jsonp' in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
            mimetype='text/javascript')
    else:
        response = buildResponse(image, useSVG and 'image/svg+xml' or 'image/png')

    if useCache:
        cache.set(requestKey, response, cacheTimeout)

    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
def renderView(request):
    """Graphite render endpoint (intermediate version, Python 2 era).

    Accepts options either as a 'json_request' parameter, an AJAX POST
    body, or standard query parameters; then caches, evaluates and
    dispatches on the requested output format.
    """
    start = time()
    # Three ways to supply the render options.
    if request.REQUEST.has_key('json_request'):
        (graphOptions, requestOptions) = parseDataOptions(
            request.REQUEST['json_request'])
    elif request.is_ajax() and request.method == 'POST':
        (graphOptions, requestOptions) = parseDataOptions(request.raw_post_data)
    else:
        (graphOptions, requestOptions) = parseOptions(request)

    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'localOnly': requestOptions['localOnly'],
        'data': []
    }
    data = requestContext['data']

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' % (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                # Literal "name:value" slices.
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                seriesList = evaluateTarget(requestContext, target)
                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name, func(requestContext, series) or 0))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            requestContext['data'] = data = cachedData
        else:  # Have to actually retrieve the data now
            for target in requestOptions['targets']:
                if not target.strip():
                    continue
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
                data.extend(seriesList)
            if useCache:
                cache.add(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    format = requestOptions.get('format')
    if format == 'csv':
        response = HttpResponse(content_type='text/csv')
        writer = csv.writer(response, dialect='excel')
        for series in data:
            for i, value in enumerate(series):
                timestamp = datetime.fromtimestamp(
                    series.start + (i * series.step), requestOptions['tzinfo'])
                writer.writerow(
                    (series.name, timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))
        return response

    if format == 'json':
        series_data = []
        if 'maxDataPoints' in requestOptions and any(data):
            # Consolidate each series down to at most maxDataPoints points.
            startTime = min([series.start for series in data])
            endTime = max([series.end for series in data])
            timeRange = endTime - startTime
            maxDataPoints = requestOptions['maxDataPoints']
            for series in data:
                numberOfDataPoints = timeRange / series.step
                if maxDataPoints < numberOfDataPoints:
                    valuesPerPoint = math.ceil(
                        float(numberOfDataPoints) / float(maxDataPoints))
                    secondsPerPoint = int(valuesPerPoint * series.step)
                    # Nudge start over a little bit so that the consolidation bands align with each call
                    # removing 'jitter' seen when refreshing.
                    nudge = secondsPerPoint + (series.start % series.step) - (
                        series.start % secondsPerPoint)
                    series.start = series.start + nudge
                    valuesToLose = int(nudge / series.step)
                    # NOTE(review): range(1, n) drops n-1 points, not n —
                    # inherited behavior; confirm before changing.
                    for r in range(1, valuesToLose):
                        del series[0]
                    series.consolidate(valuesPerPoint)
                    timestamps = range(series.start, series.end, secondsPerPoint)
                else:
                    timestamps = range(series.start, series.end, series.step)
                datapoints = zip(series, timestamps)
                series_data.append(dict(target=series.name, datapoints=datapoints))
        else:
            for series in data:
                timestamps = range(series.start, series.end, series.step)
                datapoints = zip(series, timestamps)
                series_data.append(dict(target=series.name, datapoints=datapoints))

        if 'jsonp' in requestOptions:
            response = HttpResponse(
                content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
                content_type='text/javascript')
        else:
            response = HttpResponse(content=json.dumps(series_data),
                                    content_type='application/json')

        response['Pragma'] = 'no-cache'
        response['Cache-Control'] = 'no-cache'
        return response

    if format == 'raw':
        response = HttpResponse(content_type='text/plain')
        for series in data:
            response.write("%s,%d,%d,%d|" % (series.name, series.start,
                                             series.end, series.step))
            response.write(','.join(map(str, series)))
            response.write('\n')
        log.rendering('Total rawData rendering time %.6f' % (time() - start))
        return response

    if format == 'svg':
        graphOptions['outputFormat'] = 'svg'

    if format == 'pickle':
        response = HttpResponse(content_type='application/pickle')
        seriesInfo = [series.getInfo() for series in data]
        pickle.dump(seriesInfo, response, protocol=-1)
        log.rendering('Total pickle rendering time %.6f' % (time() - start))
        return response

    # add template to graphOptions
    try:
        user_profile = getProfile(request, allowDefault=False)
        graphOptions['defaultTemplate'] = user_profile.defaultTemplate
    except:
        # Fall back to the default template when there is no user profile.
        graphOptions['defaultTemplate'] = "default"

    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions)
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)

    useSVG = graphOptions.get('outputFormat') == 'svg'
    if useSVG and 'jsonp' in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
            content_type='text/javascript')
    else:
        response = buildResponse(image, 'image/svg+xml' if useSVG else 'image/png')

    if useCache:
        cache.set(requestKey, response, cacheTimeout)

    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
def test_tag_views(self):
    """End-to-end exercise of the tag HTTP API.

    Walks the endpoints in dependency order: tag two series via
    ``/tagSeries``, then list tags, fetch tag details, ``/findSeries``,
    both autocomplete endpoints, and finally ``/delSeries`` (including a
    re-delete of an already-removed series), checking status codes,
    Content-Type headers and JSON bodies along the way.
    """
    base = reverse('tagList')

    def check_json(resp, status=None):
        # Optionally pin the status code, always require a JSON response.
        if status is not None:
            self.assertEqual(resp.status_code, status)
        self.assertEqual(resp['Content-Type'], 'application/json')
        return resp

    def check_pretty_body(resp, payload):
        # All "pretty" endpoints serialize with indent=2 / sorted keys.
        self.assertEqual(resp.content,
                         json.dumps(payload, indent=2, sort_keys=True))

    ## tagSeries
    # GET is not an accepted method
    resp = self.client.get(base + '/tagSeries',
                           {'path': 'test.a;hello=tiger;blah=blah'})
    self.assertEqual(resp.status_code, 405)

    # the "path" parameter is mandatory
    check_json(self.client.post(base + '/tagSeries', {}), status=400)

    # a well-formed POST echoes back the canonicalized (tag-sorted) path
    resp = check_json(
        self.client.post(base + '/tagSeries',
                         {'path': 'test.a;hello=tiger;blah=blah'}),
        status=200)
    check_pretty_body(resp, 'test.a;blah=blah;hello=tiger')

    ## list tags
    # PUT is not an accepted method
    self.assertEqual(self.client.put(base, {}).status_code, 405)

    # filtered list, plain and pretty, both return the single "hello" tag
    for params in ({'filter': 'hello$'}, {'filter': 'hello$', 'pretty': 1}):
        resp = check_json(self.client.get(base, params), status=200)
        tags = json.loads(resp.content)
        self.assertEqual(len(tags), 1)
        self.assertEqual(tags[0]['tag'], 'hello')

    ## tag details
    # PUT is not an accepted method
    self.assertEqual(self.client.put(base + '/hello', {}).status_code, 405)

    # detail view, plain and pretty, reports the single tagged value
    for params in ({'filter': 'tiger$'}, {'filter': 'tiger$', 'pretty': 1}):
        resp = check_json(self.client.get(base + '/hello', params),
                          status=200)
        detail = json.loads(resp.content)
        self.assertEqual(detail['tag'], 'hello')
        self.assertEqual(len(detail['values']), 1)
        self.assertEqual(detail['values'][0]['count'], 1)
        self.assertEqual(detail['values'][0]['value'], 'tiger')

    ## findSeries
    # PUT is not an accepted method
    self.assertEqual(
        self.client.put(base + '/findSeries', {}).status_code, 405)

    # at least one expr is required
    check_json(self.client.post(base + '/findSeries', {}), status=400)

    # basic find using the expr[] array form
    resp = check_json(
        self.client.get(base + '/findSeries?expr[]=name=test.a&expr[]=hello=tiger&expr[]=blah=blah&pretty=1'),
        status=200)
    check_pretty_body(resp, ['test.a;blah=blah;hello=tiger'])

    # tag a second series so autocomplete has two values to choose from
    resp = check_json(
        self.client.post(base + '/tagSeries',
                         {'path': 'test.a;hello=lion;blah=blah'}),
        status=200)
    check_pretty_body(resp, 'test.a;blah=blah;hello=lion')

    ## autocomplete tags
    self.assertEqual(
        self.client.put(base + '/autoComplete/tags', {}).status_code, 405)

    resp = check_json(
        self.client.get(base + '/autoComplete/tags?tagPrefix=hello&pretty=1'))
    check_pretty_body(resp, ['hello'])

    resp = check_json(
        self.client.get(base + '/autoComplete/tags?expr[]=name=test.a&pretty=1'))
    check_pretty_body(resp, ['blah', 'hello'])

    resp = check_json(
        self.client.get(base + '/autoComplete/tags?expr=name=test.a&tagPrefix=hell&pretty=1'),
        status=200)
    check_pretty_body(resp, ['hello'])

    ## autocomplete values
    self.assertEqual(
        self.client.put(base + '/autoComplete/values', {}).status_code, 405)

    # the tag parameter is mandatory
    resp = check_json(self.client.get(base + '/autoComplete/values', {}),
                      status=400)
    self.assertEqual(resp.content, json.dumps({'error': 'no tag specified'}))

    resp = check_json(
        self.client.get(base + '/autoComplete/values?tag=hello&pretty=1'))
    check_pretty_body(resp, ['lion', 'tiger'])

    resp = check_json(
        self.client.get(base + '/autoComplete/values?expr[]=name=test.a&tag=hello&pretty=1'))
    check_pretty_body(resp, ['lion', 'tiger'])

    resp = check_json(
        self.client.get(base + '/autoComplete/values?expr=name=test.a&tag=hello&valuePrefix=li&pretty=1'),
        status=200)
    check_pretty_body(resp, ['lion'])

    ## delSeries
    # PUT is not an accepted method
    self.assertEqual(
        self.client.put(base + '/delSeries', {}).status_code, 405)

    # the "path" parameter is mandatory
    check_json(self.client.post(base + '/delSeries', {}), status=400)

    # delete both series; deleting an already-deleted series also
    # reports success (true)
    for path in ('test.a;blah=blah;hello=tiger',
                 'test.a;blah=blah;hello=lion',
                 'test.a;blah=blah;hello=lion'):
        resp = check_json(
            self.client.post(base + '/delSeries', {'path': path}),
            status=200)
        self.assertEqual(resp.content, json.dumps(True))

    # once everything is deleted the find comes back empty
    resp = check_json(
        self.client.get(base + '/findSeries?expr=name=test.a&expr=hello=tiger&expr=blah=blah'),
        status=200)
    check_pretty_body(resp, [])