def _dosave(request, viewName):
    """Persist the window layout named *viewName* for the requesting user's profile.

    Each GET parameter (other than the ignored '_' and 'commandInput' keys) is an
    URL-encoded query string describing one Window; bad window parameters are
    logged and skipped rather than aborting the whole save.
    """
    profile = getProfile(request)
    #First find our View
    log.info("Saving view '%s' under profile '%s'" % (viewName, profile.user.username))
    try:
        view = profile.view_set.get(name=viewName)
    except ObjectDoesNotExist:
        view = View(profile=profile, name=viewName)
        view.save()
    #Now re-associate the view with the correct Windows
    view.window_set.all().delete()
    for windowName, encodedString in request.GET.items():
        if windowName in ('_', 'commandInput'):
            continue
        try:
            decoded = urllib.unquote_plus(encodedString)
            fields = {}
            for key, values in cgi.parse_qs(decoded).items():
                key = str(key)
                value = str(values[0])
                # Pixel offsets may carry a 'px' suffix; sizes/interval are plain numbers.
                if key in ('top', 'left'):
                    value = int(float(value.replace('px', '')))
                elif key in ('width', 'height', 'interval'):
                    value = int(float(value))
                fields[key] = value
            fields.setdefault('interval', None)
            Window(view=view, name=windowName, **fields).save()
        except:
            # Best-effort: a malformed window must not prevent saving the others.
            log.exception("Failed to process parameters for window '%s'" % windowName)
    return stdout('Saved view %s' % viewName)
def set_metadata_view(request):
    """Set a metadata key on metrics via CarbonLink and return per-metric JSON results.

    GET sets a single metric's key/value; POST accepts a JSON list of
    {metric, key, value} operations (either as a raw JSON body or in the
    'operations' form field). Failures are reported per metric, not raised.
    """
    results = {}
    if request.method == 'GET':
        metric = request.GET['metric']
        key = request.GET['key']
        value = request.GET['value']
        try:
            results[metric] = CarbonLink.set_metadata(metric, key, value)
        except:
            log.exception()
            results[metric] = dict(error="Unexpected error occurred in CarbonLink.set_metadata(%s, %s)" % (metric, key))
    elif request.method == 'POST':
        # The operations list may arrive as the raw body or as a form field.
        if request.META.get('CONTENT_TYPE') == 'application/json':
            payload = request.raw_post_data
        else:
            payload = request.POST['operations']
        for op in json.loads(payload):
            metric = None
            try:
                metric, key, value = op['metric'], op['key'], op['value']
                results[metric] = CarbonLink.set_metadata(metric, key, value)
            except:
                log.exception()
                # Only report the error if we got far enough to know the metric.
                if metric:
                    results[metric] = dict(error="Unexpected error occurred in bulk CarbonLink.set_metadata(%s)" % metric)
    else:
        results = dict(error="Invalid request method")
    return HttpResponse(json.dumps(results), mimetype='application/json')
def fetchData(requestContext, pathExpr):
    """Return a list of TimeSeries for every metric matching *pathExpr*.

    Reads from the local or full store depending on requestContext['localOnly'],
    and merges in cached datapoints from carbon when the cache query succeeds.
    """
    startTime = requestContext['startTime']
    endTime = requestContext['endTime']
    store = LOCAL_STORE if requestContext['localOnly'] else STORE
    seriesList = []
    for dbFile in store.find(pathExpr):
        log.metric_access(dbFile.metric_path)
        dbResults = dbFile.fetch( timestamp(startTime), timestamp(endTime) )
        try:
            results = mergeResults(dbResults, CarbonLink.query(dbFile.real_metric))
        except:
            # Cache unavailable: fall back to whatever the database returned.
            log.exception()
            results = dbResults
        if not results:
            continue
        (start, end, step), values = results
        series = TimeSeries(dbFile.metric_path, start, end, step, values)
        #hack to pass expressions through to render functions
        series.pathExpression = pathExpr
        seriesList.append(series)
    return seriesList
def get_metadata_view(request):
    """Look up one metadata *key* for each requested metric and return JSON results.

    Errors are captured per metric so one failing lookup does not break the batch.
    """
    key = request.REQUEST['key']
    requested = request.REQUEST.getlist('metric')
    results = {}
    for metric in requested:
        try:
            results[metric] = CarbonLink.get_metadata(metric, key)
        except:
            log.exception()
            results[metric] = dict(error="Unexpected error occurred in CarbonLink.get_metadata(%s, %s)" % (metric, key))
    return HttpResponse(json.dumps(results), mimetype='application/json')
def renderLocalView(request):
    """Render a graph on behalf of a delegating peer host.

    The POST body is "<graphType>\\n<pickled options>": the first line names an
    entry in GraphTypes, the remainder is the pickled options payload. Returns
    the rendered image response, or a 500 on any failure.
    """
    try:
        start = time()
        reqParams = StringIO(request.raw_post_data)
        graphType = reqParams.readline().strip()  # first line selects the graph class
        optionsPickle = reqParams.read()          # remainder is the pickled options
        reqParams.close()
        graphClass = GraphTypes[graphType]
        # NOTE(review): this unpickles data received over the network; 'unpickle'
        # is presumably a restricted unpickler rather than the stdlib pickle
        # module -- confirm, since stdlib pickle.loads on untrusted input allows
        # arbitrary code execution.
        options = unpickle.loads(optionsPickle)
        image = doImageRender(graphClass, options)
        log.rendering("Delegated rendering request took %.6f seconds" % (time() - start))
        return buildResponse(image)
    except:
        # Any failure (unknown graph type, bad pickle, render error) becomes a 500.
        log.exception("Exception in graphite.render.views.rawrender")
        return HttpResponseServerError()
def __init__(self, index_path):
    """Create a searcher backed by *index_path*, building the index file if absent."""
    self.index_path = index_path
    if not os.path.exists(index_path):
        # touch the file to prevent re-entry down this code path
        open(index_path, 'w').close()
        #XXX This is garbagy, should been handled natively, forking a shell to
        #build the index is less than ideal
        script = os.path.join(settings.GRAPHITE_ROOT, "bin/build-index.sh")
        if subprocess.call(script) != 0:
            log.exception("Couldn't build index file %s" % index_path)
            raise RuntimeError("Couldn't build index file %s" % index_path)
    self.last_mtime = 0
    self._tree = (None, {})  # (data, children)
    log.info("[IndexSearcher] performing initial index load")
    self.reload()
def _dogsave(request, graphName):
    """Create or overwrite the MyGraph named *graphName* for the logged-in user.

    Requires an authenticated profile and a 'url' GET parameter; returns a
    stdout/stderr-style response describing the outcome.
    """
    profile = getProfile(request, allowDefault=False)
    if not profile:
        return stderr("You must be logged in to save graphs")
    url = request.GET.get('url')
    if not url:
        return stderr("No url specified!")
    try:
        existing = profile.mygraph_set.get(name=graphName)
    except ObjectDoesNotExist:
        # No graph by that name yet: create one.
        try:
            MyGraph(profile=profile, name=graphName, url=url).save()
        except:
            log.exception("Failed to create new MyGraph in _dogsave(), graphName=%s" % graphName)
            return stderr("Failed to save graph %s" % graphName)
    else:
        existing.url = url
        existing.save()
    return stdout("Saved graph %s" % graphName)
def mygraph(request):
    """Composer endpoint that saves or deletes a user's named graph.

    GET parameters: 'action' ('save' or 'delete'), 'graphName', and for saves
    a 'url'. Returns plain-text status responses.
    """
    profile = getProfile(request, allowDefault=False)
    if not profile:
        return HttpResponse( "You are not logged in!" )
    action = request.GET['action']
    graphName = request.GET['graphName']
    if not graphName:
        return HttpResponse("You must type in a graph name.")
    if action == 'save':
        url = request.GET['url']
        try:
            graph = profile.mygraph_set.get(name=graphName)
        except ObjectDoesNotExist:
            # First save under this name: create the record.
            try:
                MyGraph(profile=profile, name=graphName, url=url).save()
            except:
                log.exception("Failed to create new MyGraph in /graphite/composer/mygraph/, graphName=%s" % graphName)
                return HttpResponse("Failed to save graph %s" % graphName)
        else:
            graph.url = url
            graph.save()
        return HttpResponse("SAVED")
    if action == 'delete':
        try:
            profile.mygraph_set.get(name=graphName).delete()
        except ObjectDoesNotExist:
            return HttpResponse("No such graph '%s'" % graphName)
        return HttpResponse("DELETED")
    return HttpResponse("Invalid operation '%s'" % action)
def delegateRendering(graphType, graphOptions):
    """POST a render job to one of settings.RENDERING_HOSTS and return the PNG bytes.

    Hosts are tried in shuffled order; per-host connections are pooled in
    connectionPools and returned to the pool only on success. If every host
    fails, falls off the loop and implicitly returns None.
    """
    start = time()
    # Wire format: graph type name on the first line, pickled options after it.
    postData = graphType + '\n' + pickle.dumps(graphOptions)
    servers = settings.RENDERING_HOSTS[:]  #make a copy so we can shuffle it safely
    shuffle(servers)
    for server in servers:
        start2 = time()
        try:
            # Get a connection
            try:
                pool = connectionPools[server]
            except KeyError:  #happens the first time
                pool = connectionPools[server] = set()
            try:
                connection = pool.pop()
            except KeyError:  #No available connections, have to make a new one
                connection = HTTPConnectionWithTimeout(server)
                connection.timeout = settings.REMOTE_RENDER_CONNECT_TIMEOUT
            # Send the request
            try:
                connection.request('POST','/graphite/render/local/', postData)
            except CannotSendRequest:
                # Pooled connection went stale; replace it and retry once.
                connection = HTTPConnectionWithTimeout(server)  #retry once
                connection.timeout = settings.REMOTE_RENDER_CONNECT_TIMEOUT
                connection.request('POST', '/graphite/render/local/', postData)
            # Read the response
            response = connection.getresponse()
            assert response.status == 200, "Bad response code %d from %s" % (response.status,server)
            contentType = response.getheader('Content-Type')
            imageData = response.read()
            assert contentType == 'image/png', "Bad content type: \"%s\" from %s" % (contentType,server)
            assert imageData, "Received empty response from %s" % server
            # Wrap things up
            log.rendering('Remotely rendered image on %s in %.6f seconds' % (server,time() - start2))
            log.rendering('Spent a total of %.6f seconds doing remote rendering work' % (time() - start))
            # Success: recycle the connection for later requests.
            pool.add(connection)
            return imageData
        except:
            # Any failure (connect, HTTP, bad payload) moves on to the next host;
            # the failed connection is deliberately NOT returned to the pool.
            log.exception("Exception while attempting remote rendering request on %s" % server)
            log.rendering('Exception while remotely rendering on %s wasted %.6f' % (server,time() - start2))
            continue
def _find(current_dir, patterns):
    """Recursively generates absolute paths whose components underneath current_dir
    match the corresponding pattern in patterns"""
    pattern = patterns[0]
    remaining = patterns[1:]
    try:
        entries = os.listdir(current_dir)
    except OSError as e:
        # Unreadable directory: log it and treat as empty.
        log.exception(e)
        entries = []
    subdirs = [entry for entry in entries if isdir( join(current_dir, entry) )]
    matching_subdirs = match_entries(subdirs, pattern)
    # An RRD file can hold several data sources, so when exactly one pattern
    # remains it may name a data source inside a matching .rrd file.
    if len(remaining) == 1 and rrdtool:
        files = [entry for entry in entries if isfile( join(current_dir, entry) )]
        rrd_files = match_entries(files, pattern + ".rrd")
        if rrd_files:  #let's assume it does
            datasource_pattern = remaining[0]
            for rrd_file in rrd_files:
                yield join(current_dir, rrd_file) + DATASOURCE_DELIMETER + datasource_pattern
    if remaining:
        #we've still got more directories to traverse
        for subdir in matching_subdirs:
            for match in _find(join(current_dir, subdir), remaining):
                yield match
    else:
        #we've got the last pattern
        files = [entry for entry in entries if isfile( join(current_dir, entry) )]
        for basename in match_entries(files, pattern + '.*') + matching_subdirs:
            yield join(current_dir, basename)
def myGraphLookup(request):
    "View for My Graphs navigation"
    # Builds one level of the tree browser for the logged-in user's saved graphs:
    # given a dotted 'path' prefix, emits a branch node per distinct next path
    # component and a leaf node (with its graph URL) per exactly-matching graph.
    profile = getProfile(request, allowDefault=False)
    assert profile
    nodes = []
    # Template dicts merged into every emitted node.
    leafNode = { 'allowChildren' : 0, 'expandable' : 0, 'leaf' : 1, }
    branchNode = { 'allowChildren' : 1, 'expandable' : 1, 'leaf' : 0, }
    try:
        path = str( request.GET['path'] )
        # Normalize the prefix so it always ends with '.' (or is empty at the root).
        if path:
            if path.endswith('.'):
                userpath_prefix = path
            else:
                userpath_prefix = path + '.'
        else:
            userpath_prefix = ""
        matches = [ graph for graph in profile.mygraph_set.all().order_by('name') if graph.name.startswith(userpath_prefix) ]
        log.info( "myGraphLookup: username=%s, path=%s, userpath_prefix=%s, %ld graph to process" % (profile.user.username, path, userpath_prefix, len(matches)) )
        # Track emitted names so each branch/leaf appears only once at this level.
        branch_inserted = set()
        leaf_inserted = set()
        for graph in matches:  #Now let's add the matching graph
            isBranch = False
            # A '.' after the prefix means this graph lives deeper: emit a branch.
            dotPos = graph.name.find( '.', len(userpath_prefix) )
            if dotPos >= 0:
                isBranch = True
                name = graph.name[ len(userpath_prefix) : dotPos ]
                if name in branch_inserted: continue
                branch_inserted.add(name)
            else:
                name = graph.name[ len(userpath_prefix): ]
                if name in leaf_inserted: continue
                leaf_inserted.add(name)
            node = {'text' : str(name) }
            if isBranch:
                # Branch ids are the dotted path prefix for the next lookup.
                node.update( { 'id' : str(userpath_prefix + name + '.') } )
                node.update(branchNode)
            else:
                # Leaf ids use an md5 of the name, presumably to keep ids safe for
                # the client-side tree widget -- confirm against the JS consumer.
                m = md5()
                m.update(name)
                node.update( { 'id' : str(userpath_prefix + m.hexdigest()), 'graphUrl' : str(graph.url) } )
                node.update(leafNode)
            nodes.append(node)
    except:
        log.exception("browser.views.myGraphLookup(): could not complete request.")
    # Always return at least a placeholder so the tree widget has something to show.
    if not nodes:
        no_graphs = { 'text' : "No saved graphs", 'id' : 'no-click' }
        no_graphs.update(leafNode)
        nodes.append(no_graphs)
    return json_response(nodes, request)
def userGraphLookup(request):
    "View for User Graphs navigation"
    # Builds one level of the shared "User Graphs" tree: the top level lists
    # users who have saved graphs; below that, one user's graphs are browsed
    # by dotted path exactly like myGraphLookup.
    user = request.GET.get('user')
    path = request.GET['path']
    # Split the incoming path into (username, graph path within that user).
    if user:
        username = user
        graphPath = path[len(username)+1:]
    elif '.' in path:
        username, graphPath = path.split('.', 1)
    else:
        username, graphPath = path, None
    nodes = []
    # Template dicts merged into every emitted node.
    branchNode = { 'allowChildren' : 1, 'expandable' : 1, 'leaf' : 0, }
    leafNode = { 'allowChildren' : 0, 'expandable' : 0, 'leaf' : 1, }
    try:
        if not username:
            # Root level: one branch per user (excluding the default user) who
            # has at least one saved graph.
            profiles = Profile.objects.exclude(user=defaultUser)
            for profile in profiles:
                if profile.mygraph_set.count():
                    node = { 'text' : str(profile.user.username), 'id' : str(profile.user.username) }
                    node.update(branchNode)
                    nodes.append(node)
        else:
            profile = getProfileByUsername(username)
            assert profile, "No profile for username '%s'" % username
            # Normalize the within-user prefix so it ends with '.' (or is empty).
            if graphPath:
                prefix = graphPath.rstrip('.') + '.'
            else:
                prefix = ''
            matches = [ graph for graph in profile.mygraph_set.all().order_by('name') if graph.name.startswith(prefix) ]
            inserted = set()  # emit each next-level name only once
            for graph in matches:
                relativePath = graph.name[ len(prefix): ]
                nodeName = relativePath.split('.')[0]
                if nodeName in inserted:
                    continue
                inserted.add(nodeName)
                if '.' in relativePath:  # branch
                    node = { 'text' : str(nodeName), 'id' : str(username + '.' + prefix + nodeName + '.'), }
                    node.update(branchNode)
                else:  # leaf
                    # Leaf ids use an md5 of the name, presumably for the benefit
                    # of the client-side tree widget -- confirm against consumer.
                    m = md5()
                    m.update(nodeName)
                    node = { 'text' : str(nodeName ), 'id' : str(username + '.' + prefix + m.hexdigest()), 'graphUrl' : str(graph.url), }
                    node.update(leafNode)
                nodes.append(node)
    except:
        log.exception("browser.views.userLookup(): could not complete request for %s" % username)
    # Always return at least a placeholder so the tree widget has something to show.
    if not nodes:
        no_graphs = { 'text' : "No saved graphs", 'id' : 'no-click' }
        no_graphs.update(leafNode)
        nodes.append(no_graphs)
    return json_response(nodes, request)
def find_view(request):
    "View for finding metrics matching a given pattern"
    # Searches the metric store for 'query' and serializes the matches in one of
    # three formats: 'treejson' (tree browser), 'pickle' (peer webapps), or
    # 'completer' (auto-complete widget).
    profile = getProfile(request)
    format = request.REQUEST.get('format', 'treejson')
    local_only = int( request.REQUEST.get('local', 0) )
    contexts = int( request.REQUEST.get('contexts', 0) )
    wildcards = int( request.REQUEST.get('wildcards', 0) )
    automatic_variants = int( request.REQUEST.get('automatic_variants', 0) )
    try:
        query = str( request.REQUEST['query'] )
    except:
        return HttpResponseBadRequest(content="Missing required parameter 'query'", mimetype="text/plain")
    # base_path is everything up to (and including) the last dot of the query;
    # treejson uses it to compute node ids relative to the queried branch.
    if '.' in query:
        base_path = query.rsplit('.', 1)[0] + '.'
    else:
        base_path = ''
    if local_only:
        store = LOCAL_STORE
    else:
        store = STORE
    if format == 'completer':
        # Loosen the query for auto-completion: '..' matches any intermediate
        # component, and a trailing '*' completes the partial final component.
        query = query.replace('..', '*.')
        if not query.endswith('*'):
            query += '*'
        if automatic_variants:
            # Turn comma-separated components into brace alternatives,
            # e.g. 'a,b' -> '{a,b}', unless the user already wrote braces.
            query_parts = query.split('.')
            for i,part in enumerate(query_parts):
                if ',' in part and '{' not in part:
                    query_parts[i] = '{%s}' % part
            query = '.'.join(query_parts)
    try:
        matches = list( store.find(query) )
    except:
        # Log for diagnosis but let the error propagate to the caller.
        log.exception()
        raise
    log.info('find_view query=%s local_only=%s matches=%d' % (query, local_only, len(matches)))
    matches.sort(key=lambda node: node.name)
    if format == 'treejson':
        content = tree_json(matches, base_path, wildcards=profile.advancedUI or wildcards, contexts=contexts)
        response = HttpResponse(content, mimetype='application/json')
    elif format == 'pickle':
        content = pickle_nodes(matches, contexts=contexts)
        response = HttpResponse(content, mimetype='application/pickle')
    elif format == 'completer':
        #if len(matches) == 1 and (not matches[0].isLeaf()) and query == matches[0].metric_path + '*':  # auto-complete children
        #  matches = list( store.find(query + '.*') )
        results = []
        for node in matches:
            node_info = dict(path=node.metric_path, name=node.name, is_leaf=str(int(node.isLeaf())))
            # Non-leaf paths get a trailing dot so the widget keeps descending.
            if not node.isLeaf():
                node_info['path'] += '.'
            results.append(node_info)
        # Offer a '*' entry when several matches exist so the user can select all.
        if len(results) > 1 and wildcards:
            wildcardNode = {'name' : '*'}
            results.append(wildcardNode)
        content = json.dumps({ 'metrics' : results })
        response = HttpResponse(content, mimetype='application/json')
    else:
        return HttpResponseBadRequest(content="Invalid value for 'format' parameter", mimetype="text/plain")
    # Search results change as metrics come and go; forbid caching.
    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    return response