def reload(self):
    """Re-read the index file from disk and rebuild the in-memory metric tree.

    Each non-empty line of the index file is a dot-delimited metric path.
    Intermediate components become branch nodes; the final component is
    stored as a leaf whose data is the full path line.  On completion,
    self._tree is replaced wholesale and self.last_mtime is refreshed from
    the file's mtime.
    """
    log.info("[IndexSearcher] reading index data from %s" % self.index_path)
    t = time.time()
    total_entries = 0
    tree = (None, {})  # (data, children)
    # Use a context manager so the file handle is closed deterministically
    # instead of waiting for garbage collection.
    with open(self.index_path) as index_file:
        for line in index_file:
            line = line.strip()
            if not line:
                continue
            branches = line.split('.')
            leaf = branches.pop()
            cursor = tree
            for branch in branches:
                if branch not in cursor[1]:
                    cursor[1][branch] = (None, {})  # (data, children)
                cursor = cursor[1][branch]
            # NOTE: if a leaf name also exists as a branch, this replaces its
            # children — preserved from the original implementation.
            cursor[1][leaf] = (line, {})
            total_entries += 1
    self._tree = tree
    self.last_mtime = os.path.getmtime(self.index_path)
    log.info("[IndexSearcher] index reload took %.6f seconds (%d entries)" %
             (time.time() - t, total_entries))
def _dosave(request, viewName):
    """Persist the window layout carried in request.GET as the named View.

    Creates the View for the requesting user's profile if it does not exist
    yet, drops its previous Window set, then recreates one Window per GET
    parameter (skipping jQuery's '_' cache-buster and 'commandInput').
    Processing is best-effort per window: a failure in one window is logged
    and does not abort the others.
    """
    profile = getProfile(request)
    # First find our View
    log.info("Saving view '%s' under profile '%s'" % (viewName, profile.user.username))
    try:
        view = profile.view_set.get(name=viewName)
    except ObjectDoesNotExist:
        view = View(profile=profile, name=viewName)
        view.save()
    # Now re-associate the view with the correct Windows
    view.window_set.all().delete()
    for windowName, encodedString in request.GET.items():
        try:
            if windowName in ('_', 'commandInput'):
                continue
            paramString = urllib.unquote_plus(encodedString)
            queryParams = cgi.parse_qs(paramString)
            modelParams = {}
            for key, value in queryParams.items():
                # Clean up the window params
                key = str(key)
                value = str(value[0])
                if key in ('top', 'left'):
                    value = int(float(value.replace('px', '')))
                if key in ('width', 'height', 'interval'):
                    value = int(float(value))
                modelParams[key] = value
            if 'interval' not in modelParams:
                modelParams['interval'] = None
            win = Window(view=view, name=windowName, **modelParams)
            win.save()
        except Exception:
            # Narrowed from a bare except: keeps the per-window best-effort
            # behavior without swallowing SystemExit/KeyboardInterrupt.
            log.exception("Failed to process parameters for window '%s'" % windowName)
    return stdout('Saved view %s' % viewName)
def tree(self):
    """Return the in-memory index tree.

    If the index file on disk has been modified since the last load, the
    index is reloaded first so callers always see fresh data.
    """
    mtime_on_disk = os.path.getmtime(self.index_path)
    if mtime_on_disk <= self.last_mtime:
        # Fast path: index is still fresh.
        return self._tree
    log.info("[IndexSearcher] reloading stale index, current_mtime=%s last_mtime=%s" %
             (mtime_on_disk, self.last_mtime))
    self.reload()
    return self._tree
def __init__(self, index_path):
    """Prepare the searcher over the index file at index_path.

    If the index file is missing, it is first created empty and then
    populated by shelling out to bin/build-index.sh; the initial index
    load always runs afterwards.
    """
    self.index_path = index_path
    if not os.path.exists(index_path):
        # Touch the file immediately to prevent re-entry down this code path.
        open(index_path, 'w').close()
        # HACK: forking a shell to build the index is less than ideal; this
        # should really be handled natively.
        builder = os.path.join(settings.GRAPHITE_ROOT, "bin/build-index.sh")
        if subprocess.call(builder) != 0:
            log.exception("Couldn't build index file %s" % index_path)
            raise RuntimeError("Couldn't build index file %s" % index_path)
    self.last_mtime = 0
    self._tree = (None, {})  # (data, children)
    log.info("[IndexSearcher] performing initial index load")
    self.reload()
def myGraphLookup(request):
    """View for My Graphs navigation.

    Returns one JSON tree level of the user's saved-graph hierarchy rooted
    at the 'path' GET parameter.  Graph names are dot-delimited: a name
    with a further '.' beyond the requested prefix is rendered as a branch,
    otherwise as a leaf.  On any failure the error is logged and a
    placeholder "No saved graphs" leaf is returned instead of a 500.
    """
    profile = getProfile(request, allowDefault=False)
    assert profile
    nodes = []
    leafNode = {
        'allowChildren': 0,
        'expandable': 0,
        'leaf': 1,
    }
    branchNode = {
        'allowChildren': 1,
        'expandable': 1,
        'leaf': 0,
    }
    try:
        path = str(request.GET['path'])
        if path:
            if path.endswith('.'):
                userpath_prefix = path
            else:
                userpath_prefix = path + '.'
        else:
            userpath_prefix = ""

        matches = [graph for graph in profile.mygraph_set.all().order_by('name')
                   if graph.name.startswith(userpath_prefix)]
        log.info("myGraphLookup: username=%s, path=%s, userpath_prefix=%s, %ld graph to process" %
                 (profile.user.username, path, userpath_prefix, len(matches)))

        # Dedupe so each branch/leaf name appears at most once in the level.
        branch_inserted = set()
        leaf_inserted = set()
        for graph in matches:
            # Now let's add the matching graph
            isBranch = False
            dotPos = graph.name.find('.', len(userpath_prefix))
            if dotPos >= 0:
                isBranch = True
                name = graph.name[len(userpath_prefix):dotPos]
                if name in branch_inserted:
                    continue
                branch_inserted.add(name)
            else:
                name = graph.name[len(userpath_prefix):]
                if name in leaf_inserted:
                    continue
                leaf_inserted.add(name)

            node = {'text': str(name)}
            if isBranch:
                node.update({'id': str(userpath_prefix + name + '.')})
                node.update(branchNode)
            else:
                # Leaf ids are hashed to keep them unique and URL-safe.
                m = md5()
                m.update(name)
                node.update({'id': str(userpath_prefix + m.hexdigest()),
                             'graphUrl': str(graph.url)})
                node.update(leafNode)
            nodes.append(node)
    except Exception:
        # Narrowed from a bare except: still degrades gracefully, but no
        # longer traps SystemExit/KeyboardInterrupt.
        log.exception("browser.views.myGraphLookup(): could not complete request.")

    if not nodes:
        no_graphs = {'text': "No saved graphs", 'id': 'no-click'}
        no_graphs.update(leafNode)
        nodes.append(no_graphs)

    return json_response(nodes, request)
# NOTE(review): the first two lines below are the tail of getProfile(),
# whose definition begins before this chunk — left untouched.
elif allowDefault:
    return defaultProfile


def getProfileByUsername(username):
    # Look up the Profile for a username.  Returns None when either the
    # User or its Profile row is missing.
    try:
        user = User.objects.get(username=username)
        return Profile.objects.get(user=user)
    except ObjectDoesNotExist:
        return None


# Module-level bootstrap: make sure the shared default user and profile
# exist, skipped on ReadTheDocs builds (no database there).
# NOTE(review): original indentation was lost; it looks like both try
# blocks sit under the READTHEDOCS guard — confirm against upstream.
if not environ.get('READTHEDOCS'):
    try:
        defaultUser = User.objects.get(username='******')
    except User.DoesNotExist:
        log.info("Default user does not exist, creating it...")
        randomPassword = User.objects.make_random_password(length=16)
        defaultUser = User.objects.create_user('default','*****@*****.**',randomPassword)
        defaultUser.save()
    try:
        defaultProfile = Profile.objects.get(user=defaultUser)
    except Profile.DoesNotExist:
        log.info("Default profile does not exist, creating it...")
        defaultProfile = Profile(user=defaultUser)
        defaultProfile.save()

# This whole song & dance is due to pickle being insecure
# The SafeUnpickler classes were largely derived from
# http://nadiana.com/python-pickle-insecure
# This code also lives in carbon.util
def find_view(request):
    """View for finding metrics matching a given pattern.

    Supported 'format' values: 'treejson' (default), 'pickle', and
    'completer'.  Returns HTTP 400 when the required 'query' parameter is
    missing or 'format' is unrecognised.
    """
    profile = getProfile(request)
    format = request.REQUEST.get('format', 'treejson')
    local_only = int(request.REQUEST.get('local', 0))
    contexts = int(request.REQUEST.get('contexts', 0))
    wildcards = int(request.REQUEST.get('wildcards', 0))
    automatic_variants = int(request.REQUEST.get('automatic_variants', 0))

    try:
        query = str(request.REQUEST['query'])
    except KeyError:
        # Narrowed from a bare except: only a missing parameter is expected.
        return HttpResponseBadRequest(content="Missing required parameter 'query'",
                                      mimetype="text/plain")

    if '.' in query:
        base_path = query.rsplit('.', 1)[0] + '.'
    else:
        base_path = ''

    if local_only:
        store = LOCAL_STORE
    else:
        store = STORE

    if format == 'completer':
        query = query.replace('..', '*.')
        if not query.endswith('*'):
            query += '*'

        if automatic_variants:
            # Turn comma lists into brace alternatives, e.g. a,b -> {a,b}
            query_parts = query.split('.')
            for i, part in enumerate(query_parts):
                if ',' in part and '{' not in part:
                    query_parts[i] = '{%s}' % part
            query = '.'.join(query_parts)

    try:
        matches = list(store.find(query))
    except Exception:
        # Narrowed from a bare except: log with traceback, then propagate.
        log.exception()
        raise

    log.info('find_view query=%s local_only=%s matches=%d' % (query, local_only, len(matches)))
    matches.sort(key=lambda node: node.name)

    if format == 'treejson':
        content = tree_json(matches, base_path,
                            wildcards=profile.advancedUI or wildcards,
                            contexts=contexts)
        response = HttpResponse(content, mimetype='application/json')

    elif format == 'pickle':
        content = pickle_nodes(matches, contexts=contexts)
        response = HttpResponse(content, mimetype='application/pickle')

    elif format == 'completer':
        #if len(matches) == 1 and (not matches[0].isLeaf()) and query == matches[0].metric_path + '*': # auto-complete children
        #  matches = list( store.find(query + '.*') )
        results = []
        for node in matches:
            node_info = dict(path=node.metric_path, name=node.name,
                             is_leaf=str(int(node.isLeaf())))
            if not node.isLeaf():
                # Trailing dot marks a branch for the completer UI.
                node_info['path'] += '.'
            results.append(node_info)

        if len(results) > 1 and wildcards:
            wildcardNode = {'name': '*'}
            results.append(wildcardNode)

        content = json.dumps({'metrics': results})
        response = HttpResponse(content, mimetype='application/json')

    else:
        return HttpResponseBadRequest(content="Invalid value for 'format' parameter",
                                      mimetype="text/plain")

    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    return response