def get_view_all_nodes_inter_trace():
    """Return the most recent inter-trace value for every node.

    Queries the node-trace-mostrecent view and builds a dict keyed by
    nodeid; each entry carries the node's 'inter-trace' value plus a
    1-based 'serial' reflecting view iteration order.
    """
    log.debug("Get most recent view of inter-trace for all nodes")
    db = store.get_bucket()
    all_trace_values = {}
    view_by_node_trace_most_recent = db.view(
        '_design/node-trace-mostrecent/_view/get_node-trace-mostrecent',
        include_docs=True)
    # NOTE: removed dead ping_status/port_status locals left over from a
    # copy-paste of the synthesized-view sibling; they were never used here.
    count = 0
    for node in view_by_node_trace_most_recent:
        count += 1
        document = node['doc']['json']
        inter_trace = documentparser.get_value(document, "inter-trace")
        name = documentparser.get_value(document, "nodeid")
        all_trace_values.update({name: {'inter-trace': inter_trace,
                                        'serial': count}})
    return all_trace_values
def get_view_all_nodes_synthesized_most_recent():
    """Return the most recent synthesized (ping/port) status for every node.

    Queries the node-synthesized-mostrecent view and builds a dict keyed
    by nodeid; each entry carries 'ping_status', 'port_status' and a
    1-based 'serial' reflecting view iteration order.
    """
    log.debug("Get most recent synthesized view for all nodes")
    db = store.get_bucket()
    all_synthesized_values = {}
    view_by_node_synthesized_most_recent = db.view(
        '_design/node-synthesized-mostrecent/_view/get_node-synthesized-mostrecent',
        include_docs=True)
    # NOTE: dropped the redundant "ping_status = None / port_status = None"
    # pre-initializations — both are assigned before use inside the loop
    # and never read after it.
    count = 0
    for node in view_by_node_synthesized_most_recent:
        count += 1
        document = node['doc']['json']
        ping_status = documentparser.get_value(document, "ping_status")
        port_status = documentparser.get_value(document, "port_status")
        nodeid = documentparser.get_value(document, "nodeid")
        all_synthesized_values.update({nodeid: {'ping_status': ping_status,
                                                'port_status': port_status,
                                                'serial': count}})
    return all_synthesized_values
def get_view_all_nodes_average_attribute_treemap_bkup(limit=10, start_time="", end_time="{}"):
    """Build treemap rows of per-node averages over the last *limit* samples.

    For every node in constants.nodes, fetch up to *limit* documents from
    the node-timestamp view (optionally bounded by *start_time*/*end_time*,
    which are spliced verbatim into the view keys) and average CPU usage,
    memory usage, and network bytes sent/received.

    Returns whatever node_treemap() produces from the four
    [Id, parent, metricvalue] tables.
    """
    log.debug("Get view for most recent attribute for all nodes")
    db = store.get_bucket()
    # Treemap headers: CPU usage, memory usage, data sent/received.
    values_treemap_cpu = [['Id', 'parent', 'metricvalue'],
                          ['Average CPU Usage', '', 0]]
    values_treemap_mem_used = [['Id', 'parent', 'metricvalue'],
                               ['Average Memory Usage', '', 0]]
    values_treemap_data_sent = [['Id', 'parent', 'metricvalue'],
                                ['Average Data Sent', '', 0]]
    values_treemap_data_received = [['Id', 'parent', 'metricvalue'],
                                    ['Average Data Received', '', 0]]
    # NOTE: removed the original's two `count` variables (the outer one was
    # clobbered to 0 every iteration, neither was ever read) and the
    # load_avg_1min list that was filled but never used.
    for node_id in constants.nodes:
        name = node_id
        mem_usage = []
        cpu_usage = []
        data_sent = []
        data_received = []
        if start_time == "":
            str_startkey = "[\"" + node_id + "\"]"
        else:
            str_startkey = "[\"" + node_id + "\"," + start_time + "]"
        str_endkey = "[\"" + node_id + "\"," + end_time + "]"
        # Descending query: startkey is the upper bound, endkey the lower.
        view_by_node_id = db.view('_design/node-timestamp/_view/get_node-timestamp',
                                  startkey=str_endkey, endkey=str_startkey,
                                  limit=limit, descending=True, include_docs=True)
        for node in view_by_node_id:
            document = node['doc']['json']
            if document:
                mem_usage.append(documentparser.get_value(document, "memory_percent_used"))
                cpu_usage.append(documentparser.get_value(document, "total_cpu_usage"))
                data_sent.append(documentparser.get_value(document, "network_total_bytes_sent_last_sec"))
                data_received.append(documentparser.get_value(document, "network_total_bytes_received_last_sec"))
        # Guard against numpy.average([]) returning NaN (which would break
        # the treemap rendering) when a node has no samples in range.
        values_treemap_cpu.append(
            [name, 'Average CPU Usage', numpy.average(cpu_usage) if cpu_usage else 0])
        values_treemap_mem_used.append(
            [name, 'Average Memory Usage', numpy.average(mem_usage) if mem_usage else 0])
        values_treemap_data_sent.append(
            [name, 'Average Data Sent', numpy.average(data_sent) if data_sent else 0])
        values_treemap_data_received.append(
            [name, 'Average Data Received', numpy.average(data_received) if data_received else 0])
    return node_treemap(cpu_usage=values_treemap_cpu,
                        memory_usage=values_treemap_mem_used,
                        data_sent=values_treemap_data_sent,
                        data_received=values_treemap_data_received)
def rename_sliver(seq_value, name):
    """Rename every sliver in *seq_value* in place.

    Each sliver's 'sliver_name' becomes its 'sliver_slice_name' with *name*
    appended. Mutates seq_value['slivers'][...]['sliver_name'] directly.
    """
    slivers = documentparser.get_value(seq_value, 'slivers')
    for container in slivers:
        sliver = slivers[container]
        # NOTE: the original also read 'sliver_name' here, but the value was
        # immediately overwritten — dead read removed.
        sliver_slice_name = documentparser.get_value(sliver, 'sliver_slice_name')
        ## Update sliver name ##
        seq_value['slivers'][container]['sliver_name'] = str(sliver_slice_name) + name
def get_view_node_id_attribute_timeline(node_id, value_type, limit=1000, start_time="", end_time="{}"):
    """Return up to *limit* samples of *value_type* for *node_id*.

    Each element is the dict produced by
    util.convert_epoch_to_date_time_javascript() with an added 'value' key.
    The result is in ascending time order (most recent sample last), which
    is what the timeline graphs expect.
    """
    log.debug("Get view by node ID for node: %s" % node_id)
    db = store.get_bucket()
    if start_time == "":
        str_startkey = "[\"" + node_id + "\"]"
    else:
        str_startkey = "[\"" + node_id + "\"," + start_time + "]"
    str_endkey = "[\"" + node_id + "\"," + end_time + "]"
    # Descending query: startkey is the upper bound, endkey the lower.
    view_by_node_id = db.view('_design/node-timestamp/_view/get_node-timestamp',
                              startkey=str_endkey, endkey=str_startkey,
                              limit=limit, descending=True, include_docs=True)
    all_values = []
    for node in view_by_node_id:
        document = node['doc']['json']  # renamed from `json` (shadowed the module)
        value = documentparser.get_value(document, value_type)
        server_timestamp = documentparser.return_server_timestamp(document)
        date_time_value = util.convert_epoch_to_date_time_javascript(server_timestamp)
        date_time_value.update({'value': value})
        # The view yields newest-first; prepend so the list ends up ascending
        # with the most recent value at the end. (Was insert(1, ...), which
        # left the newest sample stuck at the front of the list.)
        all_values.insert(0, date_time_value)
    return all_values
def node_info_set_timeline(request, parameter):
    """Django view: render a timeline for one node attribute within a set.

    *parameter* is received as
    (metric/node_name.network.interface_name.attribute/start_time0=...).
    """
    server_ip = util.SERVER_IP
    server_port = util.SERVER_PORT
    # str.split instead of string.split: the string-module function was
    # removed in Python 3 and the method form is identical on Python 2.
    (metric_nodeid, resource, resource_spec1, attribute_arguments) = parameter.split('.')
    metric, node_id = metric_nodeid.split("/")
    attribute, arguments = attribute_arguments.split("/")
    parameter = metric_nodeid + '.' + resource + '.' + resource_spec1 + '.' + attribute
    value_type = resource + "." + resource_spec1 + "." + attribute
    document = fetchdocument.fetch_most_recent_document(node_id)
    name = documentparser.get_value(document, "name")
    arg_dict = util.split_arguments_return_dict(arguments)
    values_graph = getview.get_view_node_id_attribute_timeline(
        node_id, value_type,
        start_time=arg_dict['start_time_epoch'],
        end_time=arg_dict['end_time_epoch'],
        limit=arg_dict['limit'])
    return render_to_response('node_info_set_timeline.html',
                              {'name': name, 'nodeid': node_id,
                               'value': resource_spec1 + " " + attribute,
                               'server_ip': server_ip, 'server_port': server_port,
                               'parameter': parameter, 'arguments': arg_dict,
                               'values_graph': values_graph},
                              context_instance=RequestContext(request))
def get_view_sliver_id_attribute_timeline(sliver_id, value_type):
    """Return up to 1000 samples of *value_type* for *sliver_id*.

    The sliver-timestamp view emits
    key = [sliver_id, server_timestamp],
    value = {'sliver': sliverinfo, 'nodeid': nodeid,
             'server_timestamp': server_timestamp}.

    Each returned element is the dict produced by
    util.convert_epoch_to_date_time_javascript() with an added 'value' key,
    in ascending time order (most recent sample last).
    """
    log.debug("Get view by sliver ID for sliver: %s" % sliver_id)
    db = store.get_bucket()
    str_startkey = "[\"" + sliver_id + "\",{}]"
    str_endkey = "[\"" + sliver_id + "\"]"
    view_by_sliver_id = db.view('_design/sliver-timestamp/_view/get_sliver-timestamp',
                                startkey=str_startkey, endkey=str_endkey,
                                descending=True, limit=1000)
    all_values = []
    for row in view_by_sliver_id:
        document = row['value']
        sliver = document['sliver']
        value = documentparser.get_value(sliver, value_type)
        server_timestamp = documentparser.return_server_timestamp(document)
        date_time_value = util.convert_epoch_to_date_time_javascript(server_timestamp)
        date_time_value.update({'value': value})
        # Newest-first view; prepend so the graph sees an ascending timeline.
        # (Was insert(1, ...), which left the newest sample at the front.)
        all_values.insert(0, date_time_value)
    return all_values
def slice_info(request, parameter):
    """Django view: render sliceinfo.html for the slice named by *parameter*.

    Lists the most recent per-sliver stats for the slice and attaches a
    CPU-usage treemap as JSON for the page's graph.
    """
    slice_id = parameter
    slivers = getview.get_view_slice_id_all_slivers_most_recent(slice_id)
    all_values = []
    for serial, sliver in enumerate(slivers, 1):
        total_memory = documentparser.get_value(sliver, 'sliver_total_memory')
        total_memory_free = documentparser.get_value(sliver, 'sliver_total_memory_free')
        # Convert raw byte counts into human-readable strings for display.
        total_memory, total_memory_free = util.convert_bytes_to_human_readable(
            [total_memory, total_memory_free])
        all_values.append({
            'sliver_name': documentparser.get_value(sliver, 'sliver_name'),
            'sliver_cpu_usage': documentparser.get_value(sliver, 'sliver_cpu_usage'),
            'sliver_slice_name': documentparser.get_value(sliver, 'sliver_slice_name'),
            'sliver_total_memory': total_memory,
            'sliver_total_memory_free': total_memory_free,
            'sliver_total_memory_percent_used': documentparser.get_value(
                sliver, 'sliver_total_memory_percent_used'),
            'serial': serial,
            'nodeid': documentparser.get_value(sliver, "nodeid"),
        })
    # Populate Treemap graph
    treemap_rows = getview.get_view_slice_most_recent_attribute_treemap(
        slice_id, 'sliver_cpu_usage')
    values_graph = json.dumps(treemap_rows)
    context = {'server_ip': util.SERVER_IP,
               'server_port': util.SERVER_PORT,
               'all_values': all_values,
               'values_graph': values_graph}
    return render_to_response('sliceinfo.html', context,
                              context_instance=RequestContext(request))
def async_node_attribute(request, parameter):
    """Django view: render the async node-attribute timeline page.

    *parameter* has the form metric/node/<time>.
    """
    metric, node_id, _time = parameter.split('/')
    document = fetchdocument.fetch_most_recent_document(node_id)
    node_name = documentparser.get_value(document, "name")
    context = {'server_ip': util.SERVER_IP,
               'server_port': util.SERVER_PORT,
               'name': node_name,
               'nodeid': node_id,
               'metric': metric}
    return render_to_response('async_node_info_timeline.html', context)
def get_view_node_id_synthesized_attribute_timeline(node_id, value_type):
    """Return up to 1000 synthesized samples of *value_type* for *node_id*.

    Queries the synthesized-timestamp view newest-first and returns, in that
    same order, the dicts produced by util.convert_epoch_to_date_time_dict()
    each augmented with a 'value' key.
    """
    log.debug("Get view by node ID for node: %s" % node_id)
    db = store.get_bucket()
    str_startkey = "[\"" + node_id + "\",{}]"
    str_endkey = "[\"" + node_id + "\"]"
    rows = db.view('_design/synthesized-timestamp/_view/get_synthesized-timestamp',
                   startkey=str_startkey, endkey=str_endkey,
                   descending=True, limit=1000, include_docs=True)
    all_values = []
    for row in rows:
        document = row['doc']['json']  # local renamed to avoid shadowing the json module
        sample = util.convert_epoch_to_date_time_dict(
            documentparser.get_value(document, "server_timestamp"))
        sample['value'] = documentparser.get_value(document, value_type)
        all_values.append(sample)
    return all_values
def async_aggr_set_node_attribute(request, parameter):
    """Django view: render the async aggregate-set page.

    *parameter* has the form set//node//interface//metric//<time>.
    """
    # Local renamed from `set` to avoid shadowing the builtin.
    set_name, node_id, interface, metric, _time = parameter.split('//')
    document = fetchdocument.fetch_most_recent_document(node_id)
    node_name = documentparser.get_value(document, "name")
    context = {'server_ip': util.SERVER_IP,
               'server_port': util.SERVER_PORT,
               'name': node_name,
               'nodeid': node_id,
               'metric': metric,
               'interface': interface,
               'set': set_name}
    return render_to_response('async_aggregate_set.html', context)
def get_view_slice_id_attribute_timeline(slice_id, value_type):
    """Return up to 1000 samples of *value_type* for *slice_id*.

    Each element is the dict produced by
    util.convert_epoch_to_date_time_javascript() augmented with 'value' and
    'sliver_id' keys, in ascending time order (most recent sample last).
    """
    log.debug("Get view by slice ID for slice: %s" % slice_id)
    db = store.get_bucket()
    str_startkey = "[\"" + slice_id + "\",{}]"
    str_endkey = "[\"" + slice_id + "\"]"
    view_by_slice_id = db.view('_design/slice-timestamp/_view/get_slice-timestamp',
                               startkey=str_startkey, endkey=str_endkey,
                               descending=True, limit=1000)
    all_values = []
    for row in view_by_slice_id:  # renamed from `slice` (shadowed the builtin)
        document = row['value']
        value = documentparser.get_value(document, value_type)
        server_timestamp = documentparser.return_server_timestamp(document)
        date_time_value = util.convert_epoch_to_date_time_javascript(server_timestamp)
        sliver_id = documentparser.get_value(document, 'sliver_name')
        date_time_value.update({'value': value, 'sliver_id': sliver_id})
        # Newest-first view; prepend so the graph sees an ascending timeline.
        # (Was insert(1, ...), which left the newest sample at the front.)
        all_values.insert(0, date_time_value)
    return all_values
def get_view_slice_most_recent_attribute_treemap(slice_id, value_type):
    """Build treemap rows for the most recent *value_type* of every sliver
    of *slice_id*.

    The slice-timestamp view emits
    key = [slice_id, node, server_timestamp],
    value = {'sliver': sliverinfo, 'nodeid': nodeid,
             'server_timestamp': server_timestamp}.

    Returns a list of [Id, parent, metricvalue] rows headed by the column
    row and the root row for *value_type*.
    """
    log.debug("Get most recent sliver attributes for slice: %s" % slice_id)
    all_values = [['Id', 'parent', 'metricvalue'], [value_type, '', 0]]
    db = store.get_bucket()
    # Query each node separately, keeping only its single newest row.
    newest_rows = []
    for node in constants.nodes:
        upper = "[\"" + slice_id + "\",\"" + node + "\",{}]"
        lower = "[\"" + slice_id + "\",\"" + node + "\"]"
        result = db.view('_design/slice-timestamp/_view/get_slice-timestamp',
                         startkey=upper, endkey=lower,
                         descending=True, limit=1)
        if len(result) > 0:
            newest_rows.append(result[0])
    for row in newest_rows:
        sliver = row['value']
        all_values.append([documentparser.get_value(sliver, 'sliver_name'),
                           value_type,
                           documentparser.get_value(sliver, value_type)])
    return all_values
def get_view_sliver_most_recent_attribute_treemap(node_id, value_type):
    """Build treemap rows for *value_type* of every sliver on *node_id*.

    Reads the node's most recent document and returns a list of
    [Id, parent, metricvalue] rows headed by the column row and the root
    row for *value_type*. When no document exists, only the two header
    rows are returned.
    """
    log.debug("Get most recent sliver attributes for Node: %s" % node_id)
    all_values = [['Id', 'parent', 'metricvalue'], [value_type, '', 0]]
    document = fetchdocument.fetch_most_recent_document(node_id)
    if not document:
        return all_values
    for container, sliver in document['slivers'].items():
        row = [documentparser.get_value(sliver, 'sliver_name'),
               value_type,
               documentparser.get_value(sliver, value_type)]
        all_values.append(row)
    return all_values
def get_view_node_id_synthesized_attribute_most_recent(node_id, value_type):
    """Return the most recent synthesized *value_type* for *node_id*.

    Returns None when the node has no synthesized documents.
    """
    log.debug("Get view by node ID for node: %s" % node_id)
    db = store.get_bucket()
    str_startkey = "[\"" + node_id + "\",{}]"
    str_endkey = "[\"" + node_id + "\"]"
    view_by_node_id = db.view('_design/synthesized-timestamp/_view/get_synthesized-timestamp',
                              startkey=str_startkey, endkey=str_endkey,
                              descending=True, limit=1, include_docs=True)
    # Initialize the result: the original raised NameError on `return value`
    # whenever the view returned no rows. Also dropped the unused all_values.
    value = None
    if len(view_by_node_id) > 0:
        document = view_by_node_id[0]['doc']['json']
        value = documentparser.get_value(document, value_type)
    return value
def get_view_node_id_attribute_json(node_id, value_type, limit):
    """Return up to *limit* [timestamp_ms, value] pairs for *node_id*.

    Timestamps are converted to milliseconds for JavaScript charting.
    The result is in ascending time order (most recent sample last).
    """
    log.debug("Get view by node ID for node: %s" % node_id)
    db = store.get_bucket()
    str_startkey = "[\"" + node_id + "\",{}]"
    str_endkey = "[\"" + node_id + "\"]"
    view_by_node_id = db.view('_design/node-timestamp/_view/get_node-timestamp',
                              startkey=str_startkey, endkey=str_endkey,
                              descending=True, include_docs=True, limit=limit)
    all_values = []
    for node in view_by_node_id:
        document = node['doc']['json']  # renamed from `json` (shadowed the module)
        value = documentparser.get_value(document, value_type)
        server_timestamp = documentparser.return_server_timestamp(document) * 1000
        # Newest-first view; prepend so the series is an ascending timeline.
        # (Was insert(1, ...), which left the newest sample at the front.)
        all_values.insert(0, [server_timestamp, value])
    return all_values
def get_view_node_id_attribute_async(node_id, value_type, limit=3000, start_time="", end_time="{}"):
    """Return up to *limit* [timestamp_ms, float_value] pairs for *node_id*.

    Like get_view_node_id_attribute_json but with optional time bounds and
    values coerced to float. Result is in ascending time order (most recent
    sample last).
    """
    log.debug("Get view by node ID for node: %s" % node_id)
    db = store.get_bucket()
    if start_time == "":
        str_startkey = "[\"" + node_id + "\"]"
    else:
        str_startkey = "[\"" + node_id + "\"," + str(start_time) + "]"
    str_endkey = "[\"" + node_id + "\"," + str(end_time) + "]"
    # Descending query: startkey is the upper bound, endkey the lower.
    view_by_node_id = db.view('_design/node-timestamp/_view/get_node-timestamp',
                              startkey=str_endkey, endkey=str_startkey,
                              limit=limit, descending=True, include_docs=True)
    all_values = []
    for node in view_by_node_id:
        document = node['doc']['json']  # renamed from `json` (shadowed the module)
        value = float(documentparser.get_value(document, value_type))
        # Convert to milliseconds for JavaScript.
        server_timestamp = documentparser.return_server_timestamp(document) * 1000
        # Newest-first view; prepend so the series is an ascending timeline.
        # (Was insert(1, ...), which left the newest sample at the front.)
        all_values.insert(0, [server_timestamp, value])
    return all_values
def node_slivers(request, parameter):
    """Django view: render node_slivers.html for the node named by *parameter*.

    Lists per-sliver stats from the node's most recent document, attaches a
    CPU-usage treemap as JSON, and includes the node's network/disk/memory
    attribute sets. Falls back to empty values (node_in_db=False) when no
    document exists for the node.
    """
    server_ip = util.SERVER_IP
    server_port = util.SERVER_PORT
    all_values = []
    values_graph = []
    network_values = []
    disk_values = []
    memory_values = []
    node_in_db = False
    name = parameter
    node_id = parameter
    document = fetchdocument.fetch_most_recent_document(node_id)
    slivers = documentparser.get_value(document, 'slivers')
    # Was a bare Python-2 `print` debug statement; route through the logger.
    log.debug("NODE ID: %s" % node_id)
    count = 0
    if document:
        node_in_db = True
        if slivers:
            for container in slivers:
                sliver = slivers[container]
                count += 1
                sliver_name = documentparser.get_value(sliver, 'sliver_name')
                sliver_cpu_usage = documentparser.get_value(sliver, 'sliver_cpu_usage')
                sliver_slice_name = documentparser.get_value(sliver, 'sliver_slice_name')
                sliver_total_memory = documentparser.get_value(sliver, 'sliver_total_memory')
                sliver_total_memory_free = documentparser.get_value(sliver, 'sliver_total_memory_free')
                sliver_total_memory_percent_used = documentparser.get_value(sliver, 'sliver_total_memory_percent_used')
                sliver_ip = documentparser.get_value(sliver, 'sliver_ip')
                sliver_state = documentparser.get_value(sliver, 'sliver_state')
                sliver_total_memory, sliver_total_memory_free = util.convert_bytes_to_human_readable(
                    [sliver_total_memory, sliver_total_memory_free])
                all_values.append({'sliver_name': sliver_name,
                                   'sliver_cpu_usage': sliver_cpu_usage,
                                   'sliver_slice_name': sliver_slice_name,
                                   'sliver_total_memory': sliver_total_memory,
                                   'sliver_total_memory_free': sliver_total_memory_free,
                                   'sliver_total_memory_percent_used': sliver_total_memory_percent_used,
                                   'sliver_ip': sliver_ip,
                                   'sliver_state': sliver_state,
                                   'serial': count})
            # Populate Treemap graph
            values = getview.get_view_sliver_most_recent_attribute_treemap(
                node_id, 'sliver_cpu_usage')
            values_graph = json.dumps(values)
        # Node-level attribute sets for the page. NOTE(review): the original's
        # flattened source made the nesting ambiguous — this assumes these
        # reads belong under `if document:` (name had a fallback to the raw
        # parameter for the missing-document case). The unused `nodeid` read
        # was removed (the template receives node_id directly).
        name = documentparser.get_value(document, "name")
        network_values = documentparser.get_set(document, "network")
        disk_values = documentparser.get_set(document, "disk")
        memory_values = documentparser.get_set(document, "memory")
    return render_to_response('node_slivers.html',
                              {'disk_values': disk_values,
                               'all_values': all_values,
                               'values_graph': values_graph,
                               'network_values': network_values,
                               'nodeid': node_id,
                               'name': name,
                               'server_ip': server_ip,
                               'server_port': server_port,
                               'numberslivers': count,
                               'node_in_db': node_in_db,
                               'memory_values': memory_values},
                              context_instance=RequestContext(request))
def get_view_all_nodes_most_recent():
    """Return a summary dict of the most recent document for every node.

    Keyed by nodeid; each entry carries name, CPU/memory/disk figures,
    network totals, uptime, and last-updated time, with byte counts and
    uptime converted to human-readable form.
    """
    log.debug("Get most recent view for all nodes")
    db = store.get_bucket()
    all_values = {}
    rows = db.view('_design/node-mostrecent/_view/get_node-mostrecent',
                   include_docs=True)
    for row in rows:
        document = row['doc']['json']
        nodeid = documentparser.get_value(document, "nodeid")
        uptime_secs = documentparser.get_value(document, "uptime")
        disk_size = documentparser.get_value(document, "disk_size")
        total_memory = documentparser.get_value(document, "total_memory")
        data_sent = documentparser.get_value(document, "network_total_bytes_sent_last_sec")
        data_received = documentparser.get_value(document, "network_total_bytes_received_last_sec")
        ## Human readability ##
        uptime = util.convert_secs_to_time_elapsed(uptime_secs)
        disk_size, total_memory, data_sent, data_received = util.convert_bytes_to_human_readable(
            [disk_size, total_memory, data_sent, data_received])
        all_values[nodeid] = {
            'num_cpu': documentparser.get_value(document, "number_of_cpus"),
            'percent_usage': documentparser.get_value(document, "total_cpu_usage"),
            'last_updated': documentparser.return_server_time(document),
            'name': documentparser.get_value(document, "name"),
            'total_memory': total_memory,
            'disk_size': disk_size,
            'load_avg_1min': documentparser.get_value(document, "load_avg_1min"),
            'memory_percent_used': documentparser.get_value(document, "memory_percent_used"),
            'data_sent': data_sent,
            'data_received': data_received,
            'uptime': uptime,
        }
    return all_values