def get_view_nodes_metric_stat_aggregated(metric, node_id=None, start_timestamp="", end_timestamp="{}", group_level=1):
    """Query a per-node CouchDB reduce view for one metric and return a list of
    [epoch_milliseconds, average] pairs aggregated at the requested granularity.

    metric          -- one of 'total_cpu_usage', 'memory_percent_used',
                       'network_total_bytes_sent_last_sec',
                       'network_total_bytes_received_last_sec', 'load_avg_1min';
                       any other value yields an empty result
    node_id         -- node identifier; first component of the view key
    start_timestamp -- epoch time string; "" means open-ended start
    end_timestamp   -- epoch time string; "{}" (CouchDB high-key sentinel)
                       means open-ended end
    group_level     -- CouchDB group_level; level 1 is the node id, levels 2+
                       map to year/month/date/hour/minute/second key components
    """
    log.debug("Get view by node ID for node: %s" % node_id)
    date_attributes = ['year', 'month', 'date', 'hour', 'minute', 'second']
    pattern_list = ['%Y', '%m', '%d', '%H', '%M', '%S']
    # The pattern is single-quoted and comma+space separated so it matches the
    # key string produced by str(key[1:]).strip('[]') in the loop below.
    pattern = "'" + ', '.join(pattern_list[:group_level - 1]) + "'"
    log.debug("pattern: %s" % pattern)  # was a leftover debug print
    db = store.get_bucket()

    if start_timestamp == "":
        # Open-ended start: key contains only the node id.
        str_startkey = "[\"" + node_id + "\"]"
    else:
        start_time = util.convert_epoch_to_date_time_dict_attributes_strip_zeroes(start_timestamp)
        start_time_key = ",".join(start_time[attr] for attr in date_attributes[:group_level - 1])
        str_startkey = "[\"" + node_id + "\"," + start_time_key + "]"

    if end_timestamp != "{}":
        end_time = util.convert_epoch_to_date_time_dict_attributes_strip_zeroes(end_timestamp)
        end_time_key = ",".join(end_time[attr] for attr in date_attributes[:group_level - 1])
        str_endkey = "[\"" + node_id + "\"," + end_time_key + "]"
    else:
        # "{}" sorts after every other JSON value, giving an open-ended range.
        str_endkey = "[\"" + node_id + "\"," + end_timestamp + "]"

    log.info("startkey: " + str_startkey)
    log.info("endkey: " + str_endkey)

    # Metric name -> design-document/view path (replaces the long if/elif chain).
    view_paths = {
        'total_cpu_usage': '_design/all_nodes_cpu_stats/_view/get_all_nodes_cpu_stats',
        'memory_percent_used': '_design/all_nodes_mem_used/_view/get_all_nodes_mem_used',
        'network_total_bytes_sent_last_sec': '_design/all_nodes_bytes_sent/_view/get_all_nodes_bytes_sent',
        'network_total_bytes_received_last_sec': '_design/all_nodes_bytes_recv/_view/get_all_nodes_bytes_recv',
        'load_avg_1min': '_design/all_nodes_load_avg/_view/get_all_nodes_load_avg',
    }
    view_stats = []
    if metric in view_paths:
        # With descending=True, CouchDB requires startkey/endkey to be swapped.
        view_stats = db.view(view_paths[metric], startkey=str_endkey,
                             endkey=str_startkey, descending=True,
                             reduce=True, group=True, group_level=group_level)

    all_values = []
    for view in view_stats:
        document = view['value']
        avg = document['sum'] / document['count']
        key = view['key']
        log.info('key: ' + str(key) + 'avg: ' + str(avg))
        # key[1:] drops the node id, leaving only the date components.
        date_as_string = "'" + str(key[1:]).strip('[]') + "'"
        epoch_milli = util.convert_time_to_epoch(date_as_string, pattern) * 1000  # ms for charting
        # BUGFIX: was insert(1, ...), which kept the first (newest) row at the
        # front and reversed only the remainder, scrambling the series.
        # insert(0, ...) reverses the descending view output into ascending
        # chronological order.
        all_values.insert(0, [epoch_milli, avg])
    return all_values
def get_view_nodes_set_metric_stat_aggregated(set, interface, metric, node_id=None, start_timestamp="", end_timestamp="{}", group_level=1):
    """Query a per-node, per-interface CouchDB reduce view for one metric of a
    stat set and return [epoch_milliseconds, average] pairs.

    set             -- stat set: 'network' or 'disk' (any other value yields an
                       empty result). NOTE(review): shadows the builtin `set`;
                       kept for interface compatibility with existing callers.
    interface       -- interface/device name; second view-key component
    metric          -- metric name within the set; third view-key component
    node_id         -- node identifier; first view-key component
    start_timestamp -- epoch time string; "" means open-ended start
    end_timestamp   -- epoch time string; "{}" (CouchDB high-key sentinel)
                       means open-ended end
    group_level     -- CouchDB group_level; levels 1-3 are node/interface/metric,
                       levels 4+ map to year/month/date/hour/minute/second
    """
    log.debug("Get view by node ID for node: %s" % node_id)
    log.debug("group level: %s" % str(group_level))  # was a leftover debug print
    date_attributes = ['year', 'month', 'date', 'hour', 'minute', 'second']
    pattern_list = ['%Y', '%m', '%d', '%H', '%M', '%S']
    # Three leading key components (node, interface, metric) precede the date
    # parts, hence group_level-3. Quoting/spacing must match
    # str(key[3:]).strip('[]') in the loop below.
    pattern = "'" + ', '.join(pattern_list[:group_level - 3]) + "'"
    log.debug("pattern: %s" % pattern)
    db = store.get_bucket()

    key_prefix = "[\"" + node_id + "\",\"" + interface + "\",\"" + metric + "\""
    if start_timestamp == "":
        # Open-ended start: key contains only the fixed prefix components.
        str_startkey = key_prefix + "]"
    else:
        start_time = util.convert_epoch_to_date_time_dict_attributes_strip_zeroes(start_timestamp)
        start_time_key = ",".join(start_time[attr] for attr in date_attributes[:group_level - 3])
        str_startkey = key_prefix + "," + start_time_key + "]"

    if end_timestamp != "{}":
        end_time = util.convert_epoch_to_date_time_dict_attributes_strip_zeroes(end_timestamp)
        end_time_key = ",".join(end_time[attr] for attr in date_attributes[:group_level - 3])
        str_endkey = key_prefix + "," + end_time_key + "]"
    else:
        # "{}" sorts after every other JSON value, giving an open-ended range.
        str_endkey = key_prefix + "," + end_timestamp + "]"

    log.info("startkey: " + str_startkey)
    log.info("endkey: " + str_endkey)

    # Stat set -> design-document/view path.
    view_paths = {
        'network': '_design/all_nodes_network_stat/_view/get_all_nodes_network_stat',
        'disk': '_design/all_nodes_disk_stat/_view/get_all_nodes_disk_stat',
    }
    view_stats = []
    if set in view_paths:
        # With descending=True, CouchDB requires startkey/endkey to be swapped.
        view_stats = db.view(view_paths[set], startkey=str_endkey,
                             endkey=str_startkey, descending=True,
                             reduce=True, group=True, group_level=group_level)

    all_values = []
    for view in view_stats:
        document = view['value']
        avg = document['sum'] / document['count']
        key = view['key']
        log.info('key: ' + str(key) + 'avg: ' + str(avg))
        # key[3:] drops node/interface/metric, leaving only the date components.
        date_as_string = "'" + str(key[3:]).strip('[]') + "'"
        epoch_milli = util.convert_time_to_epoch(date_as_string, pattern) * 1000  # ms for charting
        # BUGFIX: was insert(1, ...) — see get_view_nodes_metric_stat_aggregated;
        # insert(0, ...) reverses the descending view output into ascending
        # chronological order.
        all_values.insert(0, [epoch_milli, avg])
    return all_values