def get_view_nodes_set_metric_stat_aggregated(set, interface, metric, node_id=None, start_timestamp="", end_timestamp="{}", group_level=1):
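    """Average one metric of a single node over time, using the reduce view
    selected by the set argument ('network' or 'disk').

    start_timestamp and end_timestamp are epoch timestamps bounding the range,
    and group_level controls the date granularity of the aggregation. Returns
    a list of [epoch_milliseconds, average] pairs.
    """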
    log.debug("Get view by node ID for node: %s" %node_id)

    print "group level: "+ str(group_level)


    start_time_key = ""
    end_time_key = ""
    date_attributes = ['year', 'month', 'date', 'hour', 'minute', 'second']
    pattern_list = ['%Y', '%m', '%d', '%H', '%M','%S']

    pattern= "\'"+(', '.join(pattern_list[:group_level-3]))+"\'"


    print "pattern: "+ pattern

    db = store.get_bucket()
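
    # The start/end keys below are JSON-encoded arrays of the form
    # [node_id, interface, metric, year, month, day, hour, minute, second],
    # matching the compound keys the stat views are queried on.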

    if(start_timestamp==""):
        str_startkey = "[\"" + node_id + "\","+"\"" +interface+"\""+","+"\""+ metric+"\"" + "]"
    else:
        start_time = util.convert_epoch_to_date_time_dict_attributes_strip_zeroes(start_timestamp)
        for i in range(group_level-3):
            start_time_key += start_time[date_attributes[i]]+","
        start_time_key=start_time_key.rstrip(",")

        str_startkey = "[\"" + node_id + "\"," +"\""+interface+"\""+ "," + "\""+metric +"\""+"," + start_time_key+"]"


    if end_timestamp != "{}":
        end_time = util.convert_epoch_to_date_time_dict_attributes_strip_zeroes(end_timestamp)
        for i in range(group_level - 3):
            end_time_key += end_time[date_attributes[i]] + ","
        end_time_key = end_time_key.rstrip(",")

        str_endkey = '["%s","%s","%s",%s]' % (node_id, interface, metric, end_time_key)
    else:
        str_endkey = '["%s","%s","%s",%s]' % (node_id, interface, metric, end_timestamp)

    log.info( "startkey: "+ str_startkey)
    log.info( "endkey: "+ str_endkey)

    view_stats= []
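
    # descending=True walks the key range newest-first, which is why str_endkey
    # is passed as startkey and str_startkey as endkey below.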

    if set == 'network':
        view_stats = db.view('_design/all_nodes_network_stat/_view/get_all_nodes_network_stat',
                             startkey=str_endkey, endkey=str_startkey, descending=True,
                             reduce=True, group=True, group_level=group_level)

    elif set == 'disk':
        view_stats = db.view('_design/all_nodes_disk_stat/_view/get_all_nodes_disk_stat',
                             startkey=str_endkey, endkey=str_startkey, descending=True,
                             reduce=True, group=True, group_level=group_level)

    all_values = []

    for view in view_stats:
        document = view['value']
        # The reduce emits {'sum': ..., 'count': ...}; force float division so
        # integer counters are not truncated.
        avg = float(document['sum']) / document['count']
        key = view['key']
        log.info('key: ' + str(key) + ' avg: ' + str(avg))
        # key[3:] holds the grouped date components; wrap the stringified list
        # in single quotes so it matches the quoted pattern built above.
        date_as_string = "'" + str(key[3:]).strip('[]') + "'"

        epoch_milli = util.convert_time_to_epoch(date_as_string, pattern) * 1000  # convert to milliseconds
        # Results arrive newest-first (descending), so insert at the front to
        # return the series in chronological order.
        all_values.insert(0, [epoch_milli, avg])

    return all_values
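
# Example call (hypothetical identifiers): daily aggregation of one interface
# metric (group_level 6 = node_id, interface, metric + year/month/day):
#
#   series = get_view_nodes_set_metric_stat_aggregated(
#       'network', 'eth0', 'bytes_sent', node_id='node-1',
#       start_timestamp=1400000000, end_timestamp=1400086400, group_level=6)
#
# which yields [[epoch_ms, average], ...] ordered oldest to newest.
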
def get_view_nodes_metric_stat_aggregated(metric, node_id=None, start_timestamp="", end_timestamp="{}", group_level=1):
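    """Average a single node-level metric (CPU, memory, bytes sent/received or
    load average) over time via the corresponding reduce view.

    Returns a list of [epoch_milliseconds, average] pairs at the date
    granularity selected by group_level.
    """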
    log.debug("Get view by node ID for node: %s" %node_id)


    start_time_key = ""
    end_time_key = ""
    date_attributes = ['year', 'month', 'date', 'hour', 'minute', 'second']
    pattern_list = ['%Y', '%m', '%d', '%H', '%M','%S']

    pattern= "\'"+(', '.join(pattern_list[:group_level-1]))+"\'"


    print "pattern: "+ pattern

    db = store.get_bucket()
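
    # Start/end keys here take the form [node_id, year, month, day, ...].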

    if(start_timestamp==""):
        str_startkey = "[\"" + node_id + "\"]"
    else:
        start_time = util.convert_epoch_to_date_time_dict_attributes_strip_zeroes(start_timestamp)
        for i in range(group_level-1):
            start_time_key += start_time[date_attributes[i]]+","
        start_time_key=start_time_key.rstrip(",")

        str_startkey = "[\"" + node_id + "\"," + start_time_key+"]"


    if end_timestamp != "{}":
        end_time = util.convert_epoch_to_date_time_dict_attributes_strip_zeroes(end_timestamp)
        for i in range(group_level - 1):
            end_time_key += end_time[date_attributes[i]] + ","
        end_time_key = end_time_key.rstrip(",")
        str_endkey = '["%s",%s]' % (node_id, end_time_key)
    else:
        str_endkey = '["%s",%s]' % (node_id, end_timestamp)

    log.info( "startkey: "+ str_startkey)
    log.info( "endkey: "+ str_endkey)

    view_stats= []
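
    # As in get_view_nodes_set_metric_stat_aggregated above, the query runs
    # descending, so str_endkey is passed as startkey and str_startkey as endkey.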

    if metric == 'total_cpu_usage':
        view_stats = db.view('_design/all_nodes_cpu_stats/_view/get_all_nodes_cpu_stats',
                             startkey=str_endkey, endkey=str_startkey, descending=True,
                             reduce=True, group=True, group_level=group_level)

    elif metric == 'memory_percent_used':
        view_stats = db.view('_design/all_nodes_mem_used/_view/get_all_nodes_mem_used',
                             startkey=str_endkey, endkey=str_startkey, descending=True,
                             reduce=True, group=True, group_level=group_level)

    elif metric == 'network_total_bytes_sent_last_sec':
        view_stats = db.view('_design/all_nodes_bytes_sent/_view/get_all_nodes_bytes_sent',
                             startkey=str_endkey, endkey=str_startkey, descending=True,
                             reduce=True, group=True, group_level=group_level)

    elif metric == 'network_total_bytes_received_last_sec':
        view_stats = db.view('_design/all_nodes_bytes_recv/_view/get_all_nodes_bytes_recv',
                             startkey=str_endkey, endkey=str_startkey, descending=True,
                             reduce=True, group=True, group_level=group_level)

    elif metric == 'load_avg_1min':
        view_stats = db.view('_design/all_nodes_load_avg/_view/get_all_nodes_load_avg',
                             startkey=str_endkey, endkey=str_startkey, descending=True,
                             reduce=True, group=True, group_level=group_level)

    all_values = []

    for view in view_stats:
        document = view['value']
        # Force float division so integer sums/counts are not truncated.
        avg = float(document['sum']) / document['count']
        key = view['key']
        log.info('key: ' + str(key) + ' avg: ' + str(avg))
        # key[1:] holds the grouped date components (everything after node_id).
        date_as_string = "'" + str(key[1:]).strip('[]') + "'"

        epoch_milli = util.convert_time_to_epoch(date_as_string, pattern) * 1000  # convert to milliseconds
        # Results arrive newest-first; insert at the front for chronological order.
        all_values.insert(0, [epoch_milli, avg])

    return all_values
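
# Example call (hypothetical node id): hourly CPU averages for one node
# (group_level 5 = node_id + year/month/day/hour):
#
#   series = get_view_nodes_metric_stat_aggregated(
#       'total_cpu_usage', node_id='node-1',
#       start_timestamp=1400000000, end_timestamp=1400086400, group_level=5)
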
def get_view_all_nodes_average_attribute_treemap(start_timestamp="", end_timestamp="{}", group_level=1):
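    """Average CPU usage, memory usage, data sent and data received per node
    across all nodes, and return the results packed as treemap row sets via
    node_treemap().
    """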


    start_time_key = ""
    end_time_key = ""
    date_attributes = ['year', 'month', 'date', 'hour', 'minute', 'second']
    pattern_list = ['%Y', '%m', '%d', '%H', '%M','%S']

    pattern= "\'"+(', '.join(pattern_list[:group_level-1]))+"\'"


    print "pattern: "+ pattern

    db = store.get_bucket()

    # Treemap header rows and roots: average CPU usage, memory usage, data sent
    # and data received, each broken down per node.
    values_treemap_cpu = [['Id', 'parent', 'metricvalue'], ['Average CPU Usage', '', 0]]
    values_treemap_mem_used = [['Id', 'parent', 'metricvalue'], ['Average Memory Usage', '', 0]]
    values_treemap_data_sent = [['Id', 'parent', 'metricvalue'], ['Average Data Sent', '', 0]]
    values_treemap_data_received = [['Id', 'parent', 'metricvalue'], ['Average Data Received', '', 0]]
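
    # "" in the start key and "{}" in the end key act as low/high placeholders
    # for the node_id position, so the range is meant to span every node.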

    if(start_timestamp==""):
        str_startkey = "[\"""\"]"
    else:
        start_time = util.convert_epoch_to_date_time_dict_attributes_strip_zeroes(start_timestamp)
        for i in range(group_level-1):
            start_time_key += start_time[date_attributes[i]]+","
        start_time_key=start_time_key.rstrip(",")

        str_startkey = "[\"" "\"," + start_time_key+"]"


    if end_timestamp != "{}":
        end_time = util.convert_epoch_to_date_time_dict_attributes_strip_zeroes(end_timestamp)
        for i in range(group_level - 1):
            end_time_key += end_time[date_attributes[i]] + ","
        end_time_key = end_time_key.rstrip(",")
        str_endkey = '["{}",%s]' % end_time_key
    else:
        str_endkey = '["{}",%s]' % end_timestamp

    log.info( "startkey: "+ str_startkey)
    log.info( "endkey: "+ str_endkey)


    # As above, descending=True walks the range newest-first, so str_endkey is
    # passed as startkey and str_startkey as endkey for each of the four views.
    view_stats_cpu = db.view('_design/all_nodes_cpu_stats/_view/get_all_nodes_cpu_stats',
                             startkey=str_endkey, endkey=str_startkey, descending=True,
                             reduce=True, group=True, group_level=group_level)

    view_stats_mem = db.view('_design/all_nodes_mem_used/_view/get_all_nodes_mem_used',
                             startkey=str_endkey, endkey=str_startkey, descending=True,
                             reduce=True, group=True, group_level=group_level)

    view_stats_net_sent = db.view('_design/all_nodes_bytes_sent/_view/get_all_nodes_bytes_sent',
                                  startkey=str_endkey, endkey=str_startkey, descending=True,
                                  reduce=True, group=True, group_level=group_level)

    view_stats_net_received = db.view('_design/all_nodes_bytes_recv/_view/get_all_nodes_bytes_recv',
                                      startkey=str_endkey, endkey=str_startkey, descending=True,
                                      reduce=True, group=True, group_level=group_level)


    # view_util.get_sum_count appears to collapse each view result into a dict
    # keyed by node id with running 'total_sum' and 'total_count' values, which
    # the loops below turn into per-node averages.
    node_id_cpu_avg = view_util.get_sum_count(view_stats_cpu)
    node_id_mem_used_avg = view_util.get_sum_count(view_stats_mem)
    node_id_net_sent_avg = view_util.get_sum_count(view_stats_net_sent)
    node_id_net_received_avg = view_util.get_sum_count(view_stats_net_received)

    for key, value in node_id_cpu_avg.items():
        values_treemap_cpu.append(
            [key, 'Average CPU Usage', float(value['total_sum']) / value['total_count']])

    for key, value in node_id_mem_used_avg.items():
        values_treemap_mem_used.append(
            [key, 'Average Memory Usage', float(value['total_sum']) / value['total_count']])

    for key, value in node_id_net_sent_avg.items():
        values_treemap_data_sent.append(
            [key, 'Average Data Sent', float(value['total_sum']) / value['total_count']])

    for key, value in node_id_net_received_avg.items():
        values_treemap_data_received.append(
            [key, 'Average Data Received', float(value['total_sum']) / value['total_count']])


    return node_treemap(cpu_usage=values_treemap_cpu, memory_usage=values_treemap_mem_used,
                        data_sent=values_treemap_data_sent, data_received=values_treemap_data_received)
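
# Example call (hypothetical timestamps): per-node averages bucketed by day
# (group_level 4 = node_id + year/month/day):
#
#   treemap = get_view_all_nodes_average_attribute_treemap(
#       start_timestamp=1400000000, end_timestamp=1400086400, group_level=4)
#
# which returns node_treemap(...) carrying the cpu_usage, memory_usage,
# data_sent and data_received row lists built above.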