Example #1
def prepare(metric, file, output):
    with open(file, 'r') as f:
        data = json.load(f)
    _rs = []
    _lm = []
    _am = []
    _al = []
    _as = []
    _ar = []
    _ls = []
    _ms = []
    _rl = []
    _rm = []

    for k, v in data.iteritems():
        for key, value in v.iteritems():
            if key == "rs" or key == "sr":
                _rs.append(value)
            elif key == "lm" or key == "ml":
                _lm.append(value)
            elif key == "am" or key == "ma":
                _am.append(value)
            elif key == "al" or key == "la":
                _al.append(value)
            elif key == "as" or key == "sa":
                _as.append(value)
            elif key == "ar" or key == "ra":
                _ar.append(value)
            elif key == "ls" or key == "sl":
                _ls.append(value)
            elif key == "ms" or key == "sm":
                _ms.append(value)
            elif key == "rl" or key == "lr":
                _rl.append(value)
            elif key == "rm" or key == "mr":
                _rm.append(value)

    _rs_avg = calc.calcular_full(_rs)
    _lm_avg = calc.calcular_full(_lm)
    _am_avg = calc.calcular_full(_am)
    _al_avg = calc.calcular_full(_al)
    _as_avg = calc.calcular_full(_as)
    _ar_avg = calc.calcular_full(_ar)
    _ls_avg = calc.calcular_full(_ls)
    _ms_avg = calc.calcular_full(_ms)
    _rl_avg = calc.calcular_full(_rl)
    _rm_avg = calc.calcular_full(_rm)
    _aa_avg = 1.0
    _ss_avg = 1.0
    _rr_avg = 1.0
    _ll_avg = 1.0
    _mm_avg = 1.0

    color_bar(_rs_avg, _lm_avg, _am_avg, _al_avg, _as_avg, _ar_avg, _ls_avg,
              _ms_avg, _rl_avg, _rm_avg, _aa_avg, _ss_avg, _rr_avg, _ll_avg,
              _mm_avg, output)
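
All of these examples lean on a helper module named calc whose source is not shown on this page. Judging from the keys the callers read ('soma', 'media', 'variancia', 'desvio_padrao') and from their `is not None` guards, a minimal stand-in might look like the sketch below; the real implementation may differ.

# Hypothetical stand-in for the `calc` module used throughout these examples.
# Assumption: calcular_full returns the four keys the callers read, and both
# helpers return None for empty input (the callers guard with `is not None`).
import math

def calcular_full(values):
    if not values:
        return None
    soma = float(sum(values))
    media = soma / len(values)
    variancia = sum((v - media) ** 2 for v in values) / len(values)
    return {'soma': soma,
            'media': media,
            'variancia': variancia,
            'desvio_padrao': math.sqrt(variancia)}

def calcular(values):
    full = calcular_full(values)  # same statistics, trimmed to the fields the callers use
    if full is None:
        return None
    return {'media': full['media'], 'desvio_padrao': full['desvio_padrao']}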
Example #2
def prepare(dataset, metric, graph_type, alg, file):
    metric_plot = {}  # Store the network name and the highest value of the metric
    with open(file, "r") as f:
        data = json.load(f)
    for net in os.listdir(dataset):  # For each network model in the dataset
        if os.path.isdir(dataset + net):
            t = data[
                net]  # Gets the best threshold determined by Chen's metrics
            threshold = t['threshold']
            print net, threshold, t
            if not os.path.isfile(
                    str(dataset) + str(net) + "/" + str(threshold) + ".json"):
                print("Impossível abrir arquivo com threshold: " +
                      str(dataset) + str(net) + "/" + str(threshold) + ".json")
            else:
                with open(
                        str(dataset) + str(net) + "/" + str(threshold) +
                        ".json",
                        'r') as f:  # Open the file with the best threshold
                    data1 = json.load(f)
                    if data1 is not None:
                        _metric = []
                        for k, v in data1.iteritems():
                            _metric.append(v[metric])
                        M = calc.calcular_full(
                            _metric)  # Compute the average for the metric...
                        if M is not None:
                            metric_plot[net] = {
                                'threshold': threshold,
                                metric: float(M['media']),
                                'std': float(M['desvio_padrao'])
                            }
    return metric_plot
Example #3
def prepare(dataset, metric, qds):
    if not os.path.isdir(dataset):
        print("Diretório com " + str(metric) + " não encontrado: " +
              str(dataset))
    else:
        metric_plot = {
        }  # Store the network name and the highest value of the metric

        for net in os.listdir(dataset):
            if os.path.isdir(dataset + net):
                threshold = qds[net]['threshold']
                if os.path.isfile(dataset + net + "/" + threshold + ".json"):
                    data_avg_values = []
                    with open(dataset + net + "/" + threshold + ".json",
                              'r') as f:
                        data = json.load(f)
                        if data is not None:
                            for k, v in data.iteritems():
                                data_avg_values.append(v)
                            #print len(data_avg_values)

                            M = calc.calcular_full(data_avg_values)
                            if M is not None:
                                metric_plot[net] = {
                                    'threshold': threshold,
                                    metric: float(M['media']),
                                    'std': float(M['desvio_padrao'])
                                }
        return metric_plot
Example #4
def prepare(dataset, metric):
	if not os.path.isdir(dataset):
		print ("Directory with "+str(metric)+" not found: "+str(dataset))
	else:
		metric_plot = {}																	# Store the network name and the highest value of the metric

		for directory in os.listdir(dataset):
			if os.path.isdir(dataset+directory):
				net = str(directory)
				metric_plot[net] = {'threshold':' ',metric:float(0),'std':float(0)}
				
				for file in os.listdir(dataset+directory):
					data_avg_values = []
					threshold = file.split(".json")
					threshold = threshold[0]
					with open(dataset+directory+"/"+file, 'r') as f:
						data = json.load(f)
						if data is not None:
							for k,v in data.iteritems():
								data_avg_values.append(v['media'])
							print len(data_avg_values)
												
							M = calc.calcular_full(data_avg_values)
							if M is not None:						
								if	float(M['media']) > metric_plot[net][metric]:
									metric_plot[net] = {'threshold': threshold, metric:float(M['media']),'std':float(M['desvio_padrao'])}
		return metric_plot
Example #5
def metric_calc_less_than(dataset, metric, graph_type, alg, output): # Checks for the best threshold. In case of a tie, keeps the last verified threshold. `output` is the destination directory for the best-threshold file.
	if not os.path.isdir(dataset):
		print ("Directory with "+str(metric)+" not found: "+str(dataset))
	else:
		best_t = {}																	# Store the network name and the best value of the metric

		for net in os.listdir(dataset):
			print
			if os.path.isdir(dataset+net):
				best_t[net] = {'threshold':' ',metric:float("-inf"),'std':float(0)}
				for file in os.listdir(dataset+net):
					data_avg_values = []
					threshold = file.split(".json")
					threshold = threshold[0]
					with open(dataset+net+"/"+file, 'r') as f:
						data = json.load(f)
						if data is not None:
							for k,v in data.iteritems():
								data_avg_values.append(v)
							#print len(data_avg_values)
												
							M = calc.calcular_full(data_avg_values)
							if M is not None:

								print threshold, M['media']
								
								if	float(M['media']) >= best_t[net][metric]:
									best_t[net] = {'threshold': threshold, metric:float(M['media']),'std':float(M['desvio_padrao'])}
			print																		
		with open(str(output)+str(graph_type)+"_"+str(alg)+"_"+str(metric)+"_best_threshold.json", "w") as f:
			f.write(json.dumps(best_t))
		return best_t
Example #6
def lists_verify(file,alters_set):
	with open(file, 'r') as f:
		_ego_jaccard = []
		_users_by_list	= []											#partial
		_full_lists_jaccard = []
		lists_by_ego = 0
		users_total = 0
			
		for line in f:													# for each List
			ubl = 0														# users per list
			a = line.split(' ')
			list_set = set()

			for item in a:
				if item != "\n":
					list_set.add(long(item))
					users_total = users_total+1				# needed because the set size can't be used: the same alter may appear in several Lists
					ubl+=1
		
			j_m = jaccard_modified(list_set,alters_set)
			_users_by_list.append(ubl)
			_ego_jaccard.append(j_m)
			_full_lists_jaccard.append(j_m)
			lists_by_ego = lists_by_ego+1

		users_by_list_average = float(users_total)/float(lists_by_ego)		# cast both operands: integer division would truncate in Python 2
		ego_jaccard = calc.calcular_full(_ego_jaccard)			
		
#		print _users_by_list										#OK
#		print users_by_list_average							#OK
#		print lists_by_ego										#OK
#		print ego_jaccard['media']								#OK
#		time.sleep(10)												#OK

		return _users_by_list, users_by_list_average, lists_by_ego, ego_jaccard['media'], _full_lists_jaccard
Example #7
def algorithm(comm_data_dir, metric):
    data = {
    }  # Store all the Metric values for each threshold of the algorithm in each network - Format {'n8': {1: {'soma': 6.059981138000007, 'media': 0.025787153778723433, 'desvio_padrao': 0.006377214443559922, 'variancia': 4.0668864059149294e-05}, 2: {'soma': 6.059981138000007...}}
    data_overview = {
    }  # Store the network name and the best threshold of the algorithm for the Metric - Format {{'N1':0.012},...}

    if os.path.isdir(comm_data_dir):
        for file in os.listdir(comm_data_dir):
            network = file.split(
                ".json"
            )  # get the file name, which indicates the network being analyzed
            network = network[0]
            data_overview[network] = {
                'threshold': ' ',
                metric: float(0),
                'std': float(0)
            }
            print("\n##################################################")
            print("Recuperando dados da rede " + str(network))

            if os.path.isfile(comm_data_dir + file):
                with open(comm_data_dir + file, 'r') as f:
                    partial = {}

                    for line in f:
                        comm_data = json.loads(line)
                        for k, v in comm_data.iteritems():
                            values = []
                            for item in v:
                                if not math.isnan(
                                        item
                                ):  # exclude METRIC computations that return NaN
                                    values.append(item)

                            result = calc.calcular_full(
                                values
                            )  # Compute the mean and other statistics of the METRIC values gathered for the set of egos using threshold k
                            if result is not None:
                                if float(result['media']
                                         ) > data_overview[network][metric]:
                                    data_overview[network] = {
                                        'threshold': k,
                                        metric: float(result['media']),
                                        'std': float(result['desvio_padrao'])
                                    }
                                partial[
                                    k] = result  # Add the computed results to a dictionary keyed by k (i.e., the threshold used by the algorithm)
                                data[network] = partial
            else:
                print("Arquivo não encontrado: " + str(comm_data_dir + file))

            print data_overview[network]  # Highest average for network [network]
    else:
        print("Diretório não encontrado: " + str(comm_data_dir))
    print
    print("##################################################\n")
    return data, data_overview
Example #8
def algorithm(data_source, output_dir, metric):

	data_overview = {}																	# Store the network name and the best threshold of the algorithm for the Metric - Format {{'N1':0.012},...
	data = {}

	if not os.path.isdir(data_source):
		print ("\n##################################################\n\n")
		print ("Diretório não encontrado: "+str(data_source))
		print ("\n\n##################################################\n")					
	else:	

		if not os.path.exists(output_dir):
			os.makedirs(output_dir)
	
		for file in os.listdir(data_source):
			network = file.split(".json")														# pegar o nome do arquivo que indica o a rede analisada
			network = network[0]

			data_overview[network] = {'threshold':' ',metric:float("-inf")}
			print ("##################################################")
			print ("Preparando resultados para a métrica: "+(metric)+" - Recuperando dados da rede "+str(network))	
		
			with open(data_source+file, 'r') as g:
				for line in g:
					comm_data = json.loads(line) 
					for k, v in comm_data.iteritems():										# For each threshold
						values = []
						for item in v:
							if not math.isnan(item):
								if item != float("inf") and item != float("-inf"): 											# exclui calculo de da METRICA que retorna valor NaN e Infinity
									values.append(item)
						print 						
						print metric
						print values
						print
													
						result = calc.calcular_full(values)									# Compute the mean and other statistics of the METRIC values gathered for the set of egos using threshold k
						if result is not None:	
														
							if	float(result['media']) > data_overview[network][metric]:
								data_overview[network] = {'threshold':k,metric:float(result['media'])}
								result['n_egos'] = len(values)
								result['t_egos'] = len(v)
								data[network] = {'threshold':k,metric:result}
		print ("##################################################")	
	
		with open(output_dir+metric+".json", 'w') as f:
			for k in data:
				output_file = {}
				output_file[k] = data[k]
				f.write(json.dumps(output_file)+"\n")
	
	return data_overview
Example #9
def jaccard_verify(file, alters_set):
    with open(file, 'r') as f:
        _ego_jaccard = []
        _full_lists_jaccard = []

        for line in f:  # for each List
            a = line.split(' ')
            list_set = set()
            if a is not None:
                for item in a:
                    if item != "\n":
                        list_set.add(long(item))

                j_m = jaccard_modified(list_set, alters_set)
                _ego_jaccard.append(j_m)
                _full_lists_jaccard.append(j_m)

        ego_jaccard = calc.calcular_full(_ego_jaccard)

        return ego_jaccard['media'], _full_lists_jaccard
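
Examples #6 and #9 both call a jaccard_modified(list_set, alters_set) helper that is not shown on this page, so its exact "modification" is unknown. One plausible reading, sketched below purely as an assumption, normalizes the overlap by the size of the list instead of by the union, so a list fully contained in the ego's alters scores 1.0.

# Hypothetical sketch of jaccard_modified - the real definition is not shown
# on this page. Assumption: the overlap is normalized by the list size rather
# than by the union, unlike the standard Jaccard index.
def jaccard_modified(list_set, alters_set):
    if not list_set:
        return 0.0
    return float(len(list_set & alters_set)) / float(len(list_set))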
Example #10
def prepare(dataset):
    if not os.path.isdir(dataset):
        print("Directory with modularities not found: " + str(dataset))
    else:
        modularity_plot = {
        }  # Store the network name and the highest modularity value - Format {{'N1':0.012},...}

        for file in os.listdir(dataset):
            net = file.split(".json")
            net = net[0]
            with open(dataset + file, 'r') as f:
                data = json.load(f)
            M = calc.calcular_full(data)
            if M is not None:
                modularity_plot[net] = {
                    'threshold': '1',
                    'modularity': float(M['media']),
                    'std': float(M['desvio_padrao'])
                }

        return modularity_plot
Example #11
def net_structure(dataset_dir, output_dir, net, IsDir, weight):
    print(
        "\n######################################################################\n"
    )
    if os.path.isfile(str(output_dir) + str(net) + "_clustering_coef.json"):
        print("Arquivo já existe: " + str(output_dir) + str(net) +
              "_clustering_coef.json")
    else:

        print("Dataset clustering coefficient - " + str(dataset_dir))

        cf = []  # Mean of the clustering coefficients per ego-network
        gcf = []  # Mean using the global option
        n = []  # vector with the number of vertices for each ego-network
        e = []  # vector with the number of edges for each ego-network
        i = 0

        for file in os.listdir(dataset_dir):

            i += 1
            print(
                str(output_dir) + str(net) + "/" + str(file) +
                " - Computing properties for ego " + str(i) + ": " +
                str(file))
            if IsDir is True:
                G = snap.LoadEdgeList(
                    snap.PNGraph, dataset_dir + file, 0, 1
                )  # load from a text file - may require a separator, e.g.: snap.LoadEdgeList(snap.PNGraph, file, 0, 1, '\t')
            else:
                G = snap.LoadEdgeList(
                    snap.PUNGraph, dataset_dir + file, 0, 1
                )  # load from a text file - may require a separator, e.g.: snap.LoadEdgeList(snap.PUNGraph, file, 0, 1, '\t')
#			G.Dump()
#			time.sleep(5)

#####################################################################################

            n.append(G.GetNodes())  # Number of vertices
            e.append(G.GetEdges())  # Number of edges
            n_nodes = G.GetNodes()
            n_edges = G.GetEdges()

            #####################################################################################
            #Using the local option - returns the same result as the global one
            if n_edges == 0:
                a = 0
                cf.append(a)
                print("No edges found for ego-network " + str(i) +
                      " - (" + str(file) + ")")
            else:
                NIdCCfH = snap.TIntFltH()
                snap.GetNodeClustCf(G, NIdCCfH)
                _cf = []
                for item in NIdCCfH:
                    _cf.append(NIdCCfH[item])  # Clustering coefficient
                result = calc.calcular(_cf)
                cf.append(result['media'])
                print("Clustering Coef para o ego " + str(i) + " (" +
                      str(file) + "): " + str(result['media']))
                print


#####################################################################################
#Using the global option - returns the same result as the local one
#
#			if n_edges == 0:
#				a = 0
#				gcf.append(a)
#			else:
#				GraphClustCoeff = snap.GetClustCf (G)
#				gcf.append(GraphClustCoeff)
#				print "Clustering coefficient: %f" % GraphClustCoeff
#				print

#####################################################################################
        CF = calc.calcular_full(cf)

        overview = {}
        overview['ClusteringCoefficient'] = CF

        with open(str(output_dir) + str(net) + "_clustering_coef.json",
                  'w') as f:
            f.write(json.dumps(overview))

        with open(str(output_dir) + str(net) + "_clustering_coef.txt",
                  'w') as f:
            f.write(
                "\n######################################################################\n"
            )
            f.write(
                "Clustering Coef: Média: %5.3f -- Var:%5.3f -- Des. Padrão: %5.3f \n"
                % (CF['media'], CF['variancia'], CF['desvio_padrao']))
            f.write(
                "\n######################################################################\n"
            )

        print(
            "\n######################################################################\n"
        )
        print(
            "Clustering Coef: Média: %5.3f -- Var:%5.3f -- Des. Padrão: %5.3f \n"
            % (CF['media'], CF['variancia'], CF['desvio_padrao']))
        print(
            "\n######################################################################\n"
        )
Example #12
def net_structure(dataset_dir, output_dir, net, IsDir, weight):
    print(
        "\n######################################################################\n"
    )
    if os.path.isfile(str(output_dir) + str(net) + "_net_struct.json"):
        print("Arquivo já existe: " + str(output_dir) + str(net) +
              "_net_struct.json")
    else:

        print("Dataset network structure - " + str(dataset_dir))
        n = []  # Mean of nodes per ego-network
        e = []  # Mean of edges per ego-network
        nodes = {}  # key-value of ego_id and number of vertices
        edges = {}  # key-value of ego_id and number of edges

        d = []  # Mean of diameters per ego-network
        diameter = {}  # key-value of ego_id and diameter

        dens = []
        density = {}
        cc = []  # Mean of the closeness centralities
        bc_n = []  # mean betweenness centrality of the nodes
        bc_e = []  # mean betweenness centrality of the edges

        degree = {
        }  # key-value storing "node degree - number of nodes with that degree"
        i = 0

        for file in os.listdir(dataset_dir):
            ego_id = file.split(".edge_list")
            ego_id = long(ego_id[0])
            i += 1
            print(
                str(output_dir) + str(net) +
                " - Computing properties for ego " + str(i) + ": " +
                str(file))
            if IsDir is True:
                G = snap.LoadEdgeList(
                    snap.PNGraph, dataset_dir + file, 0, 1
                )  # load from a text file - may require a separator, e.g.: snap.LoadEdgeList(snap.PNGraph, file, 0, 1, '\t')
            else:
                G = snap.LoadEdgeList(
                    snap.PUNGraph, dataset_dir + file, 0, 1
                )  # load from a text file - may require a separator, e.g.: snap.LoadEdgeList(snap.PUNGraph, file, 0, 1, '\t')

#####################################################################################
            n_nodes = G.GetNodes()
            n_edges = G.GetEdges()
            nodes[ego_id] = n_nodes  # Dictionary ego_id = vertices
            edges[ego_id] = n_edges
            n.append(n_nodes)  # Number of vertices
            e.append(n_edges)  # Number of edges

            if n_edges == 0:
                a = 0
                d.append(a)
                cc.append(a)
                bc_n.append(a)
                bc_e.append(a)
                dens.append(a)
                density[ego_id] = a
                diameter[ego_id] = a
            else:
                #####################################################################################

                w = float(n_edges) / (float(n_nodes) * (float(n_nodes) - 1)
                                      )  # Compute the network density
                dens.append(w)
                density[ego_id] = w

                #####################################################################################

                z = snap.GetBfsFullDiam(G, 100, IsDir)
                d.append(z)  # get diameter of G
                diameter[ego_id] = z

                #####################################################################################

                Normalized = True
                for NI in G.Nodes():
                    cc.append(
                        snap.GetClosenessCentr(
                            G, NI.GetId(), Normalized,
                            IsDir))  # get the closeness centrality

#####################################################################################
#### Needs a fix... make a version 5 with the correction: if n_nodes < 3, bc is set to n_edges but should be bc=0
                if n_edges == 0 or n_nodes < 3:
                    bc_n.append(n_edges)
                    bc_e.append(n_edges)
                else:
                    Nodes = snap.TIntFltH()
                    Edges = snap.TIntPrFltH()
                    snap.GetBetweennessCentr(
                        G, Nodes, Edges, 1.0,
                        IsDir)  # Betweenness centrality of Nodes and Edges

                    if IsDir is True:
                        max_betweenness = (n_nodes - 1) * (n_nodes - 2)
                    else:
                        max_betweenness = ((n_nodes - 1) * (n_nodes - 2)) / 2

                    for node in Nodes:
                        bc_n_normalized = float(
                            Nodes[node]) / float(max_betweenness)
                        bc_n.append(bc_n_normalized)

                    for edge in Edges:
                        bc_e_normalized = float(
                            Edges[edge]) / float(max_betweenness)
                        bc_e.append(bc_e_normalized)

#####################################################################################

                    DegToCntV = snap.TIntPrV()
                    snap.GetDegCnt(
                        G, DegToCntV)  # Degree of each node in each ego-network
                    for item in DegToCntV:
                        k = item.GetVal1()
                        v = item.GetVal2()
                        if degree.has_key(k):
                            degree[k] = degree[k] + v
                        else:
                            degree[k] = v

#####################################################################################

                print n[i - 1], e[i - 1], dens[i - 1], d[i - 1], cc[i - 1], bc_n[i - 1], bc_e[i - 1]
                print
#####################################################################################

        N = calc.calcular_full(n)
        E = calc.calcular_full(e)

        histogram.histogram(degree, output_dir + "histogram" + "/", N['soma'],
                            net)

        DENS = calc.calcular_full(dens)

        D = calc.calcular_full(d)

        CC = calc.calcular_full(cc)

        BC_N = calc.calcular_full(bc_n)
        BC_E = calc.calcular_full(bc_e)

        overview = {}

        overview['Nodes'] = N
        overview['Edges'] = E
        overview['Density'] = DENS
        overview['Diameter'] = D
        overview['CloseCentr'] = CC
        overview['BetweennessCentrNodes'] = BC_N
        overview['BetweennessCentrEdges'] = BC_E

        nodes_stats = calc.calcular_full(n)
        edges_stats = calc.calcular_full(e)

        overview_basics = {
            'nodes': n,
            'nodes_stats': nodes_stats,
            'edges': e,
            'edges_stats': edges_stats
        }

        output_basics = output_dir + "/" + str(net) + "/"
        if not os.path.exists(output_basics):
            os.makedirs(output_basics)

        with open(str(output_basics) + str(net) + "_nodes.json", 'w') as f:
            f.write(json.dumps(nodes))
        with open(str(output_basics) + str(net) + "_edges.json", 'w') as f:
            f.write(json.dumps(edges))
        with open(str(output_basics) + str(net) + "_density.json", 'w') as f:
            f.write(json.dumps(density))
        with open(str(output_basics) + str(net) + "_diameter.json", 'w') as f:
            f.write(json.dumps(diameter))

        with open(str(output_basics) + str(net) + "_overview.json", 'w') as f:
            f.write(json.dumps(overview_basics))

        with open(str(output_dir) + str(net) + "_net_struct.json", 'w') as f:
            f.write(json.dumps(overview))
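
The degree dictionary accumulated above is handed to histogram.histogram(degree, output_dir + "histogram" + "/", N['soma'], net), another helper whose source is not on this page. A minimal sketch, assuming it plots the degree distribution normalized by the total node count and saves the figure under the given directory (the file name below is illustrative only):

# Hypothetical stand-in for the histogram module used in Example #12.
# Assumption: it renders degree vs. fraction of nodes and writes a PNG.
import os
import matplotlib
matplotlib.use('Agg')  # render without a display
import matplotlib.pyplot as plt

def histogram(degree, output_dir, total_nodes, net):
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    degrees = sorted(degree.keys())
    fractions = [float(degree[k]) / float(total_nodes) for k in degrees]
    plt.figure()
    plt.bar(degrees, fractions)
    plt.xlabel('Degree')
    plt.ylabel('Fraction of nodes')
    plt.title(str(net) + ' - Degree distribution')
    plt.savefig(os.path.join(output_dir, str(net) + '_degree_histogram.png'))
    plt.close()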
Example #13
def calc_metrics(communities, G, uw, ud):
######################################################################################################################################################################



	average_degree = [] 
	conductance = []		
	cut_ratio = []
	density = []
	expansion = []
	normal_cut_ratio = []
	separability = []
	clustering = []

	if ud is False:													# For directed graphs...
		for k,community in communities.iteritems():
			_average_degree = 0
			_conductance = 0
			_cut_ratio = 0
			_density = 0
			_expansion = 0
			_normal_cut_ratio = 0
			_separability = 0

			average_degree.append(_average_degree)														# Store the results for each partition so the ego average can be computed later.
			conductance.append(_conductance)																# Store the results for each partition so the ego average can be computed later.
			cut_ratio.append(_cut_ratio)																	# Store the results for each partition so the ego average can be computed later.
			density.append(_density)																		# Store the results for each partition so the ego average can be computed later.
			expansion.append(_expansion)																	# Store the results for each partition so the ego average can be computed later.
			normal_cut_ratio.append(_normal_cut_ratio)												# Store the results for each partition so the ego average can be computed later.
			separability.append(_separability)															# Store the results for each partition so the ego average can be computed later.
	
	else:																			# For undirected graphs...
		clustering_of_G = nx.clustering(G,weight='weight')			# Per-node clustering coefficients for the Graph (a dict keyed by node, which is indexed below)
		
		for k,community in communities.iteritems():
			_average_degree = 0 
			_conductance = 0		
			_cut_ratio = 0
			_density = 0
			_expansion = 0
			_normal_cut_ratio = 0
			_separability = 0
############################################################################################################ 	CLUSTERING COEFFICIENT
			_cc = []										# Collect the clustering coefficient of each Node in the community
			for Node in community:
				try:
					_cc.append(clustering_of_G[Node])
				except Exception as e:
					print ("Error - "+str(e))
			if _cc:														# an empty list would break the average
				_clustering = calc.calcular(_cc)		# Average clustering coefficient of the community
				clustering.append(_clustering['media'])												# Store the results for each partition so the ego average can be computed later.
			else:
				_clustering = 0
				clustering.append(_clustering)															# Store the results for each partition so the ego average can be computed later.
#############################################################################################################				
				
			average_degree.append(_average_degree)														# Store the results for each partition so the ego average can be computed later.
			conductance.append(_conductance)																# Store the results for each partition so the ego average can be computed later.
			cut_ratio.append(_cut_ratio)																	# Store the results for each partition so the ego average can be computed later.
			density.append(_density)																		# Store the results for each partition so the ego average can be computed later.
			expansion.append(_expansion)																	# Store the results for each partition so the ego average can be computed later.
			normal_cut_ratio.append(_normal_cut_ratio)												# Store the results for each partition so the ego average can be computed later.
			separability.append(_separability)
				
	
	avg_ad = calc.calcular_full(average_degree)	
	avg_c = calc.calcular_full(conductance)
	avg_cut_r = calc.calcular_full(cut_ratio)
	avg_d = calc.calcular_full(density)
	avg_e = calc.calcular_full(expansion)
	avg_normal_cut = calc.calcular_full(normal_cut_ratio)
	avg_s = calc.calcular_full(separability)
	avg_cc = calc.calcular_full(clustering)		

	print avg_ad, avg_c, avg_cut_r, avg_d, avg_e, avg_normal_cut, avg_s, avg_cc
	time.sleep(5)
	return avg_ad, avg_c, avg_cut_r, avg_d, avg_e, avg_normal_cut, avg_s, avg_cc
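
The loops above fix the expected input shapes for calc_metrics: communities is a dict mapping a partition id to an iterable of node ids, and G is a networkx graph. A toy invocation, with placeholder data that is not from the original project:

# Usage sketch for Example #13 - the graph and the communities are toy
# placeholders, only meant to illustrate the expected input shapes.
import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([(1, 2, 1.0), (2, 3, 1.0), (3, 1, 1.0), (3, 4, 1.0)])
communities = {0: [1, 2, 3], 1: [4]}
avgs = calc_metrics(communities, G, True, True)  # uw=True, ud=True (undirected branch)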
Example #14
def net_structure(dataset_dir, output_dir, net, IsDir, weight):
    print(
        "\n######################################################################\n"
    )
    print("Dataset network structure - " + str(dataset_dir))
    n = []  # Mean of nodes per ego-network
    nodes = {}
    e = []
    edges = {}  # Mean of edges per ego-network
    i = 0

    output_basics = output_dir + "/" + str(net) + "/"
    if not os.path.exists(output_basics):
        os.makedirs(output_basics)

    if os.path.isfile(str(output_basics) + str(net) + "_overview.json"):
        print("File already exists! " + str(output_basics) + str(net) +
              "_overview.json")
    else:
        for file in os.listdir(dataset_dir):
            ego_id = file.split(".edge_list")
            ego_id = long(ego_id[0])
            i += 1
            print(
                str(output_dir) + str(net) +
                " - Computing properties for ego " + str(i) + ": " +
                str(file))
            if IsDir is True:
                G = snap.LoadEdgeList(
                    snap.PNGraph, dataset_dir + file, 0, 1
                )  # load from a text file - may require a separator, e.g.: snap.LoadEdgeList(snap.PNGraph, file, 0, 1, '\t')
            else:
                G = snap.LoadEdgeList(
                    snap.PUNGraph, dataset_dir + file, 0, 1
                )  # load from a text file - may require a separator, e.g.: snap.LoadEdgeList(snap.PUNGraph, file, 0, 1, '\t')

#####################################################################################
            n_nodes = G.GetNodes()
            n_edges = G.GetEdges()
            nodes[ego_id] = n_nodes
            edges[ego_id] = n_edges
            n.append(n_nodes)  # Number of vertices
            e.append(n_edges)  # Number of edges
        nodes_stats = calc.calcular_full(n)
        edges_stats = calc.calcular_full(e)
        overview_basics = {
            'nodes': n,
            'nodes_stats': nodes_stats,
            'edges': e,
            'edges_stats': edges_stats
        }

        #####################################################################################

        with open(str(output_basics) + str(net) + "_nodes.json", 'w') as f:
            f.write(json.dumps(nodes))
        with open(str(output_basics) + str(net) + "_edges.json", 'w') as f:
            f.write(json.dumps(edges))

        with open(str(output_basics) + str(net) + "_overview.json", 'w') as f:
            f.write(json.dumps(overview_basics))
Example #15
def statistics(dataset_dir, output_dir, net, isdir):
    print(
        "\n######################################################################\n"
    )
    print("Dataset statistics - " + str(dataset_dir))
    IsDir = isdir
    n = []  # Mean of nodes per ego-network
    e = []  # Mean of edges per ego-network
    d = []  # Mean of diameters per ego-network
    cc = []  # Mean of clustering coefficients per ego-network
    bc_n = []  # mean betweenness centrality of the nodes
    bc_e = []  # mean betweenness centrality of the edges

    i = 0
    for file in os.listdir(dataset_dir):
        i += 1
        print("Calculando propriedades para o ego %d..." % (i))

        G = snap.LoadEdgeList(snap.PNGraph, dataset_dir + file, 0,
                              1)  # load from a text file
        n.append(G.GetNodes())  # Number of vertices
        e.append(G.GetEdges())  # Number of edges
        d.append(snap.GetBfsFullDiam(G, 100, IsDir))  # get diameter of G
        #		cc.append(snap.GetClustCf(G))																		# clustering coefficient of G

        Nodes = snap.TIntFltH()
        Edges = snap.TIntPrFltH()
        snap.GetBetweennessCentr(G, Nodes, Edges, 1.0, IsDir)
        _bc_n = []
        _bc_e = []
        for node in Nodes:
            _bc_n.append(Nodes[node])
        for edge in Edges:
            _bc_e.append(Edges[edge])
        result = calc.calcular(_bc_n)
        bc_n.append(result['media'])
        result = calc.calcular(_bc_e)
        bc_e.append(result['media'])

#####################################################################################
    N = calc.calcular_full(n)
    E = calc.calcular_full(e)
    D = calc.calcular_full(d)
    BC_N = calc.calcular_full(bc_n)
    BC_E = calc.calcular_full(bc_e)
    print(
        "\n######################################################################\n"
    )
    print("NET: %s -- Egos-net: %d" % (net, len(n)))
    print("Nodes: Média: %5.3f -- Var:%5.3f -- Des. Padrão: %5.3f" %
          (N['media'], N['variancia'], N['desvio_padrao']))
    print("Edges: Média: %5.3f -- Var:%5.3f -- Des. Padrão: %5.3f" %
          (E['media'], E['variancia'], E['desvio_padrao']))
    print("Diameter: Média: %5.3f -- Var:%5.3f -- Des. Padrão: %5.3f" %
          (D['media'], D['variancia'], D['desvio_padrao']))
    print(
        "Betweenness Centr Nodes: Média: %5.3f -- Var:%5.3f -- Des. Padrão: %5.3f"
        % (BC_N['media'], BC_N['variancia'], BC_N['desvio_padrao']))
    print(
        "Betweenness Centr Edges: Média: %5.3f -- Var:%5.3f -- Des. Padrão: %5.3f"
        % (BC_E['media'], BC_E['variancia'], BC_E['desvio_padrao']))
    print(
        "\n######################################################################\n"
    )
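
A typical call to statistics, with placeholder paths rather than the project's real ones:

# Usage sketch for Example #15 - both directories are hypothetical.
statistics("/home/user/dataset/n1/", "/home/user/output/", "n1", False)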
Example #16
def prepare(metric, file, output):
    with open(file, 'r') as f:
        data = json.load(f)
    _rs = []
    _lm = []
    _am = []
    _al = []
    _as = []
    _ar = []
    _ls = []
    _ms = []
    _rl = []
    _rm = []

    for k, v in data.iteritems():
        for key, value in v.iteritems():
            if key == "rs":
                _rs.append(value)
            elif key == "lm":
                _lm.append(value)
            elif key == "am":
                _am.append(value)
            elif key == "al":
                _al.append(value)
            elif key == "as":
                _as.append(value)
            elif key == "ar":
                _ar.append(value)
            elif key == "ls":
                _ls.append(value)
            elif key == "ms":
                _ms.append(value)
            elif key == "rl":
                _rl.append(value)
            elif key == "rm":
                _rm.append(value)
    plot_hist(_rs, output, metric, "Retweets X Followee")
    plot_hist(_lm, output, metric, "Likes X Mentions")
    plot_hist(_am, output, metric, "Follow X Mentions")
    plot_hist(_al, output, metric, "Follow X Likes")
    plot_hist(_as, output, metric, "Follow X Followee")
    plot_hist(_ar, output, metric, "Follow X Retweets")
    plot_hist(_ls, output, metric, "Likes X Followee")
    plot_hist(_ms, output, metric, "Mentions X Followee")
    plot_hist(_rl, output, metric, "Retweets X Likes")
    plot_hist(_rm, output, metric, "Retweets X Mentions")

    _rs_avg = calc.calcular_full(_rs)
    _rs_avg = _rs_avg['media']

    _lm_avg = calc.calcular_full(_lm)
    _lm_avg = _lm_avg['media']

    _am_avg = calc.calcular_full(_am)
    _am_avg = _am_avg['media']

    _al_avg = calc.calcular_full(_al)
    _al_avg = _al_avg['media']

    _as_avg = calc.calcular_full(_as)
    _as_avg = _as_avg['media']

    _ar_avg = calc.calcular_full(_ar)
    _ar_avg = _ar_avg['media']

    _ls_avg = calc.calcular_full(_ls)
    _ls_avg = _ls_avg['media']

    _ms_avg = calc.calcular_full(_ms)
    _ms_avg = _ms_avg['media']

    _rl_avg = calc.calcular_full(_rl)
    _rl_avg = _rl_avg['media']

    _rm_avg = calc.calcular_full(_rm)
    _rm_avg = _rm_avg['media']

    _aa_avg = 1.0
    _ss_avg = 1.0
    _rr_avg = 1.0
    _ll_avg = 1.0
    _mm_avg = 1.0

    color_bar(_rs_avg, _lm_avg, _am_avg, _al_avg, _as_avg, _ar_avg, _ls_avg,
              _ms_avg, _rl_avg, _rm_avg, _aa_avg, _ss_avg, _rr_avg, _ll_avg,
              _mm_avg, output)
Example #17
def net_structure(dataset_dir, output_dir, graph_type, metric, net, alg):
    os.system('clear')
    print(
        "\n######################################################################\n"
    )
    print(
        "\nScript to compute the modularity of the detected communities\n")

    graphs_dir = "/home/amaury/graphs_hashmap_infomap_without_weight/" + str(
        net) + "/" + str(graph_type) + "/"

    if not os.path.exists(graphs_dir):
        print("Directory not found: " + str(graphs_dir))

    else:
        print(
            "\n######################################################################\n"
        )
        print(
            "\nScript to compute the modularity of the detected communities - Network "
            + str(net) + "\n")

        if not os.path.isdir(dataset_dir + str(net) + "/"):
            print("Directory with evaluations for network " + str(net) +
                  " not found: " + str(dataset_dir + str(net) + "/"))
        else:
            for threshold in os.listdir(dataset_dir + str(net) + "/"):
                if os.path.isfile(str(output_dir) + str(threshold) + ".json"):
                    print("Destination file already exists. " + str(output_dir) +
                          str(threshold) + ".json")
                else:

                    modularity = [
                    ]  # Vector with the mean modularity of each graph
                    modularity_data = {
                    }  # Dictionary with the ego and the modularities of each community
                    overview = None  # written below only if the statistics can be computed
                    i = 0

                    for file in os.listdir(dataset_dir + str(net) + "/" +
                                           str(threshold) + "/"):
                        i += 1
                        ego_id = file.split(".txt")
                        ego_id = long(ego_id[0])
                        communities = [
                        ]  # Store the communities of the ego-network
                        m_file = [
                        ]  # modularity vector for the communities of ego i

                        try:
                            G = snap.LoadEdgeList(
                                snap.PNGraph,
                                str(graphs_dir) + str(ego_id) + ".edge_list",
                                0, 1
                            )  # load from a text file - may require a separator, e.g.: snap.LoadEdgeList(snap.PNGraph, file, 0, 1, '\t')
                            n_edges = G.GetEdges(
                            )  # Number of edges of the graph

                            if n_edges == 0:
                                a = 0
                                m_file.append(a)
                            else:
                                try:
                                    with open(
                                            dataset_dir + str(net) + "/" +
                                            str(threshold) + "/" + str(file),
                                            'r') as f:
                                        for line in f:
                                            comm = [
                                            ]  # List to store the communities
                                            a = line.split(' ')
                                            for item in a:
                                                if item != "\n":
                                                    comm.append(item)
                                            communities.append(comm)
                                except Exception as e:
                                    print(
                                        "\nERROR - Unable to load the communities: "
                                        + dataset_dir + str(net) + "/" +
                                        str(threshold) + "/" + str(file) +
                                        "\n")
                                    print e

                                for comm in communities:
                                    if comm is not None:
                                        Nodes = snap.TIntV()
                                        for nodeId in comm:
                                            if nodeId is not None:
                                                Nodes.Add(long(nodeId))
                                        m_file.append(
                                            snap.GetModularity(
                                                G, Nodes, n_edges)
                                        )  # Pass the graph's edge count as a parameter to speed up the computation

                        except Exception as e:
                            print(
                                "\nERROR - Unable to load the graph for ego: "
                                + str(ego_id) + "  --  " + str(graphs_dir) +
                                str(ego_id) + ".edge_list\n")
                            print e

                        _m_file = calc.calcular(m_file)
                        modularity_data[ego_id] = m_file
                        if _m_file is not None:
                            modularity.append(_m_file['media'])

                            print(
                                str(graph_type) + " - Network: " + str(net) +
                                " - Threshold: " + str(threshold) +
                                " - Modularity for ego " + str(i) + " (" +
                                str(file) + "): %5.3f" % (_m_file['media']))
                            print(
                                "######################################################################"
                            )
                    M = calc.calcular_full(modularity)

                    if M is not None:
                        overview = {
                            'threshold': threshold,
                            'modularity': M,
                            'modularity_data': modularity_data
                        }
                        print(
                            "\n######################################################################\n"
                        )
                        print(
                            "Network: %s   ---   Threshold: %s   ---   Modularity: Mean: %5.3f -- Var: %5.3f -- Std. dev.: %5.3f"
                            % (net, threshold, M['media'], M['variancia'],
                               M['desvio_padrao']))
                        print(
                            "\n######################################################################\n"
                        )

                    if overview is not None:
                        with open(
                                str(output_dir) + str(threshold) + ".json",
                                'a+') as f:
                            f.write(json.dumps(overview) + "\n")

    print(
        "\n######################################################################\n"
    )
Example #18
def prepare(metric, file):
	with open(file,'r') as f:
		data = json.load(f)

	_aa = []
	_as = []
	_ar = []
	_al = []
	_am = []

	_sa = []
	_ss = []
	_sr = []
	_sl = []
	_sm = []

	_ra = []
	_rs = []
	_rr = []
	_rl = []
	_rm = []

	_la = []
	_ls = []
	_lr = []
	_ll = []
	_lm = []

	_ma = []
	_ms = []
	_mr = []
	_ml = []
	_mm = []

	
	
	for k,v in data.iteritems():
		for key,value in v.iteritems():
			if key == "aa":
				_aa.append(value)
			elif key == "as":
				_as.append(value)
			elif key == "ar":
				_ar.append(value)
			elif key == "al":
				_al.append(value)
			elif key == "am":
				_am.append(value)

			elif key == "sa":
				_sa.append(value)
			elif key == "ss":
				_ss.append(value)
			elif key == "sr":
				_sr.append(value)
			elif key == "sl":
				_sl.append(value)
			elif key == "sm":
				_sm.append(value)

			elif key == "ra":
				_ra.append(value)
			elif key == "rs":
				_rs.append(value)
			elif key == "rr":
				_rr.append(value)
			elif key == "rl":
				_rl.append(value)
			elif key == "rm":
				_rm.append(value)
				
			elif key == "la":
				_la.append(value)
			elif key == "ls":
				_ls.append(value)
			elif key == "lr":
				_lr.append(value)
			elif key == "ll":
				_ll.append(value)
			elif key == "lm":
				_lm.append(value)
				
				
			elif key == "ma":
				_ma.append(value)
			elif key == "ms":
				_ms.append(value)
			elif key == "mr":
				_mr.append(value)
			elif key == "ml":
				_ml.append(value)
			elif key == "mm":
				_mm.append(value)												

			else:
				print ("Rede inválida")
				sys.exit()	
						
	_aa_avg = calc.calcular_full(_aa)
	_as_avg = calc.calcular_full(_as)	
	_ar_avg = calc.calcular_full(_ar)
	_al_avg = calc.calcular_full(_al)
	_am_avg = calc.calcular_full(_am)

	_sa_avg = calc.calcular_full(_sa)
	_ss_avg = calc.calcular_full(_ss)	
	_sr_avg = calc.calcular_full(_sr)
	_sl_avg = calc.calcular_full(_sl)
	_sm_avg = calc.calcular_full(_sm)

	_ra_avg = calc.calcular_full(_ra)
	_rs_avg = calc.calcular_full(_rs)	
	_rr_avg = calc.calcular_full(_rr)
	_rl_avg = calc.calcular_full(_rl)
	_rm_avg = calc.calcular_full(_rm)

	_la_avg = calc.calcular_full(_la)
	_ls_avg = calc.calcular_full(_ls)	
	_lr_avg = calc.calcular_full(_lr)
	_ll_avg = calc.calcular_full(_ll)
	_lm_avg = calc.calcular_full(_lm)

	_ma_avg = calc.calcular_full(_ma)
	_ms_avg = calc.calcular_full(_ms)	
	_mr_avg = calc.calcular_full(_mr)
	_ml_avg = calc.calcular_full(_ml)
	_mm_avg = calc.calcular_full(_mm)		

	color_bar(metric,_aa_avg,_as_avg,_ar_avg,_al_avg,_am_avg, _sa_avg,_ss_avg,_sr_avg,_sl_avg,_sm_avg, _ra_avg,_rs_avg,_rr_avg,_rl_avg,_rm_avg, _la_avg,_ls_avg,_lr_avg,_ll_avg,_lm_avg, _ma_avg,_ms_avg,_mr_avg,_ml_avg,_mm_avg)
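
color_bar itself is not shown on this page. Given that Example #18 passes the 25 layer-pair averages in a fixed a/s/r/l/m order (a=follow, s=followee, r=retweets, l=likes, m=mentions, per the plot_hist titles in Example #16), a plausible sketch arranges them into a 5x5 matrix and renders a heatmap; everything below is an assumption, not the original implementation.

# Hypothetical sketch of color_bar for the 25-value call in Example #18.
# Assumption: each *_avg argument is the dict returned by calc.calcular_full,
# so only its 'media' field is plotted; the output file name is illustrative.
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt

def color_bar(metric, *avgs):
    labels = ['a', 's', 'r', 'l', 'm']
    means = [avg['media'] if avg is not None else 0.0 for avg in avgs]
    matrix = np.array(means).reshape(5, 5)  # rows: aa..am, sa..sm, ra..rm, la..lm, ma..mm
    plt.figure()
    plt.imshow(matrix, interpolation='nearest')
    plt.xticks(range(5), labels)
    plt.yticks(range(5), labels)
    plt.colorbar()
    plt.title(str(metric))
    plt.savefig(str(metric) + '_color_bar.png')
    plt.close()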
Example #19
def instructions(type_graphs, singletons):
    source_dir = "/home/amaury/dataset/ground_truth_only_members/lists_users_TXT/" + str(
        singletons) + "/"
    output_dir = "/home/amaury/Dropbox/lists_properties_only_members/" + str(
        type_graphs) + "_" + str(singletons) + "/"
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    if not os.path.exists(source_dir):
        print(
            "\nUnable to find directory with ground-truth communities: "
            + str(source_dir))
    else:
        users_by_list_avg = []
        users_by_list = []
        lists_by_ego = []

        for file in os.listdir(source_dir):
            _users_by_list, _users_by_list_avg, _lists_by_ego = lists_verify(
                source_dir + file)
            users_by_list_avg.append(
                _users_by_list_avg)  # Users per list - average per ego
            lists_by_ego.append(_lists_by_ego)
            for item in _users_by_list:
                users_by_list.append(item)

        graphics.histogram(users_by_list_avg,
                           output_dir,
                           title='Users by Lists - Average by Ego',
                           xaxis='Users',
                           yaxis='Lists')
        graphics.histogram(users_by_list,
                           output_dir,
                           title='Users by Lists - Full Egos',
                           xaxis='Users',
                           yaxis='Lists')
        graphics.histogram(lists_by_ego,
                           output_dir,
                           title='Lists by Ego',
                           xaxis='Lists',
                           yaxis='Egos')

        USERS_BY_LIST = calc.calcular_full(users_by_list)
        USERS_BY_LIST_AVG = calc.calcular_full(users_by_list_avg)
        LISTS_BY_EGO = calc.calcular_full(lists_by_ego)
        overview = {
            'users_by_list': USERS_BY_LIST,
            'users_by_list_avg': USERS_BY_LIST_AVG,
            'lists_by_ego': LISTS_BY_EGO
        }
        lists_details = {
            'users_by_lists_avg': users_by_list_avg,
            'number_of_lists': lists_by_ego
        }

        with open(output_dir + "lists_overview.json", 'w') as f:
            f.write(json.dumps(overview))
        with open(output_dir + "lists_details.json", 'w') as f:
            f.write(json.dumps(lists_details))

        for i in range(10):

            ego_jaccard = []
            full_lists_jaccard = []
            i += 1
            net = "n" + str(i)
            network = convert_label(net)
            graphs_dir = "/home/amaury/graphs/" + str(net) + "/" + str(
                type_graphs) + "/"

            if not os.path.isdir(graphs_dir):
                print("\nImpossível encontrar diretório com os grafos: " +
                      str(graphs_dir))
            else:
                i = 0
                for file in os.listdir(
                        graphs_dir):  # For each ego of network $net
                    if not os.path.isfile(graphs_dir + file):
                        print(
                            "\nUnable to find file with edge list: "
                            + str(graphs_dir) + str(file))
                    else:
                        ego_id = file.split(".edge_list")
                        ego_id = long(ego_id[0])

                        if not os.path.isfile(source_dir + str(ego_id) +
                                              ".txt"):
                            print(
                                "\nUnable to find ground-truth file: "
                                + str(source_dir) + str(ego_id) + ".txt")
                        else:

                            i += 1
                            print(
                                str(graphs_dir) +
                                " - recuperando alters para o ego: " + str(i) +
                                " - " + str(ego_id))
                            alters_set = recovery_alters(
                                str(graphs_dir) +
                                str(file))  # Recupera os alters para o ego.

                            _ego_jaccard, _full_lists_jaccard = jaccard_verify(
                                str(source_dir) + str(ego_id) + ".txt",
                                alters_set)

                            ego_jaccard.append(_ego_jaccard)

                            for item in _full_lists_jaccard:
                                full_lists_jaccard.append(item)

                output = str(output_dir) + str(network) + "/"
                if not os.path.exists(output):
                    os.makedirs(output)

                graphics.histogram(ego_jaccard,
                                   output,
                                   title=str(network) +
                                   ' - Jaccard Modified by Egos',
                                   xaxis='Jaccard Modified',
                                   yaxis='Egos')
                graphics.histogram(full_lists_jaccard,
                                   output,
                                   title=str(network) +
                                   ' - Jaccard Modified by Lists',
                                   xaxis='Jaccard Modified',
                                   yaxis='Lists')

                JACCARD_AVG = calc.calcular_full(ego_jaccard)
                FULL_LISTS_JACCARD = calc.calcular_full(full_lists_jaccard)

                overview = {
                    'jaccard_by_ego_avg': JACCARD_AVG,
                    'jaccard_by_lists': FULL_LISTS_JACCARD
                }

                with open(output + str(network) + ".json", 'w') as f:
                    f.write(json.dumps(overview))
Example #20
def calc_metrics(communities, G, ud):
######################################################################################################################################################################
######################################################################################################################################################################	
	def calc_average_degree(n_edges,internal_edges,internal_nodes):
		if internal_nodes != 0:
			if ud is False:
				result = float(internal_edges)/float(internal_nodes)
			else:
				result = (2*float(internal_edges))/float(internal_nodes)
		else:
			result = 0		
		return result
######################################################################################################################################################################
######################################################################################################################################################################	
#Ref: Community detection in networks: A user guide - Santo Fortunato - 2016

	def calc_conductance(n_edges,internal_edges,external_edges,internal_nodes,external_nodes):		# Fraction of the total edge volume that falls outside the cluster.
		if internal_edges != 0 and external_edges != 0:																# No need to distinguish directed from undirected here.
			result = float(external_edges)/(2*(float(internal_edges))+float(external_edges))				# Internal edges must be counted twice for the computation to make sense: for external edges only one endpoint is inside, while for internal edges both are.
		else:
			result = 0						
		return result
######################################################################################################################################################################
######################################################################################################################################################################	
	def calc_cut_r(n_edges,internal_edges,external_edges,n_nodes,internal_nodes):							# Fraction of existing edges (out of all possible) leaving the cluster.
		operator1 = external_edges
		operator2 = float(internal_nodes)*(float(n_nodes)-float(internal_nodes))
		if operator2 != 0: 
			result = float(operator1)/float(operator2)
		else:
			result = 0				
		return result
######################################################################################################################################################################
######################################################################################################################################################################	
	def calc_density(n_edges,internal_edges,internal_nodes):
		if internal_nodes-1 > 0:
			if ud is False: # If the graph is directed
				result = float(internal_edges)/(float(internal_nodes)*(float(internal_nodes)-1))				
			else:
				operator2 = (float(internal_nodes)*(float(internal_nodes)-1))/2
				result = float(internal_edges)/float(operator2)
		else:
			result = 0				
		return result
######################################################################################################################################################################
######################################################################################################################################################################			
	def calc_expansion(n_edges,internal_edges,external_edges,internal_nodes):		# Number of edges per vertex that connect to vertices outside the cluster.
		if internal_nodes != 0:			
			result = float(external_edges)/float(internal_nodes)
		else:
			result = 0	
		return result	
######################################################################################################################################################################
######################################################################################################################################################################	
	def calc_normal_cut(n_edges,internal_edges,external_edges):
		if n_edges == internal_edges:
			result = 0
		else:
			if internal_edges == 0 and external_edges == 0:
				result = 0
			else:									# We work only with edges here, so there is no need to worry about multiplying or dividing by two (that would only matter for calculations involving the vertices)
				operator1 = float(external_edges)/float((2*internal_edges)+external_edges)
				operator2 = float(external_edges)/float((2*(n_edges-internal_edges))+external_edges)					
				result = operator1+operator2
				
		return result		
######################################################################################################################################################################
######################################################################################################################################################################
	def calc_separability(n_edges,internal_edges,external_edges,internal_nodes): 
		if external_edges != 0:
			result = float(internal_edges)/float(external_edges)
		else:
			result = 0				
		return result
######################################################################################################################################################################
######################################################################################################################################################################
	
	n_nodes = (G.GetNodes())														# Number of vertices
	n_edges = (G.GetEdges())														# Number of edges

	_metric_ad = []																	#AverageDegree by community
	_metric_c = []																		#Conductance by community
	_metric_cut_r = []																#Cut Ratio by community
	_metric_d = []																		#Density by community
	_metric_e = []																		#Expansion by community
	_metric_normal_cut = []															#Normalized Cut by community	
	_metric_s = []																		#Separability by community
#	print("Number of vertices: "+str(n_nodes)+" - Number of edges: "+str(n_edges))

	for k,community in communities.iteritems():
		internal_edges = 0												# Edges internal to the community
		external_edges = 0												# Edges leaving the community
		internal_nodes = 0												# Vertices in the community
		external_nodes = 0

		in_degree = 0														# Sanity check: confirms we visit every edge entering the community
		out_degree = 0														# Sanity check: confirms we visit every edge leaving the community
		community_degree = 0

		if ud is False:										# FOR DIRECTED GRAPHS
			for NI in G.Nodes():									# For each node of the ego network:
				if NI.GetId() in community:						# If the node is in the community:
					internal_nodes+=1										# increment internal_nodes
					in_degree+=NI.GetInDeg()							# accumulate the node's in-degree
					out_degree+=NI.GetOutDeg()							# accumulate the node's out-degree

					for edge in NI.GetOutEdges():				# For each outgoing edge of the node:
						if edge in community:						# if the destination vertex is in the community (the iterator yields destination vertex ids)
							internal_edges+=1								# increment internal_edges
						else:												# otherwise
							external_edges+=1								# increment external_edges

					for edge in NI.GetInEdges():				# For each incoming edge of the node:
						if edge in community:						# if the source vertex is in the community (the iterator yields source vertex ids)
							internal_edges+=1								# increment internal_edges
						else:												# otherwise
							external_edges+=1								# increment external_edges

			## INTERNAL EDGES - both endpoints lie inside the community, so each edge is seen twice (once per endpoint); divide by two.
			## EXTERNAL EDGES - counted only once, since the other endpoint lies outside the community and cannot be double-counted; no division by two.
			## NOTE: this holds whether the graph is directed or undirected.

			internal_edges = float(internal_edges)/2
			external_edges = float(external_edges)
			total_edges_community = float(internal_edges)+float(external_edges)						# These two lines should yield the same result, even though the operands differ...
			community_degree = (float(in_degree)+float(out_degree))/2									# These two lines should yield the same result, even though the operands differ...

		else:														# FOR UNDIRECTED GRAPHS
			for NI in G.Nodes():									# For each node of the ego network:
				if NI.GetId() in community:						# If the node is in the community:
					internal_nodes+=1								# increment internal_nodes
					in_degree+=NI.GetInDeg()					# accumulate the node's degree
					# No need for the out-degree: in an undirected graph every edge is counted as both incoming and outgoing

					for edge in NI.GetOutEdges():				# For each edge incident to the node:
						if edge in community:						# if the other endpoint is in the community
							internal_edges+=1								# increment internal_edges
						else:												# otherwise
							external_edges+=1								# increment external_edges

			internal_edges = float(internal_edges)/2
			external_edges = float(external_edges)
			total_edges_community = internal_edges+external_edges											# These two lines should yield the same result, even though the operands differ...
			community_degree = float(in_degree)/2																# These two lines should yield the same result, even though the operands differ...

######################################################################################################################################################################
######################################################################################################################################################################
		_result_ad = calc_average_degree(n_edges,internal_edges,internal_nodes) 
		_result_c = calc_conductance(n_edges,internal_edges,external_edges,internal_nodes,external_nodes)		
		_result_cut_r = calc_cut_r(n_edges,internal_edges,external_edges,n_nodes,internal_nodes)
		_result_d = calc_density(n_edges,internal_edges,internal_nodes)
		_result_e = calc_expansion(n_edges,internal_edges,external_edges,internal_nodes)
		_result_normal_cut = calc_normal_cut(n_edges,internal_edges,external_edges)
		_result_s = calc_separability(n_edges,internal_edges,external_edges,internal_nodes)

		_metric_ad.append(_result_ad)																			# Store the per-community result so the ego average can be computed later.
		_metric_c.append(_result_c)																			# Store the per-community result so the ego average can be computed later.
		_metric_cut_r.append(_result_cut_r)																	# Store the per-community result so the ego average can be computed later.
		_metric_d.append(_result_d)																			# Store the per-community result so the ego average can be computed later.
		_metric_e.append(_result_e)																			# Store the per-community result so the ego average can be computed later.
		_metric_normal_cut.append(_result_normal_cut)													# Store the per-community result so the ego average can be computed later.
		_metric_s.append(_result_s)																			# Store the per-community result so the ego average can be computed later.
	
	avg_ad = calc.calcular_full(_metric_ad)	
	avg_c = calc.calcular_full(_metric_c)
	avg_cut_r = calc.calcular_full(_metric_cut_r)
	avg_d = calc.calcular_full(_metric_d)
	avg_e = calc.calcular_full(_metric_e)
	avg_normal_cut = calc.calcular_full(_metric_normal_cut)
	avg_s = calc.calcular_full(_metric_s)		

	return avg_ad, avg_c, avg_cut_r, avg_d, avg_e, avg_normal_cut, avg_s
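
To make the formulas in calc_metrics concrete, here is a minimal self-contained sketch (hypothetical toy counts, no snap dependency) that evaluates the same per-community expressions for a single undirected community:

# Assumed toy community: 4 internal nodes, 3 internal edges, 2 edges leaving it,
# inside an undirected graph with 10 nodes and 20 edges in total.
n_nodes, n_edges = 10, 20
internal_nodes, internal_edges, external_edges = 4, 3.0, 2.0

average_degree = (2 * internal_edges) / internal_nodes                        # undirected case
conductance = external_edges / (2 * internal_edges + external_edges)
cut_ratio = external_edges / (internal_nodes * (n_nodes - internal_nodes))
density = internal_edges / ((internal_nodes * (internal_nodes - 1)) / 2.0)
expansion = external_edges / internal_nodes
separability = internal_edges / external_edges

print("avg degree:   %.3f" % average_degree)   # 1.500
print("conductance:  %.3f" % conductance)      # 0.250
print("cut ratio:    %.3f" % cut_ratio)        # 0.083
print("density:      %.3f" % density)          # 0.500
print("expansion:    %.3f" % expansion)        # 0.500
print("separability: %.3f" % separability)     # 1.500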
def prepare_communities(community_file, n_nodes):
    i = 0

    communities = {}  # Dict with one key per community id, mapping to the list of member ids
    alters_set = set()
    size = []  # List of community sizes
    size_norm = []  # Community sizes normalized by the number of vertices of the ego network
    greater_comm_norm = 0  # Size of the largest community normalized by the graph's vertex set
    n_singletons = 0  # Number of singletons (communities made up of a single vertex)
    n_non_singletons = 0  # Number of non-singletons
    greater_comm = 0  # Size of the largest community
    smaller_comm = float("inf")  # Size of the smallest community; starts at infinity so any real size replaces it

    for line in community_file:
        i += 1
        key = "com" + str(
            i)  # Chave para o dicionário comm - um identificador "comm1"
        comm = []  # Lista para armazenar as os membros da comunidade i
        a = line.split(' ')
        for item in a:
            if item != "\n":
                comm.append(long(item))
                alters_set.add(long(item))

        if len(comm) > 1:
            n_non_singletons += 1
        elif len(comm) == 1:
            n_singletons += 1

        if len(comm) > greater_comm:  # Size of the largest community
            greater_comm = len(comm)

        if len(comm) < smaller_comm:  # Size of the smallest community
            smaller_comm = len(comm)

        communities[key] = comm  # the communities dict receives the list of member ids under the key
        b = float(len(comm)) / float(n_nodes)
        size.append(len(comm))
        size_norm.append(b)

    n_comm = len(communities)  # Number of communities for the ego in question
    greater_comm_norm = float(greater_comm) / float(n_nodes)

    if n_nodes > len(alters_set):
        # Number of alters ignored by the detection process (they never received a community label).
        alters_ignored = n_nodes - len(alters_set)
        alters_ignored_norm = float(alters_ignored) / float(n_nodes)
    else:
        alters_ignored = 0
        alters_ignored_norm = 0

    avg_size = calc.calcular_full(size)  # Aggregate the vector of community sizes...
    avg_size_norm = calc.calcular(size_norm)  # Aggregate the normalized community-size vector...

    overlap = float(avg_size['soma']) / float(n_nodes)  # The overlap: the average number of communities to which each vertex belongs. This is the sum of the sizes of all communities (including singletons) divided by the number of vertices, n. A worked example follows this function.

    return (communities, n_comm, size, avg_size['media'],
            avg_size['desvio_padrao'], size_norm, avg_size_norm['media'],
            overlap, n_singletons, n_non_singletons, alters_ignored,
            alters_ignored_norm, greater_comm, greater_comm_norm, smaller_comm)
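
A quick numeric check of the overlap definition in the comment above, as a minimal sketch with hypothetical communities:

# Hypothetical ego network with 5 vertices and three communities; vertex 3 belongs to two of them.
communities = {'com1': [1, 2, 3], 'com2': [3, 4], 'com3': [5]}
n_nodes = 5

sizes = [len(members) for members in communities.values()]
overlap = float(sum(sizes)) / n_nodes  # (3 + 2 + 1) / 5 = 1.2
print("overlap: %.1f" % overlap)  # on average each vertex belongs to 1.2 communities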
def net_structure(dataset_dir, output_dir, net, IsDir, weight):
    print(
        "\n######################################################################\n"
    )
    if os.path.isfile(str(output_dir) + str(net) + "_net_struct.json"):
        print("Arquivo já existe: " + str(output_dir) + str(net) +
              "_net_struct.json")
    else:

        print("Dataset network structure - " + str(dataset_dir))
        n = []  # Node count per ego network (averaged later)
        e = []  # Edge count per ego network (averaged later)

        bc_n = []  # average betweenness centrality of the nodes
        bc_e = []  # average betweenness centrality of the edges

        i = 0

        for file in os.listdir(dataset_dir):
            i += 1
            print(
                str(output_dir) + str(net) +
                " - Computing properties for ego " + str(i) + ": " +
                str(file))
            if IsDir is True:
                G = snap.LoadEdgeList(
                    snap.PNGraph, dataset_dir + file, 0, 1
                )  # load from a text file - may require a separator, e.g. snap.LoadEdgeList(snap.PNGraph, file, 0, 1, '\t')
            else:
                G = snap.LoadEdgeList(
                    snap.PUNGraph, dataset_dir + file, 0, 1
                )  # load from a text file - may require a separator, e.g. snap.LoadEdgeList(snap.PUNGraph, file, 0, 1, '\t')

#####################################################################################

            n.append(G.GetNodes())  # Number of vertices
            e.append(G.GetEdges())  # Number of edges
            n_nodes = G.GetNodes()
            n_edges = G.GetEdges()

            #####################################################################################
            if n_edges == 0 or n_nodes < 3:
                bc_n.append(n_edges)
                bc_e.append(n_edges)
            else:
                Nodes = snap.TIntFltH()
                Edges = snap.TIntPrFltH()
                snap.GetBetweennessCentr(
                    G, Nodes, Edges, 1.0,
                    IsDir)  #Betweenness centrality Nodes and Edges
                _bc_n = []
                _bc_e = []
                if IsDir is True:
                    max_betweenness = (n_nodes - 1) * (n_nodes - 2)  # ordered vertex pairs; a normalization sketch follows this example
                else:
                    max_betweenness = ((n_nodes - 1) * (n_nodes - 2)) / 2

                for node in Nodes:
                    bc_n_normalized = float(Nodes[node]) / float(max_betweenness)
                    _bc_n.append(bc_n_normalized)

                for edge in Edges:
                    bc_e_normalized = float(Edges[edge]) / float(max_betweenness)
                    _bc_e.append(bc_e_normalized)
                result = calc.calcular(_bc_n)
                bc_n.append(result['media'])
                result = calc.calcular(_bc_e)
                bc_e.append(result['media'])


#####################################################################################

        BC_N = calc.calcular_full(bc_n)
        BC_E = calc.calcular_full(bc_e)

        overview = {}

        overview['BetweennessCentrNodes'] = BC_N
        overview['BetweennessCentrEdges'] = BC_E

        with open(str(output_dir) + str(net) + "_net_struct.json", 'w') as f:
            f.write(json.dumps(overview))

        with open(str(output_dir) + str(net) + "_net_struct.txt", 'w') as f:
            f.write(
                "\n######################################################################\n"
            )
            f.write(
                "Betweenness Centr Nodes: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n"
                % (BC_N['media'], BC_N['variancia'], BC_N['desvio_padrao']))
            f.write(
                "Betweenness Centr Edges: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n"
                % (BC_E['media'], BC_E['variancia'], BC_E['desvio_padrao']))
            f.write(
                "\n######################################################################\n"
            )
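
The normalization above divides each raw betweenness value by the number of vertex pairs whose shortest paths could pass through a vertex: (n-1)(n-2) ordered pairs for directed graphs, half that for undirected ones. A minimal sketch with hypothetical numbers:

# Hypothetical: a vertex with raw betweenness 6.0 in a graph with 6 nodes.
n_nodes = 6
raw = 6.0

max_directed = (n_nodes - 1) * (n_nodes - 2)            # 5 * 4 = 20 ordered pairs
max_undirected = ((n_nodes - 1) * (n_nodes - 2)) / 2.0  # 10 unordered pairs

print("directed:   %.3f" % (raw / max_directed))    # 0.300
print("undirected: %.3f" % (raw / max_undirected))  # 0.600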
Beispiel #23
0
def net_structure(dataset_dir, output_dir, net, IsDir, weight):
    print(
        "\n######################################################################\n"
    )
    if os.path.isfile(str(output_dir) + str(net) + "_connected_comp.json"):
        print("Arquivo já existe: " + str(output_dir) + str(net) +
              "_connected_comp.json")
    else:

        print("Componentes conectados - " + str(dataset_dir))

        cc = []  # Média do tamanho dos componentes conectados por rede-ego
        cc_normal = [
        ]  # Média (normalizada pelo número de vértices do grafo) do tamanho dos componentes conectados por rede-ego
        n_cc = []  # Média do número de componentes conectados por rede-ego
        n = []  # vetor com número de vértices para cada rede-ego
        e = []  # vetor com número de arestas para cada rede-ego
        i = 0

        for file in os.listdir(dataset_dir):

            i += 1
            print(
                str(output_dir) + str(net) + "/" + str(file) +
                " - Computing properties for ego " + str(i) + ": " +
                str(file))
            if IsDir is True:
                G = snap.LoadEdgeList(
                    snap.PNGraph, dataset_dir + file, 0, 1
                )  # load from a text file - may require a separator, e.g. snap.LoadEdgeList(snap.PNGraph, file, 0, 1, '\t')
            else:
                G = snap.LoadEdgeList(
                    snap.PUNGraph, dataset_dir + file, 0, 1
                )  # load from a text file - may require a separator, e.g. snap.LoadEdgeList(snap.PUNGraph, file, 0, 1, '\t')


#			G.Dump()
#			time.sleep(5)

#####################################################################################

            n.append(G.GetNodes())  # Number of vertices
            e.append(G.GetEdges())  # Number of edges
            n_nodes = G.GetNodes()
            n_edges = G.GetEdges()

            #####################################################################################
            if n_edges == 0:
                a = 0
                cc.append(a)
                cc_normal.append(a)
                n_cc.append(a)
                print("Nenhuma aresta encontrada para a rede-ego " + str(i) +
                      " - (" + str(file))
            else:
                Components = snap.TCnComV()
                snap.GetWccs(G, Components)
                _cc = []
                _cc_normal = []
                _n_cc = 0
                for CnCom in Components:
                    _cc.append(CnCom.Len())
                    b = float(CnCom.Len()) / float(n_nodes)
                    _cc_normal.append(b)
                    _n_cc += 1
                result = calc.calcular(_cc)
                cc.append(result['media'])

                result_normal = calc.calcular(_cc_normal)
                cc_normal.append(result_normal['media'])

                n_cc.append(_n_cc)
                print("Número de componentes conectados para o ego " +
                      str(i) + " (" + str(file) + "): " + str(_n_cc))
                print(
                    "Média do tamanho dos componentes conectados para o ego "
                    + str(i) + " (" + str(file) + "): " + str(result['media']))
                print(
                    "Média (normalizada) do tamanho dos componentes conectados para o ego "
                    + str(i) + " (" + str(file) + "): " +
                    str(result_normal['media']))
                print

        N_CC = calc.calcular_full(n_cc)
        CC = calc.calcular_full(cc)
        CC_NORMAL = calc.calcular_full(cc_normal)

        overview = {}
        overview['Len_ConnectedComponents'] = CC
        overview['Len_ConnectedComponents_Normal'] = CC_NORMAL
        overview['N_ConnectedComponents'] = N_CC

        with open(str(output_dir) + str(net) + "_connected_comp.json",
                  'w') as f:
            f.write(json.dumps(overview))

        with open(str(output_dir) + str(net) + "_connected_comp.txt",
                  'w') as f:
            f.write(
                "\n######################################################################\n"
            )
            f.write(
                "Number_Connected_Comp: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n"
                % (N_CC['media'], N_CC['variancia'], N_CC['desvio_padrao']))
            f.write(
                "Length_Connected_Comp: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n"
                % (CC['media'], CC['variancia'], CC['desvio_padrao']))
            f.write(
                "Length_Connected_Comp_Normalized: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n"
                % (CC_NORMAL['media'], CC_NORMAL['variancia'],
                   CC_NORMAL['desvio_padrao']))
            f.write(
                "\n######################################################################\n"
            )

        print(
            "\n######################################################################\n"
        )
        print(
            "Number_Connected_Comp: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n"
            % (N_CC['media'], N_CC['variancia'], N_CC['desvio_padrao']))
        print(
            "Length_Connected_Comp: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n"
            % (CC['media'], CC['variancia'], CC['desvio_padrao']))
        print(
            "Length_Connected_Comp_Normalized: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n"
            % (CC_NORMAL['media'], CC_NORMAL['variancia'],
               CC_NORMAL['desvio_padrao']))
        print(
            "\n######################################################################\n"
        )
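
snap.GetWccs fills Components with the weakly connected components, and the statistics above are just their sizes, normalized sizes, and count. The same bookkeeping in pure Python, as a minimal BFS sketch over a hypothetical adjacency list:

from collections import deque

# Hypothetical undirected ego network as an adjacency list.
adj = {1: [2], 2: [1, 3], 3: [2], 4: [5], 5: [4], 6: []}
n_nodes = len(adj)

seen = set()
comp_sizes = []
for start in adj:  # BFS from every not-yet-visited node
    if start in seen:
        continue
    seen.add(start)
    queue = deque([start])
    members = 0
    while queue:
        u = queue.popleft()
        members += 1
        for v in adj[u]:
            if v not in seen:
                seen.add(v)
                queue.append(v)
    comp_sizes.append(members)

print(comp_sizes)                                # e.g. [3, 2, 1]
print([float(s) / n_nodes for s in comp_sizes])  # sizes normalized by n_nodes
print(len(comp_sizes))                           # number of components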
def net_structure(dataset_dir, output_dir, net, IsDir, weight):
    print(
        "\n######################################################################\n"
    )
    if os.path.isfile(str(output_dir) + str(net) + "_net_struct.json"):
        print("Arquivo já existe: " + str(output_dir) + str(net) +
              "_net_struct.json")
    else:

        print("Dataset network structure - " + str(dataset_dir))
        n = []  # Node count per ego network (averaged later)
        e = []  # Edge count per ego network (averaged later)
        nodes = {}  # key-value map of ego_id to number of vertices
        edges = {}  # key-value map of ego_id to number of edges
        d = []  # Diameter per ego network (averaged later)
        cc = []  # Average closeness centrality per ego network
        bc_n = []  # average betweenness centrality of the nodes
        bc_e = []  # average betweenness centrality of the edges
        degree = {}  # key-value map of "node degree" to "number of nodes with that degree"
        i = 0

        for file in os.listdir(dataset_dir):
            ego_id = file.split(".edge_list")
            ego_id = long(ego_id[0])
            i += 1
            print(
                str(output_dir) + str(net) +
                " - Computing properties for ego " + str(i) + ": " +
                str(file))
            if IsDir is True:
                G = snap.LoadEdgeList(
                    snap.PNGraph, dataset_dir + file, 0, 1
                )  # load from a text file - may require a separator, e.g. snap.LoadEdgeList(snap.PNGraph, file, 0, 1, '\t')
            else:
                G = snap.LoadEdgeList(
                    snap.PUNGraph, dataset_dir + file, 0, 1
                )  # load from a text file - may require a separator, e.g. snap.LoadEdgeList(snap.PUNGraph, file, 0, 1, '\t')

#####################################################################################
            n_nodes = G.GetNodes()
            n_edges = G.GetEdges()
            nodes[ego_id] = n_nodes  # Dict: ego_id -> number of vertices
            edges[ego_id] = n_edges  # Dict: ego_id -> number of edges
            n.append(n_nodes)  # Number of vertices
            e.append(n_edges)  # Number of edges

            #####################################################################################
            if n_edges == 0:
                a = 0
                d.append(a)
                cc.append(a)
                bc_n.append(a)
                bc_e.append(a)
            else:
                d.append(snap.GetBfsFullDiam(G, 100,
                                             IsDir))  # get diameter of G

                #####################################################################################

                _cc = []
                Normalized = True
                for NI in G.Nodes():
                    _cc.append(
                        snap.GetClosenessCentr(
                            G, NI.GetId(), Normalized,
                            IsDir))  # closeness centrality of this node
                result = calc.calcular(_cc)
                cc.append(result['media'])

#####################################################################################

            if n_edges == 0 or n_nodes < 3:
                bc_n.append(n_edges)
                bc_e.append(n_edges)
            else:
                Nodes = snap.TIntFltH()
                Edges = snap.TIntPrFltH()
                snap.GetBetweennessCentr(
                    G, Nodes, Edges, 1.0,
                    IsDir)  #Betweenness centrality Nodes and Edges
                _bc_n = []
                _bc_e = []
                if IsDir is True:
                    max_betweenness = (n_nodes - 1) * (n_nodes - 2)
                else:
                    max_betweenness = ((n_nodes - 1) * (n_nodes - 2)) / 2

                for node in Nodes:
                    bc_n_normalized = float(Nodes[node]) / float(max_betweenness)
                    _bc_n.append(bc_n_normalized)

                for edge in Edges:
                    bc_e_normalized = float(Edges[edge]) / float(max_betweenness)
                    _bc_e.append(bc_e_normalized)
                result = calc.calcular(_bc_n)
                bc_n.append(result['media'])
                result = calc.calcular(_bc_e)
                bc_e.append(result['media'])

                #####################################################################################

                DegToCntV = snap.TIntPrV()
                snap.GetDegCnt(G, DegToCntV)  # Degree of each node in this ego network (a Counter-based sketch follows this example)
                for item in DegToCntV:
                    k = item.GetVal1()
                    v = item.GetVal2()
                    if k in degree:
                        degree[k] = degree[k] + v
                    else:
                        degree[k] = v

#####################################################################################

            print n[i - 1], e[i - 1], d[i - 1], cc[i - 1], bc_n[i - 1], bc_e[i - 1]
            print
#####################################################################################

        N = calc.calcular_full(n)
        E = calc.calcular_full(e)

        histogram.histogram(degree, output_dir + "histogram" + "/", N['soma'],
                            net)

        D = calc.calcular_full(d)

        CC = calc.calcular_full(cc)

        BC_N = calc.calcular_full(bc_n)
        BC_E = calc.calcular_full(bc_e)

        overview = {}
        overview['Nodes'] = N
        overview['Edges'] = E
        overview['Diameter'] = D
        overview['CloseCentr'] = CC
        overview['BetweennessCentrNodes'] = BC_N
        overview['BetweennessCentrEdges'] = BC_E

        nodes_stats = calc.calcular_full(n)
        edges_stats = calc.calcular_full(e)
        overview_basics = {
            'nodes': n,
            'nodes_stats': nodes_stats,
            'edges': e,
            'edges_stats': edges_stats
        }

        output_basics = output_dir + "/" + str(net) + "/"
        if not os.path.exists(output_basics):
            os.makedirs(output_basics)

        with open(str(output_basics) + str(net) + "_nodes.json", 'w') as f:
            f.write(json.dumps(nodes))
        with open(str(output_basics) + str(net) + "_edges.json", 'w') as f:
            f.write(json.dumps(edges))

        with open(str(output_basics) + str(net) + "_overview.json", 'w') as f:
            f.write(json.dumps(overview_basics))

        with open(str(output_dir) + str(net) + "_net_struct.json", 'w') as f:
            f.write(json.dumps(overview))

        with open(str(output_dir) + str(net) + "_net_struct.txt", 'w') as f:
            f.write(
                "\n######################################################################\n"
            )
            f.write("NET: %s -- Ego-nets: %d \n" % (net, len(n)))
            f.write(
                "Nodes: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n" %
                (N['media'], N['variancia'], N['desvio_padrao']))
            f.write(
                "Edges: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n" %
                (E['media'], E['variancia'], E['desvio_padrao']))
            f.write(
                "Diameter: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n"
                % (D['media'], D['variancia'], D['desvio_padrao']))
            f.write(
                "CloseCentr: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n"
                % (CC['media'], CC['variancia'], CC['desvio_padrao']))
            f.write(
                "Betweenness Centr Nodes: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n"
                % (BC_N['media'], BC_N['variancia'], BC_N['desvio_padrao']))
            f.write(
                "Betweenness Centr Edges: Mean: %5.3f -- Var: %5.3f -- Std. Dev.: %5.3f \n"
                % (BC_E['media'], BC_E['variancia'], BC_E['desvio_padrao']))
            f.write(
                "\n######################################################################\n"
            )
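
The degree dict in the last example accumulates, across all ego networks, how many nodes have each degree. The same aggregation reads more idiomatically with collections.Counter; a sketch over hypothetical (degree, count) pairs such as GetDegCnt reports:

from collections import Counter

# Hypothetical (degree, count) pairs for two ego networks.
ego1 = [(1, 4), (2, 3), (5, 1)]
ego2 = [(1, 2), (3, 6)]

degree = Counter()
for deg, cnt in ego1 + ego2:
    degree[deg] += cnt  # same accumulation as the degree dict above

print(dict(degree))  # {1: 6, 2: 3, 3: 6, 5: 1}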