Example 1
def runonrealnets(networkpath, network, resultpath):
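    """Run the multilayer Louvain detection on a single unweighted network
    file <networkpath><network> and pickle the detected community-to-nodes
    mapping to <resultpath>_commu_benching_frac.pickle."""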
    weighted = 0
    str2 = networkpath + str(network)

    dtmod, dtcom = getSeries(str2, weighted)
    dtcom = _get_com_wise_nodes(partition_at_level(dtcom, len(dtcom) - 1))
    with open(resultpath + '_commu_benching_frac.pickle', 'wb') as handle:
        pickle.dump(dtcom, handle)
Example 2
def runformanynetworks(args):
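    """Run the multilayer Louvain detection on every network named in
    args[:-1] and compare it with the ground truth.

    args[-1] is expected to be (weighted, modfilename, networkpath,
    detectedCommuFile). For each network one line
    "<path>: <gt modularity> <detected modularity> <nmi> <ri>" is appended
    to modfilename. Returns a list of (gt modularity, detected modularity)
    pairs.
    """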
    output = []

    networklist = args[:-1]
    # The last element of args packs the shared settings for every network.
    weighted, modfilename, networkpath, detectedCommuFile = args[-1][:4]
    print(detectedCommuFile)
    for network in networklist:
        #str2 = "./nets/" + str(network)
        str2 = networkpath + str(network)
        #print "START For Network ",str2
        dtmod, dtcom = getSeries(str2, weighted)
        gtmod, gtcom = computegtmod(str2, weighted)
        output.append((gtmod, dtmod))

        dtcom = _get_com_wise_nodes(partition_at_level(dtcom, len(dtcom) - 1))
        print(dtcom)
        nmi, ri = compute_nmi(gtcom, dtcom)

        ##print(str2+ ", " + str(gtmod) + ", " + str(dtmod))
        #break
        with open(modfilename, 'a') as modfile:
            modfile.write(str2 + ":\t" + str(gtmod) + "  " + str(dtmod) + "  " +
                          str(nmi) + " " + str(ri) + "\n")

        # WRITE DETECTED COMMUNITIES TO FILE (disabled in the original):
        # p = "./syntheticNetworkGeneration/netsForcomparingBaseline/results/"
        # p = "./syntheticNetworkGeneration/diggnetwork/"
        # comfile = open(detectedCommuFile, 'a')
        # comfile.write(str2 + ":\n============================================================\n")
        # comfile.write(str(len(dtcom)) + "\n")
        # for d in dtcom:
        #     towrite = ' '.join([str(node) for node in dtcom[d]])
        #     comfile.write(towrite + "\n")
        # comfile.write("=======================================================\n")
        # comfile.close()

    return output
Example 3
def __one_level(original_graph,
                original_status,
                graph,
                status,
                status_list,
                level_count,
                verbose=0):
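    """Perform one level of the multilayer Louvain optimisation.

    Nodes of `graph` are visited in random order. For each node the routine
    evaluates (a) moving the node alone into each neighbouring community and
    (b) moving it together with its group -- the nodes that start in the same
    community, i.e. the same vertex replicated across layers -- and keeps the
    move with the best gain in multilayer modularity, always measured on
    `original_graph` through partition_at_level().

    Returns True if at least one node changed community, False otherwise.
    """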
    ##print("graph edges: ",graph.edges(data = True))
    modif = True
    modif_global = False
    nb_pass_done = 0

    #print "Status List length, level_count ",len(status_list),level_count
    if level_count == 1:
        verbose = True

##### GROUPING VERTICES IN SAME COMMUNITY BUT DIFFERENT LAYERS----------------
    group = {}

    for node1 in status.node2com:
        com1 = status.node2com[node1]
        for node2 in status.node2com:
            com2 = status.node2com[node2]
            if com1 == com2:
                if node1 not in group:
                    group[node1] = set()
                group[node1].add(node2)
    #pprint(status.node2com)
    #if(move_in_group ==1):
    #	sys.exit()
    #----------------------------------------------------------------------

    ffff = 0  # one-shot flag for the (commented-out) base_mod debug print below
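    # Keep making passes over all nodes until a complete pass produces no
    # move or __PASS_MAX passes have been done.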
    while modif and nb_pass_done != __PASS_MAX:
        modif = False
        nb_pass_done += 1
        move_in_group = 0
        #no_move=set()

        v = list(graph.nodes())
        random.shuffle(v)
        for node in v:
            print("for node:", node)
            # CHECKING IF THE CORRESPONDING GROUP HAS ALREADY BEEN CONSIDERED
            # (disabled in the original):
            # if node in no_move:
            #     continue
            move_in_group = 0
            com_node = status.node2com[node]

            best_com = com_node
            best_increase = 0

            status_list.append(status.node2com)
            original_status.node2com = partition_at_level(
                status_list, level_count)

            base_mod = __modularity(
                _get_commu_dict(partition_at_level(status_list, level_count)),
                original_status, original_graph)
            if ffff == 0:
                #print "base_mod, level ",base_mod,level_count,len(status_list)
                ffff = 1

            neigh_communities = __neighcom(node, graph, status)

            extended_neigh_communities = __neighcom(node, graph, status)
            ### EXTENDING NEIGHBORHOOD BY NEIGHBORS OF THE NODES IN THE SAME GROUP
            if node in group:
                for node1 in group[node]:
                    extended_neigh_communities.extend(
                        __neighcom(node1, graph, status))
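
            # First pass: try moving only `node` itself into each of its
            # neighbouring communities.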

            for com in neigh_communities:

                # Tentatively move only this node; moving its whole group in
                # this first pass is disabled in the original:
                status.node2com[node] = com
                # if node in group:
                #     for node1 in group[node]:
                #         status.node2com[node1] = com
                status_list.append(status.node2com)
                original_status.node2com = partition_at_level(
                    status_list, level_count)

                incr = __modularity(
                    _get_commu_dict(
                        partition_at_level(status_list, level_count)),
                    original_status, original_graph) - base_mod

                if incr > best_increase:
                    best_increase = incr
                    best_com = com

                status_list.pop()
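
            # Second pass: also try moving `node` together with its whole
            # cross-layer group into each community reachable from the group.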

            for com in extended_neigh_communities:

                ### MODIFYING NODE2COM FOR ALL NODES IN SAME GROUP
                status.node2com[node] = com
                if node in group:
                    for node1 in group[node]:
                        status.node2com[node1] = com

                status_list.append(status.node2com)
                original_status.node2com = partition_at_level(
                    status_list, level_count)

                incr = __modularity(
                    _get_commu_dict(
                        partition_at_level(status_list, level_count)),
                    original_status, original_graph) - base_mod

                if incr > best_increase:
                    move_in_group = 1
                    best_increase = incr
                    best_com = com

                status_list.pop()

            #if com_node != best_com:
            #	if verbose:
            #		#print "node",node,"moved from", com_node, "to", best_com,":", best_increase

            if (move_in_group == 0):
                if node in group:  #REVERT ALL NODES IN GROUP TO ORIGINAL COMMUNITY
                    for node1 in group[node]:
                        status.node2com[node1] = com_node
                status.node2com[node] = best_com
            else:
                ### UPDATING NODE2COM FOR ALL NODES IN SAME GROUP
                ### ADDING GROUP NODES IN NO_MOVE
                status.node2com[node] = best_com
                if node in group:
                    for node1 in group[node]:
                        status.node2com[node1] = best_com
                        #no_move.add(node1)

            print("move in group: {0} , prev com: {1}, best_com: {2}".format(
                move_in_group, com_node, best_com))

            #p_temp = __renumber(status.node2com)
            #p_temp = status.node2com
            #status_list.append(p_temp)
            ##print __modularity(_get_com_wise_nodes(status_list[-1]), status, original_graph)
            #status_list.pop()

            original_status.node2com = partition_at_level(
                status_list, level_count)

            status_list.pop()

            if best_com != com_node:
                modif = True
                modif_global = True

        p_temp = status.node2com
        status_list.append(p_temp)
        new_mod = __modularity(
            _get_com_wise_nodes(partition_at_level(status_list, level_count)),
            original_status, original_graph)

        #print "In __one_level new_mod: ", new_mod
        #		if(verbose): #print("Status list[-1]: ",status_list[-1])

        status_list.pop()
        if not modif:
            break
    return modif_global
Example 4
def getSeries(filename, weighted):
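    """Read the raw multilayer network in `filename`, run the multilayer
    Louvain detection on it and return (modularity, dendrogram), where the
    dendrogram is the list of partitions produced by louvain()."""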
    # The original inline parser for the raw multilayer-network file was kept
    # here, commented out; read_raw_network() below performs the same job.
    # The raw format lists, in order: the number of layers, then for each
    # layer its node set and its intra-layer edge list; the number of layer
    # couples, each given as a (top, bottom) layer pair followed by its
    # inter-layer edge list; and finally the ground-truth communities, one
    # line of node ids per community.

    #with open(filename+'_ml_network.pickle') as handle:
    #	fnetwork = pickle.load(handle)
    ml_network, layer, node_l, node_c, top, bot, couple, edge_l, edge_c, mu, commu = read_raw_network(
        filename, weighted)

    #ml_network =build_network(layer, node_l, node_c, top, bot, couple, edge_l, edge_c)
    #with open(filename+'_ml_network.pickle', 'w') as handle:
    #	 pickle.dump([ml_network, layer, node_l, node_c, top, bot, couple, edge_l, edge_c, mu, commu], handle)
    dendogram, mod = louvain(ml_network, layer, node_l, node_c, top, bot,
                             couple, edge_l, edge_c, mu)

    status = Status()
    status.layer = layer
    status.node_l = node_l
    status.node_c = node_c
    status.top = top
    status.bot = bot
    status.edge_l = edge_l
    status.edge_c = edge_c
    status.couple = couple
    status.mu = mu
    mod_old = mod
    commu = _get_com_wise_nodes(
        partition_at_level(dendogram,
                           len(dendogram) - 1))
    #commu =_get_commu_dict(partition_at_level(dendogram, len(dendogram)-1))
    mod = __modularity(commu, status, ml_network)
    #print "--------- BEF RET ---------"
    #print mod_old, mod
    #print "----------------"

    return mod, dendogram
Example 5
def louvain(graph, layer, node_l, node_c, top, bot, couple, edge_l, edge_c,
            mu):
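    """Multilayer Louvain: alternate __one_level() passes with graph
    aggregation via induced_graph_multilayer() until a pass makes no move.

    Returns (status_list, mod): the partition found at each level (the
    dendrogram) and the modularity reached.
    """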
    current_graph = graph.copy()
    status = Status()

    status.layer = layer
    status.node_l = node_l
    status.node_c = node_c
    status.top = top
    status.bot = bot
    status.edge_l = edge_l
    status.edge_c = edge_c
    status.couple = couple
    status.mu = mu

    status.init(current_graph)

    old_status = status.copy()

    status_list = list()
    level_count = 0

    mod = __modularity(_get_commu_dict(status.node2com), status, graph)
    #print "Modularity before First Iteration ",mod

    __one_level(graph, old_status, current_graph, status, status_list,
                level_count)
    partition = __renumber(status.node2com)
    status_list.append(partition)
    current_graph, part, status = induced_graph_multilayer(
        partition, current_graph, status)

    mod1 = __modularity(_get_com_wise_nodes(part), status, current_graph)

    p = _get_com_wise_nodes(
        partition_at_level(status_list,
                           len(status_list) - 1))
    new_mod = __modularity(p, old_status, graph)
    #print "-> merge mod after level 0 : ", mod1
    #print "-> Modularity after level 0: ",new_mod, "\n"

    ##print("Louvain, partition: ",partition)
    #print("Louvain partition: ",part)
    A = nx.adjacency_matrix(current_graph)  # used only by the commented-out debug print below
    ##print(A.todense())

    status.init(current_graph)

    while True:
        level_count += 1
        ##print level_count
        modif = __one_level(graph, old_status, current_graph, status,
                            status_list, level_count, 1)

        partition = __renumber(status.node2com)
        status_list.append(partition)

        new_mod = __modularity(
            _get_commu_dict(partition_at_level(status_list, level_count)),
            old_status, graph)
        #print "-> Modularity after level ",level_count,": ",new_mod, "\n"

        #new_mod = __modularity(_get_commu_dict(partition), status, current_graph)
        #new_mod = __modularity(_get_com_wise_nodes(partition), status, current_graph)

        if not modif:
            #if new_mod - mod < __MIN :

            break
        mod = new_mod
        #current_graph = induced_graph(partition, current_graph)
        current_graph, part, status = induced_graph_multilayer(
            partition, current_graph, status)
        #status.init(current_graph)
        status.init(current_graph, part)

        ##print("Louvain, partition: ",partition)
        #		#print("Louvain, part after: ",part)
        #		A = nx.adjacency_matrix(current_graph)
        ##print(A.todense())

    return status_list, mod
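
A minimal usage sketch for the entry points above, assuming they live in one module together with the helpers they call (getSeries, computegtmod, compute_nmi, read_raw_network, ...) and that the paths and file names below, which are hypothetical placeholders, exist:

if __name__ == '__main__':
    networkpath = "./nets/"            # hypothetical input directory
    resultpath = "./results/mynet"     # hypothetical output prefix

    # Detect communities on one unweighted network and pickle the result.
    runonrealnets(networkpath, "mynet.txt", resultpath)

    # Process several networks; the last element of args packs
    # (weighted, modfilename, networkpath, detectedCommuFile).
    args = ["net1.txt", "net2.txt",
            (0, "./results/mod.txt", networkpath, "./results/commu.txt")]
    print(runformanynetworks(args))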