Example 1
es = igraph.EdgeSeq(networkGraph)
for edge in es:
    #print edge.tuple
    fNI.write(str(edge.tuple[0]) + '\t')
    fNI.write(str(edge.tuple[1]))
    fNI.write('\n')
fNI.close()

fSVQ = open('../../data/SVQ.txt', 'w')
rowN, colN = SVQ.shape
for x in range(0, rowN):
    for y in range(0, colN):
        fSVQ.write(str(SVQ[x, y]) + '\t')
    fSVQ.write('\n')

fSVQ.close()
# End of the file I/O part; the clustering run starts here

print "========Begin========"
Iteration = 10  # run the algorithm 10 times

start_time = time.time()

for i in range(0, Iteration):
    membership = louvain(networkGraph, SVQ)
    clustering = igraph.Clustering(membership)
    print 'Modularity: ', igraph.Graph.modularity(networkGraph, membership)

end_time = time.time()
print 'Running time: ', (end_time - start_time) / Iteration
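
Note that this first example starts mid-script: `networkGraph`, `SVQ`, the output file handle `fNI`, and the `louvain` function (shown in the later examples) are assumed to be defined earlier in the file. A minimal, hypothetical setup that would make the snippet self-contained might look like the following; the random graph, the matrix shape, and the file path are placeholders only.

import time
import igraph
import numpy

# Hypothetical setup (not part of the original source).
networkGraph = igraph.Graph.Erdos_Renyi(n=50, p=0.1)  # any igraph.Graph would do here
SVQ = numpy.random.rand(networkGraph.vcount(), 4)     # placeholder node-attribute matrix
fNI = open('../../data/NI.txt', 'w')                  # edge-list output file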
Example 2
def louvain(graph, SVQ):

    random.seed(10)
    is_update = True  # set to True whenever a move improves the objective
    #iteration = 0  #iteration
    #node_weight = graph.vs.degree() #node weight array g.vs.degree() {1,1,1,3,2,3,2,3,2}
    node_number = graph.vcount()  # node number = 9
    membership = numpy.arange(0, node_number, 1)  # initially one node per cluster: {0,1,...,n-1}
    #total_edge_weight = graph.ecount() * 2  # total_edge_weight = 18
    #resolution = 1 / total_edge_weight
    random_order = numpy.arange(0, node_number, 1)
    random.shuffle(random_order)

    while is_update:  # repeat passes until no move improves the objective
        # If there's no update or iteration exceeded
        # clusters weight = {1. 1. 1. 3. 2. 3. 2. 3. 2.}
        #node_weight_per_cluster = numpy.zeros(len(set(membership)))
        #for i in range(0,len(membership)):
        #node_weight_per_cluster[membership[i]] += node_weight[i]
        #enum_time = 0 # Enumeration times, to n, represents all points that have been traversed without moving
        node = 0  # which node
        is_update = False
        membership = resetMembership(membership)
        better_membership = membership.copy()
        if SVQ is None:
            better_modularity = igraph.Graph.modularity(graph, membership)
        else:
            better_modularity = igraph.Graph.modularity(
                graph, membership) - Modification.modIndex(
                    graph, igraph.Clustering(membership), SVQ)
        random_order_copy = list(random_order.copy())
        #move_i = False
        #while enum_time < len(set(membership)):
        for node in random_order_copy:
            #enum_time = 0
            original_node_cluster = membership[node]  # cluster the node currently belongs to
            original_clusters = list(set(membership))
            original_clusters.remove(original_node_cluster)
            subcluster = numpy.where(numpy.array(membership) == original_node_cluster)[0]

            # try moving this node's whole cluster into each of the other clusters
            for new_node_cluster in original_clusters:
                temp_membership = membership.copy()
                # move every member of the original cluster to the candidate cluster
                for sc in subcluster:
                    temp_membership[sc] = new_node_cluster
                temp_membership = resetMembership(temp_membership)
                if SVQ is None:
                    temp_modularity = igraph.Graph.modularity(
                        graph, temp_membership)
                else:
                    temp_modularity = igraph.Graph.modularity(
                        graph, temp_membership) - Modification.modIndex(
                            graph, igraph.Clustering(temp_membership), SVQ)
                if temp_modularity > better_modularity:  #find better clusters
                    #enum_time = 0
                    is_update = True
                    better_membership = temp_membership.copy()
                    if SVQ is None:
                        better_modularity = igraph.Graph.modularity(
                            graph, better_membership)
                    else:
                        better_modularity = igraph.Graph.modularity(
                            graph, better_membership) - Modification.modIndex(
                                graph, igraph.Clustering(better_membership), SVQ)
                #else:
                #enum_time += 1
            '''
            for sc in subcluster:
                if sc in random_order_copy:
                    random_order_copy.remove(sc)
            '''
            #after all cluster is tested
            #node = (node + 1) % node_number

        membership = better_membership.copy()

    # reset the membership array
    membership = resetMembership(membership)

    return membership
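
Example 2 calls two helpers that are not part of this listing: `Modification.modIndex`, an external scoring term, and `resetMembership`, which from its usage appears to relabel cluster ids as consecutive integers starting at 0. A sketch of `resetMembership` under that assumption (a reconstruction, not the original implementation):

import numpy

def resetMembership(membership):
    # Assumed reconstruction: relabel clusters as 0..k-1 in first-seen order,
    # so that cluster ids stay dense after nodes are moved between clusters.
    mapping = {}
    new_membership = numpy.empty(len(membership), dtype=int)
    for i, cluster in enumerate(membership):
        if cluster not in mapping:
            mapping[cluster] = len(mapping)
        new_membership[i] = mapping[cluster]
    return new_membership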
Example 3
total_graphs_generated = numpy.prod(map(len, [communities, alphas, betas, graph_sizes])) * 1
for graph_size in graph_sizes:
    for community in communities:
        for alpha in alphas:
            for beta in betas:
                print('graph generating {0}/{1}'.format(i, total_graphs_generated))
                densities = []
                origin_results = []
                new_results = []
                edges_deleted = []
                for j in xrange(5):
                    origin_membership = StochasticGraphGenerator.generate_equal_size_membership(graph_size, community)

                    graph, beta_edges, alpha_edges = StochasticGraphGenerator().generate(graph_size, origin_membership,
                                                                                         alpha, beta)
                    origin_cluster = igraph.Clustering(origin_membership)
                    origin_results.append(calc_result_and_print(graph, origin_cluster))
                    density = graph.density()
                    densities.append(density)
                    threshold = 0.0144 * (density ** -0.431)
                    edges_deleted.append(run_expirement(graph, threshold))
                    new_results.append(calc_result_and_print(graph, origin_cluster))

                density = numpy.mean(densities)
                threshold = 0.0144 * (density ** -0.431)
                fg, im, lp, ml, wt = map(numpy.mean, zip(*origin_results))
                rows.append([i, 'origin', graph_size, alpha, beta, community, density, threshold, fg, im, lp, ml, wt, 0])
                fg, im, lp, ml, wt = map(numpy.mean, zip(*new_results))
                avg_deleted_edges = int(numpy.mean(edges_deleted))
                rows.append([i, 'delete_edges', graph_size, alpha, beta, community, density, threshold, fg, im, lp, ml, wt, avg_deleted_edges])
                i += 1
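
The experiment loop above assumes that the parameter lists `graph_sizes`, `communities`, `alphas`, `betas`, the result list `rows`, and the counter `i` are initialized earlier, and that `StochasticGraphGenerator`, `calc_result_and_print`, and `run_expirement` are defined elsewhere in the project. A purely illustrative initialization consistent with how the loop uses these names (all values are made up):

graph_sizes = [100, 200]   # vertices per generated graph (illustrative)
communities = [2, 4]       # number of planted communities (illustrative)
alphas = [0.1, 0.2]        # generator parameters passed straight through (illustrative)
betas = [0.01, 0.02]
rows = []                  # one result row appended per configuration and variant
i = 1                      # running counter used in the progress message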
def louvain(graph, SVQ):

    random.seed(10)
    is_update = True  # set to True whenever a pass finds an improving move
    number_of_nodes = graph.vcount()
    clusters = newClusters(number_of_nodes)  #[[0],[1],[2]...,[12]]
    membership = changeClustersToMembership(number_of_nodes, clusters)
    node_weight = graph.degree()

    while is_update:
        is_update = False

        #temp data
        delta_Q_max = 0
        better_clusters = copy.deepcopy(clusters)
        better_node_weight = copy.deepcopy(node_weight)

        #random choose cluster
        out_random_order = randomOrder(clusters)

        for i in out_random_order:  # try merging cluster i (a node or a shrunken cluster) into another

            copy_clusters = copy.deepcopy(clusters)
            copy_node_weight = copy.deepcopy(node_weight)
            # e.g. i == 0, moving_cluster == [1, 2]
            moving_cluster = copy_clusters.pop(i)  # take cluster i out of the working copy
            moving_cluster_weight = copy_node_weight.pop(i)
            in_random_order = randomOrder(copy_clusters)

            for j in in_random_order:  #move i to j
                #copy_clusters[j] = copy_clusters[j] + moving_cluster
                #copy_node_weight[j] = copy_node_weight[j] + moving_cluster_weight
                temp_list = copy_clusters[j] + moving_cluster
                temp_weight = copy_node_weight[j] + moving_cluster_weight
                delta_Q = inEdgeWeight(graph.get_adjacency(), temp_list)

                if SVQ is None:
                    delta_Q = delta_Q - copy_node_weight[
                        j] * moving_cluster_weight / graph.ecount()
                else:
                    temp_clusters = copy.deepcopy(copy_clusters)
                    temp_clusters[j] = temp_list
                    membership = changeClustersToMembership(
                        number_of_nodes, temp_clusters)
                    delta_Q = delta_Q - copy_node_weight[
                        j] * moving_cluster_weight / graph.ecount(
                        ) - Modification.modIndex(
                            graph, igraph.Clustering(membership), SVQ)

                if delta_Q > delta_Q_max and delta_Q > 0:
                    delta_Q_max = delta_Q
                    better_clusters = copy.deepcopy(copy_clusters)
                    better_clusters[j] = temp_list
                    better_node_weight = copy.deepcopy(copy_node_weight)
                    better_node_weight[j] = temp_weight
                    is_update = True

        print better_clusters
        membership = changeClustersToMembership(number_of_nodes, better_clusters)
        print igraph.Graph.modularity(graph, membership)
        clusters = better_clusters
        node_weight = better_node_weight

    return changeClustersToMembership(number_of_nodes, clusters)
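
This second `louvain` variant relies on four helpers that are not included in the listing: `newClusters`, `changeClustersToMembership`, `randomOrder`, and `inEdgeWeight` (plus the same external `Modification.modIndex`). The sketches below are reconstructions inferred from how the helpers are called, not the original implementations.

import random

def newClusters(number_of_nodes):
    # One singleton cluster per vertex: [[0], [1], ..., [n-1]].
    return [[v] for v in range(number_of_nodes)]

def changeClustersToMembership(number_of_nodes, clusters):
    # Flatten a list of clusters into a membership list indexed by vertex id.
    membership = [0] * number_of_nodes
    for cluster_id, cluster in enumerate(clusters):
        for v in cluster:
            membership[v] = cluster_id
    return membership

def randomOrder(clusters):
    # Indices of the clusters, visited in random order.
    order = list(range(len(clusters)))
    random.shuffle(order)
    return order

def inEdgeWeight(adjacency, vertices):
    # Assumed reconstruction: number of edges (taken from graph.get_adjacency())
    # with both endpoints inside `vertices`, each undirected edge counted once.
    total = 0
    for a in vertices:
        for b in vertices:
            if a < b:
                total += adjacency[a][b]
    return total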