Example #1
def _main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, False)
    # keep a copy of the graph; modularity below is computed on this copy,
    # not on the (possibly modified) original
    graph_copy = deepcopy(graph)

    communities = community_detect(graph)
    number_of_nodes = 0
    com_dict = {}

    for i in range(len(communities)):
        com_dict[i] = communities[i]
        number_of_nodes += len(communities[i])

    print(number_of_nodes, 'nodes have been analyzed.')

    utils.print_comm_info_to_display(com_dict)
    print('modularity_value =', modularity(graph_copy, com_dict))

    com_dict2 = {}
    for k, v in com_dict.items():
        for node in v:
            com_dict2[node] = k

    print('NMI =', NMI(args.output, com_dict2))

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
def main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w)
    graph_copy = deepcopy(graph)

    communities = community_search(graph)
    com_dict = {}

    for i in range(len(communities)):
        com_dict[i] = communities[i]

    utils.print_comm_info_to_display(com_dict)
    print('modularity_value =', modularity(graph, com_dict))

    com_dict2 = {}
    for k, v in com_dict.items():
        for node in v:
            com_dict2[node] = k

    print('NMI =', NMI(args.output, com_dict2))

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
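Both functions above invert com_dict (community index to node list) into com_dict2 (node to community index) before computing NMI; a compact, purely illustrative equivalent of that inversion, with made-up contents:

# Equivalent one-liner for the com_dict -> com_dict2 inversion used above.
com_dict = {0: [1, 2, 3], 1: [4, 5]}                      # community -> nodes
com_dict2 = {node: com for com, nodes in com_dict.items() for node in nodes}
print(com_dict2)                                          # {1: 0, 2: 0, 3: 0, 4: 1, 5: 1}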
Example #3
def main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w)
    graph_copy = deepcopy(graph)

    preprocess(graph)
    c = greedy_modularity_communities(graph)

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))

    communities = dict()
    for i in range(len(c)):
        communities[i] = list(c[i])

    partition = create_partition(communities)
    utils.print_comm_info_to_display(communities)
    # utils.write_comm_info_to_file(partition)

    print('modularity_value =', modularity(graph_copy, communities))
    print('NMI =', NMI(args.output, partition))

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
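The greedy_modularity_communities and modularity calls above are presumably networkx's; a minimal, self-contained sketch on a built-in toy graph, without the project's utils, preprocess, or NMI helpers:

import networkx as nx
from networkx.algorithms.community import greedy_modularity_communities, modularity

G = nx.karate_club_graph()
c = greedy_modularity_communities(G)                      # list of frozensets of nodes
communities = {i: list(c[i]) for i in range(len(c))}      # same index -> nodes layout as above
print('modularity_value =', modularity(G, c))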
def print_graph_info(G, com2nodes):
    print(nx.info(G))
    print('Modularity based on Newman formula =',
          round((measures.modularity(G, com2nodes)), 3))
    print('Number of communities =', len(com2nodes))

    print('sizes of comms = [', end="")
    for i in range(len(com2nodes)):
        if i == len(com2nodes) - 1:
            print(len(com2nodes[i]), end="")
        else:
            print(len(com2nodes[i]), ', ', end="")
    print(']\n')
Example #5
def main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w)
    partition = best_partition(graph)

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))

    communities = utils.extract_communities(partition)
    utils.print_comm_info_to_display(communities)
    # utils.write_comm_info_to_file(args.output, partition)

    print('modularity_value =', modularity(graph, communities))
    print('NMI =', NMI(args.output, partition))

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
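best_partition here matches the python-louvain API, which returns a dict mapping each node to a community index; a self-contained sketch under that assumption (python-louvain imported as community):

import networkx as nx
import community as community_louvain   # python-louvain package (assumption)

G = nx.karate_club_graph()
partition = community_louvain.best_partition(G)           # {node: community_index}
communities = {}
for node, com in partition.items():
    communities.setdefault(com, []).append(node)          # rebuild community -> nodes
print('modularity_value =', community_louvain.modularity(partition, G))

Note that python-louvain's modularity takes the node-to-community dict directly, whereas the networkx version expects an iterable of node collections.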
def _main():
    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w, self_loop=True)

    nodes = list(graph.nodes())
    print('num of nodes =', len(nodes))
    print('num of edges =', graph.number_of_edges())

    communities = dict()
    communities_file = args.output
    with open(communities_file, 'r') as file:
        lines = file.readlines()
        for line in lines:
            line = line.split()
            if not int(line[0]) in nodes:
                continue
            if not int(line[1]) in communities:
                communities[int(line[1])] = list()
            communities[int(line[1])].append(int(line[0]))

    print('modularity_value =', modularity(graph, communities))
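The parsing loop above assumes a plain-text communities file with one whitespace-separated "node community" pair per line; the values below are made up, only the shape matters:

0	3
1	3
2	0
5	1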
Example #7
def __main():
    args = create_argument_parser()

    start_time = time.time()
    set_resolution_parameter(float(args.gamma))
    graph = load_graph(args.dataset)

    partition = make_singleton_communities(graph)
    partition, graph = run_leiden(graph, partition, report=False)
    communities = extract_final_communities_out_of_partition(partition, graph)

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))

    for com_index, nodes in communities.items():
        print(com_index, ':', nodes, '\t', len(nodes))

    print('modularity_value =', modularity(graph, communities))
    print('NMI =', NMI(args.output, partition))

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
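set_resolution_parameter, run_leiden, and extract_final_communities_out_of_partition are project functions not shown here. As a rough stand-in, networkx's Louvain implementation (networkx >= 2.8) exposes the same kind of resolution parameter gamma, so only the effect of gamma is illustrated, not the Leiden algorithm itself:

import networkx as nx
from networkx.algorithms.community import louvain_communities, modularity

G = nx.karate_club_graph()
for gamma in (0.5, 1.0, 2.0):                             # illustrative resolution values
    comms = louvain_communities(G, resolution=gamma, seed=0)
    print('gamma =', gamma, '-> |C| =', len(comms),
          ', Q =', round(modularity(G, comms), 4))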
Example #8
def report_performance(graph, partition, ground_truth_file_address):
    """prints some performace measure of the experiment, inclusing Qmodularity and NMI score of the partition.

	Args:
		graph ([nx.Graph]): [the given network]
		partition ([list]): [list of all discovered communities]
		ground_truth_file_address ([str]): [filename of the ground-truth information of communities]
	"""
    com2nodes = dict()
    for i in range(len(partition)):
        com2nodes[i] = partition[i]

    node2com = dict()
    for com_index, nodes in com2nodes.items():
        for node in nodes:
            node2com[node] = com_index

    report_str = 'Modularity = ' + str(measures.modularity(
        graph, com2nodes))[:5] + '\t'
    report_str += 'NMI = ' + str(
        measures.NMI(ground_truth_file_address, node2com))[:5]
    return report_str
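report_performance depends on the project's measures module. The NMI part can be illustrated independently with scikit-learn's normalized_mutual_info_score, which compares two labellings of the same nodes; the tiny dictionaries below are made up:

from sklearn.metrics import normalized_mutual_info_score

ground_truth = {0: 0, 1: 0, 2: 1, 3: 1}                   # node -> true community
node2com     = {0: 1, 1: 1, 2: 0, 3: 0}                   # node -> detected community
nodes = sorted(ground_truth)
nmi = normalized_mutual_info_score([ground_truth[n] for n in nodes],
                                   [node2com[n] for n in nodes])
print('NMI =', round(nmi, 3))                             # 1.0: same split, relabelled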
def main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w)
    graph_copy = deepcopy(graph)

    communities = community_detection(graph)
    com_dict = {}

    for i in range(len(communities)):
        com_dict[i] = communities[i]

    utils.print_comm_info_to_display(com_dict)
    # output_name = args.dataset[args.dataset.rindex('/'):]
    # utils.write_comm_info_to_file(output_name, com_dict)

    print('modularity_value =', modularity(graph_copy, com_dict))
    print('NMI =', NMI(args.output, com_dict))

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
Example #10
def print_graph_info(G, com2nodes, decimal_precision=3, comm_size_info=False):
    """prints some information about the network, including the number of nodes, number of edges,
	the average degree, the modularity value, number and sizes of the communities.

	Args:
		G ([nx.Graph]): [the given network]
		com2nodes ([dict]): [a map of community indices to their corresponding nodes]
		decimal_precision (int, optional): [determines the precision of shown float numbers]. Defaults to 3.
		comm_size_info (bool, optional): [determines if community size info should be printed]. Defaults to False.
	"""
    print('|V| =', G.number_of_nodes(), '\t|E| =', G.number_of_edges(),
          '\t|C| =', len(com2nodes), '\tAvg(degree) =',
          sum(G.degree[x] for x in G.nodes()) / G.number_of_nodes())
    print('Modularity based on Newman formula =',
          round((measures.modularity(G, com2nodes)), decimal_precision))

    if comm_size_info:
        print('sizes of comms = [', end="")
        for i in range(len(com2nodes)):
            if i == len(com2nodes) - 1:
                print(len(com2nodes[i]), end="")
            else:
                print(len(com2nodes[i]), ', ', end="")
        print(']\n')
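A quick sanity check for the Avg(degree) expression above: in an undirected graph the degree sum equals 2|E|, so the average degree is 2|E| / |V|:

import networkx as nx

G = nx.karate_club_graph()
assert sum(d for _, d in G.degree()) == 2 * G.number_of_edges()
print('Avg(degree) =', 2 * G.number_of_edges() / G.number_of_nodes())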
Example #11
    print("You need to change the parameters, this doesn't work.")
    exit(0)

comms = {frozenset(G.nodes[v]['community']) for v in G}
com2nodes = dict()
node2com = dict()
n = 0
for com in comms:
    com2nodes[n] = list(com)
    for node in com:
        node2com[node] = n
    n += 1

print(nx.info(G))
print('Modularity based on Newman formula =',
      round((measures.modularity(G, com2nodes)), 3))
print('Number of communities =', len(com2nodes))
for com_index, nodes in com2nodes.items():
    nodes.sort()
    print(com_index, '-> len of com =', len(nodes), '\t:', nodes)
print()

# determine number of links of the network and number of uncertain links
num_links = G.number_of_edges()
num_uncertain_links = int(num_links * prob_links_ratio)
print('Number of all links in the network =', num_links)
print('Number of uncertain links with probability less than one =',
      num_uncertain_links)
print()

# create a list of probability values and sort them in descending order

graph_name = 'synthetic_' + str(num_nodes) + '_' + str(len(com2nodes)) + '.mtx'
nx.write_edgelist(G, graph_name, delimiter='\t', data=False)

groundTruth_name = graph_name[:-4] + '_ground_truth.mtx'
with open(groundTruth_name, 'w') as file:
    for node, com in sorted(node2com.items()):
        line_to_write = str(node) + '\t' + str(com) + '\n'
        file.write(line_to_write)
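The G.nodes[v]['community'] attribute read in this example matches what networkx's LFR_benchmark_graph generator attaches to each node; a small sketch with illustrative parameters (the generator may raise ExceededMaxIterations for some parameter choices):

import networkx as nx
from networkx.generators.community import LFR_benchmark_graph

G = LFR_benchmark_graph(250, 3, 1.5, 0.1, average_degree=5,
                        min_community=20, seed=10)
comms = {frozenset(G.nodes[v]['community']) for v in G}   # ground-truth communities
print(G.number_of_nodes(), 'nodes,', G.number_of_edges(), 'edges,',
      len(comms), 'ground-truth communities')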