def main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w)
    graph_copy = deepcopy(graph)

    communities = community_search(graph)
    com_dict = {}

    for i in range(len(communities)):
        com_dict[i] = communities[i]

    utils.print_comm_info_to_display(com_dict)
    print('modularity_value =', modularity(graph_copy, com_dict))

    com_dict2 = {}
    for k, v in com_dict.items():
        for node in v:
            com_dict2[node] = k

    print('NMI =', NMI(args.output, com_dict2))

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
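
# NMI above compares the detected partition with ground truth; a minimal sketch
# of such a check using scikit-learn (the file format and helper name below are
# assumptions, not the NMI implementation used in these examples):
from sklearn.metrics import normalized_mutual_info_score

def nmi_from_file(ground_truth_path, node_to_comm):
    # Assumes one "node community_id" pair per line in the ground-truth file.
    truth = {}
    with open(ground_truth_path) as f:
        for line in f:
            node, comm = line.split()[:2]
            truth[int(node)] = int(comm)
    common = sorted(set(truth) & set(node_to_comm))
    return normalized_mutual_info_score([truth[n] for n in common],
                                        [node_to_comm[n] for n in common])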
Example #2
def _main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, False)
    graph_copy = deepcopy(graph)

    communities = community_detect(graph)
    number_of_nodes = 0
    com_dict = {}

    for i in range(len(communities)):
        com_dict[i] = communities[i]
        number_of_nodes += len(communities[i])

    print(number_of_nodes, 'nodes have been analyzed.')

    utils.print_comm_info_to_display(com_dict)
    print('modularity_value =', modularity(graph_copy, com_dict))

    com_dict2 = {}
    for k, v in com_dict.items():
        for node in v:
            com_dict2[node] = k

    print('NMI =', NMI(args.output, com_dict2))

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
Example #3
def main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w)
    graph_copy = deepcopy(graph)

    preprocess(graph)
    c = greedy_modularity_communities(graph)

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))

    communities = dict()
    for i in range(len(c)):
        communities[i] = list(c[i])

    partition = create_partition(communities)
    utils.print_comm_info_to_display(communities)
    # utils.write_comm_info_to_file(partition)

    print('modularity_value =', modularity(graph_copy, communities))
    print('NMI =', NMI(args.output, partition))

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
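
# create_partition is not defined in this snippet; a plausible sketch, mirroring
# the com_dict2 inversion used in the earlier examples (community id -> node
# list becomes node -> community id):
def create_partition(communities):
    partition = {}
    for comm_id, nodes in communities.items():
        for node in nodes:
            partition[node] = comm_id
    return partition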
Example #4
def main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = load_graph(args.dataset, args.w)

    partition = find_comms(graph)

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
Example #5
def main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w)
    intended_node = int(args.output)

    community = community_search(graph, intended_node)
    print('community =', community, len(community))

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
Example #6
def _main():
    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w)
    communities = read_communities(args.output)

    num_of_disconnected_coms = 0
    for com_index, nodes in communities.items():
        small_graph = graph.subgraph(nodes)
        if not nx.is_connected(small_graph):
            num_of_disconnected_coms += 1

    print('\nFraction of disconnected communities = %.3f' % (num_of_disconnected_coms / len(communities)))
    print(num_of_disconnected_coms, '\t', len(communities))
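
# read_communities is assumed here to parse a "node community_id" file into a
# dict of community id -> node list, in the same way a later example does inline:
def read_communities(path):
    communities = {}
    with open(path) as f:
        for line in f:
            node, comm = (int(x) for x in line.split()[:2])
            communities.setdefault(comm, []).append(node)
    return communities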
Example #7
def main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w)
    partition = best_partition(graph)

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))

    communities = utils.extract_communities(partition)
    utils.print_comm_info_to_display(communities)
    # utils.write_comm_info_to_file(args.output, partition)

    # print('modularity_value =', modularity(graph, communities))
    # print('NMI =', NMI(args.output, partition))

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
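
# best_partition matches the python-louvain API (community.best_partition),
# which returns a node -> community id dict; extract_communities presumably
# inverts it. A minimal sketch of that inversion (names are assumptions):
def extract_communities(partition):
    communities = {}
    for node, comm_id in partition.items():
        communities.setdefault(comm_id, []).append(node)
    return communities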
Example #8
def _main():
    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w, self_loop=True)

    nodes = set(graph.nodes())
    print('num of nodes =', len(nodes))
    print('num of edges =', graph.number_of_edges())

    communities = dict()
    communities_file = args.output
    with open(communities_file, 'r') as file:
        for line in file:
            parts = line.split()
            node, comm = int(parts[0]), int(parts[1])
            if node not in nodes:
                continue
            communities.setdefault(comm, []).append(node)

    print('modularity_value =', modularity(graph, communities))
Example #9
def _main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w)

    communities = dict()

    for node in graph.nodes():
        community = community_search(graph, node)
        community = amend_by_dangles(graph, community)
        communities[node] = community

    ground_truth = read_ground_truth(args.output)
    precision, recall, f1_score = calc_accuracy(communities, ground_truth)

    print('precision =', precision, '\trecall =', recall, '\tf1-score =',
          f1_score)

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
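
# calc_accuracy is not shown; a hedged sketch of one common per-node scoring of
# a found community against its ground-truth community (assumes ground_truth
# maps each node to a non-empty node set, and averages over scored nodes):
def calc_accuracy(communities, ground_truth):
    precisions, recalls = [], []
    for node, found in communities.items():
        truth = set(ground_truth.get(node, ()))
        found = set(found)
        if not truth or not found:
            continue
        overlap = len(found & truth)
        precisions.append(overlap / len(found))
        recalls.append(overlap / len(truth))
    precision = sum(precisions) / len(precisions)
    recall = sum(recalls) / len(recalls)
    f1_score = 2 * precision * recall / (precision + recall)
    return precision, recall, f1_score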
Example #10
def _main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w)

    communities = dict()

    print('\n\n')
    for node in sorted(graph.nodes()):
        community = community_search(graph, node)
        communities[node] = community
        # print('  Node =', node, ' : degree =', graph.degree[node], '->\t', community, '\t', len(community))

    ground_truth = read_ground_truth(args.output)
    precision, recall, f1_score = calc_accuracy(communities, ground_truth)

    print('precision =', precision, '\trecall =', recall, '\tf1-score =',
          f1_score)

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
Example #11
def main():
    start_time = time.time()

    args = utils.create_argument_parser()
    graph = utils.load_graph(args.dataset, args.w)
    graph_copy = deepcopy(graph)

    communities = community_detection(graph)
    com_dict = {}

    for i in range(len(communities)):
        com_dict[i] = communities[i]

    utils.print_comm_info_to_display(com_dict)
    # output_name = args.dataset[args.dataset.rindex('/'):]
    # utils.write_comm_info_to_file(output_name, com_dict)

    print('modularity_value =', modularity(graph_copy, com_dict))
    print('NMI =', NMI(args.output, com_dict))

    finish_time = time.time()
    print('\nDone in %.4f seconds.' % (finish_time - start_time))
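
# The modularity helper used in these examples is not shown; networkx ships an
# equivalent that takes communities as an iterable of node sets rather than a
# dict (a stand-in, not necessarily the original implementation):
from networkx.algorithms.community import modularity as nx_modularity

def modularity_of(graph, com_dict):
    return nx_modularity(graph, [set(nodes) for nodes in com_dict.values()])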
Example #12
def bot_polling():
    # Keep restarting the bot when polling fails; leave the loop on a clean exit.
    while True:
        try:
            logger.info("New bot instance started")
            bot.polling(none_stop=True,
                        interval=BOT_INTERVAL,
                        timeout=BOT_TIMEOUT)
        except Exception as ex:  # Error in polling
            logger.warning(
                f"Bot polling failed, restarting in {BOT_TIMEOUT} sec. Error:\n{ex}"
            )
            bot.stop_polling()
            sleep(BOT_TIMEOUT)
        else:  # Clean exit
            bot.stop_polling()
            logger.info("Bot polling loop finished")
            break


if __name__ == "__main__":
    arg_parser = create_argument_parser()
    cmdline_arguments = arg_parser.parse_args()
    log_level = getattr(cmdline_arguments, "loglevel", None)

    if not log_level:
        log_level = DEFAULT_LOG_LEVEL

    logging.getLogger("main").setLevel(log_level)
    configure_colored_logging(log_level)

    bot_polling()
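
# configure_colored_logging is not defined above; a minimal sketch assuming it
# wraps the coloredlogs package (the exact setup is an assumption):
import coloredlogs

def configure_colored_logging(log_level):
    coloredlogs.install(level=log_level)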