def load_gowalla_community(community_code):
    """Load a Gowalla community partition and its derived structures.

    Parameters:
        community_code: int selecting the community-detection variant.
            0 -> unweighted IA communities.
            2 -> overlapping GANXiS (TTL) communities, which also have a
                 precomputed eigenvector stationary distribution on disk.

    Returns:
        Tuple (community, com_table, cgraph, stationary) where com_table
        is built by convert_com_table(community) and stationary is None
        when no stationary-distribution file applies to the code.

    Raises:
        ValueError: if community_code is not a supported code.
    """
    # Default for codes with no stationary file. The original code left
    # `stationary` unbound for code 0, which made the return statement
    # raise NameError.  TODO(review): confirm code 0 truly has no
    # eigenvector file rather than a missing load_stationary() call.
    stationary = None
    if community_code == 0:
        community = community_library.IA_gw_communities_unweighted()
        cgraph = community_library.IA_gw_cgraph_unweighted()
    elif community_code == 2:
        community = community_library.ganxis_gw_communities_overlapping_ttl()
        cgraph = community_library.ganxis_gw_cgraph_overlapping_ttl()
        stationary = load_stationary('data/gowalla_ganxis_eigenvector.txt')
    else:
        # Previously an unknown code fell through to a NameError on
        # `community`; fail fast with a clear message instead.
        raise ValueError('unsupported community_code: %r' % (community_code,))
    return community, convert_com_table(community), cgraph, stationary
def load_foursquare_community(community_code):
    """Load a Foursquare community partition and its derived structures.

    Parameters:
        community_code: int selecting the community-detection variant.
            0 -> unweighted IA communities.
            2 -> overlapping unweighted GANXiS (TTL) communities, which
                 also have a precomputed eigenvector stationary
                 distribution on disk.

    Returns:
        Tuple (community, com_table, cgraph, stationary) where com_table
        is built by convert_com_table(community) and stationary is None
        when no stationary-distribution file applies to the code.

    Raises:
        ValueError: if community_code is not a supported code.
    """
    # Default for codes with no stationary file. The original code left
    # `stationary` unbound for code 0, which made the return statement
    # raise NameError.  TODO(review): confirm code 0 truly has no
    # eigenvector file rather than a missing load_stationary() call.
    stationary = None
    if community_code == 0:
        community = community_library.IA_fs_communities_unweighted()
        cgraph = community_library.IA_fs_cgraph_communities_unweighted()
    elif community_code == 2:
        community = community_library.ganxis_fs_communities_overlapping_unweighted_ttl()
        cgraph = community_library.ganxis_fs_cgraph_communities_overlapping_unweighted_ttl()
        stationary = load_stationary('data/foursquare_ganxis_eigenvector.txt')
    else:
        # Previously an unknown code fell through to a NameError on
        # `community`; fail fast with a clear message instead.
        raise ValueError('unsupported community_code: %r' % (community_code,))
    return community, convert_com_table(community), cgraph, stationary
    # NOTE(review): this is the tail of a function whose `def` line is
    # outside this chunk -- it builds {community index: mean PageRank of
    # members}, which matches the get_community_pagerank(communities)
    # call below; confirm against the full file.
    results = {}
    for i in xrange(len(communities)):
        # Mean PageRank over the members of community i.
        val = [pagerank[node] for node in communities[i]]
        results[i] = average(val)
    return results


# --- top-level script (Python 2) ---
# Hard-coded experiment selection: network 1, GANXiS overlapping communities.
network_code = 1
community_code = 2
network, locations, pagerank, states = routing_simulation_loader.load_network(
    network_code)
communities, com_table, cgraph, stationary = routing_simulation_loader.load_community(
    network_code, community_code)
# NOTE(review): overwrites the com_table just returned by load_community;
# presumably equivalent -- confirm convert_com_table matches the loader's.
com_table = convert_com_table(communities)
com_pagerank = get_community_pagerank(communities)
G = convert_network(network)

# For every node in more than one community (overlapping partition),
# print the pairwise difference of its communities' mean PageRank.
for node, com in com_table.items():
    for ci, cj in combinations(com, 2):
        val = com_pagerank[ci] - com_pagerank[cj]
        print ci, cj, val, abs(val)

# Same difference across each edge that bridges two distinct communities.
for u, v in G.edges_iter():
    com_u = com_table[u]
    com_v = com_table[v]
    for ci, cj in product(com_u, com_v):
        if ci == cj:
            continue
        # NOTE(review): `val` is computed but unused while the print
        # below stays commented out.
        val = com_pagerank[ci] - com_pagerank[cj]
        #print ci, cj, val, abs(val)
    # NOTE(review): tail of a function whose `def` line is outside this
    # chunk; it returns (results, count) where count tallies friend
    # pairs -- the exact loop nesting around this `if` is not visible,
    # confirm against the full file.
    if is_friend(node_i, node_j):
        count = count + 1
    return results, count


# Map the calculations among processors.
def map_calc_statistics(work_input):
    # Worker entry point for pool.map: compute shortest-path statistics
    # for one slice (`indexes`) of a community's node list.
    indexes, community = work_input
    paths, iec = calc_shortest_path(indexes, community)
    # `+ [0]` guards max() against an empty paths list.
    dia = max(paths + [0])
    # Returns (internal edge count, total path length, diameter of slice).
    return iec, sum(paths), dia


# Step 1: Load the data into memory
network, G = load_network()
print "Finished loading network"
communities = community_library.IA_fs_communities_unweighted()
com_table = community_library.convert_com_table(communities)
final = []

# Step 2: Divide the tasks among processors
num_cpus = multiprocessing.cpu_count()
for community in communities:
    # NOTE(review): a fresh Pool is created (and closed) per community.
    pool = multiprocessing.Pool(num_cpus)
    indexes = range(len(community))
    # Shuffle so each worker's slice gets a comparable mix of nodes.
    shuffle(indexes)
    work = split_work(indexes, num_cpus)
    work_input = [(element, list(community)) for element in work]
    results = pool.map(map_calc_statistics, work_input)
    pool.close()
    size = len(community)
    # Sum of per-worker internal edge counts, normalized by the number
    # of node pairs size*(size-1)/2.
    density = sum([subresults[0] for subresults in results]) / (0.5*size*(size-1))
    # (loop body appears to continue past the end of this chunk)