def partition(self, src=0, src_k=30, trg_k=125, share_triplets=True):
    trg = 1 - src
    if share_triplets:
        trg_triplets = self.share_triplets(self.data.triples[src],
                                           self.data.triples[trg],
                                           self.train_set[src],
                                           self.train_map[src])
        src_triplets = self.share_triplets(self.data.triples[trg],
                                           self.data.triples[src],
                                           self.train_set[trg],
                                           self.train_map[trg])
    else:
        src_triplets, trg_triplets = reversed(self.data.triples)
    g0 = self.construct_graph(src_triplets, cnt_as_weight=True)
    print('construct src graph complete, total nodes={0}, total edges={1}'
          .format(len(g0.nodes), len(g0.edges)))
    mincut, src_nodes = nxmetis.partition(g0, src_k)
    print('src graph partition complete, mincut=', mincut)
    src_train, trg_train = self.subgraph_trainset(src_nodes, src)
    print('filter trainset complete')
    # g1 = self.construct_graph(trg_triplets, None, keep_inter_edges=True)
    g1 = self.construct_graph(trg_triplets, trg_train, keep_inter_edges=False)
    print('construct trg graph complete')
    mincut, trg_nodes = nxmetis.partition(g1, trg_k)
    print('trg graph partition complete, mincut=', mincut)
    return src_nodes, trg_nodes, src_train, self.subgraph_trainset(trg_nodes, trg, True)
def metis_partition_groups_seeds(G, maximum_seed_size):
    CC = [cc for cc in nx.connected_components(G)]
    GL = []
    for subV in CC:
        if len(subV) > maximum_seed_size:
            # use metis to split the graph
            subG = nx.subgraph(G, subV)
            nparts = int(len(subV) / maximum_seed_size + 1)
            (edgecuts, parts) = nxmetis.partition(subG, nparts, edge_weight='weight')
            # only add connected components
            for p in parts:
                pG = nx.subgraph(G, p)
                GL += [list(cc) for cc in nx.connected_components(pG)]
            # add to group list
            # GL += parts
        else:
            GL += [list(subV)]

    SL = []
    for p in GL:
        pG = nx.subgraph(G, p)
        SL += [nx.maximal_independent_set(pG)]

    return GL, SL
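A minimal usage sketch for the grouping helper above, assuming networkx and nxmetis are importable and the function itself is in scope; the two-clique graph is a made-up example:

import networkx as nx

# two 6-node cliques joined by a single bridge edge form one 12-node component
G = nx.disjoint_union(nx.complete_graph(6), nx.complete_graph(6))
G.add_edge(0, 6)

# the 12-node component exceeds the cap, so METIS splits it
groups, seeds = metis_partition_groups_seeds(G, maximum_seed_size=8)
print([sorted(g) for g in groups])
print(seeds)  # one maximal independent set per group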
def run_metis_partitioning(graph, nparts, ubvec, tpwgts, node_weight, edge_weight):
    # Format metis parameters
    if tpwgts is not None:
        tpwgts = [[val] for val in tpwgts]
        ubvec = [ubvec]

    # Run metis
    logging.info(
        "Partitioning the graph using METIS (nparts=%s, ubvec=%s, tpwgts=%s, "
        "node_weight=%s, edge_weight=%s)",
        nparts, ubvec, tpwgts, node_weight, edge_weight)
    output = nxmetis.partition(graph, nparts, node_weight=node_weight,
                               edge_weight=edge_weight, tpwgts=tpwgts,
                               ubvec=ubvec)
    objval = output[0]
    partitions = output[1]
    logging.info(
        "The graph was partitioned into %s partitions by METIS (objval=%s)",
        len(partitions), objval)

    # Create assignments
    assignments = {}
    for index, partition in enumerate(partitions):
        for node in partition:
            # node IDs start at 0, partition IDs start at 0
            assignments[node] = index

    return assignments
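A hedged sketch of calling the wrapper above with explicit target part weights; it assumes the function is in scope and logging is configured, and the 60/40 split is purely illustrative:

import logging
import networkx as nx

logging.basicConfig(level=logging.INFO)

g = nx.path_graph(10)
# scalar inputs: the wrapper reshapes tpwgts to [[0.6], [0.4]] and ubvec to [1.05]
assignments = run_metis_partitioning(
    g, nparts=2, ubvec=1.05, tpwgts=[0.6, 0.4],
    node_weight='weight', edge_weight='weight')
print(assignments)  # {node: partition_id}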
def graph_partition(communication_graph):
    partition = nxmetis.partition(communication_graph, 2, node_weight=None,
                                  node_size=None, edge_weight='weight',
                                  recursive=True)
    # LLP: checking if the partitions have the same size;
    # if not, we move items from the biggest to the smallest until equilibrium is met
    if len(partition[1][0]) != len(partition[1][1]):
        # figure out which side is the smallest one
        left_is_smallest = len(partition[1][0]) < len(partition[1][1])
        # call the smallest side "small"
        small, big = ((partition[1][1], partition[1][0]),
                      (partition[1][0], partition[1][1]))[left_is_smallest]
        while len(small) < len(big):
            small.append(big.pop(0))

    left_graph = nx.Graph()
    left_graph.add_nodes_from(partition[1][0])
    for u in partition[1][0]:
        for v in partition[1][0]:
            if communication_graph.has_edge(u, v):
                val = communication_graph.get_edge_data(u, v)['weight']
                left_graph.add_edge(u, v, weight=val)

    right_graph = nx.Graph()
    right_graph.add_nodes_from(partition[1][1])
    for u in partition[1][1]:
        for v in partition[1][1]:
            if communication_graph.has_edge(u, v):
                val = communication_graph.get_edge_data(u, v)['weight']
                right_graph.add_edge(u, v, weight=val)

    global len_graph_part
    len_graph_part = len(left_graph)
    return left_graph, right_graph
def graph_node_partition(topology_graph):
    # proper case, if our topo is a graph
    partition = nxmetis.partition(topology_graph, 2, node_weight=None,
                                  node_size=None, edge_weight=None,
                                  recursive=True)
    # LLP: checking if the partitions have the same size;
    # if not, we move items from the biggest to the smallest until equilibrium is met
    if len(partition[1][0]) != len(partition[1][1]):
        # figure out which side is the smallest one
        left_is_smallest = len(partition[1][0]) < len(partition[1][1])
        # call the smallest side "small"
        small, big = ((partition[1][1], partition[1][0]),
                      (partition[1][0], partition[1][1]))[left_is_smallest]
        while len(small) < len(big):
            small.append(big.pop(0))

    left_graph = nx.Graph()
    left_graph.add_nodes_from(partition[1][0])
    for u in partition[1][0]:
        for v in partition[1][0]:
            if topology_graph.has_edge(u, v):
                left_graph.add_edge(u, v)

    right_graph = nx.Graph()
    right_graph.add_nodes_from(partition[1][1])
    for u in partition[1][1]:
        for v in partition[1][1]:
            if topology_graph.has_edge(u, v):
                right_graph.add_edge(u, v)

    return left_graph, right_graph
def solve(G, k, s, rowdy_groups, i):
    # TODO: Write this method as you like. We'd recommend changing the arguments here as well
    # precalculate rowdy groups
    # k = number of buses, s = max bus size
    # inputs 29 and 1064 are messed up; fall back to a naive even split
    if i == 29 or i == 1064:
        nodes = list(G.nodes())
        step = len(nodes) // k
        return [nodes[j:j + step] for j in range(0, len(nodes), step)]

    seedr = random.randint(0, 100000)
    buses = []
    options = nxmetis.MetisOptions(seed=seedr)
    vol, buses = nxmetis.partition(G, k, recursive=True, options=options)

    # adjust buses that exceed the size cap s
    if any(1 for bus in buses if len(bus) > s):
        # readjust partition
        print(s, k, [len(bus) for bus in buses])
        # O(N) pass to pick off the overflow and rebalance by hand
        picks = []
        for j in range(len(buses)):
            if len(buses[j]) > s:
                picks.extend(buses[j][s:])
                buses[j] = buses[j][:s]
        for j in range(len(buses)):
            if len(buses[j]) < s:
                space = s - len(buses[j])
                buses[j].extend(picks[len(picks) - space:])
                picks = picks[:len(picks) - space]
        print(s, k, [len(bus) for bus in buses])
    return buses
def partition_by_edge(self, src=0, k=30):
    g0 = self.construct_edge_graph(self.data.triples[src])
    trg = 1 - src
    print('construct src graph complete, total nodes={0}, total edges={1}'
          .format(len(g0.nodes), len(g0.edges)))
    mincut, src_edges = nxmetis.partition(g0, k)
    print('src graph partition complete, mincut=', mincut)
def __call__(self, model, n_chips):
    block_map = dict(enumerate(model.blocks))

    block_rates = None
    if self.ensemble_rates is not None:
        block_rates = ensemble_to_block_rates(model, self.ensemble_rates)
        block_rates = {
            block: np.round(rate * self.rate_scale)
            for block, rate in block_rates.items()
        }

    block_conns = estimate_interblock_activity(block_map, block_rates=block_rates)

    # partition graph
    G = networkx.Graph()
    G.add_nodes_from(block_map.keys())
    edge_map = set()
    for i in block_map:
        for j, val in block_conns[i].items():
            if (i, j) in edge_map or (j, i) in edge_map:
                continue
            val = val + block_conns[j].get(i, 0)
            G.add_edge(i, j, weight=int(round(val)))  # weights must be integers
            edge_map.add((i, j))
            edge_map.add((j, i))

    _, parts = nxmetis.partition(G, nparts=int(n_chips))

    for i, part in enumerate(parts):
        if len(part) > 128:
            raise ValueError(
                f"Partition {i} has {len(part)} cores, "
                "which exceeds the available 128 cores"
            )

    # --- create board
    board = Board()

    # add inputs to board
    for input in model.inputs:
        self.input_to_board(input, board)

    # blocks to chips
    for part in parts:
        chip = board.new_chip()
        for block_idx in part:
            block = block_map[block_idx]
            self.block_to_new_core(block, chip)

    # add probes
    board.probes.extend(model.probes)

    logger.info("METIS allocation across %d chips", board.n_chips)

    return board
def metis_partitions(graph, num_par, file_name, per):
    par_result = nxmetis.partition(graph, num_par)[1]
    name = '/' + file_name + str(num_par) + '_' + str(per)
    graph_name = '/' + file_name + str(num_par) + '_' + str(per) + '_graph'
    path = METIS_GRAPH_SAVE_PATH
    save_obj(par_result, name, path)
    # save_obj(graph, graph_name, path)
    return graph, par_result
def metis_partition(G):
    # For further details on metis parameters, please refer to the manual
    settings = nxmetis.MetisOptions(ncuts=4, niter=200, ufactor=280)
    par = nxmetis.partition(G, 2, options=settings)
    the_edge_cut = par[0]
    community1 = par[1][0]
    community2 = par[1][1]
    comm = [community1, community2, the_edge_cut]
    return comm
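A minimal usage sketch for the two-way split above, assuming networkx and the function itself are importable; the karate-club graph is just a convenient built-in test case:

import networkx as nx

G = nx.karate_club_graph()
community1, community2, edge_cut = metis_partition(G)
print(len(community1), len(community2), edge_cut)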
def metis_partition(G):
    """
    Takes a networkX graph and partitions it into two clusters
    using the METIS algorithm.
    Returns the graph with added cluster attributes.
    """
    partition_list = partition(G, 2)[1]
    for i in range(2):
        for username in partition_list[i]:
            G.add_node(username, cluster=i)
    return G
def partition(self, num_partitions=5):
    for node in self.cg.graph.nodes:
        w = self.cg.graph.nodes[node]["node_data"].weight
        self.cg.graph.nodes[node]["weight"] = int(w)
    for nfrom, nto in self.cg.graph.edges:
        w = self.cg.graph[nfrom][nto]["edge_data"].weight
        self.cg.graph[nfrom][nto]["weight"] = int(w)
    bgraph = nx.Graph(self.cg.graph)
    parts = nxmetis.partition(bgraph, num_partitions)
    return parts
def partition_metis(g, fpga, pe, ufactor=1):
    logger.debug("Dividing into {} partitions, ufactor: {}".format(fpga, ufactor))
    ug = g.to_undirected()
    for node in ug.nodes():
        ug.nodes[node]['weight'] = ug.degree(node)
    objval, fpgaparts = nxmetis.partition(
        ug, fpga, options=nxmetis.MetisOptions(contig=False, ufactor=ufactor))
    logger.debug("Edges crossing: {} , expected from random partition: {}".format(
        objval, nx.number_of_edges(ug) * (fpga - 1) / fpga))
    logger.debug("Improvement: {}x".format(
        (nx.number_of_edges(ug) * (fpga - 1) / fpga) / objval))

    parts = []
    for part in fpgaparts:
        parts.extend(_partition_greedy(g, pe, part))

    return relabel_with_parts(g, parts)
def partition_here(graph):
    if nx.is_empty(graph):
        return 0, 0
    Gcc = sorted(nx.connected_components(graph), key=len, reverse=True)
    G = graph.subgraph(Gcc[0])
    settings = nxmetis.MetisOptions(ncuts=4, niter=200, ufactor=280)
    par = nxmetis.partition(G, 2, options=settings)
    community1 = par[1][0]
    community2 = par[1][1]
    rwc = np.mean(randomwalk_polarization(G, 100, 0.02, 1000, community1, community2))
    prc = len(G) / len(graph)
    return rwc, prc
def create_partition_graph(tgraph, pgraph):
    # We partition the given app graph into len(pgraph) chunks: tasks that
    # communicate tightly are combined into one block, and an edge between two
    # blocks has weight equal to the sum of all edge weights between the tasks
    # in the two different blocks.
    partition = nxmetis.partition(tgraph, len(pgraph), node_weight=None,
                                  node_size=None, edge_weight='weight',
                                  recursive=True)
    partition_graph = nx.Graph()
    partition_graph.add_nodes_from(pgraph.nodes())
    for i in range(len(partition[1])):
        for j in range(len(partition[1])):
            if j != i:
                val = 0
                for elem_from_i in partition[1][i]:
                    for elem_from_j in partition[1][j]:
                        if tgraph.has_edge(elem_from_i, elem_from_j):
                            val += tgraph.get_edge_data(elem_from_i, elem_from_j)['weight']
                if val > 0:
                    partition_graph.add_edge(i, j, weight=val)
    return partition, partition_graph
def partition_graph(graph, num_blocks):
    block_node_sets = []
    node_block_dict = {}
    logger.debug('-' * 5 + ' starting partitioning ' + '-' * 5)
    (edge_cut, partitions) = nxmetis.partition(graph, num_blocks)
    logger.debug('number of edge cuts: {:d}'.format(edge_cut))
    for ind, partition in enumerate(partitions):
        subgraph = nx.subgraph(graph, partition)
        logger.debug('partition: {:d}'.format(ind))
        logger.debug('number of nodes: {:d}'.format(subgraph.number_of_nodes()))
        logger.debug('number of edges: {:d}'.format(subgraph.number_of_edges()))
        logger.debug('number of connected components: {:d}'.format(
            nx.number_connected_components(subgraph)))
        logger.debug('-' * 5)
        block_node_sets.append(sorted(partition))
        for node in subgraph.nodes:
            node_block_dict[node] = ind

    # key - block id, value - boundary edge list
    block_boundary_edges_dict = {}
    cut = 0
    for edge in graph.edges():
        node_1, node_2 = edge
        node_1_block_id = node_block_dict[node_1]
        node_2_block_id = node_block_dict[node_2]
        if not node_1_block_id == node_2_block_id:
            cut += 1
            if node_1_block_id not in block_boundary_edges_dict:
                block_boundary_edges_dict[node_1_block_id] = []
            if node_2_block_id not in block_boundary_edges_dict:
                block_boundary_edges_dict[node_2_block_id] = []
            block_boundary_edges_dict[node_1_block_id].append((node_1, node_2))
            block_boundary_edges_dict[node_2_block_id].append((node_2, node_1))
    logger.debug('number of edge cuts: {:d}'.format(cut))
    return block_node_sets, node_block_dict, block_boundary_edges_dict
def nxmetis_partition_np_adj_mat(adj_mat, num_partitions):
    '''
    Args:
        adj_mat: numpy array with adj_mat[i][j] = 1 if there is an edge
            between nodes i and j, and 0 otherwise
        num_partitions: number of partitions
    Return:
        partition2nodes: list of lists, each sublist is the nodes for the
            respective partition
        node2partition: size-num_nodes list, node2partition[i] is the
            partition index of node i
    '''
    G = np_adj_mat_to_nxgraph(adj_mat)
    _, partition2nodes = nxmetis.partition(G, num_partitions)
    node2partition = [0 for i in range(len(adj_mat))]
    for i, partition in enumerate(partition2nodes):
        for node in partition:
            node2partition[node] = i
    return partition2nodes, node2partition
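The helper np_adj_mat_to_nxgraph is not shown above; a plausible minimal implementation (an assumption, not the original) built on networkx's from_numpy_array:

import networkx as nx
import numpy as np

def np_adj_mat_to_nxgraph(adj_mat):
    # Build an undirected graph whose nodes are 0..n-1 and whose edges are
    # the nonzero entries of the (symmetric) adjacency matrix.
    return nx.from_numpy_array(np.asarray(adj_mat))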
def Metis(self, flag):
    s = time.time()
    (cut, parts) = nxmetis.partition(self.G, edge_weight='totalWeight',
                                     nparts=self.parts, recursive=True)
    e = time.time()
    self.partitioning = e - s
    print("partition cost:", e - s)
    partitions = [set() for i in range(self.parts)]
    for i in range(len(parts)):
        for node in parts[i]:
            # record the index of the partition the node belongs to
            self.G.nodes[node]['partitionID'] = i
            partitions[i].add(node)
    return partitions
def three_graph(snapshot_tuple, graph_type):
    retweet_graph = snapshot_tuple[0]
    mention_graph = snapshot_tuple[1]
    reply_graph = snapshot_tuple[2]
    partitions = nxmetis.partition(retweet_graph.to_undirected(), 2)
    left_side_nodes = partitions[1][0]
    right_side_nodes = partitions[1][1]
    if graph_type == 'retweet':
        main_graph = retweet_graph
        left_side = retweet_graph.subgraph(left_side_nodes)
        right_side = retweet_graph.subgraph(right_side_nodes)
    elif graph_type == 'mention':
        main_graph = mention_graph
        left_side = mention_graph.subgraph(left_side_nodes)
        right_side = mention_graph.subgraph(right_side_nodes)
    elif graph_type == 'reply':
        main_graph = reply_graph
        left_side = reply_graph.subgraph(left_side_nodes)
        right_side = reply_graph.subgraph(right_side_nodes)
    return main_graph, left_side, right_side
def compareMetis(self):
    print("baseline metis")
    s = time.time()
    (cut, parts) = nxmetis.partition(self.G, edge_weight='trueweight',
                                     nparts=self.parts, recursive=True)
    e = time.time()
    self.baselineTime = e - s
    print("partition cost:", e - s)
    partitions = [set() for i in range(self.parts)]
    print("edge_cut by library:", cut)
    for i in range(len(parts)):
        for node in parts[i]:
            self.G.nodes[node]['partitionID'] = i
            partitions[i].add(node)
    self.quality(partitions, 0)
def test_partition(self):
    partition = nxmetis.partition(self.G, 4)
    # When we choose one node from one part of the partitioned Graph,
    # it must be adjacent to one or more of the nodes in the same part.
    # This verifies the continuity of the chain of nodes.
    parts = partition[1]  # List containing partitioned node lists

    assert_equal(partition[0], 4)
    assert_equal(len(partition[1]), 4)

    for part in parts:
        assert_not_equal(0, len(part))  # Non-empty set
        assert_equal(len(part), len(set(part)))  # Duplicate-free
        assert nx.is_connected(self.G.subgraph(part))  # Connected

    # Disjoint sets
    for part1, part2 in itertools.combinations(parts, 2):
        assert_equal(set(), set(part1) & set(part2))

    # These parts must be exhaustive with the node list of the Graph
    parts_combined = parts[0] + parts[1] + parts[2] + parts[3]
    assert_equal(set(parts_combined), set(self.G))
def partition_graph(graph: Graph, num_partitions: int,
                    weighting_function: Optional[Callable[[Any], int]] = None,
                    **METIS_opts):
    wfunc = weighting_function if weighting_function is not None else default_weight_func
    weights = {node.idx: wfunc(node.weight) for node in graph.nodes}

    G = graph.asNetworkx()
    nx.set_node_attributes(G, weights, 'weight')
    _, parts = nxmetis.partition(G, num_partitions)

    parts = sorted((idx, n) for n, p in enumerate(parts) for idx in p)
    parts = [n for _, n in parts]
    post_process_partition(graph, parts)

    actual_nparts = len({n.part for n in graph.nodes})
    if actual_nparts < num_partitions:
        print(f"expected {num_partitions} partitions but only {actual_nparts} were found, "
              "indicating that the model being partitioned is too small")
        print("consider increasing the depth of the graph or disabling the basic-blocks option")
    return graph
def test_partition(self):
    partition = nxmetis.partition(self.G, 4)
    # When we choose one node from one part of the partitioned Graph,
    # it must be adjacent to one or more of the nodes in the same part.
    # This verifies the continuity of the chain of nodes.
    parts = partition[1]  # List containing partitioned node lists

    nose.tools.assert_equal(partition[0], 4)
    nose.tools.assert_equal(len(partition[1]), 4)

    for part in parts:
        nose.tools.assert_not_equal(0, len(part))  # Non-empty set
        nose.tools.assert_equal(len(part), len(set(part)))  # Duplicate-free
        nose.tools.ok_(nx.is_connected(self.G.subgraph(part)))  # Connected

    # Disjoint sets
    for part1, part2 in itertools.combinations(parts, 2):
        nose.tools.assert_equal(set(), set(part1) & set(part2))

    # These parts must be exhaustive with the node list of the Graph
    parts_combined = parts[0] + parts[1] + parts[2] + parts[3]
    nose.tools.assert_equal(set(parts_combined), set(self.G))
def metis_partition_groups_seeds(G, only_seed=False, maximum_group_size=MAXIMUM_COMPONENT_SIZE):
    CC = [cc for cc in nx.connected_components(G)]
    GL = []
    for subV in CC:
        if len(subV) > maximum_group_size:
            # use metis to split the graph
            subG = nx.subgraph(G, subV)
            nparts = int(len(subV) / maximum_group_size + 1)
            (edgecuts, parts) = nxmetis.partition(subG, nparts)
            # add to group list
            GL += parts
        else:
            GL += [list(subV)]

    SL = []
    if only_seed:
        for p in GL:
            pG = nx.subgraph(G, p)
            # independent set within the group's subgraph, not the whole graph
            SL += [nx.maximal_independent_set(pG)]
    return GL, SL
matplotlib.use('Agg')

threshold = 0.8
print('load sentiment model')
model = load_model_sentiment('/root/Sentiment-analysis/sentiment_module.model')
print('load dataset')
for file in [
        'germanwings_data', 'indiana_data', 'indiasdaughter_data',
        'leadersdebate_data', 'mothersday_data'
]:
    dataloader = Dataloader('/root/tweets_dataset')
    dataset = dataloader.load_files(file)
    print('build retweet and reply dynamic graph')
    ret_graph = static_retweet_graph(dataset)
    print('partition retweet graph')
    partitions = nxmetis.partition(ret_graph, 2)
    op_nodes = partitions[1][0]
    agg_nodes = partitions[1][1]
    for graph_type in ['mention', 'reply']:
        rep_graphs = dynamic_graph(dataset, graph_type=graph_type,
                                   sentiment=True, cumulative=False)
        between_scores = {'neg': [], 'pos': []}
        op_scores = {'neg': [], 'pos': []}
        agg_scores = {'neg': [], 'pos': []}
        between_count = {'neg': [], 'pos': []}
        op_count = {'neg': [], 'pos': []}
        agg_count = {'neg': [], 'pos': []}
        for key, rep_graph in rep_graphs.items():
            for edge in rep_graph.edges:
def node_selecting_scheme(graph_t0, graph_t1, reservoir_dict, limit=0.1, scheme=4):
    # currently, only focus on the changes of network **topology**
    '''
    select nodes to be updated
    G0: previous graph @ t-1; G1: current graph @ t
    reservoir_dict: will always be maintained in ROM
    limit: fixes the number of nodes --> the percentage of nodes of a network
        to be updated (excluding new nodes)
    scheme: 1 for greedy, 2 for random, 3 for modularity based, 4 for METIS based
    '''
    G0 = graph_t0.copy()
    G1 = graph_t1.copy()
    # one may directly use stream-added edges if possible
    edge_add = edge_s1_minus_s0(s1=set(G1.edges()), s0=set(G0.edges()))
    edge_del = edge_s1_minus_s0(s1=set(G0.edges()), s0=set(G1.edges()))

    node_affected_by_edge_add = unique_nodes_from_edge_set(edge_add)
    node_affected_by_edge_del = unique_nodes_from_edge_set(edge_del)
    node_affected = list(set(node_affected_by_edge_add + node_affected_by_edge_del))
    node_add = [node for node in node_affected_by_edge_add if node not in G0.nodes()]
    node_del = [node for node in node_affected_by_edge_del if node not in G1.nodes()]

    # now, we only consider the 1st-order affected nodes that are in both G0 and G1;
    # for 2nd-order, see "select_most_affected_nodes_nbrs"
    exist_node_affected = list(set(node_affected) - set(node_add) - set(node_del))
    exist_node_not_affected = list(set(G1.nodes()) - set(node_add) - set(exist_node_affected))

    if len(node_del) != 0:
        reservoir_key_list = list(reservoir_dict.keys())
        for node in node_del:
            if node in reservoir_key_list:
                # if a node is deleted, also delete it from the reservoir
                del reservoir_dict[node]

    t1 = time.time()
    # the maximum number of nodes to be selected, i.e. **alpha** in the paper
    num_limit = int(G1.number_of_nodes() * limit)
    most_affected_nodes = []  # used in scheme 1
    random_nodes = []         # used in scheme 2
    diverse_nodes = []        # used in scheme 3 and scheme 4
    node_update_list = []     # all the nodes to be updated
    # update reservoir dict {node_ID: changes, ...} based on the stream edges
    reservoir_dict = update_reservoir_dict(G0, G1, reservoir_dict, exist_node_affected)

    # ------------------------- node selecting strategy 4 -------------------------
    # NOTE: one may use a different node selecting strategy, so that other desirable
    # network topology can be encoded into random walks
    if True:
        print('scheme == 4, the METIS based diverse approach biased to most affected nodes')
        import nxmetis
        start_comm_det = time.time()
        cost_parts = nxmetis.partition(G=G1, nparts=num_limit)
        parts = cost_parts[1]
        # cost = cost_parts[0]  # unused

        empty_part_counter = 0
        for part in parts:  # part i.e. community; operate on one community per loop
            if len(part) == 0:
                empty_part_counter += 1
            else:
                node_scores = []  # node_scores within this part
                for node in part:
                    try:
                        node_scores.append(math.exp(reservoir_dict[node] / G0.degree[node]))
                    except:
                        node_scores.append(1)  # (2 or e)^0 = 1
                node_scores_prob = []  # normalize node_scores within this part
                part_sum = sum(node_scores)
                for i in range(len(node_scores)):
                    node_scores_prob.append(node_scores[i] / part_sum)
                # sample one node from this part based on node_scores_prob,
                # which biases toward recent changes
                diverse_nodes.append(np.random.choice(part, p=node_scores_prob))

        # ---- due to the limitation of METIS, there might be a few empty parts ----
        if empty_part_counter != 0:
            remaining_pool = list(G1.nodes() - set(node_add) - set(diverse_nodes))
            remaining_pool_score = []
            for node in remaining_pool:
                try:
                    remaining_pool_score.append(math.exp(reservoir_dict[node] / G0.degree[node]))
                except:
                    remaining_pool_score.append(1)
            remaining_pool_score_sum = sum(remaining_pool_score)
            remaining_pool_scores_prob = []
            for i in range(len(remaining_pool_score)):
                remaining_pool_scores_prob.append(remaining_pool_score[i] / remaining_pool_score_sum)
            diverse_nodes.extend(np.random.choice(remaining_pool, size=empty_part_counter,
                                                  replace=True, p=remaining_pool_scores_prob))
        end_comm_det = time.time()
        print('METIS time: ', end_comm_det - start_comm_det)
        node_update_list = node_add + diverse_nodes
    # --------------------- END of node selecting strategy 4 ----------------------

    for node in node_update_list:
        try:
            del reservoir_dict[node]  # if updated, delete it from the reservoir
        except:
            pass

    t2 = time.time()
    print(f'--> node selecting time; time cost: {(t2-t1):.2f}s')
    print(f'# num_limit {num_limit}, # nodes updated {len(node_update_list)}')
    print(f'# nodes added {len(node_add)}, # nodes deleted {len(node_del)}')
    print(f'# nodes most affected {len(most_affected_nodes)} \t ===== S1 =====')
    print(f'# of random nodes {len(random_nodes)} \t ===== S2 =====')
    print(f'# diverse nodes {len(diverse_nodes)} \t ===== S3 or S4 =====')
    print(f'# nodes in reservoir with accumulated changes but not updated {len(list(reservoir_dict))}')
    print(f'# all nodes affected {len(node_affected)}')
    return node_update_list, reservoir_dict
    default=30,
    type=int,
    help="Number of nodes in Erdos-Renyi Graph")
parser.add_argument(
    "--p",
    default=0.4,
    type=float,
    help="Probability of edge being present in Erdos-Renyi Graph")
args = parser.parse_args()

if __name__ == "__main__":
    g = generate_er_graph(args.nodes, args.p)
    options = nxmetis.MetisOptions(dbglvl=nxmetis.enums.MetisDbgLvl.time, niter=1)
    _, parts = nxmetis.partition(G=g, nparts=2, options=options, recursive=False)
    recursive_fiedler_values = (nx.algebraic_connectivity(g.subgraph(parts[0])),
                                nx.algebraic_connectivity(g.subgraph(parts[1])))

    MAX_FIEDLER_VALUEX = -sys.maxsize
    MAX_FIEDLER_VALUEY = -sys.maxsize
    for i in range(100):
        swap_vertices, partition_vector = heurisitc_algorithm(g, parts)
        parts[0] = [vtx for vtx, i in enumerate(partition_vector) if i == 0]
        parts[1] = [vtx for vtx, i in enumerate(partition_vector) if i == 1]
        # print(initial_fiedler_values)
        # print(parts)
        (max_x, max_y) = maximum_fiedler_value_swaps(g, swap_vertices,
                                                     partition_vector,
                                                     recursive_fiedler_values)
        # print("Heuristic Bisection: {}".format((max_x, max_y)))
# constructing nx.Graph and using metis in order to get min-cut partition
G = nx.Graph()
G.add_edges_from(non_anchor_edge_list)
for node, degree in entity_degree.items():
    if node in G:
        G.nodes[node]['node_weight'] = degree

# -1 leaves each field at its METIS default; objtype=1 => vol
options = nxmetis.MetisOptions(
    ptype=-1, objtype=1, ctype=-1, iptype=-1, rtype=-1, ncuts=-1,
    nseps=-1, numbering=-1, niter=cur_iter, seed=-1, minconn=-1,
    no2hop=-1, contig=-1, compress=-1, ccorder=-1, pfactor=-1,
    ufactor=-1, dbglvl=-1)

edgecuts, parts = nxmetis.partition(G, nparts=partition_num, node_weight='node_weight')

# putting residue randomly into the non-anchor set
residue = non_anchor_id.difference(non_anchor_edge_included_vertex)
for v in residue:
    parts[randint(0, partition_num - 1)].append(v)

# printing the number of entities in each partition
printt('[info] maxmin > # of entities in each partitions : [%s]' %
       " ".join([str(len(p)) for p in parts]))

# send multiple elements at once
master_sock.send(pack('!i', len(list(anchor))))
master_sock.send(pack('!' + 'i' * len(list(anchor)), *list(anchor)))
def find_metis_parts(conn, cur, parts):
    """TODO"""
    # Open a cursor to perform database operations
    (factor_view, variable_view, weight_view) = get_views(cur)

    # Obtain graph
    (factor, factor_pt, factor_ufo, fmap, edges) = get_factors(cur, factor_view)

    hyperedges = []
    for f in factor:
        newedge = []
        for i in range(f['ftv_offset'], f['ftv_offset'] + f['arity']):
            newedge.append(fmap[i]['vid'])
        hyperedges.append(newedge)

    G = nx.Graph()
    for e in hyperedges:
        # expand each hyperedge into a clique over its variables
        for i in range(len(e)):
            for j in range(i + 1, len(e)):
                newedge = (e[i], e[j])
                G.add_edge(*newedge)

    # Run metis to obtain partitioning
    metis_options = nxmetis.MetisOptions(objtype=nxmetis.enums.MetisObjType.vol)
    (cost, partitions) = nxmetis.partition(G, parts, options=metis_options)
    print(80 * "*")
    print(cost)
    print(partitions)
    print(80 * "*")

    # Find nodes to master
    master_variables = set([])
    # Get all edges
    cut_edges = set(G.edges())
    for p in partitions:
        H = G.subgraph(p)
        cut_edges -= set(H.edges())
        print(H.edges())
        H.clear()
    for edge in cut_edges:
        n1, n2 = edge
        master_variables.add(n1)
        master_variables.add(n2)

    # Store partition in DB
    try:
        cur.execute("CREATE TABLE variable_to_cc(dd_id bigint, cc_id bigint);")
    except:
        conn.rollback()
        cur.execute("TRUNCATE variable_to_cc;")
    rows = []
    # Output master variables
    for node in master_variables:
        rows.append([node, -1])
    print(master_variables)

    # Output minion variables
    pid = 0
    for p in partitions:
        only_master = True
        for node in p:
            if node not in master_variables:
                only_master = False
                rows.append([node, pid])
        if not only_master:
            pid += 1
    print(rows)
    dataText = ','.join(cur.mogrify('(%s,%s)', row) for row in rows)
    print(dataText)
    try:
        cur.execute("INSERT INTO variable_to_cc VALUES " + dataText)
        if pid > 1:
            cur.execute("CREATE INDEX dd_cc ON variable_to_cc (dd_id);")
        conn.commit()
        G.clear()
        return True
    except:
        conn.rollback()
        G.clear()
        return False
from utils.network_building import *
import nxmetis

if __name__ == '__main__':
    for file in [
            'germanwings_data', 'indiana_data', 'indiasdaughter_data',
            'leadersdebate_data', 'mothersday_data', 'baltimore_data',
            'beefban_data'
    ]:
        print('load dataset : %s' % file)
        dataloader = Dataloader('/root/tweets_dataset')
        dataset = dataloader.load_files(file)

        # mention graph analysis
        mention_graph = static_mention_graph(dataset, directed=True)
        partitions = nxmetis.partition(mention_graph.to_undirected(), 2)
        bt_con = betweenness_centrality_controversy(mention_graph, partitions)
        RWW_con = random_walk_conteroversy(mention_graph, partitions, 1000)
        print("mention graph controversy measures")
        print("Betweenness Measure = %s" % bt_con)
        print("RWW Measure = %s" % RWW_con)

        # reply graph analysis
        reply_graph = static_reply_graph(dataset, directed=True)
        partitions = nxmetis.partition(reply_graph.to_undirected(), 2)
        bt_con = betweenness_centrality_controversy(reply_graph, partitions)
        RWW_con = random_walk_conteroversy(reply_graph, partitions, 1000)
        print("reply graph controversy measures")
        print("Betweenness Measure = %s" % bt_con)
        print("RWW Measure = %s" % RWW_con)
def metis_partition(self, parts):
    edgecuts, metis_list = nxmetis.partition(self.graph, parts)
    metis_set_list = [set(l) for l in metis_list]
    return metis_set_list
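Finally, a self-contained sketch of the core nxmetis.partition call that all of the snippets above build on; it assumes networkx and networkx-metis are installed, and the random graph is an arbitrary stand-in:

import networkx as nx
import nxmetis

# a 30-node random graph as a stand-in for any of the graphs above
G = nx.gnp_random_graph(30, 0.2, seed=42)

# partition() returns (objective value, list of node lists)
edge_cut, parts = nxmetis.partition(G, 3)
print('edge cut:', edge_cut)
for i, part in enumerate(parts):
    print('part %d: %d nodes' % (i, len(part)))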