Example #1
def edgeCut(state,partitions=2):
    G = state
    if not isinstance(G, nx.Graph):
        G = nx.Graph()
        for node in range(len(state.nodeList)):
            G.add_node(node)
            for edge in state.nodeList[node]:
                G.add_edge(node,edge)

    (edgecuts,parts) = metis.part_graph(G,partitions)
    return edgecuts
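For context, a minimal, hedged sketch of the underlying call that edgeCut wraps; the karate-club graph and the 2-way split are illustrative assumptions, not part of the snippet above.

# Sketch only: partition a small networkx graph with the metis wrapper and
# read back the edge cut, as edgeCut() does for its `state` object.
import networkx as nx
import metis

G = nx.karate_club_graph()                  # any undirected graph works
edgecuts, parts = metis.part_graph(G, 2)    # (edge cut value, label per node)
print("edge cut:", edgecuts)
print("labels:", parts)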
Example #2
def pre_part_graph(graph, k, df=None, verbose=False):
    if verbose:
        print("Begin clustering...")
    clusters = 0
    for i, p in enumerate(graph.nodes()):
        graph.nodes[p]['cluster'] = 0
    cnts = {}
    cnts[0] = len(graph.nodes())

    while clusters < k - 1:
        maxc = -1
        maxcnt = 0
        for key, val in cnts.items():
            if val > maxcnt:
                maxcnt = val
                maxc = key
        s_nodes = [n for n in graph.nodes if graph.nodes[n]['cluster'] == maxc]
        s_graph = graph.subgraph(s_nodes)
        edgecuts, parts = metis.part_graph(s_graph,
                                           2,
                                           objtype='cut',
                                           ufactor=250)
        new_part_cnt = 0
        for i, p in enumerate(s_graph.nodes()):
            if parts[i] == 1:
                graph.nodes[p]['cluster'] = clusters + 1
                new_part_cnt = new_part_cnt + 1
        cnts[maxc] = cnts[maxc] - new_part_cnt
        cnts[clusters + 1] = new_part_cnt
        clusters = clusters + 1

    edgecuts, parts = metis.part_graph(graph, k)
    if df is not None:
        df['cluster'] = nx.get_node_attributes(graph, 'cluster').values()
    return graph
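pre_part_graph labels every node with an integer 'cluster' attribute by repeatedly bisecting the largest cluster with METIS. A hedged usage sketch follows; the caveman graph is an illustrative assumption and the snippet's own networkx/metis imports are assumed.

# Sketch only: cluster a graph of 4 loosely connected cliques into 4 parts.
import networkx as nx

G = nx.connected_caveman_graph(4, 6)
G = pre_part_graph(G, k=4, verbose=True)
labels = nx.get_node_attributes(G, 'cluster')
print(sorted(set(labels.values())))         # expected: [0, 1, 2, 3]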
Example #3
def k_way_partitioning(k:int,g:nx.Graph):

    # COARSENING
    start = time.time()
    print('starting coarsening phase')

    graphs_history , coarsening_history = coarse(g,k=k)

    end_coarsening = time.time()
    m, s = divmod((end_coarsening - start), 60)
    elapsed_time = "%d minutes and %f seconds" % (m, s)
    print('finished coarsening after', len(graphs_history), 'steps in', elapsed_time)



    # INITIAL PARTITIONING
    print('starting initial partitioning phase')
    start_init_part = time.time()

    #initial_partitioning = spectral_bisection.initial_partitioning(graphs_history[-1], k)
    edge_cut, initial_partitioning = metis.part_graph(graphs_history[-1], k, recursive=True)

    end_init_part = time.time()
    m, s = divmod((end_init_part - start_init_part), 60)
    elapsed_time = "%d minutes and %f seconds" % (m, s)
    print('finished initial partitioning in', elapsed_time)



    # UNCOARSENING
    start_uncoarsening = time.time()
    print('starting uncoarsening phase')

    final_partitioning = uncoarse(graphs_history,coarsening_history,initial_partitioning,k)

    end = time.time()
    m, s = divmod((end - start_uncoarsening), 60)
    elapsed_time = "%d minutes and %f seconds" % (m, s)
    print('finished un-coarsening in', elapsed_time)

    edge_cut = calculate_edge_cut(graphs_history[0],final_partitioning)
    print('the final edge cut is',edge_cut)

    m, s = divmod((end - start), 60)
    elapsed_time = "%d minutes and %f seconds" % (m, s)
    print('the overall time is', elapsed_time)

    # Write on file the result
    output_file(final_partitioning)

    # THE RESULT OBTAINED WITH METIS
    start_meth = time.time()
    edge_cut , metis_partitioning = metis.part_graph(graphs_history[0], k)
    end_meth = time.time()
    m, s = divmod((end_meth - start_meth), 60)
    elapsed_time = "%d minutes and %f seconds" % (m, s)
    print('the edge cut obtained with metis is', edge_cut)
    print('the time taken by metis was', elapsed_time)
Example #4
def partition_graph(adj, idx_nodes, num_clusters):
    """partition a graph by METIS."""

    start_time = time.time()
    num_nodes = len(idx_nodes)

    train_adj = adj[idx_nodes, :][:, idx_nodes]
    train_adj_lil = train_adj.tolil()
    train_ord_map = dict()
    train_adj_lists = [[] for _ in range(num_nodes)]
    for i in range(num_nodes):
        rows = train_adj_lil[i].rows[0]
        # self-edge needs to be removed for valid format of METIS
        if i in rows:
            rows.remove(i)
        train_adj_lists[i] = rows
        train_ord_map[idx_nodes[i]] = i

    if num_clusters > 1:
        _, groups = metis.part_graph(train_adj_lists, num_clusters, seed=1)
    else:
        groups = [0] * num_nodes

    parts = [[] for _ in range(num_clusters)]
    for nd_idx in range(num_nodes):
        gp_idx = groups[nd_idx]
        nd_orig_idx = idx_nodes[nd_idx]
        parts[gp_idx].append(nd_orig_idx)
        

    part_size = [len(part) for part in parts]
    print('Partitioning done. %f seconds.'%(time.time() - start_time))
    print('Max part size %d, min part size %d'%(max(part_size), min(part_size)))

    return parts
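A hedged sketch of the inputs this partition_graph expects: a symmetric scipy CSR adjacency matrix and the list of node indices to keep. The tiny 4-node graph is an illustrative assumption, and the snippet's module-level imports (time, metis) are assumed.

# Sketch only: build a 4-node adjacency matrix and partition it into 2 groups.
import scipy.sparse as sp

adj = sp.csr_matrix([[0, 1, 1, 0],
                     [1, 0, 1, 0],
                     [1, 1, 0, 1],
                     [0, 0, 1, 0]])
idx_nodes = [0, 1, 2, 3]
parts = partition_graph(adj, idx_nodes, num_clusters=2)
print([len(p) for p in parts])              # sizes of the two parts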
Example #5
def metis_prm(csr, B = 16):
    import metis
    import networkx as nx
    import  pyamg
    import numpy as np
    from time import time
    bl_ar = []
    N = csr.shape[0]
    nparts = N // B   # integer number of blocks
    G = nx.from_scipy_sparse_matrix(csr)
    n, l = metis.part_graph(G, nparts=nparts, recursive=True)
    prm = []
    tmp = []
    for i in range(nparts): tmp.append([])
    k = 0
    for i in l:
        tmp[i].append(k)
        k += 1
    k = 0
    for i in tmp:
        bl_ar.append(len(i))
        k += 1
        for j in i:
            prm.append(j)
    return prm, np.array(bl_ar)
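A hedged usage sketch for metis_prm: the input is a symmetric scipy CSR matrix and B the target block size. The random test matrix below is an illustrative assumption, and the function itself assumes an older NetworkX that still provides from_scipy_sparse_matrix.

# Sketch only: build a random symmetric CSR matrix and compute the permutation.
import numpy as np
import scipy.sparse as sp

A = sp.random(64, 64, density=0.1, format='csr', random_state=0)
A = A.maximum(A.T).tocsr()                  # symmetrize the sparsity pattern
prm, block_sizes = metis_prm(A, B=16)
print(len(prm), block_sizes.sum())          # both should equal 64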
Example #6
def metis_kway(g, k):
    """ Perform METIS k-way partition. """
    al = g.get_adjlist()
    if metis_import_exception is not None:
        raise metis_import_exception
    _, m = part_graph(al, k, recursive=False, contig=True, minconn=True)
    return m
Example #7
def metis_bipartition(g, n=2):
    """ Perform METIS bipartition. Do n cuts and choose the best. """
    al = g.get_adjlist()
    if metis_import_exception is not None:
        raise metis_import_exception
    _, m = part_graph(al, ncuts=n, recursive=False, contig=True, minconn=True)
    return array(m)
Example #8
def get_districts_from_triangles(voters: List[Voter],
                                 raw_triangles: List[Polygon],
                                 n_districts: int, seed: int) -> List[Polygon]:
    # triangles = []
    # for rt in raw_triangles:
    #     triangles.append(Triangle(get_voters_in_polygon(rt, voters), rt))
    # triangles = sorted(triangles, key=lambda x: x.n_voters)
    # free_triangles = list(range(len(triangles)))
    # districts = []
    # for i in range(n_districts):
    #     index = random.random.sample(free_triangles, 1)
    #     d = District()
    #     d.append_triangle(triangles[index])
    #     districts.append(d)
    #     free_triangles.pop(index)
    graph = nx.Graph()
    graph.add_nodes_from(list(range(len(raw_triangles))))
    for index, triangle in enumerate(raw_triangles):
        adj_trs = find_adjacent_triangle(index, raw_triangles)
        for tr in adj_trs:
            graph.add_edge(index, tr)
    # Extension
    (edgecuts, parts) = metis.part_graph(graph,
                                         n_districts,
                                         ncuts=2,
                                         niter=20,
                                         contig=True)
    districts = []
    for i in range(n_districts):
        districts.append(District())
    for index, part in enumerate(parts):
        districts[part].append_triangle(raw_triangles[index])
    return [d.get_one_polygon() for d in districts]
Example #9
 def process(self):
     # divide the graph into partitions such that each partition contains around 50 nodes
     (self._edgecuts, self._partitions) = metis.part_graph(
         self._data_pointer_nx,
         nparts=int(ceil(self._data_pointer_nx.number_of_nodes() / 50.0)))
     self._color_count = [1] * (max(self._partitions) + 1)
     # run the monte carlo on each of the partitions
     self._partition_counts = Counter(self._partitions)
     for partition_num in xrange(max(self._partitions) + 1):
         while (sum([
                 self._colored_nodes[idx] for idx in xrange(self._num_nodes)
                 if self._partitions[idx] == partition_num
         ]) < self._partition_counts[partition_num]):
             # print sum([self._colors[idx] for idx in xrange(self._num_nodes) if self._partitions[idx] == partition_num]), self._partition_counts[partition_num]
             self._idle_nodes = [0] * self._num_nodes
             for i in xrange(self._num_nodes):
                 if self._colored_nodes[i] == 1:
                     self._idle_nodes[i] = 1
             temp_mis = self._get_mis(partition_num)
             for i in temp_mis:
                 self._colors[i] = self._color_count[self._partitions[i]]
                 self._colored_nodes[i] = 1
             self._color_count[self._partitions[i]] += 1
     # merge the colored partitions and resolve any coloring conflicts
     self._resolve_color_conflicts()
Example #10
    def gen_cluster_info(self, use_metis=False):
        G = nx.Graph()
        G.add_edges_from(
            torch.stack(self.edge_index).cpu().numpy().transpose())
        if use_metis:
            import metis

            _, parts = metis.part_graph(G, self.num_clusters)
        else:
            from sklearn.cluster import KMeans

            clustering = KMeans(n_clusters=self.num_clusters,
                                random_state=0).fit(self.features.cpu())
            parts = clustering.labels_

        node_clusters = [[] for i in range(self.num_clusters)]
        for i, p in enumerate(parts):
            node_clusters[p].append(i)
        self.central_nodes = np.array([])
        self.distance_vec = np.zeros((self.num_nodes, self.num_clusters))
        for i in range(self.num_clusters):
            subgraph = G.subgraph(node_clusters[i])
            center = None
            for node in subgraph.nodes:
                if center is None or subgraph.degree[node] > subgraph.degree[
                        center]:
                    center = node
            self.central_nodes = np.append(self.central_nodes, center)  # np.append returns a new array
            distance = dict(nx.shortest_path_length(G, source=center))
            for node in distance:
                self.distance_vec[node][i] = distance[node]
        self.distance_vec = torch.tensor(self.distance_vec).float().to(
            self.device)
Example #11
    def gen_cluster_info(self):
        import metis
        G = nx.Graph()
        G.add_edges_from(self.edge_index.cpu().t().numpy())

        _, parts = metis.part_graph(G, self.num_clusters)
        node_clusters = [[] for i in range(self.num_clusters)]
        for i, p in enumerate(parts):
            node_clusters[p].append(i)
        self.central_nodes = np.array([])
        self.distance_vec = np.zeros((self.num_nodes, self.num_clusters))
        for i in range(self.num_clusters):
            subgraph = G.subgraph(node_clusters[i])
            center = None
            #print(subgraph.nodes)
            for node in subgraph.nodes:
                if center is None or subgraph.degree[node] > subgraph.degree[
                        center]:
                    center = node
            self.central_nodes = np.append(self.central_nodes, center)  # np.append returns a new array
            distance = dict(nx.shortest_path_length(G, source=center))
            for node in distance:
                self.distance_vec[node][i] = distance[node]
        self.distance_vec = torch.tensor(self.distance_vec).float().to(
            self.device)
Example #12
def part_graph(graph, k, df=None):
    edgecuts, parts = metis.part_graph(graph, k)
    for i, p in enumerate(graph.nodes()):
        graph.nodes[p]['cluster'] = parts[i]
    if df is not None:
        df['cluster'] = nx.get_node_attributes(graph, 'cluster').values()
    return graph
Example #13
    def start(self):

        G = nx.Graph()

        for e in self.graph.get_edges():
            G.add_edge(e.left_id, e.right_id, weight=e.weight)

        G.graph['edge_weight_attr'] = 'weight'

        (edgecuts, parts) = metis.part_graph(G, self.n_par)
        print(edgecuts)
        print(len(parts))
        # print(parts)

        # for i, p in enumerate(parts):
        #     print(i, ": ", p)
        # for node in G.nodes:
        #     print(type(G[node]))
        #     print(G[node].keys())

        vertices = {}
        # edges = {(e.left_id, e.right_id): e.left_id for e in self.graph.get_edges()}

        for i, p in enumerate(parts):
            # networkx starts from 0 but we want the nodes to start from 1
            vertices[i + 1] = p

        return vertices, G.edges
Example #14
def graphPartition(G, clusterSize):
    numClusters = math.ceil(
        len(G) / float(clusterSize)
    )  # heuristic for # of clusters given approximately equal cluster size
    print(numClusters)
    (edgecuts,
     parts) = metis.part_graph(G, int(numClusters))  # k-way cut algorithm
    colors = [
        'red', 'blue', 'green', 'orange', 'yellow', 'purple', 'black',
        'maroon', 'brown', 'indigo', 'cyan'
    ]
    for i, p in enumerate(parts):
        #print i, p
        G.nodes[i]['color'] = colors[p]
    nx.drawing.nx_pydot.write_dot(
        G, 'example.dot')  # writes graph partition into dot file format
    result = pgv.AGraph(
        'example.dot'
    )  # load dot format representation of graph to create visualization
    result.node_attr.update(shape='circle')
    result.node_attr.update(label=' ')
    result.node_attr.update(style='filled')
    result.layout()  # default layout
    result.draw('example.svg')  # writes visualization as svg file
    print("Wrote example.svg")
Example #15
def create_metis_partitions(networks,
                            sizes=[2, 4, 8, 16, 32, 64, 128, 256],
                            use_weights=False,
                            seed=666):
    '''partition all networks and return unified result (partition tables for joint node set)

    Parameters
    ----------
    networks :  array of networkx Graphs
    sizes : list of partitioning sizes
         numbers of partitions to split the graphs into
    use_weights : (optional) boolean
         will use the 'weight' attribute of the networks edges for partitioning (True) or
         partition the unweighted graphs (False)
    seed : (optional) integer
         used as seed for the metis partitioning (for each call the same seed)
    '''
    p = []
    import os
    os.environ['METIS_DLL'] = "/usr/local/lib/libmetis.so"
    import metis
    for g in networks:
        partitions = []
        if use_weights:
            g.graph['edge_weight_attr'] = 'weight'
        for i in sizes:
            (_, parts) = metis.part_graph(g, i, seed=seed)
            partitions.append(parts)
        df = DataFrame(partitions).transpose()
        df.index = g.nodes()
        df.columns = [str(i) + '_' + g.graph['name'] for i in sizes]
        p.append(df)
        if use_weights:
            del g.graph['edge_weight_attr']
    return p
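A hedged usage sketch for create_metis_partitions; the two built-in graphs and their 'name' attributes are illustrative assumptions, pandas' DataFrame import is assumed by the snippet, and libmetis must exist at the path hard-coded above.

# Sketch only: partition two graphs into 2 and 4 parts and inspect the tables.
import networkx as nx

g1 = nx.karate_club_graph()
g1.graph['name'] = 'karate'
g2 = nx.davis_southern_women_graph()
g2.graph['name'] = 'davis'
tables = create_metis_partitions([g1, g2], sizes=[2, 4])
print(tables[0].columns.tolist())           # ['2_karate', '4_karate']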
Example #16
def new_partition_helper(G, num_buses, size_bus):
    graph_components = []
    nodes = list(G.nodes)
    cut_size = num_buses
    nodes_subgraph = []
    parts = None
    ran = 0
    while True:
        (edgecuts, parts) = metis.part_graph(G, cut_size)
        nodes_subgraph = [[] for _ in range(max(parts) + 1)]
        for i in range(len(parts)):
            lnum = parts[i]
            vertex = nodes[i]
            nodes_subgraph[lnum].append(vertex)
        nodes_subgraph = [x for x in nodes_subgraph if len(x) > 0]
        if len(nodes_subgraph) > 1:
            break
        cut_size -= 1
        ran += 1
        if cut_size == 1 or ran > 4:
            return None

    for i in nodes_subgraph:
        sub_graph = G.subgraph(i)
        if nx.number_of_nodes(sub_graph) > size_bus:
            r_comps = new_partition_helper(sub_graph, 2, size_bus)
            for s in r_comps:
                graph_components.append(s)
        else:
            graph_components.append([sub_graph, nx.number_of_nodes(sub_graph)])
    return graph_components
Example #17
    def createEdgesGraph(self, college):
        """
        print(graph.edges())
        print(graph.nodes())
        (cut, parts) = metis.part_graph(graph, 40)
        print("hola")
        #(cut2, parts2) = metis.partition(graph, 2)
        metisGraph = metis.networkx_to_metis(graph)
        print(metisGraph)
        graph['U27476']['U27661']['weight']=1
        print(graph['U27476']['U27661'])
        graph.graph['edge_weight_attr'] = 'weight'
        metisGraph = metis.networkx_to_metis(graph)
        (cutW, partsW) = metis.part_graph(metisGraph, 10)
        print("paso esto")
        print(graph)
        print(metisGraph)
        """
        graph = nx.read_gexf("mediumLinkedin.gexf")
        weightedGraph = nx.read_gexf("mediumLinkedin.gexf")

        # self.draw_graph(graph, parts)
        weightedGraph = self.setEdgesWeights(weightedGraph, college)

        weightedGraph.graph['edge_weight_attr'] = 'weight'
        metisWeightedGraph = metis.networkx_to_metis(weightedGraph)
        print("Llego hasta aca")

        # self.draw_graph(metisWeightedGraph,parts)

        (cutW, partsW) = metis.part_graph(metisWeightedGraph, 5)
        print(partsW)
        self.draw_graph(graph, partsW)
Example #18
def part_graph(graph, nparts, node_weight_attr, tpweights, recursive,
               **metis_options):
    """ Partition graph

    :param graph:
    :param nparts:
    :param node_weight_attr:
    :param tpweights:
    :param recursive:
    :param metis_options:
    :return:
    """

    # If contiguous partition is requested, only keep main contiguous graph component
    if metis_options["contig"]:
        graph = max(
            (graph.subgraph(c) for c in nx.connected_components(graph)),
            key=len)

    graph.graph["node_weight_attr"] = node_weight_attr
    _, parts = metis.part_graph(graph,
                                nparts,
                                tpwgts=tpweights,
                                ubvec=None,
                                recursive=recursive,
                                **metis_options)
    partition = [[] for _ in range(nparts)]
    for u, i in zip(graph, parts):
        partition[i].append(u)

    # Only return non-empty parts
    return [part for part in partition if part]
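A hedged usage sketch for the wrapper above; the 'load' node attribute, the path graph, and passing contig through metis_options are illustrative assumptions.

# Sketch only: six chain nodes with unit 'load' weights split into two parts.
import networkx as nx

G = nx.path_graph(6)
nx.set_node_attributes(G, 1, 'load')        # integer node weights for METIS
parts = part_graph(G, nparts=2, node_weight_attr='load',
                   tpweights=None, recursive=False, contig=True)
print(parts)                                # e.g. [[0, 1, 2], [3, 4, 5]]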
Example #19
def partition_graph(adj, idx_nodes, num_clusters):
    """partition a graph by METIS."""

    start_time = time.time()
    num_nodes = len(idx_nodes)
    num_all_nodes = adj.shape[0]
    parts = []
    cluster_size = num_all_nodes // num_clusters
    num_clusters = num_all_nodes // cluster_size
    for i in range(0, num_clusters):
        parts += [
            list(range(i * cluster_size, i * cluster_size + cluster_size))
        ]
    # NOTE: returning here yields equal-sized contiguous index blocks and
    # leaves the METIS-based partitioning below unreachable.
    return None, parts

    neighbor_intervals = []
    neighbors = []
    edge_cnt = 0
    neighbor_intervals.append(0)
    train_adj_lil = adj[idx_nodes, :][:, idx_nodes].tolil()
    train_ord_map = dict()
    train_adj_lists = [[] for _ in range(num_nodes)]
    for i in range(num_nodes):
        rows = train_adj_lil[i].rows[0]
        # self-edge needs to be removed for valid format of METIS
        if i in rows:
            rows.remove(i)
        train_adj_lists[i] = rows
        neighbors += rows
        edge_cnt += len(rows)
        neighbor_intervals.append(edge_cnt)
        train_ord_map[idx_nodes[i]] = i

    if num_clusters > 1:
        _, groups = metis.part_graph(train_adj_lists, num_clusters, seed=1)
    else:
        groups = [0] * num_nodes

    part_row = []
    part_col = []
    part_data = []
    parts = [[] for _ in range(num_clusters)]
    for nd_idx in range(num_nodes):
        gp_idx = groups[nd_idx]
        nd_orig_idx = idx_nodes[nd_idx]
        parts[gp_idx].append(nd_orig_idx)
        for nb_orig_idx in adj[nd_orig_idx].indices:
            nb_idx = train_ord_map[nb_orig_idx]
            if groups[nb_idx] == gp_idx:
                part_data.append(1)
                part_row.append(nd_orig_idx)
                part_col.append(nb_orig_idx)
    part_data.append(0)
    part_row.append(num_all_nodes - 1)
    part_col.append(num_all_nodes - 1)
    part_adj = sp.coo_matrix((part_data, (part_row, part_col))).tocsr()

    tf.logging.info('Partitioning done. %f seconds.', time.time() - start_time)
    return part_adj, parts
Example #20
 def __cluster_hyperedges_201112(self, k):
   metis_graph = networkx_to_metis(self.metagraph)
   (_, indices) = metis.part_graph(metis_graph, nparts=k)
   print('meta clusters')
   print(indices)
   indptr = range(len(indices)+1)
   data = np.ones(len(indices))
   self.meta_clusters = scipy.sparse.csr_matrix((data, indices, indptr), dtype=np.int8).transpose()
Example #21
def part_graph(graph, k, df=None):
    # Note: this variant always performs a 2-way mincut bisection; k is not used.
    edgecuts, parts = metis.part_graph(graph, 2, objtype='cut', ufactor=250)
    # print(edgecuts)
    for i, p in enumerate(graph.nodes()):
        graph.nodes[p]['cluster'] = parts[i]
    if df is not None:
        df['cluster'] = nx.get_node_attributes(graph, 'cluster').values()
    return graph
Example #22
def run_metis(input_graph, number_of_parts):
    # this function partitions a network into k subgraphs
    partition = metis.part_graph(
        input_graph,
        nparts=number_of_parts,
        recursive=True,
    )
    return partition
Example #23
def partion_dgraph(dgraph_filename):
        partition_graph = nx.read_graphml(dgraph_filename)
        if worker_num != 1:
            (edgecuts, parts) = metis.part_graph(partition_graph, worker_num)
            for i in xrange(0,partition_graph.number_of_nodes()):
                id2block[i] = parts[i]
        else:
            for i in xrange(0,partition_graph.number_of_nodes()):
                id2block[i] = 0
Example #24
    def pre_part_graph_gui(self,
                           graph,
                           k,
                           canvas,
                           ax,
                           df=None,
                           plotting=False):

        self.ind_fig = 1

        self.log.appendPlainText("Begin clustering...")

        clusters = 0
        for i, p in enumerate(graph.nodes()):
            graph.node[p]["cluster"] = 0
        cnts = OrderedDict({0: len(graph.nodes())})

        while clusters < k - 1:
            maxc = -1
            maxcnt = 0
            for key, val in cnts.items():
                if val > maxcnt:
                    maxcnt = val
                    maxc = key
            s_nodes = [
                n for n in graph.node if graph.node[n]["cluster"] == maxc
            ]
            s_graph = graph.subgraph(s_nodes)
            edgecuts, parts = metis.part_graph(s_graph,
                                               2,
                                               objtype="cut",
                                               ufactor=250,
                                               seed=42)
            new_part_cnt = 0
            new_biggest_clust_label = pd.Series(parts).value_counts().idxmax()
            for i, p in enumerate(s_graph.nodes()):
                if parts[i] == new_biggest_clust_label:
                    graph.node[p]["cluster"] = clusters + 1
                    new_part_cnt = new_part_cnt + 1
            if plotting is True:
                self.plot2d_graph_gui(
                    graph,
                    canvas=canvas,
                    ax=ax,
                    save_plots=self.save_plots,
                    ind_fig=self.ind_fig,
                    print_clust=False,
                )
                self.ind_fig += 1
            cnts[maxc] = cnts[maxc] - new_part_cnt
            cnts[clusters + 1] = new_part_cnt
            clusters = clusters + 1

        # edgecuts, parts = metis.part_graph(graph, k, seed=42)
        if df is not None:
            df["cluster"] = nx.get_node_attributes(graph, "cluster").values()
        return graph
Example #25
def partGraphIntoRooms(graph, roomSize):
    if graph.number_of_nodes() == 0: return ()
    partitions = ceil(graph.number_of_nodes() / roomSize)
    graph.graph['edge_weight_attr'] = 'weight'
    (edgecuts, parts) = ms.part_graph(graph, partitions, recursive=True)
    print(edgecuts)
    print("Divided into", len(set(parts)), "parts. Goal:", partitions,
          "partitions.")
    return parts
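A hedged usage sketch for partGraphIntoRooms; it assumes the snippet's module imports metis as ms and ceil from math, and that every edge carries an integer 'weight' attribute because the function declares 'edge_weight_attr' before calling METIS.

# Sketch only: a 12-node cycle with unit edge weights split into rooms of ~4.
import networkx as nx

G = nx.cycle_graph(12)
nx.set_edge_attributes(G, 1, 'weight')
parts = partGraphIntoRooms(G, roomSize=4)   # expect about 3 parts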
Example #26
    def fit_predict(self, X, y=None):
        """ Partitions a dataset """

        if (MultiPart.dataset_id_cache == id(X)):
            G = MultiPart.graph_cache
        else:
            # Build KD-Tree for efficient nearest-neighbors query
            kdt = KDTree(X, leaf_size=self.leaf_size, metric=self.metric)
            distances, indices = kdt.query(X,
                                           k=self.nearest_neighbors,
                                           return_distance=True)
            distances = MinMaxScaler(
                feature_range=(0, 1000)).fit_transform(distances)

            # Build graph for k-way partitioning
            G = nx.Graph()
            G.add_nodes_from(range(len(X)))

            # Add distances of nearest neighbors as weighted edges
            # The add_edge gets the tuple (index_from, index_to, distance)
            for i, x in enumerate(zip(distances, indices)):
                for j, dist in enumerate(x[0]):
                    # We insert 1000 minus the distance since METIS can only
                    # *minimize* edge-cut and we want the cut to fall between
                    # the least similar records (which have the largest distance)
                    G.add_edge(i, x[1][j], weight=int(1000 - dist))
            G.graph['edge_weight_attr'] = 'weight'

            # Cache the graph
            MultiPart.dataset_id_cache = id(X)
            MultiPart.graph_cache = G

        # Partition the graph
        if (self.seed is None):
            (edgecuts, parts) = metis.part_graph(G,
                                                 self.n_clusters,
                                                 objtype='cut')
        else:
            (edgecuts, parts) = metis.part_graph(G,
                                                 self.n_clusters,
                                                 objtype='cut',
                                                 seed=self.seed)
        return parts
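A hedged usage sketch for fit_predict; the MultiPart constructor arguments and its class-level caches are assumptions about the surrounding class, and the class's own sklearn/networkx/metis imports are assumed.

# Sketch only: 200 random 2-D points split into 4 clusters.
import numpy as np

X = np.random.RandomState(0).rand(200, 2)
mp = MultiPart(n_clusters=4, nearest_neighbors=10, leaf_size=30,
               metric='euclidean', seed=1)
labels = mp.fit_predict(X)
print(np.bincount(labels))                  # points per cluster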
Example #27
 def metis_clustering(self):
     """
     Clustering the graph with Metis. For details see:
     """
     (st, parts) = metis.part_graph(self.graph, self.cluster_number)
     self.clusters = list(set(parts))
     self.cluster_membership = {
         node: membership
         for node, membership in enumerate(parts)
     }
Example #28
def get_partition_list(g, psize):
    tmp_time = time()
    ng = g.to_networkx()
    print("getting adj using time{:.4f}".format(time() - tmp_time))
    print("run metis with partition size {}".format(psize))
    _, nd_group = metis.part_graph(ng, psize)
    print("metis finished in {} seconds.".format(time() - tmp_time))
    print("train group {}".format(len(nd_group)))
    al = arg_list(nd_group)
    return al
Example #29
def partition_graph_metis(G, n_partitions=3):
    import metis
    assert nx.is_connected(G.to_undirected())
    cuts, parts = metis.part_graph(G.to_undirected(),
                                   n_partitions,
                                   contig=True)
    colors = ['red', 'blue', 'green', 'purple']
    for i, part in enumerate(parts):
        G.nodes[i]['color'] = colors[part]
    nx.nx_pydot.write_dot(G, 'graph_partitions.dot')
    return cuts, parts
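A hedged usage sketch for partition_graph_metis; the grid graph is an illustrative assumption, node labels must be the integers 0..n-1 because parts are mapped back by position, the graph must be connected (the assert), and writing the dot file requires pydot.

# Sketch only: a connected 5x5 grid relabelled to integer nodes 0..24.
import networkx as nx

G = nx.convert_node_labels_to_integers(nx.grid_2d_graph(5, 5))
cuts, parts = partition_graph_metis(G, n_partitions=3)
print(cuts, sorted(set(parts)))             # edge cut and part labels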
Example #30
 def decompose(self, problem, n):
   g = InteractionGraph(problem)
   edgecuts, parts = metis.part_graph(g, n, recursive=False, objtype="cut",
                                      numbering=0, minconn=True, iptype="edge")
   groups = []
   for i in range(n):
     groups.append([])
   for i, j in enumerate(parts):
     groups[j].append(i)
   print(parts)
   print(groups)
Example #31
def part_graph(graph, k, df=None):
    """return the input graph with the clustering obtained through mincut-bisection"""
    edgecuts, parts = metis.part_graph(graph,
                                       2,
                                       objtype="cut",
                                       ufactor=250,
                                       seed=42)
    # print(edgecuts)
    for i, p in enumerate(graph.nodes()):
        graph.node[p]["cluster"] = parts[i]
    if df is not None:
        df["cluster"] = nx.get_node_attributes(graph, "cluster").values()
    return graph
Example #32
    def create_partitions(self):
        self.partitioned_graph = self.topo_graph.copy()
        nx.nx_pydot.write_dot(self.partitioned_graph, 'slick.dot')
        # http://metis.readthedocs.org/en/latest/
        # We are using off-the-shelf software to partition the network.
        num_partitions = self.get_num_partitions()
        if num_partitions > 1:
            # For now we use the default partitioning algorithm.
            # Should experiment with different partitioning algorithms,
            # although it may not matter which algorithm we use.
            (objective_function, partitions) = metis.part_graph(self.partitioned_graph, nparts=num_partitions)
            # For debugging
            # print("Objective Function:", objective_function)
            print("Partitions:", partitions)
            # Add more colors to this array if get_num_partitions is > 4
            colors = ['red', 'blue', 'green', 'orange']
            node_ids = []
            for node in self.partitioned_graph.nodes():
                node_ids.append(node)
            for i, p in enumerate(partitions):
                # Please note p starts from zero, one, two etc.
                node_id = node_ids[i]
                self.partitioned_graph.nodes[node_id].update({'color': colors[p], 'part_number': p})
        else:
            (objective_function, partitions) = metis.part_graph(self.partitioned_graph, nparts=2)
            colors = ['red', 'blue', 'green', 'orange']
            node_ids = []
            for node in self.partitioned_graph.nodes():
                node_ids.append(node)
            for i, p in enumerate(partitions):
                p = 0
                node_id = node_ids[i]
                self.partitioned_graph.nodes[node_id].update({'color': colors[p], 'part_number': p})
        nx.nx_pydot.write_dot(self.partitioned_graph, 'slick_parts.dot')
Example #33
def metis_clusters(G, num_clusters, additionalClusters=0):
    G.graph["edge_weight_attr"] = "weight"
    # G = metis.networkx_to_metis(G)
    if len(G.nodes()) == 0:
        print "Error! Graph has 0 nodes"
        sys.exit()

    elif len(G.nodes()) == 1:
        return [0]

    elif len(G.nodes()) < num_clusters:
        return list(range(len(G.nodes())))

    (edgecuts, parts) = metis.part_graph(
        G, nparts=num_clusters + additionalClusters, recursive=False, niter=1000, ufactor=700
    )
    return parts
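A hedged usage sketch for metis_clusters; the karate-club graph is an illustrative assumption, the snippet's own metis/sys imports are assumed, and every edge needs an integer 'weight' attribute because the function declares "edge_weight_attr" before partitioning.

# Sketch only: the karate club graph with unit edge weights in 4 clusters.
import networkx as nx

G = nx.karate_club_graph()
nx.set_edge_attributes(G, 1, 'weight')
parts = metis_clusters(G, num_clusters=4)
print(len(set(parts)))                      # should be 4 (or fewer)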
Example #34
def offline_b_lin_method(nx_graph, attempt_split_parts=2, prob=PROB, approx_rank=False):

    """
    :param nx_graph: networkx graph
    :param attempt_split_parts:
    to split the graph into several parts
    note: might get fewer cuts desired
    :param prob: probability to restart to origin pos
    :param approx_rank: the similarity to decompose the W2 matrix
    :return: W_telta, Q1_I, U, A, V
    :note: if the W2 is a singular matrix then A add some value to it
    """

    # phase 1: graph partition
    import metis
    G = metis.networkx_to_metis(nx_graph)
    (objval, parts) = metis.part_graph(G, attempt_split_parts)
    # we should normalize parts since METIS may return fewer parts than requested

    # record all node index in one partition {partitionId:[nodeIds]}
    groups = {}
    for r, gn in enumerate(parts):
        if gn not in groups.keys():
            groups[gn] = []
        groups[gn].append(r)
    for key in groups.keys():
        groups[key] = sorted(groups[key])
    # print (objval, parts)
    # we build the W_telta as a normalized matrix
    node_size = len(nx_graph.nodes())

    # first we construct the index
    row_idx = []
    for part in groups.keys():
        for idx in groups[part]:
            row_idx.append(idx)

    # represent and normalize the matrix
    # we could use other methods to normalize to see the effect
    W_telta = numpy_helper.normalize_matrix(nx_graph)

    # phase 2 and 3
    # w1 contains all within-partition links
    W1_group = {}
    W1 = numpy.matrix([[0.0] * node_size] * node_size)
    cur_k_row = 0
    for gn in groups.keys():
        matrix_len = len(groups[gn])
        W1_group[gn] = numpy.matrix([[0.0]*matrix_len]*matrix_len)
        for i in range(0, matrix_len):
            for j in range(0, matrix_len):
                W1_group[gn][i, j] = W_telta[cur_k_row+i, cur_k_row+j]
                W1[cur_k_row+i, cur_k_row+j] = W_telta[cur_k_row+i, cur_k_row+j]
        cur_k_row += matrix_len
    # w2 contains all cross-partition links
    W2 = W_telta - W1

    # phase 4
    # pre-compute Q
    Q1_I_group = {}
    for key in W1_group.keys():
        Q1_I_group[key] = \
            (numpy.identity(W1_group[key].shape[0]) - prob*W1_group[key]).I

    # phase 5
    # do low rank approx
    # currently we use the default
    # *we may further use other approx to test*
    # #pymf#
    U, S, V = low_rank_approx_svd(W2, approx_rank)
    # TODO if u v s is not full rank matrix we need to compute NB_LIN
    try:
        S_I = S.I
    except Exception:
        # add a tiny diagonal term to make it invertible
        S += 1e-15 * numpy.identity(S.shape[0])

    # phase 6 construct Q1_I
    Q1_I = numpy.matrix([[0.0]*node_size]*node_size)
    diag_index = 0
    for gn in groups.keys():
        if gn in Q1_I_group.keys():
            sz = Q1_I_group[gn].shape[0]
            for i in range(0, sz):
                for j in range(0, sz):
                    Q1_I[i+diag_index, j+diag_index] = Q1_I_group[gn][i, j]
            diag_index += sz
    A = (S.I - prob*V*Q1_I*U).I
    return W_telta, Q1_I, U, A, V
Example #35
f.close()

# load the answer data
answer_dict={}
f = open(answer, 'rb')
dataReader = csv.reader(f)
for row in dataReader:
    answer_dict[row[0]]=row[1]


f.close()

# graph partitioning
# parts is a list of ints
(edgecuts, parts) = metis.part_graph(G,nparts=partition_N, recursive=True)

for i, part in enumerate(parts): #each_with_index
    if G.node[i]:
        pprint(G.node[i]['word'])
        G.node[i]['transnum']=part
        partition_G.node[i]['transnum']=part
        # print "part:"
        # pprint(part) #int
lang=nx.get_node_attributes(G,'lang')
word=nx.get_node_attributes(G,'word')
transnum=nx.get_node_attributes(G,'transnum')

# if transnum matches, also add the edge to partition_G
for n1, n2 in G.edges_iter():
    if G.node[n1] and G.node[n2]:
Example #36
import networkx as nx
import metis

G = metis.example_networkx()

mg =  metis.networkx_to_metis(G)

(edgecuts, parts) = metis.part_graph(G, 3)
print("edgecuts", edgecuts)
print("parts", parts)

colors = ['red','blue','green']
for i, p in enumerate(parts):
    G.nodes[i]['color'] = colors[p]
nx.draw(G)

nx.nx_pydot.write_dot(G, 'example.dot')  # Requires pydot
Example #37
for ie in range(nelem):
    #print "element", ie
    #loop over element nodes (local numbering)
    for i in elemnodes[ie]:
        #print "  node", i, " connectivity ",  nodalconnectivity[i]
        for k in nodalconnectivity[i]:
            if k != ie:
                adj[ie].add(k)
                adj[k].add(ie)

#print adj

#done; call metis now
try:
    from metis import part_graph
    cuts, part_vert = part_graph(adj, nparts)
except Exception:
    print("metis module not installed or internal metis error encountered")
    exit(0)

#print "Metis"
#print " part_vert :", part_vert
#print " cuts:", cuts

# write partitioned mesh on output
print "Partition  local_nodes shared_nodes   elements"
print "----------------------------------------------"
nodalstatuses, nodalpart = classifyNodes(part_vert)
for i in range (nparts):
    writePartition(i, part_vert, nodalstatuses, nodalpart)
Example #38
        nStrand, nParticle, factor, refFrame, radius, frameFilter = pkl.load(open(prefix[0] + 'info.dump', 'rb'))
        strandGraph = pkl.load(open(prefix[0] + 'mgB.dump', 'rb'))

        # step 3
        _g = strandGraph

        import networkx as nx
        import metis
        import metis_graph as mg
        G = nx.Graph()
        G.add_nodes_from(range(nStrand))
        itr = mg.UndirectedIterator(_g)
        for edge in itr:
            G.add_edge(edge[0], edge[1], weight=edge[2])

        cut, vers = metis.part_graph(G, nGroup)
        import pymetis
        cut2, vers2 = pymetis.part_graph(nGroup, xadj=_g.xadj, adjncy=_g.adjncy, eweights=_g.eweights)

        f = open(prefix[0]+".group","wb")
        import struct
        f.write(struct.pack('i', len(vers)))
        for i in vers:
            f.write(struct.pack('i', i))
        f.close()
        import ipdb; ipdb.set_trace()

        # rand, opt, worst
        for iiii in range(3):
            opt = guideOpts[iiii]
Example #39
        sys.exit()

    f = sys.argv[1];

    if f.endswith(".dot"):
        dot = True
    elif f.endswith(".adjlist"):
        dot = False
    else:
        print "Usage: graphProcessor.py <path to file>"
        sys.exit()

    # Load the graph
    G = ge.LoadGraphFromDot(f) if dot else ge.LoadGraphFromAdjList(f)

    # Trim the leaves to make the graph look a little nicer.
    ge.TrimGraphLeaves(G)

    # write to dot file
    ge.WriteGraphToDot(G, "newout.dot")

    (edgecuts, parts) = metis.part_graph(G, 4, seed=1234);

    colors = ['blue', 'green', 'cyan', 'purple', 'brown'];

    # Update the colors of the graph nodes.
    for i,node in enumerate(parts):
        index = list(G.nodes())[i]
        G.nodes[index]["color"] = colors[node]

    ge.WriteGraphToDot(G,"partitionedTrimmed.dot")