Code example #1
File: solver.py Project: ritwikdixit/170projcode
def solve(G, k, s, rowdy_groups, i):
    #TODO: Write this method as you like. We'd recommend changing the arguments here as well
    #precalculate rowdy groups
    #K = number of buses.
    #S = max bus size.

    # inputs 29 and 1064 are special-cased: fall back to evenly chunking the node list
    if i == 29 or i == 1064:
        nodes = list(G.nodes())
        step = len(nodes) // k
        return [nodes[j:j + step] for j in range(0, len(nodes), step)]

    seedr = random.randint(0, 100000)
    buses = []
    options = nxmetis.MetisOptions(seed=seedr)
    vol, buses = nxmetis.partition(G, k, recursive=True, options=options)
    # adjust so that every bus holds at most s riders
    if any(len(bus) > s for bus in buses):
        #readjust partition.
        print(s, k, [len(bus) for bus in buses])
        # O(N) pass: pick off overflow riders by hand and rebalance them into buses with spare capacity
        picks = []
        for j in range(len(buses)):
            if len(buses[j]) > s:
                picks.extend(buses[j][s:])
                buses[j] = buses[j][:s]
        for j in range(len(buses)):
            if len(buses[j]) < s and picks:
                space = min(s - len(buses[j]), len(picks))
                buses[j].extend(picks[len(picks) - space:])
                picks = picks[:len(picks) - space]
        print(s, k, [len(bus) for bus in buses])
    return buses
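
A minimal usage sketch for the solve() function above, assuming random, networkx, and nxmetis are imported at the top of solver.py; the toy graph and the k=4, s=5 parameters are illustrative assumptions only:

import random
import networkx as nx
import nxmetis

G = nx.erdos_renyi_graph(20, 0.3, seed=1)       # hypothetical toy instance: 20 riders
buses = solve(G, k=4, s=5, rowdy_groups=[], i=0)
print([len(bus) for bus in buses])              # expect k bus lists, each at most s riders after rebalancing
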
Code example #2
def metis_partition(G):

    # For further details on metis-parameters, please refer to the manual
    settings = nxmetis.MetisOptions(ncuts=4, niter=200, ufactor=280)
    par = nxmetis.partition(G, 2, options=settings)
    the_edge_cut = par[0]
    community1 = par[1][0]
    community2 = par[1][1]
    comm = [community1, community2, the_edge_cut]

    return comm
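
A short usage sketch for metis_partition(); the karate-club test graph is an assumption for illustration, and the returned list unpacks into the two communities plus the edge cut reported by METIS:

import networkx as nx
import nxmetis

g = nx.karate_club_graph()
community1, community2, edge_cut = metis_partition(g)
print(len(community1), len(community2), edge_cut)
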
Code example #3
def partition_here(graph):
    
    if nx.is_empty(graph):
        return 0, 0
    Gcc = sorted(nx.connected_components(graph), key=len, reverse=True)
    G = graph.subgraph(Gcc[0])
    settings = nxmetis.MetisOptions(ncuts=4, niter=200, ufactor=280)
    par = nxmetis.partition(G, 2, options=settings)
    
    community1 = par[1][0]
    community2 = par[1][1]

    rwc = np.mean(randomwalk_polarization(G, 100, 0.02, 1000, community1, community2))
    prc = len(G) / len(graph)  # fraction of nodes that lie in the largest connected component
    
    return rwc, prc
Code example #4
def partition_metis(g, fpga, pe, ufactor=1):
    logger.debug("Dividing into {} partitions, ufactor: {}".format(
        fpga, ufactor))
    ug = g.to_undirected()
    for node in ug.nodes():
        ug.nodes[node]['weight'] = ug.degree(node)  # picked up via nxmetis's default node_weight='weight'
    objval, fpgaparts = nxmetis.partition(ug,
                                          fpga,
                                          options=nxmetis.MetisOptions(
                                              contig=False, ufactor=ufactor))
    logger.debug(
        "Edges crossing: {} , expected from random partition: {}".format(
            objval,
            nx.number_of_edges(ug) * (fpga - 1) / fpga))
    if objval:  # avoid dividing by zero if METIS reports a zero edge cut
        logger.debug("Improvement: {}x".format(
            (nx.number_of_edges(ug) * (fpga - 1) / fpga) / objval))

    parts = []
    for part in fpgaparts:
        parts.extend(_partition_greedy(g, pe, part))

    return relabel_with_parts(g, parts)
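
A self-contained sketch of just the degree-weighted METIS call used in partition_metis(), leaving out the project-specific _partition_greedy and relabel_with_parts helpers; the random regular graph and fpga=4 are assumptions for illustration:

import networkx as nx
import nxmetis

g = nx.random_regular_graph(4, 64, seed=0)
fpga = 4
ug = nx.Graph(g)                                 # undirected working copy
for node in ug.nodes():
    ug.nodes[node]['weight'] = ug.degree(node)   # read via the default node_weight='weight'
objval, fpgaparts = nxmetis.partition(
    ug, fpga, options=nxmetis.MetisOptions(contig=False, ufactor=1))
print(objval, [len(p) for p in fpgaparts])
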
Code example #5
                    action='store_true',
                    help='Specify if you want to save figures')
parser.add_argument("--nodes",
                    default=30,
                    type=int,
                    help="Number of nodes in Erdos-Renyi Graph")
parser.add_argument(
    "--p",
    default=0.4,
    type=float,
    help="Probability of edge being present in Erdos-Renyi Graph")
args = parser.parse_args()

if __name__ == "__main__":
    g = generate_er_graph(args.nodes, args.p)
    options = nxmetis.MetisOptions(dbglvl=nxmetis.enums.MetisDbgLvl.time,
                                   niter=1)
    _, parts = nxmetis.partition(G=g,
                                 nparts=2,
                                 options=options,
                                 recursive=False)
    recursive_fiedler_values = nx.algebraic_connectivity(g.subgraph(parts[0])), \
                               nx.algebraic_connectivity(g.subgraph(parts[1]))
    MAX_FIEDLER_VALUEX = -sys.maxsize
    MAX_FIEDLER_VALUEY = -sys.maxsize
    for i in range(100):
        swap_vertices, partition_vector = heurisitc_algorithm(g, parts)
        parts[0] = [vtx for vtx, i in enumerate(partition_vector) if i == 0]
        parts[1] = [vtx for vtx, i in enumerate(partition_vector) if i == 1]
        # print(initial_fiedler_values)
        # print(parts)
        (max_x, max_y) = maximum_fiedler_value_swaps(g, swap_vertices,
Code example #6
            non_anchor_edge_included_vertex.add(h)
            non_anchor_edge_included_vertex.add(t)

        # construct an nx.Graph and use METIS to obtain a min-cut partition
        G = nx.Graph()
        G.add_edges_from(non_anchor_edge_list)

        for node, degree in entity_degree.items():

            if node in G:

                G.nodes[node]['node_weight'] = degree  # G.node was removed in NetworkX 2.4; use G.nodes

        options = nxmetis.MetisOptions(     # objtype=1 => vol
            ptype=-1, objtype=1, ctype=-1, iptype=-1, rtype=-1, ncuts=-1,
            nseps=-1, numbering=-1, niter=cur_iter, seed=-1, minconn=-1, no2hop=-1,
            contig=-1, compress=-1, ccorder=-1, pfactor=-1, ufactor=-1, dbglvl=-1)

        edgecuts, parts = nxmetis.partition(G, nparts=partition_num, node_weight='node_weight', options=options)

        # put the residue (non-anchor vertices not covered by any edge) into random partitions
        residue = non_anchor_id.difference(non_anchor_edge_included_vertex)

        for v in residue:

            parts[randint(0, partition_num - 1)].append(v)

        # print the number of entities in each partition
        printt('[info] maxmin > # of entities in each partitions : [%s]' % " ".join([str(len(p)) for p in parts]))

        # send multiple elements at once
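
A standalone sketch of the "scatter the residue into random partitions" step above; the toy part lists and residue set are assumptions for illustration:

from random import randint

parts = [[1, 2], [3], [4, 5]]                  # hypothetical partitions from METIS
partition_num = len(parts)
residue = {10, 11, 12}                         # vertices not covered by any partition
for v in residue:
    parts[randint(0, partition_num - 1)].append(v)
print([len(p) for p in parts])
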
Code example #7
File: messages.py Project: vishalbelsare/numbskull
def find_metis_parts(conn, cur, parts):
    """TODO"""
    # Open a cursor to perform database operations
    (factor_view, variable_view, weight_view) = get_views(cur)
    # Obtain graph
    (factor, factor_pt, factor_ufo, fmap, edges) = \
        get_factors(cur, factor_view)

    hyperedges = []
    for f in factor:
        newedge = []
        for i in range(f['ftv_offset'], f['ftv_offset'] + f['arity']):
            newedge.append(fmap[i]['vid'])
        hyperedges.append(newedge)
    G = nx.Graph()
    for e in hyperedges:
        for i in range(len(e)):
            for j in range(i + 1, len(e)):
                newedge = (e[i], e[j])
                G.add_edge(*newedge)  # add the pair, not the whole hyperedge
    # Run metis to obtain partitioning
    metis_options = \
        nxmetis.MetisOptions(objtype=nxmetis.enums.MetisObjType.vol)
    (cost, partitions) = \
        nxmetis.partition(G, parts, options=metis_options)
    print(80 * "*")
    print(cost)
    print(partitions)
    print(80 * "*")

    # Find variables to assign to the master
    master_variables = set([])
    # Get all edges
    cut_edges = set(G.edges())
    for p in partitions:
        H = G.subgraph(p)  # subgraph view; no clear() needed (views are read-only)
        cut_edges -= set(H.edges())
        print(H.edges())
    for edge in cut_edges:
        n1, n2 = edge
        master_variables.add(n1)
        master_variables.add(n2)
    # Store partition in DB
    try:
        cur.execute("CREATE TABLE variable_to_cc(dd_id bigint, cc_id bigint);")
    except Exception:
        conn.rollback()
        cur.execute("TRUNCATE variable_to_cc;")

    rows = []
    # Output master variables
    for node in master_variables:
        rows.append([node, -1])

    print(master_variables)
    # Output minion variables
    pid = 0
    for p in partitions:
        only_master = True
        for node in p:
            if node not in master_variables:
                only_master = False
                rows.append([node, pid])
        if not only_master:
            pid += 1
    print(rows)
    dataText = ','.join(cur.mogrify('(%s,%s)', row).decode() for row in rows)  # mogrify returns bytes under Python 3
    print(dataText)
    try:
        cur.execute("INSERT INTO variable_to_cc VALUES " + dataText)
        if pid > 1:
            cur.execute("CREATE INDEX dd_cc ON variable_to_cc (dd_id);")
        conn.commit()
        G.clear()
        return True
    except Exception:
        conn.rollback()
        G.clear()
        return False
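
A database-free sketch of the core of find_metis_parts(): clique-expand each hyperedge into pairwise edges, then partition with the METIS volume objective; the toy hyperedges are assumptions for illustration:

from itertools import combinations
import networkx as nx
import nxmetis

hyperedges = [[0, 1, 2], [2, 3], [3, 4, 5], [0, 5]]
G = nx.Graph()
for e in hyperedges:
    G.add_edges_from(combinations(e, 2))       # clique expansion of the hyperedge
cost, partitions = nxmetis.partition(
    G, 2, options=nxmetis.MetisOptions(objtype=nxmetis.enums.MetisObjType.vol))
print(cost, partitions)
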
Code example #8
File: graph_analysis.py Project: pythseq/AGB
def split_graph(g_component, full_g, undirected_g, dict_edges, modified_dict_edges, loop_edges, edges_by_nodes, two_way_edges, last_idx, parts_info,
                  is_repeat_graph=False, fake_edges=None, find_hanging_nodes=False, mapping_info=None, chrom=None, contig_edges=None):
    graphs = []
    hanging_nodes = []
    connected_nodes = []
    num_enters = []
    num_exits = []

    complex_component = False
    if len(g_component) > MAX_NODES:
        complex_component = True
        target_graph_parts = int(math.ceil(len(g_component.nodes()) / MAX_SUB_NODES))
        # use METIS library to partition a graph into smaller subgraphs
        options = nxmetis.MetisOptions(ncuts=5, niter=100, ufactor=2, objtype=1, contig=not mapping_info, minconn=True)
        edgecuts, parts = nxmetis.partition(g_component.to_undirected(), target_graph_parts, options=options)
        graph_partition_dict = dict()
        for part_id, nodes in enumerate(parts):
            for node in nodes:
                graph_partition_dict[node] = part_id
        num_graph_parts = len(parts)
    else:
        graph_partition_dict = dict((n, 0) for n in g_component.nodes())
        parts = [list(g_component.nodes())]
        num_graph_parts = 1

    for part_id in range(num_graph_parts):
        parts_info['part' + str(last_idx + part_id)] = \
            {'n': len(parts[part_id]), 'big': num_graph_parts > 1, 'idx': last_idx + part_id,
             'in': set(), 'out': set()}

    edges_count = defaultdict(int)

    def is_flanking_edge(edge_id):
        # an edge is "flanking" if it lies outside the current focus: wrong chromosome, non-repetitive, or not a contig edge
        if mapping_info:
            return chrom not in mapping_info[edge_id]
        if is_repeat_graph:
            return not dict_edges[edge_id].repetitive
        if contig_edges:
            return edge_id not in contig_edges
        return False

    if fake_edges:
        g_component.remove_edges_from(fake_edges)
    # iterate through subgraphs of the component
    for part_id in range(num_graph_parts):
        subgraph = []
        subnodes = []
        sub_hanging_nodes = []
        sub_connected_nodes = []
        main_edges = defaultdict(set)
        flanking_edges = defaultdict(set)
        total_exits = 0
        total_enters = 0
        for e in g_component.edges():
            start, end = e[0], e[1]
            if part_id == graph_partition_dict[start] or part_id == graph_partition_dict[end]:
                # use edges that belong to the current subgraph
                edges = edges_by_nodes[(start, end)] + two_way_edges[(start, end)]
                for edge_id in edges:
                    if not is_flanking_edge(edge_id):
                        main_edges[(start, end)].add(edge_id)
                flanking_edge_pairs = [(start, node) for node in undirected_g.neighbors(start)] + \
                                      [(node, start) for node in undirected_g.neighbors(start)] + \
                                      [(end, node) for node in undirected_g.neighbors(end)] + \
                                      [(node, end) for node in undirected_g.neighbors(end)]
                for start, end in flanking_edge_pairs:
                    for edge_id in edges_by_nodes[(start, end)] + two_way_edges[(start, end)]:
                        if is_flanking_edge(edge_id):
                            flanking_edges[(start, end)].add(edge_id)

        unique_nodes = set()
        for graph_edges, is_flanking in [(main_edges, False), (flanking_edges, True)]:
            for (start, end), edges in graph_edges.items():
                if is_repeat_graph and is_flanking:  # store nodes with unique edges for repeat-focused graph
                    unique_nodes.add(start)
                    unique_nodes.add(end)
                link_component = last_idx + part_id
                if start in graph_partition_dict and graph_partition_dict[start] != part_id:
                    # add node representing a hidden part of the graph
                    link_component = last_idx + graph_partition_dict[start]
                    start = 'part' + str(link_component)
                elif end in graph_partition_dict and graph_partition_dict[end] != part_id:
                    link_component = last_idx + graph_partition_dict[end]
                    end = 'part' + str(link_component)
                if link_component != last_idx + part_id:
                    # add edges to a hidden part of the graph
                    for edge_id in edges:
                        edge = dict_edges[edge_id]
                        new_edge_id = edge.id
                        if edges_count[edge.id]:
                            new_edge_id = edge.id + '_' + str(edges_count[edge.id])
                        edges_count[edge.id] += 1
                        if start == 'part' + str(link_component):
                            parts_info['part' + str(link_component)]['out'].add(new_edge_id)
                        else:
                            parts_info['part' + str(link_component)]['in'].add(new_edge_id)
                        new_edge = edge.create_copy(start, end)
                        modified_dict_edges[new_edge_id] = new_edge
                        subgraph.append(new_edge_id)
                elif start != end or len(edges) < 2:
                    for edge_id in edges:
                        edge = dict_edges[edge_id]
                        new_edge_id = edge.id
                        if edges_count[edge.id]:  # edges with the same id can belong to several graph components
                            new_edge_id = edge.id + '_' + str(edges_count[edge.id])
                        edges_count[edge.id] += 1
                        new_edge = edge.create_copy(start, end)
                        modified_dict_edges[new_edge_id] = new_edge
                        subgraph.append(new_edge_id)
                else:
                    edge_id = 'loop%s' % start
                    loop_edge = Edge(edge_id)
                    loop_edge.start, loop_edge.end = start, end
                    loop_edge.is_complex_loop = True
                    subgraph.append(edge_id)
                    for loop_edge_id in edges:
                        edge = dict_edges[loop_edge_id]
                        modified_dict_edges[loop_edge_id] = edge
                        loop_edges[edge_id].add(loop_edge_id)

        graphs.append((len(subgraph) + 10000 * (num_graph_parts - part_id), subgraph))  # add unique subgraph id
        if find_hanging_nodes:
            # search for nodes with zero indegree or outdegree
            for n in g_component.nodes():
                if graph_partition_dict[n] != part_id:
                    continue
                in_multiplicity = 0
                out_multiplicity = 0
                for e in full_g.in_edges(n):
                    edges = edges_by_nodes[(e[0], e[1])] + two_way_edges[(e[0], e[1])]
                    for edge_id in edges:
                        in_multiplicity += dict_edges[edge_id].multiplicity
                for e in full_g.out_edges(n):
                    edges = edges_by_nodes[(e[0], e[1])] + two_way_edges[(e[0], e[1])]
                    for edge_id in edges:
                        out_multiplicity += dict_edges[edge_id].multiplicity
                if not full_g.in_degree(n) or not full_g.out_degree(n):
                    sub_hanging_nodes.append(n)
                elif int(out_multiplicity - in_multiplicity) != 0:
                    subnodes.append(n)
            hanging_nodes.append(sub_hanging_nodes)
        if is_repeat_graph:
            # search for nodes with zero indegree or outdegree
            for n in g_component.nodes():
                if graph_partition_dict[n] != part_id:
                    continue
                if not full_g.in_degree(n) or not full_g.out_degree(n):
                    sub_hanging_nodes.append(n)
            for n in unique_nodes:
                if not full_g.in_degree(n) or not full_g.out_degree(n):
                    sub_hanging_nodes.append(n)
            # add flanking unique edges and calculate number of entrances/exits to the repeat cluster
            for n in unique_nodes:
                enters = 0
                exits = 0
                other_edges = 0
                for e in full_g.in_edges(n):
                    start, end = e[0], e[1]
                    edges = edges_by_nodes[(start, end)] + two_way_edges[(start, end)]
                    for edge_id in edges:
                        if start == end:
                            continue
                        if edge_id in subgraph and dict_edges[edge_id].repetitive:
                            continue
                        if edge_id in subgraph:
                            if start != end:
                                exits += 1
                        else:
                            if start != end or n not in g_component.nodes():
                                other_edges += 1
                for e in full_g.out_edges(n):
                    start, end = e[0], e[1]
                    edges = edges_by_nodes[(start, end)] + two_way_edges[(start, end)]
                    for edge_id in edges:
                        if start == end:
                            continue
                        if edge_id in subgraph and dict_edges[edge_id].repetitive:
                            continue
                        if edge_id in subgraph:
                            if start != end:
                                enters += 1
                        else:
                            if start != end or n not in g_component.nodes():
                                other_edges += 1
                if other_edges:
                    sub_connected_nodes.append(n)
                    total_exits += exits
                    total_enters += enters
            connected_nodes.append(sub_connected_nodes)
            hanging_nodes.append(sub_hanging_nodes)
            num_enters.append(total_enters)
            num_exits.append(total_exits)
    last_idx += num_graph_parts
    viewer_data = ViewerData(graphs, hanging_nodes, connected_nodes, modified_dict_edges, parts_info,
                             enters=num_enters, exits=num_exits)
    return viewer_data, last_idx, complex_component
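
A reduced sketch of only the METIS step inside split_graph(): partition an oversized component into ceil(n / MAX_SUB_NODES) parts and build the node-to-part lookup; the path graph and the MAX_SUB_NODES value are assumptions for illustration:

import math
import networkx as nx
import nxmetis

MAX_SUB_NODES = 50                              # hypothetical threshold
g_component = nx.path_graph(200).to_directed()  # stand-in for one large component
target_graph_parts = int(math.ceil(len(g_component.nodes()) / MAX_SUB_NODES))
options = nxmetis.MetisOptions(ncuts=5, niter=100, ufactor=2)
edgecuts, parts = nxmetis.partition(g_component.to_undirected(),
                                    target_graph_parts, options=options)
graph_partition_dict = {node: part_id
                        for part_id, nodes in enumerate(parts)
                        for node in nodes}
print(edgecuts, [len(p) for p in parts])
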
Code example #9
File: test_metis.py Project: benjaminpillot/greece
import networkx as nx
import metis
import numpy as np
import nxmetis
from matplotlib import pyplot as plt
from shapely.ops import cascaded_union

from gistools.geometry import katana_centroid, polygon_collection_to_graph, explode
from gistools.layer import PolygonLayer
from geopandas import GeoDataFrame

graph = nx.Graph()
graph.add_edges_from([(n, n + 1) for n in range(50)])
for n in graph.nodes:
    graph.add_node(n, weight1=[n, n], weight2=len(graph.nodes) - n)

nparts = 10
tpweights = [[1 / nparts, 1 / nparts] for _ in range(nparts)]
_, partition = nxmetis.partition(graph,
                                 nparts,
                                 node_weight="weight1",
                                 tpwgts=tpweights,
                                 recursive=False,
                                 options=nxmetis.MetisOptions(iptype=3,
                                                              rtype=1,
                                                              contig=True,
                                                              ncuts=10))

print(partition)