def test_min_vertex_cover(self):
        # create a simple star graph
        size = 50
        sg = nx.star_graph(size)
        cover = a.min_weighted_vertex_cover(sg)
        assert_equals(2, len(cover))
        for u, v in sg.edges_iter():
            ok_((u in cover or v in cover), "Node not covered!")

        wg = nx.Graph()
        wg.add_node(0, weight=10)
        wg.add_node(1, weight=1)
        wg.add_node(2, weight=1)
        wg.add_node(3, weight=1)
        wg.add_node(4, weight=1)

        wg.add_edge(0, 1)
        wg.add_edge(0, 2)
        wg.add_edge(0, 3)
        wg.add_edge(0, 4)

        wg.add_edge(1, 2)
        wg.add_edge(2, 3)
        wg.add_edge(3, 4)
        wg.add_edge(4, 1)

        cover = a.min_weighted_vertex_cover(wg, weight="weight")
        csum = sum(wg.node[node]["weight"] for node in cover)
        assert_equals(4, csum)

        for u, v in wg.edges_iter():
            ok_((u in cover or v in cover), "Node not covered!")
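The test above targets the NetworkX 1.x API (Graph.edges_iter, Graph.node) and nose-style assertions. A minimal sketch of the same star-graph check against the NetworkX 2.x API with plain asserts (the exact cover returned may vary, but its size is 2, as the tests above assert):

import networkx as nx
from networkx.algorithms.approximation import min_weighted_vertex_cover

def test_min_vertex_cover_star_nx2():
    # star_graph(50) has one hub joined to 50 leaves
    sg = nx.star_graph(50)
    cover = min_weighted_vertex_cover(sg)
    assert len(cover) == 2
    # every edge must have at least one endpoint in the cover
    assert all(u in cover or v in cover for u, v in sg.edges())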
Example #2
    def test_min_vertex_cover(self):
        # create a simple star graph
        size = 50
        sg = nx.star_graph(size)
        cover = a.min_weighted_vertex_cover(sg)
        assert_equals(2, len(cover))
        for u, v in sg.edges():
            ok_((u in cover or v in cover), "Node not covered!")

        wg = nx.Graph()
        wg.add_node(0, weight=10)
        wg.add_node(1, weight=1)
        wg.add_node(2, weight=1)
        wg.add_node(3, weight=1)
        wg.add_node(4, weight=1)

        wg.add_edge(0, 1)
        wg.add_edge(0, 2)
        wg.add_edge(0, 3)
        wg.add_edge(0, 4)

        wg.add_edge(1, 2)
        wg.add_edge(2, 3)
        wg.add_edge(3, 4)
        wg.add_edge(4, 1)

        cover = a.min_weighted_vertex_cover(wg, weight="weight")
        csum = sum(wg.node[node]["weight"] for node in cover)
        assert_equals(4, csum)

        for u, v in wg.edges():
            ok_((u in cover or v in cover), "Node not covered!")
Example #3
 def test_unweighted_undirected(self):
     # create a simple star graph
     size = 50
     sg = nx.star_graph(size)
     cover = min_weighted_vertex_cover(sg)
     assert_equals(2, len(cover))
     ok_(is_cover(sg, cover))
def generate_wMVC_summ(input_path: str, reference_path: str, output_path: str, limit: int):
    for doc in listdir(input_path):
        with open(input_path + doc, 'r') as f:
            txt = f.read()
            if txt == '' or txt == 'None.':
                open(output_path + doc, 'w', encoding='utf8').write('')
                continue

        sent_tokens, factors = get_sentences_with_factors(input_path, reference_path, doc)

        if document_word_count(sent_tokens) < 250:
            open(output_path + doc, 'w', encoding='utf8').write('\n\n'.join(sent_tokens))
            continue

        dist = compute_entailment(sent_tokens)

        threshold = compute_threshold(dist, 0.5)
        large = 2 * max(factors) * len(sent_tokens)

        vertices = [(i, {'weight': large - factors[i] * (sum(dist[i]) - 1)})
                    for i in range(len(sent_tokens))]

        edges = create_edge_set(dist, threshold)

        G = Graph()
        G.add_nodes_from(vertices)
        G.add_edges_from(edges)

        wMVC = min_weighted_vertex_cover(G, 'weight')

        tops = sorted([i for i in wMVC],
                      key=lambda x: vertices[x][1]['weight'])

        write_summary(output_path, doc, limit, sent_tokens, tops)
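generate_wMVC_summ depends on project-specific helpers (get_sentences_with_factors, compute_entailment, compute_threshold, create_edge_set, write_summary). A self-contained sketch of just the graph-building and cover step, with toy scores standing in for those helpers:

import networkx as nx
from networkx.algorithms.approximation import min_weighted_vertex_cover

# toy stand-ins: pairwise "entailment" scores and per-sentence importance factors
dist = [[1.0, 0.8, 0.1],
        [0.8, 1.0, 0.2],
        [0.1, 0.2, 1.0]]
factors = [1.0, 2.0, 1.5]
threshold = 0.5  # the original derives this via compute_threshold(dist, 0.5)

# same weighting as above: more important sentences get lower weight
large = 2 * max(factors) * len(dist)
vertices = [(i, {'weight': large - factors[i] * (sum(dist[i]) - 1)})
            for i in range(len(dist))]
# stand-in for create_edge_set: connect pairs whose score exceeds the threshold
edges = [(i, j) for i in range(len(dist)) for j in range(i + 1, len(dist))
         if dist[i][j] > threshold]

G = nx.Graph()
G.add_nodes_from(vertices)
G.add_edges_from(edges)
wMVC = min_weighted_vertex_cover(G, 'weight')
print(sorted(wMVC, key=lambda i: vertices[i][1]['weight']))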
 def test_unweighted_undirected(self):
     # create a simple star graph
     size = 50
     sg = nx.star_graph(size)
     cover = min_weighted_vertex_cover(sg)
     print(list(cover))
     assert 2 == len(cover)
     assert is_cover(sg, cover)
Example #7
def vertex_cover(num_seeds, G):
    max_set = min_weighted_vertex_cover(G)
    seeds = random.sample(list(max_set), min(num_seeds, len(max_set)))

    if len(seeds) < num_seeds:
        seeds.extend(random.sample(list(set(G.nodes()) - set(seeds)), num_seeds - len(seeds)))
        # seeds = list(set(seeds))
    return seeds
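A short usage sketch for the seed picker above, assuming vertex_cover is defined as in this example and that networkx, random, and min_weighted_vertex_cover are imported in the same module:

import networkx as nx

G = nx.erdos_renyi_graph(100, 0.05, seed=42)
seeds = vertex_cover(10, G)  # draws seeds from the 2-approximate cover, padding if it is too small
print(len(seeds))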
 def _extract_features_for_subgraph(self, graph):
     res = {}
     deg_list = [i[1] for i in nx.degree(graph)]
     weights_list = [
         graph[edge[0]][edge[1]]['weight'] for edge in graph.edges
     ]
     res['connected'] = [1 if nx.is_connected(graph) else 0]
     res['density'] = ['{:.6f}'.format(nx.density(graph))]
     res['Avg_CC'] = [aprox.average_clustering(graph)]
     res['Median_deg'] = ['{:.6f}'.format(np.median(deg_list))]
     res['Variance_deg'] = ['{:.6f}'.format(np.var(deg_list))]
     res['Median_wights'] = [
         '{:.6f}'.format(
             np.median(weights_list) if len(weights_list) > 0 else -1)
     ]
     res['Variance_wights'] = [
         '{:.6f}'.format(
             np.var(weights_list) if len(weights_list) > 0 else 0)
     ]
     res['Avg_degree'] = [
         '{:.6f}'.format(sum(deg_list) / len(nx.degree(graph)))
     ]
     res['Avg_weight'] = [
         '{:.6f}'.format(
             sum(weights_list) /
             len(weights_list) if len(weights_list) > 0 else -1)
     ]
     res['Avg_weight_abs'] = [
         '{:.6f}'.format(
             abs(
                 sum(weights_list) /
                 len(weights_list) if len(weights_list) > 0 else -1))
     ]
     res['edges'] = [len(graph.edges)]
     res['nodes'] = [len(graph.nodes)]
     res['self_loops'] = [len(list(nx.nodes_with_selfloops(graph)))]
     res['edge_to_node_ratio'] = [
         '{:.6f}'.format(
             len(graph.nodes) /
             len(graph.edges) if len(graph.edges) > 0 else len(graph.nodes))
     ]
     res['negative_edges'] = [
         len([
             edge for edge in graph.edges
             if graph[edge[0]][edge[1]]['weight'] < 0
         ])
     ]
     res['Num_of_zero_weights'] = [
         len([
             e for e in graph.edges
             if abs(graph[e[0]][e[1]]['weight']) < 0.005
         ])
     ]
     res['min_vc'] = [len(aprox.min_weighted_vertex_cover(graph))]
     for key in res.keys():
         res[key] = [float(res[key][0])]
     return res
 def test_unweighted_directed(self):
     # Create a star graph in which half the nodes are directed in
     # and half are directed out.
     G = nx.DiGraph()
     G.add_edges_from((0, v) for v in range(1, 26))
     G.add_edges_from((v, 0) for v in range(26, 51))
     cover = min_weighted_vertex_cover(G)
     assert 2 == len(cover)
     assert is_cover(G, cover)
Example #10
 def test_unweighted_directed(self):
     # Create a star graph in which half the nodes are directed in
     # and half are directed out.
     G = nx.DiGraph()
     G.add_edges_from((0, v) for v in range(1, 26))
     G.add_edges_from((v, 0) for v in range(26, 51))
     cover = min_weighted_vertex_cover(G)
     assert_equals(2, len(cover))
     ok_(is_cover(G, cover))
Example #11
    def test_unweighted_self_loop(self):
        slg = nx.Graph()
        slg.add_node(0)
        slg.add_node(1)
        slg.add_node(2)

        slg.add_edge(0, 1)
        slg.add_edge(2, 2)

        cover = min_weighted_vertex_cover(slg)
        assert 2 == len(cover)
        assert is_cover(slg, cover)
Example #12
def nodes_to_delete(graph, colors, strategy):
    """Given graph and its possibly illegal coloring returns nodes that should be deleted to obtain legal coloring.

    Args:
        colors (dict): Full coloring of the graph. If it is None, assume all colors are the same
            (the fixed graph should become an independent set).
    """

    nodes_to_delete = []

    if colors is None:
        return nodes_to_delete

    illegal_edges = {(i, j)
                     for (i, j) in graph.edges()
                     if colors[i] == colors[j] and colors[i] != -1}
    subgraph_illegal_edges = nx.Graph()
    subgraph_illegal_edges.add_edges_from(illegal_edges)
    if strategy == 'min_vertex_cover':
        nodes_to_delete = approximation.min_weighted_vertex_cover(
            subgraph_illegal_edges)
    elif strategy == 'arora_kms':
        nodes_to_delete = subgraph_illegal_edges.nodes()
    elif strategy == 'arora_kms_prim':
        nodes_by_degree = get_nodes_sorted_by_degree(subgraph_illegal_edges)
        while nodes_by_degree:
            if subgraph_illegal_edges.degree[nodes_by_degree[0]] == 0:
                break
            edge_to_delete = random.choice(list(
                subgraph_illegal_edges.edges()))
            subgraph_illegal_edges.remove_nodes_from(edge_to_delete)
            nodes_to_delete.extend(edge_to_delete)
            nodes_by_degree = get_nodes_sorted_by_degree(
                subgraph_illegal_edges)
    elif strategy == 'max_degree_first':
        nodes_by_degree = get_nodes_sorted_by_degree(subgraph_illegal_edges)
        while nodes_by_degree:
            if subgraph_illegal_edges.degree[nodes_by_degree[0]] == 0:
                break
            nodes_to_delete.append(nodes_by_degree[0])
            subgraph_illegal_edges.remove_node(nodes_by_degree[0])
            nodes_by_degree = get_nodes_sorted_by_degree(
                subgraph_illegal_edges)
    else:
        raise Exception('Unknown node fixing strategy')

    return nodes_to_delete
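A standalone sketch of the 'min_vertex_cover' strategy above on a tiny mis-colored graph; get_nodes_sorted_by_degree, used only by the other strategies, is a project helper and is not needed here:

import networkx as nx
from networkx.algorithms import approximation

graph = nx.star_graph(3)                 # hub 0 joined to leaves 1, 2, 3
colors = {0: 1, 1: 1, 2: 1, 3: 2}        # edges 0-1 and 0-2 are illegally monochromatic

illegal_edges = {(i, j) for (i, j) in graph.edges()
                 if colors[i] == colors[j] and colors[i] != -1}
subgraph_illegal_edges = nx.Graph()
subgraph_illegal_edges.add_edges_from(illegal_edges)
nodes_to_delete = approximation.min_weighted_vertex_cover(subgraph_illegal_edges)

# deleting the cover removes every illegal edge, leaving a legal coloring
remaining = graph.copy()
remaining.remove_nodes_from(nodes_to_delete)
assert all(colors[u] != colors[v] for u, v in remaining.edges())
print(sorted(nodes_to_delete))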
 def extract_graph_features(self, graph):
     """
     ref: https://networkx.github.io/documentation/stable/_modules/networkx/algorithms/approximation/vertex_cover.html
     ref: https://networkx.github.io/documentation/stable/reference/algorithms/approximation.html#module-networkx.algorithms.approximation
     """
     res = {}
     deg_list = [i[1] for i in nx.degree(graph)]
     weights_list = [
         graph[edge[0]][edge[1]]['weight'] for edge in graph.edges
     ]
     if len(weights_list) == 0:
         return None
     # try:
     #     weights_list = [graph[edge[0]][edge[1]]['weight'] for edge in graph.edges]
     # except:
     #     return None
     res['connected'] = 1 if nx.is_connected(graph) else 0
     res['density'] = '{:.6f}'.format(nx.density(graph))
     res['Avg_CC'] = aprox.average_clustering(graph)
     res['Median_deg'] = '{:.6f}'.format(np.median(deg_list))
     res['Variance_deg'] = '{:.6f}'.format(np.var(deg_list))
     res['Median_wights'] = '{:.6f}'.format(np.median(weights_list))
     res['Variance_wights'] = '{:.6f}'.format(np.var(weights_list))
     res['Avg_degree'] = '{:.6f}'.format(
         sum(deg_list) / len(nx.degree(graph)))
     res['Avg_weight'] = '{:.6f}'.format(
         sum(weights_list) / len(weights_list))
     res['Avg_weight_abs'] = '{:.6f}'.format(
         abs(sum(weights_list) / len(weights_list)))
     res['edges'] = len(graph.edges)
     res['nodes'] = len(graph.nodes)
     res['self_loops'] = len(list(nx.nodes_with_selfloops(graph)))
     res['edge_to_node_ratio'] = '{:.6f}'.format(
         len(graph.nodes) / len(graph.edges))
     res['negative_edges'] = len([
         edge for edge in graph.edges
         if graph[edge[0]][edge[1]]['weight'] < 0
     ])
     res['Num_of_zero_weights'] = len([
         e for e in graph.edges
         if abs(graph[e[0]][e[1]]['weight']) < 0.005
     ])
     res['min_vc'] = len(aprox.min_weighted_vertex_cover(graph))
     return res
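Both extractors above reduce a weighted graph to scalar features, and min_vc is simply the size of the 2-approximate cover. A tiny sketch on a toy signed-weight graph, using the same aprox alias for networkx.algorithms.approximation:

import networkx as nx
from networkx.algorithms import approximation as aprox

g = nx.Graph()
g.add_weighted_edges_from([(0, 1, 0.7), (1, 2, -0.3), (2, 3, 0.001), (3, 0, 1.2)])

negative_edges = sum(1 for u, v in g.edges if g[u][v]['weight'] < 0)
near_zero_edges = sum(1 for u, v in g.edges if abs(g[u][v]['weight']) < 0.005)
min_vc = len(aprox.min_weighted_vertex_cover(g))
print(negative_edges, near_zero_edges, min_vc)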
Example #14
def vertex_cover_strategy(G, num_seeds):
    ''' Picks the top-degree nodes from a vertex cover (VC) of the
    input graph. If the VC has fewer nodes than num_seeds, the
    remainder is taken from the top-degree nodes of the input graph.

    Given an undirected graph G = (V, E) and a function w 
    assigning nonnegative weights to its vertices, 
    find a minimum weight subset of V such that each edge in E is 
    incident to at least one vertex in the subset.
    
    Note: every vertex cover is a dominating set, but not every
    dominating set is a vertex cover

    Args:
        G --                the input graph
        num_seeds --        the number of seed nodes to select

    Returns: list of output nodes based on VC strategy
    '''

    vc_nodes = min_weighted_vertex_cover(G)
    nodes_to_remove = G.nodes() - vc_nodes
    subgraph = G.copy()
    subgraph.remove_nodes_from(nodes_to_remove)
    number_of_nodes = num_seeds
    if len(vc_nodes) < num_seeds:
        number_of_nodes = len(vc_nodes)
    centralities_dict = nx.degree_centrality(subgraph)
    sorted_centralities = nlargest(number_of_nodes,
                                   centralities_dict.items(),
                                   key=operator.itemgetter(1))
    node_keys = [i[0] for i in sorted_centralities]

    nodes_top_degrees = degree_centrality_strategy(G, num_seeds)
    # In case we need more seed nodes, we simply pull from the top-degree nodes of the input graph
    i = 0
    while (len(node_keys) != num_seeds):
        if nodes_top_degrees[i] not in node_keys:
            node_keys.append(nodes_top_degrees[i])
        i += 1
    assert (len(node_keys) == num_seeds)
    return node_keys
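The core of the strategy above as a standalone sketch; degree_centrality_strategy and the padding loop are project-specific and omitted, and nlargest is assumed to come from heapq as in the surrounding code:

import operator
from heapq import nlargest
import networkx as nx
from networkx.algorithms.approximation import min_weighted_vertex_cover

G = nx.barabasi_albert_graph(200, 3, seed=1)
num_seeds = 10

vc_nodes = min_weighted_vertex_cover(G)
subgraph = G.subgraph(vc_nodes)              # restrict to cover vertices only
centralities = nx.degree_centrality(subgraph)
seeds = [node for node, _ in nlargest(min(num_seeds, len(vc_nodes)),
                                      centralities.items(),
                                      key=operator.itemgetter(1))]
print(seeds)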
Example #15
    def test_weighted(self):
        wg = nx.Graph()
        wg.add_node(0, weight=10)
        wg.add_node(1, weight=1)
        wg.add_node(2, weight=1)
        wg.add_node(3, weight=1)
        wg.add_node(4, weight=1)

        wg.add_edge(0, 1)
        wg.add_edge(0, 2)
        wg.add_edge(0, 3)
        wg.add_edge(0, 4)

        wg.add_edge(1, 2)
        wg.add_edge(2, 3)
        wg.add_edge(3, 4)
        wg.add_edge(4, 1)

        cover = min_weighted_vertex_cover(wg, weight="weight")
        csum = sum(wg.nodes[node]["weight"] for node in cover)
        assert_equals(4, csum)
        ok_(is_cover(wg, cover))
Example #16
    def test_weighted(self):
        wg = nx.Graph()
        wg.add_node(0, weight=10)
        wg.add_node(1, weight=1)
        wg.add_node(2, weight=1)
        wg.add_node(3, weight=1)
        wg.add_node(4, weight=1)

        wg.add_edge(0, 1)
        wg.add_edge(0, 2)
        wg.add_edge(0, 3)
        wg.add_edge(0, 4)

        wg.add_edge(1, 2)
        wg.add_edge(2, 3)
        wg.add_edge(3, 4)
        wg.add_edge(4, 1)

        cover = min_weighted_vertex_cover(wg, weight="weight")
        csum = sum(wg.node[node]["weight"] for node in cover)
        assert_equals(4, csum)
        ok_(is_cover(wg, cover))
Example #17
def get_summary(edgelist):

    start = timeit.default_timer()
    g = nx.read_edgelist(edgelist, delimiter=',')

    output = dict()

    output['num_nodes'] = len(g.nodes())
    output['num_edges'] = len(g.edges())
    print('1')
    output['average_node_degree'] = np.mean([x[1] for x in list(nx.degree(g))])
    output['num_connected_components'] = nx.number_connected_components(g)
    print('2')
    # output['average_clustering'] = nx.average_clustering(g) 
    # print('3')
    # output['node_connectivity'] = nx.node_connectivity(g)
    print('4')
    # output['max_clique_size'] = len(nx_approx.max_clique(g))
    # output['max_independent_set_size'] = len(nx_approx.maximum_independent_set(g))
    output['min_vertex_cover_size'] = len(nx_approx.min_weighted_vertex_cover(g))
    print('5')
    output['degree_assortativity_coefficient'] = \
            nx.degree_assortativity_coefficient(g)
    print('6')
    output['average_neighbor_degree'] = nx.average_neighbor_degree(g)
    print('7')
    # output['diameter'] = nx.diameter(g)
    print('8')
    # output['wiener_index'] = nx.wiener_index(g)


    stop = timeit.default_timer()
    output['runtime'] = stop-start
    output['edgelist'] = edgelist
    
    return output
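A quick usage sketch for get_summary, assuming the function above and its imports (timeit, numpy as np, networkx as nx, and networkx.algorithms.approximation as nx_approx) are in scope; the input is a comma-delimited edge list as expected by nx.read_edgelist(..., delimiter=','):

import os
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.csv', delete=False) as f:
    f.write('\n'.join(['0,1', '1,2', '2,3', '3,0', '0,2']))
    path = f.name

summary = get_summary(path)
print(summary['num_nodes'], summary['num_edges'], summary['min_vertex_cover_size'])
os.remove(path)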
Example #18
def multiple_mixed_strategy(G, num_seeds):
    ''' Picks the top nodes based on four mixed strategies

    Args:
        G --                the input graph
        num_seeds --        the number of seed nodes to select

    Returns: list of output nodes based on the four mixed strategies
    '''

    number_of_nodes = num_seeds

    # Degree Centralities (25%)
    degree_centralities_dict = nx.degree_centrality(G)
    sorted_degree_centralities = nlargest(num_seeds // 4,
                                          degree_centralities_dict.items(),
                                          key=operator.itemgetter(1))
    degree_node_keys = [i[0] for i in sorted_degree_centralities]

    # Eigenvector Centralities (25%)
    eigenvector_centralities_dict = nx.eigenvector_centrality(G)
    eigenvector_centralities_dict = {
        key: eigenvector_centralities_dict[key]
        for key in eigenvector_centralities_dict if key not in degree_node_keys
    }
    sorted_eigenvector_centralities = nlargest(
        num_seeds // 4,
        eigenvector_centralities_dict.items(),
        key=operator.itemgetter(1))
    node_keys = degree_node_keys + [
        i[0] for i in sorted_eigenvector_centralities
    ]

    # Vertex Cover (25%)
    vc_nodes = min_weighted_vertex_cover(G)
    nodes_to_remove = G.nodes() - vc_nodes
    subgraph = G.copy()
    subgraph.remove_nodes_from(nodes_to_remove)
    if len(vc_nodes) < num_seeds:
        number_of_nodes = len(vc_nodes)
    vc_centralities_dict = nx.degree_centrality(subgraph)
    vc_centralities_dict = {
        key: vc_centralities_dict[key]
        for key in vc_centralities_dict if key not in node_keys
    }
    sorted_vc_centralities = nlargest(min(num_seeds // 4, number_of_nodes),
                                      vc_centralities_dict.items(),
                                      key=operator.itemgetter(1))
    node_keys += [i[0] for i in sorted_vc_centralities]

    # MST (25%)
    mst = nx.minimum_spanning_tree(G)
    number_of_nodes = num_seeds
    if len(mst) < num_seeds:
        number_of_nodes = len(mst)
    mst_centralities_dict = nx.degree_centrality(mst)
    mst_centralities_dict = {
        key: mst_centralities_dict[key]
        for key in mst_centralities_dict if key not in node_keys
    }
    sorted_mst_centralities = nlargest(min(num_seeds // 4, number_of_nodes),
                                       mst_centralities_dict.items(),
                                       key=operator.itemgetter(1))
    node_keys += [i[0] for i in sorted_mst_centralities]

    nodes_top_degrees = degree_centrality_strategy(G, num_seeds)
    # In case we need more seed nodes, we simply pull from the top-degree nodes of the input graph
    i = 0
    while (len(node_keys) != num_seeds):
        if nodes_top_degrees[i] not in node_keys:
            node_keys.append(nodes_top_degrees[i])
        i += 1
    assert (len(node_keys) == num_seeds)

    return node_keys
Example #19
                    if line[0] == 'p':
                        s = line.split()
                        v = s[2]
                        e = s[3]
                        avg_deg = int(s[3]) / int(s[2])
                        data = ' '.join([str(file.name), str(v), str(e), str(avg_deg)])
                        output.write(data + '\n')
                        break

# get more detailed information on problem instances
with open("detail.txt", 'w') as output:
    output.write("file_name min_deg max_deg treewidth approx_vc\n")
    with os.scandir(input_dir) as dir:
        for file in dir:
            g = nx.Graph()
            with open(file, encoding="latin-1") as f:
                for line in f:
                    if line[0] == 'p':
                        s = line.split()
                        v = int(s[2])
                        g.add_nodes_from(range(1, v + 1))
                    elif line[0] != 'c':
                        e = line.split()
                        g.add_edge(int(e[0]), int(e[1]))
                min_deg = min(d for n, d in g.degree())
                max_deg = max(d for n, d in g.degree())
                tw = naa.treewidth_min_degree(g)[0]
                vc = len(naa.min_weighted_vertex_cover(g))
                data = ' '.join([str(file.name), str(min_deg), str(max_deg), str(tw), str(vc)])
                output.write(data + '\n')
Example #20
info_file_name += str(num_nodes)
info_file_name += "_info.txt"

fh = open(info_file_name, "w")

for i in range(num_graphs):
    print("generating " + str(i) + "th graph ...")

    if (graph_type == "er"):
        graph = nx.erdos_renyi_graph(num_nodes, edge_possibility)
        #graph = nx.gnp_random_graph(num_nodes, edge_possibility)

    num_edges = len(graph.edges)
    print("there are " + str(num_edges) + " edges")

    cover = min_weighted_vertex_cover(graph)
    two_opt = len(cover)

    file_name = graph_type + str(num_nodes)
    #TODO: change the 015 here:
    file_name = file_name + "_015_" + str(num_edges) + "_"
    file_name = file_name + str(two_opt) + "_v" + str(i)

    file_name_temp = file_name + ".edgelist"
    file_name = file_name + ".txt"

    nx.write_edgelist(graph, file_name)
    nx.write_edgelist(graph, file_name_temp)

    goal = graph.number_of_edges()
    num_nodes = len(graph)
Example #21
def get_min_vertex_cover(model):
    return appr.min_weighted_vertex_cover(model.graph)
Example #22
def simpath(G, tol, l=10, k=2):
    ''' Algorithm 4 in paper Assembly of the Simpath algorithm
    Input: 
    G(V,E): Networkx Directed Graph Object
    tol: pruning coefficient
    l: length of potential seed nodes
    k: length of seed set

    Output: Set of seed (S)
    '''

    #*****************************************
    # Setting up steps [1-8] of Simpath algorithm
    #*****************************************

    G1 = G.to_undirected()
    C = approximation.min_weighted_vertex_cover(G1)  # list of vertices

    V = G.nodes()

    u1 = [v for v in V if v not in C]  # Nodes in V - C

    celfq = []
    tup_c = {}

    for c in C:  # line 2
        N_in = list(G.neighbors(c))  # neighbors of c (materialized for repeated membership tests)
        U = [v for v in u1 if v in N_in]  # U = {(V-C)& N(c)}

        spd_u, spdW_v = simpath_spread(
            c, tol, U=U)  # spread(u) on Graph of nodes: V-v = V
        # will give both spread in G as well
        # as G-S+u simultaneously

        tup_c[c] = spdW_v  # Adding to dictionary for sorting
        celfq.append(c)  # Appending to CELF queue

    for w in u1:  # line 6
        noutv = G.successors(w)  # Out-neighbors of w
        spdv = 1

        for nou in noutv:
            wgt = G[w][nou]['weight']
            spdv = spdv + (wgt * tup_c[nou])  # Theorem 2

        tup_c[w] = spdv
        celfq.append(w)  # Appending to CELF queue

    #*****************************************
    # Steps [9-14] - CELF
    #*****************************************

    S = []
    spd = 0

    def sorter(d, pos=l):
        ''' Returns a list of top-l keys from a Dictionary sorted (desc) on values '''

        tmtup = sorted(d.items(), key=lambda x: x[1], reverse=True)
        tmtup = tmtup[:pos]  # top-l nodes sorted on spread
        tmtupl = [x for x, y in tmtup]
        return tmtupl

    u_flag = 0  # tracking iteration
    while len(S) < k:  # line 10
        lu = sorter(tup_c)
        lu1 = [(la, u_flag) for la in lu]

        spdV_x = {}  # New dictionary

        for ul, _ in lu1:
            ul_x = [v for v in V if v != ul]  # V - x; want to keep V intact.

            spd, spdV_xS = simpath_spread(S, tol, U=ul_x)

        for index, (x, y) in enumerate(lu1):
            if y == u_flag - 1:  # checking if x part of the previous iteration
                S = S + [x]
                spd = spd + spd
                celfq.remove(x)  # remove u
                break

            V_S = [v for v in V if v not in S]

            spreadV_Sx, skip = backtrack(x, tol, V_S, U=None)

            spreadSplusx = spreadV_Sx + spdV_xS

            marg_gain_x = spreadSplusx - spd

            u_flag += 1  # next iteration

            celfq.insert(index, x)

    return S
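simpath_spread and backtrack above are helpers from the source project and are not reproduced here. The setup in steps 1-8 needs only NetworkX; a minimal sketch of that part on a toy weighted digraph:

import networkx as nx
from networkx.algorithms import approximation

G = nx.DiGraph()
G.add_weighted_edges_from([(0, 1, 0.4), (1, 2, 0.3), (2, 0, 0.2), (2, 3, 0.5)])

C = approximation.min_weighted_vertex_cover(G.to_undirected())  # vertex cover of the undirected copy
u1 = [v for v in G.nodes() if v not in C]                       # V - C, handled via Theorem 2 in the source
print(sorted(C), u1)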
Example #23
print("Srednji fitness =", populacijaJedinki.srednjiFitness)
print("Srednja velicina pokrivaca =", populacijaJedinki.velicina_pokrivaca)
print()
for iteration in range(1, iteracije + 1):
    populacijaJedinki.breed()
    populacijaJedinki.mutate()
    populacijaJedinki.fitnessRank()
    populacijaJedinki.evaluate_diversity_ranks()
    plot_fitness.append(populacijaJedinki.srednjiFitness)
    print("Iteracija", iteration)
    print("Srednji fitness =", populacijaJedinki.srednjiFitness)
    print("Srednja velicina pokrivaca =", populacijaJedinki.velicina_pokrivaca)
    print(" ")
  
najboljiPokrivac = None
najboljiFitness = 0
for pokrivacGrana in populacijaJedinki.pokrivac:
    if pokrivacGrana.fitness > najboljiFitness:
        najboljiFitness = pokrivacGrana.fitness
        najboljiPokrivac = pokrivacGrana

print("Vreme izvrsavanja:%s sekundi ---" % (time.time() - start_time))
print("Velicina pokrivaca = ", len(najboljiPokrivac))
print("Najbolji pokrivac = ", najboljiPokrivac.listaCvorova)
print("Broj cvorova = ", len(najboljiPokrivac.listaCvorova))
print("Broj grana = ", len(approximation.min_weighted_vertex_cover(G)))
plt.title("statistika")
plt.plot(range(iteracije + 1), plot_fitness, 'b--',)
plt.ylabel('fitnes')
plt.xlabel("broj iteracija")
plt.show()
Example #24
def vertex_cover_partition(graph, nodes):
    prime_nodes = approximation.min_weighted_vertex_cover(graph)
    return ''
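vertex_cover_partition above computes a cover but discards it and returns an empty string. One plausible completion, purely a guess at the intended behavior rather than the author's code, would split the given nodes by cover membership:

from networkx.algorithms import approximation

def vertex_cover_partition_sketch(graph, nodes):
    # hypothetical completion: return (nodes in the 2-approximate cover, nodes outside it)
    prime_nodes = approximation.min_weighted_vertex_cover(graph)
    inside = [n for n in nodes if n in prime_nodes]
    outside = [n for n in nodes if n not in prime_nodes]
    return inside, outside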
Example #25
clique = find_biggest_clique(G)

"finding an order with DSATUR"
d, order = modified_greedy_coloring.greedy_color(G, clique, strategy="DSATUR")

start_time = time.time()
"exact coloring"
coloring = exact_dsatur(G, clique, order, d)
end_time = time.time()
print("elapsed time for coloring before kernelization=",
      (end_time - start_time) * 1000)
print("coloring before kernelization = ", coloring)
print("chromatic number = ", max(coloring.items(), key=lambda k: k[1])[1])

"2-approximate vertex cover"
v_approx = min_weighted_vertex_cover(G)
print("2-approximate vertex cover length = ", len(v_approx))

"exact vertex cover from ILP"
vertex_cover = []

with open('output.out') as g:
    for line in g:
        vertex_cover.append(int(line))

print("exact vertex cover ", vertex_cover)
print("exact vertex cover length =", len(vertex_cover))

"vertices outside of the approximate vertex cover"
outside_nodes_approx = set(G.nodes()) - set(v_approx)
    print("Mean L1 diversity =", population.mean_diversity)
    print("Mean VC size =", population.mean_vertex_cover_size)
    print()

# vertex cover with best fitness is our output
best_vertex_cover = None
best_fitness = 0
for vertex_cover in population.vertexcovers:
    if vertex_cover.fitness > best_fitness:
        best_fitness = vertex_cover.fitness
        best_vertex_cover = vertex_cover

print("Best Vertex Cover Size =", len(best_vertex_cover))
print("Best Vertex Cover = ", best_vertex_cover.vertexlist)

print("networkx approximation =",
      len(approximation.min_weighted_vertex_cover(G)))

# just to check
# is_valid_vertex_cover(best_vertex_cover)

# plotting again
# plot population stats
plt.subplot(2, 1, 1)
plt.title("Mean Population Stats")

plt.plot(
    range(num_iterations + 1),
    plot_fitness,
    'b--',
)
plt.ylabel('Fitness')
Example #27
import networkx as nx
import networkx.algorithms.approximation as app
import matplotlib.pyplot as plt

G = nx.fast_gnp_random_graph(50, 0.1)
P = app.min_weighted_vertex_cover(G)
nx.draw(G, with_labels=True, font_weight='bold')
print(P)

# nx.draw_shell(G, nlist=[range(5, 10), range(5)], with_labels=True, font_weight='bold');

plt.show()

# plt.savefig("path.png");
Example #28
def solve(g):
    slv = set()

    # include vertices with self-loops in any VC
    for e in nx.selfloop_edges(g):
        v, _ = e
        slv.add(v)

    # self-loop vertices can be removed from the graph
    for v in slv:
        g.remove_node(v)

    # run degree-one reduction once if relevant
    # this call of the reduction removes vertices to handle large trees quickly
    for _, d in g.degree():
        if d == 1:
            tree_vc = c.deg_one_redux(g, in_place=False)
            slv = slv.union(tree_vc)

            if g.size() == 0:
                return slv

            break

    # get 2-approx estimate for VC
    approx_vc = naa.min_weighted_vertex_cover(g)
    approx_ub = len(approx_vc)

    # start timer for branching
    init = timer()
    TIMEOUT = 2 * 60

    # track size of best solution encountered
    best_sol = len(g)

    def bnb(g, current=0, ub=-1, layer=0, is_split=False):
        nonlocal best_sol

        # check if branching has timed out
        # if it has, error out with size of best solution encountered
        if timer() - init > TIMEOUT:
            raise ValueError(best_sol + len(slv))

        # check if graph is empty or a single vertex
        n = len(g)
        if n <= 1:
            return set()

        # check if graph is a single edge
        if n == 2 and g.size() == 1:
            return set([list(g)[0]])

        vc = set()

        # apply degree-one reduction if relevant
        has_deg_one_redux = False
        for _, d in g.degree():
            if d == 1:
                has_deg_one_redux = True
                tree_vc, cover = c.deg_one_redux(g)
                vc = vc.union(tree_vc)
                current += len(tree_vc)

                if g.size() == 0:
                    for e in cover:
                        g.add_edge(*e)
                    if not is_split and len(vc) < best_sol:
                        best_sol = len(vc)
                    return vc

                break

        # get relevant instance info
        deg_list = sorted([(v, d) for v, d in g.degree()],
                          key=operator.itemgetter(1), reverse=True)
        v, max_deg = deg_list[0]

        # simple bound check
        if ub > 0 and current >= ub:
            # add all vertices to the VC, effectively terminating the branch
            for v in g:
                vc.add(v)

            if has_deg_one_redux:
                for e in cover:
                    g.add_edge(*e)

            return vc

        # split instance into connected components if relevant
        if not nx.is_connected(g):
            # ccs = [(g.subgraph(cc).copy(), ag.subgraph(cc).copy())
            #        for cc in nx.connected_components(g)]

            ccs = [[v for v in g.subgraph(cc)] for cc in nx.connected_components(g)]

            # for (cc, acc) in ccs:
            #     vc = vc.union(bnb(cc, acc, current=current, ub=ub, is_split=True))

            for cc in ccs:
                if len(cc) > 1:
                    excluded_vertices = list(set([v for v in g]) - set(cc))
                    excluded_edges = []
                    for v in excluded_vertices:
                        v_edges = [e for e in g.edges(v)]
                        excluded_edges += v_edges
                        g.remove_node(v)

                    cc_vc = bnb(g, current=current, ub=-1, layer=layer, is_split=True)
                    vc = vc.union(cc_vc)
                    current += len(cc_vc)

                    for v in excluded_vertices:
                        g.add_node(v)
                    for e in excluded_edges:
                        g.add_edge(*e)

            if has_deg_one_redux:
                for e in cover:
                    g.add_edge(*e)

            if not is_split and len(vc) < best_sol:
                best_sol = len(vc)

            return vc

        # bound check from Lemma 2.3 of Cygan et al.
        if max_deg <= ub and (len(g) > (ub**2 + ub) or g.size() > ub**2):
            # add all vertices to the VC, effectively terminating the branch
            for v in g:
                vc.add(v)

            if has_deg_one_redux:
                for e in cover:
                    g.add_edge(*e)

            return vc

        # find mirrors of v
        neighbors = [u for u in g[v]]
        second_neighbors = c.neighborhood(g, v, 2)
        mirrors = [v] # always include v itself for branching
        for u in second_neighbors:
            neighbor_diff = list(set(neighbors) - set([w for w in g[u]]))
            s = len(neighbor_diff)
            if s == 0:
                mirrors.append(u)
            else:
                max_edges = (s * (s - 1)) / 2  # edge count of a clique on s vertices
                if g.subgraph(neighbor_diff).size() == max_edges:
                    mirrors.append(u)

        # if no mirrors, find satellites of v
        satellites = set([v])
        if len(mirrors) == 1:
            for w in neighbors:
                neighbor_diff = list(set([u for u in g[w]]) - set(neighbors + [v]))
                if len(neighbor_diff) == 1:
                    satellites.add(neighbor_diff[0])

        # branch 1: M[v] in VC
        in_cover = []
        in_vc = set(mirrors)
        in_current = current + len(in_vc)
        for u in mirrors:
            in_subcover = [e for e in g.edges(u)]
            for e in in_subcover:
                in_cover.append(e)
                g.remove_edge(*e)
        in_result = bnb(g, current=in_current, ub=ub, layer=layer+1, is_split=is_split)
        in_current += len(in_result)
        in_vc = in_vc.union(in_result)
        if ub < 0 or in_current < ub:
            ub = in_current
        for e in in_cover:
            g.add_edge(*e)
        if not is_split and len(vc.union(in_vc)) < best_sol:
            best_sol = len(vc.union(in_vc))

        # branch 2: N(S[v]) in VC
        out_cover = []
        out_vc = set()
        out_current = current
        satellite_neighbors = set()
        for u in satellites:
            for w in g[u]:
                satellite_neighbors.add(w)
        for w in satellite_neighbors:
            out_subcover = [e for e in g.edges(w)]
            for e in out_subcover:
                out_cover.append(e)
                g.remove_edge(*e)
            out_vc.add(w)
            out_current += 1
        out_result = bnb(g, current=out_current, ub=ub, layer=layer+1, is_split=is_split)
        out_current += len(out_result)
        out_vc = out_vc.union(out_result)
        for e in out_cover:
            g.add_edge(*e)
        # if not is_split and len(vc.union(out_vc)) < best_sol:
        #     best_sol = len(vc.union(out_vc))

        if has_deg_one_redux:
            for e in cover:
                g.add_edge(*e)

        if in_current < out_current:
            return vc.union(in_vc)
        else:
            vc = vc.union(out_vc)
            if not is_split and len(vc) < best_sol:
                best_sol = len(vc)
            return vc

    return slv.union(bnb(g, ub=approx_ub))
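The mirror rule used for branching in solve (a vertex u at distance two from v is a mirror when N(v) \ N(u) induces a clique) can be exercised on its own; a minimal sketch with a plain set-based stand-in for the project helper c.neighborhood(g, v, 2):

import networkx as nx

def mirrors_of(g, v):
    neighbors = set(g[v])
    # vertices at distance exactly 2 from v (stand-in for c.neighborhood(g, v, 2))
    second_neighbors = {w for u in neighbors for w in g[u]} - neighbors - {v}
    mirrors = [v]  # solve() always branches on v itself
    for u in second_neighbors:
        diff = list(neighbors - set(g[u]))
        s = len(diff)
        if s == 0 or g.subgraph(diff).size() == s * (s - 1) / 2:
            mirrors.append(u)
    return mirrors

print(mirrors_of(nx.path_graph(5), 2))  # leaves 0 and 4 are mirrors of the middle vertex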
Example #29
File: gnd.py Project: pholme/gnd
        # Step 2. === Construct a subgraph of nodes at the border of the partition. ===
        H = nx.Graph()
        for u, v in LCC.edges():
            # Adding edges between nodes of different sign. (Note that just < (not <=) would mis-classify
            # nodes in symmetric graphs like 2 in 1-2-3.)
            if Fiedler[ii[u]] * Fiedler[ii[v]] <= 0.0:
                H.add_edge(u, v)

        # Step 3. === Construct a minimal vertex cover of H w.r.t. degree in G / degree in H. ===
        for v in H.nodes():  # calculate weight
            H.nodes[v]['weight'] = 1.0 / H.degree(v)
            if G.graph['cost type'] == 'degree':
                H.nodes[v]['weight'] *= LCC.degree(v)

        cover = list(min_weighted_vertex_cover(
            H, weight='weight'))  # get the vertex cover
        shuffle(cover)  # shuffle away dependence on input

        # Uncomment the following lines to follow the original code, but not the paper:
        #if G.graph['cost type'] == 'degree':
        #	cover.sort(key=LCC.degree())
        #else:
        #	cover.sort(key=LCC.degree(),reverse=True)

        # Step 4. === Delete the nodes in cover. ===
        for v in cover:
            remove_and_print_update(G, v)

    # Step 5. === Deleting the (trivial) rest of the nodes ===
    for v in [v for u, v in G.edges()]:  # half of the degree = 1 nodes
        remove_and_print_update(G, v)
Example #30
File: e1.py Project: satemochi/saaaaah
                               with_labels=False,
                               node_size=50,
                               nodelist=vl)
    if el:
        nx.draw_networkx_edges(g,
                               pos=pos,
                               ax=ax,
                               edge_color='r',
                               width=3,
                               edgelist=el)
    ax.set_title(title)
    ax.set_axis_off()
    ax.set_aspect('equal')


if __name__ == '__main__':
    g = gen()
    fix_outer_cycle_pos(g, get_cycle(g, 3))
    fix_all_pos(g)

    vc = min_weighted_vertex_cover(g)
    m = min_maximal_matching(g)
    print(m)

    fig, (ax1, ax2) = plt.subplots(ncols=2, figsize=(8, 4))
    draw(g, ax1, 'vertex cover', vl=list(vc))
    draw(g, ax2, 'matching', el=list(m))

    plt.tight_layout()
    plt.show()