Code example #1
def two_maximal_independent_set(G):
    """Return a set of nodes from a bipartite subgraph"""
    i0 = nx.maximal_independent_set(G)
    i1 = nx.maximal_independent_set(G, i0)

    b = set(i0) | set(i1)

    return b
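A side note (mine, not part of the snippet above): because i0 is already maximal, seeding the second call with i0 simply returns i0 again, so b ends up equal to set(i0). If the goal is the union of two disjoint independent sets, which does induce a bipartite subgraph, a sketch along these lines works; the helper name is hypothetical.

import networkx as nx

def two_disjoint_independent_sets(G):
    # hypothetical variant: take the second independent set from the graph
    # with the first one removed, so the two sets are disjoint
    i0 = nx.maximal_independent_set(G)
    rest = G.subgraph(set(G.nodes()) - set(i0))
    i1 = nx.maximal_independent_set(rest) if rest.number_of_nodes() else []
    return set(i0) | set(i1)

print(two_disjoint_independent_sets(nx.cycle_graph(6)))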
Code example #2
    def non_connected_nodes(self, date, graph):
        try:
            mis_size = len(nx.maximal_independent_set(graph, None))
            self.non_connected_nodes_dict[date] = mis_size
            print('Maximal Independent Set', mis_size)
        except Exception:
            print('Non Connected Component')
Code example #3
def compute_1_connected_k_dominating_set(G, CDS):

    MIScandidates = []

    MIScandidates.append(CDS[0])

    for i in range(1, len(CDS)):
        # reset for each candidate; otherwise a single conflict would block
        # every later node in CDS from being added
        add_flag = True
        for j in range(0, len(MIScandidates)):
            if G.has_edge(CDS[i], MIScandidates[j]):
                add_flag = False
                break
        if add_flag:
            MIScandidates.append(CDS[i])

            # print "MIScandidates"
            # print MIScandidates

    MIS = nx.maximal_independent_set(G, MIScandidates)
    # print "Maximal Independent Set"
    # print MIS

    C = list(set(CDS) - set(MIScandidates))
    # print "C is",
    # print C

    newCDS = list(MIS)  # copy, so appending C below does not also grow MIS
    # print newCDS

    for i in C:
        newCDS.append(i)

    CDSGraph = G.subgraph(newCDS)

    if not (nx.is_dominating_set(G, newCDS) and nx.is_connected(CDSGraph)):
        print "Error, C and I did not create a CDS"
        sys.exit()

    I1 = MIS

    # Here we just construct a 3-dominating set
    # Could be a loop to m, for an m-dominating set
    newG = G.copy()

    newG.remove_nodes_from(I1)

    I2 = nx.maximal_independent_set(newG)

    # Union of sets
    DA = list(set(I1) | set(I2) | set(C))

    return DA
Code example #4
        def get_maimal_indep_sets_by_coloring(graph,
                                              random_repeat=200
                                              ):  #->list of tuples
            """
            На вход принимает граф, кол-во запусков рандомной раскраски
             и возвращает не максимальные независимые множнества по включению.
             Независимые множества найдем с помощью раскраски графа,
            использется разные стратегии стратегии раскрасок (200 запусков на рандомную раскраску)

            graph: networkx.graph
            return: indep_nodes
            """
            indep_nodes = []
            strategies = [
                nx.coloring.strategy_independent_set,
                nx.coloring.strategy_largest_first,
                nx.coloring.strategy_connected_sequential_bfs,
                nx.coloring.strategy_connected_sequential_dfs,
                nx.coloring.strategy_saturation_largest_first
            ]
            for strategy in strategies:
                node_color_dict = nx.coloring.greedy_color(graph,
                                                           strategy=strategy)
                for color in set(node_color_dict.values()):
                    indep_set = set(node
                                    for node, col in node_color_dict.items()
                                    if col == color)
                    indep_set = set(
                        nx.maximal_independent_set(graph, indep_set))
                    indep_nodes.append(indep_set)
            # Random coloring, repeated random_repeat times
            for i in range(random_repeat):
                node_color_dict = nx.coloring.greedy_color(
                    G=graph, strategy=nx.coloring.strategy_random_sequential)
                for color in set(node_color_dict.values()):
                    indep_set = set(node
                                    for node, col in node_color_dict.items()
                                    if col == color)
                    indep_set = set(
                        nx.maximal_independent_set(graph, indep_set))
                    if indep_set not in indep_nodes:
                        indep_nodes.append(indep_set)
            # keep only unique independent node sets with more than 2 nodes
            indep_nodes = set(
                [tuple(ind) for ind in indep_nodes if len(ind) > 2])
            indep_nodes = list(map(lambda x: set(x),
                                   indep_nodes))  #list of set
            return indep_nodes
Code example #5
File: IGtools.py Project: lordgrilo/interference
def leaf_removal(g, verbose=False):
    G = g.copy()
    stop = 0;
    potential_mis = [];
    isolated = [x for x in g.nodes() if nx.degree(g,x)==0];
    potential_mis.extend(isolated);
    G.remove_nodes_from(isolated);
    while stop==0:
        deg = G.degree();
        if 1 in deg.values():
            for n in G.nodes_iter():
                if deg[n]==1:
                    L = n;
                    break;
            nn = nx.neighbors(G,L)[0]
            G.remove_node(L);
            G.remove_node(nn);
            potential_mis.append(L);
            isolated = [x for x in G.nodes() if nx.degree(G,x)==0];
            potential_mis.extend(isolated);
            G.remove_nodes_from(isolated);
        else:
            stop=1;
    core_mis = [];
    if G.number_of_nodes()>=1:
        core_mis = nx.maximal_independent_set(G);
        if verbose==True:
            print len(potential_mis), len(core_mis);
        potential_mis.extend(core_mis);
    else:
        if verbose==True:
            print len(potential_mis), len(core_mis);
    return potential_mis, core_mis;
Code example #6
    def get_important_nodes(self, write_to_file = False, out_dir = None):

        centrality = nx.harmonic_centrality(self.db_graph)

        sorted_dict = sorted(centrality.items(), key=operator.itemgetter(1), reverse=True)
        if write_to_file is True:
            file_name = "{0}/salon24_eigenvector_centrality_{1}".format(out_dir, self.index)
            with open(file_name, 'wb') as out_file:
                out_file.write(pickle.dumps(sorted_dict))  # use `pickle.loads` to do the reverse

        number_of_items_to_get = int(math.floor(len(sorted_dict) * self.importance_threshold))

        undirected = self.db_graph.to_undirected()


        try:
            independent_set = nx.maximal_independent_set(undirected)
        except nx.NetworkXUnfeasible:
            independent_set = []

        result = []

        for item in sorted_dict[:number_of_items_to_get]:
            if item[0] in independent_set:
                result.append((item[0], item[1]))

        if len(result) == 0:
            return sorted_dict[:number_of_items_to_get]

        # list of tuples
        return result
Code example #7
def main():
    """ Reads in a value of k and a graph and prints an approximate k-centers solution """

    k = int(sys.argv[1])
    graph_file_path = sys.argv[2]

    input_graph = nx.read_weighted_edgelist(graph_file_path)

    # sort the edges of G by nondecreasing weight
    all_edges = list(input_graph.edges.items())
    all_edges.sort(key=lambda pair: pair[1]['weight'])

    # Construct the squares of the edge-induced subgraphs for each edge subset [1..i]
    power_graphs = []
    for i in range(0, len(all_edges)):
        edges_to_remove = list(map(lambda pair: pair[0], all_edges[i + 1:]))
        induced_graph = nx.restricted_view(input_graph, [], edges_to_remove)
        power_graphs.append(nx.power(induced_graph, 2))

    # Compute a maximal independent set for each power graph
    # If its size is less than k, return it as our approximate solution
    for pow_graph in power_graphs:
        indep_set = nx.maximal_independent_set(pow_graph)
        if len(indep_set) <= k:
            print("k centers are:", indep_set)
            break
Code example #8
File: handle_to_teeth.py Project: shelleywu1921/tsp
def find_comb(F, G, handle_pool):
    counter = 0
    viol_comb_set = list()  # dicts are unhashable, so collect them in a list
    for handle in handle_pool:
        eligible_teeth = find_all_teeth(F, G, handle)
        if len(list(eligible_teeth.nodes())) >= 3:
            for k in range(5):
                odd_teeth = nx.maximal_independent_set(
                    eligible_teeth)  # a list of pairwise disjoint teeth
                if len(odd_teeth) >= 3:
                    if len(odd_teeth) % 2 == 0:
                        odd_teeth.pop()
                    print('Number of teeth: %d' % len(odd_teeth))

                    x_delta_H = x_delta_S(F, handle)
                    LHS = x_delta_H + sum(
                        x_delta_S(F, G.node[T]['vertices']) for T in odd_teeth)
                    comb_surplus = LHS - 3 * len(odd_teeth)
                    if comb_surplus < 1:
                        viol_comb = dict()
                        viol_comb['handle'] = handle
                        viol_comb['teeth'] = odd_teeth
                        viol_comb['comb_surplus'] = comb_surplus
                        viol_comb_set.append(viol_comb)
                        counter += 1
                    print('comb surplus: %.5f' % comb_surplus)
    print('total number of violated comb is %d:' % counter)
    print('And they are:')
    print(viol_comb_set)
    return viol_comb_set
Code example #9
def leaf_removal(g, verbose=False):
    G = g.copy()
    stop = 0
    potential_mis = []
    isolated = [x for x in g.nodes() if nx.degree(g, x) == 0]
    potential_mis.extend(isolated)
    G.remove_nodes_from(isolated)
    while stop == 0:
        deg = G.degree()
        if 1 in deg.values():
            for n in G.nodes_iter():
                if deg[n] == 1:
                    L = n
                    break
            nn = nx.neighbors(G, L)[0]
            G.remove_node(L)
            G.remove_node(nn)
            potential_mis.append(L)
            isolated = [x for x in G.nodes() if nx.degree(G, x) == 0]
            potential_mis.extend(isolated)
            G.remove_nodes_from(isolated)
        else:
            stop = 1
    core_mis = []
    if G.number_of_nodes() >= 1:
        core_mis = nx.maximal_independent_set(G)
        if verbose == True:
            print len(potential_mis), len(core_mis)
        potential_mis.extend(core_mis)
    else:
        if verbose == True:
            print len(potential_mis), len(core_mis)
    return potential_mis, core_mis
Code example #10
def find_stable_set(G, total_surplus_bound, max_teeth_num):
    max_stable_set = nx.maximal_independent_set(G)
    if len(max_stable_set) < 3:
        return None
    max_stable_set.sort(key=lambda x: G.node[x]['surplus'])
    first_node = max_stable_set[0]
    candidate_dom = [first_node]
    total_surplus = G.node[first_node]['surplus']

    for i in range(1, len(max_stable_set)):
        if i % 2 == 1:
            pass
        else:
            i_minus_one_node = max_stable_set[i - 1]
            i_node = max_stable_set[i]
            i_minus_one_surplus = G.node[i_minus_one_node]['surplus']
            i_surplus = G.node[i_node]['surplus']

            if total_surplus + i_minus_one_surplus + i_surplus < total_surplus_bound:
                candidate_dom = candidate_dom + [i_minus_one_node, i_node]
                total_surplus = total_surplus + i_minus_one_surplus + i_surplus
            else:
                break

            if len(candidate_dom) == max_teeth_num:  # up to 5 teeth
                break

    if len(candidate_dom) < 3:  # changed from 5 to 3
        return None
    else:
        return [candidate_dom, total_surplus]
Code example #11
def leaf_removal(g, verbose=False):
    G = g.copy()
    stop = 0
    potential_mis = []
    isolated = [x for x in g.nodes() if nx.degree(g, x) == 0]
    potential_mis.extend(isolated)
    G.remove_nodes_from(isolated)
    while stop == 0:
        deg = dict(G.degree())
        if 1 in deg.values():
            for n in list(G.nodes()):
                if deg[n] == 1:
                    L = n
                    break
            nn = next(iter(G.neighbors(L)))
            G.remove_node(L)
            G.remove_node(nn)
            potential_mis.append(L)
            isolated = [x for x in G.nodes() if nx.degree(G, x) == 0]
            potential_mis.extend(isolated)
            G.remove_nodes_from(isolated)
        else:
            stop = 1
    core_mis = []
    if G.number_of_nodes() >= 1:
        core_mis = nx.maximal_independent_set(G)
        if verbose:
            print(len(potential_mis), len(core_mis))
        potential_mis.extend(core_mis)
    else:
        if verbose:
            print(len(potential_mis), len(core_mis))
    return potential_mis, core_mis
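A quick sanity check for the cleaned-up function above (illustrative only; the random-graph parameters are arbitrary): the nodes it returns should induce no edges in the original graph.

import networkx as nx

g = nx.gnp_random_graph(30, 0.1, seed=42)
potential, core = leaf_removal(g)
# potential must be an independent set of g; core is the part found by
# nx.maximal_independent_set on whatever survives the leaf-removal phase
assert not g.subgraph(potential).edges()
print(len(potential), len(core))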
Code example #12
File: utils.py Project: baohq1595/meta-deep-binning
def metis_partition_groups_seeds(G, maximum_seed_size):
    CC = [cc for cc in nx.connected_components(G)]
    GL = []
    for subV in CC:
        if len(subV) > maximum_seed_size:
            # use metis to split the graph
            subG = nx.subgraph( G, subV )
            nparts = int( len(subV)/maximum_seed_size + 1 )
            ( edgecuts, parts ) = nxmetis.partition( subG, nparts, edge_weight='weight' )
            
            # only add connected components
            for p in parts:
                pG = nx.subgraph( G, p )
                GL += [list(cc) for cc in nx.connected_components( pG )]
            
            # add to group list
            #GL += parts
        else:
            GL += [list(subV)]

    SL = []
    for p in GL:
        pG = nx.subgraph( G, p )
        SL += [nx.maximal_independent_set( pG )]

    return GL, SL
Code example #13
 def test_florentine_family(self):
     G = self.florentine
     indep = nx.maximal_independent_set(G, ["Medici", "Bischeri"])
     assert sorted(indep) == sorted([
         "Medici", "Bischeri", "Castellani", "Pazzi", "Ginori",
         "Lamberteschi"
     ])
Code example #14
def max_indep_set(G):
    n = []
    for i in range(10000):
        g = nx.maximal_independent_set(G)
        k = len(g)
        n.append(k)
    return max(n)
def mc_lower_bound(G):
    """
	INPUT:
	 - "G" Networkx Undirected Graph
	OUTPUT:
	 - "lower bound" list of variables which form a clique in G
	"""
    return nx.maximal_independent_set(nx.complement(G))
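A short usage sketch (not from the original source): any independent set in the complement of G is a clique in G, so the list returned by mc_lower_bound can be verified directly. The Petersen graph is just an arbitrary test case.

import networkx as nx

G = nx.petersen_graph()
clique = mc_lower_bound(G)
# every pair of returned nodes must be adjacent in G
assert all(G.has_edge(u, v) for i, u in enumerate(clique) for v in clique[i + 1:])
print(len(clique), clique)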
Code example #16
def final_comb_surplus(F, G, handle, eligible_teeth):
    stable_set = nx.maximal_independent_set(
        eligible_teeth)  #,[teeth for teeth in candidate_dom])
    if len(stable_set) % 2 == 0:
        stable_set.pop()
    print('number of teeth in our final comb: %d' % len(stable_set))
    return find_delta_weight(F, handle) + sum(
        find_delta_weight(F, G.node[node]['vertices']) for node in stable_set)
Code example #17
def gen(a):
    # Regenerate the maximal independent set for the subgraph induced by the
    # nodes in a (G is the module-level graph), so the result matches the argument
    temp = G.subgraph(a)
    lis = nx.maximal_independent_set(temp)
    return lis
Code example #18
    def compute_features(self):

        self.add_feature(
            "size_max_indep_set",
            lambda graph: len(nx.maximal_independent_set(graph)),
            "The number of nodes in the maximal independent set",
            InterpretabilityScore(3),
        )
Code example #19
File: indset.py Project: lm2221/algorithms_code
def networkx_max_independent_set(G):
    best = 0
    for i in range(10):
        current = len(nx.maximal_independent_set(G))
        print(current)
        if current > best:
            best = current
    return best
Code example #20
def generate_mis(G, sample_size, nodes=None):

    """Returns a random approximate maximum independent set.

    Parameters
    ----------
    G: NetworkX graph
       Undirected graph

    nodes: list, optional
        a list of nodes the approximate maximum independent set must contain.

    sample_size: int
        number of maximal independent sets sampled from

    Returns
    -------
    max_ind_set: list or None
        list of nodes in the apx-maximum independent set
        NoneType object if any two specified nodes share an edge

    """

    # list of maximal independent sets
    max_ind_set_list=[]

    # iterates from 0 to the number of samples chosen
    for i in range(sample_size):

        # for each iteration generates a random maximal independent set that
        # contains the nodes specified in `nodes` (if any)
        max_ind_set = nx.maximal_independent_set(G, nodes=nodes, seed=i)

        # if set is not a duplicate
        if max_ind_set not in max_ind_set_list:

            # appends set to the above list
            max_ind_set_list.append(max_ind_set)

        # otherwise pass duplicate set
        else:
            pass

    # list of the lengths of the maximal independent sets
    mis_len_list=[]

    # iterates over the above list
    for i in max_ind_set_list:

        # appends the lengths of each set to the above list
        mis_len_list.append(len(i))

    # extracts the largest maximal independent set, i.e., the maximum independent set (MIS)
    ## Note: this MIS may not be unique as it is possible there are many MISs of the same length
    max_ind_set = max_ind_set_list[mis_len_list.index(max(mis_len_list))]

    return max_ind_set
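A minimal usage sketch (my own; the graph and sample size are arbitrary):

import networkx as nx

G = nx.erdos_renyi_graph(30, 0.3, seed=0)
apx_mis = generate_mis(G, sample_size=100)
print(len(apx_mis), sorted(apx_mis))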
Code example #21
 def _generate_nlist():
     G = self.graph
     # TODO: imaginative, but shit. revise.
     isolates = set(nx.isolates(G))
     independent = set(nx.maximal_independent_set(G)) - isolates
     dominating = set(nx.dominating_set(G)) - independent - isolates
     rest = set(G.nodes()) - dominating - independent - isolates
     nlist = list(map(sorted, filter(None, (isolates, independent, dominating, rest))))
     return nlist
Code example #22
def independent_numbers(G, nodes=None, seed=None):
    if not G:
        raise Exception("You must provide a graph")
    if nx.is_directed(G):
        C = G.to_undirected()
    else:
        C = G
    return nx.maximal_independent_set(C, nodes=nodes, seed=seed)
def vc_upper_bound(G):
	"""
	INPUT:
	 - "G" Networkx Undirected Graph
	OUTPUT:
	 - "upper bound" list of variables which form a vertex cover in G
	"""
	res = nx.maximal_independent_set(G)
	return list(set(list(G.nodes()))-set(res))
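A quick check (illustrative, on an arbitrary built-in graph): the complement of a maximal independent set is a vertex cover, so every edge should have at least one endpoint in the list returned by vc_upper_bound.

import networkx as nx

G = nx.karate_club_graph()
cover = vc_upper_bound(G)
assert all(u in cover or v in cover for u, v in G.edges())
print(len(cover), "of", G.number_of_nodes(), "nodes form the cover")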
Code example #24
def b(edges, neighbours):
    # build up a graph
    G = nx.Graph()
    G.add_nodes_from(neighbours)
    G.add_edges_from(edges)
     
    # Independent set
    maximal_iset = nx.maximal_independent_set(G)
    return len(maximal_iset)
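A tiny usage example (mine): for a path on four nodes, every maximal independent set has exactly two nodes, so b returns 2.

edges = [(0, 1), (1, 2), (2, 3)]
neighbours = [0, 1, 2, 3]
print(b(edges, neighbours))  # prints 2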
Code example #25
File: test_mis.py Project: kswgit/networkx
 def test_random_graphs(self):
     """Generate 50 random graphs of different types and sizes and
     make sure that all sets are independent and maximal."""
     for i in range(0, 50, 10):
         G = nx.random_graphs.erdos_renyi_graph(i * 10 + 1, random.random())
         IS = nx.maximal_independent_set(G)
         assert_false(list(G.subgraph(IS).edges()))
         neighbors_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS))
         for v in set(G.nodes()).difference(IS):
             assert_true(v in neighbors_of_MIS)
Code example #26
 def f38(self):
     start = 0
     try:
         d = nx.maximal_independent_set(self.G)
         res = len(d)
     except nx.exception.NetworkXUnfeasible:
         res = "ND"
     stop = 0
     # self.feature_time.append(stop - start)
     return res
Code example #27
File: test_mis.py Project: nickp60/Ragout
 def test_random_graphs(self):
     """Generate 50 random graphs of different types and sizes and
     make sure that all sets are independent and maximal."""
     for i in range(0, 50, 10):
         G = nx.random_graphs.erdos_renyi_graph(i * 10 + 1, random.random())
         IS = nx.maximal_independent_set(G)
         assert_false(G.subgraph(IS).edges())
         neighbors_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS))
         for v in set(G.nodes()).difference(IS):
             assert_true(v in neighbors_of_MIS)
Code example #28
 def f38(self):
     start = 0
     try:
         d = nx.maximal_independent_set(self.G)
         res = len(d)
     except nx.exception.NetworkXUnfeasible:
         res = "ND"
     stop = 0
     # self.feature_time.append(stop - start)
     return res
Code example #29
def find_max_independent_set(graph, params):
    """Find the maximum independent set of an input graph given some optimized QAOA parameters.

    The code you write for this challenge should be completely contained within this function
    between the # QHACK # comment markers. You should create a device, set up the QAOA ansatz circuit
    and measure the probabilities of that circuit using the given optimized parameters. Your next
    step will be to analyze the probabilities and determine the maximum independent set of the
    graph. Return the maximum independent set as an ordered list of nodes.

    Args:
        graph (nx.Graph): A NetworkX graph
        params (np.ndarray): Optimized QAOA parameters of shape (2, 10)

    Returns:
        list[int]: the maximum independent set, specified as a list of nodes in ascending order
    """

    max_ind_set = []

    # QHACK #
    '''
    cost_h, mixer_h = qml.qaoa.min_vertex_cover(graph, constrained=False)
    
    def qaoa_layer(gamma, alpha):
        qml.qaoa.cost_layer(gamma, cost_h)
        qml.qaoa.mixer_layer(alpha, mixer_h)
    
    biggest = 0
    for i in graph.edges():
        if i[0] > biggest: biggest = i[0]
        if i[1] > biggest: biggest = i[1]
    
    wires = range(biggest+1)
    depth = 10

    def circuit(params):
        for w in wires:
            qml.Hadamard(wires=w)
        qml.layer(qaoa_layer, depth, params[0], params[1])
    
    dev = qml.device("default.qubit", wires=wires)
    @qml.qnode(dev)
    def probability_circuit(gamma, alpha):
        circuit([gamma, alpha])
        return qml.probs(wires=wires)


    probs = probability_circuit(params[0], params[1])'''
    from networkx.algorithms import approximation
    max_ind_set = nx.maximal_independent_set(graph, approximation.maximum_independent_set(graph))
    max_ind_set.sort()
    
    # QHACK #

    return max_ind_set
Code example #30
def find_comb(F, G, handle_pool):
    global newfile, light_handles
    global acceptable_handle_csb

    counter = 0
    viol_comb_set = list()

    for handle in handle_pool:
        handle_counter = 0  # prevents duplication of handles in the list light_handles

        newfile.write('\n Handle: \n')
        newfile.write(repr(handle) + '\n')

        eligible_teeth = find_all_teeth(F, G, handle)
        if len(list(eligible_teeth.nodes())) >= 3:
            for k in range(10):
                odd_teeth = nx.maximal_independent_set(
                    eligible_teeth)  # this is a set
                if len(odd_teeth) >= 3:
                    if len(odd_teeth) % 2 == 0:
                        odd_teeth.pop()

                    newfile.write(' Maximal disjoint teeth set: \n')
                    newfile.write(repr(odd_teeth) + '\n')
                    print('Number of disjoint teeth: %d' % len(odd_teeth))
                    newfile.write(' Number of disjoint teeth: %d \n' %
                                  len(odd_teeth))

                    x_delta_H = x_delta_S(F, handle)
                    LHS = x_delta_H + sum(
                        x_delta_S(F, G.node[T]['vertices']) for T in odd_teeth)
                    comb_surplus = LHS - 3 * len(odd_teeth)
                    if comb_surplus <= acceptable_handle_csb and handle_counter == 0:
                        light_handles.append(handle)
                        handle_counter += 1
                    if comb_surplus < 1:
                        viol_comb = dict()
                        viol_comb['handle'] = handle
                        viol_comb['teeth'] = odd_teeth
                        viol_comb['comb_surplus'] = comb_surplus
                        viol_comb_set.append(viol_comb)

                        counter += 1
                    newfile.write(' comb surplus (<1.0 is good!): %.5f \n\n' %
                                  comb_surplus)
                    print('comb surplus: %.5f' % comb_surplus)
    newfile.write('total number of violated comb is %d: \n ' % counter)
    newfile.write('And they are: \n ')
    newfile.write(repr(viol_comb_set) + '\n')

    print('total number of violated comb is %d:' % counter)
    print('And they are:')
    print(viol_comb_set)

    return viol_comb_set
Code example #31
File: benchmark_MIS.py Project: xuwei684/mrbait
def approximateIndependentSet(nodes, num):
    # needs: numpy as np, networkx as nx, and
    # `from networkx.utils import powerlaw_sequence` (NetworkX 1.x API)
    array = np.empty(num)
    for i in range(num):
        z = nx.utils.create_degree_sequence(nodes,powerlaw_sequence)
        G = nx.configuration_model(z)
        graph=nx.Graph(G)
        graph.remove_edges_from(graph.selfloop_edges())
        new = nx.maximal_independent_set(graph, nodes=None)
        array[i] = len(new)
    avg = np.average(array)
    print("Average number of nodes: ",avg)
Code example #32
def draw_independent_set(ax):
    BasicGraphSet.ax_set(ax, 'Max Independent Set')
    g = nx.Graph()
    g.add_edges_from([('a', 'b'), ('b', 'c'), ('b', 'd'), ('c', 'd'),
                      ('d', 'a'), ('c', 'e'), ('e', 'a'), ('b', 'e'),
                      ('a', 'c')])
    # only one maximal independent set, e must be in this independent set
    indep_set = nx.maximal_independent_set(g, ['e'])
    node_colors = BasicGraphSet.set_property_for_nodes(g.nodes, indep_set, 'r',
                                                       'g')
    BasicGraphSet.basic_draw_color(g, ax, node_colors=node_colors)
Code example #33
def maximalIndepSet(encoder):
    import networkx as nx

    g = nx.Graph()

    edges = [(a1.name, a2.name) for a1, a2 in encoder.mutexes]

    g.add_edges_from(edges)

    m = nx.maximal_independent_set(g)

    return len(m)
Code example #34
def main():
	# build up a graph
	filename = '../../florentine_families_graph.gpickle'
	G = nx.read_gpickle(filename)

	# Independent set
	maximal_iset = nx.maximal_independent_set(G)
	out_file = 'florentine_families_graph_maximal_iset.png'
	PlotGraph.plot_graph(G, filename=out_file, colored_nodes=maximal_iset)

	maximum_iset = nxaa.maximum_independent_set(G)
	out_file = 'florentine_families_graph_maximum_iset.png'
	PlotGraph.plot_graph(G, filename=out_file, colored_nodes=maximum_iset)
Code example #35
def main():
    # build up a graph
    filename = '../../florentine_families_graph.gpickle'
    G = nx.read_gpickle(filename)

    # Independent set
    maximal_iset = nx.maximal_independent_set(G)
    out_file = 'florentine_families_graph_maximal_iset.png'
    PlotGraph.plot_graph(G, filename=out_file, colored_nodes=maximal_iset)

    maximum_iset = nxaa.maximum_independent_set(G)
    out_file = 'florentine_families_graph_maximum_iset.png'
    PlotGraph.plot_graph(G, filename=out_file, colored_nodes=maximum_iset)
Code example #36
File: test.py Project: Ritvik29/Discrete_Maths
def independent_sets(G):
    nodes = G.nodes()
    done_nodes = []

    ISs = []
    for v in nodes:
        if v not in done_nodes:
            IS = nx.maximal_independent_set(G, [v])
            done_nodes.extend(IS)  # nodes already covered do not seed a new set
            sorted_IS = sorted(IS)

            if sorted_IS not in ISs: ISs.append(sorted_IS)

    print(ISs)
Code example #37
def CNDP_serial(k, G):
    GSize = G.number_of_nodes()  # node count; G.size() would give the edge count
    NodeList = list(G.nodes())
    component = list()
    for i in range(GSize):
        component.append(0)

    max_component = GSize
    sizes = list()
    marked = list()
    for i in range(max_component):
        sizes.append(0)
    for i in range(max_component):
        marked.append(0)

    MIS = nx.maximal_independent_set(G)

    component_id = 0
    forbidden_count = 0
    print(len(MIS))
    for i in range(GSize):
        if(i in MIS):
            component[i] = component_id
            # print(len(sizes))
            sizes[component_id] = 1
            component_id += 1
        else:
            forbidden_count += 1
    print(forbidden_count)
    if forbidden_count < k:
        X = random.sample(range(0, len(MIS)), k - forbidden_count)
        for x in range(len(X)):
            sizes[component[NodeList[x]]] = 0
            component[NodeList[x]] = 0
    print(len(MIS))
    res = []
    while(forbidden_count > k):
        # print(component)
        
        cand_node = next_candidate(G, component, sizes, marked)
        # res.append(cand_node)
        united_comp = any_neighbour_component(G, cand_node, component, marked)
        if(united_comp != -1):
            unite(G, cand_node, marked, united_comp, sizes, component)
            forbidden_count -= 1

    print(forbidden_count, MIS)
    # print(len(MIS))
    return list(set(NodeList) - set(MIS))
Code example #38
File: features.py Project: MichelleBadri/MassExodus
def compute_features(G,checks=False):
    """ Computes features of the graph. """

    n = G.order()
    ### read in graph as IGraph network
    edge = list(G.edges())
    g = ig.Graph()
    for i in xrange(1,n+2):
        g.add_vertex(i)
    g.add_edges(edge)

    ## LARGEST EIGENVALUE
    # nmeval = max(nx.adjacency_spectrum(G)) / n  ### Original Calculation for largest Eigenval. Too slow 
    # IGRAPH REWORK
    e = g.evcent(directed=False, scale= False,return_eigenvalue=True)
    nmeval= round((max(e[0])),5) 
    
    ## NETWORKX CONNECTED COMPONENTS 
    # comps = nx.number_connected_components(G)  ### Original Calculation too slow 
    # IGRAPH REWORK
    comps = len(g.components()) - 1 

    ## MAXIMAL INDEPENDENT SET 
    mis = len(nx.maximal_independent_set(G)) / n ### Networkx version is fastest

    ## DENSITY 
    # density = nx.density(G) ### Original Calculation too slow
    # IGRAPH REWORK
    density = g.density() 

    ## CLUSTERING COEFFICIENT
    # cc = nx.average_clustering(G) ### Original Calculation is too slow
    # IGRAPH REWORK
    cc= g.transitivity_avglocal_undirected()

    # tris = sum(nx.triangles(G).values()) / n #### TOO SLOW
    # fracdeg1 = sum([len(G.neighbors(u)) == 1 for u in G]) / n
    # fracdeg0 = sum([len(G.neighbors(u)) == 0 for u in G]) / n

    # Gcc_list = list(nx.connected_component_subgraphs(G))
    # Gcc = Gcc_list[0]
    # ngcc = Gcc.order()
    # mgcc = Gcc.size()


    # return (nmeval,comps,mis,density,cc,tris,fracdeg1,fracdeg0,ngcc,mgcc)
    return (nmeval,comps,mis,density,cc)
Code example #39
	def korner_entropy(self, definitiongraph):
		nodes=definitiongraph.nodes()
		stable_sets=[]
		for v in nodes:
			stable_sets.append(nx.maximal_independent_set(definitiongraph.to_undirected(),[v]))
		print "korner_entropy(): Stable Independent Sets:",stable_sets
		entropy=0.0
		prob_v_in_stableset=0.0
		for v in nodes:
			for s in stable_sets:
				if v in s:
					prob_v_in_stableset=math.log(0.999999)
				else:
					prob_v_in_stableset=math.log(0.000001)
				entropy += (-1.0) * float(1.0/len(nodes)) * prob_v_in_stableset
			if entropy < self.entropy:
				self.entropy = entropy
			entropy=0.0
		return self.entropy
Code example #40
File: features.py Project: bdelgado014/MassExodus
def compute_features(G,checks=False):
    """ Computes features of the graph. """

    n = G.order()
    
    nmeval = max(nx.adjacency_spectrum(G)) / n
    comps = nx.number_connected_components(G)
    mis = len(nx.maximal_independent_set(G)) / n
    density = nx.density(G)
    cc = nx.average_clustering(G)
    tris = sum(nx.triangles(G).values()) / n
    fracdeg1 = sum([len(G.neighbors(u)) == 1 for u in G]) / n
    fracdeg0 = sum([len(G.neighbors(u)) == 0 for u in G]) / n

    Gcc_list = list(nx.connected_component_subgraphs(G))
    Gcc = Gcc_list[0]
    ngcc = Gcc.order()
    mgcc = Gcc.size()
    
    return (nmeval,comps,mis,density,cc,tris,fracdeg1,fracdeg0,ngcc,mgcc)
Code example #41
File: kcenter.py Project: weddige/kcenter
def hochbaum_shmoys(k, graph):
    """This function gives a 2-approximation for the k-center problem on a complete graph.
    See "A best possible heuristic for the k-center problem" by
    Dorit S. Hochbaum and David B. Shmoys for more details.

    This implementation follows "k-Center in Verkehrsnetzwerken – ein Vergleich geometrischer
    und graphentheoretischer Ansätze" by Valentin Breuß.

    :param k: int
    :param graph: Graph
    :return: list
    """
    edges = list()
    for edge in sorted(graph.edges(data=True), key=lambda e: e[2]['weight']):
        edges.append(edge)
        squared = networkx.Graph(edges)
        squared.add_nodes_from(graph.nodes())
        squared = squared_graph(squared)
        maximal_independent_set = networkx.maximal_independent_set(squared)
        if len(maximal_independent_set) <= k:
            return maximal_independent_set
Code example #42
File: color_maximal_sets.py Project: M0nd4/discopt
    def color_graph(G, max_colors):
        """Color successive maximal subsets until all vertices are colored,
        simple program that returns decent but not optimal results"""
        subgraph = G.copy()
        n = len(G)
        constraints = {i: list(reversed(range(n))) for i in G}
        cols = {i: None for i in G}
        num_colored = len([i for i in cols if cols[i] != None])
        # print str(len(G))

        count = 0
        while(num_colored < n):
            # color the maximal set
            ind_set = nx.maximal_independent_set(subgraph)
            if len(ind_set) == 0:
                return 0, []
            for node in ind_set:
                col = constraints[node].pop()
                cols[node] = col
                num_colored += 1
                subgraph.remove_node(node)
                for nbr in G[node]:
                    if col in constraints[nbr]:
                        constraints[nbr].remove(col)

            # print "ind set length: %d, col %d" % (len(ind_set), col)
            
            # check constraints
            # for node in G:
            #     for nbr in G[node]:
            #         assert cols[node] not in constraints[nbr],\
            #             "Constraints violated after %d iteration" % (count)
            #         if cols[node]:
            #             assert all([cols[node] != cols[nbr]]),\
            #                 "Constraints violated after %d iteration, (%d, %d)"\
            #                 % (count, node, nbr)
            count += 1

        return len(set(cols.values())), cols.values()
Code example #43
(1, 7),
(1, 8),
(2, 11),
(2, 16),
(2, 17),
(3, 14),
(3, 16),
(3, 17),
(4, 7),
(4, 13),
(4, 17),
(5, 6),
(5, 11),
(6, 18),
(9, 12),
(10, 13),
(11, 17),
(13, 15),
(15, 17),
(16, 19)]
graph=nx.Graph()

graph.add_edges_from(graphEdges)
print(graph.nodes())
print(graph.edges())
print(min_maximal_matching(graph))
print(graph)

g1=nx.barabasi_albert_graph(1000,400)
print(nx.maximal_independent_set(g1))
solution = [-1] * g1.number_of_nodes()  # one entry per node
Code example #44
File: test_mis.py Project: kswgit/networkx
 def test_bipartite(self):
     G = nx.complete_bipartite_graph(12, 34)
     indep = nx.maximal_independent_set(G, [4, 5, 9, 10])
     assert_equal(sorted(indep), list(range(12)))
Code example #45
File: test_mis.py Project: kswgit/networkx
 def test_florentine_family(self):
     G = self.florentine
     indep = nx.maximal_independent_set(G, ["Medici", "Bischeri"])
     assert_equal(sorted(indep), sorted(["Medici", "Bischeri", "Castellani", "Pazzi", "Ginori", "Lamberteschi"]))
Code example #46
File: test_mis.py Project: kswgit/networkx
 def test_K55(self):
     """Maximal independent set: K55"""
     G = nx.complete_graph(55)
     for node in G:
         assert_equal(nx.maximal_independent_set(G, [node]), [node])
def graph_optimize(query_results):
##NOTE:  At this point, it is assumed that the variable query_results is 
#          a list of all potential courses (as Section() objects), returned 
#          from some database query or web scraping action.

#  populate this list with Section() objects from the query
#    query_results = []

#  populate this with the total number of sections that should be on
#  the calendar, assuming there are no scheduling impossibilities 
#  (e.g.  1 tutorial, 1 lab and 1 lecture each for 3 courses would 
#  result in requiredNumberOfSections = 9)
    requiredSections = types_within_subset(query_results)
    requiredNumberOfSections = len(requiredSections)

  #remove all empty sections from the query
    if (UserPrefs.RespectRegistration):
        query_results = [i for i in query_results if i.remainingSeats > 0 ]

#    for i in range(10):
#        print "!"*100
#    for CRN in query_results:
#        CRN.printToScreen()

#  SOME CONFIGURATION:
    calculate_how_many = config.generate_this_many_schedules
    max_attempts = config.maximum_attempts_per_schedule
    write_stats = config.write_out_stats
##start code

#    query_results = generate_dense_data() #uncomment this and the next line to use fake course data
#    requiredNumberOfSections = 9

#Construct the graph object
    G = nx.Graph()
#    iG = ig.Graph()
#add all potential courses as nodes to the graph
    for Sec in query_results:
        G.add_node(Sec, label=Sec.course[0:2],selected=1.0)


    if len(UserPrefs.preferredCRNs) > 0:
        preferred_crn(G.nodes())



    # If the user wants days off, create pseudo-events that span every day.  
    # Weight them so that if possible, they will be selected.
    if UserPrefs.MaximizeDaysOff:
        for pseudoBlock in pseudo_blocks.add_days_off_blocks():
            G.add_node(pseudoBlock, label="XX", selected = 3.0, score = 1.0 )

    # If the user prefers to have mornings (or afternoons or evenings) FREE, then
    # create the corresponding pseudo-blocks to conflict with all courses at those times.
    if not(UserPrefs.PreferTimeOfDay == ""):
        for pseudoBlock in pseudo_blocks.TimeCut():
            G.add_node(pseudoBlock, label="XX", selected = 3.0, score = 1.0 )


#map the type to a float for coloring the graph output
# {
    typemapping = { 'Lec': styles.colours.lec, 'Tut':styles.colours.tut, 'Lab':styles.colours.lab, 'Oth': styles.colours.oth }
    colors = [typemapping[node.cType] for node in G.nodes() ]
# }


    all_edges = []
#  Add edges between the nodes, representing conflicts (two courses for which a user
#  cannot be simultaneously registered.
    for i,iSec in enumerate(G.nodes()):
        for j,jSec in enumerate(G.nodes()):
            if i<j:
                have_edge = False
                if ( (iSec.cType == jSec.cType) and (jSec.course==iSec.course)):
                    # If the two sections are from the same course
                    # and are the same type, then they're incompatible.
                    # (e.g. can't take two physics tutorials)
                    have_edge = True
                else:
                    for itimeslot in iSec.timeslots:
                        for jtimeslot in jSec.timeslots:
                            #If they overlap in time:
                            if (  (itimeslot.eTime >= jtimeslot.sTime) and (itimeslot.sTime <= jtimeslot.eTime) and itimeslot.day==jtimeslot.day ):
                                have_edge = True
                            elif (  (jtimeslot.eTime >= itimeslot.sTime) and (jtimeslot.sTime <= itimeslot.eTime) and itimeslot.day==jtimeslot.day ):
                                have_edge = True
                            elif (itimeslot.sTime == jtimeslot.sTime and itimeslot.day==jtimeslot.day ):
                                have_edge = True
                            elif (jtimeslot.sTime == itimeslot.sTime and jtimeslot.day==itimeslot.day ):
                                have_edge = True

                if have_edge:
                #If have_edge is, at this point, true then
                #these two nodes are incompatible with each other.
                #Add an edge between them
                    G.add_edge(iSec,jSec)


    all_valid = []
    consolation = []
    best_score = -1.0e9

    globalFailure = True


    if config.make_graph_image:
        import matplotlib.pyplot as plt
        typemapping = { 'Lec': styles.colours.lec, 'Tut':styles.colours.tut, 'Lab':styles.colours.lab, 'Oth': styles.colours.oth }
        colors = [typemapping[node.cType] for node in G.nodes() ]

        #print colors
        plt.figure(figsize=[24,20])
        nx.draw_spring(G,
#           with_labels=True,
#           labels=nx.get_node_attributes(G,'label'),
            node_color=colors,
            node_size=500,
#           linewidths=nx.get_node_attributes(G,'selected').values(),
        )
        plt.axis('off')
        plt.savefig('graph.png')




    # Here we begin generating many different schedules.  After each schedule
    # is found (and verified to contain the correct number of courses), it is scored and
    # added to a list of all valid courses.
    for potentialSchedule in xrange(calculate_how_many):
        #print "Attempting to build schedule",potentialSchedule
        successfully_scheduled_sections = 0
        tries = 0
        bestTry = []
        bestTryCount = 0
        failure = True
        for tries in xrange(max_attempts):
            # compute the maximal independent set.  
            # This is NOT the MAXIMUM independent set. Thus we must loop a few times
            # to get the largest possible set.
            thissched = nx.maximal_independent_set(G)

            # We need to count the number of pseudo-blocks in the generated schedule since we
            # must only break out of this loop once we have enough sections in our schedule.  
            # Pseudo blocks count, by default, and must be subtracted.
            numberofblanks = 0
            for CRN in thissched:
                if CRN.CRN == "55555":
                    numberofblanks+=1
            successfully_scheduled_sections = len(thissched)

            if (successfully_scheduled_sections >= requiredNumberOfSections + numberofblanks):
                # If there are enough CRNs in the calculated schedule, then it didn't fail and
                # we should break out of the loop.
                failure=False
                break
            else:
            # If there are not enough CRNs present, then keep track of the best schedule, 
            # but try again to get a better one.
                if successfully_scheduled_sections > bestTryCount:
                    bestTryCount = successfully_scheduled_sections
                    bestTry = thissched
                thissched = bestTry

        #Build a timetable object to hold schedule, notes, etc.
        thisTimeTable = Timetable(thissched, compute_schedule_score(thissched))

        #Remove all pseudo events now.  They've served their purpose.
        newsched = []
        for i in range(len(thisTimeTable.Schedule)):
            if not(thisTimeTable.Schedule[i].CRN=="55555"):
                newsched.append(thisTimeTable.Schedule[i])
        thisTimeTable.Schedule = newsched

        if not(failure):
            thisTimeTable.isValid = "VALID"
            globalFailure = False
            # all_valid is a list of all valid timetable objects
            all_valid.append(thisTimeTable)
        else:
            #consolation is a list of all invalid timetables.
            consolation.append(thisTimeTable)
            thisTimeTable.isValid = "INVALID"


    # Sort the valid timetable list by score, and the consolation list by
    # the number of events (we'd rather a lower score, if it has more of the
    # requested courses).
    all_valid = sorted(all_valid, key = lambda x: x.score, reverse=True)
    consolation = sorted(consolation, key = lambda x: len(x.Schedule), reverse=True)

   # if globalFailure:
        #print "FAILURE to find even one valid schedule"

    good_schedules = len(all_valid)
    for tt in all_valid:
        tt.generateKey()

    unique_valid = len(set(all_valid))

    if len(all_valid) > 0:
        max_score = all_valid[0].score
    else:
        max_score = 0.0


    if config.write_out_stats:
        stats = open("statistics.txt",'a')
        stats.write("{0}\t{1}\t{2}\t{3}\t{4}\n".format(
            config.generate_this_many_schedules,
            config.maximum_attempts_per_schedule,
            unique_valid,
            len(all_valid),
            max_score
        ))
        stats.close()




    #print "UNIQUE SCHEDULES POSSIBLE: {0} (of {1} valid schedules)".format(unique_valid,len(all_valid))

    if good_schedules >= config.number_of_schedules_to_show_user:
        schedules_to_return = all_valid[0:config.number_of_schedules_to_show_user]
    else:
        schedules_to_return = all_valid[0:good_schedules] + consolation[0:config.number_of_schedules_to_show_user - good_schedules]

    for tt in schedules_to_return:
        if not(UserPrefs.RespectRegistration):
            tt.warnings.append("WARNING: You chose to ignore current registration numbers. Sections on the above timetable could be full.");
        #print "  Score --> ",tt.score
        #print tt.key
        missingCourses(tt,requiredSections)
        tt.notes.append("List of CRNs displayed on this time table:")
        last_course = ""
        stringg = ""
        for CRN in sorted(tt.Schedule, key=lambda x: x.course):
            if CRN.remainingSeats == 0:
                tt.warnings.append(CRN.course + " " + CRN.cType + " is full (CRN " + str(CRN.CRN) + ").")
            if not(CRN.course == last_course):
                tt.notes.append(stringg)
                stringg = CRN.course + ": "
                last_course = CRN.course
            else:
                stringg = stringg + ", "
            stringg = stringg + CRN.CRN #+ ", "
        tt.notes.append(stringg)

#        #print len(tt.Schedule)
#        for wn in tt.warnings:
#            print wn
#        for nn in tt.notes:
#            print nn

    #print "FOUND",len(schedules_to_return),"schedules to return"

    returnData = []
    for thisSchedule in schedules_to_return:
        thisSchedule.w1JSON,thisSchedule.w2JSON = JSON_dump(thisSchedule.Schedule)
        returnData.append(  [ thisSchedule.w1JSON,thisSchedule.w2JSON,thisSchedule.notes,thisSchedule.warnings,thisSchedule.score     ]  )


    return returnData
Code example #48
File: shannon_capacity.py Project: pombreda/python-1
import networkx as nx
import numpy as np

G = nx.cycle_graph(2)
Gi = G
for i in range(1, 10):
    # note: the Shannon capacity is defined over the strong product
    # (nx.strong_product); the tensor product used here only gives a related
    # quantity, and alpha is itself only approximated by a random maximal
    # independent set
    Gi = nx.tensor_product(Gi, G)
    alpha = len(nx.maximal_independent_set(Gi))
    theta = alpha ** (1 / float(i + 1))
    print(i, alpha, theta)
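For comparison, a sketch of my own (not from the linked project) that uses the strong product, over which the Shannon capacity is actually defined; alpha is still only approximated by a random maximal independent set, so the printed values are rough lower bounds.

import networkx as nx

C5 = nx.cycle_graph(5)
Gi = C5
for k in range(2, 5):
    Gi = nx.strong_product(Gi, C5)
    alpha = len(nx.maximal_independent_set(Gi))
    print(k, alpha, alpha ** (1 / k))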
Code example #49
File: ind_set.py Project: M0nd4/discopt
nx.isolates(G)

# Data
G = readData('../data/gc_20_1')

# successively color maximal subsets until all vertices are colored
subgraph = G.copy()
n = len(G)
constraints = {i: list(reversed(range(n))) for i in G}
cols = {i: None for i in G}
num_colored = len([i for i in cols if cols[i] != None])

count = 0
while(num_colored < n):
    # color the maximal set
    ind_set = nx.maximal_independent_set(subgraph)
    for node in ind_set:
        col = constraints[node].pop()
        cols[node] = col
        for nbr in G[node]:
            if col in constraints[nbr]:
                constraints[nbr].remove(col)

    # Now do greedy coloring
    # subgraph = G.copy()
    colored = [i for i in cols if cols[i] != None]
    num_colored = len(colored)
    for node in colored:
        if node in subgraph:
            subgraph.remove_node(node)
Code example #50
File: test_networkx.py Project: brainey421/libbvg
nx.periphery(Gcc)
nx.radius(Gcc)

# flows (seg fault currently)
#nx.max_flow(Gcc, 1, 2)
#nx.min_cut(G, 1, 2)

# isolates
nx.is_isolate(G, 1)     # False
nx.is_isolate(G, 5)     # True

# HITS
nx.hits(G,max_iter=1000)  # cannot converge?

# maximal independent set
nx.maximal_independent_set(G)

# shortest path
nx.shortest_path(G)     # need "predecessors_iter"
nx.all_pairs_shortest_path(G)
nx.all_pairs_shortest_path_length(G)

nx.predecessor(G, 1)
nx.predecessor(G, 1, 378)

nx.dijkstra_path(G, 1, 300)
nx.dijkstra_path_length(G, 1, 300)
nx.single_source_dijkstra_path(G, 1)
nx.single_source_dijkstra_path_length(G, 1)
nx.all_pairs_dijkstra_path(G)
nx.all_pairs_dijkstra_path_length(G)
Code example #51
File: test_mis.py Project: networkx/networkx
 def test_random_seed(self):
     G = nx.complete_graph(5)
     for node in G:
         assert_equal(nx.maximal_independent_set(G, [node], seed=1), [node])